From 805d878f428a34b5b43b94d4c17dfea62f6e6dec Mon Sep 17 00:00:00 2001 From: Sriman Achanta <68172138+srimanachanta@users.noreply.github.com> Date: Tue, 24 Oct 2023 17:25:38 -0400 Subject: [PATCH 1/3] Update spotless config --- build.gradle | 40 +++++++++++++++++++++++++++++++++------- 1 file changed, 33 insertions(+), 7 deletions(-) diff --git a/build.gradle b/build.gradle index f593638b90..58f9d98a94 100644 --- a/build.gradle +++ b/build.gradle @@ -1,5 +1,5 @@ plugins { - id "com.diffplug.spotless" version "6.19.0" + id "com.diffplug.spotless" version "6.22.0" id "com.github.johnrengelman.shadow" version "7.1.2" id "com.github.node-gradle.node" version "3.1.1" apply false id "edu.wpi.first.GradleJni" version "1.1.0" @@ -49,18 +49,44 @@ wpilibTools.deps.wpilibVersion = wpilibVersion spotless { java { + target fileTree('.') { + include '**/*.java' + exclude '**/build/**', '**/build-*/**', "photon-core\\src\\main\\java\\org\\photonvision\\PhotonVersion.java", "photon-lib\\src\\main\\java\\org\\photonvision\\PhotonVersion.java" + } toggleOffOn() googleJavaFormat() - indentWithTabs(2) - indentWithSpaces(4) removeUnusedImports() trimTrailingWhitespace() endWithNewline() } - java { - target "**/*.java" - targetExclude("photon-core/src/main/java/org/photonvision/PhotonVersion.java") - targetExclude("photon-lib/src/main/java/org/photonvision/PhotonVersion.java") + groovyGradle { + target fileTree('.') { + include '**/*.gradle' + exclude '**/build/**', '**/build-*/**' + } + greclipse() + indentWithSpaces(4) + trimTrailingWhitespace() + endWithNewline() + } + format 'xml', { + target fileTree('.') { + include '**/*.xml' + exclude '**/build/**', '**/build-*/**', "**/.idea/**" + } + eclipseWtp('xml') + trimTrailingWhitespace() + indentWithSpaces(2) + endWithNewline() + } + format 'misc', { + target fileTree('.') { + include '**/*.md', '**/.gitignore' + exclude '**/build/**', '**/build-*/**' + } + trimTrailingWhitespace() + indentWithSpaces(2) + endWithNewline() } } From 
816b14b2f6c7aa7335d936d3c0f611784c1fe907 Mon Sep 17 00:00:00 2001 From: Sriman Achanta <68172138+srimanachanta@users.noreply.github.com> Date: Tue, 24 Oct 2023 17:27:41 -0400 Subject: [PATCH 2/3] Formatting fixes --- .../photonvision/common/ProgramStatus.java | 8 +- .../configuration/CameraConfiguration.java | 282 ++-- .../common/configuration/ConfigManager.java | 482 +++--- .../common/configuration/ConfigProvider.java | 26 +- .../common/configuration/HardwareConfig.java | 330 ++-- .../configuration/HardwareSettings.java | 10 +- .../configuration/LegacyConfigProvider.java | 814 +++++----- .../common/configuration/NetworkConfig.java | 202 +-- .../configuration/PhotonConfiguration.java | 284 ++-- .../configuration/SqlConfigProvider.java | 868 +++++------ .../dataflow/DataChangeDestination.java | 16 +- .../common/dataflow/DataChangeService.java | 144 +- .../common/dataflow/DataChangeSource.java | 14 +- .../common/dataflow/DataChangeSubscriber.java | 46 +- .../dataflow/events/DataChangeEvent.java | 56 +- .../dataflow/events/HTTPRequestEvent.java | 14 +- .../events/IncomingWebSocketEvent.java | 72 +- .../dataflow/events/OutgoingUIEvent.java | 50 +- .../networktables/NTDataChangeListener.java | 32 +- .../networktables/NTDataPublisher.java | 284 ++-- .../networktables/NetworkTablesManager.java | 310 ++-- .../dataflow/websocket/UIDataPublisher.java | 70 +- .../common/hardware/GPIO/CustomGPIO.java | 126 +- .../common/hardware/GPIO/GPIOBase.java | 108 +- .../hardware/GPIO/pi/PigpioCommand.java | 32 +- .../hardware/GPIO/pi/PigpioException.java | 596 ++++---- .../common/hardware/GPIO/pi/PigpioPin.java | 110 +- .../common/hardware/GPIO/pi/PigpioPulse.java | 34 +- .../common/hardware/GPIO/pi/PigpioSocket.java | 644 ++++---- .../hardware/GPIO/pi/PigpioSocketLock.java | 220 +-- .../common/hardware/HardwareManager.java | 258 ++-- .../common/hardware/PiVersion.java | 80 +- .../common/hardware/Platform.java | 344 ++--- .../common/hardware/StatusLED.java | 38 +- 
.../common/hardware/VisionLED.java | 322 ++-- .../hardware/metrics/MetricsManager.java | 214 +-- .../common/hardware/metrics/cmds/CmdBase.java | 32 +- .../hardware/metrics/cmds/FileCmds.java | 24 +- .../hardware/metrics/cmds/LinuxCmds.java | 24 +- .../common/hardware/metrics/cmds/PiCmds.java | 32 +- .../photonvision/common/logging/LogGroup.java | 12 +- .../photonvision/common/logging/LogLevel.java | 22 +- .../photonvision/common/logging/Logger.java | 622 ++++---- .../common/networking/NetworkInterface.java | 92 +- .../common/networking/NetworkManager.java | 220 +-- .../common/networking/NetworkMode.java | 12 +- .../common/networking/NetworkUtils.java | 178 +-- .../common/networking/RoborioFinder.java | 152 +- .../common/scripting/ScriptCommandType.java | 16 +- .../common/scripting/ScriptConfig.java | 26 +- .../common/scripting/ScriptEvent.java | 48 +- .../common/scripting/ScriptEventType.java | 30 +- .../common/scripting/ScriptManager.java | 170 +-- .../photonvision/common/util/ColorHelper.java | 6 +- .../common/util/MemoryManager.java | 100 +- .../common/util/NativeLibHelper.java | 24 +- .../common/util/ReflectionUtils.java | 62 +- .../common/util/SerializationUtils.java | 60 +- .../photonvision/common/util/ShellExec.java | 328 ++-- .../photonvision/common/util/TestUtils.java | 616 ++++---- .../common/util/TimedTaskManager.java | 86 +- .../common/util/file/FileUtils.java | 178 +-- .../common/util/file/JacksonUtils.java | 184 +-- .../util/file/ProgramDirectoryUtilities.java | 68 +- .../common/util/java/TriConsumer.java | 2 +- .../common/util/math/IPUtils.java | 60 +- .../common/util/math/MathUtils.java | 336 ++--- .../common/util/numbers/DoubleCouple.java | 48 +- .../common/util/numbers/IntegerCouple.java | 12 +- .../common/util/numbers/NumberCouple.java | 74 +- .../common/util/numbers/NumberListUtils.java | 168 +-- .../org/photonvision/raspi/LibCameraJNI.java | 268 ++-- .../vision/apriltag/AprilTagFamily.java | 24 +- .../vision/aruco/ArucoDetectionResult.java | 94 
+- .../vision/aruco/ArucoDetectorParams.java | 74 +- .../vision/aruco/PhotonArucoDetector.java | 172 +-- .../CameraCalibrationCoefficients.java | 224 +-- .../vision/calibration/JsonMat.java | 184 +-- .../vision/camera/CameraQuirk.java | 24 +- .../vision/camera/CameraType.java | 6 +- .../vision/camera/FileVisionSource.java | 146 +- .../vision/camera/LibcameraGpuSettables.java | 372 ++--- .../vision/camera/LibcameraGpuSource.java | 92 +- .../vision/camera/QuirkyCamera.java | 196 +-- .../vision/camera/USBCameraSource.java | 568 +++---- .../org/photonvision/vision/frame/Frame.java | 84 +- .../vision/frame/FrameDivisor.java | 16 +- .../vision/frame/FrameProvider.java | 18 +- .../vision/frame/FrameStaticProperties.java | 142 +- .../vision/frame/FrameThresholdType.java | 6 +- .../frame/consumer/DummyFrameConsumer.java | 8 +- .../frame/consumer/FileSaveFrameConsumer.java | 116 +- .../frame/consumer/MJPGFrameConsumer.java | 390 ++--- .../frame/provider/CpuImageProcessor.java | 150 +- .../frame/provider/FileFrameProvider.java | 162 +- .../provider/LibcameraGpuFrameProvider.java | 162 +- .../frame/provider/USBFrameProvider.java | 58 +- .../org/photonvision/vision/opencv/CVMat.java | 116 +- .../photonvision/vision/opencv/CVShape.java | 58 +- .../photonvision/vision/opencv/Contour.java | 364 ++--- .../vision/opencv/ContourGroupingMode.java | 14 +- .../opencv/ContourIntersectionDirection.java | 10 +- .../vision/opencv/ContourShape.java | 32 +- .../vision/opencv/ContourSortMode.java | 40 +- .../photonvision/vision/opencv/DualMat.java | 4 +- .../vision/opencv/DualOffsetValues.java | 40 +- .../vision/opencv/ImageFlipMode.java | 16 +- .../vision/opencv/ImageRotationMode.java | 22 +- .../vision/opencv/Releasable.java | 2 +- .../org/photonvision/vision/pipe/CVPipe.java | 52 +- .../pipe/impl/AprilTagDetectionPipe.java | 52 +- .../impl/AprilTagDetectionPipeParams.java | 50 +- .../pipe/impl/AprilTagPoseEstimatorPipe.java | 176 +-- .../vision/pipe/impl/ArucoDetectionPipe.java | 36 +- 
.../pipe/impl/ArucoDetectionPipeParams.java | 56 +- .../vision/pipe/impl/BlurPipe.java | 62 +- .../vision/pipe/impl/CalculateFPSPipe.java | 30 +- .../vision/pipe/impl/Calibrate3dPipe.java | 240 +-- .../pipe/impl/Collect2dTargetsPipe.java | 90 +- .../vision/pipe/impl/CornerDetectionPipe.java | 332 ++-- .../vision/pipe/impl/Draw2dAprilTagsPipe.java | 18 +- .../vision/pipe/impl/Draw2dArucoPipe.java | 18 +- .../vision/pipe/impl/Draw2dCrosshairPipe.java | 166 +- .../vision/pipe/impl/Draw2dTargetsPipe.java | 408 ++--- .../vision/pipe/impl/Draw3dAprilTagsPipe.java | 20 +- .../vision/pipe/impl/Draw3dArucoPipe.java | 18 +- .../vision/pipe/impl/Draw3dTargetsPipe.java | 524 +++---- .../pipe/impl/DrawCornerDetectionPipe.java | 32 +- .../vision/pipe/impl/ErodeDilatePipe.java | 64 +- .../vision/pipe/impl/FilterContoursPipe.java | 212 +-- .../vision/pipe/impl/FilterShapesPipe.java | 102 +- .../pipe/impl/FindBoardCornersPipe.java | 576 +++---- .../vision/pipe/impl/FindCirclesPipe.java | 210 +-- .../vision/pipe/impl/FindContoursPipe.java | 26 +- .../vision/pipe/impl/FindPolygonPipe.java | 98 +- .../pipe/impl/GPUAcceleratedHSVPipe.java | 972 ++++++------ .../vision/pipe/impl/GrayscalePipe.java | 22 +- .../vision/pipe/impl/GroupContoursPipe.java | 126 +- .../vision/pipe/impl/HSVPipe.java | 116 +- .../vision/pipe/impl/MultiTargetPNPPipe.java | 90 +- .../vision/pipe/impl/OutputMatPipe.java | 14 +- .../vision/pipe/impl/ResizeImagePipe.java | 40 +- .../vision/pipe/impl/RotateImagePipe.java | 60 +- .../vision/pipe/impl/SolvePNPPipe.java | 130 +- .../vision/pipe/impl/SortContoursPipe.java | 88 +- .../vision/pipe/impl/SpeckleRejectPipe.java | 64 +- .../pipeline/AdvancedPipelineSettings.java | 258 ++-- .../vision/pipeline/AprilTagPipeline.java | 358 ++--- .../pipeline/AprilTagPipelineSettings.java | 118 +- .../vision/pipeline/ArucoPipeline.java | 114 +- .../pipeline/ArucoPipelineSettings.java | 30 +- .../vision/pipeline/CVPipeline.java | 76 +- .../vision/pipeline/CVPipelineSettings.java | 
214 +-- .../vision/pipeline/Calibrate3dPipeline.java | 346 ++--- .../Calibration3dPipelineSettings.java | 24 +- .../vision/pipeline/ColoredShapePipeline.java | 380 ++--- .../ColoredShapePipelineSettings.java | 126 +- .../vision/pipeline/DriverModePipeline.java | 136 +- .../pipeline/DriverModePipelineSettings.java | 18 +- .../vision/pipeline/OutputStreamPipeline.java | 378 ++--- .../vision/pipeline/PipelineProfiler.java | 124 +- .../vision/pipeline/PipelineType.java | 24 +- .../vision/pipeline/ReflectivePipeline.java | 318 ++-- .../pipeline/ReflectivePipelineSettings.java | 16 +- .../vision/pipeline/UICalibrationData.java | 124 +- .../vision/pipeline/result/BytePackable.java | 238 +-- .../pipeline/result/CVPipelineResult.java | 112 +- .../result/DriverModePipelineResult.java | 6 +- .../photonvision/vision/processes/Data.java | 10 +- .../vision/processes/PipelineManager.java | 768 +++++----- .../vision/processes/VisionModule.java | 1008 ++++++------- .../VisionModuleChangeSubscriber.java | 386 ++--- .../vision/processes/VisionModuleManager.java | 132 +- .../vision/processes/VisionRunner.java | 126 +- .../vision/processes/VisionSource.java | 20 +- .../vision/processes/VisionSourceManager.java | 588 ++++---- .../processes/VisionSourceSettables.java | 186 +-- .../vision/target/PotentialTarget.java | 62 +- .../vision/target/RobotOffsetPointMode.java | 6 +- .../target/RobotOffsetPointOperation.java | 42 +- .../vision/target/TargetCalculations.java | 264 ++-- .../vision/target/TargetModel.java | 326 ++-- .../vision/target/TargetOffsetPointEdge.java | 10 +- .../vision/target/TargetOrientation.java | 4 +- .../vision/target/TrackedTarget.java | 900 +++++------ .../vision/videoStream/SocketVideoStream.java | 140 +- .../videoStream/SocketVideoStreamManager.java | 116 +- .../photonvision/common/BenchmarkTest.java | 308 ++-- .../common/ShapeBenchmarkTest.java | 334 ++--- .../common/configuration/ConfigTest.java | 206 +-- .../configuration/NetworkConfigTest.java | 44 +- 
.../common/configuration/SQLConfigTest.java | 60 +- .../common/util/CoordinateConversionTest.java | 92 +- .../common/util/LogFileManagementTest.java | 80 +- .../common/util/TimedTaskManagerTest.java | 14 +- .../hardware/HardwareConfigTest.java | 30 +- .../hardware/HardwareManagerTest.java | 34 +- .../photonvision/hardware/HardwareTest.java | 92 +- .../photonvision/vision/QuirkyCameraTest.java | 56 +- .../frame/provider/FileFrameProviderTest.java | 102 +- .../vision/frame/provider/LibcameraTest.java | 28 +- .../vision/opencv/ContourTest.java | 74 +- .../vision/pipeline/AprilTagTest.java | 220 +-- .../vision/pipeline/ArucoPipelineTest.java | 92 +- .../vision/pipeline/Calibrate3dPipeTest.java | 576 +++---- .../vision/pipeline/CirclePNPTest.java | 258 ++-- .../pipeline/ColoredShapePipelineTest.java | 174 +-- .../vision/pipeline/PipelineProfilerTest.java | 26 +- .../pipeline/ReflectivePipelineTest.java | 198 +-- .../vision/pipeline/SolvePNPTest.java | 412 ++--- .../vision/processes/PipelineManagerTest.java | 54 +- .../processes/VisionModuleManagerTest.java | 276 ++-- .../processes/VisionSourceManagerTest.java | 54 +- .../vision/target/TargetCalculationsTest.java | 480 +++--- .../vision/target/TrackedTargetTest.java | 64 +- photon-lib/build.gradle | 14 +- .../org/photonvision/EstimatedRobotPose.java | 48 +- .../java/org/photonvision/PhotonCamera.java | 712 ++++----- .../org/photonvision/PhotonPoseEstimator.java | 1240 +++++++-------- .../photonvision/PhotonTargetSortMode.java | 32 +- .../java/org/photonvision/PhotonUtils.java | 340 ++--- .../simulation/PhotonCameraSim.java | 1020 ++++++------- .../simulation/SimCameraProperties.java | 1332 ++++++++--------- .../simulation/SimPhotonCamera.java | 244 +-- .../simulation/SimVisionSystem.java | 446 +++--- .../simulation/SimVisionTarget.java | 40 +- .../photonvision/simulation/VideoSimUtil.java | 1052 ++++++------- .../simulation/VisionSystemSim.java | 696 ++++----- .../simulation/VisionTargetSim.java | 100 +- 
.../java/org/photonvision/OpenCVTest.java | 386 ++--- .../java/org/photonvision/PacketTest.java | 276 ++-- .../org/photonvision/PhotonCameraTest.java | 26 +- .../photonvision/PhotonPoseEstimatorTest.java | 1146 +++++++------- .../java/org/photonvision/PhotonUtilTest.java | 118 +- .../org/photonvision/PhotonVersionTest.java | 48 +- .../org/photonvision/VisionSystemSimTest.java | 948 ++++++------ .../estimation/ApriltagWorkbenchTest.java | 98 +- photon-server/build.gradle | 20 +- .../src/main/java/org/photonvision/Main.java | 616 ++++---- .../server/CameraSocketHandler.java | 198 +-- .../server/CameraSocketMessageType.java | 28 +- .../server/DataSocketHandler.java | 594 ++++---- .../server/DataSocketMessageType.java | 64 +- .../photonvision/server/RequestHandler.java | 1062 ++++++------- .../java/org/photonvision/server/Server.java | 162 +- .../server/UIInboundSubscriber.java | 40 +- .../server/UIOutboundSubscriber.java | 40 +- .../org/photonvision/server/UISettings.java | 4 +- .../common/dataflow/structures/Packet.java | 388 ++--- .../common/hardware/VisionLEDMode.java | 42 +- .../common/networktables/NTTopicSet.java | 176 +-- .../estimation/CameraTargetRelation.java | 58 +- .../photonvision/estimation/OpenCVHelp.java | 1010 ++++++------- .../estimation/RotTrlTransform3d.java | 388 ++--- .../photonvision/estimation/TargetModel.java | 282 ++-- .../estimation/VisionEstimation.java | 180 +-- .../targeting/MultiTargetPNPResults.java | 116 +- .../photonvision/targeting/PNPResults.java | 270 ++-- .../targeting/PhotonPipelineResult.java | 424 +++--- .../targeting/PhotonTrackedTarget.java | 516 +++---- .../photonvision/targeting/TargetCorner.java | 42 +- .../org/photonvision/utils/PacketUtils.java | 42 +- photonlib-cpp-examples/examples.gradle | 16 +- .../aimandrange/build.gradle | 6 +- .../src/main/java/frc/robot/Main.java | 18 +- .../src/main/java/frc/robot/Robot.java | 112 +- .../aimattarget/build.gradle | 6 +- .../src/main/java/frc/robot/Main.java | 18 +- 
.../src/main/java/frc/robot/Robot.java | 90 +- photonlib-java-examples/examples.gradle | 16 +- .../getinrange/build.gradle | 6 +- .../src/main/java/frc/robot/Main.java | 18 +- .../src/main/java/frc/robot/Robot.java | 104 +- .../simaimandrange/build.gradle | 6 +- .../src/main/java/frc/robot/Constants.java | 92 +- .../src/main/java/frc/robot/Main.java | 18 +- .../src/main/java/frc/robot/Robot.java | 144 +- .../java/frc/robot/sim/DrivetrainSim.java | 100 +- .../main/java/frc/robot/sim/VisionSim.java | 136 +- .../swervedriveposeestsim/build.gradle | 6 +- .../src/main/java/frc/robot/Constants.java | 180 +-- .../src/main/java/frc/robot/Main.java | 8 +- .../src/main/java/frc/robot/Robot.java | 222 +-- .../src/main/java/frc/robot/Vision.java | 214 +-- .../subsystems/drivetrain/SwerveDrive.java | 544 +++---- .../subsystems/drivetrain/SwerveDriveSim.java | 832 +++++----- .../subsystems/drivetrain/SwerveModule.java | 292 ++-- shared/common.gradle | 2 +- shared/config.gradle | 5 +- versioningHelper.gradle | 3 +- 290 files changed, 27479 insertions(+), 27457 deletions(-) diff --git a/photon-core/src/main/java/org/photonvision/common/ProgramStatus.java b/photon-core/src/main/java/org/photonvision/common/ProgramStatus.java index a1ba6d091d..c86671081f 100644 --- a/photon-core/src/main/java/org/photonvision/common/ProgramStatus.java +++ b/photon-core/src/main/java/org/photonvision/common/ProgramStatus.java @@ -18,8 +18,8 @@ package org.photonvision.common; public enum ProgramStatus { - UHOH, - RUNNING, - RUNNING_NT, - RUNNING_NT_TARGET + UHOH, + RUNNING, + RUNNING_NT, + RUNNING_NT_TARGET } diff --git a/photon-core/src/main/java/org/photonvision/common/configuration/CameraConfiguration.java b/photon-core/src/main/java/org/photonvision/common/configuration/CameraConfiguration.java index b2f8cf0f70..a72df617bc 100644 --- a/photon-core/src/main/java/org/photonvision/common/configuration/CameraConfiguration.java +++ 
b/photon-core/src/main/java/org/photonvision/common/configuration/CameraConfiguration.java @@ -32,151 +32,151 @@ import org.photonvision.vision.processes.PipelineManager; public class CameraConfiguration { - private static final Logger logger = new Logger(CameraConfiguration.class, LogGroup.Camera); - - /** Name as reported by CSCore */ - public String baseName = ""; - - /** Name used to title the subfolder of this config */ - public String uniqueName = ""; - - /** User-set nickname */ - public String nickname = ""; - - /** Can be either path (ex /dev/videoX) or index (ex 1). */ - public String path = ""; - - @JsonIgnore public String[] otherPaths = {}; - - public CameraType cameraType = CameraType.UsbCamera; - public double FOV = 70; - public final List calibrations; - public int currentPipelineIndex = 0; - - public int streamIndex = 0; // 0 index means ports [1181, 1182], 1 means [1183, 1184], etc... - - @JsonIgnore // this ignores the pipes as we serialize them to their own subfolder - public List pipelineSettings = new ArrayList<>(); - - @JsonIgnore - public DriverModePipelineSettings driveModeSettings = new DriverModePipelineSettings(); - - public CameraConfiguration(String baseName, String path) { - this(baseName, baseName, baseName, path, new String[0]); + private static final Logger logger = new Logger(CameraConfiguration.class, LogGroup.Camera); + + /** Name as reported by CSCore */ + public String baseName = ""; + + /** Name used to title the subfolder of this config */ + public String uniqueName = ""; + + /** User-set nickname */ + public String nickname = ""; + + /** Can be either path (ex /dev/videoX) or index (ex 1). */ + public String path = ""; + + @JsonIgnore public String[] otherPaths = {}; + + public CameraType cameraType = CameraType.UsbCamera; + public double FOV = 70; + public final List calibrations; + public int currentPipelineIndex = 0; + + public int streamIndex = 0; // 0 index means ports [1181, 1182], 1 means [1183, 1184], etc... 
+ + @JsonIgnore // this ignores the pipes as we serialize them to their own subfolder + public List pipelineSettings = new ArrayList<>(); + + @JsonIgnore + public DriverModePipelineSettings driveModeSettings = new DriverModePipelineSettings(); + + public CameraConfiguration(String baseName, String path) { + this(baseName, baseName, baseName, path, new String[0]); + } + + public CameraConfiguration( + String baseName, String uniqueName, String nickname, String path, String[] alternates) { + this.baseName = baseName; + this.uniqueName = uniqueName; + this.nickname = nickname; + this.path = path; + this.calibrations = new ArrayList<>(); + this.otherPaths = alternates; + + logger.debug( + "Creating USB camera configuration for " + + cameraType + + " " + + baseName + + " (AKA " + + nickname + + ") at " + + path); + } + + @JsonCreator + public CameraConfiguration( + @JsonProperty("baseName") String baseName, + @JsonProperty("uniqueName") String uniqueName, + @JsonProperty("nickname") String nickname, + @JsonProperty("FOV") double FOV, + @JsonProperty("path") String path, + @JsonProperty("cameraType") CameraType cameraType, + @JsonProperty("calibration") List calibrations, + @JsonProperty("currentPipelineIndex") int currentPipelineIndex) { + this.baseName = baseName; + this.uniqueName = uniqueName; + this.nickname = nickname; + this.FOV = FOV; + this.path = path; + this.cameraType = cameraType; + this.calibrations = calibrations != null ? 
calibrations : new ArrayList<>(); + this.currentPipelineIndex = currentPipelineIndex; + + logger.debug( + "Creating camera configuration for " + + cameraType + + " " + + baseName + + " (AKA " + + nickname + + ") at " + + path); + } + + public void addPipelineSettings(List settings) { + for (var setting : settings) { + addPipelineSetting(setting); } + } - public CameraConfiguration( - String baseName, String uniqueName, String nickname, String path, String[] alternates) { - this.baseName = baseName; - this.uniqueName = uniqueName; - this.nickname = nickname; - this.path = path; - this.calibrations = new ArrayList<>(); - this.otherPaths = alternates; - - logger.debug( - "Creating USB camera configuration for " - + cameraType - + " " - + baseName - + " (AKA " - + nickname - + ") at " - + path); + public void addPipelineSetting(CVPipelineSettings setting) { + if (pipelineSettings.stream() + .anyMatch(s -> s.pipelineNickname.equalsIgnoreCase(setting.pipelineNickname))) { + logger.error("Could not name two pipelines the same thing! Renaming"); + setting.pipelineNickname += "_1"; // TODO verify this logic } - @JsonCreator - public CameraConfiguration( - @JsonProperty("baseName") String baseName, - @JsonProperty("uniqueName") String uniqueName, - @JsonProperty("nickname") String nickname, - @JsonProperty("FOV") double FOV, - @JsonProperty("path") String path, - @JsonProperty("cameraType") CameraType cameraType, - @JsonProperty("calibration") List calibrations, - @JsonProperty("currentPipelineIndex") int currentPipelineIndex) { - this.baseName = baseName; - this.uniqueName = uniqueName; - this.nickname = nickname; - this.FOV = FOV; - this.path = path; - this.cameraType = cameraType; - this.calibrations = calibrations != null ? 
calibrations : new ArrayList<>(); - this.currentPipelineIndex = currentPipelineIndex; - - logger.debug( - "Creating camera configuration for " - + cameraType - + " " - + baseName - + " (AKA " - + nickname - + ") at " - + path); + if (pipelineSettings.stream().anyMatch(s -> s.pipelineIndex == setting.pipelineIndex)) { + var newIndex = pipelineSettings.size(); + logger.error("Could not insert two pipelines at same index! Changing to " + newIndex); + setting.pipelineIndex = newIndex; // TODO verify this logic } - public void addPipelineSettings(List settings) { - for (var setting : settings) { - addPipelineSetting(setting); - } - } - - public void addPipelineSetting(CVPipelineSettings setting) { - if (pipelineSettings.stream() - .anyMatch(s -> s.pipelineNickname.equalsIgnoreCase(setting.pipelineNickname))) { - logger.error("Could not name two pipelines the same thing! Renaming"); - setting.pipelineNickname += "_1"; // TODO verify this logic - } - - if (pipelineSettings.stream().anyMatch(s -> s.pipelineIndex == setting.pipelineIndex)) { - var newIndex = pipelineSettings.size(); - logger.error("Could not insert two pipelines at same index! 
Changing to " + newIndex); - setting.pipelineIndex = newIndex; // TODO verify this logic - } - - pipelineSettings.add(setting); - pipelineSettings.sort(PipelineManager.PipelineSettingsIndexComparator); - } - - public void setPipelineSettings(List settings) { - pipelineSettings = settings; - } - - public void addCalibration(CameraCalibrationCoefficients calibration) { - logger.info("adding calibration " + calibration.resolution); - calibrations.stream() - .filter(it -> it.resolution.equals(calibration.resolution)) - .findAny() - .ifPresent(calibrations::remove); - calibrations.add(calibration); - } - - @Override - public String toString() { - return "CameraConfiguration [baseName=" - + baseName - + ", uniqueName=" - + uniqueName - + ", nickname=" - + nickname - + ", path=" - + path - + ", otherPaths=" - + Arrays.toString(otherPaths) - + ", cameraType=" - + cameraType - + ", FOV=" - + FOV - + ", calibrations=" - + calibrations - + ", currentPipelineIndex=" - + currentPipelineIndex - + ", streamIndex=" - + streamIndex - + ", pipelineSettings=" - + pipelineSettings - + ", driveModeSettings=" - + driveModeSettings - + "]"; - } + pipelineSettings.add(setting); + pipelineSettings.sort(PipelineManager.PipelineSettingsIndexComparator); + } + + public void setPipelineSettings(List settings) { + pipelineSettings = settings; + } + + public void addCalibration(CameraCalibrationCoefficients calibration) { + logger.info("adding calibration " + calibration.resolution); + calibrations.stream() + .filter(it -> it.resolution.equals(calibration.resolution)) + .findAny() + .ifPresent(calibrations::remove); + calibrations.add(calibration); + } + + @Override + public String toString() { + return "CameraConfiguration [baseName=" + + baseName + + ", uniqueName=" + + uniqueName + + ", nickname=" + + nickname + + ", path=" + + path + + ", otherPaths=" + + Arrays.toString(otherPaths) + + ", cameraType=" + + cameraType + + ", FOV=" + + FOV + + ", calibrations=" + + calibrations + + ", 
currentPipelineIndex=" + + currentPipelineIndex + + ", streamIndex=" + + streamIndex + + ", pipelineSettings=" + + pipelineSettings + + ", driveModeSettings=" + + driveModeSettings + + "]"; + } } diff --git a/photon-core/src/main/java/org/photonvision/common/configuration/ConfigManager.java b/photon-core/src/main/java/org/photonvision/common/configuration/ConfigManager.java index 1d87dd17ab..e9738111fc 100644 --- a/photon-core/src/main/java/org/photonvision/common/configuration/ConfigManager.java +++ b/photon-core/src/main/java/org/photonvision/common/configuration/ConfigManager.java @@ -37,261 +37,261 @@ import org.zeroturnaround.zip.ZipUtil; public class ConfigManager { - private static final Logger logger = new Logger(ConfigManager.class, LogGroup.General); - private static ConfigManager INSTANCE; - - public static final String HW_CFG_FNAME = "hardwareConfig.json"; - public static final String HW_SET_FNAME = "hardwareSettings.json"; - public static final String NET_SET_FNAME = "networkSettings.json"; - - final File configDirectoryFile; - - private final ConfigProvider m_provider; - - private final Thread settingsSaveThread; - private long saveRequestTimestamp = -1; - - enum ConfigSaveStrategy { - SQL, - LEGACY, - ATOMIC_ZIP - } - - // This logic decides which kind of ConfigManager we load as the default. 
If we want - // to switch back to the legacy config manager, change this constant - private static final ConfigSaveStrategy m_saveStrat = ConfigSaveStrategy.SQL; - - public static ConfigManager getInstance() { - if (INSTANCE == null) { - switch (m_saveStrat) { - case SQL: - INSTANCE = new ConfigManager(getRootFolder(), new SqlConfigProvider(getRootFolder())); - break; - case LEGACY: - INSTANCE = new ConfigManager(getRootFolder(), new LegacyConfigProvider(getRootFolder())); - break; - case ATOMIC_ZIP: - // not yet done, fall through - default: - break; - } - } - return INSTANCE; - } - - private void translateLegacyIfPresent(Path folderPath) { - if (!(m_provider instanceof SqlConfigProvider)) { - // Cannot import into SQL if we aren't in SQL mode rn - return; - } - - var maybeCams = Path.of(folderPath.toAbsolutePath().toString(), "cameras").toFile(); - var maybeCamsBak = Path.of(folderPath.toAbsolutePath().toString(), "cameras_backup").toFile(); - - if (maybeCams.exists() && maybeCams.isDirectory()) { - logger.info("Translating settings zip!"); - var legacy = new LegacyConfigProvider(folderPath); - legacy.load(); - var loadedConfig = legacy.getConfig(); - - // yeet our current cameras directory, not needed anymore - if (maybeCamsBak.exists()) FileUtils.deleteDirectory(maybeCamsBak.toPath()); - if (!maybeCams.canWrite()) { - maybeCams.setWritable(true); - } - - try { - Files.move(maybeCams.toPath(), maybeCamsBak.toPath(), StandardCopyOption.REPLACE_EXISTING); - } catch (IOException e) { - logger.error("Exception moving cameras to cameras_bak!", e); - - // Try to just copy from cams to cams-bak instead of moving? Windows sometimes needs us to - // do that - try { - org.apache.commons.io.FileUtils.copyDirectory(maybeCams, maybeCamsBak); - } catch (IOException e1) { - // So we can't move to cams_bak, and we can't copy and delete either? 
We just have to give - // up here on preserving the old folder - logger.error("Exception while backup-copying cameras to cameras_bak!", e); - e1.printStackTrace(); - } - - // Delete the directory because we were successfully able to load the config but were unable - // to save or copy the folder. - if (maybeCams.exists()) FileUtils.deleteDirectory(maybeCams.toPath()); - } - - // Save the same config out using SQL loader - var sql = new SqlConfigProvider(getRootFolder()); - sql.setConfig(loadedConfig); - sql.saveToDisk(); - } + private static final Logger logger = new Logger(ConfigManager.class, LogGroup.General); + private static ConfigManager INSTANCE; + + public static final String HW_CFG_FNAME = "hardwareConfig.json"; + public static final String HW_SET_FNAME = "hardwareSettings.json"; + public static final String NET_SET_FNAME = "networkSettings.json"; + + final File configDirectoryFile; + + private final ConfigProvider m_provider; + + private final Thread settingsSaveThread; + private long saveRequestTimestamp = -1; + + enum ConfigSaveStrategy { + SQL, + LEGACY, + ATOMIC_ZIP + } + + // This logic decides which kind of ConfigManager we load as the default. 
If we want + // to switch back to the legacy config manager, change this constant + private static final ConfigSaveStrategy m_saveStrat = ConfigSaveStrategy.SQL; + + public static ConfigManager getInstance() { + if (INSTANCE == null) { + switch (m_saveStrat) { + case SQL: + INSTANCE = new ConfigManager(getRootFolder(), new SqlConfigProvider(getRootFolder())); + break; + case LEGACY: + INSTANCE = new ConfigManager(getRootFolder(), new LegacyConfigProvider(getRootFolder())); + break; + case ATOMIC_ZIP: + // not yet done, fall through + default: + break; + } } + return INSTANCE; + } - public static boolean saveUploadedSettingsZip(File uploadPath) { - // Unpack to /tmp/something/photonvision - var folderPath = Path.of(System.getProperty("java.io.tmpdir"), "photonvision").toFile(); - folderPath.mkdirs(); - ZipUtil.unpack(uploadPath, folderPath); - - // Nuke the current settings directory - FileUtils.deleteDirectory(getRootFolder()); - - // If there's a cameras folder in the upload, we know we need to import from the - // old style - var maybeCams = Path.of(folderPath.getAbsolutePath(), "cameras").toFile(); - if (maybeCams.exists() && maybeCams.isDirectory()) { - var legacy = new LegacyConfigProvider(folderPath.toPath()); - legacy.load(); - var loadedConfig = legacy.getConfig(); - - var sql = new SqlConfigProvider(getRootFolder()); - sql.setConfig(loadedConfig); - return sql.saveToDisk(); - } else { - // new structure -- just copy and save like we used to - try { - org.apache.commons.io.FileUtils.copyDirectory(folderPath, getRootFolder().toFile()); - logger.info("Copied settings successfully!"); - return true; - } catch (IOException e) { - logger.error("Exception copying uploaded settings!", e); - return false; - } - } + private void translateLegacyIfPresent(Path folderPath) { + if (!(m_provider instanceof SqlConfigProvider)) { + // Cannot import into SQL if we aren't in SQL mode rn + return; } - public PhotonConfiguration getConfig() { - return m_provider.getConfig(); - 
} + var maybeCams = Path.of(folderPath.toAbsolutePath().toString(), "cameras").toFile(); + var maybeCamsBak = Path.of(folderPath.toAbsolutePath().toString(), "cameras_backup").toFile(); - private static Path getRootFolder() { - return Path.of("photonvision_config"); - } + if (maybeCams.exists() && maybeCams.isDirectory()) { + logger.info("Translating settings zip!"); + var legacy = new LegacyConfigProvider(folderPath); + legacy.load(); + var loadedConfig = legacy.getConfig(); - ConfigManager(Path configDirectory, ConfigProvider provider) { - this.configDirectoryFile = new File(configDirectory.toUri()); - m_provider = provider; + // yeet our current cameras directory, not needed anymore + if (maybeCamsBak.exists()) FileUtils.deleteDirectory(maybeCamsBak.toPath()); + if (!maybeCams.canWrite()) { + maybeCams.setWritable(true); + } - settingsSaveThread = new Thread(this::saveAndWriteTask); - settingsSaveThread.start(); - } + try { + Files.move(maybeCams.toPath(), maybeCamsBak.toPath(), StandardCopyOption.REPLACE_EXISTING); + } catch (IOException e) { + logger.error("Exception moving cameras to cameras_bak!", e); - public void load() { - translateLegacyIfPresent(this.configDirectoryFile.toPath()); - m_provider.load(); - } - - public void addCameraConfigurations(List sources) { - getConfig().addCameraConfigs(sources); - requestSave(); - } - - public void saveModule(CameraConfiguration config, String uniqueName) { - getConfig().addCameraConfig(uniqueName, config); - requestSave(); - } - - public File getSettingsFolderAsZip() { - File out = Path.of(System.getProperty("java.io.tmpdir"), "photonvision-settings.zip").toFile(); + // Try to just copy from cams to cams-bak instead of moving? 
Windows sometimes needs us to + // do that try { - ZipUtil.pack(configDirectoryFile, out); - } catch (Exception e) { - e.printStackTrace(); + org.apache.commons.io.FileUtils.copyDirectory(maybeCams, maybeCamsBak); + } catch (IOException e1) { + // So we can't move to cams_bak, and we can't copy and delete either? We just have to give + // up here on preserving the old folder + logger.error("Exception while backup-copying cameras to cameras_bak!", e); + e1.printStackTrace(); } - return out; - } - - public void setNetworkSettings(NetworkConfig networkConfig) { - getConfig().setNetworkConfig(networkConfig); - requestSave(); - } - public Path getLogsDir() { - return Path.of(configDirectoryFile.toString(), "logs"); - } - - public Path getCalibDir() { - return Path.of(configDirectoryFile.toString(), "calibImgs"); - } - - public static final String LOG_PREFIX = "photonvision-"; - public static final String LOG_EXT = ".log"; - public static final String LOG_DATE_TIME_FORMAT = "yyyy-M-d_hh-mm-ss"; - - public String taToLogFname(TemporalAccessor date) { - var dateString = DateTimeFormatter.ofPattern(LOG_DATE_TIME_FORMAT).format(date); - return LOG_PREFIX + dateString + LOG_EXT; - } - - public Date logFnameToDate(String fname) throws ParseException { - // Strip away known unneeded portions of the log file name - fname = fname.replace(LOG_PREFIX, "").replace(LOG_EXT, ""); - DateFormat format = new SimpleDateFormat(LOG_DATE_TIME_FORMAT); - return format.parse(fname); - } - - public Path getLogPath() { - var logFile = Path.of(this.getLogsDir().toString(), taToLogFname(LocalDateTime.now())).toFile(); - if (!logFile.getParentFile().exists()) logFile.getParentFile().mkdirs(); - return logFile.toPath(); - } - - public Path getImageSavePath() { - var imgFilePath = Path.of(configDirectoryFile.toString(), "imgSaves").toFile(); - if (!imgFilePath.exists()) imgFilePath.mkdirs(); - return imgFilePath.toPath(); - } - - public boolean saveUploadedHardwareConfig(Path uploadPath) { - return 
m_provider.saveUploadedHardwareConfig(uploadPath); - } - - public boolean saveUploadedHardwareSettings(Path uploadPath) { - return m_provider.saveUploadedHardwareSettings(uploadPath); - } + // Delete the directory because we were successfully able to load the config but were unable + // to save or copy the folder. + if (maybeCams.exists()) FileUtils.deleteDirectory(maybeCams.toPath()); + } - public boolean saveUploadedNetworkConfig(Path uploadPath) { - return m_provider.saveUploadedNetworkConfig(uploadPath); + // Save the same config out using SQL loader + var sql = new SqlConfigProvider(getRootFolder()); + sql.setConfig(loadedConfig); + sql.saveToDisk(); } - - public boolean saveUploadedAprilTagFieldLayout(Path uploadPath) { - return m_provider.saveUploadedAprilTagFieldLayout(uploadPath); + } + + public static boolean saveUploadedSettingsZip(File uploadPath) { + // Unpack to /tmp/something/photonvision + var folderPath = Path.of(System.getProperty("java.io.tmpdir"), "photonvision").toFile(); + folderPath.mkdirs(); + ZipUtil.unpack(uploadPath, folderPath); + + // Nuke the current settings directory + FileUtils.deleteDirectory(getRootFolder()); + + // If there's a cameras folder in the upload, we know we need to import from the + // old style + var maybeCams = Path.of(folderPath.getAbsolutePath(), "cameras").toFile(); + if (maybeCams.exists() && maybeCams.isDirectory()) { + var legacy = new LegacyConfigProvider(folderPath.toPath()); + legacy.load(); + var loadedConfig = legacy.getConfig(); + + var sql = new SqlConfigProvider(getRootFolder()); + sql.setConfig(loadedConfig); + return sql.saveToDisk(); + } else { + // new structure -- just copy and save like we used to + try { + org.apache.commons.io.FileUtils.copyDirectory(folderPath, getRootFolder().toFile()); + logger.info("Copied settings successfully!"); + return true; + } catch (IOException e) { + logger.error("Exception copying uploaded settings!", e); + return false; + } } - - public void requestSave() { - 
logger.trace("Requesting save..."); - saveRequestTimestamp = System.currentTimeMillis(); - } - - public void unloadCameraConfigs() { - this.getConfig().getCameraConfigurations().clear(); + } + + public PhotonConfiguration getConfig() { + return m_provider.getConfig(); + } + + private static Path getRootFolder() { + return Path.of("photonvision_config"); + } + + ConfigManager(Path configDirectory, ConfigProvider provider) { + this.configDirectoryFile = new File(configDirectory.toUri()); + m_provider = provider; + + settingsSaveThread = new Thread(this::saveAndWriteTask); + settingsSaveThread.start(); + } + + public void load() { + translateLegacyIfPresent(this.configDirectoryFile.toPath()); + m_provider.load(); + } + + public void addCameraConfigurations(List sources) { + getConfig().addCameraConfigs(sources); + requestSave(); + } + + public void saveModule(CameraConfiguration config, String uniqueName) { + getConfig().addCameraConfig(uniqueName, config); + requestSave(); + } + + public File getSettingsFolderAsZip() { + File out = Path.of(System.getProperty("java.io.tmpdir"), "photonvision-settings.zip").toFile(); + try { + ZipUtil.pack(configDirectoryFile, out); + } catch (Exception e) { + e.printStackTrace(); } - - public void clearConfig() { - logger.info("Clearing configuration!"); - m_provider.clearConfig(); - m_provider.saveToDisk(); - } - - public void saveToDisk() { - m_provider.saveToDisk(); - } - - private void saveAndWriteTask() { - // Only save if 1 second has past since the request was made - while (!Thread.currentThread().isInterrupted()) { - if (saveRequestTimestamp > 0 && (System.currentTimeMillis() - saveRequestTimestamp) > 1000L) { - saveRequestTimestamp = -1; - logger.debug("Saving to disk..."); - saveToDisk(); - } - - try { - Thread.sleep(1000); - } catch (InterruptedException e) { - logger.error("Exception waiting for settings semaphore", e); - } - } + return out; + } + + public void setNetworkSettings(NetworkConfig networkConfig) { + 
getConfig().setNetworkConfig(networkConfig); + requestSave(); + } + + public Path getLogsDir() { + return Path.of(configDirectoryFile.toString(), "logs"); + } + + public Path getCalibDir() { + return Path.of(configDirectoryFile.toString(), "calibImgs"); + } + + public static final String LOG_PREFIX = "photonvision-"; + public static final String LOG_EXT = ".log"; + public static final String LOG_DATE_TIME_FORMAT = "yyyy-M-d_hh-mm-ss"; + + public String taToLogFname(TemporalAccessor date) { + var dateString = DateTimeFormatter.ofPattern(LOG_DATE_TIME_FORMAT).format(date); + return LOG_PREFIX + dateString + LOG_EXT; + } + + public Date logFnameToDate(String fname) throws ParseException { + // Strip away known unneeded portions of the log file name + fname = fname.replace(LOG_PREFIX, "").replace(LOG_EXT, ""); + DateFormat format = new SimpleDateFormat(LOG_DATE_TIME_FORMAT); + return format.parse(fname); + } + + public Path getLogPath() { + var logFile = Path.of(this.getLogsDir().toString(), taToLogFname(LocalDateTime.now())).toFile(); + if (!logFile.getParentFile().exists()) logFile.getParentFile().mkdirs(); + return logFile.toPath(); + } + + public Path getImageSavePath() { + var imgFilePath = Path.of(configDirectoryFile.toString(), "imgSaves").toFile(); + if (!imgFilePath.exists()) imgFilePath.mkdirs(); + return imgFilePath.toPath(); + } + + public boolean saveUploadedHardwareConfig(Path uploadPath) { + return m_provider.saveUploadedHardwareConfig(uploadPath); + } + + public boolean saveUploadedHardwareSettings(Path uploadPath) { + return m_provider.saveUploadedHardwareSettings(uploadPath); + } + + public boolean saveUploadedNetworkConfig(Path uploadPath) { + return m_provider.saveUploadedNetworkConfig(uploadPath); + } + + public boolean saveUploadedAprilTagFieldLayout(Path uploadPath) { + return m_provider.saveUploadedAprilTagFieldLayout(uploadPath); + } + + public void requestSave() { + logger.trace("Requesting save..."); + saveRequestTimestamp = 
System.currentTimeMillis(); + } + + public void unloadCameraConfigs() { + this.getConfig().getCameraConfigurations().clear(); + } + + public void clearConfig() { + logger.info("Clearing configuration!"); + m_provider.clearConfig(); + m_provider.saveToDisk(); + } + + public void saveToDisk() { + m_provider.saveToDisk(); + } + + private void saveAndWriteTask() { + // Only save if 1 second has past since the request was made + while (!Thread.currentThread().isInterrupted()) { + if (saveRequestTimestamp > 0 && (System.currentTimeMillis() - saveRequestTimestamp) > 1000L) { + saveRequestTimestamp = -1; + logger.debug("Saving to disk..."); + saveToDisk(); + } + + try { + Thread.sleep(1000); + } catch (InterruptedException e) { + logger.error("Exception waiting for settings semaphore", e); + } } + } } diff --git a/photon-core/src/main/java/org/photonvision/common/configuration/ConfigProvider.java b/photon-core/src/main/java/org/photonvision/common/configuration/ConfigProvider.java index 9964d0ca93..1655283bf5 100644 --- a/photon-core/src/main/java/org/photonvision/common/configuration/ConfigProvider.java +++ b/photon-core/src/main/java/org/photonvision/common/configuration/ConfigProvider.java @@ -20,25 +20,25 @@ import java.nio.file.Path; public abstract class ConfigProvider { - protected PhotonConfiguration config; + protected PhotonConfiguration config; - abstract void load(); + abstract void load(); - abstract boolean saveToDisk(); + abstract boolean saveToDisk(); - PhotonConfiguration getConfig() { - return config; - } + PhotonConfiguration getConfig() { + return config; + } - public void clearConfig() { - config = new PhotonConfiguration(); - } + public void clearConfig() { + config = new PhotonConfiguration(); + } - public abstract boolean saveUploadedHardwareConfig(Path uploadPath); + public abstract boolean saveUploadedHardwareConfig(Path uploadPath); - public abstract boolean saveUploadedHardwareSettings(Path uploadPath); + public abstract boolean 
saveUploadedHardwareSettings(Path uploadPath); - public abstract boolean saveUploadedNetworkConfig(Path uploadPath); + public abstract boolean saveUploadedNetworkConfig(Path uploadPath); - public abstract boolean saveUploadedAprilTagFieldLayout(Path uploadPath); + public abstract boolean saveUploadedAprilTagFieldLayout(Path uploadPath); } diff --git a/photon-core/src/main/java/org/photonvision/common/configuration/HardwareConfig.java b/photon-core/src/main/java/org/photonvision/common/configuration/HardwareConfig.java index 2c8323f159..9651932e34 100644 --- a/photon-core/src/main/java/org/photonvision/common/configuration/HardwareConfig.java +++ b/photon-core/src/main/java/org/photonvision/common/configuration/HardwareConfig.java @@ -24,179 +24,179 @@ @JsonIgnoreProperties(ignoreUnknown = true) public class HardwareConfig { - public final String deviceName; - public final String deviceLogoPath; - public final String supportURL; + public final String deviceName; + public final String deviceLogoPath; + public final String supportURL; - // LED control - public final ArrayList ledPins; - public final String ledSetCommand; - public final boolean ledsCanDim; - public final ArrayList ledBrightnessRange; - public final String ledDimCommand; - public final String ledBlinkCommand; - public final ArrayList statusRGBPins; + // LED control + public final ArrayList ledPins; + public final String ledSetCommand; + public final boolean ledsCanDim; + public final ArrayList ledBrightnessRange; + public final String ledDimCommand; + public final String ledBlinkCommand; + public final ArrayList statusRGBPins; - // Metrics - public final String cpuTempCommand; - public final String cpuMemoryCommand; - public final String cpuUtilCommand; - public final String cpuThrottleReasonCmd; - public final String cpuUptimeCommand; - public final String gpuMemoryCommand; - public final String ramUtilCommand; - public final String gpuMemUsageCommand; - public final String diskUsageCommand; + // 
Metrics + public final String cpuTempCommand; + public final String cpuMemoryCommand; + public final String cpuUtilCommand; + public final String cpuThrottleReasonCmd; + public final String cpuUptimeCommand; + public final String gpuMemoryCommand; + public final String ramUtilCommand; + public final String gpuMemUsageCommand; + public final String diskUsageCommand; - // Device stuff - public final String restartHardwareCommand; - public final double vendorFOV; // -1 for unmanaged - public final List blacklistedResIndices; // this happens before the defaults are applied + // Device stuff + public final String restartHardwareCommand; + public final double vendorFOV; // -1 for unmanaged + public final List blacklistedResIndices; // this happens before the defaults are applied - public HardwareConfig() { - deviceName = ""; - deviceLogoPath = ""; - supportURL = ""; - ledPins = new ArrayList<>(); - ledSetCommand = ""; - ledsCanDim = false; - ledBrightnessRange = new ArrayList<>(); - statusRGBPins = new ArrayList<>(); - ledDimCommand = ""; + public HardwareConfig() { + deviceName = ""; + deviceLogoPath = ""; + supportURL = ""; + ledPins = new ArrayList<>(); + ledSetCommand = ""; + ledsCanDim = false; + ledBrightnessRange = new ArrayList<>(); + statusRGBPins = new ArrayList<>(); + ledDimCommand = ""; - cpuTempCommand = ""; - cpuMemoryCommand = ""; - cpuUtilCommand = ""; - cpuThrottleReasonCmd = ""; - cpuUptimeCommand = ""; - gpuMemoryCommand = ""; - ramUtilCommand = ""; - ledBlinkCommand = ""; - gpuMemUsageCommand = ""; - diskUsageCommand = ""; + cpuTempCommand = ""; + cpuMemoryCommand = ""; + cpuUtilCommand = ""; + cpuThrottleReasonCmd = ""; + cpuUptimeCommand = ""; + gpuMemoryCommand = ""; + ramUtilCommand = ""; + ledBlinkCommand = ""; + gpuMemUsageCommand = ""; + diskUsageCommand = ""; - restartHardwareCommand = ""; - vendorFOV = -1; - blacklistedResIndices = Collections.emptyList(); - } + restartHardwareCommand = ""; + vendorFOV = -1; + blacklistedResIndices = 
Collections.emptyList(); + } - @SuppressWarnings("unused") - public HardwareConfig( - String deviceName, - String deviceLogoPath, - String supportURL, - ArrayList ledPins, - String ledSetCommand, - boolean ledsCanDim, - ArrayList ledBrightnessRange, - String ledDimCommand, - String ledBlinkCommand, - ArrayList statusRGBPins, - String cpuTempCommand, - String cpuMemoryCommand, - String cpuUtilCommand, - String cpuThrottleReasonCmd, - String cpuUptimeCommand, - String gpuMemoryCommand, - String ramUtilCommand, - String gpuMemUsageCommand, - String diskUsageCommand, - String restartHardwareCommand, - double vendorFOV, - List blacklistedResIndices) { - this.deviceName = deviceName; - this.deviceLogoPath = deviceLogoPath; - this.supportURL = supportURL; - this.ledPins = ledPins; - this.ledSetCommand = ledSetCommand; - this.ledsCanDim = ledsCanDim; - this.ledBrightnessRange = ledBrightnessRange; - this.ledDimCommand = ledDimCommand; - this.ledBlinkCommand = ledBlinkCommand; - this.statusRGBPins = statusRGBPins; - this.cpuTempCommand = cpuTempCommand; - this.cpuMemoryCommand = cpuMemoryCommand; - this.cpuUtilCommand = cpuUtilCommand; - this.cpuThrottleReasonCmd = cpuThrottleReasonCmd; - this.cpuUptimeCommand = cpuUptimeCommand; - this.gpuMemoryCommand = gpuMemoryCommand; - this.ramUtilCommand = ramUtilCommand; - this.gpuMemUsageCommand = gpuMemUsageCommand; - this.diskUsageCommand = diskUsageCommand; - this.restartHardwareCommand = restartHardwareCommand; - this.vendorFOV = vendorFOV; - this.blacklistedResIndices = blacklistedResIndices; - } + @SuppressWarnings("unused") + public HardwareConfig( + String deviceName, + String deviceLogoPath, + String supportURL, + ArrayList ledPins, + String ledSetCommand, + boolean ledsCanDim, + ArrayList ledBrightnessRange, + String ledDimCommand, + String ledBlinkCommand, + ArrayList statusRGBPins, + String cpuTempCommand, + String cpuMemoryCommand, + String cpuUtilCommand, + String cpuThrottleReasonCmd, + String cpuUptimeCommand, + 
String gpuMemoryCommand, + String ramUtilCommand, + String gpuMemUsageCommand, + String diskUsageCommand, + String restartHardwareCommand, + double vendorFOV, + List blacklistedResIndices) { + this.deviceName = deviceName; + this.deviceLogoPath = deviceLogoPath; + this.supportURL = supportURL; + this.ledPins = ledPins; + this.ledSetCommand = ledSetCommand; + this.ledsCanDim = ledsCanDim; + this.ledBrightnessRange = ledBrightnessRange; + this.ledDimCommand = ledDimCommand; + this.ledBlinkCommand = ledBlinkCommand; + this.statusRGBPins = statusRGBPins; + this.cpuTempCommand = cpuTempCommand; + this.cpuMemoryCommand = cpuMemoryCommand; + this.cpuUtilCommand = cpuUtilCommand; + this.cpuThrottleReasonCmd = cpuThrottleReasonCmd; + this.cpuUptimeCommand = cpuUptimeCommand; + this.gpuMemoryCommand = gpuMemoryCommand; + this.ramUtilCommand = ramUtilCommand; + this.gpuMemUsageCommand = gpuMemUsageCommand; + this.diskUsageCommand = diskUsageCommand; + this.restartHardwareCommand = restartHardwareCommand; + this.vendorFOV = vendorFOV; + this.blacklistedResIndices = blacklistedResIndices; + } - /** - * @return True if the FOV has been preset to a sane value, false otherwise - */ - public final boolean hasPresetFOV() { - return vendorFOV > 0; - } + /** + * @return True if the FOV has been preset to a sane value, false otherwise + */ + public final boolean hasPresetFOV() { + return vendorFOV > 0; + } - /** - * @return True if any command has been configured to a non-default empty, false otherwise - */ - public final boolean hasCommandsConfigured() { - return cpuTempCommand != "" - || cpuMemoryCommand != "" - || cpuUtilCommand != "" - || cpuThrottleReasonCmd != "" - || cpuUptimeCommand != "" - || gpuMemoryCommand != "" - || ramUtilCommand != "" - || ledBlinkCommand != "" - || gpuMemUsageCommand != "" - || diskUsageCommand != ""; - } + /** + * @return True if any command has been configured to a non-default empty, false otherwise + */ + public final boolean hasCommandsConfigured() 
{ + return cpuTempCommand != "" + || cpuMemoryCommand != "" + || cpuUtilCommand != "" + || cpuThrottleReasonCmd != "" + || cpuUptimeCommand != "" + || gpuMemoryCommand != "" + || ramUtilCommand != "" + || ledBlinkCommand != "" + || gpuMemUsageCommand != "" + || diskUsageCommand != ""; + } - @Override - public String toString() { - return "HardwareConfig [deviceName=" - + deviceName - + ", deviceLogoPath=" - + deviceLogoPath - + ", supportURL=" - + supportURL - + ", ledPins=" - + ledPins - + ", ledSetCommand=" - + ledSetCommand - + ", ledsCanDim=" - + ledsCanDim - + ", ledBrightnessRange=" - + ledBrightnessRange - + ", ledDimCommand=" - + ledDimCommand - + ", ledBlinkCommand=" - + ledBlinkCommand - + ", statusRGBPins=" - + statusRGBPins - + ", cpuTempCommand=" - + cpuTempCommand - + ", cpuMemoryCommand=" - + cpuMemoryCommand - + ", cpuUtilCommand=" - + cpuUtilCommand - + ", cpuThrottleReasonCmd=" - + cpuThrottleReasonCmd - + ", cpuUptimeCommand=" - + cpuUptimeCommand - + ", gpuMemoryCommand=" - + gpuMemoryCommand - + ", ramUtilCommand=" - + ramUtilCommand - + ", gpuMemUsageCommand=" - + gpuMemUsageCommand - + ", diskUsageCommand=" - + diskUsageCommand - + ", restartHardwareCommand=" - + restartHardwareCommand - + ", vendorFOV=" - + vendorFOV - + ", blacklistedResIndices=" - + blacklistedResIndices - + "]"; - } + @Override + public String toString() { + return "HardwareConfig [deviceName=" + + deviceName + + ", deviceLogoPath=" + + deviceLogoPath + + ", supportURL=" + + supportURL + + ", ledPins=" + + ledPins + + ", ledSetCommand=" + + ledSetCommand + + ", ledsCanDim=" + + ledsCanDim + + ", ledBrightnessRange=" + + ledBrightnessRange + + ", ledDimCommand=" + + ledDimCommand + + ", ledBlinkCommand=" + + ledBlinkCommand + + ", statusRGBPins=" + + statusRGBPins + + ", cpuTempCommand=" + + cpuTempCommand + + ", cpuMemoryCommand=" + + cpuMemoryCommand + + ", cpuUtilCommand=" + + cpuUtilCommand + + ", cpuThrottleReasonCmd=" + + cpuThrottleReasonCmd + + ", 
cpuUptimeCommand=" + + cpuUptimeCommand + + ", gpuMemoryCommand=" + + gpuMemoryCommand + + ", ramUtilCommand=" + + ramUtilCommand + + ", gpuMemUsageCommand=" + + gpuMemUsageCommand + + ", diskUsageCommand=" + + diskUsageCommand + + ", restartHardwareCommand=" + + restartHardwareCommand + + ", vendorFOV=" + + vendorFOV + + ", blacklistedResIndices=" + + blacklistedResIndices + + "]"; + } } diff --git a/photon-core/src/main/java/org/photonvision/common/configuration/HardwareSettings.java b/photon-core/src/main/java/org/photonvision/common/configuration/HardwareSettings.java index 3ef424dfe2..82ba3442a1 100644 --- a/photon-core/src/main/java/org/photonvision/common/configuration/HardwareSettings.java +++ b/photon-core/src/main/java/org/photonvision/common/configuration/HardwareSettings.java @@ -18,10 +18,10 @@ package org.photonvision.common.configuration; public class HardwareSettings { - public int ledBrightnessPercentage = 100; + public int ledBrightnessPercentage = 100; - @Override - public String toString() { - return "HardwareSettings [ledBrightnessPercentage=" + ledBrightnessPercentage + "]"; - } + @Override + public String toString() { + return "HardwareSettings [ledBrightnessPercentage=" + ledBrightnessPercentage + "]"; + } } diff --git a/photon-core/src/main/java/org/photonvision/common/configuration/LegacyConfigProvider.java b/photon-core/src/main/java/org/photonvision/common/configuration/LegacyConfigProvider.java index 74d5d472fe..60b38cb002 100644 --- a/photon-core/src/main/java/org/photonvision/common/configuration/LegacyConfigProvider.java +++ b/photon-core/src/main/java/org/photonvision/common/configuration/LegacyConfigProvider.java @@ -43,445 +43,445 @@ import org.zeroturnaround.zip.ZipUtil; class LegacyConfigProvider extends ConfigProvider { - private static final Logger logger = new Logger(LegacyConfigProvider.class, LogGroup.General); - - public static final String HW_CFG_FNAME = "hardwareConfig.json"; - public static final String HW_SET_FNAME = 
"hardwareSettings.json"; - public static final String NET_SET_FNAME = "networkSettings.json"; - public static final String ATFL_SET_FNAME = "apriltagFieldLayout.json"; - - private PhotonConfiguration config; - private final File hardwareConfigFile; - private final File hardwareSettingsFile; - private final File networkConfigFile; - private final File camerasFolder; - private final File apriltagFieldLayoutFile; - - final File configDirectoryFile; - - private long saveRequestTimestamp = -1; - private final Thread settingsSaveThread; - - public static void saveUploadedSettingsZip(File uploadPath) { - var folderPath = Path.of(System.getProperty("java.io.tmpdir"), "photonvision").toFile(); - folderPath.mkdirs(); - ZipUtil.unpack(uploadPath, folderPath); - FileUtils.deleteDirectory(getRootFolder()); - try { - org.apache.commons.io.FileUtils.copyDirectory(folderPath, getRootFolder().toFile()); - logger.info("Copied settings successfully!"); - } catch (IOException e) { - logger.error("Exception copying uploaded settings!", e); - } + private static final Logger logger = new Logger(LegacyConfigProvider.class, LogGroup.General); + + public static final String HW_CFG_FNAME = "hardwareConfig.json"; + public static final String HW_SET_FNAME = "hardwareSettings.json"; + public static final String NET_SET_FNAME = "networkSettings.json"; + public static final String ATFL_SET_FNAME = "apriltagFieldLayout.json"; + + private PhotonConfiguration config; + private final File hardwareConfigFile; + private final File hardwareSettingsFile; + private final File networkConfigFile; + private final File camerasFolder; + private final File apriltagFieldLayoutFile; + + final File configDirectoryFile; + + private long saveRequestTimestamp = -1; + private final Thread settingsSaveThread; + + public static void saveUploadedSettingsZip(File uploadPath) { + var folderPath = Path.of(System.getProperty("java.io.tmpdir"), "photonvision").toFile(); + folderPath.mkdirs(); + ZipUtil.unpack(uploadPath, 
folderPath); + FileUtils.deleteDirectory(getRootFolder()); + try { + org.apache.commons.io.FileUtils.copyDirectory(folderPath, getRootFolder().toFile()); + logger.info("Copied settings successfully!"); + } catch (IOException e) { + logger.error("Exception copying uploaded settings!", e); } - - public PhotonConfiguration getConfig() { - return config; + } + + public PhotonConfiguration getConfig() { + return config; + } + + private static Path getRootFolder() { + return Path.of("photonvision_config"); + } + + protected LegacyConfigProvider(Path configDirectoryFile) { + this.configDirectoryFile = new File(configDirectoryFile.toUri()); + this.hardwareConfigFile = + new File(Path.of(configDirectoryFile.toString(), HW_CFG_FNAME).toUri()); + this.hardwareSettingsFile = + new File(Path.of(configDirectoryFile.toString(), HW_SET_FNAME).toUri()); + this.networkConfigFile = + new File(Path.of(configDirectoryFile.toString(), NET_SET_FNAME).toUri()); + this.apriltagFieldLayoutFile = + new File(Path.of(configDirectoryFile.toString(), ATFL_SET_FNAME).toUri()); + this.camerasFolder = new File(Path.of(configDirectoryFile.toString(), "cameras").toUri()); + + settingsSaveThread = new Thread(this::saveAndWriteTask); + settingsSaveThread.start(); + } + + @Override + public void load() { + logger.info("Loading settings..."); + if (!configDirectoryFile.exists()) { + if (configDirectoryFile.mkdirs()) { + logger.debug("Root config folder did not exist. 
Created!"); + } else { + logger.error("Failed to create root config folder!"); + } } - - private static Path getRootFolder() { - return Path.of("photonvision_config"); + if (!configDirectoryFile.canWrite()) { + logger.debug("Making root dir writeable..."); + try { + var success = configDirectoryFile.setWritable(true); + if (success) logger.debug("Set root dir writeable!"); + else logger.error("Could not make root dir writeable!"); + } catch (SecurityException e) { + logger.error("Could not make root dir writeable!", e); + } } - protected LegacyConfigProvider(Path configDirectoryFile) { - this.configDirectoryFile = new File(configDirectoryFile.toUri()); - this.hardwareConfigFile = - new File(Path.of(configDirectoryFile.toString(), HW_CFG_FNAME).toUri()); - this.hardwareSettingsFile = - new File(Path.of(configDirectoryFile.toString(), HW_SET_FNAME).toUri()); - this.networkConfigFile = - new File(Path.of(configDirectoryFile.toString(), NET_SET_FNAME).toUri()); - this.apriltagFieldLayoutFile = - new File(Path.of(configDirectoryFile.toString(), ATFL_SET_FNAME).toUri()); - this.camerasFolder = new File(Path.of(configDirectoryFile.toString(), "cameras").toUri()); - - settingsSaveThread = new Thread(this::saveAndWriteTask); - settingsSaveThread.start(); + HardwareConfig hardwareConfig; + HardwareSettings hardwareSettings; + NetworkConfig networkConfig; + AprilTagFieldLayout atfl = null; + + if (hardwareConfigFile.exists()) { + try { + hardwareConfig = + JacksonUtils.deserialize(hardwareConfigFile.toPath(), HardwareConfig.class); + if (hardwareConfig == null) { + logger.error("Could not deserialize hardware config! Loading defaults"); + hardwareConfig = new HardwareConfig(); + } + } catch (IOException e) { + logger.error("Could not deserialize hardware config! Loading defaults"); + hardwareConfig = new HardwareConfig(); + } + } else { + logger.info("Hardware config does not exist! 
Loading defaults"); + hardwareConfig = new HardwareConfig(); } - @Override - public void load() { - logger.info("Loading settings..."); - if (!configDirectoryFile.exists()) { - if (configDirectoryFile.mkdirs()) { - logger.debug("Root config folder did not exist. Created!"); - } else { - logger.error("Failed to create root config folder!"); - } - } - if (!configDirectoryFile.canWrite()) { - logger.debug("Making root dir writeable..."); - try { - var success = configDirectoryFile.setWritable(true); - if (success) logger.debug("Set root dir writeable!"); - else logger.error("Could not make root dir writeable!"); - } catch (SecurityException e) { - logger.error("Could not make root dir writeable!", e); - } + if (hardwareSettingsFile.exists()) { + try { + hardwareSettings = + JacksonUtils.deserialize(hardwareSettingsFile.toPath(), HardwareSettings.class); + if (hardwareSettings == null) { + logger.error("Could not deserialize hardware settings! Loading defaults"); + hardwareSettings = new HardwareSettings(); } + } catch (IOException e) { + logger.error("Could not deserialize hardware settings! Loading defaults"); + hardwareSettings = new HardwareSettings(); + } + } else { + logger.info("Hardware settings does not exist! Loading defaults"); + hardwareSettings = new HardwareSettings(); + } - HardwareConfig hardwareConfig; - HardwareSettings hardwareSettings; - NetworkConfig networkConfig; - AprilTagFieldLayout atfl = null; - - if (hardwareConfigFile.exists()) { - try { - hardwareConfig = - JacksonUtils.deserialize(hardwareConfigFile.toPath(), HardwareConfig.class); - if (hardwareConfig == null) { - logger.error("Could not deserialize hardware config! Loading defaults"); - hardwareConfig = new HardwareConfig(); - } - } catch (IOException e) { - logger.error("Could not deserialize hardware config! Loading defaults"); - hardwareConfig = new HardwareConfig(); - } - } else { - logger.info("Hardware config does not exist! 
Loading defaults"); - hardwareConfig = new HardwareConfig(); + if (networkConfigFile.exists()) { + try { + networkConfig = JacksonUtils.deserialize(networkConfigFile.toPath(), NetworkConfig.class); + if (networkConfig == null) { + logger.error("Could not deserialize network config! Loading defaults"); + networkConfig = new NetworkConfig(); } + } catch (IOException e) { + logger.error("Could not deserialize network config! Loading defaults"); + networkConfig = new NetworkConfig(); + } + } else { + logger.info("Network config file does not exist! Loading defaults"); + networkConfig = new NetworkConfig(); + } - if (hardwareSettingsFile.exists()) { - try { - hardwareSettings = - JacksonUtils.deserialize(hardwareSettingsFile.toPath(), HardwareSettings.class); - if (hardwareSettings == null) { - logger.error("Could not deserialize hardware settings! Loading defaults"); - hardwareSettings = new HardwareSettings(); - } - } catch (IOException e) { - logger.error("Could not deserialize hardware settings! Loading defaults"); - hardwareSettings = new HardwareSettings(); - } - } else { - logger.info("Hardware settings does not exist! Loading defaults"); - hardwareSettings = new HardwareSettings(); - } + if (!camerasFolder.exists()) { + if (camerasFolder.mkdirs()) { + logger.debug("Cameras config folder did not exist. Created!"); + } else { + logger.error("Failed to create cameras config folder!"); + } + } - if (networkConfigFile.exists()) { - try { - networkConfig = JacksonUtils.deserialize(networkConfigFile.toPath(), NetworkConfig.class); - if (networkConfig == null) { - logger.error("Could not deserialize network config! Loading defaults"); - networkConfig = new NetworkConfig(); - } - } catch (IOException e) { - logger.error("Could not deserialize network config! Loading defaults"); - networkConfig = new NetworkConfig(); - } - } else { - logger.info("Network config file does not exist! 
Loading defaults"); - networkConfig = new NetworkConfig(); + if (apriltagFieldLayoutFile.exists()) { + try { + atfl = + JacksonUtils.deserialize(apriltagFieldLayoutFile.toPath(), AprilTagFieldLayout.class); + if (atfl == null) { + logger.error("Could not deserialize apriltag field layout! (still null)"); } + } catch (IOException e) { + logger.error("Could not deserialize apriltag field layout!", e); + atfl = null; // not required, nice to be explicit + } + } + if (atfl == null) { + logger.info("Loading default apriltags for 2023 field..."); + try { + atfl = AprilTagFields.kDefaultField.loadAprilTagLayoutField(); + } catch (UncheckedIOException e) { + logger.error("Error loading WPILib field", e); + atfl = null; + } + if (atfl == null) { + // what do we even do here lmao -- wpilib built-in should always work + logger.error("Field layout is *still* null??????"); + atfl = new AprilTagFieldLayout(List.of(), 1, 1); + } + } - if (!camerasFolder.exists()) { - if (camerasFolder.mkdirs()) { - logger.debug("Cameras config folder did not exist. Created!"); - } else { - logger.error("Failed to create cameras config folder!"); - } - } + HashMap cameraConfigurations = loadCameraConfigs(); - if (apriltagFieldLayoutFile.exists()) { - try { - atfl = - JacksonUtils.deserialize(apriltagFieldLayoutFile.toPath(), AprilTagFieldLayout.class); - if (atfl == null) { - logger.error("Could not deserialize apriltag field layout! 
(still null)"); - } - } catch (IOException e) { - logger.error("Could not deserialize apriltag field layout!", e); - atfl = null; // not required, nice to be explicit - } - } - if (atfl == null) { - logger.info("Loading default apriltags for 2023 field..."); - try { - atfl = AprilTagFields.kDefaultField.loadAprilTagLayoutField(); - } catch (UncheckedIOException e) { - logger.error("Error loading WPILib field", e); - atfl = null; - } - if (atfl == null) { - // what do we even do here lmao -- wpilib built-in should always work - logger.error("Field layout is *still* null??????"); - atfl = new AprilTagFieldLayout(List.of(), 1, 1); - } - } + this.config = + new PhotonConfiguration( + hardwareConfig, hardwareSettings, networkConfig, atfl, cameraConfigurations); + } - HashMap cameraConfigurations = loadCameraConfigs(); + @Override + public boolean saveToDisk() { + // Delete old configs + FileUtils.deleteDirectory(camerasFolder.toPath()); - this.config = - new PhotonConfiguration( - hardwareConfig, hardwareSettings, networkConfig, atfl, cameraConfigurations); + try { + JacksonUtils.serialize(networkConfigFile.toPath(), config.getNetworkConfig()); + } catch (IOException e) { + logger.error("Could not save network config!", e); + } + try { + JacksonUtils.serialize(hardwareSettingsFile.toPath(), config.getHardwareSettings()); + } catch (IOException e) { + logger.error("Could not save hardware config!", e); } - @Override - public boolean saveToDisk() { - // Delete old configs - FileUtils.deleteDirectory(camerasFolder.toPath()); + // save all of our cameras + var cameraConfigMap = config.getCameraConfigurations(); + for (var subdirName : cameraConfigMap.keySet()) { + var camConfig = cameraConfigMap.get(subdirName); + var subdir = Path.of(camerasFolder.toPath().toString(), subdirName); + + if (!subdir.toFile().exists()) { + // TODO: check for error + subdir.toFile().mkdirs(); + } + + try { + JacksonUtils.serialize(Path.of(subdir.toString(), "config.json"), camConfig); + } catch 
(IOException e) { + logger.error("Could not save config.json for " + subdir, e); + } + + try { + JacksonUtils.serialize( + Path.of(subdir.toString(), "drivermode.json"), camConfig.driveModeSettings); + } catch (IOException e) { + logger.error("Could not save drivermode.json for " + subdir, e); + } + + for (var pipe : camConfig.pipelineSettings) { + var pipePath = Path.of(subdir.toString(), "pipelines", pipe.pipelineNickname + ".json"); + + if (!pipePath.getParent().toFile().exists()) { + // TODO: check for error + pipePath.getParent().toFile().mkdirs(); + } try { - JacksonUtils.serialize(networkConfigFile.toPath(), config.getNetworkConfig()); + JacksonUtils.serialize(pipePath, pipe); } catch (IOException e) { - logger.error("Could not save network config!", e); + logger.error("Could not save " + pipe.pipelineNickname + ".json!", e); } + } + } + logger.info("Settings saved!"); + return false; // TODO, deal with this. Do I need to? + } + + private HashMap loadCameraConfigs() { + HashMap loadedConfigurations = new HashMap<>(); + try { + var subdirectories = + Files.list(camerasFolder.toPath()) + .filter(f -> f.toFile().isDirectory()) + .collect(Collectors.toList()); + + for (var subdir : subdirectories) { + var cameraConfigPath = Path.of(subdir.toString(), "config.json"); + CameraConfiguration loadedConfig = null; try { - JacksonUtils.serialize(hardwareSettingsFile.toPath(), config.getHardwareSettings()); - } catch (IOException e) { - logger.error("Could not save hardware config!", e); + loadedConfig = + JacksonUtils.deserialize( + cameraConfigPath.toAbsolutePath(), CameraConfiguration.class); + } catch (JsonProcessingException e) { + logger.error("Camera config deserialization failed!", e); + e.printStackTrace(); } - - // save all of our cameras - var cameraConfigMap = config.getCameraConfigurations(); - for (var subdirName : cameraConfigMap.keySet()) { - var camConfig = cameraConfigMap.get(subdirName); - var subdir = Path.of(camerasFolder.toPath().toString(), 
subdirName); - - if (!subdir.toFile().exists()) { - // TODO: check for error - subdir.toFile().mkdirs(); - } - - try { - JacksonUtils.serialize(Path.of(subdir.toString(), "config.json"), camConfig); - } catch (IOException e) { - logger.error("Could not save config.json for " + subdir, e); - } - - try { - JacksonUtils.serialize( - Path.of(subdir.toString(), "drivermode.json"), camConfig.driveModeSettings); - } catch (IOException e) { - logger.error("Could not save drivermode.json for " + subdir, e); - } - - for (var pipe : camConfig.pipelineSettings) { - var pipePath = Path.of(subdir.toString(), "pipelines", pipe.pipelineNickname + ".json"); - - if (!pipePath.getParent().toFile().exists()) { - // TODO: check for error - pipePath.getParent().toFile().mkdirs(); - } - - try { - JacksonUtils.serialize(pipePath, pipe); - } catch (IOException e) { - logger.error("Could not save " + pipe.pipelineNickname + ".json!", e); - } - } + if (loadedConfig == null) { // If the file could not be deserialized + logger.warn("Could not load camera " + subdir + "'s config.json! Loading " + "default"); + continue; // TODO how do we later try to load this camera if it gets reconnected? } - logger.info("Settings saved!"); - return false; // TODO, deal with this. Do I need to? 
- } - private HashMap loadCameraConfigs() { - HashMap loadedConfigurations = new HashMap<>(); + // At this point we have only loaded the base stuff + // We still need to deserialize pipelines, as well as + // driver mode settings + var driverModeFile = Path.of(subdir.toString(), "drivermode.json"); + DriverModePipelineSettings driverMode; try { - var subdirectories = - Files.list(camerasFolder.toPath()) - .filter(f -> f.toFile().isDirectory()) - .collect(Collectors.toList()); - - for (var subdir : subdirectories) { - var cameraConfigPath = Path.of(subdir.toString(), "config.json"); - CameraConfiguration loadedConfig = null; - try { - loadedConfig = - JacksonUtils.deserialize( - cameraConfigPath.toAbsolutePath(), CameraConfiguration.class); - } catch (JsonProcessingException e) { - logger.error("Camera config deserialization failed!", e); - e.printStackTrace(); - } - if (loadedConfig == null) { // If the file could not be deserialized - logger.warn("Could not load camera " + subdir + "'s config.json! Loading " + "default"); - continue; // TODO how do we later try to load this camera if it gets reconnected? - } - - // At this point we have only loaded the base stuff - // We still need to deserialize pipelines, as well as - // driver mode settings - var driverModeFile = Path.of(subdir.toString(), "drivermode.json"); - DriverModePipelineSettings driverMode; - try { - driverMode = - JacksonUtils.deserialize( - driverModeFile.toAbsolutePath(), DriverModePipelineSettings.class); - } catch (JsonProcessingException e) { - logger.error("Could not deserialize drivermode.json! Loading defaults"); - logger.debug(Arrays.toString(e.getStackTrace())); - driverMode = new DriverModePipelineSettings(); - } - if (driverMode == null) { - logger.warn( - "Could not load camera " + subdir + "'s drivermode.json! 
Loading" + " default"); - driverMode = new DriverModePipelineSettings(); - } - - // Load pipelines by mapping the files within the pipelines subdir - // to their deserialized equivalents - var pipelineSubdirectory = Path.of(subdir.toString(), "pipelines"); - List settings = - pipelineSubdirectory.toFile().exists() - ? Files.list(pipelineSubdirectory) - .filter(p -> p.toFile().isFile()) - .map( - p -> { - var relativizedFilePath = - configDirectoryFile - .toPath() - .toAbsolutePath() - .relativize(p) - .toString(); - try { - return JacksonUtils.deserialize(p, CVPipelineSettings.class); - } catch (JsonProcessingException e) { - logger.error("Exception while deserializing " + relativizedFilePath, e); - } catch (IOException e) { - logger.warn( - "Could not load pipeline at " - + relativizedFilePath - + "! Skipping..."); - } - return null; - }) - .filter(Objects::nonNull) - .collect(Collectors.toList()) - : Collections.emptyList(); - - loadedConfig.driveModeSettings = driverMode; - loadedConfig.addPipelineSettings(settings); - - loadedConfigurations.put(subdir.toFile().getName(), loadedConfig); - } - } catch (IOException e) { - logger.error("Error loading camera configs!", e); + driverMode = + JacksonUtils.deserialize( + driverModeFile.toAbsolutePath(), DriverModePipelineSettings.class); + } catch (JsonProcessingException e) { + logger.error("Could not deserialize drivermode.json! 
Loading defaults"); + logger.debug(Arrays.toString(e.getStackTrace())); + driverMode = new DriverModePipelineSettings(); } - return loadedConfigurations; - } - - public void addCameraConfigurations(List sources) { - getConfig().addCameraConfigs(sources); - requestSave(); - } - - public void saveModule(CameraConfiguration config, String uniqueName) { - getConfig().addCameraConfig(uniqueName, config); - requestSave(); - } - - public File getSettingsFolderAsZip() { - File out = Path.of(System.getProperty("java.io.tmpdir"), "photonvision-settings.zip").toFile(); - try { - ZipUtil.pack(configDirectoryFile, out); - } catch (Exception e) { - e.printStackTrace(); + if (driverMode == null) { + logger.warn( + "Could not load camera " + subdir + "'s drivermode.json! Loading" + " default"); + driverMode = new DriverModePipelineSettings(); } - return out; - } - public void setNetworkSettings(NetworkConfig networkConfig) { - getConfig().setNetworkConfig(networkConfig); - requestSave(); + // Load pipelines by mapping the files within the pipelines subdir + // to their deserialized equivalents + var pipelineSubdirectory = Path.of(subdir.toString(), "pipelines"); + List settings = + pipelineSubdirectory.toFile().exists() + ? Files.list(pipelineSubdirectory) + .filter(p -> p.toFile().isFile()) + .map( + p -> { + var relativizedFilePath = + configDirectoryFile + .toPath() + .toAbsolutePath() + .relativize(p) + .toString(); + try { + return JacksonUtils.deserialize(p, CVPipelineSettings.class); + } catch (JsonProcessingException e) { + logger.error("Exception while deserializing " + relativizedFilePath, e); + } catch (IOException e) { + logger.warn( + "Could not load pipeline at " + + relativizedFilePath + + "! 
Skipping..."); + } + return null; + }) + .filter(Objects::nonNull) + .collect(Collectors.toList()) + : Collections.emptyList(); + + loadedConfig.driveModeSettings = driverMode; + loadedConfig.addPipelineSettings(settings); + + loadedConfigurations.put(subdir.toFile().getName(), loadedConfig); + } + } catch (IOException e) { + logger.error("Error loading camera configs!", e); } - - public Path getLogsDir() { - return Path.of(configDirectoryFile.toString(), "logs"); - } - - public Path getCalibDir() { - return Path.of(configDirectoryFile.toString(), "calibImgs"); + return loadedConfigurations; + } + + public void addCameraConfigurations(List sources) { + getConfig().addCameraConfigs(sources); + requestSave(); + } + + public void saveModule(CameraConfiguration config, String uniqueName) { + getConfig().addCameraConfig(uniqueName, config); + requestSave(); + } + + public File getSettingsFolderAsZip() { + File out = Path.of(System.getProperty("java.io.tmpdir"), "photonvision-settings.zip").toFile(); + try { + ZipUtil.pack(configDirectoryFile, out); + } catch (Exception e) { + e.printStackTrace(); } - - public static final String LOG_PREFIX = "photonvision-"; - public static final String LOG_EXT = ".log"; - public static final String LOG_DATE_TIME_FORMAT = "yyyy-M-d_hh-mm-ss"; - - public String taToLogFname(TemporalAccessor date) { - var dateString = DateTimeFormatter.ofPattern(LOG_DATE_TIME_FORMAT).format(date); - return LOG_PREFIX + dateString + LOG_EXT; - } - - public Date logFnameToDate(String fname) throws ParseException { - // Strip away known unneeded portions of the log file name - fname = fname.replace(LOG_PREFIX, "").replace(LOG_EXT, ""); - DateFormat format = new SimpleDateFormat(LOG_DATE_TIME_FORMAT); - return format.parse(fname); - } - - public Path getLogPath() { - var logFile = Path.of(this.getLogsDir().toString(), taToLogFname(LocalDateTime.now())).toFile(); - if (!logFile.getParentFile().exists()) logFile.getParentFile().mkdirs(); - return 
logFile.toPath(); - } - - public Path getImageSavePath() { - var imgFilePath = Path.of(configDirectoryFile.toString(), "imgSaves").toFile(); - if (!imgFilePath.exists()) imgFilePath.mkdirs(); - return imgFilePath.toPath(); - } - - public Path getHardwareConfigFile() { - return this.hardwareConfigFile.toPath(); - } - - public Path getHardwareSettingsFile() { - return this.hardwareSettingsFile.toPath(); - } - - public Path getNetworkConfigFile() { - return this.networkConfigFile.toPath(); + return out; + } + + public void setNetworkSettings(NetworkConfig networkConfig) { + getConfig().setNetworkConfig(networkConfig); + requestSave(); + } + + public Path getLogsDir() { + return Path.of(configDirectoryFile.toString(), "logs"); + } + + public Path getCalibDir() { + return Path.of(configDirectoryFile.toString(), "calibImgs"); + } + + public static final String LOG_PREFIX = "photonvision-"; + public static final String LOG_EXT = ".log"; + public static final String LOG_DATE_TIME_FORMAT = "yyyy-M-d_hh-mm-ss"; + + public String taToLogFname(TemporalAccessor date) { + var dateString = DateTimeFormatter.ofPattern(LOG_DATE_TIME_FORMAT).format(date); + return LOG_PREFIX + dateString + LOG_EXT; + } + + public Date logFnameToDate(String fname) throws ParseException { + // Strip away known unneeded portions of the log file name + fname = fname.replace(LOG_PREFIX, "").replace(LOG_EXT, ""); + DateFormat format = new SimpleDateFormat(LOG_DATE_TIME_FORMAT); + return format.parse(fname); + } + + public Path getLogPath() { + var logFile = Path.of(this.getLogsDir().toString(), taToLogFname(LocalDateTime.now())).toFile(); + if (!logFile.getParentFile().exists()) logFile.getParentFile().mkdirs(); + return logFile.toPath(); + } + + public Path getImageSavePath() { + var imgFilePath = Path.of(configDirectoryFile.toString(), "imgSaves").toFile(); + if (!imgFilePath.exists()) imgFilePath.mkdirs(); + return imgFilePath.toPath(); + } + + public Path getHardwareConfigFile() { + return 
this.hardwareConfigFile.toPath(); + } + + public Path getHardwareSettingsFile() { + return this.hardwareSettingsFile.toPath(); + } + + public Path getNetworkConfigFile() { + return this.networkConfigFile.toPath(); + } + + public Path getAprilTagFieldLayoutFile() { + return this.apriltagFieldLayoutFile.toPath(); + } + + @Override + public boolean saveUploadedHardwareConfig(Path uploadPath) { + return FileUtils.replaceFile(uploadPath, this.getHardwareConfigFile()); + } + + @Override + public boolean saveUploadedHardwareSettings(Path uploadPath) { + return FileUtils.replaceFile(uploadPath, this.getHardwareSettingsFile()); + } + + @Override + public boolean saveUploadedNetworkConfig(Path uploadPath) { + return FileUtils.replaceFile(uploadPath, this.getNetworkConfigFile()); + } + + @Override + public boolean saveUploadedAprilTagFieldLayout(Path uploadPath) { + return FileUtils.replaceFile(uploadPath, this.getAprilTagFieldLayoutFile()); + } + + public void requestSave() { + logger.trace("Requesting save..."); + saveRequestTimestamp = System.currentTimeMillis(); + } + + private void saveAndWriteTask() { + // Only save if 1 second has past since the request was made + while (!Thread.currentThread().isInterrupted()) { + if (saveRequestTimestamp > 0 && (System.currentTimeMillis() - saveRequestTimestamp) > 1000L) { + saveRequestTimestamp = -1; + logger.debug("Saving to disk..."); + saveToDisk(); + } + + try { + Thread.sleep(1000); + } catch (InterruptedException e) { + logger.error("Exception waiting for settings semaphore", e); + } } + } - public Path getAprilTagFieldLayoutFile() { - return this.apriltagFieldLayoutFile.toPath(); - } - - @Override - public boolean saveUploadedHardwareConfig(Path uploadPath) { - return FileUtils.replaceFile(uploadPath, this.getHardwareConfigFile()); - } - - @Override - public boolean saveUploadedHardwareSettings(Path uploadPath) { - return FileUtils.replaceFile(uploadPath, this.getHardwareSettingsFile()); - } - - @Override - public boolean 
saveUploadedNetworkConfig(Path uploadPath) { - return FileUtils.replaceFile(uploadPath, this.getNetworkConfigFile()); - } - - @Override - public boolean saveUploadedAprilTagFieldLayout(Path uploadPath) { - return FileUtils.replaceFile(uploadPath, this.getAprilTagFieldLayoutFile()); - } - - public void requestSave() { - logger.trace("Requesting save..."); - saveRequestTimestamp = System.currentTimeMillis(); - } - - private void saveAndWriteTask() { - // Only save if 1 second has past since the request was made - while (!Thread.currentThread().isInterrupted()) { - if (saveRequestTimestamp > 0 && (System.currentTimeMillis() - saveRequestTimestamp) > 1000L) { - saveRequestTimestamp = -1; - logger.debug("Saving to disk..."); - saveToDisk(); - } - - try { - Thread.sleep(1000); - } catch (InterruptedException e) { - logger.error("Exception waiting for settings semaphore", e); - } - } - } - - public void unloadCameraConfigs() { - this.config.getCameraConfigurations().clear(); - } + public void unloadCameraConfigs() { + this.config.getCameraConfigurations().clear(); + } } diff --git a/photon-core/src/main/java/org/photonvision/common/configuration/NetworkConfig.java b/photon-core/src/main/java/org/photonvision/common/configuration/NetworkConfig.java index 36e1efe8e3..4af3317f14 100644 --- a/photon-core/src/main/java/org/photonvision/common/configuration/NetworkConfig.java +++ b/photon-core/src/main/java/org/photonvision/common/configuration/NetworkConfig.java @@ -30,118 +30,118 @@ import org.photonvision.common.util.file.JacksonUtils; public class NetworkConfig { - // Can be an integer team number, or an IP address - public String ntServerAddress = "0"; - public NetworkMode connectionType = NetworkMode.DHCP; - public String staticIp = ""; - public String hostname = "photonvision"; - public boolean runNTServer = false; - public boolean shouldManage; + // Can be an integer team number, or an IP address + public String ntServerAddress = "0"; + public NetworkMode connectionType 
= NetworkMode.DHCP; + public String staticIp = ""; + public String hostname = "photonvision"; + public boolean runNTServer = false; + public boolean shouldManage; - @JsonIgnore public static final String NM_IFACE_STRING = "${interface}"; - @JsonIgnore public static final String NM_IP_STRING = "${ipaddr}"; + @JsonIgnore public static final String NM_IFACE_STRING = "${interface}"; + @JsonIgnore public static final String NM_IP_STRING = "${ipaddr}"; - public String networkManagerIface; - public String setStaticCommand = - "nmcli con mod ${interface} ipv4.addresses ${ipaddr}/8 ipv4.method \"manual\" ipv6.method \"disabled\""; - public String setDHCPcommand = - "nmcli con mod ${interface} ipv4.method \"auto\" ipv6.method \"disabled\""; + public String networkManagerIface; + public String setStaticCommand = + "nmcli con mod ${interface} ipv4.addresses ${ipaddr}/8 ipv4.method \"manual\" ipv6.method \"disabled\""; + public String setDHCPcommand = + "nmcli con mod ${interface} ipv4.method \"auto\" ipv6.method \"disabled\""; - public NetworkConfig() { - if (Platform.isLinux()) { - // Default to the name of the first Ethernet connection. Otherwise, "Wired connection 1" is a - // reasonable guess - this.networkManagerIface = - NetworkUtils.getAllWiredInterfaces().stream() - .map(it -> it.connName) - .findFirst() - .orElse("Wired connection 1"); - } - - // We can (usually) manage networking on Linux devices, and if we can, we should try to. Command - // line inhibitions happen at a level above this class - setShouldManage(deviceCanManageNetwork()); + public NetworkConfig() { + if (Platform.isLinux()) { + // Default to the name of the first Ethernet connection. 
Otherwise, "Wired connection 1" is a + // reasonable guess + this.networkManagerIface = + NetworkUtils.getAllWiredInterfaces().stream() + .map(it -> it.connName) + .findFirst() + .orElse("Wired connection 1"); } - @JsonCreator - public NetworkConfig( - @JsonProperty("ntServerAddress") @JsonAlias({"ntServerAddress", "teamNumber"}) - String ntServerAddress, - @JsonProperty("connectionType") NetworkMode connectionType, - @JsonProperty("staticIp") String staticIp, - @JsonProperty("hostname") String hostname, - @JsonProperty("runNTServer") boolean runNTServer, - @JsonProperty("shouldManage") boolean shouldManage, - @JsonProperty("networkManagerIface") String networkManagerIface, - @JsonProperty("setStaticCommand") String setStaticCommand, - @JsonProperty("setDHCPcommand") String setDHCPcommand) { - this.ntServerAddress = ntServerAddress; - this.connectionType = connectionType; - this.staticIp = staticIp; - this.hostname = hostname; - this.runNTServer = runNTServer; - this.networkManagerIface = networkManagerIface; - this.setStaticCommand = setStaticCommand; - this.setDHCPcommand = setDHCPcommand; - setShouldManage(shouldManage); - } + // We can (usually) manage networking on Linux devices, and if we can, we should try to. 
Command + // line inhibitions happen at a level above this class + setShouldManage(deviceCanManageNetwork()); + } - public Map toHashMap() { - try { - var ret = new ObjectMapper().convertValue(this, JacksonUtils.UIMap.class); - ret.put("canManage", this.deviceCanManageNetwork()); - return ret; - } catch (Exception e) { - e.printStackTrace(); - return new HashMap<>(); - } - } + @JsonCreator + public NetworkConfig( + @JsonProperty("ntServerAddress") @JsonAlias({"ntServerAddress", "teamNumber"}) + String ntServerAddress, + @JsonProperty("connectionType") NetworkMode connectionType, + @JsonProperty("staticIp") String staticIp, + @JsonProperty("hostname") String hostname, + @JsonProperty("runNTServer") boolean runNTServer, + @JsonProperty("shouldManage") boolean shouldManage, + @JsonProperty("networkManagerIface") String networkManagerIface, + @JsonProperty("setStaticCommand") String setStaticCommand, + @JsonProperty("setDHCPcommand") String setDHCPcommand) { + this.ntServerAddress = ntServerAddress; + this.connectionType = connectionType; + this.staticIp = staticIp; + this.hostname = hostname; + this.runNTServer = runNTServer; + this.networkManagerIface = networkManagerIface; + this.setStaticCommand = setStaticCommand; + this.setDHCPcommand = setDHCPcommand; + setShouldManage(shouldManage); + } - @JsonIgnore - public String getPhysicalInterfaceName() { - return NetworkUtils.getNMinfoForConnName(this.networkManagerIface).devName; + public Map toHashMap() { + try { + var ret = new ObjectMapper().convertValue(this, JacksonUtils.UIMap.class); + ret.put("canManage", this.deviceCanManageNetwork()); + return ret; + } catch (Exception e) { + e.printStackTrace(); + return new HashMap<>(); } + } - @JsonIgnore - public String getEscapedInterfaceName() { - return "\"" + networkManagerIface + "\""; - } + @JsonIgnore + public String getPhysicalInterfaceName() { + return NetworkUtils.getNMinfoForConnName(this.networkManagerIface).devName; + } - @JsonIgnore - public boolean 
shouldManage() { - return this.shouldManage; - } + @JsonIgnore + public String getEscapedInterfaceName() { + return "\"" + networkManagerIface + "\""; + } - @JsonIgnore - public void setShouldManage(boolean shouldManage) { - this.shouldManage = shouldManage && this.deviceCanManageNetwork(); - } + @JsonIgnore + public boolean shouldManage() { + return this.shouldManage; + } - @JsonIgnore - private boolean deviceCanManageNetwork() { - return Platform.isLinux(); - } + @JsonIgnore + public void setShouldManage(boolean shouldManage) { + this.shouldManage = shouldManage && this.deviceCanManageNetwork(); + } - @Override - public String toString() { - return "NetworkConfig [serverAddr=" - + ntServerAddress - + ", connectionType=" - + connectionType - + ", staticIp=" - + staticIp - + ", hostname=" - + hostname - + ", runNTServer=" - + runNTServer - + ", networkManagerIface=" - + networkManagerIface - + ", setStaticCommand=" - + setStaticCommand - + ", setDHCPcommand=" - + setDHCPcommand - + ", shouldManage=" - + shouldManage - + "]"; - } + @JsonIgnore + private boolean deviceCanManageNetwork() { + return Platform.isLinux(); + } + + @Override + public String toString() { + return "NetworkConfig [serverAddr=" + + ntServerAddress + + ", connectionType=" + + connectionType + + ", staticIp=" + + staticIp + + ", hostname=" + + hostname + + ", runNTServer=" + + runNTServer + + ", networkManagerIface=" + + networkManagerIface + + ", setStaticCommand=" + + setStaticCommand + + ", setDHCPcommand=" + + setDHCPcommand + + ", shouldManage=" + + shouldManage + + "]"; + } } diff --git a/photon-core/src/main/java/org/photonvision/common/configuration/PhotonConfiguration.java b/photon-core/src/main/java/org/photonvision/common/configuration/PhotonConfiguration.java index fac672636c..a823ada5fb 100644 --- a/photon-core/src/main/java/org/photonvision/common/configuration/PhotonConfiguration.java +++ b/photon-core/src/main/java/org/photonvision/common/configuration/PhotonConfiguration.java @@ 
-33,146 +33,146 @@ import org.photonvision.vision.processes.VisionSource; public class PhotonConfiguration { - private final HardwareConfig hardwareConfig; - private final HardwareSettings hardwareSettings; - private NetworkConfig networkConfig; - private AprilTagFieldLayout atfl; - private HashMap cameraConfigurations; - - public PhotonConfiguration( - HardwareConfig hardwareConfig, - HardwareSettings hardwareSettings, - NetworkConfig networkConfig, - AprilTagFieldLayout atfl) { - this(hardwareConfig, hardwareSettings, networkConfig, atfl, new HashMap<>()); - } - - public PhotonConfiguration( - HardwareConfig hardwareConfig, - HardwareSettings hardwareSettings, - NetworkConfig networkConfig, - AprilTagFieldLayout atfl, - HashMap cameraConfigurations) { - this.hardwareConfig = hardwareConfig; - this.hardwareSettings = hardwareSettings; - this.networkConfig = networkConfig; - this.cameraConfigurations = cameraConfigurations; - this.atfl = atfl; - } - - public PhotonConfiguration() { - this( - new HardwareConfig(), - new HardwareSettings(), - new NetworkConfig(), - new AprilTagFieldLayout(List.of(), 0, 0)); - } - - public HardwareConfig getHardwareConfig() { - return hardwareConfig; - } - - public NetworkConfig getNetworkConfig() { - return networkConfig; - } - - public HardwareSettings getHardwareSettings() { - return hardwareSettings; - } - - public AprilTagFieldLayout getApriltagFieldLayout() { - return atfl; - } - - public void setApriltagFieldLayout(AprilTagFieldLayout atfl) { - this.atfl = atfl; - } - - public void setNetworkConfig(NetworkConfig networkConfig) { - this.networkConfig = networkConfig; - } - - public HashMap getCameraConfigurations() { - return cameraConfigurations; - } - - public void addCameraConfigs(Collection sources) { - for (var s : sources) { - addCameraConfig(s.getCameraConfiguration()); - } - } - - public void addCameraConfig(CameraConfiguration config) { - addCameraConfig(config.uniqueName, config); - } - - public void 
addCameraConfig(String name, CameraConfiguration config) { - cameraConfigurations.put(name, config); - } - - public Map toHashMap() { - Map map = new HashMap<>(); - var settingsSubmap = new HashMap(); - - // Hack active interfaces into networkSettings - var netConfigMap = networkConfig.toHashMap(); - netConfigMap.put("networkInterfaceNames", NetworkUtils.getAllWiredInterfaces()); - - settingsSubmap.put("networkSettings", netConfigMap); - - map.put( - "cameraSettings", - VisionModuleManager.getInstance().getModules().stream() - .map(VisionModule::toUICameraConfig) - .map(SerializationUtils::objectToHashMap) - .collect(Collectors.toList())); - - var lightingConfig = new UILightingConfig(); - lightingConfig.brightness = hardwareSettings.ledBrightnessPercentage; - lightingConfig.supported = !hardwareConfig.ledPins.isEmpty(); - settingsSubmap.put("lighting", SerializationUtils.objectToHashMap(lightingConfig)); - // General Settings - var generalSubmap = new HashMap(); - generalSubmap.put("version", PhotonVersion.versionString); - generalSubmap.put( - "gpuAcceleration", - LibCameraJNI.isSupported() - ? 
"Zerocopy Libcamera on " + LibCameraJNI.getSensorModel().getFriendlyName() - : ""); // TODO add support for other types of GPU accel - generalSubmap.put("hardwareModel", hardwareConfig.deviceName); - generalSubmap.put("hardwarePlatform", Platform.getPlatformName()); - settingsSubmap.put("general", generalSubmap); - // AprilTagFieldLayout - settingsSubmap.put("atfl", this.atfl); - - map.put( - "cameraSettings", - VisionModuleManager.getInstance().getModules().stream() - .map(VisionModule::toUICameraConfig) - .map(SerializationUtils::objectToHashMap) - .collect(Collectors.toList())); - map.put("settings", settingsSubmap); - - return map; - } - - public static class UILightingConfig { - public int brightness = 0; - public boolean supported = true; - } - - public static class UICameraConfiguration { - @SuppressWarnings("unused") - public double fov; - - public String nickname; - public HashMap currentPipelineSettings; - public int currentPipelineIndex; - public List pipelineNicknames; - public HashMap> videoFormatList; - public int outputStreamPort; - public int inputStreamPort; - public List> calibrations; - public boolean isFovConfigurable = true; - } + private final HardwareConfig hardwareConfig; + private final HardwareSettings hardwareSettings; + private NetworkConfig networkConfig; + private AprilTagFieldLayout atfl; + private HashMap cameraConfigurations; + + public PhotonConfiguration( + HardwareConfig hardwareConfig, + HardwareSettings hardwareSettings, + NetworkConfig networkConfig, + AprilTagFieldLayout atfl) { + this(hardwareConfig, hardwareSettings, networkConfig, atfl, new HashMap<>()); + } + + public PhotonConfiguration( + HardwareConfig hardwareConfig, + HardwareSettings hardwareSettings, + NetworkConfig networkConfig, + AprilTagFieldLayout atfl, + HashMap cameraConfigurations) { + this.hardwareConfig = hardwareConfig; + this.hardwareSettings = hardwareSettings; + this.networkConfig = networkConfig; + this.cameraConfigurations = cameraConfigurations; + 
this.atfl = atfl; + } + + public PhotonConfiguration() { + this( + new HardwareConfig(), + new HardwareSettings(), + new NetworkConfig(), + new AprilTagFieldLayout(List.of(), 0, 0)); + } + + public HardwareConfig getHardwareConfig() { + return hardwareConfig; + } + + public NetworkConfig getNetworkConfig() { + return networkConfig; + } + + public HardwareSettings getHardwareSettings() { + return hardwareSettings; + } + + public AprilTagFieldLayout getApriltagFieldLayout() { + return atfl; + } + + public void setApriltagFieldLayout(AprilTagFieldLayout atfl) { + this.atfl = atfl; + } + + public void setNetworkConfig(NetworkConfig networkConfig) { + this.networkConfig = networkConfig; + } + + public HashMap getCameraConfigurations() { + return cameraConfigurations; + } + + public void addCameraConfigs(Collection sources) { + for (var s : sources) { + addCameraConfig(s.getCameraConfiguration()); + } + } + + public void addCameraConfig(CameraConfiguration config) { + addCameraConfig(config.uniqueName, config); + } + + public void addCameraConfig(String name, CameraConfiguration config) { + cameraConfigurations.put(name, config); + } + + public Map toHashMap() { + Map map = new HashMap<>(); + var settingsSubmap = new HashMap(); + + // Hack active interfaces into networkSettings + var netConfigMap = networkConfig.toHashMap(); + netConfigMap.put("networkInterfaceNames", NetworkUtils.getAllWiredInterfaces()); + + settingsSubmap.put("networkSettings", netConfigMap); + + map.put( + "cameraSettings", + VisionModuleManager.getInstance().getModules().stream() + .map(VisionModule::toUICameraConfig) + .map(SerializationUtils::objectToHashMap) + .collect(Collectors.toList())); + + var lightingConfig = new UILightingConfig(); + lightingConfig.brightness = hardwareSettings.ledBrightnessPercentage; + lightingConfig.supported = !hardwareConfig.ledPins.isEmpty(); + settingsSubmap.put("lighting", SerializationUtils.objectToHashMap(lightingConfig)); + // General Settings + var 
generalSubmap = new HashMap(); + generalSubmap.put("version", PhotonVersion.versionString); + generalSubmap.put( + "gpuAcceleration", + LibCameraJNI.isSupported() + ? "Zerocopy Libcamera on " + LibCameraJNI.getSensorModel().getFriendlyName() + : ""); // TODO add support for other types of GPU accel + generalSubmap.put("hardwareModel", hardwareConfig.deviceName); + generalSubmap.put("hardwarePlatform", Platform.getPlatformName()); + settingsSubmap.put("general", generalSubmap); + // AprilTagFieldLayout + settingsSubmap.put("atfl", this.atfl); + + map.put( + "cameraSettings", + VisionModuleManager.getInstance().getModules().stream() + .map(VisionModule::toUICameraConfig) + .map(SerializationUtils::objectToHashMap) + .collect(Collectors.toList())); + map.put("settings", settingsSubmap); + + return map; + } + + public static class UILightingConfig { + public int brightness = 0; + public boolean supported = true; + } + + public static class UICameraConfiguration { + @SuppressWarnings("unused") + public double fov; + + public String nickname; + public HashMap currentPipelineSettings; + public int currentPipelineIndex; + public List pipelineNicknames; + public HashMap> videoFormatList; + public int outputStreamPort; + public int inputStreamPort; + public List> calibrations; + public boolean isFovConfigurable = true; + } } diff --git a/photon-core/src/main/java/org/photonvision/common/configuration/SqlConfigProvider.java b/photon-core/src/main/java/org/photonvision/common/configuration/SqlConfigProvider.java index 8a0c755ad8..3b9253e088 100644 --- a/photon-core/src/main/java/org/photonvision/common/configuration/SqlConfigProvider.java +++ b/photon-core/src/main/java/org/photonvision/common/configuration/SqlConfigProvider.java @@ -45,463 +45,463 @@ *

Global has one row per global config file (like hardware settings and network settings) */ public class SqlConfigProvider extends ConfigProvider { - private final Logger logger = new Logger(SqlConfigProvider.class, LogGroup.Config); - - static class TableKeys { - static final String CAM_UNIQUE_NAME = "unique_name"; - static final String CONFIG_JSON = "config_json"; - static final String DRIVERMODE_JSON = "drivermode_json"; - static final String PIPELINE_JSONS = "pipeline_jsons"; - - static final String NETWORK_CONFIG = "networkConfig"; - static final String HARDWARE_CONFIG = "hardwareConfig"; - static final String HARDWARE_SETTINGS = "hardwareSettings"; - static final String ATFL_CONFIG_FILE = "apriltagFieldLayout"; + private final Logger logger = new Logger(SqlConfigProvider.class, LogGroup.Config); + + static class TableKeys { + static final String CAM_UNIQUE_NAME = "unique_name"; + static final String CONFIG_JSON = "config_json"; + static final String DRIVERMODE_JSON = "drivermode_json"; + static final String PIPELINE_JSONS = "pipeline_jsons"; + + static final String NETWORK_CONFIG = "networkConfig"; + static final String HARDWARE_CONFIG = "hardwareConfig"; + static final String HARDWARE_SETTINGS = "hardwareSettings"; + static final String ATFL_CONFIG_FILE = "apriltagFieldLayout"; + } + + private static final String dbName = "photon.sqlite"; + private final String dbPath; + + private final Object m_mutex = new Object(); + private final File rootFolder; + + public SqlConfigProvider(Path rootFolder) { + this.rootFolder = rootFolder.toFile(); + dbPath = Path.of(rootFolder.toString(), dbName).toAbsolutePath().toString(); + logger.debug("Using database " + dbPath); + initDatabase(); + } + + public PhotonConfiguration getConfig() { + if (config == null) { + logger.warn("CONFIG IS NULL!"); } - - private static final String dbName = "photon.sqlite"; - private final String dbPath; - - private final Object m_mutex = new Object(); - private final File rootFolder; - - 
public SqlConfigProvider(Path rootFolder) { - this.rootFolder = rootFolder.toFile(); - dbPath = Path.of(rootFolder.toString(), dbName).toAbsolutePath().toString(); - logger.debug("Using database " + dbPath); - initDatabase(); + return config; + } + + private Connection createConn() { + String url = "jdbc:sqlite:" + dbPath; + + try { + var conn = DriverManager.getConnection(url); + conn.setAutoCommit(false); + return conn; + } catch (SQLException e) { + logger.error("Error creating connection", e); + return null; } - - public PhotonConfiguration getConfig() { - if (config == null) { - logger.warn("CONFIG IS NULL!"); - } - return config; + } + + private void tryCommit(Connection conn) { + try { + conn.commit(); + } catch (SQLException e) { + logger.error("Err committing changes: ", e); + try { + conn.rollback(); + } catch (SQLException e1) { + logger.error("Err rolling back changes: ", e); + } } + } - private Connection createConn() { - String url = "jdbc:sqlite:" + dbPath; + private void initDatabase() { + // Make sure root dir exists - try { - var conn = DriverManager.getConnection(url); - conn.setAutoCommit(false); - return conn; - } catch (SQLException e) { - logger.error("Error creating connection", e); - return null; - } + if (!rootFolder.exists()) { + if (rootFolder.mkdirs()) { + logger.debug("Root config folder did not exist. Created!"); + } else { + logger.error("Failed to create root config folder!"); + } } - private void tryCommit(Connection conn) { - try { - conn.commit(); - } catch (SQLException e) { - logger.error("Err committing changes: ", e); - try { - conn.rollback(); - } catch (SQLException e1) { - logger.error("Err rolling back changes: ", e); - } - } + Connection conn = null; + Statement createGlobalTableStatement = null, createCameraTableStatement = null; + try { + conn = createConn(); + if (conn == null) { + logger.error("No connection, cannot init db"); + return; + } + + // Create global settings table. 
Just a dumb table with list of jsons and their + // name + try { + createGlobalTableStatement = conn.createStatement(); + String sql = + "CREATE TABLE IF NOT EXISTS global (\n" + + " filename TINYTEXT PRIMARY KEY,\n" + + " contents mediumtext NOT NULL\n" + + ");"; + createGlobalTableStatement.execute(sql); + } catch (SQLException e) { + logger.error("Err creating global table", e); + } + + // Create cameras table, key is the camera unique name + try { + createCameraTableStatement = conn.createStatement(); + var sql = + "CREATE TABLE IF NOT EXISTS cameras (\n" + + " unique_name TINYTEXT PRIMARY KEY,\n" + + " config_json text NOT NULL,\n" + + " drivermode_json text NOT NULL,\n" + + " pipeline_jsons mediumtext NOT NULL\n" + + ");"; + createCameraTableStatement.execute(sql); + } catch (SQLException e) { + logger.error("Err creating cameras table", e); + } + + this.tryCommit(conn); + } finally { + try { + if (createGlobalTableStatement != null) createGlobalTableStatement.close(); + if (createCameraTableStatement != null) createCameraTableStatement.close(); + if (conn != null) conn.close(); + } catch (SQLException e) { + e.printStackTrace(); + } } - - private void initDatabase() { - // Make sure root dir exists - - if (!rootFolder.exists()) { - if (rootFolder.mkdirs()) { - logger.debug("Root config folder did not exist. Created!"); - } else { - logger.error("Failed to create root config folder!"); - } - } - - Connection conn = null; - Statement createGlobalTableStatement = null, createCameraTableStatement = null; - try { - conn = createConn(); - if (conn == null) { - logger.error("No connection, cannot init db"); - return; - } - - // Create global settings table. 
Just a dumb table with list of jsons and their - // name - try { - createGlobalTableStatement = conn.createStatement(); - String sql = - "CREATE TABLE IF NOT EXISTS global (\n" - + " filename TINYTEXT PRIMARY KEY,\n" - + " contents mediumtext NOT NULL\n" - + ");"; - createGlobalTableStatement.execute(sql); - } catch (SQLException e) { - logger.error("Err creating global table", e); - } - - // Create cameras table, key is the camera unique name - try { - createCameraTableStatement = conn.createStatement(); - var sql = - "CREATE TABLE IF NOT EXISTS cameras (\n" - + " unique_name TINYTEXT PRIMARY KEY,\n" - + " config_json text NOT NULL,\n" - + " drivermode_json text NOT NULL,\n" - + " pipeline_jsons mediumtext NOT NULL\n" - + ");"; - createCameraTableStatement.execute(sql); - } catch (SQLException e) { - logger.error("Err creating cameras table", e); - } - - this.tryCommit(conn); - } finally { - try { - if (createGlobalTableStatement != null) createGlobalTableStatement.close(); - if (createCameraTableStatement != null) createCameraTableStatement.close(); - if (conn != null) conn.close(); - } catch (SQLException e) { - e.printStackTrace(); - } - } - } - - @Override - public boolean saveToDisk() { - logger.debug("Saving to disk"); - var conn = createConn(); - if (conn == null) return false; - - synchronized (m_mutex) { - if (config == null) { - logger.error("Config null! Cannot save"); - return false; - } - - saveCameras(conn); - saveGlobal(conn); - tryCommit(conn); - - try { - conn.close(); - } catch (SQLException e) { - // TODO, does the file still save if the SQL connection isn't closed correctly? If so, - // return false here. 
- logger.error("SQL Err closing connection while saving to disk: ", e); - } - } - - logger.info("Settings saved!"); - return true; - } - - @Override - public void load() { - logger.debug("Loading config..."); - var conn = createConn(); - if (conn == null) return; - - synchronized (m_mutex) { - HardwareConfig hardwareConfig; - HardwareSettings hardwareSettings; - NetworkConfig networkConfig; - AprilTagFieldLayout atfl; - - try { - hardwareConfig = - JacksonUtils.deserialize( - getOneConfigFile(conn, TableKeys.HARDWARE_CONFIG), HardwareConfig.class); - } catch (IOException e) { - logger.error("Could not deserialize hardware config! Loading defaults"); - hardwareConfig = new HardwareConfig(); - } - - try { - hardwareSettings = - JacksonUtils.deserialize( - getOneConfigFile(conn, TableKeys.HARDWARE_SETTINGS), HardwareSettings.class); - } catch (IOException e) { - logger.error("Could not deserialize hardware settings! Loading defaults"); - hardwareSettings = new HardwareSettings(); - } - - try { - networkConfig = - JacksonUtils.deserialize( - getOneConfigFile(conn, TableKeys.NETWORK_CONFIG), NetworkConfig.class); - } catch (IOException e) { - logger.error("Could not deserialize network config! Loading defaults"); - networkConfig = new NetworkConfig(); - } - - try { - atfl = - JacksonUtils.deserialize( - getOneConfigFile(conn, TableKeys.ATFL_CONFIG_FILE), AprilTagFieldLayout.class); - } catch (IOException e) { - logger.error("Could not deserialize apriltag layout! 
Loading defaults"); - try { - atfl = AprilTagFields.kDefaultField.loadAprilTagLayoutField(); - } catch (UncheckedIOException e2) { - logger.error("Error loading WPILib field", e); - atfl = null; - } - if (atfl == null) { - // what do we even do here lmao -- wpilib should always work - logger.error("Field layout is *still* null??????"); - atfl = new AprilTagFieldLayout(List.of(), 1, 1); - } - } - - var cams = loadCameraConfigs(conn); - - try { - conn.close(); - } catch (SQLException e) { - logger.error("SQL Err closing connection while loading: ", e); - } - - this.config = - new PhotonConfiguration(hardwareConfig, hardwareSettings, networkConfig, atfl, cams); - } + } + + @Override + public boolean saveToDisk() { + logger.debug("Saving to disk"); + var conn = createConn(); + if (conn == null) return false; + + synchronized (m_mutex) { + if (config == null) { + logger.error("Config null! Cannot save"); + return false; + } + + saveCameras(conn); + saveGlobal(conn); + tryCommit(conn); + + try { + conn.close(); + } catch (SQLException e) { + // TODO, does the file still save if the SQL connection isn't closed correctly? If so, + // return false here. + logger.error("SQL Err closing connection while saving to disk: ", e); + } } - private String getOneConfigFile(Connection conn, String filename) { - // Query every single row of the global settings db - PreparedStatement query = null; + logger.info("Settings saved!"); + return true; + } + + @Override + public void load() { + logger.debug("Loading config..."); + var conn = createConn(); + if (conn == null) return; + + synchronized (m_mutex) { + HardwareConfig hardwareConfig; + HardwareSettings hardwareSettings; + NetworkConfig networkConfig; + AprilTagFieldLayout atfl; + + try { + hardwareConfig = + JacksonUtils.deserialize( + getOneConfigFile(conn, TableKeys.HARDWARE_CONFIG), HardwareConfig.class); + } catch (IOException e) { + logger.error("Could not deserialize hardware config! 
Loading defaults"); + hardwareConfig = new HardwareConfig(); + } + + try { + hardwareSettings = + JacksonUtils.deserialize( + getOneConfigFile(conn, TableKeys.HARDWARE_SETTINGS), HardwareSettings.class); + } catch (IOException e) { + logger.error("Could not deserialize hardware settings! Loading defaults"); + hardwareSettings = new HardwareSettings(); + } + + try { + networkConfig = + JacksonUtils.deserialize( + getOneConfigFile(conn, TableKeys.NETWORK_CONFIG), NetworkConfig.class); + } catch (IOException e) { + logger.error("Could not deserialize network config! Loading defaults"); + networkConfig = new NetworkConfig(); + } + + try { + atfl = + JacksonUtils.deserialize( + getOneConfigFile(conn, TableKeys.ATFL_CONFIG_FILE), AprilTagFieldLayout.class); + } catch (IOException e) { + logger.error("Could not deserialize apriltag layout! Loading defaults"); try { - query = - conn.prepareStatement("SELECT contents FROM global where filename=\"" + filename + "\""); - - var result = query.executeQuery(); - - while (result.next()) { - return result.getString("contents"); - } - } catch (SQLException e) { - logger.error("SQL Err getting file " + filename, e); - } finally { - try { - if (query != null) query.close(); - } catch (SQLException e) { - logger.error("SQL Err closing config file query " + filename, e); - } + atfl = AprilTagFields.kDefaultField.loadAprilTagLayoutField(); + } catch (UncheckedIOException e2) { + logger.error("Error loading WPILib field", e); + atfl = null; } - - return ""; - } - - private void saveCameras(Connection conn) { - try { - // Replace this camera's row with the new settings - var sqlString = - "REPLACE INTO cameras (unique_name, config_json, drivermode_json, pipeline_jsons) VALUES " - + "(?,?,?,?);"; - - for (var c : config.getCameraConfigurations().entrySet()) { - PreparedStatement statement = conn.prepareStatement(sqlString); - - var config = c.getValue(); - statement.setString(1, c.getKey()); - statement.setString(2, 
JacksonUtils.serializeToString(config)); - statement.setString(3, JacksonUtils.serializeToString(config.driveModeSettings)); - - // Serializing a list of abstract classes sucks. Instead, make it into an array - // of strings, which we can later unpack back into individual settings - List settings = - config.pipelineSettings.stream() - .map( - it -> { - try { - return JacksonUtils.serializeToString(it); - } catch (IOException e) { - e.printStackTrace(); - return null; - } - }) - .filter(Objects::nonNull) - .collect(Collectors.toList()); - statement.setString(4, JacksonUtils.serializeToString(settings)); - - statement.executeUpdate(); - } - } catch (SQLException | IOException e) { - logger.error("Err saving cameras", e); - try { - conn.rollback(); - } catch (SQLException e1) { - logger.error("Err rolling back changes: ", e); - } + if (atfl == null) { + // what do we even do here lmao -- wpilib should always work + logger.error("Field layout is *still* null??????"); + atfl = new AprilTagFieldLayout(List.of(), 1, 1); } - } + } - private void addFile(PreparedStatement ps, String key, String value) throws SQLException { - ps.setString(1, key); - ps.setString(2, value); - } - - private void saveGlobal(Connection conn) { - PreparedStatement statement1 = null; - PreparedStatement statement2 = null; - PreparedStatement statement3 = null; - try { - // Replace this camera's row with the new settings - var sqlString = "REPLACE INTO global (filename, contents) VALUES " + "(?,?);"; - - statement1 = conn.prepareStatement(sqlString); - addFile( - statement1, - TableKeys.HARDWARE_SETTINGS, - JacksonUtils.serializeToString(config.getHardwareSettings())); - statement1.executeUpdate(); - - statement2 = conn.prepareStatement(sqlString); - addFile( - statement2, - TableKeys.NETWORK_CONFIG, - JacksonUtils.serializeToString(config.getNetworkConfig())); - statement2.executeUpdate(); - statement2.close(); - - statement3 = conn.prepareStatement(sqlString); - addFile( - statement3, - 
TableKeys.HARDWARE_CONFIG, - JacksonUtils.serializeToString(config.getHardwareConfig())); - statement3.executeUpdate(); - statement3.close(); - - } catch (SQLException | IOException e) { - logger.error("Err saving global", e); - try { - conn.rollback(); - } catch (SQLException e1) { - logger.error("Err rolling back changes: ", e); - } - } finally { - try { - if (statement1 != null) statement1.close(); - if (statement2 != null) statement2.close(); - if (statement3 != null) statement3.close(); - } catch (SQLException e) { - logger.error("SQL Err closing global settings query ", e); - } - } - } + var cams = loadCameraConfigs(conn); - private boolean saveOneFile(String fname, Path path) { - Connection conn = null; - PreparedStatement statement1 = null; + try { + conn.close(); + } catch (SQLException e) { + logger.error("SQL Err closing connection while loading: ", e); + } - try { - conn = createConn(); - if (conn == null) { - return false; - } - - // Replace this camera's row with the new settings - var sqlString = "REPLACE INTO global (filename, contents) VALUES " + "(?,?);"; - - statement1 = conn.prepareStatement(sqlString); - addFile(statement1, fname, Files.readString(path)); - statement1.executeUpdate(); - - conn.commit(); - return true; - } catch (SQLException | IOException e) { - logger.error("Error while saving file to global: ", e); - try { - conn.rollback(); - } catch (SQLException e1) { - logger.error("Error rolling back changes: ", e); - } - return false; - } finally { - try { - if (statement1 != null) statement1.close(); - conn.close(); - } catch (SQLException e) { - logger.error("SQL Error saving file " + fname, e); - } - } + this.config = + new PhotonConfiguration(hardwareConfig, hardwareSettings, networkConfig, atfl, cams); } - - @Override - public boolean saveUploadedHardwareConfig(Path uploadPath) { - return saveOneFile(TableKeys.HARDWARE_CONFIG, uploadPath); + } + + private String getOneConfigFile(Connection conn, String filename) { + // Query every 
single row of the global settings db + PreparedStatement query = null; + try { + query = + conn.prepareStatement("SELECT contents FROM global where filename=\"" + filename + "\""); + + var result = query.executeQuery(); + + while (result.next()) { + return result.getString("contents"); + } + } catch (SQLException e) { + logger.error("SQL Err getting file " + filename, e); + } finally { + try { + if (query != null) query.close(); + } catch (SQLException e) { + logger.error("SQL Err closing config file query " + filename, e); + } } - @Override - public boolean saveUploadedHardwareSettings(Path uploadPath) { - return saveOneFile(TableKeys.HARDWARE_SETTINGS, uploadPath); + return ""; + } + + private void saveCameras(Connection conn) { + try { + // Replace this camera's row with the new settings + var sqlString = + "REPLACE INTO cameras (unique_name, config_json, drivermode_json, pipeline_jsons) VALUES " + + "(?,?,?,?);"; + + for (var c : config.getCameraConfigurations().entrySet()) { + PreparedStatement statement = conn.prepareStatement(sqlString); + + var config = c.getValue(); + statement.setString(1, c.getKey()); + statement.setString(2, JacksonUtils.serializeToString(config)); + statement.setString(3, JacksonUtils.serializeToString(config.driveModeSettings)); + + // Serializing a list of abstract classes sucks. 
Instead, make it into an array + // of strings, which we can later unpack back into individual settings + List settings = + config.pipelineSettings.stream() + .map( + it -> { + try { + return JacksonUtils.serializeToString(it); + } catch (IOException e) { + e.printStackTrace(); + return null; + } + }) + .filter(Objects::nonNull) + .collect(Collectors.toList()); + statement.setString(4, JacksonUtils.serializeToString(settings)); + + statement.executeUpdate(); + } + } catch (SQLException | IOException e) { + logger.error("Err saving cameras", e); + try { + conn.rollback(); + } catch (SQLException e1) { + logger.error("Err rolling back changes: ", e); + } } - - @Override - public boolean saveUploadedNetworkConfig(Path uploadPath) { - return saveOneFile(TableKeys.NETWORK_CONFIG, uploadPath); + } + + private void addFile(PreparedStatement ps, String key, String value) throws SQLException { + ps.setString(1, key); + ps.setString(2, value); + } + + private void saveGlobal(Connection conn) { + PreparedStatement statement1 = null; + PreparedStatement statement2 = null; + PreparedStatement statement3 = null; + try { + // Replace this camera's row with the new settings + var sqlString = "REPLACE INTO global (filename, contents) VALUES " + "(?,?);"; + + statement1 = conn.prepareStatement(sqlString); + addFile( + statement1, + TableKeys.HARDWARE_SETTINGS, + JacksonUtils.serializeToString(config.getHardwareSettings())); + statement1.executeUpdate(); + + statement2 = conn.prepareStatement(sqlString); + addFile( + statement2, + TableKeys.NETWORK_CONFIG, + JacksonUtils.serializeToString(config.getNetworkConfig())); + statement2.executeUpdate(); + statement2.close(); + + statement3 = conn.prepareStatement(sqlString); + addFile( + statement3, + TableKeys.HARDWARE_CONFIG, + JacksonUtils.serializeToString(config.getHardwareConfig())); + statement3.executeUpdate(); + statement3.close(); + + } catch (SQLException | IOException e) { + logger.error("Err saving global", e); + try { + 
conn.rollback(); + } catch (SQLException e1) { + logger.error("Err rolling back changes: ", e); + } + } finally { + try { + if (statement1 != null) statement1.close(); + if (statement2 != null) statement2.close(); + if (statement3 != null) statement3.close(); + } catch (SQLException e) { + logger.error("SQL Err closing global settings query ", e); + } } - - @Override - public boolean saveUploadedAprilTagFieldLayout(Path uploadPath) { - return saveOneFile(TableKeys.ATFL_CONFIG_FILE, uploadPath); + } + + private boolean saveOneFile(String fname, Path path) { + Connection conn = null; + PreparedStatement statement1 = null; + + try { + conn = createConn(); + if (conn == null) { + return false; + } + + // Replace this camera's row with the new settings + var sqlString = "REPLACE INTO global (filename, contents) VALUES " + "(?,?);"; + + statement1 = conn.prepareStatement(sqlString); + addFile(statement1, fname, Files.readString(path)); + statement1.executeUpdate(); + + conn.commit(); + return true; + } catch (SQLException | IOException e) { + logger.error("Error while saving file to global: ", e); + try { + conn.rollback(); + } catch (SQLException e1) { + logger.error("Error rolling back changes: ", e); + } + return false; + } finally { + try { + if (statement1 != null) statement1.close(); + conn.close(); + } catch (SQLException e) { + logger.error("SQL Error saving file " + fname, e); + } } - - private HashMap loadCameraConfigs(Connection conn) { - HashMap loadedConfigurations = new HashMap<>(); - - // Query every single row of the cameras db - PreparedStatement query = null; - try { - query = - conn.prepareStatement( - String.format( - "SELECT %s, %s, %s, %s FROM cameras", - TableKeys.CAM_UNIQUE_NAME, - TableKeys.CONFIG_JSON, - TableKeys.DRIVERMODE_JSON, - TableKeys.PIPELINE_JSONS)); - - var result = query.executeQuery(); - - // Iterate over every row/"camera" in the table - while (result.next()) { - List dummyList = new ArrayList<>(); - - var uniqueName = 
result.getString(TableKeys.CAM_UNIQUE_NAME); - var config = - JacksonUtils.deserialize( - result.getString(TableKeys.CONFIG_JSON), CameraConfiguration.class); - var driverMode = - JacksonUtils.deserialize( - result.getString(TableKeys.DRIVERMODE_JSON), DriverModePipelineSettings.class); - List pipelineSettings = - JacksonUtils.deserialize( - result.getString(TableKeys.PIPELINE_JSONS), dummyList.getClass()); - - List loadedSettings = new ArrayList<>(); - for (var str : pipelineSettings) { - if (str instanceof String) { - loadedSettings.add(JacksonUtils.deserialize((String) str, CVPipelineSettings.class)); - } - } - - config.pipelineSettings = loadedSettings; - config.driveModeSettings = driverMode; - loadedConfigurations.put(uniqueName, config); - } - } catch (SQLException | IOException e) { - logger.error("Err loading cameras: ", e); - } finally { - try { - if (query != null) query.close(); - } catch (SQLException e) { - logger.error("SQL Err closing connection while loading cameras ", e); - } + } + + @Override + public boolean saveUploadedHardwareConfig(Path uploadPath) { + return saveOneFile(TableKeys.HARDWARE_CONFIG, uploadPath); + } + + @Override + public boolean saveUploadedHardwareSettings(Path uploadPath) { + return saveOneFile(TableKeys.HARDWARE_SETTINGS, uploadPath); + } + + @Override + public boolean saveUploadedNetworkConfig(Path uploadPath) { + return saveOneFile(TableKeys.NETWORK_CONFIG, uploadPath); + } + + @Override + public boolean saveUploadedAprilTagFieldLayout(Path uploadPath) { + return saveOneFile(TableKeys.ATFL_CONFIG_FILE, uploadPath); + } + + private HashMap loadCameraConfigs(Connection conn) { + HashMap loadedConfigurations = new HashMap<>(); + + // Query every single row of the cameras db + PreparedStatement query = null; + try { + query = + conn.prepareStatement( + String.format( + "SELECT %s, %s, %s, %s FROM cameras", + TableKeys.CAM_UNIQUE_NAME, + TableKeys.CONFIG_JSON, + TableKeys.DRIVERMODE_JSON, + TableKeys.PIPELINE_JSONS)); + + var 
result = query.executeQuery(); + + // Iterate over every row/"camera" in the table + while (result.next()) { + List dummyList = new ArrayList<>(); + + var uniqueName = result.getString(TableKeys.CAM_UNIQUE_NAME); + var config = + JacksonUtils.deserialize( + result.getString(TableKeys.CONFIG_JSON), CameraConfiguration.class); + var driverMode = + JacksonUtils.deserialize( + result.getString(TableKeys.DRIVERMODE_JSON), DriverModePipelineSettings.class); + List pipelineSettings = + JacksonUtils.deserialize( + result.getString(TableKeys.PIPELINE_JSONS), dummyList.getClass()); + + List loadedSettings = new ArrayList<>(); + for (var str : pipelineSettings) { + if (str instanceof String) { + loadedSettings.add(JacksonUtils.deserialize((String) str, CVPipelineSettings.class)); + } } - return loadedConfigurations; - } - public void setConfig(PhotonConfiguration config) { - this.config = config; + config.pipelineSettings = loadedSettings; + config.driveModeSettings = driverMode; + loadedConfigurations.put(uniqueName, config); + } + } catch (SQLException | IOException e) { + logger.error("Err loading cameras: ", e); + } finally { + try { + if (query != null) query.close(); + } catch (SQLException e) { + logger.error("SQL Err closing connection while loading cameras ", e); + } } + return loadedConfigurations; + } + + public void setConfig(PhotonConfiguration config) { + this.config = config; + } } diff --git a/photon-core/src/main/java/org/photonvision/common/dataflow/DataChangeDestination.java b/photon-core/src/main/java/org/photonvision/common/dataflow/DataChangeDestination.java index 16a69dfe4f..d046734463 100644 --- a/photon-core/src/main/java/org/photonvision/common/dataflow/DataChangeDestination.java +++ b/photon-core/src/main/java/org/photonvision/common/dataflow/DataChangeDestination.java @@ -22,14 +22,14 @@ import java.util.List; public enum DataChangeDestination { - DCD_ACTIVEMODULE, - DCD_ACTIVEPIPELINESETTINGS, - DCD_GENSETTINGS, - DCD_UI, - DCD_OTHER; + 
DCD_ACTIVEMODULE, + DCD_ACTIVEPIPELINESETTINGS, + DCD_GENSETTINGS, + DCD_UI, + DCD_OTHER; - public static final List AllDestinations = - Arrays.asList(DataChangeDestination.values()); + public static final List AllDestinations = + Arrays.asList(DataChangeDestination.values()); - public static class DataChangeDestinationList extends ArrayList {} + public static class DataChangeDestinationList extends ArrayList {} } diff --git a/photon-core/src/main/java/org/photonvision/common/dataflow/DataChangeService.java b/photon-core/src/main/java/org/photonvision/common/dataflow/DataChangeService.java index 9d49e94290..22b5ade244 100644 --- a/photon-core/src/main/java/org/photonvision/common/dataflow/DataChangeService.java +++ b/photon-core/src/main/java/org/photonvision/common/dataflow/DataChangeService.java @@ -27,84 +27,84 @@ @SuppressWarnings("rawtypes") public class DataChangeService { - private static final Logger logger = new Logger(DataChangeService.class, LogGroup.WebServer); - - private static class ThreadSafeSingleton { - private static final DataChangeService INSTANCE = new DataChangeService(); - } - - public static DataChangeService getInstance() { - return ThreadSafeSingleton.INSTANCE; - } - - private final CopyOnWriteArrayList subscribers; - - @SuppressWarnings("FieldCanBeLocal") - private final Thread dispatchThread; - - private final BlockingQueue eventQueue = new LinkedBlockingQueue<>(); - - private DataChangeService() { - subscribers = new CopyOnWriteArrayList<>(); - dispatchThread = new Thread(this::dispatchFromQueue); - dispatchThread.setName("DataChangeEventDispatchThread"); - dispatchThread.start(); - } - - public boolean hasEvents() { - return !eventQueue.isEmpty(); - } - - private void dispatchFromQueue() { - while (true) { - try { - var taken = eventQueue.take(); - for (var sub : subscribers) { - if (sub.wantedSources.contains(taken.sourceType) - && sub.wantedDestinations.contains(taken.destType)) { - sub.onDataChangeEvent(taken); - } - } - } catch 
(Exception e) { - logger.error("Exception when dispatching event!", e); - e.printStackTrace(); - } + private static final Logger logger = new Logger(DataChangeService.class, LogGroup.WebServer); + + private static class ThreadSafeSingleton { + private static final DataChangeService INSTANCE = new DataChangeService(); + } + + public static DataChangeService getInstance() { + return ThreadSafeSingleton.INSTANCE; + } + + private final CopyOnWriteArrayList subscribers; + + @SuppressWarnings("FieldCanBeLocal") + private final Thread dispatchThread; + + private final BlockingQueue eventQueue = new LinkedBlockingQueue<>(); + + private DataChangeService() { + subscribers = new CopyOnWriteArrayList<>(); + dispatchThread = new Thread(this::dispatchFromQueue); + dispatchThread.setName("DataChangeEventDispatchThread"); + dispatchThread.start(); + } + + public boolean hasEvents() { + return !eventQueue.isEmpty(); + } + + private void dispatchFromQueue() { + while (true) { + try { + var taken = eventQueue.take(); + for (var sub : subscribers) { + if (sub.wantedSources.contains(taken.sourceType) + && sub.wantedDestinations.contains(taken.destType)) { + sub.onDataChangeEvent(taken); + } } + } catch (Exception e) { + logger.error("Exception when dispatching event!", e); + e.printStackTrace(); + } } - - public void addSubscriber(DataChangeSubscriber subscriber) { - if (!subscribers.addIfAbsent(subscriber)) { - logger.warn("Attempted to add already added subscriber!"); - } else { - logger.debug( - () -> { - var sources = - subscriber.wantedSources.stream() - .map(Enum::toString) - .collect(Collectors.joining(", ")); - var dests = - subscriber.wantedDestinations.stream() - .map(Enum::toString) - .collect(Collectors.joining(", ")); - - return "Added subscriber - " + "Sources: " + sources + ", Destinations: " + dests; - }); - } + } + + public void addSubscriber(DataChangeSubscriber subscriber) { + if (!subscribers.addIfAbsent(subscriber)) { + logger.warn("Attempted to add already added 
subscriber!"); + } else { + logger.debug( + () -> { + var sources = + subscriber.wantedSources.stream() + .map(Enum::toString) + .collect(Collectors.joining(", ")); + var dests = + subscriber.wantedDestinations.stream() + .map(Enum::toString) + .collect(Collectors.joining(", ")); + + return "Added subscriber - " + "Sources: " + sources + ", Destinations: " + dests; + }); } + } - public void addSubscribers(DataChangeSubscriber... subs) { - for (var sub : subs) { - addSubscriber(sub); - } + public void addSubscribers(DataChangeSubscriber... subs) { + for (var sub : subs) { + addSubscriber(sub); } + } - public void publishEvent(DataChangeEvent event) { - eventQueue.offer(event); - } + public void publishEvent(DataChangeEvent event) { + eventQueue.offer(event); + } - public void publishEvents(DataChangeEvent... events) { - for (var event : events) { - publishEvent(event); - } + public void publishEvents(DataChangeEvent... events) { + for (var event : events) { + publishEvent(event); } + } } diff --git a/photon-core/src/main/java/org/photonvision/common/dataflow/DataChangeSource.java b/photon-core/src/main/java/org/photonvision/common/dataflow/DataChangeSource.java index c9ba1585fd..966015ca74 100644 --- a/photon-core/src/main/java/org/photonvision/common/dataflow/DataChangeSource.java +++ b/photon-core/src/main/java/org/photonvision/common/dataflow/DataChangeSource.java @@ -22,13 +22,13 @@ import java.util.List; public enum DataChangeSource { - DCS_WEBSOCKET, - DCS_HTTP, - DCS_NETWORKTABLES, - DCS_VISIONMODULE, - DCS_OTHER; + DCS_WEBSOCKET, + DCS_HTTP, + DCS_NETWORKTABLES, + DCS_VISIONMODULE, + DCS_OTHER; - public static final List AllSources = Arrays.asList(DataChangeSource.values()); + public static final List AllSources = Arrays.asList(DataChangeSource.values()); - public static class DataChangeSourceList extends ArrayList {} + public static class DataChangeSourceList extends ArrayList {} } diff --git 
a/photon-core/src/main/java/org/photonvision/common/dataflow/DataChangeSubscriber.java b/photon-core/src/main/java/org/photonvision/common/dataflow/DataChangeSubscriber.java index 92a06fdb67..7b44095c31 100644 --- a/photon-core/src/main/java/org/photonvision/common/dataflow/DataChangeSubscriber.java +++ b/photon-core/src/main/java/org/photonvision/common/dataflow/DataChangeSubscriber.java @@ -22,34 +22,34 @@ import org.photonvision.common.dataflow.events.DataChangeEvent; public abstract class DataChangeSubscriber { - public final List wantedSources; - public final List wantedDestinations; + public final List wantedSources; + public final List wantedDestinations; - private final int hash; + private final int hash; - public DataChangeSubscriber( - List wantedSources, List wantedDestinations) { - this.wantedSources = wantedSources; - this.wantedDestinations = wantedDestinations; - hash = Objects.hash(wantedSources, wantedDestinations); - } + public DataChangeSubscriber( + List wantedSources, List wantedDestinations) { + this.wantedSources = wantedSources; + this.wantedDestinations = wantedDestinations; + hash = Objects.hash(wantedSources, wantedDestinations); + } - public DataChangeSubscriber() { - this(DataChangeSource.AllSources, DataChangeDestination.AllDestinations); - } + public DataChangeSubscriber() { + this(DataChangeSource.AllSources, DataChangeDestination.AllDestinations); + } - public DataChangeSubscriber(DataChangeSource.DataChangeSourceList wantedSources) { - this(wantedSources, DataChangeDestination.AllDestinations); - } + public DataChangeSubscriber(DataChangeSource.DataChangeSourceList wantedSources) { + this(wantedSources, DataChangeDestination.AllDestinations); + } - public DataChangeSubscriber(DataChangeDestination.DataChangeDestinationList wantedDestinations) { - this(DataChangeSource.AllSources, wantedDestinations); - } + public DataChangeSubscriber(DataChangeDestination.DataChangeDestinationList wantedDestinations) { + 
this(DataChangeSource.AllSources, wantedDestinations); + } - public abstract void onDataChangeEvent(DataChangeEvent event); + public abstract void onDataChangeEvent(DataChangeEvent event); - @Override - public int hashCode() { - return hash; - } + @Override + public int hashCode() { + return hash; + } } diff --git a/photon-core/src/main/java/org/photonvision/common/dataflow/events/DataChangeEvent.java b/photon-core/src/main/java/org/photonvision/common/dataflow/events/DataChangeEvent.java index 3f40aba733..7f7b993c04 100644 --- a/photon-core/src/main/java/org/photonvision/common/dataflow/events/DataChangeEvent.java +++ b/photon-core/src/main/java/org/photonvision/common/dataflow/events/DataChangeEvent.java @@ -21,34 +21,34 @@ import org.photonvision.common.dataflow.DataChangeSource; public class DataChangeEvent { - public final DataChangeSource sourceType; - public final DataChangeDestination destType; - public final String propertyName; - public final T data; + public final DataChangeSource sourceType; + public final DataChangeDestination destType; + public final String propertyName; + public final T data; - public DataChangeEvent( - DataChangeSource sourceType, - DataChangeDestination destType, - String propertyName, - T newValue) { - this.sourceType = sourceType; - this.destType = destType; - this.propertyName = propertyName; - this.data = newValue; - } + public DataChangeEvent( + DataChangeSource sourceType, + DataChangeDestination destType, + String propertyName, + T newValue) { + this.sourceType = sourceType; + this.destType = destType; + this.propertyName = propertyName; + this.data = newValue; + } - @Override - public String toString() { - return "DataChangeEvent{" - + "sourceType=" - + sourceType - + ", destType=" - + destType - + ", propertyName='" - + propertyName - + '\'' - + ", data=" - + data - + '}'; - } + @Override + public String toString() { + return "DataChangeEvent{" + + "sourceType=" + + sourceType + + ", destType=" + + destType + + ", 
propertyName='" + + propertyName + + '\'' + + ", data=" + + data + + '}'; + } } diff --git a/photon-core/src/main/java/org/photonvision/common/dataflow/events/HTTPRequestEvent.java b/photon-core/src/main/java/org/photonvision/common/dataflow/events/HTTPRequestEvent.java index 5843ba1cca..393977a7a8 100644 --- a/photon-core/src/main/java/org/photonvision/common/dataflow/events/HTTPRequestEvent.java +++ b/photon-core/src/main/java/org/photonvision/common/dataflow/events/HTTPRequestEvent.java @@ -21,11 +21,11 @@ import org.photonvision.common.dataflow.DataChangeSource; public class HTTPRequestEvent extends DataChangeEvent { - public HTTPRequestEvent( - DataChangeSource sourceType, - DataChangeDestination destType, - String propertyName, - T newValue) { - super(sourceType, destType, propertyName, newValue); - } + public HTTPRequestEvent( + DataChangeSource sourceType, + DataChangeDestination destType, + String propertyName, + T newValue) { + super(sourceType, destType, propertyName, newValue); + } } diff --git a/photon-core/src/main/java/org/photonvision/common/dataflow/events/IncomingWebSocketEvent.java b/photon-core/src/main/java/org/photonvision/common/dataflow/events/IncomingWebSocketEvent.java index 7f0fd6134e..87d3ca3cab 100644 --- a/photon-core/src/main/java/org/photonvision/common/dataflow/events/IncomingWebSocketEvent.java +++ b/photon-core/src/main/java/org/photonvision/common/dataflow/events/IncomingWebSocketEvent.java @@ -23,44 +23,44 @@ import org.photonvision.common.dataflow.DataChangeSource; public class IncomingWebSocketEvent extends DataChangeEvent { - public final Integer cameraIndex; - public final WsContext originContext; + public final Integer cameraIndex; + public final WsContext originContext; - public IncomingWebSocketEvent(DataChangeDestination destType, String propertyName, T newValue) { - this(destType, propertyName, newValue, null, null); - } + public IncomingWebSocketEvent(DataChangeDestination destType, String propertyName, T newValue) { + 
this(destType, propertyName, newValue, null, null); + } - public IncomingWebSocketEvent( - DataChangeDestination destType, - String propertyName, - T newValue, - Integer cameraIndex, - WsContext originContext) { - super(DataChangeSource.DCS_WEBSOCKET, destType, propertyName, newValue); - this.cameraIndex = cameraIndex; - this.originContext = originContext; - } + public IncomingWebSocketEvent( + DataChangeDestination destType, + String propertyName, + T newValue, + Integer cameraIndex, + WsContext originContext) { + super(DataChangeSource.DCS_WEBSOCKET, destType, propertyName, newValue); + this.cameraIndex = cameraIndex; + this.originContext = originContext; + } - @SuppressWarnings("unchecked") - public IncomingWebSocketEvent( - DataChangeDestination destType, String dataKey, HashMap data) { - this(destType, dataKey, (T) data.get(dataKey)); - } + @SuppressWarnings("unchecked") + public IncomingWebSocketEvent( + DataChangeDestination destType, String dataKey, HashMap data) { + this(destType, dataKey, (T) data.get(dataKey)); + } - @Override - public String toString() { - return "IncomingWebSocketEvent{" - + "cameraIndex=" - + cameraIndex - + ", sourceType=" - + sourceType - + ", destType=" - + destType - + ", propertyName='" - + propertyName - + '\'' - + ", data=" - + data - + '}'; - } + @Override + public String toString() { + return "IncomingWebSocketEvent{" + + "cameraIndex=" + + cameraIndex + + ", sourceType=" + + sourceType + + ", destType=" + + destType + + ", propertyName='" + + propertyName + + '\'' + + ", data=" + + data + + '}'; + } } diff --git a/photon-core/src/main/java/org/photonvision/common/dataflow/events/OutgoingUIEvent.java b/photon-core/src/main/java/org/photonvision/common/dataflow/events/OutgoingUIEvent.java index efb3308dc5..11feec070b 100644 --- a/photon-core/src/main/java/org/photonvision/common/dataflow/events/OutgoingUIEvent.java +++ b/photon-core/src/main/java/org/photonvision/common/dataflow/events/OutgoingUIEvent.java @@ -23,29 +23,29 @@ 
import org.photonvision.common.dataflow.DataChangeSource; public class OutgoingUIEvent extends DataChangeEvent { - public final WsContext originContext; - - public OutgoingUIEvent(String propertyName, T newValue) { - this(propertyName, newValue, null); - } - - public OutgoingUIEvent(String propertyName, T newValue, WsContext originContext) { - super(DataChangeSource.DCS_WEBSOCKET, DataChangeDestination.DCD_UI, propertyName, newValue); - this.originContext = originContext; - } - - public static OutgoingUIEvent> wrappedOf( - String commandName, Object value) { - HashMap data = new HashMap<>(); - data.put(commandName, value); - return new OutgoingUIEvent<>(commandName, data); - } - - public static OutgoingUIEvent> wrappedOf( - String commandName, String propertyName, Object value, WsContext originContext) { - HashMap data = new HashMap<>(); - data.put(propertyName, value); - - return new OutgoingUIEvent<>(commandName, data, originContext); - } + public final WsContext originContext; + + public OutgoingUIEvent(String propertyName, T newValue) { + this(propertyName, newValue, null); + } + + public OutgoingUIEvent(String propertyName, T newValue, WsContext originContext) { + super(DataChangeSource.DCS_WEBSOCKET, DataChangeDestination.DCD_UI, propertyName, newValue); + this.originContext = originContext; + } + + public static OutgoingUIEvent> wrappedOf( + String commandName, Object value) { + HashMap data = new HashMap<>(); + data.put(commandName, value); + return new OutgoingUIEvent<>(commandName, data); + } + + public static OutgoingUIEvent> wrappedOf( + String commandName, String propertyName, Object value, WsContext originContext) { + HashMap data = new HashMap<>(); + data.put(propertyName, value); + + return new OutgoingUIEvent<>(commandName, data, originContext); + } } diff --git a/photon-core/src/main/java/org/photonvision/common/dataflow/networktables/NTDataChangeListener.java 
b/photon-core/src/main/java/org/photonvision/common/dataflow/networktables/NTDataChangeListener.java index 2bd172cadb..9303b6c622 100644 --- a/photon-core/src/main/java/org/photonvision/common/dataflow/networktables/NTDataChangeListener.java +++ b/photon-core/src/main/java/org/photonvision/common/dataflow/networktables/NTDataChangeListener.java @@ -24,22 +24,22 @@ import java.util.function.Consumer; public class NTDataChangeListener { - private final NetworkTableInstance instance; - private final Subscriber watchedEntry; - private final int listenerID; + private final NetworkTableInstance instance; + private final Subscriber watchedEntry; + private final int listenerID; - public NTDataChangeListener( - NetworkTableInstance instance, - Subscriber watchedSubscriber, - Consumer dataChangeConsumer) { - this.watchedEntry = watchedSubscriber; - this.instance = instance; - listenerID = - this.instance.addListener( - watchedEntry, EnumSet.of(NetworkTableEvent.Kind.kValueAll), dataChangeConsumer); - } + public NTDataChangeListener( + NetworkTableInstance instance, + Subscriber watchedSubscriber, + Consumer dataChangeConsumer) { + this.watchedEntry = watchedSubscriber; + this.instance = instance; + listenerID = + this.instance.addListener( + watchedEntry, EnumSet.of(NetworkTableEvent.Kind.kValueAll), dataChangeConsumer); + } - public void remove() { - this.instance.removeListener(listenerID); - } + public void remove() { + this.instance.removeListener(listenerID); + } } diff --git a/photon-core/src/main/java/org/photonvision/common/dataflow/networktables/NTDataPublisher.java b/photon-core/src/main/java/org/photonvision/common/dataflow/networktables/NTDataPublisher.java index d985c710f5..306b6ae7a3 100644 --- a/photon-core/src/main/java/org/photonvision/common/dataflow/networktables/NTDataPublisher.java +++ b/photon-core/src/main/java/org/photonvision/common/dataflow/networktables/NTDataPublisher.java @@ -32,166 +32,166 @@ import org.photonvision.vision.target.TrackedTarget; 
public class NTDataPublisher implements CVPipelineResultConsumer { - private final Logger logger = new Logger(NTDataPublisher.class, LogGroup.General); + private final Logger logger = new Logger(NTDataPublisher.class, LogGroup.General); - private final NetworkTable rootTable = NetworkTablesManager.getInstance().kRootTable; + private final NetworkTable rootTable = NetworkTablesManager.getInstance().kRootTable; - private final NTTopicSet ts = new NTTopicSet(); + private final NTTopicSet ts = new NTTopicSet(); - NTDataChangeListener pipelineIndexListener; - private final Supplier pipelineIndexSupplier; - private final Consumer pipelineIndexConsumer; + NTDataChangeListener pipelineIndexListener; + private final Supplier pipelineIndexSupplier; + private final Consumer pipelineIndexConsumer; - NTDataChangeListener driverModeListener; - private final BooleanSupplier driverModeSupplier; - private final Consumer driverModeConsumer; + NTDataChangeListener driverModeListener; + private final BooleanSupplier driverModeSupplier; + private final Consumer driverModeConsumer; - private long heartbeatCounter = 0; + private long heartbeatCounter = 0; - public NTDataPublisher( - String cameraNickname, - Supplier pipelineIndexSupplier, - Consumer pipelineIndexConsumer, - BooleanSupplier driverModeSupplier, - Consumer driverModeConsumer) { - this.pipelineIndexSupplier = pipelineIndexSupplier; - this.pipelineIndexConsumer = pipelineIndexConsumer; - this.driverModeSupplier = driverModeSupplier; - this.driverModeConsumer = driverModeConsumer; + public NTDataPublisher( + String cameraNickname, + Supplier pipelineIndexSupplier, + Consumer pipelineIndexConsumer, + BooleanSupplier driverModeSupplier, + Consumer driverModeConsumer) { + this.pipelineIndexSupplier = pipelineIndexSupplier; + this.pipelineIndexConsumer = pipelineIndexConsumer; + this.driverModeSupplier = driverModeSupplier; + this.driverModeConsumer = driverModeConsumer; - updateCameraNickname(cameraNickname); - updateEntries(); - 
} - - private void onPipelineIndexChange(NetworkTableEvent entryNotification) { - var newIndex = (int) entryNotification.valueData.value.getInteger(); - var originalIndex = pipelineIndexSupplier.get(); - - // ignore indexes below 0 - if (newIndex < 0) { - ts.pipelineIndexPublisher.set(originalIndex); - return; - } - - if (newIndex == originalIndex) { - logger.debug("Pipeline index is already " + newIndex); - return; - } - - pipelineIndexConsumer.accept(newIndex); - var setIndex = pipelineIndexSupplier.get(); - if (newIndex != setIndex) { // set failed - ts.pipelineIndexPublisher.set(setIndex); - // TODO: Log - } - logger.debug("Set pipeline index to " + newIndex); - } - - private void onDriverModeChange(NetworkTableEvent entryNotification) { - var newDriverMode = entryNotification.valueData.value.getBoolean(); - var originalDriverMode = driverModeSupplier.getAsBoolean(); + updateCameraNickname(cameraNickname); + updateEntries(); + } - if (newDriverMode == originalDriverMode) { - logger.debug("Driver mode is already " + newDriverMode); - return; - } + private void onPipelineIndexChange(NetworkTableEvent entryNotification) { + var newIndex = (int) entryNotification.valueData.value.getInteger(); + var originalIndex = pipelineIndexSupplier.get(); - driverModeConsumer.accept(newDriverMode); - logger.debug("Set driver mode to " + newDriverMode); + // ignore indexes below 0 + if (newIndex < 0) { + ts.pipelineIndexPublisher.set(originalIndex); + return; } - private void removeEntries() { - if (pipelineIndexListener != null) pipelineIndexListener.remove(); - if (driverModeListener != null) driverModeListener.remove(); - ts.removeEntries(); + if (newIndex == originalIndex) { + logger.debug("Pipeline index is already " + newIndex); + return; } - private void updateEntries() { - if (pipelineIndexListener != null) pipelineIndexListener.remove(); - if (driverModeListener != null) driverModeListener.remove(); - - ts.updateEntries(); + pipelineIndexConsumer.accept(newIndex); + var 
setIndex = pipelineIndexSupplier.get(); + if (newIndex != setIndex) { // set failed + ts.pipelineIndexPublisher.set(setIndex); + // TODO: Log + } + logger.debug("Set pipeline index to " + newIndex); + } - pipelineIndexListener = - new NTDataChangeListener( - ts.subTable.getInstance(), ts.pipelineIndexRequestSub, this::onPipelineIndexChange); + private void onDriverModeChange(NetworkTableEvent entryNotification) { + var newDriverMode = entryNotification.valueData.value.getBoolean(); + var originalDriverMode = driverModeSupplier.getAsBoolean(); - driverModeListener = - new NTDataChangeListener( - ts.subTable.getInstance(), ts.driverModeSubscriber, this::onDriverModeChange); + if (newDriverMode == originalDriverMode) { + logger.debug("Driver mode is already " + newDriverMode); + return; } - public void updateCameraNickname(String newCameraNickname) { - removeEntries(); - ts.subTable = rootTable.getSubTable(newCameraNickname); - updateEntries(); + driverModeConsumer.accept(newDriverMode); + logger.debug("Set driver mode to " + newDriverMode); + } + + private void removeEntries() { + if (pipelineIndexListener != null) pipelineIndexListener.remove(); + if (driverModeListener != null) driverModeListener.remove(); + ts.removeEntries(); + } + + private void updateEntries() { + if (pipelineIndexListener != null) pipelineIndexListener.remove(); + if (driverModeListener != null) driverModeListener.remove(); + + ts.updateEntries(); + + pipelineIndexListener = + new NTDataChangeListener( + ts.subTable.getInstance(), ts.pipelineIndexRequestSub, this::onPipelineIndexChange); + + driverModeListener = + new NTDataChangeListener( + ts.subTable.getInstance(), ts.driverModeSubscriber, this::onDriverModeChange); + } + + public void updateCameraNickname(String newCameraNickname) { + removeEntries(); + ts.subTable = rootTable.getSubTable(newCameraNickname); + updateEntries(); + } + + @Override + public void accept(CVPipelineResult result) { + var simplified = + new PhotonPipelineResult( + 
result.getLatencyMillis(), + TrackedTarget.simpleFromTrackedTargets(result.targets), + result.multiTagResult); + Packet packet = new Packet(simplified.getPacketSize()); + simplified.populatePacket(packet); + + ts.rawBytesEntry.set(packet.getData()); + + ts.pipelineIndexPublisher.set(pipelineIndexSupplier.get()); + ts.driverModePublisher.set(driverModeSupplier.getAsBoolean()); + ts.latencyMillisEntry.set(result.getLatencyMillis()); + ts.hasTargetEntry.set(result.hasTargets()); + + if (result.hasTargets()) { + var bestTarget = result.targets.get(0); + + ts.targetPitchEntry.set(bestTarget.getPitch()); + ts.targetYawEntry.set(bestTarget.getYaw()); + ts.targetAreaEntry.set(bestTarget.getArea()); + ts.targetSkewEntry.set(bestTarget.getSkew()); + + var pose = bestTarget.getBestCameraToTarget3d(); + ts.targetPoseEntry.set( + new double[] { + pose.getTranslation().getX(), + pose.getTranslation().getY(), + pose.getTranslation().getZ(), + pose.getRotation().getQuaternion().getW(), + pose.getRotation().getQuaternion().getX(), + pose.getRotation().getQuaternion().getY(), + pose.getRotation().getQuaternion().getZ() + }); + + var targetOffsetPoint = bestTarget.getTargetOffsetPoint(); + ts.bestTargetPosX.set(targetOffsetPoint.x); + ts.bestTargetPosY.set(targetOffsetPoint.y); + } else { + ts.targetPitchEntry.set(0); + ts.targetYawEntry.set(0); + ts.targetAreaEntry.set(0); + ts.targetSkewEntry.set(0); + ts.targetPoseEntry.set(new double[] {0, 0, 0}); + ts.bestTargetPosX.set(0); + ts.bestTargetPosY.set(0); } - @Override - public void accept(CVPipelineResult result) { - var simplified = - new PhotonPipelineResult( - result.getLatencyMillis(), - TrackedTarget.simpleFromTrackedTargets(result.targets), - result.multiTagResult); - Packet packet = new Packet(simplified.getPacketSize()); - simplified.populatePacket(packet); - - ts.rawBytesEntry.set(packet.getData()); - - ts.pipelineIndexPublisher.set(pipelineIndexSupplier.get()); - 
ts.driverModePublisher.set(driverModeSupplier.getAsBoolean()); - ts.latencyMillisEntry.set(result.getLatencyMillis()); - ts.hasTargetEntry.set(result.hasTargets()); - - if (result.hasTargets()) { - var bestTarget = result.targets.get(0); - - ts.targetPitchEntry.set(bestTarget.getPitch()); - ts.targetYawEntry.set(bestTarget.getYaw()); - ts.targetAreaEntry.set(bestTarget.getArea()); - ts.targetSkewEntry.set(bestTarget.getSkew()); - - var pose = bestTarget.getBestCameraToTarget3d(); - ts.targetPoseEntry.set( - new double[] { - pose.getTranslation().getX(), - pose.getTranslation().getY(), - pose.getTranslation().getZ(), - pose.getRotation().getQuaternion().getW(), - pose.getRotation().getQuaternion().getX(), - pose.getRotation().getQuaternion().getY(), - pose.getRotation().getQuaternion().getZ() - }); - - var targetOffsetPoint = bestTarget.getTargetOffsetPoint(); - ts.bestTargetPosX.set(targetOffsetPoint.x); - ts.bestTargetPosY.set(targetOffsetPoint.y); - } else { - ts.targetPitchEntry.set(0); - ts.targetYawEntry.set(0); - ts.targetAreaEntry.set(0); - ts.targetSkewEntry.set(0); - ts.targetPoseEntry.set(new double[] {0, 0, 0}); - ts.bestTargetPosX.set(0); - ts.bestTargetPosY.set(0); - } - - // Something in the result can sometimes be null -- so check probably too many things - if (result.inputAndOutputFrame != null - && result.inputAndOutputFrame.frameStaticProperties != null - && result.inputAndOutputFrame.frameStaticProperties.cameraCalibration != null) { - var fsp = result.inputAndOutputFrame.frameStaticProperties; - ts.cameraIntrinsicsPublisher.accept(fsp.cameraCalibration.getIntrinsicsArr()); - ts.cameraDistortionPublisher.accept(fsp.cameraCalibration.getExtrinsicsArr()); - } else { - ts.cameraIntrinsicsPublisher.accept(new double[] {}); - ts.cameraDistortionPublisher.accept(new double[] {}); - } - - ts.heartbeatPublisher.set(heartbeatCounter++); - - // TODO...nt4... is this needed? 
- rootTable.getInstance().flush(); + // Something in the result can sometimes be null -- so check probably too many things + if (result.inputAndOutputFrame != null + && result.inputAndOutputFrame.frameStaticProperties != null + && result.inputAndOutputFrame.frameStaticProperties.cameraCalibration != null) { + var fsp = result.inputAndOutputFrame.frameStaticProperties; + ts.cameraIntrinsicsPublisher.accept(fsp.cameraCalibration.getIntrinsicsArr()); + ts.cameraDistortionPublisher.accept(fsp.cameraCalibration.getExtrinsicsArr()); + } else { + ts.cameraIntrinsicsPublisher.accept(new double[] {}); + ts.cameraDistortionPublisher.accept(new double[] {}); } + + ts.heartbeatPublisher.set(heartbeatCounter++); + + // TODO...nt4... is this needed? + rootTable.getInstance().flush(); + } } diff --git a/photon-core/src/main/java/org/photonvision/common/dataflow/networktables/NetworkTablesManager.java b/photon-core/src/main/java/org/photonvision/common/dataflow/networktables/NetworkTablesManager.java index a62e0f3b4c..74de5eb37e 100644 --- a/photon-core/src/main/java/org/photonvision/common/dataflow/networktables/NetworkTablesManager.java +++ b/photon-core/src/main/java/org/photonvision/common/dataflow/networktables/NetworkTablesManager.java @@ -40,168 +40,168 @@ import org.photonvision.common.util.file.JacksonUtils; public class NetworkTablesManager { - private final NetworkTableInstance ntInstance = NetworkTableInstance.getDefault(); - private final String kRootTableName = "/photonvision"; - private final String kFieldLayoutName = "apriltag_field_layout"; - public final NetworkTable kRootTable = ntInstance.getTable(kRootTableName); - - private final NTLogger m_ntLogger = new NTLogger(); - - private boolean m_isRetryingConnection = false; - - private StringSubscriber m_fieldLayoutSubscriber = - kRootTable.getStringTopic(kFieldLayoutName).subscribe(""); - - private NetworkTablesManager() { - ntInstance.addLogger(255, 255, (event) -> {}); // to hide error messages - 
ntInstance.addConnectionListener(true, m_ntLogger); // to hide error messages - - ntInstance.addListener( - m_fieldLayoutSubscriber, EnumSet.of(Kind.kValueAll), this::onFieldLayoutChanged); - - TimedTaskManager.getInstance().addTask("NTManager", this::ntTick, 5000); - - // Get the UI state in sync with the backend. NT should fire a callback when it first connects - // to the robot - broadcastConnectedStatus(); - } - - private static NetworkTablesManager INSTANCE; - - public static NetworkTablesManager getInstance() { - if (INSTANCE == null) INSTANCE = new NetworkTablesManager(); - return INSTANCE; + private final NetworkTableInstance ntInstance = NetworkTableInstance.getDefault(); + private final String kRootTableName = "/photonvision"; + private final String kFieldLayoutName = "apriltag_field_layout"; + public final NetworkTable kRootTable = ntInstance.getTable(kRootTableName); + + private final NTLogger m_ntLogger = new NTLogger(); + + private boolean m_isRetryingConnection = false; + + private StringSubscriber m_fieldLayoutSubscriber = + kRootTable.getStringTopic(kFieldLayoutName).subscribe(""); + + private NetworkTablesManager() { + ntInstance.addLogger(255, 255, (event) -> {}); // to hide error messages + ntInstance.addConnectionListener(true, m_ntLogger); // to hide error messages + + ntInstance.addListener( + m_fieldLayoutSubscriber, EnumSet.of(Kind.kValueAll), this::onFieldLayoutChanged); + + TimedTaskManager.getInstance().addTask("NTManager", this::ntTick, 5000); + + // Get the UI state in sync with the backend. 
NT should fire a callback when it first connects + // to the robot + broadcastConnectedStatus(); + } + + private static NetworkTablesManager INSTANCE; + + public static NetworkTablesManager getInstance() { + if (INSTANCE == null) INSTANCE = new NetworkTablesManager(); + return INSTANCE; + } + + private static final Logger logger = new Logger(NetworkTablesManager.class, LogGroup.General); + + private static class NTLogger implements Consumer { + private boolean hasReportedConnectionFailure = false; + + @Override + public void accept(NetworkTableEvent event) { + var isConnEvent = event.is(Kind.kConnected); + var isDisconnEvent = event.is(Kind.kDisconnected); + + if (!hasReportedConnectionFailure && isDisconnEvent) { + var msg = + String.format( + "NT lost connection to %s:%d! (NT version %d). Will retry in background.", + event.connInfo.remote_ip, + event.connInfo.remote_port, + event.connInfo.protocol_version); + logger.error(msg); + + hasReportedConnectionFailure = true; + getInstance().broadcastConnectedStatus(); + } else if (isConnEvent && event.connInfo != null) { + var msg = + String.format( + "NT connected to %s:%d! (NT version %d)", + event.connInfo.remote_ip, + event.connInfo.remote_port, + event.connInfo.protocol_version); + logger.info(msg); + + hasReportedConnectionFailure = false; + ScriptManager.queueEvent(ScriptEventType.kNTConnected); + getInstance().broadcastVersion(); + getInstance().broadcastConnectedStatus(); + } } - - private static final Logger logger = new Logger(NetworkTablesManager.class, LogGroup.General); - - private static class NTLogger implements Consumer { - private boolean hasReportedConnectionFailure = false; - - @Override - public void accept(NetworkTableEvent event) { - var isConnEvent = event.is(Kind.kConnected); - var isDisconnEvent = event.is(Kind.kDisconnected); - - if (!hasReportedConnectionFailure && isDisconnEvent) { - var msg = - String.format( - "NT lost connection to %s:%d! (NT version %d). 
Will retry in background.", - event.connInfo.remote_ip, - event.connInfo.remote_port, - event.connInfo.protocol_version); - logger.error(msg); - - hasReportedConnectionFailure = true; - getInstance().broadcastConnectedStatus(); - } else if (isConnEvent && event.connInfo != null) { - var msg = - String.format( - "NT connected to %s:%d! (NT version %d)", - event.connInfo.remote_ip, - event.connInfo.remote_port, - event.connInfo.protocol_version); - logger.info(msg); - - hasReportedConnectionFailure = false; - ScriptManager.queueEvent(ScriptEventType.kNTConnected); - getInstance().broadcastVersion(); - getInstance().broadcastConnectedStatus(); - } - } - } - - private void onFieldLayoutChanged(NetworkTableEvent event) { - var atfl_json = event.valueData.value.getString(); - try { - System.out.println("Got new field layout!"); - var atfl = JacksonUtils.deserialize(atfl_json, AprilTagFieldLayout.class); - ConfigManager.getInstance().getConfig().setApriltagFieldLayout(atfl); - ConfigManager.getInstance().requestSave(); - DataChangeService.getInstance() - .publishEvent( - new OutgoingUIEvent<>( - "fullsettings", ConfigManager.getInstance().getConfig().toHashMap())); - } catch (IOException e) { - logger.error("Error deserializing atfl!"); - logger.error(atfl_json); - } - } - - public void broadcastConnectedStatus() { - TimedTaskManager.getInstance().addOneShotTask(this::broadcastConnectedStatusImpl, 1000L); + } + + private void onFieldLayoutChanged(NetworkTableEvent event) { + var atfl_json = event.valueData.value.getString(); + try { + System.out.println("Got new field layout!"); + var atfl = JacksonUtils.deserialize(atfl_json, AprilTagFieldLayout.class); + ConfigManager.getInstance().getConfig().setApriltagFieldLayout(atfl); + ConfigManager.getInstance().requestSave(); + DataChangeService.getInstance() + .publishEvent( + new OutgoingUIEvent<>( + "fullsettings", ConfigManager.getInstance().getConfig().toHashMap())); + } catch (IOException e) { + logger.error("Error 
deserializing atfl!"); + logger.error(atfl_json); } - - private void broadcastConnectedStatusImpl() { - HashMap map = new HashMap<>(); - var subMap = new HashMap(); - - subMap.put("connected", ntInstance.isConnected()); - if (ntInstance.isConnected()) { - var connections = ntInstance.getConnections(); - if (connections.length > 0) { - subMap.put("address", connections[0].remote_ip + ":" + connections[0].remote_port); - } - subMap.put("clients", connections.length); - } - - map.put("ntConnectionInfo", subMap); - DataChangeService.getInstance() - .publishEvent(new OutgoingUIEvent<>("networkTablesConnected", map)); + } + + public void broadcastConnectedStatus() { + TimedTaskManager.getInstance().addOneShotTask(this::broadcastConnectedStatusImpl, 1000L); + } + + private void broadcastConnectedStatusImpl() { + HashMap map = new HashMap<>(); + var subMap = new HashMap(); + + subMap.put("connected", ntInstance.isConnected()); + if (ntInstance.isConnected()) { + var connections = ntInstance.getConnections(); + if (connections.length > 0) { + subMap.put("address", connections[0].remote_ip + ":" + connections[0].remote_port); + } + subMap.put("clients", connections.length); } - private void broadcastVersion() { - kRootTable.getEntry("version").setString(PhotonVersion.versionString); - kRootTable.getEntry("buildDate").setString(PhotonVersion.buildDate); + map.put("ntConnectionInfo", subMap); + DataChangeService.getInstance() + .publishEvent(new OutgoingUIEvent<>("networkTablesConnected", map)); + } + + private void broadcastVersion() { + kRootTable.getEntry("version").setString(PhotonVersion.versionString); + kRootTable.getEntry("buildDate").setString(PhotonVersion.buildDate); + } + + public void setConfig(NetworkConfig config) { + if (config.runNTServer) { + setServerMode(); + } else { + setClientMode(config.ntServerAddress); } - - public void setConfig(NetworkConfig config) { - if (config.runNTServer) { - setServerMode(); - } else { - setClientMode(config.ntServerAddress); 
- } - broadcastVersion(); + broadcastVersion(); + } + + private void setClientMode(String ntServerAddress) { + ntInstance.stopServer(); + ntInstance.startClient4("photonvision"); + try { + int t = Integer.parseInt(ntServerAddress); + if (!m_isRetryingConnection) logger.info("Starting NT Client, server team is " + t); + ntInstance.setServerTeam(t); + } catch (NumberFormatException e) { + if (!m_isRetryingConnection) + logger.info("Starting NT Client, server IP is \"" + ntServerAddress + "\""); + ntInstance.setServer(ntServerAddress); } - - private void setClientMode(String ntServerAddress) { - ntInstance.stopServer(); - ntInstance.startClient4("photonvision"); - try { - int t = Integer.parseInt(ntServerAddress); - if (!m_isRetryingConnection) logger.info("Starting NT Client, server team is " + t); - ntInstance.setServerTeam(t); - } catch (NumberFormatException e) { - if (!m_isRetryingConnection) - logger.info("Starting NT Client, server IP is \"" + ntServerAddress + "\""); - ntInstance.setServer(ntServerAddress); - } - ntInstance.startDSClient(); - broadcastVersion(); - } - - private void setServerMode() { - logger.info("Starting NT Server"); - ntInstance.stopClient(); - ntInstance.startServer(); - broadcastVersion(); + ntInstance.startDSClient(); + broadcastVersion(); + } + + private void setServerMode() { + logger.info("Starting NT Server"); + ntInstance.stopClient(); + ntInstance.startServer(); + broadcastVersion(); + } + + // So it seems like if Photon starts before the robot NT server does, and both aren't static IP, + // it'll never connect. 
This hack works around it by restarting the client/server while the nt + // instance + // isn't connected, same as clicking the save button in the settings menu (or restarting the + // service) + private void ntTick() { + if (!ntInstance.isConnected() + && !ConfigManager.getInstance().getConfig().getNetworkConfig().runNTServer) { + setConfig(ConfigManager.getInstance().getConfig().getNetworkConfig()); } - // So it seems like if Photon starts before the robot NT server does, and both aren't static IP, - // it'll never connect. This hack works around it by restarting the client/server while the nt - // instance - // isn't connected, same as clicking the save button in the settings menu (or restarting the - // service) - private void ntTick() { - if (!ntInstance.isConnected() - && !ConfigManager.getInstance().getConfig().getNetworkConfig().runNTServer) { - setConfig(ConfigManager.getInstance().getConfig().getNetworkConfig()); - } - - if (!ntInstance.isConnected() && !m_isRetryingConnection) { - m_isRetryingConnection = true; - logger.error( - "[NetworkTablesManager] Could not connect to the robot! Will retry in the background..."); - } + if (!ntInstance.isConnected() && !m_isRetryingConnection) { + m_isRetryingConnection = true; + logger.error( + "[NetworkTablesManager] Could not connect to the robot! 
Will retry in the background..."); } + } } diff --git a/photon-core/src/main/java/org/photonvision/common/dataflow/websocket/UIDataPublisher.java b/photon-core/src/main/java/org/photonvision/common/dataflow/websocket/UIDataPublisher.java index 17ed240921..4a56ecab3e 100644 --- a/photon-core/src/main/java/org/photonvision/common/dataflow/websocket/UIDataPublisher.java +++ b/photon-core/src/main/java/org/photonvision/common/dataflow/websocket/UIDataPublisher.java @@ -28,47 +28,47 @@ import org.photonvision.vision.pipeline.result.CVPipelineResult; public class UIDataPublisher implements CVPipelineResultConsumer { - private static final Logger logger = new Logger(UIDataPublisher.class, LogGroup.VisionModule); + private static final Logger logger = new Logger(UIDataPublisher.class, LogGroup.VisionModule); - private final int index; - private long lastUIResultUpdateTime = 0; + private final int index; + private long lastUIResultUpdateTime = 0; - public UIDataPublisher(int index) { - this.index = index; - } + public UIDataPublisher(int index) { + this.index = index; + } - @Override - public void accept(CVPipelineResult result) { - long now = System.currentTimeMillis(); + @Override + public void accept(CVPipelineResult result) { + long now = System.currentTimeMillis(); - // only update the UI at 15hz - if (lastUIResultUpdateTime + 1000.0 / 10.0 > now) return; + // only update the UI at 15hz + if (lastUIResultUpdateTime + 1000.0 / 10.0 > now) return; - var dataMap = new HashMap(); - dataMap.put("fps", result.fps); - dataMap.put("latency", result.getLatencyMillis()); - var uiTargets = new ArrayList>(result.targets.size()); - for (var t : result.targets) { - uiTargets.add(t.toHashMap()); - } - dataMap.put("targets", uiTargets); + var dataMap = new HashMap(); + dataMap.put("fps", result.fps); + dataMap.put("latency", result.getLatencyMillis()); + var uiTargets = new ArrayList>(result.targets.size()); + for (var t : result.targets) { + uiTargets.add(t.toHashMap()); + } + 
dataMap.put("targets", uiTargets); - // Only send Multitag Results if they are present, similar to 3d pose - if (result.multiTagResult.estimatedPose.isPresent) { - var multitagData = new HashMap(); - multitagData.put( - "bestTransform", - SerializationUtils.transformToHashMap(result.multiTagResult.estimatedPose.best)); - multitagData.put("bestReprojectionError", result.multiTagResult.estimatedPose.bestReprojErr); - multitagData.put("fiducialIDsUsed", result.multiTagResult.fiducialIDsUsed); - dataMap.put("multitagResult", multitagData); - } + // Only send Multitag Results if they are present, similar to 3d pose + if (result.multiTagResult.estimatedPose.isPresent) { + var multitagData = new HashMap(); + multitagData.put( + "bestTransform", + SerializationUtils.transformToHashMap(result.multiTagResult.estimatedPose.best)); + multitagData.put("bestReprojectionError", result.multiTagResult.estimatedPose.bestReprojErr); + multitagData.put("fiducialIDsUsed", result.multiTagResult.fiducialIDsUsed); + dataMap.put("multitagResult", multitagData); + } - var uiMap = new HashMap>(); - uiMap.put(index, dataMap); + var uiMap = new HashMap>(); + uiMap.put(index, dataMap); - DataChangeService.getInstance() - .publishEvent(OutgoingUIEvent.wrappedOf("updatePipelineResult", uiMap)); - lastUIResultUpdateTime = now; - } + DataChangeService.getInstance() + .publishEvent(OutgoingUIEvent.wrappedOf("updatePipelineResult", uiMap)); + lastUIResultUpdateTime = now; + } } diff --git a/photon-core/src/main/java/org/photonvision/common/hardware/GPIO/CustomGPIO.java b/photon-core/src/main/java/org/photonvision/common/hardware/GPIO/CustomGPIO.java index f67beb97cc..8b3e282a35 100644 --- a/photon-core/src/main/java/org/photonvision/common/hardware/GPIO/CustomGPIO.java +++ b/photon-core/src/main/java/org/photonvision/common/hardware/GPIO/CustomGPIO.java @@ -21,79 +21,79 @@ import org.photonvision.common.hardware.Platform; public class CustomGPIO extends GPIOBase { - private boolean currentState; - 
private final int port; + private boolean currentState; + private final int port; - public CustomGPIO(int port) { - this.port = port; - } + public CustomGPIO(int port) { + this.port = port; + } - @Override - public void togglePin() { - if (this.port != -1) { - execute( - commands - .get("setState") - .replace("{s}", String.valueOf(!currentState)) - .replace("{p}", String.valueOf(this.port))); - currentState = !currentState; - } + @Override + public void togglePin() { + if (this.port != -1) { + execute( + commands + .get("setState") + .replace("{s}", String.valueOf(!currentState)) + .replace("{p}", String.valueOf(this.port))); + currentState = !currentState; } + } - @Override - public int getPinNumber() { - return port; - } + @Override + public int getPinNumber() { + return port; + } - @Override - public void setStateImpl(boolean state) { - if (this.port != -1) { - execute( - commands - .get("setState") - .replace("{s}", String.valueOf(state)) - .replace("{p}", String.valueOf(port))); - currentState = state; - } + @Override + public void setStateImpl(boolean state) { + if (this.port != -1) { + execute( + commands + .get("setState") + .replace("{s}", String.valueOf(state)) + .replace("{p}", String.valueOf(port))); + currentState = state; } + } - @Override - public boolean shutdown() { - if (this.port != -1) { - execute(commands.get("shutdown")); - return true; - } - return false; + @Override + public boolean shutdown() { + if (this.port != -1) { + execute(commands.get("shutdown")); + return true; } + return false; + } - @Override - public boolean getStateImpl() { - return currentState; - } + @Override + public boolean getStateImpl() { + return currentState; + } - @Override - public void blinkImpl(int pulseTimeMillis, int blinks) { - execute( - commands - .get("blink") - .replace("{pulseTime}", String.valueOf(pulseTimeMillis)) - .replace("{blinks}", String.valueOf(blinks)) - .replace("{p}", String.valueOf(this.port))); - } + @Override + public void blinkImpl(int 
pulseTimeMillis, int blinks) { + execute( + commands + .get("blink") + .replace("{pulseTime}", String.valueOf(pulseTimeMillis)) + .replace("{blinks}", String.valueOf(blinks)) + .replace("{p}", String.valueOf(this.port))); + } - @Override - public void setBrightnessImpl(int brightness) { - execute( - commands - .get("dim") - .replace("{p}", String.valueOf(port)) - .replace("{v}", String.valueOf(brightness))); - } + @Override + public void setBrightnessImpl(int brightness) { + execute( + commands + .get("dim") + .replace("{p}", String.valueOf(port)) + .replace("{v}", String.valueOf(brightness))); + } - public static void setConfig(HardwareConfig config) { - if (Platform.isRaspberryPi()) return; - commands.replace("setState", config.ledSetCommand); - commands.replace("dim", config.ledDimCommand); - commands.replace("blink", config.ledBlinkCommand); - } + public static void setConfig(HardwareConfig config) { + if (Platform.isRaspberryPi()) return; + commands.replace("setState", config.ledSetCommand); + commands.replace("dim", config.ledDimCommand); + commands.replace("blink", config.ledBlinkCommand); + } } diff --git a/photon-core/src/main/java/org/photonvision/common/hardware/GPIO/GPIOBase.java b/photon-core/src/main/java/org/photonvision/common/hardware/GPIO/GPIOBase.java index a1d25be98f..5b9d6ba368 100644 --- a/photon-core/src/main/java/org/photonvision/common/hardware/GPIO/GPIOBase.java +++ b/photon-core/src/main/java/org/photonvision/common/hardware/GPIO/GPIOBase.java @@ -24,76 +24,76 @@ import org.photonvision.common.util.ShellExec; public abstract class GPIOBase { - private static final Logger logger = new Logger(GPIOBase.class, LogGroup.General); - private static final ShellExec runCommand = new ShellExec(true, true); - - protected static HashMap commands = - new HashMap<>() { - { - put("setState", ""); - put("shutdown", ""); - put("dim", ""); - put("blink", ""); - } - }; - - protected static String execute(String command) { - try { - 
runCommand.executeBashCommand(command); - } catch (Exception e) { - logger.error(Arrays.toString(e.getStackTrace())); - return ""; + private static final Logger logger = new Logger(GPIOBase.class, LogGroup.General); + private static final ShellExec runCommand = new ShellExec(true, true); + + protected static HashMap commands = + new HashMap<>() { + { + put("setState", ""); + put("shutdown", ""); + put("dim", ""); + put("blink", ""); } - return runCommand.getOutput(); + }; + + protected static String execute(String command) { + try { + runCommand.executeBashCommand(command); + } catch (Exception e) { + logger.error(Arrays.toString(e.getStackTrace())); + return ""; } + return runCommand.getOutput(); + } - public abstract int getPinNumber(); + public abstract int getPinNumber(); - public void setState(boolean state) { - if (getPinNumber() != -1) { - setStateImpl(state); - } + public void setState(boolean state) { + if (getPinNumber() != -1) { + setStateImpl(state); } + } - protected abstract void setStateImpl(boolean state); + protected abstract void setStateImpl(boolean state); - public final void setOff() { - setState(false); - } + public final void setOff() { + setState(false); + } - public final void setOn() { - setState(true); - } + public final void setOn() { + setState(true); + } - public void togglePin() { - setState(!getStateImpl()); - } + public void togglePin() { + setState(!getStateImpl()); + } - public abstract boolean shutdown(); + public abstract boolean shutdown(); - public final boolean getState() { - if (getPinNumber() != -1) { - return getStateImpl(); - } else return false; - } + public final boolean getState() { + if (getPinNumber() != -1) { + return getStateImpl(); + } else return false; + } - public abstract boolean getStateImpl(); + public abstract boolean getStateImpl(); - public final void blink(int pulseTimeMillis, int blinks) { - if (getPinNumber() != -1) { - blinkImpl(pulseTimeMillis, blinks); - } + public final void blink(int 
pulseTimeMillis, int blinks) { + if (getPinNumber() != -1) { + blinkImpl(pulseTimeMillis, blinks); } + } - protected abstract void blinkImpl(int pulseTimeMillis, int blinks); + protected abstract void blinkImpl(int pulseTimeMillis, int blinks); - public final void setBrightness(int brightness) { - if (getPinNumber() != -1) { - if (brightness > 100) brightness = 100; - if (brightness < 0) brightness = 0; - setBrightnessImpl(brightness); - } + public final void setBrightness(int brightness) { + if (getPinNumber() != -1) { + if (brightness > 100) brightness = 100; + if (brightness < 0) brightness = 0; + setBrightnessImpl(brightness); } + } - protected abstract void setBrightnessImpl(int brightness); + protected abstract void setBrightnessImpl(int brightness); } diff --git a/photon-core/src/main/java/org/photonvision/common/hardware/GPIO/pi/PigpioCommand.java b/photon-core/src/main/java/org/photonvision/common/hardware/GPIO/pi/PigpioCommand.java index 932674bb9e..7ba3266ffe 100644 --- a/photon-core/src/main/java/org/photonvision/common/hardware/GPIO/pi/PigpioCommand.java +++ b/photon-core/src/main/java/org/photonvision/common/hardware/GPIO/pi/PigpioCommand.java @@ -19,22 +19,22 @@ @SuppressWarnings("SpellCheckingInspection") public enum PigpioCommand { - PCMD_READ(3), // int gpio_read(unsigned gpio) - PCMD_WRITE(4), // int gpio_write(unsigned gpio, unsigned level) - PCMD_WVCLR(27), // int wave_clear(void) - PCMD_WVAG(28), // int wave_add_generic(unsigned numPulses, gpioPulse_t *pulses) - PCMD_WVHLT(33), // int wave_tx_stop(void) - PCMD_WVCRE(49), // int wave_create(void) - PCMD_WVDEL(50), // int wave_delete(unsigned wave_id) - PCMD_WVTX(51), // int wave_tx_send(unsigned wave_id) (once) - PCMD_WVTXR(52), // int wave_tx_send(unsigned wave_id) (repeat) - PCMD_GDC(83), // int get_duty_cyle(unsigned user_gpio) - PCMD_HP(86), // int hardware_pwm(unsigned gpio, unsigned PWMfreq, unsigned PWMduty) - PCMD_WVTXM(100); // int wave_tx_send(unsigned wave_id, unsigned wave_mode) + 
PCMD_READ(3), // int gpio_read(unsigned gpio) + PCMD_WRITE(4), // int gpio_write(unsigned gpio, unsigned level) + PCMD_WVCLR(27), // int wave_clear(void) + PCMD_WVAG(28), // int wave_add_generic(unsigned numPulses, gpioPulse_t *pulses) + PCMD_WVHLT(33), // int wave_tx_stop(void) + PCMD_WVCRE(49), // int wave_create(void) + PCMD_WVDEL(50), // int wave_delete(unsigned wave_id) + PCMD_WVTX(51), // int wave_tx_send(unsigned wave_id) (once) + PCMD_WVTXR(52), // int wave_tx_send(unsigned wave_id) (repeat) + PCMD_GDC(83), // int get_duty_cyle(unsigned user_gpio) + PCMD_HP(86), // int hardware_pwm(unsigned gpio, unsigned PWMfreq, unsigned PWMduty) + PCMD_WVTXM(100); // int wave_tx_send(unsigned wave_id, unsigned wave_mode) - public final int value; + public final int value; - PigpioCommand(int value) { - this.value = value; - } + PigpioCommand(int value) { + this.value = value; + } } diff --git a/photon-core/src/main/java/org/photonvision/common/hardware/GPIO/pi/PigpioException.java b/photon-core/src/main/java/org/photonvision/common/hardware/GPIO/pi/PigpioException.java index 3ad737777d..dfa2191206 100644 --- a/photon-core/src/main/java/org/photonvision/common/hardware/GPIO/pi/PigpioException.java +++ b/photon-core/src/main/java/org/photonvision/common/hardware/GPIO/pi/PigpioException.java @@ -27,317 +27,317 @@ */ @SuppressWarnings({"SpellCheckingInspection", "unused", "RedundantSuppression"}) public class PigpioException extends Exception { - private int rc = -99999999; - private static final long serialVersionUID = 443595760654129068L; + private int rc = -99999999; + private static final long serialVersionUID = 443595760654129068L; - public PigpioException() { - super(); - } + public PigpioException() { + super(); + } - public PigpioException(int rc) { - super(); - this.rc = rc; - } + public PigpioException(int rc) { + super(); + this.rc = rc; + } - public PigpioException(int rc, String msg) { - super(msg); - this.rc = rc; - } + public PigpioException(int rc, String 
msg) { + super(msg); + this.rc = rc; + } - public PigpioException(String arg0, Throwable arg1, boolean arg2, boolean arg3) { - super(arg0, arg1, arg2, arg3); - } + public PigpioException(String arg0, Throwable arg1, boolean arg2, boolean arg3) { + super(arg0, arg1, arg2, arg3); + } - public PigpioException(String arg0, Throwable arg1) { - super(arg0, arg1); - } + public PigpioException(String arg0, Throwable arg1) { + super(arg0, arg1); + } - public PigpioException(String arg0) { - super(arg0); - } + public PigpioException(String arg0) { + super(arg0); + } - public PigpioException(Throwable arg0) { - super(arg0); - } + public PigpioException(Throwable arg0) { + super(arg0); + } - @Override - public String getMessage() { - return "(" + rc + ") " + getMessageForError(rc); - } + @Override + public String getMessage() { + return "(" + rc + ") " + getMessageForError(rc); + } - /** - * Retrieve the error code that was returned by the underlying Pigpio call. - * - * @return The error code that was returned by the underlying Pigpio call. - */ - public int getErrorCode() { - return rc; - } // End of getErrorCode + /** + * Retrieve the error code that was returned by the underlying Pigpio call. + * + * @return The error code that was returned by the underlying Pigpio call. 
+ */ + public int getErrorCode() { + return rc; + } // End of getErrorCode - // Public constants for the error codes that can be thrown by Pigpio - public static final int PI_INIT_FAILED = -1; // gpioInitialise failed - public static final int PI_BAD_USER_GPIO = -2; // gpio not 0-31 - public static final int PI_BAD_GPIO = -3; // gpio not 0-53 - public static final int PI_BAD_MODE = -4; // mode not 0-7 - public static final int PI_BAD_LEVEL = -5; // level not 0-1 - public static final int PI_BAD_PUD = -6; // pud not 0-2 - public static final int PI_BAD_PULSEWIDTH = -7; // pulsewidth not 0 or 500-2500 - public static final int PI_BAD_DUTYCYCLE = -8; // dutycycle outside set range - public static final int PI_BAD_TIMER = -9; // timer not 0-9 - public static final int PI_BAD_MS = -10; // ms not 10-60000 - public static final int PI_BAD_TIMETYPE = -11; // timetype not 0-1 - public static final int PI_BAD_SECONDS = -12; // seconds < 0 - public static final int PI_BAD_MICROS = -13; // micros not 0-999999 - public static final int PI_TIMER_FAILED = -14; // gpioSetTimerFunc failed - public static final int PI_BAD_WDOG_TIMEOUT = -15; // timeout not 0-60000 - public static final int PI_NO_ALERT_FUNC = -16; // DEPRECATED - public static final int PI_BAD_CLK_PERIPH = -17; // clock peripheral not 0-1 - public static final int PI_BAD_CLK_SOURCE = -18; // DEPRECATED - public static final int PI_BAD_CLK_MICROS = -19; // clock micros not 1, 2, 4, 5, 8, or 10 - public static final int PI_BAD_BUF_MILLIS = -20; // buf millis not 100-10000 - public static final int PI_BAD_DUTYRANGE = -21; // dutycycle range not 25-40000 - public static final int PI_BAD_DUTY_RANGE = -21; // DEPRECATED (use PI_BAD_DUTYRANGE) - public static final int PI_BAD_SIGNUM = -22; // signum not 0-63 - public static final int PI_BAD_PATHNAME = -23; // can't open pathname - public static final int PI_NO_HANDLE = -24; // no handle available - public static final int PI_BAD_HANDLE = -25; // unknown handle - public 
static final int PI_BAD_IF_FLAGS = -26; // ifFlags > 3 - public static final int PI_BAD_CHANNEL = -27; // DMA channel not 0-14 - public static final int PI_BAD_PRIM_CHANNEL = -27; // DMA primary channel not 0-14 - public static final int PI_BAD_SOCKET_PORT = -28; // socket port not 1024-32000 - public static final int PI_BAD_FIFO_COMMAND = -29; // unrecognized fifo command - public static final int PI_BAD_SECO_CHANNEL = -30; // DMA secondary channel not 0-6 - public static final int PI_NOT_INITIALISED = -31; // function called before gpioInitialise - public static final int PI_INITIALISED = -32; // function called after gpioInitialise - public static final int PI_BAD_WAVE_MODE = -33; // waveform mode not 0-1 - public static final int PI_BAD_CFG_INTERNAL = -34; // bad parameter in gpioCfgInternals call - public static final int PI_BAD_WAVE_BAUD = -35; // baud rate not 50-250K(RX)/50-1M(TX) - public static final int PI_TOO_MANY_PULSES = -36; // waveform has too many pulses - public static final int PI_TOO_MANY_CHARS = -37; // waveform has too many chars - public static final int PI_NOT_SERIAL_GPIO = -38; // no serial read in progress on gpio - public static final int PI_BAD_SERIAL_STRUC = -39; // bad (null) serial structure parameter - public static final int PI_BAD_SERIAL_BUF = -40; // bad (null) serial buf parameter - public static final int PI_NOT_PERMITTED = -41; // gpio operation not permitted - public static final int PI_SOME_PERMITTED = -42; // one or more gpios not permitted - public static final int PI_BAD_WVSC_COMMND = -43; // bad WVSC subcommand - public static final int PI_BAD_WVSM_COMMND = -44; // bad WVSM subcommand - public static final int PI_BAD_WVSP_COMMND = -45; // bad WVSP subcommand - public static final int PI_BAD_PULSELEN = -46; // trigger pulse length not 1-100 - public static final int PI_BAD_SCRIPT = -47; // invalid script - public static final int PI_BAD_SCRIPT_ID = -48; // unknown script id - public static final int PI_BAD_SER_OFFSET = 
-49; // add serial data offset > 30 minutes - public static final int PI_GPIO_IN_USE = -50; // gpio already in use - public static final int PI_BAD_SERIAL_COUNT = -51; // must read at least a byte at a time - public static final int PI_BAD_PARAM_NUM = -52; // script parameter id not 0-9 - public static final int PI_DUP_TAG = -53; // script has duplicate tag - public static final int PI_TOO_MANY_TAGS = -54; // script has too many tags - public static final int PI_BAD_SCRIPT_CMD = -55; // illegal script command - public static final int PI_BAD_VAR_NUM = -56; // script variable id not 0-149 - public static final int PI_NO_SCRIPT_ROOM = -57; // no more room for scripts - public static final int PI_NO_MEMORY = -58; // can't allocate temporary memory - public static final int PI_SOCK_READ_FAILED = -59; // socket read failed - public static final int PI_SOCK_WRIT_FAILED = -60; // socket write failed - public static final int PI_TOO_MANY_PARAM = -61; // too many script parameters (> 10) - public static final int PI_NOT_HALTED = -62; // script already running or failed - public static final int PI_BAD_TAG = -63; // script has unresolved tag - public static final int PI_BAD_MICS_DELAY = -64; // bad MICS delay (too large) - public static final int PI_BAD_MILS_DELAY = -65; // bad MILS delay (too large) - public static final int PI_BAD_WAVE_ID = -66; // non existent wave id - public static final int PI_TOO_MANY_CBS = -67; // No more CBs for waveform - public static final int PI_TOO_MANY_OOL = -68; // No more OOL for waveform - public static final int PI_EMPTY_WAVEFORM = -69; // attempt to create an empty waveform - public static final int PI_NO_WAVEFORM_ID = -70; // no more waveforms - public static final int PI_I2C_OPEN_FAILED = -71; // can't open I2C device - public static final int PI_SER_OPEN_FAILED = -72; // can't open serial device - public static final int PI_SPI_OPEN_FAILED = -73; // can't open SPI device - public static final int PI_BAD_I2C_BUS = -74; // bad I2C bus - 
public static final int PI_BAD_I2C_ADDR = -75; // bad I2C address - public static final int PI_BAD_SPI_CHANNEL = -76; // bad SPI channel - public static final int PI_BAD_FLAGS = -77; // bad i2c/spi/ser open flags - public static final int PI_BAD_SPI_SPEED = -78; // bad SPI speed - public static final int PI_BAD_SER_DEVICE = -79; // bad serial device name - public static final int PI_BAD_SER_SPEED = -80; // bad serial baud rate - public static final int PI_BAD_PARAM = -81; // bad i2c/spi/ser parameter - public static final int PI_I2C_WRITE_FAILED = -82; // i2c write failed - public static final int PI_I2C_READ_FAILED = -83; // i2c read failed - public static final int PI_BAD_SPI_COUNT = -84; // bad SPI count - public static final int PI_SER_WRITE_FAILED = -85; // ser write failed - public static final int PI_SER_READ_FAILED = -86; // ser read failed - public static final int PI_SER_READ_NO_DATA = -87; // ser read no data available - public static final int PI_UNKNOWN_COMMAND = -88; // unknown command - public static final int PI_SPI_XFER_FAILED = -89; // spi xfer/read/write failed - public static final int PI_BAD_POINTER = -90; // bad (NULL) pointer - public static final int PI_NO_AUX_SPI = -91; // need a A+/B+/Pi2 for auxiliary SPI - public static final int PI_NOT_PWM_GPIO = -92; // gpio is not in use for PWM - public static final int PI_NOT_SERVO_GPIO = -93; // gpio is not in use for servo pulses - public static final int PI_NOT_HCLK_GPIO = -94; // gpio has no hardware clock - public static final int PI_NOT_HPWM_GPIO = -95; // gpio has no hardware PWM - public static final int PI_BAD_HPWM_FREQ = -96; // hardware PWM frequency not 1-125M - public static final int PI_BAD_HPWM_DUTY = -97; // hardware PWM dutycycle not 0-1M - public static final int PI_BAD_HCLK_FREQ = -98; // hardware clock frequency not 4689-250M - public static final int PI_BAD_HCLK_PASS = -99; // need password to use hardware clock 1 - public static final int PI_HPWM_ILLEGAL = -100; // illegal, PWM 
in use for main clock - public static final int PI_BAD_DATABITS = -101; // serial data bits not 1-32 - public static final int PI_BAD_STOPBITS = -102; // serial (half) stop bits not 2-8 - public static final int PI_MSG_TOOBIG = -103; // socket/pipe message too big - public static final int PI_BAD_MALLOC_MODE = -104; // bad memory allocation mode - public static final int PI_TOO_MANY_SEGS = -105; // too many I2C transaction parts - public static final int PI_BAD_I2C_SEG = -106; // a combined I2C transaction failed - public static final int PI_BAD_SMBUS_CMD = -107; - public static final int PI_NOT_I2C_GPIO = -108; - public static final int PI_BAD_I2C_WLEN = -109; - public static final int PI_BAD_I2C_RLEN = -110; - public static final int PI_BAD_I2C_CMD = -111; - public static final int PI_BAD_I2C_BAUD = -112; - public static final int PI_CHAIN_LOOP_CNT = -113; - public static final int PI_BAD_CHAIN_LOOP = -114; - public static final int PI_CHAIN_COUNTER = -115; - public static final int PI_BAD_CHAIN_CMD = -116; - public static final int PI_BAD_CHAIN_DELAY = -117; - public static final int PI_CHAIN_NESTING = -118; - public static final int PI_CHAIN_TOO_BIG = -119; - public static final int PI_DEPRECATED = -120; - public static final int PI_BAD_SER_INVERT = -121; - public static final int PI_BAD_EDGE = -122; - public static final int PI_BAD_ISR_INIT = -123; - public static final int PI_BAD_FOREVER = -124; - public static final int PI_BAD_FILTER = -125; + // Public constants for the error codes that can be thrown by Pigpio + public static final int PI_INIT_FAILED = -1; // gpioInitialise failed + public static final int PI_BAD_USER_GPIO = -2; // gpio not 0-31 + public static final int PI_BAD_GPIO = -3; // gpio not 0-53 + public static final int PI_BAD_MODE = -4; // mode not 0-7 + public static final int PI_BAD_LEVEL = -5; // level not 0-1 + public static final int PI_BAD_PUD = -6; // pud not 0-2 + public static final int PI_BAD_PULSEWIDTH = -7; // pulsewidth not 0 or 
500-2500 + public static final int PI_BAD_DUTYCYCLE = -8; // dutycycle outside set range + public static final int PI_BAD_TIMER = -9; // timer not 0-9 + public static final int PI_BAD_MS = -10; // ms not 10-60000 + public static final int PI_BAD_TIMETYPE = -11; // timetype not 0-1 + public static final int PI_BAD_SECONDS = -12; // seconds < 0 + public static final int PI_BAD_MICROS = -13; // micros not 0-999999 + public static final int PI_TIMER_FAILED = -14; // gpioSetTimerFunc failed + public static final int PI_BAD_WDOG_TIMEOUT = -15; // timeout not 0-60000 + public static final int PI_NO_ALERT_FUNC = -16; // DEPRECATED + public static final int PI_BAD_CLK_PERIPH = -17; // clock peripheral not 0-1 + public static final int PI_BAD_CLK_SOURCE = -18; // DEPRECATED + public static final int PI_BAD_CLK_MICROS = -19; // clock micros not 1, 2, 4, 5, 8, or 10 + public static final int PI_BAD_BUF_MILLIS = -20; // buf millis not 100-10000 + public static final int PI_BAD_DUTYRANGE = -21; // dutycycle range not 25-40000 + public static final int PI_BAD_DUTY_RANGE = -21; // DEPRECATED (use PI_BAD_DUTYRANGE) + public static final int PI_BAD_SIGNUM = -22; // signum not 0-63 + public static final int PI_BAD_PATHNAME = -23; // can't open pathname + public static final int PI_NO_HANDLE = -24; // no handle available + public static final int PI_BAD_HANDLE = -25; // unknown handle + public static final int PI_BAD_IF_FLAGS = -26; // ifFlags > 3 + public static final int PI_BAD_CHANNEL = -27; // DMA channel not 0-14 + public static final int PI_BAD_PRIM_CHANNEL = -27; // DMA primary channel not 0-14 + public static final int PI_BAD_SOCKET_PORT = -28; // socket port not 1024-32000 + public static final int PI_BAD_FIFO_COMMAND = -29; // unrecognized fifo command + public static final int PI_BAD_SECO_CHANNEL = -30; // DMA secondary channel not 0-6 + public static final int PI_NOT_INITIALISED = -31; // function called before gpioInitialise + public static final int PI_INITIALISED = -32; 
// function called after gpioInitialise + public static final int PI_BAD_WAVE_MODE = -33; // waveform mode not 0-1 + public static final int PI_BAD_CFG_INTERNAL = -34; // bad parameter in gpioCfgInternals call + public static final int PI_BAD_WAVE_BAUD = -35; // baud rate not 50-250K(RX)/50-1M(TX) + public static final int PI_TOO_MANY_PULSES = -36; // waveform has too many pulses + public static final int PI_TOO_MANY_CHARS = -37; // waveform has too many chars + public static final int PI_NOT_SERIAL_GPIO = -38; // no serial read in progress on gpio + public static final int PI_BAD_SERIAL_STRUC = -39; // bad (null) serial structure parameter + public static final int PI_BAD_SERIAL_BUF = -40; // bad (null) serial buf parameter + public static final int PI_NOT_PERMITTED = -41; // gpio operation not permitted + public static final int PI_SOME_PERMITTED = -42; // one or more gpios not permitted + public static final int PI_BAD_WVSC_COMMND = -43; // bad WVSC subcommand + public static final int PI_BAD_WVSM_COMMND = -44; // bad WVSM subcommand + public static final int PI_BAD_WVSP_COMMND = -45; // bad WVSP subcommand + public static final int PI_BAD_PULSELEN = -46; // trigger pulse length not 1-100 + public static final int PI_BAD_SCRIPT = -47; // invalid script + public static final int PI_BAD_SCRIPT_ID = -48; // unknown script id + public static final int PI_BAD_SER_OFFSET = -49; // add serial data offset > 30 minutes + public static final int PI_GPIO_IN_USE = -50; // gpio already in use + public static final int PI_BAD_SERIAL_COUNT = -51; // must read at least a byte at a time + public static final int PI_BAD_PARAM_NUM = -52; // script parameter id not 0-9 + public static final int PI_DUP_TAG = -53; // script has duplicate tag + public static final int PI_TOO_MANY_TAGS = -54; // script has too many tags + public static final int PI_BAD_SCRIPT_CMD = -55; // illegal script command + public static final int PI_BAD_VAR_NUM = -56; // script variable id not 0-149 + public 
static final int PI_NO_SCRIPT_ROOM = -57; // no more room for scripts + public static final int PI_NO_MEMORY = -58; // can't allocate temporary memory + public static final int PI_SOCK_READ_FAILED = -59; // socket read failed + public static final int PI_SOCK_WRIT_FAILED = -60; // socket write failed + public static final int PI_TOO_MANY_PARAM = -61; // too many script parameters (> 10) + public static final int PI_NOT_HALTED = -62; // script already running or failed + public static final int PI_BAD_TAG = -63; // script has unresolved tag + public static final int PI_BAD_MICS_DELAY = -64; // bad MICS delay (too large) + public static final int PI_BAD_MILS_DELAY = -65; // bad MILS delay (too large) + public static final int PI_BAD_WAVE_ID = -66; // non existent wave id + public static final int PI_TOO_MANY_CBS = -67; // No more CBs for waveform + public static final int PI_TOO_MANY_OOL = -68; // No more OOL for waveform + public static final int PI_EMPTY_WAVEFORM = -69; // attempt to create an empty waveform + public static final int PI_NO_WAVEFORM_ID = -70; // no more waveforms + public static final int PI_I2C_OPEN_FAILED = -71; // can't open I2C device + public static final int PI_SER_OPEN_FAILED = -72; // can't open serial device + public static final int PI_SPI_OPEN_FAILED = -73; // can't open SPI device + public static final int PI_BAD_I2C_BUS = -74; // bad I2C bus + public static final int PI_BAD_I2C_ADDR = -75; // bad I2C address + public static final int PI_BAD_SPI_CHANNEL = -76; // bad SPI channel + public static final int PI_BAD_FLAGS = -77; // bad i2c/spi/ser open flags + public static final int PI_BAD_SPI_SPEED = -78; // bad SPI speed + public static final int PI_BAD_SER_DEVICE = -79; // bad serial device name + public static final int PI_BAD_SER_SPEED = -80; // bad serial baud rate + public static final int PI_BAD_PARAM = -81; // bad i2c/spi/ser parameter + public static final int PI_I2C_WRITE_FAILED = -82; // i2c write failed + public static final int 
PI_I2C_READ_FAILED = -83; // i2c read failed + public static final int PI_BAD_SPI_COUNT = -84; // bad SPI count + public static final int PI_SER_WRITE_FAILED = -85; // ser write failed + public static final int PI_SER_READ_FAILED = -86; // ser read failed + public static final int PI_SER_READ_NO_DATA = -87; // ser read no data available + public static final int PI_UNKNOWN_COMMAND = -88; // unknown command + public static final int PI_SPI_XFER_FAILED = -89; // spi xfer/read/write failed + public static final int PI_BAD_POINTER = -90; // bad (NULL) pointer + public static final int PI_NO_AUX_SPI = -91; // need a A+/B+/Pi2 for auxiliary SPI + public static final int PI_NOT_PWM_GPIO = -92; // gpio is not in use for PWM + public static final int PI_NOT_SERVO_GPIO = -93; // gpio is not in use for servo pulses + public static final int PI_NOT_HCLK_GPIO = -94; // gpio has no hardware clock + public static final int PI_NOT_HPWM_GPIO = -95; // gpio has no hardware PWM + public static final int PI_BAD_HPWM_FREQ = -96; // hardware PWM frequency not 1-125M + public static final int PI_BAD_HPWM_DUTY = -97; // hardware PWM dutycycle not 0-1M + public static final int PI_BAD_HCLK_FREQ = -98; // hardware clock frequency not 4689-250M + public static final int PI_BAD_HCLK_PASS = -99; // need password to use hardware clock 1 + public static final int PI_HPWM_ILLEGAL = -100; // illegal, PWM in use for main clock + public static final int PI_BAD_DATABITS = -101; // serial data bits not 1-32 + public static final int PI_BAD_STOPBITS = -102; // serial (half) stop bits not 2-8 + public static final int PI_MSG_TOOBIG = -103; // socket/pipe message too big + public static final int PI_BAD_MALLOC_MODE = -104; // bad memory allocation mode + public static final int PI_TOO_MANY_SEGS = -105; // too many I2C transaction parts + public static final int PI_BAD_I2C_SEG = -106; // a combined I2C transaction failed + public static final int PI_BAD_SMBUS_CMD = -107; + public static final int 
PI_NOT_I2C_GPIO = -108; + public static final int PI_BAD_I2C_WLEN = -109; + public static final int PI_BAD_I2C_RLEN = -110; + public static final int PI_BAD_I2C_CMD = -111; + public static final int PI_BAD_I2C_BAUD = -112; + public static final int PI_CHAIN_LOOP_CNT = -113; + public static final int PI_BAD_CHAIN_LOOP = -114; + public static final int PI_CHAIN_COUNTER = -115; + public static final int PI_BAD_CHAIN_CMD = -116; + public static final int PI_BAD_CHAIN_DELAY = -117; + public static final int PI_CHAIN_NESTING = -118; + public static final int PI_CHAIN_TOO_BIG = -119; + public static final int PI_DEPRECATED = -120; + public static final int PI_BAD_SER_INVERT = -121; + public static final int PI_BAD_EDGE = -122; + public static final int PI_BAD_ISR_INIT = -123; + public static final int PI_BAD_FOREVER = -124; + public static final int PI_BAD_FILTER = -125; - public static final int PI_PIGIF_ERR_0 = -2000; - public static final int PI_PIGIF_ERR_99 = -2099; + public static final int PI_PIGIF_ERR_0 = -2000; + public static final int PI_PIGIF_ERR_99 = -2099; - public static final int PI_CUSTOM_ERR_0 = -3000; - public static final int PI_CUSTOM_ERR_999 = -3999; + public static final int PI_CUSTOM_ERR_0 = -3000; + public static final int PI_CUSTOM_ERR_999 = -3999; - private static final HashMap errorMessages = new HashMap<>(); + private static final HashMap errorMessages = new HashMap<>(); - static { - errorMessages.put(PI_INIT_FAILED, "pigpio initialisation failed"); - errorMessages.put(PI_BAD_USER_GPIO, "GPIO not 0-31"); - errorMessages.put(PI_BAD_GPIO, "GPIO not 0-53"); - errorMessages.put(PI_BAD_MODE, "mode not 0-7"); - errorMessages.put(PI_BAD_LEVEL, "level not 0-1"); - errorMessages.put(PI_BAD_PUD, "pud not 0-2"); - errorMessages.put(PI_BAD_PULSEWIDTH, "pulsewidth not 0 or 500-2500"); - errorMessages.put(PI_BAD_DUTYCYCLE, "dutycycle not 0-range (default 255)"); - errorMessages.put(PI_BAD_TIMER, "timer not 0-9"); - errorMessages.put(PI_BAD_MS, "ms not 
10-60000"); - errorMessages.put(PI_BAD_TIMETYPE, "timetype not 0-1"); - errorMessages.put(PI_BAD_SECONDS, "seconds < 0"); - errorMessages.put(PI_BAD_MICROS, "micros not 0-999999"); - errorMessages.put(PI_TIMER_FAILED, "gpioSetTimerFunc failed"); - errorMessages.put(PI_BAD_WDOG_TIMEOUT, "timeout not 0-60000"); - errorMessages.put(PI_NO_ALERT_FUNC, "DEPRECATED"); - errorMessages.put(PI_BAD_CLK_PERIPH, "clock peripheral not 0-1"); - errorMessages.put(PI_BAD_CLK_SOURCE, "DEPRECATED"); - errorMessages.put(PI_BAD_CLK_MICROS, "clock micros not 1, 2, 4, 5, 8, or 10"); - errorMessages.put(PI_BAD_BUF_MILLIS, "buf millis not 100-10000"); - errorMessages.put(PI_BAD_DUTYRANGE, "dutycycle range not 25-40000"); - errorMessages.put(PI_BAD_SIGNUM, "signum not 0-63"); - errorMessages.put(PI_BAD_PATHNAME, "can't open pathname"); - errorMessages.put(PI_NO_HANDLE, "no handle available"); - errorMessages.put(PI_BAD_HANDLE, "unknown handle"); - errorMessages.put(PI_BAD_IF_FLAGS, "ifFlags > 3"); - errorMessages.put(PI_BAD_CHANNEL, "DMA channel not 0-14"); - errorMessages.put(PI_BAD_SOCKET_PORT, "socket port not 1024-30000"); - errorMessages.put(PI_BAD_FIFO_COMMAND, "unknown fifo command"); - errorMessages.put(PI_BAD_SECO_CHANNEL, "DMA secondary channel not 0-14"); - errorMessages.put(PI_NOT_INITIALISED, "function called before gpioInitialise"); - errorMessages.put(PI_INITIALISED, "function called after gpioInitialise"); - errorMessages.put(PI_BAD_WAVE_MODE, "waveform mode not 0-1"); - errorMessages.put(PI_BAD_CFG_INTERNAL, "bad parameter in gpioCfgInternals call"); - errorMessages.put(PI_BAD_WAVE_BAUD, "baud rate not 50-250000(RX)/1000000(TX)"); - errorMessages.put(PI_TOO_MANY_PULSES, "waveform has too many pulses"); - errorMessages.put(PI_TOO_MANY_CHARS, "waveform has too many chars"); - errorMessages.put(PI_NOT_SERIAL_GPIO, "no bit bang serial read in progress on GPIO"); - errorMessages.put(PI_NOT_PERMITTED, "no permission to update GPIO"); - errorMessages.put(PI_SOME_PERMITTED, "no 
permission to update one or more GPIO"); - errorMessages.put(PI_BAD_WVSC_COMMND, "bad WVSC subcommand"); - errorMessages.put(PI_BAD_WVSM_COMMND, "bad WVSM subcommand"); - errorMessages.put(PI_BAD_WVSP_COMMND, "bad WVSP subcommand"); - errorMessages.put(PI_BAD_PULSELEN, "trigger pulse length not 1-100"); - errorMessages.put(PI_BAD_SCRIPT, "invalid script"); - errorMessages.put(PI_BAD_SCRIPT_ID, "unknown script id"); - errorMessages.put(PI_BAD_SER_OFFSET, "add serial data offset > 30 minute"); - errorMessages.put(PI_GPIO_IN_USE, "GPIO already in use"); - errorMessages.put(PI_BAD_SERIAL_COUNT, "must read at least a byte at a time"); - errorMessages.put(PI_BAD_PARAM_NUM, "script parameter id not 0-9"); - errorMessages.put(PI_DUP_TAG, "script has duplicate tag"); - errorMessages.put(PI_TOO_MANY_TAGS, "script has too many tags"); - errorMessages.put(PI_BAD_SCRIPT_CMD, "illegal script command"); - errorMessages.put(PI_BAD_VAR_NUM, "script variable id not 0-149"); - errorMessages.put(PI_NO_SCRIPT_ROOM, "no more room for scripts"); - errorMessages.put(PI_NO_MEMORY, "can't allocate temporary memory"); - errorMessages.put(PI_SOCK_READ_FAILED, "socket read failed"); - errorMessages.put(PI_SOCK_WRIT_FAILED, "socket write failed"); - errorMessages.put(PI_TOO_MANY_PARAM, "too many script parameters (> 10)"); - errorMessages.put(PI_NOT_HALTED, "script already running or failed"); - errorMessages.put(PI_BAD_TAG, "script has unresolved tag"); - errorMessages.put(PI_BAD_MICS_DELAY, "bad MICS delay (too large)"); - errorMessages.put(PI_BAD_MILS_DELAY, "bad MILS delay (too large)"); - errorMessages.put(PI_BAD_WAVE_ID, "non existent wave id"); - errorMessages.put(PI_TOO_MANY_CBS, "No more CBs for waveform"); - errorMessages.put(PI_TOO_MANY_OOL, "No more OOL for waveform"); - errorMessages.put(PI_EMPTY_WAVEFORM, "attempt to create an empty waveform"); - errorMessages.put(PI_NO_WAVEFORM_ID, "No more waveform ids"); - errorMessages.put(PI_I2C_OPEN_FAILED, "can't open I2C device"); - 
errorMessages.put(PI_SER_OPEN_FAILED, "can't open serial device"); - errorMessages.put(PI_SPI_OPEN_FAILED, "can't open SPI device"); - errorMessages.put(PI_BAD_I2C_BUS, "bad I2C bus"); - errorMessages.put(PI_BAD_I2C_ADDR, "bad I2C address"); - errorMessages.put(PI_BAD_SPI_CHANNEL, "bad SPI channel"); - errorMessages.put(PI_BAD_FLAGS, "bad i2c/spi/ser open flags"); - errorMessages.put(PI_BAD_SPI_SPEED, "bad SPI speed"); - errorMessages.put(PI_BAD_SER_DEVICE, "bad serial device name"); - errorMessages.put(PI_BAD_SER_SPEED, "bad serial baud rate"); - errorMessages.put(PI_BAD_PARAM, "bad i2c/spi/ser parameter"); - errorMessages.put(PI_I2C_WRITE_FAILED, "I2C write failed"); - errorMessages.put(PI_I2C_READ_FAILED, "I2C read failed"); - errorMessages.put(PI_BAD_SPI_COUNT, "bad SPI count"); - errorMessages.put(PI_SER_WRITE_FAILED, "ser write failed"); - errorMessages.put(PI_SER_READ_FAILED, "ser read failed"); - errorMessages.put(PI_SER_READ_NO_DATA, "ser read no data available"); - errorMessages.put(PI_UNKNOWN_COMMAND, "unknown command"); - errorMessages.put(PI_SPI_XFER_FAILED, "SPI xfer/read/write failed"); - errorMessages.put(PI_BAD_POINTER, "bad (NULL) pointer"); - errorMessages.put(PI_NO_AUX_SPI, "no auxiliary SPI on Pi A or B"); - errorMessages.put(PI_NOT_PWM_GPIO, "GPIO is not in use for PWM"); - errorMessages.put(PI_NOT_SERVO_GPIO, "GPIO is not in use for servo pulses"); - errorMessages.put(PI_NOT_HCLK_GPIO, "GPIO has no hardware clock"); - errorMessages.put(PI_NOT_HPWM_GPIO, "GPIO has no hardware PWM"); - errorMessages.put(PI_BAD_HPWM_FREQ, "hardware PWM frequency not 1-125M"); - errorMessages.put(PI_BAD_HPWM_DUTY, "hardware PWM dutycycle not 0-1M"); - errorMessages.put(PI_BAD_HCLK_FREQ, "hardware clock frequency not 4689-250M"); - errorMessages.put(PI_BAD_HCLK_PASS, "need password to use hardware clock 1"); - errorMessages.put(PI_HPWM_ILLEGAL, "illegal, PWM in use for main clock"); - errorMessages.put(PI_BAD_DATABITS, "serial data bits not 1-32"); - 
errorMessages.put(PI_BAD_STOPBITS, "serial (half) stop bits not 2-8"); - errorMessages.put(PI_MSG_TOOBIG, "socket/pipe message too big"); - errorMessages.put(PI_BAD_MALLOC_MODE, "bad memory allocation mode"); - errorMessages.put(PI_TOO_MANY_SEGS, "too many I2C transaction segments"); - errorMessages.put(PI_BAD_I2C_SEG, "an I2C transaction segment failed"); - errorMessages.put(PI_BAD_SMBUS_CMD, "SMBus command not supported"); - errorMessages.put(PI_NOT_I2C_GPIO, "no bit bang I2C in progress on GPIO"); - errorMessages.put(PI_BAD_I2C_WLEN, "bad I2C write length"); - errorMessages.put(PI_BAD_I2C_RLEN, "bad I2C read length"); - errorMessages.put(PI_BAD_I2C_CMD, "bad I2C command"); - errorMessages.put(PI_BAD_I2C_BAUD, "bad I2C baud rate, not 50-500k"); - errorMessages.put(PI_CHAIN_LOOP_CNT, "bad chain loop count"); - errorMessages.put(PI_BAD_CHAIN_LOOP, "empty chain loop"); - errorMessages.put(PI_CHAIN_COUNTER, "too many chain counters"); - errorMessages.put(PI_BAD_CHAIN_CMD, "bad chain command"); - errorMessages.put(PI_BAD_CHAIN_DELAY, "bad chain delay micros"); - errorMessages.put(PI_CHAIN_NESTING, "chain counters nested too deeply"); - errorMessages.put(PI_CHAIN_TOO_BIG, "chain is too long"); - errorMessages.put(PI_DEPRECATED, "deprecated function removed"); - errorMessages.put(PI_BAD_SER_INVERT, "bit bang serial invert not 0 or 1"); - errorMessages.put(PI_BAD_EDGE, "bad ISR edge value, not 0-2"); - errorMessages.put(PI_BAD_ISR_INIT, "bad ISR initialisation"); - errorMessages.put(PI_BAD_FOREVER, "loop forever must be last chain command"); - errorMessages.put(PI_BAD_FILTER, "bad filter parameter"); - } + static { + errorMessages.put(PI_INIT_FAILED, "pigpio initialisation failed"); + errorMessages.put(PI_BAD_USER_GPIO, "GPIO not 0-31"); + errorMessages.put(PI_BAD_GPIO, "GPIO not 0-53"); + errorMessages.put(PI_BAD_MODE, "mode not 0-7"); + errorMessages.put(PI_BAD_LEVEL, "level not 0-1"); + errorMessages.put(PI_BAD_PUD, "pud not 0-2"); + 
errorMessages.put(PI_BAD_PULSEWIDTH, "pulsewidth not 0 or 500-2500"); + errorMessages.put(PI_BAD_DUTYCYCLE, "dutycycle not 0-range (default 255)"); + errorMessages.put(PI_BAD_TIMER, "timer not 0-9"); + errorMessages.put(PI_BAD_MS, "ms not 10-60000"); + errorMessages.put(PI_BAD_TIMETYPE, "timetype not 0-1"); + errorMessages.put(PI_BAD_SECONDS, "seconds < 0"); + errorMessages.put(PI_BAD_MICROS, "micros not 0-999999"); + errorMessages.put(PI_TIMER_FAILED, "gpioSetTimerFunc failed"); + errorMessages.put(PI_BAD_WDOG_TIMEOUT, "timeout not 0-60000"); + errorMessages.put(PI_NO_ALERT_FUNC, "DEPRECATED"); + errorMessages.put(PI_BAD_CLK_PERIPH, "clock peripheral not 0-1"); + errorMessages.put(PI_BAD_CLK_SOURCE, "DEPRECATED"); + errorMessages.put(PI_BAD_CLK_MICROS, "clock micros not 1, 2, 4, 5, 8, or 10"); + errorMessages.put(PI_BAD_BUF_MILLIS, "buf millis not 100-10000"); + errorMessages.put(PI_BAD_DUTYRANGE, "dutycycle range not 25-40000"); + errorMessages.put(PI_BAD_SIGNUM, "signum not 0-63"); + errorMessages.put(PI_BAD_PATHNAME, "can't open pathname"); + errorMessages.put(PI_NO_HANDLE, "no handle available"); + errorMessages.put(PI_BAD_HANDLE, "unknown handle"); + errorMessages.put(PI_BAD_IF_FLAGS, "ifFlags > 3"); + errorMessages.put(PI_BAD_CHANNEL, "DMA channel not 0-14"); + errorMessages.put(PI_BAD_SOCKET_PORT, "socket port not 1024-30000"); + errorMessages.put(PI_BAD_FIFO_COMMAND, "unknown fifo command"); + errorMessages.put(PI_BAD_SECO_CHANNEL, "DMA secondary channel not 0-14"); + errorMessages.put(PI_NOT_INITIALISED, "function called before gpioInitialise"); + errorMessages.put(PI_INITIALISED, "function called after gpioInitialise"); + errorMessages.put(PI_BAD_WAVE_MODE, "waveform mode not 0-1"); + errorMessages.put(PI_BAD_CFG_INTERNAL, "bad parameter in gpioCfgInternals call"); + errorMessages.put(PI_BAD_WAVE_BAUD, "baud rate not 50-250000(RX)/1000000(TX)"); + errorMessages.put(PI_TOO_MANY_PULSES, "waveform has too many pulses"); + 
errorMessages.put(PI_TOO_MANY_CHARS, "waveform has too many chars"); + errorMessages.put(PI_NOT_SERIAL_GPIO, "no bit bang serial read in progress on GPIO"); + errorMessages.put(PI_NOT_PERMITTED, "no permission to update GPIO"); + errorMessages.put(PI_SOME_PERMITTED, "no permission to update one or more GPIO"); + errorMessages.put(PI_BAD_WVSC_COMMND, "bad WVSC subcommand"); + errorMessages.put(PI_BAD_WVSM_COMMND, "bad WVSM subcommand"); + errorMessages.put(PI_BAD_WVSP_COMMND, "bad WVSP subcommand"); + errorMessages.put(PI_BAD_PULSELEN, "trigger pulse length not 1-100"); + errorMessages.put(PI_BAD_SCRIPT, "invalid script"); + errorMessages.put(PI_BAD_SCRIPT_ID, "unknown script id"); + errorMessages.put(PI_BAD_SER_OFFSET, "add serial data offset > 30 minute"); + errorMessages.put(PI_GPIO_IN_USE, "GPIO already in use"); + errorMessages.put(PI_BAD_SERIAL_COUNT, "must read at least a byte at a time"); + errorMessages.put(PI_BAD_PARAM_NUM, "script parameter id not 0-9"); + errorMessages.put(PI_DUP_TAG, "script has duplicate tag"); + errorMessages.put(PI_TOO_MANY_TAGS, "script has too many tags"); + errorMessages.put(PI_BAD_SCRIPT_CMD, "illegal script command"); + errorMessages.put(PI_BAD_VAR_NUM, "script variable id not 0-149"); + errorMessages.put(PI_NO_SCRIPT_ROOM, "no more room for scripts"); + errorMessages.put(PI_NO_MEMORY, "can't allocate temporary memory"); + errorMessages.put(PI_SOCK_READ_FAILED, "socket read failed"); + errorMessages.put(PI_SOCK_WRIT_FAILED, "socket write failed"); + errorMessages.put(PI_TOO_MANY_PARAM, "too many script parameters (> 10)"); + errorMessages.put(PI_NOT_HALTED, "script already running or failed"); + errorMessages.put(PI_BAD_TAG, "script has unresolved tag"); + errorMessages.put(PI_BAD_MICS_DELAY, "bad MICS delay (too large)"); + errorMessages.put(PI_BAD_MILS_DELAY, "bad MILS delay (too large)"); + errorMessages.put(PI_BAD_WAVE_ID, "non existent wave id"); + errorMessages.put(PI_TOO_MANY_CBS, "No more CBs for waveform"); + 
errorMessages.put(PI_TOO_MANY_OOL, "No more OOL for waveform"); + errorMessages.put(PI_EMPTY_WAVEFORM, "attempt to create an empty waveform"); + errorMessages.put(PI_NO_WAVEFORM_ID, "No more waveform ids"); + errorMessages.put(PI_I2C_OPEN_FAILED, "can't open I2C device"); + errorMessages.put(PI_SER_OPEN_FAILED, "can't open serial device"); + errorMessages.put(PI_SPI_OPEN_FAILED, "can't open SPI device"); + errorMessages.put(PI_BAD_I2C_BUS, "bad I2C bus"); + errorMessages.put(PI_BAD_I2C_ADDR, "bad I2C address"); + errorMessages.put(PI_BAD_SPI_CHANNEL, "bad SPI channel"); + errorMessages.put(PI_BAD_FLAGS, "bad i2c/spi/ser open flags"); + errorMessages.put(PI_BAD_SPI_SPEED, "bad SPI speed"); + errorMessages.put(PI_BAD_SER_DEVICE, "bad serial device name"); + errorMessages.put(PI_BAD_SER_SPEED, "bad serial baud rate"); + errorMessages.put(PI_BAD_PARAM, "bad i2c/spi/ser parameter"); + errorMessages.put(PI_I2C_WRITE_FAILED, "I2C write failed"); + errorMessages.put(PI_I2C_READ_FAILED, "I2C read failed"); + errorMessages.put(PI_BAD_SPI_COUNT, "bad SPI count"); + errorMessages.put(PI_SER_WRITE_FAILED, "ser write failed"); + errorMessages.put(PI_SER_READ_FAILED, "ser read failed"); + errorMessages.put(PI_SER_READ_NO_DATA, "ser read no data available"); + errorMessages.put(PI_UNKNOWN_COMMAND, "unknown command"); + errorMessages.put(PI_SPI_XFER_FAILED, "SPI xfer/read/write failed"); + errorMessages.put(PI_BAD_POINTER, "bad (NULL) pointer"); + errorMessages.put(PI_NO_AUX_SPI, "no auxiliary SPI on Pi A or B"); + errorMessages.put(PI_NOT_PWM_GPIO, "GPIO is not in use for PWM"); + errorMessages.put(PI_NOT_SERVO_GPIO, "GPIO is not in use for servo pulses"); + errorMessages.put(PI_NOT_HCLK_GPIO, "GPIO has no hardware clock"); + errorMessages.put(PI_NOT_HPWM_GPIO, "GPIO has no hardware PWM"); + errorMessages.put(PI_BAD_HPWM_FREQ, "hardware PWM frequency not 1-125M"); + errorMessages.put(PI_BAD_HPWM_DUTY, "hardware PWM dutycycle not 0-1M"); + errorMessages.put(PI_BAD_HCLK_FREQ, 
"hardware clock frequency not 4689-250M"); + errorMessages.put(PI_BAD_HCLK_PASS, "need password to use hardware clock 1"); + errorMessages.put(PI_HPWM_ILLEGAL, "illegal, PWM in use for main clock"); + errorMessages.put(PI_BAD_DATABITS, "serial data bits not 1-32"); + errorMessages.put(PI_BAD_STOPBITS, "serial (half) stop bits not 2-8"); + errorMessages.put(PI_MSG_TOOBIG, "socket/pipe message too big"); + errorMessages.put(PI_BAD_MALLOC_MODE, "bad memory allocation mode"); + errorMessages.put(PI_TOO_MANY_SEGS, "too many I2C transaction segments"); + errorMessages.put(PI_BAD_I2C_SEG, "an I2C transaction segment failed"); + errorMessages.put(PI_BAD_SMBUS_CMD, "SMBus command not supported"); + errorMessages.put(PI_NOT_I2C_GPIO, "no bit bang I2C in progress on GPIO"); + errorMessages.put(PI_BAD_I2C_WLEN, "bad I2C write length"); + errorMessages.put(PI_BAD_I2C_RLEN, "bad I2C read length"); + errorMessages.put(PI_BAD_I2C_CMD, "bad I2C command"); + errorMessages.put(PI_BAD_I2C_BAUD, "bad I2C baud rate, not 50-500k"); + errorMessages.put(PI_CHAIN_LOOP_CNT, "bad chain loop count"); + errorMessages.put(PI_BAD_CHAIN_LOOP, "empty chain loop"); + errorMessages.put(PI_CHAIN_COUNTER, "too many chain counters"); + errorMessages.put(PI_BAD_CHAIN_CMD, "bad chain command"); + errorMessages.put(PI_BAD_CHAIN_DELAY, "bad chain delay micros"); + errorMessages.put(PI_CHAIN_NESTING, "chain counters nested too deeply"); + errorMessages.put(PI_CHAIN_TOO_BIG, "chain is too long"); + errorMessages.put(PI_DEPRECATED, "deprecated function removed"); + errorMessages.put(PI_BAD_SER_INVERT, "bit bang serial invert not 0 or 1"); + errorMessages.put(PI_BAD_EDGE, "bad ISR edge value, not 0-2"); + errorMessages.put(PI_BAD_ISR_INIT, "bad ISR initialisation"); + errorMessages.put(PI_BAD_FOREVER, "loop forever must be last chain command"); + errorMessages.put(PI_BAD_FILTER, "bad filter parameter"); + } - public static String getMessageForError(int errorCode) { - return errorMessages.get(errorCode); - } + 
public static String getMessageForError(int errorCode) { + return errorMessages.get(errorCode); + } } diff --git a/photon-core/src/main/java/org/photonvision/common/hardware/GPIO/pi/PigpioPin.java b/photon-core/src/main/java/org/photonvision/common/hardware/GPIO/pi/PigpioPin.java index 2e0cc6f0c5..288e950191 100644 --- a/photon-core/src/main/java/org/photonvision/common/hardware/GPIO/pi/PigpioPin.java +++ b/photon-core/src/main/java/org/photonvision/common/hardware/GPIO/pi/PigpioPin.java @@ -22,72 +22,72 @@ import org.photonvision.common.logging.Logger; public class PigpioPin extends GPIOBase { - public static final Logger logger = new Logger(PigpioPin.class, LogGroup.General); - private static final PigpioSocket piSocket = new PigpioSocket(); + public static final Logger logger = new Logger(PigpioPin.class, LogGroup.General); + private static final PigpioSocket piSocket = new PigpioSocket(); - private final boolean isHardwarePWMPin; - private final int pinNo; + private final boolean isHardwarePWMPin; + private final int pinNo; - private boolean hasFailedHardwarePWM; + private boolean hasFailedHardwarePWM; - public PigpioPin(int pinNo) { - isHardwarePWMPin = pinNo == 12 || pinNo == 13 || pinNo == 17 || pinNo == 18; - this.pinNo = pinNo; - } + public PigpioPin(int pinNo) { + isHardwarePWMPin = pinNo == 12 || pinNo == 13 || pinNo == 17 || pinNo == 18; + this.pinNo = pinNo; + } - @Override - public int getPinNumber() { - return pinNo; - } + @Override + public int getPinNumber() { + return pinNo; + } - @Override - protected void setStateImpl(boolean state) { - try { - piSocket.gpioWrite(pinNo, state); - } catch (PigpioException e) { - logger.error("gpioWrite FAIL - " + e.getMessage()); - } + @Override + protected void setStateImpl(boolean state) { + try { + piSocket.gpioWrite(pinNo, state); + } catch (PigpioException e) { + logger.error("gpioWrite FAIL - " + e.getMessage()); } + } - @Override - public boolean shutdown() { - setState(false); - return true; - } + 
@Override + public boolean shutdown() { + setState(false); + return true; + } - @Override - public boolean getStateImpl() { - try { - return piSocket.gpioRead(pinNo); - } catch (PigpioException e) { - logger.error("gpioRead FAIL - " + e.getMessage()); - return false; - } + @Override + public boolean getStateImpl() { + try { + return piSocket.gpioRead(pinNo); + } catch (PigpioException e) { + logger.error("gpioRead FAIL - " + e.getMessage()); + return false; } + } - @Override - protected void blinkImpl(int pulseTimeMillis, int blinks) { - try { - piSocket.generateAndSendWaveform(pulseTimeMillis, blinks, pinNo); - } catch (PigpioException e) { - logger.error("Could not set blink - " + e.getMessage()); - } + @Override + protected void blinkImpl(int pulseTimeMillis, int blinks) { + try { + piSocket.generateAndSendWaveform(pulseTimeMillis, blinks, pinNo); + } catch (PigpioException e) { + logger.error("Could not set blink - " + e.getMessage()); } + } - @Override - protected void setBrightnessImpl(int brightness) { - if (isHardwarePWMPin) { - try { - piSocket.hardwarePWM(pinNo, 22000, (int) (1000000 * (brightness / 100.0))); - } catch (PigpioException e) { - logger.error("Failed to hardPWM - " + e.getMessage()); - } - } else if (!hasFailedHardwarePWM) { - logger.warn( - "Specified pin (" - + pinNo - + ") is not capable of hardware PWM - no action will be taken."); - hasFailedHardwarePWM = true; - } + @Override + protected void setBrightnessImpl(int brightness) { + if (isHardwarePWMPin) { + try { + piSocket.hardwarePWM(pinNo, 22000, (int) (1000000 * (brightness / 100.0))); + } catch (PigpioException e) { + logger.error("Failed to hardPWM - " + e.getMessage()); + } + } else if (!hasFailedHardwarePWM) { + logger.warn( + "Specified pin (" + + pinNo + + ") is not capable of hardware PWM - no action will be taken."); + hasFailedHardwarePWM = true; } + } } diff --git a/photon-core/src/main/java/org/photonvision/common/hardware/GPIO/pi/PigpioPulse.java 
b/photon-core/src/main/java/org/photonvision/common/hardware/GPIO/pi/PigpioPulse.java index 6d0ff0d1ea..4f262aa34f 100644 --- a/photon-core/src/main/java/org/photonvision/common/hardware/GPIO/pi/PigpioPulse.java +++ b/photon-core/src/main/java/org/photonvision/common/hardware/GPIO/pi/PigpioPulse.java @@ -18,22 +18,22 @@ package org.photonvision.common.hardware.GPIO.pi; public class PigpioPulse { - int gpioOn; - int gpioOff; - int delayMicros; + int gpioOn; + int gpioOff; + int delayMicros; - /** - * Initialises a pulse. - * - * @param gpioOn GPIO number to switch on at the start of the pulse. If zero, then no GPIO will be - * switched on. - * @param gpioOff GPIO number to switch off at the start of the pulse. If zero, then no GPIO will - * be switched off. - * @param delayMicros the delay in microseconds before the next pulse. - */ - public PigpioPulse(int gpioOn, int gpioOff, int delayMicros) { - this.gpioOn = gpioOn != 0 ? 1 << gpioOn : 0; - this.gpioOff = gpioOff != 0 ? 1 << gpioOff : 0; - this.delayMicros = delayMicros; - } + /** + * Initialises a pulse. + * + * @param gpioOn GPIO number to switch on at the start of the pulse. If zero, then no GPIO will be + * switched on. + * @param gpioOff GPIO number to switch off at the start of the pulse. If zero, then no GPIO will + * be switched off. + * @param delayMicros the delay in microseconds before the next pulse. + */ + public PigpioPulse(int gpioOn, int gpioOff, int delayMicros) { + this.gpioOn = gpioOn != 0 ? 1 << gpioOn : 0; + this.gpioOff = gpioOff != 0 ? 
1 << gpioOff : 0; + this.delayMicros = delayMicros; + } } diff --git a/photon-core/src/main/java/org/photonvision/common/hardware/GPIO/pi/PigpioSocket.java b/photon-core/src/main/java/org/photonvision/common/hardware/GPIO/pi/PigpioSocket.java index d6c2bd310d..9ec6f5d709 100644 --- a/photon-core/src/main/java/org/photonvision/common/hardware/GPIO/pi/PigpioSocket.java +++ b/photon-core/src/main/java/org/photonvision/common/hardware/GPIO/pi/PigpioSocket.java @@ -28,345 +28,345 @@ @SuppressWarnings({"SpellCheckingInspection", "unused"}) public class PigpioSocket { - private static final Logger logger = new Logger(PigpioSocket.class, LogGroup.General); - private static final int PIGPIOD_MESSAGE_SIZE = 12; - - private PigpioSocketLock commandSocket; - private int activeWaveformID = -1; - - /** Creates and starts a socket connection to a pigpio daemon on localhost */ - public PigpioSocket() { - this("127.0.0.1", 8888); + private static final Logger logger = new Logger(PigpioSocket.class, LogGroup.General); + private static final int PIGPIOD_MESSAGE_SIZE = 12; + + private PigpioSocketLock commandSocket; + private int activeWaveformID = -1; + + /** Creates and starts a socket connection to a pigpio daemon on localhost */ + public PigpioSocket() { + this("127.0.0.1", 8888); + } + + /** + * Creates and starts a socket connection to a pigpio daemon on a remote host with the specified + * address and port + * + * @param addr Address of remote pigpio daemon + * @param port Port of remote pigpio daemon + */ + public PigpioSocket(String addr, int port) { + try { + commandSocket = new PigpioSocketLock(addr, port); + } catch (IOException e) { + logger.error("Failed to create or connect to Pigpio Daemon socket", e); } - - /** - * Creates and starts a socket connection to a pigpio daemon on a remote host with the specified - * address and port - * - * @param addr Address of remote pigpio daemon - * @param port Port of remote pigpio daemon - */ - public PigpioSocket(String addr, int 
port) { - try { - commandSocket = new PigpioSocketLock(addr, port); - } catch (IOException e) { - logger.error("Failed to create or connect to Pigpio Daemon socket", e); - } + } + + /** + * Reconnects to the pigpio daemon + * + * @throws PigpioException on failure + */ + public void reconnect() throws PigpioException { + try { + commandSocket.reconnect(); + } catch (IOException e) { + logger.error("Failed to reconnect to Pigpio Daemon socket", e); + throw new PigpioException("reconnect", e); } - - /** - * Reconnects to the pigpio daemon - * - * @throws PigpioException on failure - */ - public void reconnect() throws PigpioException { - try { - commandSocket.reconnect(); - } catch (IOException e) { - logger.error("Failed to reconnect to Pigpio Daemon socket", e); - throw new PigpioException("reconnect", e); - } + } + + /** + * Terminates the connection to the pigpio daemon + * + * @throws PigpioException on failure + */ + public void gpioTerminate() throws PigpioException { + try { + commandSocket.terminate(); + } catch (IOException e) { + logger.error("Failed to terminate connection to Pigpio Daemon socket", e); + throw new PigpioException("gpioTerminate", e); } - - /** - * Terminates the connection to the pigpio daemon - * - * @throws PigpioException on failure - */ - public void gpioTerminate() throws PigpioException { - try { - commandSocket.terminate(); - } catch (IOException e) { - logger.error("Failed to terminate connection to Pigpio Daemon socket", e); - throw new PigpioException("gpioTerminate", e); - } + } + + /** + * Read the GPIO level + * + * @param pin Pin to read from + * @return Value of the pin + * @throws PigpioException on failure + */ + public boolean gpioRead(int pin) throws PigpioException { + try { + int retCode = commandSocket.sendCmd(PigpioCommand.PCMD_READ.value, pin); + if (retCode < 0) throw new PigpioException(retCode); + return retCode != 0; + } catch (IOException e) { + logger.error("Failed to read GPIO pin: " + pin, e); + throw new 
PigpioException("gpioRead", e); } - - /** - * Read the GPIO level - * - * @param pin Pin to read from - * @return Value of the pin - * @throws PigpioException on failure - */ - public boolean gpioRead(int pin) throws PigpioException { - try { - int retCode = commandSocket.sendCmd(PigpioCommand.PCMD_READ.value, pin); - if (retCode < 0) throw new PigpioException(retCode); - return retCode != 0; - } catch (IOException e) { - logger.error("Failed to read GPIO pin: " + pin, e); - throw new PigpioException("gpioRead", e); - } + } + + /** + * Write the GPIO level + * + * @param pin Pin to write to + * @param value Value to write + * @throws PigpioException on failure + */ + public void gpioWrite(int pin, boolean value) throws PigpioException { + try { + int retCode = commandSocket.sendCmd(PigpioCommand.PCMD_WRITE.value, pin, value ? 1 : 0); + if (retCode < 0) throw new PigpioException(retCode); + } catch (IOException e) { + logger.error("Failed to write to GPIO pin: " + pin, e); + throw new PigpioException("gpioWrite", e); } - - /** - * Write the GPIO level - * - * @param pin Pin to write to - * @param value Value to write - * @throws PigpioException on failure - */ - public void gpioWrite(int pin, boolean value) throws PigpioException { - try { - int retCode = commandSocket.sendCmd(PigpioCommand.PCMD_WRITE.value, pin, value ? 
1 : 0); - if (retCode < 0) throw new PigpioException(retCode); - } catch (IOException e) { - logger.error("Failed to write to GPIO pin: " + pin, e); - throw new PigpioException("gpioWrite", e); - } + } + + /** + * Clears all waveforms and any data added by calls to {@link #waveAddGeneric(ArrayList)} + * + * @throws PigpioException on failure + */ + public void waveClear() throws PigpioException { + try { + int retCode = commandSocket.sendCmd(PigpioCommand.PCMD_WVCLR.value); + if (retCode < 0) throw new PigpioException(retCode); + } catch (IOException e) { + logger.error("Failed to clear waveforms", e); + throw new PigpioException("waveClear", e); } - - /** - * Clears all waveforms and any data added by calls to {@link #waveAddGeneric(ArrayList)} - * - * @throws PigpioException on failure - */ - public void waveClear() throws PigpioException { - try { - int retCode = commandSocket.sendCmd(PigpioCommand.PCMD_WVCLR.value); - if (retCode < 0) throw new PigpioException(retCode); - } catch (IOException e) { - logger.error("Failed to clear waveforms", e); - throw new PigpioException("waveClear", e); - } + } + + /** + * Adds a number of pulses to the current waveform + * + * @param pulses ArrayList of pulses to add + * @return the new total number of pulses in the current waveform + * @throws PigpioException on failure + */ + private int waveAddGeneric(ArrayList pulses) throws PigpioException { + // pigpio wave message format + + // I p1 0 + // I p2 0 + // I p3 pulses * 12 + // ## extension ## + // III on/off/delay * pulses + + if (pulses == null || pulses.isEmpty()) return 0; + + try { + ByteBuffer bb = ByteBuffer.allocate(pulses.size() * 12); + bb.order(ByteOrder.LITTLE_ENDIAN); + for (var pulse : pulses) { + bb.putInt(pulse.gpioOn).putInt(pulse.gpioOff).putInt(pulse.delayMicros); + } + + int retCode = + commandSocket.sendCmd( + PigpioCommand.PCMD_WVAG.value, + 0, + 0, + pulses.size() * PIGPIOD_MESSAGE_SIZE, + bb.array()); + if (retCode < 0) throw new 
PigpioException(retCode); + + return retCode; + } catch (IOException e) { + logger.error("Failed to add pulse(s) to waveform", e); + throw new PigpioException("waveAddGeneric", e); } - - /** - * Adds a number of pulses to the current waveform - * - * @param pulses ArrayList of pulses to add - * @return the new total number of pulses in the current waveform - * @throws PigpioException on failure - */ - private int waveAddGeneric(ArrayList pulses) throws PigpioException { - // pigpio wave message format - - // I p1 0 - // I p2 0 - // I p3 pulses * 12 - // ## extension ## - // III on/off/delay * pulses - - if (pulses == null || pulses.isEmpty()) return 0; - - try { - ByteBuffer bb = ByteBuffer.allocate(pulses.size() * 12); - bb.order(ByteOrder.LITTLE_ENDIAN); - for (var pulse : pulses) { - bb.putInt(pulse.gpioOn).putInt(pulse.gpioOff).putInt(pulse.delayMicros); - } - - int retCode = - commandSocket.sendCmd( - PigpioCommand.PCMD_WVAG.value, - 0, - 0, - pulses.size() * PIGPIOD_MESSAGE_SIZE, - bb.array()); - if (retCode < 0) throw new PigpioException(retCode); - - return retCode; - } catch (IOException e) { - logger.error("Failed to add pulse(s) to waveform", e); - throw new PigpioException("waveAddGeneric", e); - } + } + + /** + * Creates pulses and adds them to the current waveform + * + * @param pulseTimeMillis Pulse length in milliseconds + * @param blinks Number of times to pulse. -1 for repeat + * @param pinNo Pin to pulse + */ + private void addBlinkPulsesToWaveform(int pulseTimeMillis, int blinks, int pinNo) { + boolean repeat = blinks == -1; + + if (blinks == 0) return; + + if (repeat) { + blinks = 1; } - /** - * Creates pulses and adds them to the current waveform - * - * @param pulseTimeMillis Pulse length in milliseconds - * @param blinks Number of times to pulse. 
-1 for repeat - * @param pinNo Pin to pulse - */ - private void addBlinkPulsesToWaveform(int pulseTimeMillis, int blinks, int pinNo) { - boolean repeat = blinks == -1; - - if (blinks == 0) return; - - if (repeat) { - blinks = 1; - } - - try { - ArrayList pulses = new ArrayList<>(); - var startPulse = new PigpioPulse(pinNo, 0, pulseTimeMillis * 1000); - var endPulse = new PigpioPulse(0, pinNo, pulseTimeMillis * 1000); - - for (int i = 0; i < blinks; i++) { - pulses.add(startPulse); - pulses.add(endPulse); - } - - waveAddGeneric(pulses); - pulses.clear(); - } catch (Exception e) { - e.printStackTrace(); - } - } + try { + ArrayList pulses = new ArrayList<>(); + var startPulse = new PigpioPulse(pinNo, 0, pulseTimeMillis * 1000); + var endPulse = new PigpioPulse(0, pinNo, pulseTimeMillis * 1000); - /** - * Generates and sends a waveform to the given pins with the specified parameters. - * - * @param pulseTimeMillis Pulse length in milliseconds - * @param blinks Number of times to pulse. -1 for repeat - * @param pins Pins to pulse - * @throws PigpioException on failure - */ - public void generateAndSendWaveform(int pulseTimeMillis, int blinks, int... 
pins) - throws PigpioException { - if (pins.length == 0) return; - boolean repeat = blinks == -1; - if (blinks == 0) return; - - // stop any active waves - waveTxStop(); - waveClear(); - - if (activeWaveformID != -1) { - waveDelete(activeWaveformID); - activeWaveformID = -1; - } - - for (int pin : pins) { - addBlinkPulsesToWaveform(pulseTimeMillis, blinks, pin); - } - - int waveformId = waveCreate(); - - if (waveformId >= 0) { - if (repeat) { - waveSendRepeat(waveformId); - } else { - waveSendOnce(waveformId); - } - } else { - String error = ""; - switch (waveformId) { - case PI_EMPTY_WAVEFORM: - error = "Waveform empty"; - break; - case PI_TOO_MANY_CBS: - error = "Too many CBS"; - break; - case PI_TOO_MANY_OOL: - error = "Too many OOL"; - break; - case PI_NO_WAVEFORM_ID: - error = "No waveform ID"; - break; - } - logger.error("Failed to send wave: " + error); - } - } + for (int i = 0; i < blinks; i++) { + pulses.add(startPulse); + pulses.add(endPulse); + } - /** - * Stops the transmission of the current waveform - * - * @return success - * @throws PigpioException on failure - */ - public boolean waveTxStop() throws PigpioException { - try { - int retCode = commandSocket.sendCmd(PigpioCommand.PCMD_WVHLT.value); - if (retCode < 0) throw new PigpioException(retCode); - return retCode == 0; - } catch (IOException e) { - logger.error("Failed to stop waveform", e); - throw new PigpioException("waveTxStop", e); - } + waveAddGeneric(pulses); + pulses.clear(); + } catch (Exception e) { + e.printStackTrace(); } - - /** - * Creates a waveform from the data provided by the prior calls to {@link - * #waveAddGeneric(ArrayList)} Upon success a wave ID greater than or equal to 0 is returned - * - * @return ID of the created waveform - * @throws PigpioException on failure - */ - public int waveCreate() throws PigpioException { - try { - int retCode = commandSocket.sendCmd(PigpioCommand.PCMD_WVCRE.value); - if (retCode < 0) throw new PigpioException(retCode); - return retCode; - } 
catch (IOException e) { - logger.error("Failed to create new waveform", e); - throw new PigpioException("waveCreate", e); - } + } + + /** + * Generates and sends a waveform to the given pins with the specified parameters. + * + * @param pulseTimeMillis Pulse length in milliseconds + * @param blinks Number of times to pulse. -1 for repeat + * @param pins Pins to pulse + * @throws PigpioException on failure + */ + public void generateAndSendWaveform(int pulseTimeMillis, int blinks, int... pins) + throws PigpioException { + if (pins.length == 0) return; + boolean repeat = blinks == -1; + if (blinks == 0) return; + + // stop any active waves + waveTxStop(); + waveClear(); + + if (activeWaveformID != -1) { + waveDelete(activeWaveformID); + activeWaveformID = -1; } - /** - * Deletes the waveform with specified wave ID - * - * @param waveId ID of the waveform to delete - * @throws PigpioException on failure - */ - public void waveDelete(int waveId) throws PigpioException { - try { - int retCode = commandSocket.sendCmd(PigpioCommand.PCMD_WVDEL.value, waveId); - if (retCode < 0) throw new PigpioException(retCode); - } catch (IOException e) { - logger.error("Failed to delete wave: " + waveId, e); - throw new PigpioException("waveDelete", e); - } + for (int pin : pins) { + addBlinkPulsesToWaveform(pulseTimeMillis, blinks, pin); } - /** - * Transmits the waveform with specified wave ID. 
The waveform is sent once - * - * @param waveId ID of the waveform to transmit - * @return The number of DMA control blocks in the waveform - * @throws PigpioException on failure - */ - public int waveSendOnce(int waveId) throws PigpioException { - try { - int retCode = commandSocket.sendCmd(PigpioCommand.PCMD_WVTX.value, waveId); - if (retCode < 0) throw new PigpioException(retCode); - return retCode; - } catch (IOException e) { - throw new PigpioException("waveSendOnce", e); - } + int waveformId = waveCreate(); + + if (waveformId >= 0) { + if (repeat) { + waveSendRepeat(waveformId); + } else { + waveSendOnce(waveformId); + } + } else { + String error = ""; + switch (waveformId) { + case PI_EMPTY_WAVEFORM: + error = "Waveform empty"; + break; + case PI_TOO_MANY_CBS: + error = "Too many CBS"; + break; + case PI_TOO_MANY_OOL: + error = "Too many OOL"; + break; + case PI_NO_WAVEFORM_ID: + error = "No waveform ID"; + break; + } + logger.error("Failed to send wave: " + error); } - - /** - * Transmits the waveform with specified wave ID. 
The waveform cycles until cancelled (either by - * the sending of a new waveform or {@link #waveTxStop()} - * - * @param waveId ID of the waveform to transmit - * @return The number of DMA control blocks in the waveform - * @throws PigpioException on failure - */ - public int waveSendRepeat(int waveId) throws PigpioException { - try { - int retCode = commandSocket.sendCmd(PigpioCommand.PCMD_WVTXR.value, waveId); - if (retCode < 0) throw new PigpioException(retCode); - return retCode; - } catch (IOException e) { - throw new PigpioException("waveSendRepeat", e); - } + } + + /** + * Stops the transmission of the current waveform + * + * @return success + * @throws PigpioException on failure + */ + public boolean waveTxStop() throws PigpioException { + try { + int retCode = commandSocket.sendCmd(PigpioCommand.PCMD_WVHLT.value); + if (retCode < 0) throw new PigpioException(retCode); + return retCode == 0; + } catch (IOException e) { + logger.error("Failed to stop waveform", e); + throw new PigpioException("waveTxStop", e); } - - /** - * Starts hardware PWM on a GPIO at the specified frequency and dutycycle - * - * @param pin GPIO pin to start PWM on - * @param pwmFrequency Frequency to run at (1Hz-125MHz). 
Frequencies above 30MHz are unlikely to - * work - * @param pwmDuty Duty cycle to run at (0-1,000,000) - * @throws PigpioException on failure - */ - public void hardwarePWM(int pin, int pwmFrequency, int pwmDuty) throws PigpioException { - try { - ByteBuffer bb = ByteBuffer.allocate(4); - bb.order(ByteOrder.LITTLE_ENDIAN); - bb.putInt(pwmDuty); - - int retCode = - commandSocket.sendCmd(PigpioCommand.PCMD_HP.value, pin, pwmFrequency, 4, bb.array()); - if (retCode < 0) throw new PigpioException(retCode); - } catch (IOException e) { - throw new PigpioException("hardwarePWM", e); - } + } + + /** + * Creates a waveform from the data provided by the prior calls to {@link + * #waveAddGeneric(ArrayList)} Upon success a wave ID greater than or equal to 0 is returned + * + * @return ID of the created waveform + * @throws PigpioException on failure + */ + public int waveCreate() throws PigpioException { + try { + int retCode = commandSocket.sendCmd(PigpioCommand.PCMD_WVCRE.value); + if (retCode < 0) throw new PigpioException(retCode); + return retCode; + } catch (IOException e) { + logger.error("Failed to create new waveform", e); + throw new PigpioException("waveCreate", e); + } + } + + /** + * Deletes the waveform with specified wave ID + * + * @param waveId ID of the waveform to delete + * @throws PigpioException on failure + */ + public void waveDelete(int waveId) throws PigpioException { + try { + int retCode = commandSocket.sendCmd(PigpioCommand.PCMD_WVDEL.value, waveId); + if (retCode < 0) throw new PigpioException(retCode); + } catch (IOException e) { + logger.error("Failed to delete wave: " + waveId, e); + throw new PigpioException("waveDelete", e); + } + } + + /** + * Transmits the waveform with specified wave ID. 
The waveform is sent once + * + * @param waveId ID of the waveform to transmit + * @return The number of DMA control blocks in the waveform + * @throws PigpioException on failure + */ + public int waveSendOnce(int waveId) throws PigpioException { + try { + int retCode = commandSocket.sendCmd(PigpioCommand.PCMD_WVTX.value, waveId); + if (retCode < 0) throw new PigpioException(retCode); + return retCode; + } catch (IOException e) { + throw new PigpioException("waveSendOnce", e); + } + } + + /** + * Transmits the waveform with specified wave ID. The waveform cycles until cancelled (either by + * the sending of a new waveform or {@link #waveTxStop()} + * + * @param waveId ID of the waveform to transmit + * @return The number of DMA control blocks in the waveform + * @throws PigpioException on failure + */ + public int waveSendRepeat(int waveId) throws PigpioException { + try { + int retCode = commandSocket.sendCmd(PigpioCommand.PCMD_WVTXR.value, waveId); + if (retCode < 0) throw new PigpioException(retCode); + return retCode; + } catch (IOException e) { + throw new PigpioException("waveSendRepeat", e); + } + } + + /** + * Starts hardware PWM on a GPIO at the specified frequency and dutycycle + * + * @param pin GPIO pin to start PWM on + * @param pwmFrequency Frequency to run at (1Hz-125MHz). 
Frequencies above 30MHz are unlikely to + * work + * @param pwmDuty Duty cycle to run at (0-1,000,000) + * @throws PigpioException on failure + */ + public void hardwarePWM(int pin, int pwmFrequency, int pwmDuty) throws PigpioException { + try { + ByteBuffer bb = ByteBuffer.allocate(4); + bb.order(ByteOrder.LITTLE_ENDIAN); + bb.putInt(pwmDuty); + + int retCode = + commandSocket.sendCmd(PigpioCommand.PCMD_HP.value, pin, pwmFrequency, 4, bb.array()); + if (retCode < 0) throw new PigpioException(retCode); + } catch (IOException e) { + throw new PigpioException("hardwarePWM", e); } + } } diff --git a/photon-core/src/main/java/org/photonvision/common/hardware/GPIO/pi/PigpioSocketLock.java b/photon-core/src/main/java/org/photonvision/common/hardware/GPIO/pi/PigpioSocketLock.java index 60e9d9ff39..f65169874b 100644 --- a/photon-core/src/main/java/org/photonvision/common/hardware/GPIO/pi/PigpioSocketLock.java +++ b/photon-core/src/main/java/org/photonvision/common/hardware/GPIO/pi/PigpioSocketLock.java @@ -28,120 +28,120 @@ * https://github.com/nkolban/jpigpio/blob/master/JPigpio/src/jpigpio/SocketLock.java */ final class PigpioSocketLock { - private static final int replyTimeoutMillis = 1000; - - private final String addr; - private final int port; - - private Socket socket; - private DataInputStream in; - private DataOutputStream out; - - public PigpioSocketLock(String addr, int port) throws IOException { - this.addr = addr; - this.port = port; - reconnect(); - } - - public void reconnect() throws IOException { - socket = new Socket(addr, port); - out = new DataOutputStream(socket.getOutputStream()); - in = new DataInputStream(socket.getInputStream()); - } - - public void terminate() throws IOException { - in.close(); - in = null; - - out.flush(); - out.close(); - out = null; - - socket.close(); - socket = null; - } - - public synchronized int sendCmd(int cmd) throws IOException { - byte[] b = {}; - return sendCmd(cmd, 0, 0, 0, b); - } - - public synchronized int 
sendCmd(int cmd, int p1) throws IOException { - byte[] b = {}; - return sendCmd(cmd, p1, 0, 0, b); - } - - public synchronized int sendCmd(int cmd, int p1, int p2) throws IOException { - byte[] b = {}; - return sendCmd(cmd, p1, p2, 0, b); + private static final int replyTimeoutMillis = 1000; + + private final String addr; + private final int port; + + private Socket socket; + private DataInputStream in; + private DataOutputStream out; + + public PigpioSocketLock(String addr, int port) throws IOException { + this.addr = addr; + this.port = port; + reconnect(); + } + + public void reconnect() throws IOException { + socket = new Socket(addr, port); + out = new DataOutputStream(socket.getOutputStream()); + in = new DataInputStream(socket.getInputStream()); + } + + public void terminate() throws IOException { + in.close(); + in = null; + + out.flush(); + out.close(); + out = null; + + socket.close(); + socket = null; + } + + public synchronized int sendCmd(int cmd) throws IOException { + byte[] b = {}; + return sendCmd(cmd, 0, 0, 0, b); + } + + public synchronized int sendCmd(int cmd, int p1) throws IOException { + byte[] b = {}; + return sendCmd(cmd, p1, 0, 0, b); + } + + public synchronized int sendCmd(int cmd, int p1, int p2) throws IOException { + byte[] b = {}; + return sendCmd(cmd, p1, p2, 0, b); + } + + public synchronized int sendCmd(int cmd, int p1, int p2, int p3) throws IOException { + byte[] b = {}; + return sendCmd(cmd, p1, p2, p3, b); + } + + /** + * Send extended command to pigpiod and return result code + * + * @param cmd Command to send + * @param p1 Command parameter 1 + * @param p2 Command parameter 2 + * @param p3 Command parameter 3 (usually length of extended data - see paramater ext) + * @param ext Array of bytes containing extended data + * @return Command result code + * @throws IOException in case of network connection error + */ + @SuppressWarnings("UnusedAssignment") + public synchronized int sendCmd(int cmd, int p1, int p2, int p3, byte[] 
ext) throws IOException { + ByteBuffer bb = ByteBuffer.allocate(16 + ext.length); + + bb.putInt(Integer.reverseBytes(cmd)); + bb.putInt(Integer.reverseBytes(p1)); + bb.putInt(Integer.reverseBytes(p2)); + bb.putInt(Integer.reverseBytes(p3)); + + if (ext.length > 0) { + bb.put(ext); } - public synchronized int sendCmd(int cmd, int p1, int p2, int p3) throws IOException { - byte[] b = {}; - return sendCmd(cmd, p1, p2, p3, b); + out.write(bb.array()); + out.flush(); + + int w = replyTimeoutMillis; + int a = in.available(); + + // if by any chance there is no response from pigpiod, then wait up to + // specified timeout + while (w > 0 && a < 16) { + w -= 10; + try { + Thread.sleep(10); + } catch (InterruptedException ignored) { + } + a = in.available(); } - /** - * Send extended command to pigpiod and return result code - * - * @param cmd Command to send - * @param p1 Command parameter 1 - * @param p2 Command parameter 2 - * @param p3 Command parameter 3 (usually length of extended data - see paramater ext) - * @param ext Array of bytes containing extended data - * @return Command result code - * @throws IOException in case of network connection error - */ - @SuppressWarnings("UnusedAssignment") - public synchronized int sendCmd(int cmd, int p1, int p2, int p3, byte[] ext) throws IOException { - ByteBuffer bb = ByteBuffer.allocate(16 + ext.length); - - bb.putInt(Integer.reverseBytes(cmd)); - bb.putInt(Integer.reverseBytes(p1)); - bb.putInt(Integer.reverseBytes(p2)); - bb.putInt(Integer.reverseBytes(p3)); - - if (ext.length > 0) { - bb.put(ext); - } - - out.write(bb.array()); - out.flush(); - - int w = replyTimeoutMillis; - int a = in.available(); - - // if by any chance there is no response from pigpiod, then wait up to - // specified timeout - while (w > 0 && a < 16) { - w -= 10; - try { - Thread.sleep(10); - } catch (InterruptedException ignored) { - } - a = in.available(); - } - - // throw exception if response from pigpiod has not arrived yet - if (in.available() < 
16) { - throw new IOException( - "Timeout: No response from pigpio daemon within " + replyTimeoutMillis + " ms."); - } - - int resp = Integer.reverseBytes(in.readInt()); // ignore response - resp = Integer.reverseBytes(in.readInt()); // ignore response - resp = Integer.reverseBytes(in.readInt()); // ignore response - resp = Integer.reverseBytes(in.readInt()); // contains error or response - return resp; + // throw exception if response from pigpiod has not arrived yet + if (in.available() < 16) { + throw new IOException( + "Timeout: No response from pigpio daemon within " + replyTimeoutMillis + " ms."); } - /** - * Read all remaining bytes coming from pigpiod - * - * @param data Array to store read bytes. - * @throws IOException if unable to read from network - */ - public void readBytes(byte[] data) throws IOException { - in.readFully(data); - } + int resp = Integer.reverseBytes(in.readInt()); // ignore response + resp = Integer.reverseBytes(in.readInt()); // ignore response + resp = Integer.reverseBytes(in.readInt()); // ignore response + resp = Integer.reverseBytes(in.readInt()); // contains error or response + return resp; + } + + /** + * Read all remaining bytes coming from pigpiod + * + * @param data Array to store read bytes. 
+ * @throws IOException if unable to read from network + */ + public void readBytes(byte[] data) throws IOException { + in.readFully(data); + } } diff --git a/photon-core/src/main/java/org/photonvision/common/hardware/HardwareManager.java b/photon-core/src/main/java/org/photonvision/common/hardware/HardwareManager.java index 2a049cccae..38c6f5cd81 100644 --- a/photon-core/src/main/java/org/photonvision/common/hardware/HardwareManager.java +++ b/photon-core/src/main/java/org/photonvision/common/hardware/HardwareManager.java @@ -34,154 +34,154 @@ import org.photonvision.common.util.ShellExec; public class HardwareManager { - private static HardwareManager instance; + private static HardwareManager instance; - private final ShellExec shellExec = new ShellExec(true, false); - private final Logger logger = new Logger(HardwareManager.class, LogGroup.General); + private final ShellExec shellExec = new ShellExec(true, false); + private final Logger logger = new Logger(HardwareManager.class, LogGroup.General); - private final HardwareConfig hardwareConfig; - private final HardwareSettings hardwareSettings; + private final HardwareConfig hardwareConfig; + private final HardwareSettings hardwareSettings; - private final MetricsManager metricsManager; + private final MetricsManager metricsManager; - @SuppressWarnings({"FieldCanBeLocal", "unused"}) - private final StatusLED statusLED; + @SuppressWarnings({"FieldCanBeLocal", "unused"}) + private final StatusLED statusLED; - @SuppressWarnings("FieldCanBeLocal") - private final IntegerSubscriber ledModeRequest; + @SuppressWarnings("FieldCanBeLocal") + private final IntegerSubscriber ledModeRequest; - private final IntegerPublisher ledModeState; + private final IntegerPublisher ledModeState; - @SuppressWarnings({"FieldCanBeLocal", "unused"}) - private final NTDataChangeListener ledModeListener; + @SuppressWarnings({"FieldCanBeLocal", "unused"}) + private final NTDataChangeListener ledModeListener; - public final VisionLED 
visionLED; // May be null if no LED is specified + public final VisionLED visionLED; // May be null if no LED is specified - private final PigpioSocket pigpioSocket; // will be null unless on Raspi + private final PigpioSocket pigpioSocket; // will be null unless on Raspi - public static HardwareManager getInstance() { - if (instance == null) { - var conf = ConfigManager.getInstance().getConfig(); - instance = new HardwareManager(conf.getHardwareConfig(), conf.getHardwareSettings()); - } - return instance; + public static HardwareManager getInstance() { + if (instance == null) { + var conf = ConfigManager.getInstance().getConfig(); + instance = new HardwareManager(conf.getHardwareConfig(), conf.getHardwareSettings()); } - - private HardwareManager(HardwareConfig hardwareConfig, HardwareSettings hardwareSettings) { - this.hardwareConfig = hardwareConfig; - this.hardwareSettings = hardwareSettings; - - this.metricsManager = new MetricsManager(); - this.metricsManager.setConfig(hardwareConfig); - - ledModeRequest = - NetworkTablesManager.getInstance() - .kRootTable - .getIntegerTopic("ledModeRequest") - .subscribe(-1); - ledModeState = - NetworkTablesManager.getInstance().kRootTable.getIntegerTopic("ledModeState").publish(); - ledModeState.set(VisionLEDMode.kDefault.value); - - CustomGPIO.setConfig(hardwareConfig); - - if (Platform.isRaspberryPi()) { - pigpioSocket = new PigpioSocket(); - } else { - pigpioSocket = null; - } - - statusLED = - hardwareConfig.statusRGBPins.size() == 3 - ? new StatusLED(hardwareConfig.statusRGBPins) - : null; - - var hasBrightnessRange = hardwareConfig.ledBrightnessRange.size() == 2; - visionLED = - hardwareConfig.ledPins.isEmpty() - ? null - : new VisionLED( - hardwareConfig.ledPins, - hasBrightnessRange ? hardwareConfig.ledBrightnessRange.get(0) : 0, - hasBrightnessRange ? hardwareConfig.ledBrightnessRange.get(1) : 100, - pigpioSocket, - ledModeState::set); - - ledModeListener = - visionLED == null - ? 
null - : new NTDataChangeListener( - NetworkTablesManager.getInstance().kRootTable.getInstance(), - ledModeRequest, - visionLED::onLedModeChange); - - Runtime.getRuntime().addShutdownHook(new Thread(this::onJvmExit)); - - if (visionLED != null) { - visionLED.setBrightness(hardwareSettings.ledBrightnessPercentage); - visionLED.blink(85, 4); // bootup blink - } - - // Start hardware metrics thread (Disabled until implemented) - // if (Platform.isLinux()) MetricsPublisher.getInstance().startTask(); + return instance; + } + + private HardwareManager(HardwareConfig hardwareConfig, HardwareSettings hardwareSettings) { + this.hardwareConfig = hardwareConfig; + this.hardwareSettings = hardwareSettings; + + this.metricsManager = new MetricsManager(); + this.metricsManager.setConfig(hardwareConfig); + + ledModeRequest = + NetworkTablesManager.getInstance() + .kRootTable + .getIntegerTopic("ledModeRequest") + .subscribe(-1); + ledModeState = + NetworkTablesManager.getInstance().kRootTable.getIntegerTopic("ledModeState").publish(); + ledModeState.set(VisionLEDMode.kDefault.value); + + CustomGPIO.setConfig(hardwareConfig); + + if (Platform.isRaspberryPi()) { + pigpioSocket = new PigpioSocket(); + } else { + pigpioSocket = null; } - public void setBrightnessPercent(int percent) { - if (percent != hardwareSettings.ledBrightnessPercentage) { - hardwareSettings.ledBrightnessPercentage = percent; - if (visionLED != null) visionLED.setBrightness(percent); - ConfigManager.getInstance().requestSave(); - logger.info("Setting led brightness to " + percent + "%"); - } + statusLED = + hardwareConfig.statusRGBPins.size() == 3 + ? new StatusLED(hardwareConfig.statusRGBPins) + : null; + + var hasBrightnessRange = hardwareConfig.ledBrightnessRange.size() == 2; + visionLED = + hardwareConfig.ledPins.isEmpty() + ? null + : new VisionLED( + hardwareConfig.ledPins, + hasBrightnessRange ? hardwareConfig.ledBrightnessRange.get(0) : 0, + hasBrightnessRange ? 
hardwareConfig.ledBrightnessRange.get(1) : 100, + pigpioSocket, + ledModeState::set); + + ledModeListener = + visionLED == null + ? null + : new NTDataChangeListener( + NetworkTablesManager.getInstance().kRootTable.getInstance(), + ledModeRequest, + visionLED::onLedModeChange); + + Runtime.getRuntime().addShutdownHook(new Thread(this::onJvmExit)); + + if (visionLED != null) { + visionLED.setBrightness(hardwareSettings.ledBrightnessPercentage); + visionLED.blink(85, 4); // bootup blink } - private void onJvmExit() { - logger.info("Shutting down LEDs..."); - if (visionLED != null) visionLED.setState(false); + // Start hardware metrics thread (Disabled until implemented) + // if (Platform.isLinux()) MetricsPublisher.getInstance().startTask(); + } - logger.info("Force-flushing settings..."); - ConfigManager.getInstance().saveToDisk(); + public void setBrightnessPercent(int percent) { + if (percent != hardwareSettings.ledBrightnessPercentage) { + hardwareSettings.ledBrightnessPercentage = percent; + if (visionLED != null) visionLED.setBrightness(percent); + ConfigManager.getInstance().requestSave(); + logger.info("Setting led brightness to " + percent + "%"); } - - public boolean restartDevice() { - if (Platform.isLinux()) { - try { - return shellExec.executeBashCommand("reboot now") == 0; - } catch (IOException e) { - logger.error("Could not restart device!", e); - return false; - } - } - try { - return shellExec.executeBashCommand(hardwareConfig.restartHardwareCommand) == 0; - } catch (IOException e) { - logger.error("Could not restart device!", e); - return false; - } + } + + private void onJvmExit() { + logger.info("Shutting down LEDs..."); + if (visionLED != null) visionLED.setState(false); + + logger.info("Force-flushing settings..."); + ConfigManager.getInstance().saveToDisk(); + } + + public boolean restartDevice() { + if (Platform.isLinux()) { + try { + return shellExec.executeBashCommand("reboot now") == 0; + } catch (IOException e) { + logger.error("Could not 
restart device!", e); + return false; + } } - - public void setStatus(ProgramStatus status) { - switch (status) { - case UHOH: - // red flashing, green off - break; - case RUNNING: - // red solid, green off - break; - case RUNNING_NT: - // red off, green solid - break; - case RUNNING_NT_TARGET: - // red off, green flashing - break; - } + try { + return shellExec.executeBashCommand(hardwareConfig.restartHardwareCommand) == 0; + } catch (IOException e) { + logger.error("Could not restart device!", e); + return false; } - - public HardwareConfig getConfig() { - return hardwareConfig; + } + + public void setStatus(ProgramStatus status) { + switch (status) { + case UHOH: + // red flashing, green off + break; + case RUNNING: + // red solid, green off + break; + case RUNNING_NT: + // red off, green solid + break; + case RUNNING_NT_TARGET: + // red off, green flashing + break; } + } - public void publishMetrics() { - metricsManager.publishMetrics(); - } + public HardwareConfig getConfig() { + return hardwareConfig; + } + + public void publishMetrics() { + metricsManager.publishMetrics(); + } } diff --git a/photon-core/src/main/java/org/photonvision/common/hardware/PiVersion.java b/photon-core/src/main/java/org/photonvision/common/hardware/PiVersion.java index dbf23ac7e4..240f98779c 100644 --- a/photon-core/src/main/java/org/photonvision/common/hardware/PiVersion.java +++ b/photon-core/src/main/java/org/photonvision/common/hardware/PiVersion.java @@ -21,51 +21,51 @@ import org.photonvision.common.util.ShellExec; public enum PiVersion { - PI_B("Pi Model B"), - COMPUTE_MODULE("Compute Module Rev"), - ZERO_W("Pi Zero W Rev 1.1"), - ZERO_2_W("Raspberry Pi Zero 2"), - PI_3("Pi 3"), - PI_4("Pi 4"), - COMPUTE_MODULE_3("Compute Module 3"), - UNKNOWN("UNKNOWN"); + PI_B("Pi Model B"), + COMPUTE_MODULE("Compute Module Rev"), + ZERO_W("Pi Zero W Rev 1.1"), + ZERO_2_W("Raspberry Pi Zero 2"), + PI_3("Pi 3"), + PI_4("Pi 4"), + COMPUTE_MODULE_3("Compute Module 3"), + UNKNOWN("UNKNOWN"); - 
private final String identifier; - private static final ShellExec shell = new ShellExec(true, false); - private static final PiVersion currentPiVersion = calcPiVersion(); + private final String identifier; + private static final ShellExec shell = new ShellExec(true, false); + private static final PiVersion currentPiVersion = calcPiVersion(); - PiVersion(String s) { - this.identifier = s.toLowerCase(); - } + PiVersion(String s) { + this.identifier = s.toLowerCase(); + } - public static PiVersion getPiVersion() { - return currentPiVersion; - } + public static PiVersion getPiVersion() { + return currentPiVersion; + } - private static PiVersion calcPiVersion() { - if (!Platform.isRaspberryPi()) return PiVersion.UNKNOWN; - String piString = getPiVersionString(); - for (PiVersion p : PiVersion.values()) { - if (piString.toLowerCase().contains(p.identifier)) return p; - } - return UNKNOWN; + private static PiVersion calcPiVersion() { + if (!Platform.isRaspberryPi()) return PiVersion.UNKNOWN; + String piString = getPiVersionString(); + for (PiVersion p : PiVersion.values()) { + if (piString.toLowerCase().contains(p.identifier)) return p; } + return UNKNOWN; + } - // Query /proc/device-tree/model. This should return the model of the pi - // Versions here: - // https://github.com/raspberrypi/linux/blob/rpi-5.10.y/arch/arm/boot/dts/bcm2710-rpi-cm3.dts - private static String getPiVersionString() { - if (!Platform.isRaspberryPi()) return ""; - try { - shell.executeBashCommand("cat /proc/device-tree/model"); - } catch (IOException e) { - e.printStackTrace(); - } - if (shell.getExitCode() == 0) { - // We expect it to be in the format "raspberry pi X model X" - return shell.getOutput(); - } - - return ""; + // Query /proc/device-tree/model. 
This should return the model of the pi + // Versions here: + // https://github.com/raspberrypi/linux/blob/rpi-5.10.y/arch/arm/boot/dts/bcm2710-rpi-cm3.dts + private static String getPiVersionString() { + if (!Platform.isRaspberryPi()) return ""; + try { + shell.executeBashCommand("cat /proc/device-tree/model"); + } catch (IOException e) { + e.printStackTrace(); } + if (shell.getExitCode() == 0) { + // We expect it to be in the format "raspberry pi X model X" + return shell.getOutput(); + } + + return ""; + } } diff --git a/photon-core/src/main/java/org/photonvision/common/hardware/Platform.java b/photon-core/src/main/java/org/photonvision/common/hardware/Platform.java index 1c500ab722..1a0290cef2 100644 --- a/photon-core/src/main/java/org/photonvision/common/hardware/Platform.java +++ b/photon-core/src/main/java/org/photonvision/common/hardware/Platform.java @@ -26,190 +26,190 @@ @SuppressWarnings("unused") public enum Platform { - // WPILib Supported (JNI) - WINDOWS_64("Windows x64", false, OSType.WINDOWS, true), - LINUX_32("Linux x86", false, OSType.LINUX, true), - LINUX_64("Linux x64", false, OSType.LINUX, true), - LINUX_RASPBIAN32( - "Linux Raspbian 32-bit", true, OSType.LINUX, true), // Raspberry Pi 3/4 with a 32-bit image - LINUX_RASPBIAN64( - "Linux Raspbian 64-bit", true, OSType.LINUX, true), // Raspberry Pi 3/4 with a 64-bit image - LINUX_AARCH64("Linux AARCH64", false, OSType.LINUX, true), // Jetson Nano, Jetson TX2 - - // PhotonVision Supported (Manual build/install) - LINUX_ARM32("Linux ARM32", false, OSType.LINUX, true), // ODROID XU4, C1+ - LINUX_ARM64("Linux ARM64", false, OSType.LINUX, true), // ODROID C2, N2 - - // Completely unsupported - WINDOWS_32("Windows x86", false, OSType.WINDOWS, false), - MACOS("Mac OS", false, OSType.MACOS, false), - UNKNOWN("Unsupported Platform", false, OSType.UNKNOWN, false); - - private enum OSType { - WINDOWS, - LINUX, - MACOS, - UNKNOWN + // WPILib Supported (JNI) + WINDOWS_64("Windows x64", false, OSType.WINDOWS, 
true), + LINUX_32("Linux x86", false, OSType.LINUX, true), + LINUX_64("Linux x64", false, OSType.LINUX, true), + LINUX_RASPBIAN32( + "Linux Raspbian 32-bit", true, OSType.LINUX, true), // Raspberry Pi 3/4 with a 32-bit image + LINUX_RASPBIAN64( + "Linux Raspbian 64-bit", true, OSType.LINUX, true), // Raspberry Pi 3/4 with a 64-bit image + LINUX_AARCH64("Linux AARCH64", false, OSType.LINUX, true), // Jetson Nano, Jetson TX2 + + // PhotonVision Supported (Manual build/install) + LINUX_ARM32("Linux ARM32", false, OSType.LINUX, true), // ODROID XU4, C1+ + LINUX_ARM64("Linux ARM64", false, OSType.LINUX, true), // ODROID C2, N2 + + // Completely unsupported + WINDOWS_32("Windows x86", false, OSType.WINDOWS, false), + MACOS("Mac OS", false, OSType.MACOS, false), + UNKNOWN("Unsupported Platform", false, OSType.UNKNOWN, false); + + private enum OSType { + WINDOWS, + LINUX, + MACOS, + UNKNOWN + } + + private static final ShellExec shell = new ShellExec(true, false); + public final String description; + public final boolean isPi; + public final OSType osType; + public final boolean isSupported; + + // Set once at init, shouldn't be needed after. 
+ private static final Platform currentPlatform = getCurrentPlatform(); + private static final boolean isRoot = checkForRoot(); + + Platform(String description, boolean isPi, OSType osType, boolean isSupported) { + this.description = description; + this.isPi = isPi; + this.osType = osType; + this.isSupported = isSupported; + } + + ////////////////////////////////////////////////////// + // Public API + + // Checks specifically if unix shell and API are supported + public static boolean isLinux() { + return currentPlatform.osType == OSType.LINUX; + } + + public static boolean isRaspberryPi() { + return currentPlatform.isPi; + } + + public static String getPlatformName() { + if (currentPlatform.equals(UNKNOWN)) { + return UnknownPlatformString; + } else { + return currentPlatform.description; } - - private static final ShellExec shell = new ShellExec(true, false); - public final String description; - public final boolean isPi; - public final OSType osType; - public final boolean isSupported; - - // Set once at init, shouldn't be needed after. - private static final Platform currentPlatform = getCurrentPlatform(); - private static final boolean isRoot = checkForRoot(); - - Platform(String description, boolean isPi, OSType osType, boolean isSupported) { - this.description = description; - this.isPi = isPi; - this.osType = osType; - this.isSupported = isSupported; + } + + public static boolean isRoot() { + return isRoot; + } + + ////////////////////////////////////////////////////// + + // Debug info related to unknown platforms for debug help + private static final String OS_NAME = System.getProperty("os.name"); + private static final String OS_ARCH = System.getProperty("os.arch"); + private static final String UnknownPlatformString = + String.format("Unknown Platform. 
OS: %s, Architecture: %s", OS_NAME, OS_ARCH); + + @SuppressWarnings("StatementWithEmptyBody") + private static boolean checkForRoot() { + if (isLinux()) { + try { + shell.executeBashCommand("id -u"); + } catch (IOException e) { + e.printStackTrace(); + } + + while (!shell.isOutputCompleted()) { + // TODO: add timeout + } + + if (shell.getExitCode() == 0) { + return shell.getOutput().split("\n")[0].equals("0"); + } + + } else { + return true; } - - ////////////////////////////////////////////////////// - // Public API - - // Checks specifically if unix shell and API are supported - public static boolean isLinux() { - return currentPlatform.osType == OSType.LINUX; + return false; + } + + private static Platform getCurrentPlatform() { + if (RuntimeDetector.isWindows()) { + if (RuntimeDetector.is32BitIntel()) { + return WINDOWS_32; + } else if (RuntimeDetector.is64BitIntel()) { + return WINDOWS_64; + } else { + // please don't try this + return UNKNOWN; + } } - public static boolean isRaspberryPi() { - return currentPlatform.isPi; + if (RuntimeDetector.isMac()) { + // TODO - once we have real support, this might have to be more granular + return MACOS; } - public static String getPlatformName() { - if (currentPlatform.equals(UNKNOWN)) { - return UnknownPlatformString; + if (RuntimeDetector.isLinux()) { + if (isPiSBC()) { + if (RuntimeDetector.isArm32()) { + return LINUX_RASPBIAN32; + } else if (RuntimeDetector.isArm64()) { + return LINUX_RASPBIAN64; } else { - return currentPlatform.description; + // Unknown/exotic installation + return UNKNOWN; } - } - - public static boolean isRoot() { - return isRoot; - } - - ////////////////////////////////////////////////////// - - // Debug info related to unknown platforms for debug help - private static final String OS_NAME = System.getProperty("os.name"); - private static final String OS_ARCH = System.getProperty("os.arch"); - private static final String UnknownPlatformString = - String.format("Unknown Platform. 
OS: %s, Architecture: %s", OS_NAME, OS_ARCH); - - @SuppressWarnings("StatementWithEmptyBody") - private static boolean checkForRoot() { - if (isLinux()) { - try { - shell.executeBashCommand("id -u"); - } catch (IOException e) { - e.printStackTrace(); - } - - while (!shell.isOutputCompleted()) { - // TODO: add timeout - } - - if (shell.getExitCode() == 0) { - return shell.getOutput().split("\n")[0].equals("0"); - } - + } else if (isJetsonSBC()) { + if (RuntimeDetector.isArm64()) { + // TODO - do we need to check OS version? + return LINUX_AARCH64; } else { - return true; - } - return false; - } - - private static Platform getCurrentPlatform() { - if (RuntimeDetector.isWindows()) { - if (RuntimeDetector.is32BitIntel()) { - return WINDOWS_32; - } else if (RuntimeDetector.is64BitIntel()) { - return WINDOWS_64; - } else { - // please don't try this - return UNKNOWN; - } + // Unknown/exotic installation + return UNKNOWN; } - - if (RuntimeDetector.isMac()) { - // TODO - once we have real support, this might have to be more granular - return MACOS; - } - - if (RuntimeDetector.isLinux()) { - if (isPiSBC()) { - if (RuntimeDetector.isArm32()) { - return LINUX_RASPBIAN32; - } else if (RuntimeDetector.isArm64()) { - return LINUX_RASPBIAN64; - } else { - // Unknown/exotic installation - return UNKNOWN; - } - } else if (isJetsonSBC()) { - if (RuntimeDetector.isArm64()) { - // TODO - do we need to check OS version? - return LINUX_AARCH64; - } else { - // Unknown/exotic installation - return UNKNOWN; - } - } else if (RuntimeDetector.is64BitIntel()) { - return LINUX_64; - } else if (RuntimeDetector.is32BitIntel()) { - return LINUX_32; - } else if (RuntimeDetector.isArm64()) { - // TODO - os detection needed? 
- return LINUX_AARCH64; - } else { - // Unknown or otherwise unsupported platform - return Platform.UNKNOWN; - } - } - - // If we fall through all the way to here, + } else if (RuntimeDetector.is64BitIntel()) { + return LINUX_64; + } else if (RuntimeDetector.is32BitIntel()) { + return LINUX_32; + } else if (RuntimeDetector.isArm64()) { + // TODO - os detection needed? + return LINUX_AARCH64; + } else { + // Unknown or otherwise unsupported platform return Platform.UNKNOWN; + } } - // Check for various known SBC types - private static boolean isPiSBC() { - return fileHasText("/proc/cpuinfo", "Raspberry Pi"); - } - - private static boolean isJetsonSBC() { - // https://forums.developer.nvidia.com/t/how-to-recognize-jetson-nano-device/146624 - return fileHasText("/proc/device-tree/model", "NVIDIA Jetson"); - } - - // Checks for various names of linux OS - private static boolean isStretch() { - // TODO - this is a total guess - return fileHasText("/etc/os-release", "Stretch"); - } - - private static boolean isBuster() { - // TODO - this is a total guess - return fileHasText("/etc/os-release", "Buster"); - } - - private static boolean fileHasText(String filename, String text) { - try (BufferedReader reader = Files.newBufferedReader(Paths.get(filename))) { - while (true) { - String value = reader.readLine(); - if (value == null) { - return false; - - } else if (value.contains(text)) { - return true; - } // else, next line - } - } catch (IOException ex) { - return false; - } + // If we fall through all the way to here, + return Platform.UNKNOWN; + } + + // Check for various known SBC types + private static boolean isPiSBC() { + return fileHasText("/proc/cpuinfo", "Raspberry Pi"); + } + + private static boolean isJetsonSBC() { + // https://forums.developer.nvidia.com/t/how-to-recognize-jetson-nano-device/146624 + return fileHasText("/proc/device-tree/model", "NVIDIA Jetson"); + } + + // Checks for various names of linux OS + private static boolean isStretch() { + // TODO - 
this is a total guess + return fileHasText("/etc/os-release", "Stretch"); + } + + private static boolean isBuster() { + // TODO - this is a total guess + return fileHasText("/etc/os-release", "Buster"); + } + + private static boolean fileHasText(String filename, String text) { + try (BufferedReader reader = Files.newBufferedReader(Paths.get(filename))) { + while (true) { + String value = reader.readLine(); + if (value == null) { + return false; + + } else if (value.contains(text)) { + return true; + } // else, next line + } + } catch (IOException ex) { + return false; } + } } diff --git a/photon-core/src/main/java/org/photonvision/common/hardware/StatusLED.java b/photon-core/src/main/java/org/photonvision/common/hardware/StatusLED.java index 0c10b3548e..01a181e0a7 100644 --- a/photon-core/src/main/java/org/photonvision/common/hardware/StatusLED.java +++ b/photon-core/src/main/java/org/photonvision/common/hardware/StatusLED.java @@ -23,26 +23,26 @@ import org.photonvision.common.hardware.GPIO.pi.PigpioPin; public class StatusLED { - public final GPIOBase redLED; - public final GPIOBase greenLED; - public final GPIOBase blueLED; + public final GPIOBase redLED; + public final GPIOBase greenLED; + public final GPIOBase blueLED; - public StatusLED(List statusLedPins) { - // fill unassigned pins with -1 to disable - if (statusLedPins.size() != 3) { - for (int i = 0; i < 3 - statusLedPins.size(); i++) { - statusLedPins.add(-1); - } - } + public StatusLED(List statusLedPins) { + // fill unassigned pins with -1 to disable + if (statusLedPins.size() != 3) { + for (int i = 0; i < 3 - statusLedPins.size(); i++) { + statusLedPins.add(-1); + } + } - if (Platform.isRaspberryPi()) { - redLED = new PigpioPin(statusLedPins.get(0)); - greenLED = new PigpioPin(statusLedPins.get(1)); - blueLED = new PigpioPin(statusLedPins.get(2)); - } else { - redLED = new CustomGPIO(statusLedPins.get(0)); - greenLED = new CustomGPIO(statusLedPins.get(1)); - blueLED = new 
CustomGPIO(statusLedPins.get(2)); - } + if (Platform.isRaspberryPi()) { + redLED = new PigpioPin(statusLedPins.get(0)); + greenLED = new PigpioPin(statusLedPins.get(1)); + blueLED = new PigpioPin(statusLedPins.get(2)); + } else { + redLED = new CustomGPIO(statusLedPins.get(0)); + greenLED = new CustomGPIO(statusLedPins.get(1)); + blueLED = new CustomGPIO(statusLedPins.get(2)); } + } } diff --git a/photon-core/src/main/java/org/photonvision/common/hardware/VisionLED.java b/photon-core/src/main/java/org/photonvision/common/hardware/VisionLED.java index 7d1d5273b3..6be153391f 100644 --- a/photon-core/src/main/java/org/photonvision/common/hardware/VisionLED.java +++ b/photon-core/src/main/java/org/photonvision/common/hardware/VisionLED.java @@ -33,171 +33,171 @@ import org.photonvision.common.util.math.MathUtils; public class VisionLED { - private static final Logger logger = new Logger(VisionLED.class, LogGroup.VisionModule); - - private final int[] ledPins; - private final List visionLEDs = new ArrayList<>(); - private final int brightnessMin; - private final int brightnessMax; - private final PigpioSocket pigpioSocket; - - private VisionLEDMode currentLedMode = VisionLEDMode.kDefault; - private BooleanSupplier pipelineModeSupplier; - - private int mappedBrightnessPercentage; - - private final Consumer modeConsumer; - - public VisionLED( - List ledPins, - int brightnessMin, - int brightnessMax, - PigpioSocket pigpioSocket, - Consumer visionLEDmode) { - this.brightnessMin = brightnessMin; - this.brightnessMax = brightnessMax; - this.pigpioSocket = pigpioSocket; - this.modeConsumer = visionLEDmode; - this.ledPins = ledPins.stream().mapToInt(i -> i).toArray(); - ledPins.forEach( - pin -> { - if (Platform.isRaspberryPi()) { - visionLEDs.add(new PigpioPin(pin)); - } else { - visionLEDs.add(new CustomGPIO(pin)); - } - }); - pipelineModeSupplier = () -> false; + private static final Logger logger = new Logger(VisionLED.class, LogGroup.VisionModule); + + private final int[] 
ledPins; + private final List visionLEDs = new ArrayList<>(); + private final int brightnessMin; + private final int brightnessMax; + private final PigpioSocket pigpioSocket; + + private VisionLEDMode currentLedMode = VisionLEDMode.kDefault; + private BooleanSupplier pipelineModeSupplier; + + private int mappedBrightnessPercentage; + + private final Consumer modeConsumer; + + public VisionLED( + List ledPins, + int brightnessMin, + int brightnessMax, + PigpioSocket pigpioSocket, + Consumer visionLEDmode) { + this.brightnessMin = brightnessMin; + this.brightnessMax = brightnessMax; + this.pigpioSocket = pigpioSocket; + this.modeConsumer = visionLEDmode; + this.ledPins = ledPins.stream().mapToInt(i -> i).toArray(); + ledPins.forEach( + pin -> { + if (Platform.isRaspberryPi()) { + visionLEDs.add(new PigpioPin(pin)); + } else { + visionLEDs.add(new CustomGPIO(pin)); + } + }); + pipelineModeSupplier = () -> false; + } + + public void setPipelineModeSupplier(BooleanSupplier pipelineModeSupplier) { + this.pipelineModeSupplier = pipelineModeSupplier; + } + + public void setBrightness(int percentage) { + mappedBrightnessPercentage = MathUtils.map(percentage, 0, 100, brightnessMin, brightnessMax); + setInternal(currentLedMode, false); + } + + public void blink(int pulseLengthMillis, int blinkCount) { + blinkImpl(pulseLengthMillis, blinkCount); + int blinkDuration = pulseLengthMillis * blinkCount * 2; + TimedTaskManager.getInstance() + .addOneShotTask(() -> setInternal(this.currentLedMode, false), blinkDuration + 150); + } + + private void blinkImpl(int pulseLengthMillis, int blinkCount) { + if (Platform.isRaspberryPi()) { + try { + setStateImpl(false); // hack to ensure hardware PWM has stopped before trying to blink + pigpioSocket.generateAndSendWaveform(pulseLengthMillis, blinkCount, ledPins); + } catch (PigpioException e) { + logger.error("Failed to blink!", e); + } catch (NullPointerException e) { + logger.error("Failed to blink, pigpio internal issue!", e); + } + } else 
{ + for (GPIOBase led : visionLEDs) { + led.blink(pulseLengthMillis, blinkCount); + } } - - public void setPipelineModeSupplier(BooleanSupplier pipelineModeSupplier) { - this.pipelineModeSupplier = pipelineModeSupplier; - } - - public void setBrightness(int percentage) { - mappedBrightnessPercentage = MathUtils.map(percentage, 0, 100, brightnessMin, brightnessMax); - setInternal(currentLedMode, false); - } - - public void blink(int pulseLengthMillis, int blinkCount) { - blinkImpl(pulseLengthMillis, blinkCount); - int blinkDuration = pulseLengthMillis * blinkCount * 2; - TimedTaskManager.getInstance() - .addOneShotTask(() -> setInternal(this.currentLedMode, false), blinkDuration + 150); - } - - private void blinkImpl(int pulseLengthMillis, int blinkCount) { - if (Platform.isRaspberryPi()) { - try { - setStateImpl(false); // hack to ensure hardware PWM has stopped before trying to blink - pigpioSocket.generateAndSendWaveform(pulseLengthMillis, blinkCount, ledPins); - } catch (PigpioException e) { - logger.error("Failed to blink!", e); - } catch (NullPointerException e) { - logger.error("Failed to blink, pigpio internal issue!", e); - } - } else { - for (GPIOBase led : visionLEDs) { - led.blink(pulseLengthMillis, blinkCount); - } - } + } + + private void setStateImpl(boolean state) { + if (Platform.isRaspberryPi()) { + try { + // stop any active blink + pigpioSocket.waveTxStop(); + } catch (PigpioException e) { + logger.error("Failed to stop blink!", e); + } catch (NullPointerException e) { + logger.error("Failed to blink, pigpio internal issue!", e); + } } - - private void setStateImpl(boolean state) { - if (Platform.isRaspberryPi()) { - try { - // stop any active blink - pigpioSocket.waveTxStop(); - } catch (PigpioException e) { - logger.error("Failed to stop blink!", e); - } catch (NullPointerException e) { - logger.error("Failed to blink, pigpio internal issue!", e); - } - } - try { - // if the user has set an LED brightness other than 100%, use that instead - if 
(mappedBrightnessPercentage == 100 || !state) { - visionLEDs.forEach((led) -> led.setState(state)); - } else { - visionLEDs.forEach((led) -> led.setBrightness(mappedBrightnessPercentage)); - } - } catch (NullPointerException e) { - logger.error("Failed to blink, pigpio internal issue!", e); - } - } - - public void setState(boolean on) { - setInternal(on ? VisionLEDMode.kOn : VisionLEDMode.kOff, false); + try { + // if the user has set an LED brightness other than 100%, use that instead + if (mappedBrightnessPercentage == 100 || !state) { + visionLEDs.forEach((led) -> led.setState(state)); + } else { + visionLEDs.forEach((led) -> led.setBrightness(mappedBrightnessPercentage)); + } + } catch (NullPointerException e) { + logger.error("Failed to blink, pigpio internal issue!", e); } - - void onLedModeChange(NetworkTableEvent entryNotification) { - var newLedModeRaw = (int) entryNotification.valueData.value.getInteger(); - logger.debug("Got LED mode " + newLedModeRaw); - if (newLedModeRaw != currentLedMode.value) { - VisionLEDMode newLedMode; - switch (newLedModeRaw) { - case -1: - newLedMode = VisionLEDMode.kDefault; - break; - case 0: - newLedMode = VisionLEDMode.kOff; - break; - case 1: - newLedMode = VisionLEDMode.kOn; - break; - case 2: - newLedMode = VisionLEDMode.kBlink; - break; - default: - logger.warn("User supplied invalid LED mode, falling back to Default"); - newLedMode = VisionLEDMode.kDefault; - break; - } - setInternal(newLedMode, true); - - if (modeConsumer != null) modeConsumer.accept(newLedMode.value); - } + } + + public void setState(boolean on) { + setInternal(on ? 
VisionLEDMode.kOn : VisionLEDMode.kOff, false); + } + + void onLedModeChange(NetworkTableEvent entryNotification) { + var newLedModeRaw = (int) entryNotification.valueData.value.getInteger(); + logger.debug("Got LED mode " + newLedModeRaw); + if (newLedModeRaw != currentLedMode.value) { + VisionLEDMode newLedMode; + switch (newLedModeRaw) { + case -1: + newLedMode = VisionLEDMode.kDefault; + break; + case 0: + newLedMode = VisionLEDMode.kOff; + break; + case 1: + newLedMode = VisionLEDMode.kOn; + break; + case 2: + newLedMode = VisionLEDMode.kBlink; + break; + default: + logger.warn("User supplied invalid LED mode, falling back to Default"); + newLedMode = VisionLEDMode.kDefault; + break; + } + setInternal(newLedMode, true); + + if (modeConsumer != null) modeConsumer.accept(newLedMode.value); } - - private void setInternal(VisionLEDMode newLedMode, boolean fromNT) { - var lastLedMode = currentLedMode; - - if (fromNT) { - switch (newLedMode) { - case kDefault: - setStateImpl(pipelineModeSupplier.getAsBoolean()); - break; - case kOff: - setStateImpl(false); - break; - case kOn: - setStateImpl(true); - break; - case kBlink: - blinkImpl(85, -1); - break; - } - currentLedMode = newLedMode; - logger.info( - "Changing LED mode from \"" + lastLedMode.toString() + "\" to \"" + newLedMode + "\""); - } else { - if (currentLedMode == VisionLEDMode.kDefault) { - switch (newLedMode) { - case kDefault: - setStateImpl(pipelineModeSupplier.getAsBoolean()); - break; - case kOff: - setStateImpl(false); - break; - case kOn: - setStateImpl(true); - break; - case kBlink: - blinkImpl(85, -1); - break; - } - } - logger.info("Changing LED internal state to " + newLedMode.toString()); + } + + private void setInternal(VisionLEDMode newLedMode, boolean fromNT) { + var lastLedMode = currentLedMode; + + if (fromNT) { + switch (newLedMode) { + case kDefault: + setStateImpl(pipelineModeSupplier.getAsBoolean()); + break; + case kOff: + setStateImpl(false); + break; + case kOn: + 
setStateImpl(true); + break; + case kBlink: + blinkImpl(85, -1); + break; + } + currentLedMode = newLedMode; + logger.info( + "Changing LED mode from \"" + lastLedMode.toString() + "\" to \"" + newLedMode + "\""); + } else { + if (currentLedMode == VisionLEDMode.kDefault) { + switch (newLedMode) { + case kDefault: + setStateImpl(pipelineModeSupplier.getAsBoolean()); + break; + case kOff: + setStateImpl(false); + break; + case kOn: + setStateImpl(true); + break; + case kBlink: + blinkImpl(85, -1); + break; } + } + logger.info("Changing LED internal state to " + newLedMode.toString()); } + } } diff --git a/photon-core/src/main/java/org/photonvision/common/hardware/metrics/MetricsManager.java b/photon-core/src/main/java/org/photonvision/common/hardware/metrics/MetricsManager.java index d38b527187..b68bd06623 100644 --- a/photon-core/src/main/java/org/photonvision/common/hardware/metrics/MetricsManager.java +++ b/photon-core/src/main/java/org/photonvision/common/hardware/metrics/MetricsManager.java @@ -33,129 +33,129 @@ import org.photonvision.common.util.ShellExec; public class MetricsManager { - final Logger logger = new Logger(MetricsManager.class, LogGroup.General); + final Logger logger = new Logger(MetricsManager.class, LogGroup.General); - CmdBase cmds; + CmdBase cmds; - private final ShellExec runCommand = new ShellExec(true, true); + private final ShellExec runCommand = new ShellExec(true, true); - public void setConfig(HardwareConfig config) { - if (config.hasCommandsConfigured()) { - cmds = new FileCmds(); - } else if (Platform.isRaspberryPi()) { - cmds = new PiCmds(); // Pi's can use a hardcoded command set - } else if (Platform.isLinux()) { - cmds = new LinuxCmds(); // Linux/Unix platforms assume a nominal command set - } else { - cmds = new CmdBase(); // default - base has no commands - } - - cmds.initCmds(config); - } - - public String safeExecute(String str) { - if (str.isEmpty()) return ""; - try { - return execute(str); - } catch (Exception e) { - 
return "****"; - } + public void setConfig(HardwareConfig config) { + if (config.hasCommandsConfigured()) { + cmds = new FileCmds(); + } else if (Platform.isRaspberryPi()) { + cmds = new PiCmds(); // Pi's can use a hardcoded command set + } else if (Platform.isLinux()) { + cmds = new LinuxCmds(); // Linux/Unix platforms assume a nominal command set + } else { + cmds = new CmdBase(); // default - base has no commands } - private String cpuMemSave = null; + cmds.initCmds(config); + } - public String getMemory() { - if (cmds.cpuMemoryCommand.isEmpty()) return ""; - if (cpuMemSave == null) { - // save the value and only run it once - cpuMemSave = execute(cmds.cpuMemoryCommand); - } - return cpuMemSave; + public String safeExecute(String str) { + if (str.isEmpty()) return ""; + try { + return execute(str); + } catch (Exception e) { + return "****"; } + } - public String getTemp() { - return safeExecute(cmds.cpuTemperatureCommand); - } + private String cpuMemSave = null; - public String getUtilization() { - return safeExecute(cmds.cpuUtilizationCommand); + public String getMemory() { + if (cmds.cpuMemoryCommand.isEmpty()) return ""; + if (cpuMemSave == null) { + // save the value and only run it once + cpuMemSave = execute(cmds.cpuMemoryCommand); } + return cpuMemSave; + } - public String getUptime() { - return safeExecute(cmds.cpuUptimeCommand); - } + public String getTemp() { + return safeExecute(cmds.cpuTemperatureCommand); + } - public String getThrottleReason() { - return safeExecute(cmds.cpuThrottleReasonCmd); - } + public String getUtilization() { + return safeExecute(cmds.cpuUtilizationCommand); + } - private String gpuMemSave = null; + public String getUptime() { + return safeExecute(cmds.cpuUptimeCommand); + } - public String getGPUMemorySplit() { - if (gpuMemSave == null) { - // only needs to run once - gpuMemSave = safeExecute(cmds.gpuMemoryCommand); - } - return gpuMemSave; - } + public String getThrottleReason() { + return 
safeExecute(cmds.cpuThrottleReasonCmd); + } - public String getMallocedMemory() { - return safeExecute(cmds.gpuMemUsageCommand); - } - - public String getUsedDiskPct() { - return safeExecute(cmds.diskUsageCommand); - } + private String gpuMemSave = null; - // TODO: Output in MBs for consistency - public String getUsedRam() { - return safeExecute(cmds.ramUsageCommand); + public String getGPUMemorySplit() { + if (gpuMemSave == null) { + // only needs to run once + gpuMemSave = safeExecute(cmds.gpuMemoryCommand); } - - public void publishMetrics() { - logger.debug("Publishing Metrics..."); - final var metrics = new HashMap(); - - metrics.put("cpuTemp", this.getTemp()); - metrics.put("cpuUtil", this.getUtilization()); - metrics.put("cpuMem", this.getMemory()); - metrics.put("cpuThr", this.getThrottleReason()); - metrics.put("cpuUptime", this.getUptime()); - metrics.put("gpuMem", this.getGPUMemorySplit()); - metrics.put("ramUtil", this.getUsedRam()); - metrics.put("gpuMemUtil", this.getMallocedMemory()); - metrics.put("diskUtilPct", this.getUsedDiskPct()); - - DataChangeService.getInstance().publishEvent(OutgoingUIEvent.wrappedOf("metrics", metrics)); - } - - public synchronized String execute(String command) { - try { - runCommand.executeBashCommand(command); - return runCommand.getOutput(); - } catch (Exception e) { - StringWriter sw = new StringWriter(); - PrintWriter pw = new PrintWriter(sw); - e.printStackTrace(pw); - - logger.error( - "Command: \"" - + command - + "\" returned an error!" 
- + "\nOutput Received: " - + runCommand.getOutput() - + "\nStandard Error: " - + runCommand.getError() - + "\nCommand completed: " - + runCommand.isOutputCompleted() - + "\nError completed: " - + runCommand.isErrorCompleted() - + "\nExit code: " - + runCommand.getExitCode() - + "\n Exception: " - + e - + sw); - return ""; - } + return gpuMemSave; + } + + public String getMallocedMemory() { + return safeExecute(cmds.gpuMemUsageCommand); + } + + public String getUsedDiskPct() { + return safeExecute(cmds.diskUsageCommand); + } + + // TODO: Output in MBs for consistency + public String getUsedRam() { + return safeExecute(cmds.ramUsageCommand); + } + + public void publishMetrics() { + logger.debug("Publishing Metrics..."); + final var metrics = new HashMap(); + + metrics.put("cpuTemp", this.getTemp()); + metrics.put("cpuUtil", this.getUtilization()); + metrics.put("cpuMem", this.getMemory()); + metrics.put("cpuThr", this.getThrottleReason()); + metrics.put("cpuUptime", this.getUptime()); + metrics.put("gpuMem", this.getGPUMemorySplit()); + metrics.put("ramUtil", this.getUsedRam()); + metrics.put("gpuMemUtil", this.getMallocedMemory()); + metrics.put("diskUtilPct", this.getUsedDiskPct()); + + DataChangeService.getInstance().publishEvent(OutgoingUIEvent.wrappedOf("metrics", metrics)); + } + + public synchronized String execute(String command) { + try { + runCommand.executeBashCommand(command); + return runCommand.getOutput(); + } catch (Exception e) { + StringWriter sw = new StringWriter(); + PrintWriter pw = new PrintWriter(sw); + e.printStackTrace(pw); + + logger.error( + "Command: \"" + + command + + "\" returned an error!" 
+ + "\nOutput Received: " + + runCommand.getOutput() + + "\nStandard Error: " + + runCommand.getError() + + "\nCommand completed: " + + runCommand.isOutputCompleted() + + "\nError completed: " + + runCommand.isErrorCompleted() + + "\nExit code: " + + runCommand.getExitCode() + + "\n Exception: " + + e + + sw); + return ""; } + } } diff --git a/photon-core/src/main/java/org/photonvision/common/hardware/metrics/cmds/CmdBase.java b/photon-core/src/main/java/org/photonvision/common/hardware/metrics/cmds/CmdBase.java index 69473ad9e1..d22086be6b 100644 --- a/photon-core/src/main/java/org/photonvision/common/hardware/metrics/cmds/CmdBase.java +++ b/photon-core/src/main/java/org/photonvision/common/hardware/metrics/cmds/CmdBase.java @@ -20,21 +20,21 @@ import org.photonvision.common.configuration.HardwareConfig; public class CmdBase { - // CPU - public String cpuMemoryCommand = ""; - public String cpuTemperatureCommand = ""; - public String cpuUtilizationCommand = ""; - public String cpuThrottleReasonCmd = ""; - public String cpuUptimeCommand = ""; - // GPU - public String gpuMemoryCommand = ""; - public String gpuMemUsageCommand = ""; - // RAM - public String ramUsageCommand = ""; - // Disk - public String diskUsageCommand = ""; + // CPU + public String cpuMemoryCommand = ""; + public String cpuTemperatureCommand = ""; + public String cpuUtilizationCommand = ""; + public String cpuThrottleReasonCmd = ""; + public String cpuUptimeCommand = ""; + // GPU + public String gpuMemoryCommand = ""; + public String gpuMemUsageCommand = ""; + // RAM + public String ramUsageCommand = ""; + // Disk + public String diskUsageCommand = ""; - public void initCmds(HardwareConfig config) { - // default - do nothing - } + public void initCmds(HardwareConfig config) { + // default - do nothing + } } diff --git a/photon-core/src/main/java/org/photonvision/common/hardware/metrics/cmds/FileCmds.java b/photon-core/src/main/java/org/photonvision/common/hardware/metrics/cmds/FileCmds.java index 
7a27469037..f7942560d4 100644 --- a/photon-core/src/main/java/org/photonvision/common/hardware/metrics/cmds/FileCmds.java +++ b/photon-core/src/main/java/org/photonvision/common/hardware/metrics/cmds/FileCmds.java @@ -20,19 +20,19 @@ import org.photonvision.common.configuration.HardwareConfig; public class FileCmds extends CmdBase { - @Override - public void initCmds(HardwareConfig config) { - cpuMemoryCommand = config.cpuMemoryCommand; - cpuTemperatureCommand = config.cpuTempCommand; - cpuUtilizationCommand = config.cpuUtilCommand; - cpuThrottleReasonCmd = config.cpuThrottleReasonCmd; - cpuUptimeCommand = config.cpuUptimeCommand; + @Override + public void initCmds(HardwareConfig config) { + cpuMemoryCommand = config.cpuMemoryCommand; + cpuTemperatureCommand = config.cpuTempCommand; + cpuUtilizationCommand = config.cpuUtilCommand; + cpuThrottleReasonCmd = config.cpuThrottleReasonCmd; + cpuUptimeCommand = config.cpuUptimeCommand; - gpuMemoryCommand = config.gpuMemoryCommand; - gpuMemUsageCommand = config.gpuMemUsageCommand; + gpuMemoryCommand = config.gpuMemoryCommand; + gpuMemUsageCommand = config.gpuMemUsageCommand; - diskUsageCommand = config.diskUsageCommand; + diskUsageCommand = config.diskUsageCommand; - ramUsageCommand = config.ramUtilCommand; - } + ramUsageCommand = config.ramUtilCommand; + } } diff --git a/photon-core/src/main/java/org/photonvision/common/hardware/metrics/cmds/LinuxCmds.java b/photon-core/src/main/java/org/photonvision/common/hardware/metrics/cmds/LinuxCmds.java index 1a7d18f35e..2acaee228e 100644 --- a/photon-core/src/main/java/org/photonvision/common/hardware/metrics/cmds/LinuxCmds.java +++ b/photon-core/src/main/java/org/photonvision/common/hardware/metrics/cmds/LinuxCmds.java @@ -20,21 +20,21 @@ import org.photonvision.common.configuration.HardwareConfig; public class LinuxCmds extends CmdBase { - public void initCmds(HardwareConfig config) { - // CPU - cpuMemoryCommand = "awk '/MemTotal:/ {print int($2 / 1000);}' /proc/meminfo"; + 
public void initCmds(HardwareConfig config) { + // CPU + cpuMemoryCommand = "awk '/MemTotal:/ {print int($2 / 1000);}' /proc/meminfo"; - // TODO: boards have lots of thermal devices. Hard to pick the CPU + // TODO: boards have lots of thermal devices. Hard to pick the CPU - cpuUtilizationCommand = - "top -bn1 | grep \"Cpu(s)\" | sed \"s/.*, *\\([0-9.]*\\)%* id.*/\\1/\" | awk '{print 100 - $1}'"; + cpuUtilizationCommand = + "top -bn1 | grep \"Cpu(s)\" | sed \"s/.*, *\\([0-9.]*\\)%* id.*/\\1/\" | awk '{print 100 - $1}'"; - cpuUptimeCommand = "uptime -p | cut -c 4-"; + cpuUptimeCommand = "uptime -p | cut -c 4-"; - // RAM - ramUsageCommand = "awk '/MemAvailable:/ {print int($2 / 1000);}' /proc/meminfo"; + // RAM + ramUsageCommand = "awk '/MemAvailable:/ {print int($2 / 1000);}' /proc/meminfo"; - // Disk - diskUsageCommand = "df ./ --output=pcent | tail -n +2"; - } + // Disk + diskUsageCommand = "df ./ --output=pcent | tail -n +2"; + } } diff --git a/photon-core/src/main/java/org/photonvision/common/hardware/metrics/cmds/PiCmds.java b/photon-core/src/main/java/org/photonvision/common/hardware/metrics/cmds/PiCmds.java index 6eb005acc5..6c11fc0489 100644 --- a/photon-core/src/main/java/org/photonvision/common/hardware/metrics/cmds/PiCmds.java +++ b/photon-core/src/main/java/org/photonvision/common/hardware/metrics/cmds/PiCmds.java @@ -20,22 +20,22 @@ import org.photonvision.common.configuration.HardwareConfig; public class PiCmds extends LinuxCmds { - /** Applies pi-specific commands, ignoring any input configuration */ - public void initCmds(HardwareConfig config) { - super.initCmds(config); + /** Applies pi-specific commands, ignoring any input configuration */ + public void initCmds(HardwareConfig config) { + super.initCmds(config); - // CPU - cpuMemoryCommand = "vcgencmd get_mem arm | grep -Eo '[0-9]+'"; - cpuTemperatureCommand = "sed 's/.\\{3\\}$/.&/' /sys/class/thermal/thermal_zone0/temp"; - cpuThrottleReasonCmd = - "if (( $(( $(vcgencmd get_throttled | grep -Eo 
0x[0-9a-fA-F]*) & 0x01 )) != 0x00 )); then echo \"LOW VOLTAGE\"; " - + " elif (( $(( $(vcgencmd get_throttled | grep -Eo 0x[0-9a-fA-F]*) & 0x08 )) != 0x00 )); then echo \"HIGH TEMP\"; " - + " elif (( $(( $(vcgencmd get_throttled | grep -Eo 0x[0-9a-fA-F]*) & 0x10000 )) != 0x00 )); then echo \"Prev. Low Voltage\"; " - + " elif (( $(( $(vcgencmd get_throttled | grep -Eo 0x[0-9a-fA-F]*) & 0x80000 )) != 0x00 )); then echo \"Prev. High Temp\"; " - + " else echo \"None\"; fi"; + // CPU + cpuMemoryCommand = "vcgencmd get_mem arm | grep -Eo '[0-9]+'"; + cpuTemperatureCommand = "sed 's/.\\{3\\}$/.&/' /sys/class/thermal/thermal_zone0/temp"; + cpuThrottleReasonCmd = + "if (( $(( $(vcgencmd get_throttled | grep -Eo 0x[0-9a-fA-F]*) & 0x01 )) != 0x00 )); then echo \"LOW VOLTAGE\"; " + + " elif (( $(( $(vcgencmd get_throttled | grep -Eo 0x[0-9a-fA-F]*) & 0x08 )) != 0x00 )); then echo \"HIGH TEMP\"; " + + " elif (( $(( $(vcgencmd get_throttled | grep -Eo 0x[0-9a-fA-F]*) & 0x10000 )) != 0x00 )); then echo \"Prev. Low Voltage\"; " + + " elif (( $(( $(vcgencmd get_throttled | grep -Eo 0x[0-9a-fA-F]*) & 0x80000 )) != 0x00 )); then echo \"Prev. 
High Temp\"; " + + " else echo \"None\"; fi"; - // GPU - gpuMemoryCommand = "vcgencmd get_mem gpu | grep -Eo '[0-9]+'"; - gpuMemUsageCommand = "vcgencmd get_mem malloc | grep -Eo '[0-9]+'"; - } + // GPU + gpuMemoryCommand = "vcgencmd get_mem gpu | grep -Eo '[0-9]+'"; + gpuMemUsageCommand = "vcgencmd get_mem malloc | grep -Eo '[0-9]+'"; + } } diff --git a/photon-core/src/main/java/org/photonvision/common/logging/LogGroup.java b/photon-core/src/main/java/org/photonvision/common/logging/LogGroup.java index 50524db63f..d968e9f6f1 100644 --- a/photon-core/src/main/java/org/photonvision/common/logging/LogGroup.java +++ b/photon-core/src/main/java/org/photonvision/common/logging/LogGroup.java @@ -18,10 +18,10 @@ package org.photonvision.common.logging; public enum LogGroup { - Camera, - WebServer, - VisionModule, - Data, - General, - Config + Camera, + WebServer, + VisionModule, + Data, + General, + Config } diff --git a/photon-core/src/main/java/org/photonvision/common/logging/LogLevel.java b/photon-core/src/main/java/org/photonvision/common/logging/LogLevel.java index 5d96d3aeb4..890a803a37 100644 --- a/photon-core/src/main/java/org/photonvision/common/logging/LogLevel.java +++ b/photon-core/src/main/java/org/photonvision/common/logging/LogLevel.java @@ -18,17 +18,17 @@ package org.photonvision.common.logging; public enum LogLevel { - ERROR(0, Logger.ANSI_RED), - WARN(1, Logger.ANSI_YELLOW), - INFO(2, Logger.ANSI_GREEN), - DEBUG(3, Logger.ANSI_WHITE), - TRACE(4, Logger.ANSI_CYAN); + ERROR(0, Logger.ANSI_RED), + WARN(1, Logger.ANSI_YELLOW), + INFO(2, Logger.ANSI_GREEN), + DEBUG(3, Logger.ANSI_WHITE), + TRACE(4, Logger.ANSI_CYAN); - public final String colorCode; - public final int code; + public final String colorCode; + public final int code; - LogLevel(int code, String colorCode) { - this.code = code; - this.colorCode = colorCode; - } + LogLevel(int code, String colorCode) { + this.code = code; + this.colorCode = colorCode; + } } diff --git 
a/photon-core/src/main/java/org/photonvision/common/logging/Logger.java b/photon-core/src/main/java/org/photonvision/common/logging/Logger.java index b06fea13fa..d8338d4ec9 100644 --- a/photon-core/src/main/java/org/photonvision/common/logging/Logger.java +++ b/photon-core/src/main/java/org/photonvision/common/logging/Logger.java @@ -31,316 +31,316 @@ @SuppressWarnings("unused") public class Logger { - public static final String ANSI_RESET = "\u001B[0m"; - public static final String ANSI_BLACK = "\u001B[30m"; - public static final String ANSI_RED = "\u001B[31m"; - public static final String ANSI_GREEN = "\u001B[32m"; - public static final String ANSI_YELLOW = "\u001B[33m"; - public static final String ANSI_BLUE = "\u001B[34m"; - public static final String ANSI_PURPLE = "\u001B[35m"; - public static final String ANSI_CYAN = "\u001B[36m"; - public static final String ANSI_WHITE = "\u001B[37m"; - - public static final int MAX_LOGS_TO_KEEP = 100; - - private static final SimpleDateFormat simpleDateFormat = - new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); - - private static final List> uiBacklog = new ArrayList<>(); - private static boolean connected = false; - - private static final UILogAppender uiLogAppender = new UILogAppender(); - - private final String className; - private final LogGroup group; - - public Logger(Class clazz, LogGroup group) { - this.className = clazz.getSimpleName(); - this.group = group; - } - - public Logger(Class clazz, String suffix, LogGroup group) { - this.className = clazz.getSimpleName() + " - " + suffix; - this.group = group; - } - - public static String getDate() { - return simpleDateFormat.format(new Date()); - } - - public static String format( - String logMessage, LogLevel level, LogGroup group, String clazz, boolean color) { - var date = getDate(); - var builder = new StringBuilder(); - if (color) builder.append(level.colorCode); - builder - .append("[") - .append(date) - .append("] [") - .append(group) - .append(" - ") - 
.append(clazz) - .append("] [") - .append(level.name()) - .append("] ") - .append(logMessage); - if (color) builder.append(ANSI_RESET); - return builder.toString(); - } - - private static final HashMap levelMap = new HashMap<>(); - private static final List currentAppenders = new ArrayList<>(); - - static { - levelMap.put(LogGroup.Camera, LogLevel.INFO); - levelMap.put(LogGroup.General, LogLevel.INFO); - levelMap.put(LogGroup.WebServer, LogLevel.INFO); - levelMap.put(LogGroup.Data, LogLevel.INFO); - levelMap.put(LogGroup.VisionModule, LogLevel.INFO); - levelMap.put(LogGroup.Config, LogLevel.INFO); - } - - static { - currentAppenders.add(new ConsoleLogAppender()); - currentAppenders.add(uiLogAppender); - addFileAppender(ConfigManager.getInstance().getLogPath()); - cleanLogs(ConfigManager.getInstance().getLogsDir()); - } - - @SuppressWarnings("ResultOfMethodCallIgnored") - public static void addFileAppender(Path logFilePath) { - var file = logFilePath.toFile(); - if (!file.exists()) { - try { - file.getParentFile().mkdirs(); - file.createNewFile(); - } catch (IOException e) { - e.printStackTrace(); - } - } - currentAppenders.add(new FileLogAppender(logFilePath)); - } - - public static void cleanLogs(Path folderToClean) { - File[] logs = folderToClean.toFile().listFiles(); - if (logs == null) return; - LinkedList logFileList = new LinkedList<>(Arrays.asList(logs)); - HashMap logFileStartDateMap = new HashMap<>(); - - // Remove any files from the list for which we can't parse a start date from their name. - // Simultaneously populate our HashMap with Date objects representing the file-name - // indicated log start time. 
- logFileList.removeIf( - (File arg0) -> { - try { - logFileStartDateMap.put( - arg0, ConfigManager.getInstance().logFnameToDate(arg0.getName())); - return false; - } catch (ParseException e) { - return true; + public static final String ANSI_RESET = "\u001B[0m"; + public static final String ANSI_BLACK = "\u001B[30m"; + public static final String ANSI_RED = "\u001B[31m"; + public static final String ANSI_GREEN = "\u001B[32m"; + public static final String ANSI_YELLOW = "\u001B[33m"; + public static final String ANSI_BLUE = "\u001B[34m"; + public static final String ANSI_PURPLE = "\u001B[35m"; + public static final String ANSI_CYAN = "\u001B[36m"; + public static final String ANSI_WHITE = "\u001B[37m"; + + public static final int MAX_LOGS_TO_KEEP = 100; + + private static final SimpleDateFormat simpleDateFormat = + new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); + + private static final List> uiBacklog = new ArrayList<>(); + private static boolean connected = false; + + private static final UILogAppender uiLogAppender = new UILogAppender(); + + private final String className; + private final LogGroup group; + + public Logger(Class clazz, LogGroup group) { + this.className = clazz.getSimpleName(); + this.group = group; + } + + public Logger(Class clazz, String suffix, LogGroup group) { + this.className = clazz.getSimpleName() + " - " + suffix; + this.group = group; + } + + public static String getDate() { + return simpleDateFormat.format(new Date()); + } + + public static String format( + String logMessage, LogLevel level, LogGroup group, String clazz, boolean color) { + var date = getDate(); + var builder = new StringBuilder(); + if (color) builder.append(level.colorCode); + builder + .append("[") + .append(date) + .append("] [") + .append(group) + .append(" - ") + .append(clazz) + .append("] [") + .append(level.name()) + .append("] ") + .append(logMessage); + if (color) builder.append(ANSI_RESET); + return builder.toString(); + } + + private static final HashMap 
levelMap = new HashMap<>(); + private static final List currentAppenders = new ArrayList<>(); + + static { + levelMap.put(LogGroup.Camera, LogLevel.INFO); + levelMap.put(LogGroup.General, LogLevel.INFO); + levelMap.put(LogGroup.WebServer, LogLevel.INFO); + levelMap.put(LogGroup.Data, LogLevel.INFO); + levelMap.put(LogGroup.VisionModule, LogLevel.INFO); + levelMap.put(LogGroup.Config, LogLevel.INFO); + } + + static { + currentAppenders.add(new ConsoleLogAppender()); + currentAppenders.add(uiLogAppender); + addFileAppender(ConfigManager.getInstance().getLogPath()); + cleanLogs(ConfigManager.getInstance().getLogsDir()); + } + + @SuppressWarnings("ResultOfMethodCallIgnored") + public static void addFileAppender(Path logFilePath) { + var file = logFilePath.toFile(); + if (!file.exists()) { + try { + file.getParentFile().mkdirs(); + file.createNewFile(); + } catch (IOException e) { + e.printStackTrace(); + } + } + currentAppenders.add(new FileLogAppender(logFilePath)); + } + + public static void cleanLogs(Path folderToClean) { + File[] logs = folderToClean.toFile().listFiles(); + if (logs == null) return; + LinkedList logFileList = new LinkedList<>(Arrays.asList(logs)); + HashMap logFileStartDateMap = new HashMap<>(); + + // Remove any files from the list for which we can't parse a start date from their name. + // Simultaneously populate our HashMap with Date objects representing the file-name + // indicated log start time. + logFileList.removeIf( + (File arg0) -> { + try { + logFileStartDateMap.put( + arg0, ConfigManager.getInstance().logFnameToDate(arg0.getName())); + return false; + } catch (ParseException e) { + return true; + } + }); + + // Execute a sort on the log file list by date in the filename. 
+ logFileList.sort( + (File arg0, File arg1) -> { + Date date0 = logFileStartDateMap.get(arg0); + Date date1 = logFileStartDateMap.get(arg1); + return date1.compareTo(date0); + }); + + int logCounter = 0; + for (File file : logFileList) { + // Due to filtering above, everything in logFileList should be a log file + if (logCounter < MAX_LOGS_TO_KEEP) { + // Skip over the first MAX_LOGS_TO_KEEP files + logCounter++; + } else { + // Delete this file. + file.delete(); + } + } + } + + public static void setLevel(LogGroup group, LogLevel newLevel) { + levelMap.put(group, newLevel); + } + + // TODO: profile + private static void log(String message, LogLevel level, LogGroup group, String clazz) { + for (var a : currentAppenders) { + var shouldColor = a instanceof ConsoleLogAppender; + var formattedMessage = format(message, level, group, clazz, shouldColor); + a.log(formattedMessage, level); + } + if (!connected) { + synchronized (uiBacklog) { + uiBacklog.add(Pair.of(format(message, level, group, clazz, false), level)); + } + } + } + + public static void sendConnectedBacklog() { + connected = true; + synchronized (uiBacklog) { + for (var message : uiBacklog) { + uiLogAppender.log(message.getLeft(), message.getRight()); + } + uiBacklog.clear(); + } + } + + public boolean shouldLog(LogLevel logLevel) { + return logLevel.code <= levelMap.get(group).code; + } + + private void log(String message, LogLevel level) { + if (shouldLog(level)) { + log(message, level, group, className); + } + } + + private void log(String message, LogLevel messageLevel, LogLevel conditionalLevel) { + if (shouldLog(conditionalLevel)) { + log(message, messageLevel, group, className); + } + } + + private void log(Supplier messageSupplier, LogLevel level) { + if (shouldLog(level)) { + log(messageSupplier.get(), level, group, className); + } + } + + private void log( + Supplier messageSupplier, LogLevel messageLevel, LogLevel conditionalLevel) { + if (shouldLog(conditionalLevel)) { + 
log(messageSupplier.get(), messageLevel, group, className); + } + } + + public void error(Supplier messageSupplier) { + log(messageSupplier, LogLevel.ERROR); + } + + public void error(String message) { + log(message, LogLevel.ERROR); + } + + /** + * Logs an error message with the stack trace of a Throwable. The stacktrace will only be printed + * if the current LogLevel is TRACE + * + * @param message + * @param t + */ + public void error(String message, Throwable t) { + log(message + ": " + t.getMessage(), LogLevel.ERROR); + log(convertStackTraceToString(t), LogLevel.ERROR, LogLevel.DEBUG); + } + + public void warn(Supplier messageSupplier) { + log(messageSupplier, LogLevel.WARN); + } + + public void warn(String message) { + log(message, LogLevel.WARN); + } + + public void info(Supplier messageSupplier) { + log(messageSupplier, LogLevel.INFO); + } + + public void info(String message) { + log(message, LogLevel.INFO); + } + + public void debug(Supplier messageSupplier) { + log(messageSupplier, LogLevel.DEBUG); + } + + public void debug(String message) { + log(message, LogLevel.DEBUG); + } + + public void trace(Supplier messageSupplier) { + log(messageSupplier, LogLevel.TRACE); + } + + public void trace(String message) { + log(message, LogLevel.TRACE); + } + + private static String convertStackTraceToString(Throwable throwable) { + try (StringWriter sw = new StringWriter(); + PrintWriter pw = new PrintWriter(sw)) { + throwable.printStackTrace(pw); + return sw.toString(); + } catch (IOException ioe) { + throw new IllegalStateException(ioe); + } + } + + private interface LogAppender { + void log(String message, LogLevel level); + } + + private static class ConsoleLogAppender implements LogAppender { + @Override + public void log(String message, LogLevel level) { + System.out.println(message); + } + } + + private static class UILogAppender implements LogAppender { + @Override + public void log(String message, LogLevel level) { + var messageMap = new HashMap(); + 
messageMap.put("logMessage", message); + messageMap.put("logLevel", level.code); + var superMap = new HashMap(); + superMap.put("logMessage", messageMap); + DataChangeService.getInstance().publishEvent(OutgoingUIEvent.wrappedOf("log", superMap)); + } + } + + private static class FileLogAppender implements LogAppender { + private OutputStream out; + private boolean wantsFlush; + + public FileLogAppender(Path logFilePath) { + try { + this.out = new FileOutputStream(logFilePath.toFile()); + TimedTaskManager.getInstance() + .addTask( + "FileLogAppender", + () -> { + try { + if (wantsFlush) { + out.flush(); + wantsFlush = false; } - }); - - // Execute a sort on the log file list by date in the filename. - logFileList.sort( - (File arg0, File arg1) -> { - Date date0 = logFileStartDateMap.get(arg0); - Date date1 = logFileStartDateMap.get(arg1); - return date1.compareTo(date0); - }); - - int logCounter = 0; - for (File file : logFileList) { - // Due to filtering above, everything in logFileList should be a log file - if (logCounter < MAX_LOGS_TO_KEEP) { - // Skip over the first MAX_LOGS_TO_KEEP files - logCounter++; - } else { - // Delete this file. 
- file.delete(); - } - } - } - - public static void setLevel(LogGroup group, LogLevel newLevel) { - levelMap.put(group, newLevel); - } - - // TODO: profile - private static void log(String message, LogLevel level, LogGroup group, String clazz) { - for (var a : currentAppenders) { - var shouldColor = a instanceof ConsoleLogAppender; - var formattedMessage = format(message, level, group, clazz, shouldColor); - a.log(formattedMessage, level); - } - if (!connected) { - synchronized (uiBacklog) { - uiBacklog.add(Pair.of(format(message, level, group, clazz, false), level)); - } - } - } - - public static void sendConnectedBacklog() { - connected = true; - synchronized (uiBacklog) { - for (var message : uiBacklog) { - uiLogAppender.log(message.getLeft(), message.getRight()); - } - uiBacklog.clear(); - } - } - - public boolean shouldLog(LogLevel logLevel) { - return logLevel.code <= levelMap.get(group).code; - } - - private void log(String message, LogLevel level) { - if (shouldLog(level)) { - log(message, level, group, className); - } - } - - private void log(String message, LogLevel messageLevel, LogLevel conditionalLevel) { - if (shouldLog(conditionalLevel)) { - log(message, messageLevel, group, className); - } - } - - private void log(Supplier messageSupplier, LogLevel level) { - if (shouldLog(level)) { - log(messageSupplier.get(), level, group, className); - } - } - - private void log( - Supplier messageSupplier, LogLevel messageLevel, LogLevel conditionalLevel) { - if (shouldLog(conditionalLevel)) { - log(messageSupplier.get(), messageLevel, group, className); - } - } - - public void error(Supplier messageSupplier) { - log(messageSupplier, LogLevel.ERROR); - } - - public void error(String message) { - log(message, LogLevel.ERROR); - } - - /** - * Logs an error message with the stack trace of a Throwable. 
The stacktrace will only be printed - * if the current LogLevel is TRACE - * - * @param message - * @param t - */ - public void error(String message, Throwable t) { - log(message + ": " + t.getMessage(), LogLevel.ERROR); - log(convertStackTraceToString(t), LogLevel.ERROR, LogLevel.DEBUG); - } - - public void warn(Supplier messageSupplier) { - log(messageSupplier, LogLevel.WARN); - } - - public void warn(String message) { - log(message, LogLevel.WARN); - } - - public void info(Supplier messageSupplier) { - log(messageSupplier, LogLevel.INFO); - } - - public void info(String message) { - log(message, LogLevel.INFO); - } - - public void debug(Supplier messageSupplier) { - log(messageSupplier, LogLevel.DEBUG); - } - - public void debug(String message) { - log(message, LogLevel.DEBUG); - } - - public void trace(Supplier messageSupplier) { - log(messageSupplier, LogLevel.TRACE); - } - - public void trace(String message) { - log(message, LogLevel.TRACE); - } - - private static String convertStackTraceToString(Throwable throwable) { - try (StringWriter sw = new StringWriter(); - PrintWriter pw = new PrintWriter(sw)) { - throwable.printStackTrace(pw); - return sw.toString(); - } catch (IOException ioe) { - throw new IllegalStateException(ioe); - } - } - - private interface LogAppender { - void log(String message, LogLevel level); - } - - private static class ConsoleLogAppender implements LogAppender { - @Override - public void log(String message, LogLevel level) { - System.out.println(message); - } - } - - private static class UILogAppender implements LogAppender { - @Override - public void log(String message, LogLevel level) { - var messageMap = new HashMap(); - messageMap.put("logMessage", message); - messageMap.put("logLevel", level.code); - var superMap = new HashMap(); - superMap.put("logMessage", messageMap); - DataChangeService.getInstance().publishEvent(OutgoingUIEvent.wrappedOf("log", superMap)); - } - } - - private static class FileLogAppender implements 
LogAppender { - private OutputStream out; - private boolean wantsFlush; - - public FileLogAppender(Path logFilePath) { - try { - this.out = new FileOutputStream(logFilePath.toFile()); - TimedTaskManager.getInstance() - .addTask( - "FileLogAppender", - () -> { - try { - if (wantsFlush) { - out.flush(); - wantsFlush = false; - } - } catch (IOException ignored) { - } - }, - 3000L); - } catch (FileNotFoundException e) { - out = null; - System.err.println("Unable to log to file " + logFilePath); - } - } - - @Override - public void log(String message, LogLevel level) { - message += "\n"; - try { - out.write(message.getBytes()); - wantsFlush = true; - } catch (IOException e) { - e.printStackTrace(); - } catch (NullPointerException e) { - // Nothing to do - no stream available for writing - } - } - } + } catch (IOException ignored) { + } + }, + 3000L); + } catch (FileNotFoundException e) { + out = null; + System.err.println("Unable to log to file " + logFilePath); + } + } + + @Override + public void log(String message, LogLevel level) { + message += "\n"; + try { + out.write(message.getBytes()); + wantsFlush = true; + } catch (IOException e) { + e.printStackTrace(); + } catch (NullPointerException e) { + // Nothing to do - no stream available for writing + } + } + } } diff --git a/photon-core/src/main/java/org/photonvision/common/networking/NetworkInterface.java b/photon-core/src/main/java/org/photonvision/common/networking/NetworkInterface.java index 8130dccb62..8bd8ba182d 100644 --- a/photon-core/src/main/java/org/photonvision/common/networking/NetworkInterface.java +++ b/photon-core/src/main/java/org/photonvision/common/networking/NetworkInterface.java @@ -23,56 +23,56 @@ @SuppressWarnings("WeakerAccess") public class NetworkInterface { - private static final Logger logger = new Logger(NetworkInterface.class, LogGroup.General); + private static final Logger logger = new Logger(NetworkInterface.class, LogGroup.General); - public final String name; - public final String 
displayName; - public final String ipAddress; - public final String netmask; - public final String broadcast; + public final String name; + public final String displayName; + public final String ipAddress; + public final String netmask; + public final String broadcast; - public NetworkInterface(java.net.NetworkInterface inetface, InterfaceAddress ifaceAddress) { - name = inetface.getName(); - displayName = inetface.getDisplayName(); + public NetworkInterface(java.net.NetworkInterface inetface, InterfaceAddress ifaceAddress) { + name = inetface.getName(); + displayName = inetface.getDisplayName(); - var inetAddress = ifaceAddress.getAddress(); - ipAddress = inetAddress.getHostAddress(); - netmask = getIPv4LocalNetMask(ifaceAddress); + var inetAddress = ifaceAddress.getAddress(); + ipAddress = inetAddress.getHostAddress(); + netmask = getIPv4LocalNetMask(ifaceAddress); - // TODO: (low) hack to "get" gateway, this is gross and bad, pls fix - var splitIPAddr = ipAddress.split("\\."); - splitIPAddr[3] = "1"; - splitIPAddr[3] = "255"; - broadcast = String.join(".", splitIPAddr); - } + // TODO: (low) hack to "get" gateway, this is gross and bad, pls fix + var splitIPAddr = ipAddress.split("\\."); + splitIPAddr[3] = "1"; + splitIPAddr[3] = "255"; + broadcast = String.join(".", splitIPAddr); + } - private static String getIPv4LocalNetMask(InterfaceAddress interfaceAddress) { - var netPrefix = interfaceAddress.getNetworkPrefixLength(); - try { - // Since this is for IPv4, it's 32 bits, so set the sign value of - // the int to "negative"... - int shiftby = (1 << 31); - // For the number of bits of the prefix -1 (we already set the sign bit) - for (int i = netPrefix - 1; i > 0; i--) { - // Shift the sign right... Java makes the sign bit sticky on a shift... - // So no need to "set it back up"... - shiftby = (shiftby >> 1); - } - // Transform the resulting value in xxx.xxx.xxx.xxx format, like if - /// it was a standard address... - // Return the address thus created... 
- return ((shiftby >> 24) & 255) - + "." - + ((shiftby >> 16) & 255) - + "." - + ((shiftby >> 8) & 255) - + "." - + (shiftby & 255); - // return InetAddress.getByName(maskString); - } catch (Exception e) { - logger.error("Failed to get netmask!", e); - } - // Something went wrong here... - return null; + private static String getIPv4LocalNetMask(InterfaceAddress interfaceAddress) { + var netPrefix = interfaceAddress.getNetworkPrefixLength(); + try { + // Since this is for IPv4, it's 32 bits, so set the sign value of + // the int to "negative"... + int shiftby = (1 << 31); + // For the number of bits of the prefix -1 (we already set the sign bit) + for (int i = netPrefix - 1; i > 0; i--) { + // Shift the sign right... Java makes the sign bit sticky on a shift... + // So no need to "set it back up"... + shiftby = (shiftby >> 1); + } + // Transform the resulting value in xxx.xxx.xxx.xxx format, like if + /// it was a standard address... + // Return the address thus created... + return ((shiftby >> 24) & 255) + + "." + + ((shiftby >> 16) & 255) + + "." + + ((shiftby >> 8) & 255) + + "." + + (shiftby & 255); + // return InetAddress.getByName(maskString); + } catch (Exception e) { + logger.error("Failed to get netmask!", e); } + // Something went wrong here... 
+ return null; + } } diff --git a/photon-core/src/main/java/org/photonvision/common/networking/NetworkManager.java b/photon-core/src/main/java/org/photonvision/common/networking/NetworkManager.java index 22543c4bb3..c09649bc91 100644 --- a/photon-core/src/main/java/org/photonvision/common/networking/NetworkManager.java +++ b/photon-core/src/main/java/org/photonvision/common/networking/NetworkManager.java @@ -25,127 +25,127 @@ import org.photonvision.common.util.ShellExec; public class NetworkManager { - private static final Logger logger = new Logger(NetworkManager.class, LogGroup.General); + private static final Logger logger = new Logger(NetworkManager.class, LogGroup.General); - private NetworkManager() {} + private NetworkManager() {} - private static class SingletonHolder { - private static final NetworkManager INSTANCE = new NetworkManager(); - } - - public static NetworkManager getInstance() { - return SingletonHolder.INSTANCE; - } + private static class SingletonHolder { + private static final NetworkManager INSTANCE = new NetworkManager(); + } - private boolean isManaged = false; - public boolean networkingIsDisabled = false; // Passed in via CLI + public static NetworkManager getInstance() { + return SingletonHolder.INSTANCE; + } - public void initialize(boolean shouldManage) { - isManaged = shouldManage && !networkingIsDisabled; - if (!isManaged) { - return; - } + private boolean isManaged = false; + public boolean networkingIsDisabled = false; // Passed in via CLI - var config = ConfigManager.getInstance().getConfig().getNetworkConfig(); - logger.info("Setting " + config.connectionType + " with team " + config.ntServerAddress); - if (Platform.isLinux()) { - if (!Platform.isRoot()) { - logger.error("Cannot manage hostname without root!"); - } + public void initialize(boolean shouldManage) { + isManaged = shouldManage && !networkingIsDisabled; + if (!isManaged) { + return; + } - // always set hostname - if (!config.hostname.isEmpty()) { - try { - var 
shell = new ShellExec(true, false); - shell.executeBashCommand("cat /etc/hostname | tr -d \" \\t\\n\\r\""); - var oldHostname = shell.getOutput().replace("\n", ""); - - var setHostnameRetCode = - shell.executeBashCommand( - "echo $NEW_HOSTNAME > /etc/hostname".replace("$NEW_HOSTNAME", config.hostname)); - setHostnameRetCode = - shell.executeBashCommand("hostnamectl set-hostname " + config.hostname); - - // Add to /etc/hosts - var addHostRetCode = - shell.executeBashCommand( - String.format( - "sed -i \"s/127.0.1.1.*%s/127.0.1.1\\t%s/g\" /etc/hosts", - oldHostname, config.hostname)); - - shell.executeBashCommand("sudo service avahi-daemon restart"); - - var success = setHostnameRetCode == 0 && addHostRetCode == 0; - if (!success) { - logger.error( - "Setting hostname returned non-zero codes (hostname/hosts) " - + setHostnameRetCode - + "|" - + addHostRetCode - + "!"); - } else { - logger.info("Set hostname to " + config.hostname); - } - } catch (Exception e) { - logger.error("Failed to set hostname!", e); - } + var config = ConfigManager.getInstance().getConfig().getNetworkConfig(); + logger.info("Setting " + config.connectionType + " with team " + config.ntServerAddress); + if (Platform.isLinux()) { + if (!Platform.isRoot()) { + logger.error("Cannot manage hostname without root!"); + } + + // always set hostname + if (!config.hostname.isEmpty()) { + try { + var shell = new ShellExec(true, false); + shell.executeBashCommand("cat /etc/hostname | tr -d \" \\t\\n\\r\""); + var oldHostname = shell.getOutput().replace("\n", ""); + + var setHostnameRetCode = + shell.executeBashCommand( + "echo $NEW_HOSTNAME > /etc/hostname".replace("$NEW_HOSTNAME", config.hostname)); + setHostnameRetCode = + shell.executeBashCommand("hostnamectl set-hostname " + config.hostname); + + // Add to /etc/hosts + var addHostRetCode = + shell.executeBashCommand( + String.format( + "sed -i \"s/127.0.1.1.*%s/127.0.1.1\\t%s/g\" /etc/hosts", + oldHostname, config.hostname)); + + 
shell.executeBashCommand("sudo service avahi-daemon restart"); + + var success = setHostnameRetCode == 0 && addHostRetCode == 0; + if (!success) { + logger.error( + "Setting hostname returned non-zero codes (hostname/hosts) " + + setHostnameRetCode + + "|" + + addHostRetCode + + "!"); + } else { + logger.info("Set hostname to " + config.hostname); + } + } catch (Exception e) { + logger.error("Failed to set hostname!", e); + } + } else { + logger.warn("Got empty hostname?"); + } + + if (config.connectionType == NetworkMode.DHCP) { + var shell = new ShellExec(); + try { + // set nmcli back to DHCP, and re-run dhclient -- this ought to grab a new IP address + shell.executeBashCommand( + config.setDHCPcommand.replace( + NetworkConfig.NM_IFACE_STRING, config.getEscapedInterfaceName())); + shell.executeBashCommand("dhclient " + config.getPhysicalInterfaceName(), false); + } catch (Exception e) { + logger.error("Exception while setting DHCP!"); + } + } else if (config.connectionType == NetworkMode.STATIC) { + var shell = new ShellExec(); + if (!config.staticIp.isEmpty()) { + try { + shell.executeBashCommand( + config + .setStaticCommand + .replace(NetworkConfig.NM_IFACE_STRING, config.getEscapedInterfaceName()) + .replace(NetworkConfig.NM_IP_STRING, config.staticIp)); + + if (Platform.isRaspberryPi()) { + // Pi's need to manually have their interface adjusted?? 
and the 5-second sleep is + // integral in my testing (Matt) + shell.executeBashCommand( + "sh -c 'nmcli con down " + + config.getEscapedInterfaceName() + + "; nmcli con up " + + config.getEscapedInterfaceName() + + "'"); } else { - logger.warn("Got empty hostname?"); - } - - if (config.connectionType == NetworkMode.DHCP) { - var shell = new ShellExec(); - try { - // set nmcli back to DHCP, and re-run dhclient -- this ought to grab a new IP address - shell.executeBashCommand( - config.setDHCPcommand.replace( - NetworkConfig.NM_IFACE_STRING, config.getEscapedInterfaceName())); - shell.executeBashCommand("dhclient " + config.getPhysicalInterfaceName(), false); - } catch (Exception e) { - logger.error("Exception while setting DHCP!"); - } - } else if (config.connectionType == NetworkMode.STATIC) { - var shell = new ShellExec(); - if (!config.staticIp.isEmpty()) { - try { - shell.executeBashCommand( - config - .setStaticCommand - .replace(NetworkConfig.NM_IFACE_STRING, config.getEscapedInterfaceName()) - .replace(NetworkConfig.NM_IP_STRING, config.staticIp)); - - if (Platform.isRaspberryPi()) { - // Pi's need to manually have their interface adjusted?? and the 5-second sleep is - // integral in my testing (Matt) - shell.executeBashCommand( - "sh -c 'nmcli con down " - + config.getEscapedInterfaceName() - + "; nmcli con up " - + config.getEscapedInterfaceName() - + "'"); - } else { - // for now just bring down /up -- more testing needed on beelink et al. - shell.executeBashCommand( - "sh -c 'nmcli con down " - + config.getEscapedInterfaceName() - + "; nmcli con up " - + config.getEscapedInterfaceName() - + "'"); - } - } catch (Exception e) { - logger.error("Error while setting static IP!", e); - } - } else { - logger.warn("Got empty static IP?"); - } + // for now just bring down /up -- more testing needed on beelink et al. 
+ shell.executeBashCommand( + "sh -c 'nmcli con down " + + config.getEscapedInterfaceName() + + "; nmcli con up " + + config.getEscapedInterfaceName() + + "'"); } + } catch (Exception e) { + logger.error("Error while setting static IP!", e); + } } else { - logger.info("Not managing network on non-Linux platforms"); + logger.warn("Got empty static IP?"); } + } + } else { + logger.info("Not managing network on non-Linux platforms"); } + } - public void reinitialize() { - initialize(ConfigManager.getInstance().getConfig().getNetworkConfig().shouldManage()); - } + public void reinitialize() { + initialize(ConfigManager.getInstance().getConfig().getNetworkConfig().shouldManage()); + } } diff --git a/photon-core/src/main/java/org/photonvision/common/networking/NetworkMode.java b/photon-core/src/main/java/org/photonvision/common/networking/NetworkMode.java index c491e86625..0aa6ef14da 100644 --- a/photon-core/src/main/java/org/photonvision/common/networking/NetworkMode.java +++ b/photon-core/src/main/java/org/photonvision/common/networking/NetworkMode.java @@ -20,11 +20,11 @@ import com.fasterxml.jackson.annotation.JsonValue; public enum NetworkMode { - DHCP, - STATIC; + DHCP, + STATIC; - @JsonValue - public int toValue() { - return ordinal(); - } + @JsonValue + public int toValue() { + return ordinal(); + } } diff --git a/photon-core/src/main/java/org/photonvision/common/networking/NetworkUtils.java b/photon-core/src/main/java/org/photonvision/common/networking/NetworkUtils.java index cf84309e57..84467eea1a 100644 --- a/photon-core/src/main/java/org/photonvision/common/networking/NetworkUtils.java +++ b/photon-core/src/main/java/org/photonvision/common/networking/NetworkUtils.java @@ -29,111 +29,111 @@ import org.photonvision.common.util.ShellExec; public class NetworkUtils { - private static final Logger logger = new Logger(NetworkUtils.class, LogGroup.General); + private static final Logger logger = new Logger(NetworkUtils.class, LogGroup.General); - public enum NMType 
{ - NMTYPE_ETHERNET("ethernet"), - NMTYPE_WIFI("wifi"), - NMTYPE_UNKNOWN(""); + public enum NMType { + NMTYPE_ETHERNET("ethernet"), + NMTYPE_WIFI("wifi"), + NMTYPE_UNKNOWN(""); - NMType(String id) { - identifier = id; - } - - private final String identifier; - - public static NMType typeForString(String s) { - for (var t : NMType.values()) { - if (t.identifier.equals(s)) { - return t; - } - } - return NMTYPE_UNKNOWN; - } + NMType(String id) { + identifier = id; } - public static class NMDeviceInfo { - public NMDeviceInfo(String c, String d, String type) { - connName = c; - devName = d; - nmType = NMType.typeForString(type); - } + private final String identifier; - public final String connName; // Human-readable name used by "nmcli con" - public final String devName; // underlying device, used by dhclient - public final NMType nmType; - - @Override - public String toString() { - return "NMDeviceInfo [connName=" - + connName - + ", devName=" - + devName - + ", nmType=" - + nmType - + "]"; + public static NMType typeForString(String s) { + for (var t : NMType.values()) { + if (t.identifier.equals(s)) { + return t; } + } + return NMTYPE_UNKNOWN; } + } - private static List allInterfaces = new ArrayList<>(); - private static long lastReadTimestamp = 0; - - public static List getAllInterfaces() { - long now = System.currentTimeMillis(); - if (now - lastReadTimestamp < 5000) return allInterfaces; - else lastReadTimestamp = now; - - var ret = new ArrayList(); + public static class NMDeviceInfo { + public NMDeviceInfo(String c, String d, String type) { + connName = c; + devName = d; + nmType = NMType.typeForString(type); + } - if (!Platform.isLinux()) { - // Can only determine interface name on Linux, give up - return ret; - } + public final String connName; // Human-readable name used by "nmcli con" + public final String devName; // underlying device, used by dhclient + public final NMType nmType; + + @Override + public String toString() { + return "NMDeviceInfo 
[connName=" + + connName + + ", devName=" + + devName + + ", nmType=" + + nmType + + "]"; + } + } - try { - var shell = new ShellExec(true, false); - shell.executeBashCommand( - "nmcli -t -f GENERAL.CONNECTION,GENERAL.DEVICE,GENERAL.TYPE device show"); - String out = shell.getOutput(); - if (out == null) { - return new ArrayList<>(); - } - Pattern pattern = - Pattern.compile("GENERAL.CONNECTION:(.*)\nGENERAL.DEVICE:(.*)\nGENERAL.TYPE:(.*)"); - Matcher matcher = pattern.matcher(out); - while (matcher.find()) { - ret.add(new NMDeviceInfo(matcher.group(1), matcher.group(2), matcher.group(3))); - } - } catch (IOException e) { - logger.error("Could not get active NM ifaces!", e); - } + private static List allInterfaces = new ArrayList<>(); + private static long lastReadTimestamp = 0; - logger.debug("Found network interfaces:\n" + ret); + public static List getAllInterfaces() { + long now = System.currentTimeMillis(); + if (now - lastReadTimestamp < 5000) return allInterfaces; + else lastReadTimestamp = now; - allInterfaces = ret; - return ret; - } + var ret = new ArrayList(); - public static List getAllActiveInterfaces() { - // Seems like if an interface exists but isn't actually connected, the connection name will be - // an empty string. 
Check here and only return connections with non-empty names - return getAllInterfaces().stream() - .filter(it -> !it.connName.trim().isEmpty()) - .collect(Collectors.toList()); + if (!Platform.isLinux()) { + // Can only determine interface name on Linux, give up + return ret; } - public static List getAllWiredInterfaces() { - return getAllActiveInterfaces().stream() - .filter(it -> it.nmType == NMType.NMTYPE_ETHERNET) - .collect(Collectors.toList()); + try { + var shell = new ShellExec(true, false); + shell.executeBashCommand( + "nmcli -t -f GENERAL.CONNECTION,GENERAL.DEVICE,GENERAL.TYPE device show"); + String out = shell.getOutput(); + if (out == null) { + return new ArrayList<>(); + } + Pattern pattern = + Pattern.compile("GENERAL.CONNECTION:(.*)\nGENERAL.DEVICE:(.*)\nGENERAL.TYPE:(.*)"); + Matcher matcher = pattern.matcher(out); + while (matcher.find()) { + ret.add(new NMDeviceInfo(matcher.group(1), matcher.group(2), matcher.group(3))); + } + } catch (IOException e) { + logger.error("Could not get active NM ifaces!", e); } - public static NMDeviceInfo getNMinfoForConnName(String connName) { - for (NMDeviceInfo info : getAllActiveInterfaces()) { - if (info.connName.equals(connName)) { - return info; - } - } - return null; + logger.debug("Found network interfaces:\n" + ret); + + allInterfaces = ret; + return ret; + } + + public static List getAllActiveInterfaces() { + // Seems like if an interface exists but isn't actually connected, the connection name will be + // an empty string. 
Check here and only return connections with non-empty names + return getAllInterfaces().stream() + .filter(it -> !it.connName.trim().isEmpty()) + .collect(Collectors.toList()); + } + + public static List getAllWiredInterfaces() { + return getAllActiveInterfaces().stream() + .filter(it -> it.nmType == NMType.NMTYPE_ETHERNET) + .collect(Collectors.toList()); + } + + public static NMDeviceInfo getNMinfoForConnName(String connName) { + for (NMDeviceInfo info : getAllActiveInterfaces()) { + if (info.connName.equals(connName)) { + return info; + } } + return null; + } } diff --git a/photon-core/src/main/java/org/photonvision/common/networking/RoborioFinder.java b/photon-core/src/main/java/org/photonvision/common/networking/RoborioFinder.java index 605480e296..7e068c5a7c 100644 --- a/photon-core/src/main/java/org/photonvision/common/networking/RoborioFinder.java +++ b/photon-core/src/main/java/org/photonvision/common/networking/RoborioFinder.java @@ -31,88 +31,88 @@ import org.photonvision.common.logging.Logger; public class RoborioFinder { - private static RoborioFinder instance; - private static final Logger logger = new Logger(RoborioFinder.class, LogGroup.General); + private static RoborioFinder instance; + private static final Logger logger = new Logger(RoborioFinder.class, LogGroup.General); - public static RoborioFinder getInstance() { - if (instance == null) instance = new RoborioFinder(); - return instance; - } + public static RoborioFinder getInstance() { + if (instance == null) instance = new RoborioFinder(); + return instance; + } - public void findRios() { - HashMap map = new HashMap<>(); - var subMap = new HashMap(); - // Separate from the above so we don't hold stuff up - System.setProperty("java.net.preferIPv4Stack", "true"); - subMap.put( - "deviceIps", - Arrays.stream(CameraServerJNI.getNetworkInterfaces()) - .filter(it -> !it.equals("0.0.0.0")) - .toArray()); - logger.info("Searching for rios"); - List possibleRioList = new ArrayList<>(); - for (var ip 
: CameraServerJNI.getNetworkInterfaces()) { - logger.info("Trying " + ip); - var possibleRioAddr = getPossibleRioAddress(ip); - if (possibleRioAddr != null) { - logger.info("Maybe found " + ip); - searchForHost(possibleRioList, possibleRioAddr); - } else { - logger.info("Didn't match RIO IP"); - } - } + public void findRios() { + HashMap map = new HashMap<>(); + var subMap = new HashMap(); + // Separate from the above so we don't hold stuff up + System.setProperty("java.net.preferIPv4Stack", "true"); + subMap.put( + "deviceIps", + Arrays.stream(CameraServerJNI.getNetworkInterfaces()) + .filter(it -> !it.equals("0.0.0.0")) + .toArray()); + logger.info("Searching for rios"); + List possibleRioList = new ArrayList<>(); + for (var ip : CameraServerJNI.getNetworkInterfaces()) { + logger.info("Trying " + ip); + var possibleRioAddr = getPossibleRioAddress(ip); + if (possibleRioAddr != null) { + logger.info("Maybe found " + ip); + searchForHost(possibleRioList, possibleRioAddr); + } else { + logger.info("Didn't match RIO IP"); + } + } - // String name = - // "roboRIO-" - // + - // ConfigManager.getInstance().getConfig().getNetworkConfig().teamNumber - // + "-FRC.local"; - // searchForHost(possibleRioList, name); - // name = - // "roboRIO-" - // + - // ConfigManager.getInstance().getConfig().getNetworkConfig().teamNumber - // + "-FRC.lan"; - // searchForHost(possibleRioList, name); - // name = - // "roboRIO-" - // + - // ConfigManager.getInstance().getConfig().getNetworkConfig().teamNumber - // + "-FRC.frc-field.local"; - // searchForHost(possibleRioList, name); - // subMap.put("possibleRios", possibleRioList.toArray()); + // String name = + // "roboRIO-" + // + + // ConfigManager.getInstance().getConfig().getNetworkConfig().teamNumber + // + "-FRC.local"; + // searchForHost(possibleRioList, name); + // name = + // "roboRIO-" + // + + // ConfigManager.getInstance().getConfig().getNetworkConfig().teamNumber + // + "-FRC.lan"; + // searchForHost(possibleRioList, name); + // 
name = + // "roboRIO-" + // + + // ConfigManager.getInstance().getConfig().getNetworkConfig().teamNumber + // + "-FRC.frc-field.local"; + // searchForHost(possibleRioList, name); + // subMap.put("possibleRios", possibleRioList.toArray()); - subMap.put("possibleRios", possibleRioList.toArray()); - map.put("networkInfo", subMap); - DataChangeService.getInstance().publishEvent(new OutgoingUIEvent<>("deviceIpInfo", map)); - } + subMap.put("possibleRios", possibleRioList.toArray()); + map.put("networkInfo", subMap); + DataChangeService.getInstance().publishEvent(new OutgoingUIEvent<>("deviceIpInfo", map)); + } - String getPossibleRioAddress(String ip) { - try { - InetAddress addr = InetAddress.getByName(ip); - var address = addr.getAddress(); - if (address[0] != (byte) (10 & 0xff)) return null; - address[3] = (byte) (2 & 0xff); - return InetAddress.getByAddress(address).getHostAddress(); - } catch (UnknownHostException e) { - e.printStackTrace(); - } - return null; + String getPossibleRioAddress(String ip) { + try { + InetAddress addr = InetAddress.getByName(ip); + var address = addr.getAddress(); + if (address[0] != (byte) (10 & 0xff)) return null; + address[3] = (byte) (2 & 0xff); + return InetAddress.getByAddress(address).getHostAddress(); + } catch (UnknownHostException e) { + e.printStackTrace(); } + return null; + } - void searchForHost(List list, String hostname) { - try { - logger.info("Looking up " + hostname); - InetAddress testAddr = InetAddress.getByName(hostname); - logger.info("Pinging " + hostname); - var canContact = testAddr.isReachable(500); - if (canContact) { - logger.info("Was able to connect to " + hostname); - if (!list.contains(hostname)) list.add(hostname); - } else { - logger.info("Unable to reach " + hostname); - } - } catch (IOException ignored) { - } + void searchForHost(List list, String hostname) { + try { + logger.info("Looking up " + hostname); + InetAddress testAddr = InetAddress.getByName(hostname); + logger.info("Pinging " + 
hostname); + var canContact = testAddr.isReachable(500); + if (canContact) { + logger.info("Was able to connect to " + hostname); + if (!list.contains(hostname)) list.add(hostname); + } else { + logger.info("Unable to reach " + hostname); + } + } catch (IOException ignored) { } + } } diff --git a/photon-core/src/main/java/org/photonvision/common/scripting/ScriptCommandType.java b/photon-core/src/main/java/org/photonvision/common/scripting/ScriptCommandType.java index debc05c1b8..8c005e62ef 100644 --- a/photon-core/src/main/java/org/photonvision/common/scripting/ScriptCommandType.java +++ b/photon-core/src/main/java/org/photonvision/common/scripting/ScriptCommandType.java @@ -18,14 +18,14 @@ package org.photonvision.common.scripting; public enum ScriptCommandType { - kDefault(""), - kBashScript("bash"), - kPythonScript("python"), - kPython3Script("python3"); + kDefault(""), + kBashScript("bash"), + kPythonScript("python"), + kPython3Script("python3"); - public final String value; + public final String value; - ScriptCommandType(String value) { - this.value = value; - } + ScriptCommandType(String value) { + this.value = value; + } } diff --git a/photon-core/src/main/java/org/photonvision/common/scripting/ScriptConfig.java b/photon-core/src/main/java/org/photonvision/common/scripting/ScriptConfig.java index 684a1a882c..e83ac12143 100644 --- a/photon-core/src/main/java/org/photonvision/common/scripting/ScriptConfig.java +++ b/photon-core/src/main/java/org/photonvision/common/scripting/ScriptConfig.java @@ -21,19 +21,19 @@ import com.fasterxml.jackson.annotation.JsonProperty; public class ScriptConfig { - public final ScriptEventType eventType; - public final String command; + public final ScriptEventType eventType; + public final String command; - public ScriptConfig(ScriptEventType eventType) { - this.eventType = eventType; - this.command = ""; - } + public ScriptConfig(ScriptEventType eventType) { + this.eventType = eventType; + this.command = ""; + } - @JsonCreator 
- public ScriptConfig( - @JsonProperty("eventType") ScriptEventType eventType, - @JsonProperty("command") String command) { - this.eventType = eventType; - this.command = command; - } + @JsonCreator + public ScriptConfig( + @JsonProperty("eventType") ScriptEventType eventType, + @JsonProperty("command") String command) { + this.eventType = eventType; + this.command = command; + } } diff --git a/photon-core/src/main/java/org/photonvision/common/scripting/ScriptEvent.java b/photon-core/src/main/java/org/photonvision/common/scripting/ScriptEvent.java index a60dd566a4..0d998f1e9f 100644 --- a/photon-core/src/main/java/org/photonvision/common/scripting/ScriptEvent.java +++ b/photon-core/src/main/java/org/photonvision/common/scripting/ScriptEvent.java @@ -23,32 +23,32 @@ import org.photonvision.common.util.ShellExec; public class ScriptEvent { - private static final ShellExec executor = new ShellExec(true, true); + private static final ShellExec executor = new ShellExec(true, true); - public final ScriptConfig config; - private final Logger logger = new Logger(ScriptEvent.class, LogGroup.General); + public final ScriptConfig config; + private final Logger logger = new Logger(ScriptEvent.class, LogGroup.General); - public ScriptEvent(ScriptConfig config) { - this.config = config; - } + public ScriptEvent(ScriptConfig config) { + this.config = config; + } + + public int run() throws IOException { + int retVal = executor.executeBashCommand(config.command); + + String output = executor.getOutput(); + String error = executor.getError(); - public int run() throws IOException { - int retVal = executor.executeBashCommand(config.command); - - String output = executor.getOutput(); - String error = executor.getError(); - - if (!error.isEmpty()) { - logger.error("Error when running \"" + config.eventType.name() + "\" script: " + error); - } else if (!output.isEmpty()) { - logger.info( - String.format("Output from \"%s\" script: %s\n", config.eventType.name(), output)); - } - 
logger.info( - String.format( - "Script for %s ran with command line: \"%s\", exit code: %d, output: %s, " - + "error: %s\n", - config.eventType.name(), config.command, retVal, output, error)); - return retVal; + if (!error.isEmpty()) { + logger.error("Error when running \"" + config.eventType.name() + "\" script: " + error); + } else if (!output.isEmpty()) { + logger.info( + String.format("Output from \"%s\" script: %s\n", config.eventType.name(), output)); } + logger.info( + String.format( + "Script for %s ran with command line: \"%s\", exit code: %d, output: %s, " + + "error: %s\n", + config.eventType.name(), config.command, retVal, output, error)); + return retVal; + } } diff --git a/photon-core/src/main/java/org/photonvision/common/scripting/ScriptEventType.java b/photon-core/src/main/java/org/photonvision/common/scripting/ScriptEventType.java index 3fe98bf447..1bbcf2008b 100644 --- a/photon-core/src/main/java/org/photonvision/common/scripting/ScriptEventType.java +++ b/photon-core/src/main/java/org/photonvision/common/scripting/ScriptEventType.java @@ -18,21 +18,21 @@ package org.photonvision.common.scripting; public enum ScriptEventType { - kProgramInit("Program Init"), - kProgramExit("Program Exit"), - kNTConnected("NT Connected"), - kLEDOn("LED On"), - kLEDOff("LED Off"), - kEnterDriverMode("Enter Driver Mode"), - kExitDriverMode("Exit Driver Mode"), - kFoundTarget("Found Target"), - kFoundMultipleTarget("Found Multiple Target"), - kLostTarget("Lost Target"), - kPipelineLag("Pipeline Lag"); + kProgramInit("Program Init"), + kProgramExit("Program Exit"), + kNTConnected("NT Connected"), + kLEDOn("LED On"), + kLEDOff("LED Off"), + kEnterDriverMode("Enter Driver Mode"), + kExitDriverMode("Exit Driver Mode"), + kFoundTarget("Found Target"), + kFoundMultipleTarget("Found Multiple Target"), + kLostTarget("Lost Target"), + kPipelineLag("Pipeline Lag"); - public final String value; + public final String value; - ScriptEventType(String value) { - this.value = value; 
- } + ScriptEventType(String value) { + this.value = value; + } } diff --git a/photon-core/src/main/java/org/photonvision/common/scripting/ScriptManager.java b/photon-core/src/main/java/org/photonvision/common/scripting/ScriptManager.java index a47684cc5c..b9c8e04033 100644 --- a/photon-core/src/main/java/org/photonvision/common/scripting/ScriptManager.java +++ b/photon-core/src/main/java/org/photonvision/common/scripting/ScriptManager.java @@ -31,110 +31,110 @@ import org.photonvision.common.util.file.JacksonUtils; public class ScriptManager { - private static final Logger logger = new Logger(ScriptManager.class, LogGroup.General); + private static final Logger logger = new Logger(ScriptManager.class, LogGroup.General); - private ScriptManager() {} + private ScriptManager() {} - private static final List events = new ArrayList<>(); - private static final LinkedBlockingDeque queuedEvents = - new LinkedBlockingDeque<>(25); + private static final List events = new ArrayList<>(); + private static final LinkedBlockingDeque queuedEvents = + new LinkedBlockingDeque<>(25); - public static void initialize() { - ScriptConfigManager.initialize(); - if (ScriptConfigManager.fileExists()) { - for (ScriptConfig scriptConfig : ScriptConfigManager.loadConfig()) { - ScriptEvent scriptEvent = new ScriptEvent(scriptConfig); - events.add(scriptEvent); - } + public static void initialize() { + ScriptConfigManager.initialize(); + if (ScriptConfigManager.fileExists()) { + for (ScriptConfig scriptConfig : ScriptConfigManager.loadConfig()) { + ScriptEvent scriptEvent = new ScriptEvent(scriptConfig); + events.add(scriptEvent); + } - TimedTaskManager.getInstance().addTask("ScriptRunner", new ScriptRunner(), 10); + TimedTaskManager.getInstance().addTask("ScriptRunner", new ScriptRunner(), 10); - } else { - logger.error("Something went wrong initializing scripts! Events will not run."); - } + } else { + logger.error("Something went wrong initializing scripts! 
Events will not run."); + } + } + + private static class ScriptRunner implements Runnable { + @Override + public void run() { + try { + handleEvent(queuedEvents.takeFirst()); + } catch (InterruptedException e) { + logger.error("ScriptRunner queue interrupted!", e); + } } - private static class ScriptRunner implements Runnable { - @Override - public void run() { - try { - handleEvent(queuedEvents.takeFirst()); - } catch (InterruptedException e) { - logger.error("ScriptRunner queue interrupted!", e); - } - } - - private void handleEvent(ScriptEventType eventType) { - var toRun = - events.parallelStream() - .filter(e -> e.config.eventType == eventType) - .findFirst() - .orElse(null); - if (toRun != null) { - try { - toRun.run(); - } catch (IOException e) { - logger.error("Failed to run script for event \"" + eventType.name() + "\"", e); - } - } + private void handleEvent(ScriptEventType eventType) { + var toRun = + events.parallelStream() + .filter(e -> e.config.eventType == eventType) + .findFirst() + .orElse(null); + if (toRun != null) { + try { + toRun.run(); + } catch (IOException e) { + logger.error("Failed to run script for event \"" + eventType.name() + "\"", e); } + } } + } - protected static class ScriptConfigManager { - // protected static final Path scriptConfigPath = - // Paths.get(ConfigManager.SettingsPath.toString(), "scripts.json"); - static final Path scriptConfigPath = Paths.get(""); // TODO: Waiting on config + protected static class ScriptConfigManager { + // protected static final Path scriptConfigPath = + // Paths.get(ConfigManager.SettingsPath.toString(), "scripts.json"); + static final Path scriptConfigPath = Paths.get(""); // TODO: Waiting on config - private ScriptConfigManager() {} + private ScriptConfigManager() {} - static boolean fileExists() { - return Files.exists(scriptConfigPath); - } + static boolean fileExists() { + return Files.exists(scriptConfigPath); + } - public static void initialize() { - if (!fileExists()) { - List 
eventsConfig = new ArrayList<>(); - for (var eventType : ScriptEventType.values()) { - eventsConfig.add(new ScriptConfig(eventType)); - } - - try { - JacksonUtils.serialize(scriptConfigPath, eventsConfig.toArray(new ScriptConfig[0]), true); - } catch (IOException e) { - logger.error("Failed to initialize!", e); - } - } + public static void initialize() { + if (!fileExists()) { + List eventsConfig = new ArrayList<>(); + for (var eventType : ScriptEventType.values()) { + eventsConfig.add(new ScriptConfig(eventType)); } - static List loadConfig() { - try { - var raw = JacksonUtils.deserialize(scriptConfigPath, ScriptConfig[].class); - if (raw != null) { - return List.of(raw); - } - } catch (IOException e) { - logger.error("Failed to load scripting config!", e); - } - return new ArrayList<>(); + try { + JacksonUtils.serialize(scriptConfigPath, eventsConfig.toArray(new ScriptConfig[0]), true); + } catch (IOException e) { + logger.error("Failed to initialize!", e); } + } + } - protected static void deleteConfig() { - try { - Files.delete(scriptConfigPath); - } catch (IOException e) { - // - } + static List loadConfig() { + try { + var raw = JacksonUtils.deserialize(scriptConfigPath, ScriptConfig[].class); + if (raw != null) { + return List.of(raw); } + } catch (IOException e) { + logger.error("Failed to load scripting config!", e); + } + return new ArrayList<>(); } - public static void queueEvent(ScriptEventType eventType) { - if (Platform.isLinux()) { - try { - queuedEvents.putLast(eventType); - logger.info("Queued event: " + eventType.name()); - } catch (InterruptedException e) { - logger.error("Failed to add event to queue: " + eventType.name(), e); - } - } + protected static void deleteConfig() { + try { + Files.delete(scriptConfigPath); + } catch (IOException e) { + // + } + } + } + + public static void queueEvent(ScriptEventType eventType) { + if (Platform.isLinux()) { + try { + queuedEvents.putLast(eventType); + logger.info("Queued event: " + eventType.name()); + 
} catch (InterruptedException e) { + logger.error("Failed to add event to queue: " + eventType.name(), e); + } } + } } diff --git a/photon-core/src/main/java/org/photonvision/common/util/ColorHelper.java b/photon-core/src/main/java/org/photonvision/common/util/ColorHelper.java index 1ce4c55f18..f4b7b58627 100644 --- a/photon-core/src/main/java/org/photonvision/common/util/ColorHelper.java +++ b/photon-core/src/main/java/org/photonvision/common/util/ColorHelper.java @@ -21,7 +21,7 @@ import org.opencv.core.Scalar; public class ColorHelper { - public static Scalar colorToScalar(Color color) { - return new Scalar(color.getBlue(), color.getGreen(), color.getRed()); - } + public static Scalar colorToScalar(Color color) { + return new Scalar(color.getBlue(), color.getGreen(), color.getRed()); + } } diff --git a/photon-core/src/main/java/org/photonvision/common/util/MemoryManager.java b/photon-core/src/main/java/org/photonvision/common/util/MemoryManager.java index d84536e0a4..7bf426320b 100644 --- a/photon-core/src/main/java/org/photonvision/common/util/MemoryManager.java +++ b/photon-core/src/main/java/org/photonvision/common/util/MemoryManager.java @@ -18,67 +18,67 @@ package org.photonvision.common.util; public class MemoryManager { - private static final long MEGABYTE_FACTOR = 1024L * 1024L; + private static final long MEGABYTE_FACTOR = 1024L * 1024L; - private int collectionThreshold; - private long collectionPeriodMillis = -1; + private int collectionThreshold; + private long collectionPeriodMillis = -1; - private double lastUsedMb = 0; - private long lastCollectionMillis = 0; + private double lastUsedMb = 0; + private long lastCollectionMillis = 0; - public MemoryManager(int collectionThreshold) { - this.collectionThreshold = collectionThreshold; - } + public MemoryManager(int collectionThreshold) { + this.collectionThreshold = collectionThreshold; + } - public MemoryManager(int collectionThreshold, long collectionPeriodMillis) { - this.collectionThreshold = 
collectionThreshold; - this.collectionPeriodMillis = collectionPeriodMillis; - } + public MemoryManager(int collectionThreshold, long collectionPeriodMillis) { + this.collectionThreshold = collectionThreshold; + this.collectionPeriodMillis = collectionPeriodMillis; + } - public void setCollectionThreshold(int collectionThreshold) { - this.collectionThreshold = collectionThreshold; - } + public void setCollectionThreshold(int collectionThreshold) { + this.collectionThreshold = collectionThreshold; + } - public void setCollectionPeriodMillis(long collectionPeriodMillis) { - this.collectionPeriodMillis = collectionPeriodMillis; - } + public void setCollectionPeriodMillis(long collectionPeriodMillis) { + this.collectionPeriodMillis = collectionPeriodMillis; + } - private static long getUsedMemory() { - return Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory(); - } + private static long getUsedMemory() { + return Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory(); + } - private static double getUsedMemoryMB() { - return ((double) getUsedMemory() / MEGABYTE_FACTOR); - } + private static double getUsedMemoryMB() { + return ((double) getUsedMemory() / MEGABYTE_FACTOR); + } - private void collect() { - System.gc(); - System.runFinalization(); - } + private void collect() { + System.gc(); + System.runFinalization(); + } + + public void run() { + run(false); + } + + public void run(boolean print) { + var usedMem = getUsedMemoryMB(); - public void run() { - run(false); + if (usedMem != lastUsedMb) { + lastUsedMb = usedMem; + if (print) System.out.printf("Memory usage: %.2fMB\n", usedMem); } - public void run(boolean print) { - var usedMem = getUsedMemoryMB(); - - if (usedMem != lastUsedMb) { - lastUsedMb = usedMem; - if (print) System.out.printf("Memory usage: %.2fMB\n", usedMem); - } - - boolean collectionThresholdPassed = usedMem >= collectionThreshold; - boolean collectionPeriodPassed = - collectionPeriodMillis != -1 - && 
(System.currentTimeMillis() - lastCollectionMillis >= collectionPeriodMillis); - - if (collectionThresholdPassed || collectionPeriodPassed) { - collect(); - lastCollectionMillis = System.currentTimeMillis(); - if (print) { - System.out.printf("Garbage collected at %.2fMB\n", usedMem); - } - } + boolean collectionThresholdPassed = usedMem >= collectionThreshold; + boolean collectionPeriodPassed = + collectionPeriodMillis != -1 + && (System.currentTimeMillis() - lastCollectionMillis >= collectionPeriodMillis); + + if (collectionThresholdPassed || collectionPeriodPassed) { + collect(); + lastCollectionMillis = System.currentTimeMillis(); + if (print) { + System.out.printf("Garbage collected at %.2fMB\n", usedMem); + } } + } } diff --git a/photon-core/src/main/java/org/photonvision/common/util/NativeLibHelper.java b/photon-core/src/main/java/org/photonvision/common/util/NativeLibHelper.java index a8d7dd5ef0..fec7523936 100644 --- a/photon-core/src/main/java/org/photonvision/common/util/NativeLibHelper.java +++ b/photon-core/src/main/java/org/photonvision/common/util/NativeLibHelper.java @@ -21,20 +21,20 @@ import java.nio.file.Paths; public class NativeLibHelper { - private static NativeLibHelper INSTANCE; + private static NativeLibHelper INSTANCE; - public static NativeLibHelper getInstance() { - if (INSTANCE == null) { - INSTANCE = new NativeLibHelper(); - } - - return INSTANCE; + public static NativeLibHelper getInstance() { + if (INSTANCE == null) { + INSTANCE = new NativeLibHelper(); } - public final Path NativeLibPath; + return INSTANCE; + } - private NativeLibHelper() { - String home = System.getProperty("user.home"); - NativeLibPath = Paths.get(home, ".pvlibs", "nativecache"); - } + public final Path NativeLibPath; + + private NativeLibHelper() { + String home = System.getProperty("user.home"); + NativeLibPath = Paths.get(home, ".pvlibs", "nativecache"); + } } diff --git a/photon-core/src/main/java/org/photonvision/common/util/ReflectionUtils.java 
b/photon-core/src/main/java/org/photonvision/common/util/ReflectionUtils.java index cda97c3f8f..3289d45ae6 100644 --- a/photon-core/src/main/java/org/photonvision/common/util/ReflectionUtils.java +++ b/photon-core/src/main/java/org/photonvision/common/util/ReflectionUtils.java @@ -18,41 +18,41 @@ package org.photonvision.common.util; public class ReflectionUtils { - public static StackTraceElement[] getFullStackTrace() { - return Thread.currentThread().getStackTrace(); - } + public static StackTraceElement[] getFullStackTrace() { + return Thread.currentThread().getStackTrace(); + } - public static StackTraceElement getNthCaller(int n) { - if (n < 0) n = 0; - return Thread.currentThread().getStackTrace()[n]; - } + public static StackTraceElement getNthCaller(int n) { + if (n < 0) n = 0; + return Thread.currentThread().getStackTrace()[n]; + } - public static String getCallerClassName() { - StackTraceElement[] stElements = Thread.currentThread().getStackTrace(); - for (int i = 1; i < stElements.length; i++) { - StackTraceElement ste = stElements[i]; - if (!ste.getClassName().equals(ReflectionUtils.class.getName()) - && ste.getClassName().indexOf("java.lang.Thread") != 0) { - return ste.getClassName(); - } - } - return null; + public static String getCallerClassName() { + StackTraceElement[] stElements = Thread.currentThread().getStackTrace(); + for (int i = 1; i < stElements.length; i++) { + StackTraceElement ste = stElements[i]; + if (!ste.getClassName().equals(ReflectionUtils.class.getName()) + && ste.getClassName().indexOf("java.lang.Thread") != 0) { + return ste.getClassName(); + } } + return null; + } - public static String getCallerCallerClassName() { - StackTraceElement[] stElements = Thread.currentThread().getStackTrace(); - String callerClassName = null; - for (int i = 1; i < stElements.length; i++) { - StackTraceElement ste = stElements[i]; - if (!ste.getClassName().equals(ReflectionUtils.class.getName()) - && ste.getClassName().indexOf("java.lang.Thread") 
!= 0) { - if (callerClassName == null) { - callerClassName = ste.getClassName(); - } else if (!callerClassName.equals(ste.getClassName())) { - return ste.getClassName(); - } - } + public static String getCallerCallerClassName() { + StackTraceElement[] stElements = Thread.currentThread().getStackTrace(); + String callerClassName = null; + for (int i = 1; i < stElements.length; i++) { + StackTraceElement ste = stElements[i]; + if (!ste.getClassName().equals(ReflectionUtils.class.getName()) + && ste.getClassName().indexOf("java.lang.Thread") != 0) { + if (callerClassName == null) { + callerClassName = ste.getClassName(); + } else if (!callerClassName.equals(ste.getClassName())) { + return ste.getClassName(); } - return null; + } } + return null; + } } diff --git a/photon-core/src/main/java/org/photonvision/common/util/SerializationUtils.java b/photon-core/src/main/java/org/photonvision/common/util/SerializationUtils.java index 6fc3ea9c30..783b7b62e7 100644 --- a/photon-core/src/main/java/org/photonvision/common/util/SerializationUtils.java +++ b/photon-core/src/main/java/org/photonvision/common/util/SerializationUtils.java @@ -23,39 +23,39 @@ import org.photonvision.common.logging.Logger; public final class SerializationUtils { - private static final Logger logger = new Logger(SerializationUtils.class, LogGroup.General); + private static final Logger logger = new Logger(SerializationUtils.class, LogGroup.General); - public static HashMap objectToHashMap(Object src) { - var ret = new HashMap(); - for (var field : src.getClass().getFields()) { - try { - field.setAccessible(true); - if (!field - .getType() - .isEnum()) { // if the field is not an enum, get it based on the current pipeline - ret.put(field.getName(), field.get(src)); - } else { - var ordinal = (Enum) field.get(src); - ret.put(field.getName(), ordinal.ordinal()); - } - } catch (IllegalArgumentException | IllegalAccessException e) { - logger.error("Could not serialize " + src.getClass().getSimpleName(), e); 
- } + public static HashMap objectToHashMap(Object src) { + var ret = new HashMap(); + for (var field : src.getClass().getFields()) { + try { + field.setAccessible(true); + if (!field + .getType() + .isEnum()) { // if the field is not an enum, get it based on the current pipeline + ret.put(field.getName(), field.get(src)); + } else { + var ordinal = (Enum) field.get(src); + ret.put(field.getName(), ordinal.ordinal()); } - return ret; + } catch (IllegalArgumentException | IllegalAccessException e) { + logger.error("Could not serialize " + src.getClass().getSimpleName(), e); + } } + return ret; + } - public static HashMap transformToHashMap(Transform3d transform) { - var ret = new HashMap(); - ret.put("x", transform.getTranslation().getX()); - ret.put("y", transform.getTranslation().getY()); - ret.put("z", transform.getTranslation().getZ()); - ret.put("qw", transform.getRotation().getQuaternion().getW()); - ret.put("qx", transform.getRotation().getQuaternion().getX()); - ret.put("qy", transform.getRotation().getQuaternion().getY()); - ret.put("qz", transform.getRotation().getQuaternion().getZ()); + public static HashMap transformToHashMap(Transform3d transform) { + var ret = new HashMap(); + ret.put("x", transform.getTranslation().getX()); + ret.put("y", transform.getTranslation().getY()); + ret.put("z", transform.getTranslation().getZ()); + ret.put("qw", transform.getRotation().getQuaternion().getW()); + ret.put("qx", transform.getRotation().getQuaternion().getX()); + ret.put("qy", transform.getRotation().getQuaternion().getY()); + ret.put("qz", transform.getRotation().getQuaternion().getZ()); - ret.put("angle_z", transform.getRotation().getZ()); - return ret; - } + ret.put("angle_z", transform.getRotation().getZ()); + return ret; + } } diff --git a/photon-core/src/main/java/org/photonvision/common/util/ShellExec.java b/photon-core/src/main/java/org/photonvision/common/util/ShellExec.java index c8433ec158..b6e83d18fb 100644 --- 
a/photon-core/src/main/java/org/photonvision/common/util/ShellExec.java +++ b/photon-core/src/main/java/org/photonvision/common/util/ShellExec.java @@ -24,185 +24,185 @@ /** Execute external process and optionally read output buffer. */ @SuppressWarnings({"unused"}) public class ShellExec { - private static final Logger logger = new Logger(ShellExec.class, LogGroup.General); - - private int exitCode; - private final boolean readOutput; - private final boolean readError; - private StreamGobbler errorGobbler, outputGobbler; - - public ShellExec() { - this(false, false); + private static final Logger logger = new Logger(ShellExec.class, LogGroup.General); + + private int exitCode; + private final boolean readOutput; + private final boolean readError; + private StreamGobbler errorGobbler, outputGobbler; + + public ShellExec() { + this(false, false); + } + + public ShellExec(boolean readOutput, boolean readError) { + this.readOutput = readOutput; + this.readError = readError; + } + + public int executeBashCommand(String command) throws IOException { + return executeBashCommand(command, true); + } + + /** + * Execute a bash command. We can handle complex bash commands including multiple executions (; | + * and ||), quotes, expansions ($), escapes (\), e.g.: "cd /abc/def; mv ghi 'older ghi '$(whoami)" + * + * @param command Bash command to execute + * @return true if bash got started, but your command may have failed. + */ + public int executeBashCommand(String command, boolean wait) throws IOException { + logger.debug("Executing \"" + command + "\""); + + boolean success = false; + Runtime r = Runtime.getRuntime(); + // Use bash -c, so we can handle things like multi commands separated by ; and + // things like quotes, $, |, and \. My tests show that command comes as + // one argument to bash, so we do not need to quote it to make it one thing. 
+ // Also, exec may object if it does not have an executable file as the first thing, + // so having bash here makes it happy provided bash is installed and in path. + String[] commands = {"bash", "-c", command}; + + Process process = r.exec(commands); + + // Consume streams, older jvm's had a memory leak if streams were not read, + // some other jvm+OS combinations may block unless streams are consumed. + int retcode = doProcess(wait, process); + logger.debug("Got exit code " + retcode); + return retcode; + } + + /** + * Execute a command in current folder, and wait for process to end + * + * @param command command ("c:/some/folder/script.bat" or "some/folder/script.sh") + * @param args 0..n command line arguments + * @return process exit code + */ + public int execute(String command, String... args) throws IOException { + return execute(command, null, true, args); + } + + /** + * Execute a command. + * + * @param command command ("c:/some/folder/script.bat" or "some/folder/script.sh") + * @param workdir working directory or NULL to use command folder + * @param wait wait for process to end + * @param args 0..n command line arguments + * @return process exit code + */ + public int execute(String command, String workdir, boolean wait, String... args) + throws IOException { + String[] cmdArr; + if (args != null && args.length > 0) { + cmdArr = new String[1 + args.length]; + cmdArr[0] = command; + System.arraycopy(args, 0, cmdArr, 1, args.length); + } else { + cmdArr = new String[] {command}; } - public ShellExec(boolean readOutput, boolean readError) { - this.readOutput = readOutput; - this.readError = readError; + ProcessBuilder pb = new ProcessBuilder(cmdArr); + File workingDir = (workdir == null ? 
new File(command).getParentFile() : new File(workdir)); + pb.directory(workingDir); + + Process process = pb.start(); + + // Consume streams, older jvm's had a memory leak if streams were not read, + // some other jvm+OS combinations may block unless streams are consumed. + return doProcess(wait, process); + } + + private int doProcess(boolean wait, Process process) { + errorGobbler = new StreamGobbler(process.getErrorStream(), readError); + outputGobbler = new StreamGobbler(process.getInputStream(), readOutput); + errorGobbler.start(); + outputGobbler.start(); + + exitCode = 0; + if (wait) { + try { + process.waitFor(); + exitCode = process.exitValue(); + } catch (InterruptedException ignored) { + } } - - public int executeBashCommand(String command) throws IOException { - return executeBashCommand(command, true); + return exitCode; + } + + public int getExitCode() { + return exitCode; + } + + public boolean isOutputCompleted() { + return (outputGobbler != null && outputGobbler.isCompleted()); + } + + public boolean isErrorCompleted() { + return (errorGobbler != null && errorGobbler.isCompleted()); + } + + public String getOutput() { + return (outputGobbler != null ? outputGobbler.getOutput() : null); + } + + public String getError() { + return (errorGobbler != null ? errorGobbler.getOutput() : null); + } + + // ******************************************** + // ******************************************** + + /** + * StreamGobbler reads inputstream to "gobble" it. This is used by Executor class when running a + * commandline applications. Gobblers must read/purge INSTR and ERRSTR process streams. 
+ * http://www.javaworld.com/javaworld/jw-12-2000/jw-1229-traps.html?page=4 + */ + @SuppressWarnings("WeakerAccess") + private static class StreamGobbler extends Thread { + private final InputStream is; + private final StringBuilder output; + private volatile boolean completed; // mark volatile to guarantee a thread safety + + public StreamGobbler(InputStream is, boolean readStream) { + this.is = is; + this.output = (readStream ? new StringBuilder(256) : null); } - /** - * Execute a bash command. We can handle complex bash commands including multiple executions (; | - * and ||), quotes, expansions ($), escapes (\), e.g.: "cd /abc/def; mv ghi 'older ghi '$(whoami)" - * - * @param command Bash command to execute - * @return true if bash got started, but your command may have failed. - */ - public int executeBashCommand(String command, boolean wait) throws IOException { - logger.debug("Executing \"" + command + "\""); - - boolean success = false; - Runtime r = Runtime.getRuntime(); - // Use bash -c, so we can handle things like multi commands separated by ; and - // things like quotes, $, |, and \. My tests show that command comes as - // one argument to bash, so we do not need to quote it to make it one thing. - // Also, exec may object if it does not have an executable file as the first thing, - // so having bash here makes it happy provided bash is installed and in path. - String[] commands = {"bash", "-c", command}; - - Process process = r.exec(commands); - - // Consume streams, older jvm's had a memory leak if streams were not read, - // some other jvm+OS combinations may block unless streams are consumed. 
- int retcode = doProcess(wait, process); - logger.debug("Got exit code " + retcode); - return retcode; - } + public void run() { + completed = false; + try { + String NL = System.getProperty("line.separator", "\r\n"); - /** - * Execute a command in current folder, and wait for process to end - * - * @param command command ("c:/some/folder/script.bat" or "some/folder/script.sh") - * @param args 0..n command line arguments - * @return process exit code - */ - public int execute(String command, String... args) throws IOException { - return execute(command, null, true, args); + InputStreamReader isr = new InputStreamReader(is); + BufferedReader br = new BufferedReader(isr); + String line; + while ((line = br.readLine()) != null) { + if (output != null) output.append(line).append(NL); + } + } catch (IOException ex) { + // ex.printStackTrace(); + } + completed = true; } /** - * Execute a command. + * Get inputstream buffer or null if stream was not consumed. * - * @param command command ("c:/some/folder/script.bat" or "some/folder/script.sh") - * @param workdir working directory or NULL to use command folder - * @param wait wait for process to end - * @param args 0..n command line arguments - * @return process exit code + * @return Output stream */ - public int execute(String command, String workdir, boolean wait, String... args) - throws IOException { - String[] cmdArr; - if (args != null && args.length > 0) { - cmdArr = new String[1 + args.length]; - cmdArr[0] = command; - System.arraycopy(args, 0, cmdArr, 1, args.length); - } else { - cmdArr = new String[] {command}; - } - - ProcessBuilder pb = new ProcessBuilder(cmdArr); - File workingDir = (workdir == null ? new File(command).getParentFile() : new File(workdir)); - pb.directory(workingDir); - - Process process = pb.start(); - - // Consume streams, older jvm's had a memory leak if streams were not read, - // some other jvm+OS combinations may block unless streams are consumed. 
- return doProcess(wait, process); - } - - private int doProcess(boolean wait, Process process) { - errorGobbler = new StreamGobbler(process.getErrorStream(), readError); - outputGobbler = new StreamGobbler(process.getInputStream(), readOutput); - errorGobbler.start(); - outputGobbler.start(); - - exitCode = 0; - if (wait) { - try { - process.waitFor(); - exitCode = process.exitValue(); - } catch (InterruptedException ignored) { - } - } - return exitCode; - } - - public int getExitCode() { - return exitCode; - } - - public boolean isOutputCompleted() { - return (outputGobbler != null && outputGobbler.isCompleted()); - } - - public boolean isErrorCompleted() { - return (errorGobbler != null && errorGobbler.isCompleted()); - } - public String getOutput() { - return (outputGobbler != null ? outputGobbler.getOutput() : null); - } - - public String getError() { - return (errorGobbler != null ? errorGobbler.getOutput() : null); + return (output != null ? output.toString() : null); } - // ******************************************** - // ******************************************** - /** - * StreamGobbler reads inputstream to "gobble" it. This is used by Executor class when running a - * commandline applications. Gobblers must read/purge INSTR and ERRSTR process streams. - * http://www.javaworld.com/javaworld/jw-12-2000/jw-1229-traps.html?page=4 + * Is input stream completed. + * + * @return if input stream is completed */ - @SuppressWarnings("WeakerAccess") - private static class StreamGobbler extends Thread { - private final InputStream is; - private final StringBuilder output; - private volatile boolean completed; // mark volatile to guarantee a thread safety - - public StreamGobbler(InputStream is, boolean readStream) { - this.is = is; - this.output = (readStream ? 
new StringBuilder(256) : null); - } - - public void run() { - completed = false; - try { - String NL = System.getProperty("line.separator", "\r\n"); - - InputStreamReader isr = new InputStreamReader(is); - BufferedReader br = new BufferedReader(isr); - String line; - while ((line = br.readLine()) != null) { - if (output != null) output.append(line).append(NL); - } - } catch (IOException ex) { - // ex.printStackTrace(); - } - completed = true; - } - - /** - * Get inputstream buffer or null if stream was not consumed. - * - * @return Output stream - */ - public String getOutput() { - return (output != null ? output.toString() : null); - } - - /** - * Is input stream completed. - * - * @return if input stream is completed - */ - public boolean isCompleted() { - return completed; - } + public boolean isCompleted() { + return completed; } + } } diff --git a/photon-core/src/main/java/org/photonvision/common/util/TestUtils.java b/photon-core/src/main/java/org/photonvision/common/util/TestUtils.java index d109f97834..4497350897 100644 --- a/photon-core/src/main/java/org/photonvision/common/util/TestUtils.java +++ b/photon-core/src/main/java/org/photonvision/common/util/TestUtils.java @@ -39,360 +39,360 @@ import org.photonvision.vision.calibration.CameraCalibrationCoefficients; public class TestUtils { - public static boolean loadLibraries() { - JNIWrapper.Helper.setExtractOnStaticLoad(false); - WPIUtilJNI.Helper.setExtractOnStaticLoad(false); - NetworkTablesJNI.Helper.setExtractOnStaticLoad(false); - WPINetJNI.Helper.setExtractOnStaticLoad(false); - CameraServerJNI.Helper.setExtractOnStaticLoad(false); - CameraServerCvJNI.Helper.setExtractOnStaticLoad(false); - AprilTagJNI.Helper.setExtractOnStaticLoad(false); - - try { - var loader = - new RuntimeLoader<>( - Core.NATIVE_LIBRARY_NAME, RuntimeLoader.getDefaultExtractionRoot(), Core.class); - loader.loadLibrary(); - - CombinedRuntimeLoader.loadLibraries( - TestUtils.class, - "wpiutiljni", - "ntcorejni", - "wpinetjni", - 
"wpiHaljni", - "cscorejni", - "cscorejnicvstatic", - "apriltagjni"); - return true; - } catch (IOException e) { - e.printStackTrace(); - return false; - } - } - - @SuppressWarnings("unused") - public enum WPI2019Image { - kCargoAngledDark48in(1.2192), - kCargoSideStraightDark36in(0.9144), - kCargoSideStraightDark60in(1.524), - kCargoSideStraightDark72in(1.8288), - kCargoSideStraightPanelDark36in(0.9144), - kCargoStraightDark19in(0.4826), - kCargoStraightDark24in(0.6096), - kCargoStraightDark48in(1.2192), - kCargoStraightDark72in(1.8288), - kCargoStraightDark72in_HighRes(1.8288), - kCargoStraightDark90in(2.286), - kRocketPanelAngleDark48in(1.2192), - kRocketPanelAngleDark60in(1.524); - - public static double FOV = 68.5; - - public final double distanceMeters; - public final Path path; - - Path getPath() { - var filename = this.toString().substring(1); - return Path.of("2019", "WPI", filename + ".jpg"); - } - - WPI2019Image(double distanceMeters) { - this.distanceMeters = distanceMeters; - this.path = getPath(); - } - } - - @SuppressWarnings("unused") - public enum WPI2020Image { - kBlueGoal_060in_Center(1.524), - kBlueGoal_084in_Center(2.1336), - kBlueGoal_084in_Center_720p(2.1336), - kBlueGoal_108in_Center(2.7432), - kBlueGoal_132in_Center(3.3528), - kBlueGoal_156in_Center(3.9624), - kBlueGoal_180in_Center(4.572), - kBlueGoal_156in_Left(3.9624), - kBlueGoal_224in_Left(5.6896), - kBlueGoal_228in_ProtectedZone(5.7912), - kBlueGoal_330in_ProtectedZone(8.382), - kBlueGoal_Far_ProtectedZone(10.668), // TODO: find a more accurate distance - kRedLoading_016in_Down(0.4064), - kRedLoading_030in_Down(0.762), - kRedLoading_048in_Down(1.2192), - kRedLoading_048in(1.2192), - kRedLoading_060in(1.524), - kRedLoading_084in(2.1336), - kRedLoading_108in(2.7432); - - public static double FOV = 68.5; - - public final double distanceMeters; - public final Path path; - - Path getPath() { - var filename = this.toString().substring(1).replace('_', '-'); - return Path.of("2020", "WPI", 
filename + ".jpg"); - } - - WPI2020Image(double distanceMeters) { - this.distanceMeters = distanceMeters; - this.path = getPath(); - } - } + public static boolean loadLibraries() { + JNIWrapper.Helper.setExtractOnStaticLoad(false); + WPIUtilJNI.Helper.setExtractOnStaticLoad(false); + NetworkTablesJNI.Helper.setExtractOnStaticLoad(false); + WPINetJNI.Helper.setExtractOnStaticLoad(false); + CameraServerJNI.Helper.setExtractOnStaticLoad(false); + CameraServerCvJNI.Helper.setExtractOnStaticLoad(false); + AprilTagJNI.Helper.setExtractOnStaticLoad(false); + + try { + var loader = + new RuntimeLoader<>( + Core.NATIVE_LIBRARY_NAME, RuntimeLoader.getDefaultExtractionRoot(), Core.class); + loader.loadLibrary(); + + CombinedRuntimeLoader.loadLibraries( + TestUtils.class, + "wpiutiljni", + "ntcorejni", + "wpinetjni", + "wpiHaljni", + "cscorejni", + "cscorejnicvstatic", + "apriltagjni"); + return true; + } catch (IOException e) { + e.printStackTrace(); + return false; + } + } + + @SuppressWarnings("unused") + public enum WPI2019Image { + kCargoAngledDark48in(1.2192), + kCargoSideStraightDark36in(0.9144), + kCargoSideStraightDark60in(1.524), + kCargoSideStraightDark72in(1.8288), + kCargoSideStraightPanelDark36in(0.9144), + kCargoStraightDark19in(0.4826), + kCargoStraightDark24in(0.6096), + kCargoStraightDark48in(1.2192), + kCargoStraightDark72in(1.8288), + kCargoStraightDark72in_HighRes(1.8288), + kCargoStraightDark90in(2.286), + kRocketPanelAngleDark48in(1.2192), + kRocketPanelAngleDark60in(1.524); + + public static double FOV = 68.5; + + public final double distanceMeters; + public final Path path; + + Path getPath() { + var filename = this.toString().substring(1); + return Path.of("2019", "WPI", filename + ".jpg"); + } + + WPI2019Image(double distanceMeters) { + this.distanceMeters = distanceMeters; + this.path = getPath(); + } + } + + @SuppressWarnings("unused") + public enum WPI2020Image { + kBlueGoal_060in_Center(1.524), + kBlueGoal_084in_Center(2.1336), + 
kBlueGoal_084in_Center_720p(2.1336), + kBlueGoal_108in_Center(2.7432), + kBlueGoal_132in_Center(3.3528), + kBlueGoal_156in_Center(3.9624), + kBlueGoal_180in_Center(4.572), + kBlueGoal_156in_Left(3.9624), + kBlueGoal_224in_Left(5.6896), + kBlueGoal_228in_ProtectedZone(5.7912), + kBlueGoal_330in_ProtectedZone(8.382), + kBlueGoal_Far_ProtectedZone(10.668), // TODO: find a more accurate distance + kRedLoading_016in_Down(0.4064), + kRedLoading_030in_Down(0.762), + kRedLoading_048in_Down(1.2192), + kRedLoading_048in(1.2192), + kRedLoading_060in(1.524), + kRedLoading_084in(2.1336), + kRedLoading_108in(2.7432); - public enum WPI2023Apriltags { - k162_36_Angle, - k162_36_Straight, - k383_60_Angle2; + public static double FOV = 68.5; - public static double FOV = 68.5; + public final double distanceMeters; + public final Path path; - public final Translation2d approxPose; - public final Path path; - - Path getPath() { - var filename = this.toString().substring(1); - return Path.of("2023", "AprilTags", filename + ".png"); - } - - Translation2d getPose() { - var names = this.toString().substring(1).split("_"); - var x = Units.inchesToMeters(Integer.parseInt(names[0])); - var y = Units.inchesToMeters(Integer.parseInt(names[1])); - return new Translation2d(x, y); - } - - WPI2023Apriltags() { - this.approxPose = getPose(); - this.path = getPath(); - } + Path getPath() { + var filename = this.toString().substring(1).replace('_', '-'); + return Path.of("2020", "WPI", filename + ".jpg"); } - public enum WPI2022Image { - kTerminal12ft6in(Units.feetToMeters(12.5)), - kTerminal22ft6in(Units.feetToMeters(22.5)); - - public static double FOV = 68.5; - - public final double distanceMeters; - public final Path path; - - Path getPath() { - var filename = this.toString().substring(1).replace('_', '-'); - return Path.of("2022", "WPI", filename + ".png"); - } - - WPI2022Image(double distanceMeters) { - this.distanceMeters = distanceMeters; - this.path = getPath(); - } + WPI2020Image(double 
distanceMeters) { + this.distanceMeters = distanceMeters; + this.path = getPath(); } + } - public enum PolygonTestImages { - kPolygons; + public enum WPI2023Apriltags { + k162_36_Angle, + k162_36_Straight, + k383_60_Angle2; - public final Path path; + public static double FOV = 68.5; - Path getPath() { - var filename = this.toString().substring(1).toLowerCase(); - return Path.of("polygons", filename + ".png"); - } + public final Translation2d approxPose; + public final Path path; - PolygonTestImages() { - this.path = getPath(); - } + Path getPath() { + var filename = this.toString().substring(1); + return Path.of("2023", "AprilTags", filename + ".png"); } - public enum PowercellTestImages { - kPowercell_test_1, - kPowercell_test_2, - kPowercell_test_3, - kPowercell_test_4, - kPowercell_test_5, - kPowercell_test_6; - - public final Path path; - - Path getPath() { - var filename = this.toString().substring(1).toLowerCase(); - return Path.of(filename + ".png"); - } - - PowercellTestImages() { - this.path = getPath(); - } + Translation2d getPose() { + var names = this.toString().substring(1).split("_"); + var x = Units.inchesToMeters(Integer.parseInt(names[0])); + var y = Units.inchesToMeters(Integer.parseInt(names[1])); + return new Translation2d(x, y); } - public enum ApriltagTestImages { - kRobots, - kTag1_640_480, - kTag1_16h5_1280, - kTag_corner_1280; - - public final Path path; - - Path getPath() { - // Strip leading k - var filename = this.toString().substring(1).toLowerCase(); - var extension = ".jpg"; - if (filename.equals("tag1_16h5_1280")) extension = ".png"; - return Path.of("apriltag", filename + extension); - } - - ApriltagTestImages() { - this.path = getPath(); - } + WPI2023Apriltags() { + this.approxPose = getPose(); + this.path = getPath(); } + } - public static Path getResourcesFolderPath(boolean testMode) { - System.out.println("CWD: " + Path.of("").toAbsolutePath()); - - // VSCode likes to make this path relative to the wrong root directory, so a 
fun hack to tell - // if it's wrong - Path ret = Path.of("test-resources").toAbsolutePath(); - if (Path.of("test-resources") - .toAbsolutePath() - .toString() - .replace("/", "") - .replace("\\", "") - .toLowerCase() - .matches(".*photon-[a-z]*test-resources")) { - ret = Path.of("../test-resources").toAbsolutePath(); - } - return ret; - } + public enum WPI2022Image { + kTerminal12ft6in(Units.feetToMeters(12.5)), + kTerminal22ft6in(Units.feetToMeters(22.5)); - public static Path getTestMode2019ImagePath() { - return getResourcesFolderPath(true) - .resolve("testimages") - .resolve(WPI2019Image.kRocketPanelAngleDark60in.path); - } + public static double FOV = 68.5; - public static Path getTestMode2020ImagePath() { - return getResourcesFolderPath(true) - .resolve("testimages") - .resolve(WPI2020Image.kBlueGoal_156in_Left.path); - } + public final double distanceMeters; + public final Path path; - public static Path getTestMode2022ImagePath() { - return getResourcesFolderPath(true) - .resolve("testimages") - .resolve(WPI2022Image.kTerminal22ft6in.path); + Path getPath() { + var filename = this.toString().substring(1).replace('_', '-'); + return Path.of("2022", "WPI", filename + ".png"); } - public static Path getTestModeApriltagPath() { - return getResourcesFolderPath(true) - .resolve("testimages") - .resolve(ApriltagTestImages.kRobots.path); + WPI2022Image(double distanceMeters) { + this.distanceMeters = distanceMeters; + this.path = getPath(); } + } - public static Path getTestImagesPath(boolean testMode) { - return getResourcesFolderPath(testMode).resolve("testimages"); - } + public enum PolygonTestImages { + kPolygons; - public static Path getCalibrationPath(boolean testMode) { - return getResourcesFolderPath(testMode).resolve("calibration"); - } + public final Path path; - public static Path getPowercellPath(boolean testMode) { - return getTestImagesPath(testMode).resolve("polygons").resolve("powercells"); + Path getPath() { + var filename = 
this.toString().substring(1).toLowerCase(); + return Path.of("polygons", filename + ".png"); } - public static Path getWPIImagePath(WPI2020Image image, boolean testMode) { - return getTestImagesPath(testMode).resolve(image.path); + PolygonTestImages() { + this.path = getPath(); } + } - public static Path getWPIImagePath(WPI2019Image image, boolean testMode) { - return getTestImagesPath(testMode).resolve(image.path); - } + public enum PowercellTestImages { + kPowercell_test_1, + kPowercell_test_2, + kPowercell_test_3, + kPowercell_test_4, + kPowercell_test_5, + kPowercell_test_6; - public static Path getPolygonImagePath(PolygonTestImages image, boolean testMode) { - return getTestImagesPath(testMode).resolve(image.path); - } + public final Path path; - public static Path getApriltagImagePath(ApriltagTestImages image, boolean testMode) { - return getTestImagesPath(testMode).resolve(image.path); + Path getPath() { + var filename = this.toString().substring(1).toLowerCase(); + return Path.of(filename + ".png"); } - public static Path getPowercellImagePath(PowercellTestImages image, boolean testMode) { - return getPowercellPath(testMode).resolve(image.path); + PowercellTestImages() { + this.path = getPath(); } + } - public static Path getDotBoardImagesPath() { - return getResourcesFolderPath(false).resolve("calibrationBoardImages"); - } + public enum ApriltagTestImages { + kRobots, + kTag1_640_480, + kTag1_16h5_1280, + kTag_corner_1280; - public static Path getSquaresBoardImagesPath() { - return getResourcesFolderPath(false).resolve("calibrationSquaresImg"); - } + public final Path path; - public static File getHardwareConfigJson() { - return getResourcesFolderPath(false) - .resolve("hardware") - .resolve("HardwareConfig.json") - .toFile(); + Path getPath() { + // Strip leading k + var filename = this.toString().substring(1).toLowerCase(); + var extension = ".jpg"; + if (filename.equals("tag1_16h5_1280")) extension = ".png"; + return Path.of("apriltag", filename + 
extension); } - private static final String LIFECAM_240P_CAL_FILE = "lifecam240p.json"; - private static final String LIFECAM_480P_CAL_FILE = "lifecam480p.json"; - public static final String LIFECAM_1280P_CAL_FILE = "lifecam_1280.json"; - public static final String LIMELIGHT_480P_CAL_FILE = "limelight_1280_720.json"; - - public static CameraCalibrationCoefficients getCoeffs(String filename, boolean testMode) { - try { - return new ObjectMapper() - .readValue( - (Path.of(getCalibrationPath(testMode).toString(), filename).toFile()), - CameraCalibrationCoefficients.class); - } catch (IOException e) { - e.printStackTrace(); - return null; - } + ApriltagTestImages() { + this.path = getPath(); } + } - public static CameraCalibrationCoefficients get2019LifeCamCoeffs(boolean testMode) { - return getCoeffs(LIFECAM_240P_CAL_FILE, testMode); - } + public static Path getResourcesFolderPath(boolean testMode) { + System.out.println("CWD: " + Path.of("").toAbsolutePath()); - public static CameraCalibrationCoefficients get2020LifeCamCoeffs(boolean testMode) { - return getCoeffs(LIFECAM_480P_CAL_FILE, testMode); + // VSCode likes to make this path relative to the wrong root directory, so a fun hack to tell + // if it's wrong + Path ret = Path.of("test-resources").toAbsolutePath(); + if (Path.of("test-resources") + .toAbsolutePath() + .toString() + .replace("/", "") + .replace("\\", "") + .toLowerCase() + .matches(".*photon-[a-z]*test-resources")) { + ret = Path.of("../test-resources").toAbsolutePath(); } + return ret; + } - public static CameraCalibrationCoefficients getLaptop() { - return getCoeffs("laptop.json", true); - } + public static Path getTestMode2019ImagePath() { + return getResourcesFolderPath(true) + .resolve("testimages") + .resolve(WPI2019Image.kRocketPanelAngleDark60in.path); + } - private static final int DefaultTimeoutMillis = 5000; + public static Path getTestMode2020ImagePath() { + return getResourcesFolderPath(true) + .resolve("testimages") + 
.resolve(WPI2020Image.kBlueGoal_156in_Left.path); + } - public static void showImage(Mat frame, String title, int timeoutMs) { - if (frame.empty()) return; - try { - HighGui.imshow(title, frame); - HighGui.waitKey(timeoutMs); - HighGui.destroyAllWindows(); - } catch (HeadlessException ignored) { - } - } + public static Path getTestMode2022ImagePath() { + return getResourcesFolderPath(true) + .resolve("testimages") + .resolve(WPI2022Image.kTerminal22ft6in.path); + } - public static void showImage(Mat frame, int timeoutMs) { - showImage(frame, "", timeoutMs); - } - - public static void showImage(Mat frame, String title) { - showImage(frame, title, DefaultTimeoutMillis); - } - - public static void showImage(Mat frame) { - showImage(frame, DefaultTimeoutMillis); - } - - public static Path getTestMode2023ImagePath() { - return getResourcesFolderPath(true) - .resolve("testimages") - .resolve(WPI2022Image.kTerminal22ft6in.path); - } + public static Path getTestModeApriltagPath() { + return getResourcesFolderPath(true) + .resolve("testimages") + .resolve(ApriltagTestImages.kRobots.path); + } - public static CameraCalibrationCoefficients get2023LifeCamCoeffs(boolean testMode) { - return getCoeffs(LIFECAM_1280P_CAL_FILE, testMode); - } + public static Path getTestImagesPath(boolean testMode) { + return getResourcesFolderPath(testMode).resolve("testimages"); + } + + public static Path getCalibrationPath(boolean testMode) { + return getResourcesFolderPath(testMode).resolve("calibration"); + } + + public static Path getPowercellPath(boolean testMode) { + return getTestImagesPath(testMode).resolve("polygons").resolve("powercells"); + } + + public static Path getWPIImagePath(WPI2020Image image, boolean testMode) { + return getTestImagesPath(testMode).resolve(image.path); + } + + public static Path getWPIImagePath(WPI2019Image image, boolean testMode) { + return getTestImagesPath(testMode).resolve(image.path); + } + + public static Path getPolygonImagePath(PolygonTestImages image, 
boolean testMode) { + return getTestImagesPath(testMode).resolve(image.path); + } + + public static Path getApriltagImagePath(ApriltagTestImages image, boolean testMode) { + return getTestImagesPath(testMode).resolve(image.path); + } + + public static Path getPowercellImagePath(PowercellTestImages image, boolean testMode) { + return getPowercellPath(testMode).resolve(image.path); + } + + public static Path getDotBoardImagesPath() { + return getResourcesFolderPath(false).resolve("calibrationBoardImages"); + } + + public static Path getSquaresBoardImagesPath() { + return getResourcesFolderPath(false).resolve("calibrationSquaresImg"); + } + + public static File getHardwareConfigJson() { + return getResourcesFolderPath(false) + .resolve("hardware") + .resolve("HardwareConfig.json") + .toFile(); + } + + private static final String LIFECAM_240P_CAL_FILE = "lifecam240p.json"; + private static final String LIFECAM_480P_CAL_FILE = "lifecam480p.json"; + public static final String LIFECAM_1280P_CAL_FILE = "lifecam_1280.json"; + public static final String LIMELIGHT_480P_CAL_FILE = "limelight_1280_720.json"; + + public static CameraCalibrationCoefficients getCoeffs(String filename, boolean testMode) { + try { + return new ObjectMapper() + .readValue( + (Path.of(getCalibrationPath(testMode).toString(), filename).toFile()), + CameraCalibrationCoefficients.class); + } catch (IOException e) { + e.printStackTrace(); + return null; + } + } + + public static CameraCalibrationCoefficients get2019LifeCamCoeffs(boolean testMode) { + return getCoeffs(LIFECAM_240P_CAL_FILE, testMode); + } + + public static CameraCalibrationCoefficients get2020LifeCamCoeffs(boolean testMode) { + return getCoeffs(LIFECAM_480P_CAL_FILE, testMode); + } + + public static CameraCalibrationCoefficients getLaptop() { + return getCoeffs("laptop.json", true); + } + + private static final int DefaultTimeoutMillis = 5000; + + public static void showImage(Mat frame, String title, int timeoutMs) { + if (frame.empty()) 
return; + try { + HighGui.imshow(title, frame); + HighGui.waitKey(timeoutMs); + HighGui.destroyAllWindows(); + } catch (HeadlessException ignored) { + } + } + + public static void showImage(Mat frame, int timeoutMs) { + showImage(frame, "", timeoutMs); + } + + public static void showImage(Mat frame, String title) { + showImage(frame, title, DefaultTimeoutMillis); + } + + public static void showImage(Mat frame) { + showImage(frame, DefaultTimeoutMillis); + } + + public static Path getTestMode2023ImagePath() { + return getResourcesFolderPath(true) + .resolve("testimages") + .resolve(WPI2022Image.kTerminal22ft6in.path); + } + + public static CameraCalibrationCoefficients get2023LifeCamCoeffs(boolean testMode) { + return getCoeffs(LIFECAM_1280P_CAL_FILE, testMode); + } } diff --git a/photon-core/src/main/java/org/photonvision/common/util/TimedTaskManager.java b/photon-core/src/main/java/org/photonvision/common/util/TimedTaskManager.java index 7107faae32..e17d3c8b3d 100644 --- a/photon-core/src/main/java/org/photonvision/common/util/TimedTaskManager.java +++ b/photon-core/src/main/java/org/photonvision/common/util/TimedTaskManager.java @@ -23,60 +23,60 @@ import org.photonvision.common.logging.Logger; public class TimedTaskManager { - private static final Logger logger = new Logger(TimedTaskManager.class, LogGroup.General); + private static final Logger logger = new Logger(TimedTaskManager.class, LogGroup.General); - private static class Singleton { - public static final TimedTaskManager INSTANCE = new TimedTaskManager(); - } + private static class Singleton { + public static final TimedTaskManager INSTANCE = new TimedTaskManager(); + } - public static TimedTaskManager getInstance() { - return Singleton.INSTANCE; - } + public static TimedTaskManager getInstance() { + return Singleton.INSTANCE; + } - private static class CaughtThreadFactory implements ThreadFactory { - private static final ThreadFactory defaultThreadFactory = Executors.defaultThreadFactory(); + private 
static class CaughtThreadFactory implements ThreadFactory { + private static final ThreadFactory defaultThreadFactory = Executors.defaultThreadFactory(); - @Override - public Thread newThread(@NotNull Runnable r) { - Thread thread = defaultThreadFactory.newThread(r); - thread.setUncaughtExceptionHandler( - (t, e) -> logger.error("TimedTask threw uncaught exception!", e)); - return thread; - } + @Override + public Thread newThread(@NotNull Runnable r) { + Thread thread = defaultThreadFactory.newThread(r); + thread.setUncaughtExceptionHandler( + (t, e) -> logger.error("TimedTask threw uncaught exception!", e)); + return thread; } + } - private final ScheduledExecutorService timedTaskExecutorPool = - new ScheduledThreadPoolExecutor(2, new CaughtThreadFactory()); - private final ConcurrentHashMap> activeTasks = new ConcurrentHashMap<>(); + private final ScheduledExecutorService timedTaskExecutorPool = + new ScheduledThreadPoolExecutor(2, new CaughtThreadFactory()); + private final ConcurrentHashMap> activeTasks = new ConcurrentHashMap<>(); - public void addTask(String identifier, Runnable runnable, long millisInterval) { - if (!activeTasks.containsKey(identifier)) { - var future = - timedTaskExecutorPool.scheduleAtFixedRate( - runnable, 0, millisInterval, TimeUnit.MILLISECONDS); - activeTasks.put(identifier, future); - } + public void addTask(String identifier, Runnable runnable, long millisInterval) { + if (!activeTasks.containsKey(identifier)) { + var future = + timedTaskExecutorPool.scheduleAtFixedRate( + runnable, 0, millisInterval, TimeUnit.MILLISECONDS); + activeTasks.put(identifier, future); } + } - public void addTask( - String identifier, Runnable runnable, long millisStartDelay, long millisInterval) { - if (!activeTasks.containsKey(identifier)) { - var future = - timedTaskExecutorPool.scheduleAtFixedRate( - runnable, millisStartDelay, millisInterval, TimeUnit.MILLISECONDS); - activeTasks.put(identifier, future); - } + public void addTask( + String identifier, 
Runnable runnable, long millisStartDelay, long millisInterval) { + if (!activeTasks.containsKey(identifier)) { + var future = + timedTaskExecutorPool.scheduleAtFixedRate( + runnable, millisStartDelay, millisInterval, TimeUnit.MILLISECONDS); + activeTasks.put(identifier, future); } + } - public void addOneShotTask(Runnable runnable, long millisStartDelay) { - timedTaskExecutorPool.schedule(runnable, millisStartDelay, TimeUnit.MILLISECONDS); - } + public void addOneShotTask(Runnable runnable, long millisStartDelay) { + timedTaskExecutorPool.schedule(runnable, millisStartDelay, TimeUnit.MILLISECONDS); + } - public void cancelTask(String identifier) { - var future = activeTasks.getOrDefault(identifier, null); - if (future != null) { - future.cancel(true); - activeTasks.remove(identifier); - } + public void cancelTask(String identifier) { + var future = activeTasks.getOrDefault(identifier, null); + if (future != null) { + future.cancel(true); + activeTasks.remove(identifier); } + } } diff --git a/photon-core/src/main/java/org/photonvision/common/util/file/FileUtils.java b/photon-core/src/main/java/org/photonvision/common/util/file/FileUtils.java index 9d4cce6569..1da1b3b054 100644 --- a/photon-core/src/main/java/org/photonvision/common/util/file/FileUtils.java +++ b/photon-core/src/main/java/org/photonvision/common/util/file/FileUtils.java @@ -34,109 +34,109 @@ import org.photonvision.common.logging.Logger; public class FileUtils { - private FileUtils() {} + private FileUtils() {} - private static final Logger logger = new Logger(FileUtils.class, LogGroup.General); - private static final Set allReadWriteExecutePerms = - new HashSet<>(Arrays.asList(PosixFilePermission.values())); + private static final Logger logger = new Logger(FileUtils.class, LogGroup.General); + private static final Set allReadWriteExecutePerms = + new HashSet<>(Arrays.asList(PosixFilePermission.values())); - public static void deleteDirectory(Path path) { - try { - var files = Files.walk(path); + 
public static void deleteDirectory(Path path) { + try { + var files = Files.walk(path); - // delete directory including files and sub-folders - files - .sorted(Comparator.reverseOrder()) - .map(Path::toFile) - // .filter(File::isFile) // we want to delete directories and sub-dirs, too - .forEach((var file) -> deleteFile(file.toPath())); + // delete directory including files and sub-folders + files + .sorted(Comparator.reverseOrder()) + .map(Path::toFile) + // .filter(File::isFile) // we want to delete directories and sub-dirs, too + .forEach((var file) -> deleteFile(file.toPath())); - // close the stream - files.close(); - } catch (IOException e) { - logger.error("Exception deleting files in " + path + "!", e); - } + // close the stream + files.close(); + } catch (IOException e) { + logger.error("Exception deleting files in " + path + "!", e); } + } - /** - * Delete the file at the path. - * - * @param path file path to delete. - * @return whether the operation was successful. - */ - public static boolean deleteFile(Path path) { - try { - Files.delete(path); - return true; - } catch (FileNotFoundException | NoSuchFileException fe) { - logger.warn("Tried to delete file \"" + path + "\" but it did not exist"); - return false; - } catch (IOException e) { - logger.error("Exception deleting file \"" + path + "\"!", e); - return false; - } + /** + * Delete the file at the path. + * + * @param path file path to delete. + * @return whether the operation was successful. + */ + public static boolean deleteFile(Path path) { + try { + Files.delete(path); + return true; + } catch (FileNotFoundException | NoSuchFileException fe) { + logger.warn("Tried to delete file \"" + path + "\" but it did not exist"); + return false; + } catch (IOException e) { + logger.error("Exception deleting file \"" + path + "\"!", e); + return false; } + } - /** - * Copy a file from a source to a new destination. - * - * @param src the file path to copy. - * @param dst the file path to replace. 
- * @return whether the operation was successful. - */ - public static boolean copyFile(Path src, Path dst) { - try { - Files.copy(src, dst); - return true; - } catch (IOException e) { - logger.error("Exception copying file " + src + " to " + dst + "!", e); - return false; - } + /** + * Copy a file from a source to a new destination. + * + * @param src the file path to copy. + * @param dst the file path to replace. + * @return whether the operation was successful. + */ + public static boolean copyFile(Path src, Path dst) { + try { + Files.copy(src, dst); + return true; + } catch (IOException e) { + logger.error("Exception copying file " + src + " to " + dst + "!", e); + return false; } + } - /** - * Replace the destination file with a new source. - * - * @param src the file path to replace with. - * @param dst the file path to replace. - * @return whether the operation was successful. - */ - public static boolean replaceFile(Path src, Path dst) { - boolean fileDeleted = deleteFile(dst); - boolean fileCopied = copyFile(src, dst); - return fileDeleted && fileCopied; - } + /** + * Replace the destination file with a new source. + * + * @param src the file path to replace with. + * @param dst the file path to replace. + * @return whether the operation was successful. 
+ */ + public static boolean replaceFile(Path src, Path dst) { + boolean fileDeleted = deleteFile(dst); + boolean fileCopied = copyFile(src, dst); + return fileDeleted && fileCopied; + } - public static void setFilePerms(Path path) throws IOException { - if (Platform.isLinux()) { - File thisFile = path.toFile(); - Set perms = - Files.readAttributes(path, PosixFileAttributes.class).permissions(); - if (!perms.equals(allReadWriteExecutePerms)) { - logger.info("Setting perms on" + path); - Files.setPosixFilePermissions(path, perms); - var theseFiles = thisFile.listFiles(); - if (thisFile.isDirectory() && theseFiles != null) { - for (File subfile : theseFiles) { - setFilePerms(subfile.toPath()); - } - } - } + public static void setFilePerms(Path path) throws IOException { + if (Platform.isLinux()) { + File thisFile = path.toFile(); + Set perms = + Files.readAttributes(path, PosixFileAttributes.class).permissions(); + if (!perms.equals(allReadWriteExecutePerms)) { + logger.info("Setting perms on" + path); + Files.setPosixFilePermissions(path, perms); + var theseFiles = thisFile.listFiles(); + if (thisFile.isDirectory() && theseFiles != null) { + for (File subfile : theseFiles) { + setFilePerms(subfile.toPath()); + } } + } } + } - public static void setAllPerms(Path path) { - if (Platform.isLinux()) { - String command = String.format("chmod 777 -R %s", path.toString()); - try { - Process p = Runtime.getRuntime().exec(command); - p.waitFor(); + public static void setAllPerms(Path path) { + if (Platform.isLinux()) { + String command = String.format("chmod 777 -R %s", path.toString()); + try { + Process p = Runtime.getRuntime().exec(command); + p.waitFor(); - } catch (Exception e) { - logger.error("Setting perms failed!", e); - } - } else { - logger.info("Cannot set directory permissions on Windows!"); - } + } catch (Exception e) { + logger.error("Setting perms failed!", e); + } + } else { + logger.info("Cannot set directory permissions on Windows!"); } + } } diff --git 
a/photon-core/src/main/java/org/photonvision/common/util/file/JacksonUtils.java b/photon-core/src/main/java/org/photonvision/common/util/file/JacksonUtils.java index 818a0e51ec..a891c14fa7 100644 --- a/photon-core/src/main/java/org/photonvision/common/util/file/JacksonUtils.java +++ b/photon-core/src/main/java/org/photonvision/common/util/file/JacksonUtils.java @@ -34,110 +34,110 @@ import java.util.HashMap; public class JacksonUtils { - public static class UIMap extends HashMap {} + public static class UIMap extends HashMap {} - public static void serialize(Path path, T object) throws IOException { - serialize(path, object, true); - } + public static void serialize(Path path, T object) throws IOException { + serialize(path, object, true); + } - public static String serializeToString(T object) throws IOException { - PolymorphicTypeValidator ptv = - BasicPolymorphicTypeValidator.builder().allowIfBaseType(object.getClass()).build(); - ObjectMapper objectMapper = - JsonMapper.builder() - .activateDefaultTyping(ptv, ObjectMapper.DefaultTyping.JAVA_LANG_OBJECT) - .build(); - return objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(object); - } + public static String serializeToString(T object) throws IOException { + PolymorphicTypeValidator ptv = + BasicPolymorphicTypeValidator.builder().allowIfBaseType(object.getClass()).build(); + ObjectMapper objectMapper = + JsonMapper.builder() + .activateDefaultTyping(ptv, ObjectMapper.DefaultTyping.JAVA_LANG_OBJECT) + .build(); + return objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(object); + } - public static void serialize(Path path, T object, boolean forceSync) throws IOException { - PolymorphicTypeValidator ptv = - BasicPolymorphicTypeValidator.builder().allowIfBaseType(object.getClass()).build(); - ObjectMapper objectMapper = - JsonMapper.builder() - .activateDefaultTyping(ptv, ObjectMapper.DefaultTyping.JAVA_LANG_OBJECT) - .build(); - String json = 
objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(object); - saveJsonString(json, path, forceSync); - } + public static void serialize(Path path, T object, boolean forceSync) throws IOException { + PolymorphicTypeValidator ptv = + BasicPolymorphicTypeValidator.builder().allowIfBaseType(object.getClass()).build(); + ObjectMapper objectMapper = + JsonMapper.builder() + .activateDefaultTyping(ptv, ObjectMapper.DefaultTyping.JAVA_LANG_OBJECT) + .build(); + String json = objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(object); + saveJsonString(json, path, forceSync); + } - public static T deserialize(String s, Class ref) throws IOException { - PolymorphicTypeValidator ptv = - BasicPolymorphicTypeValidator.builder().allowIfBaseType(ref).build(); - ObjectMapper objectMapper = - JsonMapper.builder() - .configure(JsonReadFeature.ALLOW_JAVA_COMMENTS, true) - .configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false) - .activateDefaultTyping(ptv, ObjectMapper.DefaultTyping.JAVA_LANG_OBJECT) - .build(); + public static T deserialize(String s, Class ref) throws IOException { + PolymorphicTypeValidator ptv = + BasicPolymorphicTypeValidator.builder().allowIfBaseType(ref).build(); + ObjectMapper objectMapper = + JsonMapper.builder() + .configure(JsonReadFeature.ALLOW_JAVA_COMMENTS, true) + .configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false) + .activateDefaultTyping(ptv, ObjectMapper.DefaultTyping.JAVA_LANG_OBJECT) + .build(); - return objectMapper.readValue(s, ref); - } + return objectMapper.readValue(s, ref); + } - public static T deserialize(Path path, Class ref) throws IOException { - PolymorphicTypeValidator ptv = - BasicPolymorphicTypeValidator.builder().allowIfBaseType(ref).build(); - ObjectMapper objectMapper = - JsonMapper.builder() - .configure(JsonReadFeature.ALLOW_JAVA_COMMENTS, true) - .configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false) - .activateDefaultTyping(ptv, 
ObjectMapper.DefaultTyping.JAVA_LANG_OBJECT) - .build(); - File jsonFile = new File(path.toString()); - if (jsonFile.exists() && jsonFile.length() > 0) { - return objectMapper.readValue(jsonFile, ref); - } - return null; + public static T deserialize(Path path, Class ref) throws IOException { + PolymorphicTypeValidator ptv = + BasicPolymorphicTypeValidator.builder().allowIfBaseType(ref).build(); + ObjectMapper objectMapper = + JsonMapper.builder() + .configure(JsonReadFeature.ALLOW_JAVA_COMMENTS, true) + .configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false) + .activateDefaultTyping(ptv, ObjectMapper.DefaultTyping.JAVA_LANG_OBJECT) + .build(); + File jsonFile = new File(path.toString()); + if (jsonFile.exists() && jsonFile.length() > 0) { + return objectMapper.readValue(jsonFile, ref); } + return null; + } - public static T deserialize(Path path, Class ref, StdDeserializer deserializer) - throws IOException { - ObjectMapper objectMapper = new ObjectMapper(); - SimpleModule module = new SimpleModule(); - module.addDeserializer(ref, deserializer); - objectMapper.registerModule(module); + public static T deserialize(Path path, Class ref, StdDeserializer deserializer) + throws IOException { + ObjectMapper objectMapper = new ObjectMapper(); + SimpleModule module = new SimpleModule(); + module.addDeserializer(ref, deserializer); + objectMapper.registerModule(module); - File jsonFile = new File(path.toString()); - if (jsonFile.exists() && jsonFile.length() > 0) { - return objectMapper.readValue(jsonFile, ref); - } - return null; + File jsonFile = new File(path.toString()); + if (jsonFile.exists() && jsonFile.length() > 0) { + return objectMapper.readValue(jsonFile, ref); } + return null; + } - public static void serialize(Path path, T object, Class ref, StdSerializer serializer) - throws IOException { - serialize(path, object, ref, serializer, true); - } + public static void serialize(Path path, T object, Class ref, StdSerializer serializer) + throws 
IOException { + serialize(path, object, ref, serializer, true); + } - public static void serialize( - Path path, T object, Class ref, StdSerializer serializer, boolean forceSync) - throws IOException { - ObjectMapper objectMapper = new ObjectMapper(); - SimpleModule module = new SimpleModule(); - module.addSerializer(ref, serializer); - objectMapper.registerModule(module); - String json = objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(object); - saveJsonString(json, path, forceSync); - } + public static void serialize( + Path path, T object, Class ref, StdSerializer serializer, boolean forceSync) + throws IOException { + ObjectMapper objectMapper = new ObjectMapper(); + SimpleModule module = new SimpleModule(); + module.addSerializer(ref, serializer); + objectMapper.registerModule(module); + String json = objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(object); + saveJsonString(json, path, forceSync); + } - private static void saveJsonString(String json, Path path, boolean forceSync) throws IOException { - var file = path.toFile(); - if (file.getParentFile() != null && !file.getParentFile().exists()) { - file.getParentFile().mkdirs(); - } - if (!file.exists()) { - if (!file.canWrite()) { - file.setWritable(true); - } - file.createNewFile(); - } - FileOutputStream fileOutputStream = new FileOutputStream(file); - fileOutputStream.write(json.getBytes()); - fileOutputStream.flush(); - if (forceSync) { - FileDescriptor fileDescriptor = fileOutputStream.getFD(); - fileDescriptor.sync(); - } - fileOutputStream.close(); + private static void saveJsonString(String json, Path path, boolean forceSync) throws IOException { + var file = path.toFile(); + if (file.getParentFile() != null && !file.getParentFile().exists()) { + file.getParentFile().mkdirs(); + } + if (!file.exists()) { + if (!file.canWrite()) { + file.setWritable(true); + } + file.createNewFile(); + } + FileOutputStream fileOutputStream = new FileOutputStream(file); + 
fileOutputStream.write(json.getBytes()); + fileOutputStream.flush(); + if (forceSync) { + FileDescriptor fileDescriptor = fileOutputStream.getFD(); + fileDescriptor.sync(); } + fileOutputStream.close(); + } } diff --git a/photon-core/src/main/java/org/photonvision/common/util/file/ProgramDirectoryUtilities.java b/photon-core/src/main/java/org/photonvision/common/util/file/ProgramDirectoryUtilities.java index e9e244386d..541b2a42cb 100644 --- a/photon-core/src/main/java/org/photonvision/common/util/file/ProgramDirectoryUtilities.java +++ b/photon-core/src/main/java/org/photonvision/common/util/file/ProgramDirectoryUtilities.java @@ -21,43 +21,43 @@ import java.net.URISyntaxException; public class ProgramDirectoryUtilities { - private static String getJarName() { - return new File( - ProgramDirectoryUtilities.class - .getProtectionDomain() - .getCodeSource() - .getLocation() - .getPath()) - .getName(); - } + private static String getJarName() { + return new File( + ProgramDirectoryUtilities.class + .getProtectionDomain() + .getCodeSource() + .getLocation() + .getPath()) + .getName(); + } - private static boolean runningFromJAR() { - String jarName = getJarName(); - return jarName.contains(".jar"); - } + private static boolean runningFromJAR() { + String jarName = getJarName(); + return jarName.contains(".jar"); + } - public static String getProgramDirectory() { - if (runningFromJAR()) { - return getCurrentJARDirectory(); - } else { - return System.getProperty("user.dir"); - } + public static String getProgramDirectory() { + if (runningFromJAR()) { + return getCurrentJARDirectory(); + } else { + return System.getProperty("user.dir"); } + } - private static String getCurrentJARDirectory() { - try { - return new File( - ProgramDirectoryUtilities.class - .getProtectionDomain() - .getCodeSource() - .getLocation() - .toURI() - .getPath()) - .getParent(); - } catch (URISyntaxException exception) { - exception.printStackTrace(); - } - - return null; + private static String 
getCurrentJARDirectory() { + try { + return new File( + ProgramDirectoryUtilities.class + .getProtectionDomain() + .getCodeSource() + .getLocation() + .toURI() + .getPath()) + .getParent(); + } catch (URISyntaxException exception) { + exception.printStackTrace(); } + + return null; + } } diff --git a/photon-core/src/main/java/org/photonvision/common/util/java/TriConsumer.java b/photon-core/src/main/java/org/photonvision/common/util/java/TriConsumer.java index 6f1dc67fbc..3d599f00bd 100644 --- a/photon-core/src/main/java/org/photonvision/common/util/java/TriConsumer.java +++ b/photon-core/src/main/java/org/photonvision/common/util/java/TriConsumer.java @@ -18,5 +18,5 @@ package org.photonvision.common.util.java; public interface TriConsumer { - void accept(T t, U u, V v); + void accept(T t, U u, V v); } diff --git a/photon-core/src/main/java/org/photonvision/common/util/math/IPUtils.java b/photon-core/src/main/java/org/photonvision/common/util/math/IPUtils.java index f4127e782c..ecd2d93caf 100644 --- a/photon-core/src/main/java/org/photonvision/common/util/math/IPUtils.java +++ b/photon-core/src/main/java/org/photonvision/common/util/math/IPUtils.java @@ -21,36 +21,36 @@ import java.util.List; public class IPUtils { - public static boolean isValidIPV4(final String ip) { - String PATTERN = - "^((0|1\\d?\\d?|2[0-4]?\\d?|25[0-5]?|[3-9]\\d?)\\.){3}(0|1\\d?\\d?|2[0-4]?\\d?|25[0-5]?|[3-9]\\d?)$"; - - return ip.matches(PATTERN); - } - - public static List getDigitBytes(int num) { - List digits = new ArrayList<>(); - collectDigitBytes(num, digits); - return digits; - } - - private static void collectDigitBytes(int num, List digits) { - if (num / 10 > 0) { - collectDigitBytes(num / 10, digits); - } - digits.add((byte) (num % 10)); - } - - public static List getDigits(int num) { - List digits = new ArrayList<>(); - collectDigits(num, digits); - return digits; + public static boolean isValidIPV4(final String ip) { + String PATTERN = + 
"^((0|1\\d?\\d?|2[0-4]?\\d?|25[0-5]?|[3-9]\\d?)\\.){3}(0|1\\d?\\d?|2[0-4]?\\d?|25[0-5]?|[3-9]\\d?)$"; + + return ip.matches(PATTERN); + } + + public static List getDigitBytes(int num) { + List digits = new ArrayList<>(); + collectDigitBytes(num, digits); + return digits; + } + + private static void collectDigitBytes(int num, List digits) { + if (num / 10 > 0) { + collectDigitBytes(num / 10, digits); } - - private static void collectDigits(int num, List digits) { - if (num / 10 > 0) { - collectDigits(num / 10, digits); - } - digits.add(num % 10); + digits.add((byte) (num % 10)); + } + + public static List getDigits(int num) { + List digits = new ArrayList<>(); + collectDigits(num, digits); + return digits; + } + + private static void collectDigits(int num, List digits) { + if (num / 10 > 0) { + collectDigits(num / 10, digits); } + digits.add(num % 10); + } } diff --git a/photon-core/src/main/java/org/photonvision/common/util/math/MathUtils.java b/photon-core/src/main/java/org/photonvision/common/util/math/MathUtils.java index 2b6bb31ae3..c85875fa89 100644 --- a/photon-core/src/main/java/org/photonvision/common/util/math/MathUtils.java +++ b/photon-core/src/main/java/org/photonvision/common/util/math/MathUtils.java @@ -30,172 +30,172 @@ import org.opencv.core.Mat; public class MathUtils { - MathUtils() {} - - public static double toSlope(Number angle) { - return Math.atan(Math.toRadians(angle.doubleValue() - 90)); - } - - public static int safeDivide(int quotient, int divisor) { - if (divisor == 0) { - return 0; - } else { - return quotient / divisor; - } - } - - public static double roundTo(double value, int to) { - double toMult = Math.pow(10, to); - return (double) Math.round(value * toMult) / toMult; - } - - public static double nanosToMillis(long nanos) { - return nanos / 1000000.0; - } - - public static double nanosToMillis(double nanos) { - return nanos / 1000000.0; - } - - public static long millisToNanos(long millis) { - return millis * 1000000; - } - - 
public static long microsToNanos(long micros) { - return micros * 1000; - } - - public static double map( - double value, double in_min, double in_max, double out_min, double out_max) { - return (value - in_min) * (out_max - out_min) / (in_max - in_min) + out_min; - } - - public static int map(int value, int inMin, int inMax, int outMin, int outMax) { - return (int) Math.floor(map((double) value, inMin, inMax, outMin, outMax) + 0.5); - } - - public static long wpiNanoTime() { - return microsToNanos(WPIUtilJNI.now()); - } - - /** - * Get the value of the nTh percentile of a list - * - * @param list The list to evaluate - * @param p The percentile, in [0,100] - * @return - */ - public static double getPercentile(List list, double p) { - if ((p > 100) || (p <= 0)) { - throw new IllegalArgumentException("invalid quantile value: " + p); - } - - if (list.isEmpty()) { - return Double.NaN; - } - if (list.size() == 1) { - return list.get(0); // always return single value for n = 1 - } - - // Sort array. We avoid a third copy here by just creating the - // list directly. - double[] sorted = new double[list.size()]; - for (int i = 0; i < list.size(); i++) { - sorted[i] = list.get(i); - } - Arrays.sort(sorted); - - return evaluateSorted(sorted, p); - } - - private static double evaluateSorted(final double[] sorted, final double p) { - double n = sorted.length; - double pos = p * (n + 1) / 100; - double fpos = Math.floor(pos); - int intPos = (int) fpos; - double dif = pos - fpos; - - if (pos < 1) { - return sorted[0]; - } - if (pos >= n) { - return sorted[sorted.length - 1]; - } - double lower = sorted[intPos - 1]; - double upper = sorted[intPos]; - return lower + dif * (upper - lower); - } - - /** - * Linearly interpolates between two values. - * - * @param startValue The start value. - * @param endValue The end value. - * @param t The fraction for interpolation. - * @return The interpolated value. 
- */ - @SuppressWarnings("ParameterName") - public static double lerp(double startValue, double endValue, double t) { - return startValue + (endValue - startValue) * t; - } - - /** - * OpenCV uses the EDN coordinate system, but WPILib uses NWU. Converts a camera-to-target - * transformation from EDN to NWU. - * - *

Note: The detected target's rvec and tvec perform a rotation-translation transformation - * which converts points in the target's coordinate system to the camera's. This means applying - * the transformation to the target point (0,0,0) for example would give the target's center - * relative to the camera. Conveniently, if we make a translation-rotation transformation out of - * these components instead, we get the transformation from the camera to the target. - * - * @param cameraToTarget3d A camera-to-target Transform3d in EDN. - * @return A camera-to-target Transform3d in NWU. - */ - public static Transform3d convertOpenCVtoPhotonTransform(Transform3d cameraToTarget3d) { - // TODO: Refactor into new pipe? - return CoordinateSystem.convert( - cameraToTarget3d, CoordinateSystem.EDN(), CoordinateSystem.NWU()); - } - - /* - * From the AprilTag repo: - * "The coordinate system has the origin at the camera center. The z-axis points from the camera - * center out the camera lens. The x-axis is to the right in the image taken by the camera, and - * y is down. The tag's coordinate frame is centered at the center of the tag, with x-axis to the - * right, y-axis down, and z-axis into the tag." - * - * This means our detected transformation will be in EDN. Our subsequent uses of the transformation, - * however, assume the tag's z-axis point away from the tag instead of into it. This means we - * have to correct the transformation's rotation. - */ - private static final Rotation3d APRILTAG_BASE_ROTATION = - new Rotation3d(VecBuilder.fill(0, 1, 0), Units.degreesToRadians(180)); - - /** - * AprilTag returns a camera-to-tag transform in EDN, but the tag's z-axis points into the tag - * instead of away from it and towards the camera. This means we have to correct the - * transformation's rotation. - * - * @param pose The Transform3d with translation and rotation directly from the {@link - * AprilTagPoseEstimate}. 
- */ - public static Transform3d convertApriltagtoOpenCV(Transform3d pose) { - var ocvRotation = APRILTAG_BASE_ROTATION.rotateBy(pose.getRotation()); - return new Transform3d(pose.getTranslation(), ocvRotation); - } - - public static Pose3d convertArucotoOpenCV(Transform3d pose) { - var ocvRotation = - APRILTAG_BASE_ROTATION.rotateBy( - new Rotation3d(VecBuilder.fill(0, 0, 1), Units.degreesToRadians(180)) - .rotateBy(pose.getRotation())); - return new Pose3d(pose.getTranslation(), ocvRotation); - } - - public static void rotationToOpencvRvec(Rotation3d rotation, Mat rvecOutput) { - var angle = rotation.getAngle(); - var axis = rotation.getAxis().times(angle); - rvecOutput.put(0, 0, axis.getData()); - } + MathUtils() {} + + public static double toSlope(Number angle) { + return Math.atan(Math.toRadians(angle.doubleValue() - 90)); + } + + public static int safeDivide(int quotient, int divisor) { + if (divisor == 0) { + return 0; + } else { + return quotient / divisor; + } + } + + public static double roundTo(double value, int to) { + double toMult = Math.pow(10, to); + return (double) Math.round(value * toMult) / toMult; + } + + public static double nanosToMillis(long nanos) { + return nanos / 1000000.0; + } + + public static double nanosToMillis(double nanos) { + return nanos / 1000000.0; + } + + public static long millisToNanos(long millis) { + return millis * 1000000; + } + + public static long microsToNanos(long micros) { + return micros * 1000; + } + + public static double map( + double value, double in_min, double in_max, double out_min, double out_max) { + return (value - in_min) * (out_max - out_min) / (in_max - in_min) + out_min; + } + + public static int map(int value, int inMin, int inMax, int outMin, int outMax) { + return (int) Math.floor(map((double) value, inMin, inMax, outMin, outMax) + 0.5); + } + + public static long wpiNanoTime() { + return microsToNanos(WPIUtilJNI.now()); + } + + /** + * Get the value of the nTh percentile of a list + * + * @param 
list The list to evaluate + * @param p The percentile, in [0,100] + * @return + */ + public static double getPercentile(List list, double p) { + if ((p > 100) || (p <= 0)) { + throw new IllegalArgumentException("invalid quantile value: " + p); + } + + if (list.isEmpty()) { + return Double.NaN; + } + if (list.size() == 1) { + return list.get(0); // always return single value for n = 1 + } + + // Sort array. We avoid a third copy here by just creating the + // list directly. + double[] sorted = new double[list.size()]; + for (int i = 0; i < list.size(); i++) { + sorted[i] = list.get(i); + } + Arrays.sort(sorted); + + return evaluateSorted(sorted, p); + } + + private static double evaluateSorted(final double[] sorted, final double p) { + double n = sorted.length; + double pos = p * (n + 1) / 100; + double fpos = Math.floor(pos); + int intPos = (int) fpos; + double dif = pos - fpos; + + if (pos < 1) { + return sorted[0]; + } + if (pos >= n) { + return sorted[sorted.length - 1]; + } + double lower = sorted[intPos - 1]; + double upper = sorted[intPos]; + return lower + dif * (upper - lower); + } + + /** + * Linearly interpolates between two values. + * + * @param startValue The start value. + * @param endValue The end value. + * @param t The fraction for interpolation. + * @return The interpolated value. + */ + @SuppressWarnings("ParameterName") + public static double lerp(double startValue, double endValue, double t) { + return startValue + (endValue - startValue) * t; + } + + /** + * OpenCV uses the EDN coordinate system, but WPILib uses NWU. Converts a camera-to-target + * transformation from EDN to NWU. + * + *

Note: The detected target's rvec and tvec perform a rotation-translation transformation + * which converts points in the target's coordinate system to the camera's. This means applying + * the transformation to the target point (0,0,0) for example would give the target's center + * relative to the camera. Conveniently, if we make a translation-rotation transformation out of + * these components instead, we get the transformation from the camera to the target. + * + * @param cameraToTarget3d A camera-to-target Transform3d in EDN. + * @return A camera-to-target Transform3d in NWU. + */ + public static Transform3d convertOpenCVtoPhotonTransform(Transform3d cameraToTarget3d) { + // TODO: Refactor into new pipe? + return CoordinateSystem.convert( + cameraToTarget3d, CoordinateSystem.EDN(), CoordinateSystem.NWU()); + } + + /* + * From the AprilTag repo: + * "The coordinate system has the origin at the camera center. The z-axis points from the camera + * center out the camera lens. The x-axis is to the right in the image taken by the camera, and + * y is down. The tag's coordinate frame is centered at the center of the tag, with x-axis to the + * right, y-axis down, and z-axis into the tag." + * + * This means our detected transformation will be in EDN. Our subsequent uses of the transformation, + * however, assume the tag's z-axis point away from the tag instead of into it. This means we + * have to correct the transformation's rotation. + */ + private static final Rotation3d APRILTAG_BASE_ROTATION = + new Rotation3d(VecBuilder.fill(0, 1, 0), Units.degreesToRadians(180)); + + /** + * AprilTag returns a camera-to-tag transform in EDN, but the tag's z-axis points into the tag + * instead of away from it and towards the camera. This means we have to correct the + * transformation's rotation. + * + * @param pose The Transform3d with translation and rotation directly from the {@link + * AprilTagPoseEstimate}. 
+ */ + public static Transform3d convertApriltagtoOpenCV(Transform3d pose) { + var ocvRotation = APRILTAG_BASE_ROTATION.rotateBy(pose.getRotation()); + return new Transform3d(pose.getTranslation(), ocvRotation); + } + + public static Pose3d convertArucotoOpenCV(Transform3d pose) { + var ocvRotation = + APRILTAG_BASE_ROTATION.rotateBy( + new Rotation3d(VecBuilder.fill(0, 0, 1), Units.degreesToRadians(180)) + .rotateBy(pose.getRotation())); + return new Pose3d(pose.getTranslation(), ocvRotation); + } + + public static void rotationToOpencvRvec(Rotation3d rotation, Mat rvecOutput) { + var angle = rotation.getAngle(); + var axis = rotation.getAxis().times(angle); + rvecOutput.put(0, 0, axis.getData()); + } } diff --git a/photon-core/src/main/java/org/photonvision/common/util/numbers/DoubleCouple.java b/photon-core/src/main/java/org/photonvision/common/util/numbers/DoubleCouple.java index d8503503e0..6294e561ff 100644 --- a/photon-core/src/main/java/org/photonvision/common/util/numbers/DoubleCouple.java +++ b/photon-core/src/main/java/org/photonvision/common/util/numbers/DoubleCouple.java @@ -20,28 +20,28 @@ import org.opencv.core.Point; public class DoubleCouple extends NumberCouple { - public DoubleCouple() { - super(0.0, 0.0); - } - - public DoubleCouple(Number first, Number second) { - super(first.doubleValue(), second.doubleValue()); - } - - public DoubleCouple(Double first, Double second) { - super(first, second); - } - - public DoubleCouple(Point point) { - super(point.x, point.y); - } - - public Point toPoint() { - return new Point(first, second); - } - - public void fromPoint(Point point) { - first = point.x; - second = point.y; - } + public DoubleCouple() { + super(0.0, 0.0); + } + + public DoubleCouple(Number first, Number second) { + super(first.doubleValue(), second.doubleValue()); + } + + public DoubleCouple(Double first, Double second) { + super(first, second); + } + + public DoubleCouple(Point point) { + super(point.x, point.y); + } + + public Point 
toPoint() { + return new Point(first, second); + } + + public void fromPoint(Point point) { + first = point.x; + second = point.y; + } } diff --git a/photon-core/src/main/java/org/photonvision/common/util/numbers/IntegerCouple.java b/photon-core/src/main/java/org/photonvision/common/util/numbers/IntegerCouple.java index 0cd834cc23..196776b068 100644 --- a/photon-core/src/main/java/org/photonvision/common/util/numbers/IntegerCouple.java +++ b/photon-core/src/main/java/org/photonvision/common/util/numbers/IntegerCouple.java @@ -18,11 +18,11 @@ package org.photonvision.common.util.numbers; public class IntegerCouple extends NumberCouple { - public IntegerCouple() { - super(0, 0); - } + public IntegerCouple() { + super(0, 0); + } - public IntegerCouple(Integer first, Integer second) { - super(first, second); - } + public IntegerCouple(Integer first, Integer second) { + super(first, second); + } } diff --git a/photon-core/src/main/java/org/photonvision/common/util/numbers/NumberCouple.java b/photon-core/src/main/java/org/photonvision/common/util/numbers/NumberCouple.java index 9a6047c797..26c40fe984 100644 --- a/photon-core/src/main/java/org/photonvision/common/util/numbers/NumberCouple.java +++ b/photon-core/src/main/java/org/photonvision/common/util/numbers/NumberCouple.java @@ -20,51 +20,51 @@ import com.fasterxml.jackson.annotation.JsonIgnore; public abstract class NumberCouple { - protected T first; - protected T second; + protected T first; + protected T second; - public NumberCouple(T first, T second) { - this.first = first; - this.second = second; - } - - public void setFirst(T first) { - this.first = first; - } - - public T getFirst() { - return first; - } + public NumberCouple(T first, T second) { + this.first = first; + this.second = second; + } - public void setSecond(T second) { - this.second = second; - } + public void setFirst(T first) { + this.first = first; + } - public T getSecond() { - return second; - } + public T getFirst() { + return first; + } - 
public void set(T first, T second) { - this.first = first; - this.second = second; - } + public void setSecond(T second) { + this.second = second; + } - @Override - public boolean equals(Object obj) { - if (!(obj instanceof NumberCouple)) { - return false; - } + public T getSecond() { + return second; + } - var couple = (NumberCouple) obj; - if (!couple.first.equals(first)) { - return false; - } + public void set(T first, T second) { + this.first = first; + this.second = second; + } - return couple.second.equals(second); + @Override + public boolean equals(Object obj) { + if (!(obj instanceof NumberCouple)) { + return false; } - @JsonIgnore - public boolean isEmpty() { - return first.intValue() == 0 && second.intValue() == 0; + var couple = (NumberCouple) obj; + if (!couple.first.equals(first)) { + return false; } + + return couple.second.equals(second); + } + + @JsonIgnore + public boolean isEmpty() { + return first.intValue() == 0 && second.intValue() == 0; + } } diff --git a/photon-core/src/main/java/org/photonvision/common/util/numbers/NumberListUtils.java b/photon-core/src/main/java/org/photonvision/common/util/numbers/NumberListUtils.java index bd1272745c..1823c33e21 100644 --- a/photon-core/src/main/java/org/photonvision/common/util/numbers/NumberListUtils.java +++ b/photon-core/src/main/java/org/photonvision/common/util/numbers/NumberListUtils.java @@ -25,90 +25,90 @@ @SuppressWarnings("unused") public class NumberListUtils { - /** - * @param collection an ArrayList of Comparable objects - * @return the median of collection - */ - public static double median(List collection, Comparator comp) { - double result; - int n = collection.size() / 2; - - if (collection.size() % 2 == 0) // even number of items; find the middle two and average them - result = - (nthSmallest(collection, n - 1, comp).doubleValue() - + nthSmallest(collection, n, comp).doubleValue()) - / 2.0; - else // odd number of items; return the one in the middle - result = nthSmallest(collection, 
n, comp).doubleValue(); - - return result; + /** + * @param collection an ArrayList of Comparable objects + * @return the median of collection + */ + public static double median(List collection, Comparator comp) { + double result; + int n = collection.size() / 2; + + if (collection.size() % 2 == 0) // even number of items; find the middle two and average them + result = + (nthSmallest(collection, n - 1, comp).doubleValue() + + nthSmallest(collection, n, comp).doubleValue()) + / 2.0; + else // odd number of items; return the one in the middle + result = nthSmallest(collection, n, comp).doubleValue(); + + return result; + } + + public static String toString(List collection) { + return toString(collection, ""); + } + + public static String toString(List collection, String suffix) { + StringJoiner joiner = new StringJoiner(", "); + for (T x : collection) { + String s = x.doubleValue() + suffix; + joiner.add(s); } - - public static String toString(List collection) { - return toString(collection, ""); - } - - public static String toString(List collection, String suffix) { - StringJoiner joiner = new StringJoiner(", "); - for (T x : collection) { - String s = x.doubleValue() + suffix; - joiner.add(s); - } - return joiner.toString(); - } - - /** - * @param collection an ArrayList of Numbers - * @return the mean of collection - */ - public static double mean(final List collection) { - BigDecimal sum = BigDecimal.ZERO; - for (final Number number : collection) { - sum = sum.add(BigDecimal.valueOf(number.doubleValue())); - } - return (sum.doubleValue() / collection.size()); - } - - /** - * @param collection a collection of Comparable objects - * @param n the position of the desired object, using the ordering defined on the collection - * elements - * @return the nth smallest object - */ - public static T nthSmallest(List collection, int n, Comparator comp) { - T result, pivot; - ArrayList underPivot = new ArrayList<>(), - overPivot = new ArrayList<>(), - equalPivot = new 
ArrayList<>(); - - // choosing a pivot is a whole topic in itself. - // this implementation uses the simple strategy of grabbing something from the middle of the - // ArrayList. - - pivot = collection.get(n / 2); - - // split collection into 3 lists based on comparison with the pivot - - for (T obj : collection) { - int order = comp.compare(obj, pivot); - - if (order < 0) // obj < pivot - underPivot.add(obj); - else if (order > 0) // obj > pivot - overPivot.add(obj); - else // obj = pivot - equalPivot.add(obj); - } // for each obj in collection - - // recurse on the appropriate collection - - if (n < underPivot.size()) result = nthSmallest(underPivot, n, comp); - else if (n < underPivot.size() + equalPivot.size()) // equal to pivot; just return it - result = pivot; - else // everything in underPivot and equalPivot is too small. Adjust n accordingly in the - // recursion. - result = nthSmallest(overPivot, n - underPivot.size() - equalPivot.size(), comp); - - return result; + return joiner.toString(); + } + + /** + * @param collection an ArrayList of Numbers + * @return the mean of collection + */ + public static double mean(final List collection) { + BigDecimal sum = BigDecimal.ZERO; + for (final Number number : collection) { + sum = sum.add(BigDecimal.valueOf(number.doubleValue())); } + return (sum.doubleValue() / collection.size()); + } + + /** + * @param collection a collection of Comparable objects + * @param n the position of the desired object, using the ordering defined on the collection + * elements + * @return the nth smallest object + */ + public static T nthSmallest(List collection, int n, Comparator comp) { + T result, pivot; + ArrayList underPivot = new ArrayList<>(), + overPivot = new ArrayList<>(), + equalPivot = new ArrayList<>(); + + // choosing a pivot is a whole topic in itself. + // this implementation uses the simple strategy of grabbing something from the middle of the + // ArrayList. 
+ + pivot = collection.get(n / 2); + + // split collection into 3 lists based on comparison with the pivot + + for (T obj : collection) { + int order = comp.compare(obj, pivot); + + if (order < 0) // obj < pivot + underPivot.add(obj); + else if (order > 0) // obj > pivot + overPivot.add(obj); + else // obj = pivot + equalPivot.add(obj); + } // for each obj in collection + + // recurse on the appropriate collection + + if (n < underPivot.size()) result = nthSmallest(underPivot, n, comp); + else if (n < underPivot.size() + equalPivot.size()) // equal to pivot; just return it + result = pivot; + else // everything in underPivot and equalPivot is too small. Adjust n accordingly in the + // recursion. + result = nthSmallest(overPivot, n - underPivot.size() - equalPivot.size(), comp); + + return result; + } } diff --git a/photon-core/src/main/java/org/photonvision/raspi/LibCameraJNI.java b/photon-core/src/main/java/org/photonvision/raspi/LibCameraJNI.java index a75ae2e579..f4588d81c7 100644 --- a/photon-core/src/main/java/org/photonvision/raspi/LibCameraJNI.java +++ b/photon-core/src/main/java/org/photonvision/raspi/LibCameraJNI.java @@ -27,165 +27,165 @@ import org.photonvision.common.logging.Logger; public class LibCameraJNI { - private static boolean libraryLoaded = false; - private static final Logger logger = new Logger(LibCameraJNI.class, LogGroup.Camera); - - public static final Object CAMERA_LOCK = new Object(); - - public static synchronized void forceLoad() throws IOException { - if (libraryLoaded) return; - - try { - File libDirectory = Path.of("lib/").toFile(); - if (!libDirectory.exists()) { - Files.createDirectory(libDirectory.toPath()).toFile(); - } - - // We always extract the shared object (we could hash each so, but that's a lot of work) - URL resourceURL = LibCameraJNI.class.getResource("/nativelibraries/libphotonlibcamera.so"); - File libFile = Path.of("lib/libphotonlibcamera.so").toFile(); - try (InputStream in = resourceURL.openStream()) { - if 
(libFile.exists()) Files.delete(libFile.toPath()); - Files.copy(in, libFile.toPath()); - } catch (Exception e) { - logger.error("Could not extract the native library!"); - } - System.load(libFile.getAbsolutePath()); - - libraryLoaded = true; - logger.info("Successfully loaded libpicam shared object"); - } catch (UnsatisfiedLinkError e) { - logger.error("Couldn't load libpicam shared object"); - e.printStackTrace(); - } + private static boolean libraryLoaded = false; + private static final Logger logger = new Logger(LibCameraJNI.class, LogGroup.Camera); + + public static final Object CAMERA_LOCK = new Object(); + + public static synchronized void forceLoad() throws IOException { + if (libraryLoaded) return; + + try { + File libDirectory = Path.of("lib/").toFile(); + if (!libDirectory.exists()) { + Files.createDirectory(libDirectory.toPath()).toFile(); + } + + // We always extract the shared object (we could hash each so, but that's a lot of work) + URL resourceURL = LibCameraJNI.class.getResource("/nativelibraries/libphotonlibcamera.so"); + File libFile = Path.of("lib/libphotonlibcamera.so").toFile(); + try (InputStream in = resourceURL.openStream()) { + if (libFile.exists()) Files.delete(libFile.toPath()); + Files.copy(in, libFile.toPath()); + } catch (Exception e) { + logger.error("Could not extract the native library!"); + } + System.load(libFile.getAbsolutePath()); + + libraryLoaded = true; + logger.info("Successfully loaded libpicam shared object"); + } catch (UnsatisfiedLinkError e) { + logger.error("Couldn't load libpicam shared object"); + e.printStackTrace(); } - - public enum SensorModel { - Disconnected, - OV5647, // Picam v1 - IMX219, // Picam v2 - IMX477, // Picam HQ - OV9281, - OV7251, - Unknown; - - public String getFriendlyName() { - switch (this) { - case Disconnected: - return "Disconnected Camera"; - case OV5647: - return "Camera Module v1"; - case IMX219: - return "Camera Module v2"; - case IMX477: - return "HQ Camera"; - case OV9281: - return 
"OV9281"; - case OV7251: - return "OV7251"; - case Unknown: - default: - return "Unknown Camera"; - } - } + } + + public enum SensorModel { + Disconnected, + OV5647, // Picam v1 + IMX219, // Picam v2 + IMX477, // Picam HQ + OV9281, + OV7251, + Unknown; + + public String getFriendlyName() { + switch (this) { + case Disconnected: + return "Disconnected Camera"; + case OV5647: + return "Camera Module v1"; + case IMX219: + return "Camera Module v2"; + case IMX477: + return "HQ Camera"; + case OV9281: + return "OV9281"; + case OV7251: + return "OV7251"; + case Unknown: + default: + return "Unknown Camera"; + } } + } - public static SensorModel getSensorModel() { - int model = getSensorModelRaw(); - return SensorModel.values()[model]; - } + public static SensorModel getSensorModel() { + int model = getSensorModelRaw(); + return SensorModel.values()[model]; + } - public static boolean isSupported() { - return libraryLoaded - // && getSensorModel() != PicamJNI.SensorModel.Disconnected - // && Platform.isRaspberryPi() - && isLibraryWorking(); - } + public static boolean isSupported() { + return libraryLoaded + // && getSensorModel() != PicamJNI.SensorModel.Disconnected + // && Platform.isRaspberryPi() + && isLibraryWorking(); + } - private static native boolean isLibraryWorking(); + private static native boolean isLibraryWorking(); - public static native int getSensorModelRaw(); + public static native int getSensorModelRaw(); - // ======================================================== // + // ======================================================== // - /** - * Creates a new runner with a given width/height/fps - * - * @param width Camera video mode width in pixels - * @param height Camera video mode height in pixels - * @param fps Camera video mode FPS - * @return success of creating a camera object - */ - public static native boolean createCamera(int width, int height, int rotation); + /** + * Creates a new runner with a given width/height/fps + * + * @param width 
Camera video mode width in pixels + * @param height Camera video mode height in pixels + * @param fps Camera video mode FPS + * @return success of creating a camera object + */ + public static native boolean createCamera(int width, int height, int rotation); - /** - * Starts the camera thresholder and display threads running. Make sure that this function is - * called synchronously with stopCamera and returnFrame! - */ - public static native boolean startCamera(); + /** + * Starts the camera thresholder and display threads running. Make sure that this function is + * called synchronously with stopCamera and returnFrame! + */ + public static native boolean startCamera(); - /** Stops the camera runner. Make sure to call prior to destroying the camera! */ - public static native boolean stopCamera(); + /** Stops the camera runner. Make sure to call prior to destroying the camera! */ + public static native boolean stopCamera(); - // Destroy all native resources associated with a camera. Ensure stop is called prior! - public static native boolean destroyCamera(); + // Destroy all native resources associated with a camera. Ensure stop is called prior! 
+ public static native boolean destroyCamera(); - // ======================================================== // + // ======================================================== // - // Set thresholds on [0..1] - public static native boolean setThresholds( - double hl, double sl, double vl, double hu, double su, double vu, boolean hueInverted); + // Set thresholds on [0..1] + public static native boolean setThresholds( + double hl, double sl, double vl, double hu, double su, double vu, boolean hueInverted); - public static native boolean setAutoExposure(boolean doAutoExposure); + public static native boolean setAutoExposure(boolean doAutoExposure); - // Exposure time, in microseconds - public static native boolean setExposure(int exposureUs); + // Exposure time, in microseconds + public static native boolean setExposure(int exposureUs); - // Set brightness on [-1, 1] - public static native boolean setBrightness(double brightness); + // Set brightness on [-1, 1] + public static native boolean setBrightness(double brightness); - // Unknown ranges for red and blue AWB gain - public static native boolean setAwbGain(double red, double blue); + // Unknown ranges for red and blue AWB gain + public static native boolean setAwbGain(double red, double blue); - /** - * Get the time when the first pixel exposure was started, in the same timebase as libcamera gives - * the frame capture time. Units are nanoseconds. - */ - public static native long getFrameCaptureTime(); + /** + * Get the time when the first pixel exposure was started, in the same timebase as libcamera gives + * the frame capture time. Units are nanoseconds. + */ + public static native long getFrameCaptureTime(); - /** - * Get the current time, in the same timebase as libcamera gives the frame capture time. Units are - * nanoseconds. - */ - public static native long getLibcameraTimestamp(); + /** + * Get the current time, in the same timebase as libcamera gives the frame capture time. Units are + * nanoseconds. 
+ */ + public static native long getLibcameraTimestamp(); - public static native long setFramesToCopy(boolean copyIn, boolean copyOut); + public static native long setFramesToCopy(boolean copyIn, boolean copyOut); - // Analog gain multiplier to apply to all color channels, on [1, Big Number] - public static native boolean setAnalogGain(double analog); + // Analog gain multiplier to apply to all color channels, on [1, Big Number] + public static native boolean setAnalogGain(double analog); - /** Block until a new frame is available from native code. */ - public static native boolean awaitNewFrame(); + /** Block until a new frame is available from native code. */ + public static native boolean awaitNewFrame(); - /** - * Get a pointer to the most recent color mat generated. Call this immediately after - * awaitNewFrame, and call only once per new frame! - */ - public static native long takeColorFrame(); + /** + * Get a pointer to the most recent color mat generated. Call this immediately after + * awaitNewFrame, and call only once per new frame! + */ + public static native long takeColorFrame(); - /** - * Get a pointer to the most recent processed mat generated. Call this immediately after - * awaitNewFrame, and call only once per new frame! - */ - public static native long takeProcessedFrame(); + /** + * Get a pointer to the most recent processed mat generated. Call this immediately after + * awaitNewFrame, and call only once per new frame! + */ + public static native long takeProcessedFrame(); - /** - * Set the GPU processing type we should do. Enum of [none, HSV, greyscale, adaptive threshold]. - */ - public static native boolean setGpuProcessType(int type); + /** + * Set the GPU processing type we should do. Enum of [none, HSV, greyscale, adaptive threshold]. 
+ */ + public static native boolean setGpuProcessType(int type); - public static native int getGpuProcessType(); + public static native int getGpuProcessType(); - // Release a frame pointer back to the libcamera driver code to be filled again */ - // public static native long returnFrame(long frame); + // Release a frame pointer back to the libcamera driver code to be filled again */ + // public static native long returnFrame(long frame); } diff --git a/photon-core/src/main/java/org/photonvision/vision/apriltag/AprilTagFamily.java b/photon-core/src/main/java/org/photonvision/vision/apriltag/AprilTagFamily.java index 8fbda02683..40042080ef 100644 --- a/photon-core/src/main/java/org/photonvision/vision/apriltag/AprilTagFamily.java +++ b/photon-core/src/main/java/org/photonvision/vision/apriltag/AprilTagFamily.java @@ -18,17 +18,17 @@ package org.photonvision.vision.apriltag; public enum AprilTagFamily { - kTag36h11, - kTag25h9, - kTag16h5, - kTagCircle21h7, - kTagCircle49h12, - kTagStandard41h12, - kTagStandard52h13, - kTagCustom48h11; + kTag36h11, + kTag25h9, + kTag16h5, + kTagCircle21h7, + kTagCircle49h12, + kTagStandard41h12, + kTagStandard52h13, + kTagCustom48h11; - public String getNativeName() { - // We want to strip the leading kT and replace with "t" - return this.name().replaceFirst("kT", "t"); - } + public String getNativeName() { + // We want to strip the leading kT and replace with "t" + return this.name().replaceFirst("kT", "t"); + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/aruco/ArucoDetectionResult.java b/photon-core/src/main/java/org/photonvision/vision/aruco/ArucoDetectionResult.java index b59ae9e9e4..bf8916d30f 100644 --- a/photon-core/src/main/java/org/photonvision/vision/aruco/ArucoDetectionResult.java +++ b/photon-core/src/main/java/org/photonvision/vision/aruco/ArucoDetectionResult.java @@ -22,62 +22,62 @@ import org.photonvision.common.logging.Logger; public class ArucoDetectionResult { - private static final Logger 
logger = - new Logger(ArucoDetectionResult.class, LogGroup.VisionModule); - double[] xCorners; - double[] yCorners; + private static final Logger logger = + new Logger(ArucoDetectionResult.class, LogGroup.VisionModule); + double[] xCorners; + double[] yCorners; - int id; + int id; - double[] tvec, rvec; + double[] tvec, rvec; - public ArucoDetectionResult( - double[] xCorners, double[] yCorners, int id, double[] tvec, double[] rvec) { - this.xCorners = xCorners; - this.yCorners = yCorners; - this.id = id; - this.tvec = tvec; - this.rvec = rvec; - // logger.debug("Creating a new detection result: " + this.toString()); - } + public ArucoDetectionResult( + double[] xCorners, double[] yCorners, int id, double[] tvec, double[] rvec) { + this.xCorners = xCorners; + this.yCorners = yCorners; + this.id = id; + this.tvec = tvec; + this.rvec = rvec; + // logger.debug("Creating a new detection result: " + this.toString()); + } - public double[] getTvec() { - return tvec; - } + public double[] getTvec() { + return tvec; + } - public double[] getRvec() { - return rvec; - } + public double[] getRvec() { + return rvec; + } - public double[] getxCorners() { - return xCorners; - } + public double[] getxCorners() { + return xCorners; + } - public double[] getyCorners() { - return yCorners; - } + public double[] getyCorners() { + return yCorners; + } - public int getId() { - return id; - } + public int getId() { + return id; + } - public double getCenterX() { - return (xCorners[0] + xCorners[1] + xCorners[2] + xCorners[3]) * .25; - } + public double getCenterX() { + return (xCorners[0] + xCorners[1] + xCorners[2] + xCorners[3]) * .25; + } - public double getCenterY() { - return (yCorners[0] + yCorners[1] + yCorners[2] + yCorners[3]) * .25; - } + public double getCenterY() { + return (yCorners[0] + yCorners[1] + yCorners[2] + yCorners[3]) * .25; + } - @Override - public String toString() { - return "ArucoDetectionResult{" - + "xCorners=" - + Arrays.toString(xCorners) - + ", yCorners=" 
- + Arrays.toString(yCorners) - + ", id=" - + id - + '}'; - } + @Override + public String toString() { + return "ArucoDetectionResult{" + + "xCorners=" + + Arrays.toString(xCorners) + + ", yCorners=" + + Arrays.toString(yCorners) + + ", id=" + + id + + '}'; + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/aruco/ArucoDetectorParams.java b/photon-core/src/main/java/org/photonvision/vision/aruco/ArucoDetectorParams.java index 24ea8dafab..2a69eecd22 100644 --- a/photon-core/src/main/java/org/photonvision/vision/aruco/ArucoDetectorParams.java +++ b/photon-core/src/main/java/org/photonvision/vision/aruco/ArucoDetectorParams.java @@ -24,54 +24,54 @@ import org.photonvision.common.logging.Logger; public class ArucoDetectorParams { - private static final Logger logger = new Logger(PhotonArucoDetector.class, LogGroup.VisionModule); + private static final Logger logger = new Logger(PhotonArucoDetector.class, LogGroup.VisionModule); - private float m_decimate = -1; - private int m_iterations = -1; - private double m_accuracy = -1; + private float m_decimate = -1; + private int m_iterations = -1; + private double m_accuracy = -1; - DetectorParameters parameters = new DetectorParameters(); - ArucoDetector detector; + DetectorParameters parameters = new DetectorParameters(); + ArucoDetector detector; - public ArucoDetectorParams() { - setDecimation(1); - setCornerAccuracy(25); - setCornerRefinementMaxIterations(100); + public ArucoDetectorParams() { + setDecimation(1); + setCornerAccuracy(25); + setCornerRefinementMaxIterations(100); - detector = - new ArucoDetector( - Objdetect.getPredefinedDictionary(Objdetect.DICT_APRILTAG_16h5), parameters); - } + detector = + new ArucoDetector( + Objdetect.getPredefinedDictionary(Objdetect.DICT_APRILTAG_16h5), parameters); + } - public void setDecimation(float decimate) { - if (decimate == m_decimate) return; + public void setDecimation(float decimate) { + if (decimate == m_decimate) return; - logger.info("Setting 
decimation from " + m_decimate + " to " + decimate); + logger.info("Setting decimation from " + m_decimate + " to " + decimate); - // We only need to mutate the parameters -- the detector keeps a pointer to the parameters - // object internally, so it should automatically update - parameters.set_aprilTagQuadDecimate(decimate); - m_decimate = decimate; - } + // We only need to mutate the parameters -- the detector keeps a pointer to the parameters + // object internally, so it should automatically update + parameters.set_aprilTagQuadDecimate(decimate); + m_decimate = decimate; + } - public void setCornerRefinementMaxIterations(int iters) { - if (iters == m_iterations || iters <= 0) return; + public void setCornerRefinementMaxIterations(int iters) { + if (iters == m_iterations || iters <= 0) return; - parameters.set_cornerRefinementMethod(Objdetect.CORNER_REFINE_SUBPIX); - parameters.set_cornerRefinementMaxIterations(iters); // 200 + parameters.set_cornerRefinementMethod(Objdetect.CORNER_REFINE_SUBPIX); + parameters.set_cornerRefinementMaxIterations(iters); // 200 - m_iterations = iters; - } + m_iterations = iters; + } - public void setCornerAccuracy(double accuracy) { - if (accuracy == m_accuracy || accuracy <= 0) return; + public void setCornerAccuracy(double accuracy) { + if (accuracy == m_accuracy || accuracy <= 0) return; - parameters.set_cornerRefinementMinAccuracy( - accuracy / 1000.0); // divides by 1000 because the UI multiplies it by 1000 - m_accuracy = accuracy; - } + parameters.set_cornerRefinementMinAccuracy( + accuracy / 1000.0); // divides by 1000 because the UI multiplies it by 1000 + m_accuracy = accuracy; + } - public ArucoDetector getDetector() { - return detector; - } + public ArucoDetector getDetector() { + return detector; + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/aruco/PhotonArucoDetector.java b/photon-core/src/main/java/org/photonvision/vision/aruco/PhotonArucoDetector.java index 6bb10f7baa..f73694b514 100644 --- 
a/photon-core/src/main/java/org/photonvision/vision/aruco/PhotonArucoDetector.java +++ b/photon-core/src/main/java/org/photonvision/vision/aruco/PhotonArucoDetector.java @@ -31,102 +31,102 @@ import org.photonvision.vision.calibration.CameraCalibrationCoefficients; public class PhotonArucoDetector { - private static final Logger logger = new Logger(PhotonArucoDetector.class, LogGroup.VisionModule); + private static final Logger logger = new Logger(PhotonArucoDetector.class, LogGroup.VisionModule); - private static final Rotation3d ARUCO_BASE_ROTATION = - new Rotation3d(VecBuilder.fill(0, 0, 1), Units.degreesToRadians(180)); + private static final Rotation3d ARUCO_BASE_ROTATION = + new Rotation3d(VecBuilder.fill(0, 0, 1), Units.degreesToRadians(180)); - Mat ids; + Mat ids; - Mat tvecs; - Mat rvecs; - ArrayList corners; + Mat tvecs; + Mat rvecs; + ArrayList corners; - Mat cornerMat; - Translation3d translation; - Rotation3d rotation; - double timeStartDetect; - double timeEndDetect; - Pose3d tagPose; - double timeStartProcess; - double timeEndProcess; - double[] xCorners = new double[4]; - double[] yCorners = new double[4]; + Mat cornerMat; + Translation3d translation; + Rotation3d rotation; + double timeStartDetect; + double timeEndDetect; + Pose3d tagPose; + double timeStartProcess; + double timeEndProcess; + double[] xCorners = new double[4]; + double[] yCorners = new double[4]; - public PhotonArucoDetector() { - logger.debug("New Aruco Detector"); - ids = new Mat(); - tvecs = new Mat(); - rvecs = new Mat(); - corners = new ArrayList<>(); - tagPose = new Pose3d(); - translation = new Translation3d(); - rotation = new Rotation3d(); - } - - public ArucoDetectionResult[] detect( - Mat grayscaleImg, - float tagSize, - CameraCalibrationCoefficients coeffs, - ArucoDetector detector) { - detector.detectMarkers(grayscaleImg, corners, ids); - if (coeffs != null) { - Aruco.estimatePoseSingleMarkers( - corners, - tagSize, - coeffs.getCameraIntrinsicsMat(), - 
coeffs.getDistCoeffsMat(), - rvecs, - tvecs); - } + public PhotonArucoDetector() { + logger.debug("New Aruco Detector"); + ids = new Mat(); + tvecs = new Mat(); + rvecs = new Mat(); + corners = new ArrayList<>(); + tagPose = new Pose3d(); + translation = new Translation3d(); + rotation = new Rotation3d(); + } - ArucoDetectionResult[] toReturn = new ArucoDetectionResult[corners.size()]; - timeStartProcess = System.currentTimeMillis(); - for (int i = 0; i < corners.size(); i++) { - cornerMat = corners.get(i); - // logger.debug(cornerMat.dump()); - xCorners = - new double[] { - cornerMat.get(0, 0)[0], - cornerMat.get(0, 1)[0], - cornerMat.get(0, 2)[0], - cornerMat.get(0, 3)[0] - }; - yCorners = - new double[] { - cornerMat.get(0, 0)[1], - cornerMat.get(0, 1)[1], - cornerMat.get(0, 2)[1], - cornerMat.get(0, 3)[1] - }; - cornerMat.release(); + public ArucoDetectionResult[] detect( + Mat grayscaleImg, + float tagSize, + CameraCalibrationCoefficients coeffs, + ArucoDetector detector) { + detector.detectMarkers(grayscaleImg, corners, ids); + if (coeffs != null) { + Aruco.estimatePoseSingleMarkers( + corners, + tagSize, + coeffs.getCameraIntrinsicsMat(), + coeffs.getDistCoeffsMat(), + rvecs, + tvecs); + } - double[] tvec; - double[] rvec; - if (coeffs != null) { - // Need to apply a 180 rotation about Z - var origRvec = rvecs.get(i, 0); - var axisangle = VecBuilder.fill(origRvec[0], origRvec[1], origRvec[2]); - Rotation3d rotation = new Rotation3d(axisangle, axisangle.normF()); - var ocvRotation = ARUCO_BASE_ROTATION.rotateBy(rotation); + ArucoDetectionResult[] toReturn = new ArucoDetectionResult[corners.size()]; + timeStartProcess = System.currentTimeMillis(); + for (int i = 0; i < corners.size(); i++) { + cornerMat = corners.get(i); + // logger.debug(cornerMat.dump()); + xCorners = + new double[] { + cornerMat.get(0, 0)[0], + cornerMat.get(0, 1)[0], + cornerMat.get(0, 2)[0], + cornerMat.get(0, 3)[0] + }; + yCorners = + new double[] { + cornerMat.get(0, 0)[1], + 
cornerMat.get(0, 1)[1], + cornerMat.get(0, 2)[1], + cornerMat.get(0, 3)[1] + }; + cornerMat.release(); - var angle = ocvRotation.getAngle(); - var finalAxisAngle = ocvRotation.getAxis().times(angle); + double[] tvec; + double[] rvec; + if (coeffs != null) { + // Need to apply a 180 rotation about Z + var origRvec = rvecs.get(i, 0); + var axisangle = VecBuilder.fill(origRvec[0], origRvec[1], origRvec[2]); + Rotation3d rotation = new Rotation3d(axisangle, axisangle.normF()); + var ocvRotation = ARUCO_BASE_ROTATION.rotateBy(rotation); - tvec = tvecs.get(i, 0); - rvec = finalAxisAngle.getData(); - } else { - tvec = new double[] {0, 0, 0}; - rvec = new double[] {0, 0, 0}; - } + var angle = ocvRotation.getAngle(); + var finalAxisAngle = ocvRotation.getAxis().times(angle); - toReturn[i] = - new ArucoDetectionResult(xCorners, yCorners, (int) ids.get(i, 0)[0], tvec, rvec); - } - rvecs.release(); - tvecs.release(); - ids.release(); + tvec = tvecs.get(i, 0); + rvec = finalAxisAngle.getData(); + } else { + tvec = new double[] {0, 0, 0}; + rvec = new double[] {0, 0, 0}; + } - return toReturn; + toReturn[i] = + new ArucoDetectionResult(xCorners, yCorners, (int) ids.get(i, 0)[0], tvec, rvec); } + rvecs.release(); + tvecs.release(); + ids.release(); + + return toReturn; + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/calibration/CameraCalibrationCoefficients.java b/photon-core/src/main/java/org/photonvision/vision/calibration/CameraCalibrationCoefficients.java index 4b4ae5a8cc..08f064cd75 100644 --- a/photon-core/src/main/java/org/photonvision/vision/calibration/CameraCalibrationCoefficients.java +++ b/photon-core/src/main/java/org/photonvision/vision/calibration/CameraCalibrationCoefficients.java @@ -28,116 +28,116 @@ import org.photonvision.vision.opencv.Releasable; public class CameraCalibrationCoefficients implements Releasable { - @JsonProperty("resolution") - public final Size resolution; - - @JsonProperty("cameraIntrinsics") - public final JsonMat 
cameraIntrinsics; - - @JsonProperty("cameraExtrinsics") - @JsonAlias({"cameraExtrinsics", "distCoeffs"}) - public final JsonMat distCoeffs; - - @JsonProperty("perViewErrors") - public final double[] perViewErrors; - - @JsonProperty("standardDeviation") - public final double standardDeviation; - - @JsonIgnore private final double[] intrinsicsArr = new double[9]; - - @JsonIgnore private final double[] extrinsicsArr = new double[5]; - - @JsonCreator - public CameraCalibrationCoefficients( - @JsonProperty("resolution") Size resolution, - @JsonProperty("cameraIntrinsics") JsonMat cameraIntrinsics, - @JsonProperty("cameraExtrinsics") JsonMat distCoeffs, - @JsonProperty("perViewErrors") double[] perViewErrors, - @JsonProperty("standardDeviation") double standardDeviation) { - this.resolution = resolution; - this.cameraIntrinsics = cameraIntrinsics; - this.distCoeffs = distCoeffs; - this.perViewErrors = perViewErrors; - this.standardDeviation = standardDeviation; - - // do this once so gets are quick - getCameraIntrinsicsMat().get(0, 0, intrinsicsArr); - getDistCoeffsMat().get(0, 0, extrinsicsArr); - } - - @JsonIgnore - public Mat getCameraIntrinsicsMat() { - return cameraIntrinsics.getAsMat(); - } - - @JsonIgnore - public MatOfDouble getDistCoeffsMat() { - return distCoeffs.getAsMatOfDouble(); - } - - @JsonIgnore - public double[] getIntrinsicsArr() { - return intrinsicsArr; - } - - @JsonIgnore - public double[] getExtrinsicsArr() { - return extrinsicsArr; - } - - @JsonIgnore - public double[] getPerViewErrors() { - return perViewErrors; - } - - @JsonIgnore - public double getStandardDeviation() { - return standardDeviation; - } - - @Override - public void release() { - cameraIntrinsics.release(); - distCoeffs.release(); - } - - public static CameraCalibrationCoefficients parseFromCalibdbJson(JsonNode json) { - // camera_matrix is a row major, array of arrays - var cam_matrix = json.get("camera_matrix"); - - double[] cam_arr = - new double[] { - 
cam_matrix.get(0).get(0).doubleValue(), - cam_matrix.get(0).get(1).doubleValue(), - cam_matrix.get(0).get(2).doubleValue(), - cam_matrix.get(1).get(0).doubleValue(), - cam_matrix.get(1).get(1).doubleValue(), - cam_matrix.get(1).get(2).doubleValue(), - cam_matrix.get(2).get(0).doubleValue(), - cam_matrix.get(2).get(1).doubleValue(), - cam_matrix.get(2).get(2).doubleValue() - }; - - var dist_coefs = json.get("distortion_coefficients"); - - double[] dist_array = - new double[] { - dist_coefs.get(0).doubleValue(), - dist_coefs.get(1).doubleValue(), - dist_coefs.get(2).doubleValue(), - dist_coefs.get(3).doubleValue(), - dist_coefs.get(4).doubleValue(), - }; - - var cam_jsonmat = new JsonMat(3, 3, cam_arr); - var distortion_jsonmat = new JsonMat(1, 5, dist_array); - - var error = json.get("avg_reprojection_error").asDouble(); - var width = json.get("img_size").get(0).doubleValue(); - var height = json.get("img_size").get(1).doubleValue(); - - return new CameraCalibrationCoefficients( - new Size(width, height), cam_jsonmat, distortion_jsonmat, new double[] {error}, 0); - } + @JsonProperty("resolution") + public final Size resolution; + + @JsonProperty("cameraIntrinsics") + public final JsonMat cameraIntrinsics; + + @JsonProperty("cameraExtrinsics") + @JsonAlias({"cameraExtrinsics", "distCoeffs"}) + public final JsonMat distCoeffs; + + @JsonProperty("perViewErrors") + public final double[] perViewErrors; + + @JsonProperty("standardDeviation") + public final double standardDeviation; + + @JsonIgnore private final double[] intrinsicsArr = new double[9]; + + @JsonIgnore private final double[] extrinsicsArr = new double[5]; + + @JsonCreator + public CameraCalibrationCoefficients( + @JsonProperty("resolution") Size resolution, + @JsonProperty("cameraIntrinsics") JsonMat cameraIntrinsics, + @JsonProperty("cameraExtrinsics") JsonMat distCoeffs, + @JsonProperty("perViewErrors") double[] perViewErrors, + @JsonProperty("standardDeviation") double standardDeviation) { + 
this.resolution = resolution; + this.cameraIntrinsics = cameraIntrinsics; + this.distCoeffs = distCoeffs; + this.perViewErrors = perViewErrors; + this.standardDeviation = standardDeviation; + + // do this once so gets are quick + getCameraIntrinsicsMat().get(0, 0, intrinsicsArr); + getDistCoeffsMat().get(0, 0, extrinsicsArr); + } + + @JsonIgnore + public Mat getCameraIntrinsicsMat() { + return cameraIntrinsics.getAsMat(); + } + + @JsonIgnore + public MatOfDouble getDistCoeffsMat() { + return distCoeffs.getAsMatOfDouble(); + } + + @JsonIgnore + public double[] getIntrinsicsArr() { + return intrinsicsArr; + } + + @JsonIgnore + public double[] getExtrinsicsArr() { + return extrinsicsArr; + } + + @JsonIgnore + public double[] getPerViewErrors() { + return perViewErrors; + } + + @JsonIgnore + public double getStandardDeviation() { + return standardDeviation; + } + + @Override + public void release() { + cameraIntrinsics.release(); + distCoeffs.release(); + } + + public static CameraCalibrationCoefficients parseFromCalibdbJson(JsonNode json) { + // camera_matrix is a row major, array of arrays + var cam_matrix = json.get("camera_matrix"); + + double[] cam_arr = + new double[] { + cam_matrix.get(0).get(0).doubleValue(), + cam_matrix.get(0).get(1).doubleValue(), + cam_matrix.get(0).get(2).doubleValue(), + cam_matrix.get(1).get(0).doubleValue(), + cam_matrix.get(1).get(1).doubleValue(), + cam_matrix.get(1).get(2).doubleValue(), + cam_matrix.get(2).get(0).doubleValue(), + cam_matrix.get(2).get(1).doubleValue(), + cam_matrix.get(2).get(2).doubleValue() + }; + + var dist_coefs = json.get("distortion_coefficients"); + + double[] dist_array = + new double[] { + dist_coefs.get(0).doubleValue(), + dist_coefs.get(1).doubleValue(), + dist_coefs.get(2).doubleValue(), + dist_coefs.get(3).doubleValue(), + dist_coefs.get(4).doubleValue(), + }; + + var cam_jsonmat = new JsonMat(3, 3, cam_arr); + var distortion_jsonmat = new JsonMat(1, 5, dist_array); + + var error = 
json.get("avg_reprojection_error").asDouble(); + var width = json.get("img_size").get(0).doubleValue(); + var height = json.get("img_size").get(1).doubleValue(); + + return new CameraCalibrationCoefficients( + new Size(width, height), cam_jsonmat, distortion_jsonmat, new double[] {error}, 0); + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/calibration/JsonMat.java b/photon-core/src/main/java/org/photonvision/vision/calibration/JsonMat.java index 15a6d0b9e0..79a3aec9c6 100644 --- a/photon-core/src/main/java/org/photonvision/vision/calibration/JsonMat.java +++ b/photon-core/src/main/java/org/photonvision/vision/calibration/JsonMat.java @@ -30,100 +30,100 @@ import org.photonvision.vision.opencv.Releasable; public class JsonMat implements Releasable { - public final int rows; - public final int cols; - public final int type; - public final double[] data; - - // Cached matrices to avoid object recreation - @JsonIgnore private Mat wrappedMat = null; - @JsonIgnore private Matrix wpilibMat = null; - - private MatOfDouble wrappedMatOfDouble; - - public JsonMat(int rows, int cols, double[] data) { - this(rows, cols, CvType.CV_64FC1, data); - } - - public JsonMat( - @JsonProperty("rows") int rows, - @JsonProperty("cols") int cols, - @JsonProperty("type") int type, - @JsonProperty("data") double[] data) { - this.rows = rows; - this.cols = cols; - this.type = type; - this.data = data; - } - - private static boolean isCameraMatrixMat(Mat mat) { - return mat.type() == CvType.CV_64FC1 && mat.cols() == 3 && mat.rows() == 3; + public final int rows; + public final int cols; + public final int type; + public final double[] data; + + // Cached matrices to avoid object recreation + @JsonIgnore private Mat wrappedMat = null; + @JsonIgnore private Matrix wpilibMat = null; + + private MatOfDouble wrappedMatOfDouble; + + public JsonMat(int rows, int cols, double[] data) { + this(rows, cols, CvType.CV_64FC1, data); + } + + public JsonMat( + @JsonProperty("rows") int 
rows, + @JsonProperty("cols") int cols, + @JsonProperty("type") int type, + @JsonProperty("data") double[] data) { + this.rows = rows; + this.cols = cols; + this.type = type; + this.data = data; + } + + private static boolean isCameraMatrixMat(Mat mat) { + return mat.type() == CvType.CV_64FC1 && mat.cols() == 3 && mat.rows() == 3; + } + + private static boolean isDistortionCoeffsMat(Mat mat) { + return mat.type() == CvType.CV_64FC1 && mat.cols() == 5 && mat.rows() == 1; + } + + private static boolean isCalibrationMat(Mat mat) { + return isDistortionCoeffsMat(mat) || isCameraMatrixMat(mat); + } + + @JsonIgnore + public static double[] getDataFromMat(Mat mat) { + if (!isCalibrationMat(mat)) return null; + + double[] data = new double[(int) (mat.total() * mat.elemSize())]; + mat.get(0, 0, data); + + int dataLen = -1; + + if (isCameraMatrixMat(mat)) dataLen = 9; + if (isDistortionCoeffsMat(mat)) dataLen = 5; + + // truncate Mat data to correct number data points. + return Arrays.copyOfRange(data, 0, dataLen); + } + + public static JsonMat fromMat(Mat mat) { + if (!isCalibrationMat(mat)) return null; + return new JsonMat(mat.rows(), mat.cols(), getDataFromMat(mat)); + } + + @JsonIgnore + public Mat getAsMat() { + if (this.type != CvType.CV_64FC1) return null; + + if (wrappedMat == null) { + this.wrappedMat = new Mat(this.rows, this.cols, this.type); + this.wrappedMat.put(0, 0, this.data); } - - private static boolean isDistortionCoeffsMat(Mat mat) { - return mat.type() == CvType.CV_64FC1 && mat.cols() == 5 && mat.rows() == 1; - } - - private static boolean isCalibrationMat(Mat mat) { - return isDistortionCoeffsMat(mat) || isCameraMatrixMat(mat); - } - - @JsonIgnore - public static double[] getDataFromMat(Mat mat) { - if (!isCalibrationMat(mat)) return null; - - double[] data = new double[(int) (mat.total() * mat.elemSize())]; - mat.get(0, 0, data); - - int dataLen = -1; - - if (isCameraMatrixMat(mat)) dataLen = 9; - if (isDistortionCoeffsMat(mat)) dataLen = 5; - - // 
truncate Mat data to correct number data points. - return Arrays.copyOfRange(data, 0, dataLen); - } - - public static JsonMat fromMat(Mat mat) { - if (!isCalibrationMat(mat)) return null; - return new JsonMat(mat.rows(), mat.cols(), getDataFromMat(mat)); - } - - @JsonIgnore - public Mat getAsMat() { - if (this.type != CvType.CV_64FC1) return null; - - if (wrappedMat == null) { - this.wrappedMat = new Mat(this.rows, this.cols, this.type); - this.wrappedMat.put(0, 0, this.data); - } - return this.wrappedMat; - } - - @JsonIgnore - public MatOfDouble getAsMatOfDouble() { - if (this.wrappedMatOfDouble == null) { - this.wrappedMatOfDouble = new MatOfDouble(); - getAsMat().convertTo(wrappedMatOfDouble, CvType.CV_64F); - } - return this.wrappedMatOfDouble; - } - - @JsonIgnore - public Matrix getAsWpilibMat() { - if (wpilibMat == null) { - wpilibMat = new Matrix(new SimpleMatrix(rows, cols, true, data)); - } - return (Matrix) wpilibMat; - } - - @Override - public void release() { - getAsMat().release(); + return this.wrappedMat; + } + + @JsonIgnore + public MatOfDouble getAsMatOfDouble() { + if (this.wrappedMatOfDouble == null) { + this.wrappedMatOfDouble = new MatOfDouble(); + getAsMat().convertTo(wrappedMatOfDouble, CvType.CV_64F); } + return this.wrappedMatOfDouble; + } - public Packet populatePacket(Packet packet) { - packet.encode(this.data); - return packet; + @JsonIgnore + public Matrix getAsWpilibMat() { + if (wpilibMat == null) { + wpilibMat = new Matrix(new SimpleMatrix(rows, cols, true, data)); } + return (Matrix) wpilibMat; + } + + @Override + public void release() { + getAsMat().release(); + } + + public Packet populatePacket(Packet packet) { + packet.encode(this.data); + return packet; + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/camera/CameraQuirk.java b/photon-core/src/main/java/org/photonvision/vision/camera/CameraQuirk.java index 69f9c564cb..c28620d532 100644 --- 
a/photon-core/src/main/java/org/photonvision/vision/camera/CameraQuirk.java +++ b/photon-core/src/main/java/org/photonvision/vision/camera/CameraQuirk.java @@ -18,16 +18,16 @@ package org.photonvision.vision.camera; public enum CameraQuirk { - /** Camera settable for controllable image gain */ - Gain, - /** For the Raspberry Pi Camera */ - PiCam, - /** Cap at 100FPS for high-bandwidth cameras */ - FPSCap100, - /** Separate red/blue gain controls available */ - AWBGain, - /** Will not work with photonvision - Logitec C270 at least */ - CompletelyBroken, - /** Has adjustable focus and autofocus switch */ - AdjustableFocus, + /** Camera settable for controllable image gain */ + Gain, + /** For the Raspberry Pi Camera */ + PiCam, + /** Cap at 100FPS for high-bandwidth cameras */ + FPSCap100, + /** Separate red/blue gain controls available */ + AWBGain, + /** Will not work with photonvision - Logitec C270 at least */ + CompletelyBroken, + /** Has adjustable focus and autofocus switch */ + AdjustableFocus, } diff --git a/photon-core/src/main/java/org/photonvision/vision/camera/CameraType.java b/photon-core/src/main/java/org/photonvision/vision/camera/CameraType.java index 8ce16ca275..d63221e1bd 100644 --- a/photon-core/src/main/java/org/photonvision/vision/camera/CameraType.java +++ b/photon-core/src/main/java/org/photonvision/vision/camera/CameraType.java @@ -18,7 +18,7 @@ package org.photonvision.vision.camera; public enum CameraType { - UsbCamera, - HttpCamera, - ZeroCopyPicam + UsbCamera, + HttpCamera, + ZeroCopyPicam } diff --git a/photon-core/src/main/java/org/photonvision/vision/camera/FileVisionSource.java b/photon-core/src/main/java/org/photonvision/vision/camera/FileVisionSource.java index 951135fcf4..2aeb57787b 100644 --- a/photon-core/src/main/java/org/photonvision/vision/camera/FileVisionSource.java +++ b/photon-core/src/main/java/org/photonvision/vision/camera/FileVisionSource.java @@ -29,89 +29,89 @@ import 
org.photonvision.vision.processes.VisionSourceSettables; public class FileVisionSource extends VisionSource { - private final FileFrameProvider frameProvider; - private final FileSourceSettables settables; - - public FileVisionSource(CameraConfiguration cameraConfiguration) { - super(cameraConfiguration); - var calibration = - !cameraConfiguration.calibrations.isEmpty() - ? cameraConfiguration.calibrations.get(0) - : null; - frameProvider = - new FileFrameProvider( - Path.of(cameraConfiguration.path), - cameraConfiguration.FOV, - FileFrameProvider.MAX_FPS, - calibration); - settables = - new FileSourceSettables(cameraConfiguration, frameProvider.get().frameStaticProperties); + private final FileFrameProvider frameProvider; + private final FileSourceSettables settables; + + public FileVisionSource(CameraConfiguration cameraConfiguration) { + super(cameraConfiguration); + var calibration = + !cameraConfiguration.calibrations.isEmpty() + ? cameraConfiguration.calibrations.get(0) + : null; + frameProvider = + new FileFrameProvider( + Path.of(cameraConfiguration.path), + cameraConfiguration.FOV, + FileFrameProvider.MAX_FPS, + calibration); + settables = + new FileSourceSettables(cameraConfiguration, frameProvider.get().frameStaticProperties); + } + + public FileVisionSource(String name, String imagePath, double fov) { + super(new CameraConfiguration(name, imagePath)); + frameProvider = new FileFrameProvider(imagePath, fov); + settables = + new FileSourceSettables(cameraConfiguration, frameProvider.get().frameStaticProperties); + } + + @Override + public FrameProvider getFrameProvider() { + return frameProvider; + } + + @Override + public VisionSourceSettables getSettables() { + return settables; + } + + @Override + public boolean isVendorCamera() { + return false; + } + + private static class FileSourceSettables extends VisionSourceSettables { + private final VideoMode videoMode; + + private final HashMap videoModes = new HashMap<>(); + + FileSourceSettables( + 
CameraConfiguration cameraConfiguration, FrameStaticProperties frameStaticProperties) { + super(cameraConfiguration); + this.frameStaticProperties = frameStaticProperties; + videoMode = + new VideoMode( + PixelFormat.kMJPEG, + frameStaticProperties.imageWidth, + frameStaticProperties.imageHeight, + 30); + videoModes.put(0, videoMode); } - public FileVisionSource(String name, String imagePath, double fov) { - super(new CameraConfiguration(name, imagePath)); - frameProvider = new FileFrameProvider(imagePath, fov); - settables = - new FileSourceSettables(cameraConfiguration, frameProvider.get().frameStaticProperties); - } + @Override + public void setExposure(double exposure) {} + + public void setAutoExposure(boolean cameraAutoExposure) {} @Override - public FrameProvider getFrameProvider() { - return frameProvider; - } + public void setBrightness(int brightness) {} @Override - public VisionSourceSettables getSettables() { - return settables; + public void setGain(int gain) {} + + @Override + public VideoMode getCurrentVideoMode() { + return videoMode; } @Override - public boolean isVendorCamera() { - return false; + protected void setVideoModeInternal(VideoMode videoMode) { + // Do nothing } - private static class FileSourceSettables extends VisionSourceSettables { - private final VideoMode videoMode; - - private final HashMap videoModes = new HashMap<>(); - - FileSourceSettables( - CameraConfiguration cameraConfiguration, FrameStaticProperties frameStaticProperties) { - super(cameraConfiguration); - this.frameStaticProperties = frameStaticProperties; - videoMode = - new VideoMode( - PixelFormat.kMJPEG, - frameStaticProperties.imageWidth, - frameStaticProperties.imageHeight, - 30); - videoModes.put(0, videoMode); - } - - @Override - public void setExposure(double exposure) {} - - public void setAutoExposure(boolean cameraAutoExposure) {} - - @Override - public void setBrightness(int brightness) {} - - @Override - public void setGain(int gain) {} - - @Override - 
public VideoMode getCurrentVideoMode() { - return videoMode; - } - - @Override - protected void setVideoModeInternal(VideoMode videoMode) { - // Do nothing - } - - @Override - public HashMap getAllVideoModes() { - return videoModes; - } + @Override + public HashMap getAllVideoModes() { + return videoModes; } + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/camera/LibcameraGpuSettables.java b/photon-core/src/main/java/org/photonvision/vision/camera/LibcameraGpuSettables.java index 7e0bd5c5c7..cd052dbb96 100644 --- a/photon-core/src/main/java/org/photonvision/vision/camera/LibcameraGpuSettables.java +++ b/photon-core/src/main/java/org/photonvision/vision/camera/LibcameraGpuSettables.java @@ -28,216 +28,216 @@ import org.photonvision.vision.processes.VisionSourceSettables; public class LibcameraGpuSettables extends VisionSourceSettables { - private FPSRatedVideoMode currentVideoMode; - private double lastManualExposure = 50; - private int lastBrightness = 50; - private boolean lastAutoExposureActive; - private int lastGain = 50; - private Pair lastAwbGains = new Pair<>(18, 18); - private boolean m_initialized = false; + private FPSRatedVideoMode currentVideoMode; + private double lastManualExposure = 50; + private int lastBrightness = 50; + private boolean lastAutoExposureActive; + private int lastGain = 50; + private Pair lastAwbGains = new Pair<>(18, 18); + private boolean m_initialized = false; - private final LibCameraJNI.SensorModel sensorModel; + private final LibCameraJNI.SensorModel sensorModel; - private ImageRotationMode m_rotationMode; + private ImageRotationMode m_rotationMode; - public void setRotation(ImageRotationMode rotationMode) { - if (rotationMode != m_rotationMode) { - m_rotationMode = rotationMode; + public void setRotation(ImageRotationMode rotationMode) { + if (rotationMode != m_rotationMode) { + m_rotationMode = rotationMode; - setVideoModeInternal(getCurrentVideoMode()); - } + setVideoModeInternal(getCurrentVideoMode()); } 
- - public LibcameraGpuSettables(CameraConfiguration configuration) { - super(configuration); - - videoModes = new HashMap<>(); - - sensorModel = LibCameraJNI.getSensorModel(); - - if (sensorModel == LibCameraJNI.SensorModel.IMX219) { - // Settings for the IMX219 sensor, which is used on the Pi Camera Module v2 - videoModes.put( - 0, new FPSRatedVideoMode(VideoMode.PixelFormat.kUnknown, 320, 240, 120, 120, .39)); - videoModes.put( - 1, new FPSRatedVideoMode(VideoMode.PixelFormat.kUnknown, 320, 240, 30, 30, .39)); - videoModes.put( - 2, new FPSRatedVideoMode(VideoMode.PixelFormat.kUnknown, 640, 480, 65, 90, .39)); - videoModes.put( - 3, new FPSRatedVideoMode(VideoMode.PixelFormat.kUnknown, 640, 480, 30, 30, .39)); - // TODO: fix 1280x720 in the native code and re-add it - videoModes.put( - 4, new FPSRatedVideoMode(VideoMode.PixelFormat.kUnknown, 1920, 1080, 15, 20, .53)); - videoModes.put( - 5, new FPSRatedVideoMode(VideoMode.PixelFormat.kUnknown, 3280 / 2, 2464 / 2, 15, 20, 1)); - videoModes.put( - 6, new FPSRatedVideoMode(VideoMode.PixelFormat.kUnknown, 3280 / 4, 2464 / 4, 15, 20, 1)); - } else if (sensorModel == LibCameraJNI.SensorModel.OV9281) { - videoModes.put( - 0, new FPSRatedVideoMode(VideoMode.PixelFormat.kUnknown, 320, 240, 30, 30, .39)); - videoModes.put( - 1, new FPSRatedVideoMode(VideoMode.PixelFormat.kUnknown, 1280 / 2, 800 / 2, 60, 60, 1)); - videoModes.put( - 2, new FPSRatedVideoMode(VideoMode.PixelFormat.kUnknown, 640, 480, 65, 90, .39)); - videoModes.put( - 3, new FPSRatedVideoMode(VideoMode.PixelFormat.kUnknown, 1280, 800, 60, 60, 1)); - - } else { - if (sensorModel == LibCameraJNI.SensorModel.IMX477) { - LibcameraGpuSource.logger.warn( - "It appears you are using a Pi HQ Camera. This camera is not officially supported. 
You will have to set your camera FOV differently based on resolution."); - } else if (sensorModel == LibCameraJNI.SensorModel.Unknown) { - LibcameraGpuSource.logger.warn( - "You have an unknown sensor connected to your Pi over CSI! This is likely a bug. If it is not, then you will have to set your camera FOV differently based on resolution."); - } - - // Settings for the OV5647 sensor, which is used by the Pi Camera Module v1 - videoModes.put(0, new FPSRatedVideoMode(VideoMode.PixelFormat.kUnknown, 320, 240, 90, 90, 1)); - videoModes.put(1, new FPSRatedVideoMode(VideoMode.PixelFormat.kUnknown, 640, 480, 85, 90, 1)); - videoModes.put( - 2, new FPSRatedVideoMode(VideoMode.PixelFormat.kUnknown, 960, 720, 45, 49, 0.74)); - // Half the size of the active areas on the OV5647 - videoModes.put( - 3, new FPSRatedVideoMode(VideoMode.PixelFormat.kUnknown, 2592 / 2, 1944 / 2, 20, 20, 1)); - videoModes.put( - 4, new FPSRatedVideoMode(VideoMode.PixelFormat.kUnknown, 1280, 720, 30, 45, 0.91)); - videoModes.put( - 5, new FPSRatedVideoMode(VideoMode.PixelFormat.kUnknown, 1920, 1080, 15, 20, 0.72)); - } - - // TODO need to add more video modes for new sensors here - - currentVideoMode = (FPSRatedVideoMode) videoModes.get(0); + } + + public LibcameraGpuSettables(CameraConfiguration configuration) { + super(configuration); + + videoModes = new HashMap<>(); + + sensorModel = LibCameraJNI.getSensorModel(); + + if (sensorModel == LibCameraJNI.SensorModel.IMX219) { + // Settings for the IMX219 sensor, which is used on the Pi Camera Module v2 + videoModes.put( + 0, new FPSRatedVideoMode(VideoMode.PixelFormat.kUnknown, 320, 240, 120, 120, .39)); + videoModes.put( + 1, new FPSRatedVideoMode(VideoMode.PixelFormat.kUnknown, 320, 240, 30, 30, .39)); + videoModes.put( + 2, new FPSRatedVideoMode(VideoMode.PixelFormat.kUnknown, 640, 480, 65, 90, .39)); + videoModes.put( + 3, new FPSRatedVideoMode(VideoMode.PixelFormat.kUnknown, 640, 480, 30, 30, .39)); + // TODO: fix 1280x720 in the native code 
and re-add it + videoModes.put( + 4, new FPSRatedVideoMode(VideoMode.PixelFormat.kUnknown, 1920, 1080, 15, 20, .53)); + videoModes.put( + 5, new FPSRatedVideoMode(VideoMode.PixelFormat.kUnknown, 3280 / 2, 2464 / 2, 15, 20, 1)); + videoModes.put( + 6, new FPSRatedVideoMode(VideoMode.PixelFormat.kUnknown, 3280 / 4, 2464 / 4, 15, 20, 1)); + } else if (sensorModel == LibCameraJNI.SensorModel.OV9281) { + videoModes.put( + 0, new FPSRatedVideoMode(VideoMode.PixelFormat.kUnknown, 320, 240, 30, 30, .39)); + videoModes.put( + 1, new FPSRatedVideoMode(VideoMode.PixelFormat.kUnknown, 1280 / 2, 800 / 2, 60, 60, 1)); + videoModes.put( + 2, new FPSRatedVideoMode(VideoMode.PixelFormat.kUnknown, 640, 480, 65, 90, .39)); + videoModes.put( + 3, new FPSRatedVideoMode(VideoMode.PixelFormat.kUnknown, 1280, 800, 60, 60, 1)); + + } else { + if (sensorModel == LibCameraJNI.SensorModel.IMX477) { + LibcameraGpuSource.logger.warn( + "It appears you are using a Pi HQ Camera. This camera is not officially supported. You will have to set your camera FOV differently based on resolution."); + } else if (sensorModel == LibCameraJNI.SensorModel.Unknown) { + LibcameraGpuSource.logger.warn( + "You have an unknown sensor connected to your Pi over CSI! This is likely a bug. 
If it is not, then you will have to set your camera FOV differently based on resolution."); + } + + // Settings for the OV5647 sensor, which is used by the Pi Camera Module v1 + videoModes.put(0, new FPSRatedVideoMode(VideoMode.PixelFormat.kUnknown, 320, 240, 90, 90, 1)); + videoModes.put(1, new FPSRatedVideoMode(VideoMode.PixelFormat.kUnknown, 640, 480, 85, 90, 1)); + videoModes.put( + 2, new FPSRatedVideoMode(VideoMode.PixelFormat.kUnknown, 960, 720, 45, 49, 0.74)); + // Half the size of the active areas on the OV5647 + videoModes.put( + 3, new FPSRatedVideoMode(VideoMode.PixelFormat.kUnknown, 2592 / 2, 1944 / 2, 20, 20, 1)); + videoModes.put( + 4, new FPSRatedVideoMode(VideoMode.PixelFormat.kUnknown, 1280, 720, 30, 45, 0.91)); + videoModes.put( + 5, new FPSRatedVideoMode(VideoMode.PixelFormat.kUnknown, 1920, 1080, 15, 20, 0.72)); } - @Override - public double getFOV() { - return getCurrentVideoMode().fovMultiplier * getConfiguration().FOV; - } + // TODO need to add more video modes for new sensors here - @Override - public void setAutoExposure(boolean cameraAutoExposure) { - lastAutoExposureActive = cameraAutoExposure; - LibCameraJNI.setAutoExposure(cameraAutoExposure); - } + currentVideoMode = (FPSRatedVideoMode) videoModes.get(0); + } - @Override - public void setExposure(double exposure) { - if (exposure < 0.0 || lastAutoExposureActive) { - // Auto-exposure is active right now, don't set anything. - return; - } + @Override + public double getFOV() { + return getCurrentVideoMode().fovMultiplier * getConfiguration().FOV; + } - // HACKS! - // If we set exposure too low, libcamera crashes or slows down - // Very weird and smelly - // For now, band-aid this by just not setting it lower than the "it breaks" limit - // is different depending on camera. 
- if (sensorModel == LibCameraJNI.SensorModel.OV9281) { - if (exposure < 6.0) { - exposure = 6.0; - } - } else if (sensorModel == LibCameraJNI.SensorModel.OV5647) { - if (exposure < 0.7) { - exposure = 0.7; - } - } + @Override + public void setAutoExposure(boolean cameraAutoExposure) { + lastAutoExposureActive = cameraAutoExposure; + LibCameraJNI.setAutoExposure(cameraAutoExposure); + } - lastManualExposure = exposure; - var success = LibCameraJNI.setExposure((int) Math.round(exposure) * 800); - if (!success) LibcameraGpuSource.logger.warn("Couldn't set Pi Camera exposure"); + @Override + public void setExposure(double exposure) { + if (exposure < 0.0 || lastAutoExposureActive) { + // Auto-exposure is active right now, don't set anything. + return; } - @Override - public void setBrightness(int brightness) { - lastBrightness = brightness; - double realBrightness = MathUtils.map(brightness, 0.0, 100.0, -1.0, 1.0); - var success = LibCameraJNI.setBrightness(realBrightness); - if (!success) LibcameraGpuSource.logger.warn("Couldn't set Pi Camera brightness"); + // HACKS! + // If we set exposure too low, libcamera crashes or slows down + // Very weird and smelly + // For now, band-aid this by just not setting it lower than the "it breaks" limit + // is different depending on camera. + if (sensorModel == LibCameraJNI.SensorModel.OV9281) { + if (exposure < 6.0) { + exposure = 6.0; + } + } else if (sensorModel == LibCameraJNI.SensorModel.OV5647) { + if (exposure < 0.7) { + exposure = 0.7; + } } - @Override - public void setGain(int gain) { - lastGain = gain; - // TODO units here seem odd -- 5ish seems legit? 
So divide by 10 - var success = LibCameraJNI.setAnalogGain(gain / 10.0); - if (!success) LibcameraGpuSource.logger.warn("Couldn't set Pi Camera gain"); + lastManualExposure = exposure; + var success = LibCameraJNI.setExposure((int) Math.round(exposure) * 800); + if (!success) LibcameraGpuSource.logger.warn("Couldn't set Pi Camera exposure"); + } + + @Override + public void setBrightness(int brightness) { + lastBrightness = brightness; + double realBrightness = MathUtils.map(brightness, 0.0, 100.0, -1.0, 1.0); + var success = LibCameraJNI.setBrightness(realBrightness); + if (!success) LibcameraGpuSource.logger.warn("Couldn't set Pi Camera brightness"); + } + + @Override + public void setGain(int gain) { + lastGain = gain; + // TODO units here seem odd -- 5ish seems legit? So divide by 10 + var success = LibCameraJNI.setAnalogGain(gain / 10.0); + if (!success) LibcameraGpuSource.logger.warn("Couldn't set Pi Camera gain"); + } + + @Override + public void setRedGain(int red) { + if (sensorModel != LibCameraJNI.SensorModel.OV9281) { + lastAwbGains = Pair.of(red, lastAwbGains.getSecond()); + setAwbGain(lastAwbGains.getFirst(), lastAwbGains.getSecond()); } + } - @Override - public void setRedGain(int red) { - if (sensorModel != LibCameraJNI.SensorModel.OV9281) { - lastAwbGains = Pair.of(red, lastAwbGains.getSecond()); - setAwbGain(lastAwbGains.getFirst(), lastAwbGains.getSecond()); - } + @Override + public void setBlueGain(int blue) { + if (sensorModel != LibCameraJNI.SensorModel.OV9281) { + lastAwbGains = Pair.of(lastAwbGains.getFirst(), blue); + setAwbGain(lastAwbGains.getFirst(), lastAwbGains.getSecond()); } + } - @Override - public void setBlueGain(int blue) { - if (sensorModel != LibCameraJNI.SensorModel.OV9281) { - lastAwbGains = Pair.of(lastAwbGains.getFirst(), blue); - setAwbGain(lastAwbGains.getFirst(), lastAwbGains.getSecond()); - } + public void setAwbGain(int red, int blue) { + if (sensorModel != LibCameraJNI.SensorModel.OV9281) { + var success = 
LibCameraJNI.setAwbGain(red / 10.0, blue / 10.0); + if (!success) LibcameraGpuSource.logger.warn("Couldn't set Pi Camera AWB gains"); } - - public void setAwbGain(int red, int blue) { - if (sensorModel != LibCameraJNI.SensorModel.OV9281) { - var success = LibCameraJNI.setAwbGain(red / 10.0, blue / 10.0); - if (!success) LibcameraGpuSource.logger.warn("Couldn't set Pi Camera AWB gains"); + } + + @Override + public FPSRatedVideoMode getCurrentVideoMode() { + return currentVideoMode; + } + + @Override + protected void setVideoModeInternal(VideoMode videoMode) { + var mode = (FPSRatedVideoMode) videoMode; + + // We need to make sure that other threads don't try to do anything funny while we're recreating + // the camera + synchronized (LibCameraJNI.CAMERA_LOCK) { + if (m_initialized) { + logger.debug("Stopping libcamera"); + if (!LibCameraJNI.stopCamera()) { + logger.error("Couldn't stop a zero copy Pi Camera while switching video modes"); } - } - - @Override - public FPSRatedVideoMode getCurrentVideoMode() { - return currentVideoMode; - } - - @Override - protected void setVideoModeInternal(VideoMode videoMode) { - var mode = (FPSRatedVideoMode) videoMode; - - // We need to make sure that other threads don't try to do anything funny while we're recreating - // the camera - synchronized (LibCameraJNI.CAMERA_LOCK) { - if (m_initialized) { - logger.debug("Stopping libcamera"); - if (!LibCameraJNI.stopCamera()) { - logger.error("Couldn't stop a zero copy Pi Camera while switching video modes"); - } - logger.debug("Destroying libcamera"); - if (!LibCameraJNI.destroyCamera()) { - logger.error("Couldn't destroy a zero copy Pi Camera while switching video modes"); - } - } - - logger.debug("Creating libcamera"); - if (!LibCameraJNI.createCamera( - mode.width, mode.height, (m_rotationMode == ImageRotationMode.DEG_180 ? 
180 : 0))) { - logger.error("Couldn't create a zero copy Pi Camera while switching video modes"); - } - logger.debug("Starting libcamera"); - if (!LibCameraJNI.startCamera()) { - logger.error("Couldn't start a zero copy Pi Camera while switching video modes"); - } - - m_initialized = true; + logger.debug("Destroying libcamera"); + if (!LibCameraJNI.destroyCamera()) { + logger.error("Couldn't destroy a zero copy Pi Camera while switching video modes"); } + } + + logger.debug("Creating libcamera"); + if (!LibCameraJNI.createCamera( + mode.width, mode.height, (m_rotationMode == ImageRotationMode.DEG_180 ? 180 : 0))) { + logger.error("Couldn't create a zero copy Pi Camera while switching video modes"); + } + logger.debug("Starting libcamera"); + if (!LibCameraJNI.startCamera()) { + logger.error("Couldn't start a zero copy Pi Camera while switching video modes"); + } + + m_initialized = true; + } - // We don't store last settings on the native side, and when you change video mode these get - // reset on MMAL's end - setExposure(lastManualExposure); - setAutoExposure(lastAutoExposureActive); - setBrightness(lastBrightness); - setGain(lastGain); - setAwbGain(lastAwbGains.getFirst(), lastAwbGains.getSecond()); + // We don't store last settings on the native side, and when you change video mode these get + // reset on MMAL's end + setExposure(lastManualExposure); + setAutoExposure(lastAutoExposureActive); + setBrightness(lastBrightness); + setGain(lastGain); + setAwbGain(lastAwbGains.getFirst(), lastAwbGains.getSecond()); - LibCameraJNI.setFramesToCopy(true, true); + LibCameraJNI.setFramesToCopy(true, true); - currentVideoMode = mode; - } + currentVideoMode = mode; + } - @Override - public HashMap getAllVideoModes() { - return videoModes; - } + @Override + public HashMap getAllVideoModes() { + return videoModes; + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/camera/LibcameraGpuSource.java 
b/photon-core/src/main/java/org/photonvision/vision/camera/LibcameraGpuSource.java index 7e4330e4d7..5ab1ecb312 100644 --- a/photon-core/src/main/java/org/photonvision/vision/camera/LibcameraGpuSource.java +++ b/photon-core/src/main/java/org/photonvision/vision/camera/LibcameraGpuSource.java @@ -28,60 +28,60 @@ import org.photonvision.vision.processes.VisionSourceSettables; public class LibcameraGpuSource extends VisionSource { - static final Logger logger = new Logger(LibcameraGpuSource.class, LogGroup.Camera); + static final Logger logger = new Logger(LibcameraGpuSource.class, LogGroup.Camera); - private final LibcameraGpuSettables settables; - private final LibcameraGpuFrameProvider frameProvider; + private final LibcameraGpuSettables settables; + private final LibcameraGpuFrameProvider frameProvider; - public LibcameraGpuSource(CameraConfiguration configuration) { - super(configuration); - if (configuration.cameraType != CameraType.ZeroCopyPicam) { - throw new IllegalArgumentException( - "GPUAcceleratedPicamSource only accepts CameraConfigurations with type Picam"); - } - - settables = new LibcameraGpuSettables(configuration); - frameProvider = new LibcameraGpuFrameProvider(settables); + public LibcameraGpuSource(CameraConfiguration configuration) { + super(configuration); + if (configuration.cameraType != CameraType.ZeroCopyPicam) { + throw new IllegalArgumentException( + "GPUAcceleratedPicamSource only accepts CameraConfigurations with type Picam"); } - @Override - public FrameProvider getFrameProvider() { - return frameProvider; - } + settables = new LibcameraGpuSettables(configuration); + frameProvider = new LibcameraGpuFrameProvider(settables); + } - @Override - public VisionSourceSettables getSettables() { - return settables; - } + @Override + public FrameProvider getFrameProvider() { + return frameProvider; + } - /** - * On the OV5649 the actual FPS we want to request from the GPU can be higher than the FPS that we - * can do after processing. 
On the IMX219 these FPSes match pretty closely, except for the - * 1280x720 mode. We use this to present a rated FPS to the user that's lower than the actual FPS - * we request from the GPU. This is important for setting user expectations, and is also used by - * the frontend to detect and explain FPS drops. This class should ONLY be used by Picam video - * modes! This is to make sure it shows up nice in the frontend - */ - public static class FPSRatedVideoMode extends VideoMode { - public final int fpsActual; - public final double fovMultiplier; + @Override + public VisionSourceSettables getSettables() { + return settables; + } - public FPSRatedVideoMode( - PixelFormat pixelFormat, - int width, - int height, - int ratedFPS, - int actualFPS, - double fovMultiplier) { - super(pixelFormat, width, height, ratedFPS); + /** + * On the OV5649 the actual FPS we want to request from the GPU can be higher than the FPS that we + * can do after processing. On the IMX219 these FPSes match pretty closely, except for the + * 1280x720 mode. We use this to present a rated FPS to the user that's lower than the actual FPS + * we request from the GPU. This is important for setting user expectations, and is also used by + * the frontend to detect and explain FPS drops. This class should ONLY be used by Picam video + * modes! 
This is to make sure it shows up nice in the frontend + */ + public static class FPSRatedVideoMode extends VideoMode { + public final int fpsActual; + public final double fovMultiplier; - this.fpsActual = actualFPS; - this.fovMultiplier = fovMultiplier; - } - } + public FPSRatedVideoMode( + PixelFormat pixelFormat, + int width, + int height, + int ratedFPS, + int actualFPS, + double fovMultiplier) { + super(pixelFormat, width, height, ratedFPS); - @Override - public boolean isVendorCamera() { - return ConfigManager.getInstance().getConfig().getHardwareConfig().hasPresetFOV(); + this.fpsActual = actualFPS; + this.fovMultiplier = fovMultiplier; } + } + + @Override + public boolean isVendorCamera() { + return ConfigManager.getInstance().getConfig().getHardwareConfig().hasPresetFOV(); + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/camera/QuirkyCamera.java b/photon-core/src/main/java/org/photonvision/vision/camera/QuirkyCamera.java index da2dc835cb..fa5bbb2d0b 100644 --- a/photon-core/src/main/java/org/photonvision/vision/camera/QuirkyCamera.java +++ b/photon-core/src/main/java/org/photonvision/vision/camera/QuirkyCamera.java @@ -22,115 +22,115 @@ import java.util.Objects; public class QuirkyCamera { - private static final List quirkyCameras = - List.of( - new QuirkyCamera( - 0x9331, - 0x5A3, - CameraQuirk.CompletelyBroken), // Chris's older generic "Logitec HD Webcam" - new QuirkyCamera(0x825, 0x46D, CameraQuirk.CompletelyBroken), // Logitec C270 - new QuirkyCamera( - 0x0bda, - 0x5510, - CameraQuirk.CompletelyBroken), // A laptop internal camera someone found broken - new QuirkyCamera( - -1, -1, "Snap Camera", CameraQuirk.CompletelyBroken), // SnapCamera on Windows - new QuirkyCamera( - -1, - -1, - "FaceTime HD Camera", - CameraQuirk.CompletelyBroken), // Mac Facetime Camera shared into Windows in Bootcamp - new QuirkyCamera(0x2000, 0x1415, CameraQuirk.Gain, CameraQuirk.FPSCap100), // PS3Eye - new QuirkyCamera( - -1, -1, "mmal service 16.1", 
CameraQuirk.PiCam), // PiCam (via V4L2, not zerocopy) - new QuirkyCamera(-1, -1, "unicam", CameraQuirk.PiCam), // PiCam (via V4L2, not zerocopy) - new QuirkyCamera(0x85B, 0x46D, CameraQuirk.AdjustableFocus) // Logitech C925-e - ); + private static final List quirkyCameras = + List.of( + new QuirkyCamera( + 0x9331, + 0x5A3, + CameraQuirk.CompletelyBroken), // Chris's older generic "Logitec HD Webcam" + new QuirkyCamera(0x825, 0x46D, CameraQuirk.CompletelyBroken), // Logitec C270 + new QuirkyCamera( + 0x0bda, + 0x5510, + CameraQuirk.CompletelyBroken), // A laptop internal camera someone found broken + new QuirkyCamera( + -1, -1, "Snap Camera", CameraQuirk.CompletelyBroken), // SnapCamera on Windows + new QuirkyCamera( + -1, + -1, + "FaceTime HD Camera", + CameraQuirk.CompletelyBroken), // Mac Facetime Camera shared into Windows in Bootcamp + new QuirkyCamera(0x2000, 0x1415, CameraQuirk.Gain, CameraQuirk.FPSCap100), // PS3Eye + new QuirkyCamera( + -1, -1, "mmal service 16.1", CameraQuirk.PiCam), // PiCam (via V4L2, not zerocopy) + new QuirkyCamera(-1, -1, "unicam", CameraQuirk.PiCam), // PiCam (via V4L2, not zerocopy) + new QuirkyCamera(0x85B, 0x46D, CameraQuirk.AdjustableFocus) // Logitech C925-e + ); - public static final QuirkyCamera DefaultCamera = new QuirkyCamera(0, 0, ""); - public static final QuirkyCamera ZeroCopyPiCamera = - new QuirkyCamera( - -1, - -1, - "mmal service 16.1", - CameraQuirk.PiCam, - CameraQuirk.Gain, - CameraQuirk.AWBGain); // PiCam (special zerocopy version) + public static final QuirkyCamera DefaultCamera = new QuirkyCamera(0, 0, ""); + public static final QuirkyCamera ZeroCopyPiCamera = + new QuirkyCamera( + -1, + -1, + "mmal service 16.1", + CameraQuirk.PiCam, + CameraQuirk.Gain, + CameraQuirk.AWBGain); // PiCam (special zerocopy version) - public final String baseName; - public final int usbVid; - public final int usbPid; - public final HashMap quirks; + public final String baseName; + public final int usbVid; + public final int usbPid; 
+ public final HashMap quirks; - /** - * Creates a QuirkyCamera that matches by USB VID/PID - * - * @param usbVid USB VID of camera - * @param usbPid USB PID of camera - * @param quirks Camera quirks - */ - private QuirkyCamera(int usbVid, int usbPid, CameraQuirk... quirks) { - this(usbVid, usbPid, "", quirks); - } + /** + * Creates a QuirkyCamera that matches by USB VID/PID + * + * @param usbVid USB VID of camera + * @param usbPid USB PID of camera + * @param quirks Camera quirks + */ + private QuirkyCamera(int usbVid, int usbPid, CameraQuirk... quirks) { + this(usbVid, usbPid, "", quirks); + } - /** - * Creates a QuirkyCamera that matches by USB VID/PID and name - * - * @param usbVid USB VID of camera - * @param usbPid USB PID of camera - * @param baseName CSCore name of camera - * @param quirks Camera quirks - */ - private QuirkyCamera(int usbVid, int usbPid, String baseName, CameraQuirk... quirks) { - this.usbVid = usbVid; - this.usbPid = usbPid; - this.baseName = baseName; + /** + * Creates a QuirkyCamera that matches by USB VID/PID and name + * + * @param usbVid USB VID of camera + * @param usbPid USB PID of camera + * @param baseName CSCore name of camera + * @param quirks Camera quirks + */ + private QuirkyCamera(int usbVid, int usbPid, String baseName, CameraQuirk... 
quirks) { + this.usbVid = usbVid; + this.usbPid = usbPid; + this.baseName = baseName; - this.quirks = new HashMap<>(); - for (var q : quirks) { - this.quirks.put(q, true); - } - for (var q : CameraQuirk.values()) { - this.quirks.putIfAbsent(q, false); - } + this.quirks = new HashMap<>(); + for (var q : quirks) { + this.quirks.put(q, true); } - - public boolean hasQuirk(CameraQuirk quirk) { - return quirks.get(quirk); + for (var q : CameraQuirk.values()) { + this.quirks.putIfAbsent(q, false); } + } - public static QuirkyCamera getQuirkyCamera(int usbVid, int usbPid) { - return getQuirkyCamera(usbVid, usbPid, ""); - } + public boolean hasQuirk(CameraQuirk quirk) { + return quirks.get(quirk); + } - public static QuirkyCamera getQuirkyCamera(int usbVid, int usbPid, String baseName) { - for (var qc : quirkyCameras) { - boolean hasBaseName = !qc.baseName.isEmpty(); - boolean matchesBaseName = qc.baseName.equals(baseName) || !hasBaseName; - if (qc.usbVid == usbVid && qc.usbPid == usbPid && matchesBaseName) { - return qc; - } - } - return new QuirkyCamera(usbVid, usbPid, baseName); - } + public static QuirkyCamera getQuirkyCamera(int usbVid, int usbPid) { + return getQuirkyCamera(usbVid, usbPid, ""); + } - public boolean hasQuirks() { - return quirks.containsValue(true); + public static QuirkyCamera getQuirkyCamera(int usbVid, int usbPid, String baseName) { + for (var qc : quirkyCameras) { + boolean hasBaseName = !qc.baseName.isEmpty(); + boolean matchesBaseName = qc.baseName.equals(baseName) || !hasBaseName; + if (qc.usbVid == usbVid && qc.usbPid == usbPid && matchesBaseName) { + return qc; + } } + return new QuirkyCamera(usbVid, usbPid, baseName); + } - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - QuirkyCamera that = (QuirkyCamera) o; - return usbVid == that.usbVid - && usbPid == that.usbPid - && Objects.equals(baseName, that.baseName) - && Objects.equals(quirks, that.quirks); - 
} + public boolean hasQuirks() { + return quirks.containsValue(true); + } - @Override - public int hashCode() { - return Objects.hash(usbVid, usbPid, baseName, quirks); - } + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + QuirkyCamera that = (QuirkyCamera) o; + return usbVid == that.usbVid + && usbPid == that.usbPid + && Objects.equals(baseName, that.baseName) + && Objects.equals(quirks, that.quirks); + } + + @Override + public int hashCode() { + return Objects.hash(usbVid, usbPid, baseName, quirks); + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/camera/USBCameraSource.java b/photon-core/src/main/java/org/photonvision/vision/camera/USBCameraSource.java index ad235da26a..69fbc18805 100644 --- a/photon-core/src/main/java/org/photonvision/vision/camera/USBCameraSource.java +++ b/photon-core/src/main/java/org/photonvision/vision/camera/USBCameraSource.java @@ -34,324 +34,324 @@ import org.photonvision.vision.processes.VisionSourceSettables; public class USBCameraSource extends VisionSource { - private final Logger logger; - private final UsbCamera camera; - private final USBCameraSettables usbCameraSettables; - private final USBFrameProvider usbFrameProvider; - private final CvSink cvSink; + private final Logger logger; + private final UsbCamera camera; + private final USBCameraSettables usbCameraSettables; + private final USBFrameProvider usbFrameProvider; + private final CvSink cvSink; - public final QuirkyCamera cameraQuirks; + public final QuirkyCamera cameraQuirks; - public USBCameraSource(CameraConfiguration config) { - super(config); + public USBCameraSource(CameraConfiguration config) { + super(config); - logger = new Logger(USBCameraSource.class, config.nickname, LogGroup.Camera); - camera = new UsbCamera(config.nickname, config.path); - cvSink = CameraServer.getVideo(this.camera); + logger = new Logger(USBCameraSource.class, config.nickname, 
LogGroup.Camera); + camera = new UsbCamera(config.nickname, config.path); + cvSink = CameraServer.getVideo(this.camera); - cameraQuirks = - QuirkyCamera.getQuirkyCamera( - camera.getInfo().productId, camera.getInfo().vendorId, config.baseName); + cameraQuirks = + QuirkyCamera.getQuirkyCamera( + camera.getInfo().productId, camera.getInfo().vendorId, config.baseName); - if (cameraQuirks.hasQuirks()) { - logger.info("Quirky camera detected: " + cameraQuirks.baseName); - } + if (cameraQuirks.hasQuirks()) { + logger.info("Quirky camera detected: " + cameraQuirks.baseName); + } + + if (cameraQuirks.hasQuirk(CameraQuirk.CompletelyBroken)) { + // set some defaults, as these should never be used. + logger.info("Camera " + cameraQuirks.baseName + " is not supported for PhotonVision"); + usbCameraSettables = null; + usbFrameProvider = null; + } else { + // Normal init + // auto exposure/brightness/gain will be set by the visionmodule later + disableAutoFocus(); + + usbCameraSettables = new USBCameraSettables(config); + if (usbCameraSettables.getAllVideoModes().isEmpty()) { + logger.info("Camera " + camera.getPath() + " has no video modes supported by PhotonVision"); + usbFrameProvider = null; + } else { + usbFrameProvider = new USBFrameProvider(cvSink, usbCameraSettables); + } + } + } + + void disableAutoFocus() { + if (cameraQuirks.hasQuirk(CameraQuirk.AdjustableFocus)) { + try { + camera.getProperty("focus_auto").set(0); + camera.getProperty("focus_absolute").set(0); // Focus into infinity + } catch (VideoException e) { + logger.error("Unable to disable autofocus!", e); + } + } + } + + @Override + public FrameProvider getFrameProvider() { + return usbFrameProvider; + } + + @Override + public VisionSourceSettables getSettables() { + return this.usbCameraSettables; + } + + public class USBCameraSettables extends VisionSourceSettables { + protected USBCameraSettables(CameraConfiguration configuration) { + super(configuration); + getAllVideoModes(); + 
setVideoMode(videoModes.get(0)); + } - if (cameraQuirks.hasQuirk(CameraQuirk.CompletelyBroken)) { - // set some defaults, as these should never be used. - logger.info("Camera " + cameraQuirks.baseName + " is not supported for PhotonVision"); - usbCameraSettables = null; - usbFrameProvider = null; + public void setAutoExposure(boolean cameraAutoExposure) { + logger.debug("Setting auto exposure to " + cameraAutoExposure); + + if (cameraQuirks.hasQuirk(CameraQuirk.PiCam)) { + // Case, we know this is a picam. Go through v4l2-ctl interface directly + + // Common settings + camera + .getProperty("image_stabilization") + .set(0); // No image stabilization, as this will throw off odometry + camera.getProperty("power_line_frequency").set(2); // Assume 60Hz USA + camera.getProperty("scene_mode").set(0); // no presets + camera.getProperty("exposure_metering_mode").set(0); + camera.getProperty("exposure_dynamic_framerate").set(0); + + if (!cameraAutoExposure) { + // Pick a bunch of reasonable setting defaults for vision processing retroreflective + camera.getProperty("auto_exposure_bias").set(0); + camera.getProperty("iso_sensitivity_auto").set(0); // Disable auto ISO adjustment + camera.getProperty("iso_sensitivity").set(0); // Manual ISO adjustment + camera.getProperty("white_balance_auto_preset").set(2); // Auto white-balance disabled + camera.getProperty("auto_exposure").set(1); // auto exposure disabled } else { - // Normal init - // auto exposure/brightness/gain will be set by the visionmodule later - disableAutoFocus(); - - usbCameraSettables = new USBCameraSettables(config); - if (usbCameraSettables.getAllVideoModes().isEmpty()) { - logger.info("Camera " + camera.getPath() + " has no video modes supported by PhotonVision"); - usbFrameProvider = null; - } else { - usbFrameProvider = new USBFrameProvider(cvSink, usbCameraSettables); - } + // Pick a bunch of reasonable setting defaults for driver, fiducials, or otherwise + // nice-for-humans + 
camera.getProperty("auto_exposure_bias").set(12); + camera.getProperty("iso_sensitivity_auto").set(1); + camera.getProperty("iso_sensitivity").set(1); // Manual ISO adjustment by default + camera.getProperty("white_balance_auto_preset").set(1); // Auto white-balance enabled + camera.getProperty("auto_exposure").set(0); // auto exposure enabled } - } - void disableAutoFocus() { - if (cameraQuirks.hasQuirk(CameraQuirk.AdjustableFocus)) { - try { - camera.getProperty("focus_auto").set(0); - camera.getProperty("focus_absolute").set(0); // Focus into infinity - } catch (VideoException e) { - logger.error("Unable to disable autofocus!", e); - } + } else { + // Case - this is some other USB cam. Default to wpilib's implementation + + var canSetWhiteBalance = !cameraQuirks.hasQuirk(CameraQuirk.Gain); + + if (!cameraAutoExposure) { + // Pick a bunch of reasonable setting defaults for vision processing retroreflective + if (canSetWhiteBalance) { + camera.setWhiteBalanceManual(4000); // Auto white-balance disabled, 4000K preset + } + } else { + // Pick a bunch of reasonable setting defaults for driver, fiducials, or otherwise + // nice-for-humans + if (canSetWhiteBalance) { + camera.setWhiteBalanceAuto(); // Auto white-balance enabled + } + camera.setExposureAuto(); // auto exposure enabled } + } } - @Override - public FrameProvider getFrameProvider() { - return usbFrameProvider; + private int timeToPiCamRawExposure(double time_us) { + int retVal = + (int) + Math.round( + time_us / 100.0); // Pi Cam's (both v1 and v2) need exposure time in units of + // 100us/bit + return Math.min(Math.max(retVal, 1), 10000); // Cap to allowable range for parameter } - @Override - public VisionSourceSettables getSettables() { - return this.usbCameraSettables; + private double pctToExposureTimeUs(double pct_in) { + // Mirror the photonvision raspicam driver's algorithm for picking an exposure time + // from a 0-100% input + final double PADDING_LOW_US = 10; + final double PADDING_HIGH_US = 10; 
+ return PADDING_LOW_US + + (pct_in / 100.0) * ((1e6 / (double) camera.getVideoMode().fps) - PADDING_HIGH_US); } - public class USBCameraSettables extends VisionSourceSettables { - protected USBCameraSettables(CameraConfiguration configuration) { - super(configuration); - getAllVideoModes(); - setVideoMode(videoModes.get(0)); + @Override + public void setExposure(double exposure) { + if (exposure >= 0.0) { + try { + int scaledExposure = 1; + if (cameraQuirks.hasQuirk(CameraQuirk.PiCam)) { + scaledExposure = Math.round(timeToPiCamRawExposure(pctToExposureTimeUs(exposure))); + logger.debug("Setting camera raw exposure to " + scaledExposure); + camera.getProperty("raw_exposure_time_absolute").set(scaledExposure); + camera.getProperty("raw_exposure_time_absolute").set(scaledExposure); + + } else { + scaledExposure = (int) Math.round(exposure); + logger.debug("Setting camera exposure to " + scaledExposure); + camera.setExposureManual(scaledExposure); + camera.setExposureManual(scaledExposure); + } + } catch (VideoException e) { + logger.error("Failed to set camera exposure!", e); } + } + } - public void setAutoExposure(boolean cameraAutoExposure) { - logger.debug("Setting auto exposure to " + cameraAutoExposure); + @Override + public void setBrightness(int brightness) { + try { + camera.setBrightness(brightness); + camera.setBrightness(brightness); + } catch (VideoException e) { + logger.error("Failed to set camera brightness!", e); + } + } - if (cameraQuirks.hasQuirk(CameraQuirk.PiCam)) { - // Case, we know this is a picam. 
Go through v4l2-ctl interface directly - - // Common settings - camera - .getProperty("image_stabilization") - .set(0); // No image stabilization, as this will throw off odometry - camera.getProperty("power_line_frequency").set(2); // Assume 60Hz USA - camera.getProperty("scene_mode").set(0); // no presets - camera.getProperty("exposure_metering_mode").set(0); - camera.getProperty("exposure_dynamic_framerate").set(0); - - if (!cameraAutoExposure) { - // Pick a bunch of reasonable setting defaults for vision processing retroreflective - camera.getProperty("auto_exposure_bias").set(0); - camera.getProperty("iso_sensitivity_auto").set(0); // Disable auto ISO adjustment - camera.getProperty("iso_sensitivity").set(0); // Manual ISO adjustment - camera.getProperty("white_balance_auto_preset").set(2); // Auto white-balance disabled - camera.getProperty("auto_exposure").set(1); // auto exposure disabled - } else { - // Pick a bunch of reasonable setting defaults for driver, fiducials, or otherwise - // nice-for-humans - camera.getProperty("auto_exposure_bias").set(12); - camera.getProperty("iso_sensitivity_auto").set(1); - camera.getProperty("iso_sensitivity").set(1); // Manual ISO adjustment by default - camera.getProperty("white_balance_auto_preset").set(1); // Auto white-balance enabled - camera.getProperty("auto_exposure").set(0); // auto exposure enabled - } - - } else { - // Case - this is some other USB cam. 
Default to wpilib's implementation - - var canSetWhiteBalance = !cameraQuirks.hasQuirk(CameraQuirk.Gain); - - if (!cameraAutoExposure) { - // Pick a bunch of reasonable setting defaults for vision processing retroreflective - if (canSetWhiteBalance) { - camera.setWhiteBalanceManual(4000); // Auto white-balance disabled, 4000K preset - } - } else { - // Pick a bunch of reasonable setting defaults for driver, fiducials, or otherwise - // nice-for-humans - if (canSetWhiteBalance) { - camera.setWhiteBalanceAuto(); // Auto white-balance enabled - } - camera.setExposureAuto(); // auto exposure enabled - } - } + @Override + public void setGain(int gain) { + try { + if (cameraQuirks.hasQuirk(CameraQuirk.Gain)) { + camera.getProperty("gain_automatic").set(0); + camera.getProperty("gain").set(gain); } + } catch (VideoException e) { + logger.error("Failed to set camera gain!", e); + } + } - private int timeToPiCamRawExposure(double time_us) { - int retVal = - (int) - Math.round( - time_us / 100.0); // Pi Cam's (both v1 and v2) need exposure time in units of - // 100us/bit - return Math.min(Math.max(retVal, 1), 10000); // Cap to allowable range for parameter - } + @Override + public VideoMode getCurrentVideoMode() { + return camera.isConnected() ? camera.getVideoMode() : null; + } - private double pctToExposureTimeUs(double pct_in) { - // Mirror the photonvision raspicam driver's algorithm for picking an exposure time - // from a 0-100% input - final double PADDING_LOW_US = 10; - final double PADDING_HIGH_US = 10; - return PADDING_LOW_US - + (pct_in / 100.0) * ((1e6 / (double) camera.getVideoMode().fps) - PADDING_HIGH_US); + @Override + public void setVideoModeInternal(VideoMode videoMode) { + try { + if (videoMode == null) { + logger.error("Got a null video mode! 
Doing nothing..."); + return; } + camera.setVideoMode(videoMode); + } catch (Exception e) { + logger.error("Failed to set video mode!", e); + } + } - @Override - public void setExposure(double exposure) { - if (exposure >= 0.0) { - try { - int scaledExposure = 1; - if (cameraQuirks.hasQuirk(CameraQuirk.PiCam)) { - scaledExposure = Math.round(timeToPiCamRawExposure(pctToExposureTimeUs(exposure))); - logger.debug("Setting camera raw exposure to " + scaledExposure); - camera.getProperty("raw_exposure_time_absolute").set(scaledExposure); - camera.getProperty("raw_exposure_time_absolute").set(scaledExposure); - - } else { - scaledExposure = (int) Math.round(exposure); - logger.debug("Setting camera exposure to " + scaledExposure); - camera.setExposureManual(scaledExposure); - camera.setExposureManual(scaledExposure); - } - } catch (VideoException e) { - logger.error("Failed to set camera exposure!", e); - } + @Override + public HashMap getAllVideoModes() { + if (videoModes == null) { + videoModes = new HashMap<>(); + List videoModesList = new ArrayList<>(); + try { + VideoMode[] modes; + if (cameraQuirks.hasQuirk(CameraQuirk.PiCam)) { + modes = + new VideoMode[] { + new VideoMode(VideoMode.PixelFormat.kBGR, 320, 240, 90), + new VideoMode(VideoMode.PixelFormat.kBGR, 320, 240, 30), + new VideoMode(VideoMode.PixelFormat.kBGR, 320, 240, 15), + new VideoMode(VideoMode.PixelFormat.kBGR, 320, 240, 10), + new VideoMode(VideoMode.PixelFormat.kBGR, 640, 480, 90), + new VideoMode(VideoMode.PixelFormat.kBGR, 640, 480, 45), + new VideoMode(VideoMode.PixelFormat.kBGR, 640, 480, 30), + new VideoMode(VideoMode.PixelFormat.kBGR, 640, 480, 15), + new VideoMode(VideoMode.PixelFormat.kBGR, 640, 480, 10), + new VideoMode(VideoMode.PixelFormat.kBGR, 960, 720, 60), + new VideoMode(VideoMode.PixelFormat.kBGR, 960, 720, 10), + new VideoMode(VideoMode.PixelFormat.kBGR, 1280, 720, 45), + new VideoMode(VideoMode.PixelFormat.kBGR, 1920, 1080, 20), + }; + } else { + modes = 
camera.enumerateVideoModes(); + } + for (VideoMode videoMode : modes) { + // Filter grey modes + if (videoMode.pixelFormat == VideoMode.PixelFormat.kGray + || videoMode.pixelFormat == VideoMode.PixelFormat.kUnknown) { + continue; } - } - @Override - public void setBrightness(int brightness) { - try { - camera.setBrightness(brightness); - camera.setBrightness(brightness); - } catch (VideoException e) { - logger.error("Failed to set camera brightness!", e); + // On picam, filter non-bgr modes for performance + if (cameraQuirks.hasQuirk(CameraQuirk.PiCam)) { + if (videoMode.pixelFormat != VideoMode.PixelFormat.kBGR) { + continue; + } } - } - @Override - public void setGain(int gain) { - try { - if (cameraQuirks.hasQuirk(CameraQuirk.Gain)) { - camera.getProperty("gain_automatic").set(0); - camera.getProperty("gain").set(gain); - } - } catch (VideoException e) { - logger.error("Failed to set camera gain!", e); + if (cameraQuirks.hasQuirk(CameraQuirk.FPSCap100)) { + if (videoMode.fps > 100) { + continue; + } } - } - @Override - public VideoMode getCurrentVideoMode() { - return camera.isConnected() ? camera.getVideoMode() : null; + videoModesList.add(videoMode); + + // TODO - do we want to trim down FPS modes? in cases where the camera has no gain + // control, + // lower FPS might be needed to ensure total exposure is acceptable. + // We look for modes with the same height/width/pixelformat as this mode + // and remove all the ones that are slower. This is sorted low to high. 
+ // So we remove the last element (the fastest FPS) from the duplicate list, + // and remove all remaining elements from the final list + // var duplicateModes = + // videoModesList.stream() + // .filter( + // it -> + // it.height == videoMode.height + // && it.width == videoMode.width + // && it.pixelFormat == videoMode.pixelFormat) + // .sorted(Comparator.comparingDouble(it -> it.fps)) + // .collect(Collectors.toList()); + // duplicateModes.remove(duplicateModes.size() - 1); + // videoModesList.removeAll(duplicateModes); + } + } catch (Exception e) { + logger.error("Exception while enumerating video modes!", e); + videoModesList = List.of(); } - @Override - public void setVideoModeInternal(VideoMode videoMode) { - try { - if (videoMode == null) { - logger.error("Got a null video mode! Doing nothing..."); - return; - } - camera.setVideoMode(videoMode); - } catch (Exception e) { - logger.error("Failed to set video mode!", e); - } + // Sort by resolution + var sortedList = + videoModesList.stream() + .sorted(((a, b) -> (b.width + b.height) - (a.width + a.height))) + .collect(Collectors.toList()); + Collections.reverse(sortedList); + + // On vendor cameras, respect blacklisted indices + var indexBlacklist = + ConfigManager.getInstance().getConfig().getHardwareConfig().blacklistedResIndices; + for (int badIdx : indexBlacklist) { + sortedList.remove(badIdx); } - @Override - public HashMap getAllVideoModes() { - if (videoModes == null) { - videoModes = new HashMap<>(); - List videoModesList = new ArrayList<>(); - try { - VideoMode[] modes; - if (cameraQuirks.hasQuirk(CameraQuirk.PiCam)) { - modes = - new VideoMode[] { - new VideoMode(VideoMode.PixelFormat.kBGR, 320, 240, 90), - new VideoMode(VideoMode.PixelFormat.kBGR, 320, 240, 30), - new VideoMode(VideoMode.PixelFormat.kBGR, 320, 240, 15), - new VideoMode(VideoMode.PixelFormat.kBGR, 320, 240, 10), - new VideoMode(VideoMode.PixelFormat.kBGR, 640, 480, 90), - new VideoMode(VideoMode.PixelFormat.kBGR, 640, 480, 45), - 
new VideoMode(VideoMode.PixelFormat.kBGR, 640, 480, 30), - new VideoMode(VideoMode.PixelFormat.kBGR, 640, 480, 15), - new VideoMode(VideoMode.PixelFormat.kBGR, 640, 480, 10), - new VideoMode(VideoMode.PixelFormat.kBGR, 960, 720, 60), - new VideoMode(VideoMode.PixelFormat.kBGR, 960, 720, 10), - new VideoMode(VideoMode.PixelFormat.kBGR, 1280, 720, 45), - new VideoMode(VideoMode.PixelFormat.kBGR, 1920, 1080, 20), - }; - } else { - modes = camera.enumerateVideoModes(); - } - for (VideoMode videoMode : modes) { - // Filter grey modes - if (videoMode.pixelFormat == VideoMode.PixelFormat.kGray - || videoMode.pixelFormat == VideoMode.PixelFormat.kUnknown) { - continue; - } - - // On picam, filter non-bgr modes for performance - if (cameraQuirks.hasQuirk(CameraQuirk.PiCam)) { - if (videoMode.pixelFormat != VideoMode.PixelFormat.kBGR) { - continue; - } - } - - if (cameraQuirks.hasQuirk(CameraQuirk.FPSCap100)) { - if (videoMode.fps > 100) { - continue; - } - } - - videoModesList.add(videoMode); - - // TODO - do we want to trim down FPS modes? in cases where the camera has no gain - // control, - // lower FPS might be needed to ensure total exposure is acceptable. - // We look for modes with the same height/width/pixelformat as this mode - // and remove all the ones that are slower. This is sorted low to high. 
- // So we remove the last element (the fastest FPS) from the duplicate list, - // and remove all remaining elements from the final list - // var duplicateModes = - // videoModesList.stream() - // .filter( - // it -> - // it.height == videoMode.height - // && it.width == videoMode.width - // && it.pixelFormat == videoMode.pixelFormat) - // .sorted(Comparator.comparingDouble(it -> it.fps)) - // .collect(Collectors.toList()); - // duplicateModes.remove(duplicateModes.size() - 1); - // videoModesList.removeAll(duplicateModes); - } - } catch (Exception e) { - logger.error("Exception while enumerating video modes!", e); - videoModesList = List.of(); - } - - // Sort by resolution - var sortedList = - videoModesList.stream() - .sorted(((a, b) -> (b.width + b.height) - (a.width + a.height))) - .collect(Collectors.toList()); - Collections.reverse(sortedList); - - // On vendor cameras, respect blacklisted indices - var indexBlacklist = - ConfigManager.getInstance().getConfig().getHardwareConfig().blacklistedResIndices; - for (int badIdx : indexBlacklist) { - sortedList.remove(badIdx); - } - - for (VideoMode videoMode : sortedList) { - videoModes.put(sortedList.indexOf(videoMode), videoMode); - } - } - return videoModes; + for (VideoMode videoMode : sortedList) { + videoModes.put(sortedList.indexOf(videoMode), videoMode); } + } + return videoModes; } - - // TODO improve robustness of this detection - @Override - public boolean isVendorCamera() { - return ConfigManager.getInstance().getConfig().getHardwareConfig().hasPresetFOV() - && cameraQuirks.hasQuirk(CameraQuirk.PiCam); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - USBCameraSource that = (USBCameraSource) o; - return cameraQuirks.equals(that.cameraQuirks); - } - - @Override - public int hashCode() { - return Objects.hash( - camera, usbCameraSettables, usbFrameProvider, cameraConfiguration, cvSink, cameraQuirks); - } + } + 
+ // TODO improve robustness of this detection + @Override + public boolean isVendorCamera() { + return ConfigManager.getInstance().getConfig().getHardwareConfig().hasPresetFOV() + && cameraQuirks.hasQuirk(CameraQuirk.PiCam); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + USBCameraSource that = (USBCameraSource) o; + return cameraQuirks.equals(that.cameraQuirks); + } + + @Override + public int hashCode() { + return Objects.hash( + camera, usbCameraSettables, usbFrameProvider, cameraConfiguration, cvSink, cameraQuirks); + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/frame/Frame.java b/photon-core/src/main/java/org/photonvision/vision/frame/Frame.java index d4202f77a8..b4f2988825 100644 --- a/photon-core/src/main/java/org/photonvision/vision/frame/Frame.java +++ b/photon-core/src/main/java/org/photonvision/vision/frame/Frame.java @@ -22,53 +22,53 @@ import org.photonvision.vision.opencv.Releasable; public class Frame implements Releasable { - public final long timestampNanos; + public final long timestampNanos; - // Frame should at _least_ contain the thresholded frame, and sometimes the color image - public final CVMat colorImage; - public final CVMat processedImage; - public final FrameThresholdType type; + // Frame should at _least_ contain the thresholded frame, and sometimes the color image + public final CVMat colorImage; + public final CVMat processedImage; + public final FrameThresholdType type; - public final FrameStaticProperties frameStaticProperties; + public final FrameStaticProperties frameStaticProperties; - public Frame( - CVMat color, - CVMat processed, - FrameThresholdType type, - long timestampNanos, - FrameStaticProperties frameStaticProperties) { - this.colorImage = color; - this.processedImage = processed; - this.type = type; - this.timestampNanos = timestampNanos; - this.frameStaticProperties = frameStaticProperties; - } + 
public Frame( + CVMat color, + CVMat processed, + FrameThresholdType type, + long timestampNanos, + FrameStaticProperties frameStaticProperties) { + this.colorImage = color; + this.processedImage = processed; + this.type = type; + this.timestampNanos = timestampNanos; + this.frameStaticProperties = frameStaticProperties; + } - public Frame( - CVMat color, - CVMat processed, - FrameThresholdType processType, - FrameStaticProperties frameStaticProperties) { - this(color, processed, processType, MathUtils.wpiNanoTime(), frameStaticProperties); - } + public Frame( + CVMat color, + CVMat processed, + FrameThresholdType processType, + FrameStaticProperties frameStaticProperties) { + this(color, processed, processType, MathUtils.wpiNanoTime(), frameStaticProperties); + } - public Frame() { - this( - new CVMat(), - new CVMat(), - FrameThresholdType.NONE, - MathUtils.wpiNanoTime(), - new FrameStaticProperties(0, 0, 0, null)); - } + public Frame() { + this( + new CVMat(), + new CVMat(), + FrameThresholdType.NONE, + MathUtils.wpiNanoTime(), + new FrameStaticProperties(0, 0, 0, null)); + } - public void copyTo(Frame destFrame) { - colorImage.getMat().copyTo(destFrame.colorImage.getMat()); - processedImage.getMat().copyTo(destFrame.processedImage.getMat()); - } + public void copyTo(Frame destFrame) { + colorImage.getMat().copyTo(destFrame.colorImage.getMat()); + processedImage.getMat().copyTo(destFrame.processedImage.getMat()); + } - @Override - public void release() { - colorImage.release(); - processedImage.release(); - } + @Override + public void release() { + colorImage.release(); + processedImage.release(); + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/frame/FrameDivisor.java b/photon-core/src/main/java/org/photonvision/vision/frame/FrameDivisor.java index b44ca78d10..7d24df7993 100644 --- a/photon-core/src/main/java/org/photonvision/vision/frame/FrameDivisor.java +++ b/photon-core/src/main/java/org/photonvision/vision/frame/FrameDivisor.java @@ 
-18,14 +18,14 @@ package org.photonvision.vision.frame; public enum FrameDivisor { - NONE(1), - HALF(2), - QUARTER(4), - SIXTH(6); + NONE(1), + HALF(2), + QUARTER(4), + SIXTH(6); - public final Integer value; + public final Integer value; - FrameDivisor(int value) { - this.value = value; - } + FrameDivisor(int value) { + this.value = value; + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/frame/FrameProvider.java b/photon-core/src/main/java/org/photonvision/vision/frame/FrameProvider.java index 70de9c5115..8121ab262e 100644 --- a/photon-core/src/main/java/org/photonvision/vision/frame/FrameProvider.java +++ b/photon-core/src/main/java/org/photonvision/vision/frame/FrameProvider.java @@ -22,17 +22,17 @@ import org.photonvision.vision.pipe.impl.HSVPipe; public interface FrameProvider extends Supplier { - String getName(); + String getName(); - /** Ask the camera to produce a certain kind of processed image (e.g. HSV or greyscale) */ - void requestFrameThresholdType(FrameThresholdType type); + /** Ask the camera to produce a certain kind of processed image (e.g. HSV or greyscale) */ + void requestFrameThresholdType(FrameThresholdType type); - /** Ask the camera to rotate frames it outputs */ - void requestFrameRotation(ImageRotationMode rotationMode); + /** Ask the camera to rotate frames it outputs */ + void requestFrameRotation(ImageRotationMode rotationMode); - /** Ask the camera to provide either the input, output, or both frames. */ - void requestFrameCopies(boolean copyInput, boolean copyOutput); + /** Ask the camera to provide either the input, output, or both frames. 
*/ + void requestFrameCopies(boolean copyInput, boolean copyOutput); - /** Ask the camera to rotate frames it outputs */ - void requestHsvSettings(HSVPipe.HSVParams params); + /** Ask the camera to rotate frames it outputs */ + void requestHsvSettings(HSVPipe.HSVParams params); } diff --git a/photon-core/src/main/java/org/photonvision/vision/frame/FrameStaticProperties.java b/photon-core/src/main/java/org/photonvision/vision/frame/FrameStaticProperties.java index cedda04d22..eb709f38eb 100644 --- a/photon-core/src/main/java/org/photonvision/vision/frame/FrameStaticProperties.java +++ b/photon-core/src/main/java/org/photonvision/vision/frame/FrameStaticProperties.java @@ -24,84 +24,84 @@ /** Represents the properties of a frame. */ public class FrameStaticProperties { - public final int imageWidth; - public final int imageHeight; - public final double fov; - public final double imageArea; - public final double centerX; - public final double centerY; - public final Point centerPoint; - public final double horizontalFocalLength; - public final double verticalFocalLength; - public CameraCalibrationCoefficients cameraCalibration; + public final int imageWidth; + public final int imageHeight; + public final double fov; + public final double imageArea; + public final double centerX; + public final double centerY; + public final Point centerPoint; + public final double horizontalFocalLength; + public final double verticalFocalLength; + public CameraCalibrationCoefficients cameraCalibration; - /** - * Instantiates a new Frame static properties. - * - * @param mode The Video Mode of the camera. - * @param fov The FOV (Field Of Vision) of the image in degrees. - */ - public FrameStaticProperties(VideoMode mode, double fov, CameraCalibrationCoefficients cal) { - this(mode != null ? mode.width : 1, mode != null ? mode.height : 1, fov, cal); - } + /** + * Instantiates a new Frame static properties. + * + * @param mode The Video Mode of the camera. 
+ * @param fov The FOV (Field Of Vision) of the image in degrees. + */ + public FrameStaticProperties(VideoMode mode, double fov, CameraCalibrationCoefficients cal) { + this(mode != null ? mode.width : 1, mode != null ? mode.height : 1, fov, cal); + } - /** - * Instantiates a new Frame static properties. - * - * @param imageWidth The width of the image in pixels. - * @param imageHeight The width of the image in pixels. - * @param fov The FOV (Field Of Vision) of the image in degrees. - */ - public FrameStaticProperties( - int imageWidth, int imageHeight, double fov, CameraCalibrationCoefficients cal) { - this.imageWidth = imageWidth; - this.imageHeight = imageHeight; - this.fov = fov; - this.cameraCalibration = cal; + /** + * Instantiates a new Frame static properties. + * + * @param imageWidth The width of the image in pixels. + * @param imageHeight The width of the image in pixels. + * @param fov The FOV (Field Of Vision) of the image in degrees. + */ + public FrameStaticProperties( + int imageWidth, int imageHeight, double fov, CameraCalibrationCoefficients cal) { + this.imageWidth = imageWidth; + this.imageHeight = imageHeight; + this.fov = fov; + this.cameraCalibration = cal; - imageArea = this.imageWidth * this.imageHeight; + imageArea = this.imageWidth * this.imageHeight; - // pinhole model calculations - if (cameraCalibration != null && cameraCalibration.getCameraIntrinsicsMat() != null) { - // Use calibration data - var camIntrinsics = cameraCalibration.getCameraIntrinsicsMat(); - centerX = camIntrinsics.get(0, 2)[0]; - centerY = camIntrinsics.get(1, 2)[0]; - centerPoint = new Point(centerX, centerY); - horizontalFocalLength = camIntrinsics.get(0, 0)[0]; - verticalFocalLength = camIntrinsics.get(1, 1)[0]; - } else { - // No calibration data. 
Calculate from user provided diagonal FOV - centerX = (this.imageWidth / 2.0) - 0.5; - centerY = (this.imageHeight / 2.0) - 0.5; - centerPoint = new Point(centerX, centerY); + // pinhole model calculations + if (cameraCalibration != null && cameraCalibration.getCameraIntrinsicsMat() != null) { + // Use calibration data + var camIntrinsics = cameraCalibration.getCameraIntrinsicsMat(); + centerX = camIntrinsics.get(0, 2)[0]; + centerY = camIntrinsics.get(1, 2)[0]; + centerPoint = new Point(centerX, centerY); + horizontalFocalLength = camIntrinsics.get(0, 0)[0]; + verticalFocalLength = camIntrinsics.get(1, 1)[0]; + } else { + // No calibration data. Calculate from user provided diagonal FOV + centerX = (this.imageWidth / 2.0) - 0.5; + centerY = (this.imageHeight / 2.0) - 0.5; + centerPoint = new Point(centerX, centerY); - DoubleCouple horizVertViews = - calculateHorizontalVerticalFoV(this.fov, this.imageWidth, this.imageHeight); - double horizFOV = Math.toRadians(horizVertViews.getFirst()); - double vertFOV = Math.toRadians(horizVertViews.getSecond()); - horizontalFocalLength = (this.imageWidth / 2.0) / Math.tan(horizFOV / 2.0); - verticalFocalLength = (this.imageHeight / 2.0) / Math.tan(vertFOV / 2.0); - } + DoubleCouple horizVertViews = + calculateHorizontalVerticalFoV(this.fov, this.imageWidth, this.imageHeight); + double horizFOV = Math.toRadians(horizVertViews.getFirst()); + double vertFOV = Math.toRadians(horizVertViews.getSecond()); + horizontalFocalLength = (this.imageWidth / 2.0) / Math.tan(horizFOV / 2.0); + verticalFocalLength = (this.imageHeight / 2.0) / Math.tan(vertFOV / 2.0); } + } - /** - * Calculates the horizontal and vertical FOV components from a given diagonal FOV and image size. 
- * - * @param diagonalFoV Diagonal FOV in degrees - * @param imageWidth Image width in pixels - * @param imageHeight Image height in pixels - * @return Horizontal and vertical FOV in degrees - */ - public static DoubleCouple calculateHorizontalVerticalFoV( - double diagonalFoV, int imageWidth, int imageHeight) { - diagonalFoV = Math.toRadians(diagonalFoV); - double diagonalAspect = Math.hypot(imageWidth, imageHeight); + /** + * Calculates the horizontal and vertical FOV components from a given diagonal FOV and image size. + * + * @param diagonalFoV Diagonal FOV in degrees + * @param imageWidth Image width in pixels + * @param imageHeight Image height in pixels + * @return Horizontal and vertical FOV in degrees + */ + public static DoubleCouple calculateHorizontalVerticalFoV( + double diagonalFoV, int imageWidth, int imageHeight) { + diagonalFoV = Math.toRadians(diagonalFoV); + double diagonalAspect = Math.hypot(imageWidth, imageHeight); - double horizontalView = - Math.atan(Math.tan(diagonalFoV / 2) * (imageWidth / diagonalAspect)) * 2; - double verticalView = Math.atan(Math.tan(diagonalFoV / 2) * (imageHeight / diagonalAspect)) * 2; + double horizontalView = + Math.atan(Math.tan(diagonalFoV / 2) * (imageWidth / diagonalAspect)) * 2; + double verticalView = Math.atan(Math.tan(diagonalFoV / 2) * (imageHeight / diagonalAspect)) * 2; - return new DoubleCouple(Math.toDegrees(horizontalView), Math.toDegrees(verticalView)); - } + return new DoubleCouple(Math.toDegrees(horizontalView), Math.toDegrees(verticalView)); + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/frame/FrameThresholdType.java b/photon-core/src/main/java/org/photonvision/vision/frame/FrameThresholdType.java index 4e779f3c02..3a85b675f2 100644 --- a/photon-core/src/main/java/org/photonvision/vision/frame/FrameThresholdType.java +++ b/photon-core/src/main/java/org/photonvision/vision/frame/FrameThresholdType.java @@ -18,7 +18,7 @@ package org.photonvision.vision.frame; public enum 
FrameThresholdType { - NONE, - HSV, - GREYSCALE, + NONE, + HSV, + GREYSCALE, } diff --git a/photon-core/src/main/java/org/photonvision/vision/frame/consumer/DummyFrameConsumer.java b/photon-core/src/main/java/org/photonvision/vision/frame/consumer/DummyFrameConsumer.java index 1e9449a355..12f06b837c 100644 --- a/photon-core/src/main/java/org/photonvision/vision/frame/consumer/DummyFrameConsumer.java +++ b/photon-core/src/main/java/org/photonvision/vision/frame/consumer/DummyFrameConsumer.java @@ -21,8 +21,8 @@ import org.photonvision.vision.frame.FrameConsumer; public class DummyFrameConsumer implements FrameConsumer { - @Override - public void accept(Frame frame) { - frame.release(); // lol ez - } + @Override + public void accept(Frame frame) { + frame.release(); // lol ez + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/frame/consumer/FileSaveFrameConsumer.java b/photon-core/src/main/java/org/photonvision/vision/frame/consumer/FileSaveFrameConsumer.java index 57bc312b2f..326e38a776 100644 --- a/photon-core/src/main/java/org/photonvision/vision/frame/consumer/FileSaveFrameConsumer.java +++ b/photon-core/src/main/java/org/photonvision/vision/frame/consumer/FileSaveFrameConsumer.java @@ -32,78 +32,78 @@ import org.photonvision.vision.opencv.CVMat; public class FileSaveFrameConsumer implements Consumer { - private final Logger logger = new Logger(FileSaveFrameConsumer.class, LogGroup.General); + private final Logger logger = new Logger(FileSaveFrameConsumer.class, LogGroup.General); - // Formatters to generate unique, timestamped file names - private static final String FILE_PATH = ConfigManager.getInstance().getImageSavePath().toString(); - private static final String FILE_EXTENSION = ".jpg"; - private static final String NT_SUFFIX = "SaveImgCmd"; + // Formatters to generate unique, timestamped file names + private static final String FILE_PATH = ConfigManager.getInstance().getImageSavePath().toString(); + private static final String 
FILE_EXTENSION = ".jpg"; + private static final String NT_SUFFIX = "SaveImgCmd"; - DateFormat df = new SimpleDateFormat("yyyy-MM-dd"); - DateFormat tf = new SimpleDateFormat("hhmmssSS"); + DateFormat df = new SimpleDateFormat("yyyy-MM-dd"); + DateFormat tf = new SimpleDateFormat("hhmmssSS"); - private final NetworkTable rootTable; - private NetworkTable subTable; - private final String ntEntryName; - private IntegerEntry saveFrameEntry; + private final NetworkTable rootTable; + private NetworkTable subTable; + private final String ntEntryName; + private IntegerEntry saveFrameEntry; - private String cameraNickname; - private final String streamType; + private String cameraNickname; + private final String streamType; - private long savedImagesCount = 0; + private long savedImagesCount = 0; - public FileSaveFrameConsumer(String camNickname, String streamPrefix) { - this.ntEntryName = streamPrefix + NT_SUFFIX; - this.cameraNickname = camNickname; - this.streamType = streamPrefix; + public FileSaveFrameConsumer(String camNickname, String streamPrefix) { + this.ntEntryName = streamPrefix + NT_SUFFIX; + this.cameraNickname = camNickname; + this.streamType = streamPrefix; - this.rootTable = NetworkTablesManager.getInstance().kRootTable; - updateCameraNickname(camNickname); - } + this.rootTable = NetworkTablesManager.getInstance().kRootTable; + updateCameraNickname(camNickname); + } - public void accept(CVMat image) { - if (image != null && image.getMat() != null && !image.getMat().empty()) { - long currentCount = saveFrameEntry.get(); + public void accept(CVMat image) { + if (image != null && image.getMat() != null && !image.getMat().empty()) { + long currentCount = saveFrameEntry.get(); - // Await save request - if (currentCount == -1) return; + // Await save request + if (currentCount == -1) return; - // The requested count is greater than the actual count - if (savedImagesCount < currentCount) { - Date now = new Date(); + // The requested count is greater than the 
actual count + if (savedImagesCount < currentCount) { + Date now = new Date(); - String fileName = - cameraNickname + "_" + streamType + "_" + df.format(now) + "T" + tf.format(now); - String saveFilePath = FILE_PATH + File.separator + fileName + FILE_EXTENSION; + String fileName = + cameraNickname + "_" + streamType + "_" + df.format(now) + "T" + tf.format(now); + String saveFilePath = FILE_PATH + File.separator + fileName + FILE_EXTENSION; - Imgcodecs.imwrite(saveFilePath, image.getMat()); + Imgcodecs.imwrite(saveFilePath, image.getMat()); - savedImagesCount++; - logger.info("Saved new image at " + saveFilePath); - } else if (savedImagesCount > currentCount) { - // Reset local value with NT value in case of de-sync - savedImagesCount = currentCount; - } - } + savedImagesCount++; + logger.info("Saved new image at " + saveFilePath); + } else if (savedImagesCount > currentCount) { + // Reset local value with NT value in case of de-sync + savedImagesCount = currentCount; + } } - - public void updateCameraNickname(String newCameraNickname) { - // Remove existing entries - if (this.subTable != null) { - if (this.subTable.containsKey(ntEntryName)) { - this.subTable.getEntry(ntEntryName).close(); - } - } - - // Recreate and re-init network tables structure - this.cameraNickname = newCameraNickname; - this.subTable = rootTable.getSubTable(this.cameraNickname); - this.subTable.getEntry(ntEntryName).setInteger(savedImagesCount); - this.saveFrameEntry = subTable.getIntegerTopic(ntEntryName).getEntry(-1); // Default negative + } + + public void updateCameraNickname(String newCameraNickname) { + // Remove existing entries + if (this.subTable != null) { + if (this.subTable.containsKey(ntEntryName)) { + this.subTable.getEntry(ntEntryName).close(); + } } - public void overrideTakeSnapshot() { - // Simulate NT change - saveFrameEntry.set(saveFrameEntry.get() + 1); - } + // Recreate and re-init network tables structure + this.cameraNickname = newCameraNickname; + this.subTable = 
rootTable.getSubTable(this.cameraNickname); + this.subTable.getEntry(ntEntryName).setInteger(savedImagesCount); + this.saveFrameEntry = subTable.getIntegerTopic(ntEntryName).getEntry(-1); // Default negative + } + + public void overrideTakeSnapshot() { + // Simulate NT change + saveFrameEntry.set(saveFrameEntry.get() + 1); + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/frame/consumer/MJPGFrameConsumer.java b/photon-core/src/main/java/org/photonvision/vision/frame/consumer/MJPGFrameConsumer.java index 06f9bf986b..be40d3b0f8 100644 --- a/photon-core/src/main/java/org/photonvision/vision/frame/consumer/MJPGFrameConsumer.java +++ b/photon-core/src/main/java/org/photonvision/vision/frame/consumer/MJPGFrameConsumer.java @@ -31,215 +31,215 @@ import org.photonvision.vision.opencv.CVMat; public class MJPGFrameConsumer { - public static final Mat EMPTY_MAT = new Mat(60, 15 * 7, CvType.CV_8UC3); - private static final double EMPTY_FRAMERATE = 2; - private long lastEmptyTime; - - static { - EMPTY_MAT.setTo(ColorHelper.colorToScalar(Color.BLACK)); - var col = 0; - Imgproc.rectangle( - EMPTY_MAT, - new Rect(col, 0, 15, EMPTY_MAT.height()), - ColorHelper.colorToScalar(new Color(0xa2a2a2)), - -1); - col += 15; - Imgproc.rectangle( - EMPTY_MAT, - new Rect(col, 0, 15, EMPTY_MAT.height()), - ColorHelper.colorToScalar(new Color(0xa2a300)), - -1); - col += 15; - Imgproc.rectangle( - EMPTY_MAT, - new Rect(col, 0, 15, EMPTY_MAT.height()), - ColorHelper.colorToScalar(new Color(0x00a3a2)), - -1); - col += 15; - Imgproc.rectangle( - EMPTY_MAT, - new Rect(col, 0, 15, EMPTY_MAT.height()), - ColorHelper.colorToScalar(new Color(0x00a200)), - -1); - col += 15; - Imgproc.rectangle( - EMPTY_MAT, - new Rect(col, 0, 15, EMPTY_MAT.height()), - ColorHelper.colorToScalar(new Color(0x440045)), - -1); - col += 15; - Imgproc.rectangle( - EMPTY_MAT, - new Rect(col, 0, 15, EMPTY_MAT.height()), - ColorHelper.colorToScalar(new Color(0x0000a2)), - -1); - col += 15; - Imgproc.rectangle( - 
EMPTY_MAT, - new Rect(col, 0, 15, EMPTY_MAT.height()), - ColorHelper.colorToScalar(new Color(0)), - -1); - Imgproc.rectangle( - EMPTY_MAT, - new Rect(0, 50, EMPTY_MAT.width(), 10), - ColorHelper.colorToScalar(new Color(0)), - -1); - Imgproc.rectangle( - EMPTY_MAT, new Rect(15, 50, 30, 10), ColorHelper.colorToScalar(Color.WHITE), -1); - - Imgproc.putText( - EMPTY_MAT, "Stream", new Point(14, 20), 0, 0.6, ColorHelper.colorToScalar(Color.white), 2); - Imgproc.putText( - EMPTY_MAT, - "Disabled", - new Point(14, 45), - 0, - 0.6, - ColorHelper.colorToScalar(Color.white), - 2); - Imgproc.putText( - EMPTY_MAT, "Stream", new Point(14, 20), 0, 0.6, ColorHelper.colorToScalar(Color.RED), 1); - Imgproc.putText( - EMPTY_MAT, "Disabled", new Point(14, 45), 0, 0.6, ColorHelper.colorToScalar(Color.RED), 1); - } - - private CvSource cvSource; - private MjpegServer mjpegServer; - - private VideoListener listener; - - private final NetworkTable table; - boolean isDisabled = false; - - public MJPGFrameConsumer(String sourceName, int width, int height, int port) { - this.cvSource = new CvSource(sourceName, VideoMode.PixelFormat.kMJPEG, width, height, 30); - this.table = - NetworkTableInstance.getDefault().getTable("/CameraPublisher").getSubTable(sourceName); - - this.mjpegServer = new MjpegServer("serve_" + cvSource.getName(), port); - mjpegServer.setSource(cvSource); - mjpegServer.setCompression(75); - - listener = - new VideoListener( - event -> { - if (event.kind == VideoEvent.Kind.kNetworkInterfacesChanged) { - table.getEntry("source").setString("cv:"); - table.getEntry("streams"); - table.getEntry("connected").setBoolean(true); - table.getEntry("mode").setString(videoModeToString(cvSource.getVideoMode())); - table.getEntry("modes").setStringArray(getSourceModeValues(cvSource.getHandle())); - updateStreamValues(); - } - }, - 0x4fff, - true); - } - - private synchronized void updateStreamValues() { - // Get port - int port = mjpegServer.getPort(); - - // Generate values - var 
addresses = CameraServerJNI.getNetworkInterfaces(); - ArrayList values = new ArrayList<>(addresses.length + 1); - String listenAddress = CameraServerJNI.getMjpegServerListenAddress(mjpegServer.getHandle()); - if (!listenAddress.isEmpty()) { - // If a listen address is specified, only use that - values.add(makeStreamValue(listenAddress, port)); - } else { - // Otherwise generate for hostname and all interface addresses - values.add(makeStreamValue(CameraServerJNI.getHostname() + ".local", port)); - for (String addr : addresses) { - if ("127.0.0.1".equals(addr)) { - continue; // ignore localhost - } - values.add(makeStreamValue(addr, port)); - } + public static final Mat EMPTY_MAT = new Mat(60, 15 * 7, CvType.CV_8UC3); + private static final double EMPTY_FRAMERATE = 2; + private long lastEmptyTime; + + static { + EMPTY_MAT.setTo(ColorHelper.colorToScalar(Color.BLACK)); + var col = 0; + Imgproc.rectangle( + EMPTY_MAT, + new Rect(col, 0, 15, EMPTY_MAT.height()), + ColorHelper.colorToScalar(new Color(0xa2a2a2)), + -1); + col += 15; + Imgproc.rectangle( + EMPTY_MAT, + new Rect(col, 0, 15, EMPTY_MAT.height()), + ColorHelper.colorToScalar(new Color(0xa2a300)), + -1); + col += 15; + Imgproc.rectangle( + EMPTY_MAT, + new Rect(col, 0, 15, EMPTY_MAT.height()), + ColorHelper.colorToScalar(new Color(0x00a3a2)), + -1); + col += 15; + Imgproc.rectangle( + EMPTY_MAT, + new Rect(col, 0, 15, EMPTY_MAT.height()), + ColorHelper.colorToScalar(new Color(0x00a200)), + -1); + col += 15; + Imgproc.rectangle( + EMPTY_MAT, + new Rect(col, 0, 15, EMPTY_MAT.height()), + ColorHelper.colorToScalar(new Color(0x440045)), + -1); + col += 15; + Imgproc.rectangle( + EMPTY_MAT, + new Rect(col, 0, 15, EMPTY_MAT.height()), + ColorHelper.colorToScalar(new Color(0x0000a2)), + -1); + col += 15; + Imgproc.rectangle( + EMPTY_MAT, + new Rect(col, 0, 15, EMPTY_MAT.height()), + ColorHelper.colorToScalar(new Color(0)), + -1); + Imgproc.rectangle( + EMPTY_MAT, + new Rect(0, 50, EMPTY_MAT.width(), 10), + 
ColorHelper.colorToScalar(new Color(0)), + -1); + Imgproc.rectangle( + EMPTY_MAT, new Rect(15, 50, 30, 10), ColorHelper.colorToScalar(Color.WHITE), -1); + + Imgproc.putText( + EMPTY_MAT, "Stream", new Point(14, 20), 0, 0.6, ColorHelper.colorToScalar(Color.white), 2); + Imgproc.putText( + EMPTY_MAT, + "Disabled", + new Point(14, 45), + 0, + 0.6, + ColorHelper.colorToScalar(Color.white), + 2); + Imgproc.putText( + EMPTY_MAT, "Stream", new Point(14, 20), 0, 0.6, ColorHelper.colorToScalar(Color.RED), 1); + Imgproc.putText( + EMPTY_MAT, "Disabled", new Point(14, 45), 0, 0.6, ColorHelper.colorToScalar(Color.RED), 1); + } + + private CvSource cvSource; + private MjpegServer mjpegServer; + + private VideoListener listener; + + private final NetworkTable table; + boolean isDisabled = false; + + public MJPGFrameConsumer(String sourceName, int width, int height, int port) { + this.cvSource = new CvSource(sourceName, VideoMode.PixelFormat.kMJPEG, width, height, 30); + this.table = + NetworkTableInstance.getDefault().getTable("/CameraPublisher").getSubTable(sourceName); + + this.mjpegServer = new MjpegServer("serve_" + cvSource.getName(), port); + mjpegServer.setSource(cvSource); + mjpegServer.setCompression(75); + + listener = + new VideoListener( + event -> { + if (event.kind == VideoEvent.Kind.kNetworkInterfacesChanged) { + table.getEntry("source").setString("cv:"); + table.getEntry("streams"); + table.getEntry("connected").setBoolean(true); + table.getEntry("mode").setString(videoModeToString(cvSource.getVideoMode())); + table.getEntry("modes").setStringArray(getSourceModeValues(cvSource.getHandle())); + updateStreamValues(); + } + }, + 0x4fff, + true); + } + + private synchronized void updateStreamValues() { + // Get port + int port = mjpegServer.getPort(); + + // Generate values + var addresses = CameraServerJNI.getNetworkInterfaces(); + ArrayList values = new ArrayList<>(addresses.length + 1); + String listenAddress = 
CameraServerJNI.getMjpegServerListenAddress(mjpegServer.getHandle()); + if (!listenAddress.isEmpty()) { + // If a listen address is specified, only use that + values.add(makeStreamValue(listenAddress, port)); + } else { + // Otherwise generate for hostname and all interface addresses + values.add(makeStreamValue(CameraServerJNI.getHostname() + ".local", port)); + for (String addr : addresses) { + if ("127.0.0.1".equals(addr)) { + continue; // ignore localhost } - - String[] streamAddresses = values.toArray(new String[0]); - table.getEntry("streams").setStringArray(streamAddresses); - } - - public MJPGFrameConsumer(String name, int port) { - this(name, 320, 240, port); + values.add(makeStreamValue(addr, port)); + } } - public void accept(CVMat image) { - if (image != null && !image.getMat().empty()) { - cvSource.putFrame(image.getMat()); + String[] streamAddresses = values.toArray(new String[0]); + table.getEntry("streams").setStringArray(streamAddresses); + } - // Make sure our disabled framerate limiting doesn't get confused - isDisabled = false; - lastEmptyTime = 0; - } - } + public MJPGFrameConsumer(String name, int port) { + this(name, 320, 240, port); + } - public void disabledTick() { - if (!isDisabled) { - cvSource.setVideoMode(VideoMode.PixelFormat.kMJPEG, EMPTY_MAT.width(), EMPTY_MAT.height(), 0); - isDisabled = true; - } + public void accept(CVMat image) { + if (image != null && !image.getMat().empty()) { + cvSource.putFrame(image.getMat()); - if (System.currentTimeMillis() - lastEmptyTime > 1000.0 / EMPTY_FRAMERATE) { - cvSource.putFrame(EMPTY_MAT); - lastEmptyTime = System.currentTimeMillis(); - } + // Make sure our disabled framerate limiting doesn't get confused + isDisabled = false; + lastEmptyTime = 0; } + } - public int getCurrentStreamPort() { - return mjpegServer.getPort(); + public void disabledTick() { + if (!isDisabled) { + cvSource.setVideoMode(VideoMode.PixelFormat.kMJPEG, EMPTY_MAT.width(), EMPTY_MAT.height(), 0); + isDisabled = true; } - 
private static String makeStreamValue(String address, int port) { - return "mjpg:http://" + address + ":" + port + "/?action=stream"; + if (System.currentTimeMillis() - lastEmptyTime > 1000.0 / EMPTY_FRAMERATE) { + cvSource.putFrame(EMPTY_MAT); + lastEmptyTime = System.currentTimeMillis(); } + } - private static String[] getSourceModeValues(int sourceHandle) { - VideoMode[] modes = CameraServerJNI.enumerateSourceVideoModes(sourceHandle); - String[] modeStrings = new String[modes.length]; - for (int i = 0; i < modes.length; i++) { - modeStrings[i] = videoModeToString(modes[i]); - } - return modeStrings; - } + public int getCurrentStreamPort() { + return mjpegServer.getPort(); + } - private static String videoModeToString(VideoMode mode) { - return mode.width - + "x" - + mode.height - + " " - + pixelFormatToString(mode.pixelFormat) - + " " - + mode.fps - + " fps"; - } + private static String makeStreamValue(String address, int port) { + return "mjpg:http://" + address + ":" + port + "/?action=stream"; + } - private static String pixelFormatToString(VideoMode.PixelFormat pixelFormat) { - switch (pixelFormat) { - case kMJPEG: - return "MJPEG"; - case kYUYV: - return "YUYV"; - case kRGB565: - return "RGB565"; - case kBGR: - return "BGR"; - case kGray: - return "Gray"; - default: - return "Unknown"; - } + private static String[] getSourceModeValues(int sourceHandle) { + VideoMode[] modes = CameraServerJNI.enumerateSourceVideoModes(sourceHandle); + String[] modeStrings = new String[modes.length]; + for (int i = 0; i < modes.length; i++) { + modeStrings[i] = videoModeToString(modes[i]); } - - public void close() { - table.getEntry("connected").setBoolean(false); - mjpegServer.close(); - cvSource.close(); - listener.close(); - mjpegServer = null; - cvSource = null; - listener = null; + return modeStrings; + } + + private static String videoModeToString(VideoMode mode) { + return mode.width + + "x" + + mode.height + + " " + + pixelFormatToString(mode.pixelFormat) + + " " + + 
mode.fps + + " fps"; + } + + private static String pixelFormatToString(VideoMode.PixelFormat pixelFormat) { + switch (pixelFormat) { + case kMJPEG: + return "MJPEG"; + case kYUYV: + return "YUYV"; + case kRGB565: + return "RGB565"; + case kBGR: + return "BGR"; + case kGray: + return "Gray"; + default: + return "Unknown"; } + } + + public void close() { + table.getEntry("connected").setBoolean(false); + mjpegServer.close(); + cvSource.close(); + listener.close(); + mjpegServer = null; + cvSource = null; + listener = null; + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/frame/provider/CpuImageProcessor.java b/photon-core/src/main/java/org/photonvision/vision/frame/provider/CpuImageProcessor.java index 034ae6615e..c84284306e 100644 --- a/photon-core/src/main/java/org/photonvision/vision/frame/provider/CpuImageProcessor.java +++ b/photon-core/src/main/java/org/photonvision/vision/frame/provider/CpuImageProcessor.java @@ -30,94 +30,94 @@ import org.photonvision.vision.pipe.impl.RotateImagePipe; public abstract class CpuImageProcessor implements FrameProvider { - protected static class CapturedFrame { - CVMat colorImage; - FrameStaticProperties staticProps; - long captureTimestamp; - - public CapturedFrame( - CVMat colorImage, FrameStaticProperties staticProps, long captureTimestampNanos) { - this.colorImage = colorImage; - this.staticProps = staticProps; - this.captureTimestamp = captureTimestampNanos; - } + protected static class CapturedFrame { + CVMat colorImage; + FrameStaticProperties staticProps; + long captureTimestamp; + + public CapturedFrame( + CVMat colorImage, FrameStaticProperties staticProps, long captureTimestampNanos) { + this.colorImage = colorImage; + this.staticProps = staticProps; + this.captureTimestamp = captureTimestampNanos; } + } - private final HSVPipe m_hsvPipe = new HSVPipe(); - private final RotateImagePipe m_rImagePipe = new RotateImagePipe(); - private final GrayscalePipe m_grayPipe = new GrayscalePipe(); - 
FrameThresholdType m_processType; + private final HSVPipe m_hsvPipe = new HSVPipe(); + private final RotateImagePipe m_rImagePipe = new RotateImagePipe(); + private final GrayscalePipe m_grayPipe = new GrayscalePipe(); + FrameThresholdType m_processType; - private final Object m_mutex = new Object(); + private final Object m_mutex = new Object(); - abstract CapturedFrame getInputMat(); + abstract CapturedFrame getInputMat(); - public CpuImageProcessor() { - m_hsvPipe.setParams( - new HSVPipe.HSVParams( - new IntegerCouple(0, 180), - new IntegerCouple(0, 255), - new IntegerCouple(0, 255), - false)); - } + public CpuImageProcessor() { + m_hsvPipe.setParams( + new HSVPipe.HSVParams( + new IntegerCouple(0, 180), + new IntegerCouple(0, 255), + new IntegerCouple(0, 255), + false)); + } + + @Override + public final Frame get() { + // TODO Auto-generated method stub + var input = getInputMat(); + + CVMat outputMat = null; + long sumNanos = 0; - @Override - public final Frame get() { - // TODO Auto-generated method stub - var input = getInputMat(); - - CVMat outputMat = null; - long sumNanos = 0; - - { - CVPipeResult out = m_rImagePipe.run(input.colorImage.getMat()); - sumNanos += out.nanosElapsed; - } - - if (!input.colorImage.getMat().empty()) { - if (m_processType == FrameThresholdType.HSV) { - var hsvResult = m_hsvPipe.run(input.colorImage.getMat()); - outputMat = new CVMat(hsvResult.output); - sumNanos += hsvResult.nanosElapsed; - } else if (m_processType == FrameThresholdType.GREYSCALE) { - var result = m_grayPipe.run(input.colorImage.getMat()); - outputMat = new CVMat(result.output); - sumNanos += result.nanosElapsed; - } else { - outputMat = new CVMat(); - } - } else { - System.out.println("Input was empty!"); - outputMat = new CVMat(); - } - - return new Frame( - input.colorImage, outputMat, m_processType, input.captureTimestamp, input.staticProps); + { + CVPipeResult out = m_rImagePipe.run(input.colorImage.getMat()); + sumNanos += out.nanosElapsed; } - @Override - 
public void requestFrameThresholdType(FrameThresholdType type) { - synchronized (m_mutex) { - this.m_processType = type; - } + if (!input.colorImage.getMat().empty()) { + if (m_processType == FrameThresholdType.HSV) { + var hsvResult = m_hsvPipe.run(input.colorImage.getMat()); + outputMat = new CVMat(hsvResult.output); + sumNanos += hsvResult.nanosElapsed; + } else if (m_processType == FrameThresholdType.GREYSCALE) { + var result = m_grayPipe.run(input.colorImage.getMat()); + outputMat = new CVMat(result.output); + sumNanos += result.nanosElapsed; + } else { + outputMat = new CVMat(); + } + } else { + System.out.println("Input was empty!"); + outputMat = new CVMat(); } - @Override - public void requestFrameRotation(ImageRotationMode rotationMode) { - synchronized (m_mutex) { - m_rImagePipe.setParams(new RotateImagePipe.RotateImageParams(rotationMode)); - } + return new Frame( + input.colorImage, outputMat, m_processType, input.captureTimestamp, input.staticProps); + } + + @Override + public void requestFrameThresholdType(FrameThresholdType type) { + synchronized (m_mutex) { + this.m_processType = type; } + } - /** Ask the camera to rotate frames it outputs */ - public void requestHsvSettings(HSVPipe.HSVParams params) { - synchronized (m_mutex) { - m_hsvPipe.setParams(params); - } + @Override + public void requestFrameRotation(ImageRotationMode rotationMode) { + synchronized (m_mutex) { + m_rImagePipe.setParams(new RotateImagePipe.RotateImageParams(rotationMode)); } + } - @Override - public void requestFrameCopies(boolean copyInput, boolean copyOutput) { - // We don't actually do zero-copy, so this method is a no-op + /** Ask the camera to rotate frames it outputs */ + public void requestHsvSettings(HSVPipe.HSVParams params) { + synchronized (m_mutex) { + m_hsvPipe.setParams(params); } + } + + @Override + public void requestFrameCopies(boolean copyInput, boolean copyOutput) { + // We don't actually do zero-copy, so this method is a no-op + } } diff --git 
a/photon-core/src/main/java/org/photonvision/vision/frame/provider/FileFrameProvider.java b/photon-core/src/main/java/org/photonvision/vision/frame/provider/FileFrameProvider.java index 315e4e3de3..f6e77c4ffd 100644 --- a/photon-core/src/main/java/org/photonvision/vision/frame/provider/FileFrameProvider.java +++ b/photon-core/src/main/java/org/photonvision/vision/frame/provider/FileFrameProvider.java @@ -33,89 +33,89 @@ * path}. */ public class FileFrameProvider extends CpuImageProcessor { - public static final int MAX_FPS = 5; - private static int count = 0; - - private final int thisIndex = count++; - private final Path path; - private final int millisDelay; - private final CVMat originalFrame; - - private final FrameStaticProperties properties; - - private long lastGetMillis = System.currentTimeMillis(); - - /** - * Instantiates a new FileFrameProvider. - * - * @param path The path of the image to read from. - * @param fov The fov of the image. - * @param maxFPS The max framerate to provide the image at. - */ - public FileFrameProvider(Path path, double fov, int maxFPS) { - this(path, fov, maxFPS, null); + public static final int MAX_FPS = 5; + private static int count = 0; + + private final int thisIndex = count++; + private final Path path; + private final int millisDelay; + private final CVMat originalFrame; + + private final FrameStaticProperties properties; + + private long lastGetMillis = System.currentTimeMillis(); + + /** + * Instantiates a new FileFrameProvider. + * + * @param path The path of the image to read from. + * @param fov The fov of the image. + * @param maxFPS The max framerate to provide the image at. 
+ */ + public FileFrameProvider(Path path, double fov, int maxFPS) { + this(path, fov, maxFPS, null); + } + + public FileFrameProvider(Path path, double fov, CameraCalibrationCoefficients calibration) { + this(path, fov, MAX_FPS, calibration); + } + + public FileFrameProvider( + Path path, double fov, int maxFPS, CameraCalibrationCoefficients calibration) { + if (!Files.exists(path)) + throw new RuntimeException("Invalid path for image: " + path.toAbsolutePath()); + this.path = path; + this.millisDelay = 1000 / maxFPS; + + Mat rawImage = Imgcodecs.imread(path.toString()); + if (rawImage.cols() > 0 && rawImage.rows() > 0) { + properties = new FrameStaticProperties(rawImage.width(), rawImage.height(), fov, calibration); + originalFrame = new CVMat(rawImage); + } else { + throw new RuntimeException("Image loading failed!"); } - - public FileFrameProvider(Path path, double fov, CameraCalibrationCoefficients calibration) { - this(path, fov, MAX_FPS, calibration); + } + + /** + * Instantiates a new File frame provider. + * + * @param pathAsString The path of the image to read from as a string. + * @param fov The fov of the image. + */ + public FileFrameProvider(String pathAsString, double fov) { + this(Paths.get(pathAsString), fov, MAX_FPS); + } + + /** + * Instantiates a new File frame provider. + * + * @param path The path of the image to read from. + * @param fov The fov of the image. 
+ */ + public FileFrameProvider(Path path, double fov) { + this(path, fov, MAX_FPS); + } + + @Override + public CapturedFrame getInputMat() { + var out = new CVMat(); + out.copyTo(originalFrame); + + // block to keep FPS at a defined rate + if (System.currentTimeMillis() - lastGetMillis < millisDelay) { + try { + Thread.sleep(millisDelay); + } catch (InterruptedException e) { + e.printStackTrace(); + } } - public FileFrameProvider( - Path path, double fov, int maxFPS, CameraCalibrationCoefficients calibration) { - if (!Files.exists(path)) - throw new RuntimeException("Invalid path for image: " + path.toAbsolutePath()); - this.path = path; - this.millisDelay = 1000 / maxFPS; - - Mat rawImage = Imgcodecs.imread(path.toString()); - if (rawImage.cols() > 0 && rawImage.rows() > 0) { - properties = new FrameStaticProperties(rawImage.width(), rawImage.height(), fov, calibration); - originalFrame = new CVMat(rawImage); - } else { - throw new RuntimeException("Image loading failed!"); - } - } + lastGetMillis = System.currentTimeMillis(); + return new CapturedFrame(out, properties, MathUtils.wpiNanoTime()); + } - /** - * Instantiates a new File frame provider. - * - * @param pathAsString The path of the image to read from as a string. - * @param fov The fov of the image. - */ - public FileFrameProvider(String pathAsString, double fov) { - this(Paths.get(pathAsString), fov, MAX_FPS); - } - - /** - * Instantiates a new File frame provider. - * - * @param path The path of the image to read from. - * @param fov The fov of the image. 
- */ - public FileFrameProvider(Path path, double fov) { - this(path, fov, MAX_FPS); - } - - @Override - public CapturedFrame getInputMat() { - var out = new CVMat(); - out.copyTo(originalFrame); - - // block to keep FPS at a defined rate - if (System.currentTimeMillis() - lastGetMillis < millisDelay) { - try { - Thread.sleep(millisDelay); - } catch (InterruptedException e) { - e.printStackTrace(); - } - } - - lastGetMillis = System.currentTimeMillis(); - return new CapturedFrame(out, properties, MathUtils.wpiNanoTime()); - } - - @Override - public String getName() { - return "FileFrameProvider" + thisIndex + " - " + path.getFileName(); - } + @Override + public String getName() { + return "FileFrameProvider" + thisIndex + " - " + path.getFileName(); + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/frame/provider/LibcameraGpuFrameProvider.java b/photon-core/src/main/java/org/photonvision/vision/frame/provider/LibcameraGpuFrameProvider.java index 7b76f658ad..6ed0229315 100644 --- a/photon-core/src/main/java/org/photonvision/vision/frame/provider/LibcameraGpuFrameProvider.java +++ b/photon-core/src/main/java/org/photonvision/vision/frame/provider/LibcameraGpuFrameProvider.java @@ -29,86 +29,86 @@ import org.photonvision.vision.pipe.impl.HSVPipe.HSVParams; public class LibcameraGpuFrameProvider implements FrameProvider { - private final LibcameraGpuSettables settables; - - public LibcameraGpuFrameProvider(LibcameraGpuSettables visionSettables) { - this.settables = visionSettables; - - var vidMode = settables.getCurrentVideoMode(); - settables.setVideoMode(vidMode); - } - - @Override - public String getName() { - return "AcceleratedPicamFrameProvider"; - } - - int i = 0; - - @Override - public Frame get() { - // We need to make sure that other threads don't try to change video modes while we're waiting - // for a frame - // System.out.println("GET!"); - synchronized (LibCameraJNI.CAMERA_LOCK) { - var success = LibCameraJNI.awaitNewFrame(); - - if 
(!success) { - System.out.println("No new frame"); - return new Frame(); - } - - var colorMat = new CVMat(new Mat(LibCameraJNI.takeColorFrame())); - var processedMat = new CVMat(new Mat(LibCameraJNI.takeProcessedFrame())); - - // System.out.println("Color mat: " + colorMat.getMat().size()); - - // Imgcodecs.imwrite("color" + i + ".jpg", colorMat.getMat()); - // Imgcodecs.imwrite("processed" + (i) + ".jpg", processedMat.getMat()); - - int itype = LibCameraJNI.getGpuProcessType(); - FrameThresholdType type = FrameThresholdType.NONE; - if (itype < FrameThresholdType.values().length && itype >= 0) { - type = FrameThresholdType.values()[itype]; - } - - var now = LibCameraJNI.getLibcameraTimestamp(); - var capture = LibCameraJNI.getFrameCaptureTime(); - var latency = (now - capture); - - return new Frame( - colorMat, - processedMat, - type, - MathUtils.wpiNanoTime() - latency, - settables.getFrameStaticProperties()); - } - } - - @Override - public void requestFrameThresholdType(FrameThresholdType type) { - LibCameraJNI.setGpuProcessType(type.ordinal()); - } - - @Override - public void requestFrameRotation(ImageRotationMode rotationMode) { - this.settables.setRotation(rotationMode); - } - - @Override - public void requestHsvSettings(HSVParams params) { - LibCameraJNI.setThresholds( - params.getHsvLower().val[0] / 180.0, - params.getHsvLower().val[1] / 255.0, - params.getHsvLower().val[2] / 255.0, - params.getHsvUpper().val[0] / 180.0, - params.getHsvUpper().val[1] / 255.0, - params.getHsvUpper().val[2] / 255.0, - params.getHueInverted()); - } - - @Override - public void requestFrameCopies(boolean copyInput, boolean copyOutput) { - LibCameraJNI.setFramesToCopy(copyInput, copyOutput); + private final LibcameraGpuSettables settables; + + public LibcameraGpuFrameProvider(LibcameraGpuSettables visionSettables) { + this.settables = visionSettables; + + var vidMode = settables.getCurrentVideoMode(); + settables.setVideoMode(vidMode); + } + + @Override + public String getName() { 
+ return "AcceleratedPicamFrameProvider"; + } + + int i = 0; + + @Override + public Frame get() { + // We need to make sure that other threads don't try to change video modes while we're waiting + // for a frame + // System.out.println("GET!"); + synchronized (LibCameraJNI.CAMERA_LOCK) { + var success = LibCameraJNI.awaitNewFrame(); + + if (!success) { + System.out.println("No new frame"); + return new Frame(); + } + + var colorMat = new CVMat(new Mat(LibCameraJNI.takeColorFrame())); + var processedMat = new CVMat(new Mat(LibCameraJNI.takeProcessedFrame())); + + // System.out.println("Color mat: " + colorMat.getMat().size()); + + // Imgcodecs.imwrite("color" + i + ".jpg", colorMat.getMat()); + // Imgcodecs.imwrite("processed" + (i) + ".jpg", processedMat.getMat()); + + int itype = LibCameraJNI.getGpuProcessType(); + FrameThresholdType type = FrameThresholdType.NONE; + if (itype < FrameThresholdType.values().length && itype >= 0) { + type = FrameThresholdType.values()[itype]; + } + + var now = LibCameraJNI.getLibcameraTimestamp(); + var capture = LibCameraJNI.getFrameCaptureTime(); + var latency = (now - capture); + + return new Frame( + colorMat, + processedMat, + type, + MathUtils.wpiNanoTime() - latency, + settables.getFrameStaticProperties()); } + } + + @Override + public void requestFrameThresholdType(FrameThresholdType type) { + LibCameraJNI.setGpuProcessType(type.ordinal()); + } + + @Override + public void requestFrameRotation(ImageRotationMode rotationMode) { + this.settables.setRotation(rotationMode); + } + + @Override + public void requestHsvSettings(HSVParams params) { + LibCameraJNI.setThresholds( + params.getHsvLower().val[0] / 180.0, + params.getHsvLower().val[1] / 255.0, + params.getHsvLower().val[2] / 255.0, + params.getHsvUpper().val[0] / 180.0, + params.getHsvUpper().val[1] / 255.0, + params.getHsvUpper().val[2] / 255.0, + params.getHueInverted()); + } + + @Override + public void requestFrameCopies(boolean copyInput, boolean copyOutput) { + 
LibCameraJNI.setFramesToCopy(copyInput, copyOutput); + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/frame/provider/USBFrameProvider.java b/photon-core/src/main/java/org/photonvision/vision/frame/provider/USBFrameProvider.java index 03459c197d..f89111cf18 100644 --- a/photon-core/src/main/java/org/photonvision/vision/frame/provider/USBFrameProvider.java +++ b/photon-core/src/main/java/org/photonvision/vision/frame/provider/USBFrameProvider.java @@ -23,36 +23,36 @@ import org.photonvision.vision.processes.VisionSourceSettables; public class USBFrameProvider extends CpuImageProcessor { - private final CvSink cvSink; - - @SuppressWarnings("SpellCheckingInspection") - private final VisionSourceSettables settables; - - @SuppressWarnings("SpellCheckingInspection") - public USBFrameProvider(CvSink sink, VisionSourceSettables visionSettables) { - cvSink = sink; - cvSink.setEnabled(true); - this.settables = visionSettables; + private final CvSink cvSink; + + @SuppressWarnings("SpellCheckingInspection") + private final VisionSourceSettables settables; + + @SuppressWarnings("SpellCheckingInspection") + public USBFrameProvider(CvSink sink, VisionSourceSettables visionSettables) { + cvSink = sink; + cvSink.setEnabled(true); + this.settables = visionSettables; + } + + @Override + public CapturedFrame getInputMat() { + var mat = new CVMat(); // We do this so that we don't fill a Mat in use by another thread + // This is from wpi::Now, or WPIUtilJNI.now() + long time = + cvSink.grabFrame(mat.getMat()) + * 1000; // Units are microseconds, epoch is the same as the Unix epoch + + // Sometimes CSCore gives us a zero frametime. 
+ if (time <= 1e-6) { + time = MathUtils.wpiNanoTime(); } - @Override - public CapturedFrame getInputMat() { - var mat = new CVMat(); // We do this so that we don't fill a Mat in use by another thread - // This is from wpi::Now, or WPIUtilJNI.now() - long time = - cvSink.grabFrame(mat.getMat()) - * 1000; // Units are microseconds, epoch is the same as the Unix epoch - - // Sometimes CSCore gives us a zero frametime. - if (time <= 1e-6) { - time = MathUtils.wpiNanoTime(); - } + return new CapturedFrame(mat, settables.getFrameStaticProperties(), time); + } - return new CapturedFrame(mat, settables.getFrameStaticProperties(), time); - } - - @Override - public String getName() { - return "USBFrameProvider - " + cvSink.getName(); - } + @Override + public String getName() { + return "USBFrameProvider - " + cvSink.getName(); + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/opencv/CVMat.java b/photon-core/src/main/java/org/photonvision/vision/opencv/CVMat.java index e58f0d11d7..c48d78094e 100644 --- a/photon-core/src/main/java/org/photonvision/vision/opencv/CVMat.java +++ b/photon-core/src/main/java/org/photonvision/vision/opencv/CVMat.java @@ -23,80 +23,80 @@ import org.photonvision.common.logging.Logger; public class CVMat implements Releasable { - private static final Logger logger = new Logger(CVMat.class, LogGroup.General); + private static final Logger logger = new Logger(CVMat.class, LogGroup.General); - private static int allMatCounter = 0; - private static final HashMap allMats = new HashMap<>(); + private static int allMatCounter = 0; + private static final HashMap allMats = new HashMap<>(); - private static boolean shouldPrint; + private static boolean shouldPrint; - private final Mat mat; + private final Mat mat; - public CVMat() { - this(new Mat()); - } + public CVMat() { + this(new Mat()); + } - public void copyTo(CVMat srcMat) { - copyTo(srcMat.getMat()); - } + public void copyTo(CVMat srcMat) { + copyTo(srcMat.getMat()); + } - public 
void copyTo(Mat srcMat) { - srcMat.copyTo(mat); - } + public void copyTo(Mat srcMat) { + srcMat.copyTo(mat); + } - private StringBuilder getStackTraceBuilder() { - var trace = Thread.currentThread().getStackTrace(); + private StringBuilder getStackTraceBuilder() { + var trace = Thread.currentThread().getStackTrace(); - final int STACK_FRAMES_TO_SKIP = 3; - final var traceStr = new StringBuilder(); - for (int idx = STACK_FRAMES_TO_SKIP; idx < trace.length; idx++) { - traceStr.append("\t\n").append(trace[idx]); - } - traceStr.append("\n"); - return traceStr; + final int STACK_FRAMES_TO_SKIP = 3; + final var traceStr = new StringBuilder(); + for (int idx = STACK_FRAMES_TO_SKIP; idx < trace.length; idx++) { + traceStr.append("\t\n").append(trace[idx]); } - - public CVMat(Mat mat) { - this.mat = mat; - allMatCounter++; - allMats.put(mat, allMatCounter); - - if (shouldPrint) { - logger.trace(() -> "CVMat" + allMatCounter + " alloc - new count: " + allMats.size()); - logger.trace(getStackTraceBuilder()::toString); - } + traceStr.append("\n"); + return traceStr; + } + + public CVMat(Mat mat) { + this.mat = mat; + allMatCounter++; + allMats.put(mat, allMatCounter); + + if (shouldPrint) { + logger.trace(() -> "CVMat" + allMatCounter + " alloc - new count: " + allMats.size()); + logger.trace(getStackTraceBuilder()::toString); } + } - @Override - public void release() { - // If this mat is empty, all we can do is return - if (mat.empty()) return; + @Override + public void release() { + // If this mat is empty, all we can do is return + if (mat.empty()) return; - // If the mat isn't in the hashmap, we can't remove it - Integer matNo = allMats.get(mat); - if (matNo != null) allMats.remove(mat); - mat.release(); + // If the mat isn't in the hashmap, we can't remove it + Integer matNo = allMats.get(mat); + if (matNo != null) allMats.remove(mat); + mat.release(); - if (shouldPrint) { - logger.trace(() -> "CVMat" + matNo + " de-alloc - new count: " + allMats.size()); - 
logger.trace(getStackTraceBuilder()::toString); - } + if (shouldPrint) { + logger.trace(() -> "CVMat" + matNo + " de-alloc - new count: " + allMats.size()); + logger.trace(getStackTraceBuilder()::toString); } + } - public Mat getMat() { - return mat; - } + public Mat getMat() { + return mat; + } - @Override - public String toString() { - return "CVMat{" + mat.toString() + '}'; - } + @Override + public String toString() { + return "CVMat{" + mat.toString() + '}'; + } - public static int getMatCount() { - return allMats.size(); - } + public static int getMatCount() { + return allMats.size(); + } - public static void enablePrint(boolean enabled) { - shouldPrint = enabled; - } + public static void enablePrint(boolean enabled) { + shouldPrint = enabled; + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/opencv/CVShape.java b/photon-core/src/main/java/org/photonvision/vision/opencv/CVShape.java index 99b8c2f107..2b4f083ea0 100644 --- a/photon-core/src/main/java/org/photonvision/vision/opencv/CVShape.java +++ b/photon-core/src/main/java/org/photonvision/vision/opencv/CVShape.java @@ -23,42 +23,42 @@ import org.opencv.core.Point; public class CVShape implements Releasable { - public final Contour contour; + public final Contour contour; - @Nullable public final ContourShape shape; + @Nullable public final ContourShape shape; - public double radius = 0; - public Point center = null; + public double radius = 0; + public Point center = null; - private MatOfPoint3f customTarget = null; + private MatOfPoint3f customTarget = null; - private final MatOfPoint2f approxCurve = new MatOfPoint2f(); + private final MatOfPoint2f approxCurve = new MatOfPoint2f(); - public CVShape(Contour contour, ContourShape shape) { - this.contour = contour; - this.shape = shape; - } + public CVShape(Contour contour, ContourShape shape) { + this.contour = contour; + this.shape = shape; + } - public CVShape(Contour contour, Point center, double radius) { - this(contour, 
ContourShape.Circle); - this.radius = radius; - this.center = center; - } + public CVShape(Contour contour, Point center, double radius) { + this(contour, ContourShape.Circle); + this.radius = radius; + this.center = center; + } - public CVShape(Contour contour, MatOfPoint3f targetPoints) { - this.contour = contour; - this.shape = ContourShape.Custom; - customTarget = targetPoints; - } + public CVShape(Contour contour, MatOfPoint3f targetPoints) { + this.contour = contour; + this.shape = ContourShape.Custom; + customTarget = targetPoints; + } - public Contour getContour() { - return contour; - } + public Contour getContour() { + return contour; + } - @Override - public void release() { - if (customTarget != null) customTarget.release(); - approxCurve.release(); - contour.release(); - } + @Override + public void release() { + if (customTarget != null) customTarget.release(); + approxCurve.release(); + contour.release(); + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/opencv/Contour.java b/photon-core/src/main/java/org/photonvision/vision/opencv/Contour.java index 181de0b3b3..ab13160607 100644 --- a/photon-core/src/main/java/org/photonvision/vision/opencv/Contour.java +++ b/photon-core/src/main/java/org/photonvision/vision/opencv/Contour.java @@ -27,204 +27,204 @@ import org.photonvision.common.util.math.MathUtils; public class Contour implements Releasable { - public static final Comparator SortByMomentsX = - Comparator.comparingDouble( - (contour) -> contour.getMoments().get_m10() / contour.getMoments().get_m00()); + public static final Comparator SortByMomentsX = + Comparator.comparingDouble( + (contour) -> contour.getMoments().get_m10() / contour.getMoments().get_m00()); + + public final MatOfPoint mat; - public final MatOfPoint mat; - - private Double area = Double.NaN; - private Double perimeter = Double.NaN; - private MatOfPoint2f mat2f = null; - private RotatedRect minAreaRect = null; - private Rect boundingRect = null; - private Moments 
moments = null; - - private MatOfPoint2f convexHull = null; - private MatOfPoint2f approxPolyDp = null; - - public Contour(MatOfPoint mat) { - this.mat = mat; - } - - public MatOfPoint2f getMat2f() { - if (mat2f == null) { - mat2f = new MatOfPoint2f(mat.toArray()); - mat.convertTo(mat2f, CvType.CV_32F); + private Double area = Double.NaN; + private Double perimeter = Double.NaN; + private MatOfPoint2f mat2f = null; + private RotatedRect minAreaRect = null; + private Rect boundingRect = null; + private Moments moments = null; + + private MatOfPoint2f convexHull = null; + private MatOfPoint2f approxPolyDp = null; + + public Contour(MatOfPoint mat) { + this.mat = mat; + } + + public MatOfPoint2f getMat2f() { + if (mat2f == null) { + mat2f = new MatOfPoint2f(mat.toArray()); + mat.convertTo(mat2f, CvType.CV_32F); + } + return mat2f; + } + + public MatOfPoint2f getConvexHull() { + if (this.convexHull == null) { + var ints = new MatOfInt(); + Imgproc.convexHull(mat, ints); + this.convexHull = Contour.convertIndexesToPoints(mat, ints); + ints.release(); + } + return convexHull; + } + + public MatOfPoint2f getApproxPolyDp(double epsilon, boolean closed) { + if (this.approxPolyDp == null) { + approxPolyDp = new MatOfPoint2f(); + Imgproc.approxPolyDP(getConvexHull(), approxPolyDp, epsilon, closed); + } + return approxPolyDp; + } + + @Nullable + public MatOfPoint2f getApproxPolyDp() { + return this.approxPolyDp; + } + + public double getArea() { + if (Double.isNaN(area)) { + area = Imgproc.contourArea(mat); + } + return area; + } + + public double getPerimeter() { + if (Double.isNaN(perimeter)) { + perimeter = Imgproc.arcLength(getMat2f(), true); + } + return perimeter; + } + + public RotatedRect getMinAreaRect() { + if (minAreaRect == null) { + minAreaRect = Imgproc.minAreaRect(getMat2f()); + } + return minAreaRect; + } + + public Rect getBoundingRect() { + if (boundingRect == null) { + boundingRect = Imgproc.boundingRect(mat); + } + return boundingRect; + } + + public 
Moments getMoments() { + if (moments == null) { + moments = Imgproc.moments(mat); + } + return moments; + } + + public Point getCenterPoint() { + return getMinAreaRect().center; + } + + public boolean isEmpty() { + return mat.empty(); + } + + public boolean isIntersecting( + Contour secondContour, ContourIntersectionDirection intersectionDirection) { + boolean isIntersecting = false; + + if (intersectionDirection == ContourIntersectionDirection.None) { + isIntersecting = true; + } else { + try { + MatOfPoint2f intersectMatA = new MatOfPoint2f(); + MatOfPoint2f intersectMatB = new MatOfPoint2f(); + + mat.convertTo(intersectMatA, CvType.CV_32F); + secondContour.mat.convertTo(intersectMatB, CvType.CV_32F); + + RotatedRect a = Imgproc.fitEllipse(intersectMatA); + RotatedRect b = Imgproc.fitEllipse(intersectMatB); + double mA = MathUtils.toSlope(a.angle); + double mB = MathUtils.toSlope(b.angle); + double x0A = a.center.x; + double y0A = a.center.y; + double x0B = b.center.x; + double y0B = b.center.y; + double intersectionX = ((mA * x0A) - y0A - (mB * x0B) + y0B) / (mA - mB); + double intersectionY = (mA * (intersectionX - x0A)) + y0A; + double massX = (x0A + x0B) / 2; + double massY = (y0A + y0B) / 2; + switch (intersectionDirection) { + case Up: + if (intersectionY < massY) isIntersecting = true; + break; + case Down: + if (intersectionY > massY) isIntersecting = true; + break; + case Left: + if (intersectionX < massX) isIntersecting = true; + break; + case Right: + if (intersectionX > massX) isIntersecting = true; + break; } - return mat2f; + intersectMatA.release(); + intersectMatB.release(); + } catch (Exception e) { + // defaults to false + } } - public MatOfPoint2f getConvexHull() { - if (this.convexHull == null) { - var ints = new MatOfInt(); - Imgproc.convexHull(mat, ints); - this.convexHull = Contour.convertIndexesToPoints(mat, ints); - ints.release(); - } - return convexHull; - } + return isIntersecting; + } - public MatOfPoint2f getApproxPolyDp(double 
epsilon, boolean closed) { - if (this.approxPolyDp == null) { - approxPolyDp = new MatOfPoint2f(); - Imgproc.approxPolyDP(getConvexHull(), approxPolyDp, epsilon, closed); - } - return approxPolyDp; + // TODO: refactor to do "infinite" contours ??????? + public static Contour groupContoursByIntersection( + Contour firstContour, Contour secondContour, ContourIntersectionDirection intersection) { + if (areIntersecting(firstContour, secondContour, intersection)) { + return combineContours(firstContour, secondContour); + } else { + return null; } + } - @Nullable - public MatOfPoint2f getApproxPolyDp() { - return this.approxPolyDp; - } + public static boolean areIntersecting( + Contour firstContour, + Contour secondContour, + ContourIntersectionDirection intersectionDirection) { + return firstContour.isIntersecting(secondContour, intersectionDirection) + || secondContour.isIntersecting(firstContour, intersectionDirection); + } - public double getArea() { - if (Double.isNaN(area)) { - area = Imgproc.contourArea(mat); - } - return area; - } + public static Contour combineContours(Contour... contours) { + return combineContourList(Arrays.asList(contours)); + } - public double getPerimeter() { - if (Double.isNaN(perimeter)) { - perimeter = Imgproc.arcLength(getMat2f(), true); - } - return perimeter; - } + public static Contour combineContourList(Collection contours) { + var points = new MatOfPoint(); - public RotatedRect getMinAreaRect() { - if (minAreaRect == null) { - minAreaRect = Imgproc.minAreaRect(getMat2f()); - } - return minAreaRect; + for (var contour : contours) { + points.push_back(contour.mat); } - public Rect getBoundingRect() { - if (boundingRect == null) { - boundingRect = Imgproc.boundingRect(mat); - } - return boundingRect; - } + var finalContour = new Contour(points); - public Moments getMoments() { - if (moments == null) { - moments = Imgproc.moments(mat); - } - return moments; - } + boolean contourEmpty = finalContour.isEmpty(); + return contourEmpty ? 
null : finalContour; + } - public Point getCenterPoint() { - return getMinAreaRect().center; - } + @Override + public void release() { + if (mat != null) mat.release(); + if (mat2f != null) mat2f.release(); + if (convexHull != null) convexHull.release(); + if (approxPolyDp != null) approxPolyDp.release(); + } - public boolean isEmpty() { - return mat.empty(); - } + public static MatOfPoint2f convertIndexesToPoints(MatOfPoint contour, MatOfInt indexes) { + int[] arrIndex = indexes.toArray(); + Point[] arrContour = contour.toArray(); + Point[] arrPoints = new Point[arrIndex.length]; - public boolean isIntersecting( - Contour secondContour, ContourIntersectionDirection intersectionDirection) { - boolean isIntersecting = false; - - if (intersectionDirection == ContourIntersectionDirection.None) { - isIntersecting = true; - } else { - try { - MatOfPoint2f intersectMatA = new MatOfPoint2f(); - MatOfPoint2f intersectMatB = new MatOfPoint2f(); - - mat.convertTo(intersectMatA, CvType.CV_32F); - secondContour.mat.convertTo(intersectMatB, CvType.CV_32F); - - RotatedRect a = Imgproc.fitEllipse(intersectMatA); - RotatedRect b = Imgproc.fitEllipse(intersectMatB); - double mA = MathUtils.toSlope(a.angle); - double mB = MathUtils.toSlope(b.angle); - double x0A = a.center.x; - double y0A = a.center.y; - double x0B = b.center.x; - double y0B = b.center.y; - double intersectionX = ((mA * x0A) - y0A - (mB * x0B) + y0B) / (mA - mB); - double intersectionY = (mA * (intersectionX - x0A)) + y0A; - double massX = (x0A + x0B) / 2; - double massY = (y0A + y0B) / 2; - switch (intersectionDirection) { - case Up: - if (intersectionY < massY) isIntersecting = true; - break; - case Down: - if (intersectionY > massY) isIntersecting = true; - break; - case Left: - if (intersectionX < massX) isIntersecting = true; - break; - case Right: - if (intersectionX > massX) isIntersecting = true; - break; - } - intersectMatA.release(); - intersectMatB.release(); - } catch (Exception e) { - // defaults to 
false - } - } - - return isIntersecting; - } - - // TODO: refactor to do "infinite" contours ??????? - public static Contour groupContoursByIntersection( - Contour firstContour, Contour secondContour, ContourIntersectionDirection intersection) { - if (areIntersecting(firstContour, secondContour, intersection)) { - return combineContours(firstContour, secondContour); - } else { - return null; - } - } - - public static boolean areIntersecting( - Contour firstContour, - Contour secondContour, - ContourIntersectionDirection intersectionDirection) { - return firstContour.isIntersecting(secondContour, intersectionDirection) - || secondContour.isIntersecting(firstContour, intersectionDirection); - } - - public static Contour combineContours(Contour... contours) { - return combineContourList(Arrays.asList(contours)); - } - - public static Contour combineContourList(Collection contours) { - var points = new MatOfPoint(); - - for (var contour : contours) { - points.push_back(contour.mat); - } - - var finalContour = new Contour(points); - - boolean contourEmpty = finalContour.isEmpty(); - return contourEmpty ? 
null : finalContour; - } - - @Override - public void release() { - if (mat != null) mat.release(); - if (mat2f != null) mat2f.release(); - if (convexHull != null) convexHull.release(); - if (approxPolyDp != null) approxPolyDp.release(); + for (int i = 0; i < arrIndex.length; i++) { + arrPoints[i] = arrContour[arrIndex[i]]; } - public static MatOfPoint2f convertIndexesToPoints(MatOfPoint contour, MatOfInt indexes) { - int[] arrIndex = indexes.toArray(); - Point[] arrContour = contour.toArray(); - Point[] arrPoints = new Point[arrIndex.length]; - - for (int i = 0; i < arrIndex.length; i++) { - arrPoints[i] = arrContour[arrIndex[i]]; - } - - var hull = new MatOfPoint2f(); - hull.fromArray(arrPoints); - return hull; - } + var hull = new MatOfPoint2f(); + hull.fromArray(arrPoints); + return hull; + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/opencv/ContourGroupingMode.java b/photon-core/src/main/java/org/photonvision/vision/opencv/ContourGroupingMode.java index 1929c058fb..42500feb87 100644 --- a/photon-core/src/main/java/org/photonvision/vision/opencv/ContourGroupingMode.java +++ b/photon-core/src/main/java/org/photonvision/vision/opencv/ContourGroupingMode.java @@ -18,13 +18,13 @@ package org.photonvision.vision.opencv; public enum ContourGroupingMode { - Single(1), - Dual(2), - TwoOrMore(2); + Single(1), + Dual(2), + TwoOrMore(2); - public final int count; + public final int count; - ContourGroupingMode(int count) { - this.count = count; - } + ContourGroupingMode(int count) { + this.count = count; + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/opencv/ContourIntersectionDirection.java b/photon-core/src/main/java/org/photonvision/vision/opencv/ContourIntersectionDirection.java index d3f97d61e4..5aa701c978 100644 --- a/photon-core/src/main/java/org/photonvision/vision/opencv/ContourIntersectionDirection.java +++ b/photon-core/src/main/java/org/photonvision/vision/opencv/ContourIntersectionDirection.java @@ -18,9 +18,9 @@ 
package org.photonvision.vision.opencv; public enum ContourIntersectionDirection { - None, - Up, - Down, - Left, - Right + None, + Up, + Down, + Left, + Right } diff --git a/photon-core/src/main/java/org/photonvision/vision/opencv/ContourShape.java b/photon-core/src/main/java/org/photonvision/vision/opencv/ContourShape.java index 4eac4d847d..bf77b2b0c9 100644 --- a/photon-core/src/main/java/org/photonvision/vision/opencv/ContourShape.java +++ b/photon-core/src/main/java/org/photonvision/vision/opencv/ContourShape.java @@ -21,26 +21,26 @@ import java.util.HashMap; public enum ContourShape { - Circle(0), - Custom(-1), - Triangle(3), - Quadrilateral(4); + Circle(0), + Custom(-1), + Triangle(3), + Quadrilateral(4); - public final int sides; + public final int sides; - ContourShape(int sides) { - this.sides = sides; - } + ContourShape(int sides) { + this.sides = sides; + } - private static final HashMap sidesToValueMap = new HashMap<>(); + private static final HashMap sidesToValueMap = new HashMap<>(); - static { - for (var value : EnumSet.allOf(ContourShape.class)) { - sidesToValueMap.put(value.sides, value); - } + static { + for (var value : EnumSet.allOf(ContourShape.class)) { + sidesToValueMap.put(value.sides, value); } + } - public static ContourShape fromSides(int sides) { - return sidesToValueMap.get(sides); - } + public static ContourShape fromSides(int sides) { + return sidesToValueMap.get(sides); + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/opencv/ContourSortMode.java b/photon-core/src/main/java/org/photonvision/vision/opencv/ContourSortMode.java index 2ceff908ec..52b31811ca 100644 --- a/photon-core/src/main/java/org/photonvision/vision/opencv/ContourSortMode.java +++ b/photon-core/src/main/java/org/photonvision/vision/opencv/ContourSortMode.java @@ -21,27 +21,27 @@ import org.photonvision.vision.target.PotentialTarget; public enum ContourSortMode { - Largest( - Comparator.comparingDouble(PotentialTarget::getArea) - .reversed()), // 
reversed so that zero index has the largest size - Smallest(Largest.getComparator().reversed()), - Highest(Comparator.comparingDouble(rect -> rect.getMinAreaRect().center.y)), - Lowest(Highest.getComparator().reversed()), - Leftmost(Comparator.comparingDouble(target -> target.getMinAreaRect().center.x * -1)), - Rightmost(Leftmost.getComparator().reversed()), - Centermost( - Comparator.comparingDouble( - rect -> - (Math.pow(rect.getMinAreaRect().center.y, 2) - + Math.pow(rect.getMinAreaRect().center.x, 2)))); + Largest( + Comparator.comparingDouble(PotentialTarget::getArea) + .reversed()), // reversed so that zero index has the largest size + Smallest(Largest.getComparator().reversed()), + Highest(Comparator.comparingDouble(rect -> rect.getMinAreaRect().center.y)), + Lowest(Highest.getComparator().reversed()), + Leftmost(Comparator.comparingDouble(target -> target.getMinAreaRect().center.x * -1)), + Rightmost(Leftmost.getComparator().reversed()), + Centermost( + Comparator.comparingDouble( + rect -> + (Math.pow(rect.getMinAreaRect().center.y, 2) + + Math.pow(rect.getMinAreaRect().center.x, 2)))); - private final Comparator m_comparator; + private final Comparator m_comparator; - ContourSortMode(Comparator comparator) { - m_comparator = comparator; - } + ContourSortMode(Comparator comparator) { + m_comparator = comparator; + } - public Comparator getComparator() { - return m_comparator; - } + public Comparator getComparator() { + return m_comparator; + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/opencv/DualMat.java b/photon-core/src/main/java/org/photonvision/vision/opencv/DualMat.java index f2adb4a92f..7efe9fcd87 100644 --- a/photon-core/src/main/java/org/photonvision/vision/opencv/DualMat.java +++ b/photon-core/src/main/java/org/photonvision/vision/opencv/DualMat.java @@ -20,6 +20,6 @@ import org.opencv.core.Mat; public class DualMat { - public Mat first; - public Mat second; + public Mat first; + public Mat second; } diff --git 
a/photon-core/src/main/java/org/photonvision/vision/opencv/DualOffsetValues.java b/photon-core/src/main/java/org/photonvision/vision/opencv/DualOffsetValues.java index c7b2ac04f2..1039ccff85 100644 --- a/photon-core/src/main/java/org/photonvision/vision/opencv/DualOffsetValues.java +++ b/photon-core/src/main/java/org/photonvision/vision/opencv/DualOffsetValues.java @@ -22,27 +22,27 @@ import org.photonvision.vision.target.TargetCalculations; public class DualOffsetValues { - public final Point firstPoint; - public final double firstPointArea; - public final Point secondPoint; - public final double secondPointArea; + public final Point firstPoint; + public final double firstPointArea; + public final Point secondPoint; + public final double secondPointArea; - public DualOffsetValues() { - firstPoint = new Point(); - firstPointArea = 0; - secondPoint = new Point(); - secondPointArea = 0; - } + public DualOffsetValues() { + firstPoint = new Point(); + firstPointArea = 0; + secondPoint = new Point(); + secondPointArea = 0; + } - public DualOffsetValues( - Point firstPoint, double firstPointArea, Point secondPoint, double secondPointArea) { - this.firstPoint = firstPoint; - this.firstPointArea = firstPointArea; - this.secondPoint = secondPoint; - this.secondPointArea = secondPointArea; - } + public DualOffsetValues( + Point firstPoint, double firstPointArea, Point secondPoint, double secondPointArea) { + this.firstPoint = firstPoint; + this.firstPointArea = firstPointArea; + this.secondPoint = secondPoint; + this.secondPointArea = secondPointArea; + } - public DoubleCouple getLineValues() { - return TargetCalculations.getLineFromPoints(firstPoint, secondPoint); - } + public DoubleCouple getLineValues() { + return TargetCalculations.getLineFromPoints(firstPoint, secondPoint); + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/opencv/ImageFlipMode.java b/photon-core/src/main/java/org/photonvision/vision/opencv/ImageFlipMode.java index 
6fa456e7a0..d156c25e1f 100644 --- a/photon-core/src/main/java/org/photonvision/vision/opencv/ImageFlipMode.java +++ b/photon-core/src/main/java/org/photonvision/vision/opencv/ImageFlipMode.java @@ -18,14 +18,14 @@ package org.photonvision.vision.opencv; public enum ImageFlipMode { - NONE(Integer.MIN_VALUE), - VERTICAL(1), - HORIZONTAL(0), - BOTH(-1); + NONE(Integer.MIN_VALUE), + VERTICAL(1), + HORIZONTAL(0), + BOTH(-1); - public final int value; + public final int value; - ImageFlipMode(int value) { - this.value = value; - } + ImageFlipMode(int value) { + this.value = value; + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/opencv/ImageRotationMode.java b/photon-core/src/main/java/org/photonvision/vision/opencv/ImageRotationMode.java index e72dd50e27..a86ffdab6d 100644 --- a/photon-core/src/main/java/org/photonvision/vision/opencv/ImageRotationMode.java +++ b/photon-core/src/main/java/org/photonvision/vision/opencv/ImageRotationMode.java @@ -18,18 +18,18 @@ package org.photonvision.vision.opencv; public enum ImageRotationMode { - DEG_0(-1), - DEG_90(0), - DEG_180(1), - DEG_270(2); + DEG_0(-1), + DEG_90(0), + DEG_180(1), + DEG_270(2); - public final int value; + public final int value; - ImageRotationMode(int value) { - this.value = value; - } + ImageRotationMode(int value) { + this.value = value; + } - public boolean isRotated() { - return this.value == DEG_90.value || this.value == DEG_270.value; - } + public boolean isRotated() { + return this.value == DEG_90.value || this.value == DEG_270.value; + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/opencv/Releasable.java b/photon-core/src/main/java/org/photonvision/vision/opencv/Releasable.java index dc76678884..91fead4705 100644 --- a/photon-core/src/main/java/org/photonvision/vision/opencv/Releasable.java +++ b/photon-core/src/main/java/org/photonvision/vision/opencv/Releasable.java @@ -18,5 +18,5 @@ package org.photonvision.vision.opencv; public interface Releasable { - void 
release(); + void release(); } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipe/CVPipe.java b/photon-core/src/main/java/org/photonvision/vision/pipe/CVPipe.java index d941f51cd0..23040b1191 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipe/CVPipe.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipe/CVPipe.java @@ -26,34 +26,34 @@ * @param

Parameters type for the pipe */ public abstract class CVPipe { - protected CVPipeResult result = new CVPipeResult<>(); - protected P params; + protected CVPipeResult result = new CVPipeResult<>(); + protected P params; - public void setParams(P params) { - this.params = params; - } + public void setParams(P params) { + this.params = params; + } - /** - * Runs the process for the pipe. - * - * @param in Input for pipe processing. - * @return Result of processing. - */ - protected abstract O process(I in); + /** + * Runs the process for the pipe. + * + * @param in Input for pipe processing. + * @return Result of processing. + */ + protected abstract O process(I in); - /** - * @param in Input for pipe processing. - * @return Result of processing. - */ - public CVPipeResult run(I in) { - long pipeStartNanos = System.nanoTime(); - result.output = process(in); - result.nanosElapsed = System.nanoTime() - pipeStartNanos; - return result; - } + /** + * @param in Input for pipe processing. + * @return Result of processing. 
+ */ + public CVPipeResult run(I in) { + long pipeStartNanos = System.nanoTime(); + result.output = process(in); + result.nanosElapsed = System.nanoTime() - pipeStartNanos; + return result; + } - public static class CVPipeResult { - public O output; - public long nanosElapsed; - } + public static class CVPipeResult { + public O output; + public long nanosElapsed; + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/AprilTagDetectionPipe.java b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/AprilTagDetectionPipe.java index 5c1497db3e..18f95acb5e 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/AprilTagDetectionPipe.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/AprilTagDetectionPipe.java @@ -24,40 +24,40 @@ import org.photonvision.vision.pipe.CVPipe; public class AprilTagDetectionPipe - extends CVPipe, AprilTagDetectionPipeParams> { - private final AprilTagDetector m_detector = new AprilTagDetector(); + extends CVPipe, AprilTagDetectionPipeParams> { + private final AprilTagDetector m_detector = new AprilTagDetector(); - public AprilTagDetectionPipe() { - super(); + public AprilTagDetectionPipe() { + super(); - m_detector.addFamily("tag16h5"); - m_detector.addFamily("tag36h11"); - } - - @Override - protected List process(CVMat in) { - if (in.getMat().empty()) { - return List.of(); - } + m_detector.addFamily("tag16h5"); + m_detector.addFamily("tag36h11"); + } - var ret = m_detector.detect(in.getMat()); + @Override + protected List process(CVMat in) { + if (in.getMat().empty()) { + return List.of(); + } - if (ret == null) { - return List.of(); - } + var ret = m_detector.detect(in.getMat()); - return List.of(ret); + if (ret == null) { + return List.of(); } - @Override - public void setParams(AprilTagDetectionPipeParams newParams) { - if (this.params == null || !this.params.equals(newParams)) { - m_detector.setConfig(newParams.detectorParams); + return List.of(ret); + } - 
m_detector.clearFamilies(); - m_detector.addFamily(newParams.family.getNativeName()); - } + @Override + public void setParams(AprilTagDetectionPipeParams newParams) { + if (this.params == null || !this.params.equals(newParams)) { + m_detector.setConfig(newParams.detectorParams); - super.setParams(newParams); + m_detector.clearFamilies(); + m_detector.addFamily(newParams.family.getNativeName()); } + + super.setParams(newParams); + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/AprilTagDetectionPipeParams.java b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/AprilTagDetectionPipeParams.java index ab52d7bcfb..7a1cd08452 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/AprilTagDetectionPipeParams.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/AprilTagDetectionPipeParams.java @@ -21,32 +21,32 @@ import org.photonvision.vision.apriltag.AprilTagFamily; public class AprilTagDetectionPipeParams { - public final AprilTagFamily family; - public final AprilTagDetector.Config detectorParams; + public final AprilTagFamily family; + public final AprilTagDetector.Config detectorParams; - public AprilTagDetectionPipeParams(AprilTagFamily tagFamily, AprilTagDetector.Config config) { - this.family = tagFamily; - this.detectorParams = config; - } + public AprilTagDetectionPipeParams(AprilTagFamily tagFamily, AprilTagDetector.Config config) { + this.family = tagFamily; + this.detectorParams = config; + } - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + ((family == null) ? 0 : family.hashCode()); - result = prime * result + ((detectorParams == null) ? 0 : detectorParams.hashCode()); - return result; - } + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + ((family == null) ? 0 : family.hashCode()); + result = prime * result + ((detectorParams == null) ? 
0 : detectorParams.hashCode()); + return result; + } - @Override - public boolean equals(Object obj) { - if (this == obj) return true; - if (obj == null) return false; - if (getClass() != obj.getClass()) return false; - AprilTagDetectionPipeParams other = (AprilTagDetectionPipeParams) obj; - if (family != other.family) return false; - if (detectorParams == null) { - return other.detectorParams == null; - } else return detectorParams.equals(other.detectorParams); - } + @Override + public boolean equals(Object obj) { + if (this == obj) return true; + if (obj == null) return false; + if (getClass() != obj.getClass()) return false; + AprilTagDetectionPipeParams other = (AprilTagDetectionPipeParams) obj; + if (family != other.family) return false; + if (detectorParams == null) { + return other.detectorParams == null; + } else return detectorParams.equals(other.detectorParams); + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/AprilTagPoseEstimatorPipe.java b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/AprilTagPoseEstimatorPipe.java index 3c6563e182..388dd63325 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/AprilTagPoseEstimatorPipe.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/AprilTagPoseEstimatorPipe.java @@ -28,101 +28,101 @@ import org.photonvision.vision.pipe.CVPipe; public class AprilTagPoseEstimatorPipe - extends CVPipe< - AprilTagDetection, - AprilTagPoseEstimate, - AprilTagPoseEstimatorPipe.AprilTagPoseEstimatorPipeParams> { - private final AprilTagPoseEstimator m_poseEstimator = - new AprilTagPoseEstimator(new AprilTagPoseEstimator.Config(0, 0, 0, 0, 0)); - - public AprilTagPoseEstimatorPipe() { - super(); + extends CVPipe< + AprilTagDetection, + AprilTagPoseEstimate, + AprilTagPoseEstimatorPipe.AprilTagPoseEstimatorPipeParams> { + private final AprilTagPoseEstimator m_poseEstimator = + new AprilTagPoseEstimator(new AprilTagPoseEstimator.Config(0, 0, 0, 0, 0)); + + 
public AprilTagPoseEstimatorPipe() { + super(); + } + + MatOfPoint2f temp = new MatOfPoint2f(); + + @Override + protected AprilTagPoseEstimate process(AprilTagDetection in) { + // Save the corner points of our detection to an array + Point[] corners = new Point[4]; + for (int i = 0; i < 4; i++) { + corners[i] = new Point(in.getCornerX(i), in.getCornerY(i)); + } + // And shove into our matofpoints + temp.fromArray(corners); + + // Probably overwrites what was in temp before. I hope + Calib3d.undistortImagePoints( + temp, + temp, + params.calibration.getCameraIntrinsicsMat(), + params.calibration.getDistCoeffsMat()); + + // Save out undistorted corners + corners = temp.toArray(); + + // Apriltagdetection expects an array in form [x1 y1 x2 y2 ...] + var fixedCorners = new double[8]; + for (int i = 0; i < 4; i++) { + fixedCorners[i * 2] = corners[i].x; + fixedCorners[i * 2 + 1] = corners[i].y; + } + + // Create a new Detection with the fixed corners + var corrected = + new AprilTagDetection( + in.getFamily(), + in.getId(), + in.getHamming(), + in.getDecisionMargin(), + in.getHomography(), + in.getCenterX(), + in.getCenterY(), + fixedCorners); + + return m_poseEstimator.estimateOrthogonalIteration(corrected, params.nIters); + } + + @Override + public void setParams(AprilTagPoseEstimatorPipe.AprilTagPoseEstimatorPipeParams newParams) { + if (this.params == null || !this.params.equals(newParams)) { + m_poseEstimator.setConfig(newParams.config); } - MatOfPoint2f temp = new MatOfPoint2f(); + super.setParams(newParams); + } - @Override - protected AprilTagPoseEstimate process(AprilTagDetection in) { - // Save the corner points of our detection to an array - Point[] corners = new Point[4]; - for (int i = 0; i < 4; i++) { - corners[i] = new Point(in.getCornerX(i), in.getCornerY(i)); - } - // And shove into our matofpoints - temp.fromArray(corners); - - // Probably overwrites what was in temp before. 
I hope - Calib3d.undistortImagePoints( - temp, - temp, - params.calibration.getCameraIntrinsicsMat(), - params.calibration.getDistCoeffsMat()); - - // Save out undistorted corners - corners = temp.toArray(); - - // Apriltagdetection expects an array in form [x1 y1 x2 y2 ...] - var fixedCorners = new double[8]; - for (int i = 0; i < 4; i++) { - fixedCorners[i * 2] = corners[i].x; - fixedCorners[i * 2 + 1] = corners[i].y; - } - - // Create a new Detection with the fixed corners - var corrected = - new AprilTagDetection( - in.getFamily(), - in.getId(), - in.getHamming(), - in.getDecisionMargin(), - in.getHomography(), - in.getCenterX(), - in.getCenterY(), - fixedCorners); - - return m_poseEstimator.estimateOrthogonalIteration(corrected, params.nIters); + public static class AprilTagPoseEstimatorPipeParams { + final AprilTagPoseEstimator.Config config; + final CameraCalibrationCoefficients calibration; + final int nIters; + + public AprilTagPoseEstimatorPipeParams( + Config config, CameraCalibrationCoefficients cal, int nIters) { + this.config = config; + this.nIters = nIters; + this.calibration = cal; } @Override - public void setParams(AprilTagPoseEstimatorPipe.AprilTagPoseEstimatorPipeParams newParams) { - if (this.params == null || !this.params.equals(newParams)) { - m_poseEstimator.setConfig(newParams.config); - } - - super.setParams(newParams); + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + ((config == null) ? 
0 : config.hashCode()); + result = prime * result + nIters; + return result; } - public static class AprilTagPoseEstimatorPipeParams { - final AprilTagPoseEstimator.Config config; - final CameraCalibrationCoefficients calibration; - final int nIters; - - public AprilTagPoseEstimatorPipeParams( - Config config, CameraCalibrationCoefficients cal, int nIters) { - this.config = config; - this.nIters = nIters; - this.calibration = cal; - } - - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + ((config == null) ? 0 : config.hashCode()); - result = prime * result + nIters; - return result; - } - - @Override - public boolean equals(Object obj) { - if (this == obj) return true; - if (obj == null) return false; - if (getClass() != obj.getClass()) return false; - AprilTagPoseEstimatorPipeParams other = (AprilTagPoseEstimatorPipeParams) obj; - if (config == null) { - if (other.config != null) return false; - } else if (!config.equals(other.config)) return false; - return nIters == other.nIters; - } + @Override + public boolean equals(Object obj) { + if (this == obj) return true; + if (obj == null) return false; + if (getClass() != obj.getClass()) return false; + AprilTagPoseEstimatorPipeParams other = (AprilTagPoseEstimatorPipeParams) obj; + if (config == null) { + if (other.config != null) return false; + } else if (!config.equals(other.config)) return false; + return nIters == other.nIters; } + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/ArucoDetectionPipe.java b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/ArucoDetectionPipe.java index b18978c912..73d60cd7c8 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/ArucoDetectionPipe.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/ArucoDetectionPipe.java @@ -26,25 +26,25 @@ import org.photonvision.vision.pipe.CVPipe; public class ArucoDetectionPipe - extends CVPipe, 
ArucoDetectionPipeParams> { - PhotonArucoDetector detector = new PhotonArucoDetector(); + extends CVPipe, ArucoDetectionPipeParams> { + PhotonArucoDetector detector = new PhotonArucoDetector(); - @Override - protected List process(Mat in) { - return List.of( - detector.detect( - in, - (float) Units.inchesToMeters(6), - params.cameraCalibrationCoefficients, - params.detectorParams)); - } + @Override + protected List process(Mat in) { + return List.of( + detector.detect( + in, + (float) Units.inchesToMeters(6), + params.cameraCalibrationCoefficients, + params.detectorParams)); + } - @Override - public void setParams(ArucoDetectionPipeParams params) { - super.setParams(params); - } + @Override + public void setParams(ArucoDetectionPipeParams params) { + super.setParams(params); + } - public DetectorParameters getParameters() { - return params == null ? null : params.detectorParams.getDetectorParameters(); - } + public DetectorParameters getParameters() { + return params == null ? null : params.detectorParams.getDetectorParameters(); + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/ArucoDetectionPipeParams.java b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/ArucoDetectionPipeParams.java index 62d3214030..26dfd8c50b 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/ArucoDetectionPipeParams.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/ArucoDetectionPipeParams.java @@ -22,36 +22,36 @@ import org.photonvision.vision.calibration.CameraCalibrationCoefficients; public class ArucoDetectionPipeParams { - public ArucoDetector detectorParams; - public final CameraCalibrationCoefficients cameraCalibrationCoefficients; + public ArucoDetector detectorParams; + public final CameraCalibrationCoefficients cameraCalibrationCoefficients; - public ArucoDetectionPipeParams( - ArucoDetector detector, CameraCalibrationCoefficients cameraCalibrationCoefficients) { - this.detectorParams = detector; - 
this.cameraCalibrationCoefficients = cameraCalibrationCoefficients; - } + public ArucoDetectionPipeParams( + ArucoDetector detector, CameraCalibrationCoefficients cameraCalibrationCoefficients) { + this.detectorParams = detector; + this.cameraCalibrationCoefficients = cameraCalibrationCoefficients; + } - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - ArucoDetectionPipeParams that = (ArucoDetectionPipeParams) o; - return Objects.equals(detectorParams, that.detectorParams) - && Objects.equals(cameraCalibrationCoefficients, that.cameraCalibrationCoefficients); - } + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ArucoDetectionPipeParams that = (ArucoDetectionPipeParams) o; + return Objects.equals(detectorParams, that.detectorParams) + && Objects.equals(cameraCalibrationCoefficients, that.cameraCalibrationCoefficients); + } - @Override - public int hashCode() { - return Objects.hash(detectorParams, cameraCalibrationCoefficients); - } + @Override + public int hashCode() { + return Objects.hash(detectorParams, cameraCalibrationCoefficients); + } - @Override - public String toString() { - return "ArucoDetectionPipeParams{" - + "detectorParams=" - + detectorParams - + ", cameraCalibrationCoefficients=" - + cameraCalibrationCoefficients - + '}'; - } + @Override + public String toString() { + return "ArucoDetectionPipeParams{" + + "detectorParams=" + + detectorParams + + ", cameraCalibrationCoefficients=" + + cameraCalibrationCoefficients + + '}'; + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/BlurPipe.java b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/BlurPipe.java index 11eff2e21c..ed22cf415c 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/BlurPipe.java +++ 
b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/BlurPipe.java @@ -24,41 +24,41 @@ /** Represents a pipeline that blurs the image. */ public class BlurPipe extends MutatingPipe { + /** + * Processes this pipe. + * + * @param in Input for pipe processing. + * @return The processed frame. + */ + @Override + protected Void process(Mat in) { + Imgproc.blur(in, in, params.getBlurSize()); + return null; + } + + public static class BlurParams { + // Default BlurImagePrams with zero blur. + public static BlurParams DEFAULT = new BlurParams(0); + + // Member to store the blur size. + private final int m_blurSize; + /** - * Processes this pipe. + * Constructs a new BlurImageParams. * - * @param in Input for pipe processing. - * @return The processed frame. + * @param blurSize The blur size. */ - @Override - protected Void process(Mat in) { - Imgproc.blur(in, in, params.getBlurSize()); - return null; + public BlurParams(int blurSize) { + m_blurSize = blurSize; } - public static class BlurParams { - // Default BlurImagePrams with zero blur. - public static BlurParams DEFAULT = new BlurParams(0); - - // Member to store the blur size. - private final int m_blurSize; - - /** - * Constructs a new BlurImageParams. - * - * @param blurSize The blur size. - */ - public BlurParams(int blurSize) { - m_blurSize = blurSize; - } - - /** - * Returns the blur size. - * - * @return The blur size. - */ - public Size getBlurSize() { - return new Size(m_blurSize, m_blurSize); - } + /** + * Returns the blur size. + * + * @return The blur size. 
+ */ + public Size getBlurSize() { + return new Size(m_blurSize, m_blurSize); } + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/CalculateFPSPipe.java b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/CalculateFPSPipe.java index 113da4d357..b0c90efc3d 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/CalculateFPSPipe.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/CalculateFPSPipe.java @@ -22,22 +22,22 @@ import org.photonvision.vision.pipe.CVPipe; public class CalculateFPSPipe - extends CVPipe { - private final LinearFilter fpsFilter = LinearFilter.movingAverage(20); - StopWatch clock = new StopWatch(); + extends CVPipe { + private final LinearFilter fpsFilter = LinearFilter.movingAverage(20); + StopWatch clock = new StopWatch(); - @Override - protected Integer process(Void in) { - if (!clock.isStarted()) { - clock.reset(); - clock.start(); - } - clock.stop(); - var fps = (int) fpsFilter.calculate(1000.0 / clock.getTime()); - clock.reset(); - clock.start(); - return fps; + @Override + protected Integer process(Void in) { + if (!clock.isStarted()) { + clock.reset(); + clock.start(); } + clock.stop(); + var fps = (int) fpsFilter.calculate(1000.0 / clock.getTime()); + clock.reset(); + clock.start(); + return fps; + } - public static class CalculateFPSPipeParams {} + public static class CalculateFPSPipeParams {} } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/Calibrate3dPipe.java b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/Calibrate3dPipe.java index bfca09d6fd..131e2c6dcf 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/Calibrate3dPipe.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/Calibrate3dPipe.java @@ -34,133 +34,133 @@ import org.photonvision.vision.pipe.CVPipe; public class Calibrate3dPipe - extends CVPipe< - List>, - CameraCalibrationCoefficients, - 
Calibrate3dPipe.CalibratePipeParams> { - // Camera matrix stores the center of the image and focal length across the x and y-axis in a 3x3 - // matrix - private final Mat cameraMatrix = new Mat(); - // Stores the radical and tangential distortion in a 5x1 matrix - private final MatOfDouble distortionCoefficients = new MatOfDouble(); - - // For logging - private static final Logger logger = new Logger(Calibrate3dPipe.class, LogGroup.General); - - // Translational and rotational matrices - private final List rvecs = new ArrayList<>(); - private final List tvecs = new ArrayList<>(); - - // The Standard deviation of the estimated parameters - private final Mat stdDeviationsIntrinsics = new Mat(); - private final Mat stdDeviationsExtrinsics = new Mat(); - - // Contains the re projection error of each snapshot by re projecting the corners we found and - // finding the Euclidean distance between the actual corners. - private final Mat perViewErrors = new Mat(); - - // RMS of the calibration - private double calibrationAccuracy; - - /** - * Runs the process for the pipe. - * - * @param in Input for pipe processing. In the format (Input image, object points, image points) - * @return Result of processing. 
- */ - @Override - protected CameraCalibrationCoefficients process(List> in) { - in = - in.stream() - .filter( - it -> - it != null - && it.getLeft() != null - && it.getMiddle() != null - && it.getRight() != null) - .collect(Collectors.toList()); - try { - // FindBoardCorners pipe outputs all the image points, object points, and frames to calculate - // imageSize from, other parameters are output Mats - - var objPoints = in.stream().map(Triple::getMiddle).collect(Collectors.toList()); - var imgPts = in.stream().map(Triple::getRight).collect(Collectors.toList()); - if (objPoints.size() != imgPts.size()) { - logger.error("objpts.size != imgpts.size"); - return null; - } - calibrationAccuracy = - Calib3d.calibrateCameraExtended( - objPoints, - imgPts, - new Size(in.get(0).getLeft().width, in.get(0).getLeft().height), - cameraMatrix, - distortionCoefficients, - rvecs, - tvecs, - stdDeviationsIntrinsics, - stdDeviationsExtrinsics, - perViewErrors); - } catch (Exception e) { - logger.error("Calibration failed!", e); - e.printStackTrace(); - return null; - } - JsonMat cameraMatrixMat = JsonMat.fromMat(cameraMatrix); - JsonMat distortionCoefficientsMat = JsonMat.fromMat(distortionCoefficients); - // Create a new CameraCalibrationCoefficients object to pass onto SolvePnP - double[] perViewErrorsArray = - new double[(int) perViewErrors.total() * perViewErrors.channels()]; - perViewErrors.get(0, 0, perViewErrorsArray); - - // Standard deviation of results - double stdDev = calculateSD(perViewErrorsArray); - try { - // Print calibration successful - logger.info( - "CALIBRATION SUCCESS for res " - + params.resolution - + " (with accuracy " - + calibrationAccuracy - + ")! 
camMatrix: \n" - + new ObjectMapper().writeValueAsString(cameraMatrixMat) - + "\ndistortionCoeffs:\n" - + new ObjectMapper().writeValueAsString(distortionCoefficientsMat) - + "\nWith Standard Deviation Of\n" - + stdDev - + "\n"); - } catch (JsonProcessingException e) { - logger.error("Failed to parse calibration data to json!", e); - } - return new CameraCalibrationCoefficients( - params.resolution, cameraMatrixMat, distortionCoefficientsMat, perViewErrorsArray, stdDev); + extends CVPipe< + List>, + CameraCalibrationCoefficients, + Calibrate3dPipe.CalibratePipeParams> { + // Camera matrix stores the center of the image and focal length across the x and y-axis in a 3x3 + // matrix + private final Mat cameraMatrix = new Mat(); + // Stores the radical and tangential distortion in a 5x1 matrix + private final MatOfDouble distortionCoefficients = new MatOfDouble(); + + // For logging + private static final Logger logger = new Logger(Calibrate3dPipe.class, LogGroup.General); + + // Translational and rotational matrices + private final List rvecs = new ArrayList<>(); + private final List tvecs = new ArrayList<>(); + + // The Standard deviation of the estimated parameters + private final Mat stdDeviationsIntrinsics = new Mat(); + private final Mat stdDeviationsExtrinsics = new Mat(); + + // Contains the re projection error of each snapshot by re projecting the corners we found and + // finding the Euclidean distance between the actual corners. + private final Mat perViewErrors = new Mat(); + + // RMS of the calibration + private double calibrationAccuracy; + + /** + * Runs the process for the pipe. + * + * @param in Input for pipe processing. In the format (Input image, object points, image points) + * @return Result of processing. 
+ */ + @Override + protected CameraCalibrationCoefficients process(List> in) { + in = + in.stream() + .filter( + it -> + it != null + && it.getLeft() != null + && it.getMiddle() != null + && it.getRight() != null) + .collect(Collectors.toList()); + try { + // FindBoardCorners pipe outputs all the image points, object points, and frames to calculate + // imageSize from, other parameters are output Mats + + var objPoints = in.stream().map(Triple::getMiddle).collect(Collectors.toList()); + var imgPts = in.stream().map(Triple::getRight).collect(Collectors.toList()); + if (objPoints.size() != imgPts.size()) { + logger.error("objpts.size != imgpts.size"); + return null; + } + calibrationAccuracy = + Calib3d.calibrateCameraExtended( + objPoints, + imgPts, + new Size(in.get(0).getLeft().width, in.get(0).getLeft().height), + cameraMatrix, + distortionCoefficients, + rvecs, + tvecs, + stdDeviationsIntrinsics, + stdDeviationsExtrinsics, + perViewErrors); + } catch (Exception e) { + logger.error("Calibration failed!", e); + e.printStackTrace(); + return null; } + JsonMat cameraMatrixMat = JsonMat.fromMat(cameraMatrix); + JsonMat distortionCoefficientsMat = JsonMat.fromMat(distortionCoefficients); + // Create a new CameraCalibrationCoefficients object to pass onto SolvePnP + double[] perViewErrorsArray = + new double[(int) perViewErrors.total() * perViewErrors.channels()]; + perViewErrors.get(0, 0, perViewErrorsArray); + + // Standard deviation of results + double stdDev = calculateSD(perViewErrorsArray); + try { + // Print calibration successful + logger.info( + "CALIBRATION SUCCESS for res " + + params.resolution + + " (with accuracy " + + calibrationAccuracy + + ")! 
camMatrix: \n" + + new ObjectMapper().writeValueAsString(cameraMatrixMat) + + "\ndistortionCoeffs:\n" + + new ObjectMapper().writeValueAsString(distortionCoefficientsMat) + + "\nWith Standard Deviation Of\n" + + stdDev + + "\n"); + } catch (JsonProcessingException e) { + logger.error("Failed to parse calibration data to json!", e); + } + return new CameraCalibrationCoefficients( + params.resolution, cameraMatrixMat, distortionCoefficientsMat, perViewErrorsArray, stdDev); + } - // Calculate standard deviation of the RMS error of the snapshots - private static double calculateSD(double[] numArray) { - double sum = 0.0, standardDeviation = 0.0; - int length = numArray.length; - - for (double num : numArray) { - sum += num; - } + // Calculate standard deviation of the RMS error of the snapshots + private static double calculateSD(double[] numArray) { + double sum = 0.0, standardDeviation = 0.0; + int length = numArray.length; - double mean = sum / length; + for (double num : numArray) { + sum += num; + } - for (double num : numArray) { - standardDeviation += Math.pow(num - mean, 2); - } + double mean = sum / length; - return Math.sqrt(standardDeviation / length); + for (double num : numArray) { + standardDeviation += Math.pow(num - mean, 2); } - public static class CalibratePipeParams { - // Only needs resolution to pass onto CameraCalibrationCoefficients object. - private final Size resolution; + return Math.sqrt(standardDeviation / length); + } + + public static class CalibratePipeParams { + // Only needs resolution to pass onto CameraCalibrationCoefficients object. 
+ private final Size resolution; - public CalibratePipeParams(Size resolution) { - // logger.info("res: " + resolution.toString()); - this.resolution = resolution; - } + public CalibratePipeParams(Size resolution) { + // logger.info("res: " + resolution.toString()); + this.resolution = resolution; } + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/Collect2dTargetsPipe.java b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/Collect2dTargetsPipe.java index d5e4bda995..437ef1ec5a 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/Collect2dTargetsPipe.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/Collect2dTargetsPipe.java @@ -27,55 +27,55 @@ /** Represents a pipe that collects available 2d targets. */ public class Collect2dTargetsPipe - extends CVPipe< - List, List, Collect2dTargetsPipe.Collect2dTargetsParams> { - /** - * Processes this pipeline. - * - * @param in Input for pipe processing. - * @return A list of tracked targets. - */ - @Override - protected List process(List in) { - List targets = new ArrayList<>(); + extends CVPipe< + List, List, Collect2dTargetsPipe.Collect2dTargetsParams> { + /** + * Processes this pipeline. + * + * @param in Input for pipe processing. + * @return A list of tracked targets. 
+ */ + @Override + protected List process(List in) { + List targets = new ArrayList<>(); - var calculationParams = - new TrackedTarget.TargetCalculationParameters( - params.targetOrientation == TargetOrientation.Landscape, - params.targetOffsetPointEdge, - params.robotOffsetPointMode, - params.robotOffsetSinglePoint, - params.dualOffsetValues, - params.frameStaticProperties); + var calculationParams = + new TrackedTarget.TargetCalculationParameters( + params.targetOrientation == TargetOrientation.Landscape, + params.targetOffsetPointEdge, + params.robotOffsetPointMode, + params.robotOffsetSinglePoint, + params.dualOffsetValues, + params.frameStaticProperties); - for (PotentialTarget target : in) { - targets.add(new TrackedTarget(target, calculationParams, target.shape)); - } - - return targets; + for (PotentialTarget target : in) { + targets.add(new TrackedTarget(target, calculationParams, target.shape)); } - public static class Collect2dTargetsParams { - private final RobotOffsetPointMode robotOffsetPointMode; - private final Point robotOffsetSinglePoint; - private final DualOffsetValues dualOffsetValues; - private final TargetOffsetPointEdge targetOffsetPointEdge; - private final TargetOrientation targetOrientation; - private final FrameStaticProperties frameStaticProperties; + return targets; + } + + public static class Collect2dTargetsParams { + private final RobotOffsetPointMode robotOffsetPointMode; + private final Point robotOffsetSinglePoint; + private final DualOffsetValues dualOffsetValues; + private final TargetOffsetPointEdge targetOffsetPointEdge; + private final TargetOrientation targetOrientation; + private final FrameStaticProperties frameStaticProperties; - public Collect2dTargetsParams( - RobotOffsetPointMode robotOffsetPointMode, - Point robotOffsetSinglePoint, - DualOffsetValues dualOffsetValues, - TargetOffsetPointEdge targetOffsetPointEdge, - TargetOrientation orientation, - FrameStaticProperties frameStaticProperties) { - 
this.frameStaticProperties = frameStaticProperties; - this.robotOffsetPointMode = robotOffsetPointMode; - this.robotOffsetSinglePoint = robotOffsetSinglePoint; - this.dualOffsetValues = dualOffsetValues; - this.targetOffsetPointEdge = targetOffsetPointEdge; - targetOrientation = orientation; - } + public Collect2dTargetsParams( + RobotOffsetPointMode robotOffsetPointMode, + Point robotOffsetSinglePoint, + DualOffsetValues dualOffsetValues, + TargetOffsetPointEdge targetOffsetPointEdge, + TargetOrientation orientation, + FrameStaticProperties frameStaticProperties) { + this.frameStaticProperties = frameStaticProperties; + this.robotOffsetPointMode = robotOffsetPointMode; + this.robotOffsetSinglePoint = robotOffsetSinglePoint; + this.dualOffsetValues = dualOffsetValues; + this.targetOffsetPointEdge = targetOffsetPointEdge; + targetOrientation = orientation; } + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/CornerDetectionPipe.java b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/CornerDetectionPipe.java index bed3c6b5ef..538403c333 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/CornerDetectionPipe.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/CornerDetectionPipe.java @@ -29,179 +29,179 @@ * CornerDetectionPipeParameters} affect how these corners are calculated. */ public class CornerDetectionPipe - extends CVPipe< - List, - List, - CornerDetectionPipe.CornerDetectionPipeParameters> { - Comparator leftRightComparator = Comparator.comparingDouble(point -> point.x); - Comparator verticalComparator = Comparator.comparingDouble(point -> point.y); - MatOfPoint2f polyOutput = new MatOfPoint2f(); - - @Override - protected List process(List targetList) { - for (var target : targetList) { - // detect corners. 
Might implement more algorithms later but - // APPROX_POLY_DP_AND_EXTREME_CORNERS should be year agnostic - if (Objects.requireNonNull(params.cornerDetectionStrategy) - == DetectionStrategy.APPROX_POLY_DP_AND_EXTREME_CORNERS) { - var targetCorners = detectExtremeCornersByApproxPolyDp(target, params.calculateConvexHulls); - target.setTargetCorners(targetCorners); - } - } - return targetList; + extends CVPipe< + List, + List, + CornerDetectionPipe.CornerDetectionPipeParameters> { + Comparator leftRightComparator = Comparator.comparingDouble(point -> point.x); + Comparator verticalComparator = Comparator.comparingDouble(point -> point.y); + MatOfPoint2f polyOutput = new MatOfPoint2f(); + + @Override + protected List process(List targetList) { + for (var target : targetList) { + // detect corners. Might implement more algorithms later but + // APPROX_POLY_DP_AND_EXTREME_CORNERS should be year agnostic + if (Objects.requireNonNull(params.cornerDetectionStrategy) + == DetectionStrategy.APPROX_POLY_DP_AND_EXTREME_CORNERS) { + var targetCorners = detectExtremeCornersByApproxPolyDp(target, params.calculateConvexHulls); + target.setTargetCorners(targetCorners); + } } - - /** - * @param target The target to find the corners of. 
- * @return Corners: (bottom-left, bottom-right, top-right, top-left) - */ - private List findBoundingBoxCorners(TrackedTarget target) { - // extract the corners - var points = new Point[4]; - target.m_mainContour.getMinAreaRect().points(points); - - // find the bl/br/tr/tl corners - // first, min by left/right - var list_ = Arrays.asList(points); - list_.sort(leftRightComparator); - // of this, we now have left and right - // sort to get top and bottom - var left = new ArrayList<>(List.of(list_.get(0), list_.get(1))); - left.sort(verticalComparator); - var right = new ArrayList<>(List.of(list_.get(2), list_.get(3))); - right.sort(verticalComparator); - - var tl = left.get(0); - var bl = left.get(1); - var tr = right.get(0); - var br = right.get(1); - - return List.of(bl, br, tr, tl); + return targetList; + } + + /** + * @param target The target to find the corners of. + * @return Corners: (bottom-left, bottom-right, top-right, top-left) + */ + private List findBoundingBoxCorners(TrackedTarget target) { + // extract the corners + var points = new Point[4]; + target.m_mainContour.getMinAreaRect().points(points); + + // find the bl/br/tr/tl corners + // first, min by left/right + var list_ = Arrays.asList(points); + list_.sort(leftRightComparator); + // of this, we now have left and right + // sort to get top and bottom + var left = new ArrayList<>(List.of(list_.get(0), list_.get(1))); + left.sort(verticalComparator); + var right = new ArrayList<>(List.of(list_.get(2), list_.get(3))); + right.sort(verticalComparator); + + var tl = left.get(0); + var bl = left.get(1); + var tr = right.get(0); + var br = right.get(1); + + return List.of(bl, br, tr, tl); + } + + /** + * @param a First point. + * @param b Second point. + * @return The straight line distance between them. 
+ */ + private static double distanceBetween(Point a, Point b) { + double xDelta = a.x - b.x; + double yDelta = a.y - b.y; + return Math.sqrt(xDelta * xDelta + yDelta * yDelta); + } + + /** + * Find the 4 most extreme corners of the target's contour. + * + * @param target The target to track. + * @param convexHull Whether to use the convex hull of the contour instead. + * @return The 4 extreme corners of the contour: (bottom-left, bottom-right, top-right, top-left) + */ + private List detectExtremeCornersByApproxPolyDp(TrackedTarget target, boolean convexHull) { + var centroid = target.getMinAreaRect().center; + Comparator compareCenterDist = + Comparator.comparingDouble((Point point) -> distanceBetween(centroid, point)); + + MatOfPoint2f targetContour; + if (convexHull) { + targetContour = target.m_mainContour.getConvexHull(); + } else { + targetContour = target.m_mainContour.getMat2f(); } - /** - * @param a First point. - * @param b Second point. - * @return The straight line distance between them. - */ - private static double distanceBetween(Point a, Point b) { - double xDelta = a.x - b.x; - double yDelta = a.y - b.y; - return Math.sqrt(xDelta * xDelta + yDelta * yDelta); + /* + approximating a shape around the contours + Can be tuned to allow/disallow hulls + we want a number between 0 and 0.16 out of a percentage from 0 to 100 + so take accuracy and divide by 600 + + Furthermore, we know that the contour is open if we haven't done convex hulls, + and it has subcontours. + */ + var isOpen = !convexHull && target.hasSubContours(); + var peri = Imgproc.arcLength(targetContour, true); + Imgproc.approxPolyDP( + targetContour, polyOutput, params.accuracyPercentage / 600.0 * peri, !isOpen); + + // we must have at least 4 corners for this strategy to work. + // If we are looking for an exact side count that is handled here too. 
+ var pointList = new ArrayList<>(polyOutput.toList()); + if (pointList.size() < 4 || (params.exactSideCount && params.sideCount != pointList.size())) + return null; + + target.setApproximateBoundingPolygon(polyOutput); + + // left top, left bottom, right bottom, right top + var boundingBoxCorners = findBoundingBoxCorners(target); + + var compareDistToTl = + Comparator.comparingDouble((Point p) -> distanceBetween(p, boundingBoxCorners.get(3))); + + var compareDistToTr = + Comparator.comparingDouble((Point p) -> distanceBetween(p, boundingBoxCorners.get(2))); + + // top left and top right are the poly corners closest to the bouding box tl and tr + pointList.sort(compareDistToTl); + var tl = pointList.get(0); + pointList.remove(tl); + pointList.sort(compareDistToTr); + var tr = pointList.get(0); + pointList.remove(tr); + + // at this point we look for points on the left/right of the center of the remaining points + // and maximize their distance from the center of the min area rectangle + var leftList = new ArrayList(); + var rightList = new ArrayList(); + double averageXCoordinate = 0.0; + for (var p : pointList) { + averageXCoordinate += p.x; } - - /** - * Find the 4 most extreme corners of the target's contour. - * - * @param target The target to track. - * @param convexHull Whether to use the convex hull of the contour instead. 
- * @return The 4 extreme corners of the contour: (bottom-left, bottom-right, top-right, top-left) - */ - private List detectExtremeCornersByApproxPolyDp(TrackedTarget target, boolean convexHull) { - var centroid = target.getMinAreaRect().center; - Comparator compareCenterDist = - Comparator.comparingDouble((Point point) -> distanceBetween(centroid, point)); - - MatOfPoint2f targetContour; - if (convexHull) { - targetContour = target.m_mainContour.getConvexHull(); + averageXCoordinate /= pointList.size(); + + // add points that are below the center of the min area rectangle of the target + for (var p : pointList) { + if (p.y + > target.m_mainContour.getBoundingRect().y + + target.m_mainContour.getBoundingRect().height / 2.0) + if (p.x < averageXCoordinate) { + leftList.add(p); } else { - targetContour = target.m_mainContour.getMat2f(); - } - - /* - approximating a shape around the contours - Can be tuned to allow/disallow hulls - we want a number between 0 and 0.16 out of a percentage from 0 to 100 - so take accuracy and divide by 600 - - Furthermore, we know that the contour is open if we haven't done convex hulls, - and it has subcontours. - */ - var isOpen = !convexHull && target.hasSubContours(); - var peri = Imgproc.arcLength(targetContour, true); - Imgproc.approxPolyDP( - targetContour, polyOutput, params.accuracyPercentage / 600.0 * peri, !isOpen); - - // we must have at least 4 corners for this strategy to work. - // If we are looking for an exact side count that is handled here too. 
- var pointList = new ArrayList<>(polyOutput.toList()); - if (pointList.size() < 4 || (params.exactSideCount && params.sideCount != pointList.size())) - return null; - - target.setApproximateBoundingPolygon(polyOutput); - - // left top, left bottom, right bottom, right top - var boundingBoxCorners = findBoundingBoxCorners(target); - - var compareDistToTl = - Comparator.comparingDouble((Point p) -> distanceBetween(p, boundingBoxCorners.get(3))); - - var compareDistToTr = - Comparator.comparingDouble((Point p) -> distanceBetween(p, boundingBoxCorners.get(2))); - - // top left and top right are the poly corners closest to the bouding box tl and tr - pointList.sort(compareDistToTl); - var tl = pointList.get(0); - pointList.remove(tl); - pointList.sort(compareDistToTr); - var tr = pointList.get(0); - pointList.remove(tr); - - // at this point we look for points on the left/right of the center of the remaining points - // and maximize their distance from the center of the min area rectangle - var leftList = new ArrayList(); - var rightList = new ArrayList(); - double averageXCoordinate = 0.0; - for (var p : pointList) { - averageXCoordinate += p.x; + rightList.add(p); } - averageXCoordinate /= pointList.size(); - - // add points that are below the center of the min area rectangle of the target - for (var p : pointList) { - if (p.y - > target.m_mainContour.getBoundingRect().y - + target.m_mainContour.getBoundingRect().height / 2.0) - if (p.x < averageXCoordinate) { - leftList.add(p); - } else { - rightList.add(p); - } - } - if (leftList.isEmpty() || rightList.isEmpty()) return null; - leftList.sort(compareCenterDist); - rightList.sort(compareCenterDist); - var bl = leftList.get(leftList.size() - 1); - var br = rightList.get(rightList.size() - 1); - return List.of(bl, br, tr, tl); } - - public static class CornerDetectionPipeParameters { - private final DetectionStrategy cornerDetectionStrategy; - - private final boolean calculateConvexHulls; - private final boolean 
exactSideCount; - private final int sideCount; - - /** This number can be changed to change how "accurate" our approximate polygon must be. */ - private final double accuracyPercentage; - - public CornerDetectionPipeParameters( - DetectionStrategy cornerDetectionStrategy, - boolean calculateConvexHulls, - boolean exactSideCount, - int sideCount, - double accuracyPercentage) { - this.cornerDetectionStrategy = cornerDetectionStrategy; - this.calculateConvexHulls = calculateConvexHulls; - this.exactSideCount = exactSideCount; - this.sideCount = sideCount; - this.accuracyPercentage = accuracyPercentage; - } + if (leftList.isEmpty() || rightList.isEmpty()) return null; + leftList.sort(compareCenterDist); + rightList.sort(compareCenterDist); + var bl = leftList.get(leftList.size() - 1); + var br = rightList.get(rightList.size() - 1); + return List.of(bl, br, tr, tl); + } + + public static class CornerDetectionPipeParameters { + private final DetectionStrategy cornerDetectionStrategy; + + private final boolean calculateConvexHulls; + private final boolean exactSideCount; + private final int sideCount; + + /** This number can be changed to change how "accurate" our approximate polygon must be. 
*/ + private final double accuracyPercentage; + + public CornerDetectionPipeParameters( + DetectionStrategy cornerDetectionStrategy, + boolean calculateConvexHulls, + boolean exactSideCount, + int sideCount, + double accuracyPercentage) { + this.cornerDetectionStrategy = cornerDetectionStrategy; + this.calculateConvexHulls = calculateConvexHulls; + this.exactSideCount = exactSideCount; + this.sideCount = sideCount; + this.accuracyPercentage = accuracyPercentage; } + } - public enum DetectionStrategy { - APPROX_POLY_DP_AND_EXTREME_CORNERS - } + public enum DetectionStrategy { + APPROX_POLY_DP_AND_EXTREME_CORNERS + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/Draw2dAprilTagsPipe.java b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/Draw2dAprilTagsPipe.java index ec88e06fe3..5ae2b59200 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/Draw2dAprilTagsPipe.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/Draw2dAprilTagsPipe.java @@ -21,14 +21,14 @@ import org.photonvision.vision.frame.FrameDivisor; public class Draw2dAprilTagsPipe extends Draw2dTargetsPipe { - public static class Draw2dAprilTagsParams extends Draw2dTargetsPipe.Draw2dTargetsParams { - public Draw2dAprilTagsParams( - boolean shouldDraw, boolean showMultipleTargets, FrameDivisor divisor) { - super(shouldDraw, showMultipleTargets, divisor); - // We want to show the polygon, not the rotated box - this.showRotatedBox = false; - this.showMaximumBox = false; - this.rotatedBoxColor = Color.RED; - } + public static class Draw2dAprilTagsParams extends Draw2dTargetsPipe.Draw2dTargetsParams { + public Draw2dAprilTagsParams( + boolean shouldDraw, boolean showMultipleTargets, FrameDivisor divisor) { + super(shouldDraw, showMultipleTargets, divisor); + // We want to show the polygon, not the rotated box + this.showRotatedBox = false; + this.showMaximumBox = false; + this.rotatedBoxColor = Color.RED; } + } } diff --git 
a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/Draw2dArucoPipe.java b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/Draw2dArucoPipe.java index 85f4ef1801..036c9ecdce 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/Draw2dArucoPipe.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/Draw2dArucoPipe.java @@ -21,14 +21,14 @@ import org.photonvision.vision.frame.FrameDivisor; public class Draw2dArucoPipe extends Draw2dTargetsPipe { - public static class Draw2dArucoParams extends Draw2dTargetsPipe.Draw2dTargetsParams { - public Draw2dArucoParams( - boolean shouldDraw, boolean showMultipleTargets, FrameDivisor divisor) { - super(shouldDraw, showMultipleTargets, divisor); - // We want to show the polygon, not the rotated box - this.showRotatedBox = false; - this.showMaximumBox = false; - this.rotatedBoxColor = Color.RED; - } + public static class Draw2dArucoParams extends Draw2dTargetsPipe.Draw2dTargetsParams { + public Draw2dArucoParams( + boolean shouldDraw, boolean showMultipleTargets, FrameDivisor divisor) { + super(shouldDraw, showMultipleTargets, divisor); + // We want to show the polygon, not the rotated box + this.showRotatedBox = false; + this.showMaximumBox = false; + this.rotatedBoxColor = Color.RED; } + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/Draw2dCrosshairPipe.java b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/Draw2dCrosshairPipe.java index 53f5852e72..c9fc3ba280 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/Draw2dCrosshairPipe.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/Draw2dCrosshairPipe.java @@ -34,101 +34,101 @@ import org.photonvision.vision.target.TrackedTarget; public class Draw2dCrosshairPipe - extends MutatingPipe< - Pair>, Draw2dCrosshairPipe.Draw2dCrosshairParams> { - @Override - protected Void process(Pair> in) { - if (!params.shouldDraw) return null; + extends 
MutatingPipe< + Pair>, Draw2dCrosshairPipe.Draw2dCrosshairParams> { + @Override + protected Void process(Pair> in) { + if (!params.shouldDraw) return null; - var image = in.getLeft(); + var image = in.getLeft(); - if (params.showCrosshair) { - double x = params.frameStaticProperties.centerX; - double y = params.frameStaticProperties.centerY; - double scale = params.frameStaticProperties.imageWidth / (double) params.divisor.value / 32.0; + if (params.showCrosshair) { + double x = params.frameStaticProperties.centerX; + double y = params.frameStaticProperties.centerY; + double scale = params.frameStaticProperties.imageWidth / (double) params.divisor.value / 32.0; - if (this.params.rotMode == ImageRotationMode.DEG_270 - || this.params.rotMode == ImageRotationMode.DEG_90) { - var tmp = x; - x = y; - y = tmp; - } + if (this.params.rotMode == ImageRotationMode.DEG_270 + || this.params.rotMode == ImageRotationMode.DEG_90) { + var tmp = x; + x = y; + y = tmp; + } - switch (params.robotOffsetPointMode) { - case Single: - if (params.singleOffsetPoint.x != 0 && params.singleOffsetPoint.y != 0) { - x = params.singleOffsetPoint.x; - y = params.singleOffsetPoint.y; - } - break; - case Dual: - if (!in.getRight().isEmpty()) { - var target = in.getRight().get(0); - if (target != null) { - var area = target.getArea(); - var offsetCrosshair = - TargetCalculations.calculateDualOffsetCrosshair(params.dualOffsetValues, area); - x = offsetCrosshair.x; - y = offsetCrosshair.y; - } - } - break; + switch (params.robotOffsetPointMode) { + case Single: + if (params.singleOffsetPoint.x != 0 && params.singleOffsetPoint.y != 0) { + x = params.singleOffsetPoint.x; + y = params.singleOffsetPoint.y; + } + break; + case Dual: + if (!in.getRight().isEmpty()) { + var target = in.getRight().get(0); + if (target != null) { + var area = target.getArea(); + var offsetCrosshair = + TargetCalculations.calculateDualOffsetCrosshair(params.dualOffsetValues, area); + x = offsetCrosshair.x; + y = 
offsetCrosshair.y; } + } + break; + } - x /= (double) params.divisor.value; - y /= (double) params.divisor.value; + x /= (double) params.divisor.value; + y /= (double) params.divisor.value; - Point xMax = new Point(x + scale, y); - Point xMin = new Point(x - scale, y); - Point yMax = new Point(x, y + scale); - Point yMin = new Point(x, y - scale); + Point xMax = new Point(x + scale, y); + Point xMin = new Point(x - scale, y); + Point yMax = new Point(x, y + scale); + Point yMin = new Point(x, y - scale); - Imgproc.line(image, xMax, xMin, ColorHelper.colorToScalar(params.crosshairColor)); - Imgproc.line(image, yMax, yMin, ColorHelper.colorToScalar(params.crosshairColor)); - } - return null; + Imgproc.line(image, xMax, xMin, ColorHelper.colorToScalar(params.crosshairColor)); + Imgproc.line(image, yMax, yMin, ColorHelper.colorToScalar(params.crosshairColor)); } + return null; + } - public static class Draw2dCrosshairParams { - public boolean showCrosshair = true; - public Color crosshairColor = Color.GREEN; + public static class Draw2dCrosshairParams { + public boolean showCrosshair = true; + public Color crosshairColor = Color.GREEN; - public final boolean shouldDraw; - public final FrameStaticProperties frameStaticProperties; - public final ImageRotationMode rotMode; - public final RobotOffsetPointMode robotOffsetPointMode; - public final Point singleOffsetPoint; - public final DualOffsetValues dualOffsetValues; - private final FrameDivisor divisor; + public final boolean shouldDraw; + public final FrameStaticProperties frameStaticProperties; + public final ImageRotationMode rotMode; + public final RobotOffsetPointMode robotOffsetPointMode; + public final Point singleOffsetPoint; + public final DualOffsetValues dualOffsetValues; + private final FrameDivisor divisor; - public Draw2dCrosshairParams( - FrameStaticProperties frameStaticProperties, - FrameDivisor divisor, - ImageRotationMode rotMode) { - shouldDraw = true; - this.frameStaticProperties = 
frameStaticProperties; - this.rotMode = rotMode; - robotOffsetPointMode = RobotOffsetPointMode.None; - singleOffsetPoint = new Point(); - dualOffsetValues = new DualOffsetValues(); - this.divisor = divisor; - } + public Draw2dCrosshairParams( + FrameStaticProperties frameStaticProperties, + FrameDivisor divisor, + ImageRotationMode rotMode) { + shouldDraw = true; + this.frameStaticProperties = frameStaticProperties; + this.rotMode = rotMode; + robotOffsetPointMode = RobotOffsetPointMode.None; + singleOffsetPoint = new Point(); + dualOffsetValues = new DualOffsetValues(); + this.divisor = divisor; + } - public Draw2dCrosshairParams( - boolean shouldDraw, - RobotOffsetPointMode robotOffsetPointMode, - Point singleOffsetPoint, - DualOffsetValues dualOffsetValues, - FrameStaticProperties frameStaticProperties, - FrameDivisor divisor, - ImageRotationMode rotMode) { - this.shouldDraw = shouldDraw; - this.frameStaticProperties = frameStaticProperties; - this.robotOffsetPointMode = robotOffsetPointMode; - this.singleOffsetPoint = singleOffsetPoint; - this.dualOffsetValues = dualOffsetValues; - this.divisor = divisor; - this.rotMode = rotMode; - } + public Draw2dCrosshairParams( + boolean shouldDraw, + RobotOffsetPointMode robotOffsetPointMode, + Point singleOffsetPoint, + DualOffsetValues dualOffsetValues, + FrameStaticProperties frameStaticProperties, + FrameDivisor divisor, + ImageRotationMode rotMode) { + this.shouldDraw = shouldDraw; + this.frameStaticProperties = frameStaticProperties; + this.robotOffsetPointMode = robotOffsetPointMode; + this.singleOffsetPoint = singleOffsetPoint; + this.dualOffsetValues = dualOffsetValues; + this.divisor = divisor; + this.rotMode = rotMode; } + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/Draw2dTargetsPipe.java b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/Draw2dTargetsPipe.java index 3365a5cf3e..159698ac1e 100644 --- 
a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/Draw2dTargetsPipe.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/Draw2dTargetsPipe.java @@ -33,225 +33,225 @@ import org.photonvision.vision.target.TrackedTarget; public class Draw2dTargetsPipe - extends MutatingPipe>, Draw2dTargetsPipe.Draw2dTargetsParams> { - MatOfPoint tempMat = new MatOfPoint(); - private static final Logger logger = new Logger(Draw2dTargetsPipe.class, LogGroup.General); - - @Override - protected Void process(Pair> in) { - var imRows = in.getLeft().rows(); - var imCols = in.getLeft().cols(); - var imageSize = Math.sqrt(imRows * imCols); - var textSize = params.kPixelsToText * imageSize; - var thickness = params.kPixelsToThickness * imageSize; - - if (!params.shouldDraw) return null; - - if (!in.getRight().isEmpty() - && (params.showCentroid - || params.showMaximumBox - || params.showRotatedBox - || params.showShape)) { - var centroidColour = ColorHelper.colorToScalar(params.centroidColor); - var maximumBoxColour = ColorHelper.colorToScalar(params.maximumBoxColor); - var rotatedBoxColour = ColorHelper.colorToScalar(params.rotatedBoxColor); - var circleColor = ColorHelper.colorToScalar(params.circleColor); - var shapeColour = ColorHelper.colorToScalar(params.shapeOutlineColour); - - for (int i = 0; i < (params.showMultipleTargets ? 
in.getRight().size() : 1); i++) { - Point[] vertices = new Point[4]; - MatOfPoint contour = new MatOfPoint(); - - if (i != 0 && !params.showMultipleTargets) { - break; - } - - TrackedTarget target = in.getRight().get(i); - RotatedRect r = target.getMinAreaRect(); - - if (r == null) continue; - - r.points(vertices); - dividePointArray(vertices); - contour.fromArray(vertices); - - if (params.shouldShowRotatedBox(target.getShape())) { - Imgproc.drawContours( - in.getLeft(), - List.of(contour), - 0, - rotatedBoxColour, - (int) Math.ceil(imageSize * params.kPixelsToBoxThickness)); - } else if (params.shouldShowCircle(target.getShape())) { - Imgproc.circle( - in.getLeft(), - target.getShape().center, - (int) target.getShape().radius, - circleColor, - (int) Math.ceil(imageSize * params.kPixelsToBoxThickness)); - } else { - // draw approximate polygon - var poly = target.getApproximateBoundingPolygon(); - - // fall back on the shape's approx poly dp - if (poly == null && target.getShape() != null) - poly = target.getShape().getContour().getApproxPolyDp(); - if (poly != null) { - var mat = new MatOfPoint(); - mat.fromArray(poly.toArray()); - divideMat(mat, mat); - Imgproc.drawContours( - in.getLeft(), - List.of(mat), - -1, - ColorHelper.colorToScalar(params.rotatedBoxColor), - 2); - mat.release(); - } - } - - if (params.showMaximumBox) { - Rect box = Imgproc.boundingRect(contour); - Imgproc.rectangle( - in.getLeft(), - new Point(box.x, box.y), - new Point(box.x + box.width, box.y + box.height), - maximumBoxColour, - (int) Math.ceil(imageSize * params.kPixelsToBoxThickness)); - } - - if (params.showShape) { - divideMat(target.m_mainContour.mat, tempMat); - Imgproc.drawContours( - in.getLeft(), - List.of(tempMat), - -1, - shapeColour, - (int) Math.ceil(imageSize * params.kPixelsToBoxThickness)); - } - - if (params.showContourNumber) { - var center = target.m_mainContour.getCenterPoint(); - var textPos = - new Point( - center.x + params.kPixelsToOffset * imageSize, - center.y 
- params.kPixelsToOffset * imageSize); - dividePoint(textPos); - - int id = target.getFiducialId(); - var contourNumber = String.valueOf(id == -1 ? i : id); - - Imgproc.putText( - in.getLeft(), - contourNumber, - textPos, - 0, - textSize, - ColorHelper.colorToScalar(params.textColor), - (int) thickness); - } - - if (params.showCentroid) { - Point centroid = target.getTargetOffsetPoint().clone(); - dividePoint(centroid); - var crosshairRadius = (int) (imageSize * params.kPixelsToCentroidRadius); - var x = centroid.x; - var y = centroid.y; - Point xMax = new Point(x + crosshairRadius, y); - Point xMin = new Point(x - crosshairRadius, y); - Point yMax = new Point(x, y + crosshairRadius); - Point yMin = new Point(x, y - crosshairRadius); - - Imgproc.line( - in.getLeft(), - xMax, - xMin, - centroidColour, - (int) Math.ceil(imageSize * params.kPixelsToBoxThickness)); - Imgproc.line( - in.getLeft(), - yMax, - yMin, - centroidColour, - (int) Math.ceil(imageSize * params.kPixelsToBoxThickness)); - } - } + extends MutatingPipe>, Draw2dTargetsPipe.Draw2dTargetsParams> { + MatOfPoint tempMat = new MatOfPoint(); + private static final Logger logger = new Logger(Draw2dTargetsPipe.class, LogGroup.General); + + @Override + protected Void process(Pair> in) { + var imRows = in.getLeft().rows(); + var imCols = in.getLeft().cols(); + var imageSize = Math.sqrt(imRows * imCols); + var textSize = params.kPixelsToText * imageSize; + var thickness = params.kPixelsToThickness * imageSize; + + if (!params.shouldDraw) return null; + + if (!in.getRight().isEmpty() + && (params.showCentroid + || params.showMaximumBox + || params.showRotatedBox + || params.showShape)) { + var centroidColour = ColorHelper.colorToScalar(params.centroidColor); + var maximumBoxColour = ColorHelper.colorToScalar(params.maximumBoxColor); + var rotatedBoxColour = ColorHelper.colorToScalar(params.rotatedBoxColor); + var circleColor = ColorHelper.colorToScalar(params.circleColor); + var shapeColour = 
ColorHelper.colorToScalar(params.shapeOutlineColour); + + for (int i = 0; i < (params.showMultipleTargets ? in.getRight().size() : 1); i++) { + Point[] vertices = new Point[4]; + MatOfPoint contour = new MatOfPoint(); + + if (i != 0 && !params.showMultipleTargets) { + break; } - return null; - } + TrackedTarget target = in.getRight().get(i); + RotatedRect r = target.getMinAreaRect(); + + if (r == null) continue; + + r.points(vertices); + dividePointArray(vertices); + contour.fromArray(vertices); + + if (params.shouldShowRotatedBox(target.getShape())) { + Imgproc.drawContours( + in.getLeft(), + List.of(contour), + 0, + rotatedBoxColour, + (int) Math.ceil(imageSize * params.kPixelsToBoxThickness)); + } else if (params.shouldShowCircle(target.getShape())) { + Imgproc.circle( + in.getLeft(), + target.getShape().center, + (int) target.getShape().radius, + circleColor, + (int) Math.ceil(imageSize * params.kPixelsToBoxThickness)); + } else { + // draw approximate polygon + var poly = target.getApproximateBoundingPolygon(); + + // fall back on the shape's approx poly dp + if (poly == null && target.getShape() != null) + poly = target.getShape().getContour().getApproxPolyDp(); + if (poly != null) { + var mat = new MatOfPoint(); + mat.fromArray(poly.toArray()); + divideMat(mat, mat); + Imgproc.drawContours( + in.getLeft(), + List.of(mat), + -1, + ColorHelper.colorToScalar(params.rotatedBoxColor), + 2); + mat.release(); + } + } - private void divideMat(MatOfPoint src, MatOfPoint dst) { - var hull = src.toArray(); - for (Point point : hull) { - dividePoint(point); + if (params.showMaximumBox) { + Rect box = Imgproc.boundingRect(contour); + Imgproc.rectangle( + in.getLeft(), + new Point(box.x, box.y), + new Point(box.x + box.width, box.y + box.height), + maximumBoxColour, + (int) Math.ceil(imageSize * params.kPixelsToBoxThickness)); } - dst.fromArray(hull); - } - private void divideMat(MatOfPoint2f src, MatOfPoint dst) { - var hull = src.toArray(); - for (Point point : hull) { 
- dividePoint(point); + if (params.showShape) { + divideMat(target.m_mainContour.mat, tempMat); + Imgproc.drawContours( + in.getLeft(), + List.of(tempMat), + -1, + shapeColour, + (int) Math.ceil(imageSize * params.kPixelsToBoxThickness)); } - dst.fromArray(hull); - } - /** Scale a given point list by the current frame divisor. the point list is mutated! */ - private void dividePointList(List points) { - for (var p : points) { - dividePoint(p); + if (params.showContourNumber) { + var center = target.m_mainContour.getCenterPoint(); + var textPos = + new Point( + center.x + params.kPixelsToOffset * imageSize, + center.y - params.kPixelsToOffset * imageSize); + dividePoint(textPos); + + int id = target.getFiducialId(); + var contourNumber = String.valueOf(id == -1 ? i : id); + + Imgproc.putText( + in.getLeft(), + contourNumber, + textPos, + 0, + textSize, + ColorHelper.colorToScalar(params.textColor), + (int) thickness); } - } - /** Scale a given point array by the current frame divisor. the point list is mutated! 
*/ - private void dividePointArray(Point[] points) { - for (var p : points) { - dividePoint(p); + if (params.showCentroid) { + Point centroid = target.getTargetOffsetPoint().clone(); + dividePoint(centroid); + var crosshairRadius = (int) (imageSize * params.kPixelsToCentroidRadius); + var x = centroid.x; + var y = centroid.y; + Point xMax = new Point(x + crosshairRadius, y); + Point xMin = new Point(x - crosshairRadius, y); + Point yMax = new Point(x, y + crosshairRadius); + Point yMin = new Point(x, y - crosshairRadius); + + Imgproc.line( + in.getLeft(), + xMax, + xMin, + centroidColour, + (int) Math.ceil(imageSize * params.kPixelsToBoxThickness)); + Imgproc.line( + in.getLeft(), + yMax, + yMin, + centroidColour, + (int) Math.ceil(imageSize * params.kPixelsToBoxThickness)); } + } } - private void dividePoint(Point p) { - p.x = p.x / (double) params.divisor.value; - p.y = p.y / (double) params.divisor.value; + return null; + } + + private void divideMat(MatOfPoint src, MatOfPoint dst) { + var hull = src.toArray(); + for (Point point : hull) { + dividePoint(point); } + dst.fromArray(hull); + } - public static class Draw2dTargetsParams { - public double kPixelsToText = 0.0025; - public double kPixelsToThickness = 0.008; - public double kPixelsToOffset = 0.04; - public double kPixelsToBoxThickness = 0.007; - public double kPixelsToCentroidRadius = 0.03; - public boolean showCentroid = true; - public boolean showRotatedBox = true; - public boolean showShape = false; - public boolean showMaximumBox = true; - public boolean showContourNumber = true; - public Color centroidColor = Color.GREEN; // Color.decode("#ff5ebf"); - public Color rotatedBoxColor = Color.BLUE; - public Color maximumBoxColor = Color.RED; - public Color shapeOutlineColour = Color.MAGENTA; - public Color textColor = Color.GREEN; - public Color circleColor = Color.RED; - - public final boolean showMultipleTargets; - public final boolean shouldDraw; - - public final FrameDivisor divisor; - - public 
boolean shouldShowRotatedBox(CVShape shape) { - return showRotatedBox && (shape == null || shape.shape.equals(ContourShape.Quadrilateral)); - } + private void divideMat(MatOfPoint2f src, MatOfPoint dst) { + var hull = src.toArray(); + for (Point point : hull) { + dividePoint(point); + } + dst.fromArray(hull); + } - public boolean shouldShowCircle(CVShape shape) { - return shape != null && shape.shape.equals(ContourShape.Circle); - } + /** Scale a given point list by the current frame divisor. the point list is mutated! */ + private void dividePointList(List points) { + for (var p : points) { + dividePoint(p); + } + } - public Draw2dTargetsParams( - boolean shouldDraw, boolean showMultipleTargets, FrameDivisor divisor) { - this.shouldDraw = shouldDraw; - this.showMultipleTargets = showMultipleTargets; - this.divisor = divisor; - } + /** Scale a given point array by the current frame divisor. the point list is mutated! */ + private void dividePointArray(Point[] points) { + for (var p : points) { + dividePoint(p); + } + } + + private void dividePoint(Point p) { + p.x = p.x / (double) params.divisor.value; + p.y = p.y / (double) params.divisor.value; + } + + public static class Draw2dTargetsParams { + public double kPixelsToText = 0.0025; + public double kPixelsToThickness = 0.008; + public double kPixelsToOffset = 0.04; + public double kPixelsToBoxThickness = 0.007; + public double kPixelsToCentroidRadius = 0.03; + public boolean showCentroid = true; + public boolean showRotatedBox = true; + public boolean showShape = false; + public boolean showMaximumBox = true; + public boolean showContourNumber = true; + public Color centroidColor = Color.GREEN; // Color.decode("#ff5ebf"); + public Color rotatedBoxColor = Color.BLUE; + public Color maximumBoxColor = Color.RED; + public Color shapeOutlineColour = Color.MAGENTA; + public Color textColor = Color.GREEN; + public Color circleColor = Color.RED; + + public final boolean showMultipleTargets; + public final boolean 
shouldDraw; + + public final FrameDivisor divisor; + + public boolean shouldShowRotatedBox(CVShape shape) { + return showRotatedBox && (shape == null || shape.shape.equals(ContourShape.Quadrilateral)); + } + + public boolean shouldShowCircle(CVShape shape) { + return shape != null && shape.shape.equals(ContourShape.Circle); + } + + public Draw2dTargetsParams( + boolean shouldDraw, boolean showMultipleTargets, FrameDivisor divisor) { + this.shouldDraw = shouldDraw; + this.showMultipleTargets = showMultipleTargets; + this.divisor = divisor; } + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/Draw3dAprilTagsPipe.java b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/Draw3dAprilTagsPipe.java index 017e973372..26f1939b74 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/Draw3dAprilTagsPipe.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/Draw3dAprilTagsPipe.java @@ -22,15 +22,15 @@ import org.photonvision.vision.target.TargetModel; public class Draw3dAprilTagsPipe extends Draw3dTargetsPipe { - public static class Draw3dAprilTagsParams extends Draw3dContoursParams { - public Draw3dAprilTagsParams( - boolean shouldDraw, - CameraCalibrationCoefficients cameraCalibrationCoefficients, - TargetModel targetModel, - FrameDivisor divisor) { - super(shouldDraw, cameraCalibrationCoefficients, targetModel, divisor); - this.shouldDrawHull = false; - this.redistortPoints = true; - } + public static class Draw3dAprilTagsParams extends Draw3dContoursParams { + public Draw3dAprilTagsParams( + boolean shouldDraw, + CameraCalibrationCoefficients cameraCalibrationCoefficients, + TargetModel targetModel, + FrameDivisor divisor) { + super(shouldDraw, cameraCalibrationCoefficients, targetModel, divisor); + this.shouldDrawHull = false; + this.redistortPoints = true; } + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/Draw3dArucoPipe.java 
b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/Draw3dArucoPipe.java index 0fdd719358..e048b80e30 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/Draw3dArucoPipe.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/Draw3dArucoPipe.java @@ -22,14 +22,14 @@ import org.photonvision.vision.target.TargetModel; public class Draw3dArucoPipe extends Draw3dTargetsPipe { - public static class Draw3dArucoParams extends Draw3dContoursParams { - public Draw3dArucoParams( - boolean shouldDraw, - CameraCalibrationCoefficients cameraCalibrationCoefficients, - TargetModel targetModel, - FrameDivisor divisor) { - super(shouldDraw, cameraCalibrationCoefficients, targetModel, divisor); - this.shouldDrawHull = false; - } + public static class Draw3dArucoParams extends Draw3dContoursParams { + public Draw3dArucoParams( + boolean shouldDraw, + CameraCalibrationCoefficients cameraCalibrationCoefficients, + TargetModel targetModel, + FrameDivisor divisor) { + super(shouldDraw, cameraCalibrationCoefficients, targetModel, divisor); + this.shouldDrawHull = false; } + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/Draw3dTargetsPipe.java b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/Draw3dTargetsPipe.java index 99927ea0f1..11f8794a86 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/Draw3dTargetsPipe.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/Draw3dTargetsPipe.java @@ -35,279 +35,279 @@ import org.photonvision.vision.target.TrackedTarget; public class Draw3dTargetsPipe - extends MutatingPipe>, Draw3dTargetsPipe.Draw3dContoursParams> { - Logger logger = new Logger(Draw3dTargetsPipe.class, LogGroup.VisionModule); - - @Override - protected Void process(Pair> in) { - if (!params.shouldDraw) return null; - if (params.cameraCalibrationCoefficients == null - || params.cameraCalibrationCoefficients.getCameraIntrinsicsMat() == null - || 
params.cameraCalibrationCoefficients.getDistCoeffsMat() == null) { - return null; - } + extends MutatingPipe>, Draw3dTargetsPipe.Draw3dContoursParams> { + Logger logger = new Logger(Draw3dTargetsPipe.class, LogGroup.VisionModule); + + @Override + protected Void process(Pair> in) { + if (!params.shouldDraw) return null; + if (params.cameraCalibrationCoefficients == null + || params.cameraCalibrationCoefficients.getCameraIntrinsicsMat() == null + || params.cameraCalibrationCoefficients.getDistCoeffsMat() == null) { + return null; + } - for (var target : in.getRight()) { - // draw convex hull - if (params.shouldDrawHull(target)) { - var pointMat = new MatOfPoint(); - divideMat2f(target.m_mainContour.getConvexHull(), pointMat); - if (pointMat.size().empty()) { - logger.error("Convex hull is empty?"); - logger.debug( - "Orig. Convex Hull: " + target.m_mainContour.getConvexHull().size().toString()); - continue; - } - Imgproc.drawContours( - in.getLeft(), List.of(pointMat), -1, ColorHelper.colorToScalar(Color.green), 1); - - // draw approximate polygon - var poly = target.getApproximateBoundingPolygon(); - if (poly != null) { - divideMat2f(poly, pointMat); - Imgproc.drawContours( - in.getLeft(), List.of(pointMat), -1, ColorHelper.colorToScalar(Color.blue), 2); - } - pointMat.release(); - } - - // Draw floor and top - if (target.getCameraRelativeRvec() != null - && target.getCameraRelativeTvec() != null - && params.shouldDrawBox) { - var tempMat = new MatOfPoint2f(); - - var jac = new Mat(); - var bottomModel = params.targetModel.getVisualizationBoxBottom(); - var topModel = params.targetModel.getVisualizationBoxTop(); - - Calib3d.projectPoints( - bottomModel, - target.getCameraRelativeRvec(), - target.getCameraRelativeTvec(), - params.cameraCalibrationCoefficients.getCameraIntrinsicsMat(), - params.cameraCalibrationCoefficients.getDistCoeffsMat(), - tempMat, - jac); - - if (params.redistortPoints) { - // Distort the points, so they match the image they're being overlaid 
on - distortPoints(tempMat, tempMat); - } - - var bottomPoints = tempMat.toList(); - - Calib3d.projectPoints( - topModel, - target.getCameraRelativeRvec(), - target.getCameraRelativeTvec(), - params.cameraCalibrationCoefficients.getCameraIntrinsicsMat(), - params.cameraCalibrationCoefficients.getDistCoeffsMat(), - tempMat, - jac); - - if (params.redistortPoints) { - // Distort the points, so they match the image they're being overlaid on - distortPoints(tempMat, tempMat); - } - var topPoints = tempMat.toList(); - - dividePointList(bottomPoints); - dividePointList(topPoints); - - // floor, then pillars, then top - for (int i = 0; i < bottomPoints.size(); i++) { - Imgproc.line( - in.getLeft(), - bottomPoints.get(i), - bottomPoints.get((i + 1) % (bottomPoints.size())), - ColorHelper.colorToScalar(Color.green), - 3); - } - - // Draw X, Y and Z axis - MatOfPoint3f pointMat = new MatOfPoint3f(); - // OpenCV expects coordinates in EDN, but we want to visualize in NWU - // NWU | EDN - // X: Z - // Y: -X - // Z: -Y - final double AXIS_LEN = 0.2; - var list = - List.of( - new Point3(0, 0, 0), - new Point3(0, 0, AXIS_LEN), // x-axis - new Point3(-AXIS_LEN, 0, 0), // y-axis - new Point3(0, -AXIS_LEN, 0)); // z-axis - pointMat.fromList(list); - - // The detected target's rvec and tvec perform a rotation-translation transformation which - // converts points in the target's coordinate system to the camera's. This means applying - // the transformation to the target point (0,0,0) for example would give the target's center - // relative to the camera. 
- Calib3d.projectPoints( - pointMat, - target.getCameraRelativeRvec(), - target.getCameraRelativeTvec(), - params.cameraCalibrationCoefficients.getCameraIntrinsicsMat(), - params.cameraCalibrationCoefficients.getDistCoeffsMat(), - tempMat, - jac); - var axisPoints = tempMat.toList(); - dividePointList(axisPoints); - - // XYZ is RGB - // y-axis = green - Imgproc.line( - in.getLeft(), - axisPoints.get(0), - axisPoints.get(2), - ColorHelper.colorToScalar(Color.GREEN), - 3); - // z-axis = blue - Imgproc.line( - in.getLeft(), - axisPoints.get(0), - axisPoints.get(3), - ColorHelper.colorToScalar(Color.BLUE), - 3); - // x-axis = red - Imgproc.line( - in.getLeft(), - axisPoints.get(0), - axisPoints.get(1), - ColorHelper.colorToScalar(Color.RED), - 3); - - // box edges perpendicular to tag - for (int i = 0; i < bottomPoints.size(); i++) { - Imgproc.line( - in.getLeft(), - bottomPoints.get(i), - topPoints.get(i), - ColorHelper.colorToScalar(Color.blue), - 3); - } - // box edges parallel to tag - for (int i = 0; i < topPoints.size(); i++) { - Imgproc.line( - in.getLeft(), - topPoints.get(i), - topPoints.get((i + 1) % (bottomPoints.size())), - ColorHelper.colorToScalar(Color.orange), - 3); - } - - tempMat.release(); - jac.release(); - pointMat.release(); - } - - // draw corners - var corners = target.getTargetCorners(); - if (corners != null && !corners.isEmpty()) { - for (var corner : corners) { - var x = corner.x / (double) params.divisor.value; - var y = corner.y / (double) params.divisor.value; - - Imgproc.circle( - in.getLeft(), - new Point(x, y), - params.radius, - ColorHelper.colorToScalar(params.color), - params.radius); - } - } + for (var target : in.getRight()) { + // draw convex hull + if (params.shouldDrawHull(target)) { + var pointMat = new MatOfPoint(); + divideMat2f(target.m_mainContour.getConvexHull(), pointMat); + if (pointMat.size().empty()) { + logger.error("Convex hull is empty?"); + logger.debug( + "Orig. 
Convex Hull: " + target.m_mainContour.getConvexHull().size().toString()); + continue; + } + Imgproc.drawContours( + in.getLeft(), List.of(pointMat), -1, ColorHelper.colorToScalar(Color.green), 1); + + // draw approximate polygon + var poly = target.getApproximateBoundingPolygon(); + if (poly != null) { + divideMat2f(poly, pointMat); + Imgproc.drawContours( + in.getLeft(), List.of(pointMat), -1, ColorHelper.colorToScalar(Color.blue), 2); + } + pointMat.release(); + } + + // Draw floor and top + if (target.getCameraRelativeRvec() != null + && target.getCameraRelativeTvec() != null + && params.shouldDrawBox) { + var tempMat = new MatOfPoint2f(); + + var jac = new Mat(); + var bottomModel = params.targetModel.getVisualizationBoxBottom(); + var topModel = params.targetModel.getVisualizationBoxTop(); + + Calib3d.projectPoints( + bottomModel, + target.getCameraRelativeRvec(), + target.getCameraRelativeTvec(), + params.cameraCalibrationCoefficients.getCameraIntrinsicsMat(), + params.cameraCalibrationCoefficients.getDistCoeffsMat(), + tempMat, + jac); + + if (params.redistortPoints) { + // Distort the points, so they match the image they're being overlaid on + distortPoints(tempMat, tempMat); } - return null; - } + var bottomPoints = tempMat.toList(); + + Calib3d.projectPoints( + topModel, + target.getCameraRelativeRvec(), + target.getCameraRelativeTvec(), + params.cameraCalibrationCoefficients.getCameraIntrinsicsMat(), + params.cameraCalibrationCoefficients.getDistCoeffsMat(), + tempMat, + jac); - private void distortPoints(MatOfPoint2f src, MatOfPoint2f dst) { - var pointsList = src.toList(); - var dstList = new ArrayList(); - final Mat cameraMatrix = params.cameraCalibrationCoefficients.getCameraIntrinsicsMat(); - // k1, k2, p1, p2, k3 - final Mat distCoeffs = params.cameraCalibrationCoefficients.getDistCoeffsMat(); - var cx = cameraMatrix.get(0, 2)[0]; - var cy = cameraMatrix.get(1, 2)[0]; - var fx = cameraMatrix.get(0, 0)[0]; - var fy = cameraMatrix.get(1, 1)[0]; - var 
k1 = distCoeffs.get(0, 0)[0]; - var k2 = distCoeffs.get(0, 1)[0]; - var k3 = distCoeffs.get(0, 4)[0]; - var p1 = distCoeffs.get(0, 2)[0]; - var p2 = distCoeffs.get(0, 3)[0]; - - for (Point point : pointsList) { - // To relative coordinates <- this is the step you are missing. - double x = (point.x - cx) / fx; // cx, cy is the center of distortion - double y = (point.y - cy) / fy; - - double r2 = x * x + y * y; // square of the radius from center - - // Radial distortion - double xDistort = x * (1 + k1 * r2 + k2 * r2 * r2 + k3 * r2 * r2 * r2); - double yDistort = y * (1 + k1 * r2 + k2 * r2 * r2 + k3 * r2 * r2 * r2); - - // Tangential distortion - xDistort = xDistort + (2 * p1 * x * y + p2 * (r2 + 2 * x * x)); - yDistort = yDistort + (p1 * (r2 + 2 * y * y) + 2 * p2 * x * y); - - // Back to absolute coordinates. - xDistort = xDistort * fx + cx; - yDistort = yDistort * fy + cy; - dstList.add(new Point(xDistort, yDistort)); + if (params.redistortPoints) { + // Distort the points, so they match the image they're being overlaid on + distortPoints(tempMat, tempMat); + } + var topPoints = tempMat.toList(); + + dividePointList(bottomPoints); + dividePointList(topPoints); + + // floor, then pillars, then top + for (int i = 0; i < bottomPoints.size(); i++) { + Imgproc.line( + in.getLeft(), + bottomPoints.get(i), + bottomPoints.get((i + 1) % (bottomPoints.size())), + ColorHelper.colorToScalar(Color.green), + 3); } - dst.fromList(dstList); - } - private void divideMat2f(MatOfPoint2f src, MatOfPoint dst) { - var hull = src.toArray(); - var pointArray = new Point[hull.length]; - for (int i = 0; i < hull.length; i++) { - var hullAtI = hull[i]; - pointArray[i] = - new Point( - hullAtI.x / (double) params.divisor.value, hullAtI.y / (double) params.divisor.value); + // Draw X, Y and Z axis + MatOfPoint3f pointMat = new MatOfPoint3f(); + // OpenCV expects coordinates in EDN, but we want to visualize in NWU + // NWU | EDN + // X: Z + // Y: -X + // Z: -Y + final double AXIS_LEN = 0.2; + 
var list = + List.of( + new Point3(0, 0, 0), + new Point3(0, 0, AXIS_LEN), // x-axis + new Point3(-AXIS_LEN, 0, 0), // y-axis + new Point3(0, -AXIS_LEN, 0)); // z-axis + pointMat.fromList(list); + + // The detected target's rvec and tvec perform a rotation-translation transformation which + // converts points in the target's coordinate system to the camera's. This means applying + // the transformation to the target point (0,0,0) for example would give the target's center + // relative to the camera. + Calib3d.projectPoints( + pointMat, + target.getCameraRelativeRvec(), + target.getCameraRelativeTvec(), + params.cameraCalibrationCoefficients.getCameraIntrinsicsMat(), + params.cameraCalibrationCoefficients.getDistCoeffsMat(), + tempMat, + jac); + var axisPoints = tempMat.toList(); + dividePointList(axisPoints); + + // XYZ is RGB + // y-axis = green + Imgproc.line( + in.getLeft(), + axisPoints.get(0), + axisPoints.get(2), + ColorHelper.colorToScalar(Color.GREEN), + 3); + // z-axis = blue + Imgproc.line( + in.getLeft(), + axisPoints.get(0), + axisPoints.get(3), + ColorHelper.colorToScalar(Color.BLUE), + 3); + // x-axis = red + Imgproc.line( + in.getLeft(), + axisPoints.get(0), + axisPoints.get(1), + ColorHelper.colorToScalar(Color.RED), + 3); + + // box edges perpendicular to tag + for (int i = 0; i < bottomPoints.size(); i++) { + Imgproc.line( + in.getLeft(), + bottomPoints.get(i), + topPoints.get(i), + ColorHelper.colorToScalar(Color.blue), + 3); + } + // box edges parallel to tag + for (int i = 0; i < topPoints.size(); i++) { + Imgproc.line( + in.getLeft(), + topPoints.get(i), + topPoints.get((i + 1) % (bottomPoints.size())), + ColorHelper.colorToScalar(Color.orange), + 3); } - dst.fromArray(pointArray); - } - /** Scale a given point list by the current frame divisor. the point list is mutated! 
*/ - private void dividePointList(List points) { - for (var p : points) { - p.x = p.x / (double) params.divisor.value; - p.y = p.y / (double) params.divisor.value; + tempMat.release(); + jac.release(); + pointMat.release(); + } + + // draw corners + var corners = target.getTargetCorners(); + if (corners != null && !corners.isEmpty()) { + for (var corner : corners) { + var x = corner.x / (double) params.divisor.value; + var y = corner.y / (double) params.divisor.value; + + Imgproc.circle( + in.getLeft(), + new Point(x, y), + params.radius, + ColorHelper.colorToScalar(params.color), + params.radius); } + } } - public static class Draw3dContoursParams { - public int radius = 2; - public Color color = Color.RED; - - public final boolean shouldDraw; - public boolean shouldDrawHull = true; - public boolean shouldDrawBox = true; - public final TargetModel targetModel; - public final CameraCalibrationCoefficients cameraCalibrationCoefficients; - public final FrameDivisor divisor; - - public boolean redistortPoints = false; - - public Draw3dContoursParams( - boolean shouldDraw, - CameraCalibrationCoefficients cameraCalibrationCoefficients, - TargetModel targetModel, - FrameDivisor divisor) { - this.shouldDraw = shouldDraw; - this.cameraCalibrationCoefficients = cameraCalibrationCoefficients; - this.targetModel = targetModel; - this.divisor = divisor; - } + return null; + } + + private void distortPoints(MatOfPoint2f src, MatOfPoint2f dst) { + var pointsList = src.toList(); + var dstList = new ArrayList(); + final Mat cameraMatrix = params.cameraCalibrationCoefficients.getCameraIntrinsicsMat(); + // k1, k2, p1, p2, k3 + final Mat distCoeffs = params.cameraCalibrationCoefficients.getDistCoeffsMat(); + var cx = cameraMatrix.get(0, 2)[0]; + var cy = cameraMatrix.get(1, 2)[0]; + var fx = cameraMatrix.get(0, 0)[0]; + var fy = cameraMatrix.get(1, 1)[0]; + var k1 = distCoeffs.get(0, 0)[0]; + var k2 = distCoeffs.get(0, 1)[0]; + var k3 = distCoeffs.get(0, 4)[0]; + var p1 = 
distCoeffs.get(0, 2)[0]; + var p2 = distCoeffs.get(0, 3)[0]; + + for (Point point : pointsList) { + // To relative coordinates <- this is the step you are missing. + double x = (point.x - cx) / fx; // cx, cy is the center of distortion + double y = (point.y - cy) / fy; + + double r2 = x * x + y * y; // square of the radius from center + + // Radial distortion + double xDistort = x * (1 + k1 * r2 + k2 * r2 * r2 + k3 * r2 * r2 * r2); + double yDistort = y * (1 + k1 * r2 + k2 * r2 * r2 + k3 * r2 * r2 * r2); + + // Tangential distortion + xDistort = xDistort + (2 * p1 * x * y + p2 * (r2 + 2 * x * x)); + yDistort = yDistort + (p1 * (r2 + 2 * y * y) + 2 * p2 * x * y); + + // Back to absolute coordinates. + xDistort = xDistort * fx + cx; + yDistort = yDistort * fy + cy; + dstList.add(new Point(xDistort, yDistort)); + } + dst.fromList(dstList); + } + + private void divideMat2f(MatOfPoint2f src, MatOfPoint dst) { + var hull = src.toArray(); + var pointArray = new Point[hull.length]; + for (int i = 0; i < hull.length; i++) { + var hullAtI = hull[i]; + pointArray[i] = + new Point( + hullAtI.x / (double) params.divisor.value, hullAtI.y / (double) params.divisor.value); + } + dst.fromArray(pointArray); + } + + /** Scale a given point list by the current frame divisor. the point list is mutated! 
*/ + private void dividePointList(List points) { + for (var p : points) { + p.x = p.x / (double) params.divisor.value; + p.y = p.y / (double) params.divisor.value; + } + } + + public static class Draw3dContoursParams { + public int radius = 2; + public Color color = Color.RED; + + public final boolean shouldDraw; + public boolean shouldDrawHull = true; + public boolean shouldDrawBox = true; + public final TargetModel targetModel; + public final CameraCalibrationCoefficients cameraCalibrationCoefficients; + public final FrameDivisor divisor; + + public boolean redistortPoints = false; + + public Draw3dContoursParams( + boolean shouldDraw, + CameraCalibrationCoefficients cameraCalibrationCoefficients, + TargetModel targetModel, + FrameDivisor divisor) { + this.shouldDraw = shouldDraw; + this.cameraCalibrationCoefficients = cameraCalibrationCoefficients; + this.targetModel = targetModel; + this.divisor = divisor; + } - public boolean shouldDrawHull(TrackedTarget t) { - return !t.isFiducial() && this.shouldDrawHull; - } + public boolean shouldDrawHull(TrackedTarget t) { + return !t.isFiducial() && this.shouldDrawHull; } + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/DrawCornerDetectionPipe.java b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/DrawCornerDetectionPipe.java index 9ef05ad99f..f7a88c6227 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/DrawCornerDetectionPipe.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/DrawCornerDetectionPipe.java @@ -26,23 +26,23 @@ import org.photonvision.vision.target.TrackedTarget; public class DrawCornerDetectionPipe - extends MutatingPipe>, DrawCornerDetectionPipe.DrawCornerParams> { - @Override - protected Void process(Pair> in) { - Mat image = in.getLeft(); + extends MutatingPipe>, DrawCornerDetectionPipe.DrawCornerParams> { + @Override + protected Void process(Pair> in) { + Mat image = in.getLeft(); - for (var target : in.getRight()) { - 
var corners = target.getTargetCorners(); - for (var corner : corners) { - Imgproc.circle(image, corner, params.dotRadius, params.dotColor); - } - } - - return null; + for (var target : in.getRight()) { + var corners = target.getTargetCorners(); + for (var corner : corners) { + Imgproc.circle(image, corner, params.dotRadius, params.dotColor); + } } - public static class DrawCornerParams { - int dotRadius; - Scalar dotColor; - } + return null; + } + + public static class DrawCornerParams { + int dotRadius; + Scalar dotColor; + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/ErodeDilatePipe.java b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/ErodeDilatePipe.java index 9d09a5d198..35781f7b4f 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/ErodeDilatePipe.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/ErodeDilatePipe.java @@ -23,39 +23,39 @@ import org.photonvision.vision.pipe.MutatingPipe; public class ErodeDilatePipe extends MutatingPipe { - @Override - protected Void process(Mat in) { - if (params.shouldErode()) { - Imgproc.erode(in, in, params.getKernel()); - } - if (params.shouldDilate()) { - Imgproc.dilate(in, in, params.getKernel()); - } - return null; + @Override + protected Void process(Mat in) { + if (params.shouldErode()) { + Imgproc.erode(in, in, params.getKernel()); + } + if (params.shouldDilate()) { + Imgproc.dilate(in, in, params.getKernel()); + } + return null; + } + + public static class ErodeDilateParams { + private final boolean m_erode; + private final boolean m_dilate; + private final Mat m_kernel; + + public ErodeDilateParams(boolean erode, boolean dilate, int kernelSize) { + m_erode = erode; + m_dilate = dilate; + m_kernel = + Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(kernelSize, kernelSize)); + } + + public boolean shouldErode() { + return m_erode; + } + + public boolean shouldDilate() { + return m_dilate; } - public static class 
ErodeDilateParams { - private final boolean m_erode; - private final boolean m_dilate; - private final Mat m_kernel; - - public ErodeDilateParams(boolean erode, boolean dilate, int kernelSize) { - m_erode = erode; - m_dilate = dilate; - m_kernel = - Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(kernelSize, kernelSize)); - } - - public boolean shouldErode() { - return m_erode; - } - - public boolean shouldDilate() { - return m_dilate; - } - - public Mat getKernel() { - return m_kernel; - } + public Mat getKernel() { + return m_kernel; } + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/FilterContoursPipe.java b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/FilterContoursPipe.java index 1e7616c44f..25af4d3ac3 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/FilterContoursPipe.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/FilterContoursPipe.java @@ -27,120 +27,120 @@ import org.photonvision.vision.target.TargetCalculations; public class FilterContoursPipe - extends CVPipe, List, FilterContoursPipe.FilterContoursParams> { - List m_filteredContours = new ArrayList<>(); + extends CVPipe, List, FilterContoursPipe.FilterContoursParams> { + List m_filteredContours = new ArrayList<>(); + + @Override + protected List process(List in) { + m_filteredContours.clear(); + for (Contour contour : in) { + filterContour(contour); + } + + // we need the whole list for outlier rejection + rejectOutliers(m_filteredContours, params.xTol, params.yTol); + + return m_filteredContours; + } + + private void rejectOutliers(List list, double xTol, double yTol) { + if (list.size() < 2) return; // Must have at least 2 points to reject outliers + + double meanX = list.stream().mapToDouble(it -> it.getCenterPoint().x).sum() / list.size(); - @Override - protected List process(List in) { - m_filteredContours.clear(); - for (Contour contour : in) { - filterContour(contour); - } + double stdDevX = + 
list.stream().mapToDouble(it -> Math.pow(it.getCenterPoint().x - meanX, 2.0)).sum(); + stdDevX /= (list.size() - 1); + stdDevX = Math.sqrt(stdDevX); - // we need the whole list for outlier rejection - rejectOutliers(m_filteredContours, params.xTol, params.yTol); + double meanY = list.stream().mapToDouble(it -> it.getCenterPoint().y).sum() / list.size(); + + double stdDevY = + list.stream().mapToDouble(it -> Math.pow(it.getCenterPoint().y - meanY, 2.0)).sum(); + stdDevY /= (list.size() - 1); + stdDevY = Math.sqrt(stdDevY); + + for (var it = list.iterator(); it.hasNext(); ) { + // Reject points more than N standard devs above/below median + // That is, |point - median| > std dev * tol + Contour c = it.next(); + double x = c.getCenterPoint().x; + double y = c.getCenterPoint().y; + + if (Math.abs(x - meanX) > stdDevX * xTol) { + it.remove(); + } else if (Math.abs(y - meanY) > stdDevY * yTol) { + it.remove(); + } + // Otherwise we're good! Keep it in + } + } + + private void filterContour(Contour contour) { + RotatedRect minAreaRect = contour.getMinAreaRect(); + + // Area Filtering. + double areaPercentage = + minAreaRect.size.area() / params.getFrameStaticProperties().imageArea * 100.0; + double minAreaPercentage = params.getArea().getFirst(); + double maxAreaPercentage = params.getArea().getSecond(); + if (areaPercentage < minAreaPercentage || areaPercentage > maxAreaPercentage) return; + + // Fullness Filtering. + double contourArea = contour.getArea(); + double minFullness = params.getFullness().getFirst() * minAreaRect.size.area() / 100.0; + double maxFullness = params.getFullness().getSecond() * minAreaRect.size.area() / 100.0; + if (contourArea <= minFullness || contourArea >= maxFullness) return; + + // Aspect Ratio Filtering. 
+ double aspectRatio = + TargetCalculations.getAspectRatio(contour.getMinAreaRect(), params.isLandscape); + if (aspectRatio < params.getRatio().getFirst() || aspectRatio > params.getRatio().getSecond()) + return; + + m_filteredContours.add(contour); + } + + public static class FilterContoursParams { + private final DoubleCouple m_area; + private final DoubleCouple m_ratio; + private final DoubleCouple m_fullness; + private final FrameStaticProperties m_frameStaticProperties; + private final double xTol; // IQR tolerance for x + private final double yTol; // IQR tolerance for x + public final boolean isLandscape; + + public FilterContoursParams( + DoubleCouple area, + DoubleCouple ratio, + DoubleCouple extent, + FrameStaticProperties camProperties, + double xTol, + double yTol, + boolean isLandscape) { + this.m_area = area; + this.m_ratio = ratio; + this.m_fullness = extent; + this.m_frameStaticProperties = camProperties; + this.xTol = xTol; + this.yTol = yTol; + this.isLandscape = isLandscape; + } - return m_filteredContours; + public DoubleCouple getArea() { + return m_area; } - private void rejectOutliers(List list, double xTol, double yTol) { - if (list.size() < 2) return; // Must have at least 2 points to reject outliers - - double meanX = list.stream().mapToDouble(it -> it.getCenterPoint().x).sum() / list.size(); - - double stdDevX = - list.stream().mapToDouble(it -> Math.pow(it.getCenterPoint().x - meanX, 2.0)).sum(); - stdDevX /= (list.size() - 1); - stdDevX = Math.sqrt(stdDevX); - - double meanY = list.stream().mapToDouble(it -> it.getCenterPoint().y).sum() / list.size(); - - double stdDevY = - list.stream().mapToDouble(it -> Math.pow(it.getCenterPoint().y - meanY, 2.0)).sum(); - stdDevY /= (list.size() - 1); - stdDevY = Math.sqrt(stdDevY); - - for (var it = list.iterator(); it.hasNext(); ) { - // Reject points more than N standard devs above/below median - // That is, |point - median| > std dev * tol - Contour c = it.next(); - double x = 
c.getCenterPoint().x; - double y = c.getCenterPoint().y; - - if (Math.abs(x - meanX) > stdDevX * xTol) { - it.remove(); - } else if (Math.abs(y - meanY) > stdDevY * yTol) { - it.remove(); - } - // Otherwise we're good! Keep it in - } + public DoubleCouple getRatio() { + return m_ratio; } - private void filterContour(Contour contour) { - RotatedRect minAreaRect = contour.getMinAreaRect(); - - // Area Filtering. - double areaPercentage = - minAreaRect.size.area() / params.getFrameStaticProperties().imageArea * 100.0; - double minAreaPercentage = params.getArea().getFirst(); - double maxAreaPercentage = params.getArea().getSecond(); - if (areaPercentage < minAreaPercentage || areaPercentage > maxAreaPercentage) return; - - // Fullness Filtering. - double contourArea = contour.getArea(); - double minFullness = params.getFullness().getFirst() * minAreaRect.size.area() / 100.0; - double maxFullness = params.getFullness().getSecond() * minAreaRect.size.area() / 100.0; - if (contourArea <= minFullness || contourArea >= maxFullness) return; - - // Aspect Ratio Filtering. 
- double aspectRatio = - TargetCalculations.getAspectRatio(contour.getMinAreaRect(), params.isLandscape); - if (aspectRatio < params.getRatio().getFirst() || aspectRatio > params.getRatio().getSecond()) - return; - - m_filteredContours.add(contour); + public DoubleCouple getFullness() { + return m_fullness; } - public static class FilterContoursParams { - private final DoubleCouple m_area; - private final DoubleCouple m_ratio; - private final DoubleCouple m_fullness; - private final FrameStaticProperties m_frameStaticProperties; - private final double xTol; // IQR tolerance for x - private final double yTol; // IQR tolerance for x - public final boolean isLandscape; - - public FilterContoursParams( - DoubleCouple area, - DoubleCouple ratio, - DoubleCouple extent, - FrameStaticProperties camProperties, - double xTol, - double yTol, - boolean isLandscape) { - this.m_area = area; - this.m_ratio = ratio; - this.m_fullness = extent; - this.m_frameStaticProperties = camProperties; - this.xTol = xTol; - this.yTol = yTol; - this.isLandscape = isLandscape; - } - - public DoubleCouple getArea() { - return m_area; - } - - public DoubleCouple getRatio() { - return m_ratio; - } - - public DoubleCouple getFullness() { - return m_fullness; - } - - public FrameStaticProperties getFrameStaticProperties() { - return m_frameStaticProperties; - } + public FrameStaticProperties getFrameStaticProperties() { + return m_frameStaticProperties; } + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/FilterShapesPipe.java b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/FilterShapesPipe.java index 1802d6971f..c2c95a51d7 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/FilterShapesPipe.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/FilterShapesPipe.java @@ -25,63 +25,63 @@ import org.photonvision.vision.pipe.CVPipe; public class FilterShapesPipe - extends CVPipe, List, 
FilterShapesPipe.FilterShapesPipeParams> { - List outputList = new ArrayList<>(); + extends CVPipe, List, FilterShapesPipe.FilterShapesPipeParams> { + List outputList = new ArrayList<>(); - /** - * Runs the process for the pipe. - * - * @param in Input for pipe processing. - * @return Result of processing. - */ - @Override - protected List process(List in) { - outputList.forEach(CVShape::release); - outputList.clear(); - outputList = new ArrayList<>(); + /** + * Runs the process for the pipe. + * + * @param in Input for pipe processing. + * @return Result of processing. + */ + @Override + protected List process(List in) { + outputList.forEach(CVShape::release); + outputList.clear(); + outputList = new ArrayList<>(); - for (var shape : in) { - if (!shouldRemove(shape)) outputList.add(shape); - } - - return outputList; + for (var shape : in) { + if (!shouldRemove(shape)) outputList.add(shape); } - private boolean shouldRemove(CVShape shape) { - return shape.shape != params.desiredShape - || shape.contour.getArea() / params.getFrameStaticProperties().imageArea * 100.0 - > params.maxArea - || shape.contour.getArea() / params.getFrameStaticProperties().imageArea * 100.0 - < params.minArea - || shape.contour.getPerimeter() > params.maxPeri - || shape.contour.getPerimeter() < params.minPeri; - } + return outputList; + } - public static class FilterShapesPipeParams { - private final ContourShape desiredShape; - private final FrameStaticProperties frameStaticProperties; - private final double minArea; - private final double maxArea; - private final double minPeri; - private final double maxPeri; + private boolean shouldRemove(CVShape shape) { + return shape.shape != params.desiredShape + || shape.contour.getArea() / params.getFrameStaticProperties().imageArea * 100.0 + > params.maxArea + || shape.contour.getArea() / params.getFrameStaticProperties().imageArea * 100.0 + < params.minArea + || shape.contour.getPerimeter() > params.maxPeri + || shape.contour.getPerimeter() < 
params.minPeri; + } - public FilterShapesPipeParams( - ContourShape desiredShape, - double minArea, - double maxArea, - double minPeri, - double maxPeri, - FrameStaticProperties frameStaticProperties) { - this.desiredShape = desiredShape; - this.minArea = minArea; - this.maxArea = maxArea; - this.minPeri = minPeri; - this.maxPeri = maxPeri; - this.frameStaticProperties = frameStaticProperties; - } + public static class FilterShapesPipeParams { + private final ContourShape desiredShape; + private final FrameStaticProperties frameStaticProperties; + private final double minArea; + private final double maxArea; + private final double minPeri; + private final double maxPeri; + + public FilterShapesPipeParams( + ContourShape desiredShape, + double minArea, + double maxArea, + double minPeri, + double maxPeri, + FrameStaticProperties frameStaticProperties) { + this.desiredShape = desiredShape; + this.minArea = minArea; + this.maxArea = maxArea; + this.minPeri = minPeri; + this.maxPeri = maxPeri; + this.frameStaticProperties = frameStaticProperties; + } - public FrameStaticProperties getFrameStaticProperties() { - return frameStaticProperties; - } + public FrameStaticProperties getFrameStaticProperties() { + return frameStaticProperties; } + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/FindBoardCornersPipe.java b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/FindBoardCornersPipe.java index 678566c707..279c3bb819 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/FindBoardCornersPipe.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/FindBoardCornersPipe.java @@ -29,306 +29,306 @@ import org.photonvision.vision.pipeline.UICalibrationData; public class FindBoardCornersPipe - extends CVPipe< - Pair, Triple, FindBoardCornersPipe.FindCornersPipeParams> { - private static final Logger logger = - new Logger(FindBoardCornersPipe.class, LogGroup.VisionModule); - - MatOfPoint3f objectPoints = 
new MatOfPoint3f(); - - Size imageSize; - Size patternSize; - - // Configure the optimizations used while using OpenCV's find corners algorithm - // Since we return results in real-time, we want to ensure it goes as fast as possible - // and fails as fast as possible. - final int findChessboardFlags = - Calib3d.CALIB_CB_NORMALIZE_IMAGE - | Calib3d.CALIB_CB_ADAPTIVE_THRESH - | Calib3d.CALIB_CB_FILTER_QUADS - | Calib3d.CALIB_CB_FAST_CHECK; - - private final MatOfPoint2f boardCorners = new MatOfPoint2f(); - - // Intermediate result mat's - Mat smallerInFrame = new Mat(); - MatOfPoint2f smallerBoardCorners = new MatOfPoint2f(); - - // SubCornerPix params - private final Size zeroZone = new Size(-1, -1); - private final TermCriteria criteria = new TermCriteria(3, 30, 0.001); - - private FindCornersPipeParams lastParams = null; - - public void createObjectPoints() { - if (this.lastParams != null && this.lastParams.equals(this.params)) return; - this.lastParams = this.params; - - this.objectPoints.release(); - this.objectPoints = null; - this.objectPoints = new MatOfPoint3f(); - - /*If using a chessboard, then the pattern size if the inner corners of the board. For example, the pattern size of a 9x9 chessboard would be 8x8 - If using a dot board, then the pattern size width is the sum of the bottom 2 rows and the height is the left or right most column - For example, a 5x4 dot board would have a pattern size of 11x4 - We subtract 1 for chessboard because the UI prompts users for the number of squares, not the - number of corners. - * */ - this.patternSize = - params.type == UICalibrationData.BoardType.CHESSBOARD - ? 
new Size(params.boardWidth - 1, params.boardHeight - 1) - : new Size(params.boardWidth, params.boardHeight); - - // Chessboard and dot board have different 3D points to project as a dot board has alternating - // dots per column - if (params.type == UICalibrationData.BoardType.CHESSBOARD) { - // Here we can create an NxN grid since a chessboard is rectangular - for (int heightIdx = 0; heightIdx < patternSize.height; heightIdx++) { - for (int widthIdx = 0; widthIdx < patternSize.width; widthIdx++) { - double boardYCoord = heightIdx * params.gridSize; - double boardXCoord = widthIdx * params.gridSize; - objectPoints.push_back(new MatOfPoint3f(new Point3(boardXCoord, boardYCoord, 0.0))); - } - } - } else if (params.type == UICalibrationData.BoardType.DOTBOARD) { - // Here we need to alternate the amount of dots per column since a dot board is not - // rectangular and also by taking in account the grid size which should be in mm - for (int i = 0; i < patternSize.height; i++) { - for (int j = 0; j < patternSize.width; j++) { - objectPoints.push_back( - new MatOfPoint3f( - new Point3((2 * j + i % 2) * params.gridSize, i * params.gridSize, 0.0d))); - } - } - } else { - logger.error("Can't create pattern for unknown board type " + params.type); + extends CVPipe< + Pair, Triple, FindBoardCornersPipe.FindCornersPipeParams> { + private static final Logger logger = + new Logger(FindBoardCornersPipe.class, LogGroup.VisionModule); + + MatOfPoint3f objectPoints = new MatOfPoint3f(); + + Size imageSize; + Size patternSize; + + // Configure the optimizations used while using OpenCV's find corners algorithm + // Since we return results in real-time, we want to ensure it goes as fast as possible + // and fails as fast as possible. 
+ final int findChessboardFlags = + Calib3d.CALIB_CB_NORMALIZE_IMAGE + | Calib3d.CALIB_CB_ADAPTIVE_THRESH + | Calib3d.CALIB_CB_FILTER_QUADS + | Calib3d.CALIB_CB_FAST_CHECK; + + private final MatOfPoint2f boardCorners = new MatOfPoint2f(); + + // Intermediate result mat's + Mat smallerInFrame = new Mat(); + MatOfPoint2f smallerBoardCorners = new MatOfPoint2f(); + + // SubCornerPix params + private final Size zeroZone = new Size(-1, -1); + private final TermCriteria criteria = new TermCriteria(3, 30, 0.001); + + private FindCornersPipeParams lastParams = null; + + public void createObjectPoints() { + if (this.lastParams != null && this.lastParams.equals(this.params)) return; + this.lastParams = this.params; + + this.objectPoints.release(); + this.objectPoints = null; + this.objectPoints = new MatOfPoint3f(); + + /*If using a chessboard, then the pattern size if the inner corners of the board. For example, the pattern size of a 9x9 chessboard would be 8x8 + If using a dot board, then the pattern size width is the sum of the bottom 2 rows and the height is the left or right most column + For example, a 5x4 dot board would have a pattern size of 11x4 + We subtract 1 for chessboard because the UI prompts users for the number of squares, not the + number of corners. + * */ + this.patternSize = + params.type == UICalibrationData.BoardType.CHESSBOARD + ? 
new Size(params.boardWidth - 1, params.boardHeight - 1) + : new Size(params.boardWidth, params.boardHeight); + + // Chessboard and dot board have different 3D points to project as a dot board has alternating + // dots per column + if (params.type == UICalibrationData.BoardType.CHESSBOARD) { + // Here we can create an NxN grid since a chessboard is rectangular + for (int heightIdx = 0; heightIdx < patternSize.height; heightIdx++) { + for (int widthIdx = 0; widthIdx < patternSize.width; widthIdx++) { + double boardYCoord = heightIdx * params.gridSize; + double boardXCoord = widthIdx * params.gridSize; + objectPoints.push_back(new MatOfPoint3f(new Point3(boardXCoord, boardYCoord, 0.0))); } + } + } else if (params.type == UICalibrationData.BoardType.DOTBOARD) { + // Here we need to alternate the amount of dots per column since a dot board is not + // rectangular and also by taking in account the grid size which should be in mm + for (int i = 0; i < patternSize.height; i++) { + for (int j = 0; j < patternSize.width; j++) { + objectPoints.push_back( + new MatOfPoint3f( + new Point3((2 * j + i % 2) * params.gridSize, i * params.gridSize, 0.0d))); + } + } + } else { + logger.error("Can't create pattern for unknown board type " + params.type); } - - /** - * Finds the corners in a given image and returns them - * - * @param in Input for pipe processing. Pair of input and output mat - * @return All valid Mats for camera calibration - */ - @Override - protected Triple process(Pair in) { - // Create the object points - createObjectPoints(); - - return findBoardCorners(in); + } + + /** + * Finds the corners in a given image and returns them + * + * @param in Input for pipe processing. 
Pair of input and output mat + * @return All valid Mats for camera calibration + */ + @Override + protected Triple process(Pair in) { + // Create the object points + createObjectPoints(); + + return findBoardCorners(in); + } + + /** + * Figures out how much a frame or point cloud must be scaled down by to match the desired size at + * which to run FindCorners. Should usually be > 1. + * + * @param inFrame + * @return + */ + private double getFindCornersScaleFactor(Mat inFrame) { + return 1.0 / params.divisor.value; + } + + /** + * Finds the minimum spacing between a set of x/y points Currently only considers points whose + * index is next to each other Which, currently, means it traverses one dimension. This is a rough + * heuristic approach which could be refined in the future. + * + *

Note that the current implementation can be fooled under the following conditions: (1) The + * width of the image is an odd number, and the smallest distance was actually on the between the + * last two points in a given row and (2) The smallest distance was actually in the direction + * orthogonal to that which was getting traversed by iterating through the MatOfPoint2f in order. + * + *

I've chosen not to handle these for speed's sake, and because, really, you don't need the + * exact answer for "min distance". you just need something fairly reasonable. + * + * @param inPoints point set to analyze. Must be a "tall" matrix. + * @return min spacing between neighbors + */ + private double getApproxMinSpacing(MatOfPoint2f inPoints) { + double minSpacing = Double.MAX_VALUE; + for (int pointIdx = 0; pointIdx < inPoints.height() - 1; pointIdx += 2) { + // +1 idx Neighbor distance + double[] startPoint = inPoints.get(pointIdx, 0); + double[] endPoint = inPoints.get(pointIdx + 1, 0); + double deltaX = startPoint[0] - endPoint[0]; + double deltaY = startPoint[1] - endPoint[1]; + double distToNext = Math.sqrt(deltaX * deltaX + deltaY * deltaY); + + minSpacing = Math.min(distToNext, minSpacing); } - - /** - * Figures out how much a frame or point cloud must be scaled down by to match the desired size at - * which to run FindCorners. Should usually be > 1. - * - * @param inFrame - * @return - */ - private double getFindCornersScaleFactor(Mat inFrame) { - return 1.0 / params.divisor.value; + return minSpacing; + } + + /** + * @param inFrame Full-size mat that is going to get scaled down before passing to + * findBoardCorners + * @return the size to scale the input mat to + */ + private Size getFindCornersImgSize(Mat in) { + int width = in.cols() / params.divisor.value; + int height = in.rows() / params.divisor.value; + return new Size(width, height); + } + + /** + * Given an input frame and a set of points from the "smaller" findChessboardCorner analysis, + * re-scale the points back to where they would have been in the input frame + * + * @param inPoints set of points derived from a call to findChessboardCorner on a shrunken mat. + * Must be a "tall" matrix. 
+ * @param origFrame Original frame we're rescaling points back to + * @param outPoints mat into which the output rescaled points get placed + */ + private void rescalePointsToOrigFrame( + MatOfPoint2f inPoints, Mat origFrame, MatOfPoint2f outPoints) { + // Rescale boardCorners back up to the inproc image size + Point[] outPointsArr = new Point[inPoints.height()]; + double sf = getFindCornersScaleFactor(origFrame); + for (int pointIdx = 0; pointIdx < inPoints.height(); pointIdx++) { + double[] pointCoords = inPoints.get(pointIdx, 0); + double outXCoord = pointCoords[0] / sf; + double outYCoord = pointCoords[1] / sf; + outPointsArr[pointIdx] = new Point(outXCoord, outYCoord); } - - /** - * Finds the minimum spacing between a set of x/y points Currently only considers points whose - * index is next to each other Which, currently, means it traverses one dimension. This is a rough - * heuristic approach which could be refined in the future. - * - *

Note that the current implementation can be fooled under the following conditions: (1) The - * width of the image is an odd number, and the smallest distance was actually on the between the - * last two points in a given row and (2) The smallest distance was actually in the direction - * orthogonal to that which was getting traversed by iterating through the MatOfPoint2f in order. - * - *

I've chosen not to handle these for speed's sake, and because, really, you don't need the - * exact answer for "min distance". you just need something fairly reasonable. - * - * @param inPoints point set to analyze. Must be a "tall" matrix. - * @return min spacing between neighbors - */ - private double getApproxMinSpacing(MatOfPoint2f inPoints) { - double minSpacing = Double.MAX_VALUE; - for (int pointIdx = 0; pointIdx < inPoints.height() - 1; pointIdx += 2) { - // +1 idx Neighbor distance - double[] startPoint = inPoints.get(pointIdx, 0); - double[] endPoint = inPoints.get(pointIdx + 1, 0); - double deltaX = startPoint[0] - endPoint[0]; - double deltaY = startPoint[1] - endPoint[1]; - double distToNext = Math.sqrt(deltaX * deltaX + deltaY * deltaY); - - minSpacing = Math.min(distToNext, minSpacing); - } - return minSpacing; + outPoints.fromArray(outPointsArr); + } + + /** + * Picks a window size for doing subpixel optimization based on the board type and spacing + * observed between the corners or points in the image + * + * @param inPoints + * @return + */ + private Size getWindowSize(MatOfPoint2f inPoints) { + double windowHalfWidth = 11; // Dot board uses fixed-size window half-width + if (params.type == UICalibrationData.BoardType.CHESSBOARD) { + // Chessboard uses a dynamic sized window based on how far apart the corners are + windowHalfWidth = Math.floor(getApproxMinSpacing(inPoints) * 0.50); + windowHalfWidth = Math.max(1, windowHalfWidth); } - - /** - * @param inFrame Full-size mat that is going to get scaled down before passing to - * findBoardCorners - * @return the size to scale the input mat to - */ - private Size getFindCornersImgSize(Mat in) { - int width = in.cols() / params.divisor.value; - int height = in.rows() / params.divisor.value; - return new Size(width, height); + return new Size(windowHalfWidth, windowHalfWidth); + } + + /** + * Find chessboard corners given an input mat and output mat to draw on + * + * @return Frame resolution, object 
points, board corners + */ + private Triple findBoardCorners(Pair in) { + createObjectPoints(); + + var inFrame = in.getLeft(); + var outFrame = in.getRight(); + + // Convert the inFrame too grayscale to increase contrast + Imgproc.cvtColor(inFrame, inFrame, Imgproc.COLOR_BGR2GRAY); + boolean boardFound = false; + + if (params.type == UICalibrationData.BoardType.CHESSBOARD) { + // This is for chessboards + + // Reduce the image size to be much more manageable + Imgproc.resize(inFrame, smallerInFrame, getFindCornersImgSize(inFrame)); + + // Run the chessboard corner finder on the smaller image + boardFound = + Calib3d.findChessboardCorners( + smallerInFrame, patternSize, smallerBoardCorners, findChessboardFlags); + + // Rescale back to original pixel locations + if (boardFound) { + rescalePointsToOrigFrame(smallerBoardCorners, inFrame, boardCorners); + } + + } else if (params.type == UICalibrationData.BoardType.DOTBOARD) { + // For dot boards + boardFound = + Calib3d.findCirclesGrid( + inFrame, patternSize, boardCorners, Calib3d.CALIB_CB_ASYMMETRIC_GRID); } - /** - * Given an input frame and a set of points from the "smaller" findChessboardCorner analysis, - * re-scale the points back to where they would have been in the input frame - * - * @param inPoints set of points derived from a call to findChessboardCorner on a shrunken mat. - * Must be a "tall" matrix. 
- * @param origFrame Original frame we're rescaling points back to - * @param outPoints mat into which the output rescaled points get placed - */ - private void rescalePointsToOrigFrame( - MatOfPoint2f inPoints, Mat origFrame, MatOfPoint2f outPoints) { - // Rescale boardCorners back up to the inproc image size - Point[] outPointsArr = new Point[inPoints.height()]; - double sf = getFindCornersScaleFactor(origFrame); - for (int pointIdx = 0; pointIdx < inPoints.height(); pointIdx++) { - double[] pointCoords = inPoints.get(pointIdx, 0); - double outXCoord = pointCoords[0] / sf; - double outYCoord = pointCoords[1] / sf; - outPointsArr[pointIdx] = new Point(outXCoord, outYCoord); - } - outPoints.fromArray(outPointsArr); + if (!boardFound) { + // If we can't find a chessboard/dot board, just return + return null; } - /** - * Picks a window size for doing subpixel optimization based on the board type and spacing - * observed between the corners or points in the image - * - * @param inPoints - * @return - */ - private Size getWindowSize(MatOfPoint2f inPoints) { - double windowHalfWidth = 11; // Dot board uses fixed-size window half-width - if (params.type == UICalibrationData.BoardType.CHESSBOARD) { - // Chessboard uses a dynamic sized window based on how far apart the corners are - windowHalfWidth = Math.floor(getApproxMinSpacing(inPoints) * 0.50); - windowHalfWidth = Math.max(1, windowHalfWidth); - } - return new Size(windowHalfWidth, windowHalfWidth); + var outBoardCorners = new MatOfPoint2f(); + boardCorners.copyTo(outBoardCorners); + + var objPts = new MatOfPoint2f(); + objectPoints.copyTo(objPts); + + // Get the size of the inFrame + this.imageSize = new Size(inFrame.width(), inFrame.height()); + + // Do sub corner pix for drawing chessboard + Imgproc.cornerSubPix( + inFrame, outBoardCorners, getWindowSize(outBoardCorners), zeroZone, criteria); + + // convert back to BGR + // Imgproc.cvtColor(inFrame, inFrame, Imgproc.COLOR_GRAY2BGR); + // draw the chessboard, 
doesn't have to be different for a dot board since it just re projects + // the corners we found + Calib3d.drawChessboardCorners(outFrame, patternSize, outBoardCorners, true); + + // // Add the 3D points and the points of the corners found + // if (addToSnapList) { + // this.listOfObjectPoints.add(objectPoints); + // this.listOfImagePoints.add(boardCorners); + // } + + return Triple.of(inFrame.size(), objPts, outBoardCorners); + } + + public static class FindCornersPipeParams { + private final int boardHeight; + private final int boardWidth; + private final UICalibrationData.BoardType type; + private final double gridSize; + private final FrameDivisor divisor; + + public FindCornersPipeParams( + int boardHeight, + int boardWidth, + UICalibrationData.BoardType type, + double gridSize, + FrameDivisor divisor) { + this.boardHeight = boardHeight; + this.boardWidth = boardWidth; + this.type = type; + this.gridSize = gridSize; // mm + this.divisor = divisor; } - /** - * Find chessboard corners given an input mat and output mat to draw on - * - * @return Frame resolution, object points, board corners - */ - private Triple findBoardCorners(Pair in) { - createObjectPoints(); - - var inFrame = in.getLeft(); - var outFrame = in.getRight(); - - // Convert the inFrame too grayscale to increase contrast - Imgproc.cvtColor(inFrame, inFrame, Imgproc.COLOR_BGR2GRAY); - boolean boardFound = false; - - if (params.type == UICalibrationData.BoardType.CHESSBOARD) { - // This is for chessboards - - // Reduce the image size to be much more manageable - Imgproc.resize(inFrame, smallerInFrame, getFindCornersImgSize(inFrame)); - - // Run the chessboard corner finder on the smaller image - boardFound = - Calib3d.findChessboardCorners( - smallerInFrame, patternSize, smallerBoardCorners, findChessboardFlags); - - // Rescale back to original pixel locations - if (boardFound) { - rescalePointsToOrigFrame(smallerBoardCorners, inFrame, boardCorners); - } - - } else if (params.type == 
UICalibrationData.BoardType.DOTBOARD) { - // For dot boards - boardFound = - Calib3d.findCirclesGrid( - inFrame, patternSize, boardCorners, Calib3d.CALIB_CB_ASYMMETRIC_GRID); - } - - if (!boardFound) { - // If we can't find a chessboard/dot board, just return - return null; - } - - var outBoardCorners = new MatOfPoint2f(); - boardCorners.copyTo(outBoardCorners); - - var objPts = new MatOfPoint2f(); - objectPoints.copyTo(objPts); - - // Get the size of the inFrame - this.imageSize = new Size(inFrame.width(), inFrame.height()); - - // Do sub corner pix for drawing chessboard - Imgproc.cornerSubPix( - inFrame, outBoardCorners, getWindowSize(outBoardCorners), zeroZone, criteria); - - // convert back to BGR - // Imgproc.cvtColor(inFrame, inFrame, Imgproc.COLOR_GRAY2BGR); - // draw the chessboard, doesn't have to be different for a dot board since it just re projects - // the corners we found - Calib3d.drawChessboardCorners(outFrame, patternSize, outBoardCorners, true); - - // // Add the 3D points and the points of the corners found - // if (addToSnapList) { - // this.listOfObjectPoints.add(objectPoints); - // this.listOfImagePoints.add(boardCorners); - // } - - return Triple.of(inFrame.size(), objPts, outBoardCorners); + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + boardHeight; + result = prime * result + boardWidth; + result = prime * result + ((type == null) ? 0 : type.hashCode()); + long temp; + temp = Double.doubleToLongBits(gridSize); + result = prime * result + (int) (temp ^ (temp >>> 32)); + result = prime * result + ((divisor == null) ? 
0 : divisor.hashCode()); + return result; } - public static class FindCornersPipeParams { - private final int boardHeight; - private final int boardWidth; - private final UICalibrationData.BoardType type; - private final double gridSize; - private final FrameDivisor divisor; - - public FindCornersPipeParams( - int boardHeight, - int boardWidth, - UICalibrationData.BoardType type, - double gridSize, - FrameDivisor divisor) { - this.boardHeight = boardHeight; - this.boardWidth = boardWidth; - this.type = type; - this.gridSize = gridSize; // mm - this.divisor = divisor; - } - - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + boardHeight; - result = prime * result + boardWidth; - result = prime * result + ((type == null) ? 0 : type.hashCode()); - long temp; - temp = Double.doubleToLongBits(gridSize); - result = prime * result + (int) (temp ^ (temp >>> 32)); - result = prime * result + ((divisor == null) ? 0 : divisor.hashCode()); - return result; - } - - @Override - public boolean equals(Object obj) { - if (this == obj) return true; - if (obj == null) return false; - if (getClass() != obj.getClass()) return false; - FindCornersPipeParams other = (FindCornersPipeParams) obj; - if (boardHeight != other.boardHeight) return false; - if (boardWidth != other.boardWidth) return false; - if (type != other.type) return false; - if (Double.doubleToLongBits(gridSize) != Double.doubleToLongBits(other.gridSize)) - return false; - return divisor == other.divisor; - } + @Override + public boolean equals(Object obj) { + if (this == obj) return true; + if (obj == null) return false; + if (getClass() != obj.getClass()) return false; + FindCornersPipeParams other = (FindCornersPipeParams) obj; + if (boardHeight != other.boardHeight) return false; + if (boardWidth != other.boardWidth) return false; + if (type != other.type) return false; + if (Double.doubleToLongBits(gridSize) != Double.doubleToLongBits(other.gridSize)) + return 
false; + return divisor == other.divisor; } + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/FindCirclesPipe.java b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/FindCirclesPipe.java index 07ec9ecd82..c7f8761dc2 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/FindCirclesPipe.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/FindCirclesPipe.java @@ -29,119 +29,119 @@ import org.photonvision.vision.pipe.CVPipe; public class FindCirclesPipe - extends CVPipe>, List, FindCirclesPipe.FindCirclePipeParams> { - // Output vector of found circles. Each vector is encoded as 3 or 4 element floating-point vector - // (x,y,radius) or (x,y,radius,votes) . - private final Mat circles = new Mat(); + extends CVPipe>, List, FindCirclesPipe.FindCirclePipeParams> { + // Output vector of found circles. Each vector is encoded as 3 or 4 element floating-point vector + // (x,y,radius) or (x,y,radius,votes) . + private final Mat circles = new Mat(); - /** - * Runs the process for the pipe. The reason we need a separate pipe for circles is because if we - * were to use the FindShapes pipe, we would have to assume that any shape more than 10-20+ sides - * is a circle. Only issue with such approximation is that the user would no longer be able to - * track shapes with 10-20+ sides. And hence, in order to overcome this edge case, we can use - * HoughCircles which is more flexible and accurate for finding circles. - * - * @param in Input for pipe processing. 8-bit, single-channel, grayscale input image. - * @return Result of processing. - */ - @Override - protected List process(Pair> in) { - circles.release(); - List output = new ArrayList<>(); + /** + * Runs the process for the pipe. The reason we need a separate pipe for circles is because if we + * were to use the FindShapes pipe, we would have to assume that any shape more than 10-20+ sides + * is a circle. 
Only issue with such approximation is that the user would no longer be able to + * track shapes with 10-20+ sides. And hence, in order to overcome this edge case, we can use + * HoughCircles which is more flexible and accurate for finding circles. + * + * @param in Input for pipe processing. 8-bit, single-channel, grayscale input image. + * @return Result of processing. + */ + @Override + protected List process(Pair> in) { + circles.release(); + List output = new ArrayList<>(); - var diag = params.diagonalLengthPx; - var minRadius = (int) (params.minRadius * diag / 100.0); - var maxRadius = (int) (params.maxRadius * diag / 100.0); + var diag = params.diagonalLengthPx; + var minRadius = (int) (params.minRadius * diag / 100.0); + var maxRadius = (int) (params.maxRadius * diag / 100.0); - Imgproc.HoughCircles( - in.getLeft(), - circles, - // Detection method, see #HoughModes. The available methods are #HOUGH_GRADIENT and - // #HOUGH_GRADIENT_ALT. - Imgproc.HOUGH_GRADIENT, - /*Inverse ratio of the accumulator resolution to the image resolution. - For example, if dp=1 , the accumulator has the same resolution as the input image. - If dp=2 , the accumulator has half as big width and height. For #HOUGH_GRADIENT_ALT the recommended value is dp=1.5, - unless some small very circles need to be detected. 
- */ - 1.0, - params.minDist, - params.maxCannyThresh, - Math.max(1.0, params.accuracy), - minRadius, - maxRadius); - // Great, we now found the center point of the circle, and it's radius, but we have no idea what - // contour it corresponds to - // Each contour can only match to one circle, so we keep a list of unmatched contours around and - // only match against them - // This does mean that contours closer than allowableThreshold aren't matched to anything if - // there's a 'better' option - var unmatchedContours = in.getRight(); - for (int x = 0; x < circles.cols(); x++) { - // Grab the current circle we are looking at - double[] c = circles.get(0, x); - // Find the center points of that circle - double x_center = c[0]; - double y_center = c[1]; + Imgproc.HoughCircles( + in.getLeft(), + circles, + // Detection method, see #HoughModes. The available methods are #HOUGH_GRADIENT and + // #HOUGH_GRADIENT_ALT. + Imgproc.HOUGH_GRADIENT, + /*Inverse ratio of the accumulator resolution to the image resolution. + For example, if dp=1 , the accumulator has the same resolution as the input image. + If dp=2 , the accumulator has half as big width and height. For #HOUGH_GRADIENT_ALT the recommended value is dp=1.5, + unless some small very circles need to be detected. 
+ */ + 1.0, + params.minDist, + params.maxCannyThresh, + Math.max(1.0, params.accuracy), + minRadius, + maxRadius); + // Great, we now found the center point of the circle, and it's radius, but we have no idea what + // contour it corresponds to + // Each contour can only match to one circle, so we keep a list of unmatched contours around and + // only match against them + // This does mean that contours closer than allowableThreshold aren't matched to anything if + // there's a 'better' option + var unmatchedContours = in.getRight(); + for (int x = 0; x < circles.cols(); x++) { + // Grab the current circle we are looking at + double[] c = circles.get(0, x); + // Find the center points of that circle + double x_center = c[0]; + double y_center = c[1]; - for (Contour contour : unmatchedContours) { - // Grab the moments of the current contour - Moments mu = contour.getMoments(); - // Determine if the contour is within the threshold of the detected circle - // NOTE: This means that the centroid of the contour must be within the "allowable - // threshold" - // of the center of the circle - if (Math.abs(x_center - (mu.m10 / mu.m00)) <= params.allowableThreshold - && Math.abs(y_center - (mu.m01 / mu.m00)) <= params.allowableThreshold) { - // If it is, then add it to the output array - output.add(new CVShape(contour, new Point(c[0], c[1]), c[2])); - unmatchedContours.remove(contour); - break; - } - } + for (Contour contour : unmatchedContours) { + // Grab the moments of the current contour + Moments mu = contour.getMoments(); + // Determine if the contour is within the threshold of the detected circle + // NOTE: This means that the centroid of the contour must be within the "allowable + // threshold" + // of the center of the circle + if (Math.abs(x_center - (mu.m10 / mu.m00)) <= params.allowableThreshold + && Math.abs(y_center - (mu.m01 / mu.m00)) <= params.allowableThreshold) { + // If it is, then add it to the output array + output.add(new CVShape(contour, new 
Point(c[0], c[1]), c[2])); + unmatchedContours.remove(contour); + break; } + } + } - // Release everything we don't use - for (var c : unmatchedContours) c.release(); + // Release everything we don't use + for (var c : unmatchedContours) c.release(); - return output; - } + return output; + } - public static class FindCirclePipeParams { - private final int allowableThreshold; - private final int minRadius; - private final int maxRadius; - private final int minDist; - private final int maxCannyThresh; - private final int accuracy; - private final double diagonalLengthPx; + public static class FindCirclePipeParams { + private final int allowableThreshold; + private final int minRadius; + private final int maxRadius; + private final int minDist; + private final int maxCannyThresh; + private final int accuracy; + private final double diagonalLengthPx; - /* - * @params minDist - Minimum distance between the centers of the detected circles. - * If the parameter is too small, multiple neighbor circles may be falsely detected in addition to a true one. If it is too large, some circles may be missed. - * - * @param maxCannyThresh -First method-specific parameter. In case of #HOUGH_GRADIENT and #HOUGH_GRADIENT_ALT, it is the higher threshold of the two passed to the Canny edge detector (the lower one is twice smaller). - * Note that #HOUGH_GRADIENT_ALT uses #Scharr algorithm to compute image derivatives, so the threshold value should normally be higher, such as 300 or normally exposed and contrasty images. - * - * - * @param allowableThreshold - When finding the corresponding contour, this is used to see how close a center should be to a contour for it to be considered THAT contour. 
- * Should be increased with lower resolutions and decreased with higher resolution - * */ - public FindCirclePipeParams( - int allowableThreshold, - int minRadius, - int minDist, - int maxRadius, - int maxCannyThresh, - int accuracy, - double diagonalLengthPx) { - this.allowableThreshold = allowableThreshold; - this.minRadius = minRadius; - this.maxRadius = maxRadius; - this.minDist = minDist; - this.maxCannyThresh = maxCannyThresh; - this.accuracy = accuracy; - this.diagonalLengthPx = diagonalLengthPx; - } + /* + * @params minDist - Minimum distance between the centers of the detected circles. + * If the parameter is too small, multiple neighbor circles may be falsely detected in addition to a true one. If it is too large, some circles may be missed. + * + * @param maxCannyThresh -First method-specific parameter. In case of #HOUGH_GRADIENT and #HOUGH_GRADIENT_ALT, it is the higher threshold of the two passed to the Canny edge detector (the lower one is twice smaller). + * Note that #HOUGH_GRADIENT_ALT uses #Scharr algorithm to compute image derivatives, so the threshold value should normally be higher, such as 300 or normally exposed and contrasty images. + * + * + * @param allowableThreshold - When finding the corresponding contour, this is used to see how close a center should be to a contour for it to be considered THAT contour. 
+ * Should be increased with lower resolutions and decreased with higher resolution + * */ + public FindCirclePipeParams( + int allowableThreshold, + int minRadius, + int minDist, + int maxRadius, + int maxCannyThresh, + int accuracy, + double diagonalLengthPx) { + this.allowableThreshold = allowableThreshold; + this.minRadius = minRadius; + this.maxRadius = maxRadius; + this.minDist = minDist; + this.maxCannyThresh = maxCannyThresh; + this.accuracy = accuracy; + this.diagonalLengthPx = diagonalLengthPx; } + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/FindContoursPipe.java b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/FindContoursPipe.java index d5564d63fc..4f05f67532 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/FindContoursPipe.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/FindContoursPipe.java @@ -27,21 +27,21 @@ import org.photonvision.vision.pipe.CVPipe; public class FindContoursPipe - extends CVPipe, FindContoursPipe.FindContoursParams> { - private final List m_foundContours = new ArrayList<>(); + extends CVPipe, FindContoursPipe.FindContoursParams> { + private final List m_foundContours = new ArrayList<>(); - @Override - protected List process(Mat in) { - for (var m : m_foundContours) { - m.release(); // necessary? - } - m_foundContours.clear(); + @Override + protected List process(Mat in) { + for (var m : m_foundContours) { + m.release(); // necessary? 
+ } + m_foundContours.clear(); - Imgproc.findContours( - in, m_foundContours, new Mat(), Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_TC89_KCOS); + Imgproc.findContours( + in, m_foundContours, new Mat(), Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_TC89_KCOS); - return m_foundContours.stream().map(Contour::new).collect(Collectors.toList()); - } + return m_foundContours.stream().map(Contour::new).collect(Collectors.toList()); + } - public static class FindContoursParams {} + public static class FindContoursParams {} } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/FindPolygonPipe.java b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/FindPolygonPipe.java index dfadfda53f..d291ccb33a 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/FindPolygonPipe.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/FindPolygonPipe.java @@ -26,65 +26,65 @@ import org.photonvision.vision.pipe.CVPipe; public class FindPolygonPipe - extends CVPipe, List, FindPolygonPipe.FindPolygonPipeParams> { - List shapeList = new ArrayList<>(); + extends CVPipe, List, FindPolygonPipe.FindPolygonPipeParams> { + List shapeList = new ArrayList<>(); - /* - * Runs the process for the pipe. - * - * @param in Input for pipe processing. - * @return Result of processing. - */ - @Override - protected List process(List in) { - shapeList.forEach(CVShape::release); - shapeList.clear(); - shapeList = new ArrayList<>(); + /* + * Runs the process for the pipe. + * + * @param in Input for pipe processing. + * @return Result of processing. 
+ */ + @Override + protected List process(List in) { + shapeList.forEach(CVShape::release); + shapeList.clear(); + shapeList = new ArrayList<>(); - for (Contour contour : in) { - shapeList.add(getShape(contour)); - } - - return shapeList; + for (Contour contour : in) { + shapeList.add(getShape(contour)); } - private CVShape getShape(Contour in) { - int corners = getCorners(in); + return shapeList; + } - /*The contourShape enum has predefined shapes for Circles, Triangles, and Quads - meaning any shape not fitting in those predefined shapes must be a custom shape. - */ - if (ContourShape.fromSides(corners) == null) { - return new CVShape(in, ContourShape.Custom); - } - switch (ContourShape.fromSides(corners)) { - case Circle: - return new CVShape(in, ContourShape.Circle); - case Triangle: - return new CVShape(in, ContourShape.Triangle); - case Quadrilateral: - return new CVShape(in, ContourShape.Quadrilateral); - } + private CVShape getShape(Contour in) { + int corners = getCorners(in); - return new CVShape(in, ContourShape.Custom); + /*The contourShape enum has predefined shapes for Circles, Triangles, and Quads + meaning any shape not fitting in those predefined shapes must be a custom shape. 
+ */ + if (ContourShape.fromSides(corners) == null) { + return new CVShape(in, ContourShape.Custom); + } + switch (ContourShape.fromSides(corners)) { + case Circle: + return new CVShape(in, ContourShape.Circle); + case Triangle: + return new CVShape(in, ContourShape.Triangle); + case Quadrilateral: + return new CVShape(in, ContourShape.Quadrilateral); } - private int getCorners(Contour contour) { - var approx = - contour.getApproxPolyDp( - (100 - params.accuracyPercentage) / 100.0 * Imgproc.arcLength(contour.getMat2f(), true), - true); + return new CVShape(in, ContourShape.Custom); + } - // The height of the resultant approximation is the number of vertices - return (int) approx.size().height; - } + private int getCorners(Contour contour) { + var approx = + contour.getApproxPolyDp( + (100 - params.accuracyPercentage) / 100.0 * Imgproc.arcLength(contour.getMat2f(), true), + true); + + // The height of the resultant approximation is the number of vertices + return (int) approx.size().height; + } - public static class FindPolygonPipeParams { - private final double accuracyPercentage; + public static class FindPolygonPipeParams { + private final double accuracyPercentage; - // Should be a value between 0-100 - public FindPolygonPipeParams(double accuracyPercentage) { - this.accuracyPercentage = accuracyPercentage; - } + // Should be a value between 0-100 + public FindPolygonPipeParams(double accuracyPercentage) { + this.accuracyPercentage = accuracyPercentage; } + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/GPUAcceleratedHSVPipe.java b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/GPUAcceleratedHSVPipe.java index 5b49faf641..c25b351a97 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/GPUAcceleratedHSVPipe.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/GPUAcceleratedHSVPipe.java @@ -35,517 +35,517 @@ import org.photonvision.vision.pipe.CVPipe; public class 
GPUAcceleratedHSVPipe extends CVPipe { - private static final String k_vertexShader = - String.join( - "\n", - "#version 100", - "", - "attribute vec4 position;", - "", - "void main() {", - " gl_Position = position;", - "}"); - private static final String k_fragmentShader = - String.join( - "\n", - "#version 100", - "", - "precision highp float;", - "precision highp int;", - "", - "uniform vec3 lowerThresh;", - "uniform vec3 upperThresh;", - "uniform vec2 resolution;", - "uniform sampler2D texture0;", - "", - "vec3 rgb2hsv(vec3 c) {", - " vec4 K = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);", - " vec4 p = mix(vec4(c.bg, K.wz), vec4(c.gb, K.xy), step(c.b, c.g));", - " vec4 q = mix(vec4(p.xyw, c.r), vec4(c.r, p.yzx), step(p.x, c.r));", - "", - " float d = q.x - min(q.w, q.y);", - " float e = 1.0e-10;", - " return vec3(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + e), q.x);", - "}", - "", - "bool inRange(vec3 hsv) {", - " bvec3 botBool = greaterThanEqual(hsv, lowerThresh);", - " bvec3 topBool = lessThanEqual(hsv, upperThresh);", - " return all(botBool) && all(topBool);", - "}", - "", - "void main() {", - " vec2 uv = gl_FragCoord.xy/resolution;", - // Important! We do this .bgr swizzle because the image comes in as BGR, but we pretend - // it's RGB for convenience+speed - " vec3 col = texture2D(texture0, uv).bgr;", - // Only the first value in the vec4 gets used for GL_RED, and only the last value gets - // used for GL_ALPHA - " gl_FragColor = inRange(rgb2hsv(col)) ? 
vec4(1.0, 0.0, 0.0, 1.0) : vec4(0.0, 0.0, 0.0, 0.0);", - "}"); - private static final int k_startingWidth = 1280, k_startingHeight = 720; - private static final float[] k_vertexPositions = { - // Set up a quad that covers the screen - -1f, +1f, +1f, +1f, -1f, -1f, +1f, -1f - }; - private static final int k_positionVertexAttribute = - 0; // ID for the vertex shader position variable - - public enum PBOMode { - NONE, - SINGLE_BUFFERED, - DOUBLE_BUFFERED + private static final String k_vertexShader = + String.join( + "\n", + "#version 100", + "", + "attribute vec4 position;", + "", + "void main() {", + " gl_Position = position;", + "}"); + private static final String k_fragmentShader = + String.join( + "\n", + "#version 100", + "", + "precision highp float;", + "precision highp int;", + "", + "uniform vec3 lowerThresh;", + "uniform vec3 upperThresh;", + "uniform vec2 resolution;", + "uniform sampler2D texture0;", + "", + "vec3 rgb2hsv(vec3 c) {", + " vec4 K = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);", + " vec4 p = mix(vec4(c.bg, K.wz), vec4(c.gb, K.xy), step(c.b, c.g));", + " vec4 q = mix(vec4(p.xyw, c.r), vec4(c.r, p.yzx), step(p.x, c.r));", + "", + " float d = q.x - min(q.w, q.y);", + " float e = 1.0e-10;", + " return vec3(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + e), q.x);", + "}", + "", + "bool inRange(vec3 hsv) {", + " bvec3 botBool = greaterThanEqual(hsv, lowerThresh);", + " bvec3 topBool = lessThanEqual(hsv, upperThresh);", + " return all(botBool) && all(topBool);", + "}", + "", + "void main() {", + " vec2 uv = gl_FragCoord.xy/resolution;", + // Important! We do this .bgr swizzle because the image comes in as BGR, but we pretend + // it's RGB for convenience+speed + " vec3 col = texture2D(texture0, uv).bgr;", + // Only the first value in the vec4 gets used for GL_RED, and only the last value gets + // used for GL_ALPHA + " gl_FragColor = inRange(rgb2hsv(col)) ? 
vec4(1.0, 0.0, 0.0, 1.0) : vec4(0.0, 0.0, 0.0, 0.0);", + "}"); + private static final int k_startingWidth = 1280, k_startingHeight = 720; + private static final float[] k_vertexPositions = { + // Set up a quad that covers the screen + -1f, +1f, +1f, +1f, -1f, -1f, +1f, -1f + }; + private static final int k_positionVertexAttribute = + 0; // ID for the vertex shader position variable + + public enum PBOMode { + NONE, + SINGLE_BUFFERED, + DOUBLE_BUFFERED + } + + private final IntBuffer vertexVBOIds = GLBuffers.newDirectIntBuffer(1), + unpackPBOIds = GLBuffers.newDirectIntBuffer(2), + packPBOIds = GLBuffers.newDirectIntBuffer(2); + + private final GL2ES2 gl; + private final GLProfile profile; + private final int outputFormat; + private final PBOMode pboMode; + private final GLOffscreenAutoDrawableImpl.FBOImpl drawable; + private final Texture texture; + // The texture uniform holds the image that's being processed + // The resolution uniform holds the current image resolution + // The lower and upper uniforms hold the lower and upper HSV limits for thresholding + private final int textureUniformId, resolutionUniformId, lowerUniformId, upperUniformId; + + private final Logger logger = new Logger(GPUAcceleratedHSVPipe.class, LogGroup.General); + + private byte[] inputBytes, outputBytes; + private Mat outputMat = new Mat(k_startingHeight, k_startingWidth, CvType.CV_8UC1); + private int previousWidth = -1, previousHeight = -1; + private int unpackIndex = 0, unpackNextIndex = 0, packIndex = 0, packNextIndex = 0; + + public GPUAcceleratedHSVPipe(PBOMode pboMode) { + this.pboMode = pboMode; + + // Set up GL profile and ask for specific capabilities + profile = GLProfile.get(pboMode == PBOMode.NONE ? 
GLProfile.GLES2 : GLProfile.GL4ES3); + final var capabilities = new GLCapabilities(profile); + capabilities.setHardwareAccelerated(true); + capabilities.setFBO(true); + capabilities.setDoubleBuffered(false); + capabilities.setOnscreen(false); + capabilities.setRedBits(8); + capabilities.setBlueBits(0); + capabilities.setGreenBits(0); + capabilities.setAlphaBits(0); + + // Set up the offscreen area we're going to draw to + final var factory = GLDrawableFactory.getFactory(profile); + drawable = + (GLOffscreenAutoDrawableImpl.FBOImpl) + factory.createOffscreenAutoDrawable( + factory.getDefaultDevice(), + capabilities, + new DefaultGLCapabilitiesChooser(), + k_startingWidth, + k_startingHeight); + drawable.display(); + drawable.getContext().makeCurrent(); + + // Get an OpenGL context; OpenGL ES 2.0 and OpenGL 2.0 are compatible with all the coprocs we + // care about but not compatible with PBOs. Open GL ES 3.0 and OpenGL 4.0 are compatible with + // select coprocs *and* PBOs + gl = pboMode == PBOMode.NONE ? 
drawable.getGL().getGLES2() : drawable.getGL().getGL4ES3(); + final int programId = gl.glCreateProgram(); + + if (pboMode == PBOMode.NONE && !gl.glGetString(GL_EXTENSIONS).contains("GL_EXT_texture_rg")) { + logger.warn( + "OpenGL ES 2.0 implementation does not have the 'GL_EXT_texture_rg' extension, falling back to GL_ALPHA instead of GL_RED output format"); + outputFormat = GL_ALPHA; + } else { + outputFormat = GL_RED; } - private final IntBuffer vertexVBOIds = GLBuffers.newDirectIntBuffer(1), - unpackPBOIds = GLBuffers.newDirectIntBuffer(2), - packPBOIds = GLBuffers.newDirectIntBuffer(2); - - private final GL2ES2 gl; - private final GLProfile profile; - private final int outputFormat; - private final PBOMode pboMode; - private final GLOffscreenAutoDrawableImpl.FBOImpl drawable; - private final Texture texture; - // The texture uniform holds the image that's being processed - // The resolution uniform holds the current image resolution - // The lower and upper uniforms hold the lower and upper HSV limits for thresholding - private final int textureUniformId, resolutionUniformId, lowerUniformId, upperUniformId; - - private final Logger logger = new Logger(GPUAcceleratedHSVPipe.class, LogGroup.General); - - private byte[] inputBytes, outputBytes; - private Mat outputMat = new Mat(k_startingHeight, k_startingWidth, CvType.CV_8UC1); - private int previousWidth = -1, previousHeight = -1; - private int unpackIndex = 0, unpackNextIndex = 0, packIndex = 0, packNextIndex = 0; - - public GPUAcceleratedHSVPipe(PBOMode pboMode) { - this.pboMode = pboMode; - - // Set up GL profile and ask for specific capabilities - profile = GLProfile.get(pboMode == PBOMode.NONE ? 
GLProfile.GLES2 : GLProfile.GL4ES3); - final var capabilities = new GLCapabilities(profile); - capabilities.setHardwareAccelerated(true); - capabilities.setFBO(true); - capabilities.setDoubleBuffered(false); - capabilities.setOnscreen(false); - capabilities.setRedBits(8); - capabilities.setBlueBits(0); - capabilities.setGreenBits(0); - capabilities.setAlphaBits(0); - - // Set up the offscreen area we're going to draw to - final var factory = GLDrawableFactory.getFactory(profile); - drawable = - (GLOffscreenAutoDrawableImpl.FBOImpl) - factory.createOffscreenAutoDrawable( - factory.getDefaultDevice(), - capabilities, - new DefaultGLCapabilitiesChooser(), - k_startingWidth, - k_startingHeight); - drawable.display(); - drawable.getContext().makeCurrent(); - - // Get an OpenGL context; OpenGL ES 2.0 and OpenGL 2.0 are compatible with all the coprocs we - // care about but not compatible with PBOs. Open GL ES 3.0 and OpenGL 4.0 are compatible with - // select coprocs *and* PBOs - gl = pboMode == PBOMode.NONE ? 
drawable.getGL().getGLES2() : drawable.getGL().getGL4ES3(); - final int programId = gl.glCreateProgram(); - - if (pboMode == PBOMode.NONE && !gl.glGetString(GL_EXTENSIONS).contains("GL_EXT_texture_rg")) { - logger.warn( - "OpenGL ES 2.0 implementation does not have the 'GL_EXT_texture_rg' extension, falling back to GL_ALPHA instead of GL_RED output format"); - outputFormat = GL_ALPHA; - } else { - outputFormat = GL_RED; - } + // JOGL creates a framebuffer color attachment that has RGB set as the format, which is not + // appropriate for us because we want a single-channel format + // We make ourown FBO color attachment to remedy this + // Detach and destroy the FBO color attachment that JOGL made for us + drawable.getFBObject(GL_FRONT).detachColorbuffer(gl, 0, true); + // Equivalent to calling glBindFramebuffer + drawable.getFBObject(GL_FRONT).bind(gl); + if (true) { // For now renderbuffers are disabled + // Create a color attachment texture to hold our rendered output + var colorBufferIds = GLBuffers.newDirectIntBuffer(1); + gl.glGenTextures(1, colorBufferIds); + gl.glBindTexture(GL_TEXTURE_2D, colorBufferIds.get(0)); + gl.glTexImage2D( + GL_TEXTURE_2D, + 0, + outputFormat == GL_RED ? 
GL_R8 : GL_ALPHA8, + k_startingWidth, + k_startingHeight, + 0, + outputFormat, + GL_UNSIGNED_BYTE, + null); + gl.glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); + gl.glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); + // Attach the texture to the framebuffer + gl.glBindTexture(GL_TEXTURE_2D, 0); + gl.glFramebufferTexture2D( + GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, colorBufferIds.get(0), 0); + // Cleanup + gl.glBindTexture(GL_TEXTURE_2D, 0); + } else { + // Create a color attachment texture to hold our rendered output + var renderBufferIds = GLBuffers.newDirectIntBuffer(1); + gl.glGenRenderbuffers(1, renderBufferIds); + gl.glBindRenderbuffer(GL_RENDERBUFFER, renderBufferIds.get(0)); + gl.glRenderbufferStorage( + GL_RENDERBUFFER, + outputFormat == GL_RED ? GL_R8 : GL_ALPHA8, + k_startingWidth, + k_startingHeight); + // Attach the texture to the framebuffer + gl.glFramebufferRenderbuffer( + GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, renderBufferIds.get(0)); + // Cleanup + gl.glBindRenderbuffer(GL_RENDERBUFFER, 0); + } + drawable.getFBObject(GL_FRONT).unbind(gl); + + // Check that the FBO is attached + int fboStatus = gl.glCheckFramebufferStatus(GL_FRAMEBUFFER); + if (fboStatus == GL_FRAMEBUFFER_UNSUPPORTED) { + throw new RuntimeException( + "GL implementation does not support rendering to internal format '" + + String.format("0x%08X", outputFormat == GL_RED ? 
GL_R8 : GL_ALPHA8) + + "' with type '" + + String.format("0x%08X", GL_UNSIGNED_BYTE) + + "'"); + } else if (fboStatus != GL_FRAMEBUFFER_COMPLETE) { + throw new RuntimeException( + "Framebuffer is not complete; framebuffer status is " + + String.format("0x%08X", fboStatus)); + } - // JOGL creates a framebuffer color attachment that has RGB set as the format, which is not - // appropriate for us because we want a single-channel format - // We make ourown FBO color attachment to remedy this - // Detach and destroy the FBO color attachment that JOGL made for us - drawable.getFBObject(GL_FRONT).detachColorbuffer(gl, 0, true); - // Equivalent to calling glBindFramebuffer - drawable.getFBObject(GL_FRONT).bind(gl); - if (true) { // For now renderbuffers are disabled - // Create a color attachment texture to hold our rendered output - var colorBufferIds = GLBuffers.newDirectIntBuffer(1); - gl.glGenTextures(1, colorBufferIds); - gl.glBindTexture(GL_TEXTURE_2D, colorBufferIds.get(0)); - gl.glTexImage2D( - GL_TEXTURE_2D, - 0, - outputFormat == GL_RED ? GL_R8 : GL_ALPHA8, - k_startingWidth, - k_startingHeight, - 0, - outputFormat, - GL_UNSIGNED_BYTE, - null); - gl.glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); - gl.glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); - // Attach the texture to the framebuffer - gl.glBindTexture(GL_TEXTURE_2D, 0); - gl.glFramebufferTexture2D( - GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, colorBufferIds.get(0), 0); - // Cleanup - gl.glBindTexture(GL_TEXTURE_2D, 0); - } else { - // Create a color attachment texture to hold our rendered output - var renderBufferIds = GLBuffers.newDirectIntBuffer(1); - gl.glGenRenderbuffers(1, renderBufferIds); - gl.glBindRenderbuffer(GL_RENDERBUFFER, renderBufferIds.get(0)); - gl.glRenderbufferStorage( - GL_RENDERBUFFER, - outputFormat == GL_RED ? 
GL_R8 : GL_ALPHA8, - k_startingWidth, - k_startingHeight); - // Attach the texture to the framebuffer - gl.glFramebufferRenderbuffer( - GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, renderBufferIds.get(0)); - // Cleanup - gl.glBindRenderbuffer(GL_RENDERBUFFER, 0); - } - drawable.getFBObject(GL_FRONT).unbind(gl); - - // Check that the FBO is attached - int fboStatus = gl.glCheckFramebufferStatus(GL_FRAMEBUFFER); - if (fboStatus == GL_FRAMEBUFFER_UNSUPPORTED) { - throw new RuntimeException( - "GL implementation does not support rendering to internal format '" - + String.format("0x%08X", outputFormat == GL_RED ? GL_R8 : GL_ALPHA8) - + "' with type '" - + String.format("0x%08X", GL_UNSIGNED_BYTE) - + "'"); - } else if (fboStatus != GL_FRAMEBUFFER_COMPLETE) { - throw new RuntimeException( - "Framebuffer is not complete; framebuffer status is " - + String.format("0x%08X", fboStatus)); - } + logger.debug( + "Created an OpenGL context with renderer '" + + gl.glGetString(GL_RENDERER) + + "', version '" + + gl.glGetString(GL.GL_VERSION) + + "', and profile '" + + profile + + "'"); + + var fmt = GLBuffers.newDirectIntBuffer(1); + gl.glGetIntegerv(GLES3.GL_IMPLEMENTATION_COLOR_READ_FORMAT, fmt); + var type = GLBuffers.newDirectIntBuffer(1); + gl.glGetIntegerv(GLES3.GL_IMPLEMENTATION_COLOR_READ_TYPE, type); + + // Tell OpenGL that the attribute in the vertex shader named position is bound to index 0 (the + // index for the generic position input) + gl.glBindAttribLocation(programId, 0, "position"); + + // Compile and set up our two shaders with our program + final int vertexId = createShader(gl, programId, k_vertexShader, GL_VERTEX_SHADER); + final int fragmentId = createShader(gl, programId, k_fragmentShader, GL_FRAGMENT_SHADER); + + // Link our program together and check for errors + gl.glLinkProgram(programId); + IntBuffer status = GLBuffers.newDirectIntBuffer(1); + gl.glGetProgramiv(programId, GL_LINK_STATUS, status); + if (status.get(0) == GL_FALSE) { + IntBuffer 
infoLogLength = GLBuffers.newDirectIntBuffer(1); + gl.glGetProgramiv(programId, GL_INFO_LOG_LENGTH, infoLogLength); + + ByteBuffer bufferInfoLog = GLBuffers.newDirectByteBuffer(infoLogLength.get(0)); + gl.glGetProgramInfoLog(programId, infoLogLength.get(0), null, bufferInfoLog); + byte[] bytes = new byte[infoLogLength.get(0)]; + bufferInfoLog.get(bytes); + String strInfoLog = new String(bytes); + + throw new RuntimeException("Linker failure: " + strInfoLog); + } + gl.glValidateProgram(programId); + + // Cleanup shaders that are now compiled in + gl.glDetachShader(programId, vertexId); + gl.glDetachShader(programId, fragmentId); + gl.glDeleteShader(vertexId); + gl.glDeleteShader(fragmentId); + + // Tell OpenGL to use our program + gl.glUseProgram(programId); + + // Set up our texture + texture = new Texture(GL_TEXTURE_2D); + texture.setTexParameteri(gl, GL_TEXTURE_MIN_FILTER, GL_LINEAR); + texture.setTexParameteri(gl, GL_TEXTURE_MAG_FILTER, GL_LINEAR); + texture.setTexParameteri(gl, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); + texture.setTexParameteri(gl, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); + + // Set up a uniform holding our image as a texture + textureUniformId = gl.glGetUniformLocation(programId, "texture0"); + gl.glUniform1i(textureUniformId, 0); + + // Set up a uniform to hold image resolution + resolutionUniformId = gl.glGetUniformLocation(programId, "resolution"); + + // Set up uniforms for the HSV thresholds + lowerUniformId = gl.glGetUniformLocation(programId, "lowerThresh"); + upperUniformId = gl.glGetUniformLocation(programId, "upperThresh"); + + // Set up a quad that covers the entire screen so that our fragment shader draws onto the entire + // screen + gl.glGenBuffers(1, vertexVBOIds); + + FloatBuffer vertexBuffer = GLBuffers.newDirectFloatBuffer(k_vertexPositions); + gl.glBindBuffer(GL_ARRAY_BUFFER, vertexVBOIds.get(0)); + gl.glBufferData( + GL_ARRAY_BUFFER, + (long) vertexBuffer.capacity() * Float.BYTES, + vertexBuffer, + GL_STATIC_DRAW); + + // Set up 
pixel unpack buffer (a PBO to transfer image data to the GPU) + if (pboMode != PBOMode.NONE) { + gl.glGenBuffers(2, unpackPBOIds); + + gl.glBindBuffer(GLES3.GL_PIXEL_UNPACK_BUFFER, unpackPBOIds.get(0)); + gl.glBufferData( + GLES3.GL_PIXEL_UNPACK_BUFFER, + k_startingHeight * k_startingWidth * 3, + null, + GLES3.GL_STREAM_DRAW); + if (pboMode == PBOMode.DOUBLE_BUFFERED) { + gl.glBindBuffer(GLES3.GL_PIXEL_UNPACK_BUFFER, unpackPBOIds.get(1)); + gl.glBufferData( + GLES3.GL_PIXEL_UNPACK_BUFFER, + k_startingHeight * k_startingWidth * 3, + null, + GLES3.GL_STREAM_DRAW); + } + gl.glBindBuffer(GLES3.GL_PIXEL_UNPACK_BUFFER, 0); + } - logger.debug( - "Created an OpenGL context with renderer '" - + gl.glGetString(GL_RENDERER) - + "', version '" - + gl.glGetString(GL.GL_VERSION) - + "', and profile '" - + profile - + "'"); - - var fmt = GLBuffers.newDirectIntBuffer(1); - gl.glGetIntegerv(GLES3.GL_IMPLEMENTATION_COLOR_READ_FORMAT, fmt); - var type = GLBuffers.newDirectIntBuffer(1); - gl.glGetIntegerv(GLES3.GL_IMPLEMENTATION_COLOR_READ_TYPE, type); - - // Tell OpenGL that the attribute in the vertex shader named position is bound to index 0 (the - // index for the generic position input) - gl.glBindAttribLocation(programId, 0, "position"); - - // Compile and set up our two shaders with our program - final int vertexId = createShader(gl, programId, k_vertexShader, GL_VERTEX_SHADER); - final int fragmentId = createShader(gl, programId, k_fragmentShader, GL_FRAGMENT_SHADER); - - // Link our program together and check for errors - gl.glLinkProgram(programId); - IntBuffer status = GLBuffers.newDirectIntBuffer(1); - gl.glGetProgramiv(programId, GL_LINK_STATUS, status); - if (status.get(0) == GL_FALSE) { - IntBuffer infoLogLength = GLBuffers.newDirectIntBuffer(1); - gl.glGetProgramiv(programId, GL_INFO_LOG_LENGTH, infoLogLength); - - ByteBuffer bufferInfoLog = GLBuffers.newDirectByteBuffer(infoLogLength.get(0)); - gl.glGetProgramInfoLog(programId, infoLogLength.get(0), null, 
bufferInfoLog); - byte[] bytes = new byte[infoLogLength.get(0)]; - bufferInfoLog.get(bytes); - String strInfoLog = new String(bytes); - - throw new RuntimeException("Linker failure: " + strInfoLog); - } - gl.glValidateProgram(programId); + // Set up pixel pack buffer (a PBO to transfer the processed image back from the GPU) + if (pboMode != PBOMode.NONE) { + gl.glGenBuffers(2, packPBOIds); + + gl.glBindBuffer(GLES3.GL_PIXEL_PACK_BUFFER, packPBOIds.get(0)); + gl.glBufferData( + GLES3.GL_PIXEL_PACK_BUFFER, + k_startingHeight * k_startingWidth, + null, + GLES3.GL_STREAM_READ); + if (pboMode == PBOMode.DOUBLE_BUFFERED) { + gl.glBindBuffer(GLES3.GL_PIXEL_PACK_BUFFER, packPBOIds.get(1)); + gl.glBufferData( + GLES3.GL_PIXEL_PACK_BUFFER, + k_startingHeight * k_startingWidth, + null, + GLES3.GL_STREAM_READ); + } + gl.glBindBuffer(GLES3.GL_PIXEL_PACK_BUFFER, 0); + } + } + + private static int createShader(GL2ES2 gl, int programId, String glslCode, int shaderType) { + int shaderId = gl.glCreateShader(shaderType); + if (shaderId == 0) throw new RuntimeException("Shader ID is zero"); + + IntBuffer length = GLBuffers.newDirectIntBuffer(new int[] {glslCode.length()}); + gl.glShaderSource(shaderId, 1, new String[] {glslCode}, length); + gl.glCompileShader(shaderId); + + IntBuffer intBuffer = IntBuffer.allocate(1); + gl.glGetShaderiv(shaderId, GL_COMPILE_STATUS, intBuffer); + + if (intBuffer.get(0) != 1) { + gl.glGetShaderiv(shaderId, GL_INFO_LOG_LENGTH, intBuffer); + int size = intBuffer.get(0); + if (size > 0) { + ByteBuffer byteBuffer = ByteBuffer.allocate(size); + gl.glGetShaderInfoLog(shaderId, size, intBuffer, byteBuffer); + System.err.println(new String(byteBuffer.array())); + } + throw new RuntimeException("Couldn't compile shader"); + } - // Cleanup shaders that are now compiled in - gl.glDetachShader(programId, vertexId); - gl.glDetachShader(programId, fragmentId); - gl.glDeleteShader(vertexId); - gl.glDeleteShader(fragmentId); + gl.glAttachShader(programId, shaderId); - 
// Tell OpenGL to use our program - gl.glUseProgram(programId); + return shaderId; + } - // Set up our texture - texture = new Texture(GL_TEXTURE_2D); - texture.setTexParameteri(gl, GL_TEXTURE_MIN_FILTER, GL_LINEAR); - texture.setTexParameteri(gl, GL_TEXTURE_MAG_FILTER, GL_LINEAR); - texture.setTexParameteri(gl, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); - texture.setTexParameteri(gl, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); + @Override + protected Mat process(Mat in) { + if (in.width() != previousWidth && in.height() != previousHeight) { + logger.debug("Resizing OpenGL viewport, byte buffers, and PBOs"); - // Set up a uniform holding our image as a texture - textureUniformId = gl.glGetUniformLocation(programId, "texture0"); - gl.glUniform1i(textureUniformId, 0); + drawable.setSurfaceSize(in.width(), in.height()); + gl.glViewport(0, 0, in.width(), in.height()); - // Set up a uniform to hold image resolution - resolutionUniformId = gl.glGetUniformLocation(programId, "resolution"); + previousWidth = in.width(); + previousHeight = in.height(); - // Set up uniforms for the HSV thresholds - lowerUniformId = gl.glGetUniformLocation(programId, "lowerThresh"); - upperUniformId = gl.glGetUniformLocation(programId, "upperThresh"); + inputBytes = new byte[in.width() * in.height() * 3]; - // Set up a quad that covers the entire screen so that our fragment shader draws onto the entire - // screen - gl.glGenBuffers(1, vertexVBOIds); + outputBytes = new byte[in.width() * in.height()]; + outputMat = new Mat(k_startingHeight, k_startingWidth, CvType.CV_8UC1); - FloatBuffer vertexBuffer = GLBuffers.newDirectFloatBuffer(k_vertexPositions); - gl.glBindBuffer(GL_ARRAY_BUFFER, vertexVBOIds.get(0)); + if (pboMode != PBOMode.NONE) { + gl.glBindBuffer(GLES3.GL_PIXEL_PACK_BUFFER, packPBOIds.get(0)); gl.glBufferData( - GL_ARRAY_BUFFER, - (long) vertexBuffer.capacity() * Float.BYTES, - vertexBuffer, - GL_STATIC_DRAW); - - // Set up pixel unpack buffer (a PBO to transfer image data to the GPU) - if 
(pboMode != PBOMode.NONE) { - gl.glGenBuffers(2, unpackPBOIds); - - gl.glBindBuffer(GLES3.GL_PIXEL_UNPACK_BUFFER, unpackPBOIds.get(0)); - gl.glBufferData( - GLES3.GL_PIXEL_UNPACK_BUFFER, - k_startingHeight * k_startingWidth * 3, - null, - GLES3.GL_STREAM_DRAW); - if (pboMode == PBOMode.DOUBLE_BUFFERED) { - gl.glBindBuffer(GLES3.GL_PIXEL_UNPACK_BUFFER, unpackPBOIds.get(1)); - gl.glBufferData( - GLES3.GL_PIXEL_UNPACK_BUFFER, - k_startingHeight * k_startingWidth * 3, - null, - GLES3.GL_STREAM_DRAW); - } - gl.glBindBuffer(GLES3.GL_PIXEL_UNPACK_BUFFER, 0); - } + GLES3.GL_PIXEL_PACK_BUFFER, + (long) in.width() * in.height(), + null, + GLES3.GL_STREAM_READ); - // Set up pixel pack buffer (a PBO to transfer the processed image back from the GPU) - if (pboMode != PBOMode.NONE) { - gl.glGenBuffers(2, packPBOIds); - - gl.glBindBuffer(GLES3.GL_PIXEL_PACK_BUFFER, packPBOIds.get(0)); - gl.glBufferData( - GLES3.GL_PIXEL_PACK_BUFFER, - k_startingHeight * k_startingWidth, - null, - GLES3.GL_STREAM_READ); - if (pboMode == PBOMode.DOUBLE_BUFFERED) { - gl.glBindBuffer(GLES3.GL_PIXEL_PACK_BUFFER, packPBOIds.get(1)); - gl.glBufferData( - GLES3.GL_PIXEL_PACK_BUFFER, - k_startingHeight * k_startingWidth, - null, - GLES3.GL_STREAM_READ); - } - gl.glBindBuffer(GLES3.GL_PIXEL_PACK_BUFFER, 0); + if (pboMode == PBOMode.DOUBLE_BUFFERED) { + gl.glBindBuffer(GLES3.GL_PIXEL_PACK_BUFFER, packPBOIds.get(1)); + gl.glBufferData( + GLES3.GL_PIXEL_PACK_BUFFER, + (long) in.width() * in.height(), + null, + GLES3.GL_STREAM_READ); } + } } - private static int createShader(GL2ES2 gl, int programId, String glslCode, int shaderType) { - int shaderId = gl.glCreateShader(shaderType); - if (shaderId == 0) throw new RuntimeException("Shader ID is zero"); - - IntBuffer length = GLBuffers.newDirectIntBuffer(new int[] {glslCode.length()}); - gl.glShaderSource(shaderId, 1, new String[] {glslCode}, length); - gl.glCompileShader(shaderId); - - IntBuffer intBuffer = IntBuffer.allocate(1); - gl.glGetShaderiv(shaderId, 
GL_COMPILE_STATUS, intBuffer); - - if (intBuffer.get(0) != 1) { - gl.glGetShaderiv(shaderId, GL_INFO_LOG_LENGTH, intBuffer); - int size = intBuffer.get(0); - if (size > 0) { - ByteBuffer byteBuffer = ByteBuffer.allocate(size); - gl.glGetShaderInfoLog(shaderId, size, intBuffer, byteBuffer); - System.err.println(new String(byteBuffer.array())); - } - throw new RuntimeException("Couldn't compile shader"); - } - - gl.glAttachShader(programId, shaderId); - - return shaderId; + if (pboMode == PBOMode.DOUBLE_BUFFERED) { + unpackIndex = (unpackIndex + 1) % 2; + unpackNextIndex = (unpackIndex + 1) % 2; } - @Override - protected Mat process(Mat in) { - if (in.width() != previousWidth && in.height() != previousHeight) { - logger.debug("Resizing OpenGL viewport, byte buffers, and PBOs"); - - drawable.setSurfaceSize(in.width(), in.height()); - gl.glViewport(0, 0, in.width(), in.height()); - - previousWidth = in.width(); - previousHeight = in.height(); - - inputBytes = new byte[in.width() * in.height() * 3]; - - outputBytes = new byte[in.width() * in.height()]; - outputMat = new Mat(k_startingHeight, k_startingWidth, CvType.CV_8UC1); - - if (pboMode != PBOMode.NONE) { - gl.glBindBuffer(GLES3.GL_PIXEL_PACK_BUFFER, packPBOIds.get(0)); - gl.glBufferData( - GLES3.GL_PIXEL_PACK_BUFFER, - (long) in.width() * in.height(), - null, - GLES3.GL_STREAM_READ); - - if (pboMode == PBOMode.DOUBLE_BUFFERED) { - gl.glBindBuffer(GLES3.GL_PIXEL_PACK_BUFFER, packPBOIds.get(1)); - gl.glBufferData( - GLES3.GL_PIXEL_PACK_BUFFER, - (long) in.width() * in.height(), - null, - GLES3.GL_STREAM_READ); - } - } - } - - if (pboMode == PBOMode.DOUBLE_BUFFERED) { - unpackIndex = (unpackIndex + 1) % 2; - unpackNextIndex = (unpackIndex + 1) % 2; - } - - // Reset the fullscreen quad - gl.glBindBuffer(GL_ARRAY_BUFFER, vertexVBOIds.get(0)); - gl.glEnableVertexAttribArray(k_positionVertexAttribute); - gl.glVertexAttribPointer(0, 2, GL_FLOAT, false, 0, 0); - gl.glBindBuffer(GL_ARRAY_BUFFER, 0); - - // Load and bind our 
image as a 2D texture - gl.glActiveTexture(GL_TEXTURE0); - texture.enable(gl); - texture.bind(gl); - - // Load our image into the texture - in.get(0, 0, inputBytes); - if (pboMode == PBOMode.NONE) { - ByteBuffer buf = ByteBuffer.wrap(inputBytes); - // (We're actually taking in BGR even though this says RGB; it's much easier and faster to - // switch it around in the fragment shader) - texture.updateImage( - gl, - new TextureData( - profile, - GL_RGB8, - in.width(), - in.height(), - 0, - GL_RGB, - GL_UNSIGNED_BYTE, - false, - false, - false, - buf, - null)); - } else { - // Bind the PBO to the texture - gl.glBindBuffer(GLES3.GL_PIXEL_UNPACK_BUFFER, unpackPBOIds.get(unpackIndex)); - - // Copy pixels from the PBO to the texture object - gl.glTexSubImage2D( - GLES3.GL_TEXTURE_2D, - 0, - 0, - 0, - in.width(), - in.height(), - GLES3.GL_RGB8, - GLES3.GL_UNSIGNED_BYTE, - 0); - - // Bind (potentially) another PBO to update the texture source - gl.glBindBuffer(GLES3.GL_PIXEL_UNPACK_BUFFER, unpackPBOIds.get(unpackNextIndex)); - - // This call with a nullptr for the data arg tells OpenGL *not* to wait to be in sync with the - // GPU - // This causes the previous data in the PBO to be discarded - gl.glBufferData( - GLES3.GL_PIXEL_UNPACK_BUFFER, - (long) in.width() * in.height() * 3, - null, - GLES3.GL_STREAM_DRAW); - - // Map the buffer of GPU memory into a place that's accessible by us - var buf = - gl.glMapBufferRange( - GLES3.GL_PIXEL_UNPACK_BUFFER, - 0, - (long) in.width() * in.height() * 3, - GLES3.GL_MAP_WRITE_BIT); - buf.put(inputBytes); - - gl.glUnmapBuffer(GLES3.GL_PIXEL_UNPACK_BUFFER); - gl.glBindBuffer(GLES3.GL_PIXEL_UNPACK_BUFFER, 0); - } + // Reset the fullscreen quad + gl.glBindBuffer(GL_ARRAY_BUFFER, vertexVBOIds.get(0)); + gl.glEnableVertexAttribArray(k_positionVertexAttribute); + gl.glVertexAttribPointer(0, 2, GL_FLOAT, false, 0, 0); + gl.glBindBuffer(GL_ARRAY_BUFFER, 0); + + // Load and bind our image as a 2D texture + gl.glActiveTexture(GL_TEXTURE0); + 
texture.enable(gl); + texture.bind(gl); + + // Load our image into the texture + in.get(0, 0, inputBytes); + if (pboMode == PBOMode.NONE) { + ByteBuffer buf = ByteBuffer.wrap(inputBytes); + // (We're actually taking in BGR even though this says RGB; it's much easier and faster to + // switch it around in the fragment shader) + texture.updateImage( + gl, + new TextureData( + profile, + GL_RGB8, + in.width(), + in.height(), + 0, + GL_RGB, + GL_UNSIGNED_BYTE, + false, + false, + false, + buf, + null)); + } else { + // Bind the PBO to the texture + gl.glBindBuffer(GLES3.GL_PIXEL_UNPACK_BUFFER, unpackPBOIds.get(unpackIndex)); + + // Copy pixels from the PBO to the texture object + gl.glTexSubImage2D( + GLES3.GL_TEXTURE_2D, + 0, + 0, + 0, + in.width(), + in.height(), + GLES3.GL_RGB8, + GLES3.GL_UNSIGNED_BYTE, + 0); + + // Bind (potentially) another PBO to update the texture source + gl.glBindBuffer(GLES3.GL_PIXEL_UNPACK_BUFFER, unpackPBOIds.get(unpackNextIndex)); + + // This call with a nullptr for the data arg tells OpenGL *not* to wait to be in sync with the + // GPU + // This causes the previous data in the PBO to be discarded + gl.glBufferData( + GLES3.GL_PIXEL_UNPACK_BUFFER, + (long) in.width() * in.height() * 3, + null, + GLES3.GL_STREAM_DRAW); + + // Map the buffer of GPU memory into a place that's accessible by us + var buf = + gl.glMapBufferRange( + GLES3.GL_PIXEL_UNPACK_BUFFER, + 0, + (long) in.width() * in.height() * 3, + GLES3.GL_MAP_WRITE_BIT); + buf.put(inputBytes); + + gl.glUnmapBuffer(GLES3.GL_PIXEL_UNPACK_BUFFER); + gl.glBindBuffer(GLES3.GL_PIXEL_UNPACK_BUFFER, 0); + } - // Put values in a uniform holding the image resolution - gl.glUniform2f(resolutionUniformId, in.width(), in.height()); + // Put values in a uniform holding the image resolution + gl.glUniform2f(resolutionUniformId, in.width(), in.height()); - // Put values in threshold uniforms - var lowr = params.getHsvLower().val; - var upr = params.getHsvUpper().val; - gl.glUniform3f(lowerUniformId, 
(float) lowr[0], (float) lowr[1], (float) lowr[2]); - gl.glUniform3f(upperUniformId, (float) upr[0], (float) upr[1], (float) upr[2]); + // Put values in threshold uniforms + var lowr = params.getHsvLower().val; + var upr = params.getHsvUpper().val; + gl.glUniform3f(lowerUniformId, (float) lowr[0], (float) lowr[1], (float) lowr[2]); + gl.glUniform3f(upperUniformId, (float) upr[0], (float) upr[1], (float) upr[2]); - // Draw the fullscreen quad - gl.glDrawArrays(GL_TRIANGLE_STRIP, 0, k_vertexPositions.length); + // Draw the fullscreen quad + gl.glDrawArrays(GL_TRIANGLE_STRIP, 0, k_vertexPositions.length); - // Cleanup - texture.disable(gl); - gl.glDisableVertexAttribArray(k_positionVertexAttribute); - gl.glUseProgram(0); + // Cleanup + texture.disable(gl); + gl.glDisableVertexAttribArray(k_positionVertexAttribute); + gl.glUseProgram(0); - if (pboMode == PBOMode.NONE) { - return saveMatNoPBO(gl, in.width(), in.height()); - } else { - return saveMatPBO((GLES3) gl, in.width(), in.height(), pboMode == PBOMode.DOUBLE_BUFFERED); - } + if (pboMode == PBOMode.NONE) { + return saveMatNoPBO(gl, in.width(), in.height()); + } else { + return saveMatPBO((GLES3) gl, in.width(), in.height(), pboMode == PBOMode.DOUBLE_BUFFERED); } - - private Mat saveMatNoPBO(GL2ES2 gl, int width, int height) { - ByteBuffer buffer = GLBuffers.newDirectByteBuffer(width * height); - // We use GL_RED/GL_ALPHA to get things in a single-channel format - // Note that which pixel format you use is *very* important to performance - // E.g. GL_ALPHA is super slow in this case - gl.glReadPixels(0, 0, width, height, outputFormat, GL_UNSIGNED_BYTE, buffer); - - return new Mat(height, width, CvType.CV_8UC1, buffer); + } + + private Mat saveMatNoPBO(GL2ES2 gl, int width, int height) { + ByteBuffer buffer = GLBuffers.newDirectByteBuffer(width * height); + // We use GL_RED/GL_ALPHA to get things in a single-channel format + // Note that which pixel format you use is *very* important to performance + // E.g. 
GL_ALPHA is super slow in this case + gl.glReadPixels(0, 0, width, height, outputFormat, GL_UNSIGNED_BYTE, buffer); + + return new Mat(height, width, CvType.CV_8UC1, buffer); + } + + private Mat saveMatPBO(GLES3 gl, int width, int height, boolean doubleBuffered) { + if (doubleBuffered) { + packIndex = (packIndex + 1) % 2; + packNextIndex = (packIndex + 1) % 2; } - private Mat saveMatPBO(GLES3 gl, int width, int height, boolean doubleBuffered) { - if (doubleBuffered) { - packIndex = (packIndex + 1) % 2; - packNextIndex = (packIndex + 1) % 2; - } - - // Set the target framebuffer attachment to read - gl.glReadBuffer(GLES3.GL_COLOR_ATTACHMENT0); - - // Read pixels from the framebuffer to the PBO - gl.glBindBuffer(GLES3.GL_PIXEL_PACK_BUFFER, packPBOIds.get(packIndex)); - // We use GL_RED (which is always supported in GLES3) to get things in a single-channel format - // Note that which pixel format you use is *very* important to performance - // E.g. GL_ALPHA is super slow in this case - gl.glReadPixels(0, 0, width, height, GLES3.GL_RED, GLES3.GL_UNSIGNED_BYTE, 0); - - // Map the PBO into the CPU's memory - gl.glBindBuffer(GLES3.GL_PIXEL_PACK_BUFFER, packPBOIds.get(packNextIndex)); - var buf = - gl.glMapBufferRange( - GLES3.GL_PIXEL_PACK_BUFFER, 0, (long) width * height, GLES3.GL_MAP_READ_BIT); - buf.get(outputBytes); - outputMat.put(0, 0, outputBytes); - gl.glUnmapBuffer(GLES3.GL_PIXEL_PACK_BUFFER); - gl.glBindBuffer(GLES3.GL_PIXEL_PACK_BUFFER, 0); - - return outputMat; - } + // Set the target framebuffer attachment to read + gl.glReadBuffer(GLES3.GL_COLOR_ATTACHMENT0); + + // Read pixels from the framebuffer to the PBO + gl.glBindBuffer(GLES3.GL_PIXEL_PACK_BUFFER, packPBOIds.get(packIndex)); + // We use GL_RED (which is always supported in GLES3) to get things in a single-channel format + // Note that which pixel format you use is *very* important to performance + // E.g. 
GL_ALPHA is super slow in this case + gl.glReadPixels(0, 0, width, height, GLES3.GL_RED, GLES3.GL_UNSIGNED_BYTE, 0); + + // Map the PBO into the CPU's memory + gl.glBindBuffer(GLES3.GL_PIXEL_PACK_BUFFER, packPBOIds.get(packNextIndex)); + var buf = + gl.glMapBufferRange( + GLES3.GL_PIXEL_PACK_BUFFER, 0, (long) width * height, GLES3.GL_MAP_READ_BIT); + buf.get(outputBytes); + outputMat.put(0, 0, outputBytes); + gl.glUnmapBuffer(GLES3.GL_PIXEL_PACK_BUFFER); + gl.glBindBuffer(GLES3.GL_PIXEL_PACK_BUFFER, 0); + + return outputMat; + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/GrayscalePipe.java b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/GrayscalePipe.java index 04b1bf774a..cc8404ec70 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/GrayscalePipe.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/GrayscalePipe.java @@ -22,17 +22,17 @@ import org.photonvision.vision.pipe.CVPipe; public class GrayscalePipe extends CVPipe { - @Override - protected Mat process(Mat in) { - var outputMat = new Mat(); - // We can save a copy here by sending the output of cvtcolor to outputMat directly - // rather than copying. Free performance! - Imgproc.cvtColor(in, outputMat, Imgproc.COLOR_BGR2GRAY, 3); + @Override + protected Mat process(Mat in) { + var outputMat = new Mat(); + // We can save a copy here by sending the output of cvtcolor to outputMat directly + // rather than copying. Free performance! 
+ Imgproc.cvtColor(in, outputMat, Imgproc.COLOR_BGR2GRAY, 3); - return outputMat; - } + return outputMat; + } - public static class GrayscaleParams { - public GrayscaleParams() {} - } + public static class GrayscaleParams { + public GrayscaleParams() {} + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/GroupContoursPipe.java b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/GroupContoursPipe.java index 7711467dd2..84b6e819df 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/GroupContoursPipe.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/GroupContoursPipe.java @@ -27,83 +27,83 @@ import org.photonvision.vision.target.PotentialTarget; public class GroupContoursPipe - extends CVPipe, List, GroupContoursPipe.GroupContoursParams> { - private final List m_targets = new ArrayList<>(); + extends CVPipe, List, GroupContoursPipe.GroupContoursParams> { + private final List m_targets = new ArrayList<>(); - @Override - protected List process(List input) { - for (var target : m_targets) { - target.release(); - } + @Override + protected List process(List input) { + for (var target : m_targets) { + target.release(); + } - m_targets.clear(); + m_targets.clear(); - if (params.getGroup() == ContourGroupingMode.Single) { - for (var contour : input) { - m_targets.add(new PotentialTarget(contour)); - } - } - // Check if we have at least 2 targets for 2 or more - // This will only ever return 1 contour! 
- else if (params.getGroup() == ContourGroupingMode.TwoOrMore - && input.size() >= ContourGroupingMode.TwoOrMore.count) { - // Just blob everything together - Contour groupedContour = Contour.combineContourList(input); - if (groupedContour != null) { - m_targets.add(new PotentialTarget(groupedContour, input)); - } - } else { - int groupingCount = params.getGroup().count; + if (params.getGroup() == ContourGroupingMode.Single) { + for (var contour : input) { + m_targets.add(new PotentialTarget(contour)); + } + } + // Check if we have at least 2 targets for 2 or more + // This will only ever return 1 contour! + else if (params.getGroup() == ContourGroupingMode.TwoOrMore + && input.size() >= ContourGroupingMode.TwoOrMore.count) { + // Just blob everything together + Contour groupedContour = Contour.combineContourList(input); + if (groupedContour != null) { + m_targets.add(new PotentialTarget(groupedContour, input)); + } + } else { + int groupingCount = params.getGroup().count; - if (input.size() >= groupingCount) { - input.sort(Contour.SortByMomentsX); - // also why reverse? shouldn't the sort comparator just get reversed? - // TODO: Matt, see this - Collections.reverse(input); + if (input.size() >= groupingCount) { + input.sort(Contour.SortByMomentsX); + // also why reverse? shouldn't the sort comparator just get reversed? 
+ // TODO: Matt, see this + Collections.reverse(input); - for (int i = 0; i < input.size() - 1; i++) { - // make a list of the desired count of contours to group - // (Just make sure we don't get an index out of bounds exception - if (i < 0 || i + groupingCount > input.size()) continue; + for (int i = 0; i < input.size() - 1; i++) { + // make a list of the desired count of contours to group + // (Just make sure we don't get an index out of bounds exception + if (i < 0 || i + groupingCount > input.size()) continue; - // If we're in two or more mode, just try to group everything - List groupingSet = input.subList(i, i + groupingCount); + // If we're in two or more mode, just try to group everything + List groupingSet = input.subList(i, i + groupingCount); - try { - // FYI: This method only takes 2 contours! - Contour groupedContour = - Contour.groupContoursByIntersection( - groupingSet.get(0), groupingSet.get(1), params.getIntersection()); + try { + // FYI: This method only takes 2 contours! 
+ Contour groupedContour = + Contour.groupContoursByIntersection( + groupingSet.get(0), groupingSet.get(1), params.getIntersection()); - if (groupedContour != null) { - m_targets.add(new PotentialTarget(groupedContour, groupingSet)); - i += (groupingCount - 1); - } - } catch (Exception ex) { - ex.printStackTrace(); - } - } + if (groupedContour != null) { + m_targets.add(new PotentialTarget(groupedContour, groupingSet)); + i += (groupingCount - 1); } + } catch (Exception ex) { + ex.printStackTrace(); + } } - return m_targets; + } } + return m_targets; + } - public static class GroupContoursParams { - private final ContourGroupingMode m_group; - private final ContourIntersectionDirection m_intersection; + public static class GroupContoursParams { + private final ContourGroupingMode m_group; + private final ContourIntersectionDirection m_intersection; - public GroupContoursParams( - ContourGroupingMode group, ContourIntersectionDirection intersectionDirection) { - m_group = group; - m_intersection = intersectionDirection; - } + public GroupContoursParams( + ContourGroupingMode group, ContourIntersectionDirection intersectionDirection) { + m_group = group; + m_intersection = intersectionDirection; + } - public ContourGroupingMode getGroup() { - return m_group; - } + public ContourGroupingMode getGroup() { + return m_group; + } - public ContourIntersectionDirection getIntersection() { - return m_intersection; - } + public ContourIntersectionDirection getIntersection() { + return m_intersection; } + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/HSVPipe.java b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/HSVPipe.java index e0ba36ce0e..c9ac07fa54 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/HSVPipe.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/HSVPipe.java @@ -25,70 +25,70 @@ import org.photonvision.vision.pipe.CVPipe; public class HSVPipe extends CVPipe { - @Override - 
protected Mat process(Mat in) { - var outputMat = new Mat(); - // We can save a copy here by sending the output of cvtcolor to outputMat directly - // rather than copying. Free performance! - Imgproc.cvtColor(in, outputMat, Imgproc.COLOR_BGR2HSV, 3); - - if (params.getHueInverted()) { - // In Java code we do this by taking an image thresholded - // from [0, minHue] and ORing it with [maxHue, 180] - - // we want hue from the end of the slider to max hue - Scalar firstLower = params.getHsvLower().clone(); - Scalar firstUpper = params.getHsvUpper().clone(); - firstLower.val[0] = params.getHsvUpper().val[0]; - firstUpper.val[0] = 180; - - var lowerThresholdMat = new Mat(); - Core.inRange(outputMat, firstLower, firstUpper, lowerThresholdMat); - - // We want hue from 0 to the start of the slider - var secondLower = params.getHsvLower().clone(); - var secondUpper = params.getHsvUpper().clone(); - secondLower.val[0] = 0; - secondUpper.val[0] = params.getHsvLower().val[0]; - - // Now that the output mat's been used by the first inRange, it's fine to mutate it - Core.inRange(outputMat, secondLower, secondUpper, outputMat); - - // Now OR the two images together to make a mat that combines the lower and upper bounds - // outputMat holds the second half of the range - Core.bitwise_or(lowerThresholdMat, outputMat, outputMat); - - lowerThresholdMat.release(); - } else { - Core.inRange(outputMat, params.getHsvLower(), params.getHsvUpper(), outputMat); - } - - return outputMat; + @Override + protected Mat process(Mat in) { + var outputMat = new Mat(); + // We can save a copy here by sending the output of cvtcolor to outputMat directly + // rather than copying. Free performance! 
+ Imgproc.cvtColor(in, outputMat, Imgproc.COLOR_BGR2HSV, 3); + + if (params.getHueInverted()) { + // In Java code we do this by taking an image thresholded + // from [0, minHue] and ORing it with [maxHue, 180] + + // we want hue from the end of the slider to max hue + Scalar firstLower = params.getHsvLower().clone(); + Scalar firstUpper = params.getHsvUpper().clone(); + firstLower.val[0] = params.getHsvUpper().val[0]; + firstUpper.val[0] = 180; + + var lowerThresholdMat = new Mat(); + Core.inRange(outputMat, firstLower, firstUpper, lowerThresholdMat); + + // We want hue from 0 to the start of the slider + var secondLower = params.getHsvLower().clone(); + var secondUpper = params.getHsvUpper().clone(); + secondLower.val[0] = 0; + secondUpper.val[0] = params.getHsvLower().val[0]; + + // Now that the output mat's been used by the first inRange, it's fine to mutate it + Core.inRange(outputMat, secondLower, secondUpper, outputMat); + + // Now OR the two images together to make a mat that combines the lower and upper bounds + // outputMat holds the second half of the range + Core.bitwise_or(lowerThresholdMat, outputMat, outputMat); + + lowerThresholdMat.release(); + } else { + Core.inRange(outputMat, params.getHsvLower(), params.getHsvUpper(), outputMat); } - public static class HSVParams { - private final Scalar m_hsvLower; - private final Scalar m_hsvUpper; - private final boolean m_hueInverted; + return outputMat; + } - public HSVParams( - IntegerCouple hue, IntegerCouple saturation, IntegerCouple value, boolean hueInverted) { - m_hsvLower = new Scalar(hue.getFirst(), saturation.getFirst(), value.getFirst()); - m_hsvUpper = new Scalar(hue.getSecond(), saturation.getSecond(), value.getSecond()); + public static class HSVParams { + private final Scalar m_hsvLower; + private final Scalar m_hsvUpper; + private final boolean m_hueInverted; - this.m_hueInverted = hueInverted; - } + public HSVParams( + IntegerCouple hue, IntegerCouple saturation, IntegerCouple value, boolean 
hueInverted) { + m_hsvLower = new Scalar(hue.getFirst(), saturation.getFirst(), value.getFirst()); + m_hsvUpper = new Scalar(hue.getSecond(), saturation.getSecond(), value.getSecond()); - public Scalar getHsvLower() { - return m_hsvLower; - } + this.m_hueInverted = hueInverted; + } + + public Scalar getHsvLower() { + return m_hsvLower; + } - public Scalar getHsvUpper() { - return m_hsvUpper; - } + public Scalar getHsvUpper() { + return m_hsvUpper; + } - public boolean getHueInverted() { - return m_hueInverted; - } + public boolean getHueInverted() { + return m_hueInverted; } + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/MultiTargetPNPPipe.java b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/MultiTargetPNPPipe.java index 5ce9f124cc..17b5e7ae06 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/MultiTargetPNPPipe.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/MultiTargetPNPPipe.java @@ -30,59 +30,59 @@ /** Estimate the camera pose given multiple Apriltag observations */ public class MultiTargetPNPPipe - extends CVPipe< - List, MultiTargetPNPResults, MultiTargetPNPPipe.MultiTargetPNPPipeParams> { - private static final Logger logger = new Logger(MultiTargetPNPPipe.class, LogGroup.VisionModule); + extends CVPipe< + List, MultiTargetPNPResults, MultiTargetPNPPipe.MultiTargetPNPPipeParams> { + private static final Logger logger = new Logger(MultiTargetPNPPipe.class, LogGroup.VisionModule); - private boolean hasWarned = false; + private boolean hasWarned = false; - @Override - protected MultiTargetPNPResults process(List targetList) { - if (params.cameraCoefficients == null - || params.cameraCoefficients.getCameraIntrinsicsMat() == null - || params.cameraCoefficients.getDistCoeffsMat() == null) { - if (!hasWarned) { - logger.warn( - "Cannot perform solvePNP an uncalibrated camera! 
Please calibrate this resolution..."); - hasWarned = true; - } - return new MultiTargetPNPResults(); - } - - return calculateCameraInField(targetList); + @Override + protected MultiTargetPNPResults process(List targetList) { + if (params.cameraCoefficients == null + || params.cameraCoefficients.getCameraIntrinsicsMat() == null + || params.cameraCoefficients.getDistCoeffsMat() == null) { + if (!hasWarned) { + logger.warn( + "Cannot perform solvePNP an uncalibrated camera! Please calibrate this resolution..."); + hasWarned = true; + } + return new MultiTargetPNPResults(); } - private MultiTargetPNPResults calculateCameraInField(List targetList) { - // Find tag IDs that exist in the tag layout - var tagIDsUsed = new ArrayList(); - for (var target : targetList) { - int id = target.getFiducialId(); - if (params.atfl.getTagPose(id).isPresent()) tagIDsUsed.add(id); - } - - // Only run with multiple targets - if (tagIDsUsed.size() < 2) { - return new MultiTargetPNPResults(); - } + return calculateCameraInField(targetList); + } - var estimatedPose = - VisionEstimation.estimateCamPosePNP( - params.cameraCoefficients.cameraIntrinsics.getAsWpilibMat(), - params.cameraCoefficients.distCoeffs.getAsWpilibMat(), - TrackedTarget.simpleFromTrackedTargets(targetList), - params.atfl); + private MultiTargetPNPResults calculateCameraInField(List targetList) { + // Find tag IDs that exist in the tag layout + var tagIDsUsed = new ArrayList(); + for (var target : targetList) { + int id = target.getFiducialId(); + if (params.atfl.getTagPose(id).isPresent()) tagIDsUsed.add(id); + } - return new MultiTargetPNPResults(estimatedPose, tagIDsUsed); + // Only run with multiple targets + if (tagIDsUsed.size() < 2) { + return new MultiTargetPNPResults(); } - public static class MultiTargetPNPPipeParams { - private final CameraCalibrationCoefficients cameraCoefficients; - private final AprilTagFieldLayout atfl; + var estimatedPose = + VisionEstimation.estimateCamPosePNP( + 
params.cameraCoefficients.cameraIntrinsics.getAsWpilibMat(), + params.cameraCoefficients.distCoeffs.getAsWpilibMat(), + TrackedTarget.simpleFromTrackedTargets(targetList), + params.atfl); + + return new MultiTargetPNPResults(estimatedPose, tagIDsUsed); + } + + public static class MultiTargetPNPPipeParams { + private final CameraCalibrationCoefficients cameraCoefficients; + private final AprilTagFieldLayout atfl; - public MultiTargetPNPPipeParams( - CameraCalibrationCoefficients cameraCoefficients, AprilTagFieldLayout atfl) { - this.cameraCoefficients = cameraCoefficients; - this.atfl = atfl; - } + public MultiTargetPNPPipeParams( + CameraCalibrationCoefficients cameraCoefficients, AprilTagFieldLayout atfl) { + this.cameraCoefficients = cameraCoefficients; + this.atfl = atfl; } + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/OutputMatPipe.java b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/OutputMatPipe.java index 944addf95f..56fa001955 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/OutputMatPipe.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/OutputMatPipe.java @@ -22,12 +22,12 @@ import org.photonvision.vision.pipe.MutatingPipe; public class OutputMatPipe extends MutatingPipe { - @Override - protected Void process(Mat in) { - // convert input mat - Imgproc.cvtColor(in, in, Imgproc.COLOR_GRAY2BGR, 3); - return null; - } + @Override + protected Void process(Mat in) { + // convert input mat + Imgproc.cvtColor(in, in, Imgproc.COLOR_GRAY2BGR, 3); + return null; + } - public static class OutputMatParams {} + public static class OutputMatParams {} } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/ResizeImagePipe.java b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/ResizeImagePipe.java index 3d9c030434..f3ad99a61c 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/ResizeImagePipe.java +++ 
b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/ResizeImagePipe.java @@ -25,29 +25,29 @@ /** Pipe that resizes an image to a given resolution */ public class ResizeImagePipe extends MutatingPipe { - /** - * Process this pipe - * - * @param in {@link Mat} to be resized - */ - @Override - protected Void process(Mat in) { - int width = in.cols() / params.getDivisor().value; - int height = in.rows() / params.getDivisor().value; - Imgproc.resize(in, in, new Size(width, height)); + /** + * Process this pipe + * + * @param in {@link Mat} to be resized + */ + @Override + protected Void process(Mat in) { + int width = in.cols() / params.getDivisor().value; + int height = in.rows() / params.getDivisor().value; + Imgproc.resize(in, in, new Size(width, height)); - return null; - } + return null; + } - public static class ResizeImageParams { - private final FrameDivisor divisor; + public static class ResizeImageParams { + private final FrameDivisor divisor; - public ResizeImageParams(FrameDivisor divisor) { - this.divisor = divisor; - } + public ResizeImageParams(FrameDivisor divisor) { + this.divisor = divisor; + } - public FrameDivisor getDivisor() { - return divisor; - } + public FrameDivisor getDivisor() { + return divisor; } + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/RotateImagePipe.java b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/RotateImagePipe.java index 172eda5e01..5cbaa67f5d 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/RotateImagePipe.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/RotateImagePipe.java @@ -24,37 +24,37 @@ /** Pipe that rotates an image to a given orientation */ public class RotateImagePipe extends MutatingPipe { - public RotateImagePipe() { - setParams(RotateImageParams.DEFAULT); + public RotateImagePipe() { + setParams(RotateImageParams.DEFAULT); + } + + public RotateImagePipe(RotateImageParams params) { + setParams(params); + } + + 
/** + * Process this pipe + * + * @param in {@link Mat} to be rotated + * @return Rotated {@link Mat} + */ + @Override + protected Void process(Mat in) { + Core.rotate(in, in, params.rotation.value); + return null; + } + + public static class RotateImageParams { + public static RotateImageParams DEFAULT = new RotateImageParams(ImageRotationMode.DEG_0); + + public ImageRotationMode rotation; + + public RotateImageParams() { + rotation = DEFAULT.rotation; } - public RotateImagePipe(RotateImageParams params) { - setParams(params); - } - - /** - * Process this pipe - * - * @param in {@link Mat} to be rotated - * @return Rotated {@link Mat} - */ - @Override - protected Void process(Mat in) { - Core.rotate(in, in, params.rotation.value); - return null; - } - - public static class RotateImageParams { - public static RotateImageParams DEFAULT = new RotateImageParams(ImageRotationMode.DEG_0); - - public ImageRotationMode rotation; - - public RotateImageParams() { - rotation = DEFAULT.rotation; - } - - public RotateImageParams(ImageRotationMode rotation) { - this.rotation = rotation; - } + public RotateImageParams(ImageRotationMode rotation) { + this.rotation = rotation; } + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/SolvePNPPipe.java b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/SolvePNPPipe.java index 6a887d3a18..92ecd0a29f 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/SolvePNPPipe.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/SolvePNPPipe.java @@ -35,82 +35,82 @@ import org.photonvision.vision.target.TrackedTarget; public class SolvePNPPipe - extends CVPipe, List, SolvePNPPipe.SolvePNPPipeParams> { - private static final Logger logger = new Logger(SolvePNPPipe.class, LogGroup.VisionModule); + extends CVPipe, List, SolvePNPPipe.SolvePNPPipeParams> { + private static final Logger logger = new Logger(SolvePNPPipe.class, LogGroup.VisionModule); - private final MatOfPoint2f 
imagePoints = new MatOfPoint2f(); + private final MatOfPoint2f imagePoints = new MatOfPoint2f(); - private boolean hasWarned = false; + private boolean hasWarned = false; - @Override - protected List process(List targetList) { - if (params.cameraCoefficients == null - || params.cameraCoefficients.getCameraIntrinsicsMat() == null - || params.cameraCoefficients.getDistCoeffsMat() == null) { - if (!hasWarned) { - logger.warn( - "Cannot perform solvePNP an uncalibrated camera! Please calibrate this resolution..."); - hasWarned = true; - } - return targetList; - } + @Override + protected List process(List targetList) { + if (params.cameraCoefficients == null + || params.cameraCoefficients.getCameraIntrinsicsMat() == null + || params.cameraCoefficients.getDistCoeffsMat() == null) { + if (!hasWarned) { + logger.warn( + "Cannot perform solvePNP an uncalibrated camera! Please calibrate this resolution..."); + hasWarned = true; + } + return targetList; + } - for (var target : targetList) { - calculateTargetPose(target); - } - return targetList; + for (var target : targetList) { + calculateTargetPose(target); } + return targetList; + } - private void calculateTargetPose(TrackedTarget target) { - var corners = target.getTargetCorners(); - if (corners == null - || corners.isEmpty() - || params.cameraCoefficients == null - || params.cameraCoefficients.getCameraIntrinsicsMat() == null - || params.cameraCoefficients.getDistCoeffsMat() == null) { - return; - } - this.imagePoints.fromList(corners); + private void calculateTargetPose(TrackedTarget target) { + var corners = target.getTargetCorners(); + if (corners == null + || corners.isEmpty() + || params.cameraCoefficients == null + || params.cameraCoefficients.getCameraIntrinsicsMat() == null + || params.cameraCoefficients.getDistCoeffsMat() == null) { + return; + } + this.imagePoints.fromList(corners); - var rVec = new Mat(); - var tVec = new Mat(); - try { - Calib3d.solvePnP( - params.targetModel.getRealWorldTargetCoordinates(), 
- imagePoints, - params.cameraCoefficients.getCameraIntrinsicsMat(), - params.cameraCoefficients.getDistCoeffsMat(), - rVec, - tVec); - } catch (Exception e) { - logger.error("Exception when attempting solvePnP!", e); - return; - } + var rVec = new Mat(); + var tVec = new Mat(); + try { + Calib3d.solvePnP( + params.targetModel.getRealWorldTargetCoordinates(), + imagePoints, + params.cameraCoefficients.getCameraIntrinsicsMat(), + params.cameraCoefficients.getDistCoeffsMat(), + rVec, + tVec); + } catch (Exception e) { + logger.error("Exception when attempting solvePnP!", e); + return; + } - target.setCameraRelativeTvec(tVec); - target.setCameraRelativeRvec(rVec); + target.setCameraRelativeTvec(tVec); + target.setCameraRelativeRvec(rVec); - Translation3d translation = - new Translation3d(tVec.get(0, 0)[0], tVec.get(1, 0)[0], tVec.get(2, 0)[0]); - Rotation3d rotation = - new Rotation3d( - VecBuilder.fill(rVec.get(0, 0)[0], rVec.get(1, 0)[0], rVec.get(2, 0)[0]), - Core.norm(rVec)); + Translation3d translation = + new Translation3d(tVec.get(0, 0)[0], tVec.get(1, 0)[0], tVec.get(2, 0)[0]); + Rotation3d rotation = + new Rotation3d( + VecBuilder.fill(rVec.get(0, 0)[0], rVec.get(1, 0)[0], rVec.get(2, 0)[0]), + Core.norm(rVec)); - Transform3d camToTarget = - MathUtils.convertOpenCVtoPhotonTransform(new Transform3d(translation, rotation)); - target.setBestCameraToTarget3d(camToTarget); - target.setAltCameraToTarget3d(new Transform3d()); - } + Transform3d camToTarget = + MathUtils.convertOpenCVtoPhotonTransform(new Transform3d(translation, rotation)); + target.setBestCameraToTarget3d(camToTarget); + target.setAltCameraToTarget3d(new Transform3d()); + } - public static class SolvePNPPipeParams { - private final CameraCalibrationCoefficients cameraCoefficients; - private final TargetModel targetModel; + public static class SolvePNPPipeParams { + private final CameraCalibrationCoefficients cameraCoefficients; + private final TargetModel targetModel; - public SolvePNPPipeParams( - 
CameraCalibrationCoefficients cameraCoefficients, TargetModel targetModel) { - this.cameraCoefficients = cameraCoefficients; - this.targetModel = targetModel; - } + public SolvePNPPipeParams( + CameraCalibrationCoefficients cameraCoefficients, TargetModel targetModel) { + this.cameraCoefficients = cameraCoefficients; + this.targetModel = targetModel; } + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/SortContoursPipe.java b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/SortContoursPipe.java index 7bdc9bf0d6..7c9eef2001 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/SortContoursPipe.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/SortContoursPipe.java @@ -26,58 +26,58 @@ import org.photonvision.vision.target.PotentialTarget; public class SortContoursPipe - extends CVPipe< - List, List, SortContoursPipe.SortContoursParams> { - private final List m_sortedContours = new ArrayList<>(); + extends CVPipe< + List, List, SortContoursPipe.SortContoursParams> { + private final List m_sortedContours = new ArrayList<>(); - @Override - protected List process(List in) { - for (var oldTarget : m_sortedContours) { - oldTarget.release(); - } - m_sortedContours.clear(); - - if (!in.isEmpty()) { - m_sortedContours.addAll(in); - if (params.getSortMode() != ContourSortMode.Centermost) { - m_sortedContours.sort(params.getSortMode().getComparator()); - } else { - m_sortedContours.sort(Comparator.comparingDouble(this::calcSquareCenterDistance)); - } - } - - return new ArrayList<>( - m_sortedContours.subList(0, Math.min(in.size(), params.getMaxTargets()))); + @Override + protected List process(List in) { + for (var oldTarget : m_sortedContours) { + oldTarget.release(); } + m_sortedContours.clear(); - private double calcSquareCenterDistance(PotentialTarget rect) { - return Math.sqrt( - Math.pow(params.getCamProperties().centerX - rect.getMinAreaRect().center.x, 2) - + 
Math.pow(params.getCamProperties().centerY - rect.getMinAreaRect().center.y, 2)); + if (!in.isEmpty()) { + m_sortedContours.addAll(in); + if (params.getSortMode() != ContourSortMode.Centermost) { + m_sortedContours.sort(params.getSortMode().getComparator()); + } else { + m_sortedContours.sort(Comparator.comparingDouble(this::calcSquareCenterDistance)); + } } - public static class SortContoursParams { - private final ContourSortMode m_sortMode; - private final int m_maxTargets; - private final FrameStaticProperties m_frameStaticProperties; + return new ArrayList<>( + m_sortedContours.subList(0, Math.min(in.size(), params.getMaxTargets()))); + } - public SortContoursParams( - ContourSortMode sortMode, int maxTargets, FrameStaticProperties camProperties) { - m_sortMode = sortMode; - m_maxTargets = maxTargets; - m_frameStaticProperties = camProperties; - } + private double calcSquareCenterDistance(PotentialTarget rect) { + return Math.sqrt( + Math.pow(params.getCamProperties().centerX - rect.getMinAreaRect().center.x, 2) + + Math.pow(params.getCamProperties().centerY - rect.getMinAreaRect().center.y, 2)); + } - public ContourSortMode getSortMode() { - return m_sortMode; - } + public static class SortContoursParams { + private final ContourSortMode m_sortMode; + private final int m_maxTargets; + private final FrameStaticProperties m_frameStaticProperties; + + public SortContoursParams( + ContourSortMode sortMode, int maxTargets, FrameStaticProperties camProperties) { + m_sortMode = sortMode; + m_maxTargets = maxTargets; + m_frameStaticProperties = camProperties; + } - public FrameStaticProperties getCamProperties() { - return m_frameStaticProperties; - } + public ContourSortMode getSortMode() { + return m_sortMode; + } + + public FrameStaticProperties getCamProperties() { + return m_frameStaticProperties; + } - public int getMaxTargets() { - return m_maxTargets; - } + public int getMaxTargets() { + return m_maxTargets; } + } } diff --git 
a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/SpeckleRejectPipe.java b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/SpeckleRejectPipe.java index 4f4c5ce547..a7aec39693 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/SpeckleRejectPipe.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/SpeckleRejectPipe.java @@ -23,45 +23,45 @@ import org.photonvision.vision.pipe.CVPipe; public class SpeckleRejectPipe - extends CVPipe, List, SpeckleRejectPipe.SpeckleRejectParams> { - private final List m_despeckledContours = new ArrayList<>(); + extends CVPipe, List, SpeckleRejectPipe.SpeckleRejectParams> { + private final List m_despeckledContours = new ArrayList<>(); - @Override - protected List process(List in) { - for (var c : m_despeckledContours) { - c.mat.release(); - } - m_despeckledContours.clear(); + @Override + protected List process(List in) { + for (var c : m_despeckledContours) { + c.mat.release(); + } + m_despeckledContours.clear(); - if (!in.isEmpty()) { - double averageArea = 0.0; - for (Contour c : in) { - averageArea += c.getArea(); - } - averageArea /= in.size(); + if (!in.isEmpty()) { + double averageArea = 0.0; + for (Contour c : in) { + averageArea += c.getArea(); + } + averageArea /= in.size(); - double minAllowedArea = params.getMinPercentOfAvg() / 100.0 * averageArea; - for (Contour c : in) { - if (c.getArea() >= minAllowedArea) { - m_despeckledContours.add(c); - } else { - c.release(); - } - } + double minAllowedArea = params.getMinPercentOfAvg() / 100.0 * averageArea; + for (Contour c : in) { + if (c.getArea() >= minAllowedArea) { + m_despeckledContours.add(c); + } else { + c.release(); } - - return m_despeckledContours; + } } - public static class SpeckleRejectParams { - private final double m_minPercentOfAvg; + return m_despeckledContours; + } - public SpeckleRejectParams(double minPercentOfAvg) { - m_minPercentOfAvg = minPercentOfAvg; - } + public static class 
SpeckleRejectParams { + private final double m_minPercentOfAvg; - public double getMinPercentOfAvg() { - return m_minPercentOfAvg; - } + public SpeckleRejectParams(double minPercentOfAvg) { + m_minPercentOfAvg = minPercentOfAvg; + } + + public double getMinPercentOfAvg() { + return m_minPercentOfAvg; } + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipeline/AdvancedPipelineSettings.java b/photon-core/src/main/java/org/photonvision/vision/pipeline/AdvancedPipelineSettings.java index 411642c89b..b6c080f6d5 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipeline/AdvancedPipelineSettings.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipeline/AdvancedPipelineSettings.java @@ -31,133 +31,133 @@ import org.photonvision.vision.target.TargetOrientation; public class AdvancedPipelineSettings extends CVPipelineSettings { - public AdvancedPipelineSettings() { - ledMode = true; - } - - public IntegerCouple hsvHue = new IntegerCouple(50, 180); - public IntegerCouple hsvSaturation = new IntegerCouple(50, 255); - public IntegerCouple hsvValue = new IntegerCouple(50, 255); - public boolean hueInverted = false; - - public boolean outputShouldDraw = true; - public boolean outputShowMultipleTargets = false; - - public DoubleCouple contourArea = new DoubleCouple(0.0, 100.0); - public DoubleCouple contourRatio = new DoubleCouple(0.0, 20.0); - public DoubleCouple contourFullness = new DoubleCouple(0.0, 100.0); - public int contourSpecklePercentage = 5; - - // the order in which to sort contours to find the most desirable - public ContourSortMode contourSortMode = ContourSortMode.Largest; - - // the edge (or not) of the target to consider the center point (Top, Bottom, Left, Right, - // Center) - public TargetOffsetPointEdge contourTargetOffsetPointEdge = TargetOffsetPointEdge.Center; - - // orientation of the target in terms of aspect ratio - public TargetOrientation contourTargetOrientation = TargetOrientation.Landscape; - - // the 
mode in which to offset target center point based on the camera being offset on the - // robot - // (None, Single Point, Dual Point) - public RobotOffsetPointMode offsetRobotOffsetMode = RobotOffsetPointMode.None; - - // the point set by the user in Single Point Offset mode (maybe double too? idr) - public Point offsetSinglePoint = new Point(); - - // the two values that define the line of the Dual Point Offset calibration (think y=mx+b) - public Point offsetDualPointA = new Point(); - public double offsetDualPointAArea = 0; - public Point offsetDualPointB = new Point(); - public double offsetDualPointBArea = 0; - - // how many contours to attempt to group (Single, Dual) - public ContourGroupingMode contourGroupingMode = ContourGroupingMode.Single; - - // the direction in which contours must intersect to be considered intersecting - public ContourIntersectionDirection contourIntersection = ContourIntersectionDirection.Up; - - // 3d settings - public boolean solvePNPEnabled = false; - public TargetModel targetModel = TargetModel.k2020HighGoalOuter; - - // Corner detection settings - public CornerDetectionPipe.DetectionStrategy cornerDetectionStrategy = - CornerDetectionPipe.DetectionStrategy.APPROX_POLY_DP_AND_EXTREME_CORNERS; - public boolean cornerDetectionUseConvexHulls = true; - public boolean cornerDetectionExactSideCount = false; - public int cornerDetectionSideCount = 4; - public double cornerDetectionAccuracyPercentage = 10; - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (!(o instanceof AdvancedPipelineSettings)) return false; - if (!super.equals(o)) return false; - AdvancedPipelineSettings that = (AdvancedPipelineSettings) o; - return outputShouldDraw == that.outputShouldDraw - && outputShowMultipleTargets == that.outputShowMultipleTargets - && contourSpecklePercentage == that.contourSpecklePercentage - && Double.compare(that.offsetDualPointAArea, offsetDualPointAArea) == 0 - && Double.compare(that.offsetDualPointBArea, 
offsetDualPointBArea) == 0 - && solvePNPEnabled == that.solvePNPEnabled - && cornerDetectionUseConvexHulls == that.cornerDetectionUseConvexHulls - && cornerDetectionExactSideCount == that.cornerDetectionExactSideCount - && cornerDetectionSideCount == that.cornerDetectionSideCount - && Double.compare(that.cornerDetectionAccuracyPercentage, cornerDetectionAccuracyPercentage) - == 0 - && Objects.equals(hsvHue, that.hsvHue) - && Objects.equals(hsvSaturation, that.hsvSaturation) - && Objects.equals(hsvValue, that.hsvValue) - && Objects.equals(hueInverted, that.hueInverted) - && Objects.equals(contourArea, that.contourArea) - && Objects.equals(contourRatio, that.contourRatio) - && Objects.equals(contourFullness, that.contourFullness) - && contourSortMode == that.contourSortMode - && contourTargetOffsetPointEdge == that.contourTargetOffsetPointEdge - && contourTargetOrientation == that.contourTargetOrientation - && offsetRobotOffsetMode == that.offsetRobotOffsetMode - && Objects.equals(offsetSinglePoint, that.offsetSinglePoint) - && Objects.equals(offsetDualPointA, that.offsetDualPointA) - && Objects.equals(offsetDualPointB, that.offsetDualPointB) - && contourGroupingMode == that.contourGroupingMode - && contourIntersection == that.contourIntersection - && Objects.equals(targetModel, that.targetModel) - && cornerDetectionStrategy == that.cornerDetectionStrategy; - } - - @Override - public int hashCode() { - return Objects.hash( - super.hashCode(), - hsvHue, - hsvSaturation, - hsvValue, - hueInverted, - outputShouldDraw, - outputShowMultipleTargets, - contourArea, - contourRatio, - contourFullness, - contourSpecklePercentage, - contourSortMode, - contourTargetOffsetPointEdge, - contourTargetOrientation, - offsetRobotOffsetMode, - offsetSinglePoint, - offsetDualPointA, - offsetDualPointAArea, - offsetDualPointB, - offsetDualPointBArea, - contourGroupingMode, - contourIntersection, - solvePNPEnabled, - targetModel, - cornerDetectionStrategy, - cornerDetectionUseConvexHulls, 
- cornerDetectionExactSideCount, - cornerDetectionSideCount, - cornerDetectionAccuracyPercentage); - } + public AdvancedPipelineSettings() { + ledMode = true; + } + + public IntegerCouple hsvHue = new IntegerCouple(50, 180); + public IntegerCouple hsvSaturation = new IntegerCouple(50, 255); + public IntegerCouple hsvValue = new IntegerCouple(50, 255); + public boolean hueInverted = false; + + public boolean outputShouldDraw = true; + public boolean outputShowMultipleTargets = false; + + public DoubleCouple contourArea = new DoubleCouple(0.0, 100.0); + public DoubleCouple contourRatio = new DoubleCouple(0.0, 20.0); + public DoubleCouple contourFullness = new DoubleCouple(0.0, 100.0); + public int contourSpecklePercentage = 5; + + // the order in which to sort contours to find the most desirable + public ContourSortMode contourSortMode = ContourSortMode.Largest; + + // the edge (or not) of the target to consider the center point (Top, Bottom, Left, Right, + // Center) + public TargetOffsetPointEdge contourTargetOffsetPointEdge = TargetOffsetPointEdge.Center; + + // orientation of the target in terms of aspect ratio + public TargetOrientation contourTargetOrientation = TargetOrientation.Landscape; + + // the mode in which to offset target center point based on the camera being offset on the + // robot + // (None, Single Point, Dual Point) + public RobotOffsetPointMode offsetRobotOffsetMode = RobotOffsetPointMode.None; + + // the point set by the user in Single Point Offset mode (maybe double too? 
idr) + public Point offsetSinglePoint = new Point(); + + // the two values that define the line of the Dual Point Offset calibration (think y=mx+b) + public Point offsetDualPointA = new Point(); + public double offsetDualPointAArea = 0; + public Point offsetDualPointB = new Point(); + public double offsetDualPointBArea = 0; + + // how many contours to attempt to group (Single, Dual) + public ContourGroupingMode contourGroupingMode = ContourGroupingMode.Single; + + // the direction in which contours must intersect to be considered intersecting + public ContourIntersectionDirection contourIntersection = ContourIntersectionDirection.Up; + + // 3d settings + public boolean solvePNPEnabled = false; + public TargetModel targetModel = TargetModel.k2020HighGoalOuter; + + // Corner detection settings + public CornerDetectionPipe.DetectionStrategy cornerDetectionStrategy = + CornerDetectionPipe.DetectionStrategy.APPROX_POLY_DP_AND_EXTREME_CORNERS; + public boolean cornerDetectionUseConvexHulls = true; + public boolean cornerDetectionExactSideCount = false; + public int cornerDetectionSideCount = 4; + public double cornerDetectionAccuracyPercentage = 10; + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (!(o instanceof AdvancedPipelineSettings)) return false; + if (!super.equals(o)) return false; + AdvancedPipelineSettings that = (AdvancedPipelineSettings) o; + return outputShouldDraw == that.outputShouldDraw + && outputShowMultipleTargets == that.outputShowMultipleTargets + && contourSpecklePercentage == that.contourSpecklePercentage + && Double.compare(that.offsetDualPointAArea, offsetDualPointAArea) == 0 + && Double.compare(that.offsetDualPointBArea, offsetDualPointBArea) == 0 + && solvePNPEnabled == that.solvePNPEnabled + && cornerDetectionUseConvexHulls == that.cornerDetectionUseConvexHulls + && cornerDetectionExactSideCount == that.cornerDetectionExactSideCount + && cornerDetectionSideCount == that.cornerDetectionSideCount + && 
Double.compare(that.cornerDetectionAccuracyPercentage, cornerDetectionAccuracyPercentage) + == 0 + && Objects.equals(hsvHue, that.hsvHue) + && Objects.equals(hsvSaturation, that.hsvSaturation) + && Objects.equals(hsvValue, that.hsvValue) + && Objects.equals(hueInverted, that.hueInverted) + && Objects.equals(contourArea, that.contourArea) + && Objects.equals(contourRatio, that.contourRatio) + && Objects.equals(contourFullness, that.contourFullness) + && contourSortMode == that.contourSortMode + && contourTargetOffsetPointEdge == that.contourTargetOffsetPointEdge + && contourTargetOrientation == that.contourTargetOrientation + && offsetRobotOffsetMode == that.offsetRobotOffsetMode + && Objects.equals(offsetSinglePoint, that.offsetSinglePoint) + && Objects.equals(offsetDualPointA, that.offsetDualPointA) + && Objects.equals(offsetDualPointB, that.offsetDualPointB) + && contourGroupingMode == that.contourGroupingMode + && contourIntersection == that.contourIntersection + && Objects.equals(targetModel, that.targetModel) + && cornerDetectionStrategy == that.cornerDetectionStrategy; + } + + @Override + public int hashCode() { + return Objects.hash( + super.hashCode(), + hsvHue, + hsvSaturation, + hsvValue, + hueInverted, + outputShouldDraw, + outputShowMultipleTargets, + contourArea, + contourRatio, + contourFullness, + contourSpecklePercentage, + contourSortMode, + contourTargetOffsetPointEdge, + contourTargetOrientation, + offsetRobotOffsetMode, + offsetSinglePoint, + offsetDualPointA, + offsetDualPointAArea, + offsetDualPointB, + offsetDualPointBArea, + contourGroupingMode, + contourIntersection, + solvePNPEnabled, + targetModel, + cornerDetectionStrategy, + cornerDetectionUseConvexHulls, + cornerDetectionExactSideCount, + cornerDetectionSideCount, + cornerDetectionAccuracyPercentage); + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipeline/AprilTagPipeline.java b/photon-core/src/main/java/org/photonvision/vision/pipeline/AprilTagPipeline.java 
index d1837ff770..0bc91b5adb 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipeline/AprilTagPipeline.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipeline/AprilTagPipeline.java @@ -46,197 +46,197 @@ import org.photonvision.vision.target.TrackedTarget.TargetCalculationParameters; public class AprilTagPipeline extends CVPipeline { - private final AprilTagDetectionPipe aprilTagDetectionPipe = new AprilTagDetectionPipe(); - private final AprilTagPoseEstimatorPipe singleTagPoseEstimatorPipe = - new AprilTagPoseEstimatorPipe(); - private final MultiTargetPNPPipe multiTagPNPPipe = new MultiTargetPNPPipe(); - private final CalculateFPSPipe calculateFPSPipe = new CalculateFPSPipe(); + private final AprilTagDetectionPipe aprilTagDetectionPipe = new AprilTagDetectionPipe(); + private final AprilTagPoseEstimatorPipe singleTagPoseEstimatorPipe = + new AprilTagPoseEstimatorPipe(); + private final MultiTargetPNPPipe multiTagPNPPipe = new MultiTargetPNPPipe(); + private final CalculateFPSPipe calculateFPSPipe = new CalculateFPSPipe(); + + private static final FrameThresholdType PROCESSING_TYPE = FrameThresholdType.GREYSCALE; + + public AprilTagPipeline() { + super(PROCESSING_TYPE); + settings = new AprilTagPipelineSettings(); + } + + public AprilTagPipeline(AprilTagPipelineSettings settings) { + super(PROCESSING_TYPE); + this.settings = settings; + } + + @Override + protected void setPipeParamsImpl() { + // Sanitize thread count - not supported to have fewer than 1 threads + settings.threads = Math.max(1, settings.threads); + + // if (cameraQuirks.hasQuirk(CameraQuirk.PiCam) && LibCameraJNI.isSupported()) { + // // TODO: Picam grayscale + // LibCameraJNI.setRotation(settings.inputImageRotationMode.value); + // // LibCameraJNI.setShouldCopyColor(true); // need the color image to grayscale + // } + + // TODO (HACK): tag width is Fun because it really belongs in the "target model" + // We need the tag width for the JNI to figure out target pose, but 
we need a + // target model for the draw 3d targets pipeline to work... + + // for now, hard code tag width based on enum value + double tagWidth = Units.inchesToMeters(3 * 2); // for 6in 16h5 tag. + + // AprilTagDetectorParams aprilTagDetectionParams = + // new AprilTagDetectorParams( + // settings.tagFamily, + // settings.decimate, + // settings.blur, + // settings.threads, + // settings.debug, + // settings.refineEdges); + + var config = new AprilTagDetector.Config(); + config.numThreads = settings.threads; + config.refineEdges = settings.refineEdges; + config.quadSigma = (float) settings.blur; + config.quadDecimate = settings.decimate; + aprilTagDetectionPipe.setParams(new AprilTagDetectionPipeParams(settings.tagFamily, config)); + + if (frameStaticProperties.cameraCalibration != null) { + var cameraMatrix = frameStaticProperties.cameraCalibration.getCameraIntrinsicsMat(); + if (cameraMatrix != null && cameraMatrix.rows() > 0) { + var cx = cameraMatrix.get(0, 2)[0]; + var cy = cameraMatrix.get(1, 2)[0]; + var fx = cameraMatrix.get(0, 0)[0]; + var fy = cameraMatrix.get(1, 1)[0]; + + singleTagPoseEstimatorPipe.setParams( + new AprilTagPoseEstimatorPipeParams( + new Config(tagWidth, fx, fy, cx, cy), + frameStaticProperties.cameraCalibration, + settings.numIterations)); + + // TODO global state ew + var atfl = ConfigManager.getInstance().getConfig().getApriltagFieldLayout(); + multiTagPNPPipe.setParams( + new MultiTargetPNPPipeParams(frameStaticProperties.cameraCalibration, atfl)); + } + } + } - private static final FrameThresholdType PROCESSING_TYPE = FrameThresholdType.GREYSCALE; + @Override + protected CVPipelineResult process(Frame frame, AprilTagPipelineSettings settings) { + long sumPipeNanosElapsed = 0L; - public AprilTagPipeline() { - super(PROCESSING_TYPE); - settings = new AprilTagPipelineSettings(); + if (frame.type != FrameThresholdType.GREYSCALE) { + // TODO so all cameras should give us GREYSCALE -- how should we handle if not? 
+ // Right now, we just return nothing + return new CVPipelineResult(0, 0, List.of(), frame); } - public AprilTagPipeline(AprilTagPipelineSettings settings) { - super(PROCESSING_TYPE); - this.settings = settings; + CVPipeResult> tagDetectionPipeResult; + tagDetectionPipeResult = aprilTagDetectionPipe.run(frame.processedImage); + sumPipeNanosElapsed += tagDetectionPipeResult.nanosElapsed; + + List detections = tagDetectionPipeResult.output; + List usedDetections = new ArrayList<>(); + List targetList = new ArrayList<>(); + + // Filter out detections based on pipeline settings + for (AprilTagDetection detection : detections) { + // TODO this should be in a pipe, not in the top level here (Matt) + if (detection.getDecisionMargin() < settings.decisionMargin) continue; + if (detection.getHamming() > settings.hammingDist) continue; + + usedDetections.add(detection); + + // Populate target list for multitag + // (TODO: Address circular dependencies. Multitag only requires corners and IDs, this should + // not be necessary.) + TrackedTarget target = + new TrackedTarget( + detection, + null, + new TargetCalculationParameters( + false, null, null, null, null, frameStaticProperties)); + + targetList.add(target); } - @Override - protected void setPipeParamsImpl() { - // Sanitize thread count - not supported to have fewer than 1 threads - settings.threads = Math.max(1, settings.threads); - - // if (cameraQuirks.hasQuirk(CameraQuirk.PiCam) && LibCameraJNI.isSupported()) { - // // TODO: Picam grayscale - // LibCameraJNI.setRotation(settings.inputImageRotationMode.value); - // // LibCameraJNI.setShouldCopyColor(true); // need the color image to grayscale - // } - - // TODO (HACK): tag width is Fun because it really belongs in the "target model" - // We need the tag width for the JNI to figure out target pose, but we need a - // target model for the draw 3d targets pipeline to work... 
- - // for now, hard code tag width based on enum value - double tagWidth = Units.inchesToMeters(3 * 2); // for 6in 16h5 tag. - - // AprilTagDetectorParams aprilTagDetectionParams = - // new AprilTagDetectorParams( - // settings.tagFamily, - // settings.decimate, - // settings.blur, - // settings.threads, - // settings.debug, - // settings.refineEdges); - - var config = new AprilTagDetector.Config(); - config.numThreads = settings.threads; - config.refineEdges = settings.refineEdges; - config.quadSigma = (float) settings.blur; - config.quadDecimate = settings.decimate; - aprilTagDetectionPipe.setParams(new AprilTagDetectionPipeParams(settings.tagFamily, config)); - - if (frameStaticProperties.cameraCalibration != null) { - var cameraMatrix = frameStaticProperties.cameraCalibration.getCameraIntrinsicsMat(); - if (cameraMatrix != null && cameraMatrix.rows() > 0) { - var cx = cameraMatrix.get(0, 2)[0]; - var cy = cameraMatrix.get(1, 2)[0]; - var fx = cameraMatrix.get(0, 0)[0]; - var fy = cameraMatrix.get(1, 1)[0]; - - singleTagPoseEstimatorPipe.setParams( - new AprilTagPoseEstimatorPipeParams( - new Config(tagWidth, fx, fy, cx, cy), - frameStaticProperties.cameraCalibration, - settings.numIterations)); - - // TODO global state ew - var atfl = ConfigManager.getInstance().getConfig().getApriltagFieldLayout(); - multiTagPNPPipe.setParams( - new MultiTargetPNPPipeParams(frameStaticProperties.cameraCalibration, atfl)); - } - } + // Do multi-tag pose estimation + MultiTargetPNPResults multiTagResult = new MultiTargetPNPResults(); + if (settings.solvePNPEnabled && settings.doMultiTarget) { + var multiTagOutput = multiTagPNPPipe.run(targetList); + sumPipeNanosElapsed += multiTagOutput.nanosElapsed; + multiTagResult = multiTagOutput.output; } - @Override - protected CVPipelineResult process(Frame frame, AprilTagPipelineSettings settings) { - long sumPipeNanosElapsed = 0L; - - if (frame.type != FrameThresholdType.GREYSCALE) { - // TODO so all cameras should give us GREYSCALE -- 
how should we handle if not? - // Right now, we just return nothing - return new CVPipelineResult(0, 0, List.of(), frame); - } - - CVPipeResult> tagDetectionPipeResult; - tagDetectionPipeResult = aprilTagDetectionPipe.run(frame.processedImage); - sumPipeNanosElapsed += tagDetectionPipeResult.nanosElapsed; - - List detections = tagDetectionPipeResult.output; - List usedDetections = new ArrayList<>(); - List targetList = new ArrayList<>(); - - // Filter out detections based on pipeline settings - for (AprilTagDetection detection : detections) { - // TODO this should be in a pipe, not in the top level here (Matt) - if (detection.getDecisionMargin() < settings.decisionMargin) continue; - if (detection.getHamming() > settings.hammingDist) continue; - - usedDetections.add(detection); - - // Populate target list for multitag - // (TODO: Address circular dependencies. Multitag only requires corners and IDs, this should - // not be necessary.) - TrackedTarget target = - new TrackedTarget( - detection, - null, - new TargetCalculationParameters( - false, null, null, null, null, frameStaticProperties)); - - targetList.add(target); + // Do single-tag pose estimation + if (settings.solvePNPEnabled) { + // Clear target list that was used for multitag so we can add target transforms + targetList.clear(); + // TODO global state again ew + var atfl = ConfigManager.getInstance().getConfig().getApriltagFieldLayout(); + + for (AprilTagDetection detection : usedDetections) { + AprilTagPoseEstimate tagPoseEstimate = null; + // Do single-tag estimation when "always enabled" or if a tag was not used for multitag + if (settings.doSingleTargetAlways + || !multiTagResult.fiducialIDsUsed.contains(Integer.valueOf(detection.getId()))) { + var poseResult = singleTagPoseEstimatorPipe.run(detection); + sumPipeNanosElapsed += poseResult.nanosElapsed; + tagPoseEstimate = poseResult.output; } - // Do multi-tag pose estimation - MultiTargetPNPResults multiTagResult = new MultiTargetPNPResults(); - if 
(settings.solvePNPEnabled && settings.doMultiTarget) { - var multiTagOutput = multiTagPNPPipe.run(targetList); - sumPipeNanosElapsed += multiTagOutput.nanosElapsed; - multiTagResult = multiTagOutput.output; + // If single-tag estimation was not done, this is a multi-target tag from the layout + if (tagPoseEstimate == null) { + // compute this tag's camera-to-tag transform using the multitag result + var tagPose = atfl.getTagPose(detection.getId()); + if (tagPose.isPresent()) { + var camToTag = + new Transform3d( + new Pose3d().plus(multiTagResult.estimatedPose.best), tagPose.get()); + // match expected AprilTag coordinate system + // TODO cleanup coordinate systems in wpilib 2024 + var apriltagTrl = + CoordinateSystem.convert( + camToTag.getTranslation(), CoordinateSystem.NWU(), CoordinateSystem.EDN()); + var apriltagRot = + CoordinateSystem.convert( + new Rotation3d(), CoordinateSystem.EDN(), CoordinateSystem.NWU()) + .plus( + CoordinateSystem.convert( + camToTag.getRotation(), + CoordinateSystem.NWU(), + CoordinateSystem.EDN())); + apriltagRot = new Rotation3d(0, Math.PI, 0).plus(apriltagRot); + camToTag = new Transform3d(apriltagTrl, apriltagRot); + tagPoseEstimate = new AprilTagPoseEstimate(camToTag, camToTag, 0, 0); + } } - // Do single-tag pose estimation - if (settings.solvePNPEnabled) { - // Clear target list that was used for multitag so we can add target transforms - targetList.clear(); - // TODO global state again ew - var atfl = ConfigManager.getInstance().getConfig().getApriltagFieldLayout(); - - for (AprilTagDetection detection : usedDetections) { - AprilTagPoseEstimate tagPoseEstimate = null; - // Do single-tag estimation when "always enabled" or if a tag was not used for multitag - if (settings.doSingleTargetAlways - || !multiTagResult.fiducialIDsUsed.contains(Integer.valueOf(detection.getId()))) { - var poseResult = singleTagPoseEstimatorPipe.run(detection); - sumPipeNanosElapsed += poseResult.nanosElapsed; - tagPoseEstimate = poseResult.output; - 
} - - // If single-tag estimation was not done, this is a multi-target tag from the layout - if (tagPoseEstimate == null) { - // compute this tag's camera-to-tag transform using the multitag result - var tagPose = atfl.getTagPose(detection.getId()); - if (tagPose.isPresent()) { - var camToTag = - new Transform3d( - new Pose3d().plus(multiTagResult.estimatedPose.best), tagPose.get()); - // match expected AprilTag coordinate system - // TODO cleanup coordinate systems in wpilib 2024 - var apriltagTrl = - CoordinateSystem.convert( - camToTag.getTranslation(), CoordinateSystem.NWU(), CoordinateSystem.EDN()); - var apriltagRot = - CoordinateSystem.convert( - new Rotation3d(), CoordinateSystem.EDN(), CoordinateSystem.NWU()) - .plus( - CoordinateSystem.convert( - camToTag.getRotation(), - CoordinateSystem.NWU(), - CoordinateSystem.EDN())); - apriltagRot = new Rotation3d(0, Math.PI, 0).plus(apriltagRot); - camToTag = new Transform3d(apriltagTrl, apriltagRot); - tagPoseEstimate = new AprilTagPoseEstimate(camToTag, camToTag, 0, 0); - } - } - - // populate the target list - // Challenge here is that TrackedTarget functions with OpenCV Contour - TrackedTarget target = - new TrackedTarget( - detection, - tagPoseEstimate, - new TargetCalculationParameters( - false, null, null, null, null, frameStaticProperties)); - - var correctedBestPose = - MathUtils.convertOpenCVtoPhotonTransform(target.getBestCameraToTarget3d()); - var correctedAltPose = - MathUtils.convertOpenCVtoPhotonTransform(target.getAltCameraToTarget3d()); - - target.setBestCameraToTarget3d( - new Transform3d(correctedBestPose.getTranslation(), correctedBestPose.getRotation())); - target.setAltCameraToTarget3d( - new Transform3d(correctedAltPose.getTranslation(), correctedAltPose.getRotation())); - - targetList.add(target); - } - } + // populate the target list + // Challenge here is that TrackedTarget functions with OpenCV Contour + TrackedTarget target = + new TrackedTarget( + detection, + tagPoseEstimate, + new 
TargetCalculationParameters( + false, null, null, null, null, frameStaticProperties)); + + var correctedBestPose = + MathUtils.convertOpenCVtoPhotonTransform(target.getBestCameraToTarget3d()); + var correctedAltPose = + MathUtils.convertOpenCVtoPhotonTransform(target.getAltCameraToTarget3d()); + + target.setBestCameraToTarget3d( + new Transform3d(correctedBestPose.getTranslation(), correctedBestPose.getRotation())); + target.setAltCameraToTarget3d( + new Transform3d(correctedAltPose.getTranslation(), correctedAltPose.getRotation())); + + targetList.add(target); + } + } - var fpsResult = calculateFPSPipe.run(null); - var fps = fpsResult.output; + var fpsResult = calculateFPSPipe.run(null); + var fps = fpsResult.output; - return new CVPipelineResult(sumPipeNanosElapsed, fps, targetList, multiTagResult, frame); - } + return new CVPipelineResult(sumPipeNanosElapsed, fps, targetList, multiTagResult, frame); + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipeline/AprilTagPipelineSettings.java b/photon-core/src/main/java/org/photonvision/vision/pipeline/AprilTagPipelineSettings.java index 47faf82e29..3e5038db0e 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipeline/AprilTagPipelineSettings.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipeline/AprilTagPipelineSettings.java @@ -23,67 +23,67 @@ @JsonTypeName("AprilTagPipelineSettings") public class AprilTagPipelineSettings extends AdvancedPipelineSettings { - public AprilTagFamily tagFamily = AprilTagFamily.kTag16h5; - public int decimate = 1; - public double blur = 0; - public int threads = 4; // Multiple threads seems to be better performance on most platforms - public boolean debug = false; - public boolean refineEdges = true; - public int numIterations = 40; - public int hammingDist = 0; - public int decisionMargin = 35; - public boolean doMultiTarget = true; - public boolean doSingleTargetAlways = false; + public AprilTagFamily tagFamily = AprilTagFamily.kTag16h5; + 
public int decimate = 1; + public double blur = 0; + public int threads = 4; // Multiple threads seems to be better performance on most platforms + public boolean debug = false; + public boolean refineEdges = true; + public int numIterations = 40; + public int hammingDist = 0; + public int decisionMargin = 35; + public boolean doMultiTarget = true; + public boolean doSingleTargetAlways = false; - // 3d settings + // 3d settings - public AprilTagPipelineSettings() { - super(); - pipelineType = PipelineType.AprilTag; - outputShowMultipleTargets = true; - targetModel = TargetModel.k6in_16h5; - cameraExposure = 20; - cameraAutoExposure = false; - ledMode = false; - } + public AprilTagPipelineSettings() { + super(); + pipelineType = PipelineType.AprilTag; + outputShowMultipleTargets = true; + targetModel = TargetModel.k6in_16h5; + cameraExposure = 20; + cameraAutoExposure = false; + ledMode = false; + } - @Override - public int hashCode() { - final int prime = 31; - int result = super.hashCode(); - result = prime * result + ((tagFamily == null) ? 0 : tagFamily.hashCode()); - result = prime * result + decimate; - long temp; - temp = Double.doubleToLongBits(blur); - result = prime * result + (int) (temp ^ (temp >>> 32)); - result = prime * result + threads; - result = prime * result + (debug ? 1231 : 1237); - result = prime * result + (refineEdges ? 1231 : 1237); - result = prime * result + numIterations; - result = prime * result + hammingDist; - result = prime * result + decisionMargin; - result = prime * result + (doMultiTarget ? 1231 : 1237); - result = prime * result + (doSingleTargetAlways ? 1231 : 1237); - return result; - } + @Override + public int hashCode() { + final int prime = 31; + int result = super.hashCode(); + result = prime * result + ((tagFamily == null) ? 
0 : tagFamily.hashCode()); + result = prime * result + decimate; + long temp; + temp = Double.doubleToLongBits(blur); + result = prime * result + (int) (temp ^ (temp >>> 32)); + result = prime * result + threads; + result = prime * result + (debug ? 1231 : 1237); + result = prime * result + (refineEdges ? 1231 : 1237); + result = prime * result + numIterations; + result = prime * result + hammingDist; + result = prime * result + decisionMargin; + result = prime * result + (doMultiTarget ? 1231 : 1237); + result = prime * result + (doSingleTargetAlways ? 1231 : 1237); + return result; + } - @Override - public boolean equals(Object obj) { - if (this == obj) return true; - if (!super.equals(obj)) return false; - if (getClass() != obj.getClass()) return false; - AprilTagPipelineSettings other = (AprilTagPipelineSettings) obj; - if (tagFamily != other.tagFamily) return false; - if (decimate != other.decimate) return false; - if (Double.doubleToLongBits(blur) != Double.doubleToLongBits(other.blur)) return false; - if (threads != other.threads) return false; - if (debug != other.debug) return false; - if (refineEdges != other.refineEdges) return false; - if (numIterations != other.numIterations) return false; - if (hammingDist != other.hammingDist) return false; - if (decisionMargin != other.decisionMargin) return false; - if (doMultiTarget != other.doMultiTarget) return false; - if (doSingleTargetAlways != other.doSingleTargetAlways) return false; - return true; - } + @Override + public boolean equals(Object obj) { + if (this == obj) return true; + if (!super.equals(obj)) return false; + if (getClass() != obj.getClass()) return false; + AprilTagPipelineSettings other = (AprilTagPipelineSettings) obj; + if (tagFamily != other.tagFamily) return false; + if (decimate != other.decimate) return false; + if (Double.doubleToLongBits(blur) != Double.doubleToLongBits(other.blur)) return false; + if (threads != other.threads) return false; + if (debug != other.debug) return false; 
+ if (refineEdges != other.refineEdges) return false; + if (numIterations != other.numIterations) return false; + if (hammingDist != other.hammingDist) return false; + if (decisionMargin != other.decisionMargin) return false; + if (doMultiTarget != other.doMultiTarget) return false; + if (doSingleTargetAlways != other.doSingleTargetAlways) return false; + return true; + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipeline/ArucoPipeline.java b/photon-core/src/main/java/org/photonvision/vision/pipeline/ArucoPipeline.java index 26a5c60151..65dbaf10d4 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipeline/ArucoPipeline.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipeline/ArucoPipeline.java @@ -49,79 +49,79 @@ import org.photonvision.vision.target.TrackedTarget.TargetCalculationParameters; public class ArucoPipeline extends CVPipeline { - private final RotateImagePipe rotateImagePipe = new RotateImagePipe(); - private final GrayscalePipe grayscalePipe = new GrayscalePipe(); + private final RotateImagePipe rotateImagePipe = new RotateImagePipe(); + private final GrayscalePipe grayscalePipe = new GrayscalePipe(); - private final ArucoDetectionPipe arucoDetectionPipe = new ArucoDetectionPipe(); - private final CalculateFPSPipe calculateFPSPipe = new CalculateFPSPipe(); + private final ArucoDetectionPipe arucoDetectionPipe = new ArucoDetectionPipe(); + private final CalculateFPSPipe calculateFPSPipe = new CalculateFPSPipe(); - ArucoDetectorParams m_arucoDetectorParams = new ArucoDetectorParams(); + ArucoDetectorParams m_arucoDetectorParams = new ArucoDetectorParams(); - public ArucoPipeline() { - super(FrameThresholdType.GREYSCALE); - settings = new ArucoPipelineSettings(); - } + public ArucoPipeline() { + super(FrameThresholdType.GREYSCALE); + settings = new ArucoPipelineSettings(); + } - public ArucoPipeline(ArucoPipelineSettings settings) { - super(FrameThresholdType.GREYSCALE); - this.settings = settings; - } + 
public ArucoPipeline(ArucoPipelineSettings settings) { + super(FrameThresholdType.GREYSCALE); + this.settings = settings; + } - @Override - protected void setPipeParamsImpl() { - // Sanitize thread count - not supported to have fewer than 1 threads - settings.threads = Math.max(1, settings.threads); + @Override + protected void setPipeParamsImpl() { + // Sanitize thread count - not supported to have fewer than 1 threads + settings.threads = Math.max(1, settings.threads); - RotateImagePipe.RotateImageParams rotateImageParams = - new RotateImagePipe.RotateImageParams(settings.inputImageRotationMode); - rotateImagePipe.setParams(rotateImageParams); + RotateImagePipe.RotateImageParams rotateImageParams = + new RotateImagePipe.RotateImageParams(settings.inputImageRotationMode); + rotateImagePipe.setParams(rotateImageParams); - m_arucoDetectorParams.setDecimation((float) settings.decimate); - m_arucoDetectorParams.setCornerRefinementMaxIterations(settings.numIterations); - m_arucoDetectorParams.setCornerAccuracy(settings.cornerAccuracy); + m_arucoDetectorParams.setDecimation((float) settings.decimate); + m_arucoDetectorParams.setCornerRefinementMaxIterations(settings.numIterations); + m_arucoDetectorParams.setCornerAccuracy(settings.cornerAccuracy); - arucoDetectionPipe.setParams( - new ArucoDetectionPipeParams( - m_arucoDetectorParams.getDetector(), frameStaticProperties.cameraCalibration)); - } + arucoDetectionPipe.setParams( + new ArucoDetectionPipeParams( + m_arucoDetectorParams.getDetector(), frameStaticProperties.cameraCalibration)); + } - @Override - protected CVPipelineResult process(Frame frame, ArucoPipelineSettings settings) { - long sumPipeNanosElapsed = 0L; - Mat rawInputMat; - rawInputMat = frame.colorImage.getMat(); + @Override + protected CVPipelineResult process(Frame frame, ArucoPipelineSettings settings) { + long sumPipeNanosElapsed = 0L; + Mat rawInputMat; + rawInputMat = frame.colorImage.getMat(); - List targetList; - CVPipeResult> 
tagDetectionPipeResult; + List targetList; + CVPipeResult> tagDetectionPipeResult; - if (rawInputMat.empty()) { - return new CVPipelineResult(sumPipeNanosElapsed, 0, List.of(), frame); - } + if (rawInputMat.empty()) { + return new CVPipelineResult(sumPipeNanosElapsed, 0, List.of(), frame); + } - tagDetectionPipeResult = arucoDetectionPipe.run(rawInputMat); - targetList = new ArrayList<>(); - for (ArucoDetectionResult detection : tagDetectionPipeResult.output) { - // TODO this should be in a pipe, not in the top level here (Matt) + tagDetectionPipeResult = arucoDetectionPipe.run(rawInputMat); + targetList = new ArrayList<>(); + for (ArucoDetectionResult detection : tagDetectionPipeResult.output) { + // TODO this should be in a pipe, not in the top level here (Matt) - // populate the target list - // Challenge here is that TrackedTarget functions with OpenCV Contour - TrackedTarget target = - new TrackedTarget( - detection, - new TargetCalculationParameters( - false, null, null, null, null, frameStaticProperties)); + // populate the target list + // Challenge here is that TrackedTarget functions with OpenCV Contour + TrackedTarget target = + new TrackedTarget( + detection, + new TargetCalculationParameters( + false, null, null, null, null, frameStaticProperties)); - var correctedBestPose = target.getBestCameraToTarget3d(); + var correctedBestPose = target.getBestCameraToTarget3d(); - target.setBestCameraToTarget3d( - new Transform3d(correctedBestPose.getTranslation(), correctedBestPose.getRotation())); + target.setBestCameraToTarget3d( + new Transform3d(correctedBestPose.getTranslation(), correctedBestPose.getRotation())); - targetList.add(target); - } + targetList.add(target); + } - var fpsResult = calculateFPSPipe.run(null); - var fps = fpsResult.output; + var fpsResult = calculateFPSPipe.run(null); + var fps = fpsResult.output; - return new CVPipelineResult(sumPipeNanosElapsed, fps, targetList, frame); - } + return new CVPipelineResult(sumPipeNanosElapsed, fps, 
targetList, frame); + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipeline/ArucoPipelineSettings.java b/photon-core/src/main/java/org/photonvision/vision/pipeline/ArucoPipelineSettings.java index 265a8d202e..103a459634 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipeline/ArucoPipelineSettings.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipeline/ArucoPipelineSettings.java @@ -22,21 +22,21 @@ @JsonTypeName("ArucoPipelineSettings") public class ArucoPipelineSettings extends AdvancedPipelineSettings { - public double decimate = 1; - public int threads = 2; - public int numIterations = 100; - public double cornerAccuracy = 25.0; - public boolean useAruco3 = true; + public double decimate = 1; + public int threads = 2; + public int numIterations = 100; + public double cornerAccuracy = 25.0; + public boolean useAruco3 = true; - // 3d settings + // 3d settings - public ArucoPipelineSettings() { - super(); - pipelineType = PipelineType.Aruco; - outputShowMultipleTargets = true; - targetModel = TargetModel.kAruco6in_16h5; - cameraExposure = -1; - cameraAutoExposure = true; - ledMode = false; - } + public ArucoPipelineSettings() { + super(); + pipelineType = PipelineType.Aruco; + outputShowMultipleTargets = true; + targetModel = TargetModel.kAruco6in_16h5; + cameraExposure = -1; + cameraAutoExposure = true; + ledMode = false; + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipeline/CVPipeline.java b/photon-core/src/main/java/org/photonvision/vision/pipeline/CVPipeline.java index 3df5b268f7..d1621a63e5 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipeline/CVPipeline.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipeline/CVPipeline.java @@ -24,55 +24,55 @@ import org.photonvision.vision.pipeline.result.CVPipelineResult; public abstract class CVPipeline { - protected S settings; - protected FrameStaticProperties frameStaticProperties; - protected QuirkyCamera 
cameraQuirks; + protected S settings; + protected FrameStaticProperties frameStaticProperties; + protected QuirkyCamera cameraQuirks; - private final FrameThresholdType thresholdType; + private final FrameThresholdType thresholdType; - public CVPipeline(FrameThresholdType thresholdType) { - this.thresholdType = thresholdType; - } + public CVPipeline(FrameThresholdType thresholdType) { + this.thresholdType = thresholdType; + } - public FrameThresholdType getThresholdType() { - return thresholdType; - } + public FrameThresholdType getThresholdType() { + return thresholdType; + } - protected void setPipeParams( - FrameStaticProperties frameStaticProperties, S settings, QuirkyCamera cameraQuirks) { - this.settings = settings; - this.frameStaticProperties = frameStaticProperties; - this.cameraQuirks = cameraQuirks; + protected void setPipeParams( + FrameStaticProperties frameStaticProperties, S settings, QuirkyCamera cameraQuirks) { + this.settings = settings; + this.frameStaticProperties = frameStaticProperties; + this.cameraQuirks = cameraQuirks; - setPipeParamsImpl(); - } + setPipeParamsImpl(); + } - protected abstract void setPipeParamsImpl(); + protected abstract void setPipeParamsImpl(); - protected abstract R process(Frame frame, S settings); + protected abstract R process(Frame frame, S settings); - public S getSettings() { - return settings; - } + public S getSettings() { + return settings; + } - public void setSettings(S s) { - this.settings = s; - } + public void setSettings(S s) { + this.settings = s; + } - public R run(Frame frame, QuirkyCamera cameraQuirks) { - if (settings == null) { - throw new RuntimeException("No settings provided for pipeline!"); - } - setPipeParams(frame.frameStaticProperties, settings, cameraQuirks); + public R run(Frame frame, QuirkyCamera cameraQuirks) { + if (settings == null) { + throw new RuntimeException("No settings provided for pipeline!"); + } + setPipeParams(frame.frameStaticProperties, settings, cameraQuirks); - // if 
(frame.image.getMat().empty()) { - // //noinspection unchecked - // return (R) new CVPipelineResult(0, 0, List.of(), frame); - // } - R result = process(frame, settings); + // if (frame.image.getMat().empty()) { + // //noinspection unchecked + // return (R) new CVPipelineResult(0, 0, List.of(), frame); + // } + R result = process(frame, settings); - result.setImageCaptureTimestampNanos(frame.timestampNanos); + result.setImageCaptureTimestampNanos(frame.timestampNanos); - return result; - } + return result; + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipeline/CVPipelineSettings.java b/photon-core/src/main/java/org/photonvision/vision/pipeline/CVPipelineSettings.java index f836e30999..60b259af25 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipeline/CVPipelineSettings.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipeline/CVPipelineSettings.java @@ -24,119 +24,119 @@ import org.photonvision.vision.opencv.ImageRotationMode; @JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - include = JsonTypeInfo.As.WRAPPER_ARRAY, - property = "type") + use = JsonTypeInfo.Id.NAME, + include = JsonTypeInfo.As.WRAPPER_ARRAY, + property = "type") @JsonSubTypes({ - @JsonSubTypes.Type(value = ColoredShapePipelineSettings.class), - @JsonSubTypes.Type(value = ReflectivePipelineSettings.class), - @JsonSubTypes.Type(value = DriverModePipelineSettings.class), - @JsonSubTypes.Type(value = AprilTagPipelineSettings.class), - @JsonSubTypes.Type(value = ArucoPipelineSettings.class) + @JsonSubTypes.Type(value = ColoredShapePipelineSettings.class), + @JsonSubTypes.Type(value = ReflectivePipelineSettings.class), + @JsonSubTypes.Type(value = DriverModePipelineSettings.class), + @JsonSubTypes.Type(value = AprilTagPipelineSettings.class), + @JsonSubTypes.Type(value = ArucoPipelineSettings.class) }) public class CVPipelineSettings implements Cloneable { - public int pipelineIndex = 0; - public PipelineType pipelineType = PipelineType.DriverMode; - public 
ImageRotationMode inputImageRotationMode = ImageRotationMode.DEG_0; - public String pipelineNickname = "New Pipeline"; - public boolean cameraAutoExposure = false; - // manual exposure only used if cameraAutoExposure is false - public double cameraExposure = 20; - public int cameraBrightness = 50; - // Currently only used by a few cameras (notably the zero-copy Pi Camera driver) with the Gain - // quirk - public int cameraGain = 75; - // Currently only used by the zero-copy Pi Camera driver - public int cameraRedGain = 11; - public int cameraBlueGain = 20; - public int cameraVideoModeIndex = 0; - public FrameDivisor streamingFrameDivisor = FrameDivisor.NONE; - public boolean ledMode = false; - public boolean inputShouldShow = false; - public boolean outputShouldShow = true; + public int pipelineIndex = 0; + public PipelineType pipelineType = PipelineType.DriverMode; + public ImageRotationMode inputImageRotationMode = ImageRotationMode.DEG_0; + public String pipelineNickname = "New Pipeline"; + public boolean cameraAutoExposure = false; + // manual exposure only used if cameraAutoExposure is false + public double cameraExposure = 20; + public int cameraBrightness = 50; + // Currently only used by a few cameras (notably the zero-copy Pi Camera driver) with the Gain + // quirk + public int cameraGain = 75; + // Currently only used by the zero-copy Pi Camera driver + public int cameraRedGain = 11; + public int cameraBlueGain = 20; + public int cameraVideoModeIndex = 0; + public FrameDivisor streamingFrameDivisor = FrameDivisor.NONE; + public boolean ledMode = false; + public boolean inputShouldShow = false; + public boolean outputShouldShow = true; - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - CVPipelineSettings that = (CVPipelineSettings) o; - return pipelineIndex == that.pipelineIndex - && Double.compare(that.cameraExposure, cameraExposure) == 0 - && 
Double.compare(that.cameraBrightness, cameraBrightness) == 0 - && Double.compare(that.cameraGain, cameraGain) == 0 - && Double.compare(that.cameraRedGain, cameraRedGain) == 0 - && Double.compare(that.cameraBlueGain, cameraBlueGain) == 0 - && cameraVideoModeIndex == that.cameraVideoModeIndex - && ledMode == that.ledMode - && pipelineType == that.pipelineType - && inputImageRotationMode == that.inputImageRotationMode - && pipelineNickname.equals(that.pipelineNickname) - && streamingFrameDivisor == that.streamingFrameDivisor - && inputShouldShow == that.inputShouldShow - && outputShouldShow == that.outputShouldShow; - } + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CVPipelineSettings that = (CVPipelineSettings) o; + return pipelineIndex == that.pipelineIndex + && Double.compare(that.cameraExposure, cameraExposure) == 0 + && Double.compare(that.cameraBrightness, cameraBrightness) == 0 + && Double.compare(that.cameraGain, cameraGain) == 0 + && Double.compare(that.cameraRedGain, cameraRedGain) == 0 + && Double.compare(that.cameraBlueGain, cameraBlueGain) == 0 + && cameraVideoModeIndex == that.cameraVideoModeIndex + && ledMode == that.ledMode + && pipelineType == that.pipelineType + && inputImageRotationMode == that.inputImageRotationMode + && pipelineNickname.equals(that.pipelineNickname) + && streamingFrameDivisor == that.streamingFrameDivisor + && inputShouldShow == that.inputShouldShow + && outputShouldShow == that.outputShouldShow; + } - @Override - public int hashCode() { - return Objects.hash( - pipelineIndex, - pipelineType, - inputImageRotationMode, - pipelineNickname, - cameraExposure, - cameraBrightness, - cameraGain, - cameraRedGain, - cameraBlueGain, - cameraVideoModeIndex, - streamingFrameDivisor, - ledMode, - inputShouldShow, - outputShouldShow); - } + @Override + public int hashCode() { + return Objects.hash( + pipelineIndex, + pipelineType, + 
inputImageRotationMode, + pipelineNickname, + cameraExposure, + cameraBrightness, + cameraGain, + cameraRedGain, + cameraBlueGain, + cameraVideoModeIndex, + streamingFrameDivisor, + ledMode, + inputShouldShow, + outputShouldShow); + } - @Override - public CVPipelineSettings clone() { - try { - return (CVPipelineSettings) super.clone(); - } catch (CloneNotSupportedException e) { - e.printStackTrace(); - return null; - } + @Override + public CVPipelineSettings clone() { + try { + return (CVPipelineSettings) super.clone(); + } catch (CloneNotSupportedException e) { + e.printStackTrace(); + return null; } + } - @Override - public String toString() { - return "CVPipelineSettings{" - + "pipelineIndex=" - + pipelineIndex - + ", pipelineType=" - + pipelineType - + ", inputImageRotationMode=" - + inputImageRotationMode - + ", pipelineNickname='" - + pipelineNickname - + '\'' - + ", cameraExposure=" - + cameraExposure - + ", cameraBrightness=" - + cameraBrightness - + ", cameraGain=" - + cameraGain - + ", cameraRedGain=" - + cameraRedGain - + ", cameraBlueGain=" - + cameraBlueGain - + ", cameraVideoModeIndex=" - + cameraVideoModeIndex - + ", streamingFrameDivisor=" - + streamingFrameDivisor - + ", ledMode=" - + ledMode - + ", inputShouldShow=" - + inputShouldShow - + ", outputShouldShow=" - + outputShouldShow - + '}'; - } + @Override + public String toString() { + return "CVPipelineSettings{" + + "pipelineIndex=" + + pipelineIndex + + ", pipelineType=" + + pipelineType + + ", inputImageRotationMode=" + + inputImageRotationMode + + ", pipelineNickname='" + + pipelineNickname + + '\'' + + ", cameraExposure=" + + cameraExposure + + ", cameraBrightness=" + + cameraBrightness + + ", cameraGain=" + + cameraGain + + ", cameraRedGain=" + + cameraRedGain + + ", cameraBlueGain=" + + cameraBlueGain + + ", cameraVideoModeIndex=" + + cameraVideoModeIndex + + ", streamingFrameDivisor=" + + streamingFrameDivisor + + ", ledMode=" + + ledMode + + ", inputShouldShow=" + + inputShouldShow + + 
", outputShouldShow=" + + outputShouldShow + + '}'; + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipeline/Calibrate3dPipeline.java b/photon-core/src/main/java/org/photonvision/vision/pipeline/Calibrate3dPipeline.java index 95008e77f9..933a269041 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipeline/Calibrate3dPipeline.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipeline/Calibrate3dPipeline.java @@ -46,191 +46,191 @@ import org.photonvision.vision.pipeline.result.CVPipelineResult; public class Calibrate3dPipeline - extends CVPipeline { - // For logging - private static final Logger logger = new Logger(Calibrate3dPipeline.class, LogGroup.General); - - // Only 2 pipes needed, one for finding the board corners and one for actually calibrating - private final FindBoardCornersPipe findBoardCornersPipe = new FindBoardCornersPipe(); - private final Calibrate3dPipe calibrate3dPipe = new Calibrate3dPipe(); - private final CalculateFPSPipe calculateFPSPipe = new CalculateFPSPipe(); - - // Getter methods have been set for calibrate and takeSnapshot - private boolean takeSnapshot = false; - - // Output of the corners - final List> foundCornersList; - - /// Output of the calibration, getter method is set for this. 
- private CVPipeResult calibrationOutput; - - private final int minSnapshots; - - private boolean calibrating = false; - - // Path to save images - private final Path imageDir = ConfigManager.getInstance().getCalibDir(); - - private static final FrameThresholdType PROCESSING_TYPE = FrameThresholdType.NONE; - - public Calibrate3dPipeline() { - this(12); - } - - public Calibrate3dPipeline(int minSnapshots) { - super(PROCESSING_TYPE); - this.settings = new Calibration3dPipelineSettings(); - this.foundCornersList = new ArrayList<>(); - this.minSnapshots = minSnapshots; - } - - @Override - protected void setPipeParamsImpl() { - FindBoardCornersPipe.FindCornersPipeParams findCornersPipeParams = - new FindBoardCornersPipe.FindCornersPipeParams( - settings.boardHeight, - settings.boardWidth, - settings.boardType, - settings.gridSize, - settings.streamingFrameDivisor); - findBoardCornersPipe.setParams(findCornersPipeParams); - - Calibrate3dPipe.CalibratePipeParams calibratePipeParams = - new Calibrate3dPipe.CalibratePipeParams( - new Size(frameStaticProperties.imageWidth, frameStaticProperties.imageHeight)); - calibrate3dPipe.setParams(calibratePipeParams); - - // if (cameraQuirks.hasQuirk(CameraQuirk.PiCam) && LibCameraJNI.isSupported()) { - // LibCameraJNI.setRotation(settings.inputImageRotationMode.value); - // // LibCameraJNI.setShouldCopyColor(true); - // } + extends CVPipeline { + // For logging + private static final Logger logger = new Logger(Calibrate3dPipeline.class, LogGroup.General); + + // Only 2 pipes needed, one for finding the board corners and one for actually calibrating + private final FindBoardCornersPipe findBoardCornersPipe = new FindBoardCornersPipe(); + private final Calibrate3dPipe calibrate3dPipe = new Calibrate3dPipe(); + private final CalculateFPSPipe calculateFPSPipe = new CalculateFPSPipe(); + + // Getter methods have been set for calibrate and takeSnapshot + private boolean takeSnapshot = false; + + // Output of the corners + final List> 
foundCornersList; + + /// Output of the calibration, getter method is set for this. + private CVPipeResult calibrationOutput; + + private final int minSnapshots; + + private boolean calibrating = false; + + // Path to save images + private final Path imageDir = ConfigManager.getInstance().getCalibDir(); + + private static final FrameThresholdType PROCESSING_TYPE = FrameThresholdType.NONE; + + public Calibrate3dPipeline() { + this(12); + } + + public Calibrate3dPipeline(int minSnapshots) { + super(PROCESSING_TYPE); + this.settings = new Calibration3dPipelineSettings(); + this.foundCornersList = new ArrayList<>(); + this.minSnapshots = minSnapshots; + } + + @Override + protected void setPipeParamsImpl() { + FindBoardCornersPipe.FindCornersPipeParams findCornersPipeParams = + new FindBoardCornersPipe.FindCornersPipeParams( + settings.boardHeight, + settings.boardWidth, + settings.boardType, + settings.gridSize, + settings.streamingFrameDivisor); + findBoardCornersPipe.setParams(findCornersPipeParams); + + Calibrate3dPipe.CalibratePipeParams calibratePipeParams = + new Calibrate3dPipe.CalibratePipeParams( + new Size(frameStaticProperties.imageWidth, frameStaticProperties.imageHeight)); + calibrate3dPipe.setParams(calibratePipeParams); + + // if (cameraQuirks.hasQuirk(CameraQuirk.PiCam) && LibCameraJNI.isSupported()) { + // LibCameraJNI.setRotation(settings.inputImageRotationMode.value); + // // LibCameraJNI.setShouldCopyColor(true); + // } + } + + @Override + protected CVPipelineResult process(Frame frame, Calibration3dPipelineSettings settings) { + Mat inputColorMat = frame.colorImage.getMat(); + + if (this.calibrating || inputColorMat.empty()) { + return new CVPipelineResult(0, 0, null, frame); } - @Override - protected CVPipelineResult process(Frame frame, Calibration3dPipelineSettings settings) { - Mat inputColorMat = frame.colorImage.getMat(); - - if (this.calibrating || inputColorMat.empty()) { - return new CVPipelineResult(0, 0, null, frame); - } - - long 
sumPipeNanosElapsed = 0L; - - // Check if the frame has chessboard corners - var outputColorCVMat = new CVMat(); - inputColorMat.copyTo(outputColorCVMat.getMat()); - var findBoardResult = - findBoardCornersPipe.run(Pair.of(inputColorMat, outputColorCVMat.getMat())).output; - - var fpsResult = calculateFPSPipe.run(null); - var fps = fpsResult.output; - - if (takeSnapshot) { - // Set snapshot to false even if we don't find a board - takeSnapshot = false; - - if (findBoardResult != null) { - foundCornersList.add(findBoardResult); - Imgcodecs.imwrite( - Path.of(imageDir.toString(), "img" + foundCornersList.size() + ".jpg").toString(), - inputColorMat); - - // update the UI - broadcastState(); - } - } - - frame.release(); - - // Return the drawn chessboard if corners are found, if not, then return the input image. - return new CVPipelineResult( - sumPipeNanosElapsed, - fps, // Unused but here in case - Collections.emptyList(), - new MultiTargetPNPResults(), - new Frame( - new CVMat(), outputColorCVMat, FrameThresholdType.NONE, frame.frameStaticProperties)); - } - - public void deleteSavedImages() { - imageDir.toFile().mkdirs(); - imageDir.toFile().mkdir(); - FileUtils.deleteDirectory(imageDir); - } + long sumPipeNanosElapsed = 0L; - public boolean hasEnough() { - return foundCornersList.size() >= minSnapshots; - } + // Check if the frame has chessboard corners + var outputColorCVMat = new CVMat(); + inputColorMat.copyTo(outputColorCVMat.getMat()); + var findBoardResult = + findBoardCornersPipe.run(Pair.of(inputColorMat, outputColorCVMat.getMat())).output; - public CameraCalibrationCoefficients tryCalibration() { - if (!hasEnough()) { - logger.info( - "Not enough snapshots! 
Only got " - + foundCornersList.size() - + " of " - + minSnapshots - + " -- returning null.."); - return null; - } + var fpsResult = calculateFPSPipe.run(null); + var fps = fpsResult.output; - this.calibrating = true; + if (takeSnapshot) { + // Set snapshot to false even if we don't find a board + takeSnapshot = false; - /*Pass the board corners to the pipe, which will check again to see if all boards are valid - and returns the corresponding image and object points*/ - calibrationOutput = calibrate3dPipe.run(foundCornersList); - - this.calibrating = false; - - return calibrationOutput.output; - } - - public void takeSnapshot() { - takeSnapshot = true; - } - - public double[] perViewErrors() { - return calibrationOutput.output.perViewErrors; - } - - public void finishCalibration() { - foundCornersList.forEach( - it -> { - it.getMiddle().release(); - it.getRight().release(); - }); - foundCornersList.clear(); + if (findBoardResult != null) { + foundCornersList.add(findBoardResult); + Imgcodecs.imwrite( + Path.of(imageDir.toString(), "img" + foundCornersList.size() + ".jpg").toString(), + inputColorMat); + // update the UI broadcastState(); + } } - private void broadcastState() { - var state = - SerializationUtils.objectToHashMap( - new UICalibrationData( - foundCornersList.size(), - settings.cameraVideoModeIndex, - minSnapshots, - hasEnough(), - Units.metersToInches(settings.gridSize), - settings.boardWidth, - settings.boardHeight, - settings.boardType)); - - DataChangeService.getInstance() - .publishEvent(OutgoingUIEvent.wrappedOf("calibrationData", state)); + frame.release(); + + // Return the drawn chessboard if corners are found, if not, then return the input image. 
+ return new CVPipelineResult( + sumPipeNanosElapsed, + fps, // Unused but here in case + Collections.emptyList(), + new MultiTargetPNPResults(), + new Frame( + new CVMat(), outputColorCVMat, FrameThresholdType.NONE, frame.frameStaticProperties)); + } + + public void deleteSavedImages() { + imageDir.toFile().mkdirs(); + imageDir.toFile().mkdir(); + FileUtils.deleteDirectory(imageDir); + } + + public boolean hasEnough() { + return foundCornersList.size() >= minSnapshots; + } + + public CameraCalibrationCoefficients tryCalibration() { + if (!hasEnough()) { + logger.info( + "Not enough snapshots! Only got " + + foundCornersList.size() + + " of " + + minSnapshots + + " -- returning null.."); + return null; } - public boolean removeSnapshot(int index) { - try { - foundCornersList.remove(index); - return true; - } catch (ArrayIndexOutOfBoundsException e) { - logger.error("Could not remove snapshot at index " + index, e); - return false; - } + this.calibrating = true; + + /*Pass the board corners to the pipe, which will check again to see if all boards are valid + and returns the corresponding image and object points*/ + calibrationOutput = calibrate3dPipe.run(foundCornersList); + + this.calibrating = false; + + return calibrationOutput.output; + } + + public void takeSnapshot() { + takeSnapshot = true; + } + + public double[] perViewErrors() { + return calibrationOutput.output.perViewErrors; + } + + public void finishCalibration() { + foundCornersList.forEach( + it -> { + it.getMiddle().release(); + it.getRight().release(); + }); + foundCornersList.clear(); + + broadcastState(); + } + + private void broadcastState() { + var state = + SerializationUtils.objectToHashMap( + new UICalibrationData( + foundCornersList.size(), + settings.cameraVideoModeIndex, + minSnapshots, + hasEnough(), + Units.metersToInches(settings.gridSize), + settings.boardWidth, + settings.boardHeight, + settings.boardType)); + + DataChangeService.getInstance() + 
.publishEvent(OutgoingUIEvent.wrappedOf("calibrationData", state)); + } + + public boolean removeSnapshot(int index) { + try { + foundCornersList.remove(index); + return true; + } catch (ArrayIndexOutOfBoundsException e) { + logger.error("Could not remove snapshot at index " + index, e); + return false; } + } - public CameraCalibrationCoefficients cameraCalibrationCoefficients() { - return calibrationOutput.output; - } + public CameraCalibrationCoefficients cameraCalibrationCoefficients() { + return calibrationOutput.output; + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipeline/Calibration3dPipelineSettings.java b/photon-core/src/main/java/org/photonvision/vision/pipeline/Calibration3dPipelineSettings.java index 1d5e57df2f..4f2b3c314e 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipeline/Calibration3dPipelineSettings.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipeline/Calibration3dPipelineSettings.java @@ -22,18 +22,18 @@ import org.photonvision.vision.frame.FrameDivisor; public class Calibration3dPipelineSettings extends AdvancedPipelineSettings { - public int boardHeight = 8; - public int boardWidth = 8; - public UICalibrationData.BoardType boardType = UICalibrationData.BoardType.CHESSBOARD; - public double gridSize = Units.inchesToMeters(1.0); + public int boardHeight = 8; + public int boardWidth = 8; + public UICalibrationData.BoardType boardType = UICalibrationData.BoardType.CHESSBOARD; + public double gridSize = Units.inchesToMeters(1.0); - public Size resolution = new Size(640, 480); + public Size resolution = new Size(640, 480); - public Calibration3dPipelineSettings() { - super(); - this.cameraAutoExposure = true; - this.inputShouldShow = true; - this.outputShouldShow = true; - this.streamingFrameDivisor = FrameDivisor.HALF; - } + public Calibration3dPipelineSettings() { + super(); + this.cameraAutoExposure = true; + this.inputShouldShow = true; + this.outputShouldShow = true; + 
this.streamingFrameDivisor = FrameDivisor.HALF; + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipeline/ColoredShapePipeline.java b/photon-core/src/main/java/org/photonvision/vision/pipeline/ColoredShapePipeline.java index 190de9e68b..8b8b2269cd 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipeline/ColoredShapePipeline.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipeline/ColoredShapePipeline.java @@ -35,199 +35,199 @@ import org.photonvision.vision.target.TrackedTarget; public class ColoredShapePipeline - extends CVPipeline { - private final SpeckleRejectPipe speckleRejectPipe = new SpeckleRejectPipe(); - private final FindContoursPipe findContoursPipe = new FindContoursPipe(); - private final FindPolygonPipe findPolygonPipe = new FindPolygonPipe(); - private final FindCirclesPipe findCirclesPipe = new FindCirclesPipe(); - private final FilterShapesPipe filterShapesPipe = new FilterShapesPipe(); - private final SortContoursPipe sortContoursPipe = new SortContoursPipe(); - private final Collect2dTargetsPipe collect2dTargetsPipe = new Collect2dTargetsPipe(); - private final CornerDetectionPipe cornerDetectionPipe = new CornerDetectionPipe(); - private final SolvePNPPipe solvePNPPipe = new SolvePNPPipe(); - private final Draw2dCrosshairPipe draw2dCrosshairPipe = new Draw2dCrosshairPipe(); - private final Draw2dTargetsPipe draw2DTargetsPipe = new Draw2dTargetsPipe(); - private final Draw3dTargetsPipe draw3dTargetsPipe = new Draw3dTargetsPipe(); - private final CalculateFPSPipe calculateFPSPipe = new CalculateFPSPipe(); - - private final Point[] rectPoints = new Point[4]; - - private static final FrameThresholdType PROCESSING_TYPE = FrameThresholdType.HSV; - - public ColoredShapePipeline() { - super(PROCESSING_TYPE); - settings = new ColoredShapePipelineSettings(); + extends CVPipeline { + private final SpeckleRejectPipe speckleRejectPipe = new SpeckleRejectPipe(); + private final FindContoursPipe findContoursPipe 
= new FindContoursPipe(); + private final FindPolygonPipe findPolygonPipe = new FindPolygonPipe(); + private final FindCirclesPipe findCirclesPipe = new FindCirclesPipe(); + private final FilterShapesPipe filterShapesPipe = new FilterShapesPipe(); + private final SortContoursPipe sortContoursPipe = new SortContoursPipe(); + private final Collect2dTargetsPipe collect2dTargetsPipe = new Collect2dTargetsPipe(); + private final CornerDetectionPipe cornerDetectionPipe = new CornerDetectionPipe(); + private final SolvePNPPipe solvePNPPipe = new SolvePNPPipe(); + private final Draw2dCrosshairPipe draw2dCrosshairPipe = new Draw2dCrosshairPipe(); + private final Draw2dTargetsPipe draw2DTargetsPipe = new Draw2dTargetsPipe(); + private final Draw3dTargetsPipe draw3dTargetsPipe = new Draw3dTargetsPipe(); + private final CalculateFPSPipe calculateFPSPipe = new CalculateFPSPipe(); + + private final Point[] rectPoints = new Point[4]; + + private static final FrameThresholdType PROCESSING_TYPE = FrameThresholdType.HSV; + + public ColoredShapePipeline() { + super(PROCESSING_TYPE); + settings = new ColoredShapePipelineSettings(); + } + + public ColoredShapePipeline(ColoredShapePipelineSettings settings) { + super(PROCESSING_TYPE); + this.settings = settings; + } + + @Override + protected void setPipeParamsImpl() { + DualOffsetValues dualOffsetValues = + new DualOffsetValues( + settings.offsetDualPointA, + settings.offsetDualPointAArea, + settings.offsetDualPointB, + settings.offsetDualPointBArea); + + SpeckleRejectPipe.SpeckleRejectParams speckleRejectParams = + new SpeckleRejectPipe.SpeckleRejectParams(settings.contourSpecklePercentage); + speckleRejectPipe.setParams(speckleRejectParams); + + FindContoursPipe.FindContoursParams findContoursParams = + new FindContoursPipe.FindContoursParams(); + findContoursPipe.setParams(findContoursParams); + + FindPolygonPipe.FindPolygonPipeParams findPolygonPipeParams = + new FindPolygonPipe.FindPolygonPipeParams(settings.accuracyPercentage); + 
findPolygonPipe.setParams(findPolygonPipeParams); + + FindCirclesPipe.FindCirclePipeParams findCirclePipeParams = + new FindCirclesPipe.FindCirclePipeParams( + settings.circleDetectThreshold, + settings.contourRadius.getFirst(), + settings.minDist, + settings.contourRadius.getSecond(), + settings.maxCannyThresh, + settings.circleAccuracy, + Math.hypot(frameStaticProperties.imageWidth, frameStaticProperties.imageHeight)); + findCirclesPipe.setParams(findCirclePipeParams); + + FilterShapesPipe.FilterShapesPipeParams filterShapesPipeParams = + new FilterShapesPipe.FilterShapesPipeParams( + settings.contourShape, + settings.contourArea.getFirst(), + settings.contourArea.getSecond(), + settings.contourPerimeter.getFirst(), + settings.contourPerimeter.getSecond(), + frameStaticProperties); + filterShapesPipe.setParams(filterShapesPipeParams); + + SortContoursPipe.SortContoursParams sortContoursParams = + new SortContoursPipe.SortContoursParams( + settings.contourSortMode, + settings.outputShowMultipleTargets ? 5 : 1, + frameStaticProperties); // TODO don't hardcode? 
+ sortContoursPipe.setParams(sortContoursParams); + + Collect2dTargetsPipe.Collect2dTargetsParams collect2dTargetsParams = + new Collect2dTargetsPipe.Collect2dTargetsParams( + settings.offsetRobotOffsetMode, + settings.offsetSinglePoint, + dualOffsetValues, + settings.contourTargetOffsetPointEdge, + settings.contourTargetOrientation, + frameStaticProperties); + collect2dTargetsPipe.setParams(collect2dTargetsParams); + + var params = + new CornerDetectionPipe.CornerDetectionPipeParameters( + settings.cornerDetectionStrategy, + settings.cornerDetectionUseConvexHulls, + settings.cornerDetectionExactSideCount, + settings.cornerDetectionSideCount, + settings.cornerDetectionAccuracyPercentage); + cornerDetectionPipe.setParams(params); + + var solvePNPParams = + new SolvePNPPipe.SolvePNPPipeParams( + frameStaticProperties.cameraCalibration, settings.targetModel); + solvePNPPipe.setParams(solvePNPParams); + + Draw2dTargetsPipe.Draw2dTargetsParams draw2DTargetsParams = + new Draw2dTargetsPipe.Draw2dTargetsParams( + settings.outputShouldDraw, + settings.outputShowMultipleTargets, + settings.streamingFrameDivisor); + draw2DTargetsParams.showShape = true; + draw2DTargetsParams.showMaximumBox = false; + draw2DTargetsParams.showRotatedBox = false; + draw2DTargetsPipe.setParams(draw2DTargetsParams); + + Draw2dCrosshairPipe.Draw2dCrosshairParams draw2dCrosshairParams = + new Draw2dCrosshairPipe.Draw2dCrosshairParams( + settings.outputShouldDraw, + settings.offsetRobotOffsetMode, + settings.offsetSinglePoint, + dualOffsetValues, + frameStaticProperties, + settings.streamingFrameDivisor, + settings.inputImageRotationMode); + draw2dCrosshairPipe.setParams(draw2dCrosshairParams); + + var draw3dTargetsParams = + new Draw3dTargetsPipe.Draw3dContoursParams( + settings.outputShouldDraw, + frameStaticProperties.cameraCalibration, + settings.targetModel, + settings.streamingFrameDivisor); + draw3dTargetsPipe.setParams(draw3dTargetsParams); + } + + @Override + protected CVPipelineResult 
process(Frame frame, ColoredShapePipelineSettings settings) { + long sumPipeNanosElapsed = 0L; + + CVPipeResult> findContoursResult = + findContoursPipe.run(frame.processedImage.getMat()); + sumPipeNanosElapsed += findContoursResult.nanosElapsed; + + CVPipeResult> speckleRejectResult = + speckleRejectPipe.run(findContoursResult.output); + sumPipeNanosElapsed += speckleRejectResult.nanosElapsed; + + List shapes = null; + if (settings.contourShape == ContourShape.Circle) { + CVPipeResult> findCirclesResult = + findCirclesPipe.run(Pair.of(frame.processedImage.getMat(), speckleRejectResult.output)); + sumPipeNanosElapsed += findCirclesResult.nanosElapsed; + shapes = findCirclesResult.output; + } else { + CVPipeResult> findPolygonsResult = + findPolygonPipe.run(speckleRejectResult.output); + sumPipeNanosElapsed += findPolygonsResult.nanosElapsed; + shapes = findPolygonsResult.output; } - public ColoredShapePipeline(ColoredShapePipelineSettings settings) { - super(PROCESSING_TYPE); - this.settings = settings; + CVPipeResult> filterShapeResult = filterShapesPipe.run(shapes); + sumPipeNanosElapsed += filterShapeResult.nanosElapsed; + + CVPipeResult> sortContoursResult = + sortContoursPipe.run( + filterShapeResult.output.stream() + .map(shape -> new PotentialTarget(shape.getContour(), shape)) + .collect(Collectors.toList())); + sumPipeNanosElapsed += sortContoursResult.nanosElapsed; + + CVPipeResult> collect2dTargetsResult = + collect2dTargetsPipe.run(sortContoursResult.output); + sumPipeNanosElapsed += collect2dTargetsResult.nanosElapsed; + + List targetList; + + if (settings.solvePNPEnabled && settings.contourShape == ContourShape.Circle) { + var cornerDetectionResult = cornerDetectionPipe.run(collect2dTargetsResult.output); + collect2dTargetsResult.output.forEach( + shape -> { + shape.getMinAreaRect().points(rectPoints); + shape.setTargetCorners(Arrays.asList(rectPoints)); + }); + sumPipeNanosElapsed += cornerDetectionResult.nanosElapsed; + + var solvePNPResult = 
solvePNPPipe.run(cornerDetectionResult.output); + sumPipeNanosElapsed += solvePNPResult.nanosElapsed; + + targetList = solvePNPResult.output; + } else { + targetList = collect2dTargetsResult.output; } - @Override - protected void setPipeParamsImpl() { - DualOffsetValues dualOffsetValues = - new DualOffsetValues( - settings.offsetDualPointA, - settings.offsetDualPointAArea, - settings.offsetDualPointB, - settings.offsetDualPointBArea); - - SpeckleRejectPipe.SpeckleRejectParams speckleRejectParams = - new SpeckleRejectPipe.SpeckleRejectParams(settings.contourSpecklePercentage); - speckleRejectPipe.setParams(speckleRejectParams); - - FindContoursPipe.FindContoursParams findContoursParams = - new FindContoursPipe.FindContoursParams(); - findContoursPipe.setParams(findContoursParams); - - FindPolygonPipe.FindPolygonPipeParams findPolygonPipeParams = - new FindPolygonPipe.FindPolygonPipeParams(settings.accuracyPercentage); - findPolygonPipe.setParams(findPolygonPipeParams); - - FindCirclesPipe.FindCirclePipeParams findCirclePipeParams = - new FindCirclesPipe.FindCirclePipeParams( - settings.circleDetectThreshold, - settings.contourRadius.getFirst(), - settings.minDist, - settings.contourRadius.getSecond(), - settings.maxCannyThresh, - settings.circleAccuracy, - Math.hypot(frameStaticProperties.imageWidth, frameStaticProperties.imageHeight)); - findCirclesPipe.setParams(findCirclePipeParams); - - FilterShapesPipe.FilterShapesPipeParams filterShapesPipeParams = - new FilterShapesPipe.FilterShapesPipeParams( - settings.contourShape, - settings.contourArea.getFirst(), - settings.contourArea.getSecond(), - settings.contourPerimeter.getFirst(), - settings.contourPerimeter.getSecond(), - frameStaticProperties); - filterShapesPipe.setParams(filterShapesPipeParams); - - SortContoursPipe.SortContoursParams sortContoursParams = - new SortContoursPipe.SortContoursParams( - settings.contourSortMode, - settings.outputShowMultipleTargets ? 
5 : 1, - frameStaticProperties); // TODO don't hardcode? - sortContoursPipe.setParams(sortContoursParams); - - Collect2dTargetsPipe.Collect2dTargetsParams collect2dTargetsParams = - new Collect2dTargetsPipe.Collect2dTargetsParams( - settings.offsetRobotOffsetMode, - settings.offsetSinglePoint, - dualOffsetValues, - settings.contourTargetOffsetPointEdge, - settings.contourTargetOrientation, - frameStaticProperties); - collect2dTargetsPipe.setParams(collect2dTargetsParams); - - var params = - new CornerDetectionPipe.CornerDetectionPipeParameters( - settings.cornerDetectionStrategy, - settings.cornerDetectionUseConvexHulls, - settings.cornerDetectionExactSideCount, - settings.cornerDetectionSideCount, - settings.cornerDetectionAccuracyPercentage); - cornerDetectionPipe.setParams(params); - - var solvePNPParams = - new SolvePNPPipe.SolvePNPPipeParams( - frameStaticProperties.cameraCalibration, settings.targetModel); - solvePNPPipe.setParams(solvePNPParams); - - Draw2dTargetsPipe.Draw2dTargetsParams draw2DTargetsParams = - new Draw2dTargetsPipe.Draw2dTargetsParams( - settings.outputShouldDraw, - settings.outputShowMultipleTargets, - settings.streamingFrameDivisor); - draw2DTargetsParams.showShape = true; - draw2DTargetsParams.showMaximumBox = false; - draw2DTargetsParams.showRotatedBox = false; - draw2DTargetsPipe.setParams(draw2DTargetsParams); - - Draw2dCrosshairPipe.Draw2dCrosshairParams draw2dCrosshairParams = - new Draw2dCrosshairPipe.Draw2dCrosshairParams( - settings.outputShouldDraw, - settings.offsetRobotOffsetMode, - settings.offsetSinglePoint, - dualOffsetValues, - frameStaticProperties, - settings.streamingFrameDivisor, - settings.inputImageRotationMode); - draw2dCrosshairPipe.setParams(draw2dCrosshairParams); - - var draw3dTargetsParams = - new Draw3dTargetsPipe.Draw3dContoursParams( - settings.outputShouldDraw, - frameStaticProperties.cameraCalibration, - settings.targetModel, - settings.streamingFrameDivisor); - 
draw3dTargetsPipe.setParams(draw3dTargetsParams); - } + var fpsResult = calculateFPSPipe.run(null); + var fps = fpsResult.output; - @Override - protected CVPipelineResult process(Frame frame, ColoredShapePipelineSettings settings) { - long sumPipeNanosElapsed = 0L; - - CVPipeResult> findContoursResult = - findContoursPipe.run(frame.processedImage.getMat()); - sumPipeNanosElapsed += findContoursResult.nanosElapsed; - - CVPipeResult> speckleRejectResult = - speckleRejectPipe.run(findContoursResult.output); - sumPipeNanosElapsed += speckleRejectResult.nanosElapsed; - - List shapes = null; - if (settings.contourShape == ContourShape.Circle) { - CVPipeResult> findCirclesResult = - findCirclesPipe.run(Pair.of(frame.processedImage.getMat(), speckleRejectResult.output)); - sumPipeNanosElapsed += findCirclesResult.nanosElapsed; - shapes = findCirclesResult.output; - } else { - CVPipeResult> findPolygonsResult = - findPolygonPipe.run(speckleRejectResult.output); - sumPipeNanosElapsed += findPolygonsResult.nanosElapsed; - shapes = findPolygonsResult.output; - } - - CVPipeResult> filterShapeResult = filterShapesPipe.run(shapes); - sumPipeNanosElapsed += filterShapeResult.nanosElapsed; - - CVPipeResult> sortContoursResult = - sortContoursPipe.run( - filterShapeResult.output.stream() - .map(shape -> new PotentialTarget(shape.getContour(), shape)) - .collect(Collectors.toList())); - sumPipeNanosElapsed += sortContoursResult.nanosElapsed; - - CVPipeResult> collect2dTargetsResult = - collect2dTargetsPipe.run(sortContoursResult.output); - sumPipeNanosElapsed += collect2dTargetsResult.nanosElapsed; - - List targetList; - - if (settings.solvePNPEnabled && settings.contourShape == ContourShape.Circle) { - var cornerDetectionResult = cornerDetectionPipe.run(collect2dTargetsResult.output); - collect2dTargetsResult.output.forEach( - shape -> { - shape.getMinAreaRect().points(rectPoints); - shape.setTargetCorners(Arrays.asList(rectPoints)); - }); - sumPipeNanosElapsed += 
cornerDetectionResult.nanosElapsed; - - var solvePNPResult = solvePNPPipe.run(cornerDetectionResult.output); - sumPipeNanosElapsed += solvePNPResult.nanosElapsed; - - targetList = solvePNPResult.output; - } else { - targetList = collect2dTargetsResult.output; - } - - var fpsResult = calculateFPSPipe.run(null); - var fps = fpsResult.output; - - return new CVPipelineResult(sumPipeNanosElapsed, fps, targetList, frame); - } + return new CVPipelineResult(sumPipeNanosElapsed, fps, targetList, frame); + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipeline/ColoredShapePipelineSettings.java b/photon-core/src/main/java/org/photonvision/vision/pipeline/ColoredShapePipelineSettings.java index 62db7bc9ee..a5e69f493e 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipeline/ColoredShapePipelineSettings.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipeline/ColoredShapePipelineSettings.java @@ -26,71 +26,71 @@ @JsonTypeName("ColoredShapePipelineSettings") public class ColoredShapePipelineSettings extends AdvancedPipelineSettings { - public ContourShape contourShape = ContourShape.Triangle; - public DoubleCouple contourPerimeter = new DoubleCouple(0, Double.MAX_VALUE); - public double accuracyPercentage = 10.0; - // Circle detection - public int circleDetectThreshold = 5; - public IntegerCouple contourRadius = new IntegerCouple(0, 100); - public int minDist = 20; - public int maxCannyThresh = 90; - public int circleAccuracy = 20; + public ContourShape contourShape = ContourShape.Triangle; + public DoubleCouple contourPerimeter = new DoubleCouple(0, Double.MAX_VALUE); + public double accuracyPercentage = 10.0; + // Circle detection + public int circleDetectThreshold = 5; + public IntegerCouple contourRadius = new IntegerCouple(0, 100); + public int minDist = 20; + public int maxCannyThresh = 90; + public int circleAccuracy = 20; - // 3d settings - public CameraCalibrationCoefficients cameraCalibration; + // 3d settings + public 
CameraCalibrationCoefficients cameraCalibration; - public ColoredShapePipelineSettings() { - super(); - pipelineType = PipelineType.ColoredShape; - cameraExposure = 20; - } + public ColoredShapePipelineSettings() { + super(); + pipelineType = PipelineType.ColoredShape; + cameraExposure = 20; + } - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - if (!super.equals(o)) return false; - ColoredShapePipelineSettings that = (ColoredShapePipelineSettings) o; - return Double.compare(that.accuracyPercentage, accuracyPercentage) == 0 - && circleDetectThreshold == that.circleDetectThreshold - && minDist == that.minDist - && maxCannyThresh == that.maxCannyThresh - && circleAccuracy == that.circleAccuracy - && cornerDetectionUseConvexHulls == that.cornerDetectionUseConvexHulls - && cornerDetectionExactSideCount == that.cornerDetectionExactSideCount - && cornerDetectionSideCount == that.cornerDetectionSideCount - && Double.compare(that.cornerDetectionAccuracyPercentage, cornerDetectionAccuracyPercentage) - == 0 - && contourShape == that.contourShape - && Objects.equals(contourArea, that.contourArea) - && Objects.equals(contourPerimeter, that.contourPerimeter) - && Objects.equals(contourRadius, that.contourRadius) - && contourGroupingMode == that.contourGroupingMode - && contourIntersection == that.contourIntersection - && Objects.equals(cameraCalibration, that.cameraCalibration) - && cornerDetectionStrategy == that.cornerDetectionStrategy; - } + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + if (!super.equals(o)) return false; + ColoredShapePipelineSettings that = (ColoredShapePipelineSettings) o; + return Double.compare(that.accuracyPercentage, accuracyPercentage) == 0 + && circleDetectThreshold == that.circleDetectThreshold + && minDist == that.minDist + && maxCannyThresh == that.maxCannyThresh + 
&& circleAccuracy == that.circleAccuracy + && cornerDetectionUseConvexHulls == that.cornerDetectionUseConvexHulls + && cornerDetectionExactSideCount == that.cornerDetectionExactSideCount + && cornerDetectionSideCount == that.cornerDetectionSideCount + && Double.compare(that.cornerDetectionAccuracyPercentage, cornerDetectionAccuracyPercentage) + == 0 + && contourShape == that.contourShape + && Objects.equals(contourArea, that.contourArea) + && Objects.equals(contourPerimeter, that.contourPerimeter) + && Objects.equals(contourRadius, that.contourRadius) + && contourGroupingMode == that.contourGroupingMode + && contourIntersection == that.contourIntersection + && Objects.equals(cameraCalibration, that.cameraCalibration) + && cornerDetectionStrategy == that.cornerDetectionStrategy; + } - @Override - public int hashCode() { - return Objects.hash( - super.hashCode(), - contourShape, - contourArea, - contourPerimeter, - accuracyPercentage, - circleDetectThreshold, - contourRadius, - minDist, - maxCannyThresh, - circleAccuracy, - contourGroupingMode, - contourIntersection, - cameraCalibration, - cornerDetectionStrategy, - cornerDetectionUseConvexHulls, - cornerDetectionExactSideCount, - cornerDetectionSideCount, - cornerDetectionAccuracyPercentage); - } + @Override + public int hashCode() { + return Objects.hash( + super.hashCode(), + contourShape, + contourArea, + contourPerimeter, + accuracyPercentage, + circleDetectThreshold, + contourRadius, + minDist, + maxCannyThresh, + circleAccuracy, + contourGroupingMode, + contourIntersection, + cameraCalibration, + cornerDetectionStrategy, + cornerDetectionUseConvexHulls, + cornerDetectionExactSideCount, + cornerDetectionSideCount, + cornerDetectionAccuracyPercentage); + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipeline/DriverModePipeline.java b/photon-core/src/main/java/org/photonvision/vision/pipeline/DriverModePipeline.java index f29cb2be3d..aeec75ddb6 100644 --- 
a/photon-core/src/main/java/org/photonvision/vision/pipeline/DriverModePipeline.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipeline/DriverModePipeline.java @@ -31,75 +31,75 @@ import org.photonvision.vision.pipeline.result.DriverModePipelineResult; public class DriverModePipeline - extends CVPipeline { - private final RotateImagePipe rotateImagePipe = new RotateImagePipe(); - private final Draw2dCrosshairPipe draw2dCrosshairPipe = new Draw2dCrosshairPipe(); - private final CalculateFPSPipe calculateFPSPipe = new CalculateFPSPipe(); - private final ResizeImagePipe resizeImagePipe = new ResizeImagePipe(); - - private static final FrameThresholdType PROCESSING_TYPE = FrameThresholdType.NONE; - - public DriverModePipeline() { - super(PROCESSING_TYPE); - settings = new DriverModePipelineSettings(); - } - - public DriverModePipeline(DriverModePipelineSettings settings) { - super(PROCESSING_TYPE); - this.settings = settings; - } - - @Override - protected void setPipeParamsImpl() { - RotateImagePipe.RotateImageParams rotateImageParams = - new RotateImagePipe.RotateImageParams(settings.inputImageRotationMode); - rotateImagePipe.setParams(rotateImageParams); - - Draw2dCrosshairPipe.Draw2dCrosshairParams draw2dCrosshairParams = - new Draw2dCrosshairPipe.Draw2dCrosshairParams( - frameStaticProperties, settings.streamingFrameDivisor, settings.inputImageRotationMode); - draw2dCrosshairPipe.setParams(draw2dCrosshairParams); - - resizeImagePipe.setParams( - new ResizeImagePipe.ResizeImageParams(settings.streamingFrameDivisor)); - - // if (LibCameraJNI.isSupported() && cameraQuirks.hasQuirk(CameraQuirk.PiCam)) { - // LibCameraJNI.setRotation(settings.inputImageRotationMode.value); - // LibCameraJNI.setShouldCopyColor(true); - // } + extends CVPipeline { + private final RotateImagePipe rotateImagePipe = new RotateImagePipe(); + private final Draw2dCrosshairPipe draw2dCrosshairPipe = new Draw2dCrosshairPipe(); + private final CalculateFPSPipe calculateFPSPipe = new 
CalculateFPSPipe(); + private final ResizeImagePipe resizeImagePipe = new ResizeImagePipe(); + + private static final FrameThresholdType PROCESSING_TYPE = FrameThresholdType.NONE; + + public DriverModePipeline() { + super(PROCESSING_TYPE); + settings = new DriverModePipelineSettings(); + } + + public DriverModePipeline(DriverModePipelineSettings settings) { + super(PROCESSING_TYPE); + this.settings = settings; + } + + @Override + protected void setPipeParamsImpl() { + RotateImagePipe.RotateImageParams rotateImageParams = + new RotateImagePipe.RotateImageParams(settings.inputImageRotationMode); + rotateImagePipe.setParams(rotateImageParams); + + Draw2dCrosshairPipe.Draw2dCrosshairParams draw2dCrosshairParams = + new Draw2dCrosshairPipe.Draw2dCrosshairParams( + frameStaticProperties, settings.streamingFrameDivisor, settings.inputImageRotationMode); + draw2dCrosshairPipe.setParams(draw2dCrosshairParams); + + resizeImagePipe.setParams( + new ResizeImagePipe.ResizeImageParams(settings.streamingFrameDivisor)); + + // if (LibCameraJNI.isSupported() && cameraQuirks.hasQuirk(CameraQuirk.PiCam)) { + // LibCameraJNI.setRotation(settings.inputImageRotationMode.value); + // LibCameraJNI.setShouldCopyColor(true); + // } + } + + @Override + public DriverModePipelineResult process(Frame frame, DriverModePipelineSettings settings) { + long totalNanos = 0; + boolean accelerated = LibCameraJNI.isSupported() && cameraQuirks.hasQuirk(CameraQuirk.PiCam); + + // apply pipes + var inputMat = frame.colorImage.getMat(); + + boolean emptyIn = inputMat.empty(); + + if (!emptyIn) { + if (!accelerated) { + var rotateImageResult = rotateImagePipe.run(inputMat); + totalNanos += rotateImageResult.nanosElapsed; + } + + totalNanos += resizeImagePipe.run(inputMat).nanosElapsed; + + var draw2dCrosshairResult = draw2dCrosshairPipe.run(Pair.of(inputMat, List.of())); + + // calculate elapsed nanoseconds + totalNanos += draw2dCrosshairResult.nanosElapsed; } - @Override - public DriverModePipelineResult 
process(Frame frame, DriverModePipelineSettings settings) { - long totalNanos = 0; - boolean accelerated = LibCameraJNI.isSupported() && cameraQuirks.hasQuirk(CameraQuirk.PiCam); + var fpsResult = calculateFPSPipe.run(null); + var fps = fpsResult.output; - // apply pipes - var inputMat = frame.colorImage.getMat(); - - boolean emptyIn = inputMat.empty(); - - if (!emptyIn) { - if (!accelerated) { - var rotateImageResult = rotateImagePipe.run(inputMat); - totalNanos += rotateImageResult.nanosElapsed; - } - - totalNanos += resizeImagePipe.run(inputMat).nanosElapsed; - - var draw2dCrosshairResult = draw2dCrosshairPipe.run(Pair.of(inputMat, List.of())); - - // calculate elapsed nanoseconds - totalNanos += draw2dCrosshairResult.nanosElapsed; - } - - var fpsResult = calculateFPSPipe.run(null); - var fps = fpsResult.output; - - // Flip-flop input and output in the Frame - return new DriverModePipelineResult( - MathUtils.nanosToMillis(totalNanos), - fps, - new Frame(frame.processedImage, frame.colorImage, frame.type, frame.frameStaticProperties)); - } + // Flip-flop input and output in the Frame + return new DriverModePipelineResult( + MathUtils.nanosToMillis(totalNanos), + fps, + new Frame(frame.processedImage, frame.colorImage, frame.type, frame.frameStaticProperties)); + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipeline/DriverModePipelineSettings.java b/photon-core/src/main/java/org/photonvision/vision/pipeline/DriverModePipelineSettings.java index 70beebf7a5..7afb3bc169 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipeline/DriverModePipelineSettings.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipeline/DriverModePipelineSettings.java @@ -23,14 +23,14 @@ @JsonTypeName("DriverModePipelineSettings") public class DriverModePipelineSettings extends CVPipelineSettings { - public DoubleCouple offsetPoint = new DoubleCouple(); + public DoubleCouple offsetPoint = new DoubleCouple(); - public DriverModePipelineSettings() 
{ - super(); - pipelineNickname = "Driver Mode"; - pipelineIndex = PipelineManager.DRIVERMODE_INDEX; - pipelineType = PipelineType.DriverMode; - inputShouldShow = true; - cameraAutoExposure = true; - } + public DriverModePipelineSettings() { + super(); + pipelineNickname = "Driver Mode"; + pipelineIndex = PipelineManager.DRIVERMODE_INDEX; + pipelineType = PipelineType.DriverMode; + inputShouldShow = true; + cameraAutoExposure = true; + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipeline/OutputStreamPipeline.java b/photon-core/src/main/java/org/photonvision/vision/pipeline/OutputStreamPipeline.java index 0474b6b562..53372b864b 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipeline/OutputStreamPipeline.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipeline/OutputStreamPipeline.java @@ -32,198 +32,198 @@ * shall not get its settings saved, nor shall it be managed by PipelineManager */ public class OutputStreamPipeline { - private final OutputMatPipe outputMatPipe = new OutputMatPipe(); - private final Draw2dCrosshairPipe draw2dCrosshairPipe = new Draw2dCrosshairPipe(); - private final Draw2dTargetsPipe draw2dTargetsPipe = new Draw2dTargetsPipe(); - private final Draw3dTargetsPipe draw3dTargetsPipe = new Draw3dTargetsPipe(); - private final Draw2dAprilTagsPipe draw2dAprilTagsPipe = new Draw2dAprilTagsPipe(); - private final Draw3dAprilTagsPipe draw3dAprilTagsPipe = new Draw3dAprilTagsPipe(); - - private final Draw2dArucoPipe draw2dArucoPipe = new Draw2dArucoPipe(); - private final Draw3dArucoPipe draw3dArucoPipe = new Draw3dArucoPipe(); - private final CalculateFPSPipe calculateFPSPipe = new CalculateFPSPipe(); - private final ResizeImagePipe resizeImagePipe = new ResizeImagePipe(); - - private final long[] pipeProfileNanos = new long[12]; - - protected void setPipeParams( - FrameStaticProperties frameStaticProperties, AdvancedPipelineSettings settings) { - var dualOffsetValues = - new DualOffsetValues( - 
settings.offsetDualPointA, - settings.offsetDualPointAArea, - settings.offsetDualPointB, - settings.offsetDualPointBArea); - - var draw2DTargetsParams = - new Draw2dTargetsPipe.Draw2dTargetsParams( - settings.outputShouldDraw, - settings.outputShowMultipleTargets, - settings.streamingFrameDivisor); - draw2dTargetsPipe.setParams(draw2DTargetsParams); - - var draw2DAprilTagsParams = - new Draw2dAprilTagsPipe.Draw2dAprilTagsParams( - settings.outputShouldDraw, - settings.outputShowMultipleTargets, - settings.streamingFrameDivisor); - draw2dAprilTagsPipe.setParams(draw2DAprilTagsParams); - - var draw2DArucoParams = - new Draw2dArucoPipe.Draw2dArucoParams( - settings.outputShouldDraw, - settings.outputShowMultipleTargets, - settings.streamingFrameDivisor); - draw2dArucoPipe.setParams(draw2DArucoParams); - - var draw2dCrosshairParams = - new Draw2dCrosshairPipe.Draw2dCrosshairParams( - settings.outputShouldDraw, - settings.offsetRobotOffsetMode, - settings.offsetSinglePoint, - dualOffsetValues, - frameStaticProperties, - settings.streamingFrameDivisor, - settings.inputImageRotationMode); - draw2dCrosshairPipe.setParams(draw2dCrosshairParams); - - var draw3dTargetsParams = - new Draw3dTargetsPipe.Draw3dContoursParams( - settings.outputShouldDraw, - frameStaticProperties.cameraCalibration, - settings.targetModel, - settings.streamingFrameDivisor); - draw3dTargetsPipe.setParams(draw3dTargetsParams); - - var draw3dAprilTagsParams = - new Draw3dAprilTagsPipe.Draw3dAprilTagsParams( - settings.outputShouldDraw, - frameStaticProperties.cameraCalibration, - settings.targetModel, - settings.streamingFrameDivisor); - draw3dAprilTagsPipe.setParams(draw3dAprilTagsParams); - - var draw3dArucoParams = - new Draw3dArucoPipe.Draw3dArucoParams( - settings.outputShouldDraw, - frameStaticProperties.cameraCalibration, - TargetModel.k6in_16h5, - settings.streamingFrameDivisor); - draw3dArucoPipe.setParams(draw3dArucoParams); - - resizeImagePipe.setParams( - new 
ResizeImagePipe.ResizeImageParams(settings.streamingFrameDivisor)); - } + private final OutputMatPipe outputMatPipe = new OutputMatPipe(); + private final Draw2dCrosshairPipe draw2dCrosshairPipe = new Draw2dCrosshairPipe(); + private final Draw2dTargetsPipe draw2dTargetsPipe = new Draw2dTargetsPipe(); + private final Draw3dTargetsPipe draw3dTargetsPipe = new Draw3dTargetsPipe(); + private final Draw2dAprilTagsPipe draw2dAprilTagsPipe = new Draw2dAprilTagsPipe(); + private final Draw3dAprilTagsPipe draw3dAprilTagsPipe = new Draw3dAprilTagsPipe(); + + private final Draw2dArucoPipe draw2dArucoPipe = new Draw2dArucoPipe(); + private final Draw3dArucoPipe draw3dArucoPipe = new Draw3dArucoPipe(); + private final CalculateFPSPipe calculateFPSPipe = new CalculateFPSPipe(); + private final ResizeImagePipe resizeImagePipe = new ResizeImagePipe(); + + private final long[] pipeProfileNanos = new long[12]; + + protected void setPipeParams( + FrameStaticProperties frameStaticProperties, AdvancedPipelineSettings settings) { + var dualOffsetValues = + new DualOffsetValues( + settings.offsetDualPointA, + settings.offsetDualPointAArea, + settings.offsetDualPointB, + settings.offsetDualPointBArea); + + var draw2DTargetsParams = + new Draw2dTargetsPipe.Draw2dTargetsParams( + settings.outputShouldDraw, + settings.outputShowMultipleTargets, + settings.streamingFrameDivisor); + draw2dTargetsPipe.setParams(draw2DTargetsParams); + + var draw2DAprilTagsParams = + new Draw2dAprilTagsPipe.Draw2dAprilTagsParams( + settings.outputShouldDraw, + settings.outputShowMultipleTargets, + settings.streamingFrameDivisor); + draw2dAprilTagsPipe.setParams(draw2DAprilTagsParams); + + var draw2DArucoParams = + new Draw2dArucoPipe.Draw2dArucoParams( + settings.outputShouldDraw, + settings.outputShowMultipleTargets, + settings.streamingFrameDivisor); + draw2dArucoPipe.setParams(draw2DArucoParams); + + var draw2dCrosshairParams = + new Draw2dCrosshairPipe.Draw2dCrosshairParams( + settings.outputShouldDraw, + 
settings.offsetRobotOffsetMode, + settings.offsetSinglePoint, + dualOffsetValues, + frameStaticProperties, + settings.streamingFrameDivisor, + settings.inputImageRotationMode); + draw2dCrosshairPipe.setParams(draw2dCrosshairParams); + + var draw3dTargetsParams = + new Draw3dTargetsPipe.Draw3dContoursParams( + settings.outputShouldDraw, + frameStaticProperties.cameraCalibration, + settings.targetModel, + settings.streamingFrameDivisor); + draw3dTargetsPipe.setParams(draw3dTargetsParams); + + var draw3dAprilTagsParams = + new Draw3dAprilTagsPipe.Draw3dAprilTagsParams( + settings.outputShouldDraw, + frameStaticProperties.cameraCalibration, + settings.targetModel, + settings.streamingFrameDivisor); + draw3dAprilTagsPipe.setParams(draw3dAprilTagsParams); + + var draw3dArucoParams = + new Draw3dArucoPipe.Draw3dArucoParams( + settings.outputShouldDraw, + frameStaticProperties.cameraCalibration, + TargetModel.k6in_16h5, + settings.streamingFrameDivisor); + draw3dArucoPipe.setParams(draw3dArucoParams); + + resizeImagePipe.setParams( + new ResizeImagePipe.ResizeImageParams(settings.streamingFrameDivisor)); + } + + public CVPipelineResult process( + Frame inputAndOutputFrame, + AdvancedPipelineSettings settings, + List targetsToDraw) { + setPipeParams(inputAndOutputFrame.frameStaticProperties, settings); + var inMat = inputAndOutputFrame.colorImage.getMat(); + var outMat = inputAndOutputFrame.processedImage.getMat(); + + long sumPipeNanosElapsed = 0L; + + // Resize both in place before doing any conversion + boolean inEmpty = inMat.empty(); + if (!inEmpty) + sumPipeNanosElapsed += pipeProfileNanos[0] = resizeImagePipe.run(inMat).nanosElapsed; + + boolean outEmpty = outMat.empty(); + if (!outEmpty) + sumPipeNanosElapsed += pipeProfileNanos[1] = resizeImagePipe.run(outMat).nanosElapsed; + + // Only attempt drawing on a non-empty frame + if (!outEmpty) { + // Convert single-channel HSV output mat to 3-channel BGR in preparation for streaming + if (outMat.channels() == 1) { + var 
outputMatPipeResult = outputMatPipe.run(outMat); + sumPipeNanosElapsed += pipeProfileNanos[2] = outputMatPipeResult.nanosElapsed; + } else { + pipeProfileNanos[2] = 0; + } + + // Draw 2D Crosshair on output + var draw2dCrosshairResultOnInput = draw2dCrosshairPipe.run(Pair.of(inMat, targetsToDraw)); + sumPipeNanosElapsed += pipeProfileNanos[3] = draw2dCrosshairResultOnInput.nanosElapsed; + + if (!(settings instanceof AprilTagPipelineSettings) + && !(settings instanceof ArucoPipelineSettings)) { + // If we're processing anything other than Apriltags... + var draw2dCrosshairResultOnOutput = draw2dCrosshairPipe.run(Pair.of(outMat, targetsToDraw)); + sumPipeNanosElapsed += pipeProfileNanos[4] = draw2dCrosshairResultOnOutput.nanosElapsed; + + if (settings.solvePNPEnabled) { + // Draw 3D Targets on input and output if possible + pipeProfileNanos[5] = 0; + pipeProfileNanos[6] = 0; + pipeProfileNanos[7] = 0; + + var drawOnOutputResult = draw3dTargetsPipe.run(Pair.of(outMat, targetsToDraw)); + sumPipeNanosElapsed += pipeProfileNanos[8] = drawOnOutputResult.nanosElapsed; + } else { + // Only draw 2d targets + pipeProfileNanos[5] = 0; + + var draw2dTargetsOnOutput = draw2dTargetsPipe.run(Pair.of(outMat, targetsToDraw)); + sumPipeNanosElapsed += pipeProfileNanos[6] = draw2dTargetsOnOutput.nanosElapsed; + + pipeProfileNanos[7] = 0; + pipeProfileNanos[8] = 0; + } + + } else if (settings instanceof AprilTagPipelineSettings) { + // If we are doing apriltags... 
+ if (settings.solvePNPEnabled) { + // Draw 3d Apriltag markers (camera is calibrated and running in 3d mode) + pipeProfileNanos[5] = 0; + pipeProfileNanos[6] = 0; + + var drawOnInputResult = draw3dAprilTagsPipe.run(Pair.of(outMat, targetsToDraw)); + sumPipeNanosElapsed += pipeProfileNanos[7] = drawOnInputResult.nanosElapsed; + + pipeProfileNanos[8] = 0; + + } else { + // Draw 2d apriltag markers + var draw2dTargetsOnInput = draw2dAprilTagsPipe.run(Pair.of(outMat, targetsToDraw)); + sumPipeNanosElapsed += pipeProfileNanos[5] = draw2dTargetsOnInput.nanosElapsed; - public CVPipelineResult process( - Frame inputAndOutputFrame, - AdvancedPipelineSettings settings, - List targetsToDraw) { - setPipeParams(inputAndOutputFrame.frameStaticProperties, settings); - var inMat = inputAndOutputFrame.colorImage.getMat(); - var outMat = inputAndOutputFrame.processedImage.getMat(); - - long sumPipeNanosElapsed = 0L; - - // Resize both in place before doing any conversion - boolean inEmpty = inMat.empty(); - if (!inEmpty) - sumPipeNanosElapsed += pipeProfileNanos[0] = resizeImagePipe.run(inMat).nanosElapsed; - - boolean outEmpty = outMat.empty(); - if (!outEmpty) - sumPipeNanosElapsed += pipeProfileNanos[1] = resizeImagePipe.run(outMat).nanosElapsed; - - // Only attempt drawing on a non-empty frame - if (!outEmpty) { - // Convert single-channel HSV output mat to 3-channel BGR in preparation for streaming - if (outMat.channels() == 1) { - var outputMatPipeResult = outputMatPipe.run(outMat); - sumPipeNanosElapsed += pipeProfileNanos[2] = outputMatPipeResult.nanosElapsed; - } else { - pipeProfileNanos[2] = 0; - } - - // Draw 2D Crosshair on output - var draw2dCrosshairResultOnInput = draw2dCrosshairPipe.run(Pair.of(inMat, targetsToDraw)); - sumPipeNanosElapsed += pipeProfileNanos[3] = draw2dCrosshairResultOnInput.nanosElapsed; - - if (!(settings instanceof AprilTagPipelineSettings) - && !(settings instanceof ArucoPipelineSettings)) { - // If we're processing anything other than 
Apriltags... - var draw2dCrosshairResultOnOutput = draw2dCrosshairPipe.run(Pair.of(outMat, targetsToDraw)); - sumPipeNanosElapsed += pipeProfileNanos[4] = draw2dCrosshairResultOnOutput.nanosElapsed; - - if (settings.solvePNPEnabled) { - // Draw 3D Targets on input and output if possible - pipeProfileNanos[5] = 0; - pipeProfileNanos[6] = 0; - pipeProfileNanos[7] = 0; - - var drawOnOutputResult = draw3dTargetsPipe.run(Pair.of(outMat, targetsToDraw)); - sumPipeNanosElapsed += pipeProfileNanos[8] = drawOnOutputResult.nanosElapsed; - } else { - // Only draw 2d targets - pipeProfileNanos[5] = 0; - - var draw2dTargetsOnOutput = draw2dTargetsPipe.run(Pair.of(outMat, targetsToDraw)); - sumPipeNanosElapsed += pipeProfileNanos[6] = draw2dTargetsOnOutput.nanosElapsed; - - pipeProfileNanos[7] = 0; - pipeProfileNanos[8] = 0; - } - - } else if (settings instanceof AprilTagPipelineSettings) { - // If we are doing apriltags... - if (settings.solvePNPEnabled) { - // Draw 3d Apriltag markers (camera is calibrated and running in 3d mode) - pipeProfileNanos[5] = 0; - pipeProfileNanos[6] = 0; - - var drawOnInputResult = draw3dAprilTagsPipe.run(Pair.of(outMat, targetsToDraw)); - sumPipeNanosElapsed += pipeProfileNanos[7] = drawOnInputResult.nanosElapsed; - - pipeProfileNanos[8] = 0; - - } else { - // Draw 2d apriltag markers - var draw2dTargetsOnInput = draw2dAprilTagsPipe.run(Pair.of(outMat, targetsToDraw)); - sumPipeNanosElapsed += pipeProfileNanos[5] = draw2dTargetsOnInput.nanosElapsed; - - pipeProfileNanos[6] = 0; - pipeProfileNanos[7] = 0; - pipeProfileNanos[8] = 0; - } - } else if (settings instanceof ArucoPipelineSettings) { - if (settings.solvePNPEnabled) { - // Draw 3d Apriltag markers (camera is calibrated and running in 3d mode) - pipeProfileNanos[5] = 0; - pipeProfileNanos[6] = 0; - - var drawOnInputResult = draw3dArucoPipe.run(Pair.of(outMat, targetsToDraw)); - sumPipeNanosElapsed += pipeProfileNanos[7] = drawOnInputResult.nanosElapsed; - - pipeProfileNanos[8] = 0; - - } 
else { - // Draw 2d apriltag markers - var draw2dTargetsOnInput = draw2dArucoPipe.run(Pair.of(outMat, targetsToDraw)); - sumPipeNanosElapsed += pipeProfileNanos[5] = draw2dTargetsOnInput.nanosElapsed; - - pipeProfileNanos[6] = 0; - pipeProfileNanos[7] = 0; - pipeProfileNanos[8] = 0; - } - } + pipeProfileNanos[6] = 0; + pipeProfileNanos[7] = 0; + pipeProfileNanos[8] = 0; } + } else if (settings instanceof ArucoPipelineSettings) { + if (settings.solvePNPEnabled) { + // Draw 3d Apriltag markers (camera is calibrated and running in 3d mode) + pipeProfileNanos[5] = 0; + pipeProfileNanos[6] = 0; - var fpsResult = calculateFPSPipe.run(null); - var fps = fpsResult.output; + var drawOnInputResult = draw3dArucoPipe.run(Pair.of(outMat, targetsToDraw)); + sumPipeNanosElapsed += pipeProfileNanos[7] = drawOnInputResult.nanosElapsed; - return new CVPipelineResult( - sumPipeNanosElapsed, - fps, // Unused but here just in case - targetsToDraw, - inputAndOutputFrame); + pipeProfileNanos[8] = 0; + + } else { + // Draw 2d apriltag markers + var draw2dTargetsOnInput = draw2dArucoPipe.run(Pair.of(outMat, targetsToDraw)); + sumPipeNanosElapsed += pipeProfileNanos[5] = draw2dTargetsOnInput.nanosElapsed; + + pipeProfileNanos[6] = 0; + pipeProfileNanos[7] = 0; + pipeProfileNanos[8] = 0; + } + } } + + var fpsResult = calculateFPSPipe.run(null); + var fps = fpsResult.output; + + return new CVPipelineResult( + sumPipeNanosElapsed, + fps, // Unused but here just in case + targetsToDraw, + inputAndOutputFrame); + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipeline/PipelineProfiler.java b/photon-core/src/main/java/org/photonvision/vision/pipeline/PipelineProfiler.java index 57c94b0640..93a1c27c56 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipeline/PipelineProfiler.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipeline/PipelineProfiler.java @@ -22,77 +22,77 @@ import org.photonvision.common.util.math.MathUtils; public class 
PipelineProfiler { - private static boolean shouldLog; + private static boolean shouldLog; - private static final Logger reflectiveLogger = - new Logger(ReflectivePipeline.class, LogGroup.VisionModule); - private static final Logger coloredShapeLogger = - new Logger(ColoredShapePipeline.class, LogGroup.VisionModule); + private static final Logger reflectiveLogger = + new Logger(ReflectivePipeline.class, LogGroup.VisionModule); + private static final Logger coloredShapeLogger = + new Logger(ColoredShapePipeline.class, LogGroup.VisionModule); - /** - * Indices for Reflective profiling 0 - rotateImagePipe 1 - inputCopy (not a pipe) 2 - hsvPipe 3 - - * findContoursPipe 4 - speckleRejectPipe 5 - filterContoursPipe 6 - groupContoursPipe 7 - - * sortContoursPipe 8 - collect2dTargetsPipe 9 - cornerDetectionPipe 10 - solvePNPPipe (OPTIONAL) - * 11 - outputMatPipe (OPTIONAL) 12 - draw2dCrosshairPipe (on input) 13 - draw2dCrosshairPipe (on - * output) 14 - draw2dTargetsPipe (on input) 15 - draw2dTargetsPipe (on output) 16 - - * draw3dTargetsPipe (OPTIONAL, on input) 17 - draw3dTargetsPipe (OPTIONAL, on output) - */ - private static final String[] ReflectivePipeNames = - new String[] { - "RotateImage", - "HSV", - "FindContours", - "SpeckleReject", - "FilterContours", - "GroupContours", - "SortContours", - "Collect2dTargets", - "CornerDetection", - "SolvePNP", - }; + /** + * Indices for Reflective profiling 0 - rotateImagePipe 1 - inputCopy (not a pipe) 2 - hsvPipe 3 - + * findContoursPipe 4 - speckleRejectPipe 5 - filterContoursPipe 6 - groupContoursPipe 7 - + * sortContoursPipe 8 - collect2dTargetsPipe 9 - cornerDetectionPipe 10 - solvePNPPipe (OPTIONAL) + * 11 - outputMatPipe (OPTIONAL) 12 - draw2dCrosshairPipe (on input) 13 - draw2dCrosshairPipe (on + * output) 14 - draw2dTargetsPipe (on input) 15 - draw2dTargetsPipe (on output) 16 - + * draw3dTargetsPipe (OPTIONAL, on input) 17 - draw3dTargetsPipe (OPTIONAL, on output) + */ + private static final String[] 
ReflectivePipeNames = + new String[] { + "RotateImage", + "HSV", + "FindContours", + "SpeckleReject", + "FilterContours", + "GroupContours", + "SortContours", + "Collect2dTargets", + "CornerDetection", + "SolvePNP", + }; - public static final int ReflectivePipeCount = ReflectivePipeNames.length; + public static final int ReflectivePipeCount = ReflectivePipeNames.length; - protected static String getReflectiveProfileString(long[] nanos) { - if (nanos.length != ReflectivePipeCount) { - return "Invalid data"; - } + protected static String getReflectiveProfileString(long[] nanos) { + if (nanos.length != ReflectivePipeCount) { + return "Invalid data"; + } - var sb = new StringBuilder("Profiling - "); - double totalMs = 0; - for (int i = 0; i < nanos.length; i++) { - if ((i == 10 || i == 11 || i == 17 || i == 18) && nanos[i] == 0) { - continue; // skip empty pipe profiles - } + var sb = new StringBuilder("Profiling - "); + double totalMs = 0; + for (int i = 0; i < nanos.length; i++) { + if ((i == 10 || i == 11 || i == 17 || i == 18) && nanos[i] == 0) { + continue; // skip empty pipe profiles + } - sb.append(ReflectivePipeNames[i]); - sb.append(": "); - var ms = MathUtils.roundTo(nanos[i] / 1e+6, 3); - totalMs += ms; - sb.append(ms); - sb.append("ms"); - // boolean isLast = (i + 1 == 17 && nanos[i+1] == 0) || i == 18; - // if (isLast) { - // var foo = "bar"; - // } else { - sb.append(", "); - // } - } + sb.append(ReflectivePipeNames[i]); + sb.append(": "); + var ms = MathUtils.roundTo(nanos[i] / 1e+6, 3); + totalMs += ms; + sb.append(ms); + sb.append("ms"); + // boolean isLast = (i + 1 == 17 && nanos[i+1] == 0) || i == 18; + // if (isLast) { + // var foo = "bar"; + // } else { + sb.append(", "); + // } + } - sb.append("Total: "); - sb.append(MathUtils.roundTo(totalMs, 3)); - sb.append("ms"); + sb.append("Total: "); + sb.append(MathUtils.roundTo(totalMs, 3)); + sb.append("ms"); - return sb.toString(); - } + return sb.toString(); + } - public static void 
printReflectiveProfile(long[] nanos) { - if (shouldLog) { - reflectiveLogger.trace(() -> getReflectiveProfileString(nanos)); - } + public static void printReflectiveProfile(long[] nanos) { + if (shouldLog) { + reflectiveLogger.trace(() -> getReflectiveProfileString(nanos)); } + } - public static void enablePrint(boolean enable) { - shouldLog = enable; - } + public static void enablePrint(boolean enable) { + shouldLog = enable; + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipeline/PipelineType.java b/photon-core/src/main/java/org/photonvision/vision/pipeline/PipelineType.java index a2f6346b89..58cf44615c 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipeline/PipelineType.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipeline/PipelineType.java @@ -19,18 +19,18 @@ @SuppressWarnings("rawtypes") public enum PipelineType { - Calib3d(-2, Calibrate3dPipeline.class), - DriverMode(-1, DriverModePipeline.class), - Reflective(0, ReflectivePipeline.class), - ColoredShape(1, ColoredShapePipeline.class), - AprilTag(2, AprilTagPipeline.class), - Aruco(3, ArucoPipeline.class); + Calib3d(-2, Calibrate3dPipeline.class), + DriverMode(-1, DriverModePipeline.class), + Reflective(0, ReflectivePipeline.class), + ColoredShape(1, ColoredShapePipeline.class), + AprilTag(2, AprilTagPipeline.class), + Aruco(3, ArucoPipeline.class); - public final int baseIndex; - public final Class clazz; + public final int baseIndex; + public final Class clazz; - PipelineType(int baseIndex, Class clazz) { - this.baseIndex = baseIndex; - this.clazz = clazz; - } + PipelineType(int baseIndex, Class clazz) { + this.baseIndex = baseIndex; + this.clazz = clazz; + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipeline/ReflectivePipeline.java b/photon-core/src/main/java/org/photonvision/vision/pipeline/ReflectivePipeline.java index e961488b78..40e38af8b5 100644 --- 
a/photon-core/src/main/java/org/photonvision/vision/pipeline/ReflectivePipeline.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipeline/ReflectivePipeline.java @@ -31,167 +31,167 @@ /** Represents a pipeline for tracking retro-reflective targets. */ public class ReflectivePipeline extends CVPipeline { - private final FindContoursPipe findContoursPipe = new FindContoursPipe(); - private final SpeckleRejectPipe speckleRejectPipe = new SpeckleRejectPipe(); - private final FilterContoursPipe filterContoursPipe = new FilterContoursPipe(); - private final GroupContoursPipe groupContoursPipe = new GroupContoursPipe(); - private final SortContoursPipe sortContoursPipe = new SortContoursPipe(); - private final Collect2dTargetsPipe collect2dTargetsPipe = new Collect2dTargetsPipe(); - private final CornerDetectionPipe cornerDetectionPipe = new CornerDetectionPipe(); - private final SolvePNPPipe solvePNPPipe = new SolvePNPPipe(); - private final CalculateFPSPipe calculateFPSPipe = new CalculateFPSPipe(); - - private final long[] pipeProfileNanos = new long[PipelineProfiler.ReflectivePipeCount]; - - private static final FrameThresholdType PROCESSING_TYPE = FrameThresholdType.HSV; - - public ReflectivePipeline() { - super(PROCESSING_TYPE); - settings = new ReflectivePipelineSettings(); + private final FindContoursPipe findContoursPipe = new FindContoursPipe(); + private final SpeckleRejectPipe speckleRejectPipe = new SpeckleRejectPipe(); + private final FilterContoursPipe filterContoursPipe = new FilterContoursPipe(); + private final GroupContoursPipe groupContoursPipe = new GroupContoursPipe(); + private final SortContoursPipe sortContoursPipe = new SortContoursPipe(); + private final Collect2dTargetsPipe collect2dTargetsPipe = new Collect2dTargetsPipe(); + private final CornerDetectionPipe cornerDetectionPipe = new CornerDetectionPipe(); + private final SolvePNPPipe solvePNPPipe = new SolvePNPPipe(); + private final CalculateFPSPipe calculateFPSPipe = new 
CalculateFPSPipe(); + + private final long[] pipeProfileNanos = new long[PipelineProfiler.ReflectivePipeCount]; + + private static final FrameThresholdType PROCESSING_TYPE = FrameThresholdType.HSV; + + public ReflectivePipeline() { + super(PROCESSING_TYPE); + settings = new ReflectivePipelineSettings(); + } + + public ReflectivePipeline(ReflectivePipelineSettings settings) { + super(PROCESSING_TYPE); + this.settings = settings; + } + + @Override + protected void setPipeParamsImpl() { + var dualOffsetValues = + new DualOffsetValues( + settings.offsetDualPointA, + settings.offsetDualPointAArea, + settings.offsetDualPointB, + settings.offsetDualPointBArea); + + // var rotateImageParams = new + // RotateImagePipe.RotateImageParams(settings.inputImageRotationMode); + // rotateImagePipe.setParams(rotateImageParams); + + // if (cameraQuirks.hasQuirk(CameraQuirk.PiCam) && LibCameraJNI.isSupported()) { + // LibCameraJNI.setThresholds( + // settings.hsvHue.getFirst() / 180d, + // settings.hsvSaturation.getFirst() / 255d, + // settings.hsvValue.getFirst() / 255d, + // settings.hsvHue.getSecond() / 180d, + // settings.hsvSaturation.getSecond() / 255d, + // settings.hsvValue.getSecond() / 255d); + // // LibCameraJNI.setInvertHue(settings.hueInverted); + // LibCameraJNI.setRotation(settings.inputImageRotationMode.value); + // // LibCameraJNI.setShouldCopyColor(settings.inputShouldShow); + // } else { + // var hsvParams = + // new HSVPipe.HSVParams( + // settings.hsvHue, settings.hsvSaturation, settings.hsvValue, + // settings.hueInverted); + // hsvPipe.setParams(hsvParams); + // } + + var findContoursParams = new FindContoursPipe.FindContoursParams(); + findContoursPipe.setParams(findContoursParams); + + var speckleRejectParams = + new SpeckleRejectPipe.SpeckleRejectParams(settings.contourSpecklePercentage); + speckleRejectPipe.setParams(speckleRejectParams); + + var filterContoursParams = + new FilterContoursPipe.FilterContoursParams( + settings.contourArea, + 
settings.contourRatio, + settings.contourFullness, + frameStaticProperties, + settings.contourFilterRangeX, + settings.contourFilterRangeY, + settings.contourTargetOrientation == TargetOrientation.Landscape); + filterContoursPipe.setParams(filterContoursParams); + + var groupContoursParams = + new GroupContoursPipe.GroupContoursParams( + settings.contourGroupingMode, settings.contourIntersection); + groupContoursPipe.setParams(groupContoursParams); + + var sortContoursParams = + new SortContoursPipe.SortContoursParams( + settings.contourSortMode, + settings.outputShowMultipleTargets ? 8 : 1, // TODO don't hardcode? + frameStaticProperties); + sortContoursPipe.setParams(sortContoursParams); + + var collect2dTargetsParams = + new Collect2dTargetsPipe.Collect2dTargetsParams( + settings.offsetRobotOffsetMode, + settings.offsetSinglePoint, + dualOffsetValues, + settings.contourTargetOffsetPointEdge, + settings.contourTargetOrientation, + frameStaticProperties); + collect2dTargetsPipe.setParams(collect2dTargetsParams); + + var cornerDetectionPipeParams = + new CornerDetectionPipe.CornerDetectionPipeParameters( + settings.cornerDetectionStrategy, + settings.cornerDetectionUseConvexHulls, + settings.cornerDetectionExactSideCount, + settings.cornerDetectionSideCount, + settings.cornerDetectionAccuracyPercentage); + cornerDetectionPipe.setParams(cornerDetectionPipeParams); + + var solvePNPParams = + new SolvePNPPipe.SolvePNPPipeParams( + frameStaticProperties.cameraCalibration, settings.targetModel); + solvePNPPipe.setParams(solvePNPParams); + } + + @Override + public CVPipelineResult process(Frame frame, ReflectivePipelineSettings settings) { + long sumPipeNanosElapsed = 0L; + + CVPipeResult> findContoursResult = + findContoursPipe.run(frame.processedImage.getMat()); + sumPipeNanosElapsed += pipeProfileNanos[2] = findContoursResult.nanosElapsed; + + CVPipeResult> speckleRejectResult = + speckleRejectPipe.run(findContoursResult.output); + sumPipeNanosElapsed += 
pipeProfileNanos[3] = speckleRejectResult.nanosElapsed; + + CVPipeResult> filterContoursResult = + filterContoursPipe.run(speckleRejectResult.output); + sumPipeNanosElapsed += pipeProfileNanos[4] = filterContoursResult.nanosElapsed; + + CVPipeResult> groupContoursResult = + groupContoursPipe.run(filterContoursResult.output); + sumPipeNanosElapsed += pipeProfileNanos[5] = groupContoursResult.nanosElapsed; + + CVPipeResult> sortContoursResult = + sortContoursPipe.run(groupContoursResult.output); + sumPipeNanosElapsed += pipeProfileNanos[6] = sortContoursResult.nanosElapsed; + + CVPipeResult> collect2dTargetsResult = + collect2dTargetsPipe.run(sortContoursResult.output); + sumPipeNanosElapsed += pipeProfileNanos[7] = collect2dTargetsResult.nanosElapsed; + + List targetList; + + // 3d stuff + if (settings.solvePNPEnabled) { + var cornerDetectionResult = cornerDetectionPipe.run(collect2dTargetsResult.output); + sumPipeNanosElapsed += pipeProfileNanos[8] = cornerDetectionResult.nanosElapsed; + + var solvePNPResult = solvePNPPipe.run(cornerDetectionResult.output); + sumPipeNanosElapsed += pipeProfileNanos[9] = solvePNPResult.nanosElapsed; + + targetList = solvePNPResult.output; + } else { + pipeProfileNanos[8] = 0; + pipeProfileNanos[9] = 0; + targetList = collect2dTargetsResult.output; } - public ReflectivePipeline(ReflectivePipelineSettings settings) { - super(PROCESSING_TYPE); - this.settings = settings; - } - - @Override - protected void setPipeParamsImpl() { - var dualOffsetValues = - new DualOffsetValues( - settings.offsetDualPointA, - settings.offsetDualPointAArea, - settings.offsetDualPointB, - settings.offsetDualPointBArea); - - // var rotateImageParams = new - // RotateImagePipe.RotateImageParams(settings.inputImageRotationMode); - // rotateImagePipe.setParams(rotateImageParams); - - // if (cameraQuirks.hasQuirk(CameraQuirk.PiCam) && LibCameraJNI.isSupported()) { - // LibCameraJNI.setThresholds( - // settings.hsvHue.getFirst() / 180d, - // 
settings.hsvSaturation.getFirst() / 255d, - // settings.hsvValue.getFirst() / 255d, - // settings.hsvHue.getSecond() / 180d, - // settings.hsvSaturation.getSecond() / 255d, - // settings.hsvValue.getSecond() / 255d); - // // LibCameraJNI.setInvertHue(settings.hueInverted); - // LibCameraJNI.setRotation(settings.inputImageRotationMode.value); - // // LibCameraJNI.setShouldCopyColor(settings.inputShouldShow); - // } else { - // var hsvParams = - // new HSVPipe.HSVParams( - // settings.hsvHue, settings.hsvSaturation, settings.hsvValue, - // settings.hueInverted); - // hsvPipe.setParams(hsvParams); - // } - - var findContoursParams = new FindContoursPipe.FindContoursParams(); - findContoursPipe.setParams(findContoursParams); - - var speckleRejectParams = - new SpeckleRejectPipe.SpeckleRejectParams(settings.contourSpecklePercentage); - speckleRejectPipe.setParams(speckleRejectParams); - - var filterContoursParams = - new FilterContoursPipe.FilterContoursParams( - settings.contourArea, - settings.contourRatio, - settings.contourFullness, - frameStaticProperties, - settings.contourFilterRangeX, - settings.contourFilterRangeY, - settings.contourTargetOrientation == TargetOrientation.Landscape); - filterContoursPipe.setParams(filterContoursParams); - - var groupContoursParams = - new GroupContoursPipe.GroupContoursParams( - settings.contourGroupingMode, settings.contourIntersection); - groupContoursPipe.setParams(groupContoursParams); - - var sortContoursParams = - new SortContoursPipe.SortContoursParams( - settings.contourSortMode, - settings.outputShowMultipleTargets ? 8 : 1, // TODO don't hardcode? 
- frameStaticProperties); - sortContoursPipe.setParams(sortContoursParams); - - var collect2dTargetsParams = - new Collect2dTargetsPipe.Collect2dTargetsParams( - settings.offsetRobotOffsetMode, - settings.offsetSinglePoint, - dualOffsetValues, - settings.contourTargetOffsetPointEdge, - settings.contourTargetOrientation, - frameStaticProperties); - collect2dTargetsPipe.setParams(collect2dTargetsParams); - - var cornerDetectionPipeParams = - new CornerDetectionPipe.CornerDetectionPipeParameters( - settings.cornerDetectionStrategy, - settings.cornerDetectionUseConvexHulls, - settings.cornerDetectionExactSideCount, - settings.cornerDetectionSideCount, - settings.cornerDetectionAccuracyPercentage); - cornerDetectionPipe.setParams(cornerDetectionPipeParams); - - var solvePNPParams = - new SolvePNPPipe.SolvePNPPipeParams( - frameStaticProperties.cameraCalibration, settings.targetModel); - solvePNPPipe.setParams(solvePNPParams); - } - - @Override - public CVPipelineResult process(Frame frame, ReflectivePipelineSettings settings) { - long sumPipeNanosElapsed = 0L; - - CVPipeResult> findContoursResult = - findContoursPipe.run(frame.processedImage.getMat()); - sumPipeNanosElapsed += pipeProfileNanos[2] = findContoursResult.nanosElapsed; - - CVPipeResult> speckleRejectResult = - speckleRejectPipe.run(findContoursResult.output); - sumPipeNanosElapsed += pipeProfileNanos[3] = speckleRejectResult.nanosElapsed; - - CVPipeResult> filterContoursResult = - filterContoursPipe.run(speckleRejectResult.output); - sumPipeNanosElapsed += pipeProfileNanos[4] = filterContoursResult.nanosElapsed; - - CVPipeResult> groupContoursResult = - groupContoursPipe.run(filterContoursResult.output); - sumPipeNanosElapsed += pipeProfileNanos[5] = groupContoursResult.nanosElapsed; + var fpsResult = calculateFPSPipe.run(null); + var fps = fpsResult.output; - CVPipeResult> sortContoursResult = - sortContoursPipe.run(groupContoursResult.output); - sumPipeNanosElapsed += pipeProfileNanos[6] = 
sortContoursResult.nanosElapsed; + PipelineProfiler.printReflectiveProfile(pipeProfileNanos); - CVPipeResult> collect2dTargetsResult = - collect2dTargetsPipe.run(sortContoursResult.output); - sumPipeNanosElapsed += pipeProfileNanos[7] = collect2dTargetsResult.nanosElapsed; - - List targetList; - - // 3d stuff - if (settings.solvePNPEnabled) { - var cornerDetectionResult = cornerDetectionPipe.run(collect2dTargetsResult.output); - sumPipeNanosElapsed += pipeProfileNanos[8] = cornerDetectionResult.nanosElapsed; - - var solvePNPResult = solvePNPPipe.run(cornerDetectionResult.output); - sumPipeNanosElapsed += pipeProfileNanos[9] = solvePNPResult.nanosElapsed; - - targetList = solvePNPResult.output; - } else { - pipeProfileNanos[8] = 0; - pipeProfileNanos[9] = 0; - targetList = collect2dTargetsResult.output; - } - - var fpsResult = calculateFPSPipe.run(null); - var fps = fpsResult.output; - - PipelineProfiler.printReflectiveProfile(pipeProfileNanos); - - return new CVPipelineResult(sumPipeNanosElapsed, fps, targetList, frame); - } + return new CVPipelineResult(sumPipeNanosElapsed, fps, targetList, frame); + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipeline/ReflectivePipelineSettings.java b/photon-core/src/main/java/org/photonvision/vision/pipeline/ReflectivePipelineSettings.java index b06dcaddef..7ac8353d8f 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipeline/ReflectivePipelineSettings.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipeline/ReflectivePipelineSettings.java @@ -21,13 +21,13 @@ @JsonTypeName("ReflectivePipelineSettings") public class ReflectivePipelineSettings extends AdvancedPipelineSettings { - public double contourFilterRangeX = 2; - public double contourFilterRangeY = 2; + public double contourFilterRangeX = 2; + public double contourFilterRangeY = 2; - public ReflectivePipelineSettings() { - super(); - pipelineType = PipelineType.Reflective; - cameraExposure = 6; - cameraGain = 20; - } + public 
ReflectivePipelineSettings() { + super(); + pipelineType = PipelineType.Reflective; + cameraExposure = 6; + cameraGain = 20; + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipeline/UICalibrationData.java b/photon-core/src/main/java/org/photonvision/vision/pipeline/UICalibrationData.java index 287e271974..ed90581d14 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipeline/UICalibrationData.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipeline/UICalibrationData.java @@ -20,70 +20,70 @@ import java.util.Map; public class UICalibrationData { - public final int videoModeIndex; - public int count; - public final int minCount; - public final boolean hasEnough; - public final double squareSizeIn; - public final int patternWidth; - public final int patternHeight; - public final BoardType boardType; // + public final int videoModeIndex; + public int count; + public final int minCount; + public final boolean hasEnough; + public final double squareSizeIn; + public final int patternWidth; + public final int patternHeight; + public final BoardType boardType; // - public UICalibrationData( - int count, - int videoModeIndex, - int minCount, - boolean hasEnough, - double squareSizeIn, - int patternWidth, - int patternHeight, - BoardType boardType) { - this.count = count; - this.minCount = minCount; - this.videoModeIndex = videoModeIndex; - this.hasEnough = hasEnough; - this.squareSizeIn = squareSizeIn; - this.patternWidth = patternWidth; - this.patternHeight = patternHeight; - this.boardType = boardType; - } + public UICalibrationData( + int count, + int videoModeIndex, + int minCount, + boolean hasEnough, + double squareSizeIn, + int patternWidth, + int patternHeight, + BoardType boardType) { + this.count = count; + this.minCount = minCount; + this.videoModeIndex = videoModeIndex; + this.hasEnough = hasEnough; + this.squareSizeIn = squareSizeIn; + this.patternWidth = patternWidth; + this.patternHeight = patternHeight; + 
this.boardType = boardType; + } - public enum BoardType { - CHESSBOARD, - DOTBOARD - } + public enum BoardType { + CHESSBOARD, + DOTBOARD + } - public static UICalibrationData fromMap(Map map) { - return new UICalibrationData( - ((Number) map.get("count")).intValue(), - ((Number) map.get("videoModeIndex")).intValue(), - ((Number) map.get("minCount")).intValue(), - (boolean) map.get("hasEnough"), - ((Number) map.get("squareSizeIn")).doubleValue(), - ((Number) map.get("patternWidth")).intValue(), - ((Number) map.get("patternHeight")).intValue(), - BoardType.values()[(int) map.get("boardType")]); - } + public static UICalibrationData fromMap(Map map) { + return new UICalibrationData( + ((Number) map.get("count")).intValue(), + ((Number) map.get("videoModeIndex")).intValue(), + ((Number) map.get("minCount")).intValue(), + (boolean) map.get("hasEnough"), + ((Number) map.get("squareSizeIn")).doubleValue(), + ((Number) map.get("patternWidth")).intValue(), + ((Number) map.get("patternHeight")).intValue(), + BoardType.values()[(int) map.get("boardType")]); + } - @Override - public String toString() { - return "UICalibrationData{" - + "videoModeIndex=" - + videoModeIndex - + ", count=" - + count - + ", minCount=" - + minCount - + ", hasEnough=" - + hasEnough - + ", squareSizeIn=" - + squareSizeIn - + ", patternWidth=" - + patternWidth - + ", patternHeight=" - + patternHeight - + ", boardType=" - + boardType - + '}'; - } + @Override + public String toString() { + return "UICalibrationData{" + + "videoModeIndex=" + + videoModeIndex + + ", count=" + + count + + ", minCount=" + + minCount + + ", hasEnough=" + + hasEnough + + ", squareSizeIn=" + + squareSizeIn + + ", patternWidth=" + + patternWidth + + ", patternHeight=" + + patternHeight + + ", boardType=" + + boardType + + '}'; + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipeline/result/BytePackable.java b/photon-core/src/main/java/org/photonvision/vision/pipeline/result/BytePackable.java index 
c729d35f65..dd600990d3 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipeline/result/BytePackable.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipeline/result/BytePackable.java @@ -19,123 +19,123 @@ @SuppressWarnings("PointlessBitwiseExpression") public abstract class BytePackable { - public abstract byte[] toByteArray(); - - public abstract void fromByteArray(byte[] src); - - protected int bufferPosition = 0; - - public int getBufferPosition() { - return bufferPosition; - } - - public void resetBufferPosition() { - bufferPosition = 0; - } - - protected void bufferData(byte[] src, byte[] dest) { - System.arraycopy(src, 0, dest, bufferPosition, src.length); - bufferPosition += src.length; - } - - protected void bufferData(byte src, byte[] dest) { - System.arraycopy(new byte[] {src}, 0, dest, bufferPosition, 1); - bufferPosition++; - } - - protected void bufferData(int src, byte[] dest) { - System.arraycopy(toBytes(src), 0, dest, bufferPosition, Integer.BYTES); - bufferPosition += Integer.BYTES; - } - - protected void bufferData(double src, byte[] dest) { - System.arraycopy(toBytes(src), 0, dest, bufferPosition, Double.BYTES); - bufferPosition += Double.BYTES; - } - - protected void bufferData(boolean src, byte[] dest) { - System.arraycopy(toBytes(src), 0, dest, bufferPosition, 1); - bufferPosition += 1; - } - - protected boolean unbufferBoolean(byte[] src) { - return toBoolean(src, bufferPosition++); - } - - protected byte unbufferByte(byte[] src) { - var value = src[bufferPosition]; - bufferPosition++; - return value; - } - - protected byte[] unbufferBytes(byte[] src, int len) { - var bytes = new byte[len]; - System.arraycopy(src, bufferPosition, bytes, 0, len); - return bytes; - } - - protected int unbufferInt(byte[] src) { - var value = toInt(src, bufferPosition); - bufferPosition += Integer.BYTES; - return value; - } - - protected double unbufferDouble(byte[] src) { - var value = toDouble(src, bufferPosition); - bufferPosition += 
Double.BYTES; - return value; - } - - private static boolean toBoolean(byte[] src, int pos) { - return src[pos] != 0; - } - - private static int toInt(byte[] src, int pos) { - return (0xff & src[pos]) << 24 - | (0xff & src[pos + 1]) << 16 - | (0xff & src[pos + 2]) << 8 - | (0xff & src[pos + 3]) << 0; - } - - private static long toLong(byte[] src, int pos) { - return (long) (0xff & src[pos]) << 56 - | (long) (0xff & src[pos + 1]) << 48 - | (long) (0xff & src[pos + 2]) << 40 - | (long) (0xff & src[pos + 3]) << 32 - | (long) (0xff & src[pos + 4]) << 24 - | (long) (0xff & src[pos + 5]) << 16 - | (long) (0xff & src[pos + 6]) << 8 - | (long) (0xff & src[pos + 7]) << 0; - } - - private static double toDouble(byte[] src, int pos) { - return Double.longBitsToDouble(toLong(src, pos)); - } - - protected byte[] toBytes(double value) { - long data = Double.doubleToRawLongBits(value); - return new byte[] { - (byte) ((data >> 56) & 0xff), - (byte) ((data >> 48) & 0xff), - (byte) ((data >> 40) & 0xff), - (byte) ((data >> 32) & 0xff), - (byte) ((data >> 24) & 0xff), - (byte) ((data >> 16) & 0xff), - (byte) ((data >> 8) & 0xff), - (byte) ((data >> 0) & 0xff), - }; - } - - protected byte[] toBytes(int value) { - return new byte[] { - (byte) ((value >> 24) & 0xff), - (byte) ((value >> 16) & 0xff), - (byte) ((value >> 8) & 0xff), - (byte) ((value >> 0) & 0xff) - }; - } - - protected byte[] toBytes(boolean value) { - return new byte[] {(byte) (value ? 
1 : 0)}; - } + public abstract byte[] toByteArray(); + + public abstract void fromByteArray(byte[] src); + + protected int bufferPosition = 0; + + public int getBufferPosition() { + return bufferPosition; + } + + public void resetBufferPosition() { + bufferPosition = 0; + } + + protected void bufferData(byte[] src, byte[] dest) { + System.arraycopy(src, 0, dest, bufferPosition, src.length); + bufferPosition += src.length; + } + + protected void bufferData(byte src, byte[] dest) { + System.arraycopy(new byte[] {src}, 0, dest, bufferPosition, 1); + bufferPosition++; + } + + protected void bufferData(int src, byte[] dest) { + System.arraycopy(toBytes(src), 0, dest, bufferPosition, Integer.BYTES); + bufferPosition += Integer.BYTES; + } + + protected void bufferData(double src, byte[] dest) { + System.arraycopy(toBytes(src), 0, dest, bufferPosition, Double.BYTES); + bufferPosition += Double.BYTES; + } + + protected void bufferData(boolean src, byte[] dest) { + System.arraycopy(toBytes(src), 0, dest, bufferPosition, 1); + bufferPosition += 1; + } + + protected boolean unbufferBoolean(byte[] src) { + return toBoolean(src, bufferPosition++); + } + + protected byte unbufferByte(byte[] src) { + var value = src[bufferPosition]; + bufferPosition++; + return value; + } + + protected byte[] unbufferBytes(byte[] src, int len) { + var bytes = new byte[len]; + System.arraycopy(src, bufferPosition, bytes, 0, len); + return bytes; + } + + protected int unbufferInt(byte[] src) { + var value = toInt(src, bufferPosition); + bufferPosition += Integer.BYTES; + return value; + } + + protected double unbufferDouble(byte[] src) { + var value = toDouble(src, bufferPosition); + bufferPosition += Double.BYTES; + return value; + } + + private static boolean toBoolean(byte[] src, int pos) { + return src[pos] != 0; + } + + private static int toInt(byte[] src, int pos) { + return (0xff & src[pos]) << 24 + | (0xff & src[pos + 1]) << 16 + | (0xff & src[pos + 2]) << 8 + | (0xff & src[pos + 3]) << 0; + 
} + + private static long toLong(byte[] src, int pos) { + return (long) (0xff & src[pos]) << 56 + | (long) (0xff & src[pos + 1]) << 48 + | (long) (0xff & src[pos + 2]) << 40 + | (long) (0xff & src[pos + 3]) << 32 + | (long) (0xff & src[pos + 4]) << 24 + | (long) (0xff & src[pos + 5]) << 16 + | (long) (0xff & src[pos + 6]) << 8 + | (long) (0xff & src[pos + 7]) << 0; + } + + private static double toDouble(byte[] src, int pos) { + return Double.longBitsToDouble(toLong(src, pos)); + } + + protected byte[] toBytes(double value) { + long data = Double.doubleToRawLongBits(value); + return new byte[] { + (byte) ((data >> 56) & 0xff), + (byte) ((data >> 48) & 0xff), + (byte) ((data >> 40) & 0xff), + (byte) ((data >> 32) & 0xff), + (byte) ((data >> 24) & 0xff), + (byte) ((data >> 16) & 0xff), + (byte) ((data >> 8) & 0xff), + (byte) ((data >> 0) & 0xff), + }; + } + + protected byte[] toBytes(int value) { + return new byte[] { + (byte) ((value >> 24) & 0xff), + (byte) ((value >> 16) & 0xff), + (byte) ((value >> 8) & 0xff), + (byte) ((value >> 0) & 0xff) + }; + } + + protected byte[] toBytes(boolean value) { + return new byte[] {(byte) (value ? 
1 : 0)}; + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipeline/result/CVPipelineResult.java b/photon-core/src/main/java/org/photonvision/vision/pipeline/result/CVPipelineResult.java index 5993cc9531..000611461e 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipeline/result/CVPipelineResult.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipeline/result/CVPipelineResult.java @@ -26,71 +26,71 @@ import org.photonvision.vision.target.TrackedTarget; public class CVPipelineResult implements Releasable { - private long imageCaptureTimestampNanos; - public final double processingNanos; - public final double fps; - public final List targets; - public final Frame inputAndOutputFrame; - public MultiTargetPNPResults multiTagResult; + private long imageCaptureTimestampNanos; + public final double processingNanos; + public final double fps; + public final List targets; + public final Frame inputAndOutputFrame; + public MultiTargetPNPResults multiTagResult; - public CVPipelineResult( - double processingNanos, double fps, List targets, Frame inputFrame) { - this(processingNanos, fps, targets, new MultiTargetPNPResults(), inputFrame); - } + public CVPipelineResult( + double processingNanos, double fps, List targets, Frame inputFrame) { + this(processingNanos, fps, targets, new MultiTargetPNPResults(), inputFrame); + } - public CVPipelineResult( - double processingNanos, - double fps, - List targets, - MultiTargetPNPResults multiTagResults, - Frame inputFrame) { - this.processingNanos = processingNanos; - this.fps = fps; - this.targets = targets != null ? targets : Collections.emptyList(); - this.multiTagResult = multiTagResults; + public CVPipelineResult( + double processingNanos, + double fps, + List targets, + MultiTargetPNPResults multiTagResults, + Frame inputFrame) { + this.processingNanos = processingNanos; + this.fps = fps; + this.targets = targets != null ? 
targets : Collections.emptyList(); + this.multiTagResult = multiTagResults; - this.inputAndOutputFrame = inputFrame; - } + this.inputAndOutputFrame = inputFrame; + } - public CVPipelineResult( - double processingNanos, - double fps, - List targets, - MultiTargetPNPResults multiTagResults) { - this(processingNanos, fps, targets, multiTagResults, null); - } + public CVPipelineResult( + double processingNanos, + double fps, + List targets, + MultiTargetPNPResults multiTagResults) { + this(processingNanos, fps, targets, multiTagResults, null); + } - public boolean hasTargets() { - return !targets.isEmpty(); - } + public boolean hasTargets() { + return !targets.isEmpty(); + } - public void release() { - for (TrackedTarget tt : targets) { - tt.release(); - } - if (inputAndOutputFrame != null) inputAndOutputFrame.release(); + public void release() { + for (TrackedTarget tt : targets) { + tt.release(); } + if (inputAndOutputFrame != null) inputAndOutputFrame.release(); + } - /** - * Get the latency between now (wpi::Now) and the time at which the image was captured. FOOTGUN: - * the latency is relative to the time at which this method is called. Waiting to call this method - * will change the latency this method returns. - */ - @Deprecated - public double getLatencyMillis() { - var now = MathUtils.wpiNanoTime(); - return MathUtils.nanosToMillis(now - imageCaptureTimestampNanos); - } + /** + * Get the latency between now (wpi::Now) and the time at which the image was captured. FOOTGUN: + * the latency is relative to the time at which this method is called. Waiting to call this method + * will change the latency this method returns. 
+ */ + @Deprecated + public double getLatencyMillis() { + var now = MathUtils.wpiNanoTime(); + return MathUtils.nanosToMillis(now - imageCaptureTimestampNanos); + } - public double getProcessingMillis() { - return MathUtils.nanosToMillis(processingNanos); - } + public double getProcessingMillis() { + return MathUtils.nanosToMillis(processingNanos); + } - public long getImageCaptureTimestampNanos() { - return imageCaptureTimestampNanos; - } + public long getImageCaptureTimestampNanos() { + return imageCaptureTimestampNanos; + } - public void setImageCaptureTimestampNanos(long imageCaptureTimestampNanos) { - this.imageCaptureTimestampNanos = imageCaptureTimestampNanos; - } + public void setImageCaptureTimestampNanos(long imageCaptureTimestampNanos) { + this.imageCaptureTimestampNanos = imageCaptureTimestampNanos; + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipeline/result/DriverModePipelineResult.java b/photon-core/src/main/java/org/photonvision/vision/pipeline/result/DriverModePipelineResult.java index 609a3f4483..c87f0b746e 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipeline/result/DriverModePipelineResult.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipeline/result/DriverModePipelineResult.java @@ -21,7 +21,7 @@ import org.photonvision.vision.frame.Frame; public class DriverModePipelineResult extends CVPipelineResult { - public DriverModePipelineResult(double latencyNanos, double fps, Frame outputFrame) { - super(latencyNanos, fps, List.of(), outputFrame); - } + public DriverModePipelineResult(double latencyNanos, double fps, Frame outputFrame) { + super(latencyNanos, fps, List.of(), outputFrame); + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/processes/Data.java b/photon-core/src/main/java/org/photonvision/vision/processes/Data.java index c146a0e275..b4f6e3d8f5 100644 --- a/photon-core/src/main/java/org/photonvision/vision/processes/Data.java +++ 
b/photon-core/src/main/java/org/photonvision/vision/processes/Data.java @@ -22,10 +22,10 @@ // TODO replace with CTT's data class public class Data implements Releasable { - public CVPipelineResult result; + public CVPipelineResult result; - @Override - public void release() { - result.release(); - } + @Override + public void release() { + result.release(); + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/processes/PipelineManager.java b/photon-core/src/main/java/org/photonvision/vision/processes/PipelineManager.java index e4f6df3a97..76932ae277 100644 --- a/photon-core/src/main/java/org/photonvision/vision/processes/PipelineManager.java +++ b/photon-core/src/main/java/org/photonvision/vision/processes/PipelineManager.java @@ -31,420 +31,420 @@ @SuppressWarnings({"rawtypes", "unused"}) public class PipelineManager { - private static final Logger logger = new Logger(PipelineManager.class, LogGroup.VisionModule); - - public static final int DRIVERMODE_INDEX = -1; - public static final int CAL_3D_INDEX = -2; - - protected final List userPipelineSettings; - protected final Calibrate3dPipeline calibration3dPipeline = new Calibrate3dPipeline(); - protected final DriverModePipeline driverModePipeline = new DriverModePipeline(); - - /** Index of the currently active pipeline. Defaults to 0. */ - private int currentPipelineIndex = 0; - - /** The currently active pipeline. */ - private CVPipeline currentUserPipeline = driverModePipeline; - - /** - * Index of the last active user-created pipeline.
- *
- * Used only when switching from any of the built-in pipelines back to a user-created pipeline. - */ - private int lastUserPipelineIdx; - - /** - * Creates a PipelineManager with a DriverModePipeline, a Calibration3dPipeline, and all provided - * pipelines. - * - * @param userPipelines Pipelines to add to the manager. - */ - public PipelineManager( - DriverModePipelineSettings driverSettings, List userPipelines) { - this.userPipelineSettings = new ArrayList<>(userPipelines); - // This is to respect the default res idx for vendor cameras - - this.driverModePipeline.setSettings(driverSettings); - - if (userPipelines.isEmpty()) addPipeline(PipelineType.Reflective); + private static final Logger logger = new Logger(PipelineManager.class, LogGroup.VisionModule); + + public static final int DRIVERMODE_INDEX = -1; + public static final int CAL_3D_INDEX = -2; + + protected final List userPipelineSettings; + protected final Calibrate3dPipeline calibration3dPipeline = new Calibrate3dPipeline(); + protected final DriverModePipeline driverModePipeline = new DriverModePipeline(); + + /** Index of the currently active pipeline. Defaults to 0. */ + private int currentPipelineIndex = 0; + + /** The currently active pipeline. */ + private CVPipeline currentUserPipeline = driverModePipeline; + + /** + * Index of the last active user-created pipeline.
+ *
+ * Used only when switching from any of the built-in pipelines back to a user-created pipeline. + */ + private int lastUserPipelineIdx; + + /** + * Creates a PipelineManager with a DriverModePipeline, a Calibration3dPipeline, and all provided + * pipelines. + * + * @param userPipelines Pipelines to add to the manager. + */ + public PipelineManager( + DriverModePipelineSettings driverSettings, List userPipelines) { + this.userPipelineSettings = new ArrayList<>(userPipelines); + // This is to respect the default res idx for vendor cameras + + this.driverModePipeline.setSettings(driverSettings); + + if (userPipelines.isEmpty()) addPipeline(PipelineType.Reflective); + } + + public PipelineManager(CameraConfiguration config) { + this(config.driveModeSettings, config.pipelineSettings); + } + + /** + * Get the settings for a pipeline by index. + * + * @param index Index of pipeline whose settings need getting. + * @return The gotten settings of the pipeline whose index was provided. + */ + public CVPipelineSettings getPipelineSettings(int index) { + if (index < 0) { + switch (index) { + case DRIVERMODE_INDEX: + return driverModePipeline.getSettings(); + case CAL_3D_INDEX: + return calibration3dPipeline.getSettings(); + } } - public PipelineManager(CameraConfiguration config) { - this(config.driveModeSettings, config.pipelineSettings); + for (var setting : userPipelineSettings) { + if (setting.pipelineIndex == index) return setting; } - - /** - * Get the settings for a pipeline by index. - * - * @param index Index of pipeline whose settings need getting. - * @return The gotten settings of the pipeline whose index was provided. 
- */ - public CVPipelineSettings getPipelineSettings(int index) { - if (index < 0) { - switch (index) { - case DRIVERMODE_INDEX: - return driverModePipeline.getSettings(); - case CAL_3D_INDEX: - return calibration3dPipeline.getSettings(); - } - } - - for (var setting : userPipelineSettings) { - if (setting.pipelineIndex == index) return setting; - } - return null; + return null; + } + + /** + * Get the settings for a pipeline by index. + * + * @param index Index of pipeline whose nickname needs getting. + * @return the nickname of the pipeline whose index was provided. + */ + public String getPipelineNickname(int index) { + if (index < 0) { + switch (index) { + case DRIVERMODE_INDEX: + return driverModePipeline.getSettings().pipelineNickname; + case CAL_3D_INDEX: + return calibration3dPipeline.getSettings().pipelineNickname; + } } - /** - * Get the settings for a pipeline by index. - * - * @param index Index of pipeline whose nickname needs getting. - * @return the nickname of the pipeline whose index was provided. 
- */ - public String getPipelineNickname(int index) { - if (index < 0) { - switch (index) { - case DRIVERMODE_INDEX: - return driverModePipeline.getSettings().pipelineNickname; - case CAL_3D_INDEX: - return calibration3dPipeline.getSettings().pipelineNickname; - } - } - - for (var setting : userPipelineSettings) { - if (setting.pipelineIndex == index) return setting.pipelineNickname; - } - return null; + for (var setting : userPipelineSettings) { + if (setting.pipelineIndex == index) return setting.pipelineNickname; } - - /** - * Gets a list of nicknames for all user pipelines - * - * @return The list of nicknames for all user pipelines - */ - public List getPipelineNicknames() { - List ret = new ArrayList<>(); - for (var p : userPipelineSettings) { - ret.add(p.pipelineNickname); - } - return ret; + return null; + } + + /** + * Gets a list of nicknames for all user pipelines + * + * @return The list of nicknames for all user pipelines + */ + public List getPipelineNicknames() { + List ret = new ArrayList<>(); + for (var p : userPipelineSettings) { + ret.add(p.pipelineNickname); } - - /** - * Gets the index of the currently active pipeline - * - * @return The index of the currently active pipeline - */ - public int getCurrentPipelineIndex() { - return currentPipelineIndex; + return ret; + } + + /** + * Gets the index of the currently active pipeline + * + * @return The index of the currently active pipeline + */ + public int getCurrentPipelineIndex() { + return currentPipelineIndex; + } + + /** + * Get the currently active pipeline. + * + * @return The currently active pipeline. + */ + public CVPipeline getCurrentPipeline() { + if (currentPipelineIndex < 0) { + switch (currentPipelineIndex) { + case CAL_3D_INDEX: + return calibration3dPipeline; + case DRIVERMODE_INDEX: + return driverModePipeline; + } } - /** - * Get the currently active pipeline. - * - * @return The currently active pipeline. 
- */ - public CVPipeline getCurrentPipeline() { - if (currentPipelineIndex < 0) { - switch (currentPipelineIndex) { - case CAL_3D_INDEX: - return calibration3dPipeline; - case DRIVERMODE_INDEX: - return driverModePipeline; - } - } - - // Just return the current user pipeline, we're not on aa built-in one - return currentUserPipeline; + // Just return the current user pipeline, we're not on aa built-in one + return currentUserPipeline; + } + + /** + * Get the currently active pipelines settings + * + * @return The currently active pipelines settings + */ + public CVPipelineSettings getCurrentPipelineSettings() { + return getPipelineSettings(currentPipelineIndex); + } + + /** + * Internal method for setting the active pipeline.
+ *
+ * All externally accessible methods that intend to change the active pipeline MUST go through + * here to ensure all proper steps are taken. + * + * @param newIndex Index of pipeline to be active + */ + private void setPipelineInternal(int newIndex) { + if (newIndex < 0 && currentPipelineIndex >= 0) { + // Transitioning to a built-in pipe, save off the current user one + lastUserPipelineIdx = currentPipelineIndex; } - /** - * Get the currently active pipelines settings - * - * @return The currently active pipelines settings - */ - public CVPipelineSettings getCurrentPipelineSettings() { - return getPipelineSettings(currentPipelineIndex); + if (userPipelineSettings.size() - 1 < newIndex) { + logger.warn("User attempted to set index to non-existent pipeline!"); + return; } - /** - * Internal method for setting the active pipeline.
- *
- * All externally accessible methods that intend to change the active pipeline MUST go through - * here to ensure all proper steps are taken. - * - * @param newIndex Index of pipeline to be active - */ - private void setPipelineInternal(int newIndex) { - if (newIndex < 0 && currentPipelineIndex >= 0) { - // Transitioning to a built-in pipe, save off the current user one - lastUserPipelineIdx = currentPipelineIndex; - } - - if (userPipelineSettings.size() - 1 < newIndex) { - logger.warn("User attempted to set index to non-existent pipeline!"); - return; - } - - currentPipelineIndex = newIndex; - if (newIndex >= 0) { - var desiredPipelineSettings = userPipelineSettings.get(currentPipelineIndex); - switch (desiredPipelineSettings.pipelineType) { - case Reflective: - logger.debug("Creating Reflective pipeline"); - currentUserPipeline = - new ReflectivePipeline((ReflectivePipelineSettings) desiredPipelineSettings); - break; - case ColoredShape: - logger.debug("Creating ColoredShape pipeline"); - currentUserPipeline = - new ColoredShapePipeline((ColoredShapePipelineSettings) desiredPipelineSettings); - break; - case AprilTag: - logger.debug("Creating AprilTag pipeline"); - currentUserPipeline = - new AprilTagPipeline((AprilTagPipelineSettings) desiredPipelineSettings); - break; - - case Aruco: - logger.debug("Creating Aruco Pipeline"); - currentUserPipeline = new ArucoPipeline((ArucoPipelineSettings) desiredPipelineSettings); - break; - default: - // Can be calib3d or drivermode, both of which are special cases - break; - } - } - - DataChangeService.getInstance() - .publishEvent( - new OutgoingUIEvent<>( - "fullsettings", ConfigManager.getInstance().getConfig().toHashMap())); + currentPipelineIndex = newIndex; + if (newIndex >= 0) { + var desiredPipelineSettings = userPipelineSettings.get(currentPipelineIndex); + switch (desiredPipelineSettings.pipelineType) { + case Reflective: + logger.debug("Creating Reflective pipeline"); + currentUserPipeline = + new 
ReflectivePipeline((ReflectivePipelineSettings) desiredPipelineSettings); + break; + case ColoredShape: + logger.debug("Creating ColoredShape pipeline"); + currentUserPipeline = + new ColoredShapePipeline((ColoredShapePipelineSettings) desiredPipelineSettings); + break; + case AprilTag: + logger.debug("Creating AprilTag pipeline"); + currentUserPipeline = + new AprilTagPipeline((AprilTagPipelineSettings) desiredPipelineSettings); + break; + + case Aruco: + logger.debug("Creating Aruco Pipeline"); + currentUserPipeline = new ArucoPipeline((ArucoPipelineSettings) desiredPipelineSettings); + break; + default: + // Can be calib3d or drivermode, both of which are special cases + break; + } } - /** - * Enters or exits calibration mode based on the parameter.
- *
- * Exiting returns to the last used user pipeline. - * - * @param wantsCalibration True to enter calibration mode, false to exit calibration mode. - */ - public void setCalibrationMode(boolean wantsCalibration) { - if (!wantsCalibration) calibration3dPipeline.finishCalibration(); - setPipelineInternal(wantsCalibration ? CAL_3D_INDEX : lastUserPipelineIdx); + DataChangeService.getInstance() + .publishEvent( + new OutgoingUIEvent<>( + "fullsettings", ConfigManager.getInstance().getConfig().toHashMap())); + } + + /** + * Enters or exits calibration mode based on the parameter.
+ *
+ * Exiting returns to the last used user pipeline. + * + * @param wantsCalibration True to enter calibration mode, false to exit calibration mode. + */ + public void setCalibrationMode(boolean wantsCalibration) { + if (!wantsCalibration) calibration3dPipeline.finishCalibration(); + setPipelineInternal(wantsCalibration ? CAL_3D_INDEX : lastUserPipelineIdx); + } + + /** + * Enters or exits driver mode based on the parameter.
+ *
+ * Exiting returns to the last used user pipeline. + * + * @param state True to enter driver mode, false to exit driver mode. + */ + public void setDriverMode(boolean state) { + setPipelineInternal(state ? DRIVERMODE_INDEX : lastUserPipelineIdx); + } + + /** + * Returns whether driver mode is active. + * + * @return Whether driver mode is active. + */ + public boolean getDriverMode() { + return currentPipelineIndex == DRIVERMODE_INDEX; + } + + public static final Comparator PipelineSettingsIndexComparator = + Comparator.comparingInt(o -> o.pipelineIndex); + + /** + * Sorts the pipeline list by index, and reassigns their indexes to match the new order.
+ *
+ * I don't like this, but I have no other ideas, and it works so + */ + private void reassignIndexes() { + userPipelineSettings.sort(PipelineSettingsIndexComparator); + for (int i = 0; i < userPipelineSettings.size(); i++) { + userPipelineSettings.get(i).pipelineIndex = i; } + } - /** - * Enters or exits driver mode based on the parameter.
- *
- * Exiting returns to the last used user pipeline. - * - * @param state True to enter driver mode, false to exit driver mode. - */ - public void setDriverMode(boolean state) { - setPipelineInternal(state ? DRIVERMODE_INDEX : lastUserPipelineIdx); - } + public CVPipelineSettings addPipeline(PipelineType type) { + return addPipeline(type, "New Pipeline"); + } - /** - * Returns whether driver mode is active. - * - * @return Whether driver mode is active. - */ - public boolean getDriverMode() { - return currentPipelineIndex == DRIVERMODE_INDEX; + public CVPipelineSettings addPipeline(PipelineType type, String nickname) { + var added = createSettingsForType(type, nickname); + if (added == null) { + logger.error("Cannot add null pipeline!"); + return null; } - - public static final Comparator PipelineSettingsIndexComparator = - Comparator.comparingInt(o -> o.pipelineIndex); - - /** - * Sorts the pipeline list by index, and reassigns their indexes to match the new order.
- *
- * I don't like this, but I have no other ideas, and it works so - */ - private void reassignIndexes() { - userPipelineSettings.sort(PipelineSettingsIndexComparator); - for (int i = 0; i < userPipelineSettings.size(); i++) { - userPipelineSettings.get(i).pipelineIndex = i; + addPipelineInternal(added); + reassignIndexes(); + return added; + } + + private CVPipelineSettings createSettingsForType(PipelineType type, String nickname) { + CVPipelineSettings newSettings; + switch (type) { + case Reflective: + { + var added = new ReflectivePipelineSettings(); + added.pipelineNickname = nickname; + return added; } - } - - public CVPipelineSettings addPipeline(PipelineType type) { - return addPipeline(type, "New Pipeline"); - } - - public CVPipelineSettings addPipeline(PipelineType type, String nickname) { - var added = createSettingsForType(type, nickname); - if (added == null) { - logger.error("Cannot add null pipeline!"); - return null; + case ColoredShape: + { + var added = new ColoredShapePipelineSettings(); + added.pipelineNickname = nickname; + return added; } - addPipelineInternal(added); - reassignIndexes(); - return added; - } - - private CVPipelineSettings createSettingsForType(PipelineType type, String nickname) { - CVPipelineSettings newSettings; - switch (type) { - case Reflective: - { - var added = new ReflectivePipelineSettings(); - added.pipelineNickname = nickname; - return added; - } - case ColoredShape: - { - var added = new ColoredShapePipelineSettings(); - added.pipelineNickname = nickname; - return added; - } - case AprilTag: - { - var added = new AprilTagPipelineSettings(); - added.pipelineNickname = nickname; - return added; - } - case Aruco: - { - var added = new ArucoPipelineSettings(); - added.pipelineNickname = nickname; - return added; - } - default: - { - logger.error("Got invalid pipeline type: " + type); - return null; - } + case AprilTag: + { + var added = new AprilTagPipelineSettings(); + added.pipelineNickname = nickname; + return added; 
+ } + case Aruco: + { + var added = new ArucoPipelineSettings(); + added.pipelineNickname = nickname; + return added; + } + default: + { + logger.error("Got invalid pipeline type: " + type); + return null; } } - - private void addPipelineInternal(CVPipelineSettings settings) { - settings.pipelineIndex = userPipelineSettings.size(); - userPipelineSettings.add(settings); - reassignIndexes(); - } - - /** - * Remove a pipeline settings at the given index and return the new current index - * - * @param index The idx to remove - */ - private int removePipelineInternal(int index) { - userPipelineSettings.remove(index); - currentPipelineIndex = Math.min(index, userPipelineSettings.size() - 1); - reassignIndexes(); - return currentPipelineIndex; - } - - public void setIndex(int index) { - this.setPipelineInternal(index); + } + + private void addPipelineInternal(CVPipelineSettings settings) { + settings.pipelineIndex = userPipelineSettings.size(); + userPipelineSettings.add(settings); + reassignIndexes(); + } + + /** + * Remove a pipeline settings at the given index and return the new current index + * + * @param index The idx to remove + */ + private int removePipelineInternal(int index) { + userPipelineSettings.remove(index); + currentPipelineIndex = Math.min(index, userPipelineSettings.size() - 1); + reassignIndexes(); + return currentPipelineIndex; + } + + public void setIndex(int index) { + this.setPipelineInternal(index); + } + + public int removePipeline(int index) { + if (index < 0) { + return currentPipelineIndex; } - - public int removePipeline(int index) { - if (index < 0) { - return currentPipelineIndex; + // TODO should we block/lock on a mutex? 
+ return removePipelineInternal(index); + } + + public void renameCurrentPipeline(String newName) { + getCurrentPipelineSettings().pipelineNickname = newName; + } + + /** + * Duplicate a pipeline at a given index + * + * @param index the index of the target pipeline + * @return The new index + */ + public int duplicatePipeline(int index) { + var settings = userPipelineSettings.get(index); + var newSettings = settings.clone(); + newSettings.pipelineNickname = + createUniqueName(settings.pipelineNickname, userPipelineSettings); + newSettings.pipelineIndex = Integer.MAX_VALUE; + logger.debug("Duplicating pipe " + index + " to " + newSettings.pipelineNickname); + userPipelineSettings.add(newSettings); + reassignIndexes(); + + // Now we look for the index of the new pipeline and return it + return userPipelineSettings.indexOf(newSettings); + } + + private static String createUniqueName( + String nickname, List existingSettings) { + StringBuilder uniqueName = new StringBuilder(nickname); + while (true) { + String finalUniqueName = uniqueName.toString(); // To get around lambda capture + var conflictingName = + existingSettings.stream().anyMatch(it -> it.pipelineNickname.equals(finalUniqueName)); + + if (!conflictingName) { + // If no conflict, we're done + return uniqueName.toString(); + } else { + // Otherwise, we need to add a suffix to the name + // If the string doesn't already end in "([0-9]*)", we'll add it + // If it does, we'll increment the number in the suffix + + if (uniqueName.toString().matches(".*\\([0-9]*\\)")) { + // Because java strings are immutable, we have to do this curstedness + // This is like doing "New pipeline (" + 2 + ")" + + var parenStart = uniqueName.toString().lastIndexOf('('); + var parenEnd = uniqueName.length() - 1; + var number = Integer.parseInt(uniqueName.substring(parenStart + 1, parenEnd)) + 1; + + uniqueName = new StringBuilder(uniqueName.substring(0, parenStart + 1) + number + ")"); + } else { + uniqueName.append(" (1)"); } - // 
TODO should we block/lock on a mutex? - return removePipelineInternal(index); + } } - - public void renameCurrentPipeline(String newName) { - getCurrentPipelineSettings().pipelineNickname = newName; - } - - /** - * Duplicate a pipeline at a given index - * - * @param index the index of the target pipeline - * @return The new index - */ - public int duplicatePipeline(int index) { - var settings = userPipelineSettings.get(index); - var newSettings = settings.clone(); - newSettings.pipelineNickname = - createUniqueName(settings.pipelineNickname, userPipelineSettings); - newSettings.pipelineIndex = Integer.MAX_VALUE; - logger.debug("Duplicating pipe " + index + " to " + newSettings.pipelineNickname); - userPipelineSettings.add(newSettings); - reassignIndexes(); - - // Now we look for the index of the new pipeline and return it - return userPipelineSettings.indexOf(newSettings); + } + + public void changePipelineType(int newType) { + // Find the PipelineType proposed + // To do this we look at all the PipelineType entries and look for one with matching + // base indexes + PipelineType type = + Arrays.stream(PipelineType.values()) + .filter(it -> it.baseIndex == newType) + .findAny() + .orElse(null); + if (type == null) { + logger.error("Could not match type " + newType + " to a PipelineType!"); + return; } - private static String createUniqueName( - String nickname, List existingSettings) { - StringBuilder uniqueName = new StringBuilder(nickname); - while (true) { - String finalUniqueName = uniqueName.toString(); // To get around lambda capture - var conflictingName = - existingSettings.stream().anyMatch(it -> it.pipelineNickname.equals(finalUniqueName)); - - if (!conflictingName) { - // If no conflict, we're done - return uniqueName.toString(); - } else { - // Otherwise, we need to add a suffix to the name - // If the string doesn't already end in "([0-9]*)", we'll add it - // If it does, we'll increment the number in the suffix - - if 
(uniqueName.toString().matches(".*\\([0-9]*\\)")) { - // Because java strings are immutable, we have to do this curstedness - // This is like doing "New pipeline (" + 2 + ")" - - var parenStart = uniqueName.toString().lastIndexOf('('); - var parenEnd = uniqueName.length() - 1; - var number = Integer.parseInt(uniqueName.substring(parenStart + 1, parenEnd)) + 1; - - uniqueName = new StringBuilder(uniqueName.substring(0, parenStart + 1) + number + ")"); - } else { - uniqueName.append(" (1)"); - } - } - } + if (type.baseIndex == getCurrentPipelineSettings().pipelineType.baseIndex) { + logger.debug( + "Not changing settings as " + + type + + " and " + + getCurrentPipelineSettings().pipelineType + + " are identical!"); + return; } - public void changePipelineType(int newType) { - // Find the PipelineType proposed - // To do this we look at all the PipelineType entries and look for one with matching - // base indexes - PipelineType type = - Arrays.stream(PipelineType.values()) - .filter(it -> it.baseIndex == newType) - .findAny() - .orElse(null); - if (type == null) { - logger.error("Could not match type " + newType + " to a PipelineType!"); - return; - } + // Our new settings will be totally nuked, but that's ok + // We *could* set things in common between the two, if we want + // But they're different enough it shouldn't be an issue + var name = getCurrentPipelineSettings().pipelineNickname; + var newSettings = createSettingsForType(type, name); - if (type.baseIndex == getCurrentPipelineSettings().pipelineType.baseIndex) { - logger.debug( - "Not changing settings as " - + type - + " and " - + getCurrentPipelineSettings().pipelineType - + " are identical!"); - return; - } - - // Our new settings will be totally nuked, but that's ok - // We *could* set things in common between the two, if we want - // But they're different enough it shouldn't be an issue - var name = getCurrentPipelineSettings().pipelineNickname; - var newSettings = createSettingsForType(type, name); - - 
var idx = currentPipelineIndex; - if (idx < 0) { - logger.error("Cannot replace non-user pipeline!"); - return; - } - - logger.info("Adding new pipe of type " + type + " at idx " + idx); - newSettings.pipelineIndex = idx; - userPipelineSettings.set(idx, newSettings); - setPipelineInternal(idx); - reassignIndexes(); + var idx = currentPipelineIndex; + if (idx < 0) { + logger.error("Cannot replace non-user pipeline!"); + return; } + + logger.info("Adding new pipe of type " + type + " at idx " + idx); + newSettings.pipelineIndex = idx; + userPipelineSettings.set(idx, newSettings); + setPipelineInternal(idx); + reassignIndexes(); + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/processes/VisionModule.java b/photon-core/src/main/java/org/photonvision/vision/processes/VisionModule.java index 75978739e7..92c137baf7 100644 --- a/photon-core/src/main/java/org/photonvision/vision/processes/VisionModule.java +++ b/photon-core/src/main/java/org/photonvision/vision/processes/VisionModule.java @@ -61,559 +61,559 @@ * provide info on settings changes. VisionModuleManager holds a list of all current vision modules. 
*/ public class VisionModule { - private final Logger logger; - protected final PipelineManager pipelineManager; - protected final VisionSource visionSource; - private final VisionRunner visionRunner; - private final StreamRunnable streamRunnable; - private final LinkedList resultConsumers = new LinkedList<>(); - // Raw result consumers run before any drawing has been done by the OutputStreamPipeline - private final LinkedList>> streamResultConsumers = - new LinkedList<>(); - private final NTDataPublisher ntConsumer; - private final UIDataPublisher uiDataConsumer; - protected final int moduleIndex; - protected final QuirkyCamera cameraQuirks; - - protected TrackedTarget lastPipelineResultBestTarget; - - private int inputStreamPort = -1; - private int outputStreamPort = -1; - - FileSaveFrameConsumer inputFrameSaver; - FileSaveFrameConsumer outputFrameSaver; - - SocketVideoStream inputVideoStreamer; - SocketVideoStream outputVideoStreamer; - - public VisionModule(PipelineManager pipelineManager, VisionSource visionSource, int index) { - logger = - new Logger( - VisionModule.class, - visionSource.getSettables().getConfiguration().nickname, - LogGroup.VisionModule); - - // Find quirks for the current camera - if (visionSource instanceof USBCameraSource) { - cameraQuirks = ((USBCameraSource) visionSource).cameraQuirks; - } else if (visionSource instanceof LibcameraGpuSource) { - cameraQuirks = QuirkyCamera.ZeroCopyPiCamera; - } else { - cameraQuirks = QuirkyCamera.DefaultCamera; - } - - // We don't show gain if the config says it's -1. 
So check here to make sure it's non-negative - // if it _is_ supported - if (cameraQuirks.hasQuirk(CameraQuirk.Gain)) { - pipelineManager.userPipelineSettings.forEach( - it -> { - if (it.cameraGain == -1) it.cameraGain = 75; // Sane default - }); - } - if (cameraQuirks.hasQuirk(CameraQuirk.AWBGain)) { - pipelineManager.userPipelineSettings.forEach( - it -> { - if (it.cameraRedGain == -1) it.cameraRedGain = 11; // Sane defaults - if (it.cameraBlueGain == -1) it.cameraBlueGain = 20; - }); - } - - this.pipelineManager = pipelineManager; - this.visionSource = visionSource; - this.visionRunner = - new VisionRunner( - this.visionSource.getFrameProvider(), - this.pipelineManager::getCurrentPipeline, - this::consumeResult, - this.cameraQuirks); - this.streamRunnable = new StreamRunnable(new OutputStreamPipeline()); - this.moduleIndex = index; - - DataChangeService.getInstance().addSubscriber(new VisionModuleChangeSubscriber(this)); - - createStreams(); - - recreateStreamResultConsumers(); - - ntConsumer = - new NTDataPublisher( - visionSource.getSettables().getConfiguration().nickname, - pipelineManager::getCurrentPipelineIndex, - this::setPipeline, - pipelineManager::getDriverMode, - this::setDriverMode); - uiDataConsumer = new UIDataPublisher(index); - addResultConsumer(ntConsumer); - addResultConsumer(uiDataConsumer); - addResultConsumer( - (result) -> - lastPipelineResultBestTarget = result.hasTargets() ? 
result.targets.get(0) : null); - - setPipeline(visionSource.getSettables().getConfiguration().currentPipelineIndex); - - // Set vendor FOV - if (isVendorCamera()) { - var fov = ConfigManager.getInstance().getConfig().getHardwareConfig().vendorFOV; - logger.info("Setting FOV of vendor camera to " + fov); - visionSource.getSettables().setFOV(fov); - } - - // Configure LED's if supported by the underlying hardware - if (HardwareManager.getInstance().visionLED != null && this.camShouldControlLEDs()) { - HardwareManager.getInstance() - .visionLED - .setPipelineModeSupplier(() -> pipelineManager.getCurrentPipelineSettings().ledMode); - setVisionLEDs(pipelineManager.getCurrentPipelineSettings().ledMode); - } - - saveAndBroadcastAll(); + private final Logger logger; + protected final PipelineManager pipelineManager; + protected final VisionSource visionSource; + private final VisionRunner visionRunner; + private final StreamRunnable streamRunnable; + private final LinkedList resultConsumers = new LinkedList<>(); + // Raw result consumers run before any drawing has been done by the OutputStreamPipeline + private final LinkedList>> streamResultConsumers = + new LinkedList<>(); + private final NTDataPublisher ntConsumer; + private final UIDataPublisher uiDataConsumer; + protected final int moduleIndex; + protected final QuirkyCamera cameraQuirks; + + protected TrackedTarget lastPipelineResultBestTarget; + + private int inputStreamPort = -1; + private int outputStreamPort = -1; + + FileSaveFrameConsumer inputFrameSaver; + FileSaveFrameConsumer outputFrameSaver; + + SocketVideoStream inputVideoStreamer; + SocketVideoStream outputVideoStreamer; + + public VisionModule(PipelineManager pipelineManager, VisionSource visionSource, int index) { + logger = + new Logger( + VisionModule.class, + visionSource.getSettables().getConfiguration().nickname, + LogGroup.VisionModule); + + // Find quirks for the current camera + if (visionSource instanceof USBCameraSource) { + cameraQuirks = 
((USBCameraSource) visionSource).cameraQuirks; + } else if (visionSource instanceof LibcameraGpuSource) { + cameraQuirks = QuirkyCamera.ZeroCopyPiCamera; + } else { + cameraQuirks = QuirkyCamera.DefaultCamera; } - private void destroyStreams() { - SocketVideoStreamManager.getInstance().removeStream(inputVideoStreamer); - SocketVideoStreamManager.getInstance().removeStream(outputVideoStreamer); + // We don't show gain if the config says it's -1. So check here to make sure it's non-negative + // if it _is_ supported + if (cameraQuirks.hasQuirk(CameraQuirk.Gain)) { + pipelineManager.userPipelineSettings.forEach( + it -> { + if (it.cameraGain == -1) it.cameraGain = 75; // Sane default + }); } - - private void createStreams() { - var camStreamIdx = visionSource.getSettables().getConfiguration().streamIndex; - // If idx = 0, we want (1181, 1182) - this.inputStreamPort = 1181 + (camStreamIdx * 2); - this.outputStreamPort = 1181 + (camStreamIdx * 2) + 1; - - inputFrameSaver = - new FileSaveFrameConsumer(visionSource.getSettables().getConfiguration().nickname, "input"); - outputFrameSaver = - new FileSaveFrameConsumer( - visionSource.getSettables().getConfiguration().nickname, "output"); - - inputVideoStreamer = new SocketVideoStream(this.inputStreamPort); - outputVideoStreamer = new SocketVideoStream(this.outputStreamPort); - SocketVideoStreamManager.getInstance().addStream(inputVideoStreamer); - SocketVideoStreamManager.getInstance().addStream(outputVideoStreamer); + if (cameraQuirks.hasQuirk(CameraQuirk.AWBGain)) { + pipelineManager.userPipelineSettings.forEach( + it -> { + if (it.cameraRedGain == -1) it.cameraRedGain = 11; // Sane defaults + if (it.cameraBlueGain == -1) it.cameraBlueGain = 20; + }); } - private void recreateStreamResultConsumers() { - streamResultConsumers.add( - (frame, tgts) -> { - if (frame != null) inputFrameSaver.accept(frame.colorImage); - }); - streamResultConsumers.add( - (frame, tgts) -> { - if (frame != null) 
outputFrameSaver.accept(frame.processedImage); - }); - streamResultConsumers.add( - (frame, tgts) -> { - if (frame != null) inputVideoStreamer.accept(frame.colorImage); - }); - streamResultConsumers.add( - (frame, tgts) -> { - if (frame != null) outputVideoStreamer.accept(frame.processedImage); - }); + this.pipelineManager = pipelineManager; + this.visionSource = visionSource; + this.visionRunner = + new VisionRunner( + this.visionSource.getFrameProvider(), + this.pipelineManager::getCurrentPipeline, + this::consumeResult, + this.cameraQuirks); + this.streamRunnable = new StreamRunnable(new OutputStreamPipeline()); + this.moduleIndex = index; + + DataChangeService.getInstance().addSubscriber(new VisionModuleChangeSubscriber(this)); + + createStreams(); + + recreateStreamResultConsumers(); + + ntConsumer = + new NTDataPublisher( + visionSource.getSettables().getConfiguration().nickname, + pipelineManager::getCurrentPipelineIndex, + this::setPipeline, + pipelineManager::getDriverMode, + this::setDriverMode); + uiDataConsumer = new UIDataPublisher(index); + addResultConsumer(ntConsumer); + addResultConsumer(uiDataConsumer); + addResultConsumer( + (result) -> + lastPipelineResultBestTarget = result.hasTargets() ? 
result.targets.get(0) : null); + + setPipeline(visionSource.getSettables().getConfiguration().currentPipelineIndex); + + // Set vendor FOV + if (isVendorCamera()) { + var fov = ConfigManager.getInstance().getConfig().getHardwareConfig().vendorFOV; + logger.info("Setting FOV of vendor camera to " + fov); + visionSource.getSettables().setFOV(fov); } - private class StreamRunnable extends Thread { - private final OutputStreamPipeline outputStreamPipeline; - - private final Object frameLock = new Object(); - private Frame latestFrame; - private AdvancedPipelineSettings settings = new AdvancedPipelineSettings(); - private List targets = new ArrayList<>(); - - private boolean shouldRun = false; - - public StreamRunnable(OutputStreamPipeline outputStreamPipeline) { - this.outputStreamPipeline = outputStreamPipeline; - } - - public void updateData( - Frame inputOutputFrame, AdvancedPipelineSettings settings, List targets) { - synchronized (frameLock) { - if (shouldRun && this.latestFrame != null) { - logger.trace("Fell behind; releasing last unused Mats"); - this.latestFrame.release(); - } - - this.latestFrame = inputOutputFrame; - this.settings = settings; - this.targets = targets; - - shouldRun = inputOutputFrame != null; - // && inputOutputFrame.colorImage != null - // && !inputOutputFrame.colorImage.getMat().empty() - // && inputOutputFrame.processedImage != null - // && !inputOutputFrame.processedImage.getMat().empty(); - } - } - - @Override - public void run() { - while (true) { - final Frame m_frame; - final AdvancedPipelineSettings settings; - final List targets; - final boolean shouldRun; - synchronized (frameLock) { - m_frame = this.latestFrame; - this.latestFrame = null; - - settings = this.settings; - targets = this.targets; - shouldRun = this.shouldRun; - - this.shouldRun = false; - } - if (shouldRun) { - try { - CVPipelineResult osr = outputStreamPipeline.process(m_frame, settings, targets); - consumeResults(m_frame, targets); - - } catch (Exception e) { - // 
Never die - logger.error("Exception while running stream runnable!", e); - } - try { - m_frame.release(); - } catch (Exception e) { - logger.error("Exception freeing frames", e); - } - } else { - // busy wait! hurray! - try { - Thread.sleep(1); - } catch (InterruptedException e) { - e.printStackTrace(); - } - } - } - } + // Configure LED's if supported by the underlying hardware + if (HardwareManager.getInstance().visionLED != null && this.camShouldControlLEDs()) { + HardwareManager.getInstance() + .visionLED + .setPipelineModeSupplier(() -> pipelineManager.getCurrentPipelineSettings().ledMode); + setVisionLEDs(pipelineManager.getCurrentPipelineSettings().ledMode); } - public void start() { - visionRunner.startProcess(); - streamRunnable.start(); + saveAndBroadcastAll(); + } + + private void destroyStreams() { + SocketVideoStreamManager.getInstance().removeStream(inputVideoStreamer); + SocketVideoStreamManager.getInstance().removeStream(outputVideoStreamer); + } + + private void createStreams() { + var camStreamIdx = visionSource.getSettables().getConfiguration().streamIndex; + // If idx = 0, we want (1181, 1182) + this.inputStreamPort = 1181 + (camStreamIdx * 2); + this.outputStreamPort = 1181 + (camStreamIdx * 2) + 1; + + inputFrameSaver = + new FileSaveFrameConsumer(visionSource.getSettables().getConfiguration().nickname, "input"); + outputFrameSaver = + new FileSaveFrameConsumer( + visionSource.getSettables().getConfiguration().nickname, "output"); + + inputVideoStreamer = new SocketVideoStream(this.inputStreamPort); + outputVideoStreamer = new SocketVideoStream(this.outputStreamPort); + SocketVideoStreamManager.getInstance().addStream(inputVideoStreamer); + SocketVideoStreamManager.getInstance().addStream(outputVideoStreamer); + } + + private void recreateStreamResultConsumers() { + streamResultConsumers.add( + (frame, tgts) -> { + if (frame != null) inputFrameSaver.accept(frame.colorImage); + }); + streamResultConsumers.add( + (frame, tgts) -> { + if (frame 
!= null) outputFrameSaver.accept(frame.processedImage); + }); + streamResultConsumers.add( + (frame, tgts) -> { + if (frame != null) inputVideoStreamer.accept(frame.colorImage); + }); + streamResultConsumers.add( + (frame, tgts) -> { + if (frame != null) outputVideoStreamer.accept(frame.processedImage); + }); + } + + private class StreamRunnable extends Thread { + private final OutputStreamPipeline outputStreamPipeline; + + private final Object frameLock = new Object(); + private Frame latestFrame; + private AdvancedPipelineSettings settings = new AdvancedPipelineSettings(); + private List targets = new ArrayList<>(); + + private boolean shouldRun = false; + + public StreamRunnable(OutputStreamPipeline outputStreamPipeline) { + this.outputStreamPipeline = outputStreamPipeline; } - public void setFov(double fov) { - var settables = visionSource.getSettables(); - logger.trace(() -> "Setting " + settables.getConfiguration().nickname + ") FOV (" + fov + ")"); - - // Only set FOV if we have no vendor JSON, and we aren't using a PiCAM - if (isVendorCamera()) { - logger.info("Cannot set FOV on a vendor device! 
Ignoring..."); - } else { - settables.setFOV(fov); + public void updateData( + Frame inputOutputFrame, AdvancedPipelineSettings settings, List targets) { + synchronized (frameLock) { + if (shouldRun && this.latestFrame != null) { + logger.trace("Fell behind; releasing last unused Mats"); + this.latestFrame.release(); } - } - private boolean isVendorCamera() { - return visionSource.isVendorCamera(); - } + this.latestFrame = inputOutputFrame; + this.settings = settings; + this.targets = targets; - void changePipelineType(int newType) { - pipelineManager.changePipelineType(newType); - setPipeline(pipelineManager.getCurrentPipelineIndex()); - saveAndBroadcastAll(); + shouldRun = inputOutputFrame != null; + // && inputOutputFrame.colorImage != null + // && !inputOutputFrame.colorImage.getMat().empty() + // && inputOutputFrame.processedImage != null + // && !inputOutputFrame.processedImage.getMat().empty(); + } } - void setDriverMode(boolean isDriverMode) { - pipelineManager.setDriverMode(isDriverMode); - setVisionLEDs(!isDriverMode); - setPipeline( - isDriverMode - ? 
PipelineManager.DRIVERMODE_INDEX - : pipelineManager.getCurrentPipelineIndex()); - saveAndBroadcastAll(); - } - - public void startCalibration(UICalibrationData data) { - var settings = pipelineManager.calibration3dPipeline.getSettings(); - pipelineManager.calibration3dPipeline.deleteSavedImages(); - settings.cameraVideoModeIndex = data.videoModeIndex; - visionSource.getSettables().setVideoModeIndex(data.videoModeIndex); - logger.info( - "Starting calibration at resolution index " - + data.videoModeIndex - + " and settings " - + data); - settings.gridSize = Units.inchesToMeters(data.squareSizeIn); - settings.boardHeight = data.patternHeight; - settings.boardWidth = data.patternWidth; - settings.boardType = data.boardType; - - // Disable gain if not applicable - if (!cameraQuirks.hasQuirk(CameraQuirk.Gain)) { - settings.cameraGain = -1; + @Override + public void run() { + while (true) { + final Frame m_frame; + final AdvancedPipelineSettings settings; + final List targets; + final boolean shouldRun; + synchronized (frameLock) { + m_frame = this.latestFrame; + this.latestFrame = null; + + settings = this.settings; + targets = this.targets; + shouldRun = this.shouldRun; + + this.shouldRun = false; } - if (!cameraQuirks.hasQuirk(CameraQuirk.AWBGain)) { - settings.cameraRedGain = -1; - settings.cameraBlueGain = -1; + if (shouldRun) { + try { + CVPipelineResult osr = outputStreamPipeline.process(m_frame, settings, targets); + consumeResults(m_frame, targets); + + } catch (Exception e) { + // Never die + logger.error("Exception while running stream runnable!", e); + } + try { + m_frame.release(); + } catch (Exception e) { + logger.error("Exception freeing frames", e); + } + } else { + // busy wait! hurray! 
+ try { + Thread.sleep(1); + } catch (InterruptedException e) { + e.printStackTrace(); + } } - - settings.cameraAutoExposure = true; - - setPipeline(PipelineManager.CAL_3D_INDEX); + } } - - public void saveInputSnapshot() { - inputFrameSaver.overrideTakeSnapshot(); + } + + public void start() { + visionRunner.startProcess(); + streamRunnable.start(); + } + + public void setFov(double fov) { + var settables = visionSource.getSettables(); + logger.trace(() -> "Setting " + settables.getConfiguration().nickname + ") FOV (" + fov + ")"); + + // Only set FOV if we have no vendor JSON, and we aren't using a PiCAM + if (isVendorCamera()) { + logger.info("Cannot set FOV on a vendor device! Ignoring..."); + } else { + settables.setFOV(fov); } - - public void saveOutputSnapshot() { - outputFrameSaver.overrideTakeSnapshot(); + } + + private boolean isVendorCamera() { + return visionSource.isVendorCamera(); + } + + void changePipelineType(int newType) { + pipelineManager.changePipelineType(newType); + setPipeline(pipelineManager.getCurrentPipelineIndex()); + saveAndBroadcastAll(); + } + + void setDriverMode(boolean isDriverMode) { + pipelineManager.setDriverMode(isDriverMode); + setVisionLEDs(!isDriverMode); + setPipeline( + isDriverMode + ? 
PipelineManager.DRIVERMODE_INDEX + : pipelineManager.getCurrentPipelineIndex()); + saveAndBroadcastAll(); + } + + public void startCalibration(UICalibrationData data) { + var settings = pipelineManager.calibration3dPipeline.getSettings(); + pipelineManager.calibration3dPipeline.deleteSavedImages(); + settings.cameraVideoModeIndex = data.videoModeIndex; + visionSource.getSettables().setVideoModeIndex(data.videoModeIndex); + logger.info( + "Starting calibration at resolution index " + + data.videoModeIndex + + " and settings " + + data); + settings.gridSize = Units.inchesToMeters(data.squareSizeIn); + settings.boardHeight = data.patternHeight; + settings.boardWidth = data.patternWidth; + settings.boardType = data.boardType; + + // Disable gain if not applicable + if (!cameraQuirks.hasQuirk(CameraQuirk.Gain)) { + settings.cameraGain = -1; } - - public void takeCalibrationSnapshot() { - pipelineManager.calibration3dPipeline.takeSnapshot(); - } - - public CameraCalibrationCoefficients endCalibration() { - var ret = pipelineManager.calibration3dPipeline.tryCalibration(); - pipelineManager.setCalibrationMode(false); - - setPipeline(pipelineManager.getCurrentPipelineIndex()); - - if (ret != null) { - logger.debug("Saving calibration..."); - visionSource.getSettables().addCalibration(ret); - } else { - logger.error("Calibration failed..."); - } - saveAndBroadcastAll(); - return ret; + if (!cameraQuirks.hasQuirk(CameraQuirk.AWBGain)) { + settings.cameraRedGain = -1; + settings.cameraBlueGain = -1; } - boolean setPipeline(int index) { - logger.info("Setting pipeline to " + index); - logger.info("Pipeline name: " + pipelineManager.getPipelineNickname(index)); - pipelineManager.setIndex(index); - var pipelineSettings = pipelineManager.getPipelineSettings(index); + settings.cameraAutoExposure = true; - if (pipelineSettings == null) { - logger.error("Config for index " + index + " was null! 
Not changing pipelines"); - return false; - } - - visionSource.getSettables().setVideoModeInternal(pipelineSettings.cameraVideoModeIndex); - visionSource.getSettables().setBrightness(pipelineSettings.cameraBrightness); - visionSource.getSettables().setGain(pipelineSettings.cameraGain); + setPipeline(PipelineManager.CAL_3D_INDEX); + } - // If manual exposure, force exposure slider to be valid - if (!pipelineSettings.cameraAutoExposure) { - if (pipelineSettings.cameraExposure < 0) - pipelineSettings.cameraExposure = 10; // reasonable default - } + public void saveInputSnapshot() { + inputFrameSaver.overrideTakeSnapshot(); + } - visionSource.getSettables().setExposure(pipelineSettings.cameraExposure); - try { - visionSource.getSettables().setAutoExposure(pipelineSettings.cameraAutoExposure); - } catch (VideoException e) { - logger.error("Unable to set camera auto exposure!"); - logger.error(e.toString()); - } - if (cameraQuirks.hasQuirk(CameraQuirk.Gain)) { - // If the gain is disabled for some reason, re-enable it - if (pipelineSettings.cameraGain == -1) pipelineSettings.cameraGain = 75; - visionSource.getSettables().setGain(Math.max(0, pipelineSettings.cameraGain)); - } else { - pipelineSettings.cameraGain = -1; - } + public void saveOutputSnapshot() { + outputFrameSaver.overrideTakeSnapshot(); + } - if (cameraQuirks.hasQuirk(CameraQuirk.AWBGain)) { - // If the AWB gains are disabled for some reason, re-enable it - if (pipelineSettings.cameraRedGain == -1) pipelineSettings.cameraRedGain = 11; - if (pipelineSettings.cameraBlueGain == -1) pipelineSettings.cameraBlueGain = 20; - visionSource.getSettables().setRedGain(Math.max(0, pipelineSettings.cameraRedGain)); - visionSource.getSettables().setBlueGain(Math.max(0, pipelineSettings.cameraBlueGain)); - } else { - pipelineSettings.cameraRedGain = -1; - pipelineSettings.cameraBlueGain = -1; - } + public void takeCalibrationSnapshot() { + pipelineManager.calibration3dPipeline.takeSnapshot(); + } - 
setVisionLEDs(pipelineSettings.ledMode); + public CameraCalibrationCoefficients endCalibration() { + var ret = pipelineManager.calibration3dPipeline.tryCalibration(); + pipelineManager.setCalibrationMode(false); - visionSource.getSettables().getConfiguration().currentPipelineIndex = - pipelineManager.getCurrentPipelineIndex(); + setPipeline(pipelineManager.getCurrentPipelineIndex()); - return true; + if (ret != null) { + logger.debug("Saving calibration..."); + visionSource.getSettables().addCalibration(ret); + } else { + logger.error("Calibration failed..."); } - - private boolean camShouldControlLEDs() { - // Heuristic - if the camera has a known FOV or is a piCam, assume it's in use for - // vision processing, and should command stuff to the LED's. - // TODO: Make LED control a property of the camera itself and controllable in the UI. - return isVendorCamera() || cameraQuirks.hasQuirk(CameraQuirk.PiCam); + saveAndBroadcastAll(); + return ret; + } + + boolean setPipeline(int index) { + logger.info("Setting pipeline to " + index); + logger.info("Pipeline name: " + pipelineManager.getPipelineNickname(index)); + pipelineManager.setIndex(index); + var pipelineSettings = pipelineManager.getPipelineSettings(index); + + if (pipelineSettings == null) { + logger.error("Config for index " + index + " was null! 
Not changing pipelines"); + return false; } - private void setVisionLEDs(boolean on) { - if (camShouldControlLEDs() && HardwareManager.getInstance().visionLED != null) - HardwareManager.getInstance().visionLED.setState(on); - } + visionSource.getSettables().setVideoModeInternal(pipelineSettings.cameraVideoModeIndex); + visionSource.getSettables().setBrightness(pipelineSettings.cameraBrightness); + visionSource.getSettables().setGain(pipelineSettings.cameraGain); - public void saveModule() { - ConfigManager.getInstance() - .saveModule( - getStateAsCameraConfig(), visionSource.getSettables().getConfiguration().uniqueName); + // If manual exposure, force exposure slider to be valid + if (!pipelineSettings.cameraAutoExposure) { + if (pipelineSettings.cameraExposure < 0) + pipelineSettings.cameraExposure = 10; // reasonable default } - void saveAndBroadcastAll() { - saveModule(); - DataChangeService.getInstance() - .publishEvent( - new OutgoingUIEvent<>( - "fullsettings", ConfigManager.getInstance().getConfig().toHashMap())); + visionSource.getSettables().setExposure(pipelineSettings.cameraExposure); + try { + visionSource.getSettables().setAutoExposure(pipelineSettings.cameraAutoExposure); + } catch (VideoException e) { + logger.error("Unable to set camera auto exposure!"); + logger.error(e.toString()); } - - void saveAndBroadcastSelective(WsContext originContext, String propertyName, Object value) { - logger.trace("Broadcasting PSC mutation - " + propertyName + ": " + value); - saveModule(); - - HashMap map = new HashMap<>(); - HashMap subMap = new HashMap<>(); - subMap.put(propertyName, value); - map.put("cameraIndex", this.moduleIndex); - map.put("mutatePipelineSettings", subMap); - - DataChangeService.getInstance() - .publishEvent(new OutgoingUIEvent<>("mutatePipeline", map, originContext)); + if (cameraQuirks.hasQuirk(CameraQuirk.Gain)) { + // If the gain is disabled for some reason, re-enable it + if (pipelineSettings.cameraGain == -1) pipelineSettings.cameraGain 
= 75; + visionSource.getSettables().setGain(Math.max(0, pipelineSettings.cameraGain)); + } else { + pipelineSettings.cameraGain = -1; } - public void setCameraNickname(String newName) { - visionSource.getSettables().getConfiguration().nickname = newName; - ntConsumer.updateCameraNickname(newName); - inputFrameSaver.updateCameraNickname(newName); - outputFrameSaver.updateCameraNickname(newName); - - // Rename streams - streamResultConsumers.clear(); - - // Teardown and recreate streams - destroyStreams(); - createStreams(); - - // Rebuild streamers - recreateStreamResultConsumers(); - - // Push new data to the UI - saveAndBroadcastAll(); + if (cameraQuirks.hasQuirk(CameraQuirk.AWBGain)) { + // If the AWB gains are disabled for some reason, re-enable it + if (pipelineSettings.cameraRedGain == -1) pipelineSettings.cameraRedGain = 11; + if (pipelineSettings.cameraBlueGain == -1) pipelineSettings.cameraBlueGain = 20; + visionSource.getSettables().setRedGain(Math.max(0, pipelineSettings.cameraRedGain)); + visionSource.getSettables().setBlueGain(Math.max(0, pipelineSettings.cameraBlueGain)); + } else { + pipelineSettings.cameraRedGain = -1; + pipelineSettings.cameraBlueGain = -1; } - public PhotonConfiguration.UICameraConfiguration toUICameraConfig() { - var ret = new PhotonConfiguration.UICameraConfiguration(); - - ret.fov = visionSource.getSettables().getFOV(); - ret.nickname = visionSource.getSettables().getConfiguration().nickname; - ret.currentPipelineSettings = - SerializationUtils.objectToHashMap(pipelineManager.getCurrentPipelineSettings()); - ret.currentPipelineIndex = pipelineManager.getCurrentPipelineIndex(); - ret.pipelineNicknames = pipelineManager.getPipelineNicknames(); - - // TODO refactor into helper method - var temp = new HashMap>(); - var videoModes = visionSource.getSettables().getAllVideoModes(); - for (var k : videoModes.keySet()) { - var internalMap = new HashMap(); - - internalMap.put("width", videoModes.get(k).width); - internalMap.put("height", 
videoModes.get(k).height); - internalMap.put("fps", videoModes.get(k).fps); - internalMap.put( - "pixelFormat", - ((videoModes.get(k) instanceof LibcameraGpuSource.FPSRatedVideoMode) - ? "kPicam" - : videoModes.get(k).pixelFormat.toString()) - .substring(1)); // Remove the k prefix - - temp.put(k, internalMap); - } - ret.videoFormatList = temp; - ret.outputStreamPort = this.outputStreamPort; - ret.inputStreamPort = this.inputStreamPort; - - var calList = new ArrayList>(); - for (var c : visionSource.getSettables().getConfiguration().calibrations) { - var internalMap = new HashMap(); - - internalMap.put("perViewErrors", c.perViewErrors); - internalMap.put("standardDeviation", c.standardDeviation); - internalMap.put("width", c.resolution.width); - internalMap.put("height", c.resolution.height); - internalMap.put("intrinsics", c.cameraIntrinsics.data); - internalMap.put("distCoeffs", c.distCoeffs.data); - - calList.add(internalMap); - } - ret.calibrations = calList; - - ret.isFovConfigurable = - !(ConfigManager.getInstance().getConfig().getHardwareConfig().hasPresetFOV() - && cameraQuirks.hasQuirk(CameraQuirk.PiCam)); - - return ret; + setVisionLEDs(pipelineSettings.ledMode); + + visionSource.getSettables().getConfiguration().currentPipelineIndex = + pipelineManager.getCurrentPipelineIndex(); + + return true; + } + + private boolean camShouldControlLEDs() { + // Heuristic - if the camera has a known FOV or is a piCam, assume it's in use for + // vision processing, and should command stuff to the LED's. + // TODO: Make LED control a property of the camera itself and controllable in the UI. 
+ return isVendorCamera() || cameraQuirks.hasQuirk(CameraQuirk.PiCam); + } + + private void setVisionLEDs(boolean on) { + if (camShouldControlLEDs() && HardwareManager.getInstance().visionLED != null) + HardwareManager.getInstance().visionLED.setState(on); + } + + public void saveModule() { + ConfigManager.getInstance() + .saveModule( + getStateAsCameraConfig(), visionSource.getSettables().getConfiguration().uniqueName); + } + + void saveAndBroadcastAll() { + saveModule(); + DataChangeService.getInstance() + .publishEvent( + new OutgoingUIEvent<>( + "fullsettings", ConfigManager.getInstance().getConfig().toHashMap())); + } + + void saveAndBroadcastSelective(WsContext originContext, String propertyName, Object value) { + logger.trace("Broadcasting PSC mutation - " + propertyName + ": " + value); + saveModule(); + + HashMap map = new HashMap<>(); + HashMap subMap = new HashMap<>(); + subMap.put(propertyName, value); + map.put("cameraIndex", this.moduleIndex); + map.put("mutatePipelineSettings", subMap); + + DataChangeService.getInstance() + .publishEvent(new OutgoingUIEvent<>("mutatePipeline", map, originContext)); + } + + public void setCameraNickname(String newName) { + visionSource.getSettables().getConfiguration().nickname = newName; + ntConsumer.updateCameraNickname(newName); + inputFrameSaver.updateCameraNickname(newName); + outputFrameSaver.updateCameraNickname(newName); + + // Rename streams + streamResultConsumers.clear(); + + // Teardown and recreate streams + destroyStreams(); + createStreams(); + + // Rebuild streamers + recreateStreamResultConsumers(); + + // Push new data to the UI + saveAndBroadcastAll(); + } + + public PhotonConfiguration.UICameraConfiguration toUICameraConfig() { + var ret = new PhotonConfiguration.UICameraConfiguration(); + + ret.fov = visionSource.getSettables().getFOV(); + ret.nickname = visionSource.getSettables().getConfiguration().nickname; + ret.currentPipelineSettings = + 
SerializationUtils.objectToHashMap(pipelineManager.getCurrentPipelineSettings()); + ret.currentPipelineIndex = pipelineManager.getCurrentPipelineIndex(); + ret.pipelineNicknames = pipelineManager.getPipelineNicknames(); + + // TODO refactor into helper method + var temp = new HashMap>(); + var videoModes = visionSource.getSettables().getAllVideoModes(); + for (var k : videoModes.keySet()) { + var internalMap = new HashMap(); + + internalMap.put("width", videoModes.get(k).width); + internalMap.put("height", videoModes.get(k).height); + internalMap.put("fps", videoModes.get(k).fps); + internalMap.put( + "pixelFormat", + ((videoModes.get(k) instanceof LibcameraGpuSource.FPSRatedVideoMode) + ? "kPicam" + : videoModes.get(k).pixelFormat.toString()) + .substring(1)); // Remove the k prefix + + temp.put(k, internalMap); } - - public CameraConfiguration getStateAsCameraConfig() { - var config = visionSource.getSettables().getConfiguration(); - config.setPipelineSettings(pipelineManager.userPipelineSettings); - config.driveModeSettings = pipelineManager.driverModePipeline.getSettings(); - config.currentPipelineIndex = Math.max(pipelineManager.getCurrentPipelineIndex(), -1); - - return config; + ret.videoFormatList = temp; + ret.outputStreamPort = this.outputStreamPort; + ret.inputStreamPort = this.inputStreamPort; + + var calList = new ArrayList>(); + for (var c : visionSource.getSettables().getConfiguration().calibrations) { + var internalMap = new HashMap(); + + internalMap.put("perViewErrors", c.perViewErrors); + internalMap.put("standardDeviation", c.standardDeviation); + internalMap.put("width", c.resolution.width); + internalMap.put("height", c.resolution.height); + internalMap.put("intrinsics", c.cameraIntrinsics.data); + internalMap.put("distCoeffs", c.distCoeffs.data); + + calList.add(internalMap); } - - public void addResultConsumer(CVPipelineResultConsumer dataConsumer) { - resultConsumers.add(dataConsumer); + ret.calibrations = calList; + + ret.isFovConfigurable 
= + !(ConfigManager.getInstance().getConfig().getHardwareConfig().hasPresetFOV() + && cameraQuirks.hasQuirk(CameraQuirk.PiCam)); + + return ret; + } + + public CameraConfiguration getStateAsCameraConfig() { + var config = visionSource.getSettables().getConfiguration(); + config.setPipelineSettings(pipelineManager.userPipelineSettings); + config.driveModeSettings = pipelineManager.driverModePipeline.getSettings(); + config.currentPipelineIndex = Math.max(pipelineManager.getCurrentPipelineIndex(), -1); + + return config; + } + + public void addResultConsumer(CVPipelineResultConsumer dataConsumer) { + resultConsumers.add(dataConsumer); + } + + private void consumeResult(CVPipelineResult result) { + consumePipelineResult(result); + + // Pipelines like DriverMode and Calibrate3dPipeline have null output frames + if (result.inputAndOutputFrame != null + && (pipelineManager.getCurrentPipelineSettings() instanceof AdvancedPipelineSettings)) { + streamRunnable.updateData( + result.inputAndOutputFrame, + (AdvancedPipelineSettings) pipelineManager.getCurrentPipelineSettings(), + result.targets); + // The streamRunnable manages releasing in this case + } else { + consumeResults(result.inputAndOutputFrame, result.targets); + + result.release(); + // In this case we don't bother with a separate streaming thread and we release } + } - private void consumeResult(CVPipelineResult result) { - consumePipelineResult(result); - - // Pipelines like DriverMode and Calibrate3dPipeline have null output frames - if (result.inputAndOutputFrame != null - && (pipelineManager.getCurrentPipelineSettings() instanceof AdvancedPipelineSettings)) { - streamRunnable.updateData( - result.inputAndOutputFrame, - (AdvancedPipelineSettings) pipelineManager.getCurrentPipelineSettings(), - result.targets); - // The streamRunnable manages releasing in this case - } else { - consumeResults(result.inputAndOutputFrame, result.targets); - - result.release(); - // In this case we don't bother with a separate 
streaming thread and we release - } + private void consumePipelineResult(CVPipelineResult result) { + for (var dataConsumer : resultConsumers) { + dataConsumer.accept(result); } + } - private void consumePipelineResult(CVPipelineResult result) { - for (var dataConsumer : resultConsumers) { - dataConsumer.accept(result); - } + /** Consume stream/target results, no rate limiting applied */ + private void consumeResults(Frame frame, List targets) { + for (var c : streamResultConsumers) { + c.accept(frame, targets); } - - /** Consume stream/target results, no rate limiting applied */ - private void consumeResults(Frame frame, List targets) { - for (var c : streamResultConsumers) { - c.accept(frame, targets); - } + } + + public void setTargetModel(TargetModel targetModel) { + var settings = pipelineManager.getCurrentPipeline().getSettings(); + if (settings instanceof ReflectivePipelineSettings) { + ((ReflectivePipelineSettings) settings).targetModel = targetModel; + saveAndBroadcastAll(); + } else { + logger.error("Cannot set target model of non-reflective pipe! Ignoring..."); } - - public void setTargetModel(TargetModel targetModel) { - var settings = pipelineManager.getCurrentPipeline().getSettings(); - if (settings instanceof ReflectivePipelineSettings) { - ((ReflectivePipelineSettings) settings).targetModel = targetModel; - saveAndBroadcastAll(); - } else { - logger.error("Cannot set target model of non-reflective pipe! 
Ignoring..."); - } + } + + public void addCalibrationToConfig(CameraCalibrationCoefficients newCalibration) { + if (newCalibration != null) { + logger.info("Got new calibration for " + newCalibration.resolution); + visionSource.getSettables().getConfiguration().addCalibration(newCalibration); + } else { + logger.error("Got null calibration?"); } - public void addCalibrationToConfig(CameraCalibrationCoefficients newCalibration) { - if (newCalibration != null) { - logger.info("Got new calibration for " + newCalibration.resolution); - visionSource.getSettables().getConfiguration().addCalibration(newCalibration); - } else { - logger.error("Got null calibration?"); - } - - saveAndBroadcastAll(); - } + saveAndBroadcastAll(); + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/processes/VisionModuleChangeSubscriber.java b/photon-core/src/main/java/org/photonvision/vision/processes/VisionModuleChangeSubscriber.java index e824ce89dc..9e89074939 100644 --- a/photon-core/src/main/java/org/photonvision/vision/processes/VisionModuleChangeSubscriber.java +++ b/photon-core/src/main/java/org/photonvision/vision/processes/VisionModuleChangeSubscriber.java @@ -36,205 +36,205 @@ @SuppressWarnings("unchecked") public class VisionModuleChangeSubscriber extends DataChangeSubscriber { - private final VisionModule parentModule; - private final Logger logger; - - public VisionModuleChangeSubscriber(VisionModule parentModule) { - this.parentModule = parentModule; - logger = - new Logger( - VisionModuleChangeSubscriber.class, - parentModule.visionSource.getSettables().getConfiguration().nickname, - LogGroup.VisionModule); - } - - @Override - public void onDataChangeEvent(DataChangeEvent event) { - if (event instanceof IncomingWebSocketEvent) { - var wsEvent = (IncomingWebSocketEvent) event; - - // Camera index -1 means a "multicast event" (i.e. 
the event is received by all cameras) - if (wsEvent.cameraIndex != null - && (wsEvent.cameraIndex == parentModule.moduleIndex || wsEvent.cameraIndex == -1)) { - logger.trace("Got PSC event - propName: " + wsEvent.propertyName); - - var propName = wsEvent.propertyName; - var newPropValue = wsEvent.data; - var currentSettings = parentModule.pipelineManager.getCurrentPipeline().getSettings(); - - // special case for non-PipelineSetting changes - switch (propName) { - // case "cameraNickname": // rename camera - // var newNickname = (String) newPropValue; - // logger.info("Changing nickname to " + newNickname); - // parentModule.setCameraNickname(newNickname); - // return; - case "pipelineName": // rename current pipeline - logger.info("Changing nick to " + newPropValue); - parentModule.pipelineManager.getCurrentPipelineSettings().pipelineNickname = - (String) newPropValue; - parentModule.saveAndBroadcastAll(); - return; - case "newPipelineInfo": // add new pipeline - var typeName = (Pair) newPropValue; - var type = typeName.getRight(); - var name = typeName.getLeft(); - - logger.info("Adding a " + type + " pipeline with name " + name); - - var addedSettings = parentModule.pipelineManager.addPipeline(type); - addedSettings.pipelineNickname = name; - parentModule.saveAndBroadcastAll(); - return; - case "deleteCurrPipeline": - var indexToDelete = parentModule.pipelineManager.getCurrentPipelineIndex(); - logger.info("Deleting current pipe at index " + indexToDelete); - int newIndex = parentModule.pipelineManager.removePipeline(indexToDelete); - parentModule.setPipeline(newIndex); - parentModule.saveAndBroadcastAll(); - return; - case "changePipeline": // change active pipeline - var index = (Integer) newPropValue; - if (index == parentModule.pipelineManager.getCurrentPipelineIndex()) { - logger.debug("Skipping pipeline change, index " + index + " already active"); - return; - } - parentModule.setPipeline(index); - parentModule.saveAndBroadcastAll(); - return; - case 
"startCalibration": - var data = UICalibrationData.fromMap((Map) newPropValue); - parentModule.startCalibration(data); - parentModule.saveAndBroadcastAll(); - return; - case "saveInputSnapshot": - parentModule.saveInputSnapshot(); - return; - case "saveOutputSnapshot": - parentModule.saveOutputSnapshot(); - return; - case "takeCalSnapshot": - parentModule.takeCalibrationSnapshot(); - return; - case "duplicatePipeline": - int idx = parentModule.pipelineManager.duplicatePipeline((Integer) newPropValue); - parentModule.setPipeline(idx); - parentModule.saveAndBroadcastAll(); - return; - case "calibrationUploaded": - if (newPropValue instanceof CameraCalibrationCoefficients) - parentModule.addCalibrationToConfig((CameraCalibrationCoefficients) newPropValue); - return; - case "robotOffsetPoint": - if (currentSettings instanceof AdvancedPipelineSettings) { - var curAdvSettings = (AdvancedPipelineSettings) currentSettings; - var offsetOperation = RobotOffsetPointOperation.fromIndex((int) newPropValue); - var latestTarget = parentModule.lastPipelineResultBestTarget; - - if (latestTarget != null) { - var newPoint = latestTarget.getTargetOffsetPoint(); - - switch (curAdvSettings.offsetRobotOffsetMode) { - case Single: - if (offsetOperation == RobotOffsetPointOperation.ROPO_CLEAR) { - curAdvSettings.offsetSinglePoint = new Point(); - } else if (offsetOperation == RobotOffsetPointOperation.ROPO_TAKESINGLE) { - curAdvSettings.offsetSinglePoint = newPoint; - } - break; - case Dual: - if (offsetOperation == RobotOffsetPointOperation.ROPO_CLEAR) { - curAdvSettings.offsetDualPointA = new Point(); - curAdvSettings.offsetDualPointAArea = 0; - curAdvSettings.offsetDualPointB = new Point(); - curAdvSettings.offsetDualPointBArea = 0; - } else { - // update point and area - switch (offsetOperation) { - case ROPO_TAKEFIRSTDUAL: - curAdvSettings.offsetDualPointA = newPoint; - curAdvSettings.offsetDualPointAArea = latestTarget.getArea(); - break; - case ROPO_TAKESECONDDUAL: - 
curAdvSettings.offsetDualPointB = newPoint; - curAdvSettings.offsetDualPointBArea = latestTarget.getArea(); - break; - default: - break; - } - } - break; - default: - break; - } - } - } - return; - case "changePipelineType": - parentModule.changePipelineType((Integer) newPropValue); - parentModule.saveAndBroadcastAll(); - return; - } - - // special case for camera settables - if (propName.startsWith("camera")) { - var propMethodName = "set" + propName.replace("camera", ""); - var methods = parentModule.visionSource.getSettables().getClass().getMethods(); - for (var method : methods) { - if (method.getName().equalsIgnoreCase(propMethodName)) { - try { - method.invoke(parentModule.visionSource.getSettables(), newPropValue); - } catch (Exception e) { - logger.error("Failed to invoke camera settable method: " + method.getName(), e); - } - } + private final VisionModule parentModule; + private final Logger logger; + + public VisionModuleChangeSubscriber(VisionModule parentModule) { + this.parentModule = parentModule; + logger = + new Logger( + VisionModuleChangeSubscriber.class, + parentModule.visionSource.getSettables().getConfiguration().nickname, + LogGroup.VisionModule); + } + + @Override + public void onDataChangeEvent(DataChangeEvent event) { + if (event instanceof IncomingWebSocketEvent) { + var wsEvent = (IncomingWebSocketEvent) event; + + // Camera index -1 means a "multicast event" (i.e. 
the event is received by all cameras) + if (wsEvent.cameraIndex != null + && (wsEvent.cameraIndex == parentModule.moduleIndex || wsEvent.cameraIndex == -1)) { + logger.trace("Got PSC event - propName: " + wsEvent.propertyName); + + var propName = wsEvent.propertyName; + var newPropValue = wsEvent.data; + var currentSettings = parentModule.pipelineManager.getCurrentPipeline().getSettings(); + + // special case for non-PipelineSetting changes + switch (propName) { + // case "cameraNickname": // rename camera + // var newNickname = (String) newPropValue; + // logger.info("Changing nickname to " + newNickname); + // parentModule.setCameraNickname(newNickname); + // return; + case "pipelineName": // rename current pipeline + logger.info("Changing nick to " + newPropValue); + parentModule.pipelineManager.getCurrentPipelineSettings().pipelineNickname = + (String) newPropValue; + parentModule.saveAndBroadcastAll(); + return; + case "newPipelineInfo": // add new pipeline + var typeName = (Pair) newPropValue; + var type = typeName.getRight(); + var name = typeName.getLeft(); + + logger.info("Adding a " + type + " pipeline with name " + name); + + var addedSettings = parentModule.pipelineManager.addPipeline(type); + addedSettings.pipelineNickname = name; + parentModule.saveAndBroadcastAll(); + return; + case "deleteCurrPipeline": + var indexToDelete = parentModule.pipelineManager.getCurrentPipelineIndex(); + logger.info("Deleting current pipe at index " + indexToDelete); + int newIndex = parentModule.pipelineManager.removePipeline(indexToDelete); + parentModule.setPipeline(newIndex); + parentModule.saveAndBroadcastAll(); + return; + case "changePipeline": // change active pipeline + var index = (Integer) newPropValue; + if (index == parentModule.pipelineManager.getCurrentPipelineIndex()) { + logger.debug("Skipping pipeline change, index " + index + " already active"); + return; + } + parentModule.setPipeline(index); + parentModule.saveAndBroadcastAll(); + return; + case 
"startCalibration": + var data = UICalibrationData.fromMap((Map) newPropValue); + parentModule.startCalibration(data); + parentModule.saveAndBroadcastAll(); + return; + case "saveInputSnapshot": + parentModule.saveInputSnapshot(); + return; + case "saveOutputSnapshot": + parentModule.saveOutputSnapshot(); + return; + case "takeCalSnapshot": + parentModule.takeCalibrationSnapshot(); + return; + case "duplicatePipeline": + int idx = parentModule.pipelineManager.duplicatePipeline((Integer) newPropValue); + parentModule.setPipeline(idx); + parentModule.saveAndBroadcastAll(); + return; + case "calibrationUploaded": + if (newPropValue instanceof CameraCalibrationCoefficients) + parentModule.addCalibrationToConfig((CameraCalibrationCoefficients) newPropValue); + return; + case "robotOffsetPoint": + if (currentSettings instanceof AdvancedPipelineSettings) { + var curAdvSettings = (AdvancedPipelineSettings) currentSettings; + var offsetOperation = RobotOffsetPointOperation.fromIndex((int) newPropValue); + var latestTarget = parentModule.lastPipelineResultBestTarget; + + if (latestTarget != null) { + var newPoint = latestTarget.getTargetOffsetPoint(); + + switch (curAdvSettings.offsetRobotOffsetMode) { + case Single: + if (offsetOperation == RobotOffsetPointOperation.ROPO_CLEAR) { + curAdvSettings.offsetSinglePoint = new Point(); + } else if (offsetOperation == RobotOffsetPointOperation.ROPO_TAKESINGLE) { + curAdvSettings.offsetSinglePoint = newPoint; } - } - - try { - var propField = currentSettings.getClass().getField(propName); - var propType = propField.getType(); - - if (propType.isEnum()) { - var actual = propType.getEnumConstants()[(int) newPropValue]; - propField.set(currentSettings, actual); - } else if (propType.isAssignableFrom(DoubleCouple.class)) { - var orig = (ArrayList) newPropValue; - var actual = new DoubleCouple(orig.get(0), orig.get(1)); - propField.set(currentSettings, actual); - } else if (propType.isAssignableFrom(IntegerCouple.class)) { - var orig = 
(ArrayList) newPropValue; - var actual = new IntegerCouple(orig.get(0).intValue(), orig.get(1).intValue()); - propField.set(currentSettings, actual); - } else if (propType.equals(Double.TYPE)) { - propField.setDouble(currentSettings, ((Number) newPropValue).doubleValue()); - } else if (propType.equals(Integer.TYPE)) { - propField.setInt(currentSettings, (Integer) newPropValue); - } else if (propType.equals(Boolean.TYPE)) { - if (newPropValue instanceof Integer) { - propField.setBoolean(currentSettings, (Integer) newPropValue != 0); - } else { - propField.setBoolean(currentSettings, (Boolean) newPropValue); - } + break; + case Dual: + if (offsetOperation == RobotOffsetPointOperation.ROPO_CLEAR) { + curAdvSettings.offsetDualPointA = new Point(); + curAdvSettings.offsetDualPointAArea = 0; + curAdvSettings.offsetDualPointB = new Point(); + curAdvSettings.offsetDualPointBArea = 0; } else { - propField.set(newPropValue, newPropValue); + // update point and area + switch (offsetOperation) { + case ROPO_TAKEFIRSTDUAL: + curAdvSettings.offsetDualPointA = newPoint; + curAdvSettings.offsetDualPointAArea = latestTarget.getArea(); + break; + case ROPO_TAKESECONDDUAL: + curAdvSettings.offsetDualPointB = newPoint; + curAdvSettings.offsetDualPointBArea = latestTarget.getArea(); + break; + default: + break; + } } - logger.trace("Set prop " + propName + " to value " + newPropValue); - } catch (NoSuchFieldException | IllegalAccessException e) { - logger.error( - "Could not set prop " - + propName - + " with value " - + newPropValue - + " on " - + currentSettings, - e); - } catch (Exception e) { - logger.error("Unknown exception when setting PSC prop!", e); + break; + default: + break; } + } + } + return; + case "changePipelineType": + parentModule.changePipelineType((Integer) newPropValue); + parentModule.saveAndBroadcastAll(); + return; + } + + // special case for camera settables + if (propName.startsWith("camera")) { + var propMethodName = "set" + propName.replace("camera", ""); + 
var methods = parentModule.visionSource.getSettables().getClass().getMethods(); + for (var method : methods) { + if (method.getName().equalsIgnoreCase(propMethodName)) { + try { + method.invoke(parentModule.visionSource.getSettables(), newPropValue); + } catch (Exception e) { + logger.error("Failed to invoke camera settable method: " + method.getName(), e); + } + } + } + } - parentModule.saveAndBroadcastSelective(wsEvent.originContext, propName, newPropValue); + try { + var propField = currentSettings.getClass().getField(propName); + var propType = propField.getType(); + + if (propType.isEnum()) { + var actual = propType.getEnumConstants()[(int) newPropValue]; + propField.set(currentSettings, actual); + } else if (propType.isAssignableFrom(DoubleCouple.class)) { + var orig = (ArrayList) newPropValue; + var actual = new DoubleCouple(orig.get(0), orig.get(1)); + propField.set(currentSettings, actual); + } else if (propType.isAssignableFrom(IntegerCouple.class)) { + var orig = (ArrayList) newPropValue; + var actual = new IntegerCouple(orig.get(0).intValue(), orig.get(1).intValue()); + propField.set(currentSettings, actual); + } else if (propType.equals(Double.TYPE)) { + propField.setDouble(currentSettings, ((Number) newPropValue).doubleValue()); + } else if (propType.equals(Integer.TYPE)) { + propField.setInt(currentSettings, (Integer) newPropValue); + } else if (propType.equals(Boolean.TYPE)) { + if (newPropValue instanceof Integer) { + propField.setBoolean(currentSettings, (Integer) newPropValue != 0); + } else { + propField.setBoolean(currentSettings, (Boolean) newPropValue); } + } else { + propField.set(newPropValue, newPropValue); + } + logger.trace("Set prop " + propName + " to value " + newPropValue); + } catch (NoSuchFieldException | IllegalAccessException e) { + logger.error( + "Could not set prop " + + propName + + " with value " + + newPropValue + + " on " + + currentSettings, + e); + } catch (Exception e) { + logger.error("Unknown exception when setting 
PSC prop!", e); } + + parentModule.saveAndBroadcastSelective(wsEvent.originContext, propName, newPropValue); + } } + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/processes/VisionModuleManager.java b/photon-core/src/main/java/org/photonvision/vision/processes/VisionModuleManager.java index d75e62cb7e..cb932f5e31 100644 --- a/photon-core/src/main/java/org/photonvision/vision/processes/VisionModuleManager.java +++ b/photon-core/src/main/java/org/photonvision/vision/processes/VisionModuleManager.java @@ -24,85 +24,85 @@ /** VisionModuleManager has many VisionModules, and provides camera configuration data to them. */ public class VisionModuleManager { - private final Logger logger = new Logger(VisionModuleManager.class, LogGroup.VisionModule); + private final Logger logger = new Logger(VisionModuleManager.class, LogGroup.VisionModule); - private static class ThreadSafeSingleton { - private static final VisionModuleManager INSTANCE = new VisionModuleManager(); - } + private static class ThreadSafeSingleton { + private static final VisionModuleManager INSTANCE = new VisionModuleManager(); + } - public static VisionModuleManager getInstance() { - return VisionModuleManager.ThreadSafeSingleton.INSTANCE; - } + public static VisionModuleManager getInstance() { + return VisionModuleManager.ThreadSafeSingleton.INSTANCE; + } - protected final List visionModules = new ArrayList<>(); + protected final List visionModules = new ArrayList<>(); - VisionModuleManager() {} + VisionModuleManager() {} - public List getModules() { - return visionModules; - } + public List getModules() { + return visionModules; + } - public VisionModule getModule(String nickname) { - for (var module : visionModules) { - if (module.getStateAsCameraConfig().nickname.equals(nickname)) return module; - } - return null; + public VisionModule getModule(String nickname) { + for (var module : visionModules) { + if (module.getStateAsCameraConfig().nickname.equals(nickname)) return module; } + 
return null; + } - public VisionModule getModule(int i) { - return visionModules.get(i); - } - - public List addSources(List visionSources) { - var addedModules = new HashMap(); + public VisionModule getModule(int i) { + return visionModules.get(i); + } - assignCameraIndex(visionSources); - for (var visionSource : visionSources) { - var pipelineManager = new PipelineManager(visionSource.getCameraConfiguration()); + public List addSources(List visionSources) { + var addedModules = new HashMap(); - var module = new VisionModule(pipelineManager, visionSource, visionModules.size()); - visionModules.add(module); - addedModules.put(visionSource.getCameraConfiguration().streamIndex, module); - } + assignCameraIndex(visionSources); + for (var visionSource : visionSources) { + var pipelineManager = new PipelineManager(visionSource.getCameraConfiguration()); - return addedModules.entrySet().stream() - .sorted(Comparator.comparingInt(Map.Entry::getKey)) // sort by stream index - .map(Map.Entry::getValue) // map to Stream of VisionModule - .collect(Collectors.toList()); // collect in a List + var module = new VisionModule(pipelineManager, visionSource, visionModules.size()); + visionModules.add(module); + addedModules.put(visionSource.getCameraConfiguration().streamIndex, module); } - private void assignCameraIndex(List config) { - // We won't necessarily have already added all the cameras we need to at this point - // But by operating on the list, we have a fairly good idea of which we need to change, - // but it's not guaranteed that we change the correct one - // The best we can do is try to avoid a case where the stream index runs away to infinity - // since we can only stream 5 cameras at once - - // Big list, which should contain every vision source (currently loaded plus the new ones being - // added) - var bigList = new ArrayList(); - bigList.addAll( - this.getModules().stream().map(it -> it.visionSource).collect(Collectors.toList())); - bigList.addAll(config); - - for 
(var v : config) { - var listNoV = new ArrayList<>(bigList); - listNoV.remove(v); - if (listNoV.stream() - .anyMatch( - it -> - it.getCameraConfiguration().streamIndex - == v.getCameraConfiguration().streamIndex)) { - int idx = 0; - while (listNoV.stream() - .map(it -> it.getCameraConfiguration().streamIndex) - .collect(Collectors.toList()) - .contains(idx)) { - idx++; - } - logger.debug("Assigning idx " + idx); - v.getCameraConfiguration().streamIndex = idx; - } + return addedModules.entrySet().stream() + .sorted(Comparator.comparingInt(Map.Entry::getKey)) // sort by stream index + .map(Map.Entry::getValue) // map to Stream of VisionModule + .collect(Collectors.toList()); // collect in a List + } + + private void assignCameraIndex(List config) { + // We won't necessarily have already added all the cameras we need to at this point + // But by operating on the list, we have a fairly good idea of which we need to change, + // but it's not guaranteed that we change the correct one + // The best we can do is try to avoid a case where the stream index runs away to infinity + // since we can only stream 5 cameras at once + + // Big list, which should contain every vision source (currently loaded plus the new ones being + // added) + var bigList = new ArrayList(); + bigList.addAll( + this.getModules().stream().map(it -> it.visionSource).collect(Collectors.toList())); + bigList.addAll(config); + + for (var v : config) { + var listNoV = new ArrayList<>(bigList); + listNoV.remove(v); + if (listNoV.stream() + .anyMatch( + it -> + it.getCameraConfiguration().streamIndex + == v.getCameraConfiguration().streamIndex)) { + int idx = 0; + while (listNoV.stream() + .map(it -> it.getCameraConfiguration().streamIndex) + .collect(Collectors.toList()) + .contains(idx)) { + idx++; } + logger.debug("Assigning idx " + idx); + v.getCameraConfiguration().streamIndex = idx; + } } + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/processes/VisionRunner.java 
b/photon-core/src/main/java/org/photonvision/vision/processes/VisionRunner.java index 60acc0d391..41ec35b5ff 100644 --- a/photon-core/src/main/java/org/photonvision/vision/processes/VisionRunner.java +++ b/photon-core/src/main/java/org/photonvision/vision/processes/VisionRunner.java @@ -31,77 +31,77 @@ /** VisionRunner has a frame supplier, a pipeline supplier, and a result consumer */ @SuppressWarnings("rawtypes") public class VisionRunner { - private final Logger logger; - private final Thread visionProcessThread; - private final FrameProvider frameSupplier; - private final Supplier pipelineSupplier; - private final Consumer pipelineResultConsumer; - private final QuirkyCamera cameraQuirks; + private final Logger logger; + private final Thread visionProcessThread; + private final FrameProvider frameSupplier; + private final Supplier pipelineSupplier; + private final Consumer pipelineResultConsumer; + private final QuirkyCamera cameraQuirks; - private long loopCount; + private long loopCount; - /** - * VisionRunner contains a thread to run a pipeline, given a frame, and will give the result to - * the consumer. - * - * @param frameSupplier The supplier of the latest frame. - * @param pipelineSupplier The supplier of the current pipeline. - * @param pipelineResultConsumer The consumer of the latest result. - */ - public VisionRunner( - FrameProvider frameSupplier, - Supplier pipelineSupplier, - Consumer pipelineResultConsumer, - QuirkyCamera cameraQuirks) { - this.frameSupplier = frameSupplier; - this.pipelineSupplier = pipelineSupplier; - this.pipelineResultConsumer = pipelineResultConsumer; - this.cameraQuirks = cameraQuirks; + /** + * VisionRunner contains a thread to run a pipeline, given a frame, and will give the result to + * the consumer. + * + * @param frameSupplier The supplier of the latest frame. + * @param pipelineSupplier The supplier of the current pipeline. + * @param pipelineResultConsumer The consumer of the latest result. 
+ */ + public VisionRunner( + FrameProvider frameSupplier, + Supplier pipelineSupplier, + Consumer pipelineResultConsumer, + QuirkyCamera cameraQuirks) { + this.frameSupplier = frameSupplier; + this.pipelineSupplier = pipelineSupplier; + this.pipelineResultConsumer = pipelineResultConsumer; + this.cameraQuirks = cameraQuirks; - visionProcessThread = new Thread(this::update); - visionProcessThread.setName("VisionRunner - " + frameSupplier.getName()); - logger = new Logger(VisionRunner.class, frameSupplier.getName(), LogGroup.VisionModule); - } + visionProcessThread = new Thread(this::update); + visionProcessThread.setName("VisionRunner - " + frameSupplier.getName()); + logger = new Logger(VisionRunner.class, frameSupplier.getName(), LogGroup.VisionModule); + } - public void startProcess() { - visionProcessThread.start(); - } + public void startProcess() { + visionProcessThread.start(); + } - private void update() { - while (!Thread.interrupted()) { - var pipeline = pipelineSupplier.get(); + private void update() { + while (!Thread.interrupted()) { + var pipeline = pipelineSupplier.get(); - // Tell our camera implementation here what kind of pre-processing we need it to be doing - // (pipeline-dependent). I kinda hate how much leak this has... - // TODO would a callback object be a better fit? - var wantedProcessType = pipeline.getThresholdType(); - frameSupplier.requestFrameThresholdType(wantedProcessType); - var settings = pipeline.getSettings(); - if (settings instanceof AdvancedPipelineSettings) { - var advanced = (AdvancedPipelineSettings) settings; - var hsvParams = - new HSVPipe.HSVParams( - advanced.hsvHue, advanced.hsvSaturation, advanced.hsvValue, advanced.hueInverted); - // TODO who should deal with preventing this from happening _every single loop_? 
- frameSupplier.requestHsvSettings(hsvParams); - } - frameSupplier.requestFrameRotation(settings.inputImageRotationMode); - frameSupplier.requestFrameCopies(settings.inputShouldShow, settings.outputShouldShow); + // Tell our camera implementation here what kind of pre-processing we need it to be doing + // (pipeline-dependent). I kinda hate how much leak this has... + // TODO would a callback object be a better fit? + var wantedProcessType = pipeline.getThresholdType(); + frameSupplier.requestFrameThresholdType(wantedProcessType); + var settings = pipeline.getSettings(); + if (settings instanceof AdvancedPipelineSettings) { + var advanced = (AdvancedPipelineSettings) settings; + var hsvParams = + new HSVPipe.HSVParams( + advanced.hsvHue, advanced.hsvSaturation, advanced.hsvValue, advanced.hueInverted); + // TODO who should deal with preventing this from happening _every single loop_? + frameSupplier.requestHsvSettings(hsvParams); + } + frameSupplier.requestFrameRotation(settings.inputImageRotationMode); + frameSupplier.requestFrameCopies(settings.inputShouldShow, settings.outputShouldShow); - // Grab the new camera frame - var frame = frameSupplier.get(); + // Grab the new camera frame + var frame = frameSupplier.get(); - // There's no guarantee the processing type change will occur this tick, so pipelines should - // check themselves - try { - var pipelineResult = pipeline.run(frame, cameraQuirks); - pipelineResultConsumer.accept(pipelineResult); - } catch (Exception ex) { - logger.error("Exception on loop " + loopCount); - ex.printStackTrace(); - } + // There's no guarantee the processing type change will occur this tick, so pipelines should + // check themselves + try { + var pipelineResult = pipeline.run(frame, cameraQuirks); + pipelineResultConsumer.accept(pipelineResult); + } catch (Exception ex) { + logger.error("Exception on loop " + loopCount); + ex.printStackTrace(); + } - loopCount++; - } + loopCount++; } + } } diff --git 
a/photon-core/src/main/java/org/photonvision/vision/processes/VisionSource.java b/photon-core/src/main/java/org/photonvision/vision/processes/VisionSource.java index 65caf8bd0f..b81a2928cc 100644 --- a/photon-core/src/main/java/org/photonvision/vision/processes/VisionSource.java +++ b/photon-core/src/main/java/org/photonvision/vision/processes/VisionSource.java @@ -21,19 +21,19 @@ import org.photonvision.vision.frame.FrameProvider; public abstract class VisionSource { - protected final CameraConfiguration cameraConfiguration; + protected final CameraConfiguration cameraConfiguration; - protected VisionSource(CameraConfiguration cameraConfiguration) { - this.cameraConfiguration = cameraConfiguration; - } + protected VisionSource(CameraConfiguration cameraConfiguration) { + this.cameraConfiguration = cameraConfiguration; + } - public CameraConfiguration getCameraConfiguration() { - return cameraConfiguration; - } + public CameraConfiguration getCameraConfiguration() { + return cameraConfiguration; + } - public abstract FrameProvider getFrameProvider(); + public abstract FrameProvider getFrameProvider(); - public abstract VisionSourceSettables getSettables(); + public abstract VisionSourceSettables getSettables(); - public abstract boolean isVendorCamera(); + public abstract boolean isVendorCamera(); } diff --git a/photon-core/src/main/java/org/photonvision/vision/processes/VisionSourceManager.java b/photon-core/src/main/java/org/photonvision/vision/processes/VisionSourceManager.java index 4b31ba0691..aee2ab026d 100644 --- a/photon-core/src/main/java/org/photonvision/vision/processes/VisionSourceManager.java +++ b/photon-core/src/main/java/org/photonvision/vision/processes/VisionSourceManager.java @@ -41,326 +41,326 @@ import org.photonvision.vision.camera.USBCameraSource; public class VisionSourceManager { - private static final Logger logger = new Logger(VisionSourceManager.class, LogGroup.Camera); - private static final List deviceBlacklist = 
List.of("bcm2835-isp"); - - final List knownUsbCameras = new CopyOnWriteArrayList<>(); - final List unmatchedLoadedConfigs = new CopyOnWriteArrayList<>(); - private boolean hasWarned; - private String ignoredCamerasRegex = ""; - - private static class SingletonHolder { - private static final VisionSourceManager INSTANCE = new VisionSourceManager(); - } - - public static VisionSourceManager getInstance() { - return SingletonHolder.INSTANCE; + private static final Logger logger = new Logger(VisionSourceManager.class, LogGroup.Camera); + private static final List deviceBlacklist = List.of("bcm2835-isp"); + + final List knownUsbCameras = new CopyOnWriteArrayList<>(); + final List unmatchedLoadedConfigs = new CopyOnWriteArrayList<>(); + private boolean hasWarned; + private String ignoredCamerasRegex = ""; + + private static class SingletonHolder { + private static final VisionSourceManager INSTANCE = new VisionSourceManager(); + } + + public static VisionSourceManager getInstance() { + return SingletonHolder.INSTANCE; + } + + VisionSourceManager() {} + + public void registerTimedTask() { + TimedTaskManager.getInstance().addTask("VisionSourceManager", this::tryMatchUSBCams, 3000); + } + + public void registerLoadedConfigs(CameraConfiguration... configs) { + registerLoadedConfigs(Arrays.asList(configs)); + } + + /** + * Register new camera configs loaded from disk. This will add them to the list of configs to try + * to match, and also automatically spawn new vision processes as necessary. + * + * @param configs The loaded camera configs. 
+ */ + public void registerLoadedConfigs(Collection configs) { + unmatchedLoadedConfigs.addAll(configs); + } + + protected Supplier> cameraInfoSupplier = + () -> List.of(UsbCamera.enumerateUsbCameras()); + + protected void tryMatchUSBCams() { + var visionSourceList = tryMatchUSBCamImpl(); + if (visionSourceList == null) return; + + logger.info("Adding " + visionSourceList.size() + " configs to VMM."); + ConfigManager.getInstance().addCameraConfigurations(visionSourceList); + var addedSources = VisionModuleManager.getInstance().addSources(visionSourceList); + addedSources.forEach(VisionModule::start); + DataChangeService.getInstance() + .publishEvent( + new OutgoingUIEvent<>( + "fullsettings", ConfigManager.getInstance().getConfig().toHashMap())); + } + + protected List tryMatchUSBCamImpl() { + return tryMatchUSBCamImpl(true); + } + + protected List tryMatchUSBCamImpl(boolean createSources) { + // Detect cameras using CSCore + List connectedCameras = + new ArrayList<>(filterAllowedDevices(cameraInfoSupplier.get())); + + // Remove all known devices + var notYetLoadedCams = new ArrayList(); + for (var connectedCam : connectedCameras) { + boolean cameraIsUnknown = true; + for (UsbCameraInfo knownCam : this.knownUsbCameras) { + if (usbCamEquals(knownCam, connectedCam)) { + cameraIsUnknown = false; + break; + } + } + if (cameraIsUnknown) { + notYetLoadedCams.add(connectedCam); + } } - VisionSourceManager() {} + if (notYetLoadedCams.isEmpty()) return null; - public void registerTimedTask() { - TimedTaskManager.getInstance().addTask("VisionSourceManager", this::tryMatchUSBCams, 3000); + if (connectedCameras.isEmpty()) { + logger.warn( + "No USB cameras were detected! Check that all cameras are connected, and that the path is correct."); + return null; } + logger.debug("Matching " + notYetLoadedCams.size() + " new cameras!"); - public void registerLoadedConfigs(CameraConfiguration... 
configs) { - registerLoadedConfigs(Arrays.asList(configs)); + // Sort out just the USB cams + var usbCamConfigs = new ArrayList<>(); + for (var config : unmatchedLoadedConfigs) { + if (config.cameraType == CameraType.UsbCamera) usbCamConfigs.add(config); } - /** - * Register new camera configs loaded from disk. This will add them to the list of configs to try - * to match, and also automatically spawn new vision processes as necessary. - * - * @param configs The loaded camera configs. - */ - public void registerLoadedConfigs(Collection configs) { - unmatchedLoadedConfigs.addAll(configs); + // Debug prints + for (var info : notYetLoadedCams) { + logger.info("Adding local video device - \"" + info.name + "\" at \"" + info.path + "\""); } - protected Supplier> cameraInfoSupplier = - () -> List.of(UsbCamera.enumerateUsbCameras()); - - protected void tryMatchUSBCams() { - var visionSourceList = tryMatchUSBCamImpl(); - if (visionSourceList == null) return; - - logger.info("Adding " + visionSourceList.size() + " configs to VMM."); - ConfigManager.getInstance().addCameraConfigurations(visionSourceList); - var addedSources = VisionModuleManager.getInstance().addSources(visionSourceList); - addedSources.forEach(VisionModule::start); - DataChangeService.getInstance() - .publishEvent( - new OutgoingUIEvent<>( - "fullsettings", ConfigManager.getInstance().getConfig().toHashMap())); + if (!usbCamConfigs.isEmpty()) + logger.debug("Trying to match " + usbCamConfigs.size() + " unmatched configs..."); + + // Match camera configs to physical cameras + var matchedCameras = matchUSBCameras(notYetLoadedCams, unmatchedLoadedConfigs); + unmatchedLoadedConfigs.removeAll(matchedCameras); + if (!unmatchedLoadedConfigs.isEmpty() && !hasWarned) { + logger.warn( + () -> + "After matching, " + + unmatchedLoadedConfigs.size() + + " configs remained unmatched. 
Is your camera disconnected?"); + logger.warn( + "Unloaded configs: " + + unmatchedLoadedConfigs.stream() + .map(it -> it.nickname) + .collect(Collectors.joining())); + hasWarned = true; } - protected List tryMatchUSBCamImpl() { - return tryMatchUSBCamImpl(true); - } - - protected List tryMatchUSBCamImpl(boolean createSources) { - // Detect cameras using CSCore - List connectedCameras = - new ArrayList<>(filterAllowedDevices(cameraInfoSupplier.get())); - - // Remove all known devices - var notYetLoadedCams = new ArrayList(); - for (var connectedCam : connectedCameras) { - boolean cameraIsUnknown = true; - for (UsbCameraInfo knownCam : this.knownUsbCameras) { - if (usbCamEquals(knownCam, connectedCam)) { - cameraIsUnknown = false; - break; - } - } - if (cameraIsUnknown) { - notYetLoadedCams.add(connectedCam); - } - } - - if (notYetLoadedCams.isEmpty()) return null; - - if (connectedCameras.isEmpty()) { - logger.warn( - "No USB cameras were detected! Check that all cameras are connected, and that the path is correct."); - return null; - } - logger.debug("Matching " + notYetLoadedCams.size() + " new cameras!"); - - // Sort out just the USB cams - var usbCamConfigs = new ArrayList<>(); - for (var config : unmatchedLoadedConfigs) { - if (config.cameraType == CameraType.UsbCamera) usbCamConfigs.add(config); - } - - // Debug prints - for (var info : notYetLoadedCams) { - logger.info("Adding local video device - \"" + info.name + "\" at \"" + info.path + "\""); - } - - if (!usbCamConfigs.isEmpty()) - logger.debug("Trying to match " + usbCamConfigs.size() + " unmatched configs..."); - - // Match camera configs to physical cameras - var matchedCameras = matchUSBCameras(notYetLoadedCams, unmatchedLoadedConfigs); - unmatchedLoadedConfigs.removeAll(matchedCameras); - if (!unmatchedLoadedConfigs.isEmpty() && !hasWarned) { - logger.warn( - () -> - "After matching, " - + unmatchedLoadedConfigs.size() - + " configs remained unmatched. 
Is your camera disconnected?"); - logger.warn( - "Unloaded configs: " - + unmatchedLoadedConfigs.stream() - .map(it -> it.nickname) - .collect(Collectors.joining())); - hasWarned = true; - } - - // We add the matched cameras to the known camera list - for (var cam : notYetLoadedCams) { - if (this.knownUsbCameras.stream().noneMatch(it -> usbCamEquals(it, cam))) { - this.knownUsbCameras.add(cam); - } - } - if (matchedCameras.isEmpty()) return null; - - // for unit tests only! - if (!createSources) { - return List.of(); - } - - // Turn these camera configs into vision sources - var sources = loadVisionSourcesFromCamConfigs(matchedCameras); - - // Print info about each vision source - for (var src : sources) { - logger.debug( - () -> - "Matched config for camera \"" - + src.getFrameProvider().getName() - + "\" and loaded " - + src.getCameraConfiguration().pipelineSettings.size() - + " pipelines"); - } - - return sources; + // We add the matched cameras to the known camera list + for (var cam : notYetLoadedCams) { + if (this.knownUsbCameras.stream().noneMatch(it -> usbCamEquals(it, cam))) { + this.knownUsbCameras.add(cam); + } } + if (matchedCameras.isEmpty()) return null; - /** - * Create {@link CameraConfiguration}s based on a list of detected USB cameras and the configs on - * disk. - * - * @param detectedCamInfos Information about currently connected USB cameras. - * @param loadedUsbCamConfigs The USB {@link CameraConfiguration}s loaded from disk. - * @return the matched configurations. 
- */ - private List matchUSBCameras( - List detectedCamInfos, List loadedUsbCamConfigs) { - var detectedCameraList = new ArrayList<>(detectedCamInfos); - ArrayList cameraConfigurations = new ArrayList<>(); - - // loop over all the configs loaded from disk - for (CameraConfiguration config : loadedUsbCamConfigs) { - UsbCameraInfo cameraInfo; - - // attempt matching by path and basename - logger.debug( - "Trying to find a match for loaded camera " - + config.baseName - + " with path " - + config.path); - cameraInfo = - detectedCameraList.stream() - .filter( - usbCameraInfo -> - usbCameraInfo.path.equals(config.path) - && cameraNameToBaseName(usbCameraInfo.name).equals(config.baseName)) - .findFirst() - .orElse(null); - - // if path based fails, attempt basename only match - if (cameraInfo == null) { - logger.debug("Failed to match by path and name, falling back to name-only match"); - cameraInfo = - detectedCameraList.stream() - .filter( - usbCameraInfo -> - cameraNameToBaseName(usbCameraInfo.name).equals(config.baseName)) - .findFirst() - .orElse(null); - } - - // If we actually matched a camera to a config, remove that camera from the list and add it to - // the output - if (cameraInfo != null) { - logger.debug("Matched the config for " + config.baseName + " to a physical camera!"); - detectedCameraList.remove(cameraInfo); - cameraConfigurations.add(mergeInfoIntoConfig(config, cameraInfo)); - } - } - - // If we have any unmatched cameras left, create a new CameraConfiguration for them here. 
- logger.debug( - "After matching loaded configs " + detectedCameraList.size() + " cameras were unmatched."); - for (UsbCameraInfo info : detectedCameraList) { - // create new camera config for all new cameras - String baseName = cameraNameToBaseName(info.name); - String uniqueName = baseNameToUniqueName(baseName); - - int suffix = 0; - while (containsName(cameraConfigurations, uniqueName)) { - suffix++; - uniqueName = String.format("%s (%d)", uniqueName, suffix); - } - - logger.info("Creating a new camera config for camera " + uniqueName); - - // HACK -- for picams, we want to use the camera model - String nickname = uniqueName; - if (isCsiCamera(info)) { - nickname = LibCameraJNI.getSensorModel().toString(); - } - - CameraConfiguration configuration = - new CameraConfiguration(baseName, uniqueName, nickname, info.path, info.otherPaths); - cameraConfigurations.add(configuration); - } - - logger.debug("Matched or created " + cameraConfigurations.size() + " camera configs!"); - return cameraConfigurations; + // for unit tests only! 
+ if (!createSources) { + return List.of(); } - private boolean isCsiCamera(UsbCameraInfo configuration) { - return (Arrays.stream(configuration.otherPaths).anyMatch(it -> it.contains("csi-video")) - || cameraNameToBaseName(configuration.name).equals("unicam")); + // Turn these camera configs into vision sources + var sources = loadVisionSourcesFromCamConfigs(matchedCameras); + + // Print info about each vision source + for (var src : sources) { + logger.debug( + () -> + "Matched config for camera \"" + + src.getFrameProvider().getName() + + "\" and loaded " + + src.getCameraConfiguration().pipelineSettings.size() + + " pipelines"); } - private CameraConfiguration mergeInfoIntoConfig(CameraConfiguration cfg, UsbCameraInfo info) { - if (!cfg.path.equals(info.path)) { - logger.debug("Updating path config from " + cfg.path + " to " + info.path); - cfg.path = info.path; - } - - return cfg; + return sources; + } + + /** + * Create {@link CameraConfiguration}s based on a list of detected USB cameras and the configs on + * disk. + * + * @param detectedCamInfos Information about currently connected USB cameras. + * @param loadedUsbCamConfigs The USB {@link CameraConfiguration}s loaded from disk. + * @return the matched configurations. 
+ */ + private List matchUSBCameras( + List detectedCamInfos, List loadedUsbCamConfigs) { + var detectedCameraList = new ArrayList<>(detectedCamInfos); + ArrayList cameraConfigurations = new ArrayList<>(); + + // loop over all the configs loaded from disk + for (CameraConfiguration config : loadedUsbCamConfigs) { + UsbCameraInfo cameraInfo; + + // attempt matching by path and basename + logger.debug( + "Trying to find a match for loaded camera " + + config.baseName + + " with path " + + config.path); + cameraInfo = + detectedCameraList.stream() + .filter( + usbCameraInfo -> + usbCameraInfo.path.equals(config.path) + && cameraNameToBaseName(usbCameraInfo.name).equals(config.baseName)) + .findFirst() + .orElse(null); + + // if path based fails, attempt basename only match + if (cameraInfo == null) { + logger.debug("Failed to match by path and name, falling back to name-only match"); + cameraInfo = + detectedCameraList.stream() + .filter( + usbCameraInfo -> + cameraNameToBaseName(usbCameraInfo.name).equals(config.baseName)) + .findFirst() + .orElse(null); + } + + // If we actually matched a camera to a config, remove that camera from the list and add it to + // the output + if (cameraInfo != null) { + logger.debug("Matched the config for " + config.baseName + " to a physical camera!"); + detectedCameraList.remove(cameraInfo); + cameraConfigurations.add(mergeInfoIntoConfig(config, cameraInfo)); + } } - public void setIgnoredCamerasRegex(String ignoredCamerasRegex) { - this.ignoredCamerasRegex = ignoredCamerasRegex; + // If we have any unmatched cameras left, create a new CameraConfiguration for them here. 
+ logger.debug( + "After matching loaded configs " + detectedCameraList.size() + " cameras were unmatched."); + for (UsbCameraInfo info : detectedCameraList) { + // create new camera config for all new cameras + String baseName = cameraNameToBaseName(info.name); + String uniqueName = baseNameToUniqueName(baseName); + + int suffix = 0; + while (containsName(cameraConfigurations, uniqueName)) { + suffix++; + uniqueName = String.format("%s (%d)", uniqueName, suffix); + } + + logger.info("Creating a new camera config for camera " + uniqueName); + + // HACK -- for picams, we want to use the camera model + String nickname = uniqueName; + if (isCsiCamera(info)) { + nickname = LibCameraJNI.getSensorModel().toString(); + } + + CameraConfiguration configuration = + new CameraConfiguration(baseName, uniqueName, nickname, info.path, info.otherPaths); + cameraConfigurations.add(configuration); } - private List filterAllowedDevices(List allDevices) { - List filteredDevices = new ArrayList<>(); - for (var device : allDevices) { - if (deviceBlacklist.contains(device.name)) { - logger.trace( - "Skipping blacklisted device: \"" + device.name + "\" at \"" + device.path + "\""); - } else if (device.name.matches(ignoredCamerasRegex)) { - logger.trace("Skipping ignored device: \"" + device.name + "\" at \"" + device.path); - } else { - filteredDevices.add(device); - logger.trace( - "Adding local video device - \"" + device.name + "\" at \"" + device.path + "\""); - } - } - return filteredDevices; - } + logger.debug("Matched or created " + cameraConfigurations.size() + " camera configs!"); + return cameraConfigurations; + } - private boolean usbCamEquals(UsbCameraInfo a, UsbCameraInfo b) { - return a.path.equals(b.path) - && a.dev == b.dev - && a.name.equals(b.name) - && a.productId == b.productId - && a.vendorId == b.vendorId; - } + private boolean isCsiCamera(UsbCameraInfo configuration) { + return (Arrays.stream(configuration.otherPaths).anyMatch(it -> it.contains("csi-video")) + || 
cameraNameToBaseName(configuration.name).equals("unicam")); + } - // Remove all non-ASCII characters - private static String cameraNameToBaseName(String cameraName) { - return cameraName.replaceAll("[^\\x00-\\x7F]", ""); + private CameraConfiguration mergeInfoIntoConfig(CameraConfiguration cfg, UsbCameraInfo info) { + if (!cfg.path.equals(info.path)) { + logger.debug("Updating path config from " + cfg.path + " to " + info.path); + cfg.path = info.path; } - // Replace spaces with underscores - private static String baseNameToUniqueName(String baseName) { - return baseName.replaceAll(" ", "_"); + return cfg; + } + + public void setIgnoredCamerasRegex(String ignoredCamerasRegex) { + this.ignoredCamerasRegex = ignoredCamerasRegex; + } + + private List filterAllowedDevices(List allDevices) { + List filteredDevices = new ArrayList<>(); + for (var device : allDevices) { + if (deviceBlacklist.contains(device.name)) { + logger.trace( + "Skipping blacklisted device: \"" + device.name + "\" at \"" + device.path + "\""); + } else if (device.name.matches(ignoredCamerasRegex)) { + logger.trace("Skipping ignored device: \"" + device.name + "\" at \"" + device.path); + } else { + filteredDevices.add(device); + logger.trace( + "Adding local video device - \"" + device.name + "\" at \"" + device.path + "\""); + } } - - private static List loadVisionSourcesFromCamConfigs( - List camConfigs) { - var cameraSources = new ArrayList(); - for (var configuration : camConfigs) { - logger.debug("Creating VisionSource for " + configuration); - - // Picams should have csi-video in the path - boolean is_picam = - (Arrays.stream(configuration.otherPaths).anyMatch(it -> it.contains("csi-video")) - || configuration.baseName.equals("unicam")); - boolean is_pi = Platform.isRaspberryPi(); - if (is_picam && is_pi) { - configuration.cameraType = CameraType.ZeroCopyPicam; - var piCamSrc = new LibcameraGpuSource(configuration); - cameraSources.add(piCamSrc); - } else { - var newCam = new 
USBCameraSource(configuration); - if (!newCam.cameraQuirks.hasQuirk(CameraQuirk.CompletelyBroken) - && !newCam.getSettables().videoModes.isEmpty()) { - cameraSources.add(newCam); - } - } + return filteredDevices; + } + + private boolean usbCamEquals(UsbCameraInfo a, UsbCameraInfo b) { + return a.path.equals(b.path) + && a.dev == b.dev + && a.name.equals(b.name) + && a.productId == b.productId + && a.vendorId == b.vendorId; + } + + // Remove all non-ASCII characters + private static String cameraNameToBaseName(String cameraName) { + return cameraName.replaceAll("[^\\x00-\\x7F]", ""); + } + + // Replace spaces with underscores + private static String baseNameToUniqueName(String baseName) { + return baseName.replaceAll(" ", "_"); + } + + private static List loadVisionSourcesFromCamConfigs( + List camConfigs) { + var cameraSources = new ArrayList(); + for (var configuration : camConfigs) { + logger.debug("Creating VisionSource for " + configuration); + + // Picams should have csi-video in the path + boolean is_picam = + (Arrays.stream(configuration.otherPaths).anyMatch(it -> it.contains("csi-video")) + || configuration.baseName.equals("unicam")); + boolean is_pi = Platform.isRaspberryPi(); + if (is_picam && is_pi) { + configuration.cameraType = CameraType.ZeroCopyPicam; + var piCamSrc = new LibcameraGpuSource(configuration); + cameraSources.add(piCamSrc); + } else { + var newCam = new USBCameraSource(configuration); + if (!newCam.cameraQuirks.hasQuirk(CameraQuirk.CompletelyBroken) + && !newCam.getSettables().videoModes.isEmpty()) { + cameraSources.add(newCam); } - return cameraSources; - } - - /** - * Check if a given config list contains the given unique name. - * - * @param configList A list of camera configs. - * @param uniqueName The unique name. - * @return If the list of configs contains the unique name. 
- */ - private boolean containsName( - final List configList, final String uniqueName) { - return configList.stream() - .anyMatch(configuration -> configuration.uniqueName.equals(uniqueName)); + } } + return cameraSources; + } + + /** + * Check if a given config list contains the given unique name. + * + * @param configList A list of camera configs. + * @param uniqueName The unique name. + * @return If the list of configs contains the unique name. + */ + private boolean containsName( + final List configList, final String uniqueName) { + return configList.stream() + .anyMatch(configuration -> configuration.uniqueName.equals(uniqueName)); + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/processes/VisionSourceSettables.java b/photon-core/src/main/java/org/photonvision/vision/processes/VisionSourceSettables.java index cda7d08e6f..8de085ffaf 100644 --- a/photon-core/src/main/java/org/photonvision/vision/processes/VisionSourceSettables.java +++ b/photon-core/src/main/java/org/photonvision/vision/processes/VisionSourceSettables.java @@ -26,97 +26,97 @@ import org.photonvision.vision.frame.FrameStaticProperties; public abstract class VisionSourceSettables { - protected static final Logger logger = - new Logger(VisionSourceSettables.class, LogGroup.VisionModule); - - private final CameraConfiguration configuration; - - protected VisionSourceSettables(CameraConfiguration configuration) { - this.configuration = configuration; - } - - protected FrameStaticProperties frameStaticProperties; - protected HashMap videoModes; - - public CameraConfiguration getConfiguration() { - return configuration; - } - - public abstract void setExposure(double exposure); - - public abstract void setAutoExposure(boolean cameraAutoExposure); - - public abstract void setBrightness(int brightness); - - public abstract void setGain(int gain); - - // Pretty uncommon so instead of abstract this is just a no-op by default - // Overriden by cameras with AWB gain support - public void 
setRedGain(int red) {} - - public void setBlueGain(int blue) {} - - public abstract VideoMode getCurrentVideoMode(); - - public void setVideoModeInternal(int index) { - setVideoMode(getAllVideoModes().get(index)); - } - - public void setVideoMode(VideoMode mode) { - logger.info( - "Setting video mode to " - + "FPS: " - + mode.fps - + " Width: " - + mode.width - + " Height: " - + mode.height - + " Pixel Format: " - + mode.pixelFormat); - setVideoModeInternal(mode); - calculateFrameStaticProps(); - } - - protected abstract void setVideoModeInternal(VideoMode videoMode); - - @SuppressWarnings("unused") - public void setVideoModeIndex(int index) { - setVideoMode(videoModes.get(index)); - } - - public abstract HashMap getAllVideoModes(); - - public double getFOV() { - return configuration.FOV; - } - - public void setFOV(double fov) { - logger.info("Setting FOV to " + fov); - configuration.FOV = fov; - calculateFrameStaticProps(); - } - - public void addCalibration(CameraCalibrationCoefficients calibrationCoefficients) { - configuration.addCalibration(calibrationCoefficients); - calculateFrameStaticProps(); - } - - private void calculateFrameStaticProps() { - var videoMode = getCurrentVideoMode(); - this.frameStaticProperties = - new FrameStaticProperties( - videoMode, - getFOV(), - configuration.calibrations.stream() - .filter( - it -> - it.resolution.width == videoMode.width - && it.resolution.height == videoMode.height) - .findFirst() - .orElse(null)); - } - - public FrameStaticProperties getFrameStaticProperties() { - return frameStaticProperties; - } + protected static final Logger logger = + new Logger(VisionSourceSettables.class, LogGroup.VisionModule); + + private final CameraConfiguration configuration; + + protected VisionSourceSettables(CameraConfiguration configuration) { + this.configuration = configuration; + } + + protected FrameStaticProperties frameStaticProperties; + protected HashMap videoModes; + + public CameraConfiguration getConfiguration() { + 
return configuration; + } + + public abstract void setExposure(double exposure); + + public abstract void setAutoExposure(boolean cameraAutoExposure); + + public abstract void setBrightness(int brightness); + + public abstract void setGain(int gain); + + // Pretty uncommon so instead of abstract this is just a no-op by default + // Overriden by cameras with AWB gain support + public void setRedGain(int red) {} + + public void setBlueGain(int blue) {} + + public abstract VideoMode getCurrentVideoMode(); + + public void setVideoModeInternal(int index) { + setVideoMode(getAllVideoModes().get(index)); + } + + public void setVideoMode(VideoMode mode) { + logger.info( + "Setting video mode to " + + "FPS: " + + mode.fps + + " Width: " + + mode.width + + " Height: " + + mode.height + + " Pixel Format: " + + mode.pixelFormat); + setVideoModeInternal(mode); + calculateFrameStaticProps(); + } + + protected abstract void setVideoModeInternal(VideoMode videoMode); + + @SuppressWarnings("unused") + public void setVideoModeIndex(int index) { + setVideoMode(videoModes.get(index)); + } + + public abstract HashMap getAllVideoModes(); + + public double getFOV() { + return configuration.FOV; + } + + public void setFOV(double fov) { + logger.info("Setting FOV to " + fov); + configuration.FOV = fov; + calculateFrameStaticProps(); + } + + public void addCalibration(CameraCalibrationCoefficients calibrationCoefficients) { + configuration.addCalibration(calibrationCoefficients); + calculateFrameStaticProps(); + } + + private void calculateFrameStaticProps() { + var videoMode = getCurrentVideoMode(); + this.frameStaticProperties = + new FrameStaticProperties( + videoMode, + getFOV(), + configuration.calibrations.stream() + .filter( + it -> + it.resolution.width == videoMode.width + && it.resolution.height == videoMode.height) + .findFirst() + .orElse(null)); + } + + public FrameStaticProperties getFrameStaticProperties() { + return frameStaticProperties; + } } diff --git 
a/photon-core/src/main/java/org/photonvision/vision/target/PotentialTarget.java b/photon-core/src/main/java/org/photonvision/vision/target/PotentialTarget.java index deb3de569d..f4306070b8 100644 --- a/photon-core/src/main/java/org/photonvision/vision/target/PotentialTarget.java +++ b/photon-core/src/main/java/org/photonvision/vision/target/PotentialTarget.java @@ -25,43 +25,43 @@ public class PotentialTarget implements Releasable { - public final Contour m_mainContour; - public final List m_subContours; - public final CVShape shape; + public final Contour m_mainContour; + public final List m_subContours; + public final CVShape shape; - public PotentialTarget(Contour inputContour) { - this(inputContour, List.of()); - } + public PotentialTarget(Contour inputContour) { + this(inputContour, List.of()); + } - public PotentialTarget(Contour inputContour, List subContours) { - this(inputContour, subContours, null); - } + public PotentialTarget(Contour inputContour, List subContours) { + this(inputContour, subContours, null); + } - public PotentialTarget(Contour inputContour, List subContours, CVShape shape) { - m_mainContour = inputContour; - m_subContours = new ArrayList<>(subContours); - this.shape = shape; - } + public PotentialTarget(Contour inputContour, List subContours, CVShape shape) { + m_mainContour = inputContour; + m_subContours = new ArrayList<>(subContours); + this.shape = shape; + } - public PotentialTarget(Contour inputContour, CVShape shape) { - this(inputContour, List.of(), shape); - } + public PotentialTarget(Contour inputContour, CVShape shape) { + this(inputContour, List.of(), shape); + } - public RotatedRect getMinAreaRect() { - return m_mainContour.getMinAreaRect(); - } + public RotatedRect getMinAreaRect() { + return m_mainContour.getMinAreaRect(); + } - public double getArea() { - return m_mainContour.getArea(); - } + public double getArea() { + return m_mainContour.getArea(); + } - @Override - public void release() { - m_mainContour.release(); - 
for (var sc : m_subContours) { - sc.release(); - } - m_subContours.clear(); - if (shape != null) shape.release(); + @Override + public void release() { + m_mainContour.release(); + for (var sc : m_subContours) { + sc.release(); } + m_subContours.clear(); + if (shape != null) shape.release(); + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/target/RobotOffsetPointMode.java b/photon-core/src/main/java/org/photonvision/vision/target/RobotOffsetPointMode.java index 434e2de6ff..17c8478dfb 100644 --- a/photon-core/src/main/java/org/photonvision/vision/target/RobotOffsetPointMode.java +++ b/photon-core/src/main/java/org/photonvision/vision/target/RobotOffsetPointMode.java @@ -17,7 +17,7 @@ package org.photonvision.vision.target; public enum RobotOffsetPointMode { - None, - Single, - Dual + None, + Single, + Dual } diff --git a/photon-core/src/main/java/org/photonvision/vision/target/RobotOffsetPointOperation.java b/photon-core/src/main/java/org/photonvision/vision/target/RobotOffsetPointOperation.java index 0e69fe9d8f..f1f1eee86b 100644 --- a/photon-core/src/main/java/org/photonvision/vision/target/RobotOffsetPointOperation.java +++ b/photon-core/src/main/java/org/photonvision/vision/target/RobotOffsetPointOperation.java @@ -17,29 +17,29 @@ package org.photonvision.vision.target; public enum RobotOffsetPointOperation { - ROPO_CLEAR(0), - ROPO_TAKESINGLE(1), - ROPO_TAKEFIRSTDUAL(2), - ROPO_TAKESECONDDUAL(3); + ROPO_CLEAR(0), + ROPO_TAKESINGLE(1), + ROPO_TAKEFIRSTDUAL(2), + ROPO_TAKESECONDDUAL(3); - public final int index; + public final int index; - RobotOffsetPointOperation(int index) { - this.index = index; - } + RobotOffsetPointOperation(int index) { + this.index = index; + } - public static RobotOffsetPointOperation fromIndex(int index) { - switch (index) { - case 0: - return ROPO_CLEAR; - case 1: - return ROPO_TAKESINGLE; - case 2: - return ROPO_TAKEFIRSTDUAL; - case 3: - return ROPO_TAKESECONDDUAL; - default: - return ROPO_CLEAR; - } + public 
static RobotOffsetPointOperation fromIndex(int index) { + switch (index) { + case 0: + return ROPO_CLEAR; + case 1: + return ROPO_TAKESINGLE; + case 2: + return ROPO_TAKEFIRSTDUAL; + case 3: + return ROPO_TAKESECONDDUAL; + default: + return ROPO_CLEAR; } + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/target/TargetCalculations.java b/photon-core/src/main/java/org/photonvision/vision/target/TargetCalculations.java index 5e64237443..8b93b37f37 100644 --- a/photon-core/src/main/java/org/photonvision/vision/target/TargetCalculations.java +++ b/photon-core/src/main/java/org/photonvision/vision/target/TargetCalculations.java @@ -23,145 +23,145 @@ import org.photonvision.vision.opencv.DualOffsetValues; public class TargetCalculations { - /** - * Calculates the yaw and pitch of a point in the image. Yaw and pitch must be calculated together - * to account for perspective distortion. Yaw is positive right, and pitch is positive up. - * - * @param offsetCenterX The X value of the offset principal point (cx) in pixels - * @param targetCenterX The X value of the target's center point in pixels - * @param horizontalFocalLength The horizontal focal length (fx) in pixels - * @param offsetCenterY The Y value of the offset principal point (cy) in pixels - * @param targetCenterY The Y value of the target's center point in pixels - * @param verticalFocalLength The vertical focal length (fy) in pixels - * @return The yaw and pitch from the principal axis to the target center, in degrees. 
- */ - public static DoubleCouple calculateYawPitch( - double offsetCenterX, - double targetCenterX, - double horizontalFocalLength, - double offsetCenterY, - double targetCenterY, - double verticalFocalLength) { - double yaw = Math.atan((targetCenterX - offsetCenterX) / horizontalFocalLength); - double pitch = - Math.atan((offsetCenterY - targetCenterY) / (verticalFocalLength / Math.cos(yaw))); - return new DoubleCouple(Math.toDegrees(yaw), Math.toDegrees(pitch)); + /** + * Calculates the yaw and pitch of a point in the image. Yaw and pitch must be calculated together + * to account for perspective distortion. Yaw is positive right, and pitch is positive up. + * + * @param offsetCenterX The X value of the offset principal point (cx) in pixels + * @param targetCenterX The X value of the target's center point in pixels + * @param horizontalFocalLength The horizontal focal length (fx) in pixels + * @param offsetCenterY The Y value of the offset principal point (cy) in pixels + * @param targetCenterY The Y value of the target's center point in pixels + * @param verticalFocalLength The vertical focal length (fy) in pixels + * @return The yaw and pitch from the principal axis to the target center, in degrees. 
+ */ + public static DoubleCouple calculateYawPitch( + double offsetCenterX, + double targetCenterX, + double horizontalFocalLength, + double offsetCenterY, + double targetCenterY, + double verticalFocalLength) { + double yaw = Math.atan((targetCenterX - offsetCenterX) / horizontalFocalLength); + double pitch = + Math.atan((offsetCenterY - targetCenterY) / (verticalFocalLength / Math.cos(yaw))); + return new DoubleCouple(Math.toDegrees(yaw), Math.toDegrees(pitch)); + } + + public static double calculateSkew(boolean isLandscape, RotatedRect minAreaRect) { + // https://namkeenman.wordpress.com/2015/12/18/open-cv-determine-angle-of-rotatedrect-minarearect/ + var angle = minAreaRect.angle; + + if (isLandscape && minAreaRect.size.width < minAreaRect.size.height) angle += 90; + else if (!isLandscape && minAreaRect.size.height < minAreaRect.size.width) angle += 90; + + // Ensure skew is bounded on [-90, 90] + while (angle > 90) angle -= 180; + while (angle < -90) angle += 180; + + return angle; + } + + public static Point calculateTargetOffsetPoint( + boolean isLandscape, TargetOffsetPointEdge offsetRegion, RotatedRect minAreaRect) { + Point[] vertices = new Point[4]; + + minAreaRect.points(vertices); + + Point bottom = getMiddle(vertices[0], vertices[1]); + Point left = getMiddle(vertices[1], vertices[2]); + Point top = getMiddle(vertices[2], vertices[3]); + Point right = getMiddle(vertices[3], vertices[0]); + + boolean orientationCorrect = minAreaRect.size.width > minAreaRect.size.height; + if (!isLandscape) orientationCorrect = !orientationCorrect; + + switch (offsetRegion) { + case Top: + if (orientationCorrect) return (left.y < right.y) ? left : right; + else return (top.y < bottom.y) ? top : bottom; + case Bottom: + if (orientationCorrect) return (left.y > right.y) ? left : right; + else return (top.y > bottom.y) ? top : bottom; + case Left: + if (orientationCorrect) return (top.x < bottom.x) ? top : bottom; + else return (left.x < right.x) ? 
left : right; + case Right: + if (orientationCorrect) return (top.x > bottom.x) ? top : bottom; + else return (left.x > right.x) ? left : right; + default: + return minAreaRect.center; } - - public static double calculateSkew(boolean isLandscape, RotatedRect minAreaRect) { - // https://namkeenman.wordpress.com/2015/12/18/open-cv-determine-angle-of-rotatedrect-minarearect/ - var angle = minAreaRect.angle; - - if (isLandscape && minAreaRect.size.width < minAreaRect.size.height) angle += 90; - else if (!isLandscape && minAreaRect.size.height < minAreaRect.size.width) angle += 90; - - // Ensure skew is bounded on [-90, 90] - while (angle > 90) angle -= 180; - while (angle < -90) angle += 180; - - return angle; - } - - public static Point calculateTargetOffsetPoint( - boolean isLandscape, TargetOffsetPointEdge offsetRegion, RotatedRect minAreaRect) { - Point[] vertices = new Point[4]; - - minAreaRect.points(vertices); - - Point bottom = getMiddle(vertices[0], vertices[1]); - Point left = getMiddle(vertices[1], vertices[2]); - Point top = getMiddle(vertices[2], vertices[3]); - Point right = getMiddle(vertices[3], vertices[0]); - - boolean orientationCorrect = minAreaRect.size.width > minAreaRect.size.height; - if (!isLandscape) orientationCorrect = !orientationCorrect; - - switch (offsetRegion) { - case Top: - if (orientationCorrect) return (left.y < right.y) ? left : right; - else return (top.y < bottom.y) ? top : bottom; - case Bottom: - if (orientationCorrect) return (left.y > right.y) ? left : right; - else return (top.y > bottom.y) ? top : bottom; - case Left: - if (orientationCorrect) return (top.x < bottom.x) ? top : bottom; - else return (left.x < right.x) ? left : right; - case Right: - if (orientationCorrect) return (top.x > bottom.x) ? top : bottom; - else return (left.x > right.x) ? 
left : right; - default: - return minAreaRect.center; - } - } - - private static Point getMiddle(Point p1, Point p2) { - return new Point(((p1.x + p2.x) / 2), ((p1.y + p2.y) / 2)); - } - - public static Point calculateRobotOffsetPoint( - Point offsetPoint, - Point camCenterPoint, - DualOffsetValues dualOffsetValues, - RobotOffsetPointMode offsetMode) { - switch (offsetMode) { - case None: - default: - return camCenterPoint; - case Single: - if (offsetPoint.x == 0 && offsetPoint.y == 0) { - return camCenterPoint; - } else { - return offsetPoint; - } - case Dual: - var resultPoint = new Point(); - var lineValues = dualOffsetValues.getLineValues(); - var offsetSlope = lineValues.getFirst(); - var offsetIntercept = lineValues.getSecond(); - - resultPoint.x = (offsetPoint.x - offsetIntercept) / offsetSlope; - resultPoint.y = (offsetPoint.y * offsetSlope) + offsetIntercept; - return resultPoint; + } + + private static Point getMiddle(Point p1, Point p2) { + return new Point(((p1.x + p2.x) / 2), ((p1.y + p2.y) / 2)); + } + + public static Point calculateRobotOffsetPoint( + Point offsetPoint, + Point camCenterPoint, + DualOffsetValues dualOffsetValues, + RobotOffsetPointMode offsetMode) { + switch (offsetMode) { + case None: + default: + return camCenterPoint; + case Single: + if (offsetPoint.x == 0 && offsetPoint.y == 0) { + return camCenterPoint; + } else { + return offsetPoint; } + case Dual: + var resultPoint = new Point(); + var lineValues = dualOffsetValues.getLineValues(); + var offsetSlope = lineValues.getFirst(); + var offsetIntercept = lineValues.getSecond(); + + resultPoint.x = (offsetPoint.x - offsetIntercept) / offsetSlope; + resultPoint.y = (offsetPoint.y * offsetSlope) + offsetIntercept; + return resultPoint; } + } - public static double getAspectRatio(RotatedRect rect, boolean isLandscape) { - if (rect.size.width == 0 || rect.size.height == 0) return 0; - double ratio = rect.size.width / rect.size.height; - - // In landscape, we should be shorter than we 
are wide (that is, aspect ratio should be >1) - if (isLandscape && ratio < 1) { - ratio = 1.0 / ratio; - } - - // If portrait, should always be taller than wide (ratio < 1) - else if (!isLandscape && ratio > 1) { - ratio = 1.0 / ratio; - } + public static double getAspectRatio(RotatedRect rect, boolean isLandscape) { + if (rect.size.width == 0 || rect.size.height == 0) return 0; + double ratio = rect.size.width / rect.size.height; - return ratio; + // In landscape, we should be shorter than we are wide (that is, aspect ratio should be >1) + if (isLandscape && ratio < 1) { + ratio = 1.0 / ratio; } - public static Point calculateDualOffsetCrosshair( - DualOffsetValues dualOffsetValues, double currentArea) { - boolean firstLarger = dualOffsetValues.firstPointArea >= dualOffsetValues.secondPointArea; - double upperArea = - firstLarger ? dualOffsetValues.secondPointArea : dualOffsetValues.firstPointArea; - double lowerArea = - firstLarger ? dualOffsetValues.firstPointArea : dualOffsetValues.secondPointArea; - - var areaFraction = MathUtils.map(currentArea, lowerArea, upperArea, 0, 1); - var xLerp = - MathUtils.lerp(dualOffsetValues.firstPoint.x, dualOffsetValues.secondPoint.x, areaFraction); - var yLerp = - MathUtils.lerp(dualOffsetValues.firstPoint.y, dualOffsetValues.secondPoint.y, areaFraction); - - return new Point(xLerp, yLerp); + // If portrait, should always be taller than wide (ratio < 1) + else if (!isLandscape && ratio > 1) { + ratio = 1.0 / ratio; } - public static DoubleCouple getLineFromPoints(Point firstPoint, Point secondPoint) { - var offsetLineSlope = (secondPoint.y - firstPoint.y) / (secondPoint.x - firstPoint.x); - var offsetLineIntercept = firstPoint.y - (offsetLineSlope * firstPoint.x); - return new DoubleCouple(offsetLineSlope, offsetLineIntercept); - } + return ratio; + } + + public static Point calculateDualOffsetCrosshair( + DualOffsetValues dualOffsetValues, double currentArea) { + boolean firstLarger = dualOffsetValues.firstPointArea >= 
dualOffsetValues.secondPointArea; + double upperArea = + firstLarger ? dualOffsetValues.secondPointArea : dualOffsetValues.firstPointArea; + double lowerArea = + firstLarger ? dualOffsetValues.firstPointArea : dualOffsetValues.secondPointArea; + + var areaFraction = MathUtils.map(currentArea, lowerArea, upperArea, 0, 1); + var xLerp = + MathUtils.lerp(dualOffsetValues.firstPoint.x, dualOffsetValues.secondPoint.x, areaFraction); + var yLerp = + MathUtils.lerp(dualOffsetValues.firstPoint.y, dualOffsetValues.secondPoint.y, areaFraction); + + return new Point(xLerp, yLerp); + } + + public static DoubleCouple getLineFromPoints(Point firstPoint, Point secondPoint) { + var offsetLineSlope = (secondPoint.y - firstPoint.y) / (secondPoint.x - firstPoint.x); + var offsetLineIntercept = firstPoint.y - (offsetLineSlope * firstPoint.x); + return new DoubleCouple(offsetLineSlope, offsetLineIntercept); + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/target/TargetModel.java b/photon-core/src/main/java/org/photonvision/vision/target/TargetModel.java index 198cbd5139..36bd2b0485 100644 --- a/photon-core/src/main/java/org/photonvision/vision/target/TargetModel.java +++ b/photon-core/src/main/java/org/photonvision/vision/target/TargetModel.java @@ -48,169 +48,169 @@ *

AprilTag models are currently only used for drawing on the output stream. */ public enum TargetModel implements Releasable { - k2016HighGoal( - List.of( - new Point3(Units.inchesToMeters(10), Units.inchesToMeters(6), 0), - new Point3(Units.inchesToMeters(-10), Units.inchesToMeters(6), 0), - new Point3(Units.inchesToMeters(-10), Units.inchesToMeters(-6), 0), - new Point3(Units.inchesToMeters(10), Units.inchesToMeters(-6), 0)), - Units.inchesToMeters(6)), - k2019DualTarget( - List.of( - new Point3(Units.inchesToMeters(7.313), Units.inchesToMeters(2.662), 0), - new Point3(Units.inchesToMeters(-7.313), Units.inchesToMeters(2.662), 0), - new Point3(Units.inchesToMeters(-5.936), Units.inchesToMeters(-2.662), 0), - new Point3(Units.inchesToMeters(5.936), Units.inchesToMeters(-2.662), 0)), - 0.1), - k2020HighGoalOuter( - List.of( - new Point3(Units.inchesToMeters(9.819867), Units.inchesToMeters(8.5), 0), - new Point3(Units.inchesToMeters(-9.819867), Units.inchesToMeters(8.5), 0), - new Point3(Units.inchesToMeters(-19.625), Units.inchesToMeters(-8.5), 0), - new Point3(Units.inchesToMeters(19.625), Units.inchesToMeters(-8.5), 0)), - Units.inchesToMeters(12)), - kCircularPowerCell7in( - List.of( - new Point3( - -Units.inchesToMeters(7) / 2, - -Units.inchesToMeters(7) / 2, - -Units.inchesToMeters(7) / 2), - new Point3( - -Units.inchesToMeters(7) / 2, - Units.inchesToMeters(7) / 2, - -Units.inchesToMeters(7) / 2), - new Point3( - Units.inchesToMeters(7) / 2, - Units.inchesToMeters(7) / 2, - -Units.inchesToMeters(7) / 2), - new Point3( - Units.inchesToMeters(7) / 2, - -Units.inchesToMeters(7) / 2, - -Units.inchesToMeters(7) / 2)), - 0), - k2022CircularCargoBall( - List.of( - new Point3( - -Units.inchesToMeters(9.5) / 2, - -Units.inchesToMeters(9.5) / 2, - -Units.inchesToMeters(9.5) / 2), - new Point3( - -Units.inchesToMeters(9.5) / 2, - Units.inchesToMeters(9.5) / 2, - -Units.inchesToMeters(9.5) / 2), - new Point3( - Units.inchesToMeters(9.5) / 2, - Units.inchesToMeters(9.5) / 
2, - -Units.inchesToMeters(9.5) / 2), - new Point3( - Units.inchesToMeters(9.5) / 2, - -Units.inchesToMeters(9.5) / 2, - -Units.inchesToMeters(9.5) / 2)), - 0), - k200mmAprilTag( // Corners of the tag's inner black square (excluding white border) - List.of( - new Point3(Units.inchesToMeters(3.25), Units.inchesToMeters(3.25), 0), - new Point3(-Units.inchesToMeters(3.25), Units.inchesToMeters(3.25), 0), - new Point3(-Units.inchesToMeters(3.25), -Units.inchesToMeters(3.25), 0), - new Point3(Units.inchesToMeters(3.25), -Units.inchesToMeters(3.25), 0)), - Units.inchesToMeters(3.25 * 2)), - kAruco6in_16h5( // Corners of the tag's inner black square (excluding white border) - List.of( - new Point3(Units.inchesToMeters(3), Units.inchesToMeters(3), 0), - new Point3(Units.inchesToMeters(3), -Units.inchesToMeters(3), 0), - new Point3(-Units.inchesToMeters(3), -Units.inchesToMeters(3), 0), - new Point3(Units.inchesToMeters(3), -Units.inchesToMeters(3), 0)), - Units.inchesToMeters(3 * 2)), - k6in_16h5( // Corners of the tag's inner black square (excluding white border) - List.of( - new Point3(Units.inchesToMeters(3), Units.inchesToMeters(3), 0), - new Point3(-Units.inchesToMeters(3), Units.inchesToMeters(3), 0), - new Point3(-Units.inchesToMeters(3), -Units.inchesToMeters(3), 0), - new Point3(Units.inchesToMeters(3), -Units.inchesToMeters(3), 0)), - Units.inchesToMeters(3 * 2)); - - @JsonIgnore private MatOfPoint3f realWorldTargetCoordinates; - @JsonIgnore private final MatOfPoint3f visualizationBoxBottom = new MatOfPoint3f(); - @JsonIgnore private final MatOfPoint3f visualizationBoxTop = new MatOfPoint3f(); - - @JsonProperty("realWorldCoordinatesArray") - private List realWorldCoordinatesArray; - - @JsonProperty("boxHeight") - private double boxHeight; - - TargetModel() {} - - TargetModel(MatOfPoint3f realWorldTargetCoordinates, double boxHeight) { - this.realWorldTargetCoordinates = realWorldTargetCoordinates; - this.realWorldCoordinatesArray = 
realWorldTargetCoordinates.toList(); - this.boxHeight = boxHeight; - - var bottomList = realWorldTargetCoordinates.toList(); - var topList = new ArrayList(); - for (var c : bottomList) { - topList.add(new Point3(c.x, c.y, c.z + boxHeight)); - } - - this.visualizationBoxBottom.fromList(bottomList); - this.visualizationBoxTop.fromList(topList); + k2016HighGoal( + List.of( + new Point3(Units.inchesToMeters(10), Units.inchesToMeters(6), 0), + new Point3(Units.inchesToMeters(-10), Units.inchesToMeters(6), 0), + new Point3(Units.inchesToMeters(-10), Units.inchesToMeters(-6), 0), + new Point3(Units.inchesToMeters(10), Units.inchesToMeters(-6), 0)), + Units.inchesToMeters(6)), + k2019DualTarget( + List.of( + new Point3(Units.inchesToMeters(7.313), Units.inchesToMeters(2.662), 0), + new Point3(Units.inchesToMeters(-7.313), Units.inchesToMeters(2.662), 0), + new Point3(Units.inchesToMeters(-5.936), Units.inchesToMeters(-2.662), 0), + new Point3(Units.inchesToMeters(5.936), Units.inchesToMeters(-2.662), 0)), + 0.1), + k2020HighGoalOuter( + List.of( + new Point3(Units.inchesToMeters(9.819867), Units.inchesToMeters(8.5), 0), + new Point3(Units.inchesToMeters(-9.819867), Units.inchesToMeters(8.5), 0), + new Point3(Units.inchesToMeters(-19.625), Units.inchesToMeters(-8.5), 0), + new Point3(Units.inchesToMeters(19.625), Units.inchesToMeters(-8.5), 0)), + Units.inchesToMeters(12)), + kCircularPowerCell7in( + List.of( + new Point3( + -Units.inchesToMeters(7) / 2, + -Units.inchesToMeters(7) / 2, + -Units.inchesToMeters(7) / 2), + new Point3( + -Units.inchesToMeters(7) / 2, + Units.inchesToMeters(7) / 2, + -Units.inchesToMeters(7) / 2), + new Point3( + Units.inchesToMeters(7) / 2, + Units.inchesToMeters(7) / 2, + -Units.inchesToMeters(7) / 2), + new Point3( + Units.inchesToMeters(7) / 2, + -Units.inchesToMeters(7) / 2, + -Units.inchesToMeters(7) / 2)), + 0), + k2022CircularCargoBall( + List.of( + new Point3( + -Units.inchesToMeters(9.5) / 2, + -Units.inchesToMeters(9.5) / 2, + 
-Units.inchesToMeters(9.5) / 2), + new Point3( + -Units.inchesToMeters(9.5) / 2, + Units.inchesToMeters(9.5) / 2, + -Units.inchesToMeters(9.5) / 2), + new Point3( + Units.inchesToMeters(9.5) / 2, + Units.inchesToMeters(9.5) / 2, + -Units.inchesToMeters(9.5) / 2), + new Point3( + Units.inchesToMeters(9.5) / 2, + -Units.inchesToMeters(9.5) / 2, + -Units.inchesToMeters(9.5) / 2)), + 0), + k200mmAprilTag( // Corners of the tag's inner black square (excluding white border) + List.of( + new Point3(Units.inchesToMeters(3.25), Units.inchesToMeters(3.25), 0), + new Point3(-Units.inchesToMeters(3.25), Units.inchesToMeters(3.25), 0), + new Point3(-Units.inchesToMeters(3.25), -Units.inchesToMeters(3.25), 0), + new Point3(Units.inchesToMeters(3.25), -Units.inchesToMeters(3.25), 0)), + Units.inchesToMeters(3.25 * 2)), + kAruco6in_16h5( // Corners of the tag's inner black square (excluding white border) + List.of( + new Point3(Units.inchesToMeters(3), Units.inchesToMeters(3), 0), + new Point3(Units.inchesToMeters(3), -Units.inchesToMeters(3), 0), + new Point3(-Units.inchesToMeters(3), -Units.inchesToMeters(3), 0), + new Point3(Units.inchesToMeters(3), -Units.inchesToMeters(3), 0)), + Units.inchesToMeters(3 * 2)), + k6in_16h5( // Corners of the tag's inner black square (excluding white border) + List.of( + new Point3(Units.inchesToMeters(3), Units.inchesToMeters(3), 0), + new Point3(-Units.inchesToMeters(3), Units.inchesToMeters(3), 0), + new Point3(-Units.inchesToMeters(3), -Units.inchesToMeters(3), 0), + new Point3(Units.inchesToMeters(3), -Units.inchesToMeters(3), 0)), + Units.inchesToMeters(3 * 2)); + + @JsonIgnore private MatOfPoint3f realWorldTargetCoordinates; + @JsonIgnore private final MatOfPoint3f visualizationBoxBottom = new MatOfPoint3f(); + @JsonIgnore private final MatOfPoint3f visualizationBoxTop = new MatOfPoint3f(); + + @JsonProperty("realWorldCoordinatesArray") + private List realWorldCoordinatesArray; + + @JsonProperty("boxHeight") + private double boxHeight; + 
+ TargetModel() {} + + TargetModel(MatOfPoint3f realWorldTargetCoordinates, double boxHeight) { + this.realWorldTargetCoordinates = realWorldTargetCoordinates; + this.realWorldCoordinatesArray = realWorldTargetCoordinates.toList(); + this.boxHeight = boxHeight; + + var bottomList = realWorldTargetCoordinates.toList(); + var topList = new ArrayList(); + for (var c : bottomList) { + topList.add(new Point3(c.x, c.y, c.z + boxHeight)); } - @JsonCreator - TargetModel( - @JsonProperty(value = "realWorldCoordinatesArray") List points, - @JsonProperty(value = "boxHeight") double boxHeight) { - this(listToMat(points), boxHeight); - } - - public List getRealWorldCoordinatesArray() { - return this.realWorldCoordinatesArray; - } - - public double getBoxHeight() { - return boxHeight; - } - - public void setRealWorldCoordinatesArray(List realWorldCoordinatesArray) { - this.realWorldCoordinatesArray = realWorldCoordinatesArray; - } - - public void setBoxHeight(double boxHeight) { - this.boxHeight = boxHeight; - } - - private static MatOfPoint3f listToMat(List points) { - var mat = new MatOfPoint3f(); - mat.fromList(points); - return mat; - } - - public MatOfPoint3f getRealWorldTargetCoordinates() { - return realWorldTargetCoordinates; - } - - public MatOfPoint3f getVisualizationBoxBottom() { - return visualizationBoxBottom; - } - - public MatOfPoint3f getVisualizationBoxTop() { - return visualizationBoxTop; - } - - // public static TargetModel getCircleTarget(double Units.inchesToMeters(7)) { - // var corners = - // List.of( - // new Point3(-Units.inchesToMeters(7) / 2, -radius / 2, -radius / 2), - // new Point3(-Units.inchesToMeters(7) / 2, radius / 2, -radius / 2), - // new Point3(Units.inchesToMeters(7) / 2, radius / 2, -radius / 2), - // new Point3(Units.inchesToMeters(7) / 2, -radius / 2, -radius / 2)); - // return new TargetModel(corners, 0); - // } - - @Override - public void release() { - realWorldTargetCoordinates.release(); - visualizationBoxBottom.release(); - 
visualizationBoxTop.release(); - } + this.visualizationBoxBottom.fromList(bottomList); + this.visualizationBoxTop.fromList(topList); + } + + @JsonCreator + TargetModel( + @JsonProperty(value = "realWorldCoordinatesArray") List points, + @JsonProperty(value = "boxHeight") double boxHeight) { + this(listToMat(points), boxHeight); + } + + public List getRealWorldCoordinatesArray() { + return this.realWorldCoordinatesArray; + } + + public double getBoxHeight() { + return boxHeight; + } + + public void setRealWorldCoordinatesArray(List realWorldCoordinatesArray) { + this.realWorldCoordinatesArray = realWorldCoordinatesArray; + } + + public void setBoxHeight(double boxHeight) { + this.boxHeight = boxHeight; + } + + private static MatOfPoint3f listToMat(List points) { + var mat = new MatOfPoint3f(); + mat.fromList(points); + return mat; + } + + public MatOfPoint3f getRealWorldTargetCoordinates() { + return realWorldTargetCoordinates; + } + + public MatOfPoint3f getVisualizationBoxBottom() { + return visualizationBoxBottom; + } + + public MatOfPoint3f getVisualizationBoxTop() { + return visualizationBoxTop; + } + + // public static TargetModel getCircleTarget(double Units.inchesToMeters(7)) { + // var corners = + // List.of( + // new Point3(-Units.inchesToMeters(7) / 2, -radius / 2, -radius / 2), + // new Point3(-Units.inchesToMeters(7) / 2, radius / 2, -radius / 2), + // new Point3(Units.inchesToMeters(7) / 2, radius / 2, -radius / 2), + // new Point3(Units.inchesToMeters(7) / 2, -radius / 2, -radius / 2)); + // return new TargetModel(corners, 0); + // } + + @Override + public void release() { + realWorldTargetCoordinates.release(); + visualizationBoxBottom.release(); + visualizationBoxTop.release(); + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/target/TargetOffsetPointEdge.java b/photon-core/src/main/java/org/photonvision/vision/target/TargetOffsetPointEdge.java index eccf287ec5..1c166ed136 100644 --- 
a/photon-core/src/main/java/org/photonvision/vision/target/TargetOffsetPointEdge.java +++ b/photon-core/src/main/java/org/photonvision/vision/target/TargetOffsetPointEdge.java @@ -17,9 +17,9 @@ package org.photonvision.vision.target; public enum TargetOffsetPointEdge { - Center, - Top, - Bottom, - Left, - Right + Center, + Top, + Bottom, + Left, + Right } diff --git a/photon-core/src/main/java/org/photonvision/vision/target/TargetOrientation.java b/photon-core/src/main/java/org/photonvision/vision/target/TargetOrientation.java index 61b19c7f89..4f88382487 100644 --- a/photon-core/src/main/java/org/photonvision/vision/target/TargetOrientation.java +++ b/photon-core/src/main/java/org/photonvision/vision/target/TargetOrientation.java @@ -17,6 +17,6 @@ package org.photonvision.vision.target; public enum TargetOrientation { - Portrait, - Landscape + Portrait, + Landscape } diff --git a/photon-core/src/main/java/org/photonvision/vision/target/TrackedTarget.java b/photon-core/src/main/java/org/photonvision/vision/target/TrackedTarget.java index 9be1e9162c..db1411f57a 100644 --- a/photon-core/src/main/java/org/photonvision/vision/target/TrackedTarget.java +++ b/photon-core/src/main/java/org/photonvision/vision/target/TrackedTarget.java @@ -43,456 +43,456 @@ import org.photonvision.vision.opencv.Releasable; public class TrackedTarget implements Releasable { - public final Contour m_mainContour; - public List m_subContours; // can be empty - - private MatOfPoint2f m_approximateBoundingPolygon; - - private List m_targetCorners = List.of(); - - private Point m_targetOffsetPoint; - private Point m_robotOffsetPoint; - - private double m_pitch; - private double m_yaw; - private double m_area; - private double m_skew; - - private Transform3d m_bestCameraToTarget3d = new Transform3d(); - private Transform3d m_altCameraToTarget3d = new Transform3d(); - - private CVShape m_shape; - - private int m_fiducialId = -1; - private double m_poseAmbiguity = -1; - - private Mat 
m_cameraRelativeTvec, m_cameraRelativeRvec; - - public TrackedTarget( - PotentialTarget origTarget, TargetCalculationParameters params, CVShape shape) { - this.m_mainContour = origTarget.m_mainContour; - this.m_subContours = origTarget.m_subContours; - this.m_shape = shape; - calculateValues(params); - } - - public TrackedTarget( - AprilTagDetection tagDetection, - AprilTagPoseEstimate tagPose, - TargetCalculationParameters params) { - m_targetOffsetPoint = new Point(tagDetection.getCenterX(), tagDetection.getCenterY()); - m_robotOffsetPoint = new Point(); - var yawPitch = - TargetCalculations.calculateYawPitch( - params.cameraCenterPoint.x, - tagDetection.getCenterX(), - params.horizontalFocalLength, - params.cameraCenterPoint.y, - tagDetection.getCenterY(), - params.verticalFocalLength); - m_yaw = yawPitch.getFirst(); - m_pitch = yawPitch.getSecond(); - var bestPose = new Transform3d(); - var altPose = new Transform3d(); - - if (tagPose != null) { - if (tagPose.error1 <= tagPose.error2) { - bestPose = tagPose.pose1; - altPose = tagPose.pose2; - } else { - bestPose = tagPose.pose2; - altPose = tagPose.pose1; - } - - bestPose = MathUtils.convertApriltagtoOpenCV(bestPose); - altPose = MathUtils.convertApriltagtoOpenCV(altPose); - - m_bestCameraToTarget3d = bestPose; - m_altCameraToTarget3d = altPose; - - m_poseAmbiguity = tagPose.getAmbiguity(); - - var tvec = new Mat(3, 1, CvType.CV_64FC1); - tvec.put( - 0, - 0, - new double[] { - bestPose.getTranslation().getX(), - bestPose.getTranslation().getY(), - bestPose.getTranslation().getZ() - }); - setCameraRelativeTvec(tvec); - - // Opencv expects a 3d vector with norm = angle and direction = axis - var rvec = new Mat(3, 1, CvType.CV_64FC1); - MathUtils.rotationToOpencvRvec(bestPose.getRotation(), rvec); - setCameraRelativeRvec(rvec); + public final Contour m_mainContour; + public List m_subContours; // can be empty + + private MatOfPoint2f m_approximateBoundingPolygon; + + private List m_targetCorners = List.of(); + + 
private Point m_targetOffsetPoint; + private Point m_robotOffsetPoint; + + private double m_pitch; + private double m_yaw; + private double m_area; + private double m_skew; + + private Transform3d m_bestCameraToTarget3d = new Transform3d(); + private Transform3d m_altCameraToTarget3d = new Transform3d(); + + private CVShape m_shape; + + private int m_fiducialId = -1; + private double m_poseAmbiguity = -1; + + private Mat m_cameraRelativeTvec, m_cameraRelativeRvec; + + public TrackedTarget( + PotentialTarget origTarget, TargetCalculationParameters params, CVShape shape) { + this.m_mainContour = origTarget.m_mainContour; + this.m_subContours = origTarget.m_subContours; + this.m_shape = shape; + calculateValues(params); + } + + public TrackedTarget( + AprilTagDetection tagDetection, + AprilTagPoseEstimate tagPose, + TargetCalculationParameters params) { + m_targetOffsetPoint = new Point(tagDetection.getCenterX(), tagDetection.getCenterY()); + m_robotOffsetPoint = new Point(); + var yawPitch = + TargetCalculations.calculateYawPitch( + params.cameraCenterPoint.x, + tagDetection.getCenterX(), + params.horizontalFocalLength, + params.cameraCenterPoint.y, + tagDetection.getCenterY(), + params.verticalFocalLength); + m_yaw = yawPitch.getFirst(); + m_pitch = yawPitch.getSecond(); + var bestPose = new Transform3d(); + var altPose = new Transform3d(); + + if (tagPose != null) { + if (tagPose.error1 <= tagPose.error2) { + bestPose = tagPose.pose1; + altPose = tagPose.pose2; + } else { + bestPose = tagPose.pose2; + altPose = tagPose.pose1; + } + + bestPose = MathUtils.convertApriltagtoOpenCV(bestPose); + altPose = MathUtils.convertApriltagtoOpenCV(altPose); + + m_bestCameraToTarget3d = bestPose; + m_altCameraToTarget3d = altPose; + + m_poseAmbiguity = tagPose.getAmbiguity(); + + var tvec = new Mat(3, 1, CvType.CV_64FC1); + tvec.put( + 0, + 0, + new double[] { + bestPose.getTranslation().getX(), + bestPose.getTranslation().getY(), + bestPose.getTranslation().getZ() + }); + 
setCameraRelativeTvec(tvec); + + // Opencv expects a 3d vector with norm = angle and direction = axis + var rvec = new Mat(3, 1, CvType.CV_64FC1); + MathUtils.rotationToOpencvRvec(bestPose.getRotation(), rvec); + setCameraRelativeRvec(rvec); + } + + double[] corners = tagDetection.getCorners(); + Point[] cornerPoints = + new Point[] { + new Point(corners[0], corners[1]), + new Point(corners[2], corners[3]), + new Point(corners[4], corners[5]), + new Point(corners[6], corners[7]) + }; + m_targetCorners = List.of(cornerPoints); + MatOfPoint contourMat = new MatOfPoint(cornerPoints); + m_approximateBoundingPolygon = new MatOfPoint2f(cornerPoints); + m_mainContour = new Contour(contourMat); + m_area = m_mainContour.getArea() / params.imageArea * 100; + m_fiducialId = tagDetection.getId(); + m_shape = null; + + // TODO implement skew? or just yeet + m_skew = 0; + + var tvec = new Mat(3, 1, CvType.CV_64FC1); + tvec.put( + 0, + 0, + new double[] { + bestPose.getTranslation().getX(), + bestPose.getTranslation().getY(), + bestPose.getTranslation().getZ() + }); + setCameraRelativeTvec(tvec); + + // Opencv expects a 3d vector with norm = angle and direction = axis + var rvec = new Mat(3, 1, CvType.CV_64FC1); + MathUtils.rotationToOpencvRvec(bestPose.getRotation(), rvec); + setCameraRelativeRvec(rvec); + } + + public TrackedTarget(ArucoDetectionResult result, TargetCalculationParameters params) { + m_targetOffsetPoint = new Point(result.getCenterX(), result.getCenterY()); + m_robotOffsetPoint = new Point(); + var yawPitch = + TargetCalculations.calculateYawPitch( + params.cameraCenterPoint.x, + result.getCenterX(), + params.horizontalFocalLength, + params.cameraCenterPoint.y, + result.getCenterY(), + params.verticalFocalLength); + m_yaw = yawPitch.getFirst(); + m_pitch = yawPitch.getSecond(); + + double[] xCorners = result.getxCorners(); + double[] yCorners = result.getyCorners(); + + Point[] cornerPoints = + new Point[] { + new Point(xCorners[0], yCorners[0]), + new 
Point(xCorners[1], yCorners[1]), + new Point(xCorners[2], yCorners[2]), + new Point(xCorners[3], yCorners[3]) + }; + m_targetCorners = List.of(cornerPoints); + MatOfPoint contourMat = new MatOfPoint(cornerPoints); + m_approximateBoundingPolygon = new MatOfPoint2f(cornerPoints); + m_mainContour = new Contour(contourMat); + m_area = m_mainContour.getArea() / params.imageArea * 100; + m_fiducialId = result.getId(); + m_shape = null; + + // TODO implement skew? or just yeet + + var tvec = new Mat(3, 1, CvType.CV_64FC1); + tvec.put(0, 0, result.getTvec()); + setCameraRelativeTvec(tvec); + + var rvec = new Mat(3, 1, CvType.CV_64FC1); + rvec.put(0, 0, result.getRvec()); + setCameraRelativeRvec(rvec); + + { + Translation3d translation = + // new Translation3d(tVec.get(0, 0)[0], tVec.get(1, 0)[0], tVec.get(2, 0)[0]); + new Translation3d(result.getTvec()[0], result.getTvec()[1], result.getTvec()[2]); + var axisangle = + VecBuilder.fill(result.getRvec()[0], result.getRvec()[1], result.getRvec()[2]); + Rotation3d rotation = new Rotation3d(axisangle, axisangle.normF()); + + m_bestCameraToTarget3d = + MathUtils.convertOpenCVtoPhotonTransform(new Transform3d(translation, rotation)); + } + } + + public void setFiducialId(int id) { + m_fiducialId = id; + } + + public int getFiducialId() { + return m_fiducialId; + } + + public void setPoseAmbiguity(double ambiguity) { + m_poseAmbiguity = ambiguity; + } + + public double getPoseAmbiguity() { + return m_poseAmbiguity; + } + + /** + * Set the approximate bounding polygon. + * + * @param boundingPolygon List of points to copy. Not modified. 
+ */ + public void setApproximateBoundingPolygon(MatOfPoint2f boundingPolygon) { + if (m_approximateBoundingPolygon == null) m_approximateBoundingPolygon = new MatOfPoint2f(); + boundingPolygon.copyTo(m_approximateBoundingPolygon); + } + + public Point getTargetOffsetPoint() { + return m_targetOffsetPoint; + } + + public Point getRobotOffsetPoint() { + return m_robotOffsetPoint; + } + + public double getPitch() { + return m_pitch; + } + + public double getYaw() { + return m_yaw; + } + + public double getSkew() { + return m_skew; + } + + public double getArea() { + return m_area; + } + + public RotatedRect getMinAreaRect() { + return m_mainContour.getMinAreaRect(); + } + + public MatOfPoint2f getApproximateBoundingPolygon() { + return m_approximateBoundingPolygon; + } + + public void calculateValues(TargetCalculationParameters params) { + // this MUST happen in this exact order! (TODO: document why) + m_targetOffsetPoint = + TargetCalculations.calculateTargetOffsetPoint( + params.isLandscape, params.targetOffsetPointEdge, getMinAreaRect()); + m_robotOffsetPoint = + TargetCalculations.calculateRobotOffsetPoint( + params.robotOffsetSinglePoint, + params.cameraCenterPoint, + params.dualOffsetValues, + params.robotOffsetPointMode); + + // order of this stuff doesnt matter though + var yawPitch = + TargetCalculations.calculateYawPitch( + m_robotOffsetPoint.x, + m_targetOffsetPoint.x, + params.horizontalFocalLength, + m_robotOffsetPoint.y, + m_targetOffsetPoint.y, + params.verticalFocalLength); + m_yaw = yawPitch.getFirst(); + m_pitch = yawPitch.getSecond(); + + m_area = m_mainContour.getMinAreaRect().size.area() / params.imageArea * 100; + + m_skew = TargetCalculations.calculateSkew(params.isLandscape, getMinAreaRect()); + } + + @Override + public void release() { + m_mainContour.release(); + + // TODO how can this check fail? 
+ if (m_subContours != null) { + for (var sc : m_subContours) { + sc.release(); + } + } + + if (m_cameraRelativeTvec != null) m_cameraRelativeTvec.release(); + if (m_cameraRelativeRvec != null) m_cameraRelativeRvec.release(); + } + + public void setTargetCorners(List targetCorners) { + this.m_targetCorners = targetCorners; + } + + public List getTargetCorners() { + return m_targetCorners; + } + + public boolean hasSubContours() { + return !m_subContours.isEmpty(); + } + + public Transform3d getBestCameraToTarget3d() { + return m_bestCameraToTarget3d; + } + + public Transform3d getAltCameraToTarget3d() { + return m_altCameraToTarget3d; + } + + public void setBestCameraToTarget3d(Transform3d pose) { + this.m_bestCameraToTarget3d = pose; + } + + public void setAltCameraToTarget3d(Transform3d pose) { + this.m_altCameraToTarget3d = pose; + } + + public Mat getCameraRelativeTvec() { + return m_cameraRelativeTvec; + } + + public void setCameraRelativeTvec(Mat cameraRelativeTvec) { + if (this.m_cameraRelativeTvec == null) m_cameraRelativeTvec = new Mat(); + cameraRelativeTvec.copyTo(this.m_cameraRelativeTvec); + } + + public Mat getCameraRelativeRvec() { + return m_cameraRelativeRvec; + } + + public void setCameraRelativeRvec(Mat cameraRelativeRvec) { + if (this.m_cameraRelativeRvec == null) m_cameraRelativeRvec = new Mat(); + cameraRelativeRvec.copyTo(this.m_cameraRelativeRvec); + } + + public CVShape getShape() { + return m_shape; + } + + public void setShape(CVShape shape) { + this.m_shape = shape; + } + + public HashMap toHashMap() { + var ret = new HashMap(); + ret.put("pitch", getPitch()); + ret.put("yaw", getYaw()); + ret.put("skew", getSkew()); + ret.put("area", getArea()); + ret.put("ambiguity", getPoseAmbiguity()); + + var bestCameraToTarget3d = getBestCameraToTarget3d(); + if (bestCameraToTarget3d != null) { + ret.put("pose", SerializationUtils.transformToHashMap(bestCameraToTarget3d)); + } + ret.put("fiducialId", getFiducialId()); + return ret; + } + + public 
boolean isFiducial() { + return this.m_fiducialId >= 0; + } + + public static List simpleFromTrackedTargets(List targets) { + var ret = new ArrayList(); + for (var t : targets) { + var minAreaRectCorners = new ArrayList(); + var detectedCorners = new ArrayList(); + { + var points = new Point[4]; + t.getMinAreaRect().points(points); + for (int i = 0; i < 4; i++) { + minAreaRectCorners.add(new TargetCorner(points[i].x, points[i].y)); } - - double[] corners = tagDetection.getCorners(); - Point[] cornerPoints = - new Point[] { - new Point(corners[0], corners[1]), - new Point(corners[2], corners[3]), - new Point(corners[4], corners[5]), - new Point(corners[6], corners[7]) - }; - m_targetCorners = List.of(cornerPoints); - MatOfPoint contourMat = new MatOfPoint(cornerPoints); - m_approximateBoundingPolygon = new MatOfPoint2f(cornerPoints); - m_mainContour = new Contour(contourMat); - m_area = m_mainContour.getArea() / params.imageArea * 100; - m_fiducialId = tagDetection.getId(); - m_shape = null; - - // TODO implement skew? 
or just yeet - m_skew = 0; - - var tvec = new Mat(3, 1, CvType.CV_64FC1); - tvec.put( - 0, - 0, - new double[] { - bestPose.getTranslation().getX(), - bestPose.getTranslation().getY(), - bestPose.getTranslation().getZ() - }); - setCameraRelativeTvec(tvec); - - // Opencv expects a 3d vector with norm = angle and direction = axis - var rvec = new Mat(3, 1, CvType.CV_64FC1); - MathUtils.rotationToOpencvRvec(bestPose.getRotation(), rvec); - setCameraRelativeRvec(rvec); - } - - public TrackedTarget(ArucoDetectionResult result, TargetCalculationParameters params) { - m_targetOffsetPoint = new Point(result.getCenterX(), result.getCenterY()); - m_robotOffsetPoint = new Point(); - var yawPitch = - TargetCalculations.calculateYawPitch( - params.cameraCenterPoint.x, - result.getCenterX(), - params.horizontalFocalLength, - params.cameraCenterPoint.y, - result.getCenterY(), - params.verticalFocalLength); - m_yaw = yawPitch.getFirst(); - m_pitch = yawPitch.getSecond(); - - double[] xCorners = result.getxCorners(); - double[] yCorners = result.getyCorners(); - - Point[] cornerPoints = - new Point[] { - new Point(xCorners[0], yCorners[0]), - new Point(xCorners[1], yCorners[1]), - new Point(xCorners[2], yCorners[2]), - new Point(xCorners[3], yCorners[3]) - }; - m_targetCorners = List.of(cornerPoints); - MatOfPoint contourMat = new MatOfPoint(cornerPoints); - m_approximateBoundingPolygon = new MatOfPoint2f(cornerPoints); - m_mainContour = new Contour(contourMat); - m_area = m_mainContour.getArea() / params.imageArea * 100; - m_fiducialId = result.getId(); - m_shape = null; - - // TODO implement skew? 
or just yeet - - var tvec = new Mat(3, 1, CvType.CV_64FC1); - tvec.put(0, 0, result.getTvec()); - setCameraRelativeTvec(tvec); - - var rvec = new Mat(3, 1, CvType.CV_64FC1); - rvec.put(0, 0, result.getRvec()); - setCameraRelativeRvec(rvec); - - { - Translation3d translation = - // new Translation3d(tVec.get(0, 0)[0], tVec.get(1, 0)[0], tVec.get(2, 0)[0]); - new Translation3d(result.getTvec()[0], result.getTvec()[1], result.getTvec()[2]); - var axisangle = - VecBuilder.fill(result.getRvec()[0], result.getRvec()[1], result.getRvec()[2]); - Rotation3d rotation = new Rotation3d(axisangle, axisangle.normF()); - - m_bestCameraToTarget3d = - MathUtils.convertOpenCVtoPhotonTransform(new Transform3d(translation, rotation)); + } + { + var points = t.getTargetCorners(); + for (int i = 0; i < points.size(); i++) { + detectedCorners.add(new TargetCorner(points.get(i).x, points.get(i).y)); } - } - - public void setFiducialId(int id) { - m_fiducialId = id; - } - - public int getFiducialId() { - return m_fiducialId; - } - - public void setPoseAmbiguity(double ambiguity) { - m_poseAmbiguity = ambiguity; - } - - public double getPoseAmbiguity() { - return m_poseAmbiguity; - } - - /** - * Set the approximate bounding polygon. - * - * @param boundingPolygon List of points to copy. Not modified. 
- */ - public void setApproximateBoundingPolygon(MatOfPoint2f boundingPolygon) { - if (m_approximateBoundingPolygon == null) m_approximateBoundingPolygon = new MatOfPoint2f(); - boundingPolygon.copyTo(m_approximateBoundingPolygon); - } - - public Point getTargetOffsetPoint() { - return m_targetOffsetPoint; - } - - public Point getRobotOffsetPoint() { - return m_robotOffsetPoint; - } - - public double getPitch() { - return m_pitch; - } - - public double getYaw() { - return m_yaw; - } - - public double getSkew() { - return m_skew; - } - - public double getArea() { - return m_area; - } - - public RotatedRect getMinAreaRect() { - return m_mainContour.getMinAreaRect(); - } - - public MatOfPoint2f getApproximateBoundingPolygon() { - return m_approximateBoundingPolygon; - } - - public void calculateValues(TargetCalculationParameters params) { - // this MUST happen in this exact order! (TODO: document why) - m_targetOffsetPoint = - TargetCalculations.calculateTargetOffsetPoint( - params.isLandscape, params.targetOffsetPointEdge, getMinAreaRect()); - m_robotOffsetPoint = - TargetCalculations.calculateRobotOffsetPoint( - params.robotOffsetSinglePoint, - params.cameraCenterPoint, - params.dualOffsetValues, - params.robotOffsetPointMode); - - // order of this stuff doesnt matter though - var yawPitch = - TargetCalculations.calculateYawPitch( - m_robotOffsetPoint.x, - m_targetOffsetPoint.x, - params.horizontalFocalLength, - m_robotOffsetPoint.y, - m_targetOffsetPoint.y, - params.verticalFocalLength); - m_yaw = yawPitch.getFirst(); - m_pitch = yawPitch.getSecond(); - - m_area = m_mainContour.getMinAreaRect().size.area() / params.imageArea * 100; - - m_skew = TargetCalculations.calculateSkew(params.isLandscape, getMinAreaRect()); - } - - @Override - public void release() { - m_mainContour.release(); - - // TODO how can this check fail? 
- if (m_subContours != null) { - for (var sc : m_subContours) { - sc.release(); - } - } - - if (m_cameraRelativeTvec != null) m_cameraRelativeTvec.release(); - if (m_cameraRelativeRvec != null) m_cameraRelativeRvec.release(); - } - - public void setTargetCorners(List targetCorners) { - this.m_targetCorners = targetCorners; - } - - public List getTargetCorners() { - return m_targetCorners; - } - - public boolean hasSubContours() { - return !m_subContours.isEmpty(); - } - - public Transform3d getBestCameraToTarget3d() { - return m_bestCameraToTarget3d; - } - - public Transform3d getAltCameraToTarget3d() { - return m_altCameraToTarget3d; - } - - public void setBestCameraToTarget3d(Transform3d pose) { - this.m_bestCameraToTarget3d = pose; - } - - public void setAltCameraToTarget3d(Transform3d pose) { - this.m_altCameraToTarget3d = pose; - } - - public Mat getCameraRelativeTvec() { - return m_cameraRelativeTvec; - } - - public void setCameraRelativeTvec(Mat cameraRelativeTvec) { - if (this.m_cameraRelativeTvec == null) m_cameraRelativeTvec = new Mat(); - cameraRelativeTvec.copyTo(this.m_cameraRelativeTvec); - } - - public Mat getCameraRelativeRvec() { - return m_cameraRelativeRvec; - } - - public void setCameraRelativeRvec(Mat cameraRelativeRvec) { - if (this.m_cameraRelativeRvec == null) m_cameraRelativeRvec = new Mat(); - cameraRelativeRvec.copyTo(this.m_cameraRelativeRvec); - } - - public CVShape getShape() { - return m_shape; - } - - public void setShape(CVShape shape) { - this.m_shape = shape; - } - - public HashMap toHashMap() { - var ret = new HashMap(); - ret.put("pitch", getPitch()); - ret.put("yaw", getYaw()); - ret.put("skew", getSkew()); - ret.put("area", getArea()); - ret.put("ambiguity", getPoseAmbiguity()); - - var bestCameraToTarget3d = getBestCameraToTarget3d(); - if (bestCameraToTarget3d != null) { - ret.put("pose", SerializationUtils.transformToHashMap(bestCameraToTarget3d)); - } - ret.put("fiducialId", getFiducialId()); - return ret; - } - - public 
boolean isFiducial() { - return this.m_fiducialId >= 0; - } - - public static List simpleFromTrackedTargets(List targets) { - var ret = new ArrayList(); - for (var t : targets) { - var minAreaRectCorners = new ArrayList(); - var detectedCorners = new ArrayList(); - { - var points = new Point[4]; - t.getMinAreaRect().points(points); - for (int i = 0; i < 4; i++) { - minAreaRectCorners.add(new TargetCorner(points[i].x, points[i].y)); - } - } - { - var points = t.getTargetCorners(); - for (int i = 0; i < points.size(); i++) { - detectedCorners.add(new TargetCorner(points.get(i).x, points.get(i).y)); - } - } - - ret.add( - new PhotonTrackedTarget( - t.getYaw(), - t.getPitch(), - t.getArea(), - t.getSkew(), - t.getFiducialId(), - t.getBestCameraToTarget3d(), - t.getAltCameraToTarget3d(), - t.getPoseAmbiguity(), - minAreaRectCorners, - detectedCorners)); - } - return ret; - } - - public static class TargetCalculationParameters { - // TargetOffset calculation values - final boolean isLandscape; - final TargetOffsetPointEdge targetOffsetPointEdge; - - // RobotOffset calculation values - final RobotOffsetPointMode robotOffsetPointMode; - final Point robotOffsetSinglePoint; - final DualOffsetValues dualOffsetValues; - - // center point of image - final Point cameraCenterPoint; - - // yaw calculation values - final double horizontalFocalLength; - - // pitch calculation values - final double verticalFocalLength; - - // area calculation values - final double imageArea; - - public TargetCalculationParameters( - boolean isLandscape, - TargetOffsetPointEdge targetOffsetPointEdge, - RobotOffsetPointMode robotOffsetPointMode, - Point robotOffsetSinglePoint, - DualOffsetValues dualOffsetValues, - Point cameraCenterPoint, - double horizontalFocalLength, - double verticalFocalLength, - double imageArea) { - - this.isLandscape = isLandscape; - this.targetOffsetPointEdge = targetOffsetPointEdge; - this.robotOffsetPointMode = robotOffsetPointMode; - this.robotOffsetSinglePoint = 
robotOffsetSinglePoint; - this.dualOffsetValues = dualOffsetValues; - this.cameraCenterPoint = cameraCenterPoint; - this.horizontalFocalLength = horizontalFocalLength; - this.verticalFocalLength = verticalFocalLength; - this.imageArea = imageArea; - } - - public TargetCalculationParameters( - boolean isLandscape, - TargetOffsetPointEdge targetOffsetPointEdge, - RobotOffsetPointMode robotOffsetPointMode, - Point robotOffsetSinglePoint, - DualOffsetValues dualOffsetValues, - FrameStaticProperties frameStaticProperties) { - - this.isLandscape = isLandscape; - this.targetOffsetPointEdge = targetOffsetPointEdge; - this.robotOffsetPointMode = robotOffsetPointMode; - this.robotOffsetSinglePoint = robotOffsetSinglePoint; - this.dualOffsetValues = dualOffsetValues; - - this.cameraCenterPoint = frameStaticProperties.centerPoint; - this.horizontalFocalLength = frameStaticProperties.horizontalFocalLength; - this.verticalFocalLength = frameStaticProperties.verticalFocalLength; - this.imageArea = frameStaticProperties.imageArea; - } - } + } + + ret.add( + new PhotonTrackedTarget( + t.getYaw(), + t.getPitch(), + t.getArea(), + t.getSkew(), + t.getFiducialId(), + t.getBestCameraToTarget3d(), + t.getAltCameraToTarget3d(), + t.getPoseAmbiguity(), + minAreaRectCorners, + detectedCorners)); + } + return ret; + } + + public static class TargetCalculationParameters { + // TargetOffset calculation values + final boolean isLandscape; + final TargetOffsetPointEdge targetOffsetPointEdge; + + // RobotOffset calculation values + final RobotOffsetPointMode robotOffsetPointMode; + final Point robotOffsetSinglePoint; + final DualOffsetValues dualOffsetValues; + + // center point of image + final Point cameraCenterPoint; + + // yaw calculation values + final double horizontalFocalLength; + + // pitch calculation values + final double verticalFocalLength; + + // area calculation values + final double imageArea; + + public TargetCalculationParameters( + boolean isLandscape, + TargetOffsetPointEdge 
targetOffsetPointEdge, + RobotOffsetPointMode robotOffsetPointMode, + Point robotOffsetSinglePoint, + DualOffsetValues dualOffsetValues, + Point cameraCenterPoint, + double horizontalFocalLength, + double verticalFocalLength, + double imageArea) { + + this.isLandscape = isLandscape; + this.targetOffsetPointEdge = targetOffsetPointEdge; + this.robotOffsetPointMode = robotOffsetPointMode; + this.robotOffsetSinglePoint = robotOffsetSinglePoint; + this.dualOffsetValues = dualOffsetValues; + this.cameraCenterPoint = cameraCenterPoint; + this.horizontalFocalLength = horizontalFocalLength; + this.verticalFocalLength = verticalFocalLength; + this.imageArea = imageArea; + } + + public TargetCalculationParameters( + boolean isLandscape, + TargetOffsetPointEdge targetOffsetPointEdge, + RobotOffsetPointMode robotOffsetPointMode, + Point robotOffsetSinglePoint, + DualOffsetValues dualOffsetValues, + FrameStaticProperties frameStaticProperties) { + + this.isLandscape = isLandscape; + this.targetOffsetPointEdge = targetOffsetPointEdge; + this.robotOffsetPointMode = robotOffsetPointMode; + this.robotOffsetSinglePoint = robotOffsetSinglePoint; + this.dualOffsetValues = dualOffsetValues; + + this.cameraCenterPoint = frameStaticProperties.centerPoint; + this.horizontalFocalLength = frameStaticProperties.horizontalFocalLength; + this.verticalFocalLength = frameStaticProperties.verticalFocalLength; + this.imageArea = frameStaticProperties.imageArea; + } + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/videoStream/SocketVideoStream.java b/photon-core/src/main/java/org/photonvision/vision/videoStream/SocketVideoStream.java index dae270e9f6..c5802c500d 100644 --- a/photon-core/src/main/java/org/photonvision/vision/videoStream/SocketVideoStream.java +++ b/photon-core/src/main/java/org/photonvision/vision/videoStream/SocketVideoStream.java @@ -30,88 +30,88 @@ import org.photonvision.vision.opencv.CVMat; public class SocketVideoStream implements Consumer { - int portID = 
0; // Align with cscore's port for unique identification of stream - MatOfByte jpegBytes = null; + int portID = 0; // Align with cscore's port for unique identification of stream + MatOfByte jpegBytes = null; - // Gets set to true when another class reads out valid jpeg bytes at least once - // Set back to false when another frame is freshly converted - // Should eliminate synchronization issues of differing rates of putting frames in - // and taking them back out - boolean frameWasConsumed = false; + // Gets set to true when another class reads out valid jpeg bytes at least once + // Set back to false when another frame is freshly converted + // Should eliminate synchronization issues of differing rates of putting frames in + // and taking them back out + boolean frameWasConsumed = false; - // Synclock around manipulating the jpeg bytes from multiple threads - Lock jpegBytesLock = new ReentrantLock(); - private int userCount = 0; + // Synclock around manipulating the jpeg bytes from multiple threads + Lock jpegBytesLock = new ReentrantLock(); + private int userCount = 0; - // FPS-limited MJPEG sender - private final double FPS_MAX = 30.0; - private final long minFramePeriodNanos = Math.round(1000000000.0 / FPS_MAX); - private long nextFrameSendTime = MathUtils.wpiNanoTime() + minFramePeriodNanos; - MJPGFrameConsumer oldSchoolServer; + // FPS-limited MJPEG sender + private final double FPS_MAX = 30.0; + private final long minFramePeriodNanos = Math.round(1000000000.0 / FPS_MAX); + private long nextFrameSendTime = MathUtils.wpiNanoTime() + minFramePeriodNanos; + MJPGFrameConsumer oldSchoolServer; - public SocketVideoStream(int portID) { - this.portID = portID; - oldSchoolServer = - new MJPGFrameConsumer( - CameraServerJNI.getHostname() + "_Port_" + portID + "_MJPEG_Server", portID); - } - - @Override - public void accept(CVMat image) { - if (userCount > 0) { - if (jpegBytesLock - .tryLock()) { // we assume frames are coming in frequently. 
Just skip this frame if we're - // locked doing something else. - try { - // Does a single-shot frame receive and convert to JPEG for efficiency - // Will not capture/convert again until convertNextFrame() is called - if (image != null && !image.getMat().empty() && jpegBytes == null) { - frameWasConsumed = false; - jpegBytes = new MatOfByte(); - Imgcodecs.imencode( - ".jpg", - image.getMat(), - jpegBytes, - new MatOfInt(Imgcodecs.IMWRITE_JPEG_QUALITY, 75)); - } - } finally { - jpegBytesLock.unlock(); - } - } - } + public SocketVideoStream(int portID) { + this.portID = portID; + oldSchoolServer = + new MJPGFrameConsumer( + CameraServerJNI.getHostname() + "_Port_" + portID + "_MJPEG_Server", portID); + } - // Send the frame in an FPS-limited fashion - var now = MathUtils.wpiNanoTime(); - if (now > nextFrameSendTime) { - oldSchoolServer.accept(image); - nextFrameSendTime = now + minFramePeriodNanos; + @Override + public void accept(CVMat image) { + if (userCount > 0) { + if (jpegBytesLock + .tryLock()) { // we assume frames are coming in frequently. Just skip this frame if we're + // locked doing something else. 
+ try { + // Does a single-shot frame receive and convert to JPEG for efficiency + // Will not capture/convert again until convertNextFrame() is called + if (image != null && !image.getMat().empty() && jpegBytes == null) { + frameWasConsumed = false; + jpegBytes = new MatOfByte(); + Imgcodecs.imencode( + ".jpg", + image.getMat(), + jpegBytes, + new MatOfInt(Imgcodecs.IMWRITE_JPEG_QUALITY, 75)); + } + } finally { + jpegBytesLock.unlock(); } + } } - public ByteBuffer getJPEGByteBuffer() { - ByteBuffer sendStr = null; - jpegBytesLock.lock(); - if (jpegBytes != null) { - sendStr = ByteBuffer.wrap(jpegBytes.toArray()); - } - jpegBytesLock.unlock(); - return sendStr; + // Send the frame in an FPS-limited fashion + var now = MathUtils.wpiNanoTime(); + if (now > nextFrameSendTime) { + oldSchoolServer.accept(image); + nextFrameSendTime = now + minFramePeriodNanos; } + } - public void convertNextFrame() { - jpegBytesLock.lock(); - if (jpegBytes != null) { - jpegBytes.release(); - jpegBytes = null; - } - jpegBytesLock.unlock(); + public ByteBuffer getJPEGByteBuffer() { + ByteBuffer sendStr = null; + jpegBytesLock.lock(); + if (jpegBytes != null) { + sendStr = ByteBuffer.wrap(jpegBytes.toArray()); } + jpegBytesLock.unlock(); + return sendStr; + } - public void addUser() { - userCount++; + public void convertNextFrame() { + jpegBytesLock.lock(); + if (jpegBytes != null) { + jpegBytes.release(); + jpegBytes = null; } + jpegBytesLock.unlock(); + } - public void removeUser() { - userCount--; - } + public void addUser() { + userCount++; + } + + public void removeUser() { + userCount--; + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/videoStream/SocketVideoStreamManager.java b/photon-core/src/main/java/org/photonvision/vision/videoStream/SocketVideoStreamManager.java index ceb84feab6..9c4b71f791 100644 --- a/photon-core/src/main/java/org/photonvision/vision/videoStream/SocketVideoStreamManager.java +++ 
b/photon-core/src/main/java/org/photonvision/vision/videoStream/SocketVideoStreamManager.java @@ -25,77 +25,77 @@ import org.photonvision.common.logging.Logger; public class SocketVideoStreamManager { - private static final int NO_STREAM_PORT = -1; + private static final int NO_STREAM_PORT = -1; - private final Logger logger = new Logger(SocketVideoStreamManager.class, LogGroup.Camera); + private final Logger logger = new Logger(SocketVideoStreamManager.class, LogGroup.Camera); - private final Map streams = new Hashtable<>(); - private final Map userSubscriptions = new Hashtable<>(); + private final Map streams = new Hashtable<>(); + private final Map userSubscriptions = new Hashtable<>(); - private static class ThreadSafeSingleton { - private static final SocketVideoStreamManager INSTANCE = new SocketVideoStreamManager(); - } + private static class ThreadSafeSingleton { + private static final SocketVideoStreamManager INSTANCE = new SocketVideoStreamManager(); + } - public static SocketVideoStreamManager getInstance() { - return ThreadSafeSingleton.INSTANCE; - } + public static SocketVideoStreamManager getInstance() { + return ThreadSafeSingleton.INSTANCE; + } - private SocketVideoStreamManager() {} + private SocketVideoStreamManager() {} - // Register a new available camera stream - public void addStream(SocketVideoStream newStream) { - streams.put(newStream.portID, newStream); - logger.debug("Added new stream for port " + newStream.portID); - } + // Register a new available camera stream + public void addStream(SocketVideoStream newStream) { + streams.put(newStream.portID, newStream); + logger.debug("Added new stream for port " + newStream.portID); + } - // Remove a previously-added camera stream, and unsubscribe all users - public void removeStream(SocketVideoStream oldStream) { - streams.remove(oldStream.portID); - logger.debug("Removed stream for port " + oldStream.portID); - } + // Remove a previously-added camera stream, and unsubscribe all users + public 
void removeStream(SocketVideoStream oldStream) { + streams.remove(oldStream.portID); + logger.debug("Removed stream for port " + oldStream.portID); + } - // Indicate a user would like to subscribe to a camera stream and get frames from it periodically - public void addSubscription(WsContext user, int streamPortID) { - var stream = streams.get(streamPortID); - if (stream != null) { - userSubscriptions.put(user, streamPortID); - stream.addUser(); - } else { - logger.error("User attempted to subscribe to non-existent port " + streamPortID); - } + // Indicate a user would like to subscribe to a camera stream and get frames from it periodically + public void addSubscription(WsContext user, int streamPortID) { + var stream = streams.get(streamPortID); + if (stream != null) { + userSubscriptions.put(user, streamPortID); + stream.addUser(); + } else { + logger.error("User attempted to subscribe to non-existent port " + streamPortID); } + } - // Indicate a user would like to stop receiving one camera stream - public void removeSubscription(WsContext user) { - var port = userSubscriptions.get(user); - if (port != null && port != NO_STREAM_PORT) { - var stream = streams.get(port); - userSubscriptions.put(user, NO_STREAM_PORT); - if (stream != null) { - stream.removeUser(); - } - } else { - logger.error( - "User attempted to unsubscribe, but had not yet previously subscribed successfully."); - } + // Indicate a user would like to stop receiving one camera stream + public void removeSubscription(WsContext user) { + var port = userSubscriptions.get(user); + if (port != null && port != NO_STREAM_PORT) { + var stream = streams.get(port); + userSubscriptions.put(user, NO_STREAM_PORT); + if (stream != null) { + stream.removeUser(); + } + } else { + logger.error( + "User attempted to unsubscribe, but had not yet previously subscribed successfully."); } + } - // For a given user, return the jpeg bytes (or null) for the most recent frame - public ByteBuffer getSendFrame(WsContext user) 
{ - var port = userSubscriptions.get(user); - if (port != null && port != NO_STREAM_PORT) { - var stream = streams.get(port); - return stream.getJPEGByteBuffer(); - } else { - return null; - } + // For a given user, return the jpeg bytes (or null) for the most recent frame + public ByteBuffer getSendFrame(WsContext user) { + var port = userSubscriptions.get(user); + if (port != null && port != NO_STREAM_PORT) { + var stream = streams.get(port); + return stream.getJPEGByteBuffer(); + } else { + return null; } + } - // Causes all streams to "re-trigger" and receive and convert their next mjpeg frame - // Only invoke this after all returned jpeg Strings have been used. - public void allStreamConvertNextFrame() { - for (SocketVideoStream stream : streams.values()) { - stream.convertNextFrame(); - } + // Causes all streams to "re-trigger" and receive and convert their next mjpeg frame + // Only invoke this after all returned jpeg Strings have been used. + public void allStreamConvertNextFrame() { + for (SocketVideoStream stream : streams.values()) { + stream.convertNextFrame(); } + } } diff --git a/photon-core/src/test/java/org/photonvision/common/BenchmarkTest.java b/photon-core/src/test/java/org/photonvision/common/BenchmarkTest.java index bbf83c6e6f..37822c4524 100644 --- a/photon-core/src/test/java/org/photonvision/common/BenchmarkTest.java +++ b/photon-core/src/test/java/org/photonvision/common/BenchmarkTest.java @@ -38,160 +38,160 @@ /** Various tests that check performance on long-running tasks (i.e. 
a pipeline) */ public class BenchmarkTest { - @BeforeAll - public static void init() { - TestUtils.loadLibraries(); + @BeforeAll + public static void init() { + TestUtils.loadLibraries(); + } + + @Test + @Order(1) + public void Reflective240pBenchmark() { + var pipeline = new ReflectivePipeline(); + pipeline.getSettings().hsvHue.set(60, 100); + pipeline.getSettings().hsvSaturation.set(100, 255); + pipeline.getSettings().hsvValue.set(190, 255); + pipeline.getSettings().outputShouldDraw = true; + pipeline.getSettings().outputShowMultipleTargets = true; + pipeline.getSettings().contourGroupingMode = ContourGroupingMode.Dual; + pipeline.getSettings().contourIntersection = ContourIntersectionDirection.Up; + + var frameProvider = + new FileFrameProvider( + TestUtils.getWPIImagePath(TestUtils.WPI2019Image.kCargoSideStraightDark72in, false), + TestUtils.WPI2019Image.FOV); + + benchmarkPipeline(frameProvider, pipeline, 5); + } + + @Test + @Order(1) + public void Reflective480pBenchmark() { + var pipeline = new ReflectivePipeline(); + pipeline.getSettings().hsvHue.set(60, 100); + pipeline.getSettings().hsvSaturation.set(200, 255); + pipeline.getSettings().hsvValue.set(200, 255); + pipeline.getSettings().outputShouldDraw = true; + + var frameProvider = + new FileFrameProvider( + TestUtils.getWPIImagePath(TestUtils.WPI2020Image.kBlueGoal_084in_Center, false), + TestUtils.WPI2020Image.FOV); + + benchmarkPipeline(frameProvider, pipeline, 5); + } + + @Test + @Order(3) + public void Reflective720pBenchmark() { + var pipeline = new ReflectivePipeline(); + pipeline.getSettings().hsvHue.set(60, 100); + pipeline.getSettings().hsvSaturation.set(200, 255); + pipeline.getSettings().hsvValue.set(200, 255); + pipeline.getSettings().outputShouldDraw = true; + + var frameProvider = + new FileFrameProvider( + TestUtils.getWPIImagePath(TestUtils.WPI2020Image.kBlueGoal_084in_Center_720p, false), + TestUtils.WPI2020Image.FOV); + + benchmarkPipeline(frameProvider, pipeline, 5); + } + + @Test + 
@Order(4) + public void Reflective1920x1440Benchmark() { + var pipeline = new ReflectivePipeline(); + pipeline.getSettings().hsvHue.set(60, 100); + pipeline.getSettings().hsvSaturation.set(100, 255); + pipeline.getSettings().hsvValue.set(190, 255); + pipeline.getSettings().outputShouldDraw = true; + pipeline.getSettings().outputShowMultipleTargets = true; + pipeline.getSettings().contourGroupingMode = ContourGroupingMode.Dual; + pipeline.getSettings().contourIntersection = ContourIntersectionDirection.Up; + + var frameProvider = + new FileFrameProvider( + TestUtils.getWPIImagePath(TestUtils.WPI2019Image.kCargoStraightDark72in_HighRes, false), + TestUtils.WPI2019Image.FOV); + + benchmarkPipeline(frameProvider, pipeline, 5); + } + + private static

void benchmarkPipeline( + FrameProvider frameProvider, P pipeline, int secondsToRun) { + CVMat.enablePrint(false); + // warmup for 5 loops. + System.out.println("Warming up for 5 loops..."); + for (int i = 0; i < 5; i++) { + pipeline.run(frameProvider.get(), QuirkyCamera.DefaultCamera); } - @Test - @Order(1) - public void Reflective240pBenchmark() { - var pipeline = new ReflectivePipeline(); - pipeline.getSettings().hsvHue.set(60, 100); - pipeline.getSettings().hsvSaturation.set(100, 255); - pipeline.getSettings().hsvValue.set(190, 255); - pipeline.getSettings().outputShouldDraw = true; - pipeline.getSettings().outputShowMultipleTargets = true; - pipeline.getSettings().contourGroupingMode = ContourGroupingMode.Dual; - pipeline.getSettings().contourIntersection = ContourIntersectionDirection.Up; - - var frameProvider = - new FileFrameProvider( - TestUtils.getWPIImagePath(TestUtils.WPI2019Image.kCargoSideStraightDark72in, false), - TestUtils.WPI2019Image.FOV); - - benchmarkPipeline(frameProvider, pipeline, 5); - } - - @Test - @Order(1) - public void Reflective480pBenchmark() { - var pipeline = new ReflectivePipeline(); - pipeline.getSettings().hsvHue.set(60, 100); - pipeline.getSettings().hsvSaturation.set(200, 255); - pipeline.getSettings().hsvValue.set(200, 255); - pipeline.getSettings().outputShouldDraw = true; - - var frameProvider = - new FileFrameProvider( - TestUtils.getWPIImagePath(TestUtils.WPI2020Image.kBlueGoal_084in_Center, false), - TestUtils.WPI2020Image.FOV); - - benchmarkPipeline(frameProvider, pipeline, 5); - } - - @Test - @Order(3) - public void Reflective720pBenchmark() { - var pipeline = new ReflectivePipeline(); - pipeline.getSettings().hsvHue.set(60, 100); - pipeline.getSettings().hsvSaturation.set(200, 255); - pipeline.getSettings().hsvValue.set(200, 255); - pipeline.getSettings().outputShouldDraw = true; - - var frameProvider = - new FileFrameProvider( - TestUtils.getWPIImagePath(TestUtils.WPI2020Image.kBlueGoal_084in_Center_720p, false), - 
TestUtils.WPI2020Image.FOV); - - benchmarkPipeline(frameProvider, pipeline, 5); - } - - @Test - @Order(4) - public void Reflective1920x1440Benchmark() { - var pipeline = new ReflectivePipeline(); - pipeline.getSettings().hsvHue.set(60, 100); - pipeline.getSettings().hsvSaturation.set(100, 255); - pipeline.getSettings().hsvValue.set(190, 255); - pipeline.getSettings().outputShouldDraw = true; - pipeline.getSettings().outputShowMultipleTargets = true; - pipeline.getSettings().contourGroupingMode = ContourGroupingMode.Dual; - pipeline.getSettings().contourIntersection = ContourIntersectionDirection.Up; - - var frameProvider = - new FileFrameProvider( - TestUtils.getWPIImagePath(TestUtils.WPI2019Image.kCargoStraightDark72in_HighRes, false), - TestUtils.WPI2019Image.FOV); - - benchmarkPipeline(frameProvider, pipeline, 5); - } - - private static

void benchmarkPipeline( - FrameProvider frameProvider, P pipeline, int secondsToRun) { - CVMat.enablePrint(false); - // warmup for 5 loops. - System.out.println("Warming up for 5 loops..."); - for (int i = 0; i < 5; i++) { - pipeline.run(frameProvider.get(), QuirkyCamera.DefaultCamera); - } - - final List processingTimes = new ArrayList<>(); - final List latencyTimes = new ArrayList<>(); - - var frameProps = frameProvider.get().frameStaticProperties; - - // begin benchmark - System.out.println( - "Beginning " - + secondsToRun - + " second benchmark at resolution " - + frameProps.imageWidth - + "x" - + frameProps.imageHeight); - var benchmarkStartMillis = System.currentTimeMillis(); - do { - CVPipelineResult pipelineResult = - pipeline.run(frameProvider.get(), QuirkyCamera.DefaultCamera); - pipelineResult.release(); - processingTimes.add(pipelineResult.getProcessingMillis()); - latencyTimes.add(pipelineResult.getLatencyMillis()); - } while (System.currentTimeMillis() - benchmarkStartMillis < secondsToRun * 1000.0); - System.out.println("Benchmark complete."); - - var processingMin = Collections.min(processingTimes); - var processingMean = NumberListUtils.mean(processingTimes); - var processingMax = Collections.max(processingTimes); - - var latencyMin = Collections.min(latencyTimes); - var latencyMean = NumberListUtils.mean(latencyTimes); - var latencyMax = Collections.max(latencyTimes); - - String processingResult = - "Processing times - " - + "Min: " - + MathUtils.roundTo(processingMin, 3) - + "ms (" - + MathUtils.roundTo(1000 / processingMin, 3) - + " FPS), " - + "Mean: " - + MathUtils.roundTo(processingMean, 3) - + "ms (" - + MathUtils.roundTo(1000 / processingMean, 3) - + " FPS), " - + "Max: " - + MathUtils.roundTo(processingMax, 3) - + "ms (" - + MathUtils.roundTo(1000 / processingMax, 3) - + " FPS)"; - System.out.println(processingResult); - String latencyResult = - "Latency times - " - + "Min: " - + MathUtils.roundTo(latencyMin, 3) - + "ms (" - + 
MathUtils.roundTo(1000 / latencyMin, 3) - + " FPS), " - + "Mean: " - + MathUtils.roundTo(latencyMean, 3) - + "ms (" - + MathUtils.roundTo(1000 / latencyMean, 3) - + " FPS), " - + "Max: " - + MathUtils.roundTo(latencyMax, 3) - + "ms (" - + MathUtils.roundTo(1000 / latencyMax, 3) - + " FPS)"; - System.out.println(latencyResult); - } + final List processingTimes = new ArrayList<>(); + final List latencyTimes = new ArrayList<>(); + + var frameProps = frameProvider.get().frameStaticProperties; + + // begin benchmark + System.out.println( + "Beginning " + + secondsToRun + + " second benchmark at resolution " + + frameProps.imageWidth + + "x" + + frameProps.imageHeight); + var benchmarkStartMillis = System.currentTimeMillis(); + do { + CVPipelineResult pipelineResult = + pipeline.run(frameProvider.get(), QuirkyCamera.DefaultCamera); + pipelineResult.release(); + processingTimes.add(pipelineResult.getProcessingMillis()); + latencyTimes.add(pipelineResult.getLatencyMillis()); + } while (System.currentTimeMillis() - benchmarkStartMillis < secondsToRun * 1000.0); + System.out.println("Benchmark complete."); + + var processingMin = Collections.min(processingTimes); + var processingMean = NumberListUtils.mean(processingTimes); + var processingMax = Collections.max(processingTimes); + + var latencyMin = Collections.min(latencyTimes); + var latencyMean = NumberListUtils.mean(latencyTimes); + var latencyMax = Collections.max(latencyTimes); + + String processingResult = + "Processing times - " + + "Min: " + + MathUtils.roundTo(processingMin, 3) + + "ms (" + + MathUtils.roundTo(1000 / processingMin, 3) + + " FPS), " + + "Mean: " + + MathUtils.roundTo(processingMean, 3) + + "ms (" + + MathUtils.roundTo(1000 / processingMean, 3) + + " FPS), " + + "Max: " + + MathUtils.roundTo(processingMax, 3) + + "ms (" + + MathUtils.roundTo(1000 / processingMax, 3) + + " FPS)"; + System.out.println(processingResult); + String latencyResult = + "Latency times - " + + "Min: " + + 
MathUtils.roundTo(latencyMin, 3) + + "ms (" + + MathUtils.roundTo(1000 / latencyMin, 3) + + " FPS), " + + "Mean: " + + MathUtils.roundTo(latencyMean, 3) + + "ms (" + + MathUtils.roundTo(1000 / latencyMean, 3) + + " FPS), " + + "Max: " + + MathUtils.roundTo(latencyMax, 3) + + "ms (" + + MathUtils.roundTo(1000 / latencyMax, 3) + + " FPS)"; + System.out.println(latencyResult); + } } diff --git a/photon-core/src/test/java/org/photonvision/common/ShapeBenchmarkTest.java b/photon-core/src/test/java/org/photonvision/common/ShapeBenchmarkTest.java index 6ce4781112..988a9d1f90 100644 --- a/photon-core/src/test/java/org/photonvision/common/ShapeBenchmarkTest.java +++ b/photon-core/src/test/java/org/photonvision/common/ShapeBenchmarkTest.java @@ -54,173 +54,173 @@ /** Various tests that check performance on long-running tasks (i.e. a pipeline) */ public class ShapeBenchmarkTest { - @BeforeAll - public static void init() { - TestUtils.loadLibraries(); + @BeforeAll + public static void init() { + TestUtils.loadLibraries(); + } + + @Test + public void Shape240pBenchmark() { + var pipeline = new ColoredShapePipeline(); + pipeline.getSettings().hsvHue.set(60, 100); + pipeline.getSettings().hsvSaturation.set(100, 255); + pipeline.getSettings().hsvValue.set(190, 255); + pipeline.getSettings().outputShouldDraw = true; + pipeline.getSettings().outputShowMultipleTargets = true; + pipeline.getSettings().contourGroupingMode = ContourGroupingMode.Single; + pipeline.getSettings().contourIntersection = ContourIntersectionDirection.Up; + pipeline.getSettings().contourShape = ContourShape.Custom; + pipeline.getSettings().circleDetectThreshold = 10; + pipeline.getSettings().accuracyPercentage = 30.0; + var frameProvider = + new FileFrameProvider( + TestUtils.getWPIImagePath(TestUtils.WPI2019Image.kCargoSideStraightDark72in, false), + TestUtils.WPI2019Image.FOV); + + benchmarkPipeline(frameProvider, pipeline, 5); + } + + @Test + public void Shape480pBenchmark() { + var pipeline = new 
ColoredShapePipeline(); + pipeline.getSettings().hsvHue.set(60, 100); + pipeline.getSettings().hsvSaturation.set(100, 255); + pipeline.getSettings().hsvValue.set(190, 255); + pipeline.getSettings().outputShouldDraw = true; + pipeline.getSettings().outputShowMultipleTargets = true; + pipeline.getSettings().contourGroupingMode = ContourGroupingMode.Single; + pipeline.getSettings().contourIntersection = ContourIntersectionDirection.Up; + pipeline.getSettings().contourShape = ContourShape.Custom; + pipeline.getSettings().circleDetectThreshold = 10; + pipeline.getSettings().accuracyPercentage = 30.0; + + var frameProvider = + new FileFrameProvider( + TestUtils.getWPIImagePath(TestUtils.WPI2020Image.kBlueGoal_084in_Center, false), + TestUtils.WPI2020Image.FOV); + + benchmarkPipeline(frameProvider, pipeline, 5); + } + + @Test + public void Shape720pBenchmark() { + var pipeline = new ColoredShapePipeline(); + pipeline.getSettings().hsvHue.set(60, 100); + pipeline.getSettings().hsvSaturation.set(100, 255); + pipeline.getSettings().hsvValue.set(190, 255); + pipeline.getSettings().outputShouldDraw = true; + pipeline.getSettings().outputShowMultipleTargets = true; + pipeline.getSettings().contourGroupingMode = ContourGroupingMode.Single; + pipeline.getSettings().contourIntersection = ContourIntersectionDirection.Up; + pipeline.getSettings().contourShape = ContourShape.Custom; + pipeline.getSettings().circleDetectThreshold = 10; + pipeline.getSettings().accuracyPercentage = 30.0; + + var frameProvider = + new FileFrameProvider( + TestUtils.getWPIImagePath(TestUtils.WPI2020Image.kBlueGoal_084in_Center_720p, false), + TestUtils.WPI2020Image.FOV); + + benchmarkPipeline(frameProvider, pipeline, 5); + } + + @Test + public void Shape1920x1440Benchmark() { + var pipeline = new ColoredShapePipeline(); + pipeline.getSettings().hsvHue.set(60, 100); + pipeline.getSettings().hsvSaturation.set(100, 255); + pipeline.getSettings().hsvValue.set(190, 255); + 
pipeline.getSettings().outputShouldDraw = true; + pipeline.getSettings().outputShowMultipleTargets = true; + pipeline.getSettings().contourGroupingMode = ContourGroupingMode.Single; + pipeline.getSettings().contourIntersection = ContourIntersectionDirection.Up; + pipeline.getSettings().contourShape = ContourShape.Custom; + pipeline.getSettings().circleDetectThreshold = 10; + pipeline.getSettings().accuracyPercentage = 30.0; + + var frameProvider = + new FileFrameProvider( + TestUtils.getWPIImagePath(TestUtils.WPI2019Image.kCargoStraightDark72in_HighRes, false), + TestUtils.WPI2019Image.FOV); + + benchmarkPipeline(frameProvider, pipeline, 5); + } + + private static

void benchmarkPipeline( + FrameProvider frameProvider, P pipeline, int secondsToRun) { + CVMat.enablePrint(false); + // warmup for 5 loops. + System.out.println("Warming up for 5 loops..."); + for (int i = 0; i < 5; i++) { + pipeline.run(frameProvider.get(), QuirkyCamera.DefaultCamera); } - @Test - public void Shape240pBenchmark() { - var pipeline = new ColoredShapePipeline(); - pipeline.getSettings().hsvHue.set(60, 100); - pipeline.getSettings().hsvSaturation.set(100, 255); - pipeline.getSettings().hsvValue.set(190, 255); - pipeline.getSettings().outputShouldDraw = true; - pipeline.getSettings().outputShowMultipleTargets = true; - pipeline.getSettings().contourGroupingMode = ContourGroupingMode.Single; - pipeline.getSettings().contourIntersection = ContourIntersectionDirection.Up; - pipeline.getSettings().contourShape = ContourShape.Custom; - pipeline.getSettings().circleDetectThreshold = 10; - pipeline.getSettings().accuracyPercentage = 30.0; - var frameProvider = - new FileFrameProvider( - TestUtils.getWPIImagePath(TestUtils.WPI2019Image.kCargoSideStraightDark72in, false), - TestUtils.WPI2019Image.FOV); - - benchmarkPipeline(frameProvider, pipeline, 5); - } - - @Test - public void Shape480pBenchmark() { - var pipeline = new ColoredShapePipeline(); - pipeline.getSettings().hsvHue.set(60, 100); - pipeline.getSettings().hsvSaturation.set(100, 255); - pipeline.getSettings().hsvValue.set(190, 255); - pipeline.getSettings().outputShouldDraw = true; - pipeline.getSettings().outputShowMultipleTargets = true; - pipeline.getSettings().contourGroupingMode = ContourGroupingMode.Single; - pipeline.getSettings().contourIntersection = ContourIntersectionDirection.Up; - pipeline.getSettings().contourShape = ContourShape.Custom; - pipeline.getSettings().circleDetectThreshold = 10; - pipeline.getSettings().accuracyPercentage = 30.0; - - var frameProvider = - new FileFrameProvider( - TestUtils.getWPIImagePath(TestUtils.WPI2020Image.kBlueGoal_084in_Center, false), - 
TestUtils.WPI2020Image.FOV); - - benchmarkPipeline(frameProvider, pipeline, 5); - } - - @Test - public void Shape720pBenchmark() { - var pipeline = new ColoredShapePipeline(); - pipeline.getSettings().hsvHue.set(60, 100); - pipeline.getSettings().hsvSaturation.set(100, 255); - pipeline.getSettings().hsvValue.set(190, 255); - pipeline.getSettings().outputShouldDraw = true; - pipeline.getSettings().outputShowMultipleTargets = true; - pipeline.getSettings().contourGroupingMode = ContourGroupingMode.Single; - pipeline.getSettings().contourIntersection = ContourIntersectionDirection.Up; - pipeline.getSettings().contourShape = ContourShape.Custom; - pipeline.getSettings().circleDetectThreshold = 10; - pipeline.getSettings().accuracyPercentage = 30.0; - - var frameProvider = - new FileFrameProvider( - TestUtils.getWPIImagePath(TestUtils.WPI2020Image.kBlueGoal_084in_Center_720p, false), - TestUtils.WPI2020Image.FOV); - - benchmarkPipeline(frameProvider, pipeline, 5); - } - - @Test - public void Shape1920x1440Benchmark() { - var pipeline = new ColoredShapePipeline(); - pipeline.getSettings().hsvHue.set(60, 100); - pipeline.getSettings().hsvSaturation.set(100, 255); - pipeline.getSettings().hsvValue.set(190, 255); - pipeline.getSettings().outputShouldDraw = true; - pipeline.getSettings().outputShowMultipleTargets = true; - pipeline.getSettings().contourGroupingMode = ContourGroupingMode.Single; - pipeline.getSettings().contourIntersection = ContourIntersectionDirection.Up; - pipeline.getSettings().contourShape = ContourShape.Custom; - pipeline.getSettings().circleDetectThreshold = 10; - pipeline.getSettings().accuracyPercentage = 30.0; - - var frameProvider = - new FileFrameProvider( - TestUtils.getWPIImagePath(TestUtils.WPI2019Image.kCargoStraightDark72in_HighRes, false), - TestUtils.WPI2019Image.FOV); - - benchmarkPipeline(frameProvider, pipeline, 5); - } - - private static

void benchmarkPipeline( - FrameProvider frameProvider, P pipeline, int secondsToRun) { - CVMat.enablePrint(false); - // warmup for 5 loops. - System.out.println("Warming up for 5 loops..."); - for (int i = 0; i < 5; i++) { - pipeline.run(frameProvider.get(), QuirkyCamera.DefaultCamera); - } - - final List processingTimes = new ArrayList<>(); - final List latencyTimes = new ArrayList<>(); - - var frameProps = frameProvider.get().frameStaticProperties; - - // begin benchmark - System.out.println( - "Beginning " - + secondsToRun - + " second benchmark at resolution " - + frameProps.imageWidth - + "x" - + frameProps.imageHeight); - var benchmarkStartMillis = System.currentTimeMillis(); - do { - CVPipelineResult pipelineResult = - pipeline.run(frameProvider.get(), QuirkyCamera.DefaultCamera); - pipelineResult.release(); - processingTimes.add(pipelineResult.getProcessingMillis()); - latencyTimes.add(pipelineResult.getLatencyMillis()); - } while (System.currentTimeMillis() - benchmarkStartMillis < secondsToRun * 1000L); - System.out.println("Benchmark complete."); - - var processingMin = Collections.min(processingTimes); - var processingMean = NumberListUtils.mean(processingTimes); - var processingMax = Collections.max(processingTimes); - - var latencyMin = Collections.min(latencyTimes); - var latencyMean = NumberListUtils.mean(latencyTimes); - var latencyMax = Collections.max(latencyTimes); - - String processingResult = - "Processing times - " - + "Min: " - + MathUtils.roundTo(processingMin, 3) - + "ms (" - + MathUtils.roundTo(1000 / processingMin, 3) - + " FPS), " - + "Mean: " - + MathUtils.roundTo(processingMean, 3) - + "ms (" - + MathUtils.roundTo(1000 / processingMean, 3) - + " FPS), " - + "Max: " - + MathUtils.roundTo(processingMax, 3) - + "ms (" - + MathUtils.roundTo(1000 / processingMax, 3) - + " FPS)"; - System.out.println(processingResult); - String latencyResult = - "Latency times - " - + "Min: " - + MathUtils.roundTo(latencyMin, 3) - + "ms (" - + 
MathUtils.roundTo(1000 / latencyMin, 3) - + " FPS), " - + "Mean: " - + MathUtils.roundTo(latencyMean, 3) - + "ms (" - + MathUtils.roundTo(1000 / latencyMean, 3) - + " FPS), " - + "Max: " - + MathUtils.roundTo(latencyMax, 3) - + "ms (" - + MathUtils.roundTo(1000 / latencyMax, 3) - + " FPS)"; - System.out.println(latencyResult); - } + final List processingTimes = new ArrayList<>(); + final List latencyTimes = new ArrayList<>(); + + var frameProps = frameProvider.get().frameStaticProperties; + + // begin benchmark + System.out.println( + "Beginning " + + secondsToRun + + " second benchmark at resolution " + + frameProps.imageWidth + + "x" + + frameProps.imageHeight); + var benchmarkStartMillis = System.currentTimeMillis(); + do { + CVPipelineResult pipelineResult = + pipeline.run(frameProvider.get(), QuirkyCamera.DefaultCamera); + pipelineResult.release(); + processingTimes.add(pipelineResult.getProcessingMillis()); + latencyTimes.add(pipelineResult.getLatencyMillis()); + } while (System.currentTimeMillis() - benchmarkStartMillis < secondsToRun * 1000L); + System.out.println("Benchmark complete."); + + var processingMin = Collections.min(processingTimes); + var processingMean = NumberListUtils.mean(processingTimes); + var processingMax = Collections.max(processingTimes); + + var latencyMin = Collections.min(latencyTimes); + var latencyMean = NumberListUtils.mean(latencyTimes); + var latencyMax = Collections.max(latencyTimes); + + String processingResult = + "Processing times - " + + "Min: " + + MathUtils.roundTo(processingMin, 3) + + "ms (" + + MathUtils.roundTo(1000 / processingMin, 3) + + " FPS), " + + "Mean: " + + MathUtils.roundTo(processingMean, 3) + + "ms (" + + MathUtils.roundTo(1000 / processingMean, 3) + + " FPS), " + + "Max: " + + MathUtils.roundTo(processingMax, 3) + + "ms (" + + MathUtils.roundTo(1000 / processingMax, 3) + + " FPS)"; + System.out.println(processingResult); + String latencyResult = + "Latency times - " + + "Min: " + + 
MathUtils.roundTo(latencyMin, 3) + + "ms (" + + MathUtils.roundTo(1000 / latencyMin, 3) + + " FPS), " + + "Mean: " + + MathUtils.roundTo(latencyMean, 3) + + "ms (" + + MathUtils.roundTo(1000 / latencyMean, 3) + + " FPS), " + + "Max: " + + MathUtils.roundTo(latencyMax, 3) + + "ms (" + + MathUtils.roundTo(1000 / latencyMax, 3) + + " FPS)"; + System.out.println(latencyResult); + } } diff --git a/photon-core/src/test/java/org/photonvision/common/configuration/ConfigTest.java b/photon-core/src/test/java/org/photonvision/common/configuration/ConfigTest.java index beaae0e4d9..91145f2f37 100644 --- a/photon-core/src/test/java/org/photonvision/common/configuration/ConfigTest.java +++ b/photon-core/src/test/java/org/photonvision/common/configuration/ConfigTest.java @@ -36,109 +36,109 @@ import org.photonvision.vision.target.TargetModel; public class ConfigTest { - private static ConfigManager configMgr; - private static final CameraConfiguration cameraConfig = - new CameraConfiguration("TestCamera", "/dev/video420"); - private static ReflectivePipelineSettings REFLECTIVE_PIPELINE_SETTINGS; - private static ColoredShapePipelineSettings COLORED_SHAPE_PIPELINE_SETTINGS; - private static AprilTagPipelineSettings APRIL_TAG_PIPELINE_SETTINGS; - - @BeforeAll - public static void init() { - TestUtils.loadLibraries(); - var path = Path.of("testconfigdir"); - configMgr = new ConfigManager(path, new LegacyConfigProvider(path)); - configMgr.load(); - - Logger.setLevel(LogGroup.General, LogLevel.TRACE); - - REFLECTIVE_PIPELINE_SETTINGS = new ReflectivePipelineSettings(); - COLORED_SHAPE_PIPELINE_SETTINGS = new ColoredShapePipelineSettings(); - APRIL_TAG_PIPELINE_SETTINGS = new AprilTagPipelineSettings(); - - REFLECTIVE_PIPELINE_SETTINGS.pipelineNickname = "2019Tape"; - REFLECTIVE_PIPELINE_SETTINGS.targetModel = TargetModel.k2019DualTarget; - - COLORED_SHAPE_PIPELINE_SETTINGS.pipelineNickname = "2019Cargo"; - COLORED_SHAPE_PIPELINE_SETTINGS.pipelineIndex = 1; - - 
APRIL_TAG_PIPELINE_SETTINGS.pipelineNickname = "apriltag"; - APRIL_TAG_PIPELINE_SETTINGS.pipelineIndex = 2; - - cameraConfig.addPipelineSetting(REFLECTIVE_PIPELINE_SETTINGS); - cameraConfig.addPipelineSetting(COLORED_SHAPE_PIPELINE_SETTINGS); - cameraConfig.addPipelineSetting(APRIL_TAG_PIPELINE_SETTINGS); + private static ConfigManager configMgr; + private static final CameraConfiguration cameraConfig = + new CameraConfiguration("TestCamera", "/dev/video420"); + private static ReflectivePipelineSettings REFLECTIVE_PIPELINE_SETTINGS; + private static ColoredShapePipelineSettings COLORED_SHAPE_PIPELINE_SETTINGS; + private static AprilTagPipelineSettings APRIL_TAG_PIPELINE_SETTINGS; + + @BeforeAll + public static void init() { + TestUtils.loadLibraries(); + var path = Path.of("testconfigdir"); + configMgr = new ConfigManager(path, new LegacyConfigProvider(path)); + configMgr.load(); + + Logger.setLevel(LogGroup.General, LogLevel.TRACE); + + REFLECTIVE_PIPELINE_SETTINGS = new ReflectivePipelineSettings(); + COLORED_SHAPE_PIPELINE_SETTINGS = new ColoredShapePipelineSettings(); + APRIL_TAG_PIPELINE_SETTINGS = new AprilTagPipelineSettings(); + + REFLECTIVE_PIPELINE_SETTINGS.pipelineNickname = "2019Tape"; + REFLECTIVE_PIPELINE_SETTINGS.targetModel = TargetModel.k2019DualTarget; + + COLORED_SHAPE_PIPELINE_SETTINGS.pipelineNickname = "2019Cargo"; + COLORED_SHAPE_PIPELINE_SETTINGS.pipelineIndex = 1; + + APRIL_TAG_PIPELINE_SETTINGS.pipelineNickname = "apriltag"; + APRIL_TAG_PIPELINE_SETTINGS.pipelineIndex = 2; + + cameraConfig.addPipelineSetting(REFLECTIVE_PIPELINE_SETTINGS); + cameraConfig.addPipelineSetting(COLORED_SHAPE_PIPELINE_SETTINGS); + cameraConfig.addPipelineSetting(APRIL_TAG_PIPELINE_SETTINGS); + } + + @Test + @Order(1) + public void serializeConfig() { + TestUtils.loadLibraries(); + + Logger.setLevel(LogGroup.General, LogLevel.TRACE); + configMgr.getConfig().addCameraConfig(cameraConfig); + configMgr.saveToDisk(); + + var camConfDir = + new File( + 
Path.of(configMgr.configDirectoryFile.toString(), "cameras", "TestCamera") + .toAbsolutePath() + .toString()); + Assertions.assertTrue(camConfDir.exists(), "TestCamera config folder not found!"); + + Assertions.assertTrue( + Files.exists(Path.of(configMgr.configDirectoryFile.toString(), "networkSettings.json")), + "networkSettings.json file not found!"); + } + + @Test + @Order(2) + public void deserializeConfig() { + var reflectivePipelineSettings = + configMgr.getConfig().getCameraConfigurations().get("TestCamera").pipelineSettings.get(0); + var coloredShapePipelineSettings = + configMgr.getConfig().getCameraConfigurations().get("TestCamera").pipelineSettings.get(1); + var apriltagPipelineSettings = + configMgr.getConfig().getCameraConfigurations().get("TestCamera").pipelineSettings.get(2); + + Assertions.assertEquals(REFLECTIVE_PIPELINE_SETTINGS, reflectivePipelineSettings); + Assertions.assertEquals(COLORED_SHAPE_PIPELINE_SETTINGS, coloredShapePipelineSettings); + Assertions.assertEquals(APRIL_TAG_PIPELINE_SETTINGS, apriltagPipelineSettings); + + Assertions.assertTrue( + reflectivePipelineSettings instanceof ReflectivePipelineSettings, + "Config loaded pipeline settings for index 0 not of expected type ReflectivePipelineSettings!"); + Assertions.assertTrue( + coloredShapePipelineSettings instanceof ColoredShapePipelineSettings, + "Config loaded pipeline settings for index 1 not of expected type ColoredShapePipelineSettings!"); + Assertions.assertTrue( + apriltagPipelineSettings instanceof AprilTagPipelineSettings, + "Config loaded pipeline settings for index 2 not of expected type AprilTagPipelineSettings!"); + } + + @AfterAll + public static void cleanup() throws IOException { + try { + Files.deleteIfExists(Paths.get("settings.json")); + } catch (IOException e) { + e.printStackTrace(); } - @Test - @Order(1) - public void serializeConfig() { - TestUtils.loadLibraries(); - - Logger.setLevel(LogGroup.General, LogLevel.TRACE); - 
configMgr.getConfig().addCameraConfig(cameraConfig); - configMgr.saveToDisk(); - - var camConfDir = - new File( - Path.of(configMgr.configDirectoryFile.toString(), "cameras", "TestCamera") - .toAbsolutePath() - .toString()); - Assertions.assertTrue(camConfDir.exists(), "TestCamera config folder not found!"); - - Assertions.assertTrue( - Files.exists(Path.of(configMgr.configDirectoryFile.toString(), "networkSettings.json")), - "networkSettings.json file not found!"); - } - - @Test - @Order(2) - public void deserializeConfig() { - var reflectivePipelineSettings = - configMgr.getConfig().getCameraConfigurations().get("TestCamera").pipelineSettings.get(0); - var coloredShapePipelineSettings = - configMgr.getConfig().getCameraConfigurations().get("TestCamera").pipelineSettings.get(1); - var apriltagPipelineSettings = - configMgr.getConfig().getCameraConfigurations().get("TestCamera").pipelineSettings.get(2); - - Assertions.assertEquals(REFLECTIVE_PIPELINE_SETTINGS, reflectivePipelineSettings); - Assertions.assertEquals(COLORED_SHAPE_PIPELINE_SETTINGS, coloredShapePipelineSettings); - Assertions.assertEquals(APRIL_TAG_PIPELINE_SETTINGS, apriltagPipelineSettings); - - Assertions.assertTrue( - reflectivePipelineSettings instanceof ReflectivePipelineSettings, - "Config loaded pipeline settings for index 0 not of expected type ReflectivePipelineSettings!"); - Assertions.assertTrue( - coloredShapePipelineSettings instanceof ColoredShapePipelineSettings, - "Config loaded pipeline settings for index 1 not of expected type ColoredShapePipelineSettings!"); - Assertions.assertTrue( - apriltagPipelineSettings instanceof AprilTagPipelineSettings, - "Config loaded pipeline settings for index 2 not of expected type AprilTagPipelineSettings!"); - } - - @AfterAll - public static void cleanup() throws IOException { - try { - Files.deleteIfExists(Paths.get("settings.json")); - } catch (IOException e) { - e.printStackTrace(); - } - - FileUtils.cleanDirectory(configMgr.configDirectoryFile); 
- configMgr.configDirectoryFile.delete(); - } - - @Test - public void testJacksonHandlesOldVersions() throws IOException { - var str = - "{\"baseName\":\"aaaaaa\",\"uniqueName\":\"aaaaaa\",\"nickname\":\"aaaaaa\",\"FOV\":70.0,\"path\":\"dev/vid\",\"cameraType\":\"UsbCamera\",\"currentPipelineIndex\":0,\"camPitch\":{\"radians\":0.0},\"calibrations\":[], \"cameraLEDs\":[]}"; - var writer = new FileWriter("test.json"); - writer.write(str); - writer.flush(); - writer.close(); - Assertions.assertDoesNotThrow( - () -> JacksonUtils.deserialize(Path.of("test.json"), CameraConfiguration.class)); - - new File("test.json").delete(); - } + FileUtils.cleanDirectory(configMgr.configDirectoryFile); + configMgr.configDirectoryFile.delete(); + } + + @Test + public void testJacksonHandlesOldVersions() throws IOException { + var str = + "{\"baseName\":\"aaaaaa\",\"uniqueName\":\"aaaaaa\",\"nickname\":\"aaaaaa\",\"FOV\":70.0,\"path\":\"dev/vid\",\"cameraType\":\"UsbCamera\",\"currentPipelineIndex\":0,\"camPitch\":{\"radians\":0.0},\"calibrations\":[], \"cameraLEDs\":[]}"; + var writer = new FileWriter("test.json"); + writer.write(str); + writer.flush(); + writer.close(); + Assertions.assertDoesNotThrow( + () -> JacksonUtils.deserialize(Path.of("test.json"), CameraConfiguration.class)); + + new File("test.json").delete(); + } } diff --git a/photon-core/src/test/java/org/photonvision/common/configuration/NetworkConfigTest.java b/photon-core/src/test/java/org/photonvision/common/configuration/NetworkConfigTest.java index 7c9d6d6f42..87f185e4c0 100644 --- a/photon-core/src/test/java/org/photonvision/common/configuration/NetworkConfigTest.java +++ b/photon-core/src/test/java/org/photonvision/common/configuration/NetworkConfigTest.java @@ -25,28 +25,28 @@ import org.junit.jupiter.api.Test; public class NetworkConfigTest { - @Test - public void testSerialization() throws IOException { - var mapper = new ObjectMapper(); - var path = Path.of("netTest.json"); - mapper.writeValue(path.toFile(), 
new NetworkConfig()); - Assertions.assertDoesNotThrow(() -> mapper.readValue(path.toFile(), NetworkConfig.class)); - new File("netTest.json").delete(); - } + @Test + public void testSerialization() throws IOException { + var mapper = new ObjectMapper(); + var path = Path.of("netTest.json"); + mapper.writeValue(path.toFile(), new NetworkConfig()); + Assertions.assertDoesNotThrow(() -> mapper.readValue(path.toFile(), NetworkConfig.class)); + new File("netTest.json").delete(); + } - @Test - public void testDeserializeTeamNumberOrNtServerAddress() { - { - var folder = Path.of("test-resources/network-old-team-number"); - var configMgr = new ConfigManager(folder, new LegacyConfigProvider(folder)); - configMgr.load(); - Assertions.assertEquals("9999", configMgr.getConfig().getNetworkConfig().ntServerAddress); - } - { - var folder = Path.of("test-resources/network-new-team-number"); - var configMgr = new ConfigManager(folder, new LegacyConfigProvider(folder)); - configMgr.load(); - Assertions.assertEquals("9999", configMgr.getConfig().getNetworkConfig().ntServerAddress); - } + @Test + public void testDeserializeTeamNumberOrNtServerAddress() { + { + var folder = Path.of("test-resources/network-old-team-number"); + var configMgr = new ConfigManager(folder, new LegacyConfigProvider(folder)); + configMgr.load(); + Assertions.assertEquals("9999", configMgr.getConfig().getNetworkConfig().ntServerAddress); + } + { + var folder = Path.of("test-resources/network-new-team-number"); + var configMgr = new ConfigManager(folder, new LegacyConfigProvider(folder)); + configMgr.load(); + Assertions.assertEquals("9999", configMgr.getConfig().getNetworkConfig().ntServerAddress); } + } } diff --git a/photon-core/src/test/java/org/photonvision/common/configuration/SQLConfigTest.java b/photon-core/src/test/java/org/photonvision/common/configuration/SQLConfigTest.java index 18d4ae5b6a..0dd61dfcc9 100644 --- a/photon-core/src/test/java/org/photonvision/common/configuration/SQLConfigTest.java +++ 
b/photon-core/src/test/java/org/photonvision/common/configuration/SQLConfigTest.java @@ -30,40 +30,40 @@ import org.photonvision.vision.pipeline.ReflectivePipelineSettings; public class SQLConfigTest { - @BeforeAll - public static void init() { - TestUtils.loadLibraries(); - } + @BeforeAll + public static void init() { + TestUtils.loadLibraries(); + } - @Test - public void testLoad() { - var cfgLoader = new SqlConfigProvider(Path.of("jdbc_test")); + @Test + public void testLoad() { + var cfgLoader = new SqlConfigProvider(Path.of("jdbc_test")); - cfgLoader.load(); + cfgLoader.load(); - var testcamcfg = - new CameraConfiguration( - "basename", - "a_unique_name", - "a_nick_name", - 69, - "a/path/idk", - CameraType.UsbCamera, - List.of(), - 0); - testcamcfg.pipelineSettings = - List.of( - new ReflectivePipelineSettings(), - new AprilTagPipelineSettings(), - new ColoredShapePipelineSettings()); + var testcamcfg = + new CameraConfiguration( + "basename", + "a_unique_name", + "a_nick_name", + 69, + "a/path/idk", + CameraType.UsbCamera, + List.of(), + 0); + testcamcfg.pipelineSettings = + List.of( + new ReflectivePipelineSettings(), + new AprilTagPipelineSettings(), + new ColoredShapePipelineSettings()); - cfgLoader.getConfig().addCameraConfig(testcamcfg); - cfgLoader.getConfig().getNetworkConfig().ntServerAddress = "5940"; - cfgLoader.saveToDisk(); + cfgLoader.getConfig().addCameraConfig(testcamcfg); + cfgLoader.getConfig().getNetworkConfig().ntServerAddress = "5940"; + cfgLoader.saveToDisk(); - cfgLoader.load(); - System.out.println(cfgLoader.getConfig()); + cfgLoader.load(); + System.out.println(cfgLoader.getConfig()); - assertEquals(cfgLoader.getConfig().getNetworkConfig().ntServerAddress, "5940"); - } + assertEquals(cfgLoader.getConfig().getNetworkConfig().ntServerAddress, "5940"); + } } diff --git a/photon-core/src/test/java/org/photonvision/common/util/CoordinateConversionTest.java 
b/photon-core/src/test/java/org/photonvision/common/util/CoordinateConversionTest.java index a452f517f5..2573fff455 100644 --- a/photon-core/src/test/java/org/photonvision/common/util/CoordinateConversionTest.java +++ b/photon-core/src/test/java/org/photonvision/common/util/CoordinateConversionTest.java @@ -28,52 +28,52 @@ import org.photonvision.common.util.math.MathUtils; public class CoordinateConversionTest { - @BeforeAll - public static void Init() { - TestUtils.loadLibraries(); - } + @BeforeAll + public static void Init() { + TestUtils.loadLibraries(); + } - @Test - public void testAprilTagToOpenCV() { - // AprilTag and OpenCV both use the EDN coordinate system. AprilTag, however, assumes the tag's - // z-axis points away from the camera while we expect it to point towards the camera. - var apriltag = - new Transform3d( - new Translation3d(1, 2, 3), - new Rotation3d(Math.toRadians(5), Math.toRadians(10), Math.toRadians(15))); - var opencv = MathUtils.convertApriltagtoOpenCV(apriltag); - final var expectedTrl = new Translation3d(1, 2, 3); - assertEquals( - expectedTrl, opencv.getTranslation(), "AprilTag to OpenCV translation conversion failed"); - var apriltagXaxis = new Translation3d(1, 0, 0).rotateBy(apriltag.getRotation()); - var apriltagYaxis = new Translation3d(0, 1, 0).rotateBy(apriltag.getRotation()); - var apriltagZaxis = new Translation3d(0, 0, 1).rotateBy(apriltag.getRotation()); - var opencvXaxis = new Translation3d(1, 0, 0).rotateBy(opencv.getRotation()); - var opencvYaxis = new Translation3d(0, 1, 0).rotateBy(opencv.getRotation()); - var opencvZaxis = new Translation3d(0, 0, 1).rotateBy(opencv.getRotation()); - assertEquals( - apriltagXaxis.unaryMinus(), - opencvXaxis, - "AprilTag to OpenCV rotation conversion failed(X-axis)"); - assertEquals( - apriltagYaxis, opencvYaxis, "AprilTag to OpenCV rotation conversion failed(Y-axis)"); - assertEquals( - apriltagZaxis.unaryMinus(), - opencvZaxis, - "AprilTag to OpenCV rotation conversion 
failed(Z-axis)"); - } + @Test + public void testAprilTagToOpenCV() { + // AprilTag and OpenCV both use the EDN coordinate system. AprilTag, however, assumes the tag's + // z-axis points away from the camera while we expect it to point towards the camera. + var apriltag = + new Transform3d( + new Translation3d(1, 2, 3), + new Rotation3d(Math.toRadians(5), Math.toRadians(10), Math.toRadians(15))); + var opencv = MathUtils.convertApriltagtoOpenCV(apriltag); + final var expectedTrl = new Translation3d(1, 2, 3); + assertEquals( + expectedTrl, opencv.getTranslation(), "AprilTag to OpenCV translation conversion failed"); + var apriltagXaxis = new Translation3d(1, 0, 0).rotateBy(apriltag.getRotation()); + var apriltagYaxis = new Translation3d(0, 1, 0).rotateBy(apriltag.getRotation()); + var apriltagZaxis = new Translation3d(0, 0, 1).rotateBy(apriltag.getRotation()); + var opencvXaxis = new Translation3d(1, 0, 0).rotateBy(opencv.getRotation()); + var opencvYaxis = new Translation3d(0, 1, 0).rotateBy(opencv.getRotation()); + var opencvZaxis = new Translation3d(0, 0, 1).rotateBy(opencv.getRotation()); + assertEquals( + apriltagXaxis.unaryMinus(), + opencvXaxis, + "AprilTag to OpenCV rotation conversion failed(X-axis)"); + assertEquals( + apriltagYaxis, opencvYaxis, "AprilTag to OpenCV rotation conversion failed(Y-axis)"); + assertEquals( + apriltagZaxis.unaryMinus(), + opencvZaxis, + "AprilTag to OpenCV rotation conversion failed(Z-axis)"); + } - @Test - public void testOpenCVToPhoton() { - // OpenCV uses the EDN coordinate system while wpilib is in NWU. 
- var opencv = - new Transform3d( - new Translation3d(1, 2, 3), new Rotation3d(VecBuilder.fill(1, 2, 3), Math.PI / 8)); - var wpilib = MathUtils.convertOpenCVtoPhotonTransform(opencv); - final var expectedTrl = new Translation3d(3, -1, -2); - assertEquals( - expectedTrl, wpilib.getTranslation(), "OpenCV to WPILib translation conversion failed"); - var expectedRot = new Rotation3d(VecBuilder.fill(3, -1, -2), Math.PI / 8); - assertEquals(expectedRot, wpilib.getRotation(), "OpenCV to WPILib rotation conversion failed"); - } + @Test + public void testOpenCVToPhoton() { + // OpenCV uses the EDN coordinate system while wpilib is in NWU. + var opencv = + new Transform3d( + new Translation3d(1, 2, 3), new Rotation3d(VecBuilder.fill(1, 2, 3), Math.PI / 8)); + var wpilib = MathUtils.convertOpenCVtoPhotonTransform(opencv); + final var expectedTrl = new Translation3d(3, -1, -2); + assertEquals( + expectedTrl, wpilib.getTranslation(), "OpenCV to WPILib translation conversion failed"); + var expectedRot = new Rotation3d(VecBuilder.fill(3, -1, -2), Math.PI / 8); + assertEquals(expectedRot, wpilib.getRotation(), "OpenCV to WPILib rotation conversion failed"); + } } diff --git a/photon-core/src/test/java/org/photonvision/common/util/LogFileManagementTest.java b/photon-core/src/test/java/org/photonvision/common/util/LogFileManagementTest.java index e834874641..ff11970f6a 100644 --- a/photon-core/src/test/java/org/photonvision/common/util/LogFileManagementTest.java +++ b/photon-core/src/test/java/org/photonvision/common/util/LogFileManagementTest.java @@ -32,53 +32,53 @@ import org.photonvision.common.logging.Logger; public class LogFileManagementTest { - @Test - public void fileCleanupTest() { - // Ensure we instantiate the new log correctly - ConfigManager.getInstance(); + @Test + public void fileCleanupTest() { + // Ensure we instantiate the new log correctly + ConfigManager.getInstance(); - String testDir = ConfigManager.getInstance().getLogsDir().toString() + "/test"; + String 
testDir = ConfigManager.getInstance().getLogsDir().toString() + "/test"; - Assertions.assertDoesNotThrow(() -> Files.createDirectories(Path.of(testDir))); + Assertions.assertDoesNotThrow(() -> Files.createDirectories(Path.of(testDir))); - // Create a bunch of log files with dummy contents. - for (int fileIdx = 0; fileIdx < Logger.MAX_LOGS_TO_KEEP + 5; fileIdx++) { - String fname = - ConfigManager.getInstance() - .taToLogFname( - LocalDateTime.ofEpochSecond(1500000000 + fileIdx * 60, 0, ZoneOffset.UTC)); - try { - FileWriter testLogWriter = new FileWriter(Path.of(testDir, fname).toString()); - testLogWriter.write("Test log contents created for testing purposes only"); - testLogWriter.close(); - } catch (IOException e) { - Assertions.fail("Could not create test files"); - } - } + // Create a bunch of log files with dummy contents. + for (int fileIdx = 0; fileIdx < Logger.MAX_LOGS_TO_KEEP + 5; fileIdx++) { + String fname = + ConfigManager.getInstance() + .taToLogFname( + LocalDateTime.ofEpochSecond(1500000000 + fileIdx * 60, 0, ZoneOffset.UTC)); + try { + FileWriter testLogWriter = new FileWriter(Path.of(testDir, fname).toString()); + testLogWriter.write("Test log contents created for testing purposes only"); + testLogWriter.close(); + } catch (IOException e) { + Assertions.fail("Could not create test files"); + } + } - // Confirm new log files were created - Assertions.assertTrue( - Logger.MAX_LOGS_TO_KEEP + 5 <= countLogFiles(testDir), "Not enough log files discovered"); + // Confirm new log files were created + Assertions.assertTrue( + Logger.MAX_LOGS_TO_KEEP + 5 <= countLogFiles(testDir), "Not enough log files discovered"); - // Run the log cleanup routine - Logger.cleanLogs(Path.of(testDir)); + // Run the log cleanup routine + Logger.cleanLogs(Path.of(testDir)); - // Confirm we deleted log files - Assertions.assertEquals( - Logger.MAX_LOGS_TO_KEEP, countLogFiles(testDir), "Not enough log files deleted"); + // Confirm we deleted log files + 
Assertions.assertEquals( + Logger.MAX_LOGS_TO_KEEP, countLogFiles(testDir), "Not enough log files deleted"); - // Clean uptest directory - org.photonvision.common.util.file.FileUtils.deleteDirectory(Path.of(testDir)); - try { - Files.delete(Path.of(testDir)); - } catch (IOException e) { - // it's OK if this fails - } + // Clean uptest directory + org.photonvision.common.util.file.FileUtils.deleteDirectory(Path.of(testDir)); + try { + Files.delete(Path.of(testDir)); + } catch (IOException e) { + // it's OK if this fails } + } - private int countLogFiles(String testDir) { - return FileUtils.listFiles( - new File(testDir), new WildcardFileFilter("photonvision-*.log"), null) - .size(); - } + private int countLogFiles(String testDir) { + return FileUtils.listFiles( + new File(testDir), new WildcardFileFilter("photonvision-*.log"), null) + .size(); + } } diff --git a/photon-core/src/test/java/org/photonvision/common/util/TimedTaskManagerTest.java b/photon-core/src/test/java/org/photonvision/common/util/TimedTaskManagerTest.java index 7b7567d671..78c1347688 100644 --- a/photon-core/src/test/java/org/photonvision/common/util/TimedTaskManagerTest.java +++ b/photon-core/src/test/java/org/photonvision/common/util/TimedTaskManagerTest.java @@ -22,11 +22,11 @@ import org.junit.jupiter.api.Test; public class TimedTaskManagerTest { - @Test - public void atomicIntegerIncrementTest() throws InterruptedException { - AtomicInteger i = new AtomicInteger(); - TimedTaskManager.getInstance().addTask("TaskManagerTest", i::getAndIncrement, 2); - Thread.sleep(400); - Assertions.assertEquals(200, i.get(), 15); - } + @Test + public void atomicIntegerIncrementTest() throws InterruptedException { + AtomicInteger i = new AtomicInteger(); + TimedTaskManager.getInstance().addTask("TaskManagerTest", i::getAndIncrement, 2); + Thread.sleep(400); + Assertions.assertEquals(200, i.get(), 15); + } } diff --git a/photon-core/src/test/java/org/photonvision/hardware/HardwareConfigTest.java 
b/photon-core/src/test/java/org/photonvision/hardware/HardwareConfigTest.java index 9cd13e5dda..264007da2f 100644 --- a/photon-core/src/test/java/org/photonvision/hardware/HardwareConfigTest.java +++ b/photon-core/src/test/java/org/photonvision/hardware/HardwareConfigTest.java @@ -28,21 +28,21 @@ import org.photonvision.common.util.TestUtils; public class HardwareConfigTest { - @Test - public void loadJson() { - try { - System.out.println("Loading Hardware configs..."); - var config = - new ObjectMapper().readValue(TestUtils.getHardwareConfigJson(), HardwareConfig.class); - assertEquals(config.deviceName, "PhotonVision"); - assertEquals(config.deviceLogoPath, "photonvision.png"); - assertEquals(config.supportURL, "https://support.photonvision.com"); - Assertions.assertArrayEquals( - config.ledPins.stream().mapToInt(i -> i).toArray(), new int[] {2, 13}); - CustomGPIO.setConfig(config); + @Test + public void loadJson() { + try { + System.out.println("Loading Hardware configs..."); + var config = + new ObjectMapper().readValue(TestUtils.getHardwareConfigJson(), HardwareConfig.class); + assertEquals(config.deviceName, "PhotonVision"); + assertEquals(config.deviceLogoPath, "photonvision.png"); + assertEquals(config.supportURL, "https://support.photonvision.com"); + Assertions.assertArrayEquals( + config.ledPins.stream().mapToInt(i -> i).toArray(), new int[] {2, 13}); + CustomGPIO.setConfig(config); - } catch (IOException e) { - e.printStackTrace(); - } + } catch (IOException e) { + e.printStackTrace(); } + } } diff --git a/photon-core/src/test/java/org/photonvision/hardware/HardwareManagerTest.java b/photon-core/src/test/java/org/photonvision/hardware/HardwareManagerTest.java index 6273696058..77e6b0e30c 100644 --- a/photon-core/src/test/java/org/photonvision/hardware/HardwareManagerTest.java +++ b/photon-core/src/test/java/org/photonvision/hardware/HardwareManagerTest.java @@ -27,23 +27,23 @@ import org.photonvision.common.logging.Logger; public class 
HardwareManagerTest { - public static final Logger logger = new Logger(HardwareManager.class, LogGroup.General); + public static final Logger logger = new Logger(HardwareManager.class, LogGroup.General); - @Test - public void managementTest() throws InterruptedException { - Assumptions.assumeTrue(Platform.isRaspberryPi()); - var socket = new PigpioSocket(); - try { - socket.gpioWrite(18, false); - socket.gpioWrite(13, false); - Thread.sleep(500); - for (int i = 0; i < 1000000; i++) { - int duty = 1000000 - i; - socket.hardwarePWM(18, 1000000, duty); - Thread.sleep(0, 25); - } - } catch (PigpioException e) { - logger.error("error", e); - } + @Test + public void managementTest() throws InterruptedException { + Assumptions.assumeTrue(Platform.isRaspberryPi()); + var socket = new PigpioSocket(); + try { + socket.gpioWrite(18, false); + socket.gpioWrite(13, false); + Thread.sleep(500); + for (int i = 0; i < 1000000; i++) { + int duty = 1000000 - i; + socket.hardwarePWM(18, 1000000, duty); + Thread.sleep(0, 25); + } + } catch (PigpioException e) { + logger.error("error", e); } + } } diff --git a/photon-core/src/test/java/org/photonvision/hardware/HardwareTest.java b/photon-core/src/test/java/org/photonvision/hardware/HardwareTest.java index 9d371d25f8..002d16bf63 100644 --- a/photon-core/src/test/java/org/photonvision/hardware/HardwareTest.java +++ b/photon-core/src/test/java/org/photonvision/hardware/HardwareTest.java @@ -28,65 +28,65 @@ import org.photonvision.common.hardware.metrics.MetricsManager; public class HardwareTest { - @Test - public void testHardware() { - MetricsManager mm = new MetricsManager(); + @Test + public void testHardware() { + MetricsManager mm = new MetricsManager(); - if (!Platform.isRaspberryPi()) return; + if (!Platform.isRaspberryPi()) return; - System.out.println("Testing on platform: " + Platform.getPlatformName()); + System.out.println("Testing on platform: " + Platform.getPlatformName()); - System.out.println("Printing CPU Info:"); - 
System.out.println("Memory: " + mm.getMemory() + "MB"); - System.out.println("Temperature: " + mm.getTemp() + "C"); - System.out.println("Utilization: : " + mm.getUtilization() + "%"); + System.out.println("Printing CPU Info:"); + System.out.println("Memory: " + mm.getMemory() + "MB"); + System.out.println("Temperature: " + mm.getTemp() + "C"); + System.out.println("Utilization: : " + mm.getUtilization() + "%"); - System.out.println("Printing GPU Info:"); - System.out.println("Memory: " + mm.getGPUMemorySplit() + "MB"); + System.out.println("Printing GPU Info:"); + System.out.println("Memory: " + mm.getGPUMemorySplit() + "MB"); - System.out.println("Printing RAM Info: "); - System.out.println("Used RAM: : " + mm.getUsedRam() + "MB"); - } + System.out.println("Printing RAM Info: "); + System.out.println("Used RAM: : " + mm.getUsedRam() + "MB"); + } - @Test - public void testGPIO() { - GPIOBase gpio; - if (Platform.isRaspberryPi()) { - gpio = new PigpioPin(18); - } else { - gpio = new CustomGPIO(18); - } + @Test + public void testGPIO() { + GPIOBase gpio; + if (Platform.isRaspberryPi()) { + gpio = new PigpioPin(18); + } else { + gpio = new CustomGPIO(18); + } - gpio.setOn(); // HIGH - assertTrue(gpio.getState()); + gpio.setOn(); // HIGH + assertTrue(gpio.getState()); - gpio.setOff(); // LOW - assertFalse(gpio.getState()); + gpio.setOff(); // LOW + assertFalse(gpio.getState()); - gpio.togglePin(); // HIGH - assertTrue(gpio.getState()); + gpio.togglePin(); // HIGH + assertTrue(gpio.getState()); - gpio.togglePin(); // LOW - assertFalse(gpio.getState()); + gpio.togglePin(); // LOW + assertFalse(gpio.getState()); - gpio.setState(true); // HIGH - assertTrue(gpio.getState()); + gpio.setState(true); // HIGH + assertTrue(gpio.getState()); - gpio.setState(false); // LOW - assertFalse(gpio.getState()); + gpio.setState(false); // LOW + assertFalse(gpio.getState()); - var success = gpio.shutdown(); - assertTrue(success); - } + var success = gpio.shutdown(); + assertTrue(success); 
+ } - @Test - public void testBlink() { - if (!Platform.isRaspberryPi()) return; - GPIOBase pwm = new PigpioPin(18); - pwm.blink(125, 3); - var startms = System.currentTimeMillis(); - while (true) { - if (System.currentTimeMillis() - startms > 4500) break; - } + @Test + public void testBlink() { + if (!Platform.isRaspberryPi()) return; + GPIOBase pwm = new PigpioPin(18); + pwm.blink(125, 3); + var startms = System.currentTimeMillis(); + while (true) { + if (System.currentTimeMillis() - startms > 4500) break; } + } } diff --git a/photon-core/src/test/java/org/photonvision/vision/QuirkyCameraTest.java b/photon-core/src/test/java/org/photonvision/vision/QuirkyCameraTest.java index 45517ee029..19c5645181 100644 --- a/photon-core/src/test/java/org/photonvision/vision/QuirkyCameraTest.java +++ b/photon-core/src/test/java/org/photonvision/vision/QuirkyCameraTest.java @@ -24,39 +24,39 @@ import org.photonvision.vision.camera.QuirkyCamera; public class QuirkyCameraTest { - @Test - public void ps3EyeTest() { - HashMap ps3EyeQuirks = new HashMap<>(); - ps3EyeQuirks.put(CameraQuirk.Gain, true); - ps3EyeQuirks.put(CameraQuirk.FPSCap100, true); - for (var q : CameraQuirk.values()) { - ps3EyeQuirks.putIfAbsent(q, false); - } - - QuirkyCamera psEye = QuirkyCamera.getQuirkyCamera(0x2000, 0x1415); - Assertions.assertEquals(psEye.quirks, ps3EyeQuirks); + @Test + public void ps3EyeTest() { + HashMap ps3EyeQuirks = new HashMap<>(); + ps3EyeQuirks.put(CameraQuirk.Gain, true); + ps3EyeQuirks.put(CameraQuirk.FPSCap100, true); + for (var q : CameraQuirk.values()) { + ps3EyeQuirks.putIfAbsent(q, false); } - @Test - public void picamTest() { - HashMap picamQuirks = new HashMap<>(); - picamQuirks.put(CameraQuirk.PiCam, true); - for (var q : CameraQuirk.values()) { - picamQuirks.putIfAbsent(q, false); - } + QuirkyCamera psEye = QuirkyCamera.getQuirkyCamera(0x2000, 0x1415); + Assertions.assertEquals(psEye.quirks, ps3EyeQuirks); + } - QuirkyCamera picam = QuirkyCamera.getQuirkyCamera(-1, -1, 
"mmal service 16.1"); - Assertions.assertEquals(picam.quirks, picamQuirks); + @Test + public void picamTest() { + HashMap picamQuirks = new HashMap<>(); + picamQuirks.put(CameraQuirk.PiCam, true); + for (var q : CameraQuirk.values()) { + picamQuirks.putIfAbsent(q, false); } - @Test - public void quirklessCameraTest() { - HashMap noQuirks = new HashMap<>(); - for (var q : CameraQuirk.values()) { - noQuirks.put(q, false); - } + QuirkyCamera picam = QuirkyCamera.getQuirkyCamera(-1, -1, "mmal service 16.1"); + Assertions.assertEquals(picam.quirks, picamQuirks); + } - QuirkyCamera quirkless = QuirkyCamera.getQuirkyCamera(1234, 8888); - Assertions.assertEquals(quirkless.quirks, noQuirks); + @Test + public void quirklessCameraTest() { + HashMap noQuirks = new HashMap<>(); + for (var q : CameraQuirk.values()) { + noQuirks.put(q, false); } + + QuirkyCamera quirkless = QuirkyCamera.getQuirkyCamera(1234, 8888); + Assertions.assertEquals(quirkless.quirks, noQuirks); + } } diff --git a/photon-core/src/test/java/org/photonvision/vision/frame/provider/FileFrameProviderTest.java b/photon-core/src/test/java/org/photonvision/vision/frame/provider/FileFrameProviderTest.java index 05cddeb9d7..742542b885 100644 --- a/photon-core/src/test/java/org/photonvision/vision/frame/provider/FileFrameProviderTest.java +++ b/photon-core/src/test/java/org/photonvision/vision/frame/provider/FileFrameProviderTest.java @@ -27,79 +27,79 @@ import org.photonvision.vision.frame.Frame; public class FileFrameProviderTest { - @BeforeAll - public static void initPath() { - TestUtils.loadLibraries(); - } - - @Test - public void TestFilesExist() { - assertTrue(Files.exists(TestUtils.getTestImagesPath(false))); - } + @BeforeAll + public static void initPath() { + TestUtils.loadLibraries(); + } - @Test - public void Load2019ImageOnceTest() { - var goodFilePath = - TestUtils.getWPIImagePath(TestUtils.WPI2019Image.kCargoStraightDark72in, false); + @Test + public void TestFilesExist() { + 
assertTrue(Files.exists(TestUtils.getTestImagesPath(false))); + } - assertTrue(Files.exists(goodFilePath)); + @Test + public void Load2019ImageOnceTest() { + var goodFilePath = + TestUtils.getWPIImagePath(TestUtils.WPI2019Image.kCargoStraightDark72in, false); - FileFrameProvider goodFrameProvider = new FileFrameProvider(goodFilePath, 68.5); + assertTrue(Files.exists(goodFilePath)); - Frame goodFrame = goodFrameProvider.get(); + FileFrameProvider goodFrameProvider = new FileFrameProvider(goodFilePath, 68.5); - int goodFrameCols = goodFrame.colorImage.getMat().cols(); - int goodFrameRows = goodFrame.colorImage.getMat().rows(); + Frame goodFrame = goodFrameProvider.get(); - // 2019 Images are at 320x240 - assertEquals(320, goodFrameCols); - assertEquals(240, goodFrameRows); + int goodFrameCols = goodFrame.colorImage.getMat().cols(); + int goodFrameRows = goodFrame.colorImage.getMat().rows(); - TestUtils.showImage(goodFrame.colorImage.getMat(), "2019"); + // 2019 Images are at 320x240 + assertEquals(320, goodFrameCols); + assertEquals(240, goodFrameRows); - var badFilePath = Paths.get("bad.jpg"); // this file does not exist + TestUtils.showImage(goodFrame.colorImage.getMat(), "2019"); - FileFrameProvider badFrameProvider = null; + var badFilePath = Paths.get("bad.jpg"); // this file does not exist - try { - badFrameProvider = new FileFrameProvider(badFilePath, 68.5); - } catch (Exception e) { - // ignored - } + FileFrameProvider badFrameProvider = null; - assertNull(badFrameProvider); + try { + badFrameProvider = new FileFrameProvider(badFilePath, 68.5); + } catch (Exception e) { + // ignored } - @Test - public void Load2020ImageOnceTest() { - var goodFilePath = - TestUtils.getWPIImagePath(TestUtils.WPI2020Image.kBlueGoal_108in_Center, false); + assertNull(badFrameProvider); + } - assertTrue(Files.exists(goodFilePath)); + @Test + public void Load2020ImageOnceTest() { + var goodFilePath = + TestUtils.getWPIImagePath(TestUtils.WPI2020Image.kBlueGoal_108in_Center, false); 
- FileFrameProvider goodFrameProvider = new FileFrameProvider(goodFilePath, 68.5); + assertTrue(Files.exists(goodFilePath)); - Frame goodFrame = goodFrameProvider.get(); + FileFrameProvider goodFrameProvider = new FileFrameProvider(goodFilePath, 68.5); - int goodFrameCols = goodFrame.colorImage.getMat().cols(); - int goodFrameRows = goodFrame.colorImage.getMat().rows(); + Frame goodFrame = goodFrameProvider.get(); - // 2020 Images are at 640x480 - assertEquals(640, goodFrameCols); - assertEquals(480, goodFrameRows); + int goodFrameCols = goodFrame.colorImage.getMat().cols(); + int goodFrameRows = goodFrame.colorImage.getMat().rows(); - TestUtils.showImage(goodFrame.colorImage.getMat(), "2020"); + // 2020 Images are at 640x480 + assertEquals(640, goodFrameCols); + assertEquals(480, goodFrameRows); - var badFilePath = Paths.get("bad.jpg"); // this file does not exist + TestUtils.showImage(goodFrame.colorImage.getMat(), "2020"); - FileFrameProvider badFrameProvider = null; + var badFilePath = Paths.get("bad.jpg"); // this file does not exist - try { - badFrameProvider = new FileFrameProvider(badFilePath, 68.5); - } catch (Exception e) { - // ignored - } + FileFrameProvider badFrameProvider = null; - assertNull(badFrameProvider); + try { + badFrameProvider = new FileFrameProvider(badFilePath, 68.5); + } catch (Exception e) { + // ignored } + + assertNull(badFrameProvider); + } } diff --git a/photon-core/src/test/java/org/photonvision/vision/frame/provider/LibcameraTest.java b/photon-core/src/test/java/org/photonvision/vision/frame/provider/LibcameraTest.java index b23a1cd9bd..989868d888 100644 --- a/photon-core/src/test/java/org/photonvision/vision/frame/provider/LibcameraTest.java +++ b/photon-core/src/test/java/org/photonvision/vision/frame/provider/LibcameraTest.java @@ -22,18 +22,18 @@ // import org.photonvision.raspi.LibCameraJNI; public class LibcameraTest { - @Test - public void testBasic() { - // 
System.load("/home/pi/photon-libcamera-gl-driver/build/libphotonlibcamera.so"); - // LibCameraJNI.createCamera(1920, 1080, 60); - // try { - // Thread.sleep(1000); - // } catch (InterruptedException e) { - // } - // LibCameraJNI.startCamera(); - // try { - // Thread.sleep(5000); - // } catch (InterruptedException e) { - // } - } + @Test + public void testBasic() { + // System.load("/home/pi/photon-libcamera-gl-driver/build/libphotonlibcamera.so"); + // LibCameraJNI.createCamera(1920, 1080, 60); + // try { + // Thread.sleep(1000); + // } catch (InterruptedException e) { + // } + // LibCameraJNI.startCamera(); + // try { + // Thread.sleep(5000); + // } catch (InterruptedException e) { + // } + } } diff --git a/photon-core/src/test/java/org/photonvision/vision/opencv/ContourTest.java b/photon-core/src/test/java/org/photonvision/vision/opencv/ContourTest.java index 6e6c6fb178..e84a1edd43 100644 --- a/photon-core/src/test/java/org/photonvision/vision/opencv/ContourTest.java +++ b/photon-core/src/test/java/org/photonvision/vision/opencv/ContourTest.java @@ -28,44 +28,44 @@ import org.photonvision.common.util.TestUtils; public class ContourTest { - @BeforeEach - public void Init() { - TestUtils.loadLibraries(); - } + @BeforeEach + public void Init() { + TestUtils.loadLibraries(); + } - @Test - public void simpleContourTest() { - var mat = new MatOfPoint(); - mat.fromList(List.of(new Point(0, 0), new Point(10, 0), new Point(10, 10), new Point(0, 10))); - var contour = new Contour(mat); - assertEquals(100, contour.getArea()); - assertEquals(40, contour.getPerimeter()); - assertEquals(new Point(5, 5), contour.getCenterPoint()); - } + @Test + public void simpleContourTest() { + var mat = new MatOfPoint(); + mat.fromList(List.of(new Point(0, 0), new Point(10, 0), new Point(10, 10), new Point(0, 10))); + var contour = new Contour(mat); + assertEquals(100, contour.getArea()); + assertEquals(40, contour.getPerimeter()); + assertEquals(new Point(5, 5), contour.getCenterPoint()); + 
} - @Test - public void test2019() { - var firstMat = new MatOfPoint(); - // contour 0 and 1 data from kCargoStraightDark72in_HighRes - firstMat.fromList( - List.of( - new Point(1328, 976), - new Point(1272, 985), - new Point(1230, 832), - new Point(1326, 948), - new Point(1328, 971))); + @Test + public void test2019() { + var firstMat = new MatOfPoint(); + // contour 0 and 1 data from kCargoStraightDark72in_HighRes + firstMat.fromList( + List.of( + new Point(1328, 976), + new Point(1272, 985), + new Point(1230, 832), + new Point(1326, 948), + new Point(1328, 971))); - var secondMat = new MatOfPoint(); - secondMat.fromList( - List.of( - new Point(956, 832), - new Point(882, 978), - new Point(927, 810), - new Point(954, 821), - new Point(956, 825))); - var firstContour = new Contour(firstMat); - var secondContour = new Contour(secondMat); - boolean result = firstContour.isIntersecting(secondContour, ContourIntersectionDirection.Up); - assertTrue(result); - } + var secondMat = new MatOfPoint(); + secondMat.fromList( + List.of( + new Point(956, 832), + new Point(882, 978), + new Point(927, 810), + new Point(954, 821), + new Point(956, 825))); + var firstContour = new Contour(firstMat); + var secondContour = new Contour(secondMat); + boolean result = firstContour.isIntersecting(secondContour, ContourIntersectionDirection.Up); + assertTrue(result); + } } diff --git a/photon-core/src/test/java/org/photonvision/vision/pipeline/AprilTagTest.java b/photon-core/src/test/java/org/photonvision/vision/pipeline/AprilTagTest.java index fe1b7b9d9c..fd4ce75035 100644 --- a/photon-core/src/test/java/org/photonvision/vision/pipeline/AprilTagTest.java +++ b/photon-core/src/test/java/org/photonvision/vision/pipeline/AprilTagTest.java @@ -31,118 +31,118 @@ import org.photonvision.vision.target.TrackedTarget; public class AprilTagTest { - @BeforeEach - public void Init() { - TestUtils.loadLibraries(); + @BeforeEach + public void Init() { + TestUtils.loadLibraries(); + } + + @Test + 
public void testApriltagFacingCamera() { + var pipeline = new AprilTagPipeline(); + + pipeline.getSettings().inputShouldShow = true; + pipeline.getSettings().outputShouldDraw = true; + pipeline.getSettings().solvePNPEnabled = true; + pipeline.getSettings().cornerDetectionAccuracyPercentage = 4; + pipeline.getSettings().cornerDetectionUseConvexHulls = true; + pipeline.getSettings().targetModel = TargetModel.k200mmAprilTag; + pipeline.getSettings().tagFamily = AprilTagFamily.kTag36h11; + + var frameProvider = + new FileFrameProvider( + TestUtils.getApriltagImagePath(TestUtils.ApriltagTestImages.kTag1_640_480, false), + TestUtils.WPI2020Image.FOV, + TestUtils.get2020LifeCamCoeffs(false)); + frameProvider.requestFrameThresholdType(pipeline.getThresholdType()); + + CVPipelineResult pipelineResult; + try { + pipelineResult = pipeline.run(frameProvider.get(), QuirkyCamera.DefaultCamera); + printTestResults(pipelineResult); + } catch (RuntimeException e) { + // For now, will throw because of the Rotation3d ctor + return; } - @Test - public void testApriltagFacingCamera() { - var pipeline = new AprilTagPipeline(); - - pipeline.getSettings().inputShouldShow = true; - pipeline.getSettings().outputShouldDraw = true; - pipeline.getSettings().solvePNPEnabled = true; - pipeline.getSettings().cornerDetectionAccuracyPercentage = 4; - pipeline.getSettings().cornerDetectionUseConvexHulls = true; - pipeline.getSettings().targetModel = TargetModel.k200mmAprilTag; - pipeline.getSettings().tagFamily = AprilTagFamily.kTag36h11; - - var frameProvider = - new FileFrameProvider( - TestUtils.getApriltagImagePath(TestUtils.ApriltagTestImages.kTag1_640_480, false), - TestUtils.WPI2020Image.FOV, - TestUtils.get2020LifeCamCoeffs(false)); - frameProvider.requestFrameThresholdType(pipeline.getThresholdType()); - - CVPipelineResult pipelineResult; - try { - pipelineResult = pipeline.run(frameProvider.get(), QuirkyCamera.DefaultCamera); - printTestResults(pipelineResult); - } catch (RuntimeException 
e) { - // For now, will throw because of the Rotation3d ctor - return; - } - - // Draw on input - var outputPipe = new OutputStreamPipeline(); - var ret = - outputPipe.process( - pipelineResult.inputAndOutputFrame, pipeline.getSettings(), pipelineResult.targets); - - TestUtils.showImage(ret.inputAndOutputFrame.processedImage.getMat(), "Pipeline output", 999999); - - // these numbers are not *accurate*, but they are known and expected - var pose = pipelineResult.targets.get(0).getBestCameraToTarget3d(); - Assertions.assertEquals(2, pose.getTranslation().getX(), 0.2); - Assertions.assertEquals(0.1, pose.getTranslation().getY(), 0.2); - Assertions.assertEquals(0.0, pose.getTranslation().getZ(), 0.2); - - // We expect the object axes to be in NWU, with the x-axis coming out of the tag - // This visible tag is facing the camera almost parallel, so in world space: - - // The object's X axis should be (-1, 0, 0) - Assertions.assertEquals( - -1, new Translation3d(1, 0, 0).rotateBy(pose.getRotation()).getX(), 0.1); - // The object's Y axis should be (0, -1, 0) - Assertions.assertEquals( - -1, new Translation3d(0, 1, 0).rotateBy(pose.getRotation()).getY(), 0.1); - // The object's Z axis should be (0, 0, 1) - Assertions.assertEquals(1, new Translation3d(0, 0, 1).rotateBy(pose.getRotation()).getZ(), 0.1); + // Draw on input + var outputPipe = new OutputStreamPipeline(); + var ret = + outputPipe.process( + pipelineResult.inputAndOutputFrame, pipeline.getSettings(), pipelineResult.targets); + + TestUtils.showImage(ret.inputAndOutputFrame.processedImage.getMat(), "Pipeline output", 999999); + + // these numbers are not *accurate*, but they are known and expected + var pose = pipelineResult.targets.get(0).getBestCameraToTarget3d(); + Assertions.assertEquals(2, pose.getTranslation().getX(), 0.2); + Assertions.assertEquals(0.1, pose.getTranslation().getY(), 0.2); + Assertions.assertEquals(0.0, pose.getTranslation().getZ(), 0.2); + + // We expect the object axes to be in NWU, with 
the x-axis coming out of the tag + // This visible tag is facing the camera almost parallel, so in world space: + + // The object's X axis should be (-1, 0, 0) + Assertions.assertEquals( + -1, new Translation3d(1, 0, 0).rotateBy(pose.getRotation()).getX(), 0.1); + // The object's Y axis should be (0, -1, 0) + Assertions.assertEquals( + -1, new Translation3d(0, 1, 0).rotateBy(pose.getRotation()).getY(), 0.1); + // The object's Z axis should be (0, 0, 1) + Assertions.assertEquals(1, new Translation3d(0, 0, 1).rotateBy(pose.getRotation()).getZ(), 0.1); + } + + @Test + public void testApriltagDistorted() { + var pipeline = new AprilTagPipeline(); + + pipeline.getSettings().inputShouldShow = true; + pipeline.getSettings().outputShouldDraw = true; + pipeline.getSettings().solvePNPEnabled = true; + pipeline.getSettings().cornerDetectionAccuracyPercentage = 4; + pipeline.getSettings().cornerDetectionUseConvexHulls = true; + pipeline.getSettings().targetModel = TargetModel.k200mmAprilTag; + pipeline.getSettings().tagFamily = AprilTagFamily.kTag16h5; + + var frameProvider = + new FileFrameProvider( + TestUtils.getApriltagImagePath(TestUtils.ApriltagTestImages.kTag_corner_1280, false), + TestUtils.WPI2020Image.FOV, + TestUtils.getCoeffs(TestUtils.LIMELIGHT_480P_CAL_FILE, false)); + frameProvider.requestFrameThresholdType(pipeline.getThresholdType()); + + CVPipelineResult pipelineResult; + try { + pipelineResult = pipeline.run(frameProvider.get(), QuirkyCamera.DefaultCamera); + printTestResults(pipelineResult); + } catch (RuntimeException e) { + // For now, will throw because of the Rotation3d ctor + return; } - @Test - public void testApriltagDistorted() { - var pipeline = new AprilTagPipeline(); - - pipeline.getSettings().inputShouldShow = true; - pipeline.getSettings().outputShouldDraw = true; - pipeline.getSettings().solvePNPEnabled = true; - pipeline.getSettings().cornerDetectionAccuracyPercentage = 4; - pipeline.getSettings().cornerDetectionUseConvexHulls = true; - 
pipeline.getSettings().targetModel = TargetModel.k200mmAprilTag; - pipeline.getSettings().tagFamily = AprilTagFamily.kTag16h5; - - var frameProvider = - new FileFrameProvider( - TestUtils.getApriltagImagePath(TestUtils.ApriltagTestImages.kTag_corner_1280, false), - TestUtils.WPI2020Image.FOV, - TestUtils.getCoeffs(TestUtils.LIMELIGHT_480P_CAL_FILE, false)); - frameProvider.requestFrameThresholdType(pipeline.getThresholdType()); - - CVPipelineResult pipelineResult; - try { - pipelineResult = pipeline.run(frameProvider.get(), QuirkyCamera.DefaultCamera); - printTestResults(pipelineResult); - } catch (RuntimeException e) { - // For now, will throw because of the Rotation3d ctor - return; - } - - // Draw on input - var outputPipe = new OutputStreamPipeline(); - var ret = - outputPipe.process( - pipelineResult.inputAndOutputFrame, pipeline.getSettings(), pipelineResult.targets); - - TestUtils.showImage(ret.inputAndOutputFrame.processedImage.getMat(), "Pipeline output", 999999); - - // these numbers are not *accurate*, but they are known and expected - var pose = pipelineResult.targets.get(0).getBestCameraToTarget3d(); - Assertions.assertEquals(4.14, pose.getTranslation().getX(), 0.2); - Assertions.assertEquals(2, pose.getTranslation().getY(), 0.2); - Assertions.assertEquals(0.0, pose.getTranslation().getZ(), 0.2); - } - - private static void printTestResults(CVPipelineResult pipelineResult) { - double fps = 1000 / pipelineResult.getLatencyMillis(); - System.out.println( - "Pipeline ran in " + pipelineResult.getLatencyMillis() + "ms (" + fps + " " + "fps)"); - System.out.println("Found " + pipelineResult.targets.size() + " valid targets"); - System.out.println( - "Found targets at " - + pipelineResult.targets.stream() - .map(TrackedTarget::getBestCameraToTarget3d) - .collect(Collectors.toList())); - } + // Draw on input + var outputPipe = new OutputStreamPipeline(); + var ret = + outputPipe.process( + pipelineResult.inputAndOutputFrame, pipeline.getSettings(), 
pipelineResult.targets); + + TestUtils.showImage(ret.inputAndOutputFrame.processedImage.getMat(), "Pipeline output", 999999); + + // these numbers are not *accurate*, but they are known and expected + var pose = pipelineResult.targets.get(0).getBestCameraToTarget3d(); + Assertions.assertEquals(4.14, pose.getTranslation().getX(), 0.2); + Assertions.assertEquals(2, pose.getTranslation().getY(), 0.2); + Assertions.assertEquals(0.0, pose.getTranslation().getZ(), 0.2); + } + + private static void printTestResults(CVPipelineResult pipelineResult) { + double fps = 1000 / pipelineResult.getLatencyMillis(); + System.out.println( + "Pipeline ran in " + pipelineResult.getLatencyMillis() + "ms (" + fps + " " + "fps)"); + System.out.println("Found " + pipelineResult.targets.size() + " valid targets"); + System.out.println( + "Found targets at " + + pipelineResult.targets.stream() + .map(TrackedTarget::getBestCameraToTarget3d) + .collect(Collectors.toList())); + } } diff --git a/photon-core/src/test/java/org/photonvision/vision/pipeline/ArucoPipelineTest.java b/photon-core/src/test/java/org/photonvision/vision/pipeline/ArucoPipelineTest.java index 168b7b92e9..44e8af4a98 100644 --- a/photon-core/src/test/java/org/photonvision/vision/pipeline/ArucoPipelineTest.java +++ b/photon-core/src/test/java/org/photonvision/vision/pipeline/ArucoPipelineTest.java @@ -29,58 +29,58 @@ import org.photonvision.vision.target.TrackedTarget; public class ArucoPipelineTest { - @BeforeEach - public void Init() throws IOException { - TestUtils.loadLibraries(); - } + @BeforeEach + public void Init() throws IOException { + TestUtils.loadLibraries(); + } - @Test - public void testApriltagFacingCameraAruco() { - var pipeline = new ArucoPipeline(); + @Test + public void testApriltagFacingCameraAruco() { + var pipeline = new ArucoPipeline(); - pipeline.getSettings().inputShouldShow = true; - pipeline.getSettings().outputShouldDraw = true; - pipeline.getSettings().solvePNPEnabled = true; - 
pipeline.getSettings().cornerDetectionAccuracyPercentage = 4; - pipeline.getSettings().cornerDetectionUseConvexHulls = true; - pipeline.getSettings().targetModel = TargetModel.k200mmAprilTag; + pipeline.getSettings().inputShouldShow = true; + pipeline.getSettings().outputShouldDraw = true; + pipeline.getSettings().solvePNPEnabled = true; + pipeline.getSettings().cornerDetectionAccuracyPercentage = 4; + pipeline.getSettings().cornerDetectionUseConvexHulls = true; + pipeline.getSettings().targetModel = TargetModel.k200mmAprilTag; - // pipeline.getSettings().tagFamily = AprilTagFamily.kTag36h11; + // pipeline.getSettings().tagFamily = AprilTagFamily.kTag36h11; - var frameProvider = - new FileFrameProvider( - TestUtils.getApriltagImagePath(TestUtils.ApriltagTestImages.kTag1_16h5_1280, false), - 106, - TestUtils.getCoeffs("laptop_1280.json", false)); - frameProvider.requestFrameThresholdType(pipeline.getThresholdType()); + var frameProvider = + new FileFrameProvider( + TestUtils.getApriltagImagePath(TestUtils.ApriltagTestImages.kTag1_16h5_1280, false), + 106, + TestUtils.getCoeffs("laptop_1280.json", false)); + frameProvider.requestFrameThresholdType(pipeline.getThresholdType()); - CVPipelineResult pipelineResult; - try { - pipelineResult = pipeline.run(frameProvider.get(), QuirkyCamera.DefaultCamera); - printTestResults(pipelineResult); - } catch (RuntimeException e) { - // For now, will throw because of the Rotation3d ctor - return; - } + CVPipelineResult pipelineResult; + try { + pipelineResult = pipeline.run(frameProvider.get(), QuirkyCamera.DefaultCamera); + printTestResults(pipelineResult); + } catch (RuntimeException e) { + // For now, will throw because of the Rotation3d ctor + return; + } - // Draw on input - var outputPipe = new OutputStreamPipeline(); - outputPipe.process( - pipelineResult.inputAndOutputFrame, pipeline.getSettings(), pipelineResult.targets); + // Draw on input + var outputPipe = new OutputStreamPipeline(); + outputPipe.process( + 
pipelineResult.inputAndOutputFrame, pipeline.getSettings(), pipelineResult.targets); - TestUtils.showImage( - pipelineResult.inputAndOutputFrame.processedImage.getMat(), "Pipeline output", 999999); - } + TestUtils.showImage( + pipelineResult.inputAndOutputFrame.processedImage.getMat(), "Pipeline output", 999999); + } - private static void printTestResults(CVPipelineResult pipelineResult) { - double fps = 1000 / pipelineResult.getLatencyMillis(); - System.out.println( - "Pipeline ran in " + pipelineResult.getLatencyMillis() + "ms (" + fps + " " + "fps)"); - System.out.println("Found " + pipelineResult.targets.size() + " valid targets"); - System.out.println( - "Found targets at " - + pipelineResult.targets.stream() - .map(TrackedTarget::getBestCameraToTarget3d) - .collect(Collectors.toList())); - } + private static void printTestResults(CVPipelineResult pipelineResult) { + double fps = 1000 / pipelineResult.getLatencyMillis(); + System.out.println( + "Pipeline ran in " + pipelineResult.getLatencyMillis() + "ms (" + fps + " " + "fps)"); + System.out.println("Found " + pipelineResult.targets.size() + " valid targets"); + System.out.println( + "Found targets at " + + pipelineResult.targets.stream() + .map(TrackedTarget::getBestCameraToTarget3d) + .collect(Collectors.toList())); + } } diff --git a/photon-core/src/test/java/org/photonvision/vision/pipeline/Calibrate3dPipeTest.java b/photon-core/src/test/java/org/photonvision/vision/pipeline/Calibrate3dPipeTest.java index 72547829f1..eb960db0a2 100644 --- a/photon-core/src/test/java/org/photonvision/vision/pipeline/Calibrate3dPipeTest.java +++ b/photon-core/src/test/java/org/photonvision/vision/pipeline/Calibrate3dPipeTest.java @@ -46,306 +46,306 @@ import org.photonvision.vision.pipe.impl.FindBoardCornersPipe; public class Calibrate3dPipeTest { - @BeforeAll - public static void init() { - TestUtils.loadLibraries(); + @BeforeAll + public static void init() { + TestUtils.loadLibraries(); + } + + @Test + public void 
perViewErrorsTest() { + List frames = new ArrayList<>(); + + File dir = new File(TestUtils.getDotBoardImagesPath().toAbsolutePath().toString()); + File[] directoryListing = dir.listFiles(); + for (var file : directoryListing) { + frames.add(Imgcodecs.imread(file.getAbsolutePath())); } - @Test - public void perViewErrorsTest() { - List frames = new ArrayList<>(); + FindBoardCornersPipe findBoardCornersPipe = new FindBoardCornersPipe(); + findBoardCornersPipe.setParams( + new FindBoardCornersPipe.FindCornersPipeParams( + 11, 4, UICalibrationData.BoardType.DOTBOARD, 15, FrameDivisor.NONE)); - File dir = new File(TestUtils.getDotBoardImagesPath().toAbsolutePath().toString()); - File[] directoryListing = dir.listFiles(); - for (var file : directoryListing) { - frames.add(Imgcodecs.imread(file.getAbsolutePath())); - } + List> foundCornersList = new ArrayList<>(); - FindBoardCornersPipe findBoardCornersPipe = new FindBoardCornersPipe(); - findBoardCornersPipe.setParams( - new FindBoardCornersPipe.FindCornersPipeParams( - 11, 4, UICalibrationData.BoardType.DOTBOARD, 15, FrameDivisor.NONE)); - - List> foundCornersList = new ArrayList<>(); - - for (var f : frames) { - var copy = new Mat(); - f.copyTo(copy); - foundCornersList.add(findBoardCornersPipe.run(Pair.of(f, copy)).output); - } - - Calibrate3dPipe calibrate3dPipe = new Calibrate3dPipe(); - calibrate3dPipe.setParams(new Calibrate3dPipe.CalibratePipeParams(new Size(640, 480))); - - var calibrate3dPipeOutput = calibrate3dPipe.run(foundCornersList); - assertTrue(calibrate3dPipeOutput.output.perViewErrors.length > 0); - System.out.println( - "Per View Errors: " + Arrays.toString(calibrate3dPipeOutput.output.perViewErrors)); - - for (var f : frames) { - f.release(); - } + for (var f : frames) { + var copy = new Mat(); + f.copyTo(copy); + foundCornersList.add(findBoardCornersPipe.run(Pair.of(f, copy)).output); } - @Test - public void calibrationPipelineTest() { - int startMatCount = CVMat.getMatCount(); - - File dir = new 
File(TestUtils.getDotBoardImagesPath().toAbsolutePath().toString()); - File[] directoryListing = dir.listFiles(); - - Calibrate3dPipeline calibration3dPipeline = new Calibrate3dPipeline(20); - calibration3dPipeline.getSettings().boardHeight = 11; - calibration3dPipeline.getSettings().boardWidth = 4; - calibration3dPipeline.getSettings().boardType = UICalibrationData.BoardType.DOTBOARD; - calibration3dPipeline.getSettings().gridSize = 15; - calibration3dPipeline.getSettings().resolution = new Size(640, 480); - - for (var file : directoryListing) { - calibration3dPipeline.takeSnapshot(); - var frame = - new Frame( - new CVMat(Imgcodecs.imread(file.getAbsolutePath())), - new CVMat(), - FrameThresholdType.NONE, - new FrameStaticProperties(640, 480, 60, null)); - var output = calibration3dPipeline.run(frame, QuirkyCamera.DefaultCamera); - // TestUtils.showImage(output.inputAndOutputFrame.processedImage.getMat()); - output.release(); - frame.release(); - } - - assertTrue( - calibration3dPipeline.foundCornersList.stream() - .map(Triple::getRight) - .allMatch(it -> it.width() > 0 && it.height() > 0)); - - calibration3dPipeline.removeSnapshot(0); - var frame = - new Frame( - new CVMat(Imgcodecs.imread(directoryListing[0].getAbsolutePath())), - new CVMat(), - FrameThresholdType.NONE, - new FrameStaticProperties(640, 480, 60, null)); - calibration3dPipeline.run(frame, QuirkyCamera.DefaultCamera).release(); - frame.release(); + Calibrate3dPipe calibrate3dPipe = new Calibrate3dPipe(); + calibrate3dPipe.setParams(new Calibrate3dPipe.CalibratePipeParams(new Size(640, 480))); - assertTrue( - calibration3dPipeline.foundCornersList.stream() - .map(Triple::getRight) - .allMatch(it -> it.width() > 0 && it.height() > 0)); - - var cal = calibration3dPipeline.tryCalibration(); - calibration3dPipeline.finishCalibration(); - - assertNotNull(cal); - assertNotNull(cal.perViewErrors); - System.out.println("Per View Errors: " + Arrays.toString(cal.perViewErrors)); - System.out.println("Camera 
Intrinsics: " + cal.cameraIntrinsics.toString()); - System.out.println("Dist Coeffs: " + cal.distCoeffs.toString()); - System.out.println("Standard Deviation: " + cal.standardDeviation); - System.out.println( - "Mean: " + Arrays.stream(calibration3dPipeline.perViewErrors()).average().toString()); - - // Confirm we didn't get leaky on our mat usage - // assertTrue(CVMat.getMatCount() == startMatCount); // TODO Figure out why this doesn't work in - // CI - System.out.println("CVMats left: " + CVMat.getMatCount() + " Start: " + startMatCount); - } + var calibrate3dPipeOutput = calibrate3dPipe.run(foundCornersList); + assertTrue(calibrate3dPipeOutput.output.perViewErrors.length > 0); + System.out.println( + "Per View Errors: " + Arrays.toString(calibrate3dPipeOutput.output.perViewErrors)); - @Test - public void calibrateSquares320x240_pi() { - // Pi3 and V1.3 camera - String base = TestUtils.getSquaresBoardImagesPath().toAbsolutePath().toString(); - File dir = Path.of(base, "piCam", "320_240_1").toFile(); - Size sz = new Size(320, 240); - calibrateSquaresCommon(sz, dir); + for (var f : frames) { + f.release(); } - - @Test - public void calibrateSquares640x480_pi() { - // Pi3 and V1.3 camera - String base = TestUtils.getSquaresBoardImagesPath().toAbsolutePath().toString(); - File dir = Path.of(base, "piCam", "640_480_1").toFile(); - Size sz = new Size(640, 480); - calibrateSquaresCommon(sz, dir); - } - - @Test - public void calibrateSquares960x720_pi() { - // Pi3 and V1.3 camera - String base = TestUtils.getSquaresBoardImagesPath().toAbsolutePath().toString(); - File dir = Path.of(base, "piCam", "960_720_1").toFile(); - Size sz = new Size(960, 720); - calibrateSquaresCommon(sz, dir); - } - - @Test - public void calibrateSquares1920x1080_pi() { - // Pi3 and V1.3 camera - String base = TestUtils.getSquaresBoardImagesPath().toAbsolutePath().toString(); - File dir = Path.of(base, "piCam", "1920_1080_1").toFile(); - Size sz = new Size(1920, 1080); - 
calibrateSquaresCommon(sz, dir); - } - - @Test - public void calibrateSquares320x240_gloworm() { - // Gloworm Beta - String base = TestUtils.getSquaresBoardImagesPath().toAbsolutePath().toString(); - File dir = Path.of(base, "gloworm", "320_240_1").toFile(); - Size sz = new Size(320, 240); - Size boardDim = new Size(9, 7); - calibrateSquaresCommon(sz, dir, boardDim); + } + + @Test + public void calibrationPipelineTest() { + int startMatCount = CVMat.getMatCount(); + + File dir = new File(TestUtils.getDotBoardImagesPath().toAbsolutePath().toString()); + File[] directoryListing = dir.listFiles(); + + Calibrate3dPipeline calibration3dPipeline = new Calibrate3dPipeline(20); + calibration3dPipeline.getSettings().boardHeight = 11; + calibration3dPipeline.getSettings().boardWidth = 4; + calibration3dPipeline.getSettings().boardType = UICalibrationData.BoardType.DOTBOARD; + calibration3dPipeline.getSettings().gridSize = 15; + calibration3dPipeline.getSettings().resolution = new Size(640, 480); + + for (var file : directoryListing) { + calibration3dPipeline.takeSnapshot(); + var frame = + new Frame( + new CVMat(Imgcodecs.imread(file.getAbsolutePath())), + new CVMat(), + FrameThresholdType.NONE, + new FrameStaticProperties(640, 480, 60, null)); + var output = calibration3dPipeline.run(frame, QuirkyCamera.DefaultCamera); + // TestUtils.showImage(output.inputAndOutputFrame.processedImage.getMat()); + output.release(); + frame.release(); } - @Test - public void calibrateSquares_960_720_gloworm() { - // Gloworm Beta - String base = TestUtils.getSquaresBoardImagesPath().toAbsolutePath().toString(); - File dir = Path.of(base, "gloworm", "960_720_1").toFile(); - Size sz = new Size(960, 720); - Size boardDim = new Size(9, 7); - calibrateSquaresCommon(sz, dir, boardDim); - } - - @Test - public void calibrateSquares_1280_720_gloworm() { - // Gloworm Beta - // This image set will return a fairly offset Y-pixel for the optical center point - String base = 
TestUtils.getSquaresBoardImagesPath().toAbsolutePath().toString(); - File dir = Path.of(base, "gloworm", "1280_720_1").toFile(); - Size sz = new Size(1280, 720); - Size boardDim = new Size(9, 7); - calibrateSquaresCommon(sz, dir, boardDim, 640, 192); - } - - @Test - public void calibrateSquares_1920_1080_gloworm() { - // Gloworm Beta - // This image set has most samples on the right, and is expected to return a slightly - // wonky calibration. - String base = TestUtils.getSquaresBoardImagesPath().toAbsolutePath().toString(); - File dir = Path.of(base, "gloworm", "1920_1080_1").toFile(); - Size sz = new Size(1920, 1080); - Size boardDim = new Size(9, 7); - calibrateSquaresCommon(sz, dir, boardDim, 1311, 540); - } - - public void calibrateSquaresCommon(Size imgRes, File rootFolder) { - calibrateSquaresCommon(imgRes, rootFolder, new Size(8, 8)); - } - - public void calibrateSquaresCommon(Size imgRes, File rootFolder, Size boardDim) { - calibrateSquaresCommon( - imgRes, rootFolder, boardDim, Units.inchesToMeters(1), imgRes.width / 2, imgRes.height / 2); - } - - public void calibrateSquaresCommon( - Size imgRes, File rootFolder, Size boardDim, double expectedXCenter, double expectedYCenter) { - calibrateSquaresCommon( - imgRes, rootFolder, boardDim, Units.inchesToMeters(1), expectedXCenter, expectedYCenter); - } - - public void calibrateSquaresCommon( - Size imgRes, - File rootFolder, - Size boardDim, - double boardGridSize_m, - double expectedXCenter, - double expectedYCenter) { - int startMatCount = CVMat.getMatCount(); - - File[] directoryListing = rootFolder.listFiles(); - - assertTrue(directoryListing.length >= 25); - - Calibrate3dPipeline calibration3dPipeline = new Calibrate3dPipeline(20); - calibration3dPipeline.getSettings().boardType = UICalibrationData.BoardType.CHESSBOARD; - calibration3dPipeline.getSettings().resolution = imgRes; - calibration3dPipeline.getSettings().boardHeight = (int) Math.round(boardDim.height); - 
calibration3dPipeline.getSettings().boardWidth = (int) Math.round(boardDim.width); - calibration3dPipeline.getSettings().gridSize = boardGridSize_m; - calibration3dPipeline.getSettings().streamingFrameDivisor = FrameDivisor.NONE; - - for (var file : directoryListing) { - if (file.isFile()) { - calibration3dPipeline.takeSnapshot(); - var frame = - new Frame( - new CVMat(Imgcodecs.imread(file.getAbsolutePath())), - new CVMat(), - FrameThresholdType.NONE, - new FrameStaticProperties((int) imgRes.width, (int) imgRes.height, 67, null)); - var output = calibration3dPipeline.run(frame, QuirkyCamera.DefaultCamera); - - // TestUtils.showImage(output.inputAndOutputFrame.processedImage.getMat(), file.getName(), - // 1); - output.release(); - frame.release(); - } - } - - assertTrue( - calibration3dPipeline.foundCornersList.stream() - .map(Triple::getRight) - .allMatch(it -> it.width() > 0 && it.height() > 0)); - - var cal = calibration3dPipeline.tryCalibration(); - calibration3dPipeline.finishCalibration(); - - // visuallyDebugDistortion(directoryListing, imgRes, cal ); - - // Confirm we have indeed gotten valid calibration objects - assertNotNull(cal); - assertNotNull(cal.perViewErrors); - - // Confirm the calibrated center pixel is fairly close to of the "expected" location at the - // center of the sensor. - // For all our data samples so far, this should be true. 
- double centerXErrPct = - Math.abs(cal.cameraIntrinsics.data[2] - expectedXCenter) / (expectedXCenter) * 100.0; - double centerYErrPct = - Math.abs(cal.cameraIntrinsics.data[5] - expectedYCenter) / (expectedYCenter) * 100.0; - assertTrue(centerXErrPct < 10.0); - assertTrue(centerYErrPct < 10.0); - - System.out.println("Per View Errors: " + Arrays.toString(cal.perViewErrors)); - System.out.println("Camera Intrinsics: " + cal.cameraIntrinsics); - System.out.println("Dist Coeffs: " + cal.distCoeffs.toString()); - System.out.println("Standard Deviation: " + cal.standardDeviation); - System.out.println( - "Mean: " + Arrays.stream(calibration3dPipeline.perViewErrors()).average().toString()); - - // Confirm we didn't get leaky on our mat usage - // assertEquals(startMatCount, CVMat.getMatCount()); // TODO Figure out why this doesn't - // work in CI - System.out.println("CVMats left: " + CVMat.getMatCount() + " Start: " + startMatCount); + assertTrue( + calibration3dPipeline.foundCornersList.stream() + .map(Triple::getRight) + .allMatch(it -> it.width() > 0 && it.height() > 0)); + + calibration3dPipeline.removeSnapshot(0); + var frame = + new Frame( + new CVMat(Imgcodecs.imread(directoryListing[0].getAbsolutePath())), + new CVMat(), + FrameThresholdType.NONE, + new FrameStaticProperties(640, 480, 60, null)); + calibration3dPipeline.run(frame, QuirkyCamera.DefaultCamera).release(); + frame.release(); + + assertTrue( + calibration3dPipeline.foundCornersList.stream() + .map(Triple::getRight) + .allMatch(it -> it.width() > 0 && it.height() > 0)); + + var cal = calibration3dPipeline.tryCalibration(); + calibration3dPipeline.finishCalibration(); + + assertNotNull(cal); + assertNotNull(cal.perViewErrors); + System.out.println("Per View Errors: " + Arrays.toString(cal.perViewErrors)); + System.out.println("Camera Intrinsics: " + cal.cameraIntrinsics.toString()); + System.out.println("Dist Coeffs: " + cal.distCoeffs.toString()); + System.out.println("Standard Deviation: " + 
cal.standardDeviation); + System.out.println( + "Mean: " + Arrays.stream(calibration3dPipeline.perViewErrors()).average().toString()); + + // Confirm we didn't get leaky on our mat usage + // assertTrue(CVMat.getMatCount() == startMatCount); // TODO Figure out why this doesn't work in + // CI + System.out.println("CVMats left: " + CVMat.getMatCount() + " Start: " + startMatCount); + } + + @Test + public void calibrateSquares320x240_pi() { + // Pi3 and V1.3 camera + String base = TestUtils.getSquaresBoardImagesPath().toAbsolutePath().toString(); + File dir = Path.of(base, "piCam", "320_240_1").toFile(); + Size sz = new Size(320, 240); + calibrateSquaresCommon(sz, dir); + } + + @Test + public void calibrateSquares640x480_pi() { + // Pi3 and V1.3 camera + String base = TestUtils.getSquaresBoardImagesPath().toAbsolutePath().toString(); + File dir = Path.of(base, "piCam", "640_480_1").toFile(); + Size sz = new Size(640, 480); + calibrateSquaresCommon(sz, dir); + } + + @Test + public void calibrateSquares960x720_pi() { + // Pi3 and V1.3 camera + String base = TestUtils.getSquaresBoardImagesPath().toAbsolutePath().toString(); + File dir = Path.of(base, "piCam", "960_720_1").toFile(); + Size sz = new Size(960, 720); + calibrateSquaresCommon(sz, dir); + } + + @Test + public void calibrateSquares1920x1080_pi() { + // Pi3 and V1.3 camera + String base = TestUtils.getSquaresBoardImagesPath().toAbsolutePath().toString(); + File dir = Path.of(base, "piCam", "1920_1080_1").toFile(); + Size sz = new Size(1920, 1080); + calibrateSquaresCommon(sz, dir); + } + + @Test + public void calibrateSquares320x240_gloworm() { + // Gloworm Beta + String base = TestUtils.getSquaresBoardImagesPath().toAbsolutePath().toString(); + File dir = Path.of(base, "gloworm", "320_240_1").toFile(); + Size sz = new Size(320, 240); + Size boardDim = new Size(9, 7); + calibrateSquaresCommon(sz, dir, boardDim); + } + + @Test + public void calibrateSquares_960_720_gloworm() { + // Gloworm Beta + String base = 
TestUtils.getSquaresBoardImagesPath().toAbsolutePath().toString(); + File dir = Path.of(base, "gloworm", "960_720_1").toFile(); + Size sz = new Size(960, 720); + Size boardDim = new Size(9, 7); + calibrateSquaresCommon(sz, dir, boardDim); + } + + @Test + public void calibrateSquares_1280_720_gloworm() { + // Gloworm Beta + // This image set will return a fairly offset Y-pixel for the optical center point + String base = TestUtils.getSquaresBoardImagesPath().toAbsolutePath().toString(); + File dir = Path.of(base, "gloworm", "1280_720_1").toFile(); + Size sz = new Size(1280, 720); + Size boardDim = new Size(9, 7); + calibrateSquaresCommon(sz, dir, boardDim, 640, 192); + } + + @Test + public void calibrateSquares_1920_1080_gloworm() { + // Gloworm Beta + // This image set has most samples on the right, and is expected to return a slightly + // wonky calibration. + String base = TestUtils.getSquaresBoardImagesPath().toAbsolutePath().toString(); + File dir = Path.of(base, "gloworm", "1920_1080_1").toFile(); + Size sz = new Size(1920, 1080); + Size boardDim = new Size(9, 7); + calibrateSquaresCommon(sz, dir, boardDim, 1311, 540); + } + + public void calibrateSquaresCommon(Size imgRes, File rootFolder) { + calibrateSquaresCommon(imgRes, rootFolder, new Size(8, 8)); + } + + public void calibrateSquaresCommon(Size imgRes, File rootFolder, Size boardDim) { + calibrateSquaresCommon( + imgRes, rootFolder, boardDim, Units.inchesToMeters(1), imgRes.width / 2, imgRes.height / 2); + } + + public void calibrateSquaresCommon( + Size imgRes, File rootFolder, Size boardDim, double expectedXCenter, double expectedYCenter) { + calibrateSquaresCommon( + imgRes, rootFolder, boardDim, Units.inchesToMeters(1), expectedXCenter, expectedYCenter); + } + + public void calibrateSquaresCommon( + Size imgRes, + File rootFolder, + Size boardDim, + double boardGridSize_m, + double expectedXCenter, + double expectedYCenter) { + int startMatCount = CVMat.getMatCount(); + + File[] directoryListing = 
rootFolder.listFiles(); + + assertTrue(directoryListing.length >= 25); + + Calibrate3dPipeline calibration3dPipeline = new Calibrate3dPipeline(20); + calibration3dPipeline.getSettings().boardType = UICalibrationData.BoardType.CHESSBOARD; + calibration3dPipeline.getSettings().resolution = imgRes; + calibration3dPipeline.getSettings().boardHeight = (int) Math.round(boardDim.height); + calibration3dPipeline.getSettings().boardWidth = (int) Math.round(boardDim.width); + calibration3dPipeline.getSettings().gridSize = boardGridSize_m; + calibration3dPipeline.getSettings().streamingFrameDivisor = FrameDivisor.NONE; + + for (var file : directoryListing) { + if (file.isFile()) { + calibration3dPipeline.takeSnapshot(); + var frame = + new Frame( + new CVMat(Imgcodecs.imread(file.getAbsolutePath())), + new CVMat(), + FrameThresholdType.NONE, + new FrameStaticProperties((int) imgRes.width, (int) imgRes.height, 67, null)); + var output = calibration3dPipeline.run(frame, QuirkyCamera.DefaultCamera); + + // TestUtils.showImage(output.inputAndOutputFrame.processedImage.getMat(), file.getName(), + // 1); + output.release(); + frame.release(); + } } - /** - * Uses a given camera coefficents matrix set to "undistort" every image file found in a given - * directory and display them. Provides an easy way to visually debug the results of the - * calibration routine. Seems to play havoc with CI and takes a chunk of time, so shouldn't - * usually be left active in tests. 
- * - * @param directoryListing - * @param imgRes - * @param cal - */ - @SuppressWarnings("unused") - private void visuallyDebugDistortion( - File[] directoryListing, Size imgRes, CameraCalibrationCoefficients cal) { - for (var file : directoryListing) { - if (file.isFile()) { - Mat raw = Imgcodecs.imread(file.getAbsolutePath()); - Mat undistorted = new Mat(new Size(imgRes.width * 2, imgRes.height * 2), raw.type()); - Calib3d.undistort( - raw, undistorted, cal.cameraIntrinsics.getAsMat(), cal.distCoeffs.getAsMat()); - TestUtils.showImage(undistorted, "undistorted " + file.getName(), 1); - raw.release(); - undistorted.release(); - } - } + assertTrue( + calibration3dPipeline.foundCornersList.stream() + .map(Triple::getRight) + .allMatch(it -> it.width() > 0 && it.height() > 0)); + + var cal = calibration3dPipeline.tryCalibration(); + calibration3dPipeline.finishCalibration(); + + // visuallyDebugDistortion(directoryListing, imgRes, cal ); + + // Confirm we have indeed gotten valid calibration objects + assertNotNull(cal); + assertNotNull(cal.perViewErrors); + + // Confirm the calibrated center pixel is fairly close to of the "expected" location at the + // center of the sensor. + // For all our data samples so far, this should be true. 
+ double centerXErrPct = + Math.abs(cal.cameraIntrinsics.data[2] - expectedXCenter) / (expectedXCenter) * 100.0; + double centerYErrPct = + Math.abs(cal.cameraIntrinsics.data[5] - expectedYCenter) / (expectedYCenter) * 100.0; + assertTrue(centerXErrPct < 10.0); + assertTrue(centerYErrPct < 10.0); + + System.out.println("Per View Errors: " + Arrays.toString(cal.perViewErrors)); + System.out.println("Camera Intrinsics: " + cal.cameraIntrinsics); + System.out.println("Dist Coeffs: " + cal.distCoeffs.toString()); + System.out.println("Standard Deviation: " + cal.standardDeviation); + System.out.println( + "Mean: " + Arrays.stream(calibration3dPipeline.perViewErrors()).average().toString()); + + // Confirm we didn't get leaky on our mat usage + // assertEquals(startMatCount, CVMat.getMatCount()); // TODO Figure out why this doesn't + // work in CI + System.out.println("CVMats left: " + CVMat.getMatCount() + " Start: " + startMatCount); + } + + /** + * Uses a given camera coefficents matrix set to "undistort" every image file found in a given + * directory and display them. Provides an easy way to visually debug the results of the + * calibration routine. Seems to play havoc with CI and takes a chunk of time, so shouldn't + * usually be left active in tests. 
+ * + * @param directoryListing + * @param imgRes + * @param cal + */ + @SuppressWarnings("unused") + private void visuallyDebugDistortion( + File[] directoryListing, Size imgRes, CameraCalibrationCoefficients cal) { + for (var file : directoryListing) { + if (file.isFile()) { + Mat raw = Imgcodecs.imread(file.getAbsolutePath()); + Mat undistorted = new Mat(new Size(imgRes.width * 2, imgRes.height * 2), raw.type()); + Calib3d.undistort( + raw, undistorted, cal.cameraIntrinsics.getAsMat(), cal.distCoeffs.getAsMat()); + TestUtils.showImage(undistorted, "undistorted " + file.getName(), 1); + raw.release(); + undistorted.release(); + } } + } } diff --git a/photon-core/src/test/java/org/photonvision/vision/pipeline/CirclePNPTest.java b/photon-core/src/test/java/org/photonvision/vision/pipeline/CirclePNPTest.java index 5a4bc4fcd5..357fdc3306 100644 --- a/photon-core/src/test/java/org/photonvision/vision/pipeline/CirclePNPTest.java +++ b/photon-core/src/test/java/org/photonvision/vision/pipeline/CirclePNPTest.java @@ -37,134 +37,134 @@ import org.photonvision.vision.target.TrackedTarget; public class CirclePNPTest { - private static final String LIFECAM_240P_CAL_FILE = "lifecam240p.json"; - private static final String LIFECAM_480P_CAL_FILE = "lifecam480p.json"; - - @BeforeEach - public void Init() { - TestUtils.loadLibraries(); - } - - @Test - public void loadCameraIntrinsics() { - var lifecam240pCal = getCoeffs(LIFECAM_240P_CAL_FILE); - var lifecam480pCal = getCoeffs(LIFECAM_480P_CAL_FILE); - - assertNotNull(lifecam240pCal); - checkCameraCoefficients(lifecam240pCal); - assertNotNull(lifecam480pCal); - checkCameraCoefficients(lifecam480pCal); - } - - private CameraCalibrationCoefficients getCoeffs(String filename) { - var cameraCalibration = TestUtils.getCoeffs(filename, false); - checkCameraCoefficients(cameraCalibration); - return cameraCalibration; - } - - private void checkCameraCoefficients(CameraCalibrationCoefficients cameraCalibration) { - 
assertNotNull(cameraCalibration); - assertEquals(3, cameraCalibration.cameraIntrinsics.rows); - assertEquals(3, cameraCalibration.cameraIntrinsics.cols); - assertEquals(3, cameraCalibration.cameraIntrinsics.getAsMat().rows()); - assertEquals(3, cameraCalibration.cameraIntrinsics.getAsMat().cols()); - assertEquals(3, cameraCalibration.cameraIntrinsics.getAsMatOfDouble().rows()); - assertEquals(3, cameraCalibration.cameraIntrinsics.getAsMatOfDouble().cols()); - assertEquals(3, cameraCalibration.getCameraIntrinsicsMat().rows()); - assertEquals(3, cameraCalibration.getCameraIntrinsicsMat().cols()); - assertEquals(1, cameraCalibration.distCoeffs.rows); - assertEquals(5, cameraCalibration.distCoeffs.cols); - assertEquals(1, cameraCalibration.distCoeffs.getAsMat().rows()); - assertEquals(5, cameraCalibration.distCoeffs.getAsMat().cols()); - assertEquals(1, cameraCalibration.distCoeffs.getAsMatOfDouble().rows()); - assertEquals(5, cameraCalibration.distCoeffs.getAsMatOfDouble().cols()); - assertEquals(1, cameraCalibration.getDistCoeffsMat().rows()); - assertEquals(5, cameraCalibration.getDistCoeffsMat().cols()); - } - - @Test - public void testCircle() { - var pipeline = new ColoredShapePipeline(); - - pipeline.getSettings().hsvHue.set(0, 100); - pipeline.getSettings().hsvSaturation.set(100, 255); - pipeline.getSettings().hsvValue.set(100, 255); - pipeline.getSettings().outputShouldDraw = true; - pipeline.getSettings().maxCannyThresh = 50; - pipeline.getSettings().circleAccuracy = 15; - pipeline.getSettings().circleDetectThreshold = 5; - pipeline.getSettings().solvePNPEnabled = true; - pipeline.getSettings().cornerDetectionAccuracyPercentage = 4; - pipeline.getSettings().cornerDetectionUseConvexHulls = true; - pipeline.getSettings().cameraCalibration = getCoeffs(LIFECAM_480P_CAL_FILE); - pipeline.getSettings().targetModel = TargetModel.kCircularPowerCell7in; - pipeline.getSettings().outputShouldDraw = true; - pipeline.getSettings().outputShowMultipleTargets = false; - 
pipeline.getSettings().contourGroupingMode = ContourGroupingMode.Single; - pipeline.getSettings().contourIntersection = ContourIntersectionDirection.Up; - pipeline.getSettings().contourShape = ContourShape.Circle; - pipeline.getSettings().circleDetectThreshold = 10; - pipeline.getSettings().contourRadius.setFirst(30); - pipeline.getSettings().accuracyPercentage = 30.0; - - var frameProvider = - new FileFrameProvider( - TestUtils.getPowercellImagePath(TestUtils.PowercellTestImages.kPowercell_test_6, false), - TestUtils.WPI2020Image.FOV, - TestUtils.get2020LifeCamCoeffs(true)); - frameProvider.requestFrameThresholdType(pipeline.getThresholdType()); - - CVPipelineResult pipelineResult = pipeline.run(frameProvider.get(), QuirkyCamera.DefaultCamera); - printTestResults(pipelineResult); - - TestUtils.showImage( - pipelineResult.inputAndOutputFrame.colorImage.getMat(), "Pipeline output", 999999); - } - - private static void continuouslyRunPipeline(Frame frame, ReflectivePipelineSettings settings) { - var pipeline = new ReflectivePipeline(); - pipeline.settings = settings; - - while (true) { - CVPipelineResult pipelineResult = pipeline.run(frame, QuirkyCamera.DefaultCamera); - printTestResults(pipelineResult); - int preRelease = CVMat.getMatCount(); - pipelineResult.release(); - int postRelease = CVMat.getMatCount(); - - System.out.printf("Pre: %d, Post: %d\n", preRelease, postRelease); - } - } - - // used to run VisualVM for profiling, which won't run on unit tests. 
- public static void main(String[] args) { - TestUtils.loadLibraries(); - var frameProvider = - new FileFrameProvider( - TestUtils.getWPIImagePath(TestUtils.WPI2019Image.kCargoStraightDark72in_HighRes, false), - TestUtils.WPI2019Image.FOV); - - var settings = new ReflectivePipelineSettings(); - settings.hsvHue.set(60, 100); - settings.hsvSaturation.set(100, 255); - settings.hsvValue.set(190, 255); - settings.outputShouldDraw = true; - settings.outputShowMultipleTargets = true; - settings.contourGroupingMode = ContourGroupingMode.Dual; - settings.contourIntersection = ContourIntersectionDirection.Up; - - continuouslyRunPipeline(frameProvider.get(), settings); - } - - private static void printTestResults(CVPipelineResult pipelineResult) { - double fps = 1000 / pipelineResult.getLatencyMillis(); - System.out.println( - "Pipeline ran in " + pipelineResult.getLatencyMillis() + "ms (" + fps + " " + "fps)"); - System.out.println("Found " + pipelineResult.targets.size() + " valid targets"); - System.out.println( - "Found targets at " - + pipelineResult.targets.stream() - .map(TrackedTarget::getBestCameraToTarget3d) - .collect(Collectors.toList())); + private static final String LIFECAM_240P_CAL_FILE = "lifecam240p.json"; + private static final String LIFECAM_480P_CAL_FILE = "lifecam480p.json"; + + @BeforeEach + public void Init() { + TestUtils.loadLibraries(); + } + + @Test + public void loadCameraIntrinsics() { + var lifecam240pCal = getCoeffs(LIFECAM_240P_CAL_FILE); + var lifecam480pCal = getCoeffs(LIFECAM_480P_CAL_FILE); + + assertNotNull(lifecam240pCal); + checkCameraCoefficients(lifecam240pCal); + assertNotNull(lifecam480pCal); + checkCameraCoefficients(lifecam480pCal); + } + + private CameraCalibrationCoefficients getCoeffs(String filename) { + var cameraCalibration = TestUtils.getCoeffs(filename, false); + checkCameraCoefficients(cameraCalibration); + return cameraCalibration; + } + + private void checkCameraCoefficients(CameraCalibrationCoefficients 
cameraCalibration) { + assertNotNull(cameraCalibration); + assertEquals(3, cameraCalibration.cameraIntrinsics.rows); + assertEquals(3, cameraCalibration.cameraIntrinsics.cols); + assertEquals(3, cameraCalibration.cameraIntrinsics.getAsMat().rows()); + assertEquals(3, cameraCalibration.cameraIntrinsics.getAsMat().cols()); + assertEquals(3, cameraCalibration.cameraIntrinsics.getAsMatOfDouble().rows()); + assertEquals(3, cameraCalibration.cameraIntrinsics.getAsMatOfDouble().cols()); + assertEquals(3, cameraCalibration.getCameraIntrinsicsMat().rows()); + assertEquals(3, cameraCalibration.getCameraIntrinsicsMat().cols()); + assertEquals(1, cameraCalibration.distCoeffs.rows); + assertEquals(5, cameraCalibration.distCoeffs.cols); + assertEquals(1, cameraCalibration.distCoeffs.getAsMat().rows()); + assertEquals(5, cameraCalibration.distCoeffs.getAsMat().cols()); + assertEquals(1, cameraCalibration.distCoeffs.getAsMatOfDouble().rows()); + assertEquals(5, cameraCalibration.distCoeffs.getAsMatOfDouble().cols()); + assertEquals(1, cameraCalibration.getDistCoeffsMat().rows()); + assertEquals(5, cameraCalibration.getDistCoeffsMat().cols()); + } + + @Test + public void testCircle() { + var pipeline = new ColoredShapePipeline(); + + pipeline.getSettings().hsvHue.set(0, 100); + pipeline.getSettings().hsvSaturation.set(100, 255); + pipeline.getSettings().hsvValue.set(100, 255); + pipeline.getSettings().outputShouldDraw = true; + pipeline.getSettings().maxCannyThresh = 50; + pipeline.getSettings().circleAccuracy = 15; + pipeline.getSettings().circleDetectThreshold = 5; + pipeline.getSettings().solvePNPEnabled = true; + pipeline.getSettings().cornerDetectionAccuracyPercentage = 4; + pipeline.getSettings().cornerDetectionUseConvexHulls = true; + pipeline.getSettings().cameraCalibration = getCoeffs(LIFECAM_480P_CAL_FILE); + pipeline.getSettings().targetModel = TargetModel.kCircularPowerCell7in; + pipeline.getSettings().outputShouldDraw = true; + 
pipeline.getSettings().outputShowMultipleTargets = false; + pipeline.getSettings().contourGroupingMode = ContourGroupingMode.Single; + pipeline.getSettings().contourIntersection = ContourIntersectionDirection.Up; + pipeline.getSettings().contourShape = ContourShape.Circle; + pipeline.getSettings().circleDetectThreshold = 10; + pipeline.getSettings().contourRadius.setFirst(30); + pipeline.getSettings().accuracyPercentage = 30.0; + + var frameProvider = + new FileFrameProvider( + TestUtils.getPowercellImagePath(TestUtils.PowercellTestImages.kPowercell_test_6, false), + TestUtils.WPI2020Image.FOV, + TestUtils.get2020LifeCamCoeffs(true)); + frameProvider.requestFrameThresholdType(pipeline.getThresholdType()); + + CVPipelineResult pipelineResult = pipeline.run(frameProvider.get(), QuirkyCamera.DefaultCamera); + printTestResults(pipelineResult); + + TestUtils.showImage( + pipelineResult.inputAndOutputFrame.colorImage.getMat(), "Pipeline output", 999999); + } + + private static void continuouslyRunPipeline(Frame frame, ReflectivePipelineSettings settings) { + var pipeline = new ReflectivePipeline(); + pipeline.settings = settings; + + while (true) { + CVPipelineResult pipelineResult = pipeline.run(frame, QuirkyCamera.DefaultCamera); + printTestResults(pipelineResult); + int preRelease = CVMat.getMatCount(); + pipelineResult.release(); + int postRelease = CVMat.getMatCount(); + + System.out.printf("Pre: %d, Post: %d\n", preRelease, postRelease); } + } + + // used to run VisualVM for profiling, which won't run on unit tests. 
+ public static void main(String[] args) { + TestUtils.loadLibraries(); + var frameProvider = + new FileFrameProvider( + TestUtils.getWPIImagePath(TestUtils.WPI2019Image.kCargoStraightDark72in_HighRes, false), + TestUtils.WPI2019Image.FOV); + + var settings = new ReflectivePipelineSettings(); + settings.hsvHue.set(60, 100); + settings.hsvSaturation.set(100, 255); + settings.hsvValue.set(190, 255); + settings.outputShouldDraw = true; + settings.outputShowMultipleTargets = true; + settings.contourGroupingMode = ContourGroupingMode.Dual; + settings.contourIntersection = ContourIntersectionDirection.Up; + + continuouslyRunPipeline(frameProvider.get(), settings); + } + + private static void printTestResults(CVPipelineResult pipelineResult) { + double fps = 1000 / pipelineResult.getLatencyMillis(); + System.out.println( + "Pipeline ran in " + pipelineResult.getLatencyMillis() + "ms (" + fps + " " + "fps)"); + System.out.println("Found " + pipelineResult.targets.size() + " valid targets"); + System.out.println( + "Found targets at " + + pipelineResult.targets.stream() + .map(TrackedTarget::getBestCameraToTarget3d) + .collect(Collectors.toList())); + } } diff --git a/photon-core/src/test/java/org/photonvision/vision/pipeline/ColoredShapePipelineTest.java b/photon-core/src/test/java/org/photonvision/vision/pipeline/ColoredShapePipelineTest.java index b896185624..22da41004f 100644 --- a/photon-core/src/test/java/org/photonvision/vision/pipeline/ColoredShapePipelineTest.java +++ b/photon-core/src/test/java/org/photonvision/vision/pipeline/ColoredShapePipelineTest.java @@ -28,98 +28,98 @@ import org.photonvision.vision.pipeline.result.CVPipelineResult; public class ColoredShapePipelineTest { - public static void testTriangleDetection( - ColoredShapePipeline pipeline, ColoredShapePipelineSettings settings, Frame frame) { - pipeline.settings = settings; - CVPipelineResult colouredShapePipelineResult = pipeline.run(frame, QuirkyCamera.DefaultCamera); - TestUtils.showImage( - 
colouredShapePipelineResult.inputAndOutputFrame.processedImage.getMat(), - "Pipeline output: Triangle."); - printTestResults(colouredShapePipelineResult); - } + public static void testTriangleDetection( + ColoredShapePipeline pipeline, ColoredShapePipelineSettings settings, Frame frame) { + pipeline.settings = settings; + CVPipelineResult colouredShapePipelineResult = pipeline.run(frame, QuirkyCamera.DefaultCamera); + TestUtils.showImage( + colouredShapePipelineResult.inputAndOutputFrame.processedImage.getMat(), + "Pipeline output: Triangle."); + printTestResults(colouredShapePipelineResult); + } - public static void testQuadrilateralDetection( - ColoredShapePipeline pipeline, ColoredShapePipelineSettings settings, Frame frame) { - settings.contourShape = ContourShape.Quadrilateral; - pipeline.settings = settings; - CVPipelineResult colouredShapePipelineResult = pipeline.run(frame, QuirkyCamera.DefaultCamera); - TestUtils.showImage( - colouredShapePipelineResult.inputAndOutputFrame.processedImage.getMat(), - "Pipeline output: Quadrilateral."); - printTestResults(colouredShapePipelineResult); - } + public static void testQuadrilateralDetection( + ColoredShapePipeline pipeline, ColoredShapePipelineSettings settings, Frame frame) { + settings.contourShape = ContourShape.Quadrilateral; + pipeline.settings = settings; + CVPipelineResult colouredShapePipelineResult = pipeline.run(frame, QuirkyCamera.DefaultCamera); + TestUtils.showImage( + colouredShapePipelineResult.inputAndOutputFrame.processedImage.getMat(), + "Pipeline output: Quadrilateral."); + printTestResults(colouredShapePipelineResult); + } - public static void testCustomShapeDetection( - ColoredShapePipeline pipeline, ColoredShapePipelineSettings settings, Frame frame) { - settings.contourShape = ContourShape.Custom; - pipeline.settings = settings; - CVPipelineResult colouredShapePipelineResult = pipeline.run(frame, QuirkyCamera.DefaultCamera); - TestUtils.showImage( - 
colouredShapePipelineResult.inputAndOutputFrame.processedImage.getMat(), - "Pipeline output: Custom."); - printTestResults(colouredShapePipelineResult); - } + public static void testCustomShapeDetection( + ColoredShapePipeline pipeline, ColoredShapePipelineSettings settings, Frame frame) { + settings.contourShape = ContourShape.Custom; + pipeline.settings = settings; + CVPipelineResult colouredShapePipelineResult = pipeline.run(frame, QuirkyCamera.DefaultCamera); + TestUtils.showImage( + colouredShapePipelineResult.inputAndOutputFrame.processedImage.getMat(), + "Pipeline output: Custom."); + printTestResults(colouredShapePipelineResult); + } - @Test - public static void testCircleShapeDetection( - ColoredShapePipeline pipeline, ColoredShapePipelineSettings settings, Frame frame) { - settings.contourShape = ContourShape.Circle; - pipeline.settings = settings; - CVPipelineResult colouredShapePipelineResult = pipeline.run(frame, QuirkyCamera.DefaultCamera); - TestUtils.showImage( - colouredShapePipelineResult.inputAndOutputFrame.processedImage.getMat(), - "Pipeline output: Circle."); - printTestResults(colouredShapePipelineResult); - } + @Test + public static void testCircleShapeDetection( + ColoredShapePipeline pipeline, ColoredShapePipelineSettings settings, Frame frame) { + settings.contourShape = ContourShape.Circle; + pipeline.settings = settings; + CVPipelineResult colouredShapePipelineResult = pipeline.run(frame, QuirkyCamera.DefaultCamera); + TestUtils.showImage( + colouredShapePipelineResult.inputAndOutputFrame.processedImage.getMat(), + "Pipeline output: Circle."); + printTestResults(colouredShapePipelineResult); + } - @Test - public static void testPowercellDetection( - ColoredShapePipelineSettings settings, ColoredShapePipeline pipeline) { - settings.hsvHue.set(10, 40); - settings.hsvSaturation.set(100, 255); - settings.hsvValue.set(100, 255); - settings.maxCannyThresh = 50; - settings.circleAccuracy = 15; - settings.circleDetectThreshold = 5; - var 
frameProvider = - new FileFrameProvider( - TestUtils.getPowercellImagePath(TestUtils.PowercellTestImages.kPowercell_test_6, false), - TestUtils.WPI2019Image.FOV); - testCircleShapeDetection(pipeline, settings, frameProvider.get()); - } + @Test + public static void testPowercellDetection( + ColoredShapePipelineSettings settings, ColoredShapePipeline pipeline) { + settings.hsvHue.set(10, 40); + settings.hsvSaturation.set(100, 255); + settings.hsvValue.set(100, 255); + settings.maxCannyThresh = 50; + settings.circleAccuracy = 15; + settings.circleDetectThreshold = 5; + var frameProvider = + new FileFrameProvider( + TestUtils.getPowercellImagePath(TestUtils.PowercellTestImages.kPowercell_test_6, false), + TestUtils.WPI2019Image.FOV); + testCircleShapeDetection(pipeline, settings, frameProvider.get()); + } - public static void main(String[] args) { - TestUtils.loadLibraries(); - System.out.println( - TestUtils.getWPIImagePath(TestUtils.WPI2020Image.kBlueGoal_108in_Center, false)); - var frameProvider = - new FileFrameProvider( - TestUtils.getPolygonImagePath(TestUtils.PolygonTestImages.kPolygons, false), - TestUtils.WPI2019Image.FOV); - var settings = new ColoredShapePipelineSettings(); - settings.hsvHue.set(0, 100); - settings.hsvSaturation.set(100, 255); - settings.hsvValue.set(100, 255); - settings.outputShouldDraw = true; - settings.outputShowMultipleTargets = true; - settings.contourGroupingMode = ContourGroupingMode.Single; - settings.contourIntersection = ContourIntersectionDirection.Up; - settings.contourShape = ContourShape.Triangle; - settings.circleDetectThreshold = 10; - settings.accuracyPercentage = 30.0; + public static void main(String[] args) { + TestUtils.loadLibraries(); + System.out.println( + TestUtils.getWPIImagePath(TestUtils.WPI2020Image.kBlueGoal_108in_Center, false)); + var frameProvider = + new FileFrameProvider( + TestUtils.getPolygonImagePath(TestUtils.PolygonTestImages.kPolygons, false), + TestUtils.WPI2019Image.FOV); + var settings = new 
ColoredShapePipelineSettings(); + settings.hsvHue.set(0, 100); + settings.hsvSaturation.set(100, 255); + settings.hsvValue.set(100, 255); + settings.outputShouldDraw = true; + settings.outputShowMultipleTargets = true; + settings.contourGroupingMode = ContourGroupingMode.Single; + settings.contourIntersection = ContourIntersectionDirection.Up; + settings.contourShape = ContourShape.Triangle; + settings.circleDetectThreshold = 10; + settings.accuracyPercentage = 30.0; - ColoredShapePipeline pipeline = new ColoredShapePipeline(); - testTriangleDetection(pipeline, settings, frameProvider.get()); - testQuadrilateralDetection(pipeline, settings, frameProvider.get()); - testCustomShapeDetection(pipeline, settings, frameProvider.get()); - // testCircleShapeDetection(pipeline, settings, frameProvider.get()); - // testPowercellDetection(settings, pipeline); - } + ColoredShapePipeline pipeline = new ColoredShapePipeline(); + testTriangleDetection(pipeline, settings, frameProvider.get()); + testQuadrilateralDetection(pipeline, settings, frameProvider.get()); + testCustomShapeDetection(pipeline, settings, frameProvider.get()); + // testCircleShapeDetection(pipeline, settings, frameProvider.get()); + // testPowercellDetection(settings, pipeline); + } - private static void printTestResults(CVPipelineResult pipelineResult) { - double fps = 1000 / pipelineResult.getLatencyMillis(); - System.out.print( - "Pipeline ran in " + pipelineResult.getLatencyMillis() + "ms (" + fps + " fps), "); - System.out.println("Found " + pipelineResult.targets.size() + " valid targets"); - } + private static void printTestResults(CVPipelineResult pipelineResult) { + double fps = 1000 / pipelineResult.getLatencyMillis(); + System.out.print( + "Pipeline ran in " + pipelineResult.getLatencyMillis() + "ms (" + fps + " fps), "); + System.out.println("Found " + pipelineResult.targets.size() + " valid targets"); + } } diff --git 
a/photon-core/src/test/java/org/photonvision/vision/pipeline/PipelineProfilerTest.java b/photon-core/src/test/java/org/photonvision/vision/pipeline/PipelineProfilerTest.java index dbabd15e63..79d75b5f1f 100644 --- a/photon-core/src/test/java/org/photonvision/vision/pipeline/PipelineProfilerTest.java +++ b/photon-core/src/test/java/org/photonvision/vision/pipeline/PipelineProfilerTest.java @@ -21,21 +21,21 @@ import org.junit.jupiter.api.Test; public class PipelineProfilerTest { - @Test - public void reflectiveProfile() { - long[] invalidNanos = new long[20]; - long[] validNanos = new long[PipelineProfiler.ReflectivePipeCount]; + @Test + public void reflectiveProfile() { + long[] invalidNanos = new long[20]; + long[] validNanos = new long[PipelineProfiler.ReflectivePipeCount]; - for (int i = 0; i < validNanos.length; i++) { - validNanos[i] = (long) (i * 1e+6); // fill data - } + for (int i = 0; i < validNanos.length; i++) { + validNanos[i] = (long) (i * 1e+6); // fill data + } - var invalidResult = PipelineProfiler.getReflectiveProfileString(invalidNanos); - var validResult = PipelineProfiler.getReflectiveProfileString(validNanos); + var invalidResult = PipelineProfiler.getReflectiveProfileString(invalidNanos); + var validResult = PipelineProfiler.getReflectiveProfileString(validNanos); - System.out.println(validResult); + System.out.println(validResult); - Assertions.assertEquals("Invalid data", invalidResult); - Assertions.assertTrue(validResult.contains("Total: 45.0ms")); - } + Assertions.assertEquals("Invalid data", invalidResult); + Assertions.assertTrue(validResult.contains("Total: 45.0ms")); + } } diff --git a/photon-core/src/test/java/org/photonvision/vision/pipeline/ReflectivePipelineTest.java b/photon-core/src/test/java/org/photonvision/vision/pipeline/ReflectivePipelineTest.java index 0817cf37dc..1c6f109b82 100644 --- a/photon-core/src/test/java/org/photonvision/vision/pipeline/ReflectivePipelineTest.java +++ 
b/photon-core/src/test/java/org/photonvision/vision/pipeline/ReflectivePipelineTest.java @@ -30,104 +30,104 @@ import org.photonvision.vision.pipeline.result.CVPipelineResult; public class ReflectivePipelineTest { - @Test - public void test2019() { - TestUtils.loadLibraries(); - var pipeline = new ReflectivePipeline(); - pipeline.getSettings().hsvHue.set(60, 100); - pipeline.getSettings().hsvSaturation.set(100, 255); - pipeline.getSettings().hsvValue.set(190, 255); - pipeline.getSettings().outputShouldDraw = true; - pipeline.getSettings().outputShowMultipleTargets = true; - pipeline.getSettings().contourGroupingMode = ContourGroupingMode.Dual; - pipeline.getSettings().contourIntersection = ContourIntersectionDirection.Up; - - var frameProvider = - new FileFrameProvider( - TestUtils.getWPIImagePath(TestUtils.WPI2019Image.kCargoStraightDark72in_HighRes, false), - TestUtils.WPI2019Image.FOV); - frameProvider.requestFrameThresholdType(pipeline.getThresholdType()); - var hsvParams = - new HSVPipe.HSVParams( - pipeline.getSettings().hsvHue, - pipeline.getSettings().hsvSaturation, - pipeline.getSettings().hsvValue, - pipeline.getSettings().hueInverted); - frameProvider.requestHsvSettings(hsvParams); - - TestUtils.showImage(frameProvider.get().colorImage.getMat(), "Pipeline input", 1); - - CVPipelineResult pipelineResult; - - pipelineResult = pipeline.run(frameProvider.get(), QuirkyCamera.DefaultCamera); - printTestResults(pipelineResult); - - Assertions.assertTrue(pipelineResult.hasTargets()); - Assertions.assertEquals(2, pipelineResult.targets.size(), "Target count wrong!"); - - TestUtils.showImage(pipelineResult.inputAndOutputFrame.colorImage.getMat(), "Pipeline output"); - } - - @Test - public void test2020() { - TestUtils.loadLibraries(); - var pipeline = new ReflectivePipeline(); - - pipeline.getSettings().hsvHue.set(60, 100); - pipeline.getSettings().hsvSaturation.set(200, 255); - pipeline.getSettings().hsvValue.set(200, 255); - 
pipeline.getSettings().outputShouldDraw = true; - - var frameProvider = - new FileFrameProvider( - TestUtils.getWPIImagePath(TestUtils.WPI2020Image.kBlueGoal_108in_Center, false), - TestUtils.WPI2020Image.FOV); - - CVPipelineResult pipelineResult = pipeline.run(frameProvider.get(), QuirkyCamera.DefaultCamera); - printTestResults(pipelineResult); - - TestUtils.showImage( - pipelineResult.inputAndOutputFrame.processedImage.getMat(), "Pipeline output"); - } - - private static void continuouslyRunPipeline(Frame frame, ReflectivePipelineSettings settings) { - var pipeline = new ReflectivePipeline(); - - while (true) { - CVPipelineResult pipelineResult = pipeline.run(frame, QuirkyCamera.DefaultCamera); - printTestResults(pipelineResult); - int preRelease = CVMat.getMatCount(); - pipelineResult.release(); - int postRelease = CVMat.getMatCount(); - - System.out.printf("Pre: %d, Post: %d\n", preRelease, postRelease); - } - } - - // used to run VisualVM for profiling. It won't run on unit tests. - public static void main(String[] args) { - TestUtils.loadLibraries(); - var frameProvider = - new FileFrameProvider( - TestUtils.getWPIImagePath(TestUtils.WPI2019Image.kCargoStraightDark72in_HighRes, false), - TestUtils.WPI2019Image.FOV); - - var settings = new ReflectivePipelineSettings(); - settings.hsvHue.set(60, 100); - settings.hsvSaturation.set(100, 255); - settings.hsvValue.set(190, 255); - settings.outputShouldDraw = true; - settings.outputShowMultipleTargets = true; - settings.contourGroupingMode = ContourGroupingMode.Dual; - settings.contourIntersection = ContourIntersectionDirection.Up; - - continuouslyRunPipeline(frameProvider.get(), settings); - } - - private static void printTestResults(CVPipelineResult pipelineResult) { - double fps = 1000 / pipelineResult.getLatencyMillis(); - System.out.print( - "Pipeline ran in " + pipelineResult.getLatencyMillis() + "ms (" + fps + " fps), "); - System.out.println("Found " + pipelineResult.targets.size() + " valid targets"); + 
@Test + public void test2019() { + TestUtils.loadLibraries(); + var pipeline = new ReflectivePipeline(); + pipeline.getSettings().hsvHue.set(60, 100); + pipeline.getSettings().hsvSaturation.set(100, 255); + pipeline.getSettings().hsvValue.set(190, 255); + pipeline.getSettings().outputShouldDraw = true; + pipeline.getSettings().outputShowMultipleTargets = true; + pipeline.getSettings().contourGroupingMode = ContourGroupingMode.Dual; + pipeline.getSettings().contourIntersection = ContourIntersectionDirection.Up; + + var frameProvider = + new FileFrameProvider( + TestUtils.getWPIImagePath(TestUtils.WPI2019Image.kCargoStraightDark72in_HighRes, false), + TestUtils.WPI2019Image.FOV); + frameProvider.requestFrameThresholdType(pipeline.getThresholdType()); + var hsvParams = + new HSVPipe.HSVParams( + pipeline.getSettings().hsvHue, + pipeline.getSettings().hsvSaturation, + pipeline.getSettings().hsvValue, + pipeline.getSettings().hueInverted); + frameProvider.requestHsvSettings(hsvParams); + + TestUtils.showImage(frameProvider.get().colorImage.getMat(), "Pipeline input", 1); + + CVPipelineResult pipelineResult; + + pipelineResult = pipeline.run(frameProvider.get(), QuirkyCamera.DefaultCamera); + printTestResults(pipelineResult); + + Assertions.assertTrue(pipelineResult.hasTargets()); + Assertions.assertEquals(2, pipelineResult.targets.size(), "Target count wrong!"); + + TestUtils.showImage(pipelineResult.inputAndOutputFrame.colorImage.getMat(), "Pipeline output"); + } + + @Test + public void test2020() { + TestUtils.loadLibraries(); + var pipeline = new ReflectivePipeline(); + + pipeline.getSettings().hsvHue.set(60, 100); + pipeline.getSettings().hsvSaturation.set(200, 255); + pipeline.getSettings().hsvValue.set(200, 255); + pipeline.getSettings().outputShouldDraw = true; + + var frameProvider = + new FileFrameProvider( + TestUtils.getWPIImagePath(TestUtils.WPI2020Image.kBlueGoal_108in_Center, false), + TestUtils.WPI2020Image.FOV); + + CVPipelineResult pipelineResult = 
pipeline.run(frameProvider.get(), QuirkyCamera.DefaultCamera); + printTestResults(pipelineResult); + + TestUtils.showImage( + pipelineResult.inputAndOutputFrame.processedImage.getMat(), "Pipeline output"); + } + + private static void continuouslyRunPipeline(Frame frame, ReflectivePipelineSettings settings) { + var pipeline = new ReflectivePipeline(); + + while (true) { + CVPipelineResult pipelineResult = pipeline.run(frame, QuirkyCamera.DefaultCamera); + printTestResults(pipelineResult); + int preRelease = CVMat.getMatCount(); + pipelineResult.release(); + int postRelease = CVMat.getMatCount(); + + System.out.printf("Pre: %d, Post: %d\n", preRelease, postRelease); } + } + + // used to run VisualVM for profiling. It won't run on unit tests. + public static void main(String[] args) { + TestUtils.loadLibraries(); + var frameProvider = + new FileFrameProvider( + TestUtils.getWPIImagePath(TestUtils.WPI2019Image.kCargoStraightDark72in_HighRes, false), + TestUtils.WPI2019Image.FOV); + + var settings = new ReflectivePipelineSettings(); + settings.hsvHue.set(60, 100); + settings.hsvSaturation.set(100, 255); + settings.hsvValue.set(190, 255); + settings.outputShouldDraw = true; + settings.outputShowMultipleTargets = true; + settings.contourGroupingMode = ContourGroupingMode.Dual; + settings.contourIntersection = ContourIntersectionDirection.Up; + + continuouslyRunPipeline(frameProvider.get(), settings); + } + + private static void printTestResults(CVPipelineResult pipelineResult) { + double fps = 1000 / pipelineResult.getLatencyMillis(); + System.out.print( + "Pipeline ran in " + pipelineResult.getLatencyMillis() + "ms (" + fps + " fps), "); + System.out.println("Found " + pipelineResult.targets.size() + " valid targets"); + } } diff --git a/photon-core/src/test/java/org/photonvision/vision/pipeline/SolvePNPTest.java b/photon-core/src/test/java/org/photonvision/vision/pipeline/SolvePNPTest.java index 03bcc2535d..5bc2541187 100644 --- 
a/photon-core/src/test/java/org/photonvision/vision/pipeline/SolvePNPTest.java +++ b/photon-core/src/test/java/org/photonvision/vision/pipeline/SolvePNPTest.java @@ -41,211 +41,211 @@ import org.photonvision.vision.target.TrackedTarget; public class SolvePNPTest { - private static final String LIFECAM_240P_CAL_FILE = "lifecam240p.json"; - private static final String LIFECAM_480P_CAL_FILE = "lifecam480p.json"; - - @BeforeEach - public void Init() { - TestUtils.loadLibraries(); - } - - @Test - public void loadCameraIntrinsics() { - var lifecam240pCal = getCoeffs(LIFECAM_240P_CAL_FILE); - var lifecam480pCal = getCoeffs(LIFECAM_480P_CAL_FILE); - - assertNotNull(lifecam240pCal); - checkCameraCoefficients(lifecam240pCal); - assertNotNull(lifecam480pCal); - checkCameraCoefficients(lifecam480pCal); - } - - private CameraCalibrationCoefficients getCoeffs(String filename) { - var cameraCalibration = TestUtils.getCoeffs(filename, false); - checkCameraCoefficients(cameraCalibration); - return cameraCalibration; - } - - private void checkCameraCoefficients(CameraCalibrationCoefficients cameraCalibration) { - assertNotNull(cameraCalibration); - assertEquals(3, cameraCalibration.cameraIntrinsics.rows); - assertEquals(3, cameraCalibration.cameraIntrinsics.cols); - assertEquals(3, cameraCalibration.cameraIntrinsics.getAsMat().rows()); - assertEquals(3, cameraCalibration.cameraIntrinsics.getAsMat().cols()); - assertEquals(3, cameraCalibration.cameraIntrinsics.getAsMatOfDouble().rows()); - assertEquals(3, cameraCalibration.cameraIntrinsics.getAsMatOfDouble().cols()); - assertEquals(3, cameraCalibration.getCameraIntrinsicsMat().rows()); - assertEquals(3, cameraCalibration.getCameraIntrinsicsMat().cols()); - assertEquals(1, cameraCalibration.distCoeffs.rows); - assertEquals(5, cameraCalibration.distCoeffs.cols); - assertEquals(1, cameraCalibration.distCoeffs.getAsMat().rows()); - assertEquals(5, cameraCalibration.distCoeffs.getAsMat().cols()); - assertEquals(1, 
cameraCalibration.distCoeffs.getAsMatOfDouble().rows()); - assertEquals(5, cameraCalibration.distCoeffs.getAsMatOfDouble().cols()); - assertEquals(1, cameraCalibration.getDistCoeffsMat().rows()); - assertEquals(5, cameraCalibration.getDistCoeffsMat().cols()); - } - - @Test - public void test2019() { - var pipeline = new ReflectivePipeline(); - - pipeline.getSettings().hsvHue.set(60, 100); - pipeline.getSettings().hsvSaturation.set(100, 255); - pipeline.getSettings().hsvValue.set(190, 255); - pipeline.getSettings().outputShouldDraw = true; - pipeline.getSettings().outputShowMultipleTargets = true; - pipeline.getSettings().solvePNPEnabled = true; - pipeline.getSettings().contourGroupingMode = ContourGroupingMode.Dual; - pipeline.getSettings().contourIntersection = ContourIntersectionDirection.Up; - pipeline.getSettings().cornerDetectionUseConvexHulls = true; - pipeline.getSettings().targetModel = TargetModel.k2019DualTarget; - - var frameProvider = - new FileFrameProvider( - TestUtils.getWPIImagePath(TestUtils.WPI2019Image.kCargoStraightDark48in, false), - TestUtils.WPI2019Image.FOV, - TestUtils.get2019LifeCamCoeffs(false)); - - frameProvider.requestFrameThresholdType(pipeline.getThresholdType()); - var hsvParams = - new HSVPipe.HSVParams( - pipeline.getSettings().hsvHue, - pipeline.getSettings().hsvSaturation, - pipeline.getSettings().hsvValue, - pipeline.getSettings().hueInverted); - frameProvider.requestHsvSettings(hsvParams); - - CVPipelineResult pipelineResult = pipeline.run(frameProvider.get(), QuirkyCamera.DefaultCamera); - printTestResults(pipelineResult); - - // Draw on input - var outputPipe = new OutputStreamPipeline(); - outputPipe.process( - pipelineResult.inputAndOutputFrame, pipeline.getSettings(), pipelineResult.targets); - - TestUtils.showImage( - pipelineResult.inputAndOutputFrame.processedImage.getMat(), "Pipeline output", 999999); - - var pose = pipelineResult.targets.get(0).getBestCameraToTarget3d(); - // these numbers are not *accurate*, but 
they are known and expected - var expectedTrl = new Translation3d(1.1, -0.05, -0.05); - assertTrue( - expectedTrl.getDistance(pose.getTranslation()) < 0.05, - "SolvePNP translation estimation failed"); - // We expect the object axes to be in NWU, with the x-axis coming out of the tag - // This target is facing the camera almost parallel, so in world space: - // The object's X axis should be (-1, 0, 0) - assertEquals(-1, new Translation3d(1, 0, 0).rotateBy(pose.getRotation()).getX(), 0.05); - // The object's Y axis should be (0, -1, 0) - assertEquals(-1, new Translation3d(0, 1, 0).rotateBy(pose.getRotation()).getY(), 0.05); - // The object's Z axis should be (0, 0, 1) - assertEquals(1, new Translation3d(0, 0, 1).rotateBy(pose.getRotation()).getZ(), 0.05); - } - - @Test - public void test2020() { - var pipeline = new ReflectivePipeline(); - - pipeline.getSettings().hsvHue.set(60, 100); - pipeline.getSettings().hsvSaturation.set(100, 255); - pipeline.getSettings().hsvValue.set(60, 255); - pipeline.getSettings().outputShouldDraw = true; - pipeline.getSettings().solvePNPEnabled = true; - pipeline.getSettings().cornerDetectionAccuracyPercentage = 4; - pipeline.getSettings().cornerDetectionUseConvexHulls = true; - pipeline.getSettings().targetModel = TargetModel.k2020HighGoalOuter; - - var frameProvider = - new FileFrameProvider( - TestUtils.getWPIImagePath(TestUtils.WPI2020Image.kBlueGoal_224in_Left, false), - TestUtils.WPI2020Image.FOV, - TestUtils.get2020LifeCamCoeffs(false)); - - frameProvider.requestFrameThresholdType(pipeline.getThresholdType()); - var hsvParams = - new HSVPipe.HSVParams( - pipeline.getSettings().hsvHue, - pipeline.getSettings().hsvSaturation, - pipeline.getSettings().hsvValue, - pipeline.getSettings().hueInverted); - frameProvider.requestHsvSettings(hsvParams); - - CVPipelineResult pipelineResult = pipeline.run(frameProvider.get(), QuirkyCamera.DefaultCamera); - printTestResults(pipelineResult); - - // Draw on input - var outputPipe = new 
OutputStreamPipeline(); - outputPipe.process( - pipelineResult.inputAndOutputFrame, pipeline.getSettings(), pipelineResult.targets); - - TestUtils.showImage( - pipelineResult.inputAndOutputFrame.processedImage.getMat(), "Pipeline output", 999999); - - var pose = pipelineResult.targets.get(0).getBestCameraToTarget3d(); - // these numbers are not *accurate*, but they are known and expected - var expectedTrl = - new Translation3d( - Units.inchesToMeters(236), Units.inchesToMeters(36), Units.inchesToMeters(-53)); - assertTrue( - expectedTrl.getDistance(pose.getTranslation()) < 0.05, - "SolvePNP translation estimation failed"); - // We expect the object axes to be in NWU, with the x-axis coming out of the tag - // Rotation around Z axis (yaw) should be mostly facing us - var xAxis = new Translation3d(1, 0, 0); - var yAxis = new Translation3d(0, 1, 0); - var zAxis = new Translation3d(0, 0, 1); - var expectedRot = - new Rotation3d(Math.toRadians(-20), Math.toRadians(-20), Math.toRadians(-120)); - assertTrue(xAxis.rotateBy(expectedRot).getDistance(xAxis.rotateBy(pose.getRotation())) < 0.1); - assertTrue(yAxis.rotateBy(expectedRot).getDistance(yAxis.rotateBy(pose.getRotation())) < 0.1); - assertTrue(zAxis.rotateBy(expectedRot).getDistance(zAxis.rotateBy(pose.getRotation())) < 0.1); - } - - private static void continuouslyRunPipeline(Frame frame, ReflectivePipelineSettings settings) { - var pipeline = new ReflectivePipeline(); - pipeline.settings = settings; - - while (true) { - CVPipelineResult pipelineResult = pipeline.run(frame, QuirkyCamera.DefaultCamera); - printTestResults(pipelineResult); - int preRelease = CVMat.getMatCount(); - pipelineResult.release(); - int postRelease = CVMat.getMatCount(); - - System.out.printf("Pre: %d, Post: %d\n", preRelease, postRelease); - } - } - - // used to run VisualVM for profiling, which won't run on unit tests. 
- public static void main(String[] args) { - TestUtils.loadLibraries(); - var frameProvider = - new FileFrameProvider( - TestUtils.getWPIImagePath(TestUtils.WPI2019Image.kCargoStraightDark72in_HighRes, false), - TestUtils.WPI2019Image.FOV); - - var settings = new ReflectivePipelineSettings(); - settings.hsvHue.set(60, 100); - settings.hsvSaturation.set(100, 255); - settings.hsvValue.set(190, 255); - settings.outputShouldDraw = true; - settings.outputShowMultipleTargets = true; - settings.contourGroupingMode = ContourGroupingMode.Dual; - settings.contourIntersection = ContourIntersectionDirection.Up; - - continuouslyRunPipeline(frameProvider.get(), settings); - } - - private static void printTestResults(CVPipelineResult pipelineResult) { - double fps = 1000 / pipelineResult.getLatencyMillis(); - System.out.println( - "Pipeline ran in " + pipelineResult.getLatencyMillis() + "ms (" + fps + " " + "fps)"); - System.out.println("Found " + pipelineResult.targets.size() + " valid targets"); - System.out.println( - "Found targets at " - + pipelineResult.targets.stream() - .map(TrackedTarget::getBestCameraToTarget3d) - .collect(Collectors.toList())); + private static final String LIFECAM_240P_CAL_FILE = "lifecam240p.json"; + private static final String LIFECAM_480P_CAL_FILE = "lifecam480p.json"; + + @BeforeEach + public void Init() { + TestUtils.loadLibraries(); + } + + @Test + public void loadCameraIntrinsics() { + var lifecam240pCal = getCoeffs(LIFECAM_240P_CAL_FILE); + var lifecam480pCal = getCoeffs(LIFECAM_480P_CAL_FILE); + + assertNotNull(lifecam240pCal); + checkCameraCoefficients(lifecam240pCal); + assertNotNull(lifecam480pCal); + checkCameraCoefficients(lifecam480pCal); + } + + private CameraCalibrationCoefficients getCoeffs(String filename) { + var cameraCalibration = TestUtils.getCoeffs(filename, false); + checkCameraCoefficients(cameraCalibration); + return cameraCalibration; + } + + private void checkCameraCoefficients(CameraCalibrationCoefficients 
cameraCalibration) { + assertNotNull(cameraCalibration); + assertEquals(3, cameraCalibration.cameraIntrinsics.rows); + assertEquals(3, cameraCalibration.cameraIntrinsics.cols); + assertEquals(3, cameraCalibration.cameraIntrinsics.getAsMat().rows()); + assertEquals(3, cameraCalibration.cameraIntrinsics.getAsMat().cols()); + assertEquals(3, cameraCalibration.cameraIntrinsics.getAsMatOfDouble().rows()); + assertEquals(3, cameraCalibration.cameraIntrinsics.getAsMatOfDouble().cols()); + assertEquals(3, cameraCalibration.getCameraIntrinsicsMat().rows()); + assertEquals(3, cameraCalibration.getCameraIntrinsicsMat().cols()); + assertEquals(1, cameraCalibration.distCoeffs.rows); + assertEquals(5, cameraCalibration.distCoeffs.cols); + assertEquals(1, cameraCalibration.distCoeffs.getAsMat().rows()); + assertEquals(5, cameraCalibration.distCoeffs.getAsMat().cols()); + assertEquals(1, cameraCalibration.distCoeffs.getAsMatOfDouble().rows()); + assertEquals(5, cameraCalibration.distCoeffs.getAsMatOfDouble().cols()); + assertEquals(1, cameraCalibration.getDistCoeffsMat().rows()); + assertEquals(5, cameraCalibration.getDistCoeffsMat().cols()); + } + + @Test + public void test2019() { + var pipeline = new ReflectivePipeline(); + + pipeline.getSettings().hsvHue.set(60, 100); + pipeline.getSettings().hsvSaturation.set(100, 255); + pipeline.getSettings().hsvValue.set(190, 255); + pipeline.getSettings().outputShouldDraw = true; + pipeline.getSettings().outputShowMultipleTargets = true; + pipeline.getSettings().solvePNPEnabled = true; + pipeline.getSettings().contourGroupingMode = ContourGroupingMode.Dual; + pipeline.getSettings().contourIntersection = ContourIntersectionDirection.Up; + pipeline.getSettings().cornerDetectionUseConvexHulls = true; + pipeline.getSettings().targetModel = TargetModel.k2019DualTarget; + + var frameProvider = + new FileFrameProvider( + TestUtils.getWPIImagePath(TestUtils.WPI2019Image.kCargoStraightDark48in, false), + TestUtils.WPI2019Image.FOV, + 
TestUtils.get2019LifeCamCoeffs(false)); + + frameProvider.requestFrameThresholdType(pipeline.getThresholdType()); + var hsvParams = + new HSVPipe.HSVParams( + pipeline.getSettings().hsvHue, + pipeline.getSettings().hsvSaturation, + pipeline.getSettings().hsvValue, + pipeline.getSettings().hueInverted); + frameProvider.requestHsvSettings(hsvParams); + + CVPipelineResult pipelineResult = pipeline.run(frameProvider.get(), QuirkyCamera.DefaultCamera); + printTestResults(pipelineResult); + + // Draw on input + var outputPipe = new OutputStreamPipeline(); + outputPipe.process( + pipelineResult.inputAndOutputFrame, pipeline.getSettings(), pipelineResult.targets); + + TestUtils.showImage( + pipelineResult.inputAndOutputFrame.processedImage.getMat(), "Pipeline output", 999999); + + var pose = pipelineResult.targets.get(0).getBestCameraToTarget3d(); + // these numbers are not *accurate*, but they are known and expected + var expectedTrl = new Translation3d(1.1, -0.05, -0.05); + assertTrue( + expectedTrl.getDistance(pose.getTranslation()) < 0.05, + "SolvePNP translation estimation failed"); + // We expect the object axes to be in NWU, with the x-axis coming out of the tag + // This target is facing the camera almost parallel, so in world space: + // The object's X axis should be (-1, 0, 0) + assertEquals(-1, new Translation3d(1, 0, 0).rotateBy(pose.getRotation()).getX(), 0.05); + // The object's Y axis should be (0, -1, 0) + assertEquals(-1, new Translation3d(0, 1, 0).rotateBy(pose.getRotation()).getY(), 0.05); + // The object's Z axis should be (0, 0, 1) + assertEquals(1, new Translation3d(0, 0, 1).rotateBy(pose.getRotation()).getZ(), 0.05); + } + + @Test + public void test2020() { + var pipeline = new ReflectivePipeline(); + + pipeline.getSettings().hsvHue.set(60, 100); + pipeline.getSettings().hsvSaturation.set(100, 255); + pipeline.getSettings().hsvValue.set(60, 255); + pipeline.getSettings().outputShouldDraw = true; + pipeline.getSettings().solvePNPEnabled = true; + 
pipeline.getSettings().cornerDetectionAccuracyPercentage = 4; + pipeline.getSettings().cornerDetectionUseConvexHulls = true; + pipeline.getSettings().targetModel = TargetModel.k2020HighGoalOuter; + + var frameProvider = + new FileFrameProvider( + TestUtils.getWPIImagePath(TestUtils.WPI2020Image.kBlueGoal_224in_Left, false), + TestUtils.WPI2020Image.FOV, + TestUtils.get2020LifeCamCoeffs(false)); + + frameProvider.requestFrameThresholdType(pipeline.getThresholdType()); + var hsvParams = + new HSVPipe.HSVParams( + pipeline.getSettings().hsvHue, + pipeline.getSettings().hsvSaturation, + pipeline.getSettings().hsvValue, + pipeline.getSettings().hueInverted); + frameProvider.requestHsvSettings(hsvParams); + + CVPipelineResult pipelineResult = pipeline.run(frameProvider.get(), QuirkyCamera.DefaultCamera); + printTestResults(pipelineResult); + + // Draw on input + var outputPipe = new OutputStreamPipeline(); + outputPipe.process( + pipelineResult.inputAndOutputFrame, pipeline.getSettings(), pipelineResult.targets); + + TestUtils.showImage( + pipelineResult.inputAndOutputFrame.processedImage.getMat(), "Pipeline output", 999999); + + var pose = pipelineResult.targets.get(0).getBestCameraToTarget3d(); + // these numbers are not *accurate*, but they are known and expected + var expectedTrl = + new Translation3d( + Units.inchesToMeters(236), Units.inchesToMeters(36), Units.inchesToMeters(-53)); + assertTrue( + expectedTrl.getDistance(pose.getTranslation()) < 0.05, + "SolvePNP translation estimation failed"); + // We expect the object axes to be in NWU, with the x-axis coming out of the tag + // Rotation around Z axis (yaw) should be mostly facing us + var xAxis = new Translation3d(1, 0, 0); + var yAxis = new Translation3d(0, 1, 0); + var zAxis = new Translation3d(0, 0, 1); + var expectedRot = + new Rotation3d(Math.toRadians(-20), Math.toRadians(-20), Math.toRadians(-120)); + assertTrue(xAxis.rotateBy(expectedRot).getDistance(xAxis.rotateBy(pose.getRotation())) < 0.1); + 
assertTrue(yAxis.rotateBy(expectedRot).getDistance(yAxis.rotateBy(pose.getRotation())) < 0.1); + assertTrue(zAxis.rotateBy(expectedRot).getDistance(zAxis.rotateBy(pose.getRotation())) < 0.1); + } + + private static void continuouslyRunPipeline(Frame frame, ReflectivePipelineSettings settings) { + var pipeline = new ReflectivePipeline(); + pipeline.settings = settings; + + while (true) { + CVPipelineResult pipelineResult = pipeline.run(frame, QuirkyCamera.DefaultCamera); + printTestResults(pipelineResult); + int preRelease = CVMat.getMatCount(); + pipelineResult.release(); + int postRelease = CVMat.getMatCount(); + + System.out.printf("Pre: %d, Post: %d\n", preRelease, postRelease); } + } + + // used to run VisualVM for profiling, which won't run on unit tests. + public static void main(String[] args) { + TestUtils.loadLibraries(); + var frameProvider = + new FileFrameProvider( + TestUtils.getWPIImagePath(TestUtils.WPI2019Image.kCargoStraightDark72in_HighRes, false), + TestUtils.WPI2019Image.FOV); + + var settings = new ReflectivePipelineSettings(); + settings.hsvHue.set(60, 100); + settings.hsvSaturation.set(100, 255); + settings.hsvValue.set(190, 255); + settings.outputShouldDraw = true; + settings.outputShowMultipleTargets = true; + settings.contourGroupingMode = ContourGroupingMode.Dual; + settings.contourIntersection = ContourIntersectionDirection.Up; + + continuouslyRunPipeline(frameProvider.get(), settings); + } + + private static void printTestResults(CVPipelineResult pipelineResult) { + double fps = 1000 / pipelineResult.getLatencyMillis(); + System.out.println( + "Pipeline ran in " + pipelineResult.getLatencyMillis() + "ms (" + fps + " " + "fps)"); + System.out.println("Found " + pipelineResult.targets.size() + " valid targets"); + System.out.println( + "Found targets at " + + pipelineResult.targets.stream() + .map(TrackedTarget::getBestCameraToTarget3d) + .collect(Collectors.toList())); + } } diff --git 
a/photon-core/src/test/java/org/photonvision/vision/processes/PipelineManagerTest.java b/photon-core/src/test/java/org/photonvision/vision/processes/PipelineManagerTest.java index 7b90ce027d..af1cf02cf5 100644 --- a/photon-core/src/test/java/org/photonvision/vision/processes/PipelineManagerTest.java +++ b/photon-core/src/test/java/org/photonvision/vision/processes/PipelineManagerTest.java @@ -26,36 +26,36 @@ import org.photonvision.vision.pipeline.PipelineType; public class PipelineManagerTest { - @Test - public void testUniqueName() { - TestUtils.loadLibraries(); - PipelineManager manager = new PipelineManager(new DriverModePipelineSettings(), List.of()); - manager.addPipeline(PipelineType.Reflective, "Another"); + @Test + public void testUniqueName() { + TestUtils.loadLibraries(); + PipelineManager manager = new PipelineManager(new DriverModePipelineSettings(), List.of()); + manager.addPipeline(PipelineType.Reflective, "Another"); - // We now have ["New Pipeline", "Another"] - // After we duplicate 0 and 1, we expect ["New Pipeline", "Another", "New Pipeline (1)", - // "Another (1)"] - manager.duplicatePipeline(0); - manager.duplicatePipeline(1); + // We now have ["New Pipeline", "Another"] + // After we duplicate 0 and 1, we expect ["New Pipeline", "Another", "New Pipeline (1)", + // "Another (1)"] + manager.duplicatePipeline(0); + manager.duplicatePipeline(1); - // Should add "Another (2)" - manager.duplicatePipeline(3); - // Should add "Another (3) - manager.duplicatePipeline(3); - // Should add "Another (4) - manager.duplicatePipeline(1); + // Should add "Another (2)" + manager.duplicatePipeline(3); + // Should add "Another (3) + manager.duplicatePipeline(3); + // Should add "Another (4) + manager.duplicatePipeline(1); - // Should add "Another (5)" through "Another (15)" - for (int i = 5; i < 15; i++) { - manager.duplicatePipeline(1); - } + // Should add "Another (5)" through "Another (15)" + for (int i = 5; i < 15; i++) { + manager.duplicatePipeline(1); + } 
- var nicks = manager.getPipelineNicknames(); - var expected = - new ArrayList<>(List.of("New Pipeline", "Another", "New Pipeline (1)", "Another (1)")); - for (int i = 2; i < 15; i++) { - expected.add("Another (" + i + ")"); - } - Assertions.assertEquals(expected, nicks); + var nicks = manager.getPipelineNicknames(); + var expected = + new ArrayList<>(List.of("New Pipeline", "Another", "New Pipeline (1)", "Another (1)")); + for (int i = 2; i < 15; i++) { + expected.add("Another (" + i + ")"); } + Assertions.assertEquals(expected, nicks); + } } diff --git a/photon-core/src/test/java/org/photonvision/vision/processes/VisionModuleManagerTest.java b/photon-core/src/test/java/org/photonvision/vision/processes/VisionModuleManagerTest.java index e0a8376159..ce36cad574 100644 --- a/photon-core/src/test/java/org/photonvision/vision/processes/VisionModuleManagerTest.java +++ b/photon-core/src/test/java/org/photonvision/vision/processes/VisionModuleManagerTest.java @@ -37,162 +37,162 @@ import org.photonvision.vision.pipeline.result.CVPipelineResult; public class VisionModuleManagerTest { - @BeforeAll - public static void init() { - TestUtils.loadLibraries(); - } - - private static class TestSource extends VisionSource { - private final FrameProvider provider; - - public TestSource(FrameProvider provider, CameraConfiguration cameraConfiguration) { - super(cameraConfiguration); - this.provider = provider; - } + @BeforeAll + public static void init() { + TestUtils.loadLibraries(); + } - @Override - public FrameProvider getFrameProvider() { - return provider; - } + private static class TestSource extends VisionSource { + private final FrameProvider provider; - @Override - public VisionSourceSettables getSettables() { - return new TestSettables(getCameraConfiguration()); - } - - @Override - public boolean isVendorCamera() { - return false; - } + public TestSource(FrameProvider provider, CameraConfiguration cameraConfiguration) { + super(cameraConfiguration); + this.provider = 
provider; } - private static class TestSettables extends VisionSourceSettables { - protected TestSettables(CameraConfiguration configuration) { - super(configuration); - } - - @Override - public void setExposure(double exposure) {} - - @Override - public void setBrightness(int brightness) {} - - @Override - public void setGain(int gain) {} - - @Override - public VideoMode getCurrentVideoMode() { - return new VideoMode(0, 320, 240, 254); - } - - @Override - public void setVideoModeInternal(VideoMode videoMode) { - this.frameStaticProperties = new FrameStaticProperties(getCurrentVideoMode(), getFOV(), null); - } - - @Override - public HashMap getAllVideoModes() { - var ret = new HashMap(); - ret.put(0, getCurrentVideoMode()); - return ret; - } - - @Override - public void setAutoExposure(boolean cameraAutoExposure) {} + @Override + public FrameProvider getFrameProvider() { + return provider; } - private static class TestDataConsumer implements CVPipelineResultConsumer { - private CVPipelineResult result; - - @Override - public void accept(CVPipelineResult result) { - this.result = result; - } + @Override + public VisionSourceSettables getSettables() { + return new TestSettables(getCameraConfiguration()); } - @Test - public void setupManager() { - ConfigManager.getInstance().load(); - - var conf = new CameraConfiguration("Foo", "Bar"); - var ffp = - new FileFrameProvider( - TestUtils.getWPIImagePath(TestUtils.WPI2019Image.kCargoStraightDark72in_HighRes, false), - TestUtils.WPI2019Image.FOV); - - var testSource = new TestSource(ffp, conf); + @Override + public boolean isVendorCamera() { + return false; + } + } - var modules = VisionModuleManager.getInstance().addSources(List.of(testSource)); - var module0DataConsumer = new TestDataConsumer(); + private static class TestSettables extends VisionSourceSettables { + protected TestSettables(CameraConfiguration configuration) { + super(configuration); + } - 
VisionModuleManager.getInstance().visionModules.get(0).addResultConsumer(module0DataConsumer); + @Override + public void setExposure(double exposure) {} - modules.forEach(VisionModule::start); + @Override + public void setBrightness(int brightness) {} - sleep(1500); + @Override + public void setGain(int gain) {} - Assertions.assertNotNull(module0DataConsumer.result); - printTestResults(module0DataConsumer.result); + @Override + public VideoMode getCurrentVideoMode() { + return new VideoMode(0, 320, 240, 254); } - @Test - public void testMultipleStreamIndex() { - ConfigManager.getInstance().load(); - - var vmm = new VisionModuleManager(); - - var conf = new CameraConfiguration("Foo", "Bar"); - conf.streamIndex = 1; - var ffp = - new FileFrameProvider( - TestUtils.getWPIImagePath(TestUtils.WPI2019Image.kCargoStraightDark72in_HighRes, false), - TestUtils.WPI2019Image.FOV); - var testSource = new TestSource(ffp, conf); - - var conf2 = new CameraConfiguration("Foo2", "Bar"); - conf2.streamIndex = 0; - var ffp2 = - new FileFrameProvider( - TestUtils.getWPIImagePath(TestUtils.WPI2019Image.kCargoStraightDark72in_HighRes, false), - TestUtils.WPI2019Image.FOV); - var testSource2 = new TestSource(ffp2, conf2); - - var conf3 = new CameraConfiguration("Foo3", "Bar"); - conf3.streamIndex = 0; - var ffp3 = - new FileFrameProvider( - TestUtils.getWPIImagePath(TestUtils.WPI2019Image.kCargoStraightDark72in_HighRes, false), - TestUtils.WPI2019Image.FOV); - var testSource3 = new TestSource(ffp3, conf3); - - var modules = vmm.addSources(List.of(testSource, testSource2, testSource3)); - - System.out.println( - Arrays.toString( - modules.stream() - .map(it -> it.visionSource.getCameraConfiguration().streamIndex) - .toArray())); - var idxs = - modules.stream() - .map(it -> it.visionSource.getCameraConfiguration().streamIndex) - .collect(Collectors.toList()); - assertTrue(idxs.contains(0)); - assertTrue(idxs.contains(1)); - assertTrue(idxs.contains(2)); + @Override + public void 
setVideoModeInternal(VideoMode videoMode) { + this.frameStaticProperties = new FrameStaticProperties(getCurrentVideoMode(), getFOV(), null); } - private static void printTestResults(CVPipelineResult pipelineResult) { - double fps = 1000 / pipelineResult.getLatencyMillis(); - System.out.print( - "Pipeline ran in " + pipelineResult.getLatencyMillis() + "ms (" + fps + " fps), "); - System.out.println("Found " + pipelineResult.targets.size() + " valid targets"); + @Override + public HashMap getAllVideoModes() { + var ret = new HashMap(); + ret.put(0, getCurrentVideoMode()); + return ret; } - private void sleep(int millis) { - try { - Thread.sleep(millis); - } catch (InterruptedException e) { - // ignored - } + @Override + public void setAutoExposure(boolean cameraAutoExposure) {} + } + + private static class TestDataConsumer implements CVPipelineResultConsumer { + private CVPipelineResult result; + + @Override + public void accept(CVPipelineResult result) { + this.result = result; + } + } + + @Test + public void setupManager() { + ConfigManager.getInstance().load(); + + var conf = new CameraConfiguration("Foo", "Bar"); + var ffp = + new FileFrameProvider( + TestUtils.getWPIImagePath(TestUtils.WPI2019Image.kCargoStraightDark72in_HighRes, false), + TestUtils.WPI2019Image.FOV); + + var testSource = new TestSource(ffp, conf); + + var modules = VisionModuleManager.getInstance().addSources(List.of(testSource)); + var module0DataConsumer = new TestDataConsumer(); + + VisionModuleManager.getInstance().visionModules.get(0).addResultConsumer(module0DataConsumer); + + modules.forEach(VisionModule::start); + + sleep(1500); + + Assertions.assertNotNull(module0DataConsumer.result); + printTestResults(module0DataConsumer.result); + } + + @Test + public void testMultipleStreamIndex() { + ConfigManager.getInstance().load(); + + var vmm = new VisionModuleManager(); + + var conf = new CameraConfiguration("Foo", "Bar"); + conf.streamIndex = 1; + var ffp = + new FileFrameProvider( + 
TestUtils.getWPIImagePath(TestUtils.WPI2019Image.kCargoStraightDark72in_HighRes, false), + TestUtils.WPI2019Image.FOV); + var testSource = new TestSource(ffp, conf); + + var conf2 = new CameraConfiguration("Foo2", "Bar"); + conf2.streamIndex = 0; + var ffp2 = + new FileFrameProvider( + TestUtils.getWPIImagePath(TestUtils.WPI2019Image.kCargoStraightDark72in_HighRes, false), + TestUtils.WPI2019Image.FOV); + var testSource2 = new TestSource(ffp2, conf2); + + var conf3 = new CameraConfiguration("Foo3", "Bar"); + conf3.streamIndex = 0; + var ffp3 = + new FileFrameProvider( + TestUtils.getWPIImagePath(TestUtils.WPI2019Image.kCargoStraightDark72in_HighRes, false), + TestUtils.WPI2019Image.FOV); + var testSource3 = new TestSource(ffp3, conf3); + + var modules = vmm.addSources(List.of(testSource, testSource2, testSource3)); + + System.out.println( + Arrays.toString( + modules.stream() + .map(it -> it.visionSource.getCameraConfiguration().streamIndex) + .toArray())); + var idxs = + modules.stream() + .map(it -> it.visionSource.getCameraConfiguration().streamIndex) + .collect(Collectors.toList()); + assertTrue(idxs.contains(0)); + assertTrue(idxs.contains(1)); + assertTrue(idxs.contains(2)); + } + + private static void printTestResults(CVPipelineResult pipelineResult) { + double fps = 1000 / pipelineResult.getLatencyMillis(); + System.out.print( + "Pipeline ran in " + pipelineResult.getLatencyMillis() + "ms (" + fps + " fps), "); + System.out.println("Found " + pipelineResult.targets.size() + " valid targets"); + } + + private void sleep(int millis) { + try { + Thread.sleep(millis); + } catch (InterruptedException e) { + // ignored } + } } diff --git a/photon-core/src/test/java/org/photonvision/vision/processes/VisionSourceManagerTest.java b/photon-core/src/test/java/org/photonvision/vision/processes/VisionSourceManagerTest.java index 2c64a9a622..33741f1579 100644 --- a/photon-core/src/test/java/org/photonvision/vision/processes/VisionSourceManagerTest.java +++ 
b/photon-core/src/test/java/org/photonvision/vision/processes/VisionSourceManagerTest.java @@ -27,31 +27,31 @@ import org.photonvision.common.configuration.ConfigManager; public class VisionSourceManagerTest { - @Test - public void visionSourceTest() { - var inst = new VisionSourceManager(); - var infoList = new ArrayList(); - inst.cameraInfoSupplier = () -> infoList; - ConfigManager.getInstance().load(); - - inst.tryMatchUSBCamImpl(); - var config = new CameraConfiguration("secondTestVideo", "dev/video1"); - UsbCameraInfo info1 = new UsbCameraInfo(0, "dev/video0", "testVideo", new String[0], 1, 2); - infoList.add(info1); - - inst.registerLoadedConfigs(config); - var sources = inst.tryMatchUSBCamImpl(false); - - assertTrue(inst.knownUsbCameras.contains(info1)); - assertEquals(1, inst.unmatchedLoadedConfigs.size()); - - UsbCameraInfo info2 = - new UsbCameraInfo(0, "dev/video1", "secondTestVideo", new String[0], 2, 1); - infoList.add(info2); - inst.tryMatchUSBCamImpl(false); - - assertTrue(inst.knownUsbCameras.contains(info2)); - assertEquals(2, inst.knownUsbCameras.size()); - assertEquals(0, inst.unmatchedLoadedConfigs.size()); - } + @Test + public void visionSourceTest() { + var inst = new VisionSourceManager(); + var infoList = new ArrayList(); + inst.cameraInfoSupplier = () -> infoList; + ConfigManager.getInstance().load(); + + inst.tryMatchUSBCamImpl(); + var config = new CameraConfiguration("secondTestVideo", "dev/video1"); + UsbCameraInfo info1 = new UsbCameraInfo(0, "dev/video0", "testVideo", new String[0], 1, 2); + infoList.add(info1); + + inst.registerLoadedConfigs(config); + var sources = inst.tryMatchUSBCamImpl(false); + + assertTrue(inst.knownUsbCameras.contains(info1)); + assertEquals(1, inst.unmatchedLoadedConfigs.size()); + + UsbCameraInfo info2 = + new UsbCameraInfo(0, "dev/video1", "secondTestVideo", new String[0], 2, 1); + infoList.add(info2); + inst.tryMatchUSBCamImpl(false); + + assertTrue(inst.knownUsbCameras.contains(info2)); + assertEquals(2, 
inst.knownUsbCameras.size()); + assertEquals(0, inst.unmatchedLoadedConfigs.size()); + } } diff --git a/photon-core/src/test/java/org/photonvision/vision/target/TargetCalculationsTest.java b/photon-core/src/test/java/org/photonvision/vision/target/TargetCalculationsTest.java index 7b23d3a600..5b93948fce 100644 --- a/photon-core/src/test/java/org/photonvision/vision/target/TargetCalculationsTest.java +++ b/photon-core/src/test/java/org/photonvision/vision/target/TargetCalculationsTest.java @@ -38,244 +38,244 @@ public class TargetCalculationsTest { - private static Size imageSize = new Size(800, 600); - private static Point imageCenterPoint = - new Point(imageSize.width / 2.0 - 0.5, imageSize.height / 2.0 - 0.5); - private static final double diagFOV = Math.toRadians(70.0); - - private static final FrameStaticProperties props = - new FrameStaticProperties((int) imageSize.width, (int) imageSize.height, diagFOV, null); - private static final TrackedTarget.TargetCalculationParameters params = - new TrackedTarget.TargetCalculationParameters( - true, - TargetOffsetPointEdge.Center, - RobotOffsetPointMode.None, - new Point(), - new DualOffsetValues(), - imageCenterPoint, - props.horizontalFocalLength, - props.verticalFocalLength, - imageSize.width * imageSize.height); - - @BeforeAll - public static void setup() { - TestUtils.loadLibraries(); - } - - @Test - public void testYawPitchBehavior() { - double targetPixelOffsetX = 100; - double targetPixelOffsetY = 100; - var targetCenterPoint = - new Point(imageCenterPoint.x + targetPixelOffsetX, imageCenterPoint.y + targetPixelOffsetY); - - var targetYawPitch = - TargetCalculations.calculateYawPitch( - imageCenterPoint.x, - targetCenterPoint.x, - params.horizontalFocalLength, - imageCenterPoint.y, - targetCenterPoint.y, - params.verticalFocalLength); - - assertTrue(targetYawPitch.getFirst() > 0, "Yaw is not positive right"); - assertTrue(targetYawPitch.getSecond() < 0, "Pitch is not positive up"); - - var fovs = - 
FrameStaticProperties.calculateHorizontalVerticalFoV( - diagFOV, (int) imageSize.width, (int) imageSize.height); - var maxYaw = - TargetCalculations.calculateYawPitch( - imageCenterPoint.x, - 2 * imageCenterPoint.x, - params.horizontalFocalLength, - imageCenterPoint.y, - imageCenterPoint.y, - params.verticalFocalLength); - assertEquals(fovs.getFirst() / 2.0, maxYaw.getFirst(), 0.025, "Horizontal FOV check failed"); - var maxPitch = - TargetCalculations.calculateYawPitch( - imageCenterPoint.x, - imageCenterPoint.x, - params.horizontalFocalLength, - imageCenterPoint.y, - 0, - params.verticalFocalLength); - assertEquals(fovs.getSecond() / 2.0, maxPitch.getSecond(), 0.025, "Vertical FOV check failed"); - } - - private static Stream testYawPitchCalcArgs() { - return Stream.of( - // (yaw, pitch) in degrees - Arguments.of(0, 0), - Arguments.of(10, 0), - Arguments.of(0, 10), - Arguments.of(10, 10), - Arguments.of(-10, -10), - Arguments.of(30, 45), - Arguments.of(-45, -20)); - } - - private static double[] testCameraMatrix = {240, 0, 320, 0, 240, 320, 0, 0, 1}; - - @ParameterizedTest - @MethodSource("testYawPitchCalcArgs") - public void testYawPitchCalc(double yawDeg, double pitchDeg) { - Mat testCameraMat = new Mat(3, 3, CvType.CV_64F); - testCameraMat.put(0, 0, testCameraMatrix); - // Since we create this translation using the given yaw/pitch, we should see the same angles - // calculated - var targetTrl = - new Translation3d(1, new Rotation3d(0, Math.toRadians(pitchDeg), Math.toRadians(yawDeg))); - // NWU to EDN - var objectPoints = - new MatOfPoint3f(new Point3(-targetTrl.getY(), -targetTrl.getZ(), targetTrl.getX())); - var imagePoints = new MatOfPoint2f(); - // Project translation into camera image - Calib3d.projectPoints( - objectPoints, - new MatOfDouble(0, 0, 0), - new MatOfDouble(0, 0, 0), - testCameraMat, - new MatOfDouble(0, 0, 0, 0, 0), - imagePoints); - var point = imagePoints.toArray()[0]; - // Test if the target yaw/pitch calculation matches what the target 
was created with - var yawPitch = - TargetCalculations.calculateYawPitch( - point.x, - testCameraMatrix[2], - testCameraMatrix[0], - point.y, - testCameraMatrix[5], - testCameraMatrix[4]); - assertEquals(yawDeg, yawPitch.getFirst(), 1e-3, "Yaw calculation incorrect"); - assertEquals(pitchDeg, yawPitch.getSecond(), 1e-3, "Pitch calculation incorrect"); - } - - @Test - public void testTargetOffset() { - Point center = new Point(0, 0); - Size rectSize = new Size(10, 5); - double angle = 30; - RotatedRect rect = new RotatedRect(center, rectSize, angle); - - // We pretend like x/y are in pixels, so the "top" is actually the bottom - var result = - TargetCalculations.calculateTargetOffsetPoint(true, TargetOffsetPointEdge.Top, rect); - assertEquals(1.25, result.x, 0.1, "Target offset x not as expected"); - assertEquals(-2.17, result.y, 0.1, "Target offset Y not as expected"); - result = - TargetCalculations.calculateTargetOffsetPoint(true, TargetOffsetPointEdge.Bottom, rect); - assertEquals(-1.25, result.x, 0.1, "Target offset x not as expected"); - assertEquals(2.17, result.y, 0.1, "Target offset Y not as expected"); - } - - @Test - public void testSkewCalculation() { - // Setup - var isLandscape = true; - var rect = new RotatedRect(new Point(), new Size(10, 5), 170); - - // Compute min area rect - var points = new Point[4]; - rect.points(points); - var mat2f = new MatOfPoint2f(points); - var minAreaRect = Imgproc.minAreaRect(mat2f); - - // Assert result - var result = TargetCalculations.calculateSkew(isLandscape, minAreaRect); - assertEquals(-10, result, 0.01); - - // Setup - isLandscape = true; - rect = new RotatedRect(new Point(), new Size(10, 5), -70); - - // Compute min area rect - points = new Point[4]; - rect.points(points); - mat2f.release(); - mat2f = new MatOfPoint2f(points); - minAreaRect = Imgproc.minAreaRect(mat2f); - - // Assert result - result = TargetCalculations.calculateSkew(isLandscape, minAreaRect); - assertEquals(-70, result, 0.01); - - // Setup - 
isLandscape = false; - rect = new RotatedRect(new Point(), new Size(5, 10), 10); - - // Compute min area rect - points = new Point[4]; - rect.points(points); - mat2f.release(); - mat2f = new MatOfPoint2f(points); - minAreaRect = Imgproc.minAreaRect(mat2f); - - // Assert result - result = TargetCalculations.calculateSkew(isLandscape, minAreaRect); - assertEquals(10, result, 0.01); - - // Setup - isLandscape = false; - rect = new RotatedRect(new Point(), new Size(5, 10), 70); - - // Compute min area rect - points = new Point[4]; - rect.points(points); - mat2f.release(); - mat2f = new MatOfPoint2f(points); - minAreaRect = Imgproc.minAreaRect(mat2f); - - // Assert result - result = TargetCalculations.calculateSkew(isLandscape, minAreaRect); - assertEquals(70, result, 0.01); - - // Setup - isLandscape = false; - rect = new RotatedRect(new Point(), new Size(5, 10), -70); - - // Compute min area rect - points = new Point[4]; - rect.points(points); - mat2f.release(); - mat2f = new MatOfPoint2f(points); - minAreaRect = Imgproc.minAreaRect(mat2f); - - // Assert result - result = TargetCalculations.calculateSkew(isLandscape, minAreaRect); - assertEquals(-70, result, 0.01); - } - - @Test - public void testCameraFOVCalculation() { - final DoubleCouple glowormHorizVert = - FrameStaticProperties.calculateHorizontalVerticalFoV(74.8, 640, 480); - var gwHorizDeg = glowormHorizVert.getFirst(); - var gwVertDeg = glowormHorizVert.getSecond(); - assertEquals(62.7, gwHorizDeg, .3); - assertEquals(49, gwVertDeg, .3); - } - - @Test - public void testDualOffsetCrosshair() { - final DualOffsetValues dualOffsetValues = - new DualOffsetValues( - new Point(400, 150), 10, - new Point(390, 260), 2); - - final Point expectedHalfway = new Point(393.75, 218.75); - final Point expectedOutside = new Point(388.75, 273.75); - - Point crosshairPointHalfway = - TargetCalculations.calculateDualOffsetCrosshair(dualOffsetValues, 5); - Point crosshairPointOutside = - 
TargetCalculations.calculateDualOffsetCrosshair(dualOffsetValues, 1); - - Assertions.assertEquals(expectedHalfway.x, crosshairPointHalfway.x); - Assertions.assertEquals(expectedHalfway.y, crosshairPointHalfway.y); - Assertions.assertEquals(expectedOutside.x, crosshairPointOutside.x); - Assertions.assertEquals(expectedOutside.y, crosshairPointOutside.y); - } + private static Size imageSize = new Size(800, 600); + private static Point imageCenterPoint = + new Point(imageSize.width / 2.0 - 0.5, imageSize.height / 2.0 - 0.5); + private static final double diagFOV = Math.toRadians(70.0); + + private static final FrameStaticProperties props = + new FrameStaticProperties((int) imageSize.width, (int) imageSize.height, diagFOV, null); + private static final TrackedTarget.TargetCalculationParameters params = + new TrackedTarget.TargetCalculationParameters( + true, + TargetOffsetPointEdge.Center, + RobotOffsetPointMode.None, + new Point(), + new DualOffsetValues(), + imageCenterPoint, + props.horizontalFocalLength, + props.verticalFocalLength, + imageSize.width * imageSize.height); + + @BeforeAll + public static void setup() { + TestUtils.loadLibraries(); + } + + @Test + public void testYawPitchBehavior() { + double targetPixelOffsetX = 100; + double targetPixelOffsetY = 100; + var targetCenterPoint = + new Point(imageCenterPoint.x + targetPixelOffsetX, imageCenterPoint.y + targetPixelOffsetY); + + var targetYawPitch = + TargetCalculations.calculateYawPitch( + imageCenterPoint.x, + targetCenterPoint.x, + params.horizontalFocalLength, + imageCenterPoint.y, + targetCenterPoint.y, + params.verticalFocalLength); + + assertTrue(targetYawPitch.getFirst() > 0, "Yaw is not positive right"); + assertTrue(targetYawPitch.getSecond() < 0, "Pitch is not positive up"); + + var fovs = + FrameStaticProperties.calculateHorizontalVerticalFoV( + diagFOV, (int) imageSize.width, (int) imageSize.height); + var maxYaw = + TargetCalculations.calculateYawPitch( + imageCenterPoint.x, + 2 * 
imageCenterPoint.x, + params.horizontalFocalLength, + imageCenterPoint.y, + imageCenterPoint.y, + params.verticalFocalLength); + assertEquals(fovs.getFirst() / 2.0, maxYaw.getFirst(), 0.025, "Horizontal FOV check failed"); + var maxPitch = + TargetCalculations.calculateYawPitch( + imageCenterPoint.x, + imageCenterPoint.x, + params.horizontalFocalLength, + imageCenterPoint.y, + 0, + params.verticalFocalLength); + assertEquals(fovs.getSecond() / 2.0, maxPitch.getSecond(), 0.025, "Vertical FOV check failed"); + } + + private static Stream testYawPitchCalcArgs() { + return Stream.of( + // (yaw, pitch) in degrees + Arguments.of(0, 0), + Arguments.of(10, 0), + Arguments.of(0, 10), + Arguments.of(10, 10), + Arguments.of(-10, -10), + Arguments.of(30, 45), + Arguments.of(-45, -20)); + } + + private static double[] testCameraMatrix = {240, 0, 320, 0, 240, 320, 0, 0, 1}; + + @ParameterizedTest + @MethodSource("testYawPitchCalcArgs") + public void testYawPitchCalc(double yawDeg, double pitchDeg) { + Mat testCameraMat = new Mat(3, 3, CvType.CV_64F); + testCameraMat.put(0, 0, testCameraMatrix); + // Since we create this translation using the given yaw/pitch, we should see the same angles + // calculated + var targetTrl = + new Translation3d(1, new Rotation3d(0, Math.toRadians(pitchDeg), Math.toRadians(yawDeg))); + // NWU to EDN + var objectPoints = + new MatOfPoint3f(new Point3(-targetTrl.getY(), -targetTrl.getZ(), targetTrl.getX())); + var imagePoints = new MatOfPoint2f(); + // Project translation into camera image + Calib3d.projectPoints( + objectPoints, + new MatOfDouble(0, 0, 0), + new MatOfDouble(0, 0, 0), + testCameraMat, + new MatOfDouble(0, 0, 0, 0, 0), + imagePoints); + var point = imagePoints.toArray()[0]; + // Test if the target yaw/pitch calculation matches what the target was created with + var yawPitch = + TargetCalculations.calculateYawPitch( + point.x, + testCameraMatrix[2], + testCameraMatrix[0], + point.y, + testCameraMatrix[5], + testCameraMatrix[4]); + 
assertEquals(yawDeg, yawPitch.getFirst(), 1e-3, "Yaw calculation incorrect"); + assertEquals(pitchDeg, yawPitch.getSecond(), 1e-3, "Pitch calculation incorrect"); + } + + @Test + public void testTargetOffset() { + Point center = new Point(0, 0); + Size rectSize = new Size(10, 5); + double angle = 30; + RotatedRect rect = new RotatedRect(center, rectSize, angle); + + // We pretend like x/y are in pixels, so the "top" is actually the bottom + var result = + TargetCalculations.calculateTargetOffsetPoint(true, TargetOffsetPointEdge.Top, rect); + assertEquals(1.25, result.x, 0.1, "Target offset x not as expected"); + assertEquals(-2.17, result.y, 0.1, "Target offset Y not as expected"); + result = + TargetCalculations.calculateTargetOffsetPoint(true, TargetOffsetPointEdge.Bottom, rect); + assertEquals(-1.25, result.x, 0.1, "Target offset x not as expected"); + assertEquals(2.17, result.y, 0.1, "Target offset Y not as expected"); + } + + @Test + public void testSkewCalculation() { + // Setup + var isLandscape = true; + var rect = new RotatedRect(new Point(), new Size(10, 5), 170); + + // Compute min area rect + var points = new Point[4]; + rect.points(points); + var mat2f = new MatOfPoint2f(points); + var minAreaRect = Imgproc.minAreaRect(mat2f); + + // Assert result + var result = TargetCalculations.calculateSkew(isLandscape, minAreaRect); + assertEquals(-10, result, 0.01); + + // Setup + isLandscape = true; + rect = new RotatedRect(new Point(), new Size(10, 5), -70); + + // Compute min area rect + points = new Point[4]; + rect.points(points); + mat2f.release(); + mat2f = new MatOfPoint2f(points); + minAreaRect = Imgproc.minAreaRect(mat2f); + + // Assert result + result = TargetCalculations.calculateSkew(isLandscape, minAreaRect); + assertEquals(-70, result, 0.01); + + // Setup + isLandscape = false; + rect = new RotatedRect(new Point(), new Size(5, 10), 10); + + // Compute min area rect + points = new Point[4]; + rect.points(points); + mat2f.release(); + mat2f = new 
MatOfPoint2f(points); + minAreaRect = Imgproc.minAreaRect(mat2f); + + // Assert result + result = TargetCalculations.calculateSkew(isLandscape, minAreaRect); + assertEquals(10, result, 0.01); + + // Setup + isLandscape = false; + rect = new RotatedRect(new Point(), new Size(5, 10), 70); + + // Compute min area rect + points = new Point[4]; + rect.points(points); + mat2f.release(); + mat2f = new MatOfPoint2f(points); + minAreaRect = Imgproc.minAreaRect(mat2f); + + // Assert result + result = TargetCalculations.calculateSkew(isLandscape, minAreaRect); + assertEquals(70, result, 0.01); + + // Setup + isLandscape = false; + rect = new RotatedRect(new Point(), new Size(5, 10), -70); + + // Compute min area rect + points = new Point[4]; + rect.points(points); + mat2f.release(); + mat2f = new MatOfPoint2f(points); + minAreaRect = Imgproc.minAreaRect(mat2f); + + // Assert result + result = TargetCalculations.calculateSkew(isLandscape, minAreaRect); + assertEquals(-70, result, 0.01); + } + + @Test + public void testCameraFOVCalculation() { + final DoubleCouple glowormHorizVert = + FrameStaticProperties.calculateHorizontalVerticalFoV(74.8, 640, 480); + var gwHorizDeg = glowormHorizVert.getFirst(); + var gwVertDeg = glowormHorizVert.getSecond(); + assertEquals(62.7, gwHorizDeg, .3); + assertEquals(49, gwVertDeg, .3); + } + + @Test + public void testDualOffsetCrosshair() { + final DualOffsetValues dualOffsetValues = + new DualOffsetValues( + new Point(400, 150), 10, + new Point(390, 260), 2); + + final Point expectedHalfway = new Point(393.75, 218.75); + final Point expectedOutside = new Point(388.75, 273.75); + + Point crosshairPointHalfway = + TargetCalculations.calculateDualOffsetCrosshair(dualOffsetValues, 5); + Point crosshairPointOutside = + TargetCalculations.calculateDualOffsetCrosshair(dualOffsetValues, 1); + + Assertions.assertEquals(expectedHalfway.x, crosshairPointHalfway.x); + Assertions.assertEquals(expectedHalfway.y, crosshairPointHalfway.y); + 
Assertions.assertEquals(expectedOutside.x, crosshairPointOutside.x); + Assertions.assertEquals(expectedOutside.y, crosshairPointOutside.y); + } } diff --git a/photon-core/src/test/java/org/photonvision/vision/target/TrackedTargetTest.java b/photon-core/src/test/java/org/photonvision/vision/target/TrackedTargetTest.java index f58f355720..c1f0620359 100644 --- a/photon-core/src/test/java/org/photonvision/vision/target/TrackedTargetTest.java +++ b/photon-core/src/test/java/org/photonvision/vision/target/TrackedTargetTest.java @@ -29,41 +29,41 @@ import org.photonvision.vision.opencv.DualOffsetValues; public class TrackedTargetTest { - @BeforeEach - public void Init() { - TestUtils.loadLibraries(); - } + @BeforeEach + public void Init() { + TestUtils.loadLibraries(); + } - @Test - void axisTest() { - MatOfPoint mat = new MatOfPoint(); - mat.fromList( - List.of( - new Point(400, 298), - new Point(426.22, 298), - new Point(426.22, 302), - new Point(400, 302))); // gives contour with center of 426, 300 - Contour contour = new Contour(mat); + @Test + void axisTest() { + MatOfPoint mat = new MatOfPoint(); + mat.fromList( + List.of( + new Point(400, 298), + new Point(426.22, 298), + new Point(426.22, 302), + new Point(400, 302))); // gives contour with center of 426, 300 + Contour contour = new Contour(mat); - var pTarget = new PotentialTarget(contour); + var pTarget = new PotentialTarget(contour); - var imageSize = new Size(800, 600); + var imageSize = new Size(800, 600); - var setting = - new TrackedTarget.TargetCalculationParameters( - false, - TargetOffsetPointEdge.Center, - RobotOffsetPointMode.None, - new Point(0, 0), - new DualOffsetValues(), - new Point(imageSize.width / 2, imageSize.height / 2), - 61, - 34.3, - imageSize.area()); + var setting = + new TrackedTarget.TargetCalculationParameters( + false, + TargetOffsetPointEdge.Center, + RobotOffsetPointMode.None, + new Point(0, 0), + new DualOffsetValues(), + new Point(imageSize.width / 2, imageSize.height / 2), + 
61, + 34.3, + imageSize.area()); - var trackedTarget = new TrackedTarget(pTarget, setting, null); - // TODO change these hardcoded values - assertEquals(12.0, trackedTarget.getYaw(), 0.05, "Yaw was incorrect"); - assertEquals(0, trackedTarget.getPitch(), 0.05, "Pitch was incorrect"); - } + var trackedTarget = new TrackedTarget(pTarget, setting, null); + // TODO change these hardcoded values + assertEquals(12.0, trackedTarget.getYaw(), 0.05, "Yaw was incorrect"); + assertEquals(0, trackedTarget.getPitch(), 0.05, "Pitch was incorrect"); + } } diff --git a/photon-lib/build.gradle b/photon-lib/build.gradle index baae8fcc7d..d99ea5804a 100644 --- a/photon-lib/build.gradle +++ b/photon-lib/build.gradle @@ -120,17 +120,17 @@ task generateVendorJson() { outputs.file photonlibFileOutput inputs.file photonlibFileInput - println "Writing vendor JSON ${pubVersion} to $photonlibFileOutput" + println "Writing vendor JSON ${pubVersion} to $photonlibFileOutput" - if (photonlibFileOutput.exists()) { - photonlibFileOutput.delete() - } - photonlibFileOutput.parentFile.mkdirs() + if (photonlibFileOutput.exists()) { + photonlibFileOutput.delete() + } + photonlibFileOutput.parentFile.mkdirs() - def read = photonlibFileInput.text + def read = photonlibFileInput.text .replace('${photon_version}', pubVersion) .replace('${frc_year}', frcYear) - photonlibFileOutput.text = read + photonlibFileOutput.text = read outputs.upToDateWhen { false } } diff --git a/photon-lib/src/main/java/org/photonvision/EstimatedRobotPose.java b/photon-lib/src/main/java/org/photonvision/EstimatedRobotPose.java index a355ab00f3..9df8da3467 100644 --- a/photon-lib/src/main/java/org/photonvision/EstimatedRobotPose.java +++ b/photon-lib/src/main/java/org/photonvision/EstimatedRobotPose.java @@ -31,32 +31,32 @@ /** An estimated pose based on pipeline result */ public class EstimatedRobotPose { - /** The estimated pose */ - public final Pose3d estimatedPose; + /** The estimated pose */ + public final Pose3d 
estimatedPose; - /** The estimated time the frame used to derive the robot pose was taken */ - public final double timestampSeconds; + /** The estimated time the frame used to derive the robot pose was taken */ + public final double timestampSeconds; - /** A list of the targets used to compute this pose */ - public final List targetsUsed; + /** A list of the targets used to compute this pose */ + public final List targetsUsed; - /** The strategy actually used to produce this pose */ - public final PoseStrategy strategy; + /** The strategy actually used to produce this pose */ + public final PoseStrategy strategy; - /** - * Constructs an EstimatedRobotPose - * - * @param estimatedPose estimated pose - * @param timestampSeconds timestamp of the estimate - */ - public EstimatedRobotPose( - Pose3d estimatedPose, - double timestampSeconds, - List targetsUsed, - PoseStrategy strategy) { - this.estimatedPose = estimatedPose; - this.timestampSeconds = timestampSeconds; - this.targetsUsed = targetsUsed; - this.strategy = strategy; - } + /** + * Constructs an EstimatedRobotPose + * + * @param estimatedPose estimated pose + * @param timestampSeconds timestamp of the estimate + */ + public EstimatedRobotPose( + Pose3d estimatedPose, + double timestampSeconds, + List targetsUsed, + PoseStrategy strategy) { + this.estimatedPose = estimatedPose; + this.timestampSeconds = timestampSeconds; + this.targetsUsed = targetsUsed; + this.strategy = strategy; + } } diff --git a/photon-lib/src/main/java/org/photonvision/PhotonCamera.java b/photon-lib/src/main/java/org/photonvision/PhotonCamera.java index 9500629ca3..fd8e4f3b4c 100644 --- a/photon-lib/src/main/java/org/photonvision/PhotonCamera.java +++ b/photon-lib/src/main/java/org/photonvision/PhotonCamera.java @@ -57,368 +57,368 @@ /** Represents a camera that is connected to PhotonVision. 
*/ public class PhotonCamera implements AutoCloseable { - public static final String kTableName = "photonvision"; - - private final NetworkTable cameraTable; - RawSubscriber rawBytesEntry; - BooleanPublisher driverModePublisher; - BooleanSubscriber driverModeSubscriber; - DoublePublisher latencyMillisEntry; - BooleanPublisher hasTargetEntry; - DoublePublisher targetPitchEntry; - DoublePublisher targetYawEntry; - DoublePublisher targetAreaEntry; - DoubleArrayPublisher targetPoseEntry; - DoublePublisher targetSkewEntry; - StringSubscriber versionEntry; - IntegerEntry inputSaveImgEntry, outputSaveImgEntry; - IntegerPublisher pipelineIndexRequest, ledModeRequest; - IntegerSubscriber pipelineIndexState, ledModeState; - IntegerSubscriber heartbeatEntry; - private DoubleArraySubscriber cameraIntrinsicsSubscriber; - private DoubleArraySubscriber cameraDistortionSubscriber; - private StringPublisher atflPublisher; - - @Override - public void close() { - rawBytesEntry.close(); - driverModePublisher.close(); - driverModeSubscriber.close(); - latencyMillisEntry.close(); - hasTargetEntry.close(); - targetPitchEntry.close(); - targetYawEntry.close(); - targetAreaEntry.close(); - targetPoseEntry.close(); - targetSkewEntry.close(); - versionEntry.close(); - inputSaveImgEntry.close(); - outputSaveImgEntry.close(); - pipelineIndexRequest.close(); - pipelineIndexState.close(); - ledModeRequest.close(); - ledModeState.close(); - pipelineIndexRequest.close(); - cameraIntrinsicsSubscriber.close(); - cameraDistortionSubscriber.close(); - atflPublisher.close(); + public static final String kTableName = "photonvision"; + + private final NetworkTable cameraTable; + RawSubscriber rawBytesEntry; + BooleanPublisher driverModePublisher; + BooleanSubscriber driverModeSubscriber; + DoublePublisher latencyMillisEntry; + BooleanPublisher hasTargetEntry; + DoublePublisher targetPitchEntry; + DoublePublisher targetYawEntry; + DoublePublisher targetAreaEntry; + DoubleArrayPublisher targetPoseEntry; + 
DoublePublisher targetSkewEntry; + StringSubscriber versionEntry; + IntegerEntry inputSaveImgEntry, outputSaveImgEntry; + IntegerPublisher pipelineIndexRequest, ledModeRequest; + IntegerSubscriber pipelineIndexState, ledModeState; + IntegerSubscriber heartbeatEntry; + private DoubleArraySubscriber cameraIntrinsicsSubscriber; + private DoubleArraySubscriber cameraDistortionSubscriber; + private StringPublisher atflPublisher; + + @Override + public void close() { + rawBytesEntry.close(); + driverModePublisher.close(); + driverModeSubscriber.close(); + latencyMillisEntry.close(); + hasTargetEntry.close(); + targetPitchEntry.close(); + targetYawEntry.close(); + targetAreaEntry.close(); + targetPoseEntry.close(); + targetSkewEntry.close(); + versionEntry.close(); + inputSaveImgEntry.close(); + outputSaveImgEntry.close(); + pipelineIndexRequest.close(); + pipelineIndexState.close(); + ledModeRequest.close(); + ledModeState.close(); + pipelineIndexRequest.close(); + cameraIntrinsicsSubscriber.close(); + cameraDistortionSubscriber.close(); + atflPublisher.close(); + } + + private final String path; + private final String name; + + private static boolean VERSION_CHECK_ENABLED = true; + private static long VERSION_CHECK_INTERVAL = 5; + private double lastVersionCheckTime = 0; + + private long prevHeartbeatValue = -1; + private double prevHeartbeatChangeTime = 0; + private static final double HEARBEAT_DEBOUNCE_SEC = 0.5; + + public static void setVersionCheckEnabled(boolean enabled) { + VERSION_CHECK_ENABLED = enabled; + } + + Packet packet = new Packet(1); + + // Existing is enough to make this multisubscriber do its thing + private final MultiSubscriber m_topicNameSubscriber; + + /** + * Constructs a PhotonCamera from a root table. + * + * @param instance The NetworkTableInstance to pull data from. 
This can be a custom instance in + * simulation, but should *usually* be the default NTInstance from + * NetworkTableInstance::getDefault + * @param cameraName The name of the camera, as seen in the UI. + */ + public PhotonCamera(NetworkTableInstance instance, String cameraName) { + name = cameraName; + var photonvision_root_table = instance.getTable(kTableName); + this.cameraTable = photonvision_root_table.getSubTable(cameraName); + path = cameraTable.getPath(); + rawBytesEntry = + cameraTable + .getRawTopic("rawBytes") + .subscribe( + "rawBytes", new byte[] {}, PubSubOption.periodic(0.01), PubSubOption.sendAll(true)); + driverModePublisher = cameraTable.getBooleanTopic("driverModeRequest").publish(); + driverModeSubscriber = cameraTable.getBooleanTopic("driverMode").subscribe(false); + inputSaveImgEntry = cameraTable.getIntegerTopic("inputSaveImgCmd").getEntry(0); + outputSaveImgEntry = cameraTable.getIntegerTopic("outputSaveImgCmd").getEntry(0); + pipelineIndexRequest = cameraTable.getIntegerTopic("pipelineIndexRequest").publish(); + pipelineIndexState = cameraTable.getIntegerTopic("pipelineIndexState").subscribe(0); + heartbeatEntry = cameraTable.getIntegerTopic("heartbeat").subscribe(-1); + cameraIntrinsicsSubscriber = + cameraTable.getDoubleArrayTopic("cameraIntrinsics").subscribe(null); + cameraDistortionSubscriber = + cameraTable.getDoubleArrayTopic("cameraDistortion").subscribe(null); + + ledModeRequest = photonvision_root_table.getIntegerTopic("ledModeRequest").publish(); + ledModeState = photonvision_root_table.getIntegerTopic("ledModeState").subscribe(-1); + versionEntry = photonvision_root_table.getStringTopic("version").subscribe(""); + + atflPublisher = photonvision_root_table.getStringTopic("apriltag_field_layout").publish(); + // Save the layout locally on Rio; on reboot, should be pushed out to NT clients + atflPublisher.getTopic().setPersistent(true); + + m_topicNameSubscriber = + new MultiSubscriber( + instance, new String[] {"/photonvision/"}, 
PubSubOption.topicsOnly(true)); + } + + /** + * Constructs a PhotonCamera from the name of the camera. + * + * @param cameraName The nickname of the camera (found in the PhotonVision UI). + */ + public PhotonCamera(String cameraName) { + this(NetworkTableInstance.getDefault(), cameraName); + } + + /** + * Returns the latest pipeline result. + * + * @return The latest pipeline result. + */ + public PhotonPipelineResult getLatestResult() { + verifyVersion(); + + // Clear the packet. + packet.clear(); + + // Create latest result. + var ret = new PhotonPipelineResult(); + + // Populate packet and create result. + packet.setData(rawBytesEntry.get(new byte[] {})); + + if (packet.getSize() < 1) return ret; + ret.createFromPacket(packet); + + // Set the timestamp of the result. + // getLatestChange returns in microseconds, so we divide by 1e6 to convert to seconds. + ret.setTimestampSeconds((rawBytesEntry.getLastChange() / 1e6) - ret.getLatencyMillis() / 1e3); + + // Return result. + return ret; + } + + /** + * Returns whether the camera is in driver mode. + * + * @return Whether the camera is in driver mode. + */ + public boolean getDriverMode() { + return driverModeSubscriber.get(); + } + + /** + * Toggles driver mode. + * + * @param driverMode Whether to set driver mode. + */ + public void setDriverMode(boolean driverMode) { + driverModePublisher.set(driverMode); + } + + /** + * Request the camera to save a new image file from the input camera stream with overlays. Images + * take up space in the filesystem of the PhotonCamera. Calling it frequently will fill up disk + * space and eventually cause the system to stop working. Clear out images in + * /opt/photonvision/photonvision_config/imgSaves frequently to prevent issues. + */ + public void takeInputSnapshot() { + inputSaveImgEntry.set(inputSaveImgEntry.get() + 1); + } + + /** + * Request the camera to save a new image file from the output stream with overlays. 
Images take + * up space in the filesystem of the PhotonCamera. Calling it frequently will fill up disk space + * and eventually cause the system to stop working. Clear out images in + * /opt/photonvision/photonvision_config/imgSaves frequently to prevent issues. + */ + public void takeOutputSnapshot() { + outputSaveImgEntry.set(outputSaveImgEntry.get() + 1); + } + + /** + * Returns the active pipeline index. + * + * @return The active pipeline index. + */ + public int getPipelineIndex() { + return (int) pipelineIndexState.get(0); + } + + /** + * Allows the user to select the active pipeline index. + * + * @param index The active pipeline index. + */ + public void setPipelineIndex(int index) { + pipelineIndexRequest.set(index); + } + + /** + * Returns the current LED mode. + * + * @return The current LED mode. + */ + public VisionLEDMode getLEDMode() { + int value = (int) ledModeState.get(-1); + switch (value) { + case 0: + return VisionLEDMode.kOff; + case 1: + return VisionLEDMode.kOn; + case 2: + return VisionLEDMode.kBlink; + case -1: + default: + return VisionLEDMode.kDefault; } - - private final String path; - private final String name; - - private static boolean VERSION_CHECK_ENABLED = true; - private static long VERSION_CHECK_INTERVAL = 5; - private double lastVersionCheckTime = 0; - - private long prevHeartbeatValue = -1; - private double prevHeartbeatChangeTime = 0; - private static final double HEARBEAT_DEBOUNCE_SEC = 0.5; - - public static void setVersionCheckEnabled(boolean enabled) { - VERSION_CHECK_ENABLED = enabled; - } - - Packet packet = new Packet(1); - - // Existing is enough to make this multisubscriber do its thing - private final MultiSubscriber m_topicNameSubscriber; - - /** - * Constructs a PhotonCamera from a root table. - * - * @param instance The NetworkTableInstance to pull data from. 
This can be a custom instance in - * simulation, but should *usually* be the default NTInstance from - * NetworkTableInstance::getDefault - * @param cameraName The name of the camera, as seen in the UI. - */ - public PhotonCamera(NetworkTableInstance instance, String cameraName) { - name = cameraName; - var photonvision_root_table = instance.getTable(kTableName); - this.cameraTable = photonvision_root_table.getSubTable(cameraName); - path = cameraTable.getPath(); - rawBytesEntry = - cameraTable - .getRawTopic("rawBytes") - .subscribe( - "rawBytes", new byte[] {}, PubSubOption.periodic(0.01), PubSubOption.sendAll(true)); - driverModePublisher = cameraTable.getBooleanTopic("driverModeRequest").publish(); - driverModeSubscriber = cameraTable.getBooleanTopic("driverMode").subscribe(false); - inputSaveImgEntry = cameraTable.getIntegerTopic("inputSaveImgCmd").getEntry(0); - outputSaveImgEntry = cameraTable.getIntegerTopic("outputSaveImgCmd").getEntry(0); - pipelineIndexRequest = cameraTable.getIntegerTopic("pipelineIndexRequest").publish(); - pipelineIndexState = cameraTable.getIntegerTopic("pipelineIndexState").subscribe(0); - heartbeatEntry = cameraTable.getIntegerTopic("heartbeat").subscribe(-1); - cameraIntrinsicsSubscriber = - cameraTable.getDoubleArrayTopic("cameraIntrinsics").subscribe(null); - cameraDistortionSubscriber = - cameraTable.getDoubleArrayTopic("cameraDistortion").subscribe(null); - - ledModeRequest = photonvision_root_table.getIntegerTopic("ledModeRequest").publish(); - ledModeState = photonvision_root_table.getIntegerTopic("ledModeState").subscribe(-1); - versionEntry = photonvision_root_table.getStringTopic("version").subscribe(""); - - atflPublisher = photonvision_root_table.getStringTopic("apriltag_field_layout").publish(); - // Save the layout locally on Rio; on reboot, should be pushed out to NT clients - atflPublisher.getTopic().setPersistent(true); - - m_topicNameSubscriber = - new MultiSubscriber( - instance, new String[] {"/photonvision/"}, 
PubSubOption.topicsOnly(true)); - } - - /** - * Constructs a PhotonCamera from the name of the camera. - * - * @param cameraName The nickname of the camera (found in the PhotonVision UI). - */ - public PhotonCamera(String cameraName) { - this(NetworkTableInstance.getDefault(), cameraName); - } - - /** - * Returns the latest pipeline result. - * - * @return The latest pipeline result. - */ - public PhotonPipelineResult getLatestResult() { - verifyVersion(); - - // Clear the packet. - packet.clear(); - - // Create latest result. - var ret = new PhotonPipelineResult(); - - // Populate packet and create result. - packet.setData(rawBytesEntry.get(new byte[] {})); - - if (packet.getSize() < 1) return ret; - ret.createFromPacket(packet); - - // Set the timestamp of the result. - // getLatestChange returns in microseconds, so we divide by 1e6 to convert to seconds. - ret.setTimestampSeconds((rawBytesEntry.getLastChange() / 1e6) - ret.getLatencyMillis() / 1e3); - - // Return result. - return ret; - } - - /** - * Returns whether the camera is in driver mode. - * - * @return Whether the camera is in driver mode. - */ - public boolean getDriverMode() { - return driverModeSubscriber.get(); - } - - /** - * Toggles driver mode. - * - * @param driverMode Whether to set driver mode. - */ - public void setDriverMode(boolean driverMode) { - driverModePublisher.set(driverMode); - } - - /** - * Request the camera to save a new image file from the input camera stream with overlays. Images - * take up space in the filesystem of the PhotonCamera. Calling it frequently will fill up disk - * space and eventually cause the system to stop working. Clear out images in - * /opt/photonvision/photonvision_config/imgSaves frequently to prevent issues. - */ - public void takeInputSnapshot() { - inputSaveImgEntry.set(inputSaveImgEntry.get() + 1); - } - - /** - * Request the camera to save a new image file from the output stream with overlays. 
Images take - * up space in the filesystem of the PhotonCamera. Calling it frequently will fill up disk space - * and eventually cause the system to stop working. Clear out images in - * /opt/photonvision/photonvision_config/imgSaves frequently to prevent issues. - */ - public void takeOutputSnapshot() { - outputSaveImgEntry.set(outputSaveImgEntry.get() + 1); + } + + /** + * Sets the LED mode. + * + * @param led The mode to set to. + */ + public void setLED(VisionLEDMode led) { + ledModeRequest.set(led.value); + } + + /** + * Returns whether the latest target result has targets. + * + *

This method is deprecated; {@link PhotonPipelineResult#hasTargets()} should be used instead. + * + * @deprecated This method should be replaced with {@link PhotonPipelineResult#hasTargets()} + * @return Whether the latest target result has targets. + */ + @Deprecated + public boolean hasTargets() { + return getLatestResult().hasTargets(); + } + + /** + * Returns the name of the camera. This will return the same value that was given to the + * constructor as cameraName. + * + * @return The name of the camera. + */ + public String getName() { + return name; + } + + /** + * Returns whether the camera is connected and actively returning new data. Connection status is + * debounced. + * + * @return True if the camera is actively sending frame data, false otherwise. + */ + public boolean isConnected() { + var curHeartbeat = heartbeatEntry.get(); + var now = Timer.getFPGATimestamp(); + + if (curHeartbeat != prevHeartbeatValue) { + // New heartbeat value from the coprocessor + prevHeartbeatChangeTime = now; + prevHeartbeatValue = curHeartbeat; } - /** - * Returns the active pipeline index. - * - * @return The active pipeline index. - */ - public int getPipelineIndex() { - return (int) pipelineIndexState.get(0); + return (now - prevHeartbeatChangeTime) < HEARBEAT_DEBOUNCE_SEC; + } + + /** + * Set the Apriltag Field Layout used by all PhotonVision coprocessors that are (or might later) + * connect to this robot. The topic is marked as persistant, so even if you only call this once + * ever, it will be saved on the RoboRIO and pushed out to all NT clients when code reboots. + * PhotonVision will also store a copy of this layout locally on the coprocessor, but subscribes + * to this topic and the local copy will be updated when this function is called. + * + * @param layout The layout to use for *all* PhotonVision cameras + * @return Success of serializing the JSON. This does *not* mean that all PhotonVision clients + * have updated their internal layouts. 
+ */ + public boolean setApriltagFieldLayout(AprilTagFieldLayout layout) { + try { + var layout_json = new ObjectMapper().writeValueAsString(layout); + atflPublisher.set(layout_json); + return true; + } catch (JsonProcessingException e) { + MathSharedStore.reportError("Error setting ATFL in " + this.getName(), e.getStackTrace()); } - - /** - * Allows the user to select the active pipeline index. - * - * @param index The active pipeline index. - */ - public void setPipelineIndex(int index) { - pipelineIndexRequest.set(index); - } - - /** - * Returns the current LED mode. - * - * @return The current LED mode. - */ - public VisionLEDMode getLEDMode() { - int value = (int) ledModeState.get(-1); - switch (value) { - case 0: - return VisionLEDMode.kOff; - case 1: - return VisionLEDMode.kOn; - case 2: - return VisionLEDMode.kBlink; - case -1: - default: - return VisionLEDMode.kDefault; - } + return false; + } + + public Optional> getCameraMatrix() { + var cameraMatrix = cameraIntrinsicsSubscriber.get(); + if (cameraMatrix != null && cameraMatrix.length == 9) { + return Optional.of(new MatBuilder<>(Nat.N3(), Nat.N3()).fill(cameraMatrix)); + } else return Optional.empty(); + } + + public Optional> getDistCoeffs() { + var distCoeffs = cameraDistortionSubscriber.get(); + if (distCoeffs != null && distCoeffs.length == 5) { + return Optional.of(new MatBuilder<>(Nat.N5(), Nat.N1()).fill(distCoeffs)); + } else return Optional.empty(); + } + + /** + * Gets the NetworkTable representing this camera's subtable. You probably don't ever need to call + * this. + */ + public final NetworkTable getCameraTable() { + return cameraTable; + } + + private void verifyVersion() { + if (!VERSION_CHECK_ENABLED) return; + + if ((Timer.getFPGATimestamp() - lastVersionCheckTime) < VERSION_CHECK_INTERVAL) return; + lastVersionCheckTime = Timer.getFPGATimestamp(); + + // Heartbeat entry is assumed to always be present. 
If it's not present, we + // assume that a camera with that name was never connected in the first place. + if (!heartbeatEntry.exists()) { + Set cameraNames = cameraTable.getInstance().getTable(kTableName).getSubTables(); + if (cameraNames.isEmpty()) { + DriverStation.reportError( + "Could not find any PhotonVision coprocessors on NetworkTables. Double check that PhotonVision is running, and that your camera is connected!", + false); + } else { + DriverStation.reportError( + "PhotonVision coprocessor at path " + + path + + " not found on NetworkTables. Double check that your camera names match!", + true); + DriverStation.reportError( + "Found the following PhotonVision cameras on NetworkTables:\n" + + String.join("\n", cameraNames), + false); + } } - - /** - * Sets the LED mode. - * - * @param led The mode to set to. - */ - public void setLED(VisionLEDMode led) { - ledModeRequest.set(led.value); - } - - /** - * Returns whether the latest target result has targets. - * - *

This method is deprecated; {@link PhotonPipelineResult#hasTargets()} should be used instead. - * - * @deprecated This method should be replaced with {@link PhotonPipelineResult#hasTargets()} - * @return Whether the latest target result has targets. - */ - @Deprecated - public boolean hasTargets() { - return getLatestResult().hasTargets(); - } - - /** - * Returns the name of the camera. This will return the same value that was given to the - * constructor as cameraName. - * - * @return The name of the camera. - */ - public String getName() { - return name; - } - - /** - * Returns whether the camera is connected and actively returning new data. Connection status is - * debounced. - * - * @return True if the camera is actively sending frame data, false otherwise. - */ - public boolean isConnected() { - var curHeartbeat = heartbeatEntry.get(); - var now = Timer.getFPGATimestamp(); - - if (curHeartbeat != prevHeartbeatValue) { - // New heartbeat value from the coprocessor - prevHeartbeatChangeTime = now; - prevHeartbeatValue = curHeartbeat; - } - - return (now - prevHeartbeatChangeTime) < HEARBEAT_DEBOUNCE_SEC; - } - - /** - * Set the Apriltag Field Layout used by all PhotonVision coprocessors that are (or might later) - * connect to this robot. The topic is marked as persistant, so even if you only call this once - * ever, it will be saved on the RoboRIO and pushed out to all NT clients when code reboots. - * PhotonVision will also store a copy of this layout locally on the coprocessor, but subscribes - * to this topic and the local copy will be updated when this function is called. - * - * @param layout The layout to use for *all* PhotonVision cameras - * @return Success of serializing the JSON. This does *not* mean that all PhotonVision clients - * have updated their internal layouts. 
- */ - public boolean setApriltagFieldLayout(AprilTagFieldLayout layout) { - try { - var layout_json = new ObjectMapper().writeValueAsString(layout); - atflPublisher.set(layout_json); - return true; - } catch (JsonProcessingException e) { - MathSharedStore.reportError("Error setting ATFL in " + this.getName(), e.getStackTrace()); - } - return false; - } - - public Optional> getCameraMatrix() { - var cameraMatrix = cameraIntrinsicsSubscriber.get(); - if (cameraMatrix != null && cameraMatrix.length == 9) { - return Optional.of(new MatBuilder<>(Nat.N3(), Nat.N3()).fill(cameraMatrix)); - } else return Optional.empty(); - } - - public Optional> getDistCoeffs() { - var distCoeffs = cameraDistortionSubscriber.get(); - if (distCoeffs != null && distCoeffs.length == 5) { - return Optional.of(new MatBuilder<>(Nat.N5(), Nat.N1()).fill(distCoeffs)); - } else return Optional.empty(); - } - - /** - * Gets the NetworkTable representing this camera's subtable. You probably don't ever need to call - * this. - */ - public final NetworkTable getCameraTable() { - return cameraTable; + // Check for connection status. Warn if disconnected. + else if (!isConnected()) { + DriverStation.reportWarning( + "PhotonVision coprocessor at path " + path + " is not sending new data.", true); } - private void verifyVersion() { - if (!VERSION_CHECK_ENABLED) return; - - if ((Timer.getFPGATimestamp() - lastVersionCheckTime) < VERSION_CHECK_INTERVAL) return; - lastVersionCheckTime = Timer.getFPGATimestamp(); - - // Heartbeat entry is assumed to always be present. If it's not present, we - // assume that a camera with that name was never connected in the first place. - if (!heartbeatEntry.exists()) { - Set cameraNames = cameraTable.getInstance().getTable(kTableName).getSubTables(); - if (cameraNames.isEmpty()) { - DriverStation.reportError( - "Could not find any PhotonVision coprocessors on NetworkTables. 
Double check that PhotonVision is running, and that your camera is connected!", - false); - } else { - DriverStation.reportError( - "PhotonVision coprocessor at path " - + path - + " not found on NetworkTables. Double check that your camera names match!", - true); - DriverStation.reportError( - "Found the following PhotonVision cameras on NetworkTables:\n" - + String.join("\n", cameraNames), - false); - } - } - // Check for connection status. Warn if disconnected. - else if (!isConnected()) { - DriverStation.reportWarning( - "PhotonVision coprocessor at path " + path + " is not sending new data.", true); - } - - // Check for version. Warn if the versions aren't aligned. - String versionString = versionEntry.get(""); - if (!versionString.equals("") && !PhotonVersion.versionMatches(versionString)) { - // Error on a verified version mismatch - // But stay silent otherwise - DriverStation.reportWarning( - "Photon version " - + PhotonVersion.versionString - + " does not match coprocessor version " - + versionString - + "!", - true); - } + // Check for version. Warn if the versions aren't aligned. + String versionString = versionEntry.get(""); + if (!versionString.equals("") && !PhotonVersion.versionMatches(versionString)) { + // Error on a verified version mismatch + // But stay silent otherwise + DriverStation.reportWarning( + "Photon version " + + PhotonVersion.versionString + + " does not match coprocessor version " + + versionString + + "!", + true); } + } } diff --git a/photon-lib/src/main/java/org/photonvision/PhotonPoseEstimator.java b/photon-lib/src/main/java/org/photonvision/PhotonPoseEstimator.java index ed29a9c265..e5c1388eab 100644 --- a/photon-lib/src/main/java/org/photonvision/PhotonPoseEstimator.java +++ b/photon-lib/src/main/java/org/photonvision/PhotonPoseEstimator.java @@ -51,672 +51,672 @@ * below. Example usage can be found in our apriltagExample example project. 
*/ public class PhotonPoseEstimator { - /** Position estimation strategies that can be used by the {@link PhotonPoseEstimator} class. */ - public enum PoseStrategy { - /** Choose the Pose with the lowest ambiguity. */ - LOWEST_AMBIGUITY, - - /** Choose the Pose which is closest to the camera height. */ - CLOSEST_TO_CAMERA_HEIGHT, - - /** Choose the Pose which is closest to a set Reference position. */ - CLOSEST_TO_REFERENCE_POSE, - - /** Choose the Pose which is closest to the last pose calculated */ - CLOSEST_TO_LAST_POSE, - - /** Return the average of the best target poses using ambiguity as weight. */ - AVERAGE_BEST_TARGETS, - - /** - * Use all visible tags to compute a single pose estimate on coprocessor. This option needs to - * be enabled on the PhotonVision web UI as well. - */ - MULTI_TAG_PNP_ON_COPROCESSOR, - - /** - * Use all visible tags to compute a single pose estimate. This runs on the RoboRIO, and can - * take a lot of time. - */ - MULTI_TAG_PNP_ON_RIO - } + /** Position estimation strategies that can be used by the {@link PhotonPoseEstimator} class. */ + public enum PoseStrategy { + /** Choose the Pose with the lowest ambiguity. */ + LOWEST_AMBIGUITY, - private AprilTagFieldLayout fieldTags; - private PoseStrategy primaryStrategy; - private PoseStrategy multiTagFallbackStrategy = PoseStrategy.LOWEST_AMBIGUITY; - private final PhotonCamera camera; - private Transform3d robotToCamera; + /** Choose the Pose which is closest to the camera height. */ + CLOSEST_TO_CAMERA_HEIGHT, - private Pose3d lastPose; - private Pose3d referencePose; - protected double poseCacheTimestampSeconds = -1; - private final Set reportedErrors = new HashSet<>(); + /** Choose the Pose which is closest to a set Reference position. */ + CLOSEST_TO_REFERENCE_POSE, - /** - * Create a new PhotonPoseEstimator. - * - * @param fieldTags A WPILib {@link AprilTagFieldLayout} linking AprilTag IDs to Pose3d objects - * with respect to the FIRST field using the Field - * Coordinate System. 
- * @param strategy The strategy it should use to determine the best pose. - * @param camera PhotonCamera - * @param robotToCamera Transform3d from the center of the robot to the camera mount position (ie, - * robot ➔ camera) in the Robot - * Coordinate System. - */ - public PhotonPoseEstimator( - AprilTagFieldLayout fieldTags, - PoseStrategy strategy, - PhotonCamera camera, - Transform3d robotToCamera) { - this.fieldTags = fieldTags; - this.primaryStrategy = strategy; - this.camera = camera; - this.robotToCamera = robotToCamera; - } + /** Choose the Pose which is closest to the last pose calculated */ + CLOSEST_TO_LAST_POSE, - public PhotonPoseEstimator( - AprilTagFieldLayout fieldTags, PoseStrategy strategy, Transform3d robotToCamera) { - this.fieldTags = fieldTags; - this.primaryStrategy = strategy; - this.camera = null; - this.robotToCamera = robotToCamera; - } - - /** Invalidates the pose cache. */ - private void invalidatePoseCache() { - poseCacheTimestampSeconds = -1; - } - - private void checkUpdate(Object oldObj, Object newObj) { - if (oldObj != newObj && oldObj != null && !oldObj.equals(newObj)) { - invalidatePoseCache(); - } - } + /** Return the average of the best target poses using ambiguity as weight. */ + AVERAGE_BEST_TARGETS, /** - * Get the AprilTagFieldLayout being used by the PositionEstimator. - * - * @return the AprilTagFieldLayout + * Use all visible tags to compute a single pose estimate on coprocessor. This option needs to + * be enabled on the PhotonVision web UI as well. */ - public AprilTagFieldLayout getFieldTags() { - return fieldTags; - } + MULTI_TAG_PNP_ON_COPROCESSOR, /** - * Set the AprilTagFieldLayout being used by the PositionEstimator. - * - * @param fieldTags the AprilTagFieldLayout + * Use all visible tags to compute a single pose estimate. This runs on the RoboRIO, and can + * take a lot of time. 
*/ - public void setFieldTags(AprilTagFieldLayout fieldTags) { - checkUpdate(this.fieldTags, fieldTags); - this.fieldTags = fieldTags; + MULTI_TAG_PNP_ON_RIO + } + + private AprilTagFieldLayout fieldTags; + private PoseStrategy primaryStrategy; + private PoseStrategy multiTagFallbackStrategy = PoseStrategy.LOWEST_AMBIGUITY; + private final PhotonCamera camera; + private Transform3d robotToCamera; + + private Pose3d lastPose; + private Pose3d referencePose; + protected double poseCacheTimestampSeconds = -1; + private final Set reportedErrors = new HashSet<>(); + + /** + * Create a new PhotonPoseEstimator. + * + * @param fieldTags A WPILib {@link AprilTagFieldLayout} linking AprilTag IDs to Pose3d objects + * with respect to the FIRST field using the Field + * Coordinate System. + * @param strategy The strategy it should use to determine the best pose. + * @param camera PhotonCamera + * @param robotToCamera Transform3d from the center of the robot to the camera mount position (ie, + * robot ➔ camera) in the Robot + * Coordinate System. + */ + public PhotonPoseEstimator( + AprilTagFieldLayout fieldTags, + PoseStrategy strategy, + PhotonCamera camera, + Transform3d robotToCamera) { + this.fieldTags = fieldTags; + this.primaryStrategy = strategy; + this.camera = camera; + this.robotToCamera = robotToCamera; + } + + public PhotonPoseEstimator( + AprilTagFieldLayout fieldTags, PoseStrategy strategy, Transform3d robotToCamera) { + this.fieldTags = fieldTags; + this.primaryStrategy = strategy; + this.camera = null; + this.robotToCamera = robotToCamera; + } + + /** Invalidates the pose cache. */ + private void invalidatePoseCache() { + poseCacheTimestampSeconds = -1; + } + + private void checkUpdate(Object oldObj, Object newObj) { + if (oldObj != newObj && oldObj != null && !oldObj.equals(newObj)) { + invalidatePoseCache(); } - - /** - * Get the Position Estimation Strategy being used by the Position Estimator. 
- * - * @return the strategy - */ - public PoseStrategy getPrimaryStrategy() { - return primaryStrategy; + } + + /** + * Get the AprilTagFieldLayout being used by the PositionEstimator. + * + * @return the AprilTagFieldLayout + */ + public AprilTagFieldLayout getFieldTags() { + return fieldTags; + } + + /** + * Set the AprilTagFieldLayout being used by the PositionEstimator. + * + * @param fieldTags the AprilTagFieldLayout + */ + public void setFieldTags(AprilTagFieldLayout fieldTags) { + checkUpdate(this.fieldTags, fieldTags); + this.fieldTags = fieldTags; + } + + /** + * Get the Position Estimation Strategy being used by the Position Estimator. + * + * @return the strategy + */ + public PoseStrategy getPrimaryStrategy() { + return primaryStrategy; + } + + /** + * Set the Position Estimation Strategy used by the Position Estimator. + * + * @param strategy the strategy to set + */ + public void setPrimaryStrategy(PoseStrategy strategy) { + checkUpdate(this.primaryStrategy, strategy); + this.primaryStrategy = strategy; + } + + /** + * Set the Position Estimation Strategy used in multi-tag mode when only one tag can be seen. Must + * NOT be MULTI_TAG_PNP + * + * @param strategy the strategy to set + */ + public void setMultiTagFallbackStrategy(PoseStrategy strategy) { + checkUpdate(this.multiTagFallbackStrategy, strategy); + if (strategy == PoseStrategy.MULTI_TAG_PNP_ON_COPROCESSOR + || strategy == PoseStrategy.MULTI_TAG_PNP_ON_RIO) { + DriverStation.reportWarning( + "Fallback cannot be set to MULTI_TAG_PNP! Setting to lowest ambiguity", false); + strategy = PoseStrategy.LOWEST_AMBIGUITY; } - - /** - * Set the Position Estimation Strategy used by the Position Estimator. 
- * - * @param strategy the strategy to set - */ - public void setPrimaryStrategy(PoseStrategy strategy) { - checkUpdate(this.primaryStrategy, strategy); - this.primaryStrategy = strategy; + this.multiTagFallbackStrategy = strategy; + } + + /** + * Return the reference position that is being used by the estimator. + * + * @return the referencePose + */ + public Pose3d getReferencePose() { + return referencePose; + } + + /** + * Update the stored reference pose for use when using the CLOSEST_TO_REFERENCE_POSE + * strategy. + * + * @param referencePose the referencePose to set + */ + public void setReferencePose(Pose3d referencePose) { + checkUpdate(this.referencePose, referencePose); + this.referencePose = referencePose; + } + + /** + * Update the stored reference pose for use when using the CLOSEST_TO_REFERENCE_POSE + * strategy. + * + * @param referencePose the referencePose to set + */ + public void setReferencePose(Pose2d referencePose) { + setReferencePose(new Pose3d(referencePose)); + } + + /** + * Update the stored last pose. Useful for setting the initial estimate when using the + * CLOSEST_TO_LAST_POSE strategy. + * + * @param lastPose the lastPose to set + */ + public void setLastPose(Pose3d lastPose) { + this.lastPose = lastPose; + } + + /** + * Update the stored last pose. Useful for setting the initial estimate when using the + * CLOSEST_TO_LAST_POSE strategy. + * + * @param lastPose the lastPose to set + */ + public void setLastPose(Pose2d lastPose) { + setLastPose(new Pose3d(lastPose)); + } + + /** + * @return The current transform from the center of the robot to the camera mount position + */ + public Transform3d getRobotToCameraTransform() { + return robotToCamera; + } + + /** + * Useful for pan and tilt mechanisms and such. 
+ * + * @param robotToCamera The current transform from the center of the robot to the camera mount + * position + */ + public void setRobotToCameraTransform(Transform3d robotToCamera) { + this.robotToCamera = robotToCamera; + } + + /** + * Poll data from the configured cameras and update the estimated position of the robot. Returns + * empty if: + * + *

    + *
  • New data has not been received since the last call to {@code update()}. + *
  • No targets were found from the camera + *
  • There is no camera set + *
+ * + * @return an {@link EstimatedRobotPose} with an estimated pose, timestamp, and targets used to + * create the estimate. + */ + public Optional update() { + if (camera == null) { + DriverStation.reportError("[PhotonPoseEstimator] Missing camera!", false); + return Optional.empty(); } - /** - * Set the Position Estimation Strategy used in multi-tag mode when only one tag can be seen. Must - * NOT be MULTI_TAG_PNP - * - * @param strategy the strategy to set - */ - public void setMultiTagFallbackStrategy(PoseStrategy strategy) { - checkUpdate(this.multiTagFallbackStrategy, strategy); - if (strategy == PoseStrategy.MULTI_TAG_PNP_ON_COPROCESSOR - || strategy == PoseStrategy.MULTI_TAG_PNP_ON_RIO) { - DriverStation.reportWarning( - "Fallback cannot be set to MULTI_TAG_PNP! Setting to lowest ambiguity", false); - strategy = PoseStrategy.LOWEST_AMBIGUITY; - } - this.multiTagFallbackStrategy = strategy; + PhotonPipelineResult cameraResult = camera.getLatestResult(); + + return update(cameraResult, camera.getCameraMatrix(), camera.getDistCoeffs()); + } + + /** + * Updates the estimated position of the robot. Returns empty if: + * + *
    + *
  • The timestamp of the provided pipeline result is the same as in the previous call to + * {@code update()}. + *
  • No targets were found in the pipeline results. + *
+ * + * @param cameraResult The latest pipeline result from the camera + * @return an {@link EstimatedRobotPose} with an estimated pose, timestamp, and targets used to + * create the estimate. + */ + public Optional update(PhotonPipelineResult cameraResult) { + if (camera == null) return update(cameraResult, Optional.empty(), Optional.empty()); + return update(cameraResult, camera.getCameraMatrix(), camera.getDistCoeffs()); + } + + /** + * Updates the estimated position of the robot. Returns empty if: + * + *
    + *
  • The timestamp of the provided pipeline result is the same as in the previous call to + * {@code update()}. + *
  • No targets were found in the pipeline results. + *
+ * + * @param cameraMatrix Camera calibration data that can be used in the case of no assigned + * PhotonCamera. + * @param distCoeffs Camera calibration data that can be used in the case of no assigned + * PhotonCamera + * @return an {@link EstimatedRobotPose} with an estimated pose, timestamp, and targets used to + * create the estimate. + */ + public Optional update( + PhotonPipelineResult cameraResult, + Optional> cameraMatrix, + Optional> distCoeffs) { + // Time in the past -- give up, since the following if expects times > 0 + if (cameraResult.getTimestampSeconds() < 0) { + return Optional.empty(); } - /** - * Return the reference position that is being used by the estimator. - * - * @return the referencePose - */ - public Pose3d getReferencePose() { - return referencePose; + // If the pose cache timestamp was set, and the result is from the same + // timestamp, return an + // empty result + if (poseCacheTimestampSeconds > 0 + && Math.abs(poseCacheTimestampSeconds - cameraResult.getTimestampSeconds()) < 1e-6) { + return Optional.empty(); } - /** - * Update the stored reference pose for use when using the CLOSEST_TO_REFERENCE_POSE - * strategy. - * - * @param referencePose the referencePose to set - */ - public void setReferencePose(Pose3d referencePose) { - checkUpdate(this.referencePose, referencePose); - this.referencePose = referencePose; - } + // Remember the timestamp of the current result used + poseCacheTimestampSeconds = cameraResult.getTimestampSeconds(); - /** - * Update the stored reference pose for use when using the CLOSEST_TO_REFERENCE_POSE - * strategy. - * - * @param referencePose the referencePose to set - */ - public void setReferencePose(Pose2d referencePose) { - setReferencePose(new Pose3d(referencePose)); + // If no targets seen, trivial case -- return empty result + if (!cameraResult.hasTargets()) { + return Optional.empty(); } - /** - * Update the stored last pose. 
Useful for setting the initial estimate when using the - * CLOSEST_TO_LAST_POSE strategy. - * - * @param lastPose the lastPose to set - */ - public void setLastPose(Pose3d lastPose) { - this.lastPose = lastPose; + return update(cameraResult, cameraMatrix, distCoeffs, this.primaryStrategy); + } + + private Optional update( + PhotonPipelineResult cameraResult, + Optional> cameraMatrix, + Optional> distCoeffs, + PoseStrategy strat) { + Optional estimatedPose; + switch (strat) { + case LOWEST_AMBIGUITY: + estimatedPose = lowestAmbiguityStrategy(cameraResult); + break; + case CLOSEST_TO_CAMERA_HEIGHT: + estimatedPose = closestToCameraHeightStrategy(cameraResult); + break; + case CLOSEST_TO_REFERENCE_POSE: + estimatedPose = closestToReferencePoseStrategy(cameraResult, referencePose); + break; + case CLOSEST_TO_LAST_POSE: + setReferencePose(lastPose); + estimatedPose = closestToReferencePoseStrategy(cameraResult, referencePose); + break; + case AVERAGE_BEST_TARGETS: + estimatedPose = averageBestTargetsStrategy(cameraResult); + break; + case MULTI_TAG_PNP_ON_RIO: + estimatedPose = multiTagOnRioStrategy(cameraResult, cameraMatrix, distCoeffs); + break; + case MULTI_TAG_PNP_ON_COPROCESSOR: + estimatedPose = multiTagOnCoprocStrategy(cameraResult, cameraMatrix, distCoeffs); + break; + default: + DriverStation.reportError( + "[PhotonPoseEstimator] Unknown Position Estimation Strategy!", false); + return Optional.empty(); } - /** - * Update the stored last pose. Useful for setting the initial estimate when using the - * CLOSEST_TO_LAST_POSE strategy. 
- * - * @param lastPose the lastPose to set - */ - public void setLastPose(Pose2d lastPose) { - setLastPose(new Pose3d(lastPose)); + if (estimatedPose.isEmpty()) { + lastPose = null; } - /** - * @return The current transform from the center of the robot to the camera mount position - */ - public Transform3d getRobotToCameraTransform() { - return robotToCamera; + return estimatedPose; + } + + private Optional multiTagOnCoprocStrategy( + PhotonPipelineResult result, + Optional> cameraMatrixOpt, + Optional> distCoeffsOpt) { + if (result.getMultiTagResult().estimatedPose.isPresent) { + var best_tf = result.getMultiTagResult().estimatedPose.best; + var best = + new Pose3d() + .plus(best_tf) // field-to-camera + .plus(robotToCamera.inverse()); // field-to-robot + return Optional.of( + new EstimatedRobotPose( + best, + result.getTimestampSeconds(), + result.getTargets(), + PoseStrategy.MULTI_TAG_PNP_ON_COPROCESSOR)); + } else { + return update(result, cameraMatrixOpt, distCoeffsOpt, this.multiTagFallbackStrategy); } - - /** - * Useful for pan and tilt mechanisms and such. - * - * @param robotToCamera The current transform from the center of the robot to the camera mount - * position - */ - public void setRobotToCameraTransform(Transform3d robotToCamera) { - this.robotToCamera = robotToCamera; + } + + private Optional multiTagOnRioStrategy( + PhotonPipelineResult result, + Optional> cameraMatrixOpt, + Optional> distCoeffsOpt) { + boolean hasCalibData = cameraMatrixOpt.isPresent() && distCoeffsOpt.isPresent(); + // cannot run multitagPNP, use fallback strategy + if (!hasCalibData || result.getTargets().size() < 2) { + return update(result, cameraMatrixOpt, distCoeffsOpt, this.multiTagFallbackStrategy); } - /** - * Poll data from the configured cameras and update the estimated position of the robot. Returns - * empty if: - * - *
    - *
  • New data has not been received since the last call to {@code update()}. - *
  • No targets were found from the camera - *
  • There is no camera set - *
- * - * @return an {@link EstimatedRobotPose} with an estimated pose, timestamp, and targets used to - * create the estimate. - */ - public Optional update() { - if (camera == null) { - DriverStation.reportError("[PhotonPoseEstimator] Missing camera!", false); - return Optional.empty(); - } + var pnpResults = + VisionEstimation.estimateCamPosePNP( + cameraMatrixOpt.get(), distCoeffsOpt.get(), result.getTargets(), fieldTags); + // try fallback strategy if solvePNP fails for some reason + if (!pnpResults.isPresent) + return update(result, cameraMatrixOpt, distCoeffsOpt, this.multiTagFallbackStrategy); + var best = + new Pose3d() + .plus(pnpResults.best) // field-to-camera + .plus(robotToCamera.inverse()); // field-to-robot + + return Optional.of( + new EstimatedRobotPose( + best, + result.getTimestampSeconds(), + result.getTargets(), + PoseStrategy.MULTI_TAG_PNP_ON_RIO)); + } + + /** + * Return the estimated position of the robot with the lowest position ambiguity from a List of + * pipeline results. + * + * @param result pipeline result + * @return the estimated position of the robot in the FCS and the estimated timestamp of this + * estimation. + */ + private Optional lowestAmbiguityStrategy(PhotonPipelineResult result) { + PhotonTrackedTarget lowestAmbiguityTarget = null; + + double lowestAmbiguityScore = 10; + + for (PhotonTrackedTarget target : result.targets) { + double targetPoseAmbiguity = target.getPoseAmbiguity(); + // Make sure the target is a Fiducial target. + if (targetPoseAmbiguity != -1 && targetPoseAmbiguity < lowestAmbiguityScore) { + lowestAmbiguityScore = targetPoseAmbiguity; + lowestAmbiguityTarget = target; + } + } - PhotonPipelineResult cameraResult = camera.getLatestResult(); + // Although there are confirmed to be targets, none of them may be fiducial + // targets. 
+ if (lowestAmbiguityTarget == null) return Optional.empty(); - return update(cameraResult, camera.getCameraMatrix(), camera.getDistCoeffs()); - } + int targetFiducialId = lowestAmbiguityTarget.getFiducialId(); - /** - * Updates the estimated position of the robot. Returns empty if: - * - *
    - *
  • The timestamp of the provided pipeline result is the same as in the previous call to - * {@code update()}. - *
  • No targets were found in the pipeline results. - *
- * - * @param cameraResult The latest pipeline result from the camera - * @return an {@link EstimatedRobotPose} with an estimated pose, timestamp, and targets used to - * create the estimate. - */ - public Optional update(PhotonPipelineResult cameraResult) { - if (camera == null) return update(cameraResult, Optional.empty(), Optional.empty()); - return update(cameraResult, camera.getCameraMatrix(), camera.getDistCoeffs()); - } + Optional targetPosition = fieldTags.getTagPose(targetFiducialId); - /** - * Updates the estimated position of the robot. Returns empty if: - * - *
    - *
  • The timestamp of the provided pipeline result is the same as in the previous call to - * {@code update()}. - *
  • No targets were found in the pipeline results. - *
- * - * @param cameraMatrix Camera calibration data that can be used in the case of no assigned - * PhotonCamera. - * @param distCoeffs Camera calibration data that can be used in the case of no assigned - * PhotonCamera - * @return an {@link EstimatedRobotPose} with an estimated pose, timestamp, and targets used to - * create the estimate. - */ - public Optional update( - PhotonPipelineResult cameraResult, - Optional> cameraMatrix, - Optional> distCoeffs) { - // Time in the past -- give up, since the following if expects times > 0 - if (cameraResult.getTimestampSeconds() < 0) { - return Optional.empty(); - } - - // If the pose cache timestamp was set, and the result is from the same - // timestamp, return an - // empty result - if (poseCacheTimestampSeconds > 0 - && Math.abs(poseCacheTimestampSeconds - cameraResult.getTimestampSeconds()) < 1e-6) { - return Optional.empty(); - } - - // Remember the timestamp of the current result used - poseCacheTimestampSeconds = cameraResult.getTimestampSeconds(); - - // If no targets seen, trivial case -- return empty result - if (!cameraResult.hasTargets()) { - return Optional.empty(); - } - - return update(cameraResult, cameraMatrix, distCoeffs, this.primaryStrategy); + if (targetPosition.isEmpty()) { + reportFiducialPoseError(targetFiducialId); + return Optional.empty(); } - private Optional update( - PhotonPipelineResult cameraResult, - Optional> cameraMatrix, - Optional> distCoeffs, - PoseStrategy strat) { - Optional estimatedPose; - switch (strat) { - case LOWEST_AMBIGUITY: - estimatedPose = lowestAmbiguityStrategy(cameraResult); - break; - case CLOSEST_TO_CAMERA_HEIGHT: - estimatedPose = closestToCameraHeightStrategy(cameraResult); - break; - case CLOSEST_TO_REFERENCE_POSE: - estimatedPose = closestToReferencePoseStrategy(cameraResult, referencePose); - break; - case CLOSEST_TO_LAST_POSE: - setReferencePose(lastPose); - estimatedPose = closestToReferencePoseStrategy(cameraResult, referencePose); - break; - case 
AVERAGE_BEST_TARGETS: - estimatedPose = averageBestTargetsStrategy(cameraResult); - break; - case MULTI_TAG_PNP_ON_RIO: - estimatedPose = multiTagOnRioStrategy(cameraResult, cameraMatrix, distCoeffs); - break; - case MULTI_TAG_PNP_ON_COPROCESSOR: - estimatedPose = multiTagOnCoprocStrategy(cameraResult, cameraMatrix, distCoeffs); - break; - default: - DriverStation.reportError( - "[PhotonPoseEstimator] Unknown Position Estimation Strategy!", false); - return Optional.empty(); - } - - if (estimatedPose.isEmpty()) { - lastPose = null; - } - - return estimatedPose; + return Optional.of( + new EstimatedRobotPose( + targetPosition + .get() + .transformBy(lowestAmbiguityTarget.getBestCameraToTarget().inverse()) + .transformBy(robotToCamera.inverse()), + result.getTimestampSeconds(), + result.getTargets(), + PoseStrategy.LOWEST_AMBIGUITY)); + } + + /** + * Return the estimated position of the robot using the target with the lowest delta height + * difference between the estimated and actual height of the camera. + * + * @param result pipeline result + * @return the estimated position of the robot in the FCS and the estimated timestamp of this + * estimation. + */ + private Optional closestToCameraHeightStrategy(PhotonPipelineResult result) { + double smallestHeightDifference = 10e9; + EstimatedRobotPose closestHeightTarget = null; + + for (PhotonTrackedTarget target : result.targets) { + int targetFiducialId = target.getFiducialId(); + + // Don't report errors for non-fiducial targets. This could also be resolved by + // adding -1 to + // the initial HashSet. 
+ if (targetFiducialId == -1) continue; + + Optional targetPosition = fieldTags.getTagPose(target.getFiducialId()); + + if (targetPosition.isEmpty()) { + reportFiducialPoseError(target.getFiducialId()); + continue; + } + + double alternateTransformDelta = + Math.abs( + robotToCamera.getZ() + - targetPosition + .get() + .transformBy(target.getAlternateCameraToTarget().inverse()) + .getZ()); + double bestTransformDelta = + Math.abs( + robotToCamera.getZ() + - targetPosition + .get() + .transformBy(target.getBestCameraToTarget().inverse()) + .getZ()); + + if (alternateTransformDelta < smallestHeightDifference) { + smallestHeightDifference = alternateTransformDelta; + closestHeightTarget = + new EstimatedRobotPose( + targetPosition + .get() + .transformBy(target.getAlternateCameraToTarget().inverse()) + .transformBy(robotToCamera.inverse()), + result.getTimestampSeconds(), + result.getTargets(), + PoseStrategy.CLOSEST_TO_CAMERA_HEIGHT); + } + + if (bestTransformDelta < smallestHeightDifference) { + smallestHeightDifference = bestTransformDelta; + closestHeightTarget = + new EstimatedRobotPose( + targetPosition + .get() + .transformBy(target.getBestCameraToTarget().inverse()) + .transformBy(robotToCamera.inverse()), + result.getTimestampSeconds(), + result.getTargets(), + PoseStrategy.CLOSEST_TO_CAMERA_HEIGHT); + } } - private Optional multiTagOnCoprocStrategy( - PhotonPipelineResult result, - Optional> cameraMatrixOpt, - Optional> distCoeffsOpt) { - if (result.getMultiTagResult().estimatedPose.isPresent) { - var best_tf = result.getMultiTagResult().estimatedPose.best; - var best = - new Pose3d() - .plus(best_tf) // field-to-camera - .plus(robotToCamera.inverse()); // field-to-robot - return Optional.of( - new EstimatedRobotPose( - best, - result.getTimestampSeconds(), - result.getTargets(), - PoseStrategy.MULTI_TAG_PNP_ON_COPROCESSOR)); - } else { - return update(result, cameraMatrixOpt, distCoeffsOpt, this.multiTagFallbackStrategy); - } + // Need to null check here in 
case none of the provided targets are fiducial. + return Optional.ofNullable(closestHeightTarget); + } + + /** + * Return the estimated position of the robot using the target with the lowest delta in the vector + * magnitude between it and the reference pose. + * + * @param result pipeline result + * @param referencePose reference pose to check vector magnitude difference against. + * @return the estimated position of the robot in the FCS and the estimated timestamp of this + * estimation. + */ + private Optional closestToReferencePoseStrategy( + PhotonPipelineResult result, Pose3d referencePose) { + if (referencePose == null) { + DriverStation.reportError( + "[PhotonPoseEstimator] Tried to use reference pose strategy without setting the reference!", + false); + return Optional.empty(); } - private Optional multiTagOnRioStrategy( - PhotonPipelineResult result, - Optional> cameraMatrixOpt, - Optional> distCoeffsOpt) { - boolean hasCalibData = cameraMatrixOpt.isPresent() && distCoeffsOpt.isPresent(); - // cannot run multitagPNP, use fallback strategy - if (!hasCalibData || result.getTargets().size() < 2) { - return update(result, cameraMatrixOpt, distCoeffsOpt, this.multiTagFallbackStrategy); - } - - var pnpResults = - VisionEstimation.estimateCamPosePNP( - cameraMatrixOpt.get(), distCoeffsOpt.get(), result.getTargets(), fieldTags); - // try fallback strategy if solvePNP fails for some reason - if (!pnpResults.isPresent) - return update(result, cameraMatrixOpt, distCoeffsOpt, this.multiTagFallbackStrategy); - var best = - new Pose3d() - .plus(pnpResults.best) // field-to-camera - .plus(robotToCamera.inverse()); // field-to-robot - - return Optional.of( - new EstimatedRobotPose( - best, - result.getTimestampSeconds(), - result.getTargets(), - PoseStrategy.MULTI_TAG_PNP_ON_RIO)); + double smallestPoseDelta = 10e9; + EstimatedRobotPose lowestDeltaPose = null; + + for (PhotonTrackedTarget target : result.targets) { + int targetFiducialId = target.getFiducialId(); + + // 
Don't report errors for non-fiducial targets. This could also be resolved by + // adding -1 to + // the initial HashSet. + if (targetFiducialId == -1) continue; + + Optional targetPosition = fieldTags.getTagPose(target.getFiducialId()); + + if (targetPosition.isEmpty()) { + reportFiducialPoseError(targetFiducialId); + continue; + } + + Pose3d altTransformPosition = + targetPosition + .get() + .transformBy(target.getAlternateCameraToTarget().inverse()) + .transformBy(robotToCamera.inverse()); + Pose3d bestTransformPosition = + targetPosition + .get() + .transformBy(target.getBestCameraToTarget().inverse()) + .transformBy(robotToCamera.inverse()); + + double altDifference = Math.abs(calculateDifference(referencePose, altTransformPosition)); + double bestDifference = Math.abs(calculateDifference(referencePose, bestTransformPosition)); + + if (altDifference < smallestPoseDelta) { + smallestPoseDelta = altDifference; + lowestDeltaPose = + new EstimatedRobotPose( + altTransformPosition, + result.getTimestampSeconds(), + result.getTargets(), + PoseStrategy.CLOSEST_TO_REFERENCE_POSE); + } + if (bestDifference < smallestPoseDelta) { + smallestPoseDelta = bestDifference; + lowestDeltaPose = + new EstimatedRobotPose( + bestTransformPosition, + result.getTimestampSeconds(), + result.getTargets(), + PoseStrategy.CLOSEST_TO_REFERENCE_POSE); + } } - - /** - * Return the estimated position of the robot with the lowest position ambiguity from a List of - * pipeline results. - * - * @param result pipeline result - * @return the estimated position of the robot in the FCS and the estimated timestamp of this - * estimation. - */ - private Optional lowestAmbiguityStrategy(PhotonPipelineResult result) { - PhotonTrackedTarget lowestAmbiguityTarget = null; - - double lowestAmbiguityScore = 10; - - for (PhotonTrackedTarget target : result.targets) { - double targetPoseAmbiguity = target.getPoseAmbiguity(); - // Make sure the target is a Fiducial target. 
- if (targetPoseAmbiguity != -1 && targetPoseAmbiguity < lowestAmbiguityScore) { - lowestAmbiguityScore = targetPoseAmbiguity; - lowestAmbiguityTarget = target; - } - } - - // Although there are confirmed to be targets, none of them may be fiducial - // targets. - if (lowestAmbiguityTarget == null) return Optional.empty(); - - int targetFiducialId = lowestAmbiguityTarget.getFiducialId(); - - Optional targetPosition = fieldTags.getTagPose(targetFiducialId); - - if (targetPosition.isEmpty()) { - reportFiducialPoseError(targetFiducialId); - return Optional.empty(); - } - + return Optional.ofNullable(lowestDeltaPose); + } + + /** + * Return the average of the best target poses using ambiguity as weight. + * + * @param result pipeline result + * @return the estimated position of the robot in the FCS and the estimated timestamp of this + * estimation. + */ + private Optional averageBestTargetsStrategy(PhotonPipelineResult result) { + List> estimatedRobotPoses = new ArrayList<>(); + double totalAmbiguity = 0; + + for (PhotonTrackedTarget target : result.targets) { + int targetFiducialId = target.getFiducialId(); + + // Don't report errors for non-fiducial targets. This could also be resolved by + // adding -1 to + // the initial HashSet. 
+ if (targetFiducialId == -1) continue; + + Optional targetPosition = fieldTags.getTagPose(target.getFiducialId()); + + if (targetPosition.isEmpty()) { + reportFiducialPoseError(targetFiducialId); + continue; + } + + double targetPoseAmbiguity = target.getPoseAmbiguity(); + + // Pose ambiguity is 0, use that pose + if (targetPoseAmbiguity == 0) { return Optional.of( - new EstimatedRobotPose( - targetPosition - .get() - .transformBy(lowestAmbiguityTarget.getBestCameraToTarget().inverse()) - .transformBy(robotToCamera.inverse()), - result.getTimestampSeconds(), - result.getTargets(), - PoseStrategy.LOWEST_AMBIGUITY)); + new EstimatedRobotPose( + targetPosition + .get() + .transformBy(target.getBestCameraToTarget().inverse()) + .transformBy(robotToCamera.inverse()), + result.getTimestampSeconds(), + result.getTargets(), + PoseStrategy.AVERAGE_BEST_TARGETS)); + } + + totalAmbiguity += 1.0 / target.getPoseAmbiguity(); + + estimatedRobotPoses.add( + new Pair<>( + target, + targetPosition + .get() + .transformBy(target.getBestCameraToTarget().inverse()) + .transformBy(robotToCamera.inverse()))); } - /** - * Return the estimated position of the robot using the target with the lowest delta height - * difference between the estimated and actual height of the camera. - * - * @param result pipeline result - * @return the estimated position of the robot in the FCS and the estimated timestamp of this - * estimation. - */ - private Optional closestToCameraHeightStrategy(PhotonPipelineResult result) { - double smallestHeightDifference = 10e9; - EstimatedRobotPose closestHeightTarget = null; - - for (PhotonTrackedTarget target : result.targets) { - int targetFiducialId = target.getFiducialId(); - - // Don't report errors for non-fiducial targets. This could also be resolved by - // adding -1 to - // the initial HashSet. 
- if (targetFiducialId == -1) continue; - - Optional targetPosition = fieldTags.getTagPose(target.getFiducialId()); - - if (targetPosition.isEmpty()) { - reportFiducialPoseError(target.getFiducialId()); - continue; - } - - double alternateTransformDelta = - Math.abs( - robotToCamera.getZ() - - targetPosition - .get() - .transformBy(target.getAlternateCameraToTarget().inverse()) - .getZ()); - double bestTransformDelta = - Math.abs( - robotToCamera.getZ() - - targetPosition - .get() - .transformBy(target.getBestCameraToTarget().inverse()) - .getZ()); - - if (alternateTransformDelta < smallestHeightDifference) { - smallestHeightDifference = alternateTransformDelta; - closestHeightTarget = - new EstimatedRobotPose( - targetPosition - .get() - .transformBy(target.getAlternateCameraToTarget().inverse()) - .transformBy(robotToCamera.inverse()), - result.getTimestampSeconds(), - result.getTargets(), - PoseStrategy.CLOSEST_TO_CAMERA_HEIGHT); - } - - if (bestTransformDelta < smallestHeightDifference) { - smallestHeightDifference = bestTransformDelta; - closestHeightTarget = - new EstimatedRobotPose( - targetPosition - .get() - .transformBy(target.getBestCameraToTarget().inverse()) - .transformBy(robotToCamera.inverse()), - result.getTimestampSeconds(), - result.getTargets(), - PoseStrategy.CLOSEST_TO_CAMERA_HEIGHT); - } - } - - // Need to null check here in case none of the provided targets are fiducial. - return Optional.ofNullable(closestHeightTarget); - } + // Take the average - /** - * Return the estimated position of the robot using the target with the lowest delta in the vector - * magnitude between it and the reference pose. - * - * @param result pipeline result - * @param referencePose reference pose to check vector magnitude difference against. - * @return the estimated position of the robot in the FCS and the estimated timestamp of this - * estimation. 
- */ - private Optional closestToReferencePoseStrategy( - PhotonPipelineResult result, Pose3d referencePose) { - if (referencePose == null) { - DriverStation.reportError( - "[PhotonPoseEstimator] Tried to use reference pose strategy without setting the reference!", - false); - return Optional.empty(); - } - - double smallestPoseDelta = 10e9; - EstimatedRobotPose lowestDeltaPose = null; - - for (PhotonTrackedTarget target : result.targets) { - int targetFiducialId = target.getFiducialId(); - - // Don't report errors for non-fiducial targets. This could also be resolved by - // adding -1 to - // the initial HashSet. - if (targetFiducialId == -1) continue; - - Optional targetPosition = fieldTags.getTagPose(target.getFiducialId()); - - if (targetPosition.isEmpty()) { - reportFiducialPoseError(targetFiducialId); - continue; - } - - Pose3d altTransformPosition = - targetPosition - .get() - .transformBy(target.getAlternateCameraToTarget().inverse()) - .transformBy(robotToCamera.inverse()); - Pose3d bestTransformPosition = - targetPosition - .get() - .transformBy(target.getBestCameraToTarget().inverse()) - .transformBy(robotToCamera.inverse()); - - double altDifference = Math.abs(calculateDifference(referencePose, altTransformPosition)); - double bestDifference = Math.abs(calculateDifference(referencePose, bestTransformPosition)); - - if (altDifference < smallestPoseDelta) { - smallestPoseDelta = altDifference; - lowestDeltaPose = - new EstimatedRobotPose( - altTransformPosition, - result.getTimestampSeconds(), - result.getTargets(), - PoseStrategy.CLOSEST_TO_REFERENCE_POSE); - } - if (bestDifference < smallestPoseDelta) { - smallestPoseDelta = bestDifference; - lowestDeltaPose = - new EstimatedRobotPose( - bestTransformPosition, - result.getTimestampSeconds(), - result.getTargets(), - PoseStrategy.CLOSEST_TO_REFERENCE_POSE); - } - } - return Optional.ofNullable(lowestDeltaPose); - } - - /** - * Return the average of the best target poses using ambiguity as weight. 
- * - * @param result pipeline result - * @return the estimated position of the robot in the FCS and the estimated timestamp of this - * estimation. - */ - private Optional averageBestTargetsStrategy(PhotonPipelineResult result) { - List> estimatedRobotPoses = new ArrayList<>(); - double totalAmbiguity = 0; - - for (PhotonTrackedTarget target : result.targets) { - int targetFiducialId = target.getFiducialId(); - - // Don't report errors for non-fiducial targets. This could also be resolved by - // adding -1 to - // the initial HashSet. - if (targetFiducialId == -1) continue; - - Optional targetPosition = fieldTags.getTagPose(target.getFiducialId()); - - if (targetPosition.isEmpty()) { - reportFiducialPoseError(targetFiducialId); - continue; - } - - double targetPoseAmbiguity = target.getPoseAmbiguity(); - - // Pose ambiguity is 0, use that pose - if (targetPoseAmbiguity == 0) { - return Optional.of( - new EstimatedRobotPose( - targetPosition - .get() - .transformBy(target.getBestCameraToTarget().inverse()) - .transformBy(robotToCamera.inverse()), - result.getTimestampSeconds(), - result.getTargets(), - PoseStrategy.AVERAGE_BEST_TARGETS)); - } - - totalAmbiguity += 1.0 / target.getPoseAmbiguity(); - - estimatedRobotPoses.add( - new Pair<>( - target, - targetPosition - .get() - .transformBy(target.getBestCameraToTarget().inverse()) - .transformBy(robotToCamera.inverse()))); - } - - // Take the average - - Translation3d transform = new Translation3d(); - Rotation3d rotation = new Rotation3d(); - - if (estimatedRobotPoses.isEmpty()) return Optional.empty(); - - for (Pair pair : estimatedRobotPoses) { - // Total ambiguity is non-zero confirmed because if it was zero, that pose was - // returned. 
- double weight = (1.0 / pair.getFirst().getPoseAmbiguity()) / totalAmbiguity; - Pose3d estimatedPose = pair.getSecond(); - transform = transform.plus(estimatedPose.getTranslation().times(weight)); - rotation = rotation.plus(estimatedPose.getRotation().times(weight)); - } + Translation3d transform = new Translation3d(); + Rotation3d rotation = new Rotation3d(); - return Optional.of( - new EstimatedRobotPose( - new Pose3d(transform, rotation), - result.getTimestampSeconds(), - result.getTargets(), - PoseStrategy.AVERAGE_BEST_TARGETS)); - } + if (estimatedRobotPoses.isEmpty()) return Optional.empty(); - /** - * Difference is defined as the vector magnitude between the two poses - * - * @return The absolute "difference" (>=0) between two Pose3ds. - */ - private double calculateDifference(Pose3d x, Pose3d y) { - return x.getTranslation().getDistance(y.getTranslation()); + for (Pair pair : estimatedRobotPoses) { + // Total ambiguity is non-zero confirmed because if it was zero, that pose was + // returned. + double weight = (1.0 / pair.getFirst().getPoseAmbiguity()) / totalAmbiguity; + Pose3d estimatedPose = pair.getSecond(); + transform = transform.plus(estimatedPose.getTranslation().times(weight)); + rotation = rotation.plus(estimatedPose.getRotation().times(weight)); } - private void reportFiducialPoseError(int fiducialId) { - if (!reportedErrors.contains(fiducialId)) { - DriverStation.reportError( - "[PhotonPoseEstimator] Tried to get pose of unknown AprilTag: " + fiducialId, false); - reportedErrors.add(fiducialId); - } + return Optional.of( + new EstimatedRobotPose( + new Pose3d(transform, rotation), + result.getTimestampSeconds(), + result.getTargets(), + PoseStrategy.AVERAGE_BEST_TARGETS)); + } + + /** + * Difference is defined as the vector magnitude between the two poses + * + * @return The absolute "difference" (>=0) between two Pose3ds. 
+ */ + private double calculateDifference(Pose3d x, Pose3d y) { + return x.getTranslation().getDistance(y.getTranslation()); + } + + private void reportFiducialPoseError(int fiducialId) { + if (!reportedErrors.contains(fiducialId)) { + DriverStation.reportError( + "[PhotonPoseEstimator] Tried to get pose of unknown AprilTag: " + fiducialId, false); + reportedErrors.add(fiducialId); } + } } diff --git a/photon-lib/src/main/java/org/photonvision/PhotonTargetSortMode.java b/photon-lib/src/main/java/org/photonvision/PhotonTargetSortMode.java index bf22644645..0cd5cb984c 100644 --- a/photon-lib/src/main/java/org/photonvision/PhotonTargetSortMode.java +++ b/photon-lib/src/main/java/org/photonvision/PhotonTargetSortMode.java @@ -28,23 +28,23 @@ import org.photonvision.targeting.PhotonTrackedTarget; public enum PhotonTargetSortMode { - Smallest(Comparator.comparingDouble(PhotonTrackedTarget::getArea)), - Largest(Smallest.m_comparator.reversed()), - Highest(Comparator.comparingDouble(PhotonTrackedTarget::getPitch)), - Lowest(Highest.m_comparator.reversed()), - Rightmost(Comparator.comparingDouble(PhotonTrackedTarget::getYaw)), - Leftmost(Rightmost.m_comparator.reversed()), - Centermost( - Comparator.comparingDouble( - target -> (Math.pow(target.getPitch(), 2) + Math.pow(target.getYaw(), 2)))); + Smallest(Comparator.comparingDouble(PhotonTrackedTarget::getArea)), + Largest(Smallest.m_comparator.reversed()), + Highest(Comparator.comparingDouble(PhotonTrackedTarget::getPitch)), + Lowest(Highest.m_comparator.reversed()), + Rightmost(Comparator.comparingDouble(PhotonTrackedTarget::getYaw)), + Leftmost(Rightmost.m_comparator.reversed()), + Centermost( + Comparator.comparingDouble( + target -> (Math.pow(target.getPitch(), 2) + Math.pow(target.getYaw(), 2)))); - private final Comparator m_comparator; + private final Comparator m_comparator; - PhotonTargetSortMode(Comparator comparator) { - m_comparator = comparator; - } + PhotonTargetSortMode(Comparator comparator) { + m_comparator 
= comparator; + } - public Comparator getComparator() { - return m_comparator; - } + public Comparator getComparator() { + return m_comparator; + } } diff --git a/photon-lib/src/main/java/org/photonvision/PhotonUtils.java b/photon-lib/src/main/java/org/photonvision/PhotonUtils.java index 4d35c5ac29..0a29169645 100644 --- a/photon-lib/src/main/java/org/photonvision/PhotonUtils.java +++ b/photon-lib/src/main/java/org/photonvision/PhotonUtils.java @@ -27,183 +27,183 @@ import edu.wpi.first.math.geometry.*; public final class PhotonUtils { - private PhotonUtils() { - // Utility class - } + private PhotonUtils() { + // Utility class + } - /** - * Algorithm from https://docs.limelightvision.io/en/latest/cs_estimating_distance.html Estimates - * range to a target using the target's elevation. This method can produce more stable results - * than SolvePNP when well tuned, if the full 6d robot pose is not required. Note that this method - * requires the camera to have 0 roll (not be skewed clockwise or CCW relative to the floor), and - * for there to exist a height differential between goal and camera. The larger this differential, - * the more accurate the distance estimate will be. - * - *

Units can be converted using the {@link edu.wpi.first.math.util.Units} class. - * - * @param cameraHeightMeters The physical height of the camera off the floor in meters. - * @param targetHeightMeters The physical height of the target off the floor in meters. This - * should be the height of whatever is being targeted (i.e. if the targeting region is set to - * top, this should be the height of the top of the target). - * @param cameraPitchRadians The pitch of the camera from the horizontal plane in radians. - * Positive values up. - * @param targetPitchRadians The pitch of the target in the camera's lens in radians. Positive - * values up. - * @return The estimated distance to the target in meters. - */ - public static double calculateDistanceToTargetMeters( - double cameraHeightMeters, - double targetHeightMeters, - double cameraPitchRadians, - double targetPitchRadians) { - return (targetHeightMeters - cameraHeightMeters) - / Math.tan(cameraPitchRadians + targetPitchRadians); - } + /** + * Algorithm from https://docs.limelightvision.io/en/latest/cs_estimating_distance.html Estimates + * range to a target using the target's elevation. This method can produce more stable results + * than SolvePNP when well tuned, if the full 6d robot pose is not required. Note that this method + * requires the camera to have 0 roll (not be skewed clockwise or CCW relative to the floor), and + * for there to exist a height differential between goal and camera. The larger this differential, + * the more accurate the distance estimate will be. + * + *

Units can be converted using the {@link edu.wpi.first.math.util.Units} class. + * + * @param cameraHeightMeters The physical height of the camera off the floor in meters. + * @param targetHeightMeters The physical height of the target off the floor in meters. This + * should be the height of whatever is being targeted (i.e. if the targeting region is set to + * top, this should be the height of the top of the target). + * @param cameraPitchRadians The pitch of the camera from the horizontal plane in radians. + * Positive values up. + * @param targetPitchRadians The pitch of the target in the camera's lens in radians. Positive + * values up. + * @return The estimated distance to the target in meters. + */ + public static double calculateDistanceToTargetMeters( + double cameraHeightMeters, + double targetHeightMeters, + double cameraPitchRadians, + double targetPitchRadians) { + return (targetHeightMeters - cameraHeightMeters) + / Math.tan(cameraPitchRadians + targetPitchRadians); + } - /** - * Estimate the {@link Translation2d} of the target relative to the camera. - * - * @param targetDistanceMeters The distance to the target in meters. - * @param yaw The observed yaw of the target. - * @return The target's camera-relative translation. - */ - public static Translation2d estimateCameraToTargetTranslation( - double targetDistanceMeters, Rotation2d yaw) { - return new Translation2d( - yaw.getCos() * targetDistanceMeters, yaw.getSin() * targetDistanceMeters); - } + /** + * Estimate the {@link Translation2d} of the target relative to the camera. + * + * @param targetDistanceMeters The distance to the target in meters. + * @param yaw The observed yaw of the target. + * @return The target's camera-relative translation. 
+ */ + public static Translation2d estimateCameraToTargetTranslation( + double targetDistanceMeters, Rotation2d yaw) { + return new Translation2d( + yaw.getCos() * targetDistanceMeters, yaw.getSin() * targetDistanceMeters); + } - /** - * Estimate the position of the robot in the field. - * - * @param cameraHeightMeters The physical height of the camera off the floor in meters. - * @param targetHeightMeters The physical height of the target off the floor in meters. This - * should be the height of whatever is being targeted (i.e. if the targeting region is set to - * top, this should be the height of the top of the target). - * @param cameraPitchRadians The pitch of the camera from the horizontal plane in radians. - * Positive values up. - * @param targetPitchRadians The pitch of the target in the camera's lens in radians. Positive - * values up. - * @param targetYaw The observed yaw of the target. Note that this *must* be CCW-positive, and - * Photon returns CW-positive. - * @param gyroAngle The current robot gyro angle, likely from odometry. - * @param fieldToTarget A Pose2d representing the target position in the field coordinate system. - * @param cameraToRobot The position of the robot relative to the camera. If the camera was - * mounted 3 inches behind the "origin" (usually physical center) of the robot, this would be - * Transform2d(3 inches, 0 inches, 0 degrees). - * @return The position of the robot in the field. 
- */ - public static Pose2d estimateFieldToRobot( - double cameraHeightMeters, - double targetHeightMeters, - double cameraPitchRadians, - double targetPitchRadians, - Rotation2d targetYaw, - Rotation2d gyroAngle, - Pose2d fieldToTarget, - Transform2d cameraToRobot) { - return PhotonUtils.estimateFieldToRobot( - PhotonUtils.estimateCameraToTarget( - PhotonUtils.estimateCameraToTargetTranslation( - PhotonUtils.calculateDistanceToTargetMeters( - cameraHeightMeters, targetHeightMeters, cameraPitchRadians, targetPitchRadians), - targetYaw), - fieldToTarget, - gyroAngle), - fieldToTarget, - cameraToRobot); - } + /** + * Estimate the position of the robot in the field. + * + * @param cameraHeightMeters The physical height of the camera off the floor in meters. + * @param targetHeightMeters The physical height of the target off the floor in meters. This + * should be the height of whatever is being targeted (i.e. if the targeting region is set to + * top, this should be the height of the top of the target). + * @param cameraPitchRadians The pitch of the camera from the horizontal plane in radians. + * Positive values up. + * @param targetPitchRadians The pitch of the target in the camera's lens in radians. Positive + * values up. + * @param targetYaw The observed yaw of the target. Note that this *must* be CCW-positive, and + * Photon returns CW-positive. + * @param gyroAngle The current robot gyro angle, likely from odometry. + * @param fieldToTarget A Pose2d representing the target position in the field coordinate system. + * @param cameraToRobot The position of the robot relative to the camera. If the camera was + * mounted 3 inches behind the "origin" (usually physical center) of the robot, this would be + * Transform2d(3 inches, 0 inches, 0 degrees). + * @return The position of the robot in the field. 
+ */ + public static Pose2d estimateFieldToRobot( + double cameraHeightMeters, + double targetHeightMeters, + double cameraPitchRadians, + double targetPitchRadians, + Rotation2d targetYaw, + Rotation2d gyroAngle, + Pose2d fieldToTarget, + Transform2d cameraToRobot) { + return PhotonUtils.estimateFieldToRobot( + PhotonUtils.estimateCameraToTarget( + PhotonUtils.estimateCameraToTargetTranslation( + PhotonUtils.calculateDistanceToTargetMeters( + cameraHeightMeters, targetHeightMeters, cameraPitchRadians, targetPitchRadians), + targetYaw), + fieldToTarget, + gyroAngle), + fieldToTarget, + cameraToRobot); + } - /** - * Estimates a {@link Transform2d} that maps the camera position to the target position, using the - * robot's gyro. Note that the gyro angle provided *must* line up with the field coordinate system - * -- that is, it should read zero degrees when pointed towards the opposing alliance station, and - * increase as the robot rotates CCW. - * - * @param cameraToTargetTranslation A Translation2d that encodes the x/y position of the target - * relative to the camera. - * @param fieldToTarget A Pose2d representing the target position in the field coordinate system. - * @param gyroAngle The current robot gyro angle, likely from odometry. - * @return A Transform2d that takes us from the camera to the target. - */ - public static Transform2d estimateCameraToTarget( - Translation2d cameraToTargetTranslation, Pose2d fieldToTarget, Rotation2d gyroAngle) { - // This pose maps our camera at the origin out to our target, in the robot - // reference frame - // The translation part of this Transform2d is from the above step, and the - // rotation uses our robot's - // gyro. - return new Transform2d( - cameraToTargetTranslation, gyroAngle.times(-1).minus(fieldToTarget.getRotation())); - } + /** + * Estimates a {@link Transform2d} that maps the camera position to the target position, using the + * robot's gyro. 
Note that the gyro angle provided *must* line up with the field coordinate system + * -- that is, it should read zero degrees when pointed towards the opposing alliance station, and + * increase as the robot rotates CCW. + * + * @param cameraToTargetTranslation A Translation2d that encodes the x/y position of the target + * relative to the camera. + * @param fieldToTarget A Pose2d representing the target position in the field coordinate system. + * @param gyroAngle The current robot gyro angle, likely from odometry. + * @return A Transform2d that takes us from the camera to the target. + */ + public static Transform2d estimateCameraToTarget( + Translation2d cameraToTargetTranslation, Pose2d fieldToTarget, Rotation2d gyroAngle) { + // This pose maps our camera at the origin out to our target, in the robot + // reference frame + // The translation part of this Transform2d is from the above step, and the + // rotation uses our robot's + // gyro. + return new Transform2d( + cameraToTargetTranslation, gyroAngle.times(-1).minus(fieldToTarget.getRotation())); + } - /** - * Estimates the pose of the robot in the field coordinate system, given the position of the - * target relative to the camera, the target relative to the field, and the robot relative to the - * camera. - * - * @param cameraToTarget The position of the target relative to the camera. - * @param fieldToTarget The position of the target in the field. - * @param cameraToRobot The position of the robot relative to the camera. If the camera was - * mounted 3 inches behind the "origin" (usually physical center) of the robot, this would be - * Transform2d(3 inches, 0 inches, 0 degrees). - * @return The position of the robot in the field. 
- */ - public static Pose2d estimateFieldToRobot( - Transform2d cameraToTarget, Pose2d fieldToTarget, Transform2d cameraToRobot) { - return estimateFieldToCamera(cameraToTarget, fieldToTarget).transformBy(cameraToRobot); - } + /** + * Estimates the pose of the robot in the field coordinate system, given the position of the + * target relative to the camera, the target relative to the field, and the robot relative to the + * camera. + * + * @param cameraToTarget The position of the target relative to the camera. + * @param fieldToTarget The position of the target in the field. + * @param cameraToRobot The position of the robot relative to the camera. If the camera was + * mounted 3 inches behind the "origin" (usually physical center) of the robot, this would be + * Transform2d(3 inches, 0 inches, 0 degrees). + * @return The position of the robot in the field. + */ + public static Pose2d estimateFieldToRobot( + Transform2d cameraToTarget, Pose2d fieldToTarget, Transform2d cameraToRobot) { + return estimateFieldToCamera(cameraToTarget, fieldToTarget).transformBy(cameraToRobot); + } - /** - * Estimates the pose of the camera in the field coordinate system, given the position of the - * target relative to the camera, and the target relative to the field. This *only* tracks the - * position of the camera, not the position of the robot itself. - * - * @param cameraToTarget The position of the target relative to the camera. - * @param fieldToTarget The position of the target in the field. - * @return The position of the camera in the field. - */ - public static Pose2d estimateFieldToCamera(Transform2d cameraToTarget, Pose2d fieldToTarget) { - var targetToCamera = cameraToTarget.inverse(); - return fieldToTarget.transformBy(targetToCamera); - } + /** + * Estimates the pose of the camera in the field coordinate system, given the position of the + * target relative to the camera, and the target relative to the field. 
This *only* tracks the + * position of the camera, not the position of the robot itself. + * + * @param cameraToTarget The position of the target relative to the camera. + * @param fieldToTarget The position of the target in the field. + * @return The position of the camera in the field. + */ + public static Pose2d estimateFieldToCamera(Transform2d cameraToTarget, Pose2d fieldToTarget) { + var targetToCamera = cameraToTarget.inverse(); + return fieldToTarget.transformBy(targetToCamera); + } - /** - * Estimates the pose of the robot in the field coordinate system, given the pose of the fiducial - * tag, the robot relative to the camera, and the target relative to the camera. - * - * @param fieldRelativeTagPose Pose3D the field relative pose of the target - * @param cameraToRobot Transform3D of the robot relative to the camera. Origin of the robot is - * defined as the center. - * @param cameraToTarget Transform3D of the target relative to the camera, returned by - * PhotonVision - * @return Transform3d Robot position relative to the field - */ - public static Pose3d estimateFieldToRobotAprilTag( - Transform3d cameraToTarget, Pose3d fieldRelativeTagPose, Transform3d cameraToRobot) { - return fieldRelativeTagPose.plus(cameraToTarget.inverse()).plus(cameraToRobot); - } + /** + * Estimates the pose of the robot in the field coordinate system, given the pose of the fiducial + * tag, the robot relative to the camera, and the target relative to the camera. + * + * @param fieldRelativeTagPose Pose3D the field relative pose of the target + * @param cameraToRobot Transform3D of the robot relative to the camera. Origin of the robot is + * defined as the center. 
+ * @param cameraToTarget Transform3D of the target relative to the camera, returned by + * PhotonVision + * @return Transform3d Robot position relative to the field + */ + public static Pose3d estimateFieldToRobotAprilTag( + Transform3d cameraToTarget, Pose3d fieldRelativeTagPose, Transform3d cameraToRobot) { + return fieldRelativeTagPose.plus(cameraToTarget.inverse()).plus(cameraToRobot); + } - /** - * Returns the yaw between your robot and a target. - * - * @param robotPose Current pose of the robot - * @param targetPose Pose of the target on the field - * @return double Yaw to the target - */ - public static Rotation2d getYawToPose(Pose2d robotPose, Pose2d targetPose) { - Translation2d relativeTrl = targetPose.relativeTo(robotPose).getTranslation(); - return new Rotation2d(relativeTrl.getX(), relativeTrl.getY()); - } + /** + * Returns the yaw between your robot and a target. + * + * @param robotPose Current pose of the robot + * @param targetPose Pose of the target on the field + * @return double Yaw to the target + */ + public static Rotation2d getYawToPose(Pose2d robotPose, Pose2d targetPose) { + Translation2d relativeTrl = targetPose.relativeTo(robotPose).getTranslation(); + return new Rotation2d(relativeTrl.getX(), relativeTrl.getY()); + } - /** - * Returns the distance between two poses - * - * @param robotPose Pose of the robot - * @param targetPose Pose of the target - * @return - */ - public static double getDistanceToPose(Pose2d robotPose, Pose2d targetPose) { - return robotPose.getTranslation().getDistance(targetPose.getTranslation()); - } + /** + * Returns the distance between two poses + * + * @param robotPose Pose of the robot + * @param targetPose Pose of the target + * @return + */ + public static double getDistanceToPose(Pose2d robotPose, Pose2d targetPose) { + return robotPose.getTranslation().getDistance(targetPose.getTranslation()); + } } diff --git a/photon-lib/src/main/java/org/photonvision/simulation/PhotonCameraSim.java 
b/photon-lib/src/main/java/org/photonvision/simulation/PhotonCameraSim.java index 1c9ef2ce1e..8887e77e26 100644 --- a/photon-lib/src/main/java/org/photonvision/simulation/PhotonCameraSim.java +++ b/photon-lib/src/main/java/org/photonvision/simulation/PhotonCameraSim.java @@ -62,525 +62,525 @@ */ @SuppressWarnings("unused") public class PhotonCameraSim implements AutoCloseable { - private final PhotonCamera cam; - - NTTopicSet ts = new NTTopicSet(); - private long heartbeatCounter = 0; - - /** This simulated camera's {@link SimCameraProperties} */ - public final SimCameraProperties prop; - - private long nextNTEntryTime = WPIUtilJNI.now(); - - private double maxSightRangeMeters = Double.MAX_VALUE; - private static final double kDefaultMinAreaPx = 100; - private double minTargetAreaPercent; - private PhotonTargetSortMode sortMode = PhotonTargetSortMode.Largest; - - // video stream simulation - private final CvSource videoSimRaw; - private final Mat videoSimFrameRaw = new Mat(); - private boolean videoSimRawEnabled = true; - private boolean videoSimWireframeEnabled = false; - private double videoSimWireframeResolution = 0.1; - private final CvSource videoSimProcessed; - private final Mat videoSimFrameProcessed = new Mat(); - private boolean videoSimProcEnabled = true; - - static { - try { - var loader = - new RuntimeLoader<>( - Core.NATIVE_LIBRARY_NAME, RuntimeLoader.getDefaultExtractionRoot(), Core.class); - loader.loadLibrary(); - } catch (Exception e) { - throw new RuntimeException("Failed to load native libraries!", e); - } - } - - @Override - public void close() { - videoSimRaw.close(); - videoSimFrameRaw.release(); - videoSimProcessed.close(); - videoSimFrameProcessed.release(); + private final PhotonCamera cam; + + NTTopicSet ts = new NTTopicSet(); + private long heartbeatCounter = 0; + + /** This simulated camera's {@link SimCameraProperties} */ + public final SimCameraProperties prop; + + private long nextNTEntryTime = WPIUtilJNI.now(); + + private double 
maxSightRangeMeters = Double.MAX_VALUE; + private static final double kDefaultMinAreaPx = 100; + private double minTargetAreaPercent; + private PhotonTargetSortMode sortMode = PhotonTargetSortMode.Largest; + + // video stream simulation + private final CvSource videoSimRaw; + private final Mat videoSimFrameRaw = new Mat(); + private boolean videoSimRawEnabled = true; + private boolean videoSimWireframeEnabled = false; + private double videoSimWireframeResolution = 0.1; + private final CvSource videoSimProcessed; + private final Mat videoSimFrameProcessed = new Mat(); + private boolean videoSimProcEnabled = true; + + static { + try { + var loader = + new RuntimeLoader<>( + Core.NATIVE_LIBRARY_NAME, RuntimeLoader.getDefaultExtractionRoot(), Core.class); + loader.loadLibrary(); + } catch (Exception e) { + throw new RuntimeException("Failed to load native libraries!", e); } - - /** - * Constructs a handle for simulating {@link PhotonCamera} values. Processing simulated targets - * through this class will change the associated PhotonCamera's results. - * - *

This constructor's camera has a 90 deg FOV with no simulated lag! - * - *

By default, the minimum target area is 100 pixels and there is no maximum sight range. - * - * @param camera The camera to be simulated - */ - public PhotonCameraSim(PhotonCamera camera) { - this(camera, SimCameraProperties.PERFECT_90DEG()); + } + + @Override + public void close() { + videoSimRaw.close(); + videoSimFrameRaw.release(); + videoSimProcessed.close(); + videoSimFrameProcessed.release(); + } + + /** + * Constructs a handle for simulating {@link PhotonCamera} values. Processing simulated targets + * through this class will change the associated PhotonCamera's results. + * + *

This constructor's camera has a 90 deg FOV with no simulated lag! + * + *

By default, the minimum target area is 100 pixels and there is no maximum sight range. + * + * @param camera The camera to be simulated + */ + public PhotonCameraSim(PhotonCamera camera) { + this(camera, SimCameraProperties.PERFECT_90DEG()); + } + + /** + * Constructs a handle for simulating {@link PhotonCamera} values. Processing simulated targets + * through this class will change the associated PhotonCamera's results. + * + *

By default, the minimum target area is 100 pixels and there is no maximum sight range. + * + * @param camera The camera to be simulated + * @param prop Properties of this camera such as FOV and FPS + */ + public PhotonCameraSim(PhotonCamera camera, SimCameraProperties prop) { + this.cam = camera; + this.prop = prop; + setMinTargetAreaPixels(kDefaultMinAreaPx); + + videoSimRaw = + CameraServer.putVideo(camera.getName() + "-raw", prop.getResWidth(), prop.getResHeight()); + videoSimRaw.setPixelFormat(PixelFormat.kGray); + videoSimProcessed = + CameraServer.putVideo( + camera.getName() + "-processed", prop.getResWidth(), prop.getResHeight()); + + ts.removeEntries(); + ts.subTable = camera.getCameraTable(); + ts.updateEntries(); + } + + /** + * Constructs a handle for simulating {@link PhotonCamera} values. Processing simulated targets + * through this class will change the associated PhotonCamera's results. + * + * @param camera The camera to be simulated + * @param prop Properties of this camera such as FOV and FPS + * @param minTargetAreaPercent The minimum percentage(0 - 100) a detected target must take up of + * the camera's image to be processed. Match this with your contour filtering settings in the + * PhotonVision GUI. + * @param maxSightRangeMeters Maximum distance at which the target is illuminated to your camera. + * Note that minimum target area of the image is separate from this. 
+ */ + public PhotonCameraSim( + PhotonCamera camera, + SimCameraProperties prop, + double minTargetAreaPercent, + double maxSightRangeMeters) { + this(camera, prop); + this.minTargetAreaPercent = minTargetAreaPercent; + this.maxSightRangeMeters = maxSightRangeMeters; + } + + public PhotonCamera getCamera() { + return cam; + } + + public double getMinTargetAreaPercent() { + return minTargetAreaPercent; + } + + public double getMinTargetAreaPixels() { + return minTargetAreaPercent / 100.0 * prop.getResArea(); + } + + public double getMaxSightRangeMeters() { + return maxSightRangeMeters; + } + + public PhotonTargetSortMode getTargetSortMode() { + return sortMode; + } + + public CvSource getVideoSimRaw() { + return videoSimRaw; + } + + public Mat getVideoSimFrameRaw() { + return videoSimFrameRaw; + } + + /** + * Determines if this target's pose should be visible to the camera without considering its + * projected image points. Does not account for image area. + * + * @param camPose Camera's 3d pose + * @param target Vision target containing pose and shape + * @return If this vision target can be seen before image projection. 
+ */ + public boolean canSeeTargetPose(Pose3d camPose, VisionTargetSim target) { + // var rel = new CameraTargetRelation(camPose, target.getPose()); + // TODO: removal awaiting wpilib Rotation3d performance improvements + var relTarget = RotTrlTransform3d.makeRelativeTo(camPose).apply(target.getPose()); + var camToTargYaw = new Rotation2d(relTarget.getX(), relTarget.getY()); + var camToTargPitch = + new Rotation2d(Math.hypot(relTarget.getX(), relTarget.getY()), -relTarget.getZ()); + var relCam = RotTrlTransform3d.makeRelativeTo(target.getPose()).apply(camPose); + var targToCamAngle = new Rotation2d(relCam.getX(), Math.hypot(relCam.getY(), relCam.getZ())); + + return ( + // target translation is outside of camera's FOV + (Math.abs(camToTargYaw.getDegrees()) < prop.getHorizFOV().getDegrees() / 2) + && (Math.abs(camToTargPitch.getDegrees()) < prop.getVertFOV().getDegrees() / 2) + && (!target.getModel().isPlanar + || Math.abs(targToCamAngle.getDegrees()) + < 90) // camera is behind planar target and it should be occluded + && (relTarget.getTranslation().getNorm() <= maxSightRangeMeters)); // target is too far + } + + /** + * Determines if all target points are inside the camera's image. + * + * @param points The target's 2d image points + */ + public boolean canSeeCorners(Point[] points) { + for (var point : points) { + if (MathUtil.clamp(point.x, 0, prop.getResWidth()) != point.x + || MathUtil.clamp(point.y, 0, prop.getResHeight()) != point.y) { + return false; // point is outside of resolution + } } - - /** - * Constructs a handle for simulating {@link PhotonCamera} values. Processing simulated targets - * through this class will change the associated PhotonCamera's results. - * - *

By default, the minimum target area is 100 pixels and there is no maximum sight range. - * - * @param camera The camera to be simulated - * @param prop Properties of this camera such as FOV and FPS - */ - public PhotonCameraSim(PhotonCamera camera, SimCameraProperties prop) { - this.cam = camera; - this.prop = prop; - setMinTargetAreaPixels(kDefaultMinAreaPx); - - videoSimRaw = - CameraServer.putVideo(camera.getName() + "-raw", prop.getResWidth(), prop.getResHeight()); - videoSimRaw.setPixelFormat(PixelFormat.kGray); - videoSimProcessed = - CameraServer.putVideo( - camera.getName() + "-processed", prop.getResWidth(), prop.getResHeight()); - - ts.removeEntries(); - ts.subTable = camera.getCameraTable(); - ts.updateEntries(); - } - - /** - * Constructs a handle for simulating {@link PhotonCamera} values. Processing simulated targets - * through this class will change the associated PhotonCamera's results. - * - * @param camera The camera to be simulated - * @param prop Properties of this camera such as FOV and FPS - * @param minTargetAreaPercent The minimum percentage(0 - 100) a detected target must take up of - * the camera's image to be processed. Match this with your contour filtering settings in the - * PhotonVision GUI. - * @param maxSightRangeMeters Maximum distance at which the target is illuminated to your camera. - * Note that minimum target area of the image is separate from this. 
- */ - public PhotonCameraSim( - PhotonCamera camera, - SimCameraProperties prop, - double minTargetAreaPercent, - double maxSightRangeMeters) { - this(camera, prop); - this.minTargetAreaPercent = minTargetAreaPercent; - this.maxSightRangeMeters = maxSightRangeMeters; - } - - public PhotonCamera getCamera() { - return cam; - } - - public double getMinTargetAreaPercent() { - return minTargetAreaPercent; - } - - public double getMinTargetAreaPixels() { - return minTargetAreaPercent / 100.0 * prop.getResArea(); + return true; + } + + /** + * Determine if this camera should process a new frame based on performance metrics and the time + * since the last update. This returns an Optional which is either empty if no update should occur + * or a Long of the timestamp in microseconds of when the frame which should be received by NT. If + * a timestamp is returned, the last frame update time becomes that timestamp. + * + * @return Optional long which is empty while blocked or the NT entry timestamp in microseconds if + * ready + */ + public Optional consumeNextEntryTime() { + // check if this camera is ready for another frame update + long now = WPIUtilJNI.now(); + long timestamp = -1; + int iter = 0; + // prepare next latest update + while (now >= nextNTEntryTime) { + timestamp = nextNTEntryTime; + long frameTime = (long) (prop.estMsUntilNextFrame() * 1e3); + nextNTEntryTime += frameTime; + + // if frame time is very small, avoid blocking + if (iter++ > 50) { + timestamp = now; + nextNTEntryTime = now + frameTime; + break; + } } - - public double getMaxSightRangeMeters() { - return maxSightRangeMeters; - } - - public PhotonTargetSortMode getTargetSortMode() { - return sortMode; - } - - public CvSource getVideoSimRaw() { - return videoSimRaw; - } - - public Mat getVideoSimFrameRaw() { - return videoSimFrameRaw; - } - - /** - * Determines if this target's pose should be visible to the camera without considering its - * projected image points. Does not account for image area. 
- * - * @param camPose Camera's 3d pose - * @param target Vision target containing pose and shape - * @return If this vision target can be seen before image projection. - */ - public boolean canSeeTargetPose(Pose3d camPose, VisionTargetSim target) { - // var rel = new CameraTargetRelation(camPose, target.getPose()); - // TODO: removal awaiting wpilib Rotation3d performance improvements - var relTarget = RotTrlTransform3d.makeRelativeTo(camPose).apply(target.getPose()); - var camToTargYaw = new Rotation2d(relTarget.getX(), relTarget.getY()); - var camToTargPitch = - new Rotation2d(Math.hypot(relTarget.getX(), relTarget.getY()), -relTarget.getZ()); - var relCam = RotTrlTransform3d.makeRelativeTo(target.getPose()).apply(camPose); - var targToCamAngle = new Rotation2d(relCam.getX(), Math.hypot(relCam.getY(), relCam.getZ())); - - return ( - // target translation is outside of camera's FOV - (Math.abs(camToTargYaw.getDegrees()) < prop.getHorizFOV().getDegrees() / 2) - && (Math.abs(camToTargPitch.getDegrees()) < prop.getVertFOV().getDegrees() / 2) - && (!target.getModel().isPlanar - || Math.abs(targToCamAngle.getDegrees()) - < 90) // camera is behind planar target and it should be occluded - && (relTarget.getTranslation().getNorm() <= maxSightRangeMeters)); // target is too far - } - - /** - * Determines if all target points are inside the camera's image. - * - * @param points The target's 2d image points - */ - public boolean canSeeCorners(Point[] points) { - for (var point : points) { - if (MathUtil.clamp(point.x, 0, prop.getResWidth()) != point.x - || MathUtil.clamp(point.y, 0, prop.getResHeight()) != point.y) { - return false; // point is outside of resolution - } + // return the timestamp of the latest update + if (timestamp >= 0) return Optional.of(timestamp); + // or this camera isn't ready to process yet + else return Optional.empty(); + } + + /** + * The minimum percentage(0 - 100) a detected target must take up of the camera's image to be + * processed. 
+ */ + public void setMinTargetAreaPercent(double areaPercent) { + this.minTargetAreaPercent = areaPercent; + } + + /** + * The minimum number of pixels a detected target must take up in the camera's image to be + * processed. + */ + public void setMinTargetAreaPixels(double areaPx) { + this.minTargetAreaPercent = areaPx / prop.getResArea() * 100; + } + + /** + * Maximum distance at which the target is illuminated to your camera. Note that minimum target + * area of the image is separate from this. + */ + public void setMaxSightRange(double rangeMeters) { + this.maxSightRangeMeters = rangeMeters; + } + + /** Defines the order the targets are sorted in the pipeline result. */ + public void setTargetSortMode(PhotonTargetSortMode sortMode) { + if (sortMode != null) this.sortMode = sortMode; + } + + /** + * Sets whether the raw video stream simulation is enabled. + * + *

Note: This may increase loop times. + */ + public void enableRawStream(boolean enabled) { + videoSimRawEnabled = enabled; + } + + /** + * Sets whether a wireframe of the field is drawn to the raw video stream. + * + *

Note: This will dramatically increase loop times. + */ + public void enableDrawWireframe(boolean enabled) { + videoSimWireframeEnabled = enabled; + } + + /** + * Sets the resolution of the drawn wireframe if enabled. Drawn line segments will be subdivided + * into smaller segments based on a threshold set by the resolution. + * + * @param resolution Resolution as a fraction(0 - 1) of the video frame's diagonal length in + * pixels + */ + public void setWireframeResolution(double resolution) { + videoSimWireframeResolution = resolution; + } + + /** Sets whether the processed video stream simulation is enabled. */ + public void enableProcessedStream(boolean enabled) { + videoSimProcEnabled = enabled; + } + + public PhotonPipelineResult process( + double latencyMillis, Pose3d cameraPose, List targets) { + // sort targets by distance to camera + targets = new ArrayList<>(targets); + targets.sort( + (t1, t2) -> { + double dist1 = t1.getPose().getTranslation().getDistance(cameraPose.getTranslation()); + double dist2 = t2.getPose().getTranslation().getDistance(cameraPose.getTranslation()); + if (dist1 == dist2) return 0; + return dist1 < dist2 ? 
1 : -1; + }); + // all targets visible before noise + var visibleTgts = new ArrayList>(); + // all targets actually detected by camera (after noise) + var detectableTgts = new ArrayList(); + // basis change from world coordinates to camera coordinates + var camRt = RotTrlTransform3d.makeRelativeTo(cameraPose); + + // reset our frame + VideoSimUtil.updateVideoProp(videoSimRaw, prop); + VideoSimUtil.updateVideoProp(videoSimProcessed, prop); + Size videoFrameSize = new Size(prop.getResWidth(), prop.getResHeight()); + Mat.zeros(videoFrameSize, CvType.CV_8UC1).assignTo(videoSimFrameRaw); + + for (var tgt : targets) { + // pose isn't visible, skip to next + if (!canSeeTargetPose(cameraPose, tgt)) continue; + + // find target's 3d corner points + var fieldCorners = tgt.getFieldVertices(); + if (tgt.getModel().isSpherical) { // target is spherical + var model = tgt.getModel(); + // orient the model to the camera (like a sprite/decal) so it appears similar regardless of + // view + fieldCorners = + model.getFieldVertices( + TargetModel.getOrientedPose( + tgt.getPose().getTranslation(), cameraPose.getTranslation())); + } + // project 3d target points into 2d image points + var imagePoints = + OpenCVHelp.projectPoints(prop.getIntrinsics(), prop.getDistCoeffs(), camRt, fieldCorners); + // spherical targets need a rotated rectangle of their midpoints for visualization + if (tgt.getModel().isSpherical) { + var center = OpenCVHelp.avgPoint(imagePoints); + int l = 0, t, b, r = 0; + // reference point (left side midpoint) + for (int i = 1; i < 4; i++) { + if (imagePoints[i].x < imagePoints[l].x) l = i; } - return true; - } - - /** - * Determine if this camera should process a new frame based on performance metrics and the time - * since the last update. This returns an Optional which is either empty if no update should occur - * or a Long of the timestamp in microseconds of when the frame which should be received by NT. 
If - * a timestamp is returned, the last frame update time becomes that timestamp. - * - * @return Optional long which is empty while blocked or the NT entry timestamp in microseconds if - * ready - */ - public Optional consumeNextEntryTime() { - // check if this camera is ready for another frame update - long now = WPIUtilJNI.now(); - long timestamp = -1; - int iter = 0; - // prepare next latest update - while (now >= nextNTEntryTime) { - timestamp = nextNTEntryTime; - long frameTime = (long) (prop.estMsUntilNextFrame() * 1e3); - nextNTEntryTime += frameTime; - - // if frame time is very small, avoid blocking - if (iter++ > 50) { - timestamp = now; - nextNTEntryTime = now + frameTime; - break; - } + var lc = imagePoints[l]; + // determine top, right, bottom midpoints + double[] angles = new double[4]; + t = (l + 1) % 4; + b = (l + 1) % 4; + for (int i = 0; i < 4; i++) { + if (i == l) continue; + var ic = imagePoints[i]; + angles[i] = Math.atan2(lc.y - ic.y, ic.x - lc.x); + if (angles[i] >= angles[t]) t = i; + if (angles[i] <= angles[b]) b = i; } - // return the timestamp of the latest update - if (timestamp >= 0) return Optional.of(timestamp); - // or this camera isn't ready to process yet - else return Optional.empty(); - } - - /** - * The minimum percentage(0 - 100) a detected target must take up of the camera's image to be - * processed. - */ - public void setMinTargetAreaPercent(double areaPercent) { - this.minTargetAreaPercent = areaPercent; - } - - /** - * The minimum number of pixels a detected target must take up in the camera's image to be - * processed. - */ - public void setMinTargetAreaPixels(double areaPx) { - this.minTargetAreaPercent = areaPx / prop.getResArea() * 100; - } - - /** - * Maximum distance at which the target is illuminated to your camera. Note that minimum target - * area of the image is separate from this. 
- */ - public void setMaxSightRange(double rangeMeters) { - this.maxSightRangeMeters = rangeMeters; - } - - /** Defines the order the targets are sorted in the pipeline result. */ - public void setTargetSortMode(PhotonTargetSortMode sortMode) { - if (sortMode != null) this.sortMode = sortMode; - } - - /** - * Sets whether the raw video stream simulation is enabled. - * - *

Note: This may increase loop times. - */ - public void enableRawStream(boolean enabled) { - videoSimRawEnabled = enabled; - } - - /** - * Sets whether a wireframe of the field is drawn to the raw video stream. - * - *

Note: This will dramatically increase loop times. - */ - public void enableDrawWireframe(boolean enabled) { - videoSimWireframeEnabled = enabled; - } - - /** - * Sets the resolution of the drawn wireframe if enabled. Drawn line segments will be subdivided - * into smaller segments based on a threshold set by the resolution. - * - * @param resolution Resolution as a fraction(0 - 1) of the video frame's diagonal length in - * pixels - */ - public void setWireframeResolution(double resolution) { - videoSimWireframeResolution = resolution; - } - - /** Sets whether the processed video stream simulation is enabled. */ - public void enableProcessedStream(boolean enabled) { - videoSimProcEnabled = enabled; + for (int i = 0; i < 4; i++) { + if (i != t && i != l && i != b) r = i; + } + // create RotatedRect from midpoints + var rect = + new RotatedRect( + new Point(center.x, center.y), + new Size(imagePoints[r].x - lc.x, imagePoints[b].y - imagePoints[t].y), + Math.toDegrees(-angles[r])); + // set target corners to rect corners + Point[] points = new Point[4]; + rect.points(points); + imagePoints = points; + } + // save visible targets for raw video stream simulation + visibleTgts.add(new Pair<>(tgt, imagePoints)); + // estimate pixel noise + var noisyTargetCorners = prop.estPixelNoise(imagePoints); + // find the minimum area rectangle of target corners + var minAreaRect = OpenCVHelp.getMinAreaRect(noisyTargetCorners); + Point[] minAreaRectPts = new Point[4]; + minAreaRect.points(minAreaRectPts); + // find the (naive) 2d yaw/pitch + var centerPt = minAreaRect.center; + var centerRot = prop.getPixelRot(centerPt); + // find contour area + double areaPercent = prop.getContourAreaPercent(noisyTargetCorners); + + // projected target can't be detected, skip to next + if (!(canSeeCorners(noisyTargetCorners) && areaPercent >= minTargetAreaPercent)) continue; + + var pnpSim = new PNPResults(); + if (tgt.fiducialID >= 0 && tgt.getFieldVertices().size() == 4) { // single AprilTag 
solvePNP + pnpSim = + OpenCVHelp.solvePNP_SQUARE( + prop.getIntrinsics(), + prop.getDistCoeffs(), + tgt.getModel().vertices, + noisyTargetCorners); + } + + detectableTgts.add( + new PhotonTrackedTarget( + Math.toDegrees(centerRot.getZ()), + -Math.toDegrees(centerRot.getY()), + areaPercent, + Math.toDegrees(centerRot.getX()), + tgt.fiducialID, + pnpSim.best, + pnpSim.alt, + pnpSim.ambiguity, + OpenCVHelp.pointsToCorners(minAreaRectPts), + OpenCVHelp.pointsToCorners(noisyTargetCorners))); } - - public PhotonPipelineResult process( - double latencyMillis, Pose3d cameraPose, List targets) { - // sort targets by distance to camera - targets = new ArrayList<>(targets); - targets.sort( - (t1, t2) -> { - double dist1 = t1.getPose().getTranslation().getDistance(cameraPose.getTranslation()); - double dist2 = t2.getPose().getTranslation().getDistance(cameraPose.getTranslation()); - if (dist1 == dist2) return 0; - return dist1 < dist2 ? 1 : -1; - }); - // all targets visible before noise - var visibleTgts = new ArrayList>(); - // all targets actually detected by camera (after noise) - var detectableTgts = new ArrayList(); - // basis change from world coordinates to camera coordinates - var camRt = RotTrlTransform3d.makeRelativeTo(cameraPose); - - // reset our frame - VideoSimUtil.updateVideoProp(videoSimRaw, prop); - VideoSimUtil.updateVideoProp(videoSimProcessed, prop); - Size videoFrameSize = new Size(prop.getResWidth(), prop.getResHeight()); - Mat.zeros(videoFrameSize, CvType.CV_8UC1).assignTo(videoSimFrameRaw); - - for (var tgt : targets) { - // pose isn't visible, skip to next - if (!canSeeTargetPose(cameraPose, tgt)) continue; - - // find target's 3d corner points - var fieldCorners = tgt.getFieldVertices(); - if (tgt.getModel().isSpherical) { // target is spherical - var model = tgt.getModel(); - // orient the model to the camera (like a sprite/decal) so it appears similar regardless of - // view - fieldCorners = - model.getFieldVertices( - TargetModel.getOrientedPose( 
- tgt.getPose().getTranslation(), cameraPose.getTranslation())); - } - // project 3d target points into 2d image points - var imagePoints = - OpenCVHelp.projectPoints(prop.getIntrinsics(), prop.getDistCoeffs(), camRt, fieldCorners); - // spherical targets need a rotated rectangle of their midpoints for visualization - if (tgt.getModel().isSpherical) { - var center = OpenCVHelp.avgPoint(imagePoints); - int l = 0, t, b, r = 0; - // reference point (left side midpoint) - for (int i = 1; i < 4; i++) { - if (imagePoints[i].x < imagePoints[l].x) l = i; - } - var lc = imagePoints[l]; - // determine top, right, bottom midpoints - double[] angles = new double[4]; - t = (l + 1) % 4; - b = (l + 1) % 4; - for (int i = 0; i < 4; i++) { - if (i == l) continue; - var ic = imagePoints[i]; - angles[i] = Math.atan2(lc.y - ic.y, ic.x - lc.x); - if (angles[i] >= angles[t]) t = i; - if (angles[i] <= angles[b]) b = i; - } - for (int i = 0; i < 4; i++) { - if (i != t && i != l && i != b) r = i; - } - // create RotatedRect from midpoints - var rect = - new RotatedRect( - new Point(center.x, center.y), - new Size(imagePoints[r].x - lc.x, imagePoints[b].y - imagePoints[t].y), - Math.toDegrees(-angles[r])); - // set target corners to rect corners - Point[] points = new Point[4]; - rect.points(points); - imagePoints = points; - } - // save visible targets for raw video stream simulation - visibleTgts.add(new Pair<>(tgt, imagePoints)); - // estimate pixel noise - var noisyTargetCorners = prop.estPixelNoise(imagePoints); - // find the minimum area rectangle of target corners - var minAreaRect = OpenCVHelp.getMinAreaRect(noisyTargetCorners); - Point[] minAreaRectPts = new Point[4]; - minAreaRect.points(minAreaRectPts); - // find the (naive) 2d yaw/pitch - var centerPt = minAreaRect.center; - var centerRot = prop.getPixelRot(centerPt); - // find contour area - double areaPercent = prop.getContourAreaPercent(noisyTargetCorners); - - // projected target can't be detected, skip to next - if 
(!(canSeeCorners(noisyTargetCorners) && areaPercent >= minTargetAreaPercent)) continue; - - var pnpSim = new PNPResults(); - if (tgt.fiducialID >= 0 && tgt.getFieldVertices().size() == 4) { // single AprilTag solvePNP - pnpSim = - OpenCVHelp.solvePNP_SQUARE( - prop.getIntrinsics(), - prop.getDistCoeffs(), - tgt.getModel().vertices, - noisyTargetCorners); - } - - detectableTgts.add( - new PhotonTrackedTarget( - Math.toDegrees(centerRot.getZ()), - -Math.toDegrees(centerRot.getY()), - areaPercent, - Math.toDegrees(centerRot.getX()), - tgt.fiducialID, - pnpSim.best, - pnpSim.alt, - pnpSim.ambiguity, - OpenCVHelp.pointsToCorners(minAreaRectPts), - OpenCVHelp.pointsToCorners(noisyTargetCorners))); + // render visible tags to raw video frame + if (videoSimRawEnabled) { + // draw field wireframe + if (videoSimWireframeEnabled) { + VideoSimUtil.drawFieldWireframe( + camRt, + prop, + videoSimWireframeResolution, + 1.5, + new Scalar(80), + 6, + 1, + new Scalar(30), + videoSimFrameRaw); + } + + // draw targets + for (var pair : visibleTgts) { + var tgt = pair.getFirst(); + var corn = pair.getSecond(); + + if (tgt.fiducialID >= 0) { // apriltags + VideoSimUtil.warp16h5TagImage(tgt.fiducialID, corn, true, videoSimFrameRaw); + } else if (!tgt.getModel().isSpherical) { // non-spherical targets + var contour = corn; + if (!tgt.getModel() + .isPlanar) { // visualization cant handle non-convex projections of 3d models + contour = OpenCVHelp.getConvexHull(contour); + } + VideoSimUtil.drawPoly(contour, -1, new Scalar(255), true, videoSimFrameRaw); + } else { // spherical targets + VideoSimUtil.drawInscribedEllipse(corn, new Scalar(255), videoSimFrameRaw); } - // render visible tags to raw video frame - if (videoSimRawEnabled) { - // draw field wireframe - if (videoSimWireframeEnabled) { - VideoSimUtil.drawFieldWireframe( - camRt, - prop, - videoSimWireframeResolution, - 1.5, - new Scalar(80), - 6, - 1, - new Scalar(30), - videoSimFrameRaw); - } - - // draw targets - for (var pair : 
visibleTgts) { - var tgt = pair.getFirst(); - var corn = pair.getSecond(); - - if (tgt.fiducialID >= 0) { // apriltags - VideoSimUtil.warp16h5TagImage(tgt.fiducialID, corn, true, videoSimFrameRaw); - } else if (!tgt.getModel().isSpherical) { // non-spherical targets - var contour = corn; - if (!tgt.getModel() - .isPlanar) { // visualization cant handle non-convex projections of 3d models - contour = OpenCVHelp.getConvexHull(contour); - } - VideoSimUtil.drawPoly(contour, -1, new Scalar(255), true, videoSimFrameRaw); - } else { // spherical targets - VideoSimUtil.drawInscribedEllipse(corn, new Scalar(255), videoSimFrameRaw); - } - } - videoSimRaw.putFrame(videoSimFrameRaw); - } else videoSimRaw.setConnectionStrategy(ConnectionStrategy.kForceClose); - // draw/annotate target detection outline on processed view - if (videoSimProcEnabled) { - Imgproc.cvtColor(videoSimFrameRaw, videoSimFrameProcessed, Imgproc.COLOR_GRAY2BGR); - Imgproc.drawMarker( // crosshair - videoSimFrameProcessed, - new Point(prop.getResWidth() / 2.0, prop.getResHeight() / 2.0), - new Scalar(0, 255, 0), - Imgproc.MARKER_CROSS, - (int) VideoSimUtil.getScaledThickness(15, videoSimFrameProcessed), - (int) VideoSimUtil.getScaledThickness(1, videoSimFrameProcessed), - Imgproc.LINE_AA); - for (var tgt : detectableTgts) { - if (tgt.getFiducialId() >= 0) { // apriltags - VideoSimUtil.drawTagDetection( - tgt.getFiducialId(), - OpenCVHelp.cornersToPoints(tgt.getDetectedCorners()), - videoSimFrameProcessed); - } else { // other targets - // bounding rectangle - Imgproc.rectangle( - videoSimFrameProcessed, - OpenCVHelp.getBoundingRect(OpenCVHelp.cornersToPoints(tgt.getDetectedCorners())), - new Scalar(0, 0, 255), - (int) VideoSimUtil.getScaledThickness(1, videoSimFrameProcessed), - Imgproc.LINE_AA); - - VideoSimUtil.drawPoly( - OpenCVHelp.cornersToPoints(tgt.getMinAreaRectCorners()), - (int) VideoSimUtil.getScaledThickness(1, videoSimFrameProcessed), - new Scalar(255, 30, 30), - true, - videoSimFrameProcessed); 
- } - } - videoSimProcessed.putFrame(videoSimFrameProcessed); - } else videoSimProcessed.setConnectionStrategy(ConnectionStrategy.kForceClose); - - // put this simulated data to NT - if (sortMode != null) { - detectableTgts.sort(sortMode.getComparator()); + } + videoSimRaw.putFrame(videoSimFrameRaw); + } else videoSimRaw.setConnectionStrategy(ConnectionStrategy.kForceClose); + // draw/annotate target detection outline on processed view + if (videoSimProcEnabled) { + Imgproc.cvtColor(videoSimFrameRaw, videoSimFrameProcessed, Imgproc.COLOR_GRAY2BGR); + Imgproc.drawMarker( // crosshair + videoSimFrameProcessed, + new Point(prop.getResWidth() / 2.0, prop.getResHeight() / 2.0), + new Scalar(0, 255, 0), + Imgproc.MARKER_CROSS, + (int) VideoSimUtil.getScaledThickness(15, videoSimFrameProcessed), + (int) VideoSimUtil.getScaledThickness(1, videoSimFrameProcessed), + Imgproc.LINE_AA); + for (var tgt : detectableTgts) { + if (tgt.getFiducialId() >= 0) { // apriltags + VideoSimUtil.drawTagDetection( + tgt.getFiducialId(), + OpenCVHelp.cornersToPoints(tgt.getDetectedCorners()), + videoSimFrameProcessed); + } else { // other targets + // bounding rectangle + Imgproc.rectangle( + videoSimFrameProcessed, + OpenCVHelp.getBoundingRect(OpenCVHelp.cornersToPoints(tgt.getDetectedCorners())), + new Scalar(0, 0, 255), + (int) VideoSimUtil.getScaledThickness(1, videoSimFrameProcessed), + Imgproc.LINE_AA); + + VideoSimUtil.drawPoly( + OpenCVHelp.cornersToPoints(tgt.getMinAreaRectCorners()), + (int) VideoSimUtil.getScaledThickness(1, videoSimFrameProcessed), + new Scalar(255, 30, 30), + true, + videoSimFrameProcessed); } - return new PhotonPipelineResult(latencyMillis, detectableTgts); - } + } + videoSimProcessed.putFrame(videoSimFrameProcessed); + } else videoSimProcessed.setConnectionStrategy(ConnectionStrategy.kForceClose); - /** - * Simulate one processed frame of vision data, putting one result to NT at current timestamp. - * Image capture time is assumed be (current time - latency). 
- * - * @param result The pipeline result to submit - */ - public void submitProcessedFrame(PhotonPipelineResult result) { - submitProcessedFrame(result, WPIUtilJNI.now()); + // put this simulated data to NT + if (sortMode != null) { + detectableTgts.sort(sortMode.getComparator()); } - - /** - * Simulate one processed frame of vision data, putting one result to NT. Image capture timestamp - * overrides {@link PhotonPipelineResult#getTimestampSeconds() getTimestampSeconds()} for more - * precise latency simulation. - * - * @param result The pipeline result to submit - * @param receiveTimestamp The (sim) timestamp when this result was read by NT in microseconds - */ - public void submitProcessedFrame(PhotonPipelineResult result, long receiveTimestamp) { - ts.latencyMillisEntry.set(result.getLatencyMillis(), receiveTimestamp); - - var newPacket = new Packet(result.getPacketSize()); - result.populatePacket(newPacket); - ts.rawBytesEntry.set(newPacket.getData(), receiveTimestamp); - - boolean hasTargets = result.hasTargets(); - ts.hasTargetEntry.set(hasTargets, receiveTimestamp); - if (!hasTargets) { - ts.targetPitchEntry.set(0.0, receiveTimestamp); - ts.targetYawEntry.set(0.0, receiveTimestamp); - ts.targetAreaEntry.set(0.0, receiveTimestamp); - ts.targetPoseEntry.set(new double[] {0.0, 0.0, 0.0}, receiveTimestamp); - ts.targetSkewEntry.set(0.0, receiveTimestamp); - } else { - var bestTarget = result.getBestTarget(); - - ts.targetPitchEntry.set(bestTarget.getPitch(), receiveTimestamp); - ts.targetYawEntry.set(bestTarget.getYaw(), receiveTimestamp); - ts.targetAreaEntry.set(bestTarget.getArea(), receiveTimestamp); - ts.targetSkewEntry.set(bestTarget.getSkew(), receiveTimestamp); - - var transform = bestTarget.getBestCameraToTarget(); - double[] poseData = { - transform.getX(), transform.getY(), transform.getRotation().toRotation2d().getDegrees() - }; - ts.targetPoseEntry.set(poseData, receiveTimestamp); - } - - 
ts.cameraIntrinsicsPublisher.set(prop.getIntrinsics().getData(), receiveTimestamp); - ts.cameraDistortionPublisher.set(prop.getDistCoeffs().getData(), receiveTimestamp); - ts.heartbeatPublisher.set(heartbeatCounter++, receiveTimestamp); + return new PhotonPipelineResult(latencyMillis, detectableTgts); + } + + /** + * Simulate one processed frame of vision data, putting one result to NT at current timestamp. + * Image capture time is assumed be (current time - latency). + * + * @param result The pipeline result to submit + */ + public void submitProcessedFrame(PhotonPipelineResult result) { + submitProcessedFrame(result, WPIUtilJNI.now()); + } + + /** + * Simulate one processed frame of vision data, putting one result to NT. Image capture timestamp + * overrides {@link PhotonPipelineResult#getTimestampSeconds() getTimestampSeconds()} for more + * precise latency simulation. + * + * @param result The pipeline result to submit + * @param receiveTimestamp The (sim) timestamp when this result was read by NT in microseconds + */ + public void submitProcessedFrame(PhotonPipelineResult result, long receiveTimestamp) { + ts.latencyMillisEntry.set(result.getLatencyMillis(), receiveTimestamp); + + var newPacket = new Packet(result.getPacketSize()); + result.populatePacket(newPacket); + ts.rawBytesEntry.set(newPacket.getData(), receiveTimestamp); + + boolean hasTargets = result.hasTargets(); + ts.hasTargetEntry.set(hasTargets, receiveTimestamp); + if (!hasTargets) { + ts.targetPitchEntry.set(0.0, receiveTimestamp); + ts.targetYawEntry.set(0.0, receiveTimestamp); + ts.targetAreaEntry.set(0.0, receiveTimestamp); + ts.targetPoseEntry.set(new double[] {0.0, 0.0, 0.0}, receiveTimestamp); + ts.targetSkewEntry.set(0.0, receiveTimestamp); + } else { + var bestTarget = result.getBestTarget(); + + ts.targetPitchEntry.set(bestTarget.getPitch(), receiveTimestamp); + ts.targetYawEntry.set(bestTarget.getYaw(), receiveTimestamp); + ts.targetAreaEntry.set(bestTarget.getArea(), 
receiveTimestamp); + ts.targetSkewEntry.set(bestTarget.getSkew(), receiveTimestamp); + + var transform = bestTarget.getBestCameraToTarget(); + double[] poseData = { + transform.getX(), transform.getY(), transform.getRotation().toRotation2d().getDegrees() + }; + ts.targetPoseEntry.set(poseData, receiveTimestamp); } + + ts.cameraIntrinsicsPublisher.set(prop.getIntrinsics().getData(), receiveTimestamp); + ts.cameraDistortionPublisher.set(prop.getDistCoeffs().getData(), receiveTimestamp); + ts.heartbeatPublisher.set(heartbeatCounter++, receiveTimestamp); + } } diff --git a/photon-lib/src/main/java/org/photonvision/simulation/SimCameraProperties.java b/photon-lib/src/main/java/org/photonvision/simulation/SimCameraProperties.java index 35b0ad190f..56c18a1aad 100644 --- a/photon-lib/src/main/java/org/photonvision/simulation/SimCameraProperties.java +++ b/photon-lib/src/main/java/org/photonvision/simulation/SimCameraProperties.java @@ -65,675 +65,675 @@ * network latency and the latency reported will always be perfect. */ public class SimCameraProperties { - private final Random rand = new Random(); - // calibration - private int resWidth; - private int resHeight; - private Matrix camIntrinsics; - private Matrix distCoeffs; - private double avgErrorPx; - private double errorStdDevPx; - // performance - private double frameSpeedMs = 0; - private double exposureTimeMs = 0; - private double avgLatencyMs = 0; - private double latencyStdDevMs = 0; - // util - private List viewplanes = new ArrayList<>(); - - /** Default constructor which is the same as {@link #PERFECT_90DEG} */ - public SimCameraProperties() { - setCalibration(960, 720, Rotation2d.fromDegrees(90)); - } - - /** - * Reads camera properties from a photonvision config.json file. This is only the - * resolution, camera intrinsics, distortion coefficients, and average/std. dev. pixel error. - * Other camera properties must be set. 
- * - * @param path Path to the config.json file - * @param width The width of the desired resolution in the JSON - * @param height The height of the desired resolution in the JSON - * @throws IOException If reading the JSON fails, either from an invalid path or a missing/invalid - * calibrated resolution. - */ - public SimCameraProperties(String path, int width, int height) throws IOException { - this(Path.of(path), width, height); - } - - /** - * Reads camera properties from a photonvision config.json file. This is only the - * resolution, camera intrinsics, distortion coefficients, and average/std. dev. pixel error. - * Other camera properties must be set. - * - * @param path Path to the config.json file - * @param width The width of the desired resolution in the JSON - * @param height The height of the desired resolution in the JSON - * @throws IOException If reading the JSON fails, either from an invalid path or a missing/invalid - * calibrated resolution. - */ - public SimCameraProperties(Path path, int width, int height) throws IOException { - var mapper = new ObjectMapper(); - var json = mapper.readTree(path.toFile()); - json = json.get("calibrations"); - boolean success = false; - try { - for (int i = 0; i < json.size() && !success; i++) { - // check if this calibration entry is our desired resolution - var calib = json.get(i); - int jsonWidth = calib.get("resolution").get("width").asInt(); - int jsonHeight = calib.get("resolution").get("height").asInt(); - if (jsonWidth != width || jsonHeight != height) continue; - // get the relevant calibration values - var jsonIntrinsicsNode = calib.get("cameraIntrinsics").get("data"); - double[] jsonIntrinsics = new double[jsonIntrinsicsNode.size()]; - for (int j = 0; j < jsonIntrinsicsNode.size(); j++) { - jsonIntrinsics[j] = jsonIntrinsicsNode.get(j).asDouble(); - } - var jsonDistortNode = calib.get("cameraExtrinsics").get("data"); - double[] jsonDistortion = new double[jsonDistortNode.size()]; - for (int j = 0; j < 
jsonDistortNode.size(); j++) { - jsonDistortion[j] = jsonDistortNode.get(j).asDouble(); - } - var jsonViewErrors = calib.get("perViewErrors"); - double jsonAvgError = 0; - for (int j = 0; j < jsonViewErrors.size(); j++) { - jsonAvgError += jsonViewErrors.get(j).asDouble(); - } - jsonAvgError /= jsonViewErrors.size(); - double jsonErrorStdDev = calib.get("standardDeviation").asDouble(); - // assign the read JSON values to this CameraProperties - setCalibration( - jsonWidth, - jsonHeight, - Matrix.mat(Nat.N3(), Nat.N3()).fill(jsonIntrinsics), - Matrix.mat(Nat.N5(), Nat.N1()).fill(jsonDistortion)); - setCalibError(jsonAvgError, jsonErrorStdDev); - success = true; - } - } catch (Exception e) { - throw new IOException("Invalid calibration JSON"); - } - if (!success) throw new IOException("Requested resolution not found in calibration"); - } - - public void setRandomSeed(long seed) { - rand.setSeed(seed); - } - - public void setCalibration(int resWidth, int resHeight, Rotation2d fovDiag) { - if (fovDiag.getDegrees() < 1 || fovDiag.getDegrees() > 179) { - fovDiag = Rotation2d.fromDegrees(MathUtil.clamp(fovDiag.getDegrees(), 1, 179)); - DriverStation.reportError( - "Requested invalid FOV! 
Clamping between (1, 179) degrees...", false); - } - double resDiag = Math.sqrt(resWidth * resWidth + resHeight * resHeight); - double diagRatio = Math.tan(fovDiag.getRadians() / 2); - var fovWidth = new Rotation2d(Math.atan(diagRatio * (resWidth / resDiag)) * 2); - var fovHeight = new Rotation2d(Math.atan(diagRatio * (resHeight / resDiag)) * 2); - - // assume no distortion - var distCoeff = VecBuilder.fill(0, 0, 0, 0, 0); - - // assume centered principal point (pixels) - double cx = resWidth / 2.0 - 0.5; - double cy = resHeight / 2.0 - 0.5; - - // use given fov to determine focal point (pixels) - double fx = cx / Math.tan(fovWidth.getRadians() / 2.0); - double fy = cy / Math.tan(fovHeight.getRadians() / 2.0); - - // create camera intrinsics matrix - var camIntrinsics = Matrix.mat(Nat.N3(), Nat.N3()).fill(fx, 0, cx, 0, fy, cy, 0, 0, 1); - setCalibration(resWidth, resHeight, camIntrinsics, distCoeff); - } - - public void setCalibration( - int resWidth, int resHeight, Matrix camIntrinsics, Matrix distCoeffs) { - this.resWidth = resWidth; - this.resHeight = resHeight; - this.camIntrinsics = camIntrinsics; - this.distCoeffs = distCoeffs; - - // left, right, up, and down view planes - var p = - new Translation3d[] { - new Translation3d( - 1, - new Rotation3d(0, 0, getPixelYaw(0).plus(new Rotation2d(-Math.PI / 2)).getRadians())), - new Translation3d( - 1, - new Rotation3d( - 0, 0, getPixelYaw(resWidth).plus(new Rotation2d(Math.PI / 2)).getRadians())), - new Translation3d( - 1, - new Rotation3d( - 0, getPixelPitch(0).plus(new Rotation2d(Math.PI / 2)).getRadians(), 0)), - new Translation3d( - 1, - new Rotation3d( - 0, getPixelPitch(resHeight).plus(new Rotation2d(-Math.PI / 2)).getRadians(), 0)) - }; - viewplanes.clear(); - for (int i = 0; i < p.length; i++) { - viewplanes.add(new DMatrix3(p[i].getX(), p[i].getY(), p[i].getZ())); - } - } - - public void setCalibError(double avgErrorPx, double errorStdDevPx) { - this.avgErrorPx = avgErrorPx; - this.errorStdDevPx = 
errorStdDevPx; - } - - /** - * @param fps The average frames per second the camera should process at. Exposure time limits - * FPS if set! - */ - public void setFPS(double fps) { - frameSpeedMs = Math.max(1000.0 / fps, exposureTimeMs); - } - - /** - * @param exposureTimeMs The amount of time the "shutter" is open for one frame. Affects motion - * blur. Frame speed(from FPS) is limited to this! - */ - public void setExposureTimeMs(double exposureTimeMs) { - this.exposureTimeMs = exposureTimeMs; - frameSpeedMs = Math.max(frameSpeedMs, exposureTimeMs); - } - - /** - * @param avgLatencyMs The average latency (from image capture to data published) in milliseconds - * a frame should have - */ - public void setAvgLatencyMs(double avgLatencyMs) { - this.avgLatencyMs = avgLatencyMs; - } - - /** - * @param latencyStdDevMs The standard deviation in milliseconds of the latency - */ - public void setLatencyStdDevMs(double latencyStdDevMs) { - this.latencyStdDevMs = latencyStdDevMs; - } - - public int getResWidth() { - return resWidth; - } - - public int getResHeight() { - return resHeight; - } - - public int getResArea() { - return resWidth * resHeight; - } - - /** Width:height */ - public double getAspectRatio() { - return (double) resWidth / resHeight; - } - - public Matrix getIntrinsics() { - return camIntrinsics.copy(); - } - - public Vector getDistCoeffs() { - return new Vector<>(distCoeffs); - } - - public double getFPS() { - return 1000.0 / frameSpeedMs; - } - - public double getFrameSpeedMs() { - return frameSpeedMs; - } - - public double getExposureTimeMs() { - return exposureTimeMs; - } - - public double getAvgLatencyMs() { - return avgLatencyMs; - } - - public double getLatencyStdDevMs() { - return latencyStdDevMs; - } - - public SimCameraProperties copy() { - var newProp = new SimCameraProperties(); - newProp.setCalibration(resWidth, resHeight, camIntrinsics, distCoeffs); - newProp.setCalibError(avgErrorPx, errorStdDevPx); - newProp.setFPS(getFPS()); - 
newProp.setExposureTimeMs(exposureTimeMs); - newProp.setAvgLatencyMs(avgLatencyMs); - newProp.setLatencyStdDevMs(latencyStdDevMs); - return newProp; - } - - /** - * The percentage(0 - 100) of this camera's resolution the contour takes up in pixels of the - * image. - * - * @param points Points of the contour - */ - public double getContourAreaPercent(Point[] points) { - return Imgproc.contourArea(new MatOfPoint2f(OpenCVHelp.getConvexHull(points))) - / getResArea() - * 100; - } - - /** The yaw from the principal point of this camera to the pixel x value. Positive values left. */ - public Rotation2d getPixelYaw(double pixelX) { - double fx = camIntrinsics.get(0, 0); - // account for principal point not being centered - double cx = camIntrinsics.get(0, 2); - double xOffset = cx - pixelX; - return new Rotation2d(fx, xOffset); - } - - /** - * The pitch from the principal point of this camera to the pixel y value. Pitch is positive down. - * - *

Note that this angle is naively computed and may be incorrect. See {@link - * #getCorrectedPixelRot(Point)}. - */ - public Rotation2d getPixelPitch(double pixelY) { - double fy = camIntrinsics.get(1, 1); - // account for principal point not being centered - double cy = camIntrinsics.get(1, 2); - double yOffset = cy - pixelY; - return new Rotation2d(fy, -yOffset); - } - - /** - * Finds the yaw and pitch to the given image point. Yaw is positive left, and pitch is positive - * down. - * - *

Note that pitch is naively computed and may be incorrect. See {@link - * #getCorrectedPixelRot(Point)}. - */ - public Rotation3d getPixelRot(Point point) { - return new Rotation3d( - 0, getPixelPitch(point.y).getRadians(), getPixelYaw(point.x).getRadians()); - } - - /** - * Gives the yaw and pitch of the line intersecting the camera lens and the given pixel - * coordinates on the sensor. Yaw is positive left, and pitch positive down. - * - *

The pitch traditionally calculated from pixel offsets do not correctly account for non-zero - * values of yaw because of perspective distortion (not to be confused with lens distortion)-- for - * example, the pitch angle is naively calculated as: - * - *

pitch = arctan(pixel y offset / focal length y)
- * - * However, using focal length as a side of the associated right triangle is not correct when the - * pixel x value is not 0, because the distance from this pixel (projected on the x-axis) to the - * camera lens increases. Projecting a line back out of the camera with these naive angles will - * not intersect the 3d point that was originally projected into this 2d pixel. Instead, this - * length should be: - * - *
focal length y ⟶ (focal length y / cos(arctan(pixel x offset / focal length x)))
- * - * @return Rotation3d with yaw and pitch of the line projected out of the camera from the given - * pixel (roll is zero). - */ - public Rotation3d getCorrectedPixelRot(Point point) { - double fx = camIntrinsics.get(0, 0); - double cx = camIntrinsics.get(0, 2); - double xOffset = cx - point.x; - - double fy = camIntrinsics.get(1, 1); - double cy = camIntrinsics.get(1, 2); - double yOffset = cy - point.y; - - // calculate yaw normally - var yaw = new Rotation2d(fx, xOffset); - // correct pitch based on yaw - var pitch = new Rotation2d(fy / Math.cos(Math.atan(xOffset / fx)), -yOffset); - - return new Rotation3d(0, pitch.getRadians(), yaw.getRadians()); - } - - public Rotation2d getHorizFOV() { - // sum of FOV left and right principal point - var left = getPixelYaw(0); - var right = getPixelYaw(resWidth); - return left.minus(right); - } - - public Rotation2d getVertFOV() { - // sum of FOV above and below principal point - var above = getPixelPitch(0); - var below = getPixelPitch(resHeight); - return below.minus(above); - } - - public Rotation2d getDiagFOV() { - return new Rotation2d(Math.hypot(getHorizFOV().getRadians(), getVertFOV().getRadians())); - } - - /** - * Determines where the line segment defined by the two given translations intersects the camera's - * frustum/field-of-vision, if at all. - * - *

The line is parametrized so any of its points p = t * (b - a) + a. This method - * returns these values of t, minimum first, defining the region of the line segment which is - * visible in the frustum. If both ends of the line segment are visible, this simply returns {0, - * 1}. If, for example, point b is visible while a is not, and half of the line segment is inside - * the camera frustum, {0.5, 1} would be returned. - * - * @param camRt The change in basis from world coordinates to camera coordinates. See {@link - * RotTrlTransform3d#makeRelativeTo(Pose3d)}. - * @param a The initial translation of the line - * @param b The final translation of the line - * @return A Pair of Doubles. The values may be null: - *

    - *
  • {Double, Double} : Two parametrized values(t), minimum first, representing which - * segment of the line is visible in the camera frustum. - *
  • {Double, null} : One value(t) representing a single intersection point. For example, - * the line only intersects the intersection of two adjacent viewplanes. - *
  • {null, null} : No values. The line segment is not visible in the camera frustum. - *
- */ - public Pair getVisibleLine( - RotTrlTransform3d camRt, Translation3d a, Translation3d b) { - // translations relative to the camera - var rela = camRt.apply(a); - var relb = camRt.apply(b); - - // check if both ends are behind camera - if (rela.getX() <= 0 && relb.getX() <= 0) return new Pair<>(null, null); - - var av = new DMatrix3(rela.getX(), rela.getY(), rela.getZ()); - var bv = new DMatrix3(relb.getX(), relb.getY(), relb.getZ()); - // a to b - var abv = new DMatrix3(); - CommonOps_DDF3.subtract(bv, av, abv); - - // check if the ends of the line segment are visible - boolean aVisible = true; - boolean bVisible = true; - for (int i = 0; i < viewplanes.size(); i++) { - var normal = viewplanes.get(i); - double aVisibility = CommonOps_DDF3.dot(av, normal); - if (aVisibility < 0) aVisible = false; - double bVisibility = CommonOps_DDF3.dot(bv, normal); - if (bVisibility < 0) bVisible = false; - // both ends are outside at least one of the same viewplane - if (aVisibility <= 0 && bVisibility <= 0) return new Pair<>(null, null); + private final Random rand = new Random(); + // calibration + private int resWidth; + private int resHeight; + private Matrix camIntrinsics; + private Matrix distCoeffs; + private double avgErrorPx; + private double errorStdDevPx; + // performance + private double frameSpeedMs = 0; + private double exposureTimeMs = 0; + private double avgLatencyMs = 0; + private double latencyStdDevMs = 0; + // util + private List viewplanes = new ArrayList<>(); + + /** Default constructor which is the same as {@link #PERFECT_90DEG} */ + public SimCameraProperties() { + setCalibration(960, 720, Rotation2d.fromDegrees(90)); + } + + /** + * Reads camera properties from a photonvision config.json file. This is only the + * resolution, camera intrinsics, distortion coefficients, and average/std. dev. pixel error. + * Other camera properties must be set. 
+ * + * @param path Path to the config.json file + * @param width The width of the desired resolution in the JSON + * @param height The height of the desired resolution in the JSON + * @throws IOException If reading the JSON fails, either from an invalid path or a missing/invalid + * calibrated resolution. + */ + public SimCameraProperties(String path, int width, int height) throws IOException { + this(Path.of(path), width, height); + } + + /** + * Reads camera properties from a photonvision config.json file. This is only the + * resolution, camera intrinsics, distortion coefficients, and average/std. dev. pixel error. + * Other camera properties must be set. + * + * @param path Path to the config.json file + * @param width The width of the desired resolution in the JSON + * @param height The height of the desired resolution in the JSON + * @throws IOException If reading the JSON fails, either from an invalid path or a missing/invalid + * calibrated resolution. + */ + public SimCameraProperties(Path path, int width, int height) throws IOException { + var mapper = new ObjectMapper(); + var json = mapper.readTree(path.toFile()); + json = json.get("calibrations"); + boolean success = false; + try { + for (int i = 0; i < json.size() && !success; i++) { + // check if this calibration entry is our desired resolution + var calib = json.get(i); + int jsonWidth = calib.get("resolution").get("width").asInt(); + int jsonHeight = calib.get("resolution").get("height").asInt(); + if (jsonWidth != width || jsonHeight != height) continue; + // get the relevant calibration values + var jsonIntrinsicsNode = calib.get("cameraIntrinsics").get("data"); + double[] jsonIntrinsics = new double[jsonIntrinsicsNode.size()]; + for (int j = 0; j < jsonIntrinsicsNode.size(); j++) { + jsonIntrinsics[j] = jsonIntrinsicsNode.get(j).asDouble(); } - // both ends are inside frustum - if (aVisible && bVisible) return new Pair<>(Double.valueOf(0), Double.valueOf(1)); - - // parametrized (t=0 at a, t=1 
at b) intersections with viewplanes - double[] intersections = {Double.NaN, Double.NaN, Double.NaN, Double.NaN}; - // intersection points - List ipts = new ArrayList<>(); - for (double val : intersections) ipts.add(null); - - // find intersections - for (int i = 0; i < viewplanes.size(); i++) { - var normal = viewplanes.get(i); - - // we want to know the value of t when the line intercepts this plane - // parametrized: v = t * ab + a, where v lies on the plane - // we can find the projection of a onto the plane normal - // a_projn = normal.times(av.dot(normal) / normal.dot(normal)); - var a_projn = new DMatrix3(); - CommonOps_DDF3.scale( - CommonOps_DDF3.dot(av, normal) / CommonOps_DDF3.dot(normal, normal), normal, a_projn); - // this projection lets us determine the scalar multiple t of ab where - // (t * ab + a) is a vector which lies on the plane - if (Math.abs(CommonOps_DDF3.dot(abv, a_projn)) < 1e-5) continue; // line is parallel to plane - intersections[i] = CommonOps_DDF3.dot(a_projn, a_projn) / -CommonOps_DDF3.dot(abv, a_projn); - - // vector a to the viewplane - var apv = new DMatrix3(); - CommonOps_DDF3.scale(intersections[i], abv, apv); - // av + apv = intersection point - var intersectpt = new DMatrix3(); - CommonOps_DDF3.add(av, apv, intersectpt); - ipts.set(i, intersectpt); - - // discard intersections outside the camera frustum - for (int j = 1; j < viewplanes.size(); j++) { - int oi = (i + j) % viewplanes.size(); - var onormal = viewplanes.get(oi); - // if the dot of the intersection point with any plane normal is negative, it is outside - if (CommonOps_DDF3.dot(intersectpt, onormal) < 0) { - intersections[i] = Double.NaN; - ipts.set(i, null); - break; - } - } - - // discard duplicate intersections - if (ipts.get(i) == null) continue; - for (int j = i - 1; j >= 0; j--) { - var oipt = ipts.get(j); - if (oipt == null) continue; - var diff = new DMatrix3(); - CommonOps_DDF3.subtract(oipt, intersectpt, diff); - if (CommonOps_DDF3.elementMaxAbs(diff) < 
1e-4) { - intersections[i] = Double.NaN; - ipts.set(i, null); - break; - } - } + var jsonDistortNode = calib.get("cameraExtrinsics").get("data"); + double[] jsonDistortion = new double[jsonDistortNode.size()]; + for (int j = 0; j < jsonDistortNode.size(); j++) { + jsonDistortion[j] = jsonDistortNode.get(j).asDouble(); } - - // determine visible segment (minimum and maximum t) - double inter1 = Double.NaN; - double inter2 = Double.NaN; - for (double inter : intersections) { - if (!Double.isNaN(inter)) { - if (Double.isNaN(inter1)) inter1 = inter; - else inter2 = inter; - } - } - - // two viewplane intersections - if (!Double.isNaN(inter2)) { - double max = Math.max(inter1, inter2); - double min = Math.min(inter1, inter2); - if (aVisible) min = 0; - if (bVisible) max = 1; - return new Pair<>(min, max); + var jsonViewErrors = calib.get("perViewErrors"); + double jsonAvgError = 0; + for (int j = 0; j < jsonViewErrors.size(); j++) { + jsonAvgError += jsonViewErrors.get(j).asDouble(); } - // one viewplane intersection - else if (!Double.isNaN(inter1)) { - if (aVisible) return new Pair<>(Double.valueOf(0), inter1); - if (bVisible) return new Pair<>(inter1, Double.valueOf(1)); - return new Pair<>(inter1, null); + jsonAvgError /= jsonViewErrors.size(); + double jsonErrorStdDev = calib.get("standardDeviation").asDouble(); + // assign the read JSON values to this CameraProperties + setCalibration( + jsonWidth, + jsonHeight, + Matrix.mat(Nat.N3(), Nat.N3()).fill(jsonIntrinsics), + Matrix.mat(Nat.N5(), Nat.N1()).fill(jsonDistortion)); + setCalibError(jsonAvgError, jsonErrorStdDev); + success = true; + } + } catch (Exception e) { + throw new IOException("Invalid calibration JSON"); + } + if (!success) throw new IOException("Requested resolution not found in calibration"); + } + + public void setRandomSeed(long seed) { + rand.setSeed(seed); + } + + public void setCalibration(int resWidth, int resHeight, Rotation2d fovDiag) { + if (fovDiag.getDegrees() < 1 || fovDiag.getDegrees() 
> 179) { + fovDiag = Rotation2d.fromDegrees(MathUtil.clamp(fovDiag.getDegrees(), 1, 179)); + DriverStation.reportError( + "Requested invalid FOV! Clamping between (1, 179) degrees...", false); + } + double resDiag = Math.sqrt(resWidth * resWidth + resHeight * resHeight); + double diagRatio = Math.tan(fovDiag.getRadians() / 2); + var fovWidth = new Rotation2d(Math.atan(diagRatio * (resWidth / resDiag)) * 2); + var fovHeight = new Rotation2d(Math.atan(diagRatio * (resHeight / resDiag)) * 2); + + // assume no distortion + var distCoeff = VecBuilder.fill(0, 0, 0, 0, 0); + + // assume centered principal point (pixels) + double cx = resWidth / 2.0 - 0.5; + double cy = resHeight / 2.0 - 0.5; + + // use given fov to determine focal point (pixels) + double fx = cx / Math.tan(fovWidth.getRadians() / 2.0); + double fy = cy / Math.tan(fovHeight.getRadians() / 2.0); + + // create camera intrinsics matrix + var camIntrinsics = Matrix.mat(Nat.N3(), Nat.N3()).fill(fx, 0, cx, 0, fy, cy, 0, 0, 1); + setCalibration(resWidth, resHeight, camIntrinsics, distCoeff); + } + + public void setCalibration( + int resWidth, int resHeight, Matrix camIntrinsics, Matrix distCoeffs) { + this.resWidth = resWidth; + this.resHeight = resHeight; + this.camIntrinsics = camIntrinsics; + this.distCoeffs = distCoeffs; + + // left, right, up, and down view planes + var p = + new Translation3d[] { + new Translation3d( + 1, + new Rotation3d(0, 0, getPixelYaw(0).plus(new Rotation2d(-Math.PI / 2)).getRadians())), + new Translation3d( + 1, + new Rotation3d( + 0, 0, getPixelYaw(resWidth).plus(new Rotation2d(Math.PI / 2)).getRadians())), + new Translation3d( + 1, + new Rotation3d( + 0, getPixelPitch(0).plus(new Rotation2d(Math.PI / 2)).getRadians(), 0)), + new Translation3d( + 1, + new Rotation3d( + 0, getPixelPitch(resHeight).plus(new Rotation2d(-Math.PI / 2)).getRadians(), 0)) + }; + viewplanes.clear(); + for (int i = 0; i < p.length; i++) { + viewplanes.add(new DMatrix3(p[i].getX(), p[i].getY(), p[i].getZ())); 
+ } + } + + public void setCalibError(double avgErrorPx, double errorStdDevPx) { + this.avgErrorPx = avgErrorPx; + this.errorStdDevPx = errorStdDevPx; + } + + /** + * @param fps The average frames per second the camera should process at. Exposure time limits + * FPS if set! + */ + public void setFPS(double fps) { + frameSpeedMs = Math.max(1000.0 / fps, exposureTimeMs); + } + + /** + * @param exposureTimeMs The amount of time the "shutter" is open for one frame. Affects motion + * blur. Frame speed(from FPS) is limited to this! + */ + public void setExposureTimeMs(double exposureTimeMs) { + this.exposureTimeMs = exposureTimeMs; + frameSpeedMs = Math.max(frameSpeedMs, exposureTimeMs); + } + + /** + * @param avgLatencyMs The average latency (from image capture to data published) in milliseconds + * a frame should have + */ + public void setAvgLatencyMs(double avgLatencyMs) { + this.avgLatencyMs = avgLatencyMs; + } + + /** + * @param latencyStdDevMs The standard deviation in milliseconds of the latency + */ + public void setLatencyStdDevMs(double latencyStdDevMs) { + this.latencyStdDevMs = latencyStdDevMs; + } + + public int getResWidth() { + return resWidth; + } + + public int getResHeight() { + return resHeight; + } + + public int getResArea() { + return resWidth * resHeight; + } + + /** Width:height */ + public double getAspectRatio() { + return (double) resWidth / resHeight; + } + + public Matrix getIntrinsics() { + return camIntrinsics.copy(); + } + + public Vector getDistCoeffs() { + return new Vector<>(distCoeffs); + } + + public double getFPS() { + return 1000.0 / frameSpeedMs; + } + + public double getFrameSpeedMs() { + return frameSpeedMs; + } + + public double getExposureTimeMs() { + return exposureTimeMs; + } + + public double getAvgLatencyMs() { + return avgLatencyMs; + } + + public double getLatencyStdDevMs() { + return latencyStdDevMs; + } + + public SimCameraProperties copy() { + var newProp = new SimCameraProperties(); + 
newProp.setCalibration(resWidth, resHeight, camIntrinsics, distCoeffs); + newProp.setCalibError(avgErrorPx, errorStdDevPx); + newProp.setFPS(getFPS()); + newProp.setExposureTimeMs(exposureTimeMs); + newProp.setAvgLatencyMs(avgLatencyMs); + newProp.setLatencyStdDevMs(latencyStdDevMs); + return newProp; + } + + /** + * The percentage(0 - 100) of this camera's resolution the contour takes up in pixels of the + * image. + * + * @param points Points of the contour + */ + public double getContourAreaPercent(Point[] points) { + return Imgproc.contourArea(new MatOfPoint2f(OpenCVHelp.getConvexHull(points))) + / getResArea() + * 100; + } + + /** The yaw from the principal point of this camera to the pixel x value. Positive values left. */ + public Rotation2d getPixelYaw(double pixelX) { + double fx = camIntrinsics.get(0, 0); + // account for principal point not being centered + double cx = camIntrinsics.get(0, 2); + double xOffset = cx - pixelX; + return new Rotation2d(fx, xOffset); + } + + /** + * The pitch from the principal point of this camera to the pixel y value. Pitch is positive down. + * + *

Note that this angle is naively computed and may be incorrect. See {@link + * #getCorrectedPixelRot(Point)}. + */ + public Rotation2d getPixelPitch(double pixelY) { + double fy = camIntrinsics.get(1, 1); + // account for principal point not being centered + double cy = camIntrinsics.get(1, 2); + double yOffset = cy - pixelY; + return new Rotation2d(fy, -yOffset); + } + + /** + * Finds the yaw and pitch to the given image point. Yaw is positive left, and pitch is positive + * down. + * + *

Note that pitch is naively computed and may be incorrect. See {@link + * #getCorrectedPixelRot(Point)}. + */ + public Rotation3d getPixelRot(Point point) { + return new Rotation3d( + 0, getPixelPitch(point.y).getRadians(), getPixelYaw(point.x).getRadians()); + } + + /** + * Gives the yaw and pitch of the line intersecting the camera lens and the given pixel + * coordinates on the sensor. Yaw is positive left, and pitch positive down. + * + *

The pitch traditionally calculated from pixel offsets do not correctly account for non-zero + * values of yaw because of perspective distortion (not to be confused with lens distortion)-- for + * example, the pitch angle is naively calculated as: + * + *

pitch = arctan(pixel y offset / focal length y)
+ * + * However, using focal length as a side of the associated right triangle is not correct when the + * pixel x value is not 0, because the distance from this pixel (projected on the x-axis) to the + * camera lens increases. Projecting a line back out of the camera with these naive angles will + * not intersect the 3d point that was originally projected into this 2d pixel. Instead, this + * length should be: + * + *
focal length y ⟶ (focal length y / cos(arctan(pixel x offset / focal length x)))
+ * + * @return Rotation3d with yaw and pitch of the line projected out of the camera from the given + * pixel (roll is zero). + */ + public Rotation3d getCorrectedPixelRot(Point point) { + double fx = camIntrinsics.get(0, 0); + double cx = camIntrinsics.get(0, 2); + double xOffset = cx - point.x; + + double fy = camIntrinsics.get(1, 1); + double cy = camIntrinsics.get(1, 2); + double yOffset = cy - point.y; + + // calculate yaw normally + var yaw = new Rotation2d(fx, xOffset); + // correct pitch based on yaw + var pitch = new Rotation2d(fy / Math.cos(Math.atan(xOffset / fx)), -yOffset); + + return new Rotation3d(0, pitch.getRadians(), yaw.getRadians()); + } + + public Rotation2d getHorizFOV() { + // sum of FOV left and right principal point + var left = getPixelYaw(0); + var right = getPixelYaw(resWidth); + return left.minus(right); + } + + public Rotation2d getVertFOV() { + // sum of FOV above and below principal point + var above = getPixelPitch(0); + var below = getPixelPitch(resHeight); + return below.minus(above); + } + + public Rotation2d getDiagFOV() { + return new Rotation2d(Math.hypot(getHorizFOV().getRadians(), getVertFOV().getRadians())); + } + + /** + * Determines where the line segment defined by the two given translations intersects the camera's + * frustum/field-of-vision, if at all. + * + *

The line is parametrized so any of its points p = t * (b - a) + a. This method + * returns these values of t, minimum first, defining the region of the line segment which is + * visible in the frustum. If both ends of the line segment are visible, this simply returns {0, + * 1}. If, for example, point b is visible while a is not, and half of the line segment is inside + * the camera frustum, {0.5, 1} would be returned. + * + * @param camRt The change in basis from world coordinates to camera coordinates. See {@link + * RotTrlTransform3d#makeRelativeTo(Pose3d)}. + * @param a The initial translation of the line + * @param b The final translation of the line + * @return A Pair of Doubles. The values may be null: + *

    + *
  • {Double, Double} : Two parametrized values(t), minimum first, representing which + * segment of the line is visible in the camera frustum. + *
  • {Double, null} : One value(t) representing a single intersection point. For example, + * the line only intersects the intersection of two adjacent viewplanes. + *
  • {null, null} : No values. The line segment is not visible in the camera frustum. + *
+ */ + public Pair getVisibleLine( + RotTrlTransform3d camRt, Translation3d a, Translation3d b) { + // translations relative to the camera + var rela = camRt.apply(a); + var relb = camRt.apply(b); + + // check if both ends are behind camera + if (rela.getX() <= 0 && relb.getX() <= 0) return new Pair<>(null, null); + + var av = new DMatrix3(rela.getX(), rela.getY(), rela.getZ()); + var bv = new DMatrix3(relb.getX(), relb.getY(), relb.getZ()); + // a to b + var abv = new DMatrix3(); + CommonOps_DDF3.subtract(bv, av, abv); + + // check if the ends of the line segment are visible + boolean aVisible = true; + boolean bVisible = true; + for (int i = 0; i < viewplanes.size(); i++) { + var normal = viewplanes.get(i); + double aVisibility = CommonOps_DDF3.dot(av, normal); + if (aVisibility < 0) aVisible = false; + double bVisibility = CommonOps_DDF3.dot(bv, normal); + if (bVisibility < 0) bVisible = false; + // both ends are outside at least one of the same viewplane + if (aVisibility <= 0 && bVisibility <= 0) return new Pair<>(null, null); + } + // both ends are inside frustum + if (aVisible && bVisible) return new Pair<>(Double.valueOf(0), Double.valueOf(1)); + + // parametrized (t=0 at a, t=1 at b) intersections with viewplanes + double[] intersections = {Double.NaN, Double.NaN, Double.NaN, Double.NaN}; + // intersection points + List ipts = new ArrayList<>(); + for (double val : intersections) ipts.add(null); + + // find intersections + for (int i = 0; i < viewplanes.size(); i++) { + var normal = viewplanes.get(i); + + // we want to know the value of t when the line intercepts this plane + // parametrized: v = t * ab + a, where v lies on the plane + // we can find the projection of a onto the plane normal + // a_projn = normal.times(av.dot(normal) / normal.dot(normal)); + var a_projn = new DMatrix3(); + CommonOps_DDF3.scale( + CommonOps_DDF3.dot(av, normal) / CommonOps_DDF3.dot(normal, normal), normal, a_projn); + // this projection lets us determine the scalar multiple 
t of ab where + // (t * ab + a) is a vector which lies on the plane + if (Math.abs(CommonOps_DDF3.dot(abv, a_projn)) < 1e-5) continue; // line is parallel to plane + intersections[i] = CommonOps_DDF3.dot(a_projn, a_projn) / -CommonOps_DDF3.dot(abv, a_projn); + + // vector a to the viewplane + var apv = new DMatrix3(); + CommonOps_DDF3.scale(intersections[i], abv, apv); + // av + apv = intersection point + var intersectpt = new DMatrix3(); + CommonOps_DDF3.add(av, apv, intersectpt); + ipts.set(i, intersectpt); + + // discard intersections outside the camera frustum + for (int j = 1; j < viewplanes.size(); j++) { + int oi = (i + j) % viewplanes.size(); + var onormal = viewplanes.get(oi); + // if the dot of the intersection point with any plane normal is negative, it is outside + if (CommonOps_DDF3.dot(intersectpt, onormal) < 0) { + intersections[i] = Double.NaN; + ipts.set(i, null); + break; } - // no intersections - else return new Pair<>(null, null); - } - - /** Returns these points after applying this camera's estimated noise. 
*/ - public Point[] estPixelNoise(Point[] points) { - if (avgErrorPx == 0 && errorStdDevPx == 0) return points; - - Point[] noisyPts = new Point[points.length]; - for (int i = 0; i < points.length; i++) { - var p = points[i]; - // error pixels in random direction - double error = avgErrorPx + rand.nextGaussian() * errorStdDevPx; - double errorAngle = rand.nextDouble() * 2 * Math.PI - Math.PI; - noisyPts[i] = - new Point(p.x + error * Math.cos(errorAngle), p.y + error * Math.sin(errorAngle)); + } + + // discard duplicate intersections + if (ipts.get(i) == null) continue; + for (int j = i - 1; j >= 0; j--) { + var oipt = ipts.get(j); + if (oipt == null) continue; + var diff = new DMatrix3(); + CommonOps_DDF3.subtract(oipt, intersectpt, diff); + if (CommonOps_DDF3.elementMaxAbs(diff) < 1e-4) { + intersections[i] = Double.NaN; + ipts.set(i, null); + break; } - return noisyPts; - } - - /** - * @return Noisy estimation of a frame's processing latency in milliseconds - */ - public double estLatencyMs() { - return Math.max(avgLatencyMs + rand.nextGaussian() * latencyStdDevMs, 0); - } - - /** - * @return Estimate how long until the next frame should be processed in milliseconds - */ - public double estMsUntilNextFrame() { - // exceptional processing latency blocks the next frame - return frameSpeedMs + Math.max(0, estLatencyMs() - frameSpeedMs); - } - - // pre-calibrated example cameras - - /** 960x720 resolution, 90 degree FOV, "perfect" lagless camera */ - public static SimCameraProperties PERFECT_90DEG() { - return new SimCameraProperties(); - } - - public static SimCameraProperties PI4_LIFECAM_320_240() { - var prop = new SimCameraProperties(); - prop.setCalibration( - 320, - 240, - Matrix.mat(Nat.N3(), Nat.N3()) - .fill( // intrinsic - 328.2733242048587, - 0.0, - 164.8190261141906, - 0.0, - 318.0609794305216, - 123.8633838438093, - 0.0, - 0.0, - 1.0), - VecBuilder.fill( // distort - 0.09957946553445934, - -0.9166265114485799, - 0.0019519890627236526, - 
-0.0036071725380870333, - 1.5627234622420942)); - prop.setCalibError(0.21, 0.0124); - prop.setFPS(30); - prop.setAvgLatencyMs(30); - prop.setLatencyStdDevMs(10); - return prop; - } - - public static SimCameraProperties PI4_LIFECAM_640_480() { - var prop = new SimCameraProperties(); - prop.setCalibration( - 640, - 480, - Matrix.mat(Nat.N3(), Nat.N3()) - .fill( // intrinsic - 669.1428078983059, - 0.0, - 322.53377249329213, - 0.0, - 646.9843137061716, - 241.26567383784163, - 0.0, - 0.0, - 1.0), - VecBuilder.fill( // distort - 0.12788470750464645, - -1.2350335805796528, - 0.0024990767286192732, - -0.0026958287600230705, - 2.2951386729115537)); - prop.setCalibError(0.26, 0.046); - prop.setFPS(15); - prop.setAvgLatencyMs(65); - prop.setLatencyStdDevMs(15); - return prop; - } - - public static SimCameraProperties LL2_640_480() { - var prop = new SimCameraProperties(); - prop.setCalibration( - 640, - 480, - Matrix.mat(Nat.N3(), Nat.N3()) - .fill( // intrinsic - 511.22843367007755, - 0.0, - 323.62049380211096, - 0.0, - 514.5452336723849, - 261.8827920543568, - 0.0, - 0.0, - 1.0), - VecBuilder.fill( // distort - 0.1917469998873756, - -0.5142936883324216, - 0.012461562046896614, - 0.0014084973492408186, - 0.35160648971214437)); - prop.setCalibError(0.25, 0.05); - prop.setFPS(15); - prop.setAvgLatencyMs(35); - prop.setLatencyStdDevMs(8); - return prop; - } - - public static SimCameraProperties LL2_960_720() { - var prop = new SimCameraProperties(); - prop.setCalibration( - 960, - 720, - Matrix.mat(Nat.N3(), Nat.N3()) - .fill( // intrinsic - 769.6873145148892, - 0.0, - 486.1096609458122, - 0.0, - 773.8164483705323, - 384.66071662358354, - 0.0, - 0.0, - 1.0), - VecBuilder.fill( // distort - 0.189462064814501, - -0.49903003669627627, - 0.007468423590519429, - 0.002496885298683693, - 0.3443122090208624)); - prop.setCalibError(0.35, 0.10); - prop.setFPS(10); - prop.setAvgLatencyMs(50); - prop.setLatencyStdDevMs(15); - return prop; - } - - public static SimCameraProperties 
LL2_1280_720() { - var prop = new SimCameraProperties(); - prop.setCalibration( - 1280, - 720, - Matrix.mat(Nat.N3(), Nat.N3()) - .fill( // intrinsic - 1011.3749416937393, - 0.0, - 645.4955139388737, - 0.0, - 1008.5391755084075, - 508.32877656020196, - 0.0, - 0.0, - 1.0), - VecBuilder.fill( // distort - 0.13730101577061535, - -0.2904345656989261, - 8.32475714507539E-4, - -3.694397782014239E-4, - 0.09487962227027584)); - prop.setCalibError(0.37, 0.06); - prop.setFPS(7); - prop.setAvgLatencyMs(60); - prop.setLatencyStdDevMs(20); - return prop; - } + } + } + + // determine visible segment (minimum and maximum t) + double inter1 = Double.NaN; + double inter2 = Double.NaN; + for (double inter : intersections) { + if (!Double.isNaN(inter)) { + if (Double.isNaN(inter1)) inter1 = inter; + else inter2 = inter; + } + } + + // two viewplane intersections + if (!Double.isNaN(inter2)) { + double max = Math.max(inter1, inter2); + double min = Math.min(inter1, inter2); + if (aVisible) min = 0; + if (bVisible) max = 1; + return new Pair<>(min, max); + } + // one viewplane intersection + else if (!Double.isNaN(inter1)) { + if (aVisible) return new Pair<>(Double.valueOf(0), inter1); + if (bVisible) return new Pair<>(inter1, Double.valueOf(1)); + return new Pair<>(inter1, null); + } + // no intersections + else return new Pair<>(null, null); + } + + /** Returns these points after applying this camera's estimated noise. 
*/ + public Point[] estPixelNoise(Point[] points) { + if (avgErrorPx == 0 && errorStdDevPx == 0) return points; + + Point[] noisyPts = new Point[points.length]; + for (int i = 0; i < points.length; i++) { + var p = points[i]; + // error pixels in random direction + double error = avgErrorPx + rand.nextGaussian() * errorStdDevPx; + double errorAngle = rand.nextDouble() * 2 * Math.PI - Math.PI; + noisyPts[i] = + new Point(p.x + error * Math.cos(errorAngle), p.y + error * Math.sin(errorAngle)); + } + return noisyPts; + } + + /** + * @return Noisy estimation of a frame's processing latency in milliseconds + */ + public double estLatencyMs() { + return Math.max(avgLatencyMs + rand.nextGaussian() * latencyStdDevMs, 0); + } + + /** + * @return Estimate how long until the next frame should be processed in milliseconds + */ + public double estMsUntilNextFrame() { + // exceptional processing latency blocks the next frame + return frameSpeedMs + Math.max(0, estLatencyMs() - frameSpeedMs); + } + + // pre-calibrated example cameras + + /** 960x720 resolution, 90 degree FOV, "perfect" lagless camera */ + public static SimCameraProperties PERFECT_90DEG() { + return new SimCameraProperties(); + } + + public static SimCameraProperties PI4_LIFECAM_320_240() { + var prop = new SimCameraProperties(); + prop.setCalibration( + 320, + 240, + Matrix.mat(Nat.N3(), Nat.N3()) + .fill( // intrinsic + 328.2733242048587, + 0.0, + 164.8190261141906, + 0.0, + 318.0609794305216, + 123.8633838438093, + 0.0, + 0.0, + 1.0), + VecBuilder.fill( // distort + 0.09957946553445934, + -0.9166265114485799, + 0.0019519890627236526, + -0.0036071725380870333, + 1.5627234622420942)); + prop.setCalibError(0.21, 0.0124); + prop.setFPS(30); + prop.setAvgLatencyMs(30); + prop.setLatencyStdDevMs(10); + return prop; + } + + public static SimCameraProperties PI4_LIFECAM_640_480() { + var prop = new SimCameraProperties(); + prop.setCalibration( + 640, + 480, + Matrix.mat(Nat.N3(), Nat.N3()) + .fill( // intrinsic + 
669.1428078983059, + 0.0, + 322.53377249329213, + 0.0, + 646.9843137061716, + 241.26567383784163, + 0.0, + 0.0, + 1.0), + VecBuilder.fill( // distort + 0.12788470750464645, + -1.2350335805796528, + 0.0024990767286192732, + -0.0026958287600230705, + 2.2951386729115537)); + prop.setCalibError(0.26, 0.046); + prop.setFPS(15); + prop.setAvgLatencyMs(65); + prop.setLatencyStdDevMs(15); + return prop; + } + + public static SimCameraProperties LL2_640_480() { + var prop = new SimCameraProperties(); + prop.setCalibration( + 640, + 480, + Matrix.mat(Nat.N3(), Nat.N3()) + .fill( // intrinsic + 511.22843367007755, + 0.0, + 323.62049380211096, + 0.0, + 514.5452336723849, + 261.8827920543568, + 0.0, + 0.0, + 1.0), + VecBuilder.fill( // distort + 0.1917469998873756, + -0.5142936883324216, + 0.012461562046896614, + 0.0014084973492408186, + 0.35160648971214437)); + prop.setCalibError(0.25, 0.05); + prop.setFPS(15); + prop.setAvgLatencyMs(35); + prop.setLatencyStdDevMs(8); + return prop; + } + + public static SimCameraProperties LL2_960_720() { + var prop = new SimCameraProperties(); + prop.setCalibration( + 960, + 720, + Matrix.mat(Nat.N3(), Nat.N3()) + .fill( // intrinsic + 769.6873145148892, + 0.0, + 486.1096609458122, + 0.0, + 773.8164483705323, + 384.66071662358354, + 0.0, + 0.0, + 1.0), + VecBuilder.fill( // distort + 0.189462064814501, + -0.49903003669627627, + 0.007468423590519429, + 0.002496885298683693, + 0.3443122090208624)); + prop.setCalibError(0.35, 0.10); + prop.setFPS(10); + prop.setAvgLatencyMs(50); + prop.setLatencyStdDevMs(15); + return prop; + } + + public static SimCameraProperties LL2_1280_720() { + var prop = new SimCameraProperties(); + prop.setCalibration( + 1280, + 720, + Matrix.mat(Nat.N3(), Nat.N3()) + .fill( // intrinsic + 1011.3749416937393, + 0.0, + 645.4955139388737, + 0.0, + 1008.5391755084075, + 508.32877656020196, + 0.0, + 0.0, + 1.0), + VecBuilder.fill( // distort + 0.13730101577061535, + -0.2904345656989261, + 8.32475714507539E-4, + 
-3.694397782014239E-4, + 0.09487962227027584)); + prop.setCalibError(0.37, 0.06); + prop.setFPS(7); + prop.setAvgLatencyMs(60); + prop.setLatencyStdDevMs(20); + return prop; + } } diff --git a/photon-lib/src/main/java/org/photonvision/simulation/SimPhotonCamera.java b/photon-lib/src/main/java/org/photonvision/simulation/SimPhotonCamera.java index 58e3e2fcf7..f9936dae65 100644 --- a/photon-lib/src/main/java/org/photonvision/simulation/SimPhotonCamera.java +++ b/photon-lib/src/main/java/org/photonvision/simulation/SimPhotonCamera.java @@ -45,137 +45,137 @@ @Deprecated @SuppressWarnings("unused") public class SimPhotonCamera { - NTTopicSet ts = new NTTopicSet(); - PhotonPipelineResult latestResult; - private long heartbeatCounter = 0; - - /** - * Constructs a Simulated PhotonCamera from a root table. - * - * @param instance The NetworkTableInstance to pull data from. This can be a custom instance in - * simulation, but should *usually* be the default NTInstance from - * NetworkTableInstance::getDefault - * @param cameraName The name of the camera, as seen in the UI. - */ - public SimPhotonCamera(NetworkTableInstance instance, String cameraName) { - ts.removeEntries(); - ts.subTable = instance.getTable(PhotonCamera.kTableName).getSubTable(cameraName); - ts.updateEntries(); - } - - /** - * Publishes the camera intrinsics matrix. The matrix should be in the form: spotless:off + NTTopicSet ts = new NTTopicSet(); + PhotonPipelineResult latestResult; + private long heartbeatCounter = 0; + + /** + * Constructs a Simulated PhotonCamera from a root table. + * + * @param instance The NetworkTableInstance to pull data from. This can be a custom instance in + * simulation, but should *usually* be the default NTInstance from + * NetworkTableInstance::getDefault + * @param cameraName The name of the camera, as seen in the UI. 
+ */ + public SimPhotonCamera(NetworkTableInstance instance, String cameraName) { + ts.removeEntries(); + ts.subTable = instance.getTable(PhotonCamera.kTableName).getSubTable(cameraName); + ts.updateEntries(); + } + + /** + * Publishes the camera intrinsics matrix. The matrix should be in the form: spotless:off * fx 0 cx * 0 fy cy * 0 0 1 * @param cameraMatrix The cam matrix * spotless:on - */ - public void setCameraIntrinsicsMat(Matrix cameraMatrix) { - ts.cameraIntrinsicsPublisher.set(cameraMatrix.getData()); - } - - /** - * Publishes the camera distortion matrix. The matrix should be in the form [k1 k2 p1 p2 k3]. See - * more: https://docs.opencv.org/3.4/d4/d94/tutorial_camera_calibration.html - * - * @param distortionMat The distortion mat - */ - public void setCameraDistortionMat(Matrix distortionMat) { - ts.cameraDistortionPublisher.set(distortionMat.getData()); + */ + public void setCameraIntrinsicsMat(Matrix cameraMatrix) { + ts.cameraIntrinsicsPublisher.set(cameraMatrix.getData()); + } + + /** + * Publishes the camera distortion matrix. The matrix should be in the form [k1 k2 p1 p2 k3]. See + * more: https://docs.opencv.org/3.4/d4/d94/tutorial_camera_calibration.html + * + * @param distortionMat The distortion mat + */ + public void setCameraDistortionMat(Matrix distortionMat) { + ts.cameraDistortionPublisher.set(distortionMat.getData()); + } + + /** + * Constructs a Simulated PhotonCamera from the name of the camera. + * + * @param cameraName The nickname of the camera (found in the PhotonVision UI). + */ + public SimPhotonCamera(String cameraName) { + this(NetworkTableInstance.getDefault(), cameraName); + } + + /** + * Simulate one processed frame of vision data, putting one result to NT. + * + * @param latencyMillis Latency of the provided frame + * @param targets Each target detected + */ + public void submitProcessedFrame(double latencyMillis, PhotonTrackedTarget... 
targets) { + submitProcessedFrame(latencyMillis, Arrays.asList(targets)); + } + + /** + * Simulate one processed frame of vision data, putting one result to NT. + * + * @param latencyMillis Latency of the provided frame + * @param sortMode Order in which to sort targets + * @param targets Each target detected + */ + public void submitProcessedFrame( + double latencyMillis, PhotonTargetSortMode sortMode, PhotonTrackedTarget... targets) { + submitProcessedFrame(latencyMillis, sortMode, Arrays.asList(targets)); + } + + /** + * Simulate one processed frame of vision data, putting one result to NT. + * + * @param latencyMillis Latency of the provided frame + * @param targetList List of targets detected + */ + public void submitProcessedFrame(double latencyMillis, List targetList) { + submitProcessedFrame(latencyMillis, null, targetList); + } + + /** + * Simulate one processed frame of vision data, putting one result to NT. + * + * @param latencyMillis Latency of the provided frame + * @param sortMode Order in which to sort targets + * @param targetList List of targets detected + */ + public void submitProcessedFrame( + double latencyMillis, PhotonTargetSortMode sortMode, List targetList) { + ts.latencyMillisEntry.set(latencyMillis); + + if (sortMode != null) { + targetList.sort(sortMode.getComparator()); } - /** - * Constructs a Simulated PhotonCamera from the name of the camera. - * - * @param cameraName The nickname of the camera (found in the PhotonVision UI). 
- */ - public SimPhotonCamera(String cameraName) { - this(NetworkTableInstance.getDefault(), cameraName); + PhotonPipelineResult newResult = + new PhotonPipelineResult(latencyMillis, targetList, new MultiTargetPNPResults()); + var newPacket = new Packet(newResult.getPacketSize()); + newResult.populatePacket(newPacket); + ts.rawBytesEntry.set(newPacket.getData()); + + boolean hasTargets = newResult.hasTargets(); + ts.hasTargetEntry.set(hasTargets); + if (!hasTargets) { + ts.targetPitchEntry.set(0.0); + ts.targetYawEntry.set(0.0); + ts.targetAreaEntry.set(0.0); + ts.targetPoseEntry.set(new double[] {0.0, 0.0, 0.0}); + ts.targetSkewEntry.set(0.0); + } else { + var bestTarget = newResult.getBestTarget(); + + ts.targetPitchEntry.set(bestTarget.getPitch()); + ts.targetYawEntry.set(bestTarget.getYaw()); + ts.targetAreaEntry.set(bestTarget.getArea()); + ts.targetSkewEntry.set(bestTarget.getSkew()); + + var transform = bestTarget.getBestCameraToTarget(); + double[] poseData = { + transform.getX(), transform.getY(), transform.getRotation().toRotation2d().getDegrees() + }; + ts.targetPoseEntry.set(poseData); } - /** - * Simulate one processed frame of vision data, putting one result to NT. - * - * @param latencyMillis Latency of the provided frame - * @param targets Each target detected - */ - public void submitProcessedFrame(double latencyMillis, PhotonTrackedTarget... targets) { - submitProcessedFrame(latencyMillis, Arrays.asList(targets)); - } + ts.heartbeatPublisher.set(heartbeatCounter++); - /** - * Simulate one processed frame of vision data, putting one result to NT. - * - * @param latencyMillis Latency of the provided frame - * @param sortMode Order in which to sort targets - * @param targets Each target detected - */ - public void submitProcessedFrame( - double latencyMillis, PhotonTargetSortMode sortMode, PhotonTrackedTarget... 
targets) { - submitProcessedFrame(latencyMillis, sortMode, Arrays.asList(targets)); - } + latestResult = newResult; + } - /** - * Simulate one processed frame of vision data, putting one result to NT. - * - * @param latencyMillis Latency of the provided frame - * @param targetList List of targets detected - */ - public void submitProcessedFrame(double latencyMillis, List targetList) { - submitProcessedFrame(latencyMillis, null, targetList); - } - - /** - * Simulate one processed frame of vision data, putting one result to NT. - * - * @param latencyMillis Latency of the provided frame - * @param sortMode Order in which to sort targets - * @param targetList List of targets detected - */ - public void submitProcessedFrame( - double latencyMillis, PhotonTargetSortMode sortMode, List targetList) { - ts.latencyMillisEntry.set(latencyMillis); - - if (sortMode != null) { - targetList.sort(sortMode.getComparator()); - } - - PhotonPipelineResult newResult = - new PhotonPipelineResult(latencyMillis, targetList, new MultiTargetPNPResults()); - var newPacket = new Packet(newResult.getPacketSize()); - newResult.populatePacket(newPacket); - ts.rawBytesEntry.set(newPacket.getData()); - - boolean hasTargets = newResult.hasTargets(); - ts.hasTargetEntry.set(hasTargets); - if (!hasTargets) { - ts.targetPitchEntry.set(0.0); - ts.targetYawEntry.set(0.0); - ts.targetAreaEntry.set(0.0); - ts.targetPoseEntry.set(new double[] {0.0, 0.0, 0.0}); - ts.targetSkewEntry.set(0.0); - } else { - var bestTarget = newResult.getBestTarget(); - - ts.targetPitchEntry.set(bestTarget.getPitch()); - ts.targetYawEntry.set(bestTarget.getYaw()); - ts.targetAreaEntry.set(bestTarget.getArea()); - ts.targetSkewEntry.set(bestTarget.getSkew()); - - var transform = bestTarget.getBestCameraToTarget(); - double[] poseData = { - transform.getX(), transform.getY(), transform.getRotation().toRotation2d().getDegrees() - }; - ts.targetPoseEntry.set(poseData); - } - - ts.heartbeatPublisher.set(heartbeatCounter++); - - 
latestResult = newResult; - } - - PhotonPipelineResult getLatestResult() { - return latestResult; - } + PhotonPipelineResult getLatestResult() { + return latestResult; + } } diff --git a/photon-lib/src/main/java/org/photonvision/simulation/SimVisionSystem.java b/photon-lib/src/main/java/org/photonvision/simulation/SimVisionSystem.java index 52756f4727..c7fdf445b6 100644 --- a/photon-lib/src/main/java/org/photonvision/simulation/SimVisionSystem.java +++ b/photon-lib/src/main/java/org/photonvision/simulation/SimVisionSystem.java @@ -46,228 +46,228 @@ */ @Deprecated public class SimVisionSystem { - SimPhotonCamera cam; - - double camHorizFOVDegrees; - double camVertFOVDegrees; - double cameraHeightOffGroundMeters; - double maxLEDRangeMeters; - int cameraResWidth; - int cameraResHeight; - double minTargetArea; - Transform3d robotToCamera; - - Field2d dbgField; - FieldObject2d dbgRobot; - FieldObject2d dbgCamera; - - ArrayList tgtList; - - /** - * Create a simulated vision system involving a camera and coprocessor mounted on a mobile robot - * running PhotonVision, detecting one or more targets scattered around the field. This assumes a - * fairly simple and distortion-less pinhole camera model. - * - * @param camName Name of the PhotonVision camera to create. Align it with the settings you use in - * the PhotonVision GUI. - * @param camDiagFOVDegrees Diagonal Field of View of the camera used. Align it with the - * manufacturer specifications, and/or whatever is configured in the PhotonVision Setting - * page. - * @param robotToCamera Transform to move from the center of the robot to the camera's mount - * position - * @param maxLEDRangeMeters Maximum distance at which your camera can illuminate the target and - * make it visible. Set to 9000 or more if your vision system does not rely on LED's. 
- * @param cameraResWidth Width of your camera's image sensor in pixels - * @param cameraResHeight Height of your camera's image sensor in pixels - * @param minTargetArea Minimum area that that the target should be before it's recognized as a - * target by the camera. Match this with your contour filtering settings in the PhotonVision - * GUI. - */ - public SimVisionSystem( - String camName, - double camDiagFOVDegrees, - Transform3d robotToCamera, - double maxLEDRangeMeters, - int cameraResWidth, - int cameraResHeight, - double minTargetArea) { - this.robotToCamera = robotToCamera; - this.maxLEDRangeMeters = maxLEDRangeMeters; - this.cameraResWidth = cameraResWidth; - this.cameraResHeight = cameraResHeight; - this.minTargetArea = minTargetArea; - - // Calculate horizontal/vertical FOV by similar triangles - double hypotPixels = Math.hypot(cameraResWidth, cameraResHeight); - this.camHorizFOVDegrees = camDiagFOVDegrees * cameraResWidth / hypotPixels; - this.camVertFOVDegrees = camDiagFOVDegrees * cameraResHeight / hypotPixels; - - cam = new SimPhotonCamera(camName); - tgtList = new ArrayList<>(); - - dbgField = new Field2d(); - dbgRobot = dbgField.getRobotObject(); - dbgCamera = dbgField.getObject(camName + " Camera"); - SmartDashboard.putData(camName + " Sim Field", dbgField); - } - - /** - * Add a target on the field which your vision system is designed to detect. The PhotonCamera from - * this system will report the location of the robot relative to the subset of these targets which - * are visible from the given robot position. - * - * @param target Target to add to the simulated field - */ - public void addSimVisionTarget(SimVisionTarget target) { - tgtList.add(target); - dbgField - .getObject("Target " + Integer.toString(target.targetID)) - .setPose(target.targetPose.toPose2d()); - } - - /** - * Adds all apriltags from the provided {@link AprilTagFieldLayout} as sim vision targets. 
The - * poses added will preserve the tag layout's alliance origin at the time of calling this method. - * - * @param tagLayout The field tag layout to get Apriltag poses and IDs from - */ - public void addVisionTargets(AprilTagFieldLayout tagLayout) { - for (AprilTag tag : tagLayout.getTags()) { - addSimVisionTarget( - new SimVisionTarget( - tagLayout.getTagPose(tag.ID).get(), // preserve alliance rotation - Units.inchesToMeters(6), - Units.inchesToMeters(6), - tag.ID)); - } - } - - /** - * Clears all sim vision targets. This is useful for switching alliances and needing to repopulate - * the sim targets. NOTE: Old targets will still show on the Field2d unless overwritten by new - * targets with the same ID - */ - public void clearVisionTargets() { - tgtList.clear(); - } - - /** - * Adjust the camera position relative to the robot. Use this if your camera is on a gimbal or - * turret or some other mobile platform. - * - * @param newRobotToCamera New Transform from the robot to the camera - */ - public void moveCamera(Transform3d newRobotToCamera) { - this.robotToCamera = newRobotToCamera; - } - - /** - * Periodic update. Call this once per frame of image data you wish to process and send to - * NetworkTables - * - * @param robotPoseMeters current pose of the robot on the field. Will be used to calculate which - * targets are actually in view, where they are at relative to the robot, and relevant - * PhotonVision parameters. - */ - public void processFrame(Pose2d robotPoseMeters) { - processFrame(new Pose3d(robotPoseMeters)); - } - - /** - * Periodic update. Call this once per frame of image data you wish to process and send to - * NetworkTables - * - * @param robotPoseMeters current pose of the robot in space. Will be used to calculate which - * targets are actually in view, where they are at relative to the robot, and relevant - * PhotonVision parameters. 
- */ - public void processFrame(Pose3d robotPoseMeters) { - Pose3d cameraPose = robotPoseMeters.transformBy(robotToCamera); - - dbgRobot.setPose(robotPoseMeters.toPose2d()); - dbgCamera.setPose(cameraPose.toPose2d()); - - ArrayList visibleTgtList = new ArrayList<>(tgtList.size()); - - tgtList.forEach( - (tgt) -> { - var camToTargetTrans = new Transform3d(cameraPose, tgt.targetPose); - - // Generate a transformation from camera to target, - // ignoring rotation. - var t = camToTargetTrans.getTranslation(); - - // Rough approximation of the alternate solution, which is (so far) always incorrect. - var altTrans = - new Translation3d( - t.getX(), - -1.0 * t.getY(), - t.getZ()); // mirrored across camera axis in Y direction - var altRot = camToTargetTrans.getRotation().times(-1.0); // flipped - var camToTargetTransAlt = new Transform3d(altTrans, altRot); - - double distMeters = t.getNorm(); - - double area_px = tgt.tgtAreaMeters2 / getM2PerPx(distMeters); - - var translationAlongGround = - new Translation2d( - tgt.targetPose.toPose2d().getX() - cameraPose.toPose2d().getX(), - tgt.targetPose.toPose2d().getY() - cameraPose.toPose2d().getY()); - - var camAngle = cameraPose.getRotation().toRotation2d(); - var camToTgtRotation = - new Rotation2d(translationAlongGround.getX(), translationAlongGround.getY()); - double yawDegrees = camToTgtRotation.minus(camAngle).getDegrees(); - - double camHeightAboveGround = cameraPose.getZ(); - double tgtHeightAboveGround = tgt.targetPose.getZ(); - double camPitchDegrees = Units.radiansToDegrees(cameraPose.getRotation().getY()); - - double distAlongGround = translationAlongGround.getNorm(); - - double pitchDegrees = - Units.radiansToDegrees( - Math.atan2((tgtHeightAboveGround - camHeightAboveGround), distAlongGround)) - - camPitchDegrees; - - if (camCanSeeTarget(distMeters, yawDegrees, pitchDegrees, area_px)) { - // TODO simulate target corners - visibleTgtList.add( - new PhotonTrackedTarget( - yawDegrees, - pitchDegrees, - area_px, - 0.0, 
- tgt.targetID, - camToTargetTrans, - camToTargetTransAlt, - 0.0, // TODO - simulate ambiguity when straight on? - List.of( - new TargetCorner(0, 0), new TargetCorner(0, 0), - new TargetCorner(0, 0), new TargetCorner(0, 0)), - List.of( - new TargetCorner(0, 0), new TargetCorner(0, 0), - new TargetCorner(0, 0), new TargetCorner(0, 0)))); - } - }); - - cam.submitProcessedFrame(0.0, visibleTgtList); - } - - double getM2PerPx(double dist) { - double widthMPerPx = - 2 * dist * Math.tan(Units.degreesToRadians(this.camHorizFOVDegrees) / 2) / cameraResWidth; - double heightMPerPx = - 2 * dist * Math.tan(Units.degreesToRadians(this.camVertFOVDegrees) / 2) / cameraResHeight; - return widthMPerPx * heightMPerPx; - } - - boolean camCanSeeTarget(double distMeters, double yaw, double pitch, double area) { - boolean inRange = (distMeters < this.maxLEDRangeMeters); - boolean inHorizAngle = Math.abs(yaw) < (this.camHorizFOVDegrees / 2); - boolean inVertAngle = Math.abs(pitch) < (this.camVertFOVDegrees / 2); - boolean targetBigEnough = area > this.minTargetArea; - return (inRange && inHorizAngle && inVertAngle && targetBigEnough); + SimPhotonCamera cam; + + double camHorizFOVDegrees; + double camVertFOVDegrees; + double cameraHeightOffGroundMeters; + double maxLEDRangeMeters; + int cameraResWidth; + int cameraResHeight; + double minTargetArea; + Transform3d robotToCamera; + + Field2d dbgField; + FieldObject2d dbgRobot; + FieldObject2d dbgCamera; + + ArrayList tgtList; + + /** + * Create a simulated vision system involving a camera and coprocessor mounted on a mobile robot + * running PhotonVision, detecting one or more targets scattered around the field. This assumes a + * fairly simple and distortion-less pinhole camera model. + * + * @param camName Name of the PhotonVision camera to create. Align it with the settings you use in + * the PhotonVision GUI. + * @param camDiagFOVDegrees Diagonal Field of View of the camera used. 
Align it with the + * manufacturer specifications, and/or whatever is configured in the PhotonVision Setting + * page. + * @param robotToCamera Transform to move from the center of the robot to the camera's mount + * position + * @param maxLEDRangeMeters Maximum distance at which your camera can illuminate the target and + * make it visible. Set to 9000 or more if your vision system does not rely on LED's. + * @param cameraResWidth Width of your camera's image sensor in pixels + * @param cameraResHeight Height of your camera's image sensor in pixels + * @param minTargetArea Minimum area that that the target should be before it's recognized as a + * target by the camera. Match this with your contour filtering settings in the PhotonVision + * GUI. + */ + public SimVisionSystem( + String camName, + double camDiagFOVDegrees, + Transform3d robotToCamera, + double maxLEDRangeMeters, + int cameraResWidth, + int cameraResHeight, + double minTargetArea) { + this.robotToCamera = robotToCamera; + this.maxLEDRangeMeters = maxLEDRangeMeters; + this.cameraResWidth = cameraResWidth; + this.cameraResHeight = cameraResHeight; + this.minTargetArea = minTargetArea; + + // Calculate horizontal/vertical FOV by similar triangles + double hypotPixels = Math.hypot(cameraResWidth, cameraResHeight); + this.camHorizFOVDegrees = camDiagFOVDegrees * cameraResWidth / hypotPixels; + this.camVertFOVDegrees = camDiagFOVDegrees * cameraResHeight / hypotPixels; + + cam = new SimPhotonCamera(camName); + tgtList = new ArrayList<>(); + + dbgField = new Field2d(); + dbgRobot = dbgField.getRobotObject(); + dbgCamera = dbgField.getObject(camName + " Camera"); + SmartDashboard.putData(camName + " Sim Field", dbgField); + } + + /** + * Add a target on the field which your vision system is designed to detect. The PhotonCamera from + * this system will report the location of the robot relative to the subset of these targets which + * are visible from the given robot position. 
+ * + * @param target Target to add to the simulated field + */ + public void addSimVisionTarget(SimVisionTarget target) { + tgtList.add(target); + dbgField + .getObject("Target " + Integer.toString(target.targetID)) + .setPose(target.targetPose.toPose2d()); + } + + /** + * Adds all apriltags from the provided {@link AprilTagFieldLayout} as sim vision targets. The + * poses added will preserve the tag layout's alliance origin at the time of calling this method. + * + * @param tagLayout The field tag layout to get Apriltag poses and IDs from + */ + public void addVisionTargets(AprilTagFieldLayout tagLayout) { + for (AprilTag tag : tagLayout.getTags()) { + addSimVisionTarget( + new SimVisionTarget( + tagLayout.getTagPose(tag.ID).get(), // preserve alliance rotation + Units.inchesToMeters(6), + Units.inchesToMeters(6), + tag.ID)); } + } + + /** + * Clears all sim vision targets. This is useful for switching alliances and needing to repopulate + * the sim targets. NOTE: Old targets will still show on the Field2d unless overwritten by new + * targets with the same ID + */ + public void clearVisionTargets() { + tgtList.clear(); + } + + /** + * Adjust the camera position relative to the robot. Use this if your camera is on a gimbal or + * turret or some other mobile platform. + * + * @param newRobotToCamera New Transform from the robot to the camera + */ + public void moveCamera(Transform3d newRobotToCamera) { + this.robotToCamera = newRobotToCamera; + } + + /** + * Periodic update. Call this once per frame of image data you wish to process and send to + * NetworkTables + * + * @param robotPoseMeters current pose of the robot on the field. Will be used to calculate which + * targets are actually in view, where they are at relative to the robot, and relevant + * PhotonVision parameters. + */ + public void processFrame(Pose2d robotPoseMeters) { + processFrame(new Pose3d(robotPoseMeters)); + } + + /** + * Periodic update. 
Call this once per frame of image data you wish to process and send to + * NetworkTables + * + * @param robotPoseMeters current pose of the robot in space. Will be used to calculate which + * targets are actually in view, where they are at relative to the robot, and relevant + * PhotonVision parameters. + */ + public void processFrame(Pose3d robotPoseMeters) { + Pose3d cameraPose = robotPoseMeters.transformBy(robotToCamera); + + dbgRobot.setPose(robotPoseMeters.toPose2d()); + dbgCamera.setPose(cameraPose.toPose2d()); + + ArrayList visibleTgtList = new ArrayList<>(tgtList.size()); + + tgtList.forEach( + (tgt) -> { + var camToTargetTrans = new Transform3d(cameraPose, tgt.targetPose); + + // Generate a transformation from camera to target, + // ignoring rotation. + var t = camToTargetTrans.getTranslation(); + + // Rough approximation of the alternate solution, which is (so far) always incorrect. + var altTrans = + new Translation3d( + t.getX(), + -1.0 * t.getY(), + t.getZ()); // mirrored across camera axis in Y direction + var altRot = camToTargetTrans.getRotation().times(-1.0); // flipped + var camToTargetTransAlt = new Transform3d(altTrans, altRot); + + double distMeters = t.getNorm(); + + double area_px = tgt.tgtAreaMeters2 / getM2PerPx(distMeters); + + var translationAlongGround = + new Translation2d( + tgt.targetPose.toPose2d().getX() - cameraPose.toPose2d().getX(), + tgt.targetPose.toPose2d().getY() - cameraPose.toPose2d().getY()); + + var camAngle = cameraPose.getRotation().toRotation2d(); + var camToTgtRotation = + new Rotation2d(translationAlongGround.getX(), translationAlongGround.getY()); + double yawDegrees = camToTgtRotation.minus(camAngle).getDegrees(); + + double camHeightAboveGround = cameraPose.getZ(); + double tgtHeightAboveGround = tgt.targetPose.getZ(); + double camPitchDegrees = Units.radiansToDegrees(cameraPose.getRotation().getY()); + + double distAlongGround = translationAlongGround.getNorm(); + + double pitchDegrees = + Units.radiansToDegrees( 
+ Math.atan2((tgtHeightAboveGround - camHeightAboveGround), distAlongGround)) + - camPitchDegrees; + + if (camCanSeeTarget(distMeters, yawDegrees, pitchDegrees, area_px)) { + // TODO simulate target corners + visibleTgtList.add( + new PhotonTrackedTarget( + yawDegrees, + pitchDegrees, + area_px, + 0.0, + tgt.targetID, + camToTargetTrans, + camToTargetTransAlt, + 0.0, // TODO - simulate ambiguity when straight on? + List.of( + new TargetCorner(0, 0), new TargetCorner(0, 0), + new TargetCorner(0, 0), new TargetCorner(0, 0)), + List.of( + new TargetCorner(0, 0), new TargetCorner(0, 0), + new TargetCorner(0, 0), new TargetCorner(0, 0)))); + } + }); + + cam.submitProcessedFrame(0.0, visibleTgtList); + } + + double getM2PerPx(double dist) { + double widthMPerPx = + 2 * dist * Math.tan(Units.degreesToRadians(this.camHorizFOVDegrees) / 2) / cameraResWidth; + double heightMPerPx = + 2 * dist * Math.tan(Units.degreesToRadians(this.camVertFOVDegrees) / 2) / cameraResHeight; + return widthMPerPx * heightMPerPx; + } + + boolean camCanSeeTarget(double distMeters, double yaw, double pitch, double area) { + boolean inRange = (distMeters < this.maxLEDRangeMeters); + boolean inHorizAngle = Math.abs(yaw) < (this.camHorizFOVDegrees / 2); + boolean inVertAngle = Math.abs(pitch) < (this.camVertFOVDegrees / 2); + boolean targetBigEnough = area > this.minTargetArea; + return (inRange && inHorizAngle && inVertAngle && targetBigEnough); + } } diff --git a/photon-lib/src/main/java/org/photonvision/simulation/SimVisionTarget.java b/photon-lib/src/main/java/org/photonvision/simulation/SimVisionTarget.java index 0d66154d9f..cd56c95fef 100644 --- a/photon-lib/src/main/java/org/photonvision/simulation/SimVisionTarget.java +++ b/photon-lib/src/main/java/org/photonvision/simulation/SimVisionTarget.java @@ -31,25 +31,25 @@ */ @Deprecated public class SimVisionTarget { - Pose3d targetPose; - double targetWidthMeters; - double targetHeightMeters; - double tgtAreaMeters2; - int targetID; + Pose3d 
targetPose; + double targetWidthMeters; + double targetHeightMeters; + double tgtAreaMeters2; + int targetID; - /** - * Describes a vision target located somewhere on the field that your SimVisionSystem can detect. - * - * @param targetPos Pose3d of the target in field-relative coordinates - * @param targetWidthMeters Width of the outer bounding box of the target in meters. - * @param targetHeightMeters Pair Height of the outer bounding box of the target in meters. - */ - public SimVisionTarget( - Pose3d targetPos, double targetWidthMeters, double targetHeightMeters, int targetID) { - this.targetPose = targetPos; - this.targetWidthMeters = targetWidthMeters; - this.targetHeightMeters = targetHeightMeters; - this.tgtAreaMeters2 = targetWidthMeters * targetHeightMeters; - this.targetID = targetID; - } + /** + * Describes a vision target located somewhere on the field that your SimVisionSystem can detect. + * + * @param targetPos Pose3d of the target in field-relative coordinates + * @param targetWidthMeters Width of the outer bounding box of the target in meters. + * @param targetHeightMeters Pair Height of the outer bounding box of the target in meters. 
+ */ + public SimVisionTarget( + Pose3d targetPos, double targetWidthMeters, double targetHeightMeters, int targetID) { + this.targetPose = targetPos; + this.targetWidthMeters = targetWidthMeters; + this.targetHeightMeters = targetHeightMeters; + this.tgtAreaMeters2 = targetWidthMeters * targetHeightMeters; + this.targetID = targetID; + } } diff --git a/photon-lib/src/main/java/org/photonvision/simulation/VideoSimUtil.java b/photon-lib/src/main/java/org/photonvision/simulation/VideoSimUtil.java index 7a32e413a3..ed44e864e3 100644 --- a/photon-lib/src/main/java/org/photonvision/simulation/VideoSimUtil.java +++ b/photon-lib/src/main/java/org/photonvision/simulation/VideoSimUtil.java @@ -52,551 +52,551 @@ import org.photonvision.estimation.RotTrlTransform3d; public class VideoSimUtil { - public static final String kLocalTagImagesPath = "./src/main/resources/images/apriltags/"; - public static final String kResourceTagImagesPath = "/images/apriltags/"; - public static final String kTag16h5ImageName = "tag16_05_00000"; - public static final int kNumTags16h5 = 30; - - // All 16h5 tag images - private static final Map kTag16h5Images = new HashMap<>(); - // Points corresponding to marker(black square) corners of 8x8 16h5 tag images - public static final Point[] kTag16h5MarkerPts; - - // field dimensions for wireframe - private static double fieldLength = 16.54175; - private static double fieldWidth = 8.0137; - - static { - try { - var loader = - new RuntimeLoader<>( - Core.NATIVE_LIBRARY_NAME, RuntimeLoader.getDefaultExtractionRoot(), Core.class); - loader.loadLibrary(); - } catch (Exception e) { - throw new RuntimeException("Failed to load native libraries!", e); - } - - // create Mats of 8x8 apriltag images - for (int i = 0; i < VideoSimUtil.kNumTags16h5; i++) { - Mat tagImage = VideoSimUtil.get16h5TagImage(i); - kTag16h5Images.put(i, tagImage); - } - - kTag16h5MarkerPts = get16h5MarkerPts(); + public static final String kLocalTagImagesPath = 
"./src/main/resources/images/apriltags/"; + public static final String kResourceTagImagesPath = "/images/apriltags/"; + public static final String kTag16h5ImageName = "tag16_05_00000"; + public static final int kNumTags16h5 = 30; + + // All 16h5 tag images + private static final Map kTag16h5Images = new HashMap<>(); + // Points corresponding to marker(black square) corners of 8x8 16h5 tag images + public static final Point[] kTag16h5MarkerPts; + + // field dimensions for wireframe + private static double fieldLength = 16.54175; + private static double fieldWidth = 8.0137; + + static { + try { + var loader = + new RuntimeLoader<>( + Core.NATIVE_LIBRARY_NAME, RuntimeLoader.getDefaultExtractionRoot(), Core.class); + loader.loadLibrary(); + } catch (Exception e) { + throw new RuntimeException("Failed to load native libraries!", e); } - /** Updates the properties of this CvSource video stream with the given camera properties. */ - public static void updateVideoProp(CvSource video, SimCameraProperties prop) { - video.setResolution(prop.getResWidth(), prop.getResHeight()); - video.setFPS((int) prop.getFPS()); + // create Mats of 8x8 apriltag images + for (int i = 0; i < VideoSimUtil.kNumTags16h5; i++) { + Mat tagImage = VideoSimUtil.get16h5TagImage(i); + kTag16h5Images.put(i, tagImage); } - /** - * Gets the points representing the corners of this image. Because image pixels are accessed - * through a Mat, the point (0,0) actually represents the center of the top-left pixel and not the - * actual top-left corner. - * - * @param size Size of image - */ - public static Point[] getImageCorners(Size size) { - return new Point[] { - new Point(-0.5, -0.5), - new Point(size.width - 0.5, -0.5), - new Point(size.width - 0.5, size.height - 0.5), - new Point(-0.5, size.height - 0.5) - }; - } - - /** - * Gets the 8x8 (grayscale) image of a specific 16h5 AprilTag. 
- * - * @param id The fiducial id of the desired tag - */ - public static Mat get16h5TagImage(int id) { - String name = kTag16h5ImageName; - String idString = String.valueOf(id); - name = name.substring(0, name.length() - idString.length()) + idString; - - var resource = VideoSimUtil.class.getResource(kResourceTagImagesPath + name + ".png"); - - Mat result = new Mat(); - // reading jar file - if (resource != null && resource.getPath().startsWith("file")) { - BufferedImage buf; - try { - buf = ImageIO.read(resource); - } catch (IOException e) { - System.err.println("Couldn't read tag image!"); - return result; - } - - result = new Mat(buf.getHeight(), buf.getWidth(), CvType.CV_8UC1); - - byte[] px = new byte[1]; - for (int y = 0; y < result.height(); y++) { - for (int x = 0; x < result.width(); x++) { - px[0] = (byte) (buf.getRGB(x, y) & 0xFF); - result.put(y, x, px); - } - } - } - // local IDE tests - else result = Imgcodecs.imread(kLocalTagImagesPath + name + ".png", Imgcodecs.IMREAD_GRAYSCALE); + kTag16h5MarkerPts = get16h5MarkerPts(); + } + + /** Updates the properties of this CvSource video stream with the given camera properties. */ + public static void updateVideoProp(CvSource video, SimCameraProperties prop) { + video.setResolution(prop.getResWidth(), prop.getResHeight()); + video.setFPS((int) prop.getFPS()); + } + + /** + * Gets the points representing the corners of this image. Because image pixels are accessed + * through a Mat, the point (0,0) actually represents the center of the top-left pixel and not the + * actual top-left corner. + * + * @param size Size of image + */ + public static Point[] getImageCorners(Size size) { + return new Point[] { + new Point(-0.5, -0.5), + new Point(size.width - 0.5, -0.5), + new Point(size.width - 0.5, size.height - 0.5), + new Point(-0.5, size.height - 0.5) + }; + } + + /** + * Gets the 8x8 (grayscale) image of a specific 16h5 AprilTag. 
+ * + * @param id The fiducial id of the desired tag + */ + public static Mat get16h5TagImage(int id) { + String name = kTag16h5ImageName; + String idString = String.valueOf(id); + name = name.substring(0, name.length() - idString.length()) + idString; + + var resource = VideoSimUtil.class.getResource(kResourceTagImagesPath + name + ".png"); + + Mat result = new Mat(); + // reading jar file + if (resource != null && resource.getPath().startsWith("file")) { + BufferedImage buf; + try { + buf = ImageIO.read(resource); + } catch (IOException e) { + System.err.println("Couldn't read tag image!"); return result; - } + } - /** Gets the points representing the marker(black square) corners. */ - public static Point[] get16h5MarkerPts() { - return get16h5MarkerPts(1); - } + result = new Mat(buf.getHeight(), buf.getWidth(), CvType.CV_8UC1); - /** - * Gets the points representing the marker(black square) corners. - * - * @param scale The scale of the tag image (8*scale x 8*scale image) - */ - public static Point[] get16h5MarkerPts(int scale) { - var roi16h5 = new Rect(new Point(1, 1), new Size(6, 6)); - roi16h5.x *= scale; - roi16h5.y *= scale; - roi16h5.width *= scale; - roi16h5.height *= scale; - var pts = getImageCorners(roi16h5.size()); - for (int i = 0; i < pts.length; i++) { - var pt = pts[i]; - pts[i] = new Point(roi16h5.tl().x + pt.x, roi16h5.tl().y + pt.y); - } - return pts; - } - - /** - * Warps the image of a specific 16h5 AprilTag onto the destination image at the given points. - * - * @param tagId The id of the specific tag to warp onto the destination image - * @param dstPoints Points(4) in destination image where the tag marker(black square) corners - * should be warped onto. - * @param antialiasing If antialiasing should be performed by automatically - * supersampling/interpolating the warped image. This should be used if better stream quality - * is desired or target detection is being done on the stream, but can hurt performance. 
- * @param destination The destination image to place the warped tag image onto. - */ - public static void warp16h5TagImage( - int tagId, Point[] dstPoints, boolean antialiasing, Mat destination) { - Mat tagImage = kTag16h5Images.get(tagId); - if (tagImage == null || tagImage.empty()) return; - var tagPoints = new MatOfPoint2f(kTag16h5MarkerPts); - // points of tag image corners - var tagImageCorners = new MatOfPoint2f(getImageCorners(tagImage.size())); - var dstPointMat = new MatOfPoint2f(dstPoints); - // the rectangle describing the rectangle-of-interest(ROI) - var boundingRect = Imgproc.boundingRect(dstPointMat); - // find the perspective transform from the tag image to the warped destination points - Mat perspecTrf = Imgproc.getPerspectiveTransform(tagPoints, dstPointMat); - // check extreme image corners after transform to check if we need to expand bounding rect - var extremeCorners = new MatOfPoint2f(); - Core.perspectiveTransform(tagImageCorners, extremeCorners, perspecTrf); - // dilate ROI to fit full tag - boundingRect = Imgproc.boundingRect(extremeCorners); - - // adjust interpolation strategy based on size of warped tag compared to tag image - var warpedContourArea = Imgproc.contourArea(extremeCorners); - double warpedTagUpscale = Math.sqrt(warpedContourArea) / Math.sqrt(tagImage.size().area()); - int warpStrategy = Imgproc.INTER_NEAREST; - // automatically determine the best supersampling of warped image and scale of tag image - /* - (warpPerspective does not properly resample, so this is used to avoid aliasing in the - warped image. Supersampling is used when the warped tag is small, but is very slow - when the warped tag is large-- scaling the tag image up and using linear interpolation - instead can be performant while still effectively antialiasing. Some combination of these - two can be used in between those extremes.) - - TODO: Simplify magic numbers to one or two variables, or use a more proper approach? 
- */ - int supersampling = 6; - supersampling = (int) Math.ceil(supersampling / warpedTagUpscale); - supersampling = Math.max(Math.min(supersampling, 8), 1); - - Mat scaledTagImage = new Mat(); - if (warpedTagUpscale > 2.0) { - warpStrategy = Imgproc.INTER_LINEAR; - int scaleFactor = (int) (warpedTagUpscale / 3.0) + 2; - scaleFactor = Math.max(Math.min(scaleFactor, 40), 1); - scaleFactor *= supersampling; - Imgproc.resize( - tagImage, scaledTagImage, new Size(), scaleFactor, scaleFactor, Imgproc.INTER_NEAREST); - tagPoints.fromArray(get16h5MarkerPts(scaleFactor)); - } else tagImage.assignTo(scaledTagImage); - - // constrain the bounding rect inside of the destination image - boundingRect.x -= 1; - boundingRect.y -= 1; - boundingRect.width += 2; - boundingRect.height += 2; - if (boundingRect.x < 0) { - boundingRect.width += boundingRect.x; - boundingRect.x = 0; - } - if (boundingRect.y < 0) { - boundingRect.height += boundingRect.y; - boundingRect.y = 0; - } - boundingRect.width = Math.min(destination.width() - boundingRect.x, boundingRect.width); - boundingRect.height = Math.min(destination.height() - boundingRect.y, boundingRect.height); - if (boundingRect.width <= 0 || boundingRect.height <= 0) return; - - // upscale if supersampling - Mat scaledDstPts = new Mat(); - if (supersampling > 1) { - Core.multiply(dstPointMat, new Scalar(supersampling, supersampling), scaledDstPts); - boundingRect.x *= supersampling; - boundingRect.y *= supersampling; - boundingRect.width *= supersampling; - boundingRect.height *= supersampling; - } else dstPointMat.assignTo(scaledDstPts); - - // update transform relative to expanded, scaled bounding rect - Core.subtract(scaledDstPts, new Scalar(boundingRect.tl().x, boundingRect.tl().y), scaledDstPts); - perspecTrf = Imgproc.getPerspectiveTransform(tagPoints, scaledDstPts); - - // warp (scaled) tag image onto (scaled) ROI image representing the portion of - // the destination image encapsulated by boundingRect - Mat tempROI = new Mat(); 
- Imgproc.warpPerspective(scaledTagImage, tempROI, perspecTrf, boundingRect.size(), warpStrategy); - - // downscale ROI with interpolation if supersampling - if (supersampling > 1) { - boundingRect.x /= supersampling; - boundingRect.y /= supersampling; - boundingRect.width /= supersampling; - boundingRect.height /= supersampling; - Imgproc.resize(tempROI, tempROI, boundingRect.size(), 0, 0, Imgproc.INTER_AREA); + byte[] px = new byte[1]; + for (int y = 0; y < result.height(); y++) { + for (int x = 0; x < result.width(); x++) { + px[0] = (byte) (buf.getRGB(x, y) & 0xFF); + result.put(y, x, px); } - - // we want to copy ONLY the transformed tag to the result image, not the entire bounding rect - // using a mask only copies the source pixels which have an associated non-zero value in the - // mask - Mat tempMask = Mat.zeros(tempROI.size(), CvType.CV_8UC1); - Core.subtract( - extremeCorners, new Scalar(boundingRect.tl().x, boundingRect.tl().y), extremeCorners); - Point tempCenter = new Point(); - tempCenter.x = - Arrays.stream(extremeCorners.toArray()).mapToDouble(p -> p.x).average().getAsDouble(); - tempCenter.y = - Arrays.stream(extremeCorners.toArray()).mapToDouble(p -> p.y).average().getAsDouble(); - // dilate tag corners - Arrays.stream(extremeCorners.toArray()) - .forEach( - p -> { - double xdiff = p.x - tempCenter.x; - double ydiff = p.y - tempCenter.y; - xdiff += 1 * Math.signum(xdiff); - ydiff += 1 * Math.signum(ydiff); - new Point(tempCenter.x + xdiff, tempCenter.y + ydiff); - }); - // (make inside of tag completely white in mask) - Imgproc.fillConvexPoly(tempMask, new MatOfPoint(extremeCorners.toArray()), new Scalar(255)); - - // copy transformed tag onto result image - tempROI.copyTo(destination.submat(boundingRect), tempMask); + } } - - /** - * Given a line thickness in a 640x480 image, try to scale to the given destination image - * resolution. 
- * - * @param thickness480p A hypothetical line thickness in a 640x480 image - * @param destinationImg The destination image to scale to - * @return Scaled thickness which cannot be less than 1 - */ - public static double getScaledThickness(double thickness480p, Mat destinationImg) { - double scaleX = destinationImg.width() / 640.0; - double scaleY = destinationImg.height() / 480.0; - double minScale = Math.min(scaleX, scaleY); - return Math.max(thickness480p * minScale, 1.0); + // local IDE tests + else result = Imgcodecs.imread(kLocalTagImagesPath + name + ".png", Imgcodecs.IMREAD_GRAYSCALE); + return result; + } + + /** Gets the points representing the marker(black square) corners. */ + public static Point[] get16h5MarkerPts() { + return get16h5MarkerPts(1); + } + + /** + * Gets the points representing the marker(black square) corners. + * + * @param scale The scale of the tag image (8*scale x 8*scale image) + */ + public static Point[] get16h5MarkerPts(int scale) { + var roi16h5 = new Rect(new Point(1, 1), new Size(6, 6)); + roi16h5.x *= scale; + roi16h5.y *= scale; + roi16h5.width *= scale; + roi16h5.height *= scale; + var pts = getImageCorners(roi16h5.size()); + for (int i = 0; i < pts.length; i++) { + var pt = pts[i]; + pts[i] = new Point(roi16h5.tl().x + pt.x, roi16h5.tl().y + pt.y); } - - /** - * Draw a filled ellipse in the destination image. - * - * @param dstPoints The points in the destination image representing the rectangle in which the - * ellipse is inscribed. - * @param color The color of the ellipse. This is a scalar with BGR values (0-255) - * @param destination The destination image to draw onto. The image should be in the BGR color - * space. 
- */ - public static void drawInscribedEllipse(Point[] dstPoints, Scalar color, Mat destination) { - // create RotatedRect from points - var rect = OpenCVHelp.getMinAreaRect(dstPoints); - // inscribe ellipse inside rectangle - Imgproc.ellipse(destination, rect, color, -1, Imgproc.LINE_AA); + return pts; + } + + /** + * Warps the image of a specific 16h5 AprilTag onto the destination image at the given points. + * + * @param tagId The id of the specific tag to warp onto the destination image + * @param dstPoints Points(4) in destination image where the tag marker(black square) corners + * should be warped onto. + * @param antialiasing If antialiasing should be performed by automatically + * supersampling/interpolating the warped image. This should be used if better stream quality + * is desired or target detection is being done on the stream, but can hurt performance. + * @param destination The destination image to place the warped tag image onto. + */ + public static void warp16h5TagImage( + int tagId, Point[] dstPoints, boolean antialiasing, Mat destination) { + Mat tagImage = kTag16h5Images.get(tagId); + if (tagImage == null || tagImage.empty()) return; + var tagPoints = new MatOfPoint2f(kTag16h5MarkerPts); + // points of tag image corners + var tagImageCorners = new MatOfPoint2f(getImageCorners(tagImage.size())); + var dstPointMat = new MatOfPoint2f(dstPoints); + // the rectangle describing the rectangle-of-interest(ROI) + var boundingRect = Imgproc.boundingRect(dstPointMat); + // find the perspective transform from the tag image to the warped destination points + Mat perspecTrf = Imgproc.getPerspectiveTransform(tagPoints, dstPointMat); + // check extreme image corners after transform to check if we need to expand bounding rect + var extremeCorners = new MatOfPoint2f(); + Core.perspectiveTransform(tagImageCorners, extremeCorners, perspecTrf); + // dilate ROI to fit full tag + boundingRect = Imgproc.boundingRect(extremeCorners); + + // adjust interpolation 
strategy based on size of warped tag compared to tag image + var warpedContourArea = Imgproc.contourArea(extremeCorners); + double warpedTagUpscale = Math.sqrt(warpedContourArea) / Math.sqrt(tagImage.size().area()); + int warpStrategy = Imgproc.INTER_NEAREST; + // automatically determine the best supersampling of warped image and scale of tag image + /* + (warpPerspective does not properly resample, so this is used to avoid aliasing in the + warped image. Supersampling is used when the warped tag is small, but is very slow + when the warped tag is large-- scaling the tag image up and using linear interpolation + instead can be performant while still effectively antialiasing. Some combination of these + two can be used in between those extremes.) + + TODO: Simplify magic numbers to one or two variables, or use a more proper approach? + */ + int supersampling = 6; + supersampling = (int) Math.ceil(supersampling / warpedTagUpscale); + supersampling = Math.max(Math.min(supersampling, 8), 1); + + Mat scaledTagImage = new Mat(); + if (warpedTagUpscale > 2.0) { + warpStrategy = Imgproc.INTER_LINEAR; + int scaleFactor = (int) (warpedTagUpscale / 3.0) + 2; + scaleFactor = Math.max(Math.min(scaleFactor, 40), 1); + scaleFactor *= supersampling; + Imgproc.resize( + tagImage, scaledTagImage, new Size(), scaleFactor, scaleFactor, Imgproc.INTER_NEAREST); + tagPoints.fromArray(get16h5MarkerPts(scaleFactor)); + } else tagImage.assignTo(scaledTagImage); + + // constrain the bounding rect inside of the destination image + boundingRect.x -= 1; + boundingRect.y -= 1; + boundingRect.width += 2; + boundingRect.height += 2; + if (boundingRect.x < 0) { + boundingRect.width += boundingRect.x; + boundingRect.x = 0; } - - /** - * Draw a polygon outline or filled polygon to the destination image with the given points. - * - * @param dstPoints The points in the destination image representing the polygon. - * @param thickness The thickness of the outline in pixels. 
If this is not positive, a filled - * polygon is drawn instead. - * @param color The color drawn. This should match the color space of the destination image. - * @param isClosed If the last point should connect to the first point in the polygon outline. - * @param destination The destination image to draw onto. - */ - public static void drawPoly( - Point[] dstPoints, int thickness, Scalar color, boolean isClosed, Mat destination) { - var dstPointsd = new MatOfPoint(dstPoints); - if (thickness > 0) { - Imgproc.polylines( - destination, List.of(dstPointsd), isClosed, color, thickness, Imgproc.LINE_AA); - } else { - Imgproc.fillPoly(destination, List.of(dstPointsd), color, Imgproc.LINE_AA); - } + if (boundingRect.y < 0) { + boundingRect.height += boundingRect.y; + boundingRect.y = 0; } - - /** - * Draws a contour around the given points and text of the id onto the destination image. - * - * @param id Fiducial ID number to draw - * @param dstPoints Points representing the four corners of the tag marker(black square) in the - * destination image. - * @param destination The destination image to draw onto. The image should be in the BGR color - * space. 
- */ - public static void drawTagDetection(int id, Point[] dstPoints, Mat destination) { - double thickness = getScaledThickness(1, destination); - drawPoly(dstPoints, (int) thickness, new Scalar(0, 0, 255), true, destination); - var rect = Imgproc.boundingRect(new MatOfPoint(dstPoints)); - var textPt = new Point(rect.x + rect.width, rect.y); - textPt.x += thickness; - textPt.y += thickness; - Imgproc.putText( - destination, - String.valueOf(id), - textPt, - Imgproc.FONT_HERSHEY_PLAIN, - 1.5 * thickness, - new Scalar(0, 200, 0), - (int) thickness, - Imgproc.LINE_AA); + boundingRect.width = Math.min(destination.width() - boundingRect.x, boundingRect.width); + boundingRect.height = Math.min(destination.height() - boundingRect.y, boundingRect.height); + if (boundingRect.width <= 0 || boundingRect.height <= 0) return; + + // upscale if supersampling + Mat scaledDstPts = new Mat(); + if (supersampling > 1) { + Core.multiply(dstPointMat, new Scalar(supersampling, supersampling), scaledDstPts); + boundingRect.x *= supersampling; + boundingRect.y *= supersampling; + boundingRect.width *= supersampling; + boundingRect.height *= supersampling; + } else dstPointMat.assignTo(scaledDstPts); + + // update transform relative to expanded, scaled bounding rect + Core.subtract(scaledDstPts, new Scalar(boundingRect.tl().x, boundingRect.tl().y), scaledDstPts); + perspecTrf = Imgproc.getPerspectiveTransform(tagPoints, scaledDstPts); + + // warp (scaled) tag image onto (scaled) ROI image representing the portion of + // the destination image encapsulated by boundingRect + Mat tempROI = new Mat(); + Imgproc.warpPerspective(scaledTagImage, tempROI, perspecTrf, boundingRect.size(), warpStrategy); + + // downscale ROI with interpolation if supersampling + if (supersampling > 1) { + boundingRect.x /= supersampling; + boundingRect.y /= supersampling; + boundingRect.width /= supersampling; + boundingRect.height /= supersampling; + Imgproc.resize(tempROI, tempROI, boundingRect.size(), 0, 0, 
Imgproc.INTER_AREA); } - /** - * Set the field dimensions that are used for drawing the field wireframe. - * - * @param fieldLengthMeters - * @param fieldWidthMeters - */ - public static void setFieldDimensionsMeters(double fieldLengthMeters, double fieldWidthMeters) { - fieldLength = fieldLengthMeters; - fieldWidth = fieldWidthMeters; + // we want to copy ONLY the transformed tag to the result image, not the entire bounding rect + // using a mask only copies the source pixels which have an associated non-zero value in the + // mask + Mat tempMask = Mat.zeros(tempROI.size(), CvType.CV_8UC1); + Core.subtract( + extremeCorners, new Scalar(boundingRect.tl().x, boundingRect.tl().y), extremeCorners); + Point tempCenter = new Point(); + tempCenter.x = + Arrays.stream(extremeCorners.toArray()).mapToDouble(p -> p.x).average().getAsDouble(); + tempCenter.y = + Arrays.stream(extremeCorners.toArray()).mapToDouble(p -> p.y).average().getAsDouble(); + // dilate tag corners + Arrays.stream(extremeCorners.toArray()) + .forEach( + p -> { + double xdiff = p.x - tempCenter.x; + double ydiff = p.y - tempCenter.y; + xdiff += 1 * Math.signum(xdiff); + ydiff += 1 * Math.signum(ydiff); + new Point(tempCenter.x + xdiff, tempCenter.y + ydiff); + }); + // (make inside of tag completely white in mask) + Imgproc.fillConvexPoly(tempMask, new MatOfPoint(extremeCorners.toArray()), new Scalar(255)); + + // copy transformed tag onto result image + tempROI.copyTo(destination.submat(boundingRect), tempMask); + } + + /** + * Given a line thickness in a 640x480 image, try to scale to the given destination image + * resolution. 
+ * + * @param thickness480p A hypothetical line thickness in a 640x480 image + * @param destinationImg The destination image to scale to + * @return Scaled thickness which cannot be less than 1 + */ + public static double getScaledThickness(double thickness480p, Mat destinationImg) { + double scaleX = destinationImg.width() / 640.0; + double scaleY = destinationImg.height() / 480.0; + double minScale = Math.min(scaleX, scaleY); + return Math.max(thickness480p * minScale, 1.0); + } + + /** + * Draw a filled ellipse in the destination image. + * + * @param dstPoints The points in the destination image representing the rectangle in which the + * ellipse is inscribed. + * @param color The color of the ellipse. This is a scalar with BGR values (0-255) + * @param destination The destination image to draw onto. The image should be in the BGR color + * space. + */ + public static void drawInscribedEllipse(Point[] dstPoints, Scalar color, Mat destination) { + // create RotatedRect from points + var rect = OpenCVHelp.getMinAreaRect(dstPoints); + // inscribe ellipse inside rectangle + Imgproc.ellipse(destination, rect, color, -1, Imgproc.LINE_AA); + } + + /** + * Draw a polygon outline or filled polygon to the destination image with the given points. + * + * @param dstPoints The points in the destination image representing the polygon. + * @param thickness The thickness of the outline in pixels. If this is not positive, a filled + * polygon is drawn instead. + * @param color The color drawn. This should match the color space of the destination image. + * @param isClosed If the last point should connect to the first point in the polygon outline. + * @param destination The destination image to draw onto. 
+ */ + public static void drawPoly( + Point[] dstPoints, int thickness, Scalar color, boolean isClosed, Mat destination) { + var dstPointsd = new MatOfPoint(dstPoints); + if (thickness > 0) { + Imgproc.polylines( + destination, List.of(dstPointsd), isClosed, color, thickness, Imgproc.LINE_AA); + } else { + Imgproc.fillPoly(destination, List.of(dstPointsd), color, Imgproc.LINE_AA); } - - /** - * The translations used to draw the field side walls and driver station walls. It is a List of - * Lists because the translations are not all connected. - */ - private static List> getFieldWallLines() { - var list = new ArrayList>(); - - final double sideHt = Units.inchesToMeters(19.5); - final double driveHt = Units.inchesToMeters(35); - final double topHt = Units.inchesToMeters(78); - - // field floor - list.add( - List.of( - new Translation3d(0, 0, 0), - new Translation3d(fieldLength, 0, 0), - new Translation3d(fieldLength, fieldWidth, 0), - new Translation3d(0, fieldWidth, 0), - new Translation3d(0, 0, 0))); - // right side wall - list.add( - List.of( - new Translation3d(0, 0, 0), - new Translation3d(0, 0, sideHt), - new Translation3d(fieldLength, 0, sideHt), - new Translation3d(fieldLength, 0, 0))); - // red driverstation - list.add( - List.of( - new Translation3d(fieldLength, 0, sideHt), - new Translation3d(fieldLength, 0, topHt), - new Translation3d(fieldLength, fieldWidth, topHt), - new Translation3d(fieldLength, fieldWidth, sideHt))); - list.add( - List.of( - new Translation3d(fieldLength, 0, driveHt), - new Translation3d(fieldLength, fieldWidth, driveHt))); - // left side wall - list.add( - List.of( - new Translation3d(0, fieldWidth, 0), - new Translation3d(0, fieldWidth, sideHt), - new Translation3d(fieldLength, fieldWidth, sideHt), - new Translation3d(fieldLength, fieldWidth, 0))); - // blue driverstation - list.add( - List.of( - new Translation3d(0, 0, sideHt), - new Translation3d(0, 0, topHt), - new Translation3d(0, fieldWidth, topHt), - new Translation3d(0, 
fieldWidth, sideHt))); - list.add(List.of(new Translation3d(0, 0, driveHt), new Translation3d(0, fieldWidth, driveHt))); - - return list; + } + + /** + * Draws a contour around the given points and text of the id onto the destination image. + * + * @param id Fiducial ID number to draw + * @param dstPoints Points representing the four corners of the tag marker(black square) in the + * destination image. + * @param destination The destination image to draw onto. The image should be in the BGR color + * space. + */ + public static void drawTagDetection(int id, Point[] dstPoints, Mat destination) { + double thickness = getScaledThickness(1, destination); + drawPoly(dstPoints, (int) thickness, new Scalar(0, 0, 255), true, destination); + var rect = Imgproc.boundingRect(new MatOfPoint(dstPoints)); + var textPt = new Point(rect.x + rect.width, rect.y); + textPt.x += thickness; + textPt.y += thickness; + Imgproc.putText( + destination, + String.valueOf(id), + textPt, + Imgproc.FONT_HERSHEY_PLAIN, + 1.5 * thickness, + new Scalar(0, 200, 0), + (int) thickness, + Imgproc.LINE_AA); + } + + /** + * Set the field dimensions that are used for drawing the field wireframe. + * + * @param fieldLengthMeters + * @param fieldWidthMeters + */ + public static void setFieldDimensionsMeters(double fieldLengthMeters, double fieldWidthMeters) { + fieldLength = fieldLengthMeters; + fieldWidth = fieldWidthMeters; + } + + /** + * The translations used to draw the field side walls and driver station walls. It is a List of + * Lists because the translations are not all connected. 
+ */ + private static List> getFieldWallLines() { + var list = new ArrayList>(); + + final double sideHt = Units.inchesToMeters(19.5); + final double driveHt = Units.inchesToMeters(35); + final double topHt = Units.inchesToMeters(78); + + // field floor + list.add( + List.of( + new Translation3d(0, 0, 0), + new Translation3d(fieldLength, 0, 0), + new Translation3d(fieldLength, fieldWidth, 0), + new Translation3d(0, fieldWidth, 0), + new Translation3d(0, 0, 0))); + // right side wall + list.add( + List.of( + new Translation3d(0, 0, 0), + new Translation3d(0, 0, sideHt), + new Translation3d(fieldLength, 0, sideHt), + new Translation3d(fieldLength, 0, 0))); + // red driverstation + list.add( + List.of( + new Translation3d(fieldLength, 0, sideHt), + new Translation3d(fieldLength, 0, topHt), + new Translation3d(fieldLength, fieldWidth, topHt), + new Translation3d(fieldLength, fieldWidth, sideHt))); + list.add( + List.of( + new Translation3d(fieldLength, 0, driveHt), + new Translation3d(fieldLength, fieldWidth, driveHt))); + // left side wall + list.add( + List.of( + new Translation3d(0, fieldWidth, 0), + new Translation3d(0, fieldWidth, sideHt), + new Translation3d(fieldLength, fieldWidth, sideHt), + new Translation3d(fieldLength, fieldWidth, 0))); + // blue driverstation + list.add( + List.of( + new Translation3d(0, 0, sideHt), + new Translation3d(0, 0, topHt), + new Translation3d(0, fieldWidth, topHt), + new Translation3d(0, fieldWidth, sideHt))); + list.add(List.of(new Translation3d(0, 0, driveHt), new Translation3d(0, fieldWidth, driveHt))); + + return list; + } + + /** + * The translations used to draw the field floor subdivisions (not the floor outline). It is a + * List of Lists because the translations are not all connected. + * + * @param subdivisions How many "subdivisions" along the width/length of the floor. E.g. 3 + * subdivisions would mean 2 lines along the length and 2 lines along the width creating a 3x3 + * "grid". 
+ */ + private static List> getFieldFloorLines(int subdivisions) { + var list = new ArrayList>(); + final double subLength = fieldLength / subdivisions; + final double subWidth = fieldWidth / subdivisions; + + // field floor subdivisions + for (int i = 0; i < subdivisions; i++) { + list.add( + List.of( + new Translation3d(0, subWidth * (i + 1), 0), + new Translation3d(fieldLength, subWidth * (i + 1), 0))); + list.add( + List.of( + new Translation3d(subLength * (i + 1), 0, 0), + new Translation3d(subLength * (i + 1), fieldWidth, 0))); } - /** - * The translations used to draw the field floor subdivisions (not the floor outline). It is a - * List of Lists because the translations are not all connected. - * - * @param subdivisions How many "subdivisions" along the width/length of the floor. E.g. 3 - * subdivisions would mean 2 lines along the length and 2 lines along the width creating a 3x3 - * "grid". - */ - private static List> getFieldFloorLines(int subdivisions) { - var list = new ArrayList>(); - final double subLength = fieldLength / subdivisions; - final double subWidth = fieldWidth / subdivisions; - - // field floor subdivisions - for (int i = 0; i < subdivisions; i++) { - list.add( - List.of( - new Translation3d(0, subWidth * (i + 1), 0), - new Translation3d(fieldLength, subWidth * (i + 1), 0))); - list.add( - List.of( - new Translation3d(subLength * (i + 1), 0, 0), - new Translation3d(subLength * (i + 1), fieldWidth, 0))); - } - - return list; + return list; + } + + /** + * Convert 3D lines represented by the given series of translations into a polygon(s) in the + * camera's image. + * + * @param camRt The change in basis from world coordinates to camera coordinates. See {@link + * RotTrlTransform3d#makeRelativeTo(Pose3d)}. + * @param prop The simulated camera's properties. + * @param trls A sequential series of translations defining the polygon to be drawn. 
+ * @param resolution Resolution as a fraction(0 - 1) of the video frame's diagonal length in + * pixels. Line segments will be subdivided if they exceed this resolution. + * @param isClosed If the final translation should also draw a line to the first translation. + * @param destination The destination image that is being drawn to. + * @return A list of polygons(which are an array of points) + */ + public static List polyFrom3dLines( + RotTrlTransform3d camRt, + SimCameraProperties prop, + List trls, + double resolution, + boolean isClosed, + Mat destination) { + resolution = Math.hypot(destination.size().height, destination.size().width) * resolution; + List pts = new ArrayList<>(trls); + if (isClosed) pts.add(pts.get(0)); + List polyPointList = new ArrayList<>(); + + for (int i = 0; i < pts.size() - 1; i++) { + var pta = pts.get(i); + var ptb = pts.get(i + 1); + + // check if line is inside camera fulcrum + var inter = prop.getVisibleLine(camRt, pta, ptb); + if (inter.getSecond() == null) continue; + + // cull line to the inside of the camera fulcrum + double inter1 = inter.getFirst().doubleValue(); + double inter2 = inter.getSecond().doubleValue(); + var baseDelta = ptb.minus(pta); + var old_pta = pta; + if (inter1 > 0) pta = old_pta.plus(baseDelta.times(inter1)); + if (inter2 < 1) ptb = old_pta.plus(baseDelta.times(inter2)); + baseDelta = ptb.minus(pta); + + // project points into 2d + var poly = new ArrayList(); + poly.addAll( + Arrays.asList( + OpenCVHelp.projectPoints( + prop.getIntrinsics(), prop.getDistCoeffs(), camRt, List.of(pta, ptb)))); + var pxa = poly.get(0); + var pxb = poly.get(1); + + // subdivide projected line based on desired resolution + double pxDist = Math.hypot(pxb.x - pxa.x, pxb.y - pxa.y); + int subdivisions = (int) (pxDist / resolution); + var subDelta = baseDelta.div(subdivisions + 1); + var subPts = new ArrayList(); + for (int j = 0; j < subdivisions; j++) { + subPts.add(pta.plus(subDelta.times(j + 1))); + } + if (subPts.size() > 0) { 
+ poly.addAll( + 1, + Arrays.asList( + OpenCVHelp.projectPoints( + prop.getIntrinsics(), prop.getDistCoeffs(), camRt, subPts))); + } + + polyPointList.add(poly.toArray(Point[]::new)); } - /** - * Convert 3D lines represented by the given series of translations into a polygon(s) in the - * camera's image. - * - * @param camRt The change in basis from world coordinates to camera coordinates. See {@link - * RotTrlTransform3d#makeRelativeTo(Pose3d)}. - * @param prop The simulated camera's properties. - * @param trls A sequential series of translations defining the polygon to be drawn. - * @param resolution Resolution as a fraction(0 - 1) of the video frame's diagonal length in - * pixels. Line segments will be subdivided if they exceed this resolution. - * @param isClosed If the final translation should also draw a line to the first translation. - * @param destination The destination image that is being drawn to. - * @return A list of polygons(which are an array of points) - */ - public static List polyFrom3dLines( - RotTrlTransform3d camRt, - SimCameraProperties prop, - List trls, - double resolution, - boolean isClosed, - Mat destination) { - resolution = Math.hypot(destination.size().height, destination.size().width) * resolution; - List pts = new ArrayList<>(trls); - if (isClosed) pts.add(pts.get(0)); - List polyPointList = new ArrayList<>(); - - for (int i = 0; i < pts.size() - 1; i++) { - var pta = pts.get(i); - var ptb = pts.get(i + 1); - - // check if line is inside camera fulcrum - var inter = prop.getVisibleLine(camRt, pta, ptb); - if (inter.getSecond() == null) continue; - - // cull line to the inside of the camera fulcrum - double inter1 = inter.getFirst().doubleValue(); - double inter2 = inter.getSecond().doubleValue(); - var baseDelta = ptb.minus(pta); - var old_pta = pta; - if (inter1 > 0) pta = old_pta.plus(baseDelta.times(inter1)); - if (inter2 < 1) ptb = old_pta.plus(baseDelta.times(inter2)); - baseDelta = ptb.minus(pta); - - // project points into 2d 
- var poly = new ArrayList(); - poly.addAll( - Arrays.asList( - OpenCVHelp.projectPoints( - prop.getIntrinsics(), prop.getDistCoeffs(), camRt, List.of(pta, ptb)))); - var pxa = poly.get(0); - var pxb = poly.get(1); - - // subdivide projected line based on desired resolution - double pxDist = Math.hypot(pxb.x - pxa.x, pxb.y - pxa.y); - int subdivisions = (int) (pxDist / resolution); - var subDelta = baseDelta.div(subdivisions + 1); - var subPts = new ArrayList(); - for (int j = 0; j < subdivisions; j++) { - subPts.add(pta.plus(subDelta.times(j + 1))); - } - if (subPts.size() > 0) { - poly.addAll( - 1, - Arrays.asList( - OpenCVHelp.projectPoints( - prop.getIntrinsics(), prop.getDistCoeffs(), camRt, subPts))); - } - - polyPointList.add(poly.toArray(Point[]::new)); - } - - return polyPointList; + return polyPointList; + } + + /** + * Draw a wireframe of the field to the given image. + * + * @param camRt The change in basis from world coordinates to camera coordinates. See {@link + * RotTrlTransform3d#makeRelativeTo(Pose3d)}. + * @param prop The simulated camera's properties. + * @param resolution Resolution as a fraction(0 - 1) of the video frame's diagonal length in + * pixels. Line segments will be subdivided if they exceed this resolution. + * @param wallThickness Thickness of the lines used for drawing the field walls in pixels. This is + * scaled by {@link #getScaledThickness(double, Mat)}. + * @param wallColor Color of the lines used for drawing the field walls. + * @param floorSubdivisions A NxN "grid" is created from the floor where this parameter is N, + * which defines the floor lines. + * @param floorThickness Thickness of the lines used for drawing the field floor grid in pixels. + * This is scaled by {@link #getScaledThickness(double, Mat)}. + * @param floorColor Color of the lines used for drawing the field floor grid. + * @param destination The destination image to draw to. 
+ */ + public static void drawFieldWireframe( + RotTrlTransform3d camRt, + SimCameraProperties prop, + double resolution, + double wallThickness, + Scalar wallColor, + int floorSubdivisions, + double floorThickness, + Scalar floorColor, + Mat destination) { + for (var trls : getFieldFloorLines(floorSubdivisions)) { + var polys = VideoSimUtil.polyFrom3dLines(camRt, prop, trls, resolution, false, destination); + for (var poly : polys) { + drawPoly( + poly, + (int) Math.round(getScaledThickness(floorThickness, destination)), + floorColor, + false, + destination); + } } - - /** - * Draw a wireframe of the field to the given image. - * - * @param camRt The change in basis from world coordinates to camera coordinates. See {@link - * RotTrlTransform3d#makeRelativeTo(Pose3d)}. - * @param prop The simulated camera's properties. - * @param resolution Resolution as a fraction(0 - 1) of the video frame's diagonal length in - * pixels. Line segments will be subdivided if they exceed this resolution. - * @param wallThickness Thickness of the lines used for drawing the field walls in pixels. This is - * scaled by {@link #getScaledThickness(double, Mat)}. - * @param wallColor Color of the lines used for drawing the field walls. - * @param floorSubdivisions A NxN "grid" is created from the floor where this parameter is N, - * which defines the floor lines. - * @param floorThickness Thickness of the lines used for drawing the field floor grid in pixels. - * This is scaled by {@link #getScaledThickness(double, Mat)}. - * @param floorColor Color of the lines used for drawing the field floor grid. - * @param destination The destination image to draw to. 
- */ - public static void drawFieldWireframe( - RotTrlTransform3d camRt, - SimCameraProperties prop, - double resolution, - double wallThickness, - Scalar wallColor, - int floorSubdivisions, - double floorThickness, - Scalar floorColor, - Mat destination) { - for (var trls : getFieldFloorLines(floorSubdivisions)) { - var polys = VideoSimUtil.polyFrom3dLines(camRt, prop, trls, resolution, false, destination); - for (var poly : polys) { - drawPoly( - poly, - (int) Math.round(getScaledThickness(floorThickness, destination)), - floorColor, - false, - destination); - } - } - for (var trls : getFieldWallLines()) { - var polys = VideoSimUtil.polyFrom3dLines(camRt, prop, trls, resolution, false, destination); - for (var poly : polys) { - drawPoly( - poly, - (int) Math.round(getScaledThickness(wallThickness, destination)), - wallColor, - false, - destination); - } - } + for (var trls : getFieldWallLines()) { + var polys = VideoSimUtil.polyFrom3dLines(camRt, prop, trls, resolution, false, destination); + for (var poly : polys) { + drawPoly( + poly, + (int) Math.round(getScaledThickness(wallThickness, destination)), + wallColor, + false, + destination); + } } + } } diff --git a/photon-lib/src/main/java/org/photonvision/simulation/VisionSystemSim.java b/photon-lib/src/main/java/org/photonvision/simulation/VisionSystemSim.java index ed0207096e..eb9986ad4a 100644 --- a/photon-lib/src/main/java/org/photonvision/simulation/VisionSystemSim.java +++ b/photon-lib/src/main/java/org/photonvision/simulation/VisionSystemSim.java @@ -53,358 +53,358 @@ * camera target info. 
*/ public class VisionSystemSim { - private final Map camSimMap = new HashMap<>(); - private static final double kBufferLengthSeconds = 1.5; - // save robot-to-camera for each camera over time (Pose3d is easily interpolatable) - private final Map> camTrfMap = new HashMap<>(); - - // interpolate drivetrain with twists - private final TimeInterpolatableBuffer robotPoseBuffer = - TimeInterpolatableBuffer.createBuffer(kBufferLengthSeconds); - - private final Map> targetSets = new HashMap<>(); - - private final Field2d dbgField; - - private final Transform3d kEmptyTrf = new Transform3d(); - - /** - * A simulated vision system involving a camera(s) and coprocessor(s) mounted on a mobile robot - * running PhotonVision, detecting targets placed on the field. {@link VisionTargetSim}s added to - * this class will be detected by the {@link PhotonCameraSim}s added to this class. This class - * should be updated periodically with the robot's current pose in order to publish the simulated - * camera target info. - * - * @param visionSystemName The specific identifier for this vision system in NetworkTables. 
- */ - public VisionSystemSim(String visionSystemName) { - dbgField = new Field2d(); - String tableName = "VisionSystemSim-" + visionSystemName; - SmartDashboard.putData(tableName + "/Sim Field", dbgField); + private final Map camSimMap = new HashMap<>(); + private static final double kBufferLengthSeconds = 1.5; + // save robot-to-camera for each camera over time (Pose3d is easily interpolatable) + private final Map> camTrfMap = new HashMap<>(); + + // interpolate drivetrain with twists + private final TimeInterpolatableBuffer robotPoseBuffer = + TimeInterpolatableBuffer.createBuffer(kBufferLengthSeconds); + + private final Map> targetSets = new HashMap<>(); + + private final Field2d dbgField; + + private final Transform3d kEmptyTrf = new Transform3d(); + + /** + * A simulated vision system involving a camera(s) and coprocessor(s) mounted on a mobile robot + * running PhotonVision, detecting targets placed on the field. {@link VisionTargetSim}s added to + * this class will be detected by the {@link PhotonCameraSim}s added to this class. This class + * should be updated periodically with the robot's current pose in order to publish the simulated + * camera target info. + * + * @param visionSystemName The specific identifier for this vision system in NetworkTables. + */ + public VisionSystemSim(String visionSystemName) { + dbgField = new Field2d(); + String tableName = "VisionSystemSim-" + visionSystemName; + SmartDashboard.putData(tableName + "/Sim Field", dbgField); + } + + /** Get one of the simulated cameras. */ + public Optional getCameraSim(String name) { + return Optional.ofNullable(camSimMap.get(name)); + } + + /** Get all the simulated cameras. */ + public Collection getCameraSims() { + return camSimMap.values(); + } + + /** + * Adds a simulated camera to this vision system with a specified robot-to-camera transformation. + * The vision targets registered with this vision system simulation will be observed by the + * simulated {@link PhotonCamera}. 
+ * + * @param cameraSim The camera simulation + * @param robotToCamera The transform from the robot pose to the camera pose + */ + public void addCamera(PhotonCameraSim cameraSim, Transform3d robotToCamera) { + var existing = camSimMap.putIfAbsent(cameraSim.getCamera().getName(), cameraSim); + if (existing == null) { + camTrfMap.put(cameraSim, TimeInterpolatableBuffer.createBuffer(kBufferLengthSeconds)); + camTrfMap + .get(cameraSim) + .addSample(Timer.getFPGATimestamp(), new Pose3d().plus(robotToCamera)); } - - /** Get one of the simulated cameras. */ - public Optional getCameraSim(String name) { - return Optional.ofNullable(camSimMap.get(name)); - } - - /** Get all the simulated cameras. */ - public Collection getCameraSims() { - return camSimMap.values(); - } - - /** - * Adds a simulated camera to this vision system with a specified robot-to-camera transformation. - * The vision targets registered with this vision system simulation will be observed by the - * simulated {@link PhotonCamera}. - * - * @param cameraSim The camera simulation - * @param robotToCamera The transform from the robot pose to the camera pose - */ - public void addCamera(PhotonCameraSim cameraSim, Transform3d robotToCamera) { - var existing = camSimMap.putIfAbsent(cameraSim.getCamera().getName(), cameraSim); - if (existing == null) { - camTrfMap.put(cameraSim, TimeInterpolatableBuffer.createBuffer(kBufferLengthSeconds)); - camTrfMap - .get(cameraSim) - .addSample(Timer.getFPGATimestamp(), new Pose3d().plus(robotToCamera)); - } - } - - /** Remove all simulated cameras from this vision system. */ - public void clearCameras() { - camSimMap.clear(); - camTrfMap.clear(); - } - - /** - * Remove a simulated camera from this vision system. 
- * - * @return If the camera was present and removed - */ - public boolean removeCamera(PhotonCameraSim cameraSim) { - boolean success = camSimMap.remove(cameraSim.getCamera().getName()) != null; - camTrfMap.remove(cameraSim); - return success; - } - - /** - * Get a simulated camera's position relative to the robot. If the requested camera is invalid, an - * empty optional is returned. - * - * @param cameraSim The specific camera to get the robot-to-camera transform of - * @return The transform of this camera, or an empty optional if it is invalid - */ - public Optional getRobotToCamera(PhotonCameraSim cameraSim) { - return getRobotToCamera(cameraSim, Timer.getFPGATimestamp()); - } - - /** - * Get a simulated camera's position relative to the robot. If the requested camera is invalid, an - * empty optional is returned. - * - * @param cameraSim The specific camera to get the robot-to-camera transform of - * @param timeSeconds Timestamp in seconds of when the transform should be observed - * @return The transform of this camera, or an empty optional if it is invalid - */ - public Optional getRobotToCamera(PhotonCameraSim cameraSim, double timeSeconds) { - var trfBuffer = camTrfMap.get(cameraSim); - if (trfBuffer == null) return Optional.empty(); - var sample = trfBuffer.getSample(timeSeconds); - if (sample.isEmpty()) return Optional.empty(); - return Optional.of(new Transform3d(new Pose3d(), sample.orElse(new Pose3d()))); + } + + /** Remove all simulated cameras from this vision system. */ + public void clearCameras() { + camSimMap.clear(); + camTrfMap.clear(); + } + + /** + * Remove a simulated camera from this vision system. + * + * @return If the camera was present and removed + */ + public boolean removeCamera(PhotonCameraSim cameraSim) { + boolean success = camSimMap.remove(cameraSim.getCamera().getName()) != null; + camTrfMap.remove(cameraSim); + return success; + } + + /** + * Get a simulated camera's position relative to the robot. 
If the requested camera is invalid, an + * empty optional is returned. + * + * @param cameraSim The specific camera to get the robot-to-camera transform of + * @return The transform of this camera, or an empty optional if it is invalid + */ + public Optional getRobotToCamera(PhotonCameraSim cameraSim) { + return getRobotToCamera(cameraSim, Timer.getFPGATimestamp()); + } + + /** + * Get a simulated camera's position relative to the robot. If the requested camera is invalid, an + * empty optional is returned. + * + * @param cameraSim The specific camera to get the robot-to-camera transform of + * @param timeSeconds Timestamp in seconds of when the transform should be observed + * @return The transform of this camera, or an empty optional if it is invalid + */ + public Optional getRobotToCamera(PhotonCameraSim cameraSim, double timeSeconds) { + var trfBuffer = camTrfMap.get(cameraSim); + if (trfBuffer == null) return Optional.empty(); + var sample = trfBuffer.getSample(timeSeconds); + if (sample.isEmpty()) return Optional.empty(); + return Optional.of(new Transform3d(new Pose3d(), sample.orElse(new Pose3d()))); + } + + /** + * Get a simulated camera's position on the field. If the requested camera is invalid, an empty + * optional is returned. + * + * @param cameraSim The specific camera to get the field pose of + * @return The pose of this camera, or an empty optional if it is invalid + */ + public Optional getCameraPose(PhotonCameraSim cameraSim) { + return getCameraPose(cameraSim, Timer.getFPGATimestamp()); + } + + /** + * Get a simulated camera's position on the field. If the requested camera is invalid, an empty + * optional is returned. 
+ * + * @param cameraSim The specific camera to get the field pose of + * @param timeSeconds Timestamp in seconds of when the pose should be observed + * @return The pose of this camera, or an empty optional if it is invalid + */ + public Optional getCameraPose(PhotonCameraSim cameraSim, double timeSeconds) { + var robotToCamera = getRobotToCamera(cameraSim, timeSeconds); + if (robotToCamera.isEmpty()) return Optional.empty(); + return Optional.of(getRobotPose(timeSeconds).plus(robotToCamera.get())); + } + + /** + * Adjust a camera's position relative to the robot. Use this if your camera is on a gimbal or + * turret or some other mobile platform. + * + * @param cameraSim The simulated camera to change the relative position of + * @param robotToCamera New transform from the robot to the camera + * @return If the cameraSim was valid and transform was adjusted + */ + public boolean adjustCamera(PhotonCameraSim cameraSim, Transform3d robotToCamera) { + var trfBuffer = camTrfMap.get(cameraSim); + if (trfBuffer == null) return false; + trfBuffer.addSample(Timer.getFPGATimestamp(), new Pose3d().plus(robotToCamera)); + return true; + } + + /** Reset the previous transforms for all cameras to their current transform. */ + public void resetCameraTransforms() { + for (var cam : camTrfMap.keySet()) resetCameraTransforms(cam); + } + + /** + * Reset the transform history for this camera to just the current transform. 
+ * + * @return If the cameraSim was valid and transforms were reset + */ + public boolean resetCameraTransforms(PhotonCameraSim cameraSim) { + double now = Timer.getFPGATimestamp(); + var trfBuffer = camTrfMap.get(cameraSim); + if (trfBuffer == null) return false; + var lastTrf = new Transform3d(new Pose3d(), trfBuffer.getSample(now).orElse(new Pose3d())); + trfBuffer.clear(); + adjustCamera(cameraSim, lastTrf); + return true; + } + + public Set getVisionTargets() { + var all = new HashSet(); + for (var entry : targetSets.entrySet()) { + all.addAll(entry.getValue()); } - - /** - * Get a simulated camera's position on the field. If the requested camera is invalid, an empty - * optional is returned. - * - * @param cameraSim The specific camera to get the field pose of - * @return The pose of this camera, or an empty optional if it is invalid - */ - public Optional getCameraPose(PhotonCameraSim cameraSim) { - return getCameraPose(cameraSim, Timer.getFPGATimestamp()); - } - - /** - * Get a simulated camera's position on the field. If the requested camera is invalid, an empty - * optional is returned. - * - * @param cameraSim The specific camera to get the field pose of - * @param timeSeconds Timestamp in seconds of when the pose should be observed - * @return The pose of this camera, or an empty optional if it is invalid - */ - public Optional getCameraPose(PhotonCameraSim cameraSim, double timeSeconds) { - var robotToCamera = getRobotToCamera(cameraSim, timeSeconds); - if (robotToCamera.isEmpty()) return Optional.empty(); - return Optional.of(getRobotPose(timeSeconds).plus(robotToCamera.get())); + return all; + } + + public Set getVisionTargets(String type) { + return targetSets.get(type); + } + + /** + * Adds targets on the field which your vision system is designed to detect. The {@link + * PhotonCamera}s simulated from this system will report the location of the camera relative to + * the subset of these targets which are visible from the given camera position. 
+ * + *

By default these are added under the type "targets". + * + * @param targets Targets to add to the simulated field + */ + public void addVisionTargets(VisionTargetSim... targets) { + addVisionTargets("targets", targets); + } + + /** + * Adds targets on the field which your vision system is designed to detect. The {@link + * PhotonCamera}s simulated from this system will report the location of the camera relative to + * the subset of these targets which are visible from the given camera position. + * + *

The AprilTags from this layout will be added as vision targets under the type "apriltag". + * The poses added preserve the tag layout's current alliance origin. If the tag layout's alliance + * origin is changed, these added tags will have to be cleared and re-added. + * + * @param tagLayout The field tag layout to get Apriltag poses and IDs from + */ + public void addAprilTags(AprilTagFieldLayout tagLayout) { + for (AprilTag tag : tagLayout.getTags()) { + addVisionTargets( + "apriltag", + new VisionTargetSim( + tagLayout.getTagPose(tag.ID).get(), // preserve alliance rotation + TargetModel.kTag16h5, + tag.ID)); } - - /** - * Adjust a camera's position relative to the robot. Use this if your camera is on a gimbal or - * turret or some other mobile platform. - * - * @param cameraSim The simulated camera to change the relative position of - * @param robotToCamera New transform from the robot to the camera - * @return If the cameraSim was valid and transform was adjusted - */ - public boolean adjustCamera(PhotonCameraSim cameraSim, Transform3d robotToCamera) { - var trfBuffer = camTrfMap.get(cameraSim); - if (trfBuffer == null) return false; - trfBuffer.addSample(Timer.getFPGATimestamp(), new Pose3d().plus(robotToCamera)); - return true; + } + + /** + * Adds targets on the field which your vision system is designed to detect. The {@link + * PhotonCamera}s simulated from this system will report the location of the camera relative to + * the subset of these targets which are visible from the given camera position. + * + * @param type Type of target (e.g. "cargo"). + * @param targets Targets to add to the simulated field + */ + public void addVisionTargets(String type, VisionTargetSim... targets) { + targetSets.computeIfAbsent(type, k -> new HashSet<>()); + for (var tgt : targets) { + targetSets.get(type).add(tgt); } - - /** Reset the previous transforms for all cameras to their current transform. 
*/ - public void resetCameraTransforms() { - for (var cam : camTrfMap.keySet()) resetCameraTransforms(cam); - } - - /** - * Reset the transform history for this camera to just the current transform. - * - * @return If the cameraSim was valid and transforms were reset - */ - public boolean resetCameraTransforms(PhotonCameraSim cameraSim) { - double now = Timer.getFPGATimestamp(); - var trfBuffer = camTrfMap.get(cameraSim); - if (trfBuffer == null) return false; - var lastTrf = new Transform3d(new Pose3d(), trfBuffer.getSample(now).orElse(new Pose3d())); - trfBuffer.clear(); - adjustCamera(cameraSim, lastTrf); - return true; - } - - public Set getVisionTargets() { - var all = new HashSet(); - for (var entry : targetSets.entrySet()) { - all.addAll(entry.getValue()); - } - return all; - } - - public Set getVisionTargets(String type) { - return targetSets.get(type); - } - - /** - * Adds targets on the field which your vision system is designed to detect. The {@link - * PhotonCamera}s simulated from this system will report the location of the camera relative to - * the subset of these targets which are visible from the given camera position. - * - *

By default these are added under the type "targets". - * - * @param targets Targets to add to the simulated field - */ - public void addVisionTargets(VisionTargetSim... targets) { - addVisionTargets("targets", targets); + } + + public void clearVisionTargets() { + targetSets.clear(); + } + + public void clearAprilTags() { + removeVisionTargets("apriltag"); + } + + public Set removeVisionTargets(String type) { + return targetSets.remove(type); + } + + public Set removeVisionTargets(VisionTargetSim... targets) { + var removeList = List.of(targets); + var removedSet = new HashSet(); + for (var entry : targetSets.entrySet()) { + entry + .getValue() + .removeIf( + t -> { + if (removeList.contains(t)) { + removedSet.add(t); + return true; + } else return false; + }); } - - /** - * Adds targets on the field which your vision system is designed to detect. The {@link - * PhotonCamera}s simulated from this system will report the location of the camera relative to - * the subset of these targets which are visible from the given camera position. - * - *

The AprilTags from this layout will be added as vision targets under the type "apriltag". - * The poses added preserve the tag layout's current alliance origin. If the tag layout's alliance - * origin is changed, these added tags will have to be cleared and re-added. - * - * @param tagLayout The field tag layout to get Apriltag poses and IDs from - */ - public void addAprilTags(AprilTagFieldLayout tagLayout) { - for (AprilTag tag : tagLayout.getTags()) { - addVisionTargets( - "apriltag", - new VisionTargetSim( - tagLayout.getTagPose(tag.ID).get(), // preserve alliance rotation - TargetModel.kTag16h5, - tag.ID)); - } - } - - /** - * Adds targets on the field which your vision system is designed to detect. The {@link - * PhotonCamera}s simulated from this system will report the location of the camera relative to - * the subset of these targets which are visible from the given camera position. - * - * @param type Type of target (e.g. "cargo"). - * @param targets Targets to add to the simulated field - */ - public void addVisionTargets(String type, VisionTargetSim... targets) { - targetSets.computeIfAbsent(type, k -> new HashSet<>()); - for (var tgt : targets) { - targetSets.get(type).add(tgt); - } - } - - public void clearVisionTargets() { - targetSets.clear(); - } - - public void clearAprilTags() { - removeVisionTargets("apriltag"); - } - - public Set removeVisionTargets(String type) { - return targetSets.remove(type); - } - - public Set removeVisionTargets(VisionTargetSim... targets) { - var removeList = List.of(targets); - var removedSet = new HashSet(); - for (var entry : targetSets.entrySet()) { - entry - .getValue() - .removeIf( - t -> { - if (removeList.contains(t)) { - removedSet.add(t); - return true; - } else return false; - }); - } - return removedSet; - } - - /** Get the latest robot pose in meters saved by the vision system. 
*/ - public Pose3d getRobotPose() { - return getRobotPose(Timer.getFPGATimestamp()); - } - - /** - * Get the robot pose in meters saved by the vision system at this timestamp. - * - * @param timestamp Timestamp of the desired robot pose - */ - public Pose3d getRobotPose(double timestamp) { - return robotPoseBuffer.getSample(timestamp).orElse(new Pose3d()); - } - - /** Clears all previous robot poses and sets robotPose at current time. */ - public void resetRobotPose(Pose2d robotPose) { - resetRobotPose(new Pose3d(robotPose)); - } - - /** Clears all previous robot poses and sets robotPose at current time. */ - public void resetRobotPose(Pose3d robotPose) { - robotPoseBuffer.clear(); - robotPoseBuffer.addSample(Timer.getFPGATimestamp(), robotPose); - } - - public Field2d getDebugField() { - return dbgField; - } - - /** - * Periodic update. Ensure this is called repeatedly-- camera performance is used to automatically - * determine if a new frame should be submitted. - * - * @param robotPoseMeters The simulated robot pose in meters - */ - public void update(Pose2d robotPoseMeters) { - update(new Pose3d(robotPoseMeters)); - } - - /** - * Periodic update. Ensure this is called repeatedly-- camera performance is used to automatically - * determine if a new frame should be submitted. 
- * - * @param robotPoseMeters The simulated robot pose in meters - */ - public void update(Pose3d robotPoseMeters) { - var targetTypes = targetSets.entrySet(); - // update vision targets on field - targetTypes.forEach( - entry -> - dbgField - .getObject(entry.getKey()) - .setPoses( - entry.getValue().stream() - .map(t -> t.getPose().toPose2d()) - .collect(Collectors.toList()))); - - if (robotPoseMeters == null) return; - - // save "real" robot poses over time - double now = Timer.getFPGATimestamp(); - robotPoseBuffer.addSample(now, robotPoseMeters); - dbgField.setRobotPose(robotPoseMeters.toPose2d()); - - var allTargets = new ArrayList(); - targetTypes.forEach((entry) -> allTargets.addAll(entry.getValue())); - var visTgtPoses2d = new ArrayList(); - var cameraPoses2d = new ArrayList(); - boolean processed = false; - // process each camera - for (var camSim : camSimMap.values()) { - // check if this camera is ready to process and get latency - var optTimestamp = camSim.consumeNextEntryTime(); - if (optTimestamp.isEmpty()) continue; - else processed = true; - // when this result "was" read by NT - long timestampNT = optTimestamp.get(); - // this result's processing latency in milliseconds - double latencyMillis = camSim.prop.estLatencyMs(); - // the image capture timestamp in seconds of this result - double timestampCapture = timestampNT / 1e6 - latencyMillis / 1e3; - - // use camera pose from the image capture timestamp - Pose3d lateRobotPose = getRobotPose(timestampCapture); - Pose3d lateCameraPose = lateRobotPose.plus(getRobotToCamera(camSim, timestampCapture).get()); - cameraPoses2d.add(lateCameraPose.toPose2d()); - - // process a PhotonPipelineResult with visible targets - var camResult = camSim.process(latencyMillis, lateCameraPose, allTargets); - // publish this info to NT at estimated timestamp of receive - camSim.submitProcessedFrame(camResult, timestampNT); - // display debug results - for (var target : camResult.getTargets()) { - var trf = 
target.getBestCameraToTarget(); - if (trf.equals(kEmptyTrf)) continue; - visTgtPoses2d.add(lateCameraPose.transformBy(trf).toPose2d()); - } - } - if (processed) dbgField.getObject("visibleTargetPoses").setPoses(visTgtPoses2d); - if (cameraPoses2d.size() != 0) dbgField.getObject("cameras").setPoses(cameraPoses2d); + return removedSet; + } + + /** Get the latest robot pose in meters saved by the vision system. */ + public Pose3d getRobotPose() { + return getRobotPose(Timer.getFPGATimestamp()); + } + + /** + * Get the robot pose in meters saved by the vision system at this timestamp. + * + * @param timestamp Timestamp of the desired robot pose + */ + public Pose3d getRobotPose(double timestamp) { + return robotPoseBuffer.getSample(timestamp).orElse(new Pose3d()); + } + + /** Clears all previous robot poses and sets robotPose at current time. */ + public void resetRobotPose(Pose2d robotPose) { + resetRobotPose(new Pose3d(robotPose)); + } + + /** Clears all previous robot poses and sets robotPose at current time. */ + public void resetRobotPose(Pose3d robotPose) { + robotPoseBuffer.clear(); + robotPoseBuffer.addSample(Timer.getFPGATimestamp(), robotPose); + } + + public Field2d getDebugField() { + return dbgField; + } + + /** + * Periodic update. Ensure this is called repeatedly-- camera performance is used to automatically + * determine if a new frame should be submitted. + * + * @param robotPoseMeters The simulated robot pose in meters + */ + public void update(Pose2d robotPoseMeters) { + update(new Pose3d(robotPoseMeters)); + } + + /** + * Periodic update. Ensure this is called repeatedly-- camera performance is used to automatically + * determine if a new frame should be submitted. 
+ * + * @param robotPoseMeters The simulated robot pose in meters + */ + public void update(Pose3d robotPoseMeters) { + var targetTypes = targetSets.entrySet(); + // update vision targets on field + targetTypes.forEach( + entry -> + dbgField + .getObject(entry.getKey()) + .setPoses( + entry.getValue().stream() + .map(t -> t.getPose().toPose2d()) + .collect(Collectors.toList()))); + + if (robotPoseMeters == null) return; + + // save "real" robot poses over time + double now = Timer.getFPGATimestamp(); + robotPoseBuffer.addSample(now, robotPoseMeters); + dbgField.setRobotPose(robotPoseMeters.toPose2d()); + + var allTargets = new ArrayList(); + targetTypes.forEach((entry) -> allTargets.addAll(entry.getValue())); + var visTgtPoses2d = new ArrayList(); + var cameraPoses2d = new ArrayList(); + boolean processed = false; + // process each camera + for (var camSim : camSimMap.values()) { + // check if this camera is ready to process and get latency + var optTimestamp = camSim.consumeNextEntryTime(); + if (optTimestamp.isEmpty()) continue; + else processed = true; + // when this result "was" read by NT + long timestampNT = optTimestamp.get(); + // this result's processing latency in milliseconds + double latencyMillis = camSim.prop.estLatencyMs(); + // the image capture timestamp in seconds of this result + double timestampCapture = timestampNT / 1e6 - latencyMillis / 1e3; + + // use camera pose from the image capture timestamp + Pose3d lateRobotPose = getRobotPose(timestampCapture); + Pose3d lateCameraPose = lateRobotPose.plus(getRobotToCamera(camSim, timestampCapture).get()); + cameraPoses2d.add(lateCameraPose.toPose2d()); + + // process a PhotonPipelineResult with visible targets + var camResult = camSim.process(latencyMillis, lateCameraPose, allTargets); + // publish this info to NT at estimated timestamp of receive + camSim.submitProcessedFrame(camResult, timestampNT); + // display debug results + for (var target : camResult.getTargets()) { + var trf = 
target.getBestCameraToTarget(); + if (trf.equals(kEmptyTrf)) continue; + visTgtPoses2d.add(lateCameraPose.transformBy(trf).toPose2d()); + } } + if (processed) dbgField.getObject("visibleTargetPoses").setPoses(visTgtPoses2d); + if (cameraPoses2d.size() != 0) dbgField.getObject("cameras").setPoses(cameraPoses2d); + } } diff --git a/photon-lib/src/main/java/org/photonvision/simulation/VisionTargetSim.java b/photon-lib/src/main/java/org/photonvision/simulation/VisionTargetSim.java index 88d19756c3..2ca98ccbd1 100644 --- a/photon-lib/src/main/java/org/photonvision/simulation/VisionTargetSim.java +++ b/photon-lib/src/main/java/org/photonvision/simulation/VisionTargetSim.java @@ -31,64 +31,64 @@ /** Describes a vision target located somewhere on the field that your vision system can detect. */ public class VisionTargetSim { - private Pose3d pose; - private TargetModel model; + private Pose3d pose; + private TargetModel model; - public final int fiducialID; + public final int fiducialID; - /** - * Describes a vision target located somewhere on the field that your vision system can detect. - * - * @param pose Pose3d of the tag in field-relative coordinates - * @param model TargetModel which describes the shape of the target - */ - public VisionTargetSim(Pose3d pose, TargetModel model) { - this.pose = pose; - this.model = model; - this.fiducialID = -1; - } + /** + * Describes a vision target located somewhere on the field that your vision system can detect. + * + * @param pose Pose3d of the tag in field-relative coordinates + * @param model TargetModel which describes the shape of the target + */ + public VisionTargetSim(Pose3d pose, TargetModel model) { + this.pose = pose; + this.model = model; + this.fiducialID = -1; + } - /** - * Describes a fiducial tag located somewhere on the field that your vision system can detect. 
- * - * @param pose Pose3d of the tag in field-relative coordinates - * @param model TargetModel which describes the shape of the target(tag) - * @param id The ID of this fiducial tag - */ - public VisionTargetSim(Pose3d pose, TargetModel model, int id) { - this.pose = pose; - this.model = model; - this.fiducialID = id; - } + /** + * Describes a fiducial tag located somewhere on the field that your vision system can detect. + * + * @param pose Pose3d of the tag in field-relative coordinates + * @param model TargetModel which describes the shape of the target(tag) + * @param id The ID of this fiducial tag + */ + public VisionTargetSim(Pose3d pose, TargetModel model, int id) { + this.pose = pose; + this.model = model; + this.fiducialID = id; + } - public void setPose(Pose3d pose) { - this.pose = pose; - } + public void setPose(Pose3d pose) { + this.pose = pose; + } - public void setModel(TargetModel model) { - this.model = model; - } + public void setModel(TargetModel model) { + this.model = model; + } - public Pose3d getPose() { - return pose; - } + public Pose3d getPose() { + return pose; + } - public TargetModel getModel() { - return model; - } + public TargetModel getModel() { + return model; + } - /** This target's vertices offset from its field pose. */ - public List getFieldVertices() { - return model.getFieldVertices(pose); - } + /** This target's vertices offset from its field pose. 
*/ + public List getFieldVertices() { + return model.getFieldVertices(pose); + } - @Override - public boolean equals(Object obj) { - if (this == obj) return true; - if (obj instanceof VisionTargetSim) { - var o = (VisionTargetSim) obj; - return pose.equals(o.pose) && model.equals(o.model); - } - return false; + @Override + public boolean equals(Object obj) { + if (this == obj) return true; + if (obj instanceof VisionTargetSim) { + var o = (VisionTargetSim) obj; + return pose.equals(o.pose) && model.equals(o.model); } + return false; + } } diff --git a/photon-lib/src/test/java/org/photonvision/OpenCVTest.java b/photon-lib/src/test/java/org/photonvision/OpenCVTest.java index e483833111..98be84b677 100644 --- a/photon-lib/src/test/java/org/photonvision/OpenCVTest.java +++ b/photon-lib/src/test/java/org/photonvision/OpenCVTest.java @@ -55,200 +55,200 @@ import org.photonvision.simulation.VisionTargetSim; public class OpenCVTest { - private static final double kTrlDelta = 0.005; - private static final double kRotDeltaDeg = 0.25; - - public static void assertSame(Translation3d trl1, Translation3d trl2) { - assertEquals(0, trl1.getX() - trl2.getX(), kTrlDelta, "Trl X Diff"); - assertEquals(0, trl1.getY() - trl2.getY(), kTrlDelta, "Trl Y Diff"); - assertEquals(0, trl1.getZ() - trl2.getZ(), kTrlDelta, "Trl Z Diff"); - } - - public static void assertSame(Rotation3d rot1, Rotation3d rot2) { - assertEquals(0, MathUtil.angleModulus(rot1.getX() - rot2.getX()), kRotDeltaDeg, "Rot X Diff"); - assertEquals(0, MathUtil.angleModulus(rot1.getY() - rot2.getY()), kRotDeltaDeg, "Rot Y Diff"); - assertEquals(0, MathUtil.angleModulus(rot1.getZ() - rot2.getZ()), kRotDeltaDeg, "Rot Z Diff"); - assertEquals( - 0, MathUtil.angleModulus(rot1.getAngle() - rot2.getAngle()), kRotDeltaDeg, "Rot W Diff"); - } - - public static void assertSame(Pose3d pose1, Pose3d pose2) { - assertSame(pose1.getTranslation(), pose2.getTranslation()); - assertSame(pose1.getRotation(), pose2.getRotation()); - } - - 
public static void assertSame(Transform3d trf1, Transform3d trf2) { - assertSame(trf1.getTranslation(), trf2.getTranslation()); - assertSame(trf1.getRotation(), trf2.getRotation()); - } - - private static final SimCameraProperties prop = new SimCameraProperties(); - - @BeforeAll - public static void setUp() { - JNIWrapper.Helper.setExtractOnStaticLoad(false); - WPIUtilJNI.Helper.setExtractOnStaticLoad(false); - NetworkTablesJNI.Helper.setExtractOnStaticLoad(false); - WPINetJNI.Helper.setExtractOnStaticLoad(false); - CameraServerJNI.Helper.setExtractOnStaticLoad(false); - CameraServerCvJNI.Helper.setExtractOnStaticLoad(false); - AprilTagJNI.Helper.setExtractOnStaticLoad(false); - - try { - CombinedRuntimeLoader.loadLibraries( - VisionSystemSim.class, - "wpiutiljni", - "ntcorejni", - "wpinetjni", - "wpiHaljni", - "cscorejni", - "cscorejnicvstatic"); - - var loader = - new RuntimeLoader<>( - Core.NATIVE_LIBRARY_NAME, RuntimeLoader.getDefaultExtractionRoot(), Core.class); - loader.loadLibrary(); - } catch (Exception e) { - e.printStackTrace(); - } - - // NT live for debug purposes - NetworkTableInstance.getDefault().startServer(); - - // No version check for testing - PhotonCamera.setVersionCheckEnabled(false); - } - - @Test - public void testTrlConvert() { - var trl = new Translation3d(0.75, -0.4, 0.1); - var tvec = OpenCVHelp.translationToTvec(trl); - var result = OpenCVHelp.tvecToTranslation(tvec); - tvec.release(); - assertSame(trl, result); - } - - @Test - public void testRotConvert() { - var rot = new Rotation3d(0.5, 1, -1); - var rvec = OpenCVHelp.rotationToRvec(rot); - var result = OpenCVHelp.rvecToRotation(rvec); - rvec.release(); - var diff = result.minus(rot); - assertSame(new Rotation3d(), diff); - } - - @Test - public void testProjection() { - var target = - new VisionTargetSim( - new Pose3d(1, 0, 0, new Rotation3d(0, 0, Math.PI)), TargetModel.kTag16h5, 0); - var cameraPose = new Pose3d(0, 0, 0, new Rotation3d()); - var camRt = 
RotTrlTransform3d.makeRelativeTo(cameraPose); - var imagePoints = - OpenCVHelp.projectPoints( - prop.getIntrinsics(), prop.getDistCoeffs(), camRt, target.getFieldVertices()); - - // find circulation (counter/clockwise-ness) - double circulation = 0; - for (int i = 0; i < imagePoints.length; i++) { - double xDiff = imagePoints[(i + 1) % 4].x - imagePoints[i].x; - double ySum = imagePoints[(i + 1) % 4].y + imagePoints[i].y; - circulation += xDiff * ySum; - } - assertTrue(circulation > 0, "2d fiducial points aren't counter-clockwise"); - - // undo projection distortion - imagePoints = - OpenCVHelp.undistortPoints(prop.getIntrinsics(), prop.getDistCoeffs(), imagePoints); - - // test projection results after moving camera - var avgCenterRot1 = prop.getPixelRot(OpenCVHelp.avgPoint(imagePoints)); - cameraPose = - cameraPose.plus(new Transform3d(new Translation3d(), new Rotation3d(0, 0.25, 0.25))); - camRt = RotTrlTransform3d.makeRelativeTo(cameraPose); - imagePoints = - OpenCVHelp.projectPoints( - prop.getIntrinsics(), prop.getDistCoeffs(), camRt, target.getFieldVertices()); - var avgCenterRot2 = prop.getPixelRot(OpenCVHelp.avgPoint(imagePoints)); - - var yaw2d = new Rotation2d(avgCenterRot2.getZ()); - var pitch2d = new Rotation2d(avgCenterRot2.getY()); - var yawDiff = yaw2d.minus(new Rotation2d(avgCenterRot1.getZ())); - var pitchDiff = pitch2d.minus(new Rotation2d(avgCenterRot1.getY())); - assertTrue(yawDiff.getRadians() < 0, "2d points don't follow yaw"); - assertTrue(pitchDiff.getRadians() < 0, "2d points don't follow pitch"); - - var actualRelation = new CameraTargetRelation(cameraPose, target.getPose()); - assertEquals( - actualRelation.camToTargPitch.getDegrees(), - pitchDiff.getDegrees() - * Math.cos(yaw2d.getRadians()), // adjust for unaccounted perpsective distortion - kRotDeltaDeg, - "2d pitch doesn't match 3d"); - assertEquals( - actualRelation.camToTargYaw.getDegrees(), - yawDiff.getDegrees(), - kRotDeltaDeg, - "2d yaw doesn't match 3d"); - } - - @Test - 
public void testSolvePNP_SQUARE() { - // square AprilTag target - var target = - new VisionTargetSim( - new Pose3d(5, 0.5, 1, new Rotation3d(0, 0, Math.PI)), TargetModel.kTag16h5, 0); - var cameraPose = new Pose3d(0, 0, 0, new Rotation3d()); - var camRt = RotTrlTransform3d.makeRelativeTo(cameraPose); - // target relative to camera - var relTarget = camRt.apply(target.getPose()); - - // simulate solvePNP estimation - var targetCorners = - OpenCVHelp.projectPoints( - prop.getIntrinsics(), prop.getDistCoeffs(), camRt, target.getFieldVertices()); - var pnpSim = - OpenCVHelp.solvePNP_SQUARE( - prop.getIntrinsics(), prop.getDistCoeffs(), target.getModel().vertices, targetCorners); - - // check solvePNP estimation accuracy - assertSame(relTarget.getRotation(), pnpSim.best.getRotation()); - assertSame(relTarget.getTranslation(), pnpSim.best.getTranslation()); + private static final double kTrlDelta = 0.005; + private static final double kRotDeltaDeg = 0.25; + + public static void assertSame(Translation3d trl1, Translation3d trl2) { + assertEquals(0, trl1.getX() - trl2.getX(), kTrlDelta, "Trl X Diff"); + assertEquals(0, trl1.getY() - trl2.getY(), kTrlDelta, "Trl Y Diff"); + assertEquals(0, trl1.getZ() - trl2.getZ(), kTrlDelta, "Trl Z Diff"); + } + + public static void assertSame(Rotation3d rot1, Rotation3d rot2) { + assertEquals(0, MathUtil.angleModulus(rot1.getX() - rot2.getX()), kRotDeltaDeg, "Rot X Diff"); + assertEquals(0, MathUtil.angleModulus(rot1.getY() - rot2.getY()), kRotDeltaDeg, "Rot Y Diff"); + assertEquals(0, MathUtil.angleModulus(rot1.getZ() - rot2.getZ()), kRotDeltaDeg, "Rot Z Diff"); + assertEquals( + 0, MathUtil.angleModulus(rot1.getAngle() - rot2.getAngle()), kRotDeltaDeg, "Rot W Diff"); + } + + public static void assertSame(Pose3d pose1, Pose3d pose2) { + assertSame(pose1.getTranslation(), pose2.getTranslation()); + assertSame(pose1.getRotation(), pose2.getRotation()); + } + + public static void assertSame(Transform3d trf1, Transform3d trf2) { + 
assertSame(trf1.getTranslation(), trf2.getTranslation()); + assertSame(trf1.getRotation(), trf2.getRotation()); + } + + private static final SimCameraProperties prop = new SimCameraProperties(); + + @BeforeAll + public static void setUp() { + JNIWrapper.Helper.setExtractOnStaticLoad(false); + WPIUtilJNI.Helper.setExtractOnStaticLoad(false); + NetworkTablesJNI.Helper.setExtractOnStaticLoad(false); + WPINetJNI.Helper.setExtractOnStaticLoad(false); + CameraServerJNI.Helper.setExtractOnStaticLoad(false); + CameraServerCvJNI.Helper.setExtractOnStaticLoad(false); + AprilTagJNI.Helper.setExtractOnStaticLoad(false); + + try { + CombinedRuntimeLoader.loadLibraries( + VisionSystemSim.class, + "wpiutiljni", + "ntcorejni", + "wpinetjni", + "wpiHaljni", + "cscorejni", + "cscorejnicvstatic"); + + var loader = + new RuntimeLoader<>( + Core.NATIVE_LIBRARY_NAME, RuntimeLoader.getDefaultExtractionRoot(), Core.class); + loader.loadLibrary(); + } catch (Exception e) { + e.printStackTrace(); } - @Test - public void testSolvePNP_SQPNP() { - // (for targets with arbitrary number of non-colinear points > 2) - var target = - new VisionTargetSim( - new Pose3d(5, 0.5, 1, new Rotation3d(0, 0, Math.PI)), - new TargetModel( - List.of( - new Translation3d(0, 0, 0), - new Translation3d(1, 0, 0), - new Translation3d(0, 1, 0), - new Translation3d(0, 0, 1), - new Translation3d(0.125, 0.25, 0.5), - new Translation3d(0, 0, -1), - new Translation3d(0, -1, 0), - new Translation3d(-1, 0, 0))), - 0); - var cameraPose = new Pose3d(0, 0, 0, new Rotation3d()); - var camRt = RotTrlTransform3d.makeRelativeTo(cameraPose); - // target relative to camera - var relTarget = camRt.apply(target.getPose()); - - // simulate solvePNP estimation - var targetCorners = - OpenCVHelp.projectPoints( - prop.getIntrinsics(), prop.getDistCoeffs(), camRt, target.getFieldVertices()); - var pnpSim = - OpenCVHelp.solvePNP_SQPNP( - prop.getIntrinsics(), prop.getDistCoeffs(), target.getModel().vertices, targetCorners); - - // check 
solvePNP estimation accuracy - assertSame(relTarget.getRotation(), pnpSim.best.getRotation()); - assertSame(relTarget.getTranslation(), pnpSim.best.getTranslation()); + // NT live for debug purposes + NetworkTableInstance.getDefault().startServer(); + + // No version check for testing + PhotonCamera.setVersionCheckEnabled(false); + } + + @Test + public void testTrlConvert() { + var trl = new Translation3d(0.75, -0.4, 0.1); + var tvec = OpenCVHelp.translationToTvec(trl); + var result = OpenCVHelp.tvecToTranslation(tvec); + tvec.release(); + assertSame(trl, result); + } + + @Test + public void testRotConvert() { + var rot = new Rotation3d(0.5, 1, -1); + var rvec = OpenCVHelp.rotationToRvec(rot); + var result = OpenCVHelp.rvecToRotation(rvec); + rvec.release(); + var diff = result.minus(rot); + assertSame(new Rotation3d(), diff); + } + + @Test + public void testProjection() { + var target = + new VisionTargetSim( + new Pose3d(1, 0, 0, new Rotation3d(0, 0, Math.PI)), TargetModel.kTag16h5, 0); + var cameraPose = new Pose3d(0, 0, 0, new Rotation3d()); + var camRt = RotTrlTransform3d.makeRelativeTo(cameraPose); + var imagePoints = + OpenCVHelp.projectPoints( + prop.getIntrinsics(), prop.getDistCoeffs(), camRt, target.getFieldVertices()); + + // find circulation (counter/clockwise-ness) + double circulation = 0; + for (int i = 0; i < imagePoints.length; i++) { + double xDiff = imagePoints[(i + 1) % 4].x - imagePoints[i].x; + double ySum = imagePoints[(i + 1) % 4].y + imagePoints[i].y; + circulation += xDiff * ySum; } + assertTrue(circulation > 0, "2d fiducial points aren't counter-clockwise"); + + // undo projection distortion + imagePoints = + OpenCVHelp.undistortPoints(prop.getIntrinsics(), prop.getDistCoeffs(), imagePoints); + + // test projection results after moving camera + var avgCenterRot1 = prop.getPixelRot(OpenCVHelp.avgPoint(imagePoints)); + cameraPose = + cameraPose.plus(new Transform3d(new Translation3d(), new Rotation3d(0, 0.25, 0.25))); + camRt = 
RotTrlTransform3d.makeRelativeTo(cameraPose); + imagePoints = + OpenCVHelp.projectPoints( + prop.getIntrinsics(), prop.getDistCoeffs(), camRt, target.getFieldVertices()); + var avgCenterRot2 = prop.getPixelRot(OpenCVHelp.avgPoint(imagePoints)); + + var yaw2d = new Rotation2d(avgCenterRot2.getZ()); + var pitch2d = new Rotation2d(avgCenterRot2.getY()); + var yawDiff = yaw2d.minus(new Rotation2d(avgCenterRot1.getZ())); + var pitchDiff = pitch2d.minus(new Rotation2d(avgCenterRot1.getY())); + assertTrue(yawDiff.getRadians() < 0, "2d points don't follow yaw"); + assertTrue(pitchDiff.getRadians() < 0, "2d points don't follow pitch"); + + var actualRelation = new CameraTargetRelation(cameraPose, target.getPose()); + assertEquals( + actualRelation.camToTargPitch.getDegrees(), + pitchDiff.getDegrees() + * Math.cos(yaw2d.getRadians()), // adjust for unaccounted perpsective distortion + kRotDeltaDeg, + "2d pitch doesn't match 3d"); + assertEquals( + actualRelation.camToTargYaw.getDegrees(), + yawDiff.getDegrees(), + kRotDeltaDeg, + "2d yaw doesn't match 3d"); + } + + @Test + public void testSolvePNP_SQUARE() { + // square AprilTag target + var target = + new VisionTargetSim( + new Pose3d(5, 0.5, 1, new Rotation3d(0, 0, Math.PI)), TargetModel.kTag16h5, 0); + var cameraPose = new Pose3d(0, 0, 0, new Rotation3d()); + var camRt = RotTrlTransform3d.makeRelativeTo(cameraPose); + // target relative to camera + var relTarget = camRt.apply(target.getPose()); + + // simulate solvePNP estimation + var targetCorners = + OpenCVHelp.projectPoints( + prop.getIntrinsics(), prop.getDistCoeffs(), camRt, target.getFieldVertices()); + var pnpSim = + OpenCVHelp.solvePNP_SQUARE( + prop.getIntrinsics(), prop.getDistCoeffs(), target.getModel().vertices, targetCorners); + + // check solvePNP estimation accuracy + assertSame(relTarget.getRotation(), pnpSim.best.getRotation()); + assertSame(relTarget.getTranslation(), pnpSim.best.getTranslation()); + } + + @Test + public void testSolvePNP_SQPNP() { + // 
(for targets with arbitrary number of non-colinear points > 2) + var target = + new VisionTargetSim( + new Pose3d(5, 0.5, 1, new Rotation3d(0, 0, Math.PI)), + new TargetModel( + List.of( + new Translation3d(0, 0, 0), + new Translation3d(1, 0, 0), + new Translation3d(0, 1, 0), + new Translation3d(0, 0, 1), + new Translation3d(0.125, 0.25, 0.5), + new Translation3d(0, 0, -1), + new Translation3d(0, -1, 0), + new Translation3d(-1, 0, 0))), + 0); + var cameraPose = new Pose3d(0, 0, 0, new Rotation3d()); + var camRt = RotTrlTransform3d.makeRelativeTo(cameraPose); + // target relative to camera + var relTarget = camRt.apply(target.getPose()); + + // simulate solvePNP estimation + var targetCorners = + OpenCVHelp.projectPoints( + prop.getIntrinsics(), prop.getDistCoeffs(), camRt, target.getFieldVertices()); + var pnpSim = + OpenCVHelp.solvePNP_SQPNP( + prop.getIntrinsics(), prop.getDistCoeffs(), target.getModel().vertices, targetCorners); + + // check solvePNP estimation accuracy + assertSame(relTarget.getRotation(), pnpSim.best.getRotation()); + assertSame(relTarget.getTranslation(), pnpSim.best.getTranslation()); + } } diff --git a/photon-lib/src/test/java/org/photonvision/PacketTest.java b/photon-lib/src/test/java/org/photonvision/PacketTest.java index c29a629b5b..de2a0fb46e 100644 --- a/photon-lib/src/test/java/org/photonvision/PacketTest.java +++ b/photon-lib/src/test/java/org/photonvision/PacketTest.java @@ -37,154 +37,154 @@ import org.photonvision.targeting.TargetCorner; class PacketTest { - @Test - void testSimpleTrackedTarget() { - var target = - new PhotonTrackedTarget( - 3.0, - 4.0, - 9.0, - -5.0, - -1, - new Transform3d(new Translation3d(), new Rotation3d()), - new Transform3d(new Translation3d(), new Rotation3d()), - 0.25, - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8)), - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8))); - var 
p = new Packet(PhotonTrackedTarget.PACK_SIZE_BYTES); - target.populatePacket(p); + @Test + void testSimpleTrackedTarget() { + var target = + new PhotonTrackedTarget( + 3.0, + 4.0, + 9.0, + -5.0, + -1, + new Transform3d(new Translation3d(), new Rotation3d()), + new Transform3d(new Translation3d(), new Rotation3d()), + 0.25, + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8)), + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8))); + var p = new Packet(PhotonTrackedTarget.PACK_SIZE_BYTES); + target.populatePacket(p); - var b = new PhotonTrackedTarget(); - b.createFromPacket(p); + var b = new PhotonTrackedTarget(); + b.createFromPacket(p); - Assertions.assertEquals(target, b); - } + Assertions.assertEquals(target, b); + } - @Test - void testSimplePipelineResult() { - var result = new PhotonPipelineResult(1, new ArrayList<>()); - var p = new Packet(result.getPacketSize()); - result.populatePacket(p); + @Test + void testSimplePipelineResult() { + var result = new PhotonPipelineResult(1, new ArrayList<>()); + var p = new Packet(result.getPacketSize()); + result.populatePacket(p); - var b = new PhotonPipelineResult(); - b.createFromPacket(p); + var b = new PhotonPipelineResult(); + b.createFromPacket(p); - Assertions.assertEquals(result, b); + Assertions.assertEquals(result, b); - var result2 = - new PhotonPipelineResult( - 2, - List.of( - new PhotonTrackedTarget( - 3.0, - -4.0, - 9.0, - 4.0, - 2, - new Transform3d(new Translation3d(1, 2, 3), new Rotation3d(1, 2, 3)), - new Transform3d(new Translation3d(1, 2, 3), new Rotation3d(1, 2, 3)), - 0.25, - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8)), - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8))), - new PhotonTrackedTarget( - 3.0, - -4.0, - 9.1, - 6.7, - 3, - new 
Transform3d(new Translation3d(4, 2, 3), new Rotation3d(1, 5, 3)), - new Transform3d(new Translation3d(4, 2, 3), new Rotation3d(1, 5, 3)), - 0.25, - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8)), - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8))))); - var p2 = new Packet(result2.getPacketSize()); - result2.populatePacket(p2); + var result2 = + new PhotonPipelineResult( + 2, + List.of( + new PhotonTrackedTarget( + 3.0, + -4.0, + 9.0, + 4.0, + 2, + new Transform3d(new Translation3d(1, 2, 3), new Rotation3d(1, 2, 3)), + new Transform3d(new Translation3d(1, 2, 3), new Rotation3d(1, 2, 3)), + 0.25, + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8)), + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8))), + new PhotonTrackedTarget( + 3.0, + -4.0, + 9.1, + 6.7, + 3, + new Transform3d(new Translation3d(4, 2, 3), new Rotation3d(1, 5, 3)), + new Transform3d(new Translation3d(4, 2, 3), new Rotation3d(1, 5, 3)), + 0.25, + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8)), + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8))))); + var p2 = new Packet(result2.getPacketSize()); + result2.populatePacket(p2); - var b2 = new PhotonPipelineResult(); - b2.createFromPacket(p2); + var b2 = new PhotonPipelineResult(); + b2.createFromPacket(p2); - Assertions.assertEquals(result2, b2); - } + Assertions.assertEquals(result2, b2); + } - @Test - public void testMultiTargetSerde() { - var result = - new PhotonPipelineResult( - 2, - List.of( - new PhotonTrackedTarget( - 3.0, - -4.0, - 9.0, - 4.0, - 2, - new Transform3d(new Translation3d(1, 2, 3), new Rotation3d(1, 2, 3)), - new Transform3d(new Translation3d(1, 2, 3), new 
Rotation3d(1, 2, 3)), - 0.25, - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8)), - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8))), - new PhotonTrackedTarget( - 3.0, - -4.0, - 9.1, - 6.7, - 3, - new Transform3d(new Translation3d(4, 2, 3), new Rotation3d(1, 5, 3)), - new Transform3d(new Translation3d(4, 2, 3), new Rotation3d(1, 5, 3)), - 0.25, - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8)), - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8)))), - new MultiTargetPNPResults( - new PNPResults( - new Transform3d(new Translation3d(1, 2, 3), new Rotation3d(1, 2, 3)), 0.1), - List.of(1, 2, 3))); + @Test + public void testMultiTargetSerde() { + var result = + new PhotonPipelineResult( + 2, + List.of( + new PhotonTrackedTarget( + 3.0, + -4.0, + 9.0, + 4.0, + 2, + new Transform3d(new Translation3d(1, 2, 3), new Rotation3d(1, 2, 3)), + new Transform3d(new Translation3d(1, 2, 3), new Rotation3d(1, 2, 3)), + 0.25, + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8)), + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8))), + new PhotonTrackedTarget( + 3.0, + -4.0, + 9.1, + 6.7, + 3, + new Transform3d(new Translation3d(4, 2, 3), new Rotation3d(1, 5, 3)), + new Transform3d(new Translation3d(4, 2, 3), new Rotation3d(1, 5, 3)), + 0.25, + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8)), + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8)))), + new MultiTargetPNPResults( + new PNPResults( + new Transform3d(new Translation3d(1, 2, 3), new Rotation3d(1, 2, 3)), 0.1), + List.of(1, 2, 
3))); - Packet packet = new Packet(result.getPacketSize()); - result.populatePacket(packet); + Packet packet = new Packet(result.getPacketSize()); + result.populatePacket(packet); - var result_deserialized = new PhotonPipelineResult(); - result_deserialized.createFromPacket(packet); + var result_deserialized = new PhotonPipelineResult(); + result_deserialized.createFromPacket(packet); - Assertions.assertEquals(result, result_deserialized); - } + Assertions.assertEquals(result, result_deserialized); + } } diff --git a/photon-lib/src/test/java/org/photonvision/PhotonCameraTest.java b/photon-lib/src/test/java/org/photonvision/PhotonCameraTest.java index 8a3fb5fad8..52533b6c74 100644 --- a/photon-lib/src/test/java/org/photonvision/PhotonCameraTest.java +++ b/photon-lib/src/test/java/org/photonvision/PhotonCameraTest.java @@ -30,17 +30,17 @@ import org.photonvision.targeting.PhotonPipelineResult; class PhotonCameraTest { - @Test - public void testEmpty() { - Assertions.assertDoesNotThrow( - () -> { - var packet = new Packet(1); - var ret = new PhotonPipelineResult(); - packet.setData(new byte[0]); - if (packet.getSize() < 1) { - return; - } - ret.createFromPacket(packet); - }); - } + @Test + public void testEmpty() { + Assertions.assertDoesNotThrow( + () -> { + var packet = new Packet(1); + var ret = new PhotonPipelineResult(); + packet.setData(new byte[0]); + if (packet.getSize() < 1) { + return; + } + ret.createFromPacket(packet); + }); + } } diff --git a/photon-lib/src/test/java/org/photonvision/PhotonPoseEstimatorTest.java b/photon-lib/src/test/java/org/photonvision/PhotonPoseEstimatorTest.java index 1b386cc747..0f3ab1fc80 100644 --- a/photon-lib/src/test/java/org/photonvision/PhotonPoseEstimatorTest.java +++ b/photon-lib/src/test/java/org/photonvision/PhotonPoseEstimatorTest.java @@ -52,583 +52,583 @@ import org.photonvision.targeting.TargetCorner; class PhotonPoseEstimatorTest { - static AprilTagFieldLayout aprilTags; - - @BeforeAll - public static void init() { - 
JNIWrapper.Helper.setExtractOnStaticLoad(false); - WPIUtilJNI.Helper.setExtractOnStaticLoad(false); - NetworkTablesJNI.Helper.setExtractOnStaticLoad(false); - WPINetJNI.Helper.setExtractOnStaticLoad(false); - - try { - CombinedRuntimeLoader.loadLibraries( - PhotonPoseEstimatorTest.class, "wpiutiljni", "ntcorejni", "wpinetjni", "wpiHaljni"); - } catch (IOException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - - List tagList = new ArrayList(2); - tagList.add(new AprilTag(0, new Pose3d(3, 3, 3, new Rotation3d()))); - tagList.add(new AprilTag(1, new Pose3d(5, 5, 5, new Rotation3d()))); - double fieldLength = Units.feetToMeters(54.0); - double fieldWidth = Units.feetToMeters(27.0); - aprilTags = new AprilTagFieldLayout(tagList, fieldLength, fieldWidth); + static AprilTagFieldLayout aprilTags; + + @BeforeAll + public static void init() { + JNIWrapper.Helper.setExtractOnStaticLoad(false); + WPIUtilJNI.Helper.setExtractOnStaticLoad(false); + NetworkTablesJNI.Helper.setExtractOnStaticLoad(false); + WPINetJNI.Helper.setExtractOnStaticLoad(false); + + try { + CombinedRuntimeLoader.loadLibraries( + PhotonPoseEstimatorTest.class, "wpiutiljni", "ntcorejni", "wpinetjni", "wpiHaljni"); + } catch (IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); } - @Test - void testLowestAmbiguityStrategy() { - PhotonCameraInjector cameraOne = new PhotonCameraInjector(); - cameraOne.result = - new PhotonPipelineResult( - 2, - List.of( - new PhotonTrackedTarget( - 3.0, - -4.0, - 9.0, - 4.0, - 0, - new Transform3d(new Translation3d(1, 2, 3), new Rotation3d(1, 2, 3)), - new Transform3d(new Translation3d(1, 2, 3), new Rotation3d(1, 2, 3)), - 0.7, - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8)), - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8))), - new PhotonTrackedTarget( - 3.0, - -4.0, - 9.1, - 6.7, - 1, - new 
Transform3d(new Translation3d(4, 2, 3), new Rotation3d(0, 0, 0)), - new Transform3d(new Translation3d(4, 2, 3), new Rotation3d(1, 5, 3)), - 0.3, - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8)), - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8))), - new PhotonTrackedTarget( - 9.0, - -2.0, - 19.0, - 3.0, - 0, - new Transform3d(new Translation3d(1, 2, 3), new Rotation3d(1, 2, 3)), - new Transform3d(new Translation3d(1, 2, 3), new Rotation3d(1, 2, 3)), - 0.4, - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8)), - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8))))); - cameraOne.result.setTimestampSeconds(11); - - PhotonPoseEstimator estimator = - new PhotonPoseEstimator( - aprilTags, PoseStrategy.LOWEST_AMBIGUITY, cameraOne, new Transform3d()); - - Optional estimatedPose = estimator.update(); - Pose3d pose = estimatedPose.get().estimatedPose; - - assertEquals(11, estimatedPose.get().timestampSeconds); - assertEquals(1, pose.getX(), .01); - assertEquals(3, pose.getY(), .01); - assertEquals(2, pose.getZ(), .01); + List tagList = new ArrayList(2); + tagList.add(new AprilTag(0, new Pose3d(3, 3, 3, new Rotation3d()))); + tagList.add(new AprilTag(1, new Pose3d(5, 5, 5, new Rotation3d()))); + double fieldLength = Units.feetToMeters(54.0); + double fieldWidth = Units.feetToMeters(27.0); + aprilTags = new AprilTagFieldLayout(tagList, fieldLength, fieldWidth); + } + + @Test + void testLowestAmbiguityStrategy() { + PhotonCameraInjector cameraOne = new PhotonCameraInjector(); + cameraOne.result = + new PhotonPipelineResult( + 2, + List.of( + new PhotonTrackedTarget( + 3.0, + -4.0, + 9.0, + 4.0, + 0, + new Transform3d(new Translation3d(1, 2, 3), new Rotation3d(1, 2, 3)), + new Transform3d(new Translation3d(1, 2, 3), new 
Rotation3d(1, 2, 3)), + 0.7, + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8)), + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8))), + new PhotonTrackedTarget( + 3.0, + -4.0, + 9.1, + 6.7, + 1, + new Transform3d(new Translation3d(4, 2, 3), new Rotation3d(0, 0, 0)), + new Transform3d(new Translation3d(4, 2, 3), new Rotation3d(1, 5, 3)), + 0.3, + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8)), + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8))), + new PhotonTrackedTarget( + 9.0, + -2.0, + 19.0, + 3.0, + 0, + new Transform3d(new Translation3d(1, 2, 3), new Rotation3d(1, 2, 3)), + new Transform3d(new Translation3d(1, 2, 3), new Rotation3d(1, 2, 3)), + 0.4, + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8)), + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8))))); + cameraOne.result.setTimestampSeconds(11); + + PhotonPoseEstimator estimator = + new PhotonPoseEstimator( + aprilTags, PoseStrategy.LOWEST_AMBIGUITY, cameraOne, new Transform3d()); + + Optional estimatedPose = estimator.update(); + Pose3d pose = estimatedPose.get().estimatedPose; + + assertEquals(11, estimatedPose.get().timestampSeconds); + assertEquals(1, pose.getX(), .01); + assertEquals(3, pose.getY(), .01); + assertEquals(2, pose.getZ(), .01); + } + + @Test + void testClosestToCameraHeightStrategy() { + PhotonCameraInjector cameraOne = new PhotonCameraInjector(); + cameraOne.result = + new PhotonPipelineResult( + 2, + List.of( + new PhotonTrackedTarget( + 3.0, + -4.0, + 9.0, + 4.0, + 1, + new Transform3d(new Translation3d(0, 0, 0), new Rotation3d()), + new Transform3d(new Translation3d(1, 1, 1), new Rotation3d()), + 0.7, + 
List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8)), + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8))), + new PhotonTrackedTarget( + 3.0, + -4.0, + 9.1, + 6.7, + 1, + new Transform3d(new Translation3d(2, 2, 2), new Rotation3d()), + new Transform3d(new Translation3d(3, 3, 3), new Rotation3d()), + 0.3, + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8)), + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8))), + new PhotonTrackedTarget( + 9.0, + -2.0, + 19.0, + 3.0, + 0, + new Transform3d(new Translation3d(4, 4, 4), new Rotation3d()), + new Transform3d(new Translation3d(5, 5, 5), new Rotation3d()), + 0.4, + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8)), + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8))))); + + cameraOne.result.setTimestampSeconds(4); + + PhotonPoseEstimator estimator = + new PhotonPoseEstimator( + aprilTags, + PoseStrategy.CLOSEST_TO_CAMERA_HEIGHT, + cameraOne, + new Transform3d(new Translation3d(0, 0, 4), new Rotation3d())); + + Optional estimatedPose = estimator.update(); + Pose3d pose = estimatedPose.get().estimatedPose; + + assertEquals(4, estimatedPose.get().timestampSeconds); + assertEquals(4, pose.getX(), .01); + assertEquals(4, pose.getY(), .01); + assertEquals(0, pose.getZ(), .01); + } + + @Test + void closestToReferencePoseStrategy() { + PhotonCameraInjector cameraOne = new PhotonCameraInjector(); + cameraOne.result = + new PhotonPipelineResult( + 2, + List.of( + new PhotonTrackedTarget( + 3.0, + -4.0, + 9.0, + 4.0, + 1, + new Transform3d(new Translation3d(0, 0, 0), new Rotation3d()), + new Transform3d(new Translation3d(1, 1, 1), new Rotation3d()), + 0.7, + List.of( 
+ new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8)), + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8))), + new PhotonTrackedTarget( + 3.0, + -4.0, + 9.1, + 6.7, + 1, + new Transform3d(new Translation3d(2, 2, 2), new Rotation3d()), + new Transform3d(new Translation3d(3, 3, 3), new Rotation3d()), + 0.3, + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8)), + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8))), + new PhotonTrackedTarget( + 9.0, + -2.0, + 19.0, + 3.0, + 0, + new Transform3d(new Translation3d(2.2, 2.2, 2.2), new Rotation3d()), + new Transform3d(new Translation3d(2, 1.9, 2.1), new Rotation3d()), + 0.4, + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8)), + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8))))); + cameraOne.result.setTimestampSeconds(17); + + PhotonPoseEstimator estimator = + new PhotonPoseEstimator( + aprilTags, + PoseStrategy.CLOSEST_TO_REFERENCE_POSE, + cameraOne, + new Transform3d(new Translation3d(0, 0, 0), new Rotation3d())); + estimator.setReferencePose(new Pose3d(1, 1, 1, new Rotation3d())); + + Optional estimatedPose = estimator.update(); + Pose3d pose = estimatedPose.get().estimatedPose; + + assertEquals(17, estimatedPose.get().timestampSeconds); + assertEquals(1, pose.getX(), .01); + assertEquals(1.1, pose.getY(), .01); + assertEquals(.9, pose.getZ(), .01); + } + + @Test + void closestToLastPose() { + PhotonCameraInjector cameraOne = new PhotonCameraInjector(); + cameraOne.result = + new PhotonPipelineResult( + 2, + List.of( + new PhotonTrackedTarget( + 3.0, + -4.0, + 9.0, + 4.0, + 1, + new Transform3d(new Translation3d(0, 0, 0), new Rotation3d()), + new Transform3d(new 
Translation3d(1, 1, 1), new Rotation3d()), + 0.7, + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8)), + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8))), + new PhotonTrackedTarget( + 3.0, + -4.0, + 9.1, + 6.7, + 1, + new Transform3d(new Translation3d(2, 2, 2), new Rotation3d()), + new Transform3d(new Translation3d(3, 3, 3), new Rotation3d()), + 0.3, + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8)), + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8))), + new PhotonTrackedTarget( + 9.0, + -2.0, + 19.0, + 3.0, + 0, + new Transform3d(new Translation3d(2.2, 2.2, 2.2), new Rotation3d()), + new Transform3d(new Translation3d(2, 1.9, 2.1), new Rotation3d()), + 0.4, + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8)), + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8))))); + cameraOne.result.setTimestampSeconds(1); + + PhotonPoseEstimator estimator = + new PhotonPoseEstimator( + aprilTags, + PoseStrategy.CLOSEST_TO_LAST_POSE, + cameraOne, + new Transform3d(new Translation3d(0, 0, 0), new Rotation3d())); + + estimator.setLastPose(new Pose3d(1, 1, 1, new Rotation3d())); + + Optional estimatedPose = estimator.update(); + Pose3d pose = estimatedPose.get().estimatedPose; + + cameraOne.result = + new PhotonPipelineResult( + 2, + List.of( + new PhotonTrackedTarget( + 3.0, + -4.0, + 9.0, + 4.0, + 1, + new Transform3d(new Translation3d(0, 0, 0), new Rotation3d()), + new Transform3d(new Translation3d(1, 1, 1), new Rotation3d()), + 0.7, + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8)), + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 
4), + new TargetCorner(5, 6), + new TargetCorner(7, 8))), + new PhotonTrackedTarget( + 3.0, + -4.0, + 9.1, + 6.7, + 0, + new Transform3d(new Translation3d(2.1, 1.9, 2), new Rotation3d()), + new Transform3d(new Translation3d(3, 3, 3), new Rotation3d()), + 0.3, + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8)), + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8))), + new PhotonTrackedTarget( + 9.0, + -2.0, + 19.0, + 3.0, + 0, + new Transform3d(new Translation3d(2.4, 2.4, 2.2), new Rotation3d()), + new Transform3d(new Translation3d(2, 1, 2), new Rotation3d()), + 0.4, + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8)), + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8))))); + cameraOne.result.setTimestampSeconds(7); + + estimatedPose = estimator.update(); + pose = estimatedPose.get().estimatedPose; + + assertEquals(7, estimatedPose.get().timestampSeconds); + assertEquals(.9, pose.getX(), .01); + assertEquals(1.1, pose.getY(), .01); + assertEquals(1, pose.getZ(), .01); + } + + @Test + void cacheIsInvalidated() { + PhotonCameraInjector cameraOne = new PhotonCameraInjector(); + var result = + new PhotonPipelineResult( + 2, + List.of( + new PhotonTrackedTarget( + 3.0, + -4.0, + 9.0, + 4.0, + 0, + new Transform3d(new Translation3d(2, 2, 2), new Rotation3d()), + new Transform3d(new Translation3d(1, 1, 1), new Rotation3d()), + 0.7, + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8)), + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8))))); + result.setTimestampSeconds(20); + + PhotonPoseEstimator estimator = + new PhotonPoseEstimator( + aprilTags, + PoseStrategy.AVERAGE_BEST_TARGETS, + cameraOne, + new 
Transform3d(new Translation3d(0, 0, 0), new Rotation3d())); + + // Empty result, expect empty result + cameraOne.result = new PhotonPipelineResult(); + cameraOne.result.setTimestampSeconds(1); + Optional estimatedPose = estimator.update(); + assertFalse(estimatedPose.isPresent()); + + // Set actual result + cameraOne.result = result; + estimatedPose = estimator.update(); + assertTrue(estimatedPose.isPresent()); + assertEquals(20, estimatedPose.get().timestampSeconds, .01); + assertEquals(20, estimator.poseCacheTimestampSeconds); + + // And again -- pose cache should mean this is empty + cameraOne.result = result; + estimatedPose = estimator.update(); + assertFalse(estimatedPose.isPresent()); + // Expect the old timestamp to still be here + assertEquals(20, estimator.poseCacheTimestampSeconds); + + // Set new field layout -- right after, the pose cache timestamp should be -1 + estimator.setFieldTags(new AprilTagFieldLayout(List.of(new AprilTag(0, new Pose3d())), 0, 0)); + assertEquals(-1, estimator.poseCacheTimestampSeconds); + // Update should cache the current timestamp (20) again + cameraOne.result = result; + estimatedPose = estimator.update(); + assertEquals(20, estimatedPose.get().timestampSeconds, .01); + assertEquals(20, estimator.poseCacheTimestampSeconds); + } + + @Test + void averageBestPoses() { + PhotonCameraInjector cameraOne = new PhotonCameraInjector(); + cameraOne.result = + new PhotonPipelineResult( + 2, + List.of( + new PhotonTrackedTarget( + 3.0, + -4.0, + 9.0, + 4.0, + 0, + new Transform3d(new Translation3d(2, 2, 2), new Rotation3d()), + new Transform3d(new Translation3d(1, 1, 1), new Rotation3d()), + 0.7, + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8)), + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8))), // 1 1 1 ambig: .7 + new PhotonTrackedTarget( + 3.0, + -4.0, + 9.1, + 6.7, + 1, + new Transform3d(new 
Translation3d(3, 3, 3), new Rotation3d()), + new Transform3d(new Translation3d(3, 3, 3), new Rotation3d()), + 0.3, + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8)), + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8))), // 2 2 2 ambig .3 + new PhotonTrackedTarget( + 9.0, + -2.0, + 19.0, + 3.0, + 0, + new Transform3d(new Translation3d(0, 0, 0), new Rotation3d()), + new Transform3d(new Translation3d(2, 1.9, 2.1), new Rotation3d()), + 0.4, + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8)), + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8))))); // 3 3 3 ambig .4 + cameraOne.result.setTimestampSeconds(20); + + PhotonPoseEstimator estimator = + new PhotonPoseEstimator( + aprilTags, + PoseStrategy.AVERAGE_BEST_TARGETS, + cameraOne, + new Transform3d(new Translation3d(0, 0, 0), new Rotation3d())); + + Optional estimatedPose = estimator.update(); + Pose3d pose = estimatedPose.get().estimatedPose; + + assertEquals(20, estimatedPose.get().timestampSeconds, .01); + assertEquals(2.15, pose.getX(), .01); + assertEquals(2.15, pose.getY(), .01); + assertEquals(2.15, pose.getZ(), .01); + } + + private class PhotonCameraInjector extends PhotonCamera { + public PhotonCameraInjector() { + super("Test"); } - @Test - void testClosestToCameraHeightStrategy() { - PhotonCameraInjector cameraOne = new PhotonCameraInjector(); - cameraOne.result = - new PhotonPipelineResult( - 2, - List.of( - new PhotonTrackedTarget( - 3.0, - -4.0, - 9.0, - 4.0, - 1, - new Transform3d(new Translation3d(0, 0, 0), new Rotation3d()), - new Transform3d(new Translation3d(1, 1, 1), new Rotation3d()), - 0.7, - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8)), - List.of( - new TargetCorner(1, 2), - new 
TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8))), - new PhotonTrackedTarget( - 3.0, - -4.0, - 9.1, - 6.7, - 1, - new Transform3d(new Translation3d(2, 2, 2), new Rotation3d()), - new Transform3d(new Translation3d(3, 3, 3), new Rotation3d()), - 0.3, - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8)), - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8))), - new PhotonTrackedTarget( - 9.0, - -2.0, - 19.0, - 3.0, - 0, - new Transform3d(new Translation3d(4, 4, 4), new Rotation3d()), - new Transform3d(new Translation3d(5, 5, 5), new Rotation3d()), - 0.4, - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8)), - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8))))); - - cameraOne.result.setTimestampSeconds(4); - - PhotonPoseEstimator estimator = - new PhotonPoseEstimator( - aprilTags, - PoseStrategy.CLOSEST_TO_CAMERA_HEIGHT, - cameraOne, - new Transform3d(new Translation3d(0, 0, 4), new Rotation3d())); - - Optional estimatedPose = estimator.update(); - Pose3d pose = estimatedPose.get().estimatedPose; - - assertEquals(4, estimatedPose.get().timestampSeconds); - assertEquals(4, pose.getX(), .01); - assertEquals(4, pose.getY(), .01); - assertEquals(0, pose.getZ(), .01); - } - - @Test - void closestToReferencePoseStrategy() { - PhotonCameraInjector cameraOne = new PhotonCameraInjector(); - cameraOne.result = - new PhotonPipelineResult( - 2, - List.of( - new PhotonTrackedTarget( - 3.0, - -4.0, - 9.0, - 4.0, - 1, - new Transform3d(new Translation3d(0, 0, 0), new Rotation3d()), - new Transform3d(new Translation3d(1, 1, 1), new Rotation3d()), - 0.7, - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8)), - List.of( - new TargetCorner(1, 2), - new 
TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8))), - new PhotonTrackedTarget( - 3.0, - -4.0, - 9.1, - 6.7, - 1, - new Transform3d(new Translation3d(2, 2, 2), new Rotation3d()), - new Transform3d(new Translation3d(3, 3, 3), new Rotation3d()), - 0.3, - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8)), - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8))), - new PhotonTrackedTarget( - 9.0, - -2.0, - 19.0, - 3.0, - 0, - new Transform3d(new Translation3d(2.2, 2.2, 2.2), new Rotation3d()), - new Transform3d(new Translation3d(2, 1.9, 2.1), new Rotation3d()), - 0.4, - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8)), - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8))))); - cameraOne.result.setTimestampSeconds(17); - - PhotonPoseEstimator estimator = - new PhotonPoseEstimator( - aprilTags, - PoseStrategy.CLOSEST_TO_REFERENCE_POSE, - cameraOne, - new Transform3d(new Translation3d(0, 0, 0), new Rotation3d())); - estimator.setReferencePose(new Pose3d(1, 1, 1, new Rotation3d())); - - Optional estimatedPose = estimator.update(); - Pose3d pose = estimatedPose.get().estimatedPose; - - assertEquals(17, estimatedPose.get().timestampSeconds); - assertEquals(1, pose.getX(), .01); - assertEquals(1.1, pose.getY(), .01); - assertEquals(.9, pose.getZ(), .01); - } - - @Test - void closestToLastPose() { - PhotonCameraInjector cameraOne = new PhotonCameraInjector(); - cameraOne.result = - new PhotonPipelineResult( - 2, - List.of( - new PhotonTrackedTarget( - 3.0, - -4.0, - 9.0, - 4.0, - 1, - new Transform3d(new Translation3d(0, 0, 0), new Rotation3d()), - new Transform3d(new Translation3d(1, 1, 1), new Rotation3d()), - 0.7, - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new 
TargetCorner(7, 8)), - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8))), - new PhotonTrackedTarget( - 3.0, - -4.0, - 9.1, - 6.7, - 1, - new Transform3d(new Translation3d(2, 2, 2), new Rotation3d()), - new Transform3d(new Translation3d(3, 3, 3), new Rotation3d()), - 0.3, - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8)), - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8))), - new PhotonTrackedTarget( - 9.0, - -2.0, - 19.0, - 3.0, - 0, - new Transform3d(new Translation3d(2.2, 2.2, 2.2), new Rotation3d()), - new Transform3d(new Translation3d(2, 1.9, 2.1), new Rotation3d()), - 0.4, - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8)), - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8))))); - cameraOne.result.setTimestampSeconds(1); - - PhotonPoseEstimator estimator = - new PhotonPoseEstimator( - aprilTags, - PoseStrategy.CLOSEST_TO_LAST_POSE, - cameraOne, - new Transform3d(new Translation3d(0, 0, 0), new Rotation3d())); - - estimator.setLastPose(new Pose3d(1, 1, 1, new Rotation3d())); - - Optional estimatedPose = estimator.update(); - Pose3d pose = estimatedPose.get().estimatedPose; - - cameraOne.result = - new PhotonPipelineResult( - 2, - List.of( - new PhotonTrackedTarget( - 3.0, - -4.0, - 9.0, - 4.0, - 1, - new Transform3d(new Translation3d(0, 0, 0), new Rotation3d()), - new Transform3d(new Translation3d(1, 1, 1), new Rotation3d()), - 0.7, - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8)), - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8))), - new PhotonTrackedTarget( - 3.0, - -4.0, - 9.1, - 6.7, - 0, - new Transform3d(new 
Translation3d(2.1, 1.9, 2), new Rotation3d()), - new Transform3d(new Translation3d(3, 3, 3), new Rotation3d()), - 0.3, - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8)), - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8))), - new PhotonTrackedTarget( - 9.0, - -2.0, - 19.0, - 3.0, - 0, - new Transform3d(new Translation3d(2.4, 2.4, 2.2), new Rotation3d()), - new Transform3d(new Translation3d(2, 1, 2), new Rotation3d()), - 0.4, - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8)), - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8))))); - cameraOne.result.setTimestampSeconds(7); - - estimatedPose = estimator.update(); - pose = estimatedPose.get().estimatedPose; - - assertEquals(7, estimatedPose.get().timestampSeconds); - assertEquals(.9, pose.getX(), .01); - assertEquals(1.1, pose.getY(), .01); - assertEquals(1, pose.getZ(), .01); - } - - @Test - void cacheIsInvalidated() { - PhotonCameraInjector cameraOne = new PhotonCameraInjector(); - var result = - new PhotonPipelineResult( - 2, - List.of( - new PhotonTrackedTarget( - 3.0, - -4.0, - 9.0, - 4.0, - 0, - new Transform3d(new Translation3d(2, 2, 2), new Rotation3d()), - new Transform3d(new Translation3d(1, 1, 1), new Rotation3d()), - 0.7, - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8)), - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8))))); - result.setTimestampSeconds(20); - - PhotonPoseEstimator estimator = - new PhotonPoseEstimator( - aprilTags, - PoseStrategy.AVERAGE_BEST_TARGETS, - cameraOne, - new Transform3d(new Translation3d(0, 0, 0), new Rotation3d())); - - // Empty result, expect empty result - cameraOne.result = new 
PhotonPipelineResult(); - cameraOne.result.setTimestampSeconds(1); - Optional estimatedPose = estimator.update(); - assertFalse(estimatedPose.isPresent()); - - // Set actual result - cameraOne.result = result; - estimatedPose = estimator.update(); - assertTrue(estimatedPose.isPresent()); - assertEquals(20, estimatedPose.get().timestampSeconds, .01); - assertEquals(20, estimator.poseCacheTimestampSeconds); - - // And again -- pose cache should mean this is empty - cameraOne.result = result; - estimatedPose = estimator.update(); - assertFalse(estimatedPose.isPresent()); - // Expect the old timestamp to still be here - assertEquals(20, estimator.poseCacheTimestampSeconds); - - // Set new field layout -- right after, the pose cache timestamp should be -1 - estimator.setFieldTags(new AprilTagFieldLayout(List.of(new AprilTag(0, new Pose3d())), 0, 0)); - assertEquals(-1, estimator.poseCacheTimestampSeconds); - // Update should cache the current timestamp (20) again - cameraOne.result = result; - estimatedPose = estimator.update(); - assertEquals(20, estimatedPose.get().timestampSeconds, .01); - assertEquals(20, estimator.poseCacheTimestampSeconds); - } - - @Test - void averageBestPoses() { - PhotonCameraInjector cameraOne = new PhotonCameraInjector(); - cameraOne.result = - new PhotonPipelineResult( - 2, - List.of( - new PhotonTrackedTarget( - 3.0, - -4.0, - 9.0, - 4.0, - 0, - new Transform3d(new Translation3d(2, 2, 2), new Rotation3d()), - new Transform3d(new Translation3d(1, 1, 1), new Rotation3d()), - 0.7, - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8)), - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8))), // 1 1 1 ambig: .7 - new PhotonTrackedTarget( - 3.0, - -4.0, - 9.1, - 6.7, - 1, - new Transform3d(new Translation3d(3, 3, 3), new Rotation3d()), - new Transform3d(new Translation3d(3, 3, 3), new Rotation3d()), - 0.3, - List.of( - new 
TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8)), - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8))), // 2 2 2 ambig .3 - new PhotonTrackedTarget( - 9.0, - -2.0, - 19.0, - 3.0, - 0, - new Transform3d(new Translation3d(0, 0, 0), new Rotation3d()), - new Transform3d(new Translation3d(2, 1.9, 2.1), new Rotation3d()), - 0.4, - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8)), - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8))))); // 3 3 3 ambig .4 - cameraOne.result.setTimestampSeconds(20); - - PhotonPoseEstimator estimator = - new PhotonPoseEstimator( - aprilTags, - PoseStrategy.AVERAGE_BEST_TARGETS, - cameraOne, - new Transform3d(new Translation3d(0, 0, 0), new Rotation3d())); - - Optional estimatedPose = estimator.update(); - Pose3d pose = estimatedPose.get().estimatedPose; - - assertEquals(20, estimatedPose.get().timestampSeconds, .01); - assertEquals(2.15, pose.getX(), .01); - assertEquals(2.15, pose.getY(), .01); - assertEquals(2.15, pose.getZ(), .01); - } - - private class PhotonCameraInjector extends PhotonCamera { - public PhotonCameraInjector() { - super("Test"); - } - - PhotonPipelineResult result; + PhotonPipelineResult result; - @Override - public PhotonPipelineResult getLatestResult() { - return result; - } + @Override + public PhotonPipelineResult getLatestResult() { + return result; } + } } diff --git a/photon-lib/src/test/java/org/photonvision/PhotonUtilTest.java b/photon-lib/src/test/java/org/photonvision/PhotonUtilTest.java index bbc70cf856..fa3ea2fa31 100644 --- a/photon-lib/src/test/java/org/photonvision/PhotonUtilTest.java +++ b/photon-lib/src/test/java/org/photonvision/PhotonUtilTest.java @@ -30,73 +30,73 @@ import org.junit.jupiter.api.Test; class PhotonUtilTest { - @Test - public void testDistance() { - var 
camHeight = 1; - var targetHeight = 3; - var camPitch = Units.degreesToRadians(0); - var targetPitch = Units.degreesToRadians(30); + @Test + public void testDistance() { + var camHeight = 1; + var targetHeight = 3; + var camPitch = Units.degreesToRadians(0); + var targetPitch = Units.degreesToRadians(30); - var dist = - PhotonUtils.calculateDistanceToTargetMeters(camHeight, targetHeight, camPitch, targetPitch); + var dist = + PhotonUtils.calculateDistanceToTargetMeters(camHeight, targetHeight, camPitch, targetPitch); - Assertions.assertEquals(3.464, dist, 0.01); + Assertions.assertEquals(3.464, dist, 0.01); - camHeight = 1; - targetHeight = 2; - camPitch = Units.degreesToRadians(20); - targetPitch = Units.degreesToRadians(-10); + camHeight = 1; + targetHeight = 2; + camPitch = Units.degreesToRadians(20); + targetPitch = Units.degreesToRadians(-10); - dist = - PhotonUtils.calculateDistanceToTargetMeters(camHeight, targetHeight, camPitch, targetPitch); - Assertions.assertEquals(5.671, dist, 0.01); - } + dist = + PhotonUtils.calculateDistanceToTargetMeters(camHeight, targetHeight, camPitch, targetPitch); + Assertions.assertEquals(5.671, dist, 0.01); + } - @Test - public void testTransform() { - var camHeight = 1; - var tgtHeight = 3; - var camPitch = 0; - var tgtPitch = Units.degreesToRadians(30); - var tgtYaw = new Rotation2d(); - var gyroAngle = new Rotation2d(); - var fieldToTarget = new Pose2d(); - var cameraToRobot = new Transform2d(); + @Test + public void testTransform() { + var camHeight = 1; + var tgtHeight = 3; + var camPitch = 0; + var tgtPitch = Units.degreesToRadians(30); + var tgtYaw = new Rotation2d(); + var gyroAngle = new Rotation2d(); + var fieldToTarget = new Pose2d(); + var cameraToRobot = new Transform2d(); - var fieldToRobot = - PhotonUtils.estimateFieldToRobot( - PhotonUtils.estimateCameraToTarget( - PhotonUtils.estimateCameraToTargetTranslation( - PhotonUtils.calculateDistanceToTargetMeters( - camHeight, tgtHeight, camPitch, tgtPitch), - 
tgtYaw), - fieldToTarget, - gyroAngle), - fieldToTarget, - cameraToRobot); + var fieldToRobot = + PhotonUtils.estimateFieldToRobot( + PhotonUtils.estimateCameraToTarget( + PhotonUtils.estimateCameraToTargetTranslation( + PhotonUtils.calculateDistanceToTargetMeters( + camHeight, tgtHeight, camPitch, tgtPitch), + tgtYaw), + fieldToTarget, + gyroAngle), + fieldToTarget, + cameraToRobot); - Assertions.assertEquals(-3.464, fieldToRobot.getX(), 0.1); - Assertions.assertEquals(0, fieldToRobot.getY(), 0.1); - Assertions.assertEquals(0, fieldToRobot.getRotation().getDegrees(), 0.1); - } + Assertions.assertEquals(-3.464, fieldToRobot.getX(), 0.1); + Assertions.assertEquals(0, fieldToRobot.getY(), 0.1); + Assertions.assertEquals(0, fieldToRobot.getRotation().getDegrees(), 0.1); + } - @Test - public void testAprilTagUtils() { - var cameraToTarget = new Transform3d(new Translation3d(1, 0, 0), new Rotation3d()); - var tagPose = new Pose3d(5, 0, 0, new Rotation3d()); - var cameraToRobot = new Transform3d(); + @Test + public void testAprilTagUtils() { + var cameraToTarget = new Transform3d(new Translation3d(1, 0, 0), new Rotation3d()); + var tagPose = new Pose3d(5, 0, 0, new Rotation3d()); + var cameraToRobot = new Transform3d(); - var fieldToRobot = - PhotonUtils.estimateFieldToRobotAprilTag(cameraToTarget, tagPose, cameraToRobot); + var fieldToRobot = + PhotonUtils.estimateFieldToRobotAprilTag(cameraToTarget, tagPose, cameraToRobot); - var targetPose = - new Pose2d( - new Translation2d(Units.inchesToMeters(324), Units.inchesToMeters(162)), - new Rotation2d()); - var currentPose = new Pose2d(0, 0, Rotation2d.fromDegrees(0)); - Assertions.assertEquals(4.0, fieldToRobot.getX()); - Assertions.assertEquals( - Math.toDegrees(Math.atan2((Units.inchesToMeters(162)), (Units.inchesToMeters(324)))), - PhotonUtils.getYawToPose(currentPose, targetPose).getDegrees()); - } + var targetPose = + new Pose2d( + new Translation2d(Units.inchesToMeters(324), Units.inchesToMeters(162)), + new 
Rotation2d()); + var currentPose = new Pose2d(0, 0, Rotation2d.fromDegrees(0)); + Assertions.assertEquals(4.0, fieldToRobot.getX()); + Assertions.assertEquals( + Math.toDegrees(Math.atan2((Units.inchesToMeters(162)), (Units.inchesToMeters(324)))), + PhotonUtils.getYawToPose(currentPose, targetPose).getDegrees()); + } } diff --git a/photon-lib/src/test/java/org/photonvision/PhotonVersionTest.java b/photon-lib/src/test/java/org/photonvision/PhotonVersionTest.java index 6dc5a5854b..d62f528781 100644 --- a/photon-lib/src/test/java/org/photonvision/PhotonVersionTest.java +++ b/photon-lib/src/test/java/org/photonvision/PhotonVersionTest.java @@ -30,30 +30,30 @@ import org.junit.jupiter.api.Test; public class PhotonVersionTest { - public static final boolean versionMatches(String versionString, String other) { - String c = versionString; - Pattern p = Pattern.compile("v[0-9]+.[0-9]+.[0-9]+"); - Matcher m = p.matcher(c); - if (m.find()) { - c = m.group(0); - } else { - return false; - } - m = p.matcher(other); - if (m.find()) { - other = m.group(0); - } else { - return false; - } - return c.equals(other); + public static final boolean versionMatches(String versionString, String other) { + String c = versionString; + Pattern p = Pattern.compile("v[0-9]+.[0-9]+.[0-9]+"); + Matcher m = p.matcher(c); + if (m.find()) { + c = m.group(0); + } else { + return false; } - - @Test - public void testVersion() { - Assertions.assertTrue(versionMatches("v2021.1.6", "v2021.1.6")); - Assertions.assertTrue(versionMatches("dev-v2021.1.6", "v2021.1.6")); - Assertions.assertTrue(versionMatches("dev-v2021.1.6-5-gca49ea50", "v2021.1.6")); - Assertions.assertFalse(versionMatches("", "v2021.1.6")); - Assertions.assertFalse(versionMatches("v2021.1.6", "")); + m = p.matcher(other); + if (m.find()) { + other = m.group(0); + } else { + return false; } + return c.equals(other); + } + + @Test + public void testVersion() { + Assertions.assertTrue(versionMatches("v2021.1.6", "v2021.1.6")); + 
Assertions.assertTrue(versionMatches("dev-v2021.1.6", "v2021.1.6")); + Assertions.assertTrue(versionMatches("dev-v2021.1.6-5-gca49ea50", "v2021.1.6")); + Assertions.assertFalse(versionMatches("", "v2021.1.6")); + Assertions.assertFalse(versionMatches("v2021.1.6", "")); + } } diff --git a/photon-lib/src/test/java/org/photonvision/VisionSystemSimTest.java b/photon-lib/src/test/java/org/photonvision/VisionSystemSimTest.java index c53e006717..71ed7f7670 100644 --- a/photon-lib/src/test/java/org/photonvision/VisionSystemSimTest.java +++ b/photon-lib/src/test/java/org/photonvision/VisionSystemSimTest.java @@ -68,480 +68,480 @@ import org.photonvision.targeting.PhotonTrackedTarget; class VisionSystemSimTest { - private static final double kTrlDelta = 0.005; - private static final double kRotDeltaDeg = 0.25; - - @Test - public void testEmpty() { - Assertions.assertDoesNotThrow( - () -> { - var sysUnderTest = new VisionSystemSim("Test"); - sysUnderTest.addVisionTargets( - new VisionTargetSim(new Pose3d(), new TargetModel(1.0, 1.0))); - for (int loopIdx = 0; loopIdx < 100; loopIdx++) { - sysUnderTest.update(new Pose2d()); - } - }); + private static final double kTrlDelta = 0.005; + private static final double kRotDeltaDeg = 0.25; + + @Test + public void testEmpty() { + Assertions.assertDoesNotThrow( + () -> { + var sysUnderTest = new VisionSystemSim("Test"); + sysUnderTest.addVisionTargets( + new VisionTargetSim(new Pose3d(), new TargetModel(1.0, 1.0))); + for (int loopIdx = 0; loopIdx < 100; loopIdx++) { + sysUnderTest.update(new Pose2d()); + } + }); + } + + @BeforeAll + public static void setUp() { + JNIWrapper.Helper.setExtractOnStaticLoad(false); + WPIUtilJNI.Helper.setExtractOnStaticLoad(false); + NetworkTablesJNI.Helper.setExtractOnStaticLoad(false); + WPINetJNI.Helper.setExtractOnStaticLoad(false); + CameraServerJNI.Helper.setExtractOnStaticLoad(false); + CameraServerCvJNI.Helper.setExtractOnStaticLoad(false); + AprilTagJNI.Helper.setExtractOnStaticLoad(false); + + 
try { + CombinedRuntimeLoader.loadLibraries( + VisionSystemSim.class, + "wpiutiljni", + "ntcorejni", + "wpinetjni", + "wpiHaljni", + "cscorejni", + "cscorejnicvstatic"); + + var loader = + new RuntimeLoader<>( + Core.NATIVE_LIBRARY_NAME, RuntimeLoader.getDefaultExtractionRoot(), Core.class); + loader.loadLibrary(); + } catch (Exception e) { + e.printStackTrace(); } - @BeforeAll - public static void setUp() { - JNIWrapper.Helper.setExtractOnStaticLoad(false); - WPIUtilJNI.Helper.setExtractOnStaticLoad(false); - NetworkTablesJNI.Helper.setExtractOnStaticLoad(false); - WPINetJNI.Helper.setExtractOnStaticLoad(false); - CameraServerJNI.Helper.setExtractOnStaticLoad(false); - CameraServerCvJNI.Helper.setExtractOnStaticLoad(false); - AprilTagJNI.Helper.setExtractOnStaticLoad(false); - - try { - CombinedRuntimeLoader.loadLibraries( - VisionSystemSim.class, - "wpiutiljni", - "ntcorejni", - "wpinetjni", - "wpiHaljni", - "cscorejni", - "cscorejnicvstatic"); - - var loader = - new RuntimeLoader<>( - Core.NATIVE_LIBRARY_NAME, RuntimeLoader.getDefaultExtractionRoot(), Core.class); - loader.loadLibrary(); - } catch (Exception e) { - e.printStackTrace(); - } - - // NT live for debug purposes - NetworkTableInstance.getDefault().startServer(); - - // No version check for testing - PhotonCamera.setVersionCheckEnabled(false); - } - - @AfterAll - public static void shutDown() {} - - // @ParameterizedTest - // @ValueSource(doubles = {5, 10, 15, 20, 25, 30}) - // public void testDistanceAligned(double dist) { - // final var targetPose = new Pose2d(new Translation2d(15.98, 0), new Rotation2d()); - // var sysUnderTest = - // new SimVisionSystem("Test", 80.0, 0.0, new Transform2d(), 1, 99999, 320, 240, 0); - // sysUnderTest.addSimVisionTarget(new SimVisionTarget(targetPose, 0.0, 1.0, 1.0)); - - // final var robotPose = new Pose2d(new Translation2d(35 - dist, 0), new Rotation2d()); - // sysUnderTest.processFrame(robotPose); - - // var result = sysUnderTest.cam.getLatestResult(); - - // 
assertTrue(result.hasTargets()); - // assertEquals(result.getBestTarget().getCameraToTarget().getTranslation().getNorm(), dist); - // } - - @Test - public void testVisibilityCupidShuffle() { - final var targetPose = - new Pose3d(new Translation3d(15.98, 0, 2), new Rotation3d(0, 0, Math.PI)); - var visionSysSim = new VisionSystemSim("Test"); - var camera = new PhotonCamera("camera"); - var cameraSim = new PhotonCameraSim(camera); - visionSysSim.addCamera(cameraSim, new Transform3d()); - cameraSim.prop.setCalibration(640, 480, Rotation2d.fromDegrees(80)); - visionSysSim.addVisionTargets(new VisionTargetSim(targetPose, new TargetModel(1.0, 1.0), 3)); - - // To the right, to the right - var robotPose = new Pose2d(new Translation2d(5, 0), Rotation2d.fromDegrees(-70)); - visionSysSim.update(robotPose); - assertFalse(camera.getLatestResult().hasTargets()); - - // To the right, to the right - robotPose = new Pose2d(new Translation2d(5, 0), Rotation2d.fromDegrees(-95)); - visionSysSim.update(robotPose); - assertFalse(camera.getLatestResult().hasTargets()); - - // To the left, to the left - robotPose = new Pose2d(new Translation2d(5, 0), Rotation2d.fromDegrees(90)); - visionSysSim.update(robotPose); - assertFalse(camera.getLatestResult().hasTargets()); - - // To the left, to the left - robotPose = new Pose2d(new Translation2d(5, 0), Rotation2d.fromDegrees(65)); - visionSysSim.update(robotPose); - assertFalse(camera.getLatestResult().hasTargets()); - - // now kick, now kick - robotPose = new Pose2d(new Translation2d(2, 0), Rotation2d.fromDegrees(5)); - visionSysSim.update(robotPose); - assertTrue(camera.getLatestResult().hasTargets()); - - // now kick, now kick - robotPose = new Pose2d(new Translation2d(2, 0), Rotation2d.fromDegrees(-5)); - visionSysSim.update(robotPose); - assertTrue(camera.getLatestResult().hasTargets()); - - // now walk it by yourself - robotPose = new Pose2d(new Translation2d(2, 0), Rotation2d.fromDegrees(-179)); - visionSysSim.update(robotPose); - 
assertFalse(camera.getLatestResult().hasTargets()); - - // now walk it by yourself - visionSysSim.adjustCamera( - cameraSim, new Transform3d(new Translation3d(), new Rotation3d(0, 0, Math.PI))); - visionSysSim.update(robotPose); - assertTrue(camera.getLatestResult().hasTargets()); - } - - @Test - public void testNotVisibleVert1() { - final var targetPose = - new Pose3d(new Translation3d(15.98, 0, 1), new Rotation3d(0, 0, Math.PI)); - var visionSysSim = new VisionSystemSim("Test"); - var camera = new PhotonCamera("camera"); - var cameraSim = new PhotonCameraSim(camera); - visionSysSim.addCamera(cameraSim, new Transform3d()); - cameraSim.prop.setCalibration(640, 480, Rotation2d.fromDegrees(80)); - visionSysSim.addVisionTargets(new VisionTargetSim(targetPose, new TargetModel(1.0, 3.0), 3)); - - var robotPose = new Pose2d(new Translation2d(5, 0), Rotation2d.fromDegrees(5)); - visionSysSim.update(robotPose); - assertTrue(camera.getLatestResult().hasTargets()); - - visionSysSim.adjustCamera( // vooop selfie stick - cameraSim, new Transform3d(new Translation3d(0, 0, 5000), new Rotation3d(0, 0, Math.PI))); - visionSysSim.update(robotPose); - assertFalse(camera.getLatestResult().hasTargets()); - } - - @Test - public void testNotVisibleVert2() { - final var targetPose = - new Pose3d(new Translation3d(15.98, 0, 2), new Rotation3d(0, 0, Math.PI)); - var robotToCamera = - new Transform3d(new Translation3d(0, 0, 1), new Rotation3d(0, -Math.PI / 4, 0)); - var visionSysSim = new VisionSystemSim("Test"); - var camera = new PhotonCamera("camera"); - var cameraSim = new PhotonCameraSim(camera); - visionSysSim.addCamera(cameraSim, robotToCamera); - cameraSim.prop.setCalibration(1234, 1234, Rotation2d.fromDegrees(80)); - visionSysSim.addVisionTargets(new VisionTargetSim(targetPose, new TargetModel(1.0, 0.5), 1736)); - - var robotPose = new Pose2d(new Translation2d(13.98, 0), Rotation2d.fromDegrees(5)); - visionSysSim.update(robotPose); - 
assertTrue(camera.getLatestResult().hasTargets()); - - // Pitched back camera should mean target goes out of view below the robot as distance increases - robotPose = new Pose2d(new Translation2d(0, 0), Rotation2d.fromDegrees(5)); - visionSysSim.update(robotPose); - assertFalse(camera.getLatestResult().hasTargets()); - } - - @Test - public void testNotVisibleTgtSize() { - final var targetPose = - new Pose3d(new Translation3d(15.98, 0, 1), new Rotation3d(0, 0, Math.PI)); - var visionSysSim = new VisionSystemSim("Test"); - var camera = new PhotonCamera("camera"); - var cameraSim = new PhotonCameraSim(camera); - visionSysSim.addCamera(cameraSim, new Transform3d()); - cameraSim.prop.setCalibration(640, 480, Rotation2d.fromDegrees(80)); - cameraSim.setMinTargetAreaPixels(20.0); - visionSysSim.addVisionTargets(new VisionTargetSim(targetPose, new TargetModel(0.1, 0.025), 24)); - - var robotPose = new Pose2d(new Translation2d(12, 0), Rotation2d.fromDegrees(5)); - visionSysSim.update(robotPose); - assertTrue(camera.getLatestResult().hasTargets()); - - robotPose = new Pose2d(new Translation2d(0, 0), Rotation2d.fromDegrees(5)); - visionSysSim.update(robotPose); - assertFalse(camera.getLatestResult().hasTargets()); - } - - @Test - public void testNotVisibleTooFarForLEDs() { - final var targetPose = - new Pose3d(new Translation3d(15.98, 0, 1), new Rotation3d(0, 0, Math.PI)); - var visionSysSim = new VisionSystemSim("Test"); - var camera = new PhotonCamera("camera"); - var cameraSim = new PhotonCameraSim(camera); - visionSysSim.addCamera(cameraSim, new Transform3d()); - cameraSim.prop.setCalibration(640, 480, Rotation2d.fromDegrees(80)); - cameraSim.setMaxSightRange(10); - cameraSim.setMinTargetAreaPixels(1.0); - visionSysSim.addVisionTargets(new VisionTargetSim(targetPose, new TargetModel(1.0, 0.25), 78)); - - var robotPose = new Pose2d(new Translation2d(10, 0), Rotation2d.fromDegrees(5)); - visionSysSim.update(robotPose); - assertTrue(camera.getLatestResult().hasTargets()); - - 
robotPose = new Pose2d(new Translation2d(0, 0), Rotation2d.fromDegrees(5)); - visionSysSim.update(robotPose); - assertFalse(camera.getLatestResult().hasTargets()); - } - - @ParameterizedTest - @ValueSource(doubles = {-10, -5, -0, -1, -2, 5, 7, 10.23}) - public void testYawAngles(double testYaw) { - final var targetPose = - new Pose3d(new Translation3d(15.98, 0, 1), new Rotation3d(0, 0, 3 * Math.PI / 4)); - var visionSysSim = new VisionSystemSim("Test"); - var camera = new PhotonCamera("camera"); - var cameraSim = new PhotonCameraSim(camera); - visionSysSim.addCamera(cameraSim, new Transform3d()); - cameraSim.prop.setCalibration(640, 480, Rotation2d.fromDegrees(80)); - cameraSim.setMinTargetAreaPixels(0.0); - visionSysSim.addVisionTargets(new VisionTargetSim(targetPose, new TargetModel(0.5, 0.5), 3)); - - var robotPose = new Pose2d(new Translation2d(10, 0), Rotation2d.fromDegrees(-1.0 * testYaw)); - visionSysSim.update(robotPose); - var res = camera.getLatestResult(); - assertTrue(res.hasTargets()); - var tgt = res.getBestTarget(); - assertEquals(testYaw, tgt.getYaw(), kRotDeltaDeg); - } - - @ParameterizedTest - @ValueSource(doubles = {-10, -5, -0, -1, -2, 5, 7, 10.23, 20.21, -19.999}) - public void testPitchAngles(double testPitch) { - final var targetPose = - new Pose3d(new Translation3d(15.98, 0, 0), new Rotation3d(0, 0, 3 * Math.PI / 4)); - final var robotPose = new Pose2d(new Translation2d(10, 0), new Rotation2d(0)); - var visionSysSim = new VisionSystemSim("Test"); - var camera = new PhotonCamera("camera"); - var cameraSim = new PhotonCameraSim(camera); - visionSysSim.addCamera(cameraSim, new Transform3d()); - cameraSim.prop.setCalibration(640, 480, Rotation2d.fromDegrees(120)); - cameraSim.setMinTargetAreaPixels(0.0); - visionSysSim.addVisionTargets(new VisionTargetSim(targetPose, new TargetModel(0.5, 0.5), 23)); - - // Transform is now robot -> camera - visionSysSim.adjustCamera( - cameraSim, - new Transform3d( - new Translation3d(), new Rotation3d(0, 
Units.degreesToRadians(testPitch), 0))); - visionSysSim.update(robotPose); - var res = camera.getLatestResult(); - assertTrue(res.hasTargets()); - var tgt = res.getBestTarget(); - - // Since the camera is level with the target, a positive-upward point will mean the target is in - // the - // lower half of the image - // which should produce negative pitch. - assertEquals(testPitch, tgt.getPitch(), kRotDeltaDeg); - } - - private static Stream testDistanceCalcArgs() { - // Arbitrary and fairly random assortment of distances, camera pitches, and heights - return Stream.of( - Arguments.of(5, -15.98, 0), - Arguments.of(6, -15.98, 1), - Arguments.of(10, -15.98, 0), - Arguments.of(15, -15.98, 2), - Arguments.of(19.95, -15.98, 0), - Arguments.of(20, -15.98, 0), - Arguments.of(5, -42, 1), - Arguments.of(6, -42, 0), - Arguments.of(10, -42, 2), - Arguments.of(15, -42, 0.5), - Arguments.of(19.42, -15.98, 0), - Arguments.of(20, -42, 0), - Arguments.of(5, -35, 2), - Arguments.of(6, -35, 0), - Arguments.of(10, -34, 2.4), - Arguments.of(15, -33, 0), - Arguments.of(19.52, -15.98, 1.1)); - } - - @ParameterizedTest - @MethodSource("testDistanceCalcArgs") - public void testDistanceCalc(double testDist, double testPitch, double testHeight) { - // Assume dist along ground and tgt height the same. Iterate over other parameters. 
- - final var targetPose = - new Pose3d(new Translation3d(15.98, 0, 1), new Rotation3d(0, 0, Math.PI * 0.98)); - final var robotPose = - new Pose3d(new Translation3d(15.98 - Units.feetToMeters(testDist), 0, 0), new Rotation3d()); - final var robotToCamera = - new Transform3d( - new Translation3d(0, 0, Units.feetToMeters(testHeight)), - new Rotation3d(0, Units.degreesToRadians(testPitch), 0)); - - var visionSysSim = - new VisionSystemSim( - "absurdlylongnamewhichshouldneveractuallyhappenbuteehwelltestitanywaysohowsyourdaygoingihopegoodhaveagreatrestofyourlife!"); - var camera = new PhotonCamera("camera"); - var cameraSim = new PhotonCameraSim(camera); - visionSysSim.addCamera(cameraSim, new Transform3d()); - cameraSim.prop.setCalibration(640, 480, Rotation2d.fromDegrees(160)); - cameraSim.setMinTargetAreaPixels(0.0); - visionSysSim.adjustCamera(cameraSim, robotToCamera); - visionSysSim.addVisionTargets(new VisionTargetSim(targetPose, new TargetModel(0.5, 0.5), 0)); - - visionSysSim.update(robotPose); - - // Note that target 2d yaw/pitch accuracy is hindered by two factors in photonvision: - // 1. These are calculated with the average of the minimum area rectangle, which does not - // actually find the target center because of perspective distortion. - // 2. Yaw and pitch are calculated separately which gives incorrect pitch values. - var res = camera.getLatestResult(); - assertTrue(res.hasTargets()); - var tgt = res.getBestTarget(); - assertEquals(0.0, tgt.getYaw(), 0.5); - - // Distance calculation using this trigonometry may be wildly incorrect when - // there is not much height difference between the target and the camera. 
- double distMeas = - PhotonUtils.calculateDistanceToTargetMeters( - robotToCamera.getZ(), - targetPose.getZ(), - Units.degreesToRadians(-testPitch), - Units.degreesToRadians(tgt.getPitch())); - assertEquals(Units.feetToMeters(testDist), distMeas, 0.15); - } - - @Test - public void testMultipleTargets() { - final var targetPoseL = - new Pose3d(new Translation3d(15.98, 2, 0), new Rotation3d(0, 0, Math.PI)); - final var targetPoseC = - new Pose3d(new Translation3d(15.98, 0, 0), new Rotation3d(0, 0, Math.PI)); - final var targetPoseR = - new Pose3d(new Translation3d(15.98, -2, 0), new Rotation3d(0, 0, Math.PI)); - - var visionSysSim = new VisionSystemSim("Test"); - var camera = new PhotonCamera("camera"); - var cameraSim = new PhotonCameraSim(camera); - visionSysSim.addCamera(cameraSim, new Transform3d()); - cameraSim.prop.setCalibration(640, 480, Rotation2d.fromDegrees(80)); - cameraSim.setMinTargetAreaPixels(20.0); - - visionSysSim.addVisionTargets( - new VisionTargetSim( - targetPoseL.transformBy( - new Transform3d(new Translation3d(0, 0, 0.00), new Rotation3d())), - TargetModel.kTag16h5, - 1)); - visionSysSim.addVisionTargets( - new VisionTargetSim( - targetPoseC.transformBy( - new Transform3d(new Translation3d(0, 0, 0.00), new Rotation3d())), - TargetModel.kTag16h5, - 2)); - visionSysSim.addVisionTargets( - new VisionTargetSim( - targetPoseR.transformBy( - new Transform3d(new Translation3d(0, 0, 0.00), new Rotation3d())), - TargetModel.kTag16h5, - 3)); - visionSysSim.addVisionTargets( - new VisionTargetSim( - targetPoseL.transformBy( - new Transform3d(new Translation3d(0, 0, 1.00), new Rotation3d())), - TargetModel.kTag16h5, - 4)); - visionSysSim.addVisionTargets( - new VisionTargetSim( - targetPoseC.transformBy( - new Transform3d(new Translation3d(0, 0, 1.00), new Rotation3d())), - TargetModel.kTag16h5, - 5)); - visionSysSim.addVisionTargets( - new VisionTargetSim( - targetPoseR.transformBy( - new Transform3d(new Translation3d(0, 0, 1.00), new Rotation3d())), - 
TargetModel.kTag16h5, - 6)); - visionSysSim.addVisionTargets( - new VisionTargetSim( - targetPoseL.transformBy( - new Transform3d(new Translation3d(0, 0, 0.50), new Rotation3d())), - TargetModel.kTag16h5, - 7)); - visionSysSim.addVisionTargets( - new VisionTargetSim( - targetPoseC.transformBy( - new Transform3d(new Translation3d(0, 0, 0.50), new Rotation3d())), - TargetModel.kTag16h5, - 8)); - visionSysSim.addVisionTargets( - new VisionTargetSim( - targetPoseL.transformBy( - new Transform3d(new Translation3d(0, 0, 0.75), new Rotation3d())), - TargetModel.kTag16h5, - 9)); - visionSysSim.addVisionTargets( - new VisionTargetSim( - targetPoseR.transformBy( - new Transform3d(new Translation3d(0, 0, 0.75), new Rotation3d())), - TargetModel.kTag16h5, - 10)); - visionSysSim.addVisionTargets( - new VisionTargetSim( - targetPoseL.transformBy( - new Transform3d(new Translation3d(0, 0, 0.25), new Rotation3d())), - TargetModel.kTag16h5, - 11)); - - var robotPose = new Pose2d(new Translation2d(6.0, 0), Rotation2d.fromDegrees(0.25)); - visionSysSim.update(robotPose); - var res = camera.getLatestResult(); - assertTrue(res.hasTargets()); - List tgtList; - tgtList = res.getTargets(); - assertEquals(11, tgtList.size()); - } - - @Test - public void testPoseEstimation() { - var visionSysSim = new VisionSystemSim("Test"); - var camera = new PhotonCamera("camera"); - var cameraSim = new PhotonCameraSim(camera); - visionSysSim.addCamera(cameraSim, new Transform3d()); - cameraSim.prop.setCalibration(640, 480, Rotation2d.fromDegrees(90)); - cameraSim.setMinTargetAreaPixels(20.0); - - List tagList = new ArrayList<>(); - tagList.add(new AprilTag(0, new Pose3d(12, 3, 1, new Rotation3d(0, 0, Math.PI)))); - tagList.add(new AprilTag(1, new Pose3d(12, 1, -1, new Rotation3d(0, 0, Math.PI)))); - tagList.add(new AprilTag(2, new Pose3d(11, 0, 2, new Rotation3d(0, 0, Math.PI)))); - double fieldLength = Units.feetToMeters(54.0); - double fieldWidth = Units.feetToMeters(27.0); - AprilTagFieldLayout 
layout = new AprilTagFieldLayout(tagList, fieldLength, fieldWidth); - Pose2d robotPose = new Pose2d(5, 1, Rotation2d.fromDegrees(5)); - - visionSysSim.addVisionTargets( - new VisionTargetSim(tagList.get(0).pose, TargetModel.kTag16h5, 0)); - - visionSysSim.update(robotPose); - var results = - VisionEstimation.estimateCamPosePNP( - camera.getCameraMatrix().get(), - camera.getDistCoeffs().get(), - camera.getLatestResult().getTargets(), - layout); - Pose3d pose = new Pose3d().plus(results.best); - assertEquals(5, pose.getX(), .01); - assertEquals(1, pose.getY(), .01); - assertEquals(0, pose.getZ(), .01); - assertEquals(Math.toRadians(5), pose.getRotation().getZ(), 0.01); - - visionSysSim.addVisionTargets( - new VisionTargetSim(tagList.get(1).pose, TargetModel.kTag16h5, 1)); - visionSysSim.addVisionTargets( - new VisionTargetSim(tagList.get(2).pose, TargetModel.kTag16h5, 2)); - - visionSysSim.update(robotPose); - results = - VisionEstimation.estimateCamPosePNP( - camera.getCameraMatrix().get(), - camera.getDistCoeffs().get(), - camera.getLatestResult().getTargets(), - layout); - pose = new Pose3d().plus(results.best); - assertEquals(5, pose.getX(), .01); - assertEquals(1, pose.getY(), .01); - assertEquals(0, pose.getZ(), .01); - assertEquals(Math.toRadians(5), pose.getRotation().getZ(), 0.01); - } + // NT live for debug purposes + NetworkTableInstance.getDefault().startServer(); + + // No version check for testing + PhotonCamera.setVersionCheckEnabled(false); + } + + @AfterAll + public static void shutDown() {} + + // @ParameterizedTest + // @ValueSource(doubles = {5, 10, 15, 20, 25, 30}) + // public void testDistanceAligned(double dist) { + // final var targetPose = new Pose2d(new Translation2d(15.98, 0), new Rotation2d()); + // var sysUnderTest = + // new SimVisionSystem("Test", 80.0, 0.0, new Transform2d(), 1, 99999, 320, 240, 0); + // sysUnderTest.addSimVisionTarget(new SimVisionTarget(targetPose, 0.0, 1.0, 1.0)); + + // final var robotPose = new Pose2d(new 
Translation2d(35 - dist, 0), new Rotation2d()); + // sysUnderTest.processFrame(robotPose); + + // var result = sysUnderTest.cam.getLatestResult(); + + // assertTrue(result.hasTargets()); + // assertEquals(result.getBestTarget().getCameraToTarget().getTranslation().getNorm(), dist); + // } + + @Test + public void testVisibilityCupidShuffle() { + final var targetPose = + new Pose3d(new Translation3d(15.98, 0, 2), new Rotation3d(0, 0, Math.PI)); + var visionSysSim = new VisionSystemSim("Test"); + var camera = new PhotonCamera("camera"); + var cameraSim = new PhotonCameraSim(camera); + visionSysSim.addCamera(cameraSim, new Transform3d()); + cameraSim.prop.setCalibration(640, 480, Rotation2d.fromDegrees(80)); + visionSysSim.addVisionTargets(new VisionTargetSim(targetPose, new TargetModel(1.0, 1.0), 3)); + + // To the right, to the right + var robotPose = new Pose2d(new Translation2d(5, 0), Rotation2d.fromDegrees(-70)); + visionSysSim.update(robotPose); + assertFalse(camera.getLatestResult().hasTargets()); + + // To the right, to the right + robotPose = new Pose2d(new Translation2d(5, 0), Rotation2d.fromDegrees(-95)); + visionSysSim.update(robotPose); + assertFalse(camera.getLatestResult().hasTargets()); + + // To the left, to the left + robotPose = new Pose2d(new Translation2d(5, 0), Rotation2d.fromDegrees(90)); + visionSysSim.update(robotPose); + assertFalse(camera.getLatestResult().hasTargets()); + + // To the left, to the left + robotPose = new Pose2d(new Translation2d(5, 0), Rotation2d.fromDegrees(65)); + visionSysSim.update(robotPose); + assertFalse(camera.getLatestResult().hasTargets()); + + // now kick, now kick + robotPose = new Pose2d(new Translation2d(2, 0), Rotation2d.fromDegrees(5)); + visionSysSim.update(robotPose); + assertTrue(camera.getLatestResult().hasTargets()); + + // now kick, now kick + robotPose = new Pose2d(new Translation2d(2, 0), Rotation2d.fromDegrees(-5)); + visionSysSim.update(robotPose); + assertTrue(camera.getLatestResult().hasTargets()); 
+ + // now walk it by yourself + robotPose = new Pose2d(new Translation2d(2, 0), Rotation2d.fromDegrees(-179)); + visionSysSim.update(robotPose); + assertFalse(camera.getLatestResult().hasTargets()); + + // now walk it by yourself + visionSysSim.adjustCamera( + cameraSim, new Transform3d(new Translation3d(), new Rotation3d(0, 0, Math.PI))); + visionSysSim.update(robotPose); + assertTrue(camera.getLatestResult().hasTargets()); + } + + @Test + public void testNotVisibleVert1() { + final var targetPose = + new Pose3d(new Translation3d(15.98, 0, 1), new Rotation3d(0, 0, Math.PI)); + var visionSysSim = new VisionSystemSim("Test"); + var camera = new PhotonCamera("camera"); + var cameraSim = new PhotonCameraSim(camera); + visionSysSim.addCamera(cameraSim, new Transform3d()); + cameraSim.prop.setCalibration(640, 480, Rotation2d.fromDegrees(80)); + visionSysSim.addVisionTargets(new VisionTargetSim(targetPose, new TargetModel(1.0, 3.0), 3)); + + var robotPose = new Pose2d(new Translation2d(5, 0), Rotation2d.fromDegrees(5)); + visionSysSim.update(robotPose); + assertTrue(camera.getLatestResult().hasTargets()); + + visionSysSim.adjustCamera( // vooop selfie stick + cameraSim, new Transform3d(new Translation3d(0, 0, 5000), new Rotation3d(0, 0, Math.PI))); + visionSysSim.update(robotPose); + assertFalse(camera.getLatestResult().hasTargets()); + } + + @Test + public void testNotVisibleVert2() { + final var targetPose = + new Pose3d(new Translation3d(15.98, 0, 2), new Rotation3d(0, 0, Math.PI)); + var robotToCamera = + new Transform3d(new Translation3d(0, 0, 1), new Rotation3d(0, -Math.PI / 4, 0)); + var visionSysSim = new VisionSystemSim("Test"); + var camera = new PhotonCamera("camera"); + var cameraSim = new PhotonCameraSim(camera); + visionSysSim.addCamera(cameraSim, robotToCamera); + cameraSim.prop.setCalibration(1234, 1234, Rotation2d.fromDegrees(80)); + visionSysSim.addVisionTargets(new VisionTargetSim(targetPose, new TargetModel(1.0, 0.5), 1736)); + + var robotPose = new 
Pose2d(new Translation2d(13.98, 0), Rotation2d.fromDegrees(5)); + visionSysSim.update(robotPose); + assertTrue(camera.getLatestResult().hasTargets()); + + // Pitched back camera should mean target goes out of view below the robot as distance increases + robotPose = new Pose2d(new Translation2d(0, 0), Rotation2d.fromDegrees(5)); + visionSysSim.update(robotPose); + assertFalse(camera.getLatestResult().hasTargets()); + } + + @Test + public void testNotVisibleTgtSize() { + final var targetPose = + new Pose3d(new Translation3d(15.98, 0, 1), new Rotation3d(0, 0, Math.PI)); + var visionSysSim = new VisionSystemSim("Test"); + var camera = new PhotonCamera("camera"); + var cameraSim = new PhotonCameraSim(camera); + visionSysSim.addCamera(cameraSim, new Transform3d()); + cameraSim.prop.setCalibration(640, 480, Rotation2d.fromDegrees(80)); + cameraSim.setMinTargetAreaPixels(20.0); + visionSysSim.addVisionTargets(new VisionTargetSim(targetPose, new TargetModel(0.1, 0.025), 24)); + + var robotPose = new Pose2d(new Translation2d(12, 0), Rotation2d.fromDegrees(5)); + visionSysSim.update(robotPose); + assertTrue(camera.getLatestResult().hasTargets()); + + robotPose = new Pose2d(new Translation2d(0, 0), Rotation2d.fromDegrees(5)); + visionSysSim.update(robotPose); + assertFalse(camera.getLatestResult().hasTargets()); + } + + @Test + public void testNotVisibleTooFarForLEDs() { + final var targetPose = + new Pose3d(new Translation3d(15.98, 0, 1), new Rotation3d(0, 0, Math.PI)); + var visionSysSim = new VisionSystemSim("Test"); + var camera = new PhotonCamera("camera"); + var cameraSim = new PhotonCameraSim(camera); + visionSysSim.addCamera(cameraSim, new Transform3d()); + cameraSim.prop.setCalibration(640, 480, Rotation2d.fromDegrees(80)); + cameraSim.setMaxSightRange(10); + cameraSim.setMinTargetAreaPixels(1.0); + visionSysSim.addVisionTargets(new VisionTargetSim(targetPose, new TargetModel(1.0, 0.25), 78)); + + var robotPose = new Pose2d(new Translation2d(10, 0), 
Rotation2d.fromDegrees(5)); + visionSysSim.update(robotPose); + assertTrue(camera.getLatestResult().hasTargets()); + + robotPose = new Pose2d(new Translation2d(0, 0), Rotation2d.fromDegrees(5)); + visionSysSim.update(robotPose); + assertFalse(camera.getLatestResult().hasTargets()); + } + + @ParameterizedTest + @ValueSource(doubles = {-10, -5, -0, -1, -2, 5, 7, 10.23}) + public void testYawAngles(double testYaw) { + final var targetPose = + new Pose3d(new Translation3d(15.98, 0, 1), new Rotation3d(0, 0, 3 * Math.PI / 4)); + var visionSysSim = new VisionSystemSim("Test"); + var camera = new PhotonCamera("camera"); + var cameraSim = new PhotonCameraSim(camera); + visionSysSim.addCamera(cameraSim, new Transform3d()); + cameraSim.prop.setCalibration(640, 480, Rotation2d.fromDegrees(80)); + cameraSim.setMinTargetAreaPixels(0.0); + visionSysSim.addVisionTargets(new VisionTargetSim(targetPose, new TargetModel(0.5, 0.5), 3)); + + var robotPose = new Pose2d(new Translation2d(10, 0), Rotation2d.fromDegrees(-1.0 * testYaw)); + visionSysSim.update(robotPose); + var res = camera.getLatestResult(); + assertTrue(res.hasTargets()); + var tgt = res.getBestTarget(); + assertEquals(testYaw, tgt.getYaw(), kRotDeltaDeg); + } + + @ParameterizedTest + @ValueSource(doubles = {-10, -5, -0, -1, -2, 5, 7, 10.23, 20.21, -19.999}) + public void testPitchAngles(double testPitch) { + final var targetPose = + new Pose3d(new Translation3d(15.98, 0, 0), new Rotation3d(0, 0, 3 * Math.PI / 4)); + final var robotPose = new Pose2d(new Translation2d(10, 0), new Rotation2d(0)); + var visionSysSim = new VisionSystemSim("Test"); + var camera = new PhotonCamera("camera"); + var cameraSim = new PhotonCameraSim(camera); + visionSysSim.addCamera(cameraSim, new Transform3d()); + cameraSim.prop.setCalibration(640, 480, Rotation2d.fromDegrees(120)); + cameraSim.setMinTargetAreaPixels(0.0); + visionSysSim.addVisionTargets(new VisionTargetSim(targetPose, new TargetModel(0.5, 0.5), 23)); + + // Transform is now robot 
-> camera + visionSysSim.adjustCamera( + cameraSim, + new Transform3d( + new Translation3d(), new Rotation3d(0, Units.degreesToRadians(testPitch), 0))); + visionSysSim.update(robotPose); + var res = camera.getLatestResult(); + assertTrue(res.hasTargets()); + var tgt = res.getBestTarget(); + + // Since the camera is level with the target, a positive-upward point will mean the target is in + // the + // lower half of the image + // which should produce negative pitch. + assertEquals(testPitch, tgt.getPitch(), kRotDeltaDeg); + } + + private static Stream testDistanceCalcArgs() { + // Arbitrary and fairly random assortment of distances, camera pitches, and heights + return Stream.of( + Arguments.of(5, -15.98, 0), + Arguments.of(6, -15.98, 1), + Arguments.of(10, -15.98, 0), + Arguments.of(15, -15.98, 2), + Arguments.of(19.95, -15.98, 0), + Arguments.of(20, -15.98, 0), + Arguments.of(5, -42, 1), + Arguments.of(6, -42, 0), + Arguments.of(10, -42, 2), + Arguments.of(15, -42, 0.5), + Arguments.of(19.42, -15.98, 0), + Arguments.of(20, -42, 0), + Arguments.of(5, -35, 2), + Arguments.of(6, -35, 0), + Arguments.of(10, -34, 2.4), + Arguments.of(15, -33, 0), + Arguments.of(19.52, -15.98, 1.1)); + } + + @ParameterizedTest + @MethodSource("testDistanceCalcArgs") + public void testDistanceCalc(double testDist, double testPitch, double testHeight) { + // Assume dist along ground and tgt height the same. Iterate over other parameters. 
+ + final var targetPose = + new Pose3d(new Translation3d(15.98, 0, 1), new Rotation3d(0, 0, Math.PI * 0.98)); + final var robotPose = + new Pose3d(new Translation3d(15.98 - Units.feetToMeters(testDist), 0, 0), new Rotation3d()); + final var robotToCamera = + new Transform3d( + new Translation3d(0, 0, Units.feetToMeters(testHeight)), + new Rotation3d(0, Units.degreesToRadians(testPitch), 0)); + + var visionSysSim = + new VisionSystemSim( + "absurdlylongnamewhichshouldneveractuallyhappenbuteehwelltestitanywaysohowsyourdaygoingihopegoodhaveagreatrestofyourlife!"); + var camera = new PhotonCamera("camera"); + var cameraSim = new PhotonCameraSim(camera); + visionSysSim.addCamera(cameraSim, new Transform3d()); + cameraSim.prop.setCalibration(640, 480, Rotation2d.fromDegrees(160)); + cameraSim.setMinTargetAreaPixels(0.0); + visionSysSim.adjustCamera(cameraSim, robotToCamera); + visionSysSim.addVisionTargets(new VisionTargetSim(targetPose, new TargetModel(0.5, 0.5), 0)); + + visionSysSim.update(robotPose); + + // Note that target 2d yaw/pitch accuracy is hindered by two factors in photonvision: + // 1. These are calculated with the average of the minimum area rectangle, which does not + // actually find the target center because of perspective distortion. + // 2. Yaw and pitch are calculated separately which gives incorrect pitch values. + var res = camera.getLatestResult(); + assertTrue(res.hasTargets()); + var tgt = res.getBestTarget(); + assertEquals(0.0, tgt.getYaw(), 0.5); + + // Distance calculation using this trigonometry may be wildly incorrect when + // there is not much height difference between the target and the camera. 
+ double distMeas = + PhotonUtils.calculateDistanceToTargetMeters( + robotToCamera.getZ(), + targetPose.getZ(), + Units.degreesToRadians(-testPitch), + Units.degreesToRadians(tgt.getPitch())); + assertEquals(Units.feetToMeters(testDist), distMeas, 0.15); + } + + @Test + public void testMultipleTargets() { + final var targetPoseL = + new Pose3d(new Translation3d(15.98, 2, 0), new Rotation3d(0, 0, Math.PI)); + final var targetPoseC = + new Pose3d(new Translation3d(15.98, 0, 0), new Rotation3d(0, 0, Math.PI)); + final var targetPoseR = + new Pose3d(new Translation3d(15.98, -2, 0), new Rotation3d(0, 0, Math.PI)); + + var visionSysSim = new VisionSystemSim("Test"); + var camera = new PhotonCamera("camera"); + var cameraSim = new PhotonCameraSim(camera); + visionSysSim.addCamera(cameraSim, new Transform3d()); + cameraSim.prop.setCalibration(640, 480, Rotation2d.fromDegrees(80)); + cameraSim.setMinTargetAreaPixels(20.0); + + visionSysSim.addVisionTargets( + new VisionTargetSim( + targetPoseL.transformBy( + new Transform3d(new Translation3d(0, 0, 0.00), new Rotation3d())), + TargetModel.kTag16h5, + 1)); + visionSysSim.addVisionTargets( + new VisionTargetSim( + targetPoseC.transformBy( + new Transform3d(new Translation3d(0, 0, 0.00), new Rotation3d())), + TargetModel.kTag16h5, + 2)); + visionSysSim.addVisionTargets( + new VisionTargetSim( + targetPoseR.transformBy( + new Transform3d(new Translation3d(0, 0, 0.00), new Rotation3d())), + TargetModel.kTag16h5, + 3)); + visionSysSim.addVisionTargets( + new VisionTargetSim( + targetPoseL.transformBy( + new Transform3d(new Translation3d(0, 0, 1.00), new Rotation3d())), + TargetModel.kTag16h5, + 4)); + visionSysSim.addVisionTargets( + new VisionTargetSim( + targetPoseC.transformBy( + new Transform3d(new Translation3d(0, 0, 1.00), new Rotation3d())), + TargetModel.kTag16h5, + 5)); + visionSysSim.addVisionTargets( + new VisionTargetSim( + targetPoseR.transformBy( + new Transform3d(new Translation3d(0, 0, 1.00), new Rotation3d())), + 
TargetModel.kTag16h5, + 6)); + visionSysSim.addVisionTargets( + new VisionTargetSim( + targetPoseL.transformBy( + new Transform3d(new Translation3d(0, 0, 0.50), new Rotation3d())), + TargetModel.kTag16h5, + 7)); + visionSysSim.addVisionTargets( + new VisionTargetSim( + targetPoseC.transformBy( + new Transform3d(new Translation3d(0, 0, 0.50), new Rotation3d())), + TargetModel.kTag16h5, + 8)); + visionSysSim.addVisionTargets( + new VisionTargetSim( + targetPoseL.transformBy( + new Transform3d(new Translation3d(0, 0, 0.75), new Rotation3d())), + TargetModel.kTag16h5, + 9)); + visionSysSim.addVisionTargets( + new VisionTargetSim( + targetPoseR.transformBy( + new Transform3d(new Translation3d(0, 0, 0.75), new Rotation3d())), + TargetModel.kTag16h5, + 10)); + visionSysSim.addVisionTargets( + new VisionTargetSim( + targetPoseL.transformBy( + new Transform3d(new Translation3d(0, 0, 0.25), new Rotation3d())), + TargetModel.kTag16h5, + 11)); + + var robotPose = new Pose2d(new Translation2d(6.0, 0), Rotation2d.fromDegrees(0.25)); + visionSysSim.update(robotPose); + var res = camera.getLatestResult(); + assertTrue(res.hasTargets()); + List tgtList; + tgtList = res.getTargets(); + assertEquals(11, tgtList.size()); + } + + @Test + public void testPoseEstimation() { + var visionSysSim = new VisionSystemSim("Test"); + var camera = new PhotonCamera("camera"); + var cameraSim = new PhotonCameraSim(camera); + visionSysSim.addCamera(cameraSim, new Transform3d()); + cameraSim.prop.setCalibration(640, 480, Rotation2d.fromDegrees(90)); + cameraSim.setMinTargetAreaPixels(20.0); + + List tagList = new ArrayList<>(); + tagList.add(new AprilTag(0, new Pose3d(12, 3, 1, new Rotation3d(0, 0, Math.PI)))); + tagList.add(new AprilTag(1, new Pose3d(12, 1, -1, new Rotation3d(0, 0, Math.PI)))); + tagList.add(new AprilTag(2, new Pose3d(11, 0, 2, new Rotation3d(0, 0, Math.PI)))); + double fieldLength = Units.feetToMeters(54.0); + double fieldWidth = Units.feetToMeters(27.0); + AprilTagFieldLayout 
layout = new AprilTagFieldLayout(tagList, fieldLength, fieldWidth); + Pose2d robotPose = new Pose2d(5, 1, Rotation2d.fromDegrees(5)); + + visionSysSim.addVisionTargets( + new VisionTargetSim(tagList.get(0).pose, TargetModel.kTag16h5, 0)); + + visionSysSim.update(robotPose); + var results = + VisionEstimation.estimateCamPosePNP( + camera.getCameraMatrix().get(), + camera.getDistCoeffs().get(), + camera.getLatestResult().getTargets(), + layout); + Pose3d pose = new Pose3d().plus(results.best); + assertEquals(5, pose.getX(), .01); + assertEquals(1, pose.getY(), .01); + assertEquals(0, pose.getZ(), .01); + assertEquals(Math.toRadians(5), pose.getRotation().getZ(), 0.01); + + visionSysSim.addVisionTargets( + new VisionTargetSim(tagList.get(1).pose, TargetModel.kTag16h5, 1)); + visionSysSim.addVisionTargets( + new VisionTargetSim(tagList.get(2).pose, TargetModel.kTag16h5, 2)); + + visionSysSim.update(robotPose); + results = + VisionEstimation.estimateCamPosePNP( + camera.getCameraMatrix().get(), + camera.getDistCoeffs().get(), + camera.getLatestResult().getTargets(), + layout); + pose = new Pose3d().plus(results.best); + assertEquals(5, pose.getX(), .01); + assertEquals(1, pose.getY(), .01); + assertEquals(0, pose.getZ(), .01); + assertEquals(Math.toRadians(5), pose.getRotation().getZ(), 0.01); + } } diff --git a/photon-lib/src/test/java/org/photonvision/estimation/ApriltagWorkbenchTest.java b/photon-lib/src/test/java/org/photonvision/estimation/ApriltagWorkbenchTest.java index fa70467c5f..cbc247fc14 100644 --- a/photon-lib/src/test/java/org/photonvision/estimation/ApriltagWorkbenchTest.java +++ b/photon-lib/src/test/java/org/photonvision/estimation/ApriltagWorkbenchTest.java @@ -42,63 +42,63 @@ import org.photonvision.PhotonPoseEstimator; public class ApriltagWorkbenchTest { - @BeforeAll - public static void setUp() { - JNIWrapper.Helper.setExtractOnStaticLoad(false); - WPIUtilJNI.Helper.setExtractOnStaticLoad(false); - 
NetworkTablesJNI.Helper.setExtractOnStaticLoad(false); - WPINetJNI.Helper.setExtractOnStaticLoad(false); - CameraServerCvJNI.Helper.setExtractOnStaticLoad(false); + @BeforeAll + public static void setUp() { + JNIWrapper.Helper.setExtractOnStaticLoad(false); + WPIUtilJNI.Helper.setExtractOnStaticLoad(false); + NetworkTablesJNI.Helper.setExtractOnStaticLoad(false); + WPINetJNI.Helper.setExtractOnStaticLoad(false); + CameraServerCvJNI.Helper.setExtractOnStaticLoad(false); - try { - CombinedRuntimeLoader.loadLibraries( - ApriltagWorkbenchTest.class, - "wpiutiljni", - "ntcorejni", - "wpinetjni", - "wpiHaljni", - "cscorejnicvstatic"); - } catch (Exception e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - - // No version check for testing - PhotonCamera.setVersionCheckEnabled(false); + try { + CombinedRuntimeLoader.loadLibraries( + ApriltagWorkbenchTest.class, + "wpiutiljni", + "ntcorejni", + "wpinetjni", + "wpiHaljni", + "cscorejnicvstatic"); + } catch (Exception e) { + // TODO Auto-generated catch block + e.printStackTrace(); } - // @Test - public void testMeme() throws IOException, InterruptedException { - NetworkTableInstance instance = NetworkTableInstance.getDefault(); - instance.stopServer(); - // set the NT server if simulating this code. - // "localhost" for photon on desktop, or "photonvision.local" / "[ip-address]" - // for coprocessor - instance.setServer("localhost"); - instance.startClient4("myRobot"); + // No version check for testing + PhotonCamera.setVersionCheckEnabled(false); + } + + // @Test + public void testMeme() throws IOException, InterruptedException { + NetworkTableInstance instance = NetworkTableInstance.getDefault(); + instance.stopServer(); + // set the NT server if simulating this code. 
+ // "localhost" for photon on desktop, or "photonvision.local" / "[ip-address]" + // for coprocessor + instance.setServer("localhost"); + instance.startClient4("myRobot"); - var robotToCamera = new Transform3d(); - var cam = new PhotonCamera("WPI2023"); - var tagLayout = - AprilTagFieldLayout.loadFromResource(AprilTagFields.k2023ChargedUp.m_resourceFile); + var robotToCamera = new Transform3d(); + var cam = new PhotonCamera("WPI2023"); + var tagLayout = + AprilTagFieldLayout.loadFromResource(AprilTagFields.k2023ChargedUp.m_resourceFile); - var pe = - new PhotonPoseEstimator( - tagLayout, - PhotonPoseEstimator.PoseStrategy.MULTI_TAG_PNP_ON_COPROCESSOR, - cam, - robotToCamera); + var pe = + new PhotonPoseEstimator( + tagLayout, + PhotonPoseEstimator.PoseStrategy.MULTI_TAG_PNP_ON_COPROCESSOR, + cam, + robotToCamera); - var field = new Field2d(); - SmartDashboard.putData(field); + var field = new Field2d(); + SmartDashboard.putData(field); - while (!Thread.interrupted()) { - Thread.sleep(500); + while (!Thread.interrupted()) { + Thread.sleep(500); - var ret = pe.update(); - System.out.println(ret); + var ret = pe.update(); + System.out.println(ret); - field.setRobotPose(ret.get().estimatedPose.toPose2d()); - } + field.setRobotPose(ret.get().estimatedPose.toPose2d()); } + } } diff --git a/photon-server/build.gradle b/photon-server/build.gradle index 92b50a81b8..4a8b1691f1 100644 --- a/photon-server/build.gradle +++ b/photon-server/build.gradle @@ -50,7 +50,9 @@ dependencies { } shadowJar { - configurations = [project.configurations.runtimeClasspath] + configurations = [ + project.configurations.runtimeClasspath + ] String name = "photonvision-${project.version}" archiveClassifier.set(wpilibTools.platformMapper.currentPlatform.platformName) archiveBaseName = "photonvision" @@ -81,11 +83,11 @@ copyClientUIToResources.shouldRunAfter runNpmOnClient run { if (project.hasProperty("profile")) { jvmArgs=[ - "-Dcom.sun.management.jmxremote=true", - 
"-Dcom.sun.management.jmxremote.ssl=false", - "-Dcom.sun.management.jmxremote.authenticate=false", - "-Dcom.sun.management.jmxremote.port=5000", - "-Djava.rmi.server.hostname=0.0.0.0", + "-Dcom.sun.management.jmxremote=true", + "-Dcom.sun.management.jmxremote.ssl=false", + "-Dcom.sun.management.jmxremote.authenticate=false", + "-Dcom.sun.management.jmxremote.port=5000", + "-Djava.rmi.server.hostname=0.0.0.0", ] } } @@ -143,7 +145,7 @@ task findDeployTarget { } run { - environment "PATH_PREFIX", "../" + environment "PATH_PREFIX", "../" } // task overrideToPi { @@ -159,7 +161,7 @@ task deploy { dependsOn assemble dependsOn findDeployTarget - doLast { + doLast { println 'Starting deployment to ' + findDeployTarget.rmt.host println 'targetArch = ' + wpilibTools.platformMapper.currentPlatform.platformName ssh.run{ @@ -170,7 +172,7 @@ task deploy { execute 'sleep 3' // Copy into a folder owned by PI. Mostly because, as far as I can tell, the put command doesn't support sudo. put from: "${projectDir}/build/libs/photonvision-${project.version}-${wpilibTools.platformMapper.currentPlatform.platformName}.jar", into: "/tmp/photonvision.jar" - //belt-and-suspenders. Make sure the old jar is gone first. + //belt-and-suspenders. Make sure the old jar is gone first. 
execute 'sudo rm -f /opt/photonvision/photonvision.jar' //Copy in the new .jar and make sure it's executable execute 'sudo mv /tmp/photonvision.jar /opt/photonvision/photonvision.jar' diff --git a/photon-server/src/main/java/org/photonvision/Main.java b/photon-server/src/main/java/org/photonvision/Main.java index db8787208d..a683311479 100644 --- a/photon-server/src/main/java/org/photonvision/Main.java +++ b/photon-server/src/main/java/org/photonvision/Main.java @@ -55,326 +55,326 @@ import org.photonvision.vision.target.TargetModel; public class Main { - public static final int DEFAULT_WEBPORT = 5800; - - private static final Logger logger = new Logger(Main.class, LogGroup.General); - private static final boolean isRelease = PhotonVersion.isRelease; - - private static boolean isTestMode = false; - private static Path testModeFolder = null; - private static boolean printDebugLogs; - - private static boolean handleArgs(String[] args) throws ParseException { - final var options = new Options(); - options.addOption("d", "debug", false, "Enable debug logging prints"); - options.addOption("h", "help", false, "Show this help text and exit"); - options.addOption( - "t", - "test-mode", - false, - "Run in test mode with 2019 and 2020 WPI field images in place of cameras"); - - options.addOption("p", "path", true, "Point test mode to a specific folder"); - options.addOption( - "i", - "ignore-cameras", - true, - "Ignore cameras that match a regex. Uses camera name as provided by cscore."); - options.addOption("n", "disable-networking", false, "Disables control device network settings"); - options.addOption( - "c", - "clear-config", - false, - "Clears PhotonVision pipeline and networking settings. 
Preserves log files"); - - CommandLineParser parser = new DefaultParser(); - CommandLine cmd = parser.parse(options, args); - - if (cmd.hasOption("help")) { - HelpFormatter formatter = new HelpFormatter(); - formatter.printHelp("java -jar photonvision.jar [options]", options); - return false; // exit program - } else { - if (cmd.hasOption("debug")) { - printDebugLogs = true; - logger.info("Enabled debug logging"); - } - - if (cmd.hasOption("test-mode")) { - isTestMode = true; - logger.info("Running in test mode - Cameras will not be used"); - - if (cmd.hasOption("path")) { - Path p = Path.of(System.getProperty("PATH_PREFIX", "") + cmd.getOptionValue("path")); - logger.info("Loading from Path " + p.toAbsolutePath().toString()); - testModeFolder = p; - } - } - - if (cmd.hasOption("ignore-cameras")) { - VisionSourceManager.getInstance() - .setIgnoredCamerasRegex(cmd.getOptionValue("ignore-cameras")); - } - - if (cmd.hasOption("disable-networking")) { - NetworkManager.getInstance().networkingIsDisabled = true; - } - - if (cmd.hasOption("clear-config")) { - ConfigManager.getInstance().clearConfig(); - } + public static final int DEFAULT_WEBPORT = 5800; + + private static final Logger logger = new Logger(Main.class, LogGroup.General); + private static final boolean isRelease = PhotonVersion.isRelease; + + private static boolean isTestMode = false; + private static Path testModeFolder = null; + private static boolean printDebugLogs; + + private static boolean handleArgs(String[] args) throws ParseException { + final var options = new Options(); + options.addOption("d", "debug", false, "Enable debug logging prints"); + options.addOption("h", "help", false, "Show this help text and exit"); + options.addOption( + "t", + "test-mode", + false, + "Run in test mode with 2019 and 2020 WPI field images in place of cameras"); + + options.addOption("p", "path", true, "Point test mode to a specific folder"); + options.addOption( + "i", + "ignore-cameras", + true, + "Ignore cameras 
that match a regex. Uses camera name as provided by cscore."); + options.addOption("n", "disable-networking", false, "Disables control device network settings"); + options.addOption( + "c", + "clear-config", + false, + "Clears PhotonVision pipeline and networking settings. Preserves log files"); + + CommandLineParser parser = new DefaultParser(); + CommandLine cmd = parser.parse(options, args); + + if (cmd.hasOption("help")) { + HelpFormatter formatter = new HelpFormatter(); + formatter.printHelp("java -jar photonvision.jar [options]", options); + return false; // exit program + } else { + if (cmd.hasOption("debug")) { + printDebugLogs = true; + logger.info("Enabled debug logging"); + } + + if (cmd.hasOption("test-mode")) { + isTestMode = true; + logger.info("Running in test mode - Cameras will not be used"); + + if (cmd.hasOption("path")) { + Path p = Path.of(System.getProperty("PATH_PREFIX", "") + cmd.getOptionValue("path")); + logger.info("Loading from Path " + p.toAbsolutePath().toString()); + testModeFolder = p; } - return true; - } + } - private static void addTestModeFromFolder() { - ConfigManager.getInstance().load(); - - try { - List collectedSources = - Files.list(testModeFolder) - .filter(p -> p.toFile().isFile()) - .map( - p -> { - try { - CameraConfiguration camConf = - new CameraConfiguration( - p.getFileName().toString(), p.toAbsolutePath().toString()); - camConf.FOV = TestUtils.WPI2019Image.FOV; // Good guess? 
- camConf.addCalibration(TestUtils.get2020LifeCamCoeffs(false)); - - var pipeSettings = new AprilTagPipelineSettings(); - pipeSettings.pipelineNickname = p.getFileName().toString(); - pipeSettings.outputShowMultipleTargets = true; - pipeSettings.inputShouldShow = true; - pipeSettings.outputShouldShow = false; - pipeSettings.solvePNPEnabled = true; - - var aprilTag = new AprilTagPipelineSettings(); - var psList = new ArrayList(); - psList.add(aprilTag); - camConf.pipelineSettings = psList; - - return new FileVisionSource(camConf); - } catch (Exception e) { - logger.error("Couldn't load image " + p.getFileName().toString(), e); - return null; - } - }) - .filter(Objects::nonNull) - .collect(Collectors.toList()); - - ConfigManager.getInstance().unloadCameraConfigs(); - VisionModuleManager.getInstance().addSources(collectedSources).forEach(VisionModule::start); - ConfigManager.getInstance().addCameraConfigurations(collectedSources); - } catch (IOException e) { - logger.error("Path does not exist!"); - System.exit(1); - } - } + if (cmd.hasOption("ignore-cameras")) { + VisionSourceManager.getInstance() + .setIgnoredCamerasRegex(cmd.getOptionValue("ignore-cameras")); + } - private static void addTestModeSources() { - ConfigManager.getInstance().load(); - - var camConf2019 = - ConfigManager.getInstance().getConfig().getCameraConfigurations().get("WPI2019"); - if (camConf2019 == null) { - camConf2019 = - new CameraConfiguration("WPI2019", TestUtils.getTestMode2019ImagePath().toString()); - camConf2019.FOV = TestUtils.WPI2019Image.FOV; - camConf2019.calibrations.add(TestUtils.get2019LifeCamCoeffs(true)); - - var pipeline2019 = new ReflectivePipelineSettings(); - pipeline2019.pipelineNickname = "CargoShip"; - pipeline2019.targetModel = TargetModel.k2019DualTarget; - pipeline2019.outputShowMultipleTargets = true; - pipeline2019.contourGroupingMode = ContourGroupingMode.Dual; - pipeline2019.inputShouldShow = true; - - var psList2019 = new ArrayList(); - 
psList2019.add(pipeline2019); - camConf2019.pipelineSettings = psList2019; - } + if (cmd.hasOption("disable-networking")) { + NetworkManager.getInstance().networkingIsDisabled = true; + } - var camConf2020 = - ConfigManager.getInstance().getConfig().getCameraConfigurations().get("WPI2020"); - if (camConf2020 == null) { - camConf2020 = - new CameraConfiguration("WPI2020", TestUtils.getTestMode2020ImagePath().toString()); - camConf2020.FOV = TestUtils.WPI2020Image.FOV; - camConf2020.calibrations.add(TestUtils.get2019LifeCamCoeffs(true)); - - var pipeline2020 = new ReflectivePipelineSettings(); - pipeline2020.pipelineNickname = "OuterPort"; - pipeline2020.targetModel = TargetModel.k2020HighGoalOuter; - camConf2020.calibrations.add(TestUtils.get2020LifeCamCoeffs(true)); - pipeline2020.inputShouldShow = true; - - var psList2020 = new ArrayList(); - psList2020.add(pipeline2020); - camConf2020.pipelineSettings = psList2020; - } - - var camConf2022 = - ConfigManager.getInstance().getConfig().getCameraConfigurations().get("WPI2022"); - if (camConf2022 == null) { - camConf2022 = - new CameraConfiguration("WPI2022", TestUtils.getTestMode2022ImagePath().toString()); - camConf2022.FOV = TestUtils.WPI2022Image.FOV; - camConf2022.calibrations.add(TestUtils.get2019LifeCamCoeffs(true)); - - var pipeline2022 = new ReflectivePipelineSettings(); - pipeline2022.pipelineNickname = "OuterPort"; - pipeline2022.targetModel = TargetModel.k2020HighGoalOuter; - pipeline2022.inputShouldShow = true; - // camConf2020.calibrations.add(TestUtils.get2020LifeCamCoeffs(true)); - - var psList2022 = new ArrayList(); - psList2022.add(pipeline2022); - camConf2022.pipelineSettings = psList2022; - } - - CameraConfiguration camConf2023 = - ConfigManager.getInstance().getConfig().getCameraConfigurations().get("WPI2023"); - if (camConf2023 == null) { - camConf2023 = - new CameraConfiguration( - "WPI2023", - TestUtils.getResourcesFolderPath(true) - .resolve("testimages") - 
.resolve(TestUtils.WPI2023Apriltags.k383_60_Angle2.path) - .toString()); - - camConf2023.FOV = TestUtils.WPI2023Apriltags.FOV; - camConf2023.calibrations.add(TestUtils.get2023LifeCamCoeffs(true)); - - var pipeline2023 = new AprilTagPipelineSettings(); - var path_split = Path.of(camConf2023.path).getFileName().toString(); - pipeline2023.pipelineNickname = path_split.replace(".png", ""); - pipeline2023.targetModel = TargetModel.k6in_16h5; - pipeline2023.inputShouldShow = true; - pipeline2023.solvePNPEnabled = true; - - var psList2023 = new ArrayList(); - psList2023.add(pipeline2023); - camConf2023.pipelineSettings = psList2023; - } - - // Colored shape testing - var camConfShape = - ConfigManager.getInstance().getConfig().getCameraConfigurations().get("Shape"); - - // If we haven't saved shape settings, create a new one - if (camConfShape == null) { - camConfShape = - new CameraConfiguration( - "Shape", - TestUtils.getPowercellImagePath(TestUtils.PowercellTestImages.kPowercell_test_1, true) - .toString()); - var settings = new ColoredShapePipelineSettings(); - settings.hsvHue = new IntegerCouple(0, 35); - settings.hsvSaturation = new IntegerCouple(82, 255); - settings.hsvValue = new IntegerCouple(62, 255); - settings.contourShape = ContourShape.Triangle; - settings.outputShowMultipleTargets = true; - settings.circleAccuracy = 15; - settings.inputShouldShow = true; - camConfShape.addPipelineSetting(settings); - } - - var collectedSources = new ArrayList(); + if (cmd.hasOption("clear-config")) { + ConfigManager.getInstance().clearConfig(); + } + } + return true; + } + + private static void addTestModeFromFolder() { + ConfigManager.getInstance().load(); + + try { + List collectedSources = + Files.list(testModeFolder) + .filter(p -> p.toFile().isFile()) + .map( + p -> { + try { + CameraConfiguration camConf = + new CameraConfiguration( + p.getFileName().toString(), p.toAbsolutePath().toString()); + camConf.FOV = TestUtils.WPI2019Image.FOV; // Good guess? 
+ camConf.addCalibration(TestUtils.get2020LifeCamCoeffs(false)); + + var pipeSettings = new AprilTagPipelineSettings(); + pipeSettings.pipelineNickname = p.getFileName().toString(); + pipeSettings.outputShowMultipleTargets = true; + pipeSettings.inputShouldShow = true; + pipeSettings.outputShouldShow = false; + pipeSettings.solvePNPEnabled = true; + + var aprilTag = new AprilTagPipelineSettings(); + var psList = new ArrayList(); + psList.add(aprilTag); + camConf.pipelineSettings = psList; + + return new FileVisionSource(camConf); + } catch (Exception e) { + logger.error("Couldn't load image " + p.getFileName().toString(), e); + return null; + } + }) + .filter(Objects::nonNull) + .collect(Collectors.toList()); + + ConfigManager.getInstance().unloadCameraConfigs(); + VisionModuleManager.getInstance().addSources(collectedSources).forEach(VisionModule::start); + ConfigManager.getInstance().addCameraConfigurations(collectedSources); + } catch (IOException e) { + logger.error("Path does not exist!"); + System.exit(1); + } + } + + private static void addTestModeSources() { + ConfigManager.getInstance().load(); + + var camConf2019 = + ConfigManager.getInstance().getConfig().getCameraConfigurations().get("WPI2019"); + if (camConf2019 == null) { + camConf2019 = + new CameraConfiguration("WPI2019", TestUtils.getTestMode2019ImagePath().toString()); + camConf2019.FOV = TestUtils.WPI2019Image.FOV; + camConf2019.calibrations.add(TestUtils.get2019LifeCamCoeffs(true)); + + var pipeline2019 = new ReflectivePipelineSettings(); + pipeline2019.pipelineNickname = "CargoShip"; + pipeline2019.targetModel = TargetModel.k2019DualTarget; + pipeline2019.outputShowMultipleTargets = true; + pipeline2019.contourGroupingMode = ContourGroupingMode.Dual; + pipeline2019.inputShouldShow = true; + + var psList2019 = new ArrayList(); + psList2019.add(pipeline2019); + camConf2019.pipelineSettings = psList2019; + } - var fvsShape = new FileVisionSource(camConfShape); - var fvs2019 = new 
FileVisionSource(camConf2019); - var fvs2020 = new FileVisionSource(camConf2020); - var fvs2022 = new FileVisionSource(camConf2022); - var fvs2023 = new FileVisionSource(camConf2023); + var camConf2020 = + ConfigManager.getInstance().getConfig().getCameraConfigurations().get("WPI2020"); + if (camConf2020 == null) { + camConf2020 = + new CameraConfiguration("WPI2020", TestUtils.getTestMode2020ImagePath().toString()); + camConf2020.FOV = TestUtils.WPI2020Image.FOV; + camConf2020.calibrations.add(TestUtils.get2019LifeCamCoeffs(true)); + + var pipeline2020 = new ReflectivePipelineSettings(); + pipeline2020.pipelineNickname = "OuterPort"; + pipeline2020.targetModel = TargetModel.k2020HighGoalOuter; + camConf2020.calibrations.add(TestUtils.get2020LifeCamCoeffs(true)); + pipeline2020.inputShouldShow = true; + + var psList2020 = new ArrayList(); + psList2020.add(pipeline2020); + camConf2020.pipelineSettings = psList2020; + } - collectedSources.add(fvs2023); - collectedSources.add(fvs2022); - collectedSources.add(fvsShape); - collectedSources.add(fvs2020); - collectedSources.add(fvs2019); + var camConf2022 = + ConfigManager.getInstance().getConfig().getCameraConfigurations().get("WPI2022"); + if (camConf2022 == null) { + camConf2022 = + new CameraConfiguration("WPI2022", TestUtils.getTestMode2022ImagePath().toString()); + camConf2022.FOV = TestUtils.WPI2022Image.FOV; + camConf2022.calibrations.add(TestUtils.get2019LifeCamCoeffs(true)); + + var pipeline2022 = new ReflectivePipelineSettings(); + pipeline2022.pipelineNickname = "OuterPort"; + pipeline2022.targetModel = TargetModel.k2020HighGoalOuter; + pipeline2022.inputShouldShow = true; + // camConf2020.calibrations.add(TestUtils.get2020LifeCamCoeffs(true)); + + var psList2022 = new ArrayList(); + psList2022.add(pipeline2022); + camConf2022.pipelineSettings = psList2022; + } - ConfigManager.getInstance().unloadCameraConfigs(); - VisionModuleManager.getInstance().addSources(collectedSources).forEach(VisionModule::start); - 
ConfigManager.getInstance().addCameraConfigurations(collectedSources); + CameraConfiguration camConf2023 = + ConfigManager.getInstance().getConfig().getCameraConfigurations().get("WPI2023"); + if (camConf2023 == null) { + camConf2023 = + new CameraConfiguration( + "WPI2023", + TestUtils.getResourcesFolderPath(true) + .resolve("testimages") + .resolve(TestUtils.WPI2023Apriltags.k383_60_Angle2.path) + .toString()); + + camConf2023.FOV = TestUtils.WPI2023Apriltags.FOV; + camConf2023.calibrations.add(TestUtils.get2023LifeCamCoeffs(true)); + + var pipeline2023 = new AprilTagPipelineSettings(); + var path_split = Path.of(camConf2023.path).getFileName().toString(); + pipeline2023.pipelineNickname = path_split.replace(".png", ""); + pipeline2023.targetModel = TargetModel.k6in_16h5; + pipeline2023.inputShouldShow = true; + pipeline2023.solvePNPEnabled = true; + + var psList2023 = new ArrayList(); + psList2023.add(pipeline2023); + camConf2023.pipelineSettings = psList2023; } - public static void main(String[] args) { - try { - TestUtils.loadLibraries(); - logger.info("Native libraries loaded."); - } catch (Exception e) { - logger.error("Failed to load native libraries!", e); - } + // Colored shape testing + var camConfShape = + ConfigManager.getInstance().getConfig().getCameraConfigurations().get("Shape"); + + // If we haven't saved shape settings, create a new one + if (camConfShape == null) { + camConfShape = + new CameraConfiguration( + "Shape", + TestUtils.getPowercellImagePath(TestUtils.PowercellTestImages.kPowercell_test_1, true) + .toString()); + var settings = new ColoredShapePipelineSettings(); + settings.hsvHue = new IntegerCouple(0, 35); + settings.hsvSaturation = new IntegerCouple(82, 255); + settings.hsvValue = new IntegerCouple(62, 255); + settings.contourShape = ContourShape.Triangle; + settings.outputShowMultipleTargets = true; + settings.circleAccuracy = 15; + settings.inputShouldShow = true; + camConfShape.addPipelineSetting(settings); + } - try { - if 
(Platform.isRaspberryPi()) { - LibCameraJNI.forceLoad(); - } - } catch (IOException e) { - logger.error("Failed to load libcamera-JNI!", e); - } + var collectedSources = new ArrayList(); + + var fvsShape = new FileVisionSource(camConfShape); + var fvs2019 = new FileVisionSource(camConf2019); + var fvs2020 = new FileVisionSource(camConf2020); + var fvs2022 = new FileVisionSource(camConf2022); + var fvs2023 = new FileVisionSource(camConf2023); + + collectedSources.add(fvs2023); + collectedSources.add(fvs2022); + collectedSources.add(fvsShape); + collectedSources.add(fvs2020); + collectedSources.add(fvs2019); + + ConfigManager.getInstance().unloadCameraConfigs(); + VisionModuleManager.getInstance().addSources(collectedSources).forEach(VisionModule::start); + ConfigManager.getInstance().addCameraConfigurations(collectedSources); + } + + public static void main(String[] args) { + try { + TestUtils.loadLibraries(); + logger.info("Native libraries loaded."); + } catch (Exception e) { + logger.error("Failed to load native libraries!", e); + } - try { - if (!handleArgs(args)) { - System.exit(0); - } - } catch (ParseException e) { - logger.error("Failed to parse command-line options!", e); - } + try { + if (Platform.isRaspberryPi()) { + LibCameraJNI.forceLoad(); + } + } catch (IOException e) { + logger.error("Failed to load libcamera-JNI!", e); + } - CVMat.enablePrint(false); - PipelineProfiler.enablePrint(false); - - var logLevel = printDebugLogs ? LogLevel.TRACE : LogLevel.DEBUG; - Logger.setLevel(LogGroup.Camera, logLevel); - Logger.setLevel(LogGroup.WebServer, logLevel); - Logger.setLevel(LogGroup.VisionModule, logLevel); - Logger.setLevel(LogGroup.Data, logLevel); - Logger.setLevel(LogGroup.Config, logLevel); - Logger.setLevel(LogGroup.General, logLevel); - logger.info("Logging initialized in debug mode."); - - logger.info( - "Starting PhotonVision version " - + PhotonVersion.versionString - + " on " - + Platform.getPlatformName() - + (Platform.isRaspberryPi() ? 
(" (Pi " + PiVersion.getPiVersion() + ")") : "")); - - logger.debug("Loading ConfigManager..."); - ConfigManager.getInstance().load(); // init config manager - ConfigManager.getInstance().requestSave(); - - logger.debug("Loading HardwareManager..."); - // Force load the hardware manager - HardwareManager.getInstance(); - - logger.debug("Loading NetworkManager..."); - NetworkManager.getInstance().reinitialize(); - - logger.debug("Loading NetworkTablesManager..."); - NetworkTablesManager.getInstance() - .setConfig(ConfigManager.getInstance().getConfig().getNetworkConfig()); - - if (!isTestMode) { - logger.debug("Loading VisionSourceManager..."); - VisionSourceManager.getInstance() - .registerLoadedConfigs( - ConfigManager.getInstance().getConfig().getCameraConfigurations().values()); - - VisionSourceManager.getInstance().registerTimedTask(); - } else { - if (testModeFolder == null) { - addTestModeSources(); - } else { - addTestModeFromFolder(); - } - } + try { + if (!handleArgs(args)) { + System.exit(0); + } + } catch (ParseException e) { + logger.error("Failed to parse command-line options!", e); + } - logger.info("Starting server..."); - Server.start(DEFAULT_WEBPORT); + CVMat.enablePrint(false); + PipelineProfiler.enablePrint(false); + + var logLevel = printDebugLogs ? LogLevel.TRACE : LogLevel.DEBUG; + Logger.setLevel(LogGroup.Camera, logLevel); + Logger.setLevel(LogGroup.WebServer, logLevel); + Logger.setLevel(LogGroup.VisionModule, logLevel); + Logger.setLevel(LogGroup.Data, logLevel); + Logger.setLevel(LogGroup.Config, logLevel); + Logger.setLevel(LogGroup.General, logLevel); + logger.info("Logging initialized in debug mode."); + + logger.info( + "Starting PhotonVision version " + + PhotonVersion.versionString + + " on " + + Platform.getPlatformName() + + (Platform.isRaspberryPi() ? 
(" (Pi " + PiVersion.getPiVersion() + ")") : "")); + + logger.debug("Loading ConfigManager..."); + ConfigManager.getInstance().load(); // init config manager + ConfigManager.getInstance().requestSave(); + + logger.debug("Loading HardwareManager..."); + // Force load the hardware manager + HardwareManager.getInstance(); + + logger.debug("Loading NetworkManager..."); + NetworkManager.getInstance().reinitialize(); + + logger.debug("Loading NetworkTablesManager..."); + NetworkTablesManager.getInstance() + .setConfig(ConfigManager.getInstance().getConfig().getNetworkConfig()); + + if (!isTestMode) { + logger.debug("Loading VisionSourceManager..."); + VisionSourceManager.getInstance() + .registerLoadedConfigs( + ConfigManager.getInstance().getConfig().getCameraConfigurations().values()); + + VisionSourceManager.getInstance().registerTimedTask(); + } else { + if (testModeFolder == null) { + addTestModeSources(); + } else { + addTestModeFromFolder(); + } } + + logger.info("Starting server..."); + Server.start(DEFAULT_WEBPORT); + } } diff --git a/photon-server/src/main/java/org/photonvision/server/CameraSocketHandler.java b/photon-server/src/main/java/org/photonvision/server/CameraSocketHandler.java index d93f9a9e44..3a44f09f16 100644 --- a/photon-server/src/main/java/org/photonvision/server/CameraSocketHandler.java +++ b/photon-server/src/main/java/org/photonvision/server/CameraSocketHandler.java @@ -35,109 +35,109 @@ import org.photonvision.vision.videoStream.SocketVideoStreamManager; public class CameraSocketHandler { - private final Logger logger = new Logger(CameraSocketHandler.class, LogGroup.WebServer); - private final List users = new CopyOnWriteArrayList<>(); - private final SocketVideoStreamManager svsManager = SocketVideoStreamManager.getInstance(); - - private Thread cameraBroadcastThread; - - public static class UIMap extends HashMap {} - - private static class ThreadSafeSingleton { - private static final CameraSocketHandler INSTANCE = new 
CameraSocketHandler(); - } - - public static CameraSocketHandler getInstance() { - return CameraSocketHandler.ThreadSafeSingleton.INSTANCE; - } - - private CameraSocketHandler() { - cameraBroadcastThread = new Thread(this::broadcastFramesTask); - cameraBroadcastThread.setPriority(Thread.MAX_PRIORITY - 3); // fairly high priority - cameraBroadcastThread.start(); - } - - public void onConnect(WsConnectContext context) { - context.session.setIdleTimeout( - Duration.ofMillis(Long.MAX_VALUE)); // TODO: determine better value - var remote = (InetSocketAddress) context.session.getRemoteAddress(); - var host = remote.getAddress().toString() + ":" + remote.getPort(); - logger.info("New camera websocket connection from " + host); - users.add(context); - } - - protected void onClose(WsCloseContext context) { - var remote = (InetSocketAddress) context.session.getRemoteAddress(); - var host = remote.getAddress().toString() + ":" + remote.getPort(); - var reason = context.reason() != null ? context.reason() : "Connection closed by client"; - logger.info("Closing camera websocket connection from " + host + " for reason: " + reason); - svsManager.removeSubscription(context); - users.remove(context); - } - - @SuppressWarnings({"unchecked"}) - public void onMessage(WsMessageContext context) { - var messageStr = context.message(); - ObjectMapper mapper = new ObjectMapper(); - try { - JsonNode actualObj = mapper.readTree(messageStr); - - try { - var entryCmd = actualObj.get("cmd").asText(); - var socketMessageType = CameraSocketMessageType.fromEntryKey(entryCmd); - - logger.trace(() -> "Got Camera WS message: [" + socketMessageType + "]"); - - if (socketMessageType == null) { - logger.warn("Got unknown socket message command: " + entryCmd); - } - - switch (socketMessageType) { - case CSMT_SUBSCRIBE: - { - int portId = actualObj.get("port").asInt(); - svsManager.addSubscription(context, portId); - break; - } - case CSMT_UNSUBSCRIBE: - { - svsManager.removeSubscription(context); - 
break; - } - } - } catch (Exception e) { - logger.error("Failed to parse message!", e); - } - - } catch (JsonProcessingException e) { - logger.warn("Could not parse message \"" + messageStr + "\""); - e.printStackTrace(); - return; + private final Logger logger = new Logger(CameraSocketHandler.class, LogGroup.WebServer); + private final List users = new CopyOnWriteArrayList<>(); + private final SocketVideoStreamManager svsManager = SocketVideoStreamManager.getInstance(); + + private Thread cameraBroadcastThread; + + public static class UIMap extends HashMap {} + + private static class ThreadSafeSingleton { + private static final CameraSocketHandler INSTANCE = new CameraSocketHandler(); + } + + public static CameraSocketHandler getInstance() { + return CameraSocketHandler.ThreadSafeSingleton.INSTANCE; + } + + private CameraSocketHandler() { + cameraBroadcastThread = new Thread(this::broadcastFramesTask); + cameraBroadcastThread.setPriority(Thread.MAX_PRIORITY - 3); // fairly high priority + cameraBroadcastThread.start(); + } + + public void onConnect(WsConnectContext context) { + context.session.setIdleTimeout( + Duration.ofMillis(Long.MAX_VALUE)); // TODO: determine better value + var remote = (InetSocketAddress) context.session.getRemoteAddress(); + var host = remote.getAddress().toString() + ":" + remote.getPort(); + logger.info("New camera websocket connection from " + host); + users.add(context); + } + + protected void onClose(WsCloseContext context) { + var remote = (InetSocketAddress) context.session.getRemoteAddress(); + var host = remote.getAddress().toString() + ":" + remote.getPort(); + var reason = context.reason() != null ? 
context.reason() : "Connection closed by client"; + logger.info("Closing camera websocket connection from " + host + " for reason: " + reason); + svsManager.removeSubscription(context); + users.remove(context); + } + + @SuppressWarnings({"unchecked"}) + public void onMessage(WsMessageContext context) { + var messageStr = context.message(); + ObjectMapper mapper = new ObjectMapper(); + try { + JsonNode actualObj = mapper.readTree(messageStr); + + try { + var entryCmd = actualObj.get("cmd").asText(); + var socketMessageType = CameraSocketMessageType.fromEntryKey(entryCmd); + + logger.trace(() -> "Got Camera WS message: [" + socketMessageType + "]"); + + if (socketMessageType == null) { + logger.warn("Got unknown socket message command: " + entryCmd); } - } - @SuppressWarnings({"unchecked"}) - public void onBinaryMessage(WsBinaryMessageContext context) { - return; // ignoring binary messages for now - } - - private void broadcastFramesTask() { - // Background camera image broadcasting thread - while (!Thread.currentThread().isInterrupted()) { - svsManager.allStreamConvertNextFrame(); - - try { - Thread.sleep(1); - } catch (InterruptedException e) { - logger.error("Exception waiting for camera stream broadcast semaphore", e); + switch (socketMessageType) { + case CSMT_SUBSCRIBE: + { + int portId = actualObj.get("port").asInt(); + svsManager.addSubscription(context, portId); + break; } - - for (var user : users) { - var sendBytes = svsManager.getSendFrame(user); - if (sendBytes != null) { - user.send(sendBytes); - } + case CSMT_UNSUBSCRIBE: + { + svsManager.removeSubscription(context); + break; } } + } catch (Exception e) { + logger.error("Failed to parse message!", e); + } + + } catch (JsonProcessingException e) { + logger.warn("Could not parse message \"" + messageStr + "\""); + e.printStackTrace(); + return; + } + } + + @SuppressWarnings({"unchecked"}) + public void onBinaryMessage(WsBinaryMessageContext context) { + return; // ignoring binary messages for now + } + 
+ private void broadcastFramesTask() { + // Background camera image broadcasting thread + while (!Thread.currentThread().isInterrupted()) { + svsManager.allStreamConvertNextFrame(); + + try { + Thread.sleep(1); + } catch (InterruptedException e) { + logger.error("Exception waiting for camera stream broadcast semaphore", e); + } + + for (var user : users) { + var sendBytes = svsManager.getSendFrame(user); + if (sendBytes != null) { + user.send(sendBytes); + } + } } + } } diff --git a/photon-server/src/main/java/org/photonvision/server/CameraSocketMessageType.java b/photon-server/src/main/java/org/photonvision/server/CameraSocketMessageType.java index 74841e2022..fda054d924 100644 --- a/photon-server/src/main/java/org/photonvision/server/CameraSocketMessageType.java +++ b/photon-server/src/main/java/org/photonvision/server/CameraSocketMessageType.java @@ -23,24 +23,24 @@ @SuppressWarnings("unused") public enum CameraSocketMessageType { - CSMT_SUBSCRIBE("subscribe"), - CSMT_UNSUBSCRIBE("unsubscribe"); + CSMT_SUBSCRIBE("subscribe"), + CSMT_UNSUBSCRIBE("unsubscribe"); - public final String entryKey; + public final String entryKey; - CameraSocketMessageType(String entryKey) { - this.entryKey = entryKey; - } + CameraSocketMessageType(String entryKey) { + this.entryKey = entryKey; + } - private static final Map entryKeyToValueMap = new HashMap<>(); + private static final Map entryKeyToValueMap = new HashMap<>(); - static { - for (var value : EnumSet.allOf(CameraSocketMessageType.class)) { - entryKeyToValueMap.put(value.entryKey, value); - } + static { + for (var value : EnumSet.allOf(CameraSocketMessageType.class)) { + entryKeyToValueMap.put(value.entryKey, value); } + } - public static CameraSocketMessageType fromEntryKey(String entryKey) { - return entryKeyToValueMap.get(entryKey); - } + public static CameraSocketMessageType fromEntryKey(String entryKey) { + return entryKeyToValueMap.get(entryKey); + } } diff --git 
a/photon-server/src/main/java/org/photonvision/server/DataSocketHandler.java b/photon-server/src/main/java/org/photonvision/server/DataSocketHandler.java index f7f0345723..92e1f40064 100644 --- a/photon-server/src/main/java/org/photonvision/server/DataSocketHandler.java +++ b/photon-server/src/main/java/org/photonvision/server/DataSocketHandler.java @@ -45,327 +45,327 @@ @SuppressWarnings("rawtypes") public class DataSocketHandler { - private final Logger logger = new Logger(DataSocketHandler.class, LogGroup.WebServer); - private final List users = new CopyOnWriteArrayList<>(); - private final ObjectMapper objectMapper = new ObjectMapper(new MessagePackFactory()); - private final DataChangeService dcService = DataChangeService.getInstance(); + private final Logger logger = new Logger(DataSocketHandler.class, LogGroup.WebServer); + private final List users = new CopyOnWriteArrayList<>(); + private final ObjectMapper objectMapper = new ObjectMapper(new MessagePackFactory()); + private final DataChangeService dcService = DataChangeService.getInstance(); - @SuppressWarnings("FieldCanBeLocal") - private final UIOutboundSubscriber uiOutboundSubscriber = new UIOutboundSubscriber(this); + @SuppressWarnings("FieldCanBeLocal") + private final UIOutboundSubscriber uiOutboundSubscriber = new UIOutboundSubscriber(this); - public static class UIMap extends HashMap {} + public static class UIMap extends HashMap {} - private static class ThreadSafeSingleton { - private static final DataSocketHandler INSTANCE = new DataSocketHandler(); - } + private static class ThreadSafeSingleton { + private static final DataSocketHandler INSTANCE = new DataSocketHandler(); + } - public static DataSocketHandler getInstance() { - return DataSocketHandler.ThreadSafeSingleton.INSTANCE; - } + public static DataSocketHandler getInstance() { + return DataSocketHandler.ThreadSafeSingleton.INSTANCE; + } - private DataSocketHandler() { - dcService.addSubscribers( - uiOutboundSubscriber, - new 
UIInboundSubscriber()); // Subscribe outgoing messages to the data change service - } + private DataSocketHandler() { + dcService.addSubscribers( + uiOutboundSubscriber, + new UIInboundSubscriber()); // Subscribe outgoing messages to the data change service + } - public void onConnect(WsConnectContext context) { - context.session.setIdleTimeout( - Duration.ofMillis(Long.MAX_VALUE)); // TODO: determine better value - var remote = (InetSocketAddress) context.session.getRemoteAddress(); - var host = remote.getAddress().toString() + ":" + remote.getPort(); - logger.info("New websocket connection from " + host); - users.add(context); - dcService.publishEvent( - new IncomingWebSocketEvent<>( - DataChangeDestination.DCD_GENSETTINGS, "userConnected", context)); - } + public void onConnect(WsConnectContext context) { + context.session.setIdleTimeout( + Duration.ofMillis(Long.MAX_VALUE)); // TODO: determine better value + var remote = (InetSocketAddress) context.session.getRemoteAddress(); + var host = remote.getAddress().toString() + ":" + remote.getPort(); + logger.info("New websocket connection from " + host); + users.add(context); + dcService.publishEvent( + new IncomingWebSocketEvent<>( + DataChangeDestination.DCD_GENSETTINGS, "userConnected", context)); + } - protected void onClose(WsCloseContext context) { - var remote = (InetSocketAddress) context.session.getRemoteAddress(); - var host = remote.getAddress().toString() + ":" + remote.getPort(); - var reason = context.reason() != null ? context.reason() : "Connection closed by client"; - logger.info("Closing websocket connection from " + host + " for reason: " + reason); - users.remove(context); - } + protected void onClose(WsCloseContext context) { + var remote = (InetSocketAddress) context.session.getRemoteAddress(); + var host = remote.getAddress().toString() + ":" + remote.getPort(); + var reason = context.reason() != null ? 
context.reason() : "Connection closed by client"; + logger.info("Closing websocket connection from " + host + " for reason: " + reason); + users.remove(context); + } - @SuppressWarnings({"unchecked"}) - public void onBinaryMessage(WsBinaryMessageContext context) { - try { - Map deserializedData = - objectMapper.readValue(context.data(), new TypeReference<>() {}); + @SuppressWarnings({"unchecked"}) + public void onBinaryMessage(WsBinaryMessageContext context) { + try { + Map deserializedData = + objectMapper.readValue(context.data(), new TypeReference<>() {}); - // Special case the current camera index - var camIndexValue = deserializedData.get("cameraIndex"); - Integer cameraIndex = null; - if (camIndexValue instanceof Integer) { - cameraIndex = (Integer) camIndexValue; - deserializedData.remove("cameraIndex"); - } + // Special case the current camera index + var camIndexValue = deserializedData.get("cameraIndex"); + Integer cameraIndex = null; + if (camIndexValue instanceof Integer) { + cameraIndex = (Integer) camIndexValue; + deserializedData.remove("cameraIndex"); + } - for (Map.Entry entry : deserializedData.entrySet()) { - try { - var entryKey = entry.getKey(); - var entryValue = entry.getValue(); - var socketMessageType = DataSocketMessageType.fromEntryKey(entryKey); + for (Map.Entry entry : deserializedData.entrySet()) { + try { + var entryKey = entry.getKey(); + var entryValue = entry.getValue(); + var socketMessageType = DataSocketMessageType.fromEntryKey(entryKey); - logger.trace( - () -> - "Got WS message: [" - + socketMessageType - + "] ==> [" - + entryKey - + "], [" - + entryValue - + "]"); + logger.trace( + () -> + "Got WS message: [" + + socketMessageType + + "] ==> [" + + entryKey + + "], [" + + entryValue + + "]"); - if (socketMessageType == null) { - logger.warn("Got unknown socket message type: " + entryKey); - continue; - } + if (socketMessageType == null) { + logger.warn("Got unknown socket message type: " + entryKey); + continue; + } - switch 
(socketMessageType) { - case SMT_DRIVERMODE: - { - // TODO: what is this event? - var data = (HashMap) entryValue; - var dmExpEvent = - new IncomingWebSocketEvent( - DataChangeDestination.DCD_ACTIVEMODULE, "driverExposure", data); - var dmBrightEvent = - new IncomingWebSocketEvent( - DataChangeDestination.DCD_ACTIVEMODULE, "driverBrightness", data); - var dmIsDriverEvent = - new IncomingWebSocketEvent( - DataChangeDestination.DCD_ACTIVEMODULE, "isDriver", data); + switch (socketMessageType) { + case SMT_DRIVERMODE: + { + // TODO: what is this event? + var data = (HashMap) entryValue; + var dmExpEvent = + new IncomingWebSocketEvent( + DataChangeDestination.DCD_ACTIVEMODULE, "driverExposure", data); + var dmBrightEvent = + new IncomingWebSocketEvent( + DataChangeDestination.DCD_ACTIVEMODULE, "driverBrightness", data); + var dmIsDriverEvent = + new IncomingWebSocketEvent( + DataChangeDestination.DCD_ACTIVEMODULE, "isDriver", data); - dcService.publishEvents(dmExpEvent, dmBrightEvent, dmIsDriverEvent); - break; - } - case SMT_CHANGECAMERANAME: - { - var ccnEvent = - new IncomingWebSocketEvent<>( - DataChangeDestination.DCD_ACTIVEMODULE, - "cameraNickname", - (String) entryValue, - cameraIndex, - context); - dcService.publishEvent(ccnEvent); - break; - } - case SMT_CHANGEPIPELINENAME: - { - var cpnEvent = - new IncomingWebSocketEvent<>( - DataChangeDestination.DCD_ACTIVEMODULE, - "pipelineName", - (String) entryValue, - cameraIndex, - context); - dcService.publishEvent(cpnEvent); - break; - } - case SMT_ADDNEWPIPELINE: - { - // HashMap data = (HashMap) entryValue; - // var type = (PipelineType) - // data.get("pipelineType"); - // var name = (String) data.get("pipelineName"); - var arr = (ArrayList) entryValue; - var name = (String) arr.get(0); - var type = PipelineType.values()[(Integer) arr.get(1) + 2]; + dcService.publishEvents(dmExpEvent, dmBrightEvent, dmIsDriverEvent); + break; + } + case SMT_CHANGECAMERANAME: + { + var ccnEvent = + new IncomingWebSocketEvent<>( + 
DataChangeDestination.DCD_ACTIVEMODULE, + "cameraNickname", + (String) entryValue, + cameraIndex, + context); + dcService.publishEvent(ccnEvent); + break; + } + case SMT_CHANGEPIPELINENAME: + { + var cpnEvent = + new IncomingWebSocketEvent<>( + DataChangeDestination.DCD_ACTIVEMODULE, + "pipelineName", + (String) entryValue, + cameraIndex, + context); + dcService.publishEvent(cpnEvent); + break; + } + case SMT_ADDNEWPIPELINE: + { + // HashMap data = (HashMap) entryValue; + // var type = (PipelineType) + // data.get("pipelineType"); + // var name = (String) data.get("pipelineName"); + var arr = (ArrayList) entryValue; + var name = (String) arr.get(0); + var type = PipelineType.values()[(Integer) arr.get(1) + 2]; - var newPipelineEvent = - new IncomingWebSocketEvent<>( - DataChangeDestination.DCD_ACTIVEMODULE, - "newPipelineInfo", - Pair.of(name, type), - cameraIndex, - context); - dcService.publishEvent(newPipelineEvent); - break; - } - case SMT_CHANGEBRIGHTNESS: - { - HardwareManager.getInstance() - .setBrightnessPercent(Integer.parseInt(entryValue.toString())); - break; - } - case SMT_DUPLICATEPIPELINE: - { - var pipeIndex = (Integer) entryValue; + var newPipelineEvent = + new IncomingWebSocketEvent<>( + DataChangeDestination.DCD_ACTIVEMODULE, + "newPipelineInfo", + Pair.of(name, type), + cameraIndex, + context); + dcService.publishEvent(newPipelineEvent); + break; + } + case SMT_CHANGEBRIGHTNESS: + { + HardwareManager.getInstance() + .setBrightnessPercent(Integer.parseInt(entryValue.toString())); + break; + } + case SMT_DUPLICATEPIPELINE: + { + var pipeIndex = (Integer) entryValue; - logger.info("Duplicating pipe@index" + pipeIndex + " for camera " + cameraIndex); + logger.info("Duplicating pipe@index" + pipeIndex + " for camera " + cameraIndex); - var newPipelineEvent = - new IncomingWebSocketEvent<>( - DataChangeDestination.DCD_ACTIVEMODULE, - "duplicatePipeline", - pipeIndex, - cameraIndex, - context); - dcService.publishEvent(newPipelineEvent); - break; - } - 
case SMT_DELETECURRENTPIPELINE: - { - var deleteCurrentPipelineEvent = - new IncomingWebSocketEvent<>( - DataChangeDestination.DCD_ACTIVEMODULE, - "deleteCurrPipeline", - 0, - cameraIndex, - context); - dcService.publishEvent(deleteCurrentPipelineEvent); - break; - } - case SMT_ROBOTOFFSETPOINT: - { - var robotOffsetPointEvent = - new IncomingWebSocketEvent<>( - DataChangeDestination.DCD_ACTIVEMODULE, - "robotOffsetPoint", - (Integer) entryValue, - cameraIndex, - null); - dcService.publishEvent(robotOffsetPointEvent); - break; - } - case SMT_CURRENTCAMERA: - { - var changeCurrentCameraEvent = - new IncomingWebSocketEvent<>( - DataChangeDestination.DCD_OTHER, "changeUICamera", (Integer) entryValue); - dcService.publishEvent(changeCurrentCameraEvent); - break; - } - case SMT_CURRENTPIPELINE: - { - var changePipelineEvent = - new IncomingWebSocketEvent<>( - DataChangeDestination.DCD_ACTIVEMODULE, - "changePipeline", - (Integer) entryValue, - cameraIndex, - context); - dcService.publishEvent(changePipelineEvent); - break; - } - case SMT_STARTPNPCALIBRATION: - { - var changePipelineEvent = - new IncomingWebSocketEvent<>( - DataChangeDestination.DCD_ACTIVEMODULE, - "startCalibration", - (Map) entryValue, - cameraIndex, - context); - dcService.publishEvent(changePipelineEvent); - break; - } - case SMT_SAVEINPUTSNAPSHOT: - { - var takeInputSnapshotEvent = - new IncomingWebSocketEvent<>( - DataChangeDestination.DCD_ACTIVEMODULE, - "saveInputSnapshot", - 0, - cameraIndex, - context); - dcService.publishEvent(takeInputSnapshotEvent); - break; - } - case SMT_SAVEOUTPUTSNAPSHOT: - { - var takeOutputSnapshotEvent = - new IncomingWebSocketEvent<>( - DataChangeDestination.DCD_ACTIVEMODULE, - "saveOutputSnapshot", - 0, - cameraIndex, - context); - dcService.publishEvent(takeOutputSnapshotEvent); - break; - } - case SMT_TAKECALIBRATIONSNAPSHOT: - { - var takeCalSnapshotEvent = - new IncomingWebSocketEvent<>( - DataChangeDestination.DCD_ACTIVEMODULE, - "takeCalSnapshot", - 0, - 
cameraIndex, - context); - dcService.publishEvent(takeCalSnapshotEvent); - break; - } - case SMT_PIPELINESETTINGCHANGE: - { - HashMap data = (HashMap) entryValue; + var newPipelineEvent = + new IncomingWebSocketEvent<>( + DataChangeDestination.DCD_ACTIVEMODULE, + "duplicatePipeline", + pipeIndex, + cameraIndex, + context); + dcService.publishEvent(newPipelineEvent); + break; + } + case SMT_DELETECURRENTPIPELINE: + { + var deleteCurrentPipelineEvent = + new IncomingWebSocketEvent<>( + DataChangeDestination.DCD_ACTIVEMODULE, + "deleteCurrPipeline", + 0, + cameraIndex, + context); + dcService.publishEvent(deleteCurrentPipelineEvent); + break; + } + case SMT_ROBOTOFFSETPOINT: + { + var robotOffsetPointEvent = + new IncomingWebSocketEvent<>( + DataChangeDestination.DCD_ACTIVEMODULE, + "robotOffsetPoint", + (Integer) entryValue, + cameraIndex, + null); + dcService.publishEvent(robotOffsetPointEvent); + break; + } + case SMT_CURRENTCAMERA: + { + var changeCurrentCameraEvent = + new IncomingWebSocketEvent<>( + DataChangeDestination.DCD_OTHER, "changeUICamera", (Integer) entryValue); + dcService.publishEvent(changeCurrentCameraEvent); + break; + } + case SMT_CURRENTPIPELINE: + { + var changePipelineEvent = + new IncomingWebSocketEvent<>( + DataChangeDestination.DCD_ACTIVEMODULE, + "changePipeline", + (Integer) entryValue, + cameraIndex, + context); + dcService.publishEvent(changePipelineEvent); + break; + } + case SMT_STARTPNPCALIBRATION: + { + var changePipelineEvent = + new IncomingWebSocketEvent<>( + DataChangeDestination.DCD_ACTIVEMODULE, + "startCalibration", + (Map) entryValue, + cameraIndex, + context); + dcService.publishEvent(changePipelineEvent); + break; + } + case SMT_SAVEINPUTSNAPSHOT: + { + var takeInputSnapshotEvent = + new IncomingWebSocketEvent<>( + DataChangeDestination.DCD_ACTIVEMODULE, + "saveInputSnapshot", + 0, + cameraIndex, + context); + dcService.publishEvent(takeInputSnapshotEvent); + break; + } + case SMT_SAVEOUTPUTSNAPSHOT: + { + var 
takeOutputSnapshotEvent = + new IncomingWebSocketEvent<>( + DataChangeDestination.DCD_ACTIVEMODULE, + "saveOutputSnapshot", + 0, + cameraIndex, + context); + dcService.publishEvent(takeOutputSnapshotEvent); + break; + } + case SMT_TAKECALIBRATIONSNAPSHOT: + { + var takeCalSnapshotEvent = + new IncomingWebSocketEvent<>( + DataChangeDestination.DCD_ACTIVEMODULE, + "takeCalSnapshot", + 0, + cameraIndex, + context); + dcService.publishEvent(takeCalSnapshotEvent); + break; + } + case SMT_PIPELINESETTINGCHANGE: + { + HashMap data = (HashMap) entryValue; - if (data.size() >= 2) { - var cameraIndex2 = (int) data.get("cameraIndex"); - for (var dataEntry : data.entrySet()) { - if (dataEntry.getKey().equals("cameraIndex")) { - continue; - } - var pipelineSettingChangeEvent = - new IncomingWebSocketEvent( - DataChangeDestination.DCD_ACTIVEPIPELINESETTINGS, - dataEntry.getKey(), - dataEntry.getValue(), - cameraIndex2, - context); - dcService.publishEvent(pipelineSettingChangeEvent); - } - } else { - logger.warn("Unknown message for PSC: " + data.keySet().iterator().next()); - } - break; - } - case SMT_CHANGEPIPELINETYPE: - { - var changePipelineEvent = - new IncomingWebSocketEvent<>( - DataChangeDestination.DCD_ACTIVEMODULE, - "changePipelineType", - (Integer) entryValue, - cameraIndex, - context); - dcService.publishEvent(changePipelineEvent); - break; - } + if (data.size() >= 2) { + var cameraIndex2 = (int) data.get("cameraIndex"); + for (var dataEntry : data.entrySet()) { + if (dataEntry.getKey().equals("cameraIndex")) { + continue; } - } catch (Exception e) { - logger.error("Failed to parse message!", e); + var pipelineSettingChangeEvent = + new IncomingWebSocketEvent( + DataChangeDestination.DCD_ACTIVEPIPELINESETTINGS, + dataEntry.getKey(), + dataEntry.getValue(), + cameraIndex2, + context); + dcService.publishEvent(pipelineSettingChangeEvent); + } + } else { + logger.warn("Unknown message for PSC: " + data.keySet().iterator().next()); } - } - } catch (IOException e) { - 
logger.error("Failed to deserialize message!", e); + break; + } + case SMT_CHANGEPIPELINETYPE: + { + var changePipelineEvent = + new IncomingWebSocketEvent<>( + DataChangeDestination.DCD_ACTIVEMODULE, + "changePipelineType", + (Integer) entryValue, + cameraIndex, + context); + dcService.publishEvent(changePipelineEvent); + break; + } + } + } catch (Exception e) { + logger.error("Failed to parse message!", e); } + } + } catch (IOException e) { + logger.error("Failed to deserialize message!", e); } + } - private void sendMessage(Object message, WsContext user) throws JsonProcessingException { - ByteBuffer b = ByteBuffer.wrap(objectMapper.writeValueAsBytes(message)); - user.send(b); - } + private void sendMessage(Object message, WsContext user) throws JsonProcessingException { + ByteBuffer b = ByteBuffer.wrap(objectMapper.writeValueAsBytes(message)); + user.send(b); + } - public void broadcastMessage(Object message, WsContext userToSkip) - throws JsonProcessingException { - if (userToSkip == null) { - for (WsContext user : users) { - sendMessage(message, user); - } - } else { - var skipUserPort = ((InetSocketAddress) userToSkip.session.getRemoteAddress()).getPort(); - for (WsContext user : users) { - var userPort = ((InetSocketAddress) user.session.getRemoteAddress()).getPort(); - if (userPort != skipUserPort) { - sendMessage(message, user); - } - } + public void broadcastMessage(Object message, WsContext userToSkip) + throws JsonProcessingException { + if (userToSkip == null) { + for (WsContext user : users) { + sendMessage(message, user); + } + } else { + var skipUserPort = ((InetSocketAddress) userToSkip.session.getRemoteAddress()).getPort(); + for (WsContext user : users) { + var userPort = ((InetSocketAddress) user.session.getRemoteAddress()).getPort(); + if (userPort != skipUserPort) { + sendMessage(message, user); } + } } + } } diff --git a/photon-server/src/main/java/org/photonvision/server/DataSocketMessageType.java 
b/photon-server/src/main/java/org/photonvision/server/DataSocketMessageType.java index f5222cf371..0e702458f8 100644 --- a/photon-server/src/main/java/org/photonvision/server/DataSocketMessageType.java +++ b/photon-server/src/main/java/org/photonvision/server/DataSocketMessageType.java @@ -23,38 +23,38 @@ @SuppressWarnings("unused") public enum DataSocketMessageType { - SMT_DRIVERMODE("driverMode"), - SMT_CHANGECAMERANAME("changeCameraName"), - SMT_CHANGEPIPELINENAME("changePipelineName"), - SMT_ADDNEWPIPELINE("addNewPipeline"), - SMT_DELETECURRENTPIPELINE("deleteCurrentPipeline"), - SMT_CURRENTCAMERA("currentCamera"), - SMT_PIPELINESETTINGCHANGE("changePipelineSetting"), - SMT_CURRENTPIPELINE("currentPipeline"), - SMT_STARTPNPCALIBRATION("startPnpCalibration"), - SMT_SAVEINPUTSNAPSHOT("saveInputSnapshot"), - SMT_SAVEOUTPUTSNAPSHOT("saveOutputSnapshot"), - SMT_TAKECALIBRATIONSNAPSHOT("takeCalibrationSnapshot"), - SMT_DUPLICATEPIPELINE("duplicatePipeline"), - SMT_CHANGEBRIGHTNESS("enabledLEDPercentage"), - SMT_ROBOTOFFSETPOINT("robotOffsetPoint"), - SMT_CHANGEPIPELINETYPE("pipelineType"); - - public final String entryKey; - - DataSocketMessageType(String entryKey) { - this.entryKey = entryKey; + SMT_DRIVERMODE("driverMode"), + SMT_CHANGECAMERANAME("changeCameraName"), + SMT_CHANGEPIPELINENAME("changePipelineName"), + SMT_ADDNEWPIPELINE("addNewPipeline"), + SMT_DELETECURRENTPIPELINE("deleteCurrentPipeline"), + SMT_CURRENTCAMERA("currentCamera"), + SMT_PIPELINESETTINGCHANGE("changePipelineSetting"), + SMT_CURRENTPIPELINE("currentPipeline"), + SMT_STARTPNPCALIBRATION("startPnpCalibration"), + SMT_SAVEINPUTSNAPSHOT("saveInputSnapshot"), + SMT_SAVEOUTPUTSNAPSHOT("saveOutputSnapshot"), + SMT_TAKECALIBRATIONSNAPSHOT("takeCalibrationSnapshot"), + SMT_DUPLICATEPIPELINE("duplicatePipeline"), + SMT_CHANGEBRIGHTNESS("enabledLEDPercentage"), + SMT_ROBOTOFFSETPOINT("robotOffsetPoint"), + SMT_CHANGEPIPELINETYPE("pipelineType"); + + public final String entryKey; + + 
DataSocketMessageType(String entryKey) { + this.entryKey = entryKey; + } + + private static final Map entryKeyToValueMap = new HashMap<>(); + + static { + for (var value : EnumSet.allOf(DataSocketMessageType.class)) { + entryKeyToValueMap.put(value.entryKey, value); } + } - private static final Map entryKeyToValueMap = new HashMap<>(); - - static { - for (var value : EnumSet.allOf(DataSocketMessageType.class)) { - entryKeyToValueMap.put(value.entryKey, value); - } - } - - public static DataSocketMessageType fromEntryKey(String entryKey) { - return entryKeyToValueMap.get(entryKey); - } + public static DataSocketMessageType fromEntryKey(String entryKey) { + return entryKeyToValueMap.get(entryKey); + } } diff --git a/photon-server/src/main/java/org/photonvision/server/RequestHandler.java b/photon-server/src/main/java/org/photonvision/server/RequestHandler.java index 9547c2cdf0..88df5d84e7 100644 --- a/photon-server/src/main/java/org/photonvision/server/RequestHandler.java +++ b/photon-server/src/main/java/org/photonvision/server/RequestHandler.java @@ -49,536 +49,536 @@ import org.photonvision.vision.processes.VisionModuleManager; public class RequestHandler { - // Treat all 2XX calls as "INFO" - // Treat all 4XX calls as "ERROR" - // Treat all 5XX calls as "ERROR" - - private static final Logger logger = new Logger(RequestHandler.class, LogGroup.WebServer); - - private static final ObjectMapper kObjectMapper = new ObjectMapper(); - - public static void onSettingsImportRequest(Context ctx) { - var file = ctx.uploadedFile("data"); - - if (file == null) { - ctx.status(400); - ctx.result( - "No File was sent with the request. Make sure that the settings zip is sent at the key 'data'"); - logger.error( - "No File was sent with the request. Make sure that the settings zip is sent at the key 'data'"); - return; - } - - if (!file.extension().contains("zip")) { - ctx.status(400); - ctx.result( - "The uploaded file was not of type 'zip'. 
The uploaded file should be a .zip file."); - logger.error( - "The uploaded file was not of type 'zip'. The uploaded file should be a .zip file."); - return; - } - - // Create a temp file - var tempFilePath = handleTempFileCreation(file); - - if (tempFilePath.isEmpty()) { - ctx.status(500); - ctx.result("There was an error while creating a temporary copy of the file"); - logger.error("There was an error while creating a temporary copy of the file"); - return; - } - - if (ConfigManager.saveUploadedSettingsZip(tempFilePath.get())) { - ctx.status(200); - ctx.result("Successfully saved the uploaded settings zip"); - logger.info("Successfully saved the uploaded settings zip"); - } else { - ctx.status(500); - ctx.result("There was an error while saving the uploaded zip file"); - logger.error("There was an error while saving the uploaded zip file"); - } - } - - public static void onSettingsExportRequest(Context ctx) { - logger.info("Exporting Settings to ZIP Archive"); - - try { - var zip = ConfigManager.getInstance().getSettingsFolderAsZip(); - var stream = new FileInputStream(zip); - logger.info("Uploading settings with size " + stream.available()); - - ctx.contentType("application/zip"); - ctx.header( - "Content-Disposition", "attachment; filename=\"photonvision-settings-export.zip\""); - - ctx.result(stream); - ctx.status(200); - } catch (IOException e) { - logger.error("Unable to export settings archive, bad recode from zip to byte"); - ctx.status(500); - ctx.result("There was an error while exporting the settings archive"); - } - } - - public static void onHardwareConfigRequest(Context ctx) { - var file = ctx.uploadedFile("data"); - - if (file == null) { - ctx.status(400); - ctx.result( - "No File was sent with the request. Make sure that the hardware config json is sent at the key 'data'"); - logger.error( - "No File was sent with the request. 
Make sure that the hardware config json is sent at the key 'data'"); - return; - } - - if (!file.extension().contains("json")) { - ctx.status(400); - ctx.result( - "The uploaded file was not of type 'json'. The uploaded file should be a .json file."); - logger.error( - "The uploaded file was not of type 'json'. The uploaded file should be a .json file."); - return; - } - - // Create a temp file - var tempFilePath = handleTempFileCreation(file); - - if (tempFilePath.isEmpty()) { - ctx.status(500); - ctx.result("There was an error while creating a temporary copy of the file"); - logger.error("There was an error while creating a temporary copy of the file"); - return; - } - - if (ConfigManager.getInstance().saveUploadedHardwareConfig(tempFilePath.get().toPath())) { - ctx.status(200); - ctx.result("Successfully saved the uploaded hardware config"); - logger.info("Successfully saved the uploaded hardware config"); - } else { - ctx.status(500); - ctx.result("There was an error while saving the uploaded hardware config"); - logger.error("There was an error while saving the uploaded hardware config"); - } - } - - public static void onHardwareSettingsRequest(Context ctx) { - var file = ctx.uploadedFile("data"); - - if (file == null) { - ctx.status(400); - ctx.result( - "No File was sent with the request. Make sure that the hardware settings json is sent at the key 'data'"); - logger.error( - "No File was sent with the request. Make sure that the hardware settings json is sent at the key 'data'"); - return; - } - - if (!file.extension().contains("json")) { - ctx.status(400); - ctx.result( - "The uploaded file was not of type 'json'. The uploaded file should be a .json file."); - logger.error( - "The uploaded file was not of type 'json'. 
The uploaded file should be a .json file."); - return; - } - - // Create a temp file - var tempFilePath = handleTempFileCreation(file); - - if (tempFilePath.isEmpty()) { - ctx.status(500); - ctx.result("There was an error while creating a temporary copy of the file"); - logger.error("There was an error while creating a temporary copy of the file"); - return; - } - - if (ConfigManager.getInstance().saveUploadedHardwareSettings(tempFilePath.get().toPath())) { - ctx.status(200); - ctx.result("Successfully saved the uploaded hardware settings"); - logger.info("Successfully saved the uploaded hardware settings"); - } else { - ctx.status(500); - ctx.result("There was an error while saving the uploaded hardware settings"); - logger.error("There was an error while saving the uploaded hardware settings"); - } - } - - public static void onNetworkConfigRequest(Context ctx) { - var file = ctx.uploadedFile("data"); - - if (file == null) { - ctx.status(400); - ctx.result( - "No File was sent with the request. Make sure that the network config json is sent at the key 'data'"); - logger.error( - "No File was sent with the request. Make sure that the network config json is sent at the key 'data'"); - return; - } - - if (!file.extension().contains("json")) { - ctx.status(400); - ctx.result( - "The uploaded file was not of type 'json'. The uploaded file should be a .json file."); - logger.error( - "The uploaded file was not of type 'json'. 
The uploaded file should be a .json file."); - return; - } - - // Create a temp file - var tempFilePath = handleTempFileCreation(file); - - if (tempFilePath.isEmpty()) { - ctx.status(500); - ctx.result("There was an error while creating a temporary copy of the file"); - logger.error("There was an error while creating a temporary copy of the file"); - return; - } - - if (ConfigManager.getInstance().saveUploadedNetworkConfig(tempFilePath.get().toPath())) { - ctx.status(200); - ctx.result("Successfully saved the uploaded network config"); - logger.info("Successfully saved the uploaded network config"); - } else { - ctx.status(500); - ctx.result("There was an error while saving the uploaded network config"); - logger.error("There was an error while saving the uploaded network config"); - } - } - - public static void onAprilTagFieldLayoutRequest(Context ctx) { - var file = ctx.uploadedFile("data"); - - if (file == null) { - ctx.status(400); - ctx.result( - "No File was sent with the request. Make sure that the field layout json is sent at the key 'data'"); - logger.error( - "No File was sent with the request. Make sure that the field layout json is sent at the key 'data'"); - return; - } - - if (!file.extension().contains("json")) { - ctx.status(400); - ctx.result( - "The uploaded file was not of type 'json'. The uploaded file should be a .json file."); - logger.error( - "The uploaded file was not of type 'json'. 
The uploaded file should be a .json file."); - return; - } - - // Create a temp file - var tempFilePath = handleTempFileCreation(file); - - if (tempFilePath.isEmpty()) { - ctx.status(500); - ctx.result("There was an error while creating a temporary copy of the file"); - logger.error("There was an error while creating a temporary copy of the file"); - return; - } - - if (ConfigManager.getInstance().saveUploadedAprilTagFieldLayout(tempFilePath.get().toPath())) { - ctx.status(200); - ctx.result("Successfully saved the uploaded AprilTagFieldLayout"); - logger.info("Successfully saved the uploaded AprilTagFieldLayout"); - } else { - ctx.status(500); - ctx.result("There was an error while saving the uploaded AprilTagFieldLayout"); - logger.error("There was an error while saving the uploaded AprilTagFieldLayout"); - } - } - - public static void onOfflineUpdateRequest(Context ctx) { - var file = ctx.uploadedFile("jarData"); - - if (file == null) { - ctx.status(400); - ctx.result( - "No File was sent with the request. Make sure that the new jar is sent at the key 'jarData'"); - logger.error( - "No File was sent with the request. Make sure that the new jar is sent at the key 'jarData'"); - return; - } - - if (!file.extension().contains("jar")) { - ctx.status(400); - ctx.result( - "The uploaded file was not of type 'jar'. The uploaded file should be a .jar file."); - logger.error( - "The uploaded file was not of type 'jar'. The uploaded file should be a .jar file."); - return; - } - - try { - Path filePath = - Paths.get(ProgramDirectoryUtilities.getProgramDirectory(), "photonvision.jar"); - File targetFile = new File(filePath.toString()); - var stream = new FileOutputStream(targetFile); - - file.content().transferTo(stream); - stream.close(); - - ctx.status(200); - ctx.result( - "Offline update successfully complete. PhotonVision will restart in the background."); - logger.info( - "Offline update successfully complete. 
PhotonVision will restart in the background."); - restartProgram(); - } catch (FileNotFoundException e) { - ctx.result("The current program jar file couldn't be found."); - ctx.status(500); - logger.error("The current program jar file couldn't be found.", e); - } catch (IOException e) { - ctx.result("Unable to overwrite the existing program with the new program."); - ctx.status(500); - logger.error("Unable to overwrite the existing program with the new program.", e); - } - } - - public static void onGeneralSettingsRequest(Context ctx) { - NetworkConfig config; - try { - config = kObjectMapper.readValue(ctx.body(), NetworkConfig.class); - - ctx.status(200); - ctx.result("Successfully saved general settings"); - logger.info("Successfully saved general settings"); - } catch (JsonProcessingException e) { - // If the settings can't be parsed, use the default network settings - config = new NetworkConfig(); - - ctx.status(400); - ctx.result("The provided general settings were malformed"); - logger.error("The provided general settings were malformed", e); - } - - ConfigManager.getInstance().setNetworkSettings(config); - ConfigManager.getInstance().requestSave(); - - NetworkManager.getInstance().reinitialize(); - - NetworkTablesManager.getInstance().setConfig(config); - } - - public static void onCameraSettingsRequest(Context ctx) { - try { - var data = kObjectMapper.readTree(ctx.body()); - - int index = data.get("index").asInt(); - double fov = data.get("settings").get("fov").asDouble(); - - var module = VisionModuleManager.getInstance().getModule(index); - module.setFov(fov); - - module.saveModule(); - - ctx.status(200); - ctx.result("Successfully saved camera settings"); - logger.info("Successfully saved camera settings"); - } catch (JsonProcessingException | NullPointerException e) { - ctx.status(400); - ctx.result("The provided camera settings were malformed"); - logger.error("The provided camera settings were malformed", e); - } - } - - public static void 
onLogExportRequest(Context ctx) { - if (!Platform.isLinux()) { - ctx.status(405); - ctx.result("Logs can only be exported on a Linux platform"); - // INFO only log because this isn't ERROR worthy - logger.info("Logs can only be exported on a Linux platform"); - return; - } - - try { - ShellExec shell = new ShellExec(); - var tempPath = Files.createTempFile("photonvision-journalctl", ".txt"); - shell.executeBashCommand("journalctl -u photonvision.service > " + tempPath.toAbsolutePath()); - - while (!shell.isOutputCompleted()) { - // TODO: add timeout - } - - if (shell.getExitCode() == 0) { - // Wrote to the temp file! Add it to the ctx - var stream = new FileInputStream(tempPath.toFile()); - ctx.contentType("text/plain"); - ctx.header("Content-Disposition", "attachment; filename=\"photonvision-journalctl.txt\""); - ctx.status(200); - ctx.result(stream); - logger.info("Uploading settings with size " + stream.available()); - } else { - ctx.status(500); - ctx.result("The journalctl service was unable to export logs"); - logger.error("The journalctl service was unable to export logs"); - } - } catch (IOException e) { - ctx.status(500); - ctx.result("There was an error while exporting journactl logs"); - logger.error("There was an error while exporting journactl logs", e); - } - } - - public static void onCalibrationEndRequest(Context ctx) { - logger.info("Calibrating camera! This will take a long time..."); - - int index; - - try { - index = kObjectMapper.readTree(ctx.body()).get("index").asInt(); - - var calData = VisionModuleManager.getInstance().getModule(index).endCalibration(); - if (calData == null) { - ctx.result("The calibration process failed"); - ctx.status(500); - logger.error( - "The calibration process failed. 
Calibration data for module at index (" - + index - + ") was null"); - return; - } - - ctx.result("Camera calibration successfully completed!"); - ctx.status(200); - logger.info("Camera calibration successfully completed!"); - } catch (JsonProcessingException e) { - ctx.status(400); - ctx.result( - "The 'index' field was not found in the request. Please make sure the index of the vision module is specified with the 'index' key."); - logger.error( - "The 'index' field was not found in the request. Please make sure the index of the vision module is specified with the 'index' key.", - e); - } catch (Exception e) { - ctx.status(500); - ctx.result("There was an error while ending calibration"); - logger.error("There was an error while ending calibration", e); - } - } - - public static void onCalibrationImportRequest(Context ctx) { - var data = ctx.body(); - - try { - var actualObj = kObjectMapper.readTree(data); - - int cameraIndex = actualObj.get("cameraIndex").asInt(); - var payload = kObjectMapper.readTree(actualObj.get("payload").asText()); - var coeffs = CameraCalibrationCoefficients.parseFromCalibdbJson(payload); - - var uploadCalibrationEvent = - new IncomingWebSocketEvent<>( - DataChangeDestination.DCD_ACTIVEMODULE, - "calibrationUploaded", - coeffs, - cameraIndex, - null); - DataChangeService.getInstance().publishEvent(uploadCalibrationEvent); - - ctx.status(200); - ctx.result("Calibration imported successfully from CalibDB data!"); - logger.info("Calibration imported successfully from CalibDB data!"); - } catch (JsonProcessingException e) { - ctx.status(400); - ctx.result( - "The Provided CalibDB data is malformed and cannot be parsed for the required fields."); - logger.error( - "The Provided CalibDB data is malformed and cannot be parsed for the required fields.", - e); - } - } - - public static void onProgramRestartRequest(Context ctx) { - // TODO, check if this was successful or not - ctx.status(204); - restartProgram(); - } - - public static void 
onDeviceRestartRequest(Context ctx) { - ctx.status(HardwareManager.getInstance().restartDevice() ? 204 : 500); - } - - public static void onCameraNicknameChangeRequest(Context ctx) { - try { - var data = kObjectMapper.readTree(ctx.body()); - - String name = data.get("name").asText(); - int idx = data.get("cameraIndex").asInt(); - - VisionModuleManager.getInstance().getModule(idx).setCameraNickname(name); - ctx.status(200); - ctx.result("Successfully changed the camera name to: " + name); - logger.info("Successfully changed the camera name to: " + name); - } catch (JsonProcessingException e) { - ctx.status(400); - ctx.result("The provided nickname data was malformed"); - logger.error("The provided nickname data was malformed", e); - - } catch (Exception e) { - ctx.status(500); - ctx.result("An error occurred while changing the camera's nickname"); - logger.error("An error occurred while changing the camera's nickname", e); - } - } - - public static void onMetricsPublishRequest(Context ctx) { - HardwareManager.getInstance().publishMetrics(); - ctx.status(204); - } - - /** - * Create a temporary file using the UploadedFile from Javalin. - * - * @param file the uploaded file. - * @return Temporary file. Empty if the temporary file was unable to be created. - */ - private static Optional handleTempFileCreation(UploadedFile file) { - var tempFilePath = - new File(Path.of(System.getProperty("java.io.tmpdir"), file.filename()).toString()); - boolean makeDirsRes = tempFilePath.getParentFile().mkdirs(); - - if (!makeDirsRes && !(tempFilePath.getParentFile().exists())) { - logger.error( - "There was an error while creating " - + tempFilePath.getAbsolutePath() - + "! 
Exists: " - + tempFilePath.getParentFile().exists()); - return Optional.empty(); - } - - try { - FileUtils.copyInputStreamToFile(file.content(), tempFilePath); - } catch (IOException e) { - logger.error( - "There was an error while copying " - + file.filename() - + " to the temp file " - + tempFilePath.getAbsolutePath()); - return Optional.empty(); - } - - return Optional.of(tempFilePath); - } - - /** - * Restart the running program. Note that this doesn't actually restart the program itself, - * instead, it relies on systemd or an equivalent. - */ - private static void restartProgram() { - TimedTaskManager.getInstance() - .addOneShotTask( - () -> { - if (Platform.isLinux()) { - try { - new ShellExec().executeBashCommand("systemctl restart photonvision.service"); - } catch (IOException e) { - logger.error("Could not restart device!", e); - System.exit(0); - } - } else { - System.exit(0); - } - }, - 0); + // Treat all 2XX calls as "INFO" + // Treat all 4XX calls as "ERROR" + // Treat all 5XX calls as "ERROR" + + private static final Logger logger = new Logger(RequestHandler.class, LogGroup.WebServer); + + private static final ObjectMapper kObjectMapper = new ObjectMapper(); + + public static void onSettingsImportRequest(Context ctx) { + var file = ctx.uploadedFile("data"); + + if (file == null) { + ctx.status(400); + ctx.result( + "No File was sent with the request. Make sure that the settings zip is sent at the key 'data'"); + logger.error( + "No File was sent with the request. Make sure that the settings zip is sent at the key 'data'"); + return; + } + + if (!file.extension().contains("zip")) { + ctx.status(400); + ctx.result( + "The uploaded file was not of type 'zip'. The uploaded file should be a .zip file."); + logger.error( + "The uploaded file was not of type 'zip'. 
The uploaded file should be a .zip file."); + return; + } + + // Create a temp file + var tempFilePath = handleTempFileCreation(file); + + if (tempFilePath.isEmpty()) { + ctx.status(500); + ctx.result("There was an error while creating a temporary copy of the file"); + logger.error("There was an error while creating a temporary copy of the file"); + return; + } + + if (ConfigManager.saveUploadedSettingsZip(tempFilePath.get())) { + ctx.status(200); + ctx.result("Successfully saved the uploaded settings zip"); + logger.info("Successfully saved the uploaded settings zip"); + } else { + ctx.status(500); + ctx.result("There was an error while saving the uploaded zip file"); + logger.error("There was an error while saving the uploaded zip file"); + } + } + + public static void onSettingsExportRequest(Context ctx) { + logger.info("Exporting Settings to ZIP Archive"); + + try { + var zip = ConfigManager.getInstance().getSettingsFolderAsZip(); + var stream = new FileInputStream(zip); + logger.info("Uploading settings with size " + stream.available()); + + ctx.contentType("application/zip"); + ctx.header( + "Content-Disposition", "attachment; filename=\"photonvision-settings-export.zip\""); + + ctx.result(stream); + ctx.status(200); + } catch (IOException e) { + logger.error("Unable to export settings archive, bad recode from zip to byte"); + ctx.status(500); + ctx.result("There was an error while exporting the settings archive"); + } + } + + public static void onHardwareConfigRequest(Context ctx) { + var file = ctx.uploadedFile("data"); + + if (file == null) { + ctx.status(400); + ctx.result( + "No File was sent with the request. Make sure that the hardware config json is sent at the key 'data'"); + logger.error( + "No File was sent with the request. Make sure that the hardware config json is sent at the key 'data'"); + return; + } + + if (!file.extension().contains("json")) { + ctx.status(400); + ctx.result( + "The uploaded file was not of type 'json'. 
The uploaded file should be a .json file."); + logger.error( + "The uploaded file was not of type 'json'. The uploaded file should be a .json file."); + return; + } + + // Create a temp file + var tempFilePath = handleTempFileCreation(file); + + if (tempFilePath.isEmpty()) { + ctx.status(500); + ctx.result("There was an error while creating a temporary copy of the file"); + logger.error("There was an error while creating a temporary copy of the file"); + return; } + + if (ConfigManager.getInstance().saveUploadedHardwareConfig(tempFilePath.get().toPath())) { + ctx.status(200); + ctx.result("Successfully saved the uploaded hardware config"); + logger.info("Successfully saved the uploaded hardware config"); + } else { + ctx.status(500); + ctx.result("There was an error while saving the uploaded hardware config"); + logger.error("There was an error while saving the uploaded hardware config"); + } + } + + public static void onHardwareSettingsRequest(Context ctx) { + var file = ctx.uploadedFile("data"); + + if (file == null) { + ctx.status(400); + ctx.result( + "No File was sent with the request. Make sure that the hardware settings json is sent at the key 'data'"); + logger.error( + "No File was sent with the request. Make sure that the hardware settings json is sent at the key 'data'"); + return; + } + + if (!file.extension().contains("json")) { + ctx.status(400); + ctx.result( + "The uploaded file was not of type 'json'. The uploaded file should be a .json file."); + logger.error( + "The uploaded file was not of type 'json'. 
The uploaded file should be a .json file."); + return; + } + + // Create a temp file + var tempFilePath = handleTempFileCreation(file); + + if (tempFilePath.isEmpty()) { + ctx.status(500); + ctx.result("There was an error while creating a temporary copy of the file"); + logger.error("There was an error while creating a temporary copy of the file"); + return; + } + + if (ConfigManager.getInstance().saveUploadedHardwareSettings(tempFilePath.get().toPath())) { + ctx.status(200); + ctx.result("Successfully saved the uploaded hardware settings"); + logger.info("Successfully saved the uploaded hardware settings"); + } else { + ctx.status(500); + ctx.result("There was an error while saving the uploaded hardware settings"); + logger.error("There was an error while saving the uploaded hardware settings"); + } + } + + public static void onNetworkConfigRequest(Context ctx) { + var file = ctx.uploadedFile("data"); + + if (file == null) { + ctx.status(400); + ctx.result( + "No File was sent with the request. Make sure that the network config json is sent at the key 'data'"); + logger.error( + "No File was sent with the request. Make sure that the network config json is sent at the key 'data'"); + return; + } + + if (!file.extension().contains("json")) { + ctx.status(400); + ctx.result( + "The uploaded file was not of type 'json'. The uploaded file should be a .json file."); + logger.error( + "The uploaded file was not of type 'json'. 
The uploaded file should be a .json file."); + return; + } + + // Create a temp file + var tempFilePath = handleTempFileCreation(file); + + if (tempFilePath.isEmpty()) { + ctx.status(500); + ctx.result("There was an error while creating a temporary copy of the file"); + logger.error("There was an error while creating a temporary copy of the file"); + return; + } + + if (ConfigManager.getInstance().saveUploadedNetworkConfig(tempFilePath.get().toPath())) { + ctx.status(200); + ctx.result("Successfully saved the uploaded network config"); + logger.info("Successfully saved the uploaded network config"); + } else { + ctx.status(500); + ctx.result("There was an error while saving the uploaded network config"); + logger.error("There was an error while saving the uploaded network config"); + } + } + + public static void onAprilTagFieldLayoutRequest(Context ctx) { + var file = ctx.uploadedFile("data"); + + if (file == null) { + ctx.status(400); + ctx.result( + "No File was sent with the request. Make sure that the field layout json is sent at the key 'data'"); + logger.error( + "No File was sent with the request. Make sure that the field layout json is sent at the key 'data'"); + return; + } + + if (!file.extension().contains("json")) { + ctx.status(400); + ctx.result( + "The uploaded file was not of type 'json'. The uploaded file should be a .json file."); + logger.error( + "The uploaded file was not of type 'json'. 
The uploaded file should be a .json file."); + return; + } + + // Create a temp file + var tempFilePath = handleTempFileCreation(file); + + if (tempFilePath.isEmpty()) { + ctx.status(500); + ctx.result("There was an error while creating a temporary copy of the file"); + logger.error("There was an error while creating a temporary copy of the file"); + return; + } + + if (ConfigManager.getInstance().saveUploadedAprilTagFieldLayout(tempFilePath.get().toPath())) { + ctx.status(200); + ctx.result("Successfully saved the uploaded AprilTagFieldLayout"); + logger.info("Successfully saved the uploaded AprilTagFieldLayout"); + } else { + ctx.status(500); + ctx.result("There was an error while saving the uploaded AprilTagFieldLayout"); + logger.error("There was an error while saving the uploaded AprilTagFieldLayout"); + } + } + + public static void onOfflineUpdateRequest(Context ctx) { + var file = ctx.uploadedFile("jarData"); + + if (file == null) { + ctx.status(400); + ctx.result( + "No File was sent with the request. Make sure that the new jar is sent at the key 'jarData'"); + logger.error( + "No File was sent with the request. Make sure that the new jar is sent at the key 'jarData'"); + return; + } + + if (!file.extension().contains("jar")) { + ctx.status(400); + ctx.result( + "The uploaded file was not of type 'jar'. The uploaded file should be a .jar file."); + logger.error( + "The uploaded file was not of type 'jar'. The uploaded file should be a .jar file."); + return; + } + + try { + Path filePath = + Paths.get(ProgramDirectoryUtilities.getProgramDirectory(), "photonvision.jar"); + File targetFile = new File(filePath.toString()); + var stream = new FileOutputStream(targetFile); + + file.content().transferTo(stream); + stream.close(); + + ctx.status(200); + ctx.result( + "Offline update successfully complete. PhotonVision will restart in the background."); + logger.info( + "Offline update successfully complete. 
PhotonVision will restart in the background."); + restartProgram(); + } catch (FileNotFoundException e) { + ctx.result("The current program jar file couldn't be found."); + ctx.status(500); + logger.error("The current program jar file couldn't be found.", e); + } catch (IOException e) { + ctx.result("Unable to overwrite the existing program with the new program."); + ctx.status(500); + logger.error("Unable to overwrite the existing program with the new program.", e); + } + } + + public static void onGeneralSettingsRequest(Context ctx) { + NetworkConfig config; + try { + config = kObjectMapper.readValue(ctx.body(), NetworkConfig.class); + + ctx.status(200); + ctx.result("Successfully saved general settings"); + logger.info("Successfully saved general settings"); + } catch (JsonProcessingException e) { + // If the settings can't be parsed, use the default network settings + config = new NetworkConfig(); + + ctx.status(400); + ctx.result("The provided general settings were malformed"); + logger.error("The provided general settings were malformed", e); + } + + ConfigManager.getInstance().setNetworkSettings(config); + ConfigManager.getInstance().requestSave(); + + NetworkManager.getInstance().reinitialize(); + + NetworkTablesManager.getInstance().setConfig(config); + } + + public static void onCameraSettingsRequest(Context ctx) { + try { + var data = kObjectMapper.readTree(ctx.body()); + + int index = data.get("index").asInt(); + double fov = data.get("settings").get("fov").asDouble(); + + var module = VisionModuleManager.getInstance().getModule(index); + module.setFov(fov); + + module.saveModule(); + + ctx.status(200); + ctx.result("Successfully saved camera settings"); + logger.info("Successfully saved camera settings"); + } catch (JsonProcessingException | NullPointerException e) { + ctx.status(400); + ctx.result("The provided camera settings were malformed"); + logger.error("The provided camera settings were malformed", e); + } + } + + public static void 
onLogExportRequest(Context ctx) { + if (!Platform.isLinux()) { + ctx.status(405); + ctx.result("Logs can only be exported on a Linux platform"); + // INFO only log because this isn't ERROR worthy + logger.info("Logs can only be exported on a Linux platform"); + return; + } + + try { + ShellExec shell = new ShellExec(); + var tempPath = Files.createTempFile("photonvision-journalctl", ".txt"); + shell.executeBashCommand("journalctl -u photonvision.service > " + tempPath.toAbsolutePath()); + + while (!shell.isOutputCompleted()) { + // TODO: add timeout + } + + if (shell.getExitCode() == 0) { + // Wrote to the temp file! Add it to the ctx + var stream = new FileInputStream(tempPath.toFile()); + ctx.contentType("text/plain"); + ctx.header("Content-Disposition", "attachment; filename=\"photonvision-journalctl.txt\""); + ctx.status(200); + ctx.result(stream); + logger.info("Uploading settings with size " + stream.available()); + } else { + ctx.status(500); + ctx.result("The journalctl service was unable to export logs"); + logger.error("The journalctl service was unable to export logs"); + } + } catch (IOException e) { + ctx.status(500); + ctx.result("There was an error while exporting journactl logs"); + logger.error("There was an error while exporting journactl logs", e); + } + } + + public static void onCalibrationEndRequest(Context ctx) { + logger.info("Calibrating camera! This will take a long time..."); + + int index; + + try { + index = kObjectMapper.readTree(ctx.body()).get("index").asInt(); + + var calData = VisionModuleManager.getInstance().getModule(index).endCalibration(); + if (calData == null) { + ctx.result("The calibration process failed"); + ctx.status(500); + logger.error( + "The calibration process failed. 
Calibration data for module at index (" + + index + + ") was null"); + return; + } + + ctx.result("Camera calibration successfully completed!"); + ctx.status(200); + logger.info("Camera calibration successfully completed!"); + } catch (JsonProcessingException e) { + ctx.status(400); + ctx.result( + "The 'index' field was not found in the request. Please make sure the index of the vision module is specified with the 'index' key."); + logger.error( + "The 'index' field was not found in the request. Please make sure the index of the vision module is specified with the 'index' key.", + e); + } catch (Exception e) { + ctx.status(500); + ctx.result("There was an error while ending calibration"); + logger.error("There was an error while ending calibration", e); + } + } + + public static void onCalibrationImportRequest(Context ctx) { + var data = ctx.body(); + + try { + var actualObj = kObjectMapper.readTree(data); + + int cameraIndex = actualObj.get("cameraIndex").asInt(); + var payload = kObjectMapper.readTree(actualObj.get("payload").asText()); + var coeffs = CameraCalibrationCoefficients.parseFromCalibdbJson(payload); + + var uploadCalibrationEvent = + new IncomingWebSocketEvent<>( + DataChangeDestination.DCD_ACTIVEMODULE, + "calibrationUploaded", + coeffs, + cameraIndex, + null); + DataChangeService.getInstance().publishEvent(uploadCalibrationEvent); + + ctx.status(200); + ctx.result("Calibration imported successfully from CalibDB data!"); + logger.info("Calibration imported successfully from CalibDB data!"); + } catch (JsonProcessingException e) { + ctx.status(400); + ctx.result( + "The Provided CalibDB data is malformed and cannot be parsed for the required fields."); + logger.error( + "The Provided CalibDB data is malformed and cannot be parsed for the required fields.", + e); + } + } + + public static void onProgramRestartRequest(Context ctx) { + // TODO, check if this was successful or not + ctx.status(204); + restartProgram(); + } + + public static void 
onDeviceRestartRequest(Context ctx) { + ctx.status(HardwareManager.getInstance().restartDevice() ? 204 : 500); + } + + public static void onCameraNicknameChangeRequest(Context ctx) { + try { + var data = kObjectMapper.readTree(ctx.body()); + + String name = data.get("name").asText(); + int idx = data.get("cameraIndex").asInt(); + + VisionModuleManager.getInstance().getModule(idx).setCameraNickname(name); + ctx.status(200); + ctx.result("Successfully changed the camera name to: " + name); + logger.info("Successfully changed the camera name to: " + name); + } catch (JsonProcessingException e) { + ctx.status(400); + ctx.result("The provided nickname data was malformed"); + logger.error("The provided nickname data was malformed", e); + + } catch (Exception e) { + ctx.status(500); + ctx.result("An error occurred while changing the camera's nickname"); + logger.error("An error occurred while changing the camera's nickname", e); + } + } + + public static void onMetricsPublishRequest(Context ctx) { + HardwareManager.getInstance().publishMetrics(); + ctx.status(204); + } + + /** + * Create a temporary file using the UploadedFile from Javalin. + * + * @param file the uploaded file. + * @return Temporary file. Empty if the temporary file was unable to be created. + */ + private static Optional handleTempFileCreation(UploadedFile file) { + var tempFilePath = + new File(Path.of(System.getProperty("java.io.tmpdir"), file.filename()).toString()); + boolean makeDirsRes = tempFilePath.getParentFile().mkdirs(); + + if (!makeDirsRes && !(tempFilePath.getParentFile().exists())) { + logger.error( + "There was an error while creating " + + tempFilePath.getAbsolutePath() + + "! 
Exists: " + + tempFilePath.getParentFile().exists()); + return Optional.empty(); + } + + try { + FileUtils.copyInputStreamToFile(file.content(), tempFilePath); + } catch (IOException e) { + logger.error( + "There was an error while copying " + + file.filename() + + " to the temp file " + + tempFilePath.getAbsolutePath()); + return Optional.empty(); + } + + return Optional.of(tempFilePath); + } + + /** + * Restart the running program. Note that this doesn't actually restart the program itself, + * instead, it relies on systemd or an equivalent. + */ + private static void restartProgram() { + TimedTaskManager.getInstance() + .addOneShotTask( + () -> { + if (Platform.isLinux()) { + try { + new ShellExec().executeBashCommand("systemctl restart photonvision.service"); + } catch (IOException e) { + logger.error("Could not restart device!", e); + System.exit(0); + } + } else { + System.exit(0); + } + }, + 0); + } } diff --git a/photon-server/src/main/java/org/photonvision/server/Server.java b/photon-server/src/main/java/org/photonvision/server/Server.java index 4026ab58fa..e83c1e2b46 100644 --- a/photon-server/src/main/java/org/photonvision/server/Server.java +++ b/photon-server/src/main/java/org/photonvision/server/Server.java @@ -25,94 +25,94 @@ import org.photonvision.common.logging.Logger; public class Server { - private static final Logger logger = new Logger(Server.class, LogGroup.WebServer); + private static final Logger logger = new Logger(Server.class, LogGroup.WebServer); - public static void start(int port) { - var app = - Javalin.create( - javalinConfig -> { - javalinConfig.showJavalinBanner = false; - javalinConfig.staticFiles.add("web"); - javalinConfig.plugins.enableCors( - corsContainer -> { - corsContainer.add(CorsPluginConfig::anyHost); - }); + public static void start(int port) { + var app = + Javalin.create( + javalinConfig -> { + javalinConfig.showJavalinBanner = false; + javalinConfig.staticFiles.add("web"); + javalinConfig.plugins.enableCors( + 
corsContainer -> { + corsContainer.add(CorsPluginConfig::anyHost); + }); - javalinConfig.requestLogger.http( - (ctx, ms) -> { - StringJoiner joiner = - new StringJoiner(" ") - .add("Handled HTTP request of type") - .add(ctx.req().getMethod()) - .add("from endpoint") - .add(ctx.path()) - .add("for host") - .add(ctx.req().getRemoteHost()) - .add("in") - .add(ms.toString()) - .add("ms"); + javalinConfig.requestLogger.http( + (ctx, ms) -> { + StringJoiner joiner = + new StringJoiner(" ") + .add("Handled HTTP request of type") + .add(ctx.req().getMethod()) + .add("from endpoint") + .add(ctx.path()) + .add("for host") + .add(ctx.req().getRemoteHost()) + .add("in") + .add(ms.toString()) + .add("ms"); - logger.debug(joiner.toString()); - }); - javalinConfig.requestLogger.ws( - ws -> { - ws.onMessage(ctx -> logger.debug("Got WebSockets message: " + ctx.message())); - ws.onBinaryMessage( - ctx -> - logger.trace( - () -> { - var remote = (InetSocketAddress) ctx.session.getRemoteAddress(); - var host = - remote.getAddress().toString() + ":" + remote.getPort(); - return "Got WebSockets binary message from host: " + host; - })); - }); - }); + logger.debug(joiner.toString()); + }); + javalinConfig.requestLogger.ws( + ws -> { + ws.onMessage(ctx -> logger.debug("Got WebSockets message: " + ctx.message())); + ws.onBinaryMessage( + ctx -> + logger.trace( + () -> { + var remote = (InetSocketAddress) ctx.session.getRemoteAddress(); + var host = + remote.getAddress().toString() + ":" + remote.getPort(); + return "Got WebSockets binary message from host: " + host; + })); + }); + }); - /*Web Socket Events for Data Exchange */ - var dsHandler = DataSocketHandler.getInstance(); - app.ws( - "/websocket_data", - ws -> { - ws.onConnect(dsHandler::onConnect); - ws.onClose(dsHandler::onClose); - ws.onBinaryMessage(dsHandler::onBinaryMessage); - }); + /*Web Socket Events for Data Exchange */ + var dsHandler = DataSocketHandler.getInstance(); + app.ws( + "/websocket_data", + ws -> { + 
ws.onConnect(dsHandler::onConnect); + ws.onClose(dsHandler::onClose); + ws.onBinaryMessage(dsHandler::onBinaryMessage); + }); - /*Web Socket Events for Camera Streaming */ - var camDsHandler = CameraSocketHandler.getInstance(); - app.ws( - "/websocket_cameras", - ws -> { - ws.onConnect(camDsHandler::onConnect); - ws.onClose(camDsHandler::onClose); - ws.onBinaryMessage(camDsHandler::onBinaryMessage); - ws.onMessage(camDsHandler::onMessage); - }); + /*Web Socket Events for Camera Streaming */ + var camDsHandler = CameraSocketHandler.getInstance(); + app.ws( + "/websocket_cameras", + ws -> { + ws.onConnect(camDsHandler::onConnect); + ws.onClose(camDsHandler::onClose); + ws.onBinaryMessage(camDsHandler::onBinaryMessage); + ws.onMessage(camDsHandler::onMessage); + }); - /*API Events*/ - // Settings - app.post("/api/settings", RequestHandler::onSettingsImportRequest); - app.get("/api/settings/photonvision_config.zip", RequestHandler::onSettingsExportRequest); - app.post("/api/settings/hardwareConfig", RequestHandler::onHardwareConfigRequest); - app.post("/api/settings/hardwareSettings", RequestHandler::onHardwareSettingsRequest); - app.post("/api/settings/networkConfig", RequestHandler::onNetworkConfigRequest); - app.post("/api/settings/aprilTagFieldLayout", RequestHandler::onAprilTagFieldLayoutRequest); - app.post("/api/settings/general", RequestHandler::onGeneralSettingsRequest); - app.post("/api/settings/camera", RequestHandler::onCameraSettingsRequest); - app.post("/api/settings/camera/setNickname", RequestHandler::onCameraNicknameChangeRequest); + /*API Events*/ + // Settings + app.post("/api/settings", RequestHandler::onSettingsImportRequest); + app.get("/api/settings/photonvision_config.zip", RequestHandler::onSettingsExportRequest); + app.post("/api/settings/hardwareConfig", RequestHandler::onHardwareConfigRequest); + app.post("/api/settings/hardwareSettings", RequestHandler::onHardwareSettingsRequest); + app.post("/api/settings/networkConfig", 
RequestHandler::onNetworkConfigRequest); + app.post("/api/settings/aprilTagFieldLayout", RequestHandler::onAprilTagFieldLayoutRequest); + app.post("/api/settings/general", RequestHandler::onGeneralSettingsRequest); + app.post("/api/settings/camera", RequestHandler::onCameraSettingsRequest); + app.post("/api/settings/camera/setNickname", RequestHandler::onCameraNicknameChangeRequest); - // Utilities - app.post("/api/utils/offlineUpdate", RequestHandler::onOfflineUpdateRequest); - app.get("/api/utils/photonvision-journalctl.txt", RequestHandler::onLogExportRequest); - app.post("/api/utils/restartProgram", RequestHandler::onProgramRestartRequest); - app.post("/api/utils/restartDevice", RequestHandler::onDeviceRestartRequest); - app.post("/api/utils/publishMetrics", RequestHandler::onMetricsPublishRequest); + // Utilities + app.post("/api/utils/offlineUpdate", RequestHandler::onOfflineUpdateRequest); + app.get("/api/utils/photonvision-journalctl.txt", RequestHandler::onLogExportRequest); + app.post("/api/utils/restartProgram", RequestHandler::onProgramRestartRequest); + app.post("/api/utils/restartDevice", RequestHandler::onDeviceRestartRequest); + app.post("/api/utils/publishMetrics", RequestHandler::onMetricsPublishRequest); - // Calibration - app.post("/api/calibration/end", RequestHandler::onCalibrationEndRequest); - app.post("/api/calibration/importFromCalibDB", RequestHandler::onCalibrationImportRequest); + // Calibration + app.post("/api/calibration/end", RequestHandler::onCalibrationEndRequest); + app.post("/api/calibration/importFromCalibDB", RequestHandler::onCalibrationImportRequest); - app.start(port); - } + app.start(port); + } } diff --git a/photon-server/src/main/java/org/photonvision/server/UIInboundSubscriber.java b/photon-server/src/main/java/org/photonvision/server/UIInboundSubscriber.java index 5f1e125a4b..cca65b54ed 100644 --- a/photon-server/src/main/java/org/photonvision/server/UIInboundSubscriber.java +++ 
b/photon-server/src/main/java/org/photonvision/server/UIInboundSubscriber.java @@ -30,26 +30,26 @@ import org.photonvision.common.logging.Logger; public class UIInboundSubscriber extends DataChangeSubscriber { - public UIInboundSubscriber() { - super( - Collections.singletonList(DataChangeSource.DCS_WEBSOCKET), - Collections.singletonList(DataChangeDestination.DCD_GENSETTINGS)); - } + public UIInboundSubscriber() { + super( + Collections.singletonList(DataChangeSource.DCS_WEBSOCKET), + Collections.singletonList(DataChangeDestination.DCD_GENSETTINGS)); + } - @Override - public void onDataChangeEvent(DataChangeEvent event) { - if (event instanceof IncomingWebSocketEvent) { - var incomingWSEvent = (IncomingWebSocketEvent) event; - if (incomingWSEvent.propertyName.equals("userConnected") - || incomingWSEvent.propertyName.equals("sendFullSettings")) { - // Send full settings - var settings = ConfigManager.getInstance().getConfig().toHashMap(); - var message = - new OutgoingUIEvent<>("fullsettings", settings, incomingWSEvent.originContext); - DataChangeService.getInstance().publishEvent(message); - Logger.sendConnectedBacklog(); - NetworkTablesManager.getInstance().broadcastConnectedStatus(); - } - } + @Override + public void onDataChangeEvent(DataChangeEvent event) { + if (event instanceof IncomingWebSocketEvent) { + var incomingWSEvent = (IncomingWebSocketEvent) event; + if (incomingWSEvent.propertyName.equals("userConnected") + || incomingWSEvent.propertyName.equals("sendFullSettings")) { + // Send full settings + var settings = ConfigManager.getInstance().getConfig().toHashMap(); + var message = + new OutgoingUIEvent<>("fullsettings", settings, incomingWSEvent.originContext); + DataChangeService.getInstance().publishEvent(message); + Logger.sendConnectedBacklog(); + NetworkTablesManager.getInstance().broadcastConnectedStatus(); + } } + } } diff --git a/photon-server/src/main/java/org/photonvision/server/UIOutboundSubscriber.java 
b/photon-server/src/main/java/org/photonvision/server/UIOutboundSubscriber.java index e502b5eec9..91af46b54b 100644 --- a/photon-server/src/main/java/org/photonvision/server/UIOutboundSubscriber.java +++ b/photon-server/src/main/java/org/photonvision/server/UIOutboundSubscriber.java @@ -33,29 +33,29 @@ * DO NOT use logging in this class. If you do, the logs will recurse forever! */ class UIOutboundSubscriber extends DataChangeSubscriber { - Logger logger = new Logger(UIOutboundSubscriber.class, LogGroup.WebServer); + Logger logger = new Logger(UIOutboundSubscriber.class, LogGroup.WebServer); - private final DataSocketHandler socketHandler; + private final DataSocketHandler socketHandler; - public UIOutboundSubscriber(DataSocketHandler socketHandler) { - super(DataChangeSource.AllSources, Collections.singletonList(DataChangeDestination.DCD_UI)); - this.socketHandler = socketHandler; - } + public UIOutboundSubscriber(DataSocketHandler socketHandler) { + super(DataChangeSource.AllSources, Collections.singletonList(DataChangeDestination.DCD_UI)); + this.socketHandler = socketHandler; + } - @Override - public void onDataChangeEvent(DataChangeEvent event) { - if (event instanceof OutgoingUIEvent) { - var thisEvent = (OutgoingUIEvent) event; - try { - if (event.data instanceof HashMap) { - var data = (HashMap) event.data; - socketHandler.broadcastMessage(data, thisEvent.originContext); - } else { - socketHandler.broadcastMessage(event.data, thisEvent.originContext); - } - } catch (JsonProcessingException e) { - logger.error("Failed to process outgoing message!", e); - } + @Override + public void onDataChangeEvent(DataChangeEvent event) { + if (event instanceof OutgoingUIEvent) { + var thisEvent = (OutgoingUIEvent) event; + try { + if (event.data instanceof HashMap) { + var data = (HashMap) event.data; + socketHandler.broadcastMessage(data, thisEvent.originContext); + } else { + socketHandler.broadcastMessage(event.data, thisEvent.originContext); } + } catch 
(JsonProcessingException e) { + logger.error("Failed to process outgoing message!", e); + } } + } } diff --git a/photon-server/src/main/java/org/photonvision/server/UISettings.java b/photon-server/src/main/java/org/photonvision/server/UISettings.java index f4361c2670..0aa3731134 100644 --- a/photon-server/src/main/java/org/photonvision/server/UISettings.java +++ b/photon-server/src/main/java/org/photonvision/server/UISettings.java @@ -21,6 +21,6 @@ import org.photonvision.common.configuration.NetworkConfig; public class UISettings { - public NetworkConfig networkConfig; - public CameraConfiguration currentCameraConfiguration; + public NetworkConfig networkConfig; + public CameraConfiguration currentCameraConfiguration; } diff --git a/photon-targeting/src/main/java/org/photonvision/common/dataflow/structures/Packet.java b/photon-targeting/src/main/java/org/photonvision/common/dataflow/structures/Packet.java index e97e3f1658..a7b46cf64b 100644 --- a/photon-targeting/src/main/java/org/photonvision/common/dataflow/structures/Packet.java +++ b/photon-targeting/src/main/java/org/photonvision/common/dataflow/structures/Packet.java @@ -19,198 +19,198 @@ /** A packet that holds byte-packed data to be sent over NetworkTables. */ public class Packet { - // Size of the packet. - int size; - // Data stored in the packet. - byte[] packetData; - // Read and write positions. - int readPos, writePos; - - /** - * Constructs an empty packet. - * - * @param size The size of the packet buffer. - */ - public Packet(int size) { - this.size = size; - packetData = new byte[size]; - } - - /** - * Constructs a packet with the given data. - * - * @param data The packet data. - */ - public Packet(byte[] data) { - packetData = data; - size = packetData.length; - } - - /** Clears the packet and resets the read and write positions. 
*/ - public void clear() { - packetData = new byte[size]; - readPos = 0; - writePos = 0; - } - - public int getSize() { - return size; - } - - /** - * Returns the packet data. - * - * @return The packet data. - */ - public byte[] getData() { - return packetData; - } - - /** - * Sets the packet data. - * - * @param data The packet data. - */ - public void setData(byte[] data) { - packetData = data; - size = data.length; - } - - /** - * Encodes the byte into the packet. - * - * @param src The byte to encode. - */ - public void encode(byte src) { - packetData[writePos++] = src; - } - - /** - * Encodes the short into the packet. - * - * @param src The short to encode. - */ - public void encode(short src) { - packetData[writePos++] = (byte) (src >>> 8); - packetData[writePos++] = (byte) src; - } - - /** - * Encodes the integer into the packet. - * - * @param src The integer to encode. - */ - public void encode(int src) { - packetData[writePos++] = (byte) (src >>> 24); - packetData[writePos++] = (byte) (src >>> 16); - packetData[writePos++] = (byte) (src >>> 8); - packetData[writePos++] = (byte) src; - } - - /** - * Encodes the double into the packet. - * - * @param src The double to encode. - */ - public void encode(double src) { - long data = Double.doubleToRawLongBits(src); - packetData[writePos++] = (byte) ((data >> 56) & 0xff); - packetData[writePos++] = (byte) ((data >> 48) & 0xff); - packetData[writePos++] = (byte) ((data >> 40) & 0xff); - packetData[writePos++] = (byte) ((data >> 32) & 0xff); - packetData[writePos++] = (byte) ((data >> 24) & 0xff); - packetData[writePos++] = (byte) ((data >> 16) & 0xff); - packetData[writePos++] = (byte) ((data >> 8) & 0xff); - packetData[writePos++] = (byte) (data & 0xff); - } - - /** - * Encodes the boolean into the packet. - * - * @param src The boolean to encode. - */ - public void encode(boolean src) { - packetData[writePos++] = src ? (byte) 1 : (byte) 0; - } - - /** - * Returns a decoded byte from the packet. 
- * - * @return A decoded byte from the packet. - */ - public byte decodeByte() { - if (packetData.length < readPos) { - return '\0'; - } - return packetData[readPos++]; - } - - /** - * Returns a decoded int from the packet. - * - * @return A decoded int from the packet. - */ - public int decodeInt() { - if (packetData.length < readPos + 3) { - return 0; - } - return (0xff & packetData[readPos++]) << 24 - | (0xff & packetData[readPos++]) << 16 - | (0xff & packetData[readPos++]) << 8 - | (0xff & packetData[readPos++]); - } - - /** - * Returns a decoded double from the packet. - * - * @return A decoded double from the packet. - */ - public double decodeDouble() { - if (packetData.length < (readPos + 7)) { - return 0; - } - long data = - (long) (0xff & packetData[readPos++]) << 56 - | (long) (0xff & packetData[readPos++]) << 48 - | (long) (0xff & packetData[readPos++]) << 40 - | (long) (0xff & packetData[readPos++]) << 32 - | (long) (0xff & packetData[readPos++]) << 24 - | (long) (0xff & packetData[readPos++]) << 16 - | (long) (0xff & packetData[readPos++]) << 8 - | (long) (0xff & packetData[readPos++]); - return Double.longBitsToDouble(data); - } - - /** - * Returns a decoded boolean from the packet. - * - * @return A decoded boolean from the packet. - */ - public boolean decodeBoolean() { - if (packetData.length < readPos) { - return false; - } - return packetData[readPos++] == 1; - } - - public void encode(double[] data) { - for (double d : data) { - encode(d); - } - } - - public double[] decodeDoubleArray(int len) { - double[] ret = new double[len]; - for (int i = 0; i < len; i++) { - ret[i] = decodeDouble(); - } - return ret; - } - - public short decodeShort() { - if (packetData.length < readPos + 1) { - return 0; - } - return (short) ((0xff & packetData[readPos++]) << 8 | (0xff & packetData[readPos++])); - } + // Size of the packet. + int size; + // Data stored in the packet. + byte[] packetData; + // Read and write positions. 
+ int readPos, writePos; + + /** + * Constructs an empty packet. + * + * @param size The size of the packet buffer. + */ + public Packet(int size) { + this.size = size; + packetData = new byte[size]; + } + + /** + * Constructs a packet with the given data. + * + * @param data The packet data. + */ + public Packet(byte[] data) { + packetData = data; + size = packetData.length; + } + + /** Clears the packet and resets the read and write positions. */ + public void clear() { + packetData = new byte[size]; + readPos = 0; + writePos = 0; + } + + public int getSize() { + return size; + } + + /** + * Returns the packet data. + * + * @return The packet data. + */ + public byte[] getData() { + return packetData; + } + + /** + * Sets the packet data. + * + * @param data The packet data. + */ + public void setData(byte[] data) { + packetData = data; + size = data.length; + } + + /** + * Encodes the byte into the packet. + * + * @param src The byte to encode. + */ + public void encode(byte src) { + packetData[writePos++] = src; + } + + /** + * Encodes the short into the packet. + * + * @param src The short to encode. + */ + public void encode(short src) { + packetData[writePos++] = (byte) (src >>> 8); + packetData[writePos++] = (byte) src; + } + + /** + * Encodes the integer into the packet. + * + * @param src The integer to encode. + */ + public void encode(int src) { + packetData[writePos++] = (byte) (src >>> 24); + packetData[writePos++] = (byte) (src >>> 16); + packetData[writePos++] = (byte) (src >>> 8); + packetData[writePos++] = (byte) src; + } + + /** + * Encodes the double into the packet. + * + * @param src The double to encode. 
+ */ + public void encode(double src) { + long data = Double.doubleToRawLongBits(src); + packetData[writePos++] = (byte) ((data >> 56) & 0xff); + packetData[writePos++] = (byte) ((data >> 48) & 0xff); + packetData[writePos++] = (byte) ((data >> 40) & 0xff); + packetData[writePos++] = (byte) ((data >> 32) & 0xff); + packetData[writePos++] = (byte) ((data >> 24) & 0xff); + packetData[writePos++] = (byte) ((data >> 16) & 0xff); + packetData[writePos++] = (byte) ((data >> 8) & 0xff); + packetData[writePos++] = (byte) (data & 0xff); + } + + /** + * Encodes the boolean into the packet. + * + * @param src The boolean to encode. + */ + public void encode(boolean src) { + packetData[writePos++] = src ? (byte) 1 : (byte) 0; + } + + /** + * Returns a decoded byte from the packet. + * + * @return A decoded byte from the packet. + */ + public byte decodeByte() { + if (packetData.length < readPos) { + return '\0'; + } + return packetData[readPos++]; + } + + /** + * Returns a decoded int from the packet. + * + * @return A decoded int from the packet. + */ + public int decodeInt() { + if (packetData.length < readPos + 3) { + return 0; + } + return (0xff & packetData[readPos++]) << 24 + | (0xff & packetData[readPos++]) << 16 + | (0xff & packetData[readPos++]) << 8 + | (0xff & packetData[readPos++]); + } + + /** + * Returns a decoded double from the packet. + * + * @return A decoded double from the packet. + */ + public double decodeDouble() { + if (packetData.length < (readPos + 7)) { + return 0; + } + long data = + (long) (0xff & packetData[readPos++]) << 56 + | (long) (0xff & packetData[readPos++]) << 48 + | (long) (0xff & packetData[readPos++]) << 40 + | (long) (0xff & packetData[readPos++]) << 32 + | (long) (0xff & packetData[readPos++]) << 24 + | (long) (0xff & packetData[readPos++]) << 16 + | (long) (0xff & packetData[readPos++]) << 8 + | (long) (0xff & packetData[readPos++]); + return Double.longBitsToDouble(data); + } + + /** + * Returns a decoded boolean from the packet. 
+ * + * @return A decoded boolean from the packet. + */ + public boolean decodeBoolean() { + if (packetData.length < readPos) { + return false; + } + return packetData[readPos++] == 1; + } + + public void encode(double[] data) { + for (double d : data) { + encode(d); + } + } + + public double[] decodeDoubleArray(int len) { + double[] ret = new double[len]; + for (int i = 0; i < len; i++) { + ret[i] = decodeDouble(); + } + return ret; + } + + public short decodeShort() { + if (packetData.length < readPos + 1) { + return 0; + } + return (short) ((0xff & packetData[readPos++]) << 8 | (0xff & packetData[readPos++])); + } } diff --git a/photon-targeting/src/main/java/org/photonvision/common/hardware/VisionLEDMode.java b/photon-targeting/src/main/java/org/photonvision/common/hardware/VisionLEDMode.java index af95fc2cbb..b3ed52b970 100644 --- a/photon-targeting/src/main/java/org/photonvision/common/hardware/VisionLEDMode.java +++ b/photon-targeting/src/main/java/org/photonvision/common/hardware/VisionLEDMode.java @@ -18,29 +18,29 @@ package org.photonvision.common.hardware; public enum VisionLEDMode { - kDefault(-1), - kOff(0), - kOn(1), - kBlink(2); + kDefault(-1), + kOff(0), + kOn(1), + kBlink(2); - public final int value; + public final int value; - VisionLEDMode(int value) { - this.value = value; - } + VisionLEDMode(int value) { + this.value = value; + } - @Override - public String toString() { - switch (this) { - case kDefault: - return "Default"; - case kOff: - return "Off"; - case kOn: - return "On"; - case kBlink: - return "Blink"; - } - return ""; + @Override + public String toString() { + switch (this) { + case kDefault: + return "Default"; + case kOff: + return "Off"; + case kOn: + return "On"; + case kBlink: + return "Blink"; } + return ""; + } } diff --git a/photon-targeting/src/main/java/org/photonvision/common/networktables/NTTopicSet.java b/photon-targeting/src/main/java/org/photonvision/common/networktables/NTTopicSet.java index 5a0d31eaed..a986d5946b 
100644 --- a/photon-targeting/src/main/java/org/photonvision/common/networktables/NTTopicSet.java +++ b/photon-targeting/src/main/java/org/photonvision/common/networktables/NTTopicSet.java @@ -38,92 +38,92 @@ * different for sim vs. real camera */ public class NTTopicSet { - public NetworkTable subTable; - public RawPublisher rawBytesEntry; - - public IntegerPublisher pipelineIndexPublisher; - public IntegerSubscriber pipelineIndexRequestSub; - - public BooleanTopic driverModeEntry; - public BooleanPublisher driverModePublisher; - public BooleanSubscriber driverModeSubscriber; - - public DoublePublisher latencyMillisEntry; - public BooleanPublisher hasTargetEntry; - public DoublePublisher targetPitchEntry; - public DoublePublisher targetYawEntry; - public DoublePublisher targetAreaEntry; - public DoubleArrayPublisher targetPoseEntry; - public DoublePublisher targetSkewEntry; - - // The raw position of the best target, in pixels. - public DoublePublisher bestTargetPosX; - public DoublePublisher bestTargetPosY; - - // Heartbeat - public IntegerTopic heartbeatTopic; - public IntegerPublisher heartbeatPublisher; - - // Camera Calibration - public DoubleArrayPublisher cameraIntrinsicsPublisher; - public DoubleArrayPublisher cameraDistortionPublisher; - - public void updateEntries() { - rawBytesEntry = - subTable - .getRawTopic("rawBytes") - .publish("rawBytes", PubSubOption.periodic(0.01), PubSubOption.sendAll(true)); - - pipelineIndexPublisher = subTable.getIntegerTopic("pipelineIndexState").publish(); - pipelineIndexRequestSub = subTable.getIntegerTopic("pipelineIndexRequest").subscribe(0); - - driverModePublisher = subTable.getBooleanTopic("driverMode").publish(); - driverModeSubscriber = subTable.getBooleanTopic("driverModeRequest").subscribe(false); - - // Fun little hack to make the request show up - driverModeSubscriber.getTopic().publish().setDefault(false); - - latencyMillisEntry = subTable.getDoubleTopic("latencyMillis").publish(); - hasTargetEntry = 
subTable.getBooleanTopic("hasTarget").publish(); - - targetPitchEntry = subTable.getDoubleTopic("targetPitch").publish(); - targetAreaEntry = subTable.getDoubleTopic("targetArea").publish(); - targetYawEntry = subTable.getDoubleTopic("targetYaw").publish(); - targetPoseEntry = subTable.getDoubleArrayTopic("targetPose").publish(); - targetSkewEntry = subTable.getDoubleTopic("targetSkew").publish(); - - bestTargetPosX = subTable.getDoubleTopic("targetPixelsX").publish(); - bestTargetPosY = subTable.getDoubleTopic("targetPixelsY").publish(); - - heartbeatTopic = subTable.getIntegerTopic("heartbeat"); - heartbeatPublisher = heartbeatTopic.publish(); - - cameraIntrinsicsPublisher = subTable.getDoubleArrayTopic("cameraIntrinsics").publish(); - cameraDistortionPublisher = subTable.getDoubleArrayTopic("cameraDistortion").publish(); - } - - @SuppressWarnings("DuplicatedCode") - public void removeEntries() { - if (rawBytesEntry != null) rawBytesEntry.close(); - if (pipelineIndexPublisher != null) pipelineIndexPublisher.close(); - if (pipelineIndexRequestSub != null) pipelineIndexRequestSub.close(); - - if (driverModePublisher != null) driverModePublisher.close(); - if (driverModeSubscriber != null) driverModeSubscriber.close(); - - if (latencyMillisEntry != null) latencyMillisEntry.close(); - if (hasTargetEntry != null) hasTargetEntry.close(); - if (targetPitchEntry != null) targetPitchEntry.close(); - if (targetAreaEntry != null) targetAreaEntry.close(); - if (targetYawEntry != null) targetYawEntry.close(); - if (targetPoseEntry != null) targetPoseEntry.close(); - if (targetSkewEntry != null) targetSkewEntry.close(); - if (bestTargetPosX != null) bestTargetPosX.close(); - if (bestTargetPosY != null) bestTargetPosY.close(); - - if (heartbeatPublisher != null) heartbeatPublisher.close(); - - if (cameraIntrinsicsPublisher != null) cameraIntrinsicsPublisher.close(); - if (cameraDistortionPublisher != null) cameraDistortionPublisher.close(); - } + public NetworkTable subTable; + 
public RawPublisher rawBytesEntry; + + public IntegerPublisher pipelineIndexPublisher; + public IntegerSubscriber pipelineIndexRequestSub; + + public BooleanTopic driverModeEntry; + public BooleanPublisher driverModePublisher; + public BooleanSubscriber driverModeSubscriber; + + public DoublePublisher latencyMillisEntry; + public BooleanPublisher hasTargetEntry; + public DoublePublisher targetPitchEntry; + public DoublePublisher targetYawEntry; + public DoublePublisher targetAreaEntry; + public DoubleArrayPublisher targetPoseEntry; + public DoublePublisher targetSkewEntry; + + // The raw position of the best target, in pixels. + public DoublePublisher bestTargetPosX; + public DoublePublisher bestTargetPosY; + + // Heartbeat + public IntegerTopic heartbeatTopic; + public IntegerPublisher heartbeatPublisher; + + // Camera Calibration + public DoubleArrayPublisher cameraIntrinsicsPublisher; + public DoubleArrayPublisher cameraDistortionPublisher; + + public void updateEntries() { + rawBytesEntry = + subTable + .getRawTopic("rawBytes") + .publish("rawBytes", PubSubOption.periodic(0.01), PubSubOption.sendAll(true)); + + pipelineIndexPublisher = subTable.getIntegerTopic("pipelineIndexState").publish(); + pipelineIndexRequestSub = subTable.getIntegerTopic("pipelineIndexRequest").subscribe(0); + + driverModePublisher = subTable.getBooleanTopic("driverMode").publish(); + driverModeSubscriber = subTable.getBooleanTopic("driverModeRequest").subscribe(false); + + // Fun little hack to make the request show up + driverModeSubscriber.getTopic().publish().setDefault(false); + + latencyMillisEntry = subTable.getDoubleTopic("latencyMillis").publish(); + hasTargetEntry = subTable.getBooleanTopic("hasTarget").publish(); + + targetPitchEntry = subTable.getDoubleTopic("targetPitch").publish(); + targetAreaEntry = subTable.getDoubleTopic("targetArea").publish(); + targetYawEntry = subTable.getDoubleTopic("targetYaw").publish(); + targetPoseEntry = 
subTable.getDoubleArrayTopic("targetPose").publish(); + targetSkewEntry = subTable.getDoubleTopic("targetSkew").publish(); + + bestTargetPosX = subTable.getDoubleTopic("targetPixelsX").publish(); + bestTargetPosY = subTable.getDoubleTopic("targetPixelsY").publish(); + + heartbeatTopic = subTable.getIntegerTopic("heartbeat"); + heartbeatPublisher = heartbeatTopic.publish(); + + cameraIntrinsicsPublisher = subTable.getDoubleArrayTopic("cameraIntrinsics").publish(); + cameraDistortionPublisher = subTable.getDoubleArrayTopic("cameraDistortion").publish(); + } + + @SuppressWarnings("DuplicatedCode") + public void removeEntries() { + if (rawBytesEntry != null) rawBytesEntry.close(); + if (pipelineIndexPublisher != null) pipelineIndexPublisher.close(); + if (pipelineIndexRequestSub != null) pipelineIndexRequestSub.close(); + + if (driverModePublisher != null) driverModePublisher.close(); + if (driverModeSubscriber != null) driverModeSubscriber.close(); + + if (latencyMillisEntry != null) latencyMillisEntry.close(); + if (hasTargetEntry != null) hasTargetEntry.close(); + if (targetPitchEntry != null) targetPitchEntry.close(); + if (targetAreaEntry != null) targetAreaEntry.close(); + if (targetYawEntry != null) targetYawEntry.close(); + if (targetPoseEntry != null) targetPoseEntry.close(); + if (targetSkewEntry != null) targetSkewEntry.close(); + if (bestTargetPosX != null) bestTargetPosX.close(); + if (bestTargetPosY != null) bestTargetPosY.close(); + + if (heartbeatPublisher != null) heartbeatPublisher.close(); + + if (cameraIntrinsicsPublisher != null) cameraIntrinsicsPublisher.close(); + if (cameraDistortionPublisher != null) cameraDistortionPublisher.close(); + } } diff --git a/photon-targeting/src/main/java/org/photonvision/estimation/CameraTargetRelation.java b/photon-targeting/src/main/java/org/photonvision/estimation/CameraTargetRelation.java index b0b915c93e..5ff59cdb47 100644 --- 
a/photon-targeting/src/main/java/org/photonvision/estimation/CameraTargetRelation.java +++ b/photon-targeting/src/main/java/org/photonvision/estimation/CameraTargetRelation.java @@ -23,37 +23,37 @@ /** Holds various helper geometries describing the relation between camera and target. */ public class CameraTargetRelation { - public final Pose3d camPose; - public final Transform3d camToTarg; - public final double camToTargDist; - public final double camToTargDistXY; - public final Rotation2d camToTargYaw; - public final Rotation2d camToTargPitch; + public final Pose3d camPose; + public final Transform3d camToTarg; + public final double camToTargDist; + public final double camToTargDistXY; + public final Rotation2d camToTargYaw; + public final Rotation2d camToTargPitch; - /** Angle from the camera's relative x-axis */ - public final Rotation2d camToTargAngle; + /** Angle from the camera's relative x-axis */ + public final Rotation2d camToTargAngle; - public final Transform3d targToCam; - public final Rotation2d targToCamYaw; - public final Rotation2d targToCamPitch; + public final Transform3d targToCam; + public final Rotation2d targToCamYaw; + public final Rotation2d targToCamPitch; - /** Angle from the target's relative x-axis */ - public final Rotation2d targToCamAngle; + /** Angle from the target's relative x-axis */ + public final Rotation2d targToCamAngle; - public CameraTargetRelation(Pose3d cameraPose, Pose3d targetPose) { - this.camPose = cameraPose; - camToTarg = new Transform3d(cameraPose, targetPose); - camToTargDist = camToTarg.getTranslation().getNorm(); - camToTargDistXY = - Math.hypot(camToTarg.getTranslation().getX(), camToTarg.getTranslation().getY()); - camToTargYaw = new Rotation2d(camToTarg.getX(), camToTarg.getY()); - camToTargPitch = new Rotation2d(camToTargDistXY, -camToTarg.getZ()); - camToTargAngle = - new Rotation2d(Math.hypot(camToTargYaw.getRadians(), camToTargPitch.getRadians())); - targToCam = new Transform3d(targetPose, cameraPose); - 
targToCamYaw = new Rotation2d(targToCam.getX(), targToCam.getY()); - targToCamPitch = new Rotation2d(camToTargDistXY, -targToCam.getZ()); - targToCamAngle = - new Rotation2d(Math.hypot(targToCamYaw.getRadians(), targToCamPitch.getRadians())); - } + public CameraTargetRelation(Pose3d cameraPose, Pose3d targetPose) { + this.camPose = cameraPose; + camToTarg = new Transform3d(cameraPose, targetPose); + camToTargDist = camToTarg.getTranslation().getNorm(); + camToTargDistXY = + Math.hypot(camToTarg.getTranslation().getX(), camToTarg.getTranslation().getY()); + camToTargYaw = new Rotation2d(camToTarg.getX(), camToTarg.getY()); + camToTargPitch = new Rotation2d(camToTargDistXY, -camToTarg.getZ()); + camToTargAngle = + new Rotation2d(Math.hypot(camToTargYaw.getRadians(), camToTargPitch.getRadians())); + targToCam = new Transform3d(targetPose, cameraPose); + targToCamYaw = new Rotation2d(targToCam.getX(), targToCam.getY()); + targToCamPitch = new Rotation2d(camToTargDistXY, -targToCam.getZ()); + targToCamAngle = + new Rotation2d(Math.hypot(targToCamYaw.getRadians(), targToCamPitch.getRadians())); + } } diff --git a/photon-targeting/src/main/java/org/photonvision/estimation/OpenCVHelp.java b/photon-targeting/src/main/java/org/photonvision/estimation/OpenCVHelp.java index 3f89918d62..53c6179e29 100644 --- a/photon-targeting/src/main/java/org/photonvision/estimation/OpenCVHelp.java +++ b/photon-targeting/src/main/java/org/photonvision/estimation/OpenCVHelp.java @@ -49,528 +49,528 @@ import org.photonvision.targeting.TargetCorner; public final class OpenCVHelp { - private static RotTrlTransform3d NWU_TO_EDN; - private static RotTrlTransform3d EDN_TO_NWU; - - static { - try { - var loader = - new RuntimeLoader<>( - Core.NATIVE_LIBRARY_NAME, RuntimeLoader.getDefaultExtractionRoot(), Core.class); - loader.loadLibrary(); - } catch (Exception e) { - throw new RuntimeException("Failed to load native libraries!", e); - } - - NWU_TO_EDN = - new RotTrlTransform3d( - new 
Rotation3d(Matrix.mat(Nat.N3(), Nat.N3()).fill(0, -1, 0, 0, 0, -1, 1, 0, 0)), - new Translation3d()); - EDN_TO_NWU = - new RotTrlTransform3d( - new Rotation3d(Matrix.mat(Nat.N3(), Nat.N3()).fill(0, 0, 1, -1, 0, 0, 0, -1, 0)), - new Translation3d()); - } - - public static Mat matrixToMat(SimpleMatrix matrix) { - var mat = new Mat(matrix.numRows(), matrix.numCols(), CvType.CV_64F); - mat.put(0, 0, matrix.getDDRM().getData()); - return mat; - } - - public static Matrix matToMatrix(Mat mat) { - double[] data = new double[(int) mat.total() * mat.channels()]; - var doubleMat = new Mat(mat.rows(), mat.cols(), CvType.CV_64F); - mat.convertTo(doubleMat, CvType.CV_64F); - doubleMat.get(0, 0, data); - return new Matrix<>(new SimpleMatrix(mat.rows(), mat.cols(), true, data)); + private static RotTrlTransform3d NWU_TO_EDN; + private static RotTrlTransform3d EDN_TO_NWU; + + static { + try { + var loader = + new RuntimeLoader<>( + Core.NATIVE_LIBRARY_NAME, RuntimeLoader.getDefaultExtractionRoot(), Core.class); + loader.loadLibrary(); + } catch (Exception e) { + throw new RuntimeException("Failed to load native libraries!", e); } - /** - * Creates a new {@link MatOfPoint3f} with these 3d translations. The opencv tvec is a vector with - * three elements representing {x, y, z} in the EDN coordinate system. - * - * @param translations The translations to convert into a MatOfPoint3f - */ - public static MatOfPoint3f translationToTvec(Translation3d... 
translations) { - Point3[] points = new Point3[translations.length]; - for (int i = 0; i < translations.length; i++) { - var trl = translationNWUtoEDN(translations[i]); - points[i] = new Point3(trl.getX(), trl.getY(), trl.getZ()); - } - return new MatOfPoint3f(points); + NWU_TO_EDN = + new RotTrlTransform3d( + new Rotation3d(Matrix.mat(Nat.N3(), Nat.N3()).fill(0, -1, 0, 0, 0, -1, 1, 0, 0)), + new Translation3d()); + EDN_TO_NWU = + new RotTrlTransform3d( + new Rotation3d(Matrix.mat(Nat.N3(), Nat.N3()).fill(0, 0, 1, -1, 0, 0, 0, -1, 0)), + new Translation3d()); + } + + public static Mat matrixToMat(SimpleMatrix matrix) { + var mat = new Mat(matrix.numRows(), matrix.numCols(), CvType.CV_64F); + mat.put(0, 0, matrix.getDDRM().getData()); + return mat; + } + + public static Matrix matToMatrix(Mat mat) { + double[] data = new double[(int) mat.total() * mat.channels()]; + var doubleMat = new Mat(mat.rows(), mat.cols(), CvType.CV_64F); + mat.convertTo(doubleMat, CvType.CV_64F); + doubleMat.get(0, 0, data); + return new Matrix<>(new SimpleMatrix(mat.rows(), mat.cols(), true, data)); + } + + /** + * Creates a new {@link MatOfPoint3f} with these 3d translations. The opencv tvec is a vector with + * three elements representing {x, y, z} in the EDN coordinate system. + * + * @param translations The translations to convert into a MatOfPoint3f + */ + public static MatOfPoint3f translationToTvec(Translation3d... translations) { + Point3[] points = new Point3[translations.length]; + for (int i = 0; i < translations.length; i++) { + var trl = translationNWUtoEDN(translations[i]); + points[i] = new Point3(trl.getX(), trl.getY(), trl.getZ()); } - - /** - * Returns a new 3d translation from this {@link Mat}. The opencv tvec is a vector with three - * elements representing {x, y, z} in the EDN coordinate system. 
- * - * @param tvecInput The tvec to create a Translation3d from - */ - public static Translation3d tvecToTranslation(Mat tvecInput) { - float[] data = new float[3]; - var wrapped = new Mat(tvecInput.rows(), tvecInput.cols(), CvType.CV_32F); - tvecInput.convertTo(wrapped, CvType.CV_32F); - wrapped.get(0, 0, data); - wrapped.release(); - return translationEDNtoNWU(new Translation3d(data[0], data[1], data[2])); - } - - /** - * Creates a new {@link MatOfPoint3f} with this 3d rotation. The opencv rvec Mat is a vector with - * three elements representing the axis scaled by the angle in the EDN coordinate system. (angle = - * norm, and axis = rvec / norm) - * - * @param rotation The rotation to convert into a MatOfPoint3f - */ - public static MatOfPoint3f rotationToRvec(Rotation3d rotation) { - rotation = rotationNWUtoEDN(rotation); - return new MatOfPoint3f(new Point3(rotation.getQuaternion().toRotationVector().getData())); + return new MatOfPoint3f(points); + } + + /** + * Returns a new 3d translation from this {@link Mat}. The opencv tvec is a vector with three + * elements representing {x, y, z} in the EDN coordinate system. + * + * @param tvecInput The tvec to create a Translation3d from + */ + public static Translation3d tvecToTranslation(Mat tvecInput) { + float[] data = new float[3]; + var wrapped = new Mat(tvecInput.rows(), tvecInput.cols(), CvType.CV_32F); + tvecInput.convertTo(wrapped, CvType.CV_32F); + wrapped.get(0, 0, data); + wrapped.release(); + return translationEDNtoNWU(new Translation3d(data[0], data[1], data[2])); + } + + /** + * Creates a new {@link MatOfPoint3f} with this 3d rotation. The opencv rvec Mat is a vector with + * three elements representing the axis scaled by the angle in the EDN coordinate system. 
(angle = + * norm, and axis = rvec / norm) + * + * @param rotation The rotation to convert into a MatOfPoint3f + */ + public static MatOfPoint3f rotationToRvec(Rotation3d rotation) { + rotation = rotationNWUtoEDN(rotation); + return new MatOfPoint3f(new Point3(rotation.getQuaternion().toRotationVector().getData())); + } + + /** + * Returns a 3d rotation from this {@link Mat}. The opencv rvec Mat is a vector with three + * elements representing the axis scaled by the angle in the EDN coordinate system. (angle = norm, + * and axis = rvec / norm) + * + * @param rvecInput The rvec to create a Rotation3d from + */ + public static Rotation3d rvecToRotation(Mat rvecInput) { + // Get the 'rodriguez' (axis-angle, where the norm is the angle about the normalized direction + // of the vector) + float[] data = new float[3]; + var wrapped = new Mat(rvecInput.rows(), rvecInput.cols(), CvType.CV_32F); + rvecInput.convertTo(wrapped, CvType.CV_32F); + wrapped.get(0, 0, data); + wrapped.release(); + + return rotationEDNtoNWU(new Rotation3d(VecBuilder.fill(data[0], data[1], data[2]))); + } + + public static Point avgPoint(Point[] points) { + if (points == null || points.length == 0) return null; + var pointMat = new MatOfPoint2f(points); + Core.reduce(pointMat, pointMat, 0, Core.REDUCE_AVG); + var avgPt = pointMat.toArray()[0]; + pointMat.release(); + return avgPt; + } + + public static Point[] cornersToPoints(List corners) { + var points = new Point[corners.size()]; + for (int i = 0; i < corners.size(); i++) { + var corn = corners.get(i); + points[i] = new Point(corn.x, corn.y); } + return points; + } - /** - * Returns a 3d rotation from this {@link Mat}. The opencv rvec Mat is a vector with three - * elements representing the axis scaled by the angle in the EDN coordinate system. 
(angle = norm, - * and axis = rvec / norm) - * - * @param rvecInput The rvec to create a Rotation3d from - */ - public static Rotation3d rvecToRotation(Mat rvecInput) { - // Get the 'rodriguez' (axis-angle, where the norm is the angle about the normalized direction - // of the vector) - float[] data = new float[3]; - var wrapped = new Mat(rvecInput.rows(), rvecInput.cols(), CvType.CV_32F); - rvecInput.convertTo(wrapped, CvType.CV_32F); - wrapped.get(0, 0, data); - wrapped.release(); - - return rotationEDNtoNWU(new Rotation3d(VecBuilder.fill(data[0], data[1], data[2]))); + public static Point[] cornersToPoints(TargetCorner... corners) { + var points = new Point[corners.length]; + for (int i = 0; i < corners.length; i++) { + points[i] = new Point(corners[i].x, corners[i].y); } + return points; + } - public static Point avgPoint(Point[] points) { - if (points == null || points.length == 0) return null; - var pointMat = new MatOfPoint2f(points); - Core.reduce(pointMat, pointMat, 0, Core.REDUCE_AVG); - var avgPt = pointMat.toArray()[0]; - pointMat.release(); - return avgPt; + public static List pointsToCorners(Point... points) { + var corners = new ArrayList(points.length); + for (int i = 0; i < points.length; i++) { + corners.add(new TargetCorner(points[i].x, points[i].y)); } - - public static Point[] cornersToPoints(List corners) { - var points = new Point[corners.size()]; - for (int i = 0; i < corners.size(); i++) { - var corn = corners.get(i); - points[i] = new Point(corn.x, corn.y); - } - return points; + return corners; + } + + public static List pointsToCorners(MatOfPoint2f matInput) { + var corners = new ArrayList(); + float[] data = new float[(int) matInput.total() * matInput.channels()]; + matInput.get(0, 0, data); + for (int i = 0; i < (int) matInput.total(); i++) { + corners.add(new TargetCorner(data[0 + 2 * i], data[1 + 2 * i])); } - - public static Point[] cornersToPoints(TargetCorner... 
corners) { - var points = new Point[corners.length]; - for (int i = 0; i < corners.length; i++) { - points[i] = new Point(corners[i].x, corners[i].y); - } - return points; + return corners; + } + + /** + * Reorders the list, optionally indexing backwards and wrapping around to the last element after + * the first, and shifting all indices in the direction of indexing. + * + *

e.g. + * + *

({1,2,3}, false, 1) == {2,3,1} + * + *

({1,2,3}, true, 0) == {1,3,2} + * + *

({1,2,3}, true, 1) == {3,2,1} + * + * @param Element type + * @param elements + * @param backwards If indexing should happen in reverse (0, size-1, size-2, ...) + * @param shiftStart How much the inital index should be shifted (instead of starting at index 0, + * start at shiftStart, negated if backwards) + * @return Reordered list + */ + public static List reorderCircular(List elements, boolean backwards, int shiftStart) { + int size = elements.size(); + int dir = backwards ? -1 : 1; + var reordered = new ArrayList<>(elements); + for (int i = 0; i < size; i++) { + int index = (i * dir + shiftStart * dir) % size; + if (index < 0) index = size + index; + reordered.set(i, elements.get(index)); } - - public static List pointsToCorners(Point... points) { - var corners = new ArrayList(points.length); - for (int i = 0; i < points.length; i++) { - corners.add(new TargetCorner(points[i].x, points[i].y)); - } - return corners; + return reordered; + } + + // TODO: RotTrlTransform3d removal awaiting Rotation3d performance improvements + /** + * Convert a rotation delta from EDN to NWU. For example, if you have a rotation X,Y,Z {1, 0, 0} + * in EDN, this would be {0, -1, 0} in NWU. + */ + private static Rotation3d rotationEDNtoNWU(Rotation3d rot) { + return new RotTrlTransform3d(EDN_TO_NWU.apply(rot), new Translation3d()) + .apply(EDN_TO_NWU.inverse().getRotation()); + } + + /** + * Convert a rotation delta from NWU to EDN. For example, if you have a rotation X,Y,Z {1, 0, 0} + * in NWU, this would be {0, 0, 1} in EDN. + */ + private static Rotation3d rotationNWUtoEDN(Rotation3d rot) { + return new RotTrlTransform3d(NWU_TO_EDN.apply(rot), new Translation3d()) + .apply(NWU_TO_EDN.inverse().getRotation()); + } + + /** + * Convert a translation from EDN to NWU. For example, if you have a translation X,Y,Z {1, 0, 0} + * in EDN, this would be {0, -1, 0} in NWU. 
+ */ + private static Translation3d translationEDNtoNWU(Translation3d trl) { + return EDN_TO_NWU.apply(trl); + } + + /** + * Convert a translation from NWU to EDN. For example, if you have a translation X,Y,Z {1, 0, 0} + * in NWU, this would be {0, 0, 1} in EDN. + */ + private static Translation3d translationNWUtoEDN(Translation3d trl) { + return NWU_TO_EDN.apply(trl); + } + + /** + * Project object points from the 3d world into the 2d camera image. The camera + * properties(intrinsics, distortion) determine the results of this projection. + * + * @param cameraMatrix the camera intrinsics matrix in standard opencv form + * @param distCoeffs the camera distortion matrix in standard opencv form + * @param camRt The change in basis from world coordinates to camera coordinates. See {@link + * RotTrlTransform3d#makeRelativeTo(Pose3d)}. + * @param objectTranslations The 3d points to be projected + * @return The 2d points in pixels which correspond to the camera's image of the 3d points + */ + public static Point[] projectPoints( + Matrix cameraMatrix, + Matrix distCoeffs, + RotTrlTransform3d camRt, + List objectTranslations) { + // translate to opencv classes + var objectPoints = translationToTvec(objectTranslations.toArray(new Translation3d[0])); + // opencv rvec/tvec describe a change in basis from world to camera + var rvec = rotationToRvec(camRt.getRotation()); + var tvec = translationToTvec(camRt.getTranslation()); + var cameraMatrixMat = matrixToMat(cameraMatrix.getStorage()); + var distCoeffsMat = new MatOfDouble(matrixToMat(distCoeffs.getStorage())); + var imagePoints = new MatOfPoint2f(); + // project to 2d + Calib3d.projectPoints(objectPoints, rvec, tvec, cameraMatrixMat, distCoeffsMat, imagePoints); + var points = imagePoints.toArray(); + + // release our Mats from native memory + objectPoints.release(); + rvec.release(); + tvec.release(); + cameraMatrixMat.release(); + distCoeffsMat.release(); + imagePoints.release(); + + return points; + } + + /** + * 
Undistort 2d image points using a given camera's intrinsics and distortion. + * + *

2d image points from {@link #projectPoints(Matrix, Matrix, RotTrlTransform3d, List) + * projectPoints()} will naturally be distorted, so this operation is important if the image + * points need to be directly used (e.g. 2d yaw/pitch). + * + * @param cameraMatrix The camera intrinsics matrix in standard opencv form + * @param distCoeffs The camera distortion matrix in standard opencv form + * @param points The distorted image points + * @return The undistorted image points + */ + public static Point[] undistortPoints( + Matrix cameraMatrix, Matrix distCoeffs, Point[] points) { + var distMat = new MatOfPoint2f(points); + var undistMat = new MatOfPoint2f(); + var cameraMatrixMat = matrixToMat(cameraMatrix.getStorage()); + var distCoeffsMat = matrixToMat(distCoeffs.getStorage()); + + Calib3d.undistortImagePoints(distMat, undistMat, cameraMatrixMat, distCoeffsMat); + var undistPoints = undistMat.toArray(); + + distMat.release(); + undistMat.release(); + cameraMatrixMat.release(); + distCoeffsMat.release(); + + return undistPoints; + } + + /** + * Gets the (upright) rectangle which bounds this contour. + * + *

Note that rectangle size and position are stored with ints and do not have sub-pixel + * accuracy. + * + * @param points The points to be bounded + * @return Rectangle bounding the given points + */ + public static Rect getBoundingRect(Point[] points) { + var pointMat = new MatOfPoint2f(points); + var rect = Imgproc.boundingRect(pointMat); + pointMat.release(); + return rect; + } + + /** + * Gets the rotated rectangle with minimum area which bounds this contour. + * + *

Note that rectangle size and position are stored with floats and have sub-pixel accuracy. + * + * @param points The points to be bounded + * @return Rotated rectangle bounding the given points + */ + public static RotatedRect getMinAreaRect(Point[] points) { + var pointMat = new MatOfPoint2f(points); + var rect = Imgproc.minAreaRect(pointMat); + pointMat.release(); + return rect; + } + + /** + * Gets the convex hull contour (the outline) of a list of points. + * + * @param points The input contour + * @return The subset of points defining the convex hull. Note that these use ints and not floats. + */ + public static Point[] getConvexHull(Point[] points) { + var pointMat = new MatOfPoint(points); + // outputHull gives us indices (of corn) that make a convex hull contour + var outputHull = new MatOfInt(); + + Imgproc.convexHull(pointMat, outputHull); + + int[] indices = outputHull.toArray(); + outputHull.release(); + pointMat.release(); + var convexPoints = new Point[indices.length]; + for (int i = 0; i < indices.length; i++) { + convexPoints[i] = points[indices[i]]; } - - public static List pointsToCorners(MatOfPoint2f matInput) { - var corners = new ArrayList(); - float[] data = new float[(int) matInput.total() * matInput.channels()]; - matInput.get(0, 0, data); - for (int i = 0; i < (int) matInput.total(); i++) { - corners.add(new TargetCorner(data[0 + 2 * i], data[1 + 2 * i])); + return convexPoints; + } + + /** + * Finds the transformation(s) that map the camera's pose to the target's pose. The camera's pose + * relative to the target is determined by the supplied 3d points of the target's model and their + * associated 2d points imaged by the camera. The supplied model translations must be relative to + * the target's pose. + * + *

For planar targets, there may be an alternate solution which is plausible given the 2d image + * points. This has an associated "ambiguity" which describes the ratio of reprojection error + * between the "best" and "alternate" solution. + * + *

This method is intended for use with individual AprilTags, and will not work unless 4 points + * are provided. + * + * @param cameraMatrix The camera intrinsics matrix in standard opencv form + * @param distCoeffs The camera distortion matrix in standard opencv form + * @param modelTrls The translations of the object corners. These should have the object pose as + * their origin. These must come in a specific, pose-relative order (in NWU): + *

    + *
  • Point 0: [0, -squareLength / 2, squareLength / 2] + *
  • Point 1: [0, squareLength / 2, squareLength / 2] + *
  • Point 2: [0, squareLength / 2, -squareLength / 2] + *
  • Point 3: [0, -squareLength / 2, -squareLength / 2] + *
+ * + * @param imagePoints The projection of these 3d object points into the 2d camera image. The order + * should match the given object point translations. + * @return The resulting transformation that maps the camera pose to the target pose and the + * ambiguity if an alternate solution is available. + */ + public static PNPResults solvePNP_SQUARE( + Matrix cameraMatrix, + Matrix distCoeffs, + List modelTrls, + Point[] imagePoints) { + // solvepnp inputs + MatOfPoint3f objectMat = new MatOfPoint3f(); + MatOfPoint2f imageMat = new MatOfPoint2f(); + MatOfDouble cameraMatrixMat = new MatOfDouble(); + MatOfDouble distCoeffsMat = new MatOfDouble(); + var rvecs = new ArrayList(); + var tvecs = new ArrayList(); + Mat rvec = Mat.zeros(3, 1, CvType.CV_32F); + Mat tvec = Mat.zeros(3, 1, CvType.CV_32F); + Mat reprojectionError = Mat.zeros(2, 1, CvType.CV_32F); + try { + // IPPE_SQUARE expects our corners in a specific order + modelTrls = reorderCircular(modelTrls, true, -1); + imagePoints = reorderCircular(Arrays.asList(imagePoints), true, -1).toArray(Point[]::new); + // translate to opencv classes + translationToTvec(modelTrls.toArray(new Translation3d[0])).assignTo(objectMat); + imageMat.fromArray(imagePoints); + matrixToMat(cameraMatrix.getStorage()).assignTo(cameraMatrixMat); + matrixToMat(distCoeffs.getStorage()).assignTo(distCoeffsMat); + + float[] errors = new float[2]; + Transform3d best = null; + Transform3d alt = null; + + for (int tries = 0; tries < 2; tries++) { + // calc rvecs/tvecs and associated reprojection error from image points + Calib3d.solvePnPGeneric( + objectMat, + imageMat, + cameraMatrixMat, + distCoeffsMat, + rvecs, + tvecs, + false, + Calib3d.SOLVEPNP_IPPE_SQUARE, + rvec, + tvec, + reprojectionError); + + reprojectionError.get(0, 0, errors); + // convert to wpilib coordinates + best = new Transform3d(tvecToTranslation(tvecs.get(0)), rvecToRotation(rvecs.get(0))); + + if (tvecs.size() > 1) { + alt = new Transform3d(tvecToTranslation(tvecs.get(1)), 
rvecToRotation(rvecs.get(1))); } - return corners; - } - /** - * Reorders the list, optionally indexing backwards and wrapping around to the last element after - * the first, and shifting all indices in the direction of indexing. - * - *

e.g. - * - *

({1,2,3}, false, 1) == {2,3,1} - * - *

({1,2,3}, true, 0) == {1,3,2} - * - *

({1,2,3}, true, 1) == {3,2,1} - * - * @param Element type - * @param elements - * @param backwards If indexing should happen in reverse (0, size-1, size-2, ...) - * @param shiftStart How much the inital index should be shifted (instead of starting at index 0, - * start at shiftStart, negated if backwards) - * @return Reordered list - */ - public static List reorderCircular(List elements, boolean backwards, int shiftStart) { - int size = elements.size(); - int dir = backwards ? -1 : 1; - var reordered = new ArrayList<>(elements); - for (int i = 0; i < size; i++) { - int index = (i * dir + shiftStart * dir) % size; - if (index < 0) index = size + index; - reordered.set(i, elements.get(index)); + // check if we got a NaN result + if (!Double.isNaN(errors[0])) break; + else { // add noise and retry + double[] br = imageMat.get(0, 0); + br[0] -= 0.001; + br[1] -= 0.001; + imageMat.put(0, 0, br); } - return reordered; - } - - // TODO: RotTrlTransform3d removal awaiting Rotation3d performance improvements - /** - * Convert a rotation delta from EDN to NWU. For example, if you have a rotation X,Y,Z {1, 0, 0} - * in EDN, this would be {0, -1, 0} in NWU. - */ - private static Rotation3d rotationEDNtoNWU(Rotation3d rot) { - return new RotTrlTransform3d(EDN_TO_NWU.apply(rot), new Translation3d()) - .apply(EDN_TO_NWU.inverse().getRotation()); - } - - /** - * Convert a rotation delta from NWU to EDN. For example, if you have a rotation X,Y,Z {1, 0, 0} - * in NWU, this would be {0, 0, 1} in EDN. - */ - private static Rotation3d rotationNWUtoEDN(Rotation3d rot) { - return new RotTrlTransform3d(NWU_TO_EDN.apply(rot), new Translation3d()) - .apply(NWU_TO_EDN.inverse().getRotation()); - } - - /** - * Convert a translation from EDN to NWU. For example, if you have a translation X,Y,Z {1, 0, 0} - * in EDN, this would be {0, -1, 0} in NWU. 
- */ - private static Translation3d translationEDNtoNWU(Translation3d trl) { - return EDN_TO_NWU.apply(trl); - } - - /** - * Convert a translation from NWU to EDN. For example, if you have a translation X,Y,Z {1, 0, 0} - * in NWU, this would be {0, 0, 1} in EDN. - */ - private static Translation3d translationNWUtoEDN(Translation3d trl) { - return NWU_TO_EDN.apply(trl); - } - - /** - * Project object points from the 3d world into the 2d camera image. The camera - * properties(intrinsics, distortion) determine the results of this projection. - * - * @param cameraMatrix the camera intrinsics matrix in standard opencv form - * @param distCoeffs the camera distortion matrix in standard opencv form - * @param camRt The change in basis from world coordinates to camera coordinates. See {@link - * RotTrlTransform3d#makeRelativeTo(Pose3d)}. - * @param objectTranslations The 3d points to be projected - * @return The 2d points in pixels which correspond to the camera's image of the 3d points - */ - public static Point[] projectPoints( - Matrix cameraMatrix, - Matrix distCoeffs, - RotTrlTransform3d camRt, - List objectTranslations) { - // translate to opencv classes - var objectPoints = translationToTvec(objectTranslations.toArray(new Translation3d[0])); - // opencv rvec/tvec describe a change in basis from world to camera - var rvec = rotationToRvec(camRt.getRotation()); - var tvec = translationToTvec(camRt.getTranslation()); - var cameraMatrixMat = matrixToMat(cameraMatrix.getStorage()); - var distCoeffsMat = new MatOfDouble(matrixToMat(distCoeffs.getStorage())); - var imagePoints = new MatOfPoint2f(); - // project to 2d - Calib3d.projectPoints(objectPoints, rvec, tvec, cameraMatrixMat, distCoeffsMat, imagePoints); - var points = imagePoints.toArray(); - - // release our Mats from native memory - objectPoints.release(); - rvec.release(); - tvec.release(); - cameraMatrixMat.release(); - distCoeffsMat.release(); - imagePoints.release(); - - return points; - } - - /** - * 
Undistort 2d image points using a given camera's intrinsics and distortion. - * - *

2d image points from {@link #projectPoints(Matrix, Matrix, RotTrlTransform3d, List) - * projectPoints()} will naturally be distorted, so this operation is important if the image - * points need to be directly used (e.g. 2d yaw/pitch). - * - * @param cameraMatrix The camera intrinsics matrix in standard opencv form - * @param distCoeffs The camera distortion matrix in standard opencv form - * @param points The distorted image points - * @return The undistorted image points - */ - public static Point[] undistortPoints( - Matrix cameraMatrix, Matrix distCoeffs, Point[] points) { - var distMat = new MatOfPoint2f(points); - var undistMat = new MatOfPoint2f(); - var cameraMatrixMat = matrixToMat(cameraMatrix.getStorage()); - var distCoeffsMat = matrixToMat(distCoeffs.getStorage()); - - Calib3d.undistortImagePoints(distMat, undistMat, cameraMatrixMat, distCoeffsMat); - var undistPoints = undistMat.toArray(); - - distMat.release(); - undistMat.release(); - cameraMatrixMat.release(); - distCoeffsMat.release(); - - return undistPoints; - } + } - /** - * Gets the (upright) rectangle which bounds this contour. - * - *

Note that rectangle size and position are stored with ints and do not have sub-pixel - * accuracy. - * - * @param points The points to be bounded - * @return Rectangle bounding the given points - */ - public static Rect getBoundingRect(Point[] points) { - var pointMat = new MatOfPoint2f(points); - var rect = Imgproc.boundingRect(pointMat); - pointMat.release(); - return rect; - } + // check if solvePnP failed with NaN results and retrying failed + if (Double.isNaN(errors[0])) throw new Exception("SolvePNP_SQUARE NaN result"); - /** - * Gets the rotated rectangle with minimum area which bounds this contour. - * - *

Note that rectangle size and position are stored with floats and have sub-pixel accuracy. - * - * @param points The points to be bounded - * @return Rotated rectangle bounding the given points - */ - public static RotatedRect getMinAreaRect(Point[] points) { - var pointMat = new MatOfPoint2f(points); - var rect = Imgproc.minAreaRect(pointMat); - pointMat.release(); - return rect; - } - - /** - * Gets the convex hull contour (the outline) of a list of points. - * - * @param points The input contour - * @return The subset of points defining the convex hull. Note that these use ints and not floats. - */ - public static Point[] getConvexHull(Point[] points) { - var pointMat = new MatOfPoint(points); - // outputHull gives us indices (of corn) that make a convex hull contour - var outputHull = new MatOfInt(); - - Imgproc.convexHull(pointMat, outputHull); - - int[] indices = outputHull.toArray(); - outputHull.release(); - pointMat.release(); - var convexPoints = new Point[indices.length]; - for (int i = 0; i < indices.length; i++) { - convexPoints[i] = points[indices[i]]; - } - return convexPoints; + if (alt != null) + return new PNPResults(best, alt, errors[0] / errors[1], errors[0], errors[1]); + else return new PNPResults(best, errors[0]); } - - /** - * Finds the transformation(s) that map the camera's pose to the target's pose. The camera's pose - * relative to the target is determined by the supplied 3d points of the target's model and their - * associated 2d points imaged by the camera. The supplied model translations must be relative to - * the target's pose. - * - *

For planar targets, there may be an alternate solution which is plausible given the 2d image - * points. This has an associated "ambiguity" which describes the ratio of reprojection error - * between the "best" and "alternate" solution. - * - *

This method is intended for use with individual AprilTags, and will not work unless 4 points - * are provided. - * - * @param cameraMatrix The camera intrinsics matrix in standard opencv form - * @param distCoeffs The camera distortion matrix in standard opencv form - * @param modelTrls The translations of the object corners. These should have the object pose as - * their origin. These must come in a specific, pose-relative order (in NWU): - *

    - *
  • Point 0: [0, -squareLength / 2, squareLength / 2] - *
  • Point 1: [0, squareLength / 2, squareLength / 2] - *
  • Point 2: [0, squareLength / 2, -squareLength / 2] - *
  • Point 3: [0, -squareLength / 2, -squareLength / 2] - *
- * - * @param imagePoints The projection of these 3d object points into the 2d camera image. The order - * should match the given object point translations. - * @return The resulting transformation that maps the camera pose to the target pose and the - * ambiguity if an alternate solution is available. - */ - public static PNPResults solvePNP_SQUARE( - Matrix cameraMatrix, - Matrix distCoeffs, - List modelTrls, - Point[] imagePoints) { - // solvepnp inputs - MatOfPoint3f objectMat = new MatOfPoint3f(); - MatOfPoint2f imageMat = new MatOfPoint2f(); - MatOfDouble cameraMatrixMat = new MatOfDouble(); - MatOfDouble distCoeffsMat = new MatOfDouble(); - var rvecs = new ArrayList(); - var tvecs = new ArrayList(); - Mat rvec = Mat.zeros(3, 1, CvType.CV_32F); - Mat tvec = Mat.zeros(3, 1, CvType.CV_32F); - Mat reprojectionError = Mat.zeros(2, 1, CvType.CV_32F); - try { - // IPPE_SQUARE expects our corners in a specific order - modelTrls = reorderCircular(modelTrls, true, -1); - imagePoints = reorderCircular(Arrays.asList(imagePoints), true, -1).toArray(Point[]::new); - // translate to opencv classes - translationToTvec(modelTrls.toArray(new Translation3d[0])).assignTo(objectMat); - imageMat.fromArray(imagePoints); - matrixToMat(cameraMatrix.getStorage()).assignTo(cameraMatrixMat); - matrixToMat(distCoeffs.getStorage()).assignTo(distCoeffsMat); - - float[] errors = new float[2]; - Transform3d best = null; - Transform3d alt = null; - - for (int tries = 0; tries < 2; tries++) { - // calc rvecs/tvecs and associated reprojection error from image points - Calib3d.solvePnPGeneric( - objectMat, - imageMat, - cameraMatrixMat, - distCoeffsMat, - rvecs, - tvecs, - false, - Calib3d.SOLVEPNP_IPPE_SQUARE, - rvec, - tvec, - reprojectionError); - - reprojectionError.get(0, 0, errors); - // convert to wpilib coordinates - best = new Transform3d(tvecToTranslation(tvecs.get(0)), rvecToRotation(rvecs.get(0))); - - if (tvecs.size() > 1) { - alt = new Transform3d(tvecToTranslation(tvecs.get(1)), 
rvecToRotation(rvecs.get(1))); - } - - // check if we got a NaN result - if (!Double.isNaN(errors[0])) break; - else { // add noise and retry - double[] br = imageMat.get(0, 0); - br[0] -= 0.001; - br[1] -= 0.001; - imageMat.put(0, 0, br); - } - } - - // check if solvePnP failed with NaN results and retrying failed - if (Double.isNaN(errors[0])) throw new Exception("SolvePNP_SQUARE NaN result"); - - if (alt != null) - return new PNPResults(best, alt, errors[0] / errors[1], errors[0], errors[1]); - else return new PNPResults(best, errors[0]); - } - // solvePnP failed - catch (Exception e) { - System.err.println("SolvePNP_SQUARE failed!"); - e.printStackTrace(); - return new PNPResults(); - } finally { - // release our Mats from native memory - objectMat.release(); - imageMat.release(); - cameraMatrixMat.release(); - distCoeffsMat.release(); - for (var v : rvecs) v.release(); - for (var v : tvecs) v.release(); - rvec.release(); - tvec.release(); - reprojectionError.release(); - } + // solvePnP failed + catch (Exception e) { + System.err.println("SolvePNP_SQUARE failed!"); + e.printStackTrace(); + return new PNPResults(); + } finally { + // release our Mats from native memory + objectMat.release(); + imageMat.release(); + cameraMatrixMat.release(); + distCoeffsMat.release(); + for (var v : rvecs) v.release(); + for (var v : tvecs) v.release(); + rvec.release(); + tvec.release(); + reprojectionError.release(); } - - /** - * Finds the transformation that maps the camera's pose to the origin of the supplied object. An - * "object" is simply a set of known 3d translations that correspond to the given 2d points. If, - * for example, the object translations are given relative to close-right corner of the blue - * alliance(the default origin), a camera-to-origin transformation is returned. If the - * translations are relative to a target's pose, a camera-to-target transformation is returned. - * - *

There must be at least 3 points to use this method. This does not return an alternate - * solution-- if you are intending to use solvePNP on a single AprilTag, see {@link - * #solvePNP_SQUARE} instead. - * - * @param cameraMatrix The camera intrinsics matrix in standard opencv form - * @param distCoeffs The camera distortion matrix in standard opencv form - * @param objectTrls The translations of the object corners, relative to the field. - * @param imagePoints The projection of these 3d object points into the 2d camera image. The order - * should match the given object point translations. - * @return The resulting transformation that maps the camera pose to the target pose. If the 3d - * model points are supplied relative to the origin, this transformation brings the camera to - * the origin. - */ - public static PNPResults solvePNP_SQPNP( - Matrix cameraMatrix, - Matrix distCoeffs, - List objectTrls, - Point[] imagePoints) { - try { - // translate to opencv classes - MatOfPoint3f objectMat = translationToTvec(objectTrls.toArray(new Translation3d[0])); - MatOfPoint2f imageMat = new MatOfPoint2f(imagePoints); - Mat cameraMatrixMat = matrixToMat(cameraMatrix.getStorage()); - Mat distCoeffsMat = matrixToMat(distCoeffs.getStorage()); - var rvecs = new ArrayList(); - var tvecs = new ArrayList(); - Mat rvec = Mat.zeros(3, 1, CvType.CV_32F); - Mat tvec = Mat.zeros(3, 1, CvType.CV_32F); - Mat reprojectionError = new Mat(); - // calc rvec/tvec from image points - Calib3d.solvePnPGeneric( - objectMat, - imageMat, - cameraMatrixMat, - distCoeffsMat, - rvecs, - tvecs, - false, - Calib3d.SOLVEPNP_SQPNP, - rvec, - tvec, - reprojectionError); - - float[] error = new float[1]; - reprojectionError.get(0, 0, error); - // convert to wpilib coordinates - var best = new Transform3d(tvecToTranslation(tvecs.get(0)), rvecToRotation(rvecs.get(0))); - - // release our Mats from native memory - objectMat.release(); - imageMat.release(); - cameraMatrixMat.release(); - 
distCoeffsMat.release(); - for (var v : rvecs) v.release(); - for (var v : tvecs) v.release(); - rvec.release(); - tvec.release(); - reprojectionError.release(); - - // check if solvePnP failed with NaN results - if (Double.isNaN(error[0])) throw new Exception("SolvePNP_SQPNP NaN result"); - - return new PNPResults(best, error[0]); - } catch (Exception e) { - System.err.println("SolvePNP_SQPNP failed!"); - e.printStackTrace(); - return new PNPResults(); - } + } + + /** + * Finds the transformation that maps the camera's pose to the origin of the supplied object. An + * "object" is simply a set of known 3d translations that correspond to the given 2d points. If, + * for example, the object translations are given relative to close-right corner of the blue + * alliance(the default origin), a camera-to-origin transformation is returned. If the + * translations are relative to a target's pose, a camera-to-target transformation is returned. + * + *

There must be at least 3 points to use this method. This does not return an alternate + * solution-- if you are intending to use solvePNP on a single AprilTag, see {@link + * #solvePNP_SQUARE} instead. + * + * @param cameraMatrix The camera intrinsics matrix in standard opencv form + * @param distCoeffs The camera distortion matrix in standard opencv form + * @param objectTrls The translations of the object corners, relative to the field. + * @param imagePoints The projection of these 3d object points into the 2d camera image. The order + * should match the given object point translations. + * @return The resulting transformation that maps the camera pose to the target pose. If the 3d + * model points are supplied relative to the origin, this transformation brings the camera to + * the origin. + */ + public static PNPResults solvePNP_SQPNP( + Matrix cameraMatrix, + Matrix distCoeffs, + List objectTrls, + Point[] imagePoints) { + try { + // translate to opencv classes + MatOfPoint3f objectMat = translationToTvec(objectTrls.toArray(new Translation3d[0])); + MatOfPoint2f imageMat = new MatOfPoint2f(imagePoints); + Mat cameraMatrixMat = matrixToMat(cameraMatrix.getStorage()); + Mat distCoeffsMat = matrixToMat(distCoeffs.getStorage()); + var rvecs = new ArrayList(); + var tvecs = new ArrayList(); + Mat rvec = Mat.zeros(3, 1, CvType.CV_32F); + Mat tvec = Mat.zeros(3, 1, CvType.CV_32F); + Mat reprojectionError = new Mat(); + // calc rvec/tvec from image points + Calib3d.solvePnPGeneric( + objectMat, + imageMat, + cameraMatrixMat, + distCoeffsMat, + rvecs, + tvecs, + false, + Calib3d.SOLVEPNP_SQPNP, + rvec, + tvec, + reprojectionError); + + float[] error = new float[1]; + reprojectionError.get(0, 0, error); + // convert to wpilib coordinates + var best = new Transform3d(tvecToTranslation(tvecs.get(0)), rvecToRotation(rvecs.get(0))); + + // release our Mats from native memory + objectMat.release(); + imageMat.release(); + cameraMatrixMat.release(); + 
distCoeffsMat.release(); + for (var v : rvecs) v.release(); + for (var v : tvecs) v.release(); + rvec.release(); + tvec.release(); + reprojectionError.release(); + + // check if solvePnP failed with NaN results + if (Double.isNaN(error[0])) throw new Exception("SolvePNP_SQPNP NaN result"); + + return new PNPResults(best, error[0]); + } catch (Exception e) { + System.err.println("SolvePNP_SQPNP failed!"); + e.printStackTrace(); + return new PNPResults(); } + } } diff --git a/photon-targeting/src/main/java/org/photonvision/estimation/RotTrlTransform3d.java b/photon-targeting/src/main/java/org/photonvision/estimation/RotTrlTransform3d.java index 2b79e920d6..bbd26c08ca 100644 --- a/photon-targeting/src/main/java/org/photonvision/estimation/RotTrlTransform3d.java +++ b/photon-targeting/src/main/java/org/photonvision/estimation/RotTrlTransform3d.java @@ -29,198 +29,198 @@ * Represents a transformation that first rotates a pose around the origin, and then translates it. */ public class RotTrlTransform3d { - private final Translation3d trl; - private final Rotation3d rot; - // TODO: removal awaiting wpilib Rotation3d performance improvements - private double m_w; - private double m_x; - private double m_y; - private double m_z; - - /** - * A rotation-translation transformation. - * - *

Applying this RotTrlTransform3d to poses will preserve their current origin-to-pose - * transform as if the origin was transformed by these components instead. - * - * @param rot The rotation component - * @param trl The translation component - */ - public RotTrlTransform3d(Rotation3d rot, Translation3d trl) { - this.rot = rot; - var quat = rot.getQuaternion(); - m_w = quat.getW(); - m_x = quat.getX(); - m_y = quat.getY(); - m_z = quat.getZ(); - this.trl = trl; - } - - public RotTrlTransform3d(Pose3d initial, Pose3d last) { - // this.rot = last.getRotation().minus(initial.getRotation()); - // this.trl = last.getTranslation().minus(initial.getTranslation().rotateBy(rot)); - - var quat = initial.getRotation().getQuaternion(); - m_w = quat.getW(); - m_x = quat.getX(); - m_y = quat.getY(); - m_z = quat.getZ(); - this.rot = invrotate(last.getRotation()); - this.trl = last.getTranslation().minus(rotate(initial.getTranslation())); - } - - /** - * Creates a rotation-translation transformation from a Transform3d. - * - *

Applying this RotTrlTransform3d to poses will preserve their current origin-to-pose - * transform as if the origin was transformed by trf instead. - * - * @param trf The origin transformation - */ - public RotTrlTransform3d(Transform3d trf) { - this(trf.getRotation(), trf.getTranslation()); - } - - public RotTrlTransform3d() { - this(new Rotation3d(), new Translation3d()); - } - - private Translation3d rotate(Translation3d otrl) { - final var p = new Quaternion(0.0, otrl.getX(), otrl.getY(), otrl.getZ()); - final var qprime = times(times(p), new Quaternion(m_w, -m_x, -m_y, -m_z)); - return new Translation3d(qprime.getX(), qprime.getY(), qprime.getZ()); - } - - private Translation3d invrotate(Translation3d otrl) { - m_x = -m_x; - m_y = -m_y; - m_z = -m_z; - var result = rotate(otrl); - m_x = -m_x; - m_y = -m_y; - m_z = -m_z; - return result; - } - - private Rotation3d rotate(Rotation3d orot) { - return new Rotation3d(times(orot.getQuaternion())); - } - - private Rotation3d invrotate(Rotation3d orot) { - m_x = -m_x; - m_y = -m_y; - m_z = -m_z; - var result = rotate(orot); - m_x = -m_x; - m_y = -m_y; - m_z = -m_z; - return result; - } - - /** - * The rotation-translation transformation that makes poses in the world consider this pose as the - * new origin, or change the basis to this pose. - * - * @param pose The new origin - */ - public static RotTrlTransform3d makeRelativeTo(Pose3d pose) { - return new RotTrlTransform3d(pose.getRotation(), pose.getTranslation()).inverse(); - } - - /** The inverse of this transformation. Applying the inverse will "undo" this transformation. 
*/ - public RotTrlTransform3d inverse() { - // var inverseRot = rot.unaryMinus(); - // var inverseTrl = trl.rotateBy(inverseRot).unaryMinus(); - // return new RotTrlTransform3d(inverseRot, inverseTrl); - - var inverseTrl = invrotate(trl).unaryMinus(); - return new RotTrlTransform3d(new Rotation3d(new Quaternion(m_w, -m_x, -m_y, -m_z)), inverseTrl); - } - - /** This transformation as a Transform3d (as if of the origin) */ - public Transform3d getTransform() { - return new Transform3d(trl, rot); - } - - /** The translation component of this transformation */ - public Translation3d getTranslation() { - return trl; - } - - /** The rotation component of this transformation */ - public Rotation3d getRotation() { - return rot; - } - - public Translation3d apply(Translation3d trl) { - // return trl.rotateBy(rot).plus(this.trl); - return rotate(trl).plus(this.trl); - } - - public List applyTrls(List trls) { - return trls.stream().map(this::apply).collect(Collectors.toList()); - } - - public Rotation3d apply(Rotation3d rot) { - return rotate(rot); - } - - public List applyRots(List rots) { - return rots.stream().map(this::apply).collect(Collectors.toList()); - } - - public Pose3d apply(Pose3d pose) { - // return new Pose3d(pose.getTranslation().rotateBy(rot).plus(trl), - // pose.getRotation().plus(rot)); - return new Pose3d(apply(pose.getTranslation()), apply(pose.getRotation())); - } - - public List applyPoses(List poses) { - return poses.stream().map(this::apply).collect(Collectors.toList()); - } - - // TODO: removal awaiting wpilib Rotation3d performance improvements - private Quaternion times(Quaternion other) { - final double o_w = other.getW(); - final double o_x = other.getX(); - final double o_y = other.getY(); - final double o_z = other.getZ(); - return times(m_w, m_x, m_y, m_z, o_w, o_x, o_y, o_z); - } - - private static Quaternion times(Quaternion a, Quaternion b) { - final double m_w = a.getW(); - final double m_x = a.getX(); - final double m_y = a.getY(); - 
final double m_z = a.getZ(); - final double o_w = b.getW(); - final double o_x = b.getX(); - final double o_y = b.getY(); - final double o_z = b.getZ(); - return times(m_w, m_x, m_y, m_z, o_w, o_x, o_y, o_z); - } - - private static Quaternion times( - double m_w, - double m_x, - double m_y, - double m_z, - double o_w, - double o_x, - double o_y, - double o_z) { - // https://en.wikipedia.org/wiki/Quaternion#Scalar_and_vector_parts - - // v₁ x v₂ - final double cross_x = m_y * o_z - o_y * m_z; - final double cross_y = o_x * m_z - m_x * o_z; - final double cross_z = m_x * o_y - o_x * m_y; - - // v = w₁v₂ + w₂v₁ + v₁ x v₂ - final double new_x = o_x * m_w + (m_x * o_w) + cross_x; - final double new_y = o_y * m_w + (m_y * o_w) + cross_y; - final double new_z = o_z * m_w + (m_z * o_w) + cross_z; - - return new Quaternion(m_w * o_w - (m_x * o_x + m_y * o_y + m_z * o_z), new_x, new_y, new_z); - } + private final Translation3d trl; + private final Rotation3d rot; + // TODO: removal awaiting wpilib Rotation3d performance improvements + private double m_w; + private double m_x; + private double m_y; + private double m_z; + + /** + * A rotation-translation transformation. + * + *

Applying this RotTrlTransform3d to poses will preserve their current origin-to-pose + * transform as if the origin was transformed by these components instead. + * + * @param rot The rotation component + * @param trl The translation component + */ + public RotTrlTransform3d(Rotation3d rot, Translation3d trl) { + this.rot = rot; + var quat = rot.getQuaternion(); + m_w = quat.getW(); + m_x = quat.getX(); + m_y = quat.getY(); + m_z = quat.getZ(); + this.trl = trl; + } + + public RotTrlTransform3d(Pose3d initial, Pose3d last) { + // this.rot = last.getRotation().minus(initial.getRotation()); + // this.trl = last.getTranslation().minus(initial.getTranslation().rotateBy(rot)); + + var quat = initial.getRotation().getQuaternion(); + m_w = quat.getW(); + m_x = quat.getX(); + m_y = quat.getY(); + m_z = quat.getZ(); + this.rot = invrotate(last.getRotation()); + this.trl = last.getTranslation().minus(rotate(initial.getTranslation())); + } + + /** + * Creates a rotation-translation transformation from a Transform3d. + * + *

Applying this RotTrlTransform3d to poses will preserve their current origin-to-pose + * transform as if the origin was transformed by trf instead. + * + * @param trf The origin transformation + */ + public RotTrlTransform3d(Transform3d trf) { + this(trf.getRotation(), trf.getTranslation()); + } + + public RotTrlTransform3d() { + this(new Rotation3d(), new Translation3d()); + } + + private Translation3d rotate(Translation3d otrl) { + final var p = new Quaternion(0.0, otrl.getX(), otrl.getY(), otrl.getZ()); + final var qprime = times(times(p), new Quaternion(m_w, -m_x, -m_y, -m_z)); + return new Translation3d(qprime.getX(), qprime.getY(), qprime.getZ()); + } + + private Translation3d invrotate(Translation3d otrl) { + m_x = -m_x; + m_y = -m_y; + m_z = -m_z; + var result = rotate(otrl); + m_x = -m_x; + m_y = -m_y; + m_z = -m_z; + return result; + } + + private Rotation3d rotate(Rotation3d orot) { + return new Rotation3d(times(orot.getQuaternion())); + } + + private Rotation3d invrotate(Rotation3d orot) { + m_x = -m_x; + m_y = -m_y; + m_z = -m_z; + var result = rotate(orot); + m_x = -m_x; + m_y = -m_y; + m_z = -m_z; + return result; + } + + /** + * The rotation-translation transformation that makes poses in the world consider this pose as the + * new origin, or change the basis to this pose. + * + * @param pose The new origin + */ + public static RotTrlTransform3d makeRelativeTo(Pose3d pose) { + return new RotTrlTransform3d(pose.getRotation(), pose.getTranslation()).inverse(); + } + + /** The inverse of this transformation. Applying the inverse will "undo" this transformation. 
*/ + public RotTrlTransform3d inverse() { + // var inverseRot = rot.unaryMinus(); + // var inverseTrl = trl.rotateBy(inverseRot).unaryMinus(); + // return new RotTrlTransform3d(inverseRot, inverseTrl); + + var inverseTrl = invrotate(trl).unaryMinus(); + return new RotTrlTransform3d(new Rotation3d(new Quaternion(m_w, -m_x, -m_y, -m_z)), inverseTrl); + } + + /** This transformation as a Transform3d (as if of the origin) */ + public Transform3d getTransform() { + return new Transform3d(trl, rot); + } + + /** The translation component of this transformation */ + public Translation3d getTranslation() { + return trl; + } + + /** The rotation component of this transformation */ + public Rotation3d getRotation() { + return rot; + } + + public Translation3d apply(Translation3d trl) { + // return trl.rotateBy(rot).plus(this.trl); + return rotate(trl).plus(this.trl); + } + + public List applyTrls(List trls) { + return trls.stream().map(this::apply).collect(Collectors.toList()); + } + + public Rotation3d apply(Rotation3d rot) { + return rotate(rot); + } + + public List applyRots(List rots) { + return rots.stream().map(this::apply).collect(Collectors.toList()); + } + + public Pose3d apply(Pose3d pose) { + // return new Pose3d(pose.getTranslation().rotateBy(rot).plus(trl), + // pose.getRotation().plus(rot)); + return new Pose3d(apply(pose.getTranslation()), apply(pose.getRotation())); + } + + public List applyPoses(List poses) { + return poses.stream().map(this::apply).collect(Collectors.toList()); + } + + // TODO: removal awaiting wpilib Rotation3d performance improvements + private Quaternion times(Quaternion other) { + final double o_w = other.getW(); + final double o_x = other.getX(); + final double o_y = other.getY(); + final double o_z = other.getZ(); + return times(m_w, m_x, m_y, m_z, o_w, o_x, o_y, o_z); + } + + private static Quaternion times(Quaternion a, Quaternion b) { + final double m_w = a.getW(); + final double m_x = a.getX(); + final double m_y = a.getY(); + 
final double m_z = a.getZ(); + final double o_w = b.getW(); + final double o_x = b.getX(); + final double o_y = b.getY(); + final double o_z = b.getZ(); + return times(m_w, m_x, m_y, m_z, o_w, o_x, o_y, o_z); + } + + private static Quaternion times( + double m_w, + double m_x, + double m_y, + double m_z, + double o_w, + double o_x, + double o_y, + double o_z) { + // https://en.wikipedia.org/wiki/Quaternion#Scalar_and_vector_parts + + // v₁ x v₂ + final double cross_x = m_y * o_z - o_y * m_z; + final double cross_y = o_x * m_z - m_x * o_z; + final double cross_z = m_x * o_y - o_x * m_y; + + // v = w₁v₂ + w₂v₁ + v₁ x v₂ + final double new_x = o_x * m_w + (m_x * o_w) + cross_x; + final double new_y = o_y * m_w + (m_y * o_w) + cross_y; + final double new_z = o_z * m_w + (m_z * o_w) + cross_z; + + return new Quaternion(m_w * o_w - (m_x * o_x + m_y * o_y + m_z * o_z), new_x, new_y, new_z); + } } diff --git a/photon-targeting/src/main/java/org/photonvision/estimation/TargetModel.java b/photon-targeting/src/main/java/org/photonvision/estimation/TargetModel.java index 4a4c22449d..96f473a3b6 100644 --- a/photon-targeting/src/main/java/org/photonvision/estimation/TargetModel.java +++ b/photon-targeting/src/main/java/org/photonvision/estimation/TargetModel.java @@ -28,156 +28,156 @@ /** Describes the 3d model of a target. */ public class TargetModel { - /** - * Translations of this target's vertices relative to its pose. Rectangular and spherical targets - * will have four vertices. See their respective constructors for more info. - */ - public final List vertices; + /** + * Translations of this target's vertices relative to its pose. Rectangular and spherical targets + * will have four vertices. See their respective constructors for more info. 
+ */ + public final List vertices; - public final boolean isPlanar; - public final boolean isSpherical; + public final boolean isPlanar; + public final boolean isSpherical; - public static final TargetModel kTag16h5 = - new TargetModel(Units.inchesToMeters(6), Units.inchesToMeters(6)); + public static final TargetModel kTag16h5 = + new TargetModel(Units.inchesToMeters(6), Units.inchesToMeters(6)); - /** - * Creates a rectangular, planar target model given the width and height. The model has four - * vertices: - * - *

    - *
  • Point 0: [0, -width/2, -height/2] - *
  • Point 1: [0, width/2, -height/2] - *
  • Point 2: [0, width/2, height/2] - *
  • Point 3: [0, -width/2, height/2] - *
- */ - public TargetModel(double widthMeters, double heightMeters) { - this.vertices = - List.of( - // this order is relevant for AprilTag compatibility - new Translation3d(0, -widthMeters / 2.0, -heightMeters / 2.0), - new Translation3d(0, widthMeters / 2.0, -heightMeters / 2.0), - new Translation3d(0, widthMeters / 2.0, heightMeters / 2.0), - new Translation3d(0, -widthMeters / 2.0, heightMeters / 2.0)); - this.isPlanar = true; - this.isSpherical = false; - } + /** + * Creates a rectangular, planar target model given the width and height. The model has four + * vertices: + * + *
    + *
  • Point 0: [0, -width/2, -height/2] + *
  • Point 1: [0, width/2, -height/2] + *
  • Point 2: [0, width/2, height/2] + *
  • Point 3: [0, -width/2, height/2] + *
+ */ + public TargetModel(double widthMeters, double heightMeters) { + this.vertices = + List.of( + // this order is relevant for AprilTag compatibility + new Translation3d(0, -widthMeters / 2.0, -heightMeters / 2.0), + new Translation3d(0, widthMeters / 2.0, -heightMeters / 2.0), + new Translation3d(0, widthMeters / 2.0, heightMeters / 2.0), + new Translation3d(0, -widthMeters / 2.0, heightMeters / 2.0)); + this.isPlanar = true; + this.isSpherical = false; + } - /** - * Creates a cuboid target model given the length, width, height. The model has eight vertices: - * - *
    - *
  • Point 0: [length/2, -width/2, -height/2] - *
  • Point 1: [length/2, width/2, -height/2] - *
  • Point 2: [length/2, width/2, height/2] - *
  • Point 3: [length/2, -width/2, height/2] - *
  • Point 4: [-length/2, -width/2, height/2] - *
  • Point 5: [-length/2, width/2, height/2] - *
  • Point 6: [-length/2, width/2, -height/2] - *
  • Point 7: [-length/2, -width/2, -height/2] - *
- */ - public TargetModel(double lengthMeters, double widthMeters, double heightMeters) { - this( - List.of( - new Translation3d(lengthMeters / 2.0, -widthMeters / 2.0, -heightMeters / 2.0), - new Translation3d(lengthMeters / 2.0, widthMeters / 2.0, -heightMeters / 2.0), - new Translation3d(lengthMeters / 2.0, widthMeters / 2.0, heightMeters / 2.0), - new Translation3d(lengthMeters / 2.0, -widthMeters / 2.0, heightMeters / 2.0), - new Translation3d(-lengthMeters / 2.0, -widthMeters / 2.0, heightMeters / 2.0), - new Translation3d(-lengthMeters / 2.0, widthMeters / 2.0, heightMeters / 2.0), - new Translation3d(-lengthMeters / 2.0, widthMeters / 2.0, -heightMeters / 2.0), - new Translation3d(-lengthMeters / 2.0, -widthMeters / 2.0, -heightMeters / 2.0))); - } + /** + * Creates a cuboid target model given the length, width, height. The model has eight vertices: + * + *
    + *
  • Point 0: [length/2, -width/2, -height/2] + *
  • Point 1: [length/2, width/2, -height/2] + *
  • Point 2: [length/2, width/2, height/2] + *
  • Point 3: [length/2, -width/2, height/2] + *
  • Point 4: [-length/2, -width/2, height/2] + *
  • Point 5: [-length/2, width/2, height/2] + *
  • Point 6: [-length/2, width/2, -height/2] + *
  • Point 7: [-length/2, -width/2, -height/2] + *
+ */ + public TargetModel(double lengthMeters, double widthMeters, double heightMeters) { + this( + List.of( + new Translation3d(lengthMeters / 2.0, -widthMeters / 2.0, -heightMeters / 2.0), + new Translation3d(lengthMeters / 2.0, widthMeters / 2.0, -heightMeters / 2.0), + new Translation3d(lengthMeters / 2.0, widthMeters / 2.0, heightMeters / 2.0), + new Translation3d(lengthMeters / 2.0, -widthMeters / 2.0, heightMeters / 2.0), + new Translation3d(-lengthMeters / 2.0, -widthMeters / 2.0, heightMeters / 2.0), + new Translation3d(-lengthMeters / 2.0, widthMeters / 2.0, heightMeters / 2.0), + new Translation3d(-lengthMeters / 2.0, widthMeters / 2.0, -heightMeters / 2.0), + new Translation3d(-lengthMeters / 2.0, -widthMeters / 2.0, -heightMeters / 2.0))); + } - /** - * Creates a spherical target model which has similar dimensions regardless of its rotation. This - * model has four vertices: - * - *
    - *
  • Point 0: [0, -radius, 0] - *
  • Point 1: [0, 0, -radius] - *
  • Point 2: [0, radius, 0] - *
  • Point 3: [0, 0, radius] - *
- * - * Q: Why these vertices? A: This target should be oriented to the camera every frame, much - * like a sprite/decal, and these vertices represent the ellipse vertices (maxima). These vertices - * are used for drawing the image of this sphere, but do not match the corners that will be - * published by photonvision. - */ - public TargetModel(double diameterMeters) { - double radius = diameterMeters / 2.0; - this.vertices = - List.of( - new Translation3d(0, -radius, 0), - new Translation3d(0, 0, -radius), - new Translation3d(0, radius, 0), - new Translation3d(0, 0, radius)); - this.isPlanar = false; - this.isSpherical = true; - } + /** + * Creates a spherical target model which has similar dimensions regardless of its rotation. This + * model has four vertices: + * + *
    + *
  • Point 0: [0, -radius, 0] + *
  • Point 1: [0, 0, -radius] + *
  • Point 2: [0, radius, 0] + *
  • Point 3: [0, 0, radius] + *
+ * + * Q: Why these vertices? A: This target should be oriented to the camera every frame, much + * like a sprite/decal, and these vertices represent the ellipse vertices (maxima). These vertices + * are used for drawing the image of this sphere, but do not match the corners that will be + * published by photonvision. + */ + public TargetModel(double diameterMeters) { + double radius = diameterMeters / 2.0; + this.vertices = + List.of( + new Translation3d(0, -radius, 0), + new Translation3d(0, 0, -radius), + new Translation3d(0, radius, 0), + new Translation3d(0, 0, radius)); + this.isPlanar = false; + this.isSpherical = true; + } - /** - * Creates a target model from arbitrary 3d vertices. Automatically determines if the given - * vertices are planar(x == 0). More than 2 vertices must be given. If this is a planar model, the - * vertices should define a non-intersecting contour. - * - * @param vertices Translations representing the vertices of this target model relative to its - * pose. - */ - public TargetModel(List vertices) { - this.isSpherical = false; - if (vertices == null || vertices.size() <= 2) { - vertices = new ArrayList<>(); - this.isPlanar = false; - } else { - boolean cornersPlanar = true; - for (Translation3d corner : vertices) { - if (corner.getX() != 0) cornersPlanar = false; - } - this.isPlanar = cornersPlanar; - } - this.vertices = vertices; + /** + * Creates a target model from arbitrary 3d vertices. Automatically determines if the given + * vertices are planar(x == 0). More than 2 vertices must be given. If this is a planar model, the + * vertices should define a non-intersecting contour. + * + * @param vertices Translations representing the vertices of this target model relative to its + * pose. 
+ */ + public TargetModel(List vertices) { + this.isSpherical = false; + if (vertices == null || vertices.size() <= 2) { + vertices = new ArrayList<>(); + this.isPlanar = false; + } else { + boolean cornersPlanar = true; + for (Translation3d corner : vertices) { + if (corner.getX() != 0) cornersPlanar = false; + } + this.isPlanar = cornersPlanar; } + this.vertices = vertices; + } - /** - * This target's vertices offset from its field pose. - * - *

Note: If this target is spherical, use {@link #getOrientedPose(Translation3d, - * Translation3d)} with this method. - */ - public List getFieldVertices(Pose3d targetPose) { - var basisChange = new RotTrlTransform3d(targetPose.getRotation(), targetPose.getTranslation()); - return vertices.stream().map(t -> basisChange.apply(t)).collect(Collectors.toList()); - } + /** + * This target's vertices offset from its field pose. + * + *

Note: If this target is spherical, use {@link #getOrientedPose(Translation3d, + * Translation3d)} with this method. + */ + public List getFieldVertices(Pose3d targetPose) { + var basisChange = new RotTrlTransform3d(targetPose.getRotation(), targetPose.getTranslation()); + return vertices.stream().map(t -> basisChange.apply(t)).collect(Collectors.toList()); + } - /** - * Returns a Pose3d with the given target translation oriented (with its relative x-axis aligned) - * to the camera translation. This is used for spherical targets which should not have their - * projection change regardless of their own rotation. - * - * @param tgtTrl This target's translation - * @param cameraTrl Camera's translation - * @return This target's pose oriented to the camera - */ - public static Pose3d getOrientedPose(Translation3d tgtTrl, Translation3d cameraTrl) { - var relCam = cameraTrl.minus(tgtTrl); - var orientToCam = - new Rotation3d( - 0, - new Rotation2d(Math.hypot(relCam.getX(), relCam.getY()), -relCam.getZ()).getRadians(), - new Rotation2d(relCam.getX(), relCam.getY()).getRadians()); - return new Pose3d(tgtTrl, orientToCam); - } + /** + * Returns a Pose3d with the given target translation oriented (with its relative x-axis aligned) + * to the camera translation. This is used for spherical targets which should not have their + * projection change regardless of their own rotation. 
+ * + * @param tgtTrl This target's translation + * @param cameraTrl Camera's translation + * @return This target's pose oriented to the camera + */ + public static Pose3d getOrientedPose(Translation3d tgtTrl, Translation3d cameraTrl) { + var relCam = cameraTrl.minus(tgtTrl); + var orientToCam = + new Rotation3d( + 0, + new Rotation2d(Math.hypot(relCam.getX(), relCam.getY()), -relCam.getZ()).getRadians(), + new Rotation2d(relCam.getX(), relCam.getY()).getRadians()); + return new Pose3d(tgtTrl, orientToCam); + } - @Override - public boolean equals(Object obj) { - if (this == obj) return true; - if (obj instanceof TargetModel) { - var o = (TargetModel) obj; - return vertices.equals(o.vertices) && isPlanar == o.isPlanar && isSpherical == o.isSpherical; - } - return false; + @Override + public boolean equals(Object obj) { + if (this == obj) return true; + if (obj instanceof TargetModel) { + var o = (TargetModel) obj; + return vertices.equals(o.vertices) && isPlanar == o.isPlanar && isSpherical == o.isSpherical; } + return false; + } } diff --git a/photon-targeting/src/main/java/org/photonvision/estimation/VisionEstimation.java b/photon-targeting/src/main/java/org/photonvision/estimation/VisionEstimation.java index 387176d57b..69493fc0c3 100644 --- a/photon-targeting/src/main/java/org/photonvision/estimation/VisionEstimation.java +++ b/photon-targeting/src/main/java/org/photonvision/estimation/VisionEstimation.java @@ -34,99 +34,99 @@ import org.photonvision.targeting.TargetCorner; public class VisionEstimation { - /** Get the visible {@link AprilTag}s which are in the tag layout using the visible tag IDs. 
*/ - public static List getVisibleLayoutTags( - List visTags, AprilTagFieldLayout tagLayout) { - return visTags.stream() - .map( - t -> { - int id = t.getFiducialId(); - var maybePose = tagLayout.getTagPose(id); - return maybePose.map(pose3d -> new AprilTag(id, pose3d)).orElse(null); - }) - .filter(Objects::nonNull) - .collect(Collectors.toList()); - } + /** Get the visible {@link AprilTag}s which are in the tag layout using the visible tag IDs. */ + public static List getVisibleLayoutTags( + List visTags, AprilTagFieldLayout tagLayout) { + return visTags.stream() + .map( + t -> { + int id = t.getFiducialId(); + var maybePose = tagLayout.getTagPose(id); + return maybePose.map(pose3d -> new AprilTag(id, pose3d)).orElse(null); + }) + .filter(Objects::nonNull) + .collect(Collectors.toList()); + } - /** - * Performs solvePNP using 3d-2d point correspondences of visible AprilTags to estimate the - * field-to-camera transformation. If only one tag is visible, the result may have an alternate - * solution. - * - *

Note: The returned transformation is from the field origin to the camera pose! - * - *

With only one tag: {@link OpenCVHelp#solvePNP_SQUARE} - * - *

With multiple tags: {@link OpenCVHelp#solvePNP_SQPNP} - * - * @param cameraMatrix The camera intrinsics matrix in standard opencv form - * @param distCoeffs The camera distortion matrix in standard opencv form - * @param visTags The visible tags reported by PV. Non-tag targets are automatically excluded. - * @param tagLayout The known tag layout on the field - * @return The transformation that maps the field origin to the camera pose. Ensure the {@link - * PNPResults} are present before utilizing them. - */ - public static PNPResults estimateCamPosePNP( - Matrix cameraMatrix, - Matrix distCoeffs, - List visTags, - AprilTagFieldLayout tagLayout) { - if (tagLayout == null - || visTags == null - || tagLayout.getTags().size() == 0 - || visTags.size() == 0) { - return new PNPResults(); - } + /** + * Performs solvePNP using 3d-2d point correspondences of visible AprilTags to estimate the + * field-to-camera transformation. If only one tag is visible, the result may have an alternate + * solution. + * + *

Note: The returned transformation is from the field origin to the camera pose! + * + *

With only one tag: {@link OpenCVHelp#solvePNP_SQUARE} + * + *

With multiple tags: {@link OpenCVHelp#solvePNP_SQPNP} + * + * @param cameraMatrix The camera intrinsics matrix in standard opencv form + * @param distCoeffs The camera distortion matrix in standard opencv form + * @param visTags The visible tags reported by PV. Non-tag targets are automatically excluded. + * @param tagLayout The known tag layout on the field + * @return The transformation that maps the field origin to the camera pose. Ensure the {@link + * PNPResults} are present before utilizing them. + */ + public static PNPResults estimateCamPosePNP( + Matrix cameraMatrix, + Matrix distCoeffs, + List visTags, + AprilTagFieldLayout tagLayout) { + if (tagLayout == null + || visTags == null + || tagLayout.getTags().size() == 0 + || visTags.size() == 0) { + return new PNPResults(); + } - var corners = new ArrayList(); - var knownTags = new ArrayList(); - // ensure these are AprilTags in our layout - for (var tgt : visTags) { - int id = tgt.getFiducialId(); - tagLayout - .getTagPose(id) - .ifPresent( - pose -> { - knownTags.add(new AprilTag(id, pose)); - corners.addAll(tgt.getDetectedCorners()); - }); - } - if (knownTags.size() == 0 || corners.size() == 0 || corners.size() % 4 != 0) { - return new PNPResults(); - } - Point[] points = OpenCVHelp.cornersToPoints(corners); + var corners = new ArrayList(); + var knownTags = new ArrayList(); + // ensure these are AprilTags in our layout + for (var tgt : visTags) { + int id = tgt.getFiducialId(); + tagLayout + .getTagPose(id) + .ifPresent( + pose -> { + knownTags.add(new AprilTag(id, pose)); + corners.addAll(tgt.getDetectedCorners()); + }); + } + if (knownTags.size() == 0 || corners.size() == 0 || corners.size() % 4 != 0) { + return new PNPResults(); + } + Point[] points = OpenCVHelp.cornersToPoints(corners); - // single-tag pnp - if (knownTags.size() == 1) { - var camToTag = - OpenCVHelp.solvePNP_SQUARE( - cameraMatrix, distCoeffs, TargetModel.kTag16h5.vertices, points); - if (!camToTag.isPresent) return new PNPResults(); 
- var bestPose = knownTags.get(0).pose.transformBy(camToTag.best.inverse()); - var altPose = new Pose3d(); - if (camToTag.ambiguity != 0) - altPose = knownTags.get(0).pose.transformBy(camToTag.alt.inverse()); + // single-tag pnp + if (knownTags.size() == 1) { + var camToTag = + OpenCVHelp.solvePNP_SQUARE( + cameraMatrix, distCoeffs, TargetModel.kTag16h5.vertices, points); + if (!camToTag.isPresent) return new PNPResults(); + var bestPose = knownTags.get(0).pose.transformBy(camToTag.best.inverse()); + var altPose = new Pose3d(); + if (camToTag.ambiguity != 0) + altPose = knownTags.get(0).pose.transformBy(camToTag.alt.inverse()); - var o = new Pose3d(); - return new PNPResults( - new Transform3d(o, bestPose), - new Transform3d(o, altPose), - camToTag.ambiguity, - camToTag.bestReprojErr, - camToTag.altReprojErr); - } - // multi-tag pnp - else { - var objectTrls = new ArrayList(); - for (var tag : knownTags) objectTrls.addAll(TargetModel.kTag16h5.getFieldVertices(tag.pose)); - var camToOrigin = OpenCVHelp.solvePNP_SQPNP(cameraMatrix, distCoeffs, objectTrls, points); - if (!camToOrigin.isPresent) return new PNPResults(); - return new PNPResults( - camToOrigin.best.inverse(), - camToOrigin.alt.inverse(), - camToOrigin.ambiguity, - camToOrigin.bestReprojErr, - camToOrigin.altReprojErr); - } + var o = new Pose3d(); + return new PNPResults( + new Transform3d(o, bestPose), + new Transform3d(o, altPose), + camToTag.ambiguity, + camToTag.bestReprojErr, + camToTag.altReprojErr); + } + // multi-tag pnp + else { + var objectTrls = new ArrayList(); + for (var tag : knownTags) objectTrls.addAll(TargetModel.kTag16h5.getFieldVertices(tag.pose)); + var camToOrigin = OpenCVHelp.solvePNP_SQPNP(cameraMatrix, distCoeffs, objectTrls, points); + if (!camToOrigin.isPresent) return new PNPResults(); + return new PNPResults( + camToOrigin.best.inverse(), + camToOrigin.alt.inverse(), + camToOrigin.ambiguity, + camToOrigin.bestReprojErr, + camToOrigin.altReprojErr); } + } } diff --git 
a/photon-targeting/src/main/java/org/photonvision/targeting/MultiTargetPNPResults.java b/photon-targeting/src/main/java/org/photonvision/targeting/MultiTargetPNPResults.java index 453c0a57e5..410064a9b3 100644 --- a/photon-targeting/src/main/java/org/photonvision/targeting/MultiTargetPNPResults.java +++ b/photon-targeting/src/main/java/org/photonvision/targeting/MultiTargetPNPResults.java @@ -22,72 +22,72 @@ import org.photonvision.common.dataflow.structures.Packet; public class MultiTargetPNPResults { - // Seeing 32 apriltags at once seems like a sane limit - private static final int MAX_IDS = 32; - // pnpresult + MAX_IDS possible targets (arbitrary upper limit that should never be hit, ideally) - public static final int PACK_SIZE_BYTES = PNPResults.PACK_SIZE_BYTES + (Short.BYTES * MAX_IDS); + // Seeing 32 apriltags at once seems like a sane limit + private static final int MAX_IDS = 32; + // pnpresult + MAX_IDS possible targets (arbitrary upper limit that should never be hit, ideally) + public static final int PACK_SIZE_BYTES = PNPResults.PACK_SIZE_BYTES + (Short.BYTES * MAX_IDS); - public PNPResults estimatedPose = new PNPResults(); - public List fiducialIDsUsed = List.of(); + public PNPResults estimatedPose = new PNPResults(); + public List fiducialIDsUsed = List.of(); - public MultiTargetPNPResults() {} + public MultiTargetPNPResults() {} - public MultiTargetPNPResults(PNPResults results, List ids) { - estimatedPose = results; - fiducialIDsUsed = ids; - } + public MultiTargetPNPResults(PNPResults results, List ids) { + estimatedPose = results; + fiducialIDsUsed = ids; + } - public static MultiTargetPNPResults createFromPacket(Packet packet) { - var results = PNPResults.createFromPacket(packet); - var ids = new ArrayList(MAX_IDS); - for (int i = 0; i < MAX_IDS; i++) { - int targetId = (int) packet.decodeShort(); - if (targetId > -1) ids.add(targetId); - } - return new MultiTargetPNPResults(results, ids); + public static MultiTargetPNPResults 
createFromPacket(Packet packet) { + var results = PNPResults.createFromPacket(packet); + var ids = new ArrayList(MAX_IDS); + for (int i = 0; i < MAX_IDS; i++) { + int targetId = (int) packet.decodeShort(); + if (targetId > -1) ids.add(targetId); } + return new MultiTargetPNPResults(results, ids); + } - public void populatePacket(Packet packet) { - estimatedPose.populatePacket(packet); - for (int i = 0; i < MAX_IDS; i++) { - if (i < fiducialIDsUsed.size()) { - packet.encode((short) fiducialIDsUsed.get(i).byteValue()); - } else { - packet.encode((short) -1); - } - } + public void populatePacket(Packet packet) { + estimatedPose.populatePacket(packet); + for (int i = 0; i < MAX_IDS; i++) { + if (i < fiducialIDsUsed.size()) { + packet.encode((short) fiducialIDsUsed.get(i).byteValue()); + } else { + packet.encode((short) -1); + } } + } - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + ((estimatedPose == null) ? 0 : estimatedPose.hashCode()); - result = prime * result + ((fiducialIDsUsed == null) ? 0 : fiducialIDsUsed.hashCode()); - return result; - } + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + ((estimatedPose == null) ? 0 : estimatedPose.hashCode()); + result = prime * result + ((fiducialIDsUsed == null) ? 
0 : fiducialIDsUsed.hashCode()); + return result; + } - @Override - public boolean equals(Object obj) { - if (this == obj) return true; - if (obj == null) return false; - if (getClass() != obj.getClass()) return false; - MultiTargetPNPResults other = (MultiTargetPNPResults) obj; - if (estimatedPose == null) { - if (other.estimatedPose != null) return false; - } else if (!estimatedPose.equals(other.estimatedPose)) return false; - if (fiducialIDsUsed == null) { - if (other.fiducialIDsUsed != null) return false; - } else if (!fiducialIDsUsed.equals(other.fiducialIDsUsed)) return false; - return true; - } + @Override + public boolean equals(Object obj) { + if (this == obj) return true; + if (obj == null) return false; + if (getClass() != obj.getClass()) return false; + MultiTargetPNPResults other = (MultiTargetPNPResults) obj; + if (estimatedPose == null) { + if (other.estimatedPose != null) return false; + } else if (!estimatedPose.equals(other.estimatedPose)) return false; + if (fiducialIDsUsed == null) { + if (other.fiducialIDsUsed != null) return false; + } else if (!fiducialIDsUsed.equals(other.fiducialIDsUsed)) return false; + return true; + } - @Override - public String toString() { - return "MultiTargetPNPResults [estimatedPose=" - + estimatedPose - + ", fiducialIDsUsed=" - + fiducialIDsUsed - + "]"; - } + @Override + public String toString() { + return "MultiTargetPNPResults [estimatedPose=" + + estimatedPose + + ", fiducialIDsUsed=" + + fiducialIDsUsed + + "]"; + } } diff --git a/photon-targeting/src/main/java/org/photonvision/targeting/PNPResults.java b/photon-targeting/src/main/java/org/photonvision/targeting/PNPResults.java index 11a77547fc..93b04f2386 100644 --- a/photon-targeting/src/main/java/org/photonvision/targeting/PNPResults.java +++ b/photon-targeting/src/main/java/org/photonvision/targeting/PNPResults.java @@ -31,140 +31,140 @@ * method. */ public class PNPResults { - /** - * If this result is valid. 
A false value indicates there was an error in estimation, and this - * result should not be used. - */ - public final boolean isPresent; - - /** - * The best-fit transform. The coordinate frame of this transform depends on the method which gave - * this result. - */ - public final Transform3d best; - - /** Reprojection error of the best solution, in pixels */ - public final double bestReprojErr; - - /** - * Alternate, ambiguous solution from solvepnp. If no alternate solution is found, this is equal - * to the best solution. - */ - public final Transform3d alt; - - /** If no alternate solution is found, this is bestReprojErr */ - public final double altReprojErr; - - /** If no alternate solution is found, this is 0 */ - public final double ambiguity; - - /** An empty (invalid) result. */ - public PNPResults() { - this.isPresent = false; - this.best = new Transform3d(); - this.alt = new Transform3d(); - this.ambiguity = 0; - this.bestReprojErr = 0; - this.altReprojErr = 0; - } - - public PNPResults(Transform3d best, double bestReprojErr) { - this(best, best, 0, bestReprojErr, bestReprojErr); - } - - public PNPResults( - Transform3d best, - Transform3d alt, - double ambiguity, - double bestReprojErr, - double altReprojErr) { - this.isPresent = true; - this.best = best; - this.alt = alt; - this.ambiguity = ambiguity; - this.bestReprojErr = bestReprojErr; - this.altReprojErr = altReprojErr; - } - - public static final int PACK_SIZE_BYTES = 1 + (Double.BYTES * 7 * 2) + (Double.BYTES * 3); - - public static PNPResults createFromPacket(Packet packet) { - var present = packet.decodeBoolean(); - var best = PacketUtils.decodeTransform(packet); - var alt = PacketUtils.decodeTransform(packet); - var bestEr = packet.decodeDouble(); - var altEr = packet.decodeDouble(); - var ambiguity = packet.decodeDouble(); - if (present) { - return new PNPResults(best, alt, ambiguity, bestEr, altEr); - } else { - return new PNPResults(); - } - } - - public Packet populatePacket(Packet packet) 
{ - packet.encode(isPresent); - PacketUtils.encodeTransform(packet, best); - PacketUtils.encodeTransform(packet, alt); - packet.encode(bestReprojErr); - packet.encode(altReprojErr); - packet.encode(ambiguity); - return packet; - } - - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + (isPresent ? 1231 : 1237); - result = prime * result + ((best == null) ? 0 : best.hashCode()); - long temp; - temp = Double.doubleToLongBits(bestReprojErr); - result = prime * result + (int) (temp ^ (temp >>> 32)); - result = prime * result + ((alt == null) ? 0 : alt.hashCode()); - temp = Double.doubleToLongBits(altReprojErr); - result = prime * result + (int) (temp ^ (temp >>> 32)); - temp = Double.doubleToLongBits(ambiguity); - result = prime * result + (int) (temp ^ (temp >>> 32)); - return result; - } - - @Override - public boolean equals(Object obj) { - if (this == obj) return true; - if (obj == null) return false; - if (getClass() != obj.getClass()) return false; - PNPResults other = (PNPResults) obj; - if (isPresent != other.isPresent) return false; - if (best == null) { - if (other.best != null) return false; - } else if (!best.equals(other.best)) return false; - if (Double.doubleToLongBits(bestReprojErr) != Double.doubleToLongBits(other.bestReprojErr)) - return false; - if (alt == null) { - if (other.alt != null) return false; - } else if (!alt.equals(other.alt)) return false; - if (Double.doubleToLongBits(altReprojErr) != Double.doubleToLongBits(other.altReprojErr)) - return false; - if (Double.doubleToLongBits(ambiguity) != Double.doubleToLongBits(other.ambiguity)) - return false; - return true; - } - - @Override - public String toString() { - return "PNPResults [isPresent=" - + isPresent - + ", best=" - + best - + ", bestReprojErr=" - + bestReprojErr - + ", alt=" - + alt - + ", altReprojErr=" - + altReprojErr - + ", ambiguity=" - + ambiguity - + "]"; + /** + * If this result is valid. 
A false value indicates there was an error in estimation, and this + * result should not be used. + */ + public final boolean isPresent; + + /** + * The best-fit transform. The coordinate frame of this transform depends on the method which gave + * this result. + */ + public final Transform3d best; + + /** Reprojection error of the best solution, in pixels */ + public final double bestReprojErr; + + /** + * Alternate, ambiguous solution from solvepnp. If no alternate solution is found, this is equal + * to the best solution. + */ + public final Transform3d alt; + + /** If no alternate solution is found, this is bestReprojErr */ + public final double altReprojErr; + + /** If no alternate solution is found, this is 0 */ + public final double ambiguity; + + /** An empty (invalid) result. */ + public PNPResults() { + this.isPresent = false; + this.best = new Transform3d(); + this.alt = new Transform3d(); + this.ambiguity = 0; + this.bestReprojErr = 0; + this.altReprojErr = 0; + } + + public PNPResults(Transform3d best, double bestReprojErr) { + this(best, best, 0, bestReprojErr, bestReprojErr); + } + + public PNPResults( + Transform3d best, + Transform3d alt, + double ambiguity, + double bestReprojErr, + double altReprojErr) { + this.isPresent = true; + this.best = best; + this.alt = alt; + this.ambiguity = ambiguity; + this.bestReprojErr = bestReprojErr; + this.altReprojErr = altReprojErr; + } + + public static final int PACK_SIZE_BYTES = 1 + (Double.BYTES * 7 * 2) + (Double.BYTES * 3); + + public static PNPResults createFromPacket(Packet packet) { + var present = packet.decodeBoolean(); + var best = PacketUtils.decodeTransform(packet); + var alt = PacketUtils.decodeTransform(packet); + var bestEr = packet.decodeDouble(); + var altEr = packet.decodeDouble(); + var ambiguity = packet.decodeDouble(); + if (present) { + return new PNPResults(best, alt, ambiguity, bestEr, altEr); + } else { + return new PNPResults(); } + } + + public Packet populatePacket(Packet packet) { 
+ packet.encode(isPresent); + PacketUtils.encodeTransform(packet, best); + PacketUtils.encodeTransform(packet, alt); + packet.encode(bestReprojErr); + packet.encode(altReprojErr); + packet.encode(ambiguity); + return packet; + } + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + (isPresent ? 1231 : 1237); + result = prime * result + ((best == null) ? 0 : best.hashCode()); + long temp; + temp = Double.doubleToLongBits(bestReprojErr); + result = prime * result + (int) (temp ^ (temp >>> 32)); + result = prime * result + ((alt == null) ? 0 : alt.hashCode()); + temp = Double.doubleToLongBits(altReprojErr); + result = prime * result + (int) (temp ^ (temp >>> 32)); + temp = Double.doubleToLongBits(ambiguity); + result = prime * result + (int) (temp ^ (temp >>> 32)); + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) return true; + if (obj == null) return false; + if (getClass() != obj.getClass()) return false; + PNPResults other = (PNPResults) obj; + if (isPresent != other.isPresent) return false; + if (best == null) { + if (other.best != null) return false; + } else if (!best.equals(other.best)) return false; + if (Double.doubleToLongBits(bestReprojErr) != Double.doubleToLongBits(other.bestReprojErr)) + return false; + if (alt == null) { + if (other.alt != null) return false; + } else if (!alt.equals(other.alt)) return false; + if (Double.doubleToLongBits(altReprojErr) != Double.doubleToLongBits(other.altReprojErr)) + return false; + if (Double.doubleToLongBits(ambiguity) != Double.doubleToLongBits(other.ambiguity)) + return false; + return true; + } + + @Override + public String toString() { + return "PNPResults [isPresent=" + + isPresent + + ", best=" + + best + + ", bestReprojErr=" + + bestReprojErr + + ", alt=" + + alt + + ", altReprojErr=" + + altReprojErr + + ", ambiguity=" + + ambiguity + + "]"; + } } diff --git 
a/photon-targeting/src/main/java/org/photonvision/targeting/PhotonPipelineResult.java b/photon-targeting/src/main/java/org/photonvision/targeting/PhotonPipelineResult.java index 4472078f8a..b09841db74 100644 --- a/photon-targeting/src/main/java/org/photonvision/targeting/PhotonPipelineResult.java +++ b/photon-targeting/src/main/java/org/photonvision/targeting/PhotonPipelineResult.java @@ -23,219 +23,219 @@ /** Represents a pipeline result from a PhotonCamera. */ public class PhotonPipelineResult { - private static boolean HAS_WARNED = false; - - // Targets to store. - public final List targets = new ArrayList<>(); - - // Latency in milliseconds. - private double latencyMillis; - - // Timestamp in milliseconds. - private double timestampSeconds = -1; - - // Multi-tag result - private MultiTargetPNPResults multiTagResult = new MultiTargetPNPResults(); - - /** Constructs an empty pipeline result. */ - public PhotonPipelineResult() {} - - /** - * Constructs a pipeline result. - * - * @param latencyMillis The latency in the pipeline. - * @param targets The list of targets identified by the pipeline. - */ - public PhotonPipelineResult(double latencyMillis, List targets) { - this.latencyMillis = latencyMillis; - this.targets.addAll(targets); - } - - /** - * Constructs a pipeline result. - * - * @param latencyMillis The latency in the pipeline. - * @param targets The list of targets identified by the pipeline. - * @param result Result from multi-target PNP. - */ - public PhotonPipelineResult( - double latencyMillis, List targets, MultiTargetPNPResults result) { - this.latencyMillis = latencyMillis; - this.targets.addAll(targets); - this.multiTagResult = result; - } - - /** - * Returns the size of the packet needed to store this pipeline result. - * - * @return The size of the packet needed to store this pipeline result. 
- */ - public int getPacketSize() { - return targets.size() * PhotonTrackedTarget.PACK_SIZE_BYTES - + 8 // latency - + MultiTargetPNPResults.PACK_SIZE_BYTES - + 1; // target count - } - - /** - * Returns the best target in this pipeline result. If there are no targets, this method will - * return null. The best target is determined by the target sort mode in the PhotonVision UI. - * - * @return The best target of the pipeline result. - */ - public PhotonTrackedTarget getBestTarget() { - if (!hasTargets() && !HAS_WARNED) { - String errStr = - "This PhotonPipelineResult object has no targets associated with it! Please check hasTargets() " - + "before calling this method. For more information, please review the PhotonLib " - + "documentation at http://docs.photonvision.org"; - System.err.println(errStr); - new Exception().printStackTrace(); - HAS_WARNED = true; - } - return hasTargets() ? targets.get(0) : null; - } - - /** - * Returns the latency in the pipeline. - * - * @return The latency in the pipeline. - */ - public double getLatencyMillis() { - return latencyMillis; - } - - /** - * Returns the estimated time the frame was taken, This is more accurate than using - * getLatencyMillis() - * - * @return The timestamp in seconds, or -1 if this result has no timestamp set. - */ - public double getTimestampSeconds() { - return timestampSeconds; + private static boolean HAS_WARNED = false; + + // Targets to store. + public final List targets = new ArrayList<>(); + + // Latency in milliseconds. + private double latencyMillis; + + // Timestamp in milliseconds. + private double timestampSeconds = -1; + + // Multi-tag result + private MultiTargetPNPResults multiTagResult = new MultiTargetPNPResults(); + + /** Constructs an empty pipeline result. */ + public PhotonPipelineResult() {} + + /** + * Constructs a pipeline result. + * + * @param latencyMillis The latency in the pipeline. + * @param targets The list of targets identified by the pipeline. 
+ */ + public PhotonPipelineResult(double latencyMillis, List targets) { + this.latencyMillis = latencyMillis; + this.targets.addAll(targets); + } + + /** + * Constructs a pipeline result. + * + * @param latencyMillis The latency in the pipeline. + * @param targets The list of targets identified by the pipeline. + * @param result Result from multi-target PNP. + */ + public PhotonPipelineResult( + double latencyMillis, List targets, MultiTargetPNPResults result) { + this.latencyMillis = latencyMillis; + this.targets.addAll(targets); + this.multiTagResult = result; + } + + /** + * Returns the size of the packet needed to store this pipeline result. + * + * @return The size of the packet needed to store this pipeline result. + */ + public int getPacketSize() { + return targets.size() * PhotonTrackedTarget.PACK_SIZE_BYTES + + 8 // latency + + MultiTargetPNPResults.PACK_SIZE_BYTES + + 1; // target count + } + + /** + * Returns the best target in this pipeline result. If there are no targets, this method will + * return null. The best target is determined by the target sort mode in the PhotonVision UI. + * + * @return The best target of the pipeline result. + */ + public PhotonTrackedTarget getBestTarget() { + if (!hasTargets() && !HAS_WARNED) { + String errStr = + "This PhotonPipelineResult object has no targets associated with it! Please check hasTargets() " + + "before calling this method. For more information, please review the PhotonLib " + + "documentation at http://docs.photonvision.org"; + System.err.println(errStr); + new Exception().printStackTrace(); + HAS_WARNED = true; } - - /** - * Sets the FPGA timestamp of this result in seconds. - * - * @param timestampSeconds The timestamp in seconds. - */ - public void setTimestampSeconds(double timestampSeconds) { - this.timestampSeconds = timestampSeconds; - } - - /** - * Returns whether the pipeline has targets. - * - * @return Whether the pipeline has targets. 
- */ - public boolean hasTargets() { - return targets.size() > 0; - } - - /** - * Returns a copy of the vector of targets. - * - * @return A copy of the vector of targets. - */ - public List getTargets() { - return new ArrayList<>(targets); - } - - /** - * Return the latest mulit-target result. Be sure to check - * getMultiTagResult().estimatedPose.isPresent before using the pose estimate! - */ - public MultiTargetPNPResults getMultiTagResult() { - return multiTagResult; - } - - /** - * Populates the fields of the pipeline result from the packet. - * - * @param packet The incoming packet. - * @return The incoming packet. - */ - public Packet createFromPacket(Packet packet) { - // Decode latency, existence of targets, and number of targets. - latencyMillis = packet.decodeDouble(); - this.multiTagResult = MultiTargetPNPResults.createFromPacket(packet); - byte targetCount = packet.decodeByte(); - - targets.clear(); - - // Decode the information of each target. - for (int i = 0; i < (int) targetCount; ++i) { - var target = new PhotonTrackedTarget(); - target.createFromPacket(packet); - targets.add(target); - } - - return packet; - } - - /** - * Populates the outgoing packet with information from this pipeline result. - * - * @param packet The outgoing packet. - * @return The outgoing packet. - */ - public Packet populatePacket(Packet packet) { - // Encode latency, existence of targets, and number of targets. - packet.encode(latencyMillis); - multiTagResult.populatePacket(packet); - packet.encode((byte) targets.size()); - - // Encode the information of each target. - for (var target : targets) target.populatePacket(packet); - - // Return the packet. - return packet; - } - - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + ((targets == null) ? 
0 : targets.hashCode()); - long temp; - temp = Double.doubleToLongBits(latencyMillis); - result = prime * result + (int) (temp ^ (temp >>> 32)); - temp = Double.doubleToLongBits(timestampSeconds); - result = prime * result + (int) (temp ^ (temp >>> 32)); - result = prime * result + ((multiTagResult == null) ? 0 : multiTagResult.hashCode()); - return result; - } - - @Override - public boolean equals(Object obj) { - if (this == obj) return true; - if (obj == null) return false; - if (getClass() != obj.getClass()) return false; - PhotonPipelineResult other = (PhotonPipelineResult) obj; - if (targets == null) { - if (other.targets != null) return false; - } else if (!targets.equals(other.targets)) return false; - if (Double.doubleToLongBits(latencyMillis) != Double.doubleToLongBits(other.latencyMillis)) - return false; - if (Double.doubleToLongBits(timestampSeconds) - != Double.doubleToLongBits(other.timestampSeconds)) return false; - if (multiTagResult == null) { - if (other.multiTagResult != null) return false; - } else if (!multiTagResult.equals(other.multiTagResult)) return false; - return true; + return hasTargets() ? targets.get(0) : null; + } + + /** + * Returns the latency in the pipeline. + * + * @return The latency in the pipeline. + */ + public double getLatencyMillis() { + return latencyMillis; + } + + /** + * Returns the estimated time the frame was taken, This is more accurate than using + * getLatencyMillis() + * + * @return The timestamp in seconds, or -1 if this result has no timestamp set. + */ + public double getTimestampSeconds() { + return timestampSeconds; + } + + /** + * Sets the FPGA timestamp of this result in seconds. + * + * @param timestampSeconds The timestamp in seconds. + */ + public void setTimestampSeconds(double timestampSeconds) { + this.timestampSeconds = timestampSeconds; + } + + /** + * Returns whether the pipeline has targets. + * + * @return Whether the pipeline has targets. 
+ */ + public boolean hasTargets() { + return targets.size() > 0; + } + + /** + * Returns a copy of the vector of targets. + * + * @return A copy of the vector of targets. + */ + public List getTargets() { + return new ArrayList<>(targets); + } + + /** + * Return the latest mulit-target result. Be sure to check + * getMultiTagResult().estimatedPose.isPresent before using the pose estimate! + */ + public MultiTargetPNPResults getMultiTagResult() { + return multiTagResult; + } + + /** + * Populates the fields of the pipeline result from the packet. + * + * @param packet The incoming packet. + * @return The incoming packet. + */ + public Packet createFromPacket(Packet packet) { + // Decode latency, existence of targets, and number of targets. + latencyMillis = packet.decodeDouble(); + this.multiTagResult = MultiTargetPNPResults.createFromPacket(packet); + byte targetCount = packet.decodeByte(); + + targets.clear(); + + // Decode the information of each target. + for (int i = 0; i < (int) targetCount; ++i) { + var target = new PhotonTrackedTarget(); + target.createFromPacket(packet); + targets.add(target); } - @Override - public String toString() { - return "PhotonPipelineResult [targets=" - + targets - + ", latencyMillis=" - + latencyMillis - + ", timestampSeconds=" - + timestampSeconds - + ", multiTagResult=" - + multiTagResult - + "]"; - } + return packet; + } + + /** + * Populates the outgoing packet with information from this pipeline result. + * + * @param packet The outgoing packet. + * @return The outgoing packet. + */ + public Packet populatePacket(Packet packet) { + // Encode latency, existence of targets, and number of targets. + packet.encode(latencyMillis); + multiTagResult.populatePacket(packet); + packet.encode((byte) targets.size()); + + // Encode the information of each target. + for (var target : targets) target.populatePacket(packet); + + // Return the packet. 
+ return packet; + } + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + ((targets == null) ? 0 : targets.hashCode()); + long temp; + temp = Double.doubleToLongBits(latencyMillis); + result = prime * result + (int) (temp ^ (temp >>> 32)); + temp = Double.doubleToLongBits(timestampSeconds); + result = prime * result + (int) (temp ^ (temp >>> 32)); + result = prime * result + ((multiTagResult == null) ? 0 : multiTagResult.hashCode()); + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) return true; + if (obj == null) return false; + if (getClass() != obj.getClass()) return false; + PhotonPipelineResult other = (PhotonPipelineResult) obj; + if (targets == null) { + if (other.targets != null) return false; + } else if (!targets.equals(other.targets)) return false; + if (Double.doubleToLongBits(latencyMillis) != Double.doubleToLongBits(other.latencyMillis)) + return false; + if (Double.doubleToLongBits(timestampSeconds) + != Double.doubleToLongBits(other.timestampSeconds)) return false; + if (multiTagResult == null) { + if (other.multiTagResult != null) return false; + } else if (!multiTagResult.equals(other.multiTagResult)) return false; + return true; + } + + @Override + public String toString() { + return "PhotonPipelineResult [targets=" + + targets + + ", latencyMillis=" + + latencyMillis + + ", timestampSeconds=" + + timestampSeconds + + ", multiTagResult=" + + multiTagResult + + "]"; + } } diff --git a/photon-targeting/src/main/java/org/photonvision/targeting/PhotonTrackedTarget.java b/photon-targeting/src/main/java/org/photonvision/targeting/PhotonTrackedTarget.java index 40263f5cc5..b4a50722f3 100644 --- a/photon-targeting/src/main/java/org/photonvision/targeting/PhotonTrackedTarget.java +++ b/photon-targeting/src/main/java/org/photonvision/targeting/PhotonTrackedTarget.java @@ -24,270 +24,270 @@ import org.photonvision.utils.PacketUtils; public class 
PhotonTrackedTarget { - private static final int MAX_CORNERS = 8; - public static final int PACK_SIZE_BYTES = - Double.BYTES * (5 + 7 + 2 * 4 + 1 + 7 + 2 * MAX_CORNERS); - - private double yaw; - private double pitch; - private double area; - private double skew; - private int fiducialId; - private Transform3d bestCameraToTarget = new Transform3d(); - private Transform3d altCameraToTarget = new Transform3d(); - private double poseAmbiguity; - - // Corners from the min-area rectangle bounding the target - private List minAreaRectCorners; - - // Corners from whatever corner detection method was used - private List detectedCorners; - - public PhotonTrackedTarget() {} - - /** Construct a tracked target, given exactly 4 corners */ - public PhotonTrackedTarget( - double yaw, - double pitch, - double area, - double skew, - int id, - Transform3d pose, - Transform3d altPose, - double ambiguity, - List minAreaRectCorners, - List detectedCorners) { - assert minAreaRectCorners.size() == 4; - - if (detectedCorners.size() > MAX_CORNERS) { - detectedCorners = detectedCorners.subList(0, MAX_CORNERS); - } - - this.yaw = yaw; - this.pitch = pitch; - this.area = area; - this.skew = skew; - this.fiducialId = id; - this.bestCameraToTarget = pose; - this.altCameraToTarget = altPose; - this.minAreaRectCorners = minAreaRectCorners; - this.detectedCorners = detectedCorners; - this.poseAmbiguity = ambiguity; + private static final int MAX_CORNERS = 8; + public static final int PACK_SIZE_BYTES = + Double.BYTES * (5 + 7 + 2 * 4 + 1 + 7 + 2 * MAX_CORNERS); + + private double yaw; + private double pitch; + private double area; + private double skew; + private int fiducialId; + private Transform3d bestCameraToTarget = new Transform3d(); + private Transform3d altCameraToTarget = new Transform3d(); + private double poseAmbiguity; + + // Corners from the min-area rectangle bounding the target + private List minAreaRectCorners; + + // Corners from whatever corner detection method was used + private 
List detectedCorners; + + public PhotonTrackedTarget() {} + + /** Construct a tracked target, given exactly 4 corners */ + public PhotonTrackedTarget( + double yaw, + double pitch, + double area, + double skew, + int id, + Transform3d pose, + Transform3d altPose, + double ambiguity, + List minAreaRectCorners, + List detectedCorners) { + assert minAreaRectCorners.size() == 4; + + if (detectedCorners.size() > MAX_CORNERS) { + detectedCorners = detectedCorners.subList(0, MAX_CORNERS); } - public double getYaw() { - return yaw; + this.yaw = yaw; + this.pitch = pitch; + this.area = area; + this.skew = skew; + this.fiducialId = id; + this.bestCameraToTarget = pose; + this.altCameraToTarget = altPose; + this.minAreaRectCorners = minAreaRectCorners; + this.detectedCorners = detectedCorners; + this.poseAmbiguity = ambiguity; + } + + public double getYaw() { + return yaw; + } + + public double getPitch() { + return pitch; + } + + public double getArea() { + return area; + } + + public double getSkew() { + return skew; + } + + /** Get the Fiducial ID, or -1 if not set. */ + public int getFiducialId() { + return fiducialId; + } + + /** + * Get the ratio of pose reprojection errors, called ambiguity. Numbers above 0.2 are likely to be + * ambiguous. -1 if invalid. + */ + public double getPoseAmbiguity() { + return poseAmbiguity; + } + + /** + * Return a list of the 4 corners in image space (origin top left, x right, y down), in no + * particular order, of the minimum area bounding rectangle of this target + */ + public List getMinAreaRectCorners() { + return minAreaRectCorners; + } + + /** + * Return a list of the n corners in image space (origin top left, x right, y down), in no + * particular order, detected for this target. + * + *

For fiducials, the order is known and is always counter-clock wise around the tag, like so: + * + *

​
+   * ⟶ +X  3 ----- 2
+   * |      |       |
+   * V      |       |
+   * +Y     0 ----- 1
+   * 
+ */ + public List getDetectedCorners() { + return detectedCorners; + } + + /** + * Get the transform that maps camera space (X = forward, Y = left, Z = up) to object/fiducial tag + * space (X forward, Y left, Z up) with the lowest reprojection error + */ + public Transform3d getBestCameraToTarget() { + return bestCameraToTarget; + } + + /** + * Get the transform that maps camera space (X = forward, Y = left, Z = up) to object/fiducial tag + * space (X forward, Y left, Z up) with the highest reprojection error + */ + public Transform3d getAlternateCameraToTarget() { + return altCameraToTarget; + } + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + long temp; + temp = Double.doubleToLongBits(yaw); + result = prime * result + (int) (temp ^ (temp >>> 32)); + temp = Double.doubleToLongBits(pitch); + result = prime * result + (int) (temp ^ (temp >>> 32)); + temp = Double.doubleToLongBits(area); + result = prime * result + (int) (temp ^ (temp >>> 32)); + temp = Double.doubleToLongBits(skew); + result = prime * result + (int) (temp ^ (temp >>> 32)); + result = prime * result + fiducialId; + result = prime * result + ((bestCameraToTarget == null) ? 0 : bestCameraToTarget.hashCode()); + result = prime * result + ((altCameraToTarget == null) ? 0 : altCameraToTarget.hashCode()); + temp = Double.doubleToLongBits(poseAmbiguity); + result = prime * result + (int) (temp ^ (temp >>> 32)); + result = prime * result + ((minAreaRectCorners == null) ? 0 : minAreaRectCorners.hashCode()); + result = prime * result + ((detectedCorners == null) ? 
0 : detectedCorners.hashCode()); + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) return true; + if (obj == null) return false; + if (getClass() != obj.getClass()) return false; + PhotonTrackedTarget other = (PhotonTrackedTarget) obj; + if (Double.doubleToLongBits(yaw) != Double.doubleToLongBits(other.yaw)) return false; + if (Double.doubleToLongBits(pitch) != Double.doubleToLongBits(other.pitch)) return false; + if (Double.doubleToLongBits(area) != Double.doubleToLongBits(other.area)) return false; + if (Double.doubleToLongBits(skew) != Double.doubleToLongBits(other.skew)) return false; + if (fiducialId != other.fiducialId) return false; + if (bestCameraToTarget == null) { + if (other.bestCameraToTarget != null) return false; + } else if (!bestCameraToTarget.equals(other.bestCameraToTarget)) return false; + if (altCameraToTarget == null) { + if (other.altCameraToTarget != null) return false; + } else if (!altCameraToTarget.equals(other.altCameraToTarget)) return false; + if (Double.doubleToLongBits(poseAmbiguity) != Double.doubleToLongBits(other.poseAmbiguity)) + return false; + if (minAreaRectCorners == null) { + if (other.minAreaRectCorners != null) return false; + } else if (!minAreaRectCorners.equals(other.minAreaRectCorners)) return false; + if (detectedCorners == null) { + if (other.detectedCorners != null) return false; + } else if (!detectedCorners.equals(other.detectedCorners)) return false; + return true; + } + + private static void encodeList(Packet packet, List list) { + packet.encode((byte) Math.min(list.size(), Byte.MAX_VALUE)); + for (int i = 0; i < list.size(); i++) { + packet.encode(list.get(i).x); + packet.encode(list.get(i).y); } - - public double getPitch() { - return pitch; - } - - public double getArea() { - return area; - } - - public double getSkew() { - return skew; - } - - /** Get the Fiducial ID, or -1 if not set. 
*/ - public int getFiducialId() { - return fiducialId; + } + + private static List decodeList(Packet p) { + byte len = p.decodeByte(); + var ret = new ArrayList(); + for (int i = 0; i < len; i++) { + double cx = p.decodeDouble(); + double cy = p.decodeDouble(); + ret.add(new TargetCorner(cx, cy)); } - - /** - * Get the ratio of pose reprojection errors, called ambiguity. Numbers above 0.2 are likely to be - * ambiguous. -1 if invalid. - */ - public double getPoseAmbiguity() { - return poseAmbiguity; - } - - /** - * Return a list of the 4 corners in image space (origin top left, x right, y down), in no - * particular order, of the minimum area bounding rectangle of this target - */ - public List getMinAreaRectCorners() { - return minAreaRectCorners; - } - - /** - * Return a list of the n corners in image space (origin top left, x right, y down), in no - * particular order, detected for this target. - * - *

For fiducials, the order is known and is always counter-clock wise around the tag, like so: - * - *

​
-     * ⟶ +X  3 ----- 2
-     * |      |       |
-     * V      |       |
-     * +Y     0 ----- 1
-     * 
- */ - public List getDetectedCorners() { - return detectedCorners; - } - - /** - * Get the transform that maps camera space (X = forward, Y = left, Z = up) to object/fiducial tag - * space (X forward, Y left, Z up) with the lowest reprojection error - */ - public Transform3d getBestCameraToTarget() { - return bestCameraToTarget; + return ret; + } + + /** + * Populates the fields of this class with information from the incoming packet. + * + * @param packet The incoming packet. + * @return The incoming packet. + */ + public Packet createFromPacket(Packet packet) { + this.yaw = packet.decodeDouble(); + this.pitch = packet.decodeDouble(); + this.area = packet.decodeDouble(); + this.skew = packet.decodeDouble(); + this.fiducialId = packet.decodeInt(); + + this.bestCameraToTarget = PacketUtils.decodeTransform(packet); + this.altCameraToTarget = PacketUtils.decodeTransform(packet); + + this.poseAmbiguity = packet.decodeDouble(); + + this.minAreaRectCorners = new ArrayList<>(4); + for (int i = 0; i < 4; i++) { + double cx = packet.decodeDouble(); + double cy = packet.decodeDouble(); + minAreaRectCorners.add(new TargetCorner(cx, cy)); } - /** - * Get the transform that maps camera space (X = forward, Y = left, Z = up) to object/fiducial tag - * space (X forward, Y left, Z up) with the highest reprojection error - */ - public Transform3d getAlternateCameraToTarget() { - return altCameraToTarget; + detectedCorners = decodeList(packet); + + return packet; + } + + /** + * Populates the outgoing packet with information from the current target. + * + * @param packet The outgoing packet. + * @return The outgoing packet. 
+ */ + public Packet populatePacket(Packet packet) { + packet.encode(yaw); + packet.encode(pitch); + packet.encode(area); + packet.encode(skew); + packet.encode(fiducialId); + PacketUtils.encodeTransform(packet, bestCameraToTarget); + PacketUtils.encodeTransform(packet, altCameraToTarget); + packet.encode(poseAmbiguity); + + for (int i = 0; i < 4; i++) { + packet.encode(minAreaRectCorners.get(i).x); + packet.encode(minAreaRectCorners.get(i).y); } - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - long temp; - temp = Double.doubleToLongBits(yaw); - result = prime * result + (int) (temp ^ (temp >>> 32)); - temp = Double.doubleToLongBits(pitch); - result = prime * result + (int) (temp ^ (temp >>> 32)); - temp = Double.doubleToLongBits(area); - result = prime * result + (int) (temp ^ (temp >>> 32)); - temp = Double.doubleToLongBits(skew); - result = prime * result + (int) (temp ^ (temp >>> 32)); - result = prime * result + fiducialId; - result = prime * result + ((bestCameraToTarget == null) ? 0 : bestCameraToTarget.hashCode()); - result = prime * result + ((altCameraToTarget == null) ? 0 : altCameraToTarget.hashCode()); - temp = Double.doubleToLongBits(poseAmbiguity); - result = prime * result + (int) (temp ^ (temp >>> 32)); - result = prime * result + ((minAreaRectCorners == null) ? 0 : minAreaRectCorners.hashCode()); - result = prime * result + ((detectedCorners == null) ? 
0 : detectedCorners.hashCode()); - return result; - } - - @Override - public boolean equals(Object obj) { - if (this == obj) return true; - if (obj == null) return false; - if (getClass() != obj.getClass()) return false; - PhotonTrackedTarget other = (PhotonTrackedTarget) obj; - if (Double.doubleToLongBits(yaw) != Double.doubleToLongBits(other.yaw)) return false; - if (Double.doubleToLongBits(pitch) != Double.doubleToLongBits(other.pitch)) return false; - if (Double.doubleToLongBits(area) != Double.doubleToLongBits(other.area)) return false; - if (Double.doubleToLongBits(skew) != Double.doubleToLongBits(other.skew)) return false; - if (fiducialId != other.fiducialId) return false; - if (bestCameraToTarget == null) { - if (other.bestCameraToTarget != null) return false; - } else if (!bestCameraToTarget.equals(other.bestCameraToTarget)) return false; - if (altCameraToTarget == null) { - if (other.altCameraToTarget != null) return false; - } else if (!altCameraToTarget.equals(other.altCameraToTarget)) return false; - if (Double.doubleToLongBits(poseAmbiguity) != Double.doubleToLongBits(other.poseAmbiguity)) - return false; - if (minAreaRectCorners == null) { - if (other.minAreaRectCorners != null) return false; - } else if (!minAreaRectCorners.equals(other.minAreaRectCorners)) return false; - if (detectedCorners == null) { - if (other.detectedCorners != null) return false; - } else if (!detectedCorners.equals(other.detectedCorners)) return false; - return true; - } - - private static void encodeList(Packet packet, List list) { - packet.encode((byte) Math.min(list.size(), Byte.MAX_VALUE)); - for (int i = 0; i < list.size(); i++) { - packet.encode(list.get(i).x); - packet.encode(list.get(i).y); - } - } - - private static List decodeList(Packet p) { - byte len = p.decodeByte(); - var ret = new ArrayList(); - for (int i = 0; i < len; i++) { - double cx = p.decodeDouble(); - double cy = p.decodeDouble(); - ret.add(new TargetCorner(cx, cy)); - } - return ret; - } - - /** - 
* Populates the fields of this class with information from the incoming packet. - * - * @param packet The incoming packet. - * @return The incoming packet. - */ - public Packet createFromPacket(Packet packet) { - this.yaw = packet.decodeDouble(); - this.pitch = packet.decodeDouble(); - this.area = packet.decodeDouble(); - this.skew = packet.decodeDouble(); - this.fiducialId = packet.decodeInt(); - - this.bestCameraToTarget = PacketUtils.decodeTransform(packet); - this.altCameraToTarget = PacketUtils.decodeTransform(packet); - - this.poseAmbiguity = packet.decodeDouble(); - - this.minAreaRectCorners = new ArrayList<>(4); - for (int i = 0; i < 4; i++) { - double cx = packet.decodeDouble(); - double cy = packet.decodeDouble(); - minAreaRectCorners.add(new TargetCorner(cx, cy)); - } - - detectedCorners = decodeList(packet); - - return packet; - } - - /** - * Populates the outgoing packet with information from the current target. - * - * @param packet The outgoing packet. - * @return The outgoing packet. 
- */ - public Packet populatePacket(Packet packet) { - packet.encode(yaw); - packet.encode(pitch); - packet.encode(area); - packet.encode(skew); - packet.encode(fiducialId); - PacketUtils.encodeTransform(packet, bestCameraToTarget); - PacketUtils.encodeTransform(packet, altCameraToTarget); - packet.encode(poseAmbiguity); - - for (int i = 0; i < 4; i++) { - packet.encode(minAreaRectCorners.get(i).x); - packet.encode(minAreaRectCorners.get(i).y); - } - - encodeList(packet, detectedCorners); - - return packet; - } - - @Override - public String toString() { - return "PhotonTrackedTarget{" - + "yaw=" - + yaw - + ", pitch=" - + pitch - + ", area=" - + area - + ", skew=" - + skew - + ", fiducialId=" - + fiducialId - + ", cameraToTarget=" - + bestCameraToTarget - + ", targetCorners=" - + minAreaRectCorners - + '}'; - } + encodeList(packet, detectedCorners); + + return packet; + } + + @Override + public String toString() { + return "PhotonTrackedTarget{" + + "yaw=" + + yaw + + ", pitch=" + + pitch + + ", area=" + + area + + ", skew=" + + skew + + ", fiducialId=" + + fiducialId + + ", cameraToTarget=" + + bestCameraToTarget + + ", targetCorners=" + + minAreaRectCorners + + '}'; + } } diff --git a/photon-targeting/src/main/java/org/photonvision/targeting/TargetCorner.java b/photon-targeting/src/main/java/org/photonvision/targeting/TargetCorner.java index 9808079a5d..83ff3a7631 100644 --- a/photon-targeting/src/main/java/org/photonvision/targeting/TargetCorner.java +++ b/photon-targeting/src/main/java/org/photonvision/targeting/TargetCorner.java @@ -24,29 +24,29 @@ * Origin at the top left, plus-x to the right, plus-y down. 
*/ public class TargetCorner { - public final double x; - public final double y; + public final double x; + public final double y; - public TargetCorner(double cx, double cy) { - this.x = cx; - this.y = cy; - } + public TargetCorner(double cx, double cy) { + this.x = cx; + this.y = cy; + } - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - TargetCorner that = (TargetCorner) o; - return Double.compare(that.x, x) == 0 && Double.compare(that.y, y) == 0; - } + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TargetCorner that = (TargetCorner) o; + return Double.compare(that.x, x) == 0 && Double.compare(that.y, y) == 0; + } - @Override - public int hashCode() { - return Objects.hash(x, y); - } + @Override + public int hashCode() { + return Objects.hash(x, y); + } - @Override - public String toString() { - return "(" + x + "," + y + ')'; - } + @Override + public String toString() { + return "(" + x + "," + y + ')'; + } } diff --git a/photon-targeting/src/main/java/org/photonvision/utils/PacketUtils.java b/photon-targeting/src/main/java/org/photonvision/utils/PacketUtils.java index 43a0dd4cf6..34d0aeb562 100644 --- a/photon-targeting/src/main/java/org/photonvision/utils/PacketUtils.java +++ b/photon-targeting/src/main/java/org/photonvision/utils/PacketUtils.java @@ -24,26 +24,26 @@ import org.photonvision.common.dataflow.structures.Packet; public class PacketUtils { - public static Transform3d decodeTransform(Packet packet) { - double x = packet.decodeDouble(); - double y = packet.decodeDouble(); - double z = packet.decodeDouble(); - var translation = new Translation3d(x, y, z); - double w = packet.decodeDouble(); - x = packet.decodeDouble(); - y = packet.decodeDouble(); - z = packet.decodeDouble(); - var rotation = new Rotation3d(new Quaternion(w, x, y, z)); - return new 
Transform3d(translation, rotation); - } + public static Transform3d decodeTransform(Packet packet) { + double x = packet.decodeDouble(); + double y = packet.decodeDouble(); + double z = packet.decodeDouble(); + var translation = new Translation3d(x, y, z); + double w = packet.decodeDouble(); + x = packet.decodeDouble(); + y = packet.decodeDouble(); + z = packet.decodeDouble(); + var rotation = new Rotation3d(new Quaternion(w, x, y, z)); + return new Transform3d(translation, rotation); + } - public static void encodeTransform(Packet packet, Transform3d transform) { - packet.encode(transform.getTranslation().getX()); - packet.encode(transform.getTranslation().getY()); - packet.encode(transform.getTranslation().getZ()); - packet.encode(transform.getRotation().getQuaternion().getW()); - packet.encode(transform.getRotation().getQuaternion().getX()); - packet.encode(transform.getRotation().getQuaternion().getY()); - packet.encode(transform.getRotation().getQuaternion().getZ()); - } + public static void encodeTransform(Packet packet, Transform3d transform) { + packet.encode(transform.getTranslation().getX()); + packet.encode(transform.getTranslation().getY()); + packet.encode(transform.getTranslation().getZ()); + packet.encode(transform.getRotation().getQuaternion().getW()); + packet.encode(transform.getRotation().getQuaternion().getX()); + packet.encode(transform.getRotation().getQuaternion().getY()); + packet.encode(transform.getRotation().getQuaternion().getZ()); + } } diff --git a/photonlib-cpp-examples/examples.gradle b/photonlib-cpp-examples/examples.gradle index 976440f33f..1207d05f1d 100644 --- a/photonlib-cpp-examples/examples.gradle +++ b/photonlib-cpp-examples/examples.gradle @@ -4,11 +4,11 @@ def EXCLUDED_DIRS = ["bin", "build"] // List all non-hidden directories not in EXCUDED_DIRS ext.exampleFolderNames = file("${rootDir}") - .listFiles() - .findAll { - return (it.isDirectory() - && !it.isHidden() - && !(it.name in EXCLUDED_DIRS) && !it.name.startsWith(".") 
- && it.toPath().resolve("build.gradle").toFile().exists()) - } - .collect { it.name } + .listFiles() + .findAll { + return (it.isDirectory() + && !it.isHidden() + && !(it.name in EXCLUDED_DIRS) && !it.name.startsWith(".") + && it.toPath().resolve("build.gradle").toFile().exists()) + } + .collect { it.name } diff --git a/photonlib-java-examples/aimandrange/build.gradle b/photonlib-java-examples/aimandrange/build.gradle index 7dda3b59b0..f3750f10b2 100644 --- a/photonlib-java-examples/aimandrange/build.gradle +++ b/photonlib-java-examples/aimandrange/build.gradle @@ -78,7 +78,11 @@ wpi.sim.addDriverstation() // in order to make them all available at runtime. Also adding the manifest so WPILib // knows where to look for our Robot Class. jar { - from { configurations.runtimeClasspath.collect { it.isDirectory() ? it : zipTree(it) } } + from { + configurations.runtimeClasspath.collect { + it.isDirectory() ? it : zipTree(it) + } + } manifest edu.wpi.first.gradlerio.GradleRIOPlugin.javaManifest(ROBOT_MAIN_CLASS) duplicatesStrategy = DuplicatesStrategy.INCLUDE } diff --git a/photonlib-java-examples/aimandrange/src/main/java/frc/robot/Main.java b/photonlib-java-examples/aimandrange/src/main/java/frc/robot/Main.java index 077f979f97..a92703e411 100644 --- a/photonlib-java-examples/aimandrange/src/main/java/frc/robot/Main.java +++ b/photonlib-java-examples/aimandrange/src/main/java/frc/robot/Main.java @@ -32,14 +32,14 @@ * call. */ public final class Main { - private Main() {} + private Main() {} - /** - * Main initialization function. Do not perform any initialization here. - * - *

If you change your main robot class, change the parameter type. - */ - public static void main(String... args) { - RobotBase.startRobot(Robot::new); - } + /** + * Main initialization function. Do not perform any initialization here. + * + *

If you change your main robot class, change the parameter type. + */ + public static void main(String... args) { + RobotBase.startRobot(Robot::new); + } } diff --git a/photonlib-java-examples/aimandrange/src/main/java/frc/robot/Robot.java b/photonlib-java-examples/aimandrange/src/main/java/frc/robot/Robot.java index d7a0c2a23c..e8fb3ddf02 100644 --- a/photonlib-java-examples/aimandrange/src/main/java/frc/robot/Robot.java +++ b/photonlib-java-examples/aimandrange/src/main/java/frc/robot/Robot.java @@ -40,72 +40,72 @@ * project. */ public class Robot extends TimedRobot { - // Constants such as camera and target height stored. Change per robot and goal! - final double CAMERA_HEIGHT_METERS = Units.inchesToMeters(24); - final double TARGET_HEIGHT_METERS = Units.feetToMeters(5); - // Angle between horizontal and the camera. - final double CAMERA_PITCH_RADIANS = Units.degreesToRadians(0); + // Constants such as camera and target height stored. Change per robot and goal! + final double CAMERA_HEIGHT_METERS = Units.inchesToMeters(24); + final double TARGET_HEIGHT_METERS = Units.feetToMeters(5); + // Angle between horizontal and the camera. 
+ final double CAMERA_PITCH_RADIANS = Units.degreesToRadians(0); - // How far from the target we want to be - final double GOAL_RANGE_METERS = Units.feetToMeters(3); + // How far from the target we want to be + final double GOAL_RANGE_METERS = Units.feetToMeters(3); - // Change this to match the name of your camera - PhotonCamera camera = new PhotonCamera("photonvision"); + // Change this to match the name of your camera + PhotonCamera camera = new PhotonCamera("photonvision"); - // PID constants should be tuned per robot - final double LINEAR_P = 0.1; - final double LINEAR_D = 0.0; - PIDController forwardController = new PIDController(LINEAR_P, 0, LINEAR_D); + // PID constants should be tuned per robot + final double LINEAR_P = 0.1; + final double LINEAR_D = 0.0; + PIDController forwardController = new PIDController(LINEAR_P, 0, LINEAR_D); - final double ANGULAR_P = 0.1; - final double ANGULAR_D = 0.0; - PIDController turnController = new PIDController(ANGULAR_P, 0, ANGULAR_D); + final double ANGULAR_P = 0.1; + final double ANGULAR_D = 0.0; + PIDController turnController = new PIDController(ANGULAR_P, 0, ANGULAR_D); - XboxController xboxController = new XboxController(0); + XboxController xboxController = new XboxController(0); - // Drive motors - PWMVictorSPX leftMotor = new PWMVictorSPX(0); - PWMVictorSPX rightMotor = new PWMVictorSPX(1); - DifferentialDrive drive = new DifferentialDrive(leftMotor, rightMotor); + // Drive motors + PWMVictorSPX leftMotor = new PWMVictorSPX(0); + PWMVictorSPX rightMotor = new PWMVictorSPX(1); + DifferentialDrive drive = new DifferentialDrive(leftMotor, rightMotor); - @Override - public void teleopPeriodic() { - double forwardSpeed; - double rotationSpeed; + @Override + public void teleopPeriodic() { + double forwardSpeed; + double rotationSpeed; - if (xboxController.getAButton()) { - // Vision-alignment mode - // Query the latest result from PhotonVision - var result = camera.getLatestResult(); + if (xboxController.getAButton()) { 
+ // Vision-alignment mode + // Query the latest result from PhotonVision + var result = camera.getLatestResult(); - if (result.hasTargets()) { - // First calculate range - double range = - PhotonUtils.calculateDistanceToTargetMeters( - CAMERA_HEIGHT_METERS, - TARGET_HEIGHT_METERS, - CAMERA_PITCH_RADIANS, - Units.degreesToRadians(result.getBestTarget().getPitch())); + if (result.hasTargets()) { + // First calculate range + double range = + PhotonUtils.calculateDistanceToTargetMeters( + CAMERA_HEIGHT_METERS, + TARGET_HEIGHT_METERS, + CAMERA_PITCH_RADIANS, + Units.degreesToRadians(result.getBestTarget().getPitch())); - // Use this range as the measurement we give to the PID controller. - // -1.0 required to ensure positive PID controller effort _increases_ range - forwardSpeed = -forwardController.calculate(range, GOAL_RANGE_METERS); + // Use this range as the measurement we give to the PID controller. + // -1.0 required to ensure positive PID controller effort _increases_ range + forwardSpeed = -forwardController.calculate(range, GOAL_RANGE_METERS); - // Also calculate angular power - // -1.0 required to ensure positive PID controller effort _increases_ yaw - rotationSpeed = -turnController.calculate(result.getBestTarget().getYaw(), 0); - } else { - // If we have no targets, stay still. - forwardSpeed = 0; - rotationSpeed = 0; - } - } else { - // Manual Driver Mode - forwardSpeed = -xboxController.getRightY(); - rotationSpeed = xboxController.getLeftX(); - } - - // Use our forward/turn speeds to control the drivetrain - drive.arcadeDrive(forwardSpeed, rotationSpeed); + // Also calculate angular power + // -1.0 required to ensure positive PID controller effort _increases_ yaw + rotationSpeed = -turnController.calculate(result.getBestTarget().getYaw(), 0); + } else { + // If we have no targets, stay still. 
+ forwardSpeed = 0; + rotationSpeed = 0; + } + } else { + // Manual Driver Mode + forwardSpeed = -xboxController.getRightY(); + rotationSpeed = xboxController.getLeftX(); } + + // Use our forward/turn speeds to control the drivetrain + drive.arcadeDrive(forwardSpeed, rotationSpeed); + } } diff --git a/photonlib-java-examples/aimattarget/build.gradle b/photonlib-java-examples/aimattarget/build.gradle index 1442f184c9..d0f6ecfecc 100644 --- a/photonlib-java-examples/aimattarget/build.gradle +++ b/photonlib-java-examples/aimattarget/build.gradle @@ -78,7 +78,11 @@ wpi.sim.addDriverstation() // in order to make them all available at runtime. Also adding the manifest so WPILib // knows where to look for our Robot Class. jar { - from { configurations.runtimeClasspath.collect { it.isDirectory() ? it : zipTree(it) } } + from { + configurations.runtimeClasspath.collect { + it.isDirectory() ? it : zipTree(it) + } + } manifest edu.wpi.first.gradlerio.GradleRIOPlugin.javaManifest(ROBOT_MAIN_CLASS) duplicatesStrategy = DuplicatesStrategy.INCLUDE } diff --git a/photonlib-java-examples/aimattarget/src/main/java/frc/robot/Main.java b/photonlib-java-examples/aimattarget/src/main/java/frc/robot/Main.java index 077f979f97..a92703e411 100644 --- a/photonlib-java-examples/aimattarget/src/main/java/frc/robot/Main.java +++ b/photonlib-java-examples/aimattarget/src/main/java/frc/robot/Main.java @@ -32,14 +32,14 @@ * call. */ public final class Main { - private Main() {} + private Main() {} - /** - * Main initialization function. Do not perform any initialization here. - * - *

If you change your main robot class, change the parameter type. - */ - public static void main(String... args) { - RobotBase.startRobot(Robot::new); - } + /** + * Main initialization function. Do not perform any initialization here. + * + *

If you change your main robot class, change the parameter type. + */ + public static void main(String... args) { + RobotBase.startRobot(Robot::new); + } } diff --git a/photonlib-java-examples/aimattarget/src/main/java/frc/robot/Robot.java b/photonlib-java-examples/aimattarget/src/main/java/frc/robot/Robot.java index 5d41404fdc..0868e18c95 100644 --- a/photonlib-java-examples/aimattarget/src/main/java/frc/robot/Robot.java +++ b/photonlib-java-examples/aimattarget/src/main/java/frc/robot/Robot.java @@ -39,60 +39,60 @@ * project. */ public class Robot extends TimedRobot { - // Constants such as camera and target height stored. Change per robot and goal! - final double CAMERA_HEIGHT_METERS = Units.inchesToMeters(24); - final double TARGET_HEIGHT_METERS = Units.feetToMeters(5); - // Angle between horizontal and the camera. - final double CAMERA_PITCH_RADIANS = Units.degreesToRadians(0); + // Constants such as camera and target height stored. Change per robot and goal! + final double CAMERA_HEIGHT_METERS = Units.inchesToMeters(24); + final double TARGET_HEIGHT_METERS = Units.feetToMeters(5); + // Angle between horizontal and the camera. 
+ final double CAMERA_PITCH_RADIANS = Units.degreesToRadians(0); - // How far from the target we want to be - final double GOAL_RANGE_METERS = Units.feetToMeters(3); + // How far from the target we want to be + final double GOAL_RANGE_METERS = Units.feetToMeters(3); - // Change this to match the name of your camera - PhotonCamera camera = new PhotonCamera("photonvision"); + // Change this to match the name of your camera + PhotonCamera camera = new PhotonCamera("photonvision"); - // PID constants should be tuned per robot - final double LINEAR_P = 0.1; - final double LINEAR_D = 0.0; - PIDController forwardController = new PIDController(LINEAR_P, 0, LINEAR_D); + // PID constants should be tuned per robot + final double LINEAR_P = 0.1; + final double LINEAR_D = 0.0; + PIDController forwardController = new PIDController(LINEAR_P, 0, LINEAR_D); - final double ANGULAR_P = 0.1; - final double ANGULAR_D = 0.0; - PIDController turnController = new PIDController(ANGULAR_P, 0, ANGULAR_D); + final double ANGULAR_P = 0.1; + final double ANGULAR_D = 0.0; + PIDController turnController = new PIDController(ANGULAR_P, 0, ANGULAR_D); - XboxController xboxController = new XboxController(0); + XboxController xboxController = new XboxController(0); - // Drive motors - PWMVictorSPX leftMotor = new PWMVictorSPX(0); - PWMVictorSPX rightMotor = new PWMVictorSPX(1); - DifferentialDrive drive = new DifferentialDrive(leftMotor, rightMotor); + // Drive motors + PWMVictorSPX leftMotor = new PWMVictorSPX(0); + PWMVictorSPX rightMotor = new PWMVictorSPX(1); + DifferentialDrive drive = new DifferentialDrive(leftMotor, rightMotor); - @Override - public void teleopPeriodic() { - double forwardSpeed; - double rotationSpeed; + @Override + public void teleopPeriodic() { + double forwardSpeed; + double rotationSpeed; - forwardSpeed = -xboxController.getRightY(); + forwardSpeed = -xboxController.getRightY(); - if (xboxController.getAButton()) { - // Vision-alignment mode - // Query the latest result 
from PhotonVision - var result = camera.getLatestResult(); + if (xboxController.getAButton()) { + // Vision-alignment mode + // Query the latest result from PhotonVision + var result = camera.getLatestResult(); - if (result.hasTargets()) { - // Calculate angular turn power - // -1.0 required to ensure positive PID controller effort _increases_ yaw - rotationSpeed = -turnController.calculate(result.getBestTarget().getYaw(), 0); - } else { - // If we have no targets, stay still. - rotationSpeed = 0; - } - } else { - // Manual Driver Mode - rotationSpeed = xboxController.getLeftX(); - } - - // Use our forward/turn speeds to control the drivetrain - drive.arcadeDrive(forwardSpeed, rotationSpeed); + if (result.hasTargets()) { + // Calculate angular turn power + // -1.0 required to ensure positive PID controller effort _increases_ yaw + rotationSpeed = -turnController.calculate(result.getBestTarget().getYaw(), 0); + } else { + // If we have no targets, stay still. + rotationSpeed = 0; + } + } else { + // Manual Driver Mode + rotationSpeed = xboxController.getLeftX(); } + + // Use our forward/turn speeds to control the drivetrain + drive.arcadeDrive(forwardSpeed, rotationSpeed); + } } diff --git a/photonlib-java-examples/examples.gradle b/photonlib-java-examples/examples.gradle index 976440f33f..1207d05f1d 100644 --- a/photonlib-java-examples/examples.gradle +++ b/photonlib-java-examples/examples.gradle @@ -4,11 +4,11 @@ def EXCLUDED_DIRS = ["bin", "build"] // List all non-hidden directories not in EXCUDED_DIRS ext.exampleFolderNames = file("${rootDir}") - .listFiles() - .findAll { - return (it.isDirectory() - && !it.isHidden() - && !(it.name in EXCLUDED_DIRS) && !it.name.startsWith(".") - && it.toPath().resolve("build.gradle").toFile().exists()) - } - .collect { it.name } + .listFiles() + .findAll { + return (it.isDirectory() + && !it.isHidden() + && !(it.name in EXCLUDED_DIRS) && !it.name.startsWith(".") + && it.toPath().resolve("build.gradle").toFile().exists()) + } + 
.collect { it.name } diff --git a/photonlib-java-examples/getinrange/build.gradle b/photonlib-java-examples/getinrange/build.gradle index 7dda3b59b0..f3750f10b2 100644 --- a/photonlib-java-examples/getinrange/build.gradle +++ b/photonlib-java-examples/getinrange/build.gradle @@ -78,7 +78,11 @@ wpi.sim.addDriverstation() // in order to make them all available at runtime. Also adding the manifest so WPILib // knows where to look for our Robot Class. jar { - from { configurations.runtimeClasspath.collect { it.isDirectory() ? it : zipTree(it) } } + from { + configurations.runtimeClasspath.collect { + it.isDirectory() ? it : zipTree(it) + } + } manifest edu.wpi.first.gradlerio.GradleRIOPlugin.javaManifest(ROBOT_MAIN_CLASS) duplicatesStrategy = DuplicatesStrategy.INCLUDE } diff --git a/photonlib-java-examples/getinrange/src/main/java/frc/robot/Main.java b/photonlib-java-examples/getinrange/src/main/java/frc/robot/Main.java index 077f979f97..a92703e411 100644 --- a/photonlib-java-examples/getinrange/src/main/java/frc/robot/Main.java +++ b/photonlib-java-examples/getinrange/src/main/java/frc/robot/Main.java @@ -32,14 +32,14 @@ * call. */ public final class Main { - private Main() {} + private Main() {} - /** - * Main initialization function. Do not perform any initialization here. - * - *

If you change your main robot class, change the parameter type. - */ - public static void main(String... args) { - RobotBase.startRobot(Robot::new); - } + /** + * Main initialization function. Do not perform any initialization here. + * + *

If you change your main robot class, change the parameter type. + */ + public static void main(String... args) { + RobotBase.startRobot(Robot::new); + } } diff --git a/photonlib-java-examples/getinrange/src/main/java/frc/robot/Robot.java b/photonlib-java-examples/getinrange/src/main/java/frc/robot/Robot.java index c776021844..e59c49fcc4 100644 --- a/photonlib-java-examples/getinrange/src/main/java/frc/robot/Robot.java +++ b/photonlib-java-examples/getinrange/src/main/java/frc/robot/Robot.java @@ -40,68 +40,68 @@ * project. */ public class Robot extends TimedRobot { - // Constants such as camera and target height stored. Change per robot and goal! - final double CAMERA_HEIGHT_METERS = Units.inchesToMeters(24); - final double TARGET_HEIGHT_METERS = Units.feetToMeters(5); + // Constants such as camera and target height stored. Change per robot and goal! + final double CAMERA_HEIGHT_METERS = Units.inchesToMeters(24); + final double TARGET_HEIGHT_METERS = Units.feetToMeters(5); - // Angle between horizontal and the camera. - final double CAMERA_PITCH_RADIANS = Units.degreesToRadians(0); + // Angle between horizontal and the camera. 
+ final double CAMERA_PITCH_RADIANS = Units.degreesToRadians(0); - // How far from the target we want to be - final double GOAL_RANGE_METERS = Units.feetToMeters(3); + // How far from the target we want to be + final double GOAL_RANGE_METERS = Units.feetToMeters(3); - // Change this to match the name of your camera - PhotonCamera camera = new PhotonCamera("photonvision"); + // Change this to match the name of your camera + PhotonCamera camera = new PhotonCamera("photonvision"); - // PID constants should be tuned per robot - final double P_GAIN = 0.1; - final double D_GAIN = 0.0; - PIDController controller = new PIDController(P_GAIN, 0, D_GAIN); + // PID constants should be tuned per robot + final double P_GAIN = 0.1; + final double D_GAIN = 0.0; + PIDController controller = new PIDController(P_GAIN, 0, D_GAIN); - XboxController xboxController; + XboxController xboxController; - // Drive motors - PWMVictorSPX leftMotor = new PWMVictorSPX(0); - PWMVictorSPX rightMotor = new PWMVictorSPX(1); - DifferentialDrive drive = new DifferentialDrive(leftMotor, rightMotor); + // Drive motors + PWMVictorSPX leftMotor = new PWMVictorSPX(0); + PWMVictorSPX rightMotor = new PWMVictorSPX(1); + DifferentialDrive drive = new DifferentialDrive(leftMotor, rightMotor); - @Override - public void robotInit() { - xboxController = new XboxController(0); - } - - @Override - public void teleopPeriodic() { - double forwardSpeed; - double rotationSpeed = xboxController.getLeftX(); + @Override + public void robotInit() { + xboxController = new XboxController(0); + } - if (xboxController.getAButton()) { - // Vision-alignment mode - // Query the latest result from PhotonVision - var result = camera.getLatestResult(); + @Override + public void teleopPeriodic() { + double forwardSpeed; + double rotationSpeed = xboxController.getLeftX(); - if (result.hasTargets()) { - // First calculate range - double range = - PhotonUtils.calculateDistanceToTargetMeters( - CAMERA_HEIGHT_METERS, - 
TARGET_HEIGHT_METERS, - CAMERA_PITCH_RADIANS, - Units.degreesToRadians(result.getBestTarget().getPitch())); + if (xboxController.getAButton()) { + // Vision-alignment mode + // Query the latest result from PhotonVision + var result = camera.getLatestResult(); - // Use this range as the measurement we give to the PID controller. - // -1.0 required to ensure positive PID controller effort _increases_ range - forwardSpeed = -controller.calculate(range, GOAL_RANGE_METERS); - } else { - // If we have no targets, stay still. - forwardSpeed = 0; - } - } else { - // Manual Driver Mode - forwardSpeed = -xboxController.getRightY(); - } + if (result.hasTargets()) { + // First calculate range + double range = + PhotonUtils.calculateDistanceToTargetMeters( + CAMERA_HEIGHT_METERS, + TARGET_HEIGHT_METERS, + CAMERA_PITCH_RADIANS, + Units.degreesToRadians(result.getBestTarget().getPitch())); - // Use our forward/turn speeds to control the drivetrain - drive.arcadeDrive(forwardSpeed, rotationSpeed); + // Use this range as the measurement we give to the PID controller. + // -1.0 required to ensure positive PID controller effort _increases_ range + forwardSpeed = -controller.calculate(range, GOAL_RANGE_METERS); + } else { + // If we have no targets, stay still. + forwardSpeed = 0; + } + } else { + // Manual Driver Mode + forwardSpeed = -xboxController.getRightY(); } + + // Use our forward/turn speeds to control the drivetrain + drive.arcadeDrive(forwardSpeed, rotationSpeed); + } } diff --git a/photonlib-java-examples/simaimandrange/build.gradle b/photonlib-java-examples/simaimandrange/build.gradle index 7dda3b59b0..f3750f10b2 100644 --- a/photonlib-java-examples/simaimandrange/build.gradle +++ b/photonlib-java-examples/simaimandrange/build.gradle @@ -78,7 +78,11 @@ wpi.sim.addDriverstation() // in order to make them all available at runtime. Also adding the manifest so WPILib // knows where to look for our Robot Class. 
jar { - from { configurations.runtimeClasspath.collect { it.isDirectory() ? it : zipTree(it) } } + from { + configurations.runtimeClasspath.collect { + it.isDirectory() ? it : zipTree(it) + } + } manifest edu.wpi.first.gradlerio.GradleRIOPlugin.javaManifest(ROBOT_MAIN_CLASS) duplicatesStrategy = DuplicatesStrategy.INCLUDE } diff --git a/photonlib-java-examples/simaimandrange/src/main/java/frc/robot/Constants.java b/photonlib-java-examples/simaimandrange/src/main/java/frc/robot/Constants.java index 8e0f41cd57..d26a898b57 100644 --- a/photonlib-java-examples/simaimandrange/src/main/java/frc/robot/Constants.java +++ b/photonlib-java-examples/simaimandrange/src/main/java/frc/robot/Constants.java @@ -31,61 +31,61 @@ import edu.wpi.first.math.util.Units; public class Constants { - // ---------- Vision - // Constants about how your camera is mounted to the robot - public static final double CAMERA_PITCH_RADIANS = - Units.degreesToRadians(15); // Angle "up" from horizontal - public static final double CAMERA_HEIGHT_METERS = Units.inchesToMeters(24); // Height above floor + // ---------- Vision + // Constants about how your camera is mounted to the robot + public static final double CAMERA_PITCH_RADIANS = + Units.degreesToRadians(15); // Angle "up" from horizontal + public static final double CAMERA_HEIGHT_METERS = Units.inchesToMeters(24); // Height above floor - // How far from the target we want to be - public static final double GOAL_RANGE_METERS = Units.feetToMeters(10); + // How far from the target we want to be + public static final double GOAL_RANGE_METERS = Units.feetToMeters(10); - // Where the 2020 High goal target is located on the field - // See - // https://docs.wpilib.org/en/stable/docs/software/advanced-controls/geometry/coordinate-systems.html#field-coordinate-system - // and https://firstfrc.blob.core.windows.net/frc2020/PlayingField/LayoutandMarkingDiagram.pdf - // (pages 4 and 5) - public static final Pose3d TARGET_POSE = - new Pose3d( - new 
Translation3d( - Units.feetToMeters(52.46), - Units.inchesToMeters(94.66), - Units.inchesToMeters(89.69)), // (center of vision target) - new Rotation3d(0.0, 0.0, Math.PI)); - // ---------- + // Where the 2020 High goal target is located on the field + // See + // https://docs.wpilib.org/en/stable/docs/software/advanced-controls/geometry/coordinate-systems.html#field-coordinate-system + // and https://firstfrc.blob.core.windows.net/frc2020/PlayingField/LayoutandMarkingDiagram.pdf + // (pages 4 and 5) + public static final Pose3d TARGET_POSE = + new Pose3d( + new Translation3d( + Units.feetToMeters(52.46), + Units.inchesToMeters(94.66), + Units.inchesToMeters(89.69)), // (center of vision target) + new Rotation3d(0.0, 0.0, Math.PI)); + // ---------- - // ---------- Drivetrain - public static final int LEFT_MOTOR_CHANNEL = 0; - public static final int RIGHT_MOTOR_CHANNEL = 1; + // ---------- Drivetrain + public static final int LEFT_MOTOR_CHANNEL = 0; + public static final int RIGHT_MOTOR_CHANNEL = 1; - // PID constants should be tuned per robot - public static final double LINEAR_P = 0.5; - public static final double LINEAR_I = 0; - public static final double LINEAR_D = 0.1; + // PID constants should be tuned per robot + public static final double LINEAR_P = 0.5; + public static final double LINEAR_I = 0; + public static final double LINEAR_D = 0.1; - public static final double ANGULAR_P = 0.03; - public static final double ANGULAR_I = 0; - public static final double ANGULAR_D = 0.003; + public static final double ANGULAR_P = 0.03; + public static final double ANGULAR_I = 0; + public static final double ANGULAR_D = 0.003; - // Ratio to multiply joystick inputs by - public static final double DRIVESPEED = 0.75; + // Ratio to multiply joystick inputs by + public static final double DRIVESPEED = 0.75; - // The following properties are necessary for simulation: + // The following properties are necessary for simulation: - // Distance from drivetrain left wheels to right 
wheels - public static final double TRACKWIDTH_METERS = Units.feetToMeters(2.0); - public static final double WHEEL_DIAMETER_METERS = Units.inchesToMeters(6.0); + // Distance from drivetrain left wheels to right wheels + public static final double TRACKWIDTH_METERS = Units.feetToMeters(2.0); + public static final double WHEEL_DIAMETER_METERS = Units.inchesToMeters(6.0); - // The motors used in the gearbox for one drivetrain side - public static final DCMotor DRIVE_MOTORS = DCMotor.getCIM(2); - // The gearbox reduction, or how many motor rotations per wheel rotation - public static final double GEARING = 8.0; + // The motors used in the gearbox for one drivetrain side + public static final DCMotor DRIVE_MOTORS = DCMotor.getCIM(2); + // The gearbox reduction, or how many motor rotations per wheel rotation + public static final double GEARING = 8.0; - // The drivetrain feedforward values - public static final double LINEAR_KV = 2.0; - public static final double LINEAR_KA = 0.5; + // The drivetrain feedforward values + public static final double LINEAR_KV = 2.0; + public static final double LINEAR_KA = 0.5; - public static final double ANGULAR_KV = 2.25; - public static final double ANGULAR_KA = 0.3; - // ---------- + public static final double ANGULAR_KV = 2.25; + public static final double ANGULAR_KA = 0.3; + // ---------- } diff --git a/photonlib-java-examples/simaimandrange/src/main/java/frc/robot/Main.java b/photonlib-java-examples/simaimandrange/src/main/java/frc/robot/Main.java index 077f979f97..a92703e411 100644 --- a/photonlib-java-examples/simaimandrange/src/main/java/frc/robot/Main.java +++ b/photonlib-java-examples/simaimandrange/src/main/java/frc/robot/Main.java @@ -32,14 +32,14 @@ * call. */ public final class Main { - private Main() {} + private Main() {} - /** - * Main initialization function. Do not perform any initialization here. - * - *

If you change your main robot class, change the parameter type. - */ - public static void main(String... args) { - RobotBase.startRobot(Robot::new); - } + /** + * Main initialization function. Do not perform any initialization here. + * + *

If you change your main robot class, change the parameter type. + */ + public static void main(String... args) { + RobotBase.startRobot(Robot::new); + } } diff --git a/photonlib-java-examples/simaimandrange/src/main/java/frc/robot/Robot.java b/photonlib-java-examples/simaimandrange/src/main/java/frc/robot/Robot.java index 4f22aa287c..702f2b91c3 100644 --- a/photonlib-java-examples/simaimandrange/src/main/java/frc/robot/Robot.java +++ b/photonlib-java-examples/simaimandrange/src/main/java/frc/robot/Robot.java @@ -44,82 +44,82 @@ * project. */ public class Robot extends TimedRobot { - // Change this to match the name of your camera - PhotonCamera camera = new PhotonCamera("photonvision"); - - // PID constants should be tuned per robot - PIDController forwardController = new PIDController(LINEAR_P, 0, LINEAR_D); - PIDController turnController = new PIDController(ANGULAR_P, 0, ANGULAR_D); - - XboxController xboxController = new XboxController(0); - - // Drive motors - PWMVictorSPX leftMotor = new PWMVictorSPX(LEFT_MOTOR_CHANNEL); - PWMVictorSPX rightMotor = new PWMVictorSPX(RIGHT_MOTOR_CHANNEL); - DifferentialDrive drive = new DifferentialDrive(leftMotor, rightMotor); - - @Override - public void robotInit() { - leftMotor.setInverted(false); - rightMotor.setInverted(true); + // Change this to match the name of your camera + PhotonCamera camera = new PhotonCamera("photonvision"); + + // PID constants should be tuned per robot + PIDController forwardController = new PIDController(LINEAR_P, 0, LINEAR_D); + PIDController turnController = new PIDController(ANGULAR_P, 0, ANGULAR_D); + + XboxController xboxController = new XboxController(0); + + // Drive motors + PWMVictorSPX leftMotor = new PWMVictorSPX(LEFT_MOTOR_CHANNEL); + PWMVictorSPX rightMotor = new PWMVictorSPX(RIGHT_MOTOR_CHANNEL); + DifferentialDrive drive = new DifferentialDrive(leftMotor, rightMotor); + + @Override + public void robotInit() { + leftMotor.setInverted(false); + rightMotor.setInverted(true); + } + + 
@Override + public void teleopPeriodic() { + double forwardSpeed; + double rotationSpeed; + + if (xboxController.getAButton()) { + // Vision-alignment mode + // Query the latest result from PhotonVision + var result = camera.getLatestResult(); + + if (result.hasTargets()) { + // First calculate range + double range = + PhotonUtils.calculateDistanceToTargetMeters( + CAMERA_HEIGHT_METERS, + TARGET_POSE.getZ(), + CAMERA_PITCH_RADIANS, + Units.degreesToRadians(result.getBestTarget().getPitch())); + + // Use this range as the measurement we give to the PID controller. + // (This forwardSpeed must be positive to go forward.) + forwardSpeed = -forwardController.calculate(range, GOAL_RANGE_METERS); + + // Also calculate angular power + // (This rotationSpeed must be positive to turn counter-clockwise.) + rotationSpeed = -turnController.calculate(result.getBestTarget().getYaw(), 0); + } else { + // If we have no targets, stay still. + forwardSpeed = 0; + rotationSpeed = 0; + } + } else { + // Manual Driver Mode + forwardSpeed = -xboxController.getLeftY() * DRIVESPEED; + rotationSpeed = -xboxController.getRightX() * DRIVESPEED; } - @Override - public void teleopPeriodic() { - double forwardSpeed; - double rotationSpeed; - - if (xboxController.getAButton()) { - // Vision-alignment mode - // Query the latest result from PhotonVision - var result = camera.getLatestResult(); - - if (result.hasTargets()) { - // First calculate range - double range = - PhotonUtils.calculateDistanceToTargetMeters( - CAMERA_HEIGHT_METERS, - TARGET_POSE.getZ(), - CAMERA_PITCH_RADIANS, - Units.degreesToRadians(result.getBestTarget().getPitch())); - - // Use this range as the measurement we give to the PID controller. - // (This forwardSpeed must be positive to go forward.) - forwardSpeed = -forwardController.calculate(range, GOAL_RANGE_METERS); - - // Also calculate angular power - // (This rotationSpeed must be positive to turn counter-clockwise.) 
- rotationSpeed = -turnController.calculate(result.getBestTarget().getYaw(), 0); - } else { - // If we have no targets, stay still. - forwardSpeed = 0; - rotationSpeed = 0; - } - } else { - // Manual Driver Mode - forwardSpeed = -xboxController.getLeftY() * DRIVESPEED; - rotationSpeed = -xboxController.getRightX() * DRIVESPEED; - } - - // Use our forward/turn speeds to control the drivetrain - drive.arcadeDrive(forwardSpeed, rotationSpeed); - } + // Use our forward/turn speeds to control the drivetrain + drive.arcadeDrive(forwardSpeed, rotationSpeed); + } - //////////////////////////////////////////////////// - // Simulation support + //////////////////////////////////////////////////// + // Simulation support - DrivetrainSim dtSim; - VisionSim visionSim; + DrivetrainSim dtSim; + VisionSim visionSim; - @Override - public void simulationInit() { - dtSim = new DrivetrainSim(leftMotor, rightMotor); - visionSim = new VisionSim("main", camera); - } + @Override + public void simulationInit() { + dtSim = new DrivetrainSim(leftMotor, rightMotor); + visionSim = new VisionSim("main", camera); + } - @Override - public void simulationPeriodic() { - dtSim.update(); - visionSim.update(dtSim.getPose()); - } + @Override + public void simulationPeriodic() { + dtSim.update(); + visionSim.update(dtSim.getPose()); + } } diff --git a/photonlib-java-examples/simaimandrange/src/main/java/frc/robot/sim/DrivetrainSim.java b/photonlib-java-examples/simaimandrange/src/main/java/frc/robot/sim/DrivetrainSim.java index 2a9c532e84..57d1382d6d 100644 --- a/photonlib-java-examples/simaimandrange/src/main/java/frc/robot/sim/DrivetrainSim.java +++ b/photonlib-java-examples/simaimandrange/src/main/java/frc/robot/sim/DrivetrainSim.java @@ -44,62 +44,62 @@ * real motors/sensors/physics are used instead. */ public class DrivetrainSim { - // ----- Simulation specific constants - // Drivetrain plant and simulation. 
This will calculate how the robot pose changes over time as - // motor voltages are applied. - private static final LinearSystem drivetrainSystem = - LinearSystemId.identifyDrivetrainSystem( - LINEAR_KV, LINEAR_KA, ANGULAR_KV, ANGULAR_KA, TRACKWIDTH_METERS); - private static final DifferentialDrivetrainSim drivetrainSimulator = - new DifferentialDrivetrainSim( - drivetrainSystem, - DRIVE_MOTORS, - GEARING, - TRACKWIDTH_METERS, - WHEEL_DIAMETER_METERS / 2.0, - null); - // ----- + // ----- Simulation specific constants + // Drivetrain plant and simulation. This will calculate how the robot pose changes over time as + // motor voltages are applied. + private static final LinearSystem drivetrainSystem = + LinearSystemId.identifyDrivetrainSystem( + LINEAR_KV, LINEAR_KA, ANGULAR_KV, ANGULAR_KA, TRACKWIDTH_METERS); + private static final DifferentialDrivetrainSim drivetrainSimulator = + new DifferentialDrivetrainSim( + drivetrainSystem, + DRIVE_MOTORS, + GEARING, + TRACKWIDTH_METERS, + WHEEL_DIAMETER_METERS / 2.0, + null); + // ----- - // PWM handles for getting commanded motor controller speeds - private final PWMSim leftLeader; - private final PWMSim rightLeader; + // PWM handles for getting commanded motor controller speeds + private final PWMSim leftLeader; + private final PWMSim rightLeader; - public DrivetrainSim(PWMMotorController leftController, PWMMotorController rightController) { - leftLeader = new PWMSim(leftController); - rightLeader = new PWMSim(rightController); - } + public DrivetrainSim(PWMMotorController leftController, PWMMotorController rightController) { + leftLeader = new PWMSim(leftController); + rightLeader = new PWMSim(rightController); + } - /** - * Perform all periodic drivetrain simulation related tasks to advance our simulation of robot - * physics forward by a single 20ms step. 
- */ - public void update() { - double leftMotorCmd = 0; - double rightMotorCmd = 0; + /** + * Perform all periodic drivetrain simulation related tasks to advance our simulation of robot + * physics forward by a single 20ms step. + */ + public void update() { + double leftMotorCmd = 0; + double rightMotorCmd = 0; - if (DriverStation.isEnabled() && !RobotController.isBrownedOut()) { - leftMotorCmd = leftLeader.getSpeed(); - rightMotorCmd = rightLeader.getSpeed(); - } + if (DriverStation.isEnabled() && !RobotController.isBrownedOut()) { + leftMotorCmd = leftLeader.getSpeed(); + rightMotorCmd = rightLeader.getSpeed(); + } - drivetrainSimulator.setInputs( - leftMotorCmd * RobotController.getBatteryVoltage(), - -rightMotorCmd * RobotController.getBatteryVoltage()); + drivetrainSimulator.setInputs( + leftMotorCmd * RobotController.getBatteryVoltage(), + -rightMotorCmd * RobotController.getBatteryVoltage()); - drivetrainSimulator.update(Robot.kDefaultPeriod); - } + drivetrainSimulator.update(Robot.kDefaultPeriod); + } - public Pose2d getPose() { - return drivetrainSimulator.getPose(); - } + public Pose2d getPose() { + return drivetrainSimulator.getPose(); + } - /** - * Resets the simulation back to a pre-defined pose. Useful to simulate the action of placing the - * robot onto a specific spot in the field (e.g. at the start of each match). - * - * @param pose - */ - public void resetPose(Pose2d pose) { - drivetrainSimulator.setPose(pose); - } + /** + * Resets the simulation back to a pre-defined pose. Useful to simulate the action of placing the + * robot onto a specific spot in the field (e.g. at the start of each match). 
+ * + * @param pose + */ + public void resetPose(Pose2d pose) { + drivetrainSimulator.setPose(pose); + } } diff --git a/photonlib-java-examples/simaimandrange/src/main/java/frc/robot/sim/VisionSim.java b/photonlib-java-examples/simaimandrange/src/main/java/frc/robot/sim/VisionSim.java index 0be5357c69..0be75f7fee 100644 --- a/photonlib-java-examples/simaimandrange/src/main/java/frc/robot/sim/VisionSim.java +++ b/photonlib-java-examples/simaimandrange/src/main/java/frc/robot/sim/VisionSim.java @@ -48,80 +48,80 @@ * real camera data is used instead. */ public class VisionSim { - // ----- Simulation specific constants - // 2020 High goal target shape - // See - // https://firstfrc.blob.core.windows.net/frc2020/PlayingField/2020FieldDrawing-SeasonSpecific.pdf - // page 208 - private static final TargetModel kTargetModel = - new TargetModel( - List.of( - new Translation3d(0, Units.inchesToMeters(-9.819867), Units.inchesToMeters(-8.5)), - new Translation3d(0, Units.inchesToMeters(9.819867), Units.inchesToMeters(-8.5)), - new Translation3d(0, Units.inchesToMeters(19.625), Units.inchesToMeters(8.5)), - new Translation3d(0, Units.inchesToMeters(-19.625), Units.inchesToMeters(8.5)))); + // ----- Simulation specific constants + // 2020 High goal target shape + // See + // https://firstfrc.blob.core.windows.net/frc2020/PlayingField/2020FieldDrawing-SeasonSpecific.pdf + // page 208 + private static final TargetModel kTargetModel = + new TargetModel( + List.of( + new Translation3d(0, Units.inchesToMeters(-9.819867), Units.inchesToMeters(-8.5)), + new Translation3d(0, Units.inchesToMeters(9.819867), Units.inchesToMeters(-8.5)), + new Translation3d(0, Units.inchesToMeters(19.625), Units.inchesToMeters(8.5)), + new Translation3d(0, Units.inchesToMeters(-19.625), Units.inchesToMeters(8.5)))); - // Simulated camera properties. These can be set to mimic your actual camera. 
- private static final int kResolutionWidth = 640; // pixels - private static final int kResolutionHeight = 480; // pixels - private static final Rotation2d kFOVDiag = Rotation2d.fromDegrees(100.0); // degrees - private static final double kAvgErrorPx = 0.2; - private static final double kErrorStdDevPx = 0.05; - private static final double kFPS = 25; - private static final double kAvgLatencyMs = 30; - private static final double kLatencyStdDevMs = 4; - private static final double kMinTargetArea = 0.1; // percentage (0 - 100) - private static final double kMaxLEDRange = 15; // meters - // ----- + // Simulated camera properties. These can be set to mimic your actual camera. + private static final int kResolutionWidth = 640; // pixels + private static final int kResolutionHeight = 480; // pixels + private static final Rotation2d kFOVDiag = Rotation2d.fromDegrees(100.0); // degrees + private static final double kAvgErrorPx = 0.2; + private static final double kErrorStdDevPx = 0.05; + private static final double kFPS = 25; + private static final double kAvgLatencyMs = 30; + private static final double kLatencyStdDevMs = 4; + private static final double kMinTargetArea = 0.1; // percentage (0 - 100) + private static final double kMaxLEDRange = 15; // meters + // ----- - // A simulated vision system which handles simulated cameras and targets. - private VisionSystemSim visionSim; - // The simulation of our PhotonCamera, which will simulate camera frames and target info. - private PhotonCameraSim cameraSim; + // A simulated vision system which handles simulated cameras and targets. + private VisionSystemSim visionSim; + // The simulation of our PhotonCamera, which will simulate camera frames and target info. + private PhotonCameraSim cameraSim; - public VisionSim(String name, PhotonCamera camera) { - visionSim = new VisionSystemSim(name); - // Make the vision target visible to this simulated field. 
- var visionTarget = new VisionTargetSim(TARGET_POSE, kTargetModel); - visionSim.addVisionTargets(visionTarget); + public VisionSim(String name, PhotonCamera camera) { + visionSim = new VisionSystemSim(name); + // Make the vision target visible to this simulated field. + var visionTarget = new VisionTargetSim(TARGET_POSE, kTargetModel); + visionSim.addVisionTargets(visionTarget); - // Create simulated camera properties from our defined constants. - var cameraProp = new SimCameraProperties(); - cameraProp.setCalibration(kResolutionWidth, kResolutionHeight, kFOVDiag); - cameraProp.setCalibError(kAvgErrorPx, kErrorStdDevPx); - cameraProp.setFPS(kFPS); - cameraProp.setAvgLatencyMs(kAvgLatencyMs); - cameraProp.setLatencyStdDevMs(kLatencyStdDevMs); - // Create a PhotonCameraSim which will update the linked PhotonCamera's values with visible - // targets. - cameraSim = new PhotonCameraSim(camera, cameraProp, kMinTargetArea, kMaxLEDRange); + // Create simulated camera properties from our defined constants. + var cameraProp = new SimCameraProperties(); + cameraProp.setCalibration(kResolutionWidth, kResolutionHeight, kFOVDiag); + cameraProp.setCalibError(kAvgErrorPx, kErrorStdDevPx); + cameraProp.setFPS(kFPS); + cameraProp.setAvgLatencyMs(kAvgLatencyMs); + cameraProp.setLatencyStdDevMs(kLatencyStdDevMs); + // Create a PhotonCameraSim which will update the linked PhotonCamera's values with visible + // targets. + cameraSim = new PhotonCameraSim(camera, cameraProp, kMinTargetArea, kMaxLEDRange); - // Add the simulated camera to view the targets on this simulated field. - visionSim.addCamera( - cameraSim, - new Transform3d( - new Translation3d(0, 0, CAMERA_HEIGHT_METERS), - new Rotation3d(0, -CAMERA_PITCH_RADIANS, 0))); + // Add the simulated camera to view the targets on this simulated field. 
+ visionSim.addCamera( + cameraSim, + new Transform3d( + new Translation3d(0, 0, CAMERA_HEIGHT_METERS), + new Rotation3d(0, -CAMERA_PITCH_RADIANS, 0))); - cameraSim.enableDrawWireframe(true); - } + cameraSim.enableDrawWireframe(true); + } - /** - * Update the simulated camera data based on its new field pose. - * - * @param simRobotPose The pose of the simulated robot - */ - public void update(Pose2d simRobotPose) { - visionSim.update(simRobotPose); - } + /** + * Update the simulated camera data based on its new field pose. + * + * @param simRobotPose The pose of the simulated robot + */ + public void update(Pose2d simRobotPose) { + visionSim.update(simRobotPose); + } - /** - * Resets the simulation back to a pre-defined pose. Useful to simulate the action of placing the - * robot onto a specific spot in the field (e.g. at the start of each match). - * - * @param pose - */ - public void resetPose(Pose2d pose) { - visionSim.resetRobotPose(pose); - } + /** + * Resets the simulation back to a pre-defined pose. Useful to simulate the action of placing the + * robot onto a specific spot in the field (e.g. at the start of each match). + * + * @param pose + */ + public void resetPose(Pose2d pose) { + visionSim.resetRobotPose(pose); + } } diff --git a/photonlib-java-examples/swervedriveposeestsim/build.gradle b/photonlib-java-examples/swervedriveposeestsim/build.gradle index d0be7323ec..a4d1b60579 100644 --- a/photonlib-java-examples/swervedriveposeestsim/build.gradle +++ b/photonlib-java-examples/swervedriveposeestsim/build.gradle @@ -78,7 +78,11 @@ wpi.sim.addDriverstation() // in order to make them all available at runtime. Also adding the manifest so WPILib // knows where to look for our Robot Class. jar { - from { configurations.runtimeClasspath.collect { it.isDirectory() ? it : zipTree(it) } } + from { + configurations.runtimeClasspath.collect { + it.isDirectory() ? 
it : zipTree(it) + } + } manifest edu.wpi.first.gradlerio.GradleRIOPlugin.javaManifest(ROBOT_MAIN_CLASS) duplicatesStrategy = DuplicatesStrategy.INCLUDE } diff --git a/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Constants.java b/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Constants.java index 1cea631e93..75fba6c5fc 100644 --- a/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Constants.java +++ b/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Constants.java @@ -38,105 +38,105 @@ import edu.wpi.first.math.util.Units; public class Constants { - public static class Vision { - public static final String kCameraName = "YOUR CAMERA NAME"; - // Cam mounted facing forward, half a meter forward of center, half a meter up from center. - public static final Transform3d kRobotToCam = - new Transform3d(new Translation3d(0.5, 0.0, 0.5), new Rotation3d(0, 0, 0)); + public static class Vision { + public static final String kCameraName = "YOUR CAMERA NAME"; + // Cam mounted facing forward, half a meter forward of center, half a meter up from center. + public static final Transform3d kRobotToCam = + new Transform3d(new Translation3d(0.5, 0.0, 0.5), new Rotation3d(0, 0, 0)); - // The layout of the AprilTags on the field - public static final AprilTagFieldLayout kTagLayout = - AprilTagFields.kDefaultField.loadAprilTagLayoutField(); + // The layout of the AprilTags on the field + public static final AprilTagFieldLayout kTagLayout = + AprilTagFields.kDefaultField.loadAprilTagLayoutField(); - // The standard deviations of our vision estimated poses, which affect correction rate - // (Fake values. Experiment and determine estimation noise on an actual robot.) 
- public static final Matrix kSingleTagStdDevs = VecBuilder.fill(4, 4, 8); - public static final Matrix kMultiTagStdDevs = VecBuilder.fill(0.5, 0.5, 1); - } + // The standard deviations of our vision estimated poses, which affect correction rate + // (Fake values. Experiment and determine estimation noise on an actual robot.) + public static final Matrix kSingleTagStdDevs = VecBuilder.fill(4, 4, 8); + public static final Matrix kMultiTagStdDevs = VecBuilder.fill(0.5, 0.5, 1); + } - public static class Swerve { - // Physical properties - public static final double kTrackWidth = Units.inchesToMeters(18.5); - public static final double kTrackLength = Units.inchesToMeters(18.5); - public static final double kRobotWidth = Units.inchesToMeters(25 + 3.25 * 2); - public static final double kRobotLength = Units.inchesToMeters(25 + 3.25 * 2); - public static final double kMaxLinearSpeed = Units.feetToMeters(15.5); - public static final double kMaxAngularSpeed = Units.rotationsToRadians(2); - public static final double kWheelDiameter = Units.inchesToMeters(4); - public static final double kWheelCircumference = kWheelDiameter * Math.PI; + public static class Swerve { + // Physical properties + public static final double kTrackWidth = Units.inchesToMeters(18.5); + public static final double kTrackLength = Units.inchesToMeters(18.5); + public static final double kRobotWidth = Units.inchesToMeters(25 + 3.25 * 2); + public static final double kRobotLength = Units.inchesToMeters(25 + 3.25 * 2); + public static final double kMaxLinearSpeed = Units.feetToMeters(15.5); + public static final double kMaxAngularSpeed = Units.rotationsToRadians(2); + public static final double kWheelDiameter = Units.inchesToMeters(4); + public static final double kWheelCircumference = kWheelDiameter * Math.PI; - public static final double kDriveGearRatio = 6.75; // 6.75:1 SDS MK4 L2 ratio - public static final double kSteerGearRatio = 12.8; // 12.8:1 + public static final double kDriveGearRatio = 6.75; // 
6.75:1 SDS MK4 L2 ratio + public static final double kSteerGearRatio = 12.8; // 12.8:1 - public static final double kDriveDistPerPulse = kWheelCircumference / 1024 / kDriveGearRatio; - public static final double kSteerRadPerPulse = 2 * Math.PI / 1024; + public static final double kDriveDistPerPulse = kWheelCircumference / 1024 / kDriveGearRatio; + public static final double kSteerRadPerPulse = 2 * Math.PI / 1024; - public enum ModuleConstants { - FL( // Front left - 1, 0, 0, 1, 1, 2, 3, 0, kTrackLength / 2, kTrackWidth / 2), - FR( // Front Right - 2, 2, 4, 5, 3, 6, 7, 0, kTrackLength / 2, -kTrackWidth / 2), - BL( // Back Left - 3, 4, 8, 9, 5, 10, 11, 0, -kTrackLength / 2, kTrackWidth / 2), - BR( // Back Right - 4, 6, 12, 13, 7, 14, 15, 0, -kTrackLength / 2, -kTrackWidth / 2); + public enum ModuleConstants { + FL( // Front left + 1, 0, 0, 1, 1, 2, 3, 0, kTrackLength / 2, kTrackWidth / 2), + FR( // Front Right + 2, 2, 4, 5, 3, 6, 7, 0, kTrackLength / 2, -kTrackWidth / 2), + BL( // Back Left + 3, 4, 8, 9, 5, 10, 11, 0, -kTrackLength / 2, kTrackWidth / 2), + BR( // Back Right + 4, 6, 12, 13, 7, 14, 15, 0, -kTrackLength / 2, -kTrackWidth / 2); - public final int moduleNum; - public final int driveMotorID; - public final int driveEncoderA; - public final int driveEncoderB; - public final int steerMotorID; - public final int steerEncoderA; - public final int steerEncoderB; - public final double angleOffset; - public final Translation2d centerOffset; + public final int moduleNum; + public final int driveMotorID; + public final int driveEncoderA; + public final int driveEncoderB; + public final int steerMotorID; + public final int steerEncoderA; + public final int steerEncoderB; + public final double angleOffset; + public final Translation2d centerOffset; - private ModuleConstants( - int moduleNum, - int driveMotorID, - int driveEncoderA, - int driveEncoderB, - int steerMotorID, - int steerEncoderA, - int steerEncoderB, - double angleOffset, - double xOffset, - double 
yOffset) { - this.moduleNum = moduleNum; - this.driveMotorID = driveMotorID; - this.driveEncoderA = driveEncoderA; - this.driveEncoderB = driveEncoderB; - this.steerMotorID = steerMotorID; - this.steerEncoderA = steerEncoderA; - this.steerEncoderB = steerEncoderB; - this.angleOffset = angleOffset; - centerOffset = new Translation2d(xOffset, yOffset); - } - } + private ModuleConstants( + int moduleNum, + int driveMotorID, + int driveEncoderA, + int driveEncoderB, + int steerMotorID, + int steerEncoderA, + int steerEncoderB, + double angleOffset, + double xOffset, + double yOffset) { + this.moduleNum = moduleNum; + this.driveMotorID = driveMotorID; + this.driveEncoderA = driveEncoderA; + this.driveEncoderB = driveEncoderB; + this.steerMotorID = steerMotorID; + this.steerEncoderA = steerEncoderA; + this.steerEncoderB = steerEncoderB; + this.angleOffset = angleOffset; + centerOffset = new Translation2d(xOffset, yOffset); + } + } - // Feedforward - // Linear drive feed forward - public static final SimpleMotorFeedforward kDriveFF = - new SimpleMotorFeedforward( // real - 0.25, // Voltage to break static friction - 2.5, // Volts per meter per second - 0.3 // Volts per meter per second squared - ); - // Steer feed forward - public static final SimpleMotorFeedforward kSteerFF = - new SimpleMotorFeedforward( // real - 0.5, // Voltage to break static friction - 0.25, // Volts per radian per second - 0.01 // Volts per radian per second squared - ); + // Feedforward + // Linear drive feed forward + public static final SimpleMotorFeedforward kDriveFF = + new SimpleMotorFeedforward( // real + 0.25, // Voltage to break static friction + 2.5, // Volts per meter per second + 0.3 // Volts per meter per second squared + ); + // Steer feed forward + public static final SimpleMotorFeedforward kSteerFF = + new SimpleMotorFeedforward( // real + 0.5, // Voltage to break static friction + 0.25, // Volts per radian per second + 0.01 // Volts per radian per second squared + ); - // PID - 
public static final double kDriveKP = 1; - public static final double kDriveKI = 0; - public static final double kDriveKD = 0; + // PID + public static final double kDriveKP = 1; + public static final double kDriveKI = 0; + public static final double kDriveKD = 0; - public static final double kSteerKP = 20; - public static final double kSteerKI = 0; - public static final double kSteerKD = 0.25; - } + public static final double kSteerKP = 20; + public static final double kSteerKI = 0; + public static final double kSteerKD = 0.25; + } } diff --git a/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Main.java b/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Main.java index f227c28da0..c420a5735a 100644 --- a/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Main.java +++ b/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Main.java @@ -27,9 +27,9 @@ import edu.wpi.first.wpilibj.RobotBase; public final class Main { - private Main() {} + private Main() {} - public static void main(String... args) { - RobotBase.startRobot(Robot::new); - } + public static void main(String... 
args) { + RobotBase.startRobot(Robot::new); + } } diff --git a/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Robot.java b/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Robot.java index d32d4d477c..712539e159 100644 --- a/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Robot.java +++ b/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Robot.java @@ -38,118 +38,118 @@ import java.util.Random; public class Robot extends TimedRobot { - private SwerveDrive drivetrain; - private Vision vision; - - private XboxController controller; - // Limit max speed - private final double kDriveSpeed = 0.6; - // Rudimentary limiting of drivetrain acceleration - private SlewRateLimiter forwardLimiter = new SlewRateLimiter(1.0 / 0.6); // 1 / x seconds to 100% - private SlewRateLimiter strafeLimiter = new SlewRateLimiter(1.0 / 0.6); - private SlewRateLimiter turnLimiter = new SlewRateLimiter(1.0 / 0.33); - - private Timer autoTimer = new Timer(); - private Random rand = new Random(4512); - - @Override - public void robotInit() { - drivetrain = new SwerveDrive(); - vision = new Vision(); - - controller = new XboxController(0); + private SwerveDrive drivetrain; + private Vision vision; + + private XboxController controller; + // Limit max speed + private final double kDriveSpeed = 0.6; + // Rudimentary limiting of drivetrain acceleration + private SlewRateLimiter forwardLimiter = new SlewRateLimiter(1.0 / 0.6); // 1 / x seconds to 100% + private SlewRateLimiter strafeLimiter = new SlewRateLimiter(1.0 / 0.6); + private SlewRateLimiter turnLimiter = new SlewRateLimiter(1.0 / 0.33); + + private Timer autoTimer = new Timer(); + private Random rand = new Random(4512); + + @Override + public void robotInit() { + drivetrain = new SwerveDrive(); + vision = new Vision(); + + controller = new XboxController(0); + } + + @Override + public void robotPeriodic() { + drivetrain.periodic(); + + // Correct pose 
estimate with vision measurements + var visionEst = vision.getEstimatedGlobalPose(); + visionEst.ifPresent( + est -> { + var estPose = est.estimatedPose.toPose2d(); + // Change our trust in the measurement based on the tags we can see + var estStdDevs = vision.getEstimationStdDevs(estPose); + + drivetrain.addVisionMeasurement( + est.estimatedPose.toPose2d(), est.timestampSeconds, estStdDevs); + }); + + // Apply a random offset to pose estimator to test vision correction + if (controller.getBButtonPressed()) { + var trf = + new Transform2d( + new Translation2d(rand.nextDouble() * 4 - 2, rand.nextDouble() * 4 - 2), + new Rotation2d(rand.nextDouble() * 2 * Math.PI)); + drivetrain.resetPose(drivetrain.getPose().plus(trf), false); } - @Override - public void robotPeriodic() { - drivetrain.periodic(); - - // Correct pose estimate with vision measurements - var visionEst = vision.getEstimatedGlobalPose(); - visionEst.ifPresent( - est -> { - var estPose = est.estimatedPose.toPose2d(); - // Change our trust in the measurement based on the tags we can see - var estStdDevs = vision.getEstimationStdDevs(estPose); - - drivetrain.addVisionMeasurement( - est.estimatedPose.toPose2d(), est.timestampSeconds, estStdDevs); - }); - - // Apply a random offset to pose estimator to test vision correction - if (controller.getBButtonPressed()) { - var trf = - new Transform2d( - new Translation2d(rand.nextDouble() * 4 - 2, rand.nextDouble() * 4 - 2), - new Rotation2d(rand.nextDouble() * 2 * Math.PI)); - drivetrain.resetPose(drivetrain.getPose().plus(trf), false); - } - - // Log values to the dashboard - drivetrain.log(); - } - - @Override - public void disabledPeriodic() { - drivetrain.stop(); - } - - @Override - public void autonomousInit() { - autoTimer.restart(); - var pose = new Pose2d(1, 1, new Rotation2d()); - drivetrain.resetPose(pose, true); - vision.resetSimPose(pose); - } - - @Override - public void autonomousPeriodic() { - // translate diagonally while spinning - if 
(autoTimer.get() < 10) { - drivetrain.drive(0.5, 0.5, 0.5, false); - } else { - autoTimer.stop(); - drivetrain.stop(); - } - } - - @Override - public void teleopPeriodic() { - // We will use an "arcade drive" scheme to turn joystick values into target robot speeds - // We want to get joystick values where pushing forward/left is positive - double forward = -controller.getLeftY() * kDriveSpeed; - if (Math.abs(forward) < 0.1) forward = 0; // deadband small values - forward = forwardLimiter.calculate(forward); // limit acceleration - double strafe = -controller.getLeftX() * kDriveSpeed; - if (Math.abs(strafe) < 0.1) strafe = 0; - strafe = strafeLimiter.calculate(strafe); - double turn = -controller.getRightX() * kDriveSpeed; - if (Math.abs(turn) < 0.1) turn = 0; - turn = turnLimiter.calculate(turn); - - // Convert from joystick values to real target speeds - forward *= Constants.Swerve.kMaxLinearSpeed; - strafe *= Constants.Swerve.kMaxLinearSpeed; - turn *= Constants.Swerve.kMaxLinearSpeed; - - // Command drivetrain motors based on target speeds - drivetrain.drive(forward, strafe, turn, true); - } - - @Override - public void simulationPeriodic() { - // Update drivetrain simulation - drivetrain.simulationPeriodic(); - - // Update camera simulation - vision.simulationPeriodic(drivetrain.getSimPose()); - - var debugField = vision.getSimDebugField(); - debugField.getObject("EstimatedRobot").setPose(drivetrain.getPose()); - debugField.getObject("EstimatedRobotModules").setPoses(drivetrain.getModulePoses()); - - // Calculate battery voltage sag due to current draw - RoboRioSim.setVInVoltage( - BatterySim.calculateDefaultBatteryLoadedVoltage(drivetrain.getCurrentDraw())); + // Log values to the dashboard + drivetrain.log(); + } + + @Override + public void disabledPeriodic() { + drivetrain.stop(); + } + + @Override + public void autonomousInit() { + autoTimer.restart(); + var pose = new Pose2d(1, 1, new Rotation2d()); + drivetrain.resetPose(pose, true); + 
vision.resetSimPose(pose); + } + + @Override + public void autonomousPeriodic() { + // translate diagonally while spinning + if (autoTimer.get() < 10) { + drivetrain.drive(0.5, 0.5, 0.5, false); + } else { + autoTimer.stop(); + drivetrain.stop(); } + } + + @Override + public void teleopPeriodic() { + // We will use an "arcade drive" scheme to turn joystick values into target robot speeds + // We want to get joystick values where pushing forward/left is positive + double forward = -controller.getLeftY() * kDriveSpeed; + if (Math.abs(forward) < 0.1) forward = 0; // deadband small values + forward = forwardLimiter.calculate(forward); // limit acceleration + double strafe = -controller.getLeftX() * kDriveSpeed; + if (Math.abs(strafe) < 0.1) strafe = 0; + strafe = strafeLimiter.calculate(strafe); + double turn = -controller.getRightX() * kDriveSpeed; + if (Math.abs(turn) < 0.1) turn = 0; + turn = turnLimiter.calculate(turn); + + // Convert from joystick values to real target speeds + forward *= Constants.Swerve.kMaxLinearSpeed; + strafe *= Constants.Swerve.kMaxLinearSpeed; + turn *= Constants.Swerve.kMaxLinearSpeed; + + // Command drivetrain motors based on target speeds + drivetrain.drive(forward, strafe, turn, true); + } + + @Override + public void simulationPeriodic() { + // Update drivetrain simulation + drivetrain.simulationPeriodic(); + + // Update camera simulation + vision.simulationPeriodic(drivetrain.getSimPose()); + + var debugField = vision.getSimDebugField(); + debugField.getObject("EstimatedRobot").setPose(drivetrain.getPose()); + debugField.getObject("EstimatedRobotModules").setPoses(drivetrain.getModulePoses()); + + // Calculate battery voltage sag due to current draw + RoboRioSim.setVInVoltage( + BatterySim.calculateDefaultBatteryLoadedVoltage(drivetrain.getCurrentDraw())); + } } diff --git a/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Vision.java 
b/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Vision.java index d1e88c9258..bc1f9f7fb7 100644 --- a/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Vision.java +++ b/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Vision.java @@ -44,119 +44,119 @@ import org.photonvision.targeting.PhotonPipelineResult; public class Vision { - private final PhotonCamera camera; - private final PhotonPoseEstimator photonEstimator; - private double lastEstTimestamp = 0; - - // Simulation - private PhotonCameraSim cameraSim; - private VisionSystemSim visionSim; - - public Vision() { - camera = new PhotonCamera(kCameraName); - - photonEstimator = - new PhotonPoseEstimator( - kTagLayout, PoseStrategy.MULTI_TAG_PNP_ON_COPROCESSOR, camera, kRobotToCam); - photonEstimator.setMultiTagFallbackStrategy(PoseStrategy.LOWEST_AMBIGUITY); - - // ----- Simulation - if (Robot.isSimulation()) { - // Create the vision system simulation which handles cameras and targets on the field. - visionSim = new VisionSystemSim("main"); - // Add all the AprilTags inside the tag layout as visible targets to this simulated field. - visionSim.addAprilTags(kTagLayout); - // Create simulated camera properties. These can be set to mimic your actual camera. - var cameraProp = new SimCameraProperties(); - cameraProp.setCalibration(960, 720, Rotation2d.fromDegrees(90)); - cameraProp.setCalibError(0.35, 0.10); - cameraProp.setFPS(15); - cameraProp.setAvgLatencyMs(50); - cameraProp.setLatencyStdDevMs(15); - // Create a PhotonCameraSim which will update the linked PhotonCamera's values with visible - // targets. - cameraSim = new PhotonCameraSim(camera, cameraProp); - // Add the simulated camera to view the targets on this simulated field. 
- visionSim.addCamera(cameraSim, kRobotToCam); - - cameraSim.enableDrawWireframe(true); - } - } + private final PhotonCamera camera; + private final PhotonPoseEstimator photonEstimator; + private double lastEstTimestamp = 0; - public PhotonPipelineResult getLatestResult() { - return camera.getLatestResult(); - } + // Simulation + private PhotonCameraSim cameraSim; + private VisionSystemSim visionSim; - /** - * The latest estimated robot pose on the field from vision data. This may be empty. This should - * only be called once per loop. - * - * @return An {@link EstimatedRobotPose} with an estimated pose, estimate timestamp, and targets - * used for estimation. - */ - public Optional getEstimatedGlobalPose() { - var visionEst = photonEstimator.update(); - double latestTimestamp = camera.getLatestResult().getTimestampSeconds(); - boolean newResult = Math.abs(latestTimestamp - lastEstTimestamp) > 1e-5; - if (Robot.isSimulation()) { - visionEst.ifPresentOrElse( - est -> - getSimDebugField() - .getObject("VisionEstimation") - .setPose(est.estimatedPose.toPose2d()), - () -> { - if (newResult) getSimDebugField().getObject("VisionEstimation").setPoses(); - }); - } - if (newResult) lastEstTimestamp = latestTimestamp; - return visionEst; - } + public Vision() { + camera = new PhotonCamera(kCameraName); - /** - * The standard deviations of the estimated pose from {@link #getEstimatedGlobalPose()}, for use - * with {@link edu.wpi.first.math.estimator.SwerveDrivePoseEstimator SwerveDrivePoseEstimator}. - * This should only be used when there are targets visible. - * - * @param estimatedPose The estimated pose to guess standard deviations for. 
- */ - public Matrix getEstimationStdDevs(Pose2d estimatedPose) { - var estStdDevs = kSingleTagStdDevs; - var targets = getLatestResult().getTargets(); - int numTags = 0; - double avgDist = 0; - for (var tgt : targets) { - var tagPose = photonEstimator.getFieldTags().getTagPose(tgt.getFiducialId()); - if (tagPose.isEmpty()) continue; - numTags++; - avgDist += - tagPose.get().toPose2d().getTranslation().getDistance(estimatedPose.getTranslation()); - } - if (numTags == 0) return estStdDevs; - avgDist /= numTags; - // Decrease std devs if multiple targets are visible - if (numTags > 1) estStdDevs = kMultiTagStdDevs; - // Increase std devs based on (average) distance - if (numTags == 1 && avgDist > 4) - estStdDevs = VecBuilder.fill(Double.MAX_VALUE, Double.MAX_VALUE, Double.MAX_VALUE); - else estStdDevs = estStdDevs.times(1 + (avgDist * avgDist / 30)); - - return estStdDevs; - } + photonEstimator = + new PhotonPoseEstimator( + kTagLayout, PoseStrategy.MULTI_TAG_PNP_ON_COPROCESSOR, camera, kRobotToCam); + photonEstimator.setMultiTagFallbackStrategy(PoseStrategy.LOWEST_AMBIGUITY); // ----- Simulation - - public void simulationPeriodic(Pose2d robotSimPose) { - visionSim.update(robotSimPose); + if (Robot.isSimulation()) { + // Create the vision system simulation which handles cameras and targets on the field. + visionSim = new VisionSystemSim("main"); + // Add all the AprilTags inside the tag layout as visible targets to this simulated field. + visionSim.addAprilTags(kTagLayout); + // Create simulated camera properties. These can be set to mimic your actual camera. + var cameraProp = new SimCameraProperties(); + cameraProp.setCalibration(960, 720, Rotation2d.fromDegrees(90)); + cameraProp.setCalibError(0.35, 0.10); + cameraProp.setFPS(15); + cameraProp.setAvgLatencyMs(50); + cameraProp.setLatencyStdDevMs(15); + // Create a PhotonCameraSim which will update the linked PhotonCamera's values with visible + // targets. 
+ cameraSim = new PhotonCameraSim(camera, cameraProp); + // Add the simulated camera to view the targets on this simulated field. + visionSim.addCamera(cameraSim, kRobotToCam); + + cameraSim.enableDrawWireframe(true); } - - /** Reset pose history of the robot in the vision system simulation. */ - public void resetSimPose(Pose2d pose) { - if (Robot.isSimulation()) visionSim.resetRobotPose(pose); + } + + public PhotonPipelineResult getLatestResult() { + return camera.getLatestResult(); + } + + /** + * The latest estimated robot pose on the field from vision data. This may be empty. This should + * only be called once per loop. + * + * @return An {@link EstimatedRobotPose} with an estimated pose, estimate timestamp, and targets + * used for estimation. + */ + public Optional getEstimatedGlobalPose() { + var visionEst = photonEstimator.update(); + double latestTimestamp = camera.getLatestResult().getTimestampSeconds(); + boolean newResult = Math.abs(latestTimestamp - lastEstTimestamp) > 1e-5; + if (Robot.isSimulation()) { + visionEst.ifPresentOrElse( + est -> + getSimDebugField() + .getObject("VisionEstimation") + .setPose(est.estimatedPose.toPose2d()), + () -> { + if (newResult) getSimDebugField().getObject("VisionEstimation").setPoses(); + }); } - - /** A Field2d for visualizing our robot and objects on the field. */ - public Field2d getSimDebugField() { - if (!Robot.isSimulation()) return null; - return visionSim.getDebugField(); + if (newResult) lastEstTimestamp = latestTimestamp; + return visionEst; + } + + /** + * The standard deviations of the estimated pose from {@link #getEstimatedGlobalPose()}, for use + * with {@link edu.wpi.first.math.estimator.SwerveDrivePoseEstimator SwerveDrivePoseEstimator}. + * This should only be used when there are targets visible. + * + * @param estimatedPose The estimated pose to guess standard deviations for. 
+ */ + public Matrix getEstimationStdDevs(Pose2d estimatedPose) { + var estStdDevs = kSingleTagStdDevs; + var targets = getLatestResult().getTargets(); + int numTags = 0; + double avgDist = 0; + for (var tgt : targets) { + var tagPose = photonEstimator.getFieldTags().getTagPose(tgt.getFiducialId()); + if (tagPose.isEmpty()) continue; + numTags++; + avgDist += + tagPose.get().toPose2d().getTranslation().getDistance(estimatedPose.getTranslation()); } + if (numTags == 0) return estStdDevs; + avgDist /= numTags; + // Decrease std devs if multiple targets are visible + if (numTags > 1) estStdDevs = kMultiTagStdDevs; + // Increase std devs based on (average) distance + if (numTags == 1 && avgDist > 4) + estStdDevs = VecBuilder.fill(Double.MAX_VALUE, Double.MAX_VALUE, Double.MAX_VALUE); + else estStdDevs = estStdDevs.times(1 + (avgDist * avgDist / 30)); + + return estStdDevs; + } + + // ----- Simulation + + public void simulationPeriodic(Pose2d robotSimPose) { + visionSim.update(robotSimPose); + } + + /** Reset pose history of the robot in the vision system simulation. */ + public void resetSimPose(Pose2d pose) { + if (Robot.isSimulation()) visionSim.resetRobotPose(pose); + } + + /** A Field2d for visualizing our robot and objects on the field. 
*/ + public Field2d getSimDebugField() { + if (!Robot.isSimulation()) return null; + return visionSim.getDebugField(); + } } diff --git a/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/subsystems/drivetrain/SwerveDrive.java b/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/subsystems/drivetrain/SwerveDrive.java index 21911c997d..86a2058732 100644 --- a/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/subsystems/drivetrain/SwerveDrive.java +++ b/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/subsystems/drivetrain/SwerveDrive.java @@ -45,290 +45,290 @@ import frc.robot.Robot; public class SwerveDrive { - // Construct the swerve modules with their respective constants. - // The SwerveModule class will handle all the details of controlling the modules. - private final SwerveModule[] swerveMods = { - new SwerveModule(ModuleConstants.FL), - new SwerveModule(ModuleConstants.FR), - new SwerveModule(ModuleConstants.BL), - new SwerveModule(ModuleConstants.BR) - }; - - // The kinematics for translating between robot chassis speeds and module states. - private final SwerveDriveKinematics kinematics = - new SwerveDriveKinematics( - swerveMods[0].getModuleConstants().centerOffset, - swerveMods[1].getModuleConstants().centerOffset, - swerveMods[2].getModuleConstants().centerOffset, - swerveMods[3].getModuleConstants().centerOffset); - - private final ADXRS450_Gyro gyro = new ADXRS450_Gyro(Port.kOnboardCS0); - - // The robot pose estimator for tracking swerve odometry and applying vision corrections. - private final SwerveDrivePoseEstimator poseEstimator; - - private ChassisSpeeds targetChassisSpeeds = new ChassisSpeeds(); + // Construct the swerve modules with their respective constants. + // The SwerveModule class will handle all the details of controlling the modules. 
+ private final SwerveModule[] swerveMods = { + new SwerveModule(ModuleConstants.FL), + new SwerveModule(ModuleConstants.FR), + new SwerveModule(ModuleConstants.BL), + new SwerveModule(ModuleConstants.BR) + }; + + // The kinematics for translating between robot chassis speeds and module states. + private final SwerveDriveKinematics kinematics = + new SwerveDriveKinematics( + swerveMods[0].getModuleConstants().centerOffset, + swerveMods[1].getModuleConstants().centerOffset, + swerveMods[2].getModuleConstants().centerOffset, + swerveMods[3].getModuleConstants().centerOffset); + + private final ADXRS450_Gyro gyro = new ADXRS450_Gyro(Port.kOnboardCS0); + + // The robot pose estimator for tracking swerve odometry and applying vision corrections. + private final SwerveDrivePoseEstimator poseEstimator; + + private ChassisSpeeds targetChassisSpeeds = new ChassisSpeeds(); + + // ----- Simulation + private final ADXRS450_GyroSim gyroSim; + private final SwerveDriveSim swerveDriveSim; + private double totalCurrentDraw = 0; + + public SwerveDrive() { + // Define the standard deviations for the pose estimator, which determine how fast the pose + // estimate converges to the vision measurement. This should depend on the vision measurement + // noise + // and how many or how frequently vision measurements are applied to the pose estimator. + var stateStdDevs = VecBuilder.fill(0.1, 0.1, 0.1); + var visionStdDevs = VecBuilder.fill(1, 1, 1); + poseEstimator = + new SwerveDrivePoseEstimator( + kinematics, + getGyroYaw(), + getModulePositions(), + new Pose2d(), + stateStdDevs, + visionStdDevs); // ----- Simulation - private final ADXRS450_GyroSim gyroSim; - private final SwerveDriveSim swerveDriveSim; - private double totalCurrentDraw = 0; - - public SwerveDrive() { - // Define the standard deviations for the pose estimator, which determine how fast the pose - // estimate converges to the vision measurement. 
This should depend on the vision measurement - // noise - // and how many or how frequently vision measurements are applied to the pose estimator. - var stateStdDevs = VecBuilder.fill(0.1, 0.1, 0.1); - var visionStdDevs = VecBuilder.fill(1, 1, 1); - poseEstimator = - new SwerveDrivePoseEstimator( - kinematics, - getGyroYaw(), - getModulePositions(), - new Pose2d(), - stateStdDevs, - visionStdDevs); - - // ----- Simulation - gyroSim = new ADXRS450_GyroSim(gyro); - swerveDriveSim = - new SwerveDriveSim( - kDriveFF, - DCMotor.getFalcon500(1), - kDriveGearRatio, - kWheelDiameter / 2.0, - kSteerFF, - DCMotor.getFalcon500(1), - kSteerGearRatio, - kinematics); - } - - public void periodic() { - for (SwerveModule module : swerveMods) { - module.periodic(); - } - - // Update the odometry of the swerve drive using the wheel encoders and gyro. - poseEstimator.update(getGyroYaw(), getModulePositions()); - } - - /** - * Basic drive control. A target field-relative ChassisSpeeds (vx, vy, omega) is converted to - * specific swerve module states. - * - * @param vxMeters X velocity (forwards/backwards) - * @param vyMeters Y velocity (strafe left/right) - * @param omegaRadians Angular velocity (rotation CCW+) - * @param openLoop If swerve modules should use feedforward only and ignore velocity feedback - * control. 
- */ - public void drive(double vxMeters, double vyMeters, double omegaRadians, boolean openLoop) { - var targetChassisSpeeds = - ChassisSpeeds.fromFieldRelativeSpeeds(vxMeters, vyMeters, omegaRadians, getHeading()); - setChassisSpeeds(targetChassisSpeeds, openLoop, false); + gyroSim = new ADXRS450_GyroSim(gyro); + swerveDriveSim = + new SwerveDriveSim( + kDriveFF, + DCMotor.getFalcon500(1), + kDriveGearRatio, + kWheelDiameter / 2.0, + kSteerFF, + DCMotor.getFalcon500(1), + kSteerGearRatio, + kinematics); + } + + public void periodic() { + for (SwerveModule module : swerveMods) { + module.periodic(); } - /** - * Command the swerve drive to the desired chassis speeds by converting them to swerve module - * states and using {@link #setModuleStates(SwerveModuleState[], boolean)}. - * - * @param targetChassisSpeeds Target robot-relative chassis speeds (vx, vy, omega). - * @param openLoop If swerve modules should use feedforward only and ignore velocity feedback - * control. - * @param steerInPlace If modules should steer to the target angle when target velocity is 0. - */ - public void setChassisSpeeds( - ChassisSpeeds targetChassisSpeeds, boolean openLoop, boolean steerInPlace) { - setModuleStates(kinematics.toSwerveModuleStates(targetChassisSpeeds), openLoop, steerInPlace); - this.targetChassisSpeeds = targetChassisSpeeds; + // Update the odometry of the swerve drive using the wheel encoders and gyro. + poseEstimator.update(getGyroYaw(), getModulePositions()); + } + + /** + * Basic drive control. A target field-relative ChassisSpeeds (vx, vy, omega) is converted to + * specific swerve module states. + * + * @param vxMeters X velocity (forwards/backwards) + * @param vyMeters Y velocity (strafe left/right) + * @param omegaRadians Angular velocity (rotation CCW+) + * @param openLoop If swerve modules should use feedforward only and ignore velocity feedback + * control. 
+ */ + public void drive(double vxMeters, double vyMeters, double omegaRadians, boolean openLoop) { + var targetChassisSpeeds = + ChassisSpeeds.fromFieldRelativeSpeeds(vxMeters, vyMeters, omegaRadians, getHeading()); + setChassisSpeeds(targetChassisSpeeds, openLoop, false); + } + + /** + * Command the swerve drive to the desired chassis speeds by converting them to swerve module + * states and using {@link #setModuleStates(SwerveModuleState[], boolean)}. + * + * @param targetChassisSpeeds Target robot-relative chassis speeds (vx, vy, omega). + * @param openLoop If swerve modules should use feedforward only and ignore velocity feedback + * control. + * @param steerInPlace If modules should steer to the target angle when target velocity is 0. + */ + public void setChassisSpeeds( + ChassisSpeeds targetChassisSpeeds, boolean openLoop, boolean steerInPlace) { + setModuleStates(kinematics.toSwerveModuleStates(targetChassisSpeeds), openLoop, steerInPlace); + this.targetChassisSpeeds = targetChassisSpeeds; + } + + /** + * Command the swerve modules to the desired states. Velocites exceeding the maximum speed will be + * desaturated (while preserving the ratios between modules). + * + * @param openLoop If swerve modules should use feedforward only and ignore velocity feedback + * control. + * @param steerInPlace If modules should steer to the target angle when target velocity is 0. + */ + public void setModuleStates( + SwerveModuleState[] desiredStates, boolean openLoop, boolean steerInPlace) { + SwerveDriveKinematics.desaturateWheelSpeeds(desiredStates, kMaxLinearSpeed); + for (int i = 0; i < swerveMods.length; i++) { + swerveMods[i].setDesiredState(desiredStates[i], openLoop, steerInPlace); } - - /** - * Command the swerve modules to the desired states. Velocites exceeding the maximum speed will be - * desaturated (while preserving the ratios between modules). - * - * @param openLoop If swerve modules should use feedforward only and ignore velocity feedback - * control. 
- * @param steerInPlace If modules should steer to the target angle when target velocity is 0. - */ - public void setModuleStates( - SwerveModuleState[] desiredStates, boolean openLoop, boolean steerInPlace) { - SwerveDriveKinematics.desaturateWheelSpeeds(desiredStates, kMaxLinearSpeed); - for (int i = 0; i < swerveMods.length; i++) { - swerveMods[i].setDesiredState(desiredStates[i], openLoop, steerInPlace); - } - } - - /** Stop the swerve drive. */ - public void stop() { - drive(0, 0, 0, true); - } - - /** See {@link SwerveDrivePoseEstimator#addVisionMeasurement(Pose2d, double)}. */ - public void addVisionMeasurement(Pose2d visionMeasurement, double timestampSeconds) { - poseEstimator.addVisionMeasurement(visionMeasurement, timestampSeconds); - } - - /** See {@link SwerveDrivePoseEstimator#addVisionMeasurement(Pose2d, double, Matrix)}. */ - public void addVisionMeasurement( - Pose2d visionMeasurement, double timestampSeconds, Matrix stdDevs) { - poseEstimator.addVisionMeasurement(visionMeasurement, timestampSeconds, stdDevs); - } - - /** - * Reset the estimated pose of the swerve drive on the field. - * - * @param pose New robot pose. - * @param resetSimPose If the simulated robot pose should also be reset. This effectively - * teleports the robot and should only be used during the setup of the simulation world. - */ - public void resetPose(Pose2d pose, boolean resetSimPose) { - if (resetSimPose) { - swerveDriveSim.reset(pose, false); - // we shouldnt realistically be resetting pose after startup, but we will handle it anyway for - // testing - for (int i = 0; i < swerveMods.length; i++) { - swerveMods[i].simulationUpdate(0, 0, 0, 0, 0, 0); - } - gyroSim.setAngle(-pose.getRotation().getDegrees()); - gyroSim.setRate(0); - } - - poseEstimator.resetPosition(getGyroYaw(), getModulePositions(), pose); + } + + /** Stop the swerve drive. 
*/ + public void stop() { + drive(0, 0, 0, true); + } + + /** See {@link SwerveDrivePoseEstimator#addVisionMeasurement(Pose2d, double)}. */ + public void addVisionMeasurement(Pose2d visionMeasurement, double timestampSeconds) { + poseEstimator.addVisionMeasurement(visionMeasurement, timestampSeconds); + } + + /** See {@link SwerveDrivePoseEstimator#addVisionMeasurement(Pose2d, double, Matrix)}. */ + public void addVisionMeasurement( + Pose2d visionMeasurement, double timestampSeconds, Matrix stdDevs) { + poseEstimator.addVisionMeasurement(visionMeasurement, timestampSeconds, stdDevs); + } + + /** + * Reset the estimated pose of the swerve drive on the field. + * + * @param pose New robot pose. + * @param resetSimPose If the simulated robot pose should also be reset. This effectively + * teleports the robot and should only be used during the setup of the simulation world. + */ + public void resetPose(Pose2d pose, boolean resetSimPose) { + if (resetSimPose) { + swerveDriveSim.reset(pose, false); + // we shouldnt realistically be resetting pose after startup, but we will handle it anyway for + // testing + for (int i = 0; i < swerveMods.length; i++) { + swerveMods[i].simulationUpdate(0, 0, 0, 0, 0, 0); + } + gyroSim.setAngle(-pose.getRotation().getDegrees()); + gyroSim.setRate(0); } - /** Get the estimated pose of the swerve drive on the field. */ - public Pose2d getPose() { - return poseEstimator.getEstimatedPosition(); - } - - /** The heading of the swerve drive's estimated pose on the field. */ - public Rotation2d getHeading() { - return getPose().getRotation(); - } - - /** Raw gyro yaw (this may not match the field heading!). */ - public Rotation2d getGyroYaw() { - return gyro.getRotation2d(); - } - - /** Get the chassis speeds of the robot (vx, vy, omega) from the swerve module states. 
*/ - public ChassisSpeeds getChassisSpeeds() { - return kinematics.toChassisSpeeds(getModuleStates()); - } - - /** - * Get the SwerveModuleState of each swerve module (speed, angle). The returned array order - * matches the kinematics module order. - */ - public SwerveModuleState[] getModuleStates() { - return new SwerveModuleState[] { - swerveMods[0].getState(), - swerveMods[1].getState(), - swerveMods[2].getState(), - swerveMods[3].getState() - }; - } - - /** - * Get the SwerveModulePosition of each swerve module (position, angle). The returned array order - * matches the kinematics module order. - */ - public SwerveModulePosition[] getModulePositions() { - return new SwerveModulePosition[] { - swerveMods[0].getPosition(), - swerveMods[1].getPosition(), - swerveMods[2].getPosition(), - swerveMods[3].getPosition() - }; + poseEstimator.resetPosition(getGyroYaw(), getModulePositions(), pose); + } + + /** Get the estimated pose of the swerve drive on the field. */ + public Pose2d getPose() { + return poseEstimator.getEstimatedPosition(); + } + + /** The heading of the swerve drive's estimated pose on the field. */ + public Rotation2d getHeading() { + return getPose().getRotation(); + } + + /** Raw gyro yaw (this may not match the field heading!). */ + public Rotation2d getGyroYaw() { + return gyro.getRotation2d(); + } + + /** Get the chassis speeds of the robot (vx, vy, omega) from the swerve module states. */ + public ChassisSpeeds getChassisSpeeds() { + return kinematics.toChassisSpeeds(getModuleStates()); + } + + /** + * Get the SwerveModuleState of each swerve module (speed, angle). The returned array order + * matches the kinematics module order. + */ + public SwerveModuleState[] getModuleStates() { + return new SwerveModuleState[] { + swerveMods[0].getState(), + swerveMods[1].getState(), + swerveMods[2].getState(), + swerveMods[3].getState() + }; + } + + /** + * Get the SwerveModulePosition of each swerve module (position, angle). 
The returned array order + * matches the kinematics module order. + */ + public SwerveModulePosition[] getModulePositions() { + return new SwerveModulePosition[] { + swerveMods[0].getPosition(), + swerveMods[1].getPosition(), + swerveMods[2].getPosition(), + swerveMods[3].getPosition() + }; + } + + /** + * Get the Pose2d of each swerve module based on kinematics and current robot pose. The returned + * array order matches the kinematics module order. + */ + public Pose2d[] getModulePoses() { + Pose2d[] modulePoses = new Pose2d[swerveMods.length]; + for (int i = 0; i < swerveMods.length; i++) { + var module = swerveMods[i]; + modulePoses[i] = + getPose() + .transformBy( + new Transform2d( + module.getModuleConstants().centerOffset, module.getAbsoluteHeading())); } - - /** - * Get the Pose2d of each swerve module based on kinematics and current robot pose. The returned - * array order matches the kinematics module order. - */ - public Pose2d[] getModulePoses() { - Pose2d[] modulePoses = new Pose2d[swerveMods.length]; - for (int i = 0; i < swerveMods.length; i++) { - var module = swerveMods[i]; - modulePoses[i] = - getPose() - .transformBy( - new Transform2d( - module.getModuleConstants().centerOffset, module.getAbsoluteHeading())); - } - return modulePoses; + return modulePoses; + } + + /** Log various drivetrain values to the dashboard. 
*/ + public void log() { + String table = "Drive/"; + Pose2d pose = getPose(); + SmartDashboard.putNumber(table + "X", pose.getX()); + SmartDashboard.putNumber(table + "Y", pose.getY()); + SmartDashboard.putNumber(table + "Heading", pose.getRotation().getDegrees()); + ChassisSpeeds chassisSpeeds = getChassisSpeeds(); + SmartDashboard.putNumber(table + "VX", chassisSpeeds.vxMetersPerSecond); + SmartDashboard.putNumber(table + "VY", chassisSpeeds.vyMetersPerSecond); + SmartDashboard.putNumber( + table + "Omega Degrees", Math.toDegrees(chassisSpeeds.omegaRadiansPerSecond)); + SmartDashboard.putNumber(table + "Target VX", targetChassisSpeeds.vxMetersPerSecond); + SmartDashboard.putNumber(table + "Target VY", targetChassisSpeeds.vyMetersPerSecond); + SmartDashboard.putNumber( + table + "Target Omega Degrees", Math.toDegrees(targetChassisSpeeds.omegaRadiansPerSecond)); + + for (SwerveModule module : swerveMods) { + module.log(); } + } - /** Log various drivetrain values to the dashboard. */ - public void log() { - String table = "Drive/"; - Pose2d pose = getPose(); - SmartDashboard.putNumber(table + "X", pose.getX()); - SmartDashboard.putNumber(table + "Y", pose.getY()); - SmartDashboard.putNumber(table + "Heading", pose.getRotation().getDegrees()); - ChassisSpeeds chassisSpeeds = getChassisSpeeds(); - SmartDashboard.putNumber(table + "VX", chassisSpeeds.vxMetersPerSecond); - SmartDashboard.putNumber(table + "VY", chassisSpeeds.vyMetersPerSecond); - SmartDashboard.putNumber( - table + "Omega Degrees", Math.toDegrees(chassisSpeeds.omegaRadiansPerSecond)); - SmartDashboard.putNumber(table + "Target VX", targetChassisSpeeds.vxMetersPerSecond); - SmartDashboard.putNumber(table + "Target VY", targetChassisSpeeds.vyMetersPerSecond); - SmartDashboard.putNumber( - table + "Target Omega Degrees", Math.toDegrees(targetChassisSpeeds.omegaRadiansPerSecond)); - - for (SwerveModule module : swerveMods) { - module.log(); - } - } + // ----- Simulation - // ----- Simulation - - public 
void simulationPeriodic() { - // Pass commanded motor voltages into swerve drive simulation - double[] driveInputs = new double[swerveMods.length]; - double[] steerInputs = new double[swerveMods.length]; - for (int i = 0; i < swerveMods.length; i++) { - driveInputs[i] = swerveMods[i].getDriveVoltage(); - steerInputs[i] = swerveMods[i].getSteerVoltage(); - } - swerveDriveSim.setDriveInputs(driveInputs); - swerveDriveSim.setSteerInputs(steerInputs); - - // Simulate one timestep - swerveDriveSim.update(Robot.kDefaultPeriod); - - // Update module and gyro values with simulated values - var driveStates = swerveDriveSim.getDriveStates(); - var steerStates = swerveDriveSim.getSteerStates(); - totalCurrentDraw = 0; - double[] driveCurrents = swerveDriveSim.getDriveCurrentDraw(); - for (double current : driveCurrents) totalCurrentDraw += current; - double[] steerCurrents = swerveDriveSim.getSteerCurrentDraw(); - for (double current : steerCurrents) totalCurrentDraw += current; - for (int i = 0; i < swerveMods.length; i++) { - double drivePos = driveStates.get(i).get(0, 0); - double driveRate = driveStates.get(i).get(1, 0); - double steerPos = steerStates.get(i).get(0, 0); - double steerRate = steerStates.get(i).get(1, 0); - swerveMods[i].simulationUpdate( - drivePos, driveRate, driveCurrents[i], steerPos, steerRate, steerCurrents[i]); - } - - gyroSim.setRate(-swerveDriveSim.getOmegaRadsPerSec()); - gyroSim.setAngle(-swerveDriveSim.getPose().getRotation().getDegrees()); + public void simulationPeriodic() { + // Pass commanded motor voltages into swerve drive simulation + double[] driveInputs = new double[swerveMods.length]; + double[] steerInputs = new double[swerveMods.length]; + for (int i = 0; i < swerveMods.length; i++) { + driveInputs[i] = swerveMods[i].getDriveVoltage(); + steerInputs[i] = swerveMods[i].getSteerVoltage(); } - - /** - * The "actual" pose of the robot on the field used in simulation. This is different from the - * swerve drive's estimated pose. 
- */ - public Pose2d getSimPose() { - return swerveDriveSim.getPose(); + swerveDriveSim.setDriveInputs(driveInputs); + swerveDriveSim.setSteerInputs(steerInputs); + + // Simulate one timestep + swerveDriveSim.update(Robot.kDefaultPeriod); + + // Update module and gyro values with simulated values + var driveStates = swerveDriveSim.getDriveStates(); + var steerStates = swerveDriveSim.getSteerStates(); + totalCurrentDraw = 0; + double[] driveCurrents = swerveDriveSim.getDriveCurrentDraw(); + for (double current : driveCurrents) totalCurrentDraw += current; + double[] steerCurrents = swerveDriveSim.getSteerCurrentDraw(); + for (double current : steerCurrents) totalCurrentDraw += current; + for (int i = 0; i < swerveMods.length; i++) { + double drivePos = driveStates.get(i).get(0, 0); + double driveRate = driveStates.get(i).get(1, 0); + double steerPos = steerStates.get(i).get(0, 0); + double steerRate = steerStates.get(i).get(1, 0); + swerveMods[i].simulationUpdate( + drivePos, driveRate, driveCurrents[i], steerPos, steerRate, steerCurrents[i]); } - public double getCurrentDraw() { - return totalCurrentDraw; - } + gyroSim.setRate(-swerveDriveSim.getOmegaRadsPerSec()); + gyroSim.setAngle(-swerveDriveSim.getPose().getRotation().getDegrees()); + } + + /** + * The "actual" pose of the robot on the field used in simulation. This is different from the + * swerve drive's estimated pose. 
+ */ + public Pose2d getSimPose() { + return swerveDriveSim.getPose(); + } + + public double getCurrentDraw() { + return totalCurrentDraw; + } } diff --git a/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/subsystems/drivetrain/SwerveDriveSim.java b/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/subsystems/drivetrain/SwerveDriveSim.java index aff69fc703..841774f0d3 100644 --- a/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/subsystems/drivetrain/SwerveDriveSim.java +++ b/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/subsystems/drivetrain/SwerveDriveSim.java @@ -58,437 +58,437 @@ * the Sim GUI's field. */ public class SwerveDriveSim { - private final LinearSystem drivePlant; - private final double driveKs; - private final DCMotor driveMotor; - private final double driveGearing; - private final double driveWheelRadius; - private final LinearSystem steerPlant; - private final double steerKs; - private final DCMotor steerMotor; - private final double steerGearing; - - private final SwerveDriveKinematics kinematics; - private final int numModules; - - private final double[] driveInputs; - private final List> driveStates; - private final double[] steerInputs; - private final List> steerStates; - - private final Random rand = new Random(); - - // noiseless "actual" pose of the robot on the field - private Pose2d pose = new Pose2d(); - private double omegaRadsPerSec = 0; - - /** - * Creates a swerve drive simulation. - * - * @param driveFF The feedforward for the drive motors of this swerve drive. This should be in - * units of meters. - * @param driveMotor The DCMotor model for the drive motor(s) of this swerve drive's modules. This - * should not have any gearing applied. - * @param driveGearing The gear ratio of the drive system. Positive values indicate a reduction - * where one rotation of the drive wheel equals driveGearing rotations of the drive motor. 
- * @param driveWheelRadius The radius of the module's driving wheel. - * @param steerFF The feedforward for the steer motors of this swerve drive. This should be in - * units of radians. - * @param steerMotor The DCMotor model for the steer motor(s) of this swerve drive's modules. This - * should not have any gearing applied. - * @param steerGearing The gear ratio of the steer system. Positive values indicate a reduction - * where one rotation of the module heading/azimuth equals steerGearing rotations of the steer - * motor. - * @param kinematics The kinematics for this swerve drive. All swerve module information used in - * this class should match the order of the modules this kinematics object was constructed - * with. - */ - public SwerveDriveSim( - SimpleMotorFeedforward driveFF, - DCMotor driveMotor, - double driveGearing, - double driveWheelRadius, - SimpleMotorFeedforward steerFF, - DCMotor steerMotor, - double steerGearing, - SwerveDriveKinematics kinematics) { - this( - new LinearSystem( - Matrix.mat(Nat.N2(), Nat.N2()).fill(0.0, 1.0, 0.0, -driveFF.kv / driveFF.ka), - VecBuilder.fill(0.0, 1.0 / driveFF.ka), - Matrix.mat(Nat.N2(), Nat.N2()).fill(1.0, 0.0, 0.0, 1.0), - VecBuilder.fill(0.0, 0.0)), - driveFF.ks, - driveMotor, - driveGearing, - driveWheelRadius, - new LinearSystem( - Matrix.mat(Nat.N2(), Nat.N2()).fill(0.0, 1.0, 0.0, -steerFF.kv / steerFF.ka), - VecBuilder.fill(0.0, 1.0 / steerFF.ka), - Matrix.mat(Nat.N2(), Nat.N2()).fill(1.0, 0.0, 0.0, 1.0), - VecBuilder.fill(0.0, 0.0)), - steerFF.ks, - steerMotor, - steerGearing, - kinematics); + private final LinearSystem drivePlant; + private final double driveKs; + private final DCMotor driveMotor; + private final double driveGearing; + private final double driveWheelRadius; + private final LinearSystem steerPlant; + private final double steerKs; + private final DCMotor steerMotor; + private final double steerGearing; + + private final SwerveDriveKinematics kinematics; + private final int numModules; + + 
private final double[] driveInputs; + private final List> driveStates; + private final double[] steerInputs; + private final List> steerStates; + + private final Random rand = new Random(); + + // noiseless "actual" pose of the robot on the field + private Pose2d pose = new Pose2d(); + private double omegaRadsPerSec = 0; + + /** + * Creates a swerve drive simulation. + * + * @param driveFF The feedforward for the drive motors of this swerve drive. This should be in + * units of meters. + * @param driveMotor The DCMotor model for the drive motor(s) of this swerve drive's modules. This + * should not have any gearing applied. + * @param driveGearing The gear ratio of the drive system. Positive values indicate a reduction + * where one rotation of the drive wheel equals driveGearing rotations of the drive motor. + * @param driveWheelRadius The radius of the module's driving wheel. + * @param steerFF The feedforward for the steer motors of this swerve drive. This should be in + * units of radians. + * @param steerMotor The DCMotor model for the steer motor(s) of this swerve drive's modules. This + * should not have any gearing applied. + * @param steerGearing The gear ratio of the steer system. Positive values indicate a reduction + * where one rotation of the module heading/azimuth equals steerGearing rotations of the steer + * motor. + * @param kinematics The kinematics for this swerve drive. All swerve module information used in + * this class should match the order of the modules this kinematics object was constructed + * with. 
+ */ + public SwerveDriveSim( + SimpleMotorFeedforward driveFF, + DCMotor driveMotor, + double driveGearing, + double driveWheelRadius, + SimpleMotorFeedforward steerFF, + DCMotor steerMotor, + double steerGearing, + SwerveDriveKinematics kinematics) { + this( + new LinearSystem( + Matrix.mat(Nat.N2(), Nat.N2()).fill(0.0, 1.0, 0.0, -driveFF.kv / driveFF.ka), + VecBuilder.fill(0.0, 1.0 / driveFF.ka), + Matrix.mat(Nat.N2(), Nat.N2()).fill(1.0, 0.0, 0.0, 1.0), + VecBuilder.fill(0.0, 0.0)), + driveFF.ks, + driveMotor, + driveGearing, + driveWheelRadius, + new LinearSystem( + Matrix.mat(Nat.N2(), Nat.N2()).fill(0.0, 1.0, 0.0, -steerFF.kv / steerFF.ka), + VecBuilder.fill(0.0, 1.0 / steerFF.ka), + Matrix.mat(Nat.N2(), Nat.N2()).fill(1.0, 0.0, 0.0, 1.0), + VecBuilder.fill(0.0, 0.0)), + steerFF.ks, + steerMotor, + steerGearing, + kinematics); + } + + /** + * Creates a swerve drive simulation. + * + * @param drivePlant The {@link LinearSystem} representing a swerve module's drive system. The + * state should be in units of meters and input in volts. + * @param driveKs The static gain in volts of the drive system's feedforward, or the minimum + * voltage to cause motion. Set this to 0 to ignore static friction. + * @param driveMotor The DCMotor model for the drive motor(s) of this swerve drive's modules. This + * should not have any gearing applied. + * @param driveGearing The gear ratio of the drive system. Positive values indicate a reduction + * where one rotation of the drive wheel equals driveGearing rotations of the drive motor. + * @param driveWheelRadius The radius of the module's driving wheel. + * @param steerPlant The {@link LinearSystem} representing a swerve module's steer system. The + * state should be in units of radians and input in volts. + * @param steerKs The static gain in volts of the steer system's feedforward, or the minimum + * voltage to cause motion. Set this to 0 to ignore static friction. 
+ * @param steerMotor The DCMotor model for the steer motor(s) of this swerve drive's modules. This + * should not have any gearing applied. + * @param steerGearing The gear ratio of the steer system. Positive values indicate a reduction + * where one rotation of the module heading/azimuth equals steerGearing rotations of the steer + * motor. + * @param kinematics The kinematics for this swerve drive. All swerve module information used in + * this class should match the order of the modules this kinematics object was constructed + * with. + */ + public SwerveDriveSim( + LinearSystem drivePlant, + double driveKs, + DCMotor driveMotor, + double driveGearing, + double driveWheelRadius, + LinearSystem steerPlant, + double steerKs, + DCMotor steerMotor, + double steerGearing, + SwerveDriveKinematics kinematics) { + this.drivePlant = drivePlant; + this.driveKs = driveKs; + this.driveMotor = driveMotor; + this.driveGearing = driveGearing; + this.driveWheelRadius = driveWheelRadius; + this.steerPlant = steerPlant; + this.steerKs = steerKs; + this.steerMotor = steerMotor; + this.steerGearing = steerGearing; + + this.kinematics = kinematics; + numModules = kinematics.toSwerveModuleStates(new ChassisSpeeds()).length; + driveInputs = new double[numModules]; + driveStates = new ArrayList<>(numModules); + steerInputs = new double[numModules]; + steerStates = new ArrayList<>(numModules); + for (int i = 0; i < numModules; i++) { + driveStates.add(VecBuilder.fill(0, 0)); + steerStates.add(VecBuilder.fill(0, 0)); } - - /** - * Creates a swerve drive simulation. - * - * @param drivePlant The {@link LinearSystem} representing a swerve module's drive system. The - * state should be in units of meters and input in volts. - * @param driveKs The static gain in volts of the drive system's feedforward, or the minimum - * voltage to cause motion. Set this to 0 to ignore static friction. - * @param driveMotor The DCMotor model for the drive motor(s) of this swerve drive's modules. 
This - * should not have any gearing applied. - * @param driveGearing The gear ratio of the drive system. Positive values indicate a reduction - * where one rotation of the drive wheel equals driveGearing rotations of the drive motor. - * @param driveWheelRadius The radius of the module's driving wheel. - * @param steerPlant The {@link LinearSystem} representing a swerve module's steer system. The - * state should be in units of radians and input in volts. - * @param steerKs The static gain in volts of the steer system's feedforward, or the minimum - * voltage to cause motion. Set this to 0 to ignore static friction. - * @param steerMotor The DCMotor model for the steer motor(s) of this swerve drive's modules. This - * should not have any gearing applied. - * @param steerGearing The gear ratio of the steer system. Positive values indicate a reduction - * where one rotation of the module heading/azimuth equals steerGearing rotations of the steer - * motor. - * @param kinematics The kinematics for this swerve drive. All swerve module information used in - * this class should match the order of the modules this kinematics object was constructed - * with. 
- */ - public SwerveDriveSim( - LinearSystem drivePlant, - double driveKs, - DCMotor driveMotor, - double driveGearing, - double driveWheelRadius, - LinearSystem steerPlant, - double steerKs, - DCMotor steerMotor, - double steerGearing, - SwerveDriveKinematics kinematics) { - this.drivePlant = drivePlant; - this.driveKs = driveKs; - this.driveMotor = driveMotor; - this.driveGearing = driveGearing; - this.driveWheelRadius = driveWheelRadius; - this.steerPlant = steerPlant; - this.steerKs = steerKs; - this.steerMotor = steerMotor; - this.steerGearing = steerGearing; - - this.kinematics = kinematics; - numModules = kinematics.toSwerveModuleStates(new ChassisSpeeds()).length; - driveInputs = new double[numModules]; - driveStates = new ArrayList<>(numModules); - steerInputs = new double[numModules]; - steerStates = new ArrayList<>(numModules); - for (int i = 0; i < numModules; i++) { - driveStates.add(VecBuilder.fill(0, 0)); - steerStates.add(VecBuilder.fill(0, 0)); - } - } - - /** - * Sets the input voltages of the drive motors. - * - * @param inputs Input voltages. These should match the module order used in the kinematics. - */ - public void setDriveInputs(double... inputs) { - final double battVoltage = RobotController.getBatteryVoltage(); - for (int i = 0; i < driveInputs.length; i++) { - double input = inputs[i]; - driveInputs[i] = MathUtil.clamp(input, -battVoltage, battVoltage); - } - } - - /** - * Sets the input voltages of the steer motors. - * - * @param inputs Input voltages. These should match the module order used in the kinematics. - */ - public void setSteerInputs(double... inputs) { - final double battVoltage = RobotController.getBatteryVoltage(); - for (int i = 0; i < steerInputs.length; i++) { - double input = inputs[i]; - steerInputs[i] = MathUtil.clamp(input, -battVoltage, battVoltage); - } + } + + /** + * Sets the input voltages of the drive motors. + * + * @param inputs Input voltages. These should match the module order used in the kinematics. 
+ */ + public void setDriveInputs(double... inputs) { + final double battVoltage = RobotController.getBatteryVoltage(); + for (int i = 0; i < driveInputs.length; i++) { + double input = inputs[i]; + driveInputs[i] = MathUtil.clamp(input, -battVoltage, battVoltage); } - - /** - * Computes the new x given the old x and the control input. Includes the effect of static - * friction. - * - * @param discA The discretized system matrix. - * @param discB The discretized input matrix. - * @param x The position/velocity state of the drive/steer system. - * @param input The input voltage. - * @param ks The kS value of the feedforward model. - * @return The updated x, including the effect of static friction. - */ - protected static Matrix calculateX( - Matrix discA, Matrix discB, Matrix x, double input, double ks) { - var Ax = discA.times(x); - double nextStateVel = Ax.get(1, 0); - // input required to make next state vel == 0 - double inputToStop = nextStateVel / -discB.get(1, 0); - // ks effect on system velocity - double ksSystemEffect = MathUtil.clamp(inputToStop, -ks, ks); - - // after ks system effect: - nextStateVel += discB.get(1, 0) * ksSystemEffect; - inputToStop = nextStateVel / -discB.get(1, 0); - double signToStop = Math.signum(inputToStop); - double inputSign = Math.signum(input); - double ksInputEffect = 0; - - // system velocity was reduced to 0, resist any input - if (Math.abs(ksSystemEffect) < ks) { - double absInput = Math.abs(input); - ksInputEffect = -MathUtil.clamp(ks * inputSign, -absInput, absInput); - } - // non-zero system velocity, but input causes velocity sign change. 
Resist input after sign - // change - else if ((input * signToStop) > (inputToStop * signToStop)) { - double absInput = Math.abs(input - inputToStop); - ksInputEffect = -MathUtil.clamp(ks * inputSign, -absInput, absInput); - } - - // calculate next x including static friction - var Bu = discB.times(VecBuilder.fill(input + ksSystemEffect + ksInputEffect)); - return Ax.plus(Bu); - } - - /** - * Update the drivetrain states with the given timestep. - * - * @param dtSeconds The timestep in seconds. This should be the robot loop period. - */ - public void update(double dtSeconds) { - var driveDiscAB = Discretization.discretizeAB(drivePlant.getA(), drivePlant.getB(), dtSeconds); - var steerDiscAB = Discretization.discretizeAB(steerPlant.getA(), steerPlant.getB(), dtSeconds); - - var moduleDeltas = new SwerveModulePosition[numModules]; - for (int i = 0; i < numModules; i++) { - double prevDriveStatePos = driveStates.get(i).get(0, 0); - driveStates.set( - i, - calculateX( - driveDiscAB.getFirst(), - driveDiscAB.getSecond(), - driveStates.get(i), - driveInputs[i], - driveKs)); - double currDriveStatePos = driveStates.get(i).get(0, 0); - steerStates.set( - i, - calculateX( - steerDiscAB.getFirst(), - steerDiscAB.getSecond(), - steerStates.get(i), - steerInputs[i], - steerKs)); - double currSteerStatePos = steerStates.get(i).get(0, 0); - moduleDeltas[i] = - new SwerveModulePosition( - currDriveStatePos - prevDriveStatePos, new Rotation2d(currSteerStatePos)); - } - - var twist = kinematics.toTwist2d(moduleDeltas); - pose = pose.exp(twist); - omegaRadsPerSec = twist.dtheta / dtSeconds; - } - - /** - * Reset the simulated swerve drive state. This effectively teleports the robot and should only be - * used during the setup of the simulation world. - * - * @param pose The new pose of the simulated swerve drive. - * @param preserveMotion If true, the current module states will be preserved. Otherwise, they - * will be reset to zeros. 
- */ - public void reset(Pose2d pose, boolean preserveMotion) { - this.pose = pose; - if (!preserveMotion) { - for (int i = 0; i < numModules; i++) { - driveStates.set(i, VecBuilder.fill(0, 0)); - steerStates.set(i, VecBuilder.fill(0, 0)); - } - omegaRadsPerSec = 0; - } + } + + /** + * Sets the input voltages of the steer motors. + * + * @param inputs Input voltages. These should match the module order used in the kinematics. + */ + public void setSteerInputs(double... inputs) { + final double battVoltage = RobotController.getBatteryVoltage(); + for (int i = 0; i < steerInputs.length; i++) { + double input = inputs[i]; + steerInputs[i] = MathUtil.clamp(input, -battVoltage, battVoltage); } - - /** - * Reset the simulated swerve drive state. This effectively teleports the robot and should only be - * used during the setup of the simulation world. - * - * @param pose The new pose of the simulated swerve drive. - * @param moduleDriveStates The new states of the modules' drive systems in [meters, meters/sec]. - * These should match the module order used in the kinematics. - * @param moduleSteerStates The new states of the modules' steer systems in [radians, - * radians/sec]. These should match the module order used in the kinematics. - */ - public void reset( - Pose2d pose, List> moduleDriveStates, List> moduleSteerStates) { - if (moduleDriveStates.size() != driveStates.size() - || moduleSteerStates.size() != steerStates.size()) - throw new IllegalArgumentException("Provided module states do not match number of modules!"); - this.pose = pose; - for (int i = 0; i < numModules; i++) { - driveStates.set(i, moduleDriveStates.get(i).copy()); - steerStates.set(i, moduleSteerStates.get(i).copy()); - } - omegaRadsPerSec = kinematics.toChassisSpeeds(getModuleStates()).omegaRadiansPerSecond; + } + + /** + * Computes the new x given the old x and the control input. Includes the effect of static + * friction. + * + * @param discA The discretized system matrix. 
+ * @param discB The discretized input matrix. + * @param x The position/velocity state of the drive/steer system. + * @param input The input voltage. + * @param ks The kS value of the feedforward model. + * @return The updated x, including the effect of static friction. + */ + protected static Matrix calculateX( + Matrix discA, Matrix discB, Matrix x, double input, double ks) { + var Ax = discA.times(x); + double nextStateVel = Ax.get(1, 0); + // input required to make next state vel == 0 + double inputToStop = nextStateVel / -discB.get(1, 0); + // ks effect on system velocity + double ksSystemEffect = MathUtil.clamp(inputToStop, -ks, ks); + + // after ks system effect: + nextStateVel += discB.get(1, 0) * ksSystemEffect; + inputToStop = nextStateVel / -discB.get(1, 0); + double signToStop = Math.signum(inputToStop); + double inputSign = Math.signum(input); + double ksInputEffect = 0; + + // system velocity was reduced to 0, resist any input + if (Math.abs(ksSystemEffect) < ks) { + double absInput = Math.abs(input); + ksInputEffect = -MathUtil.clamp(ks * inputSign, -absInput, absInput); } - - /** - * Get the pose of the simulated swerve drive. Note that this is the "actual" pose of the robot in - * the simulation world, without any noise. If you are simulating a pose estimator, this pose - * should only be used for visualization or camera simulation. This should be called after {@link - * #update(double)}. - */ - public Pose2d getPose() { - return pose; + // non-zero system velocity, but input causes velocity sign change. Resist input after sign + // change + else if ((input * signToStop) > (inputToStop * signToStop)) { + double absInput = Math.abs(input - inputToStop); + ksInputEffect = -MathUtil.clamp(ks * inputSign, -absInput, absInput); } - /** - * Get the {@link SwerveModulePosition} of each module. The returned array order matches the - * kinematics module order. This should be called after {@link #update(double)}. 
- */ - public SwerveModulePosition[] getModulePositions() { - var positions = new SwerveModulePosition[numModules]; - for (int i = 0; i < numModules; i++) { - positions[i] = - new SwerveModulePosition( - driveStates.get(i).get(0, 0), new Rotation2d(steerStates.get(i).get(0, 0))); - } - return positions; + // calculate next x including static friction + var Bu = discB.times(VecBuilder.fill(input + ksSystemEffect + ksInputEffect)); + return Ax.plus(Bu); + } + + /** + * Update the drivetrain states with the given timestep. + * + * @param dtSeconds The timestep in seconds. This should be the robot loop period. + */ + public void update(double dtSeconds) { + var driveDiscAB = Discretization.discretizeAB(drivePlant.getA(), drivePlant.getB(), dtSeconds); + var steerDiscAB = Discretization.discretizeAB(steerPlant.getA(), steerPlant.getB(), dtSeconds); + + var moduleDeltas = new SwerveModulePosition[numModules]; + for (int i = 0; i < numModules; i++) { + double prevDriveStatePos = driveStates.get(i).get(0, 0); + driveStates.set( + i, + calculateX( + driveDiscAB.getFirst(), + driveDiscAB.getSecond(), + driveStates.get(i), + driveInputs[i], + driveKs)); + double currDriveStatePos = driveStates.get(i).get(0, 0); + steerStates.set( + i, + calculateX( + steerDiscAB.getFirst(), + steerDiscAB.getSecond(), + steerStates.get(i), + steerInputs[i], + steerKs)); + double currSteerStatePos = steerStates.get(i).get(0, 0); + moduleDeltas[i] = + new SwerveModulePosition( + currDriveStatePos - prevDriveStatePos, new Rotation2d(currSteerStatePos)); } - /** - * Get the {@link SwerveModulePosition} of each module with rudimentary noise simulation. The - * returned array order matches the kinematics module order. This should be called after {@link - * #update(double)}. - * - * @param driveStdDev The standard deviation in meters of the drive wheel position. - * @param steerStdDev The standard deviation in radians of the steer angle. 
- */ - public SwerveModulePosition[] getNoisyModulePositions(double driveStdDev, double steerStdDev) { - var positions = new SwerveModulePosition[numModules]; - for (int i = 0; i < numModules; i++) { - positions[i] = - new SwerveModulePosition( - driveStates.get(i).get(0, 0) + rand.nextGaussian() * driveStdDev, - new Rotation2d(steerStates.get(i).get(0, 0) + rand.nextGaussian() * steerStdDev)); - } - return positions; + var twist = kinematics.toTwist2d(moduleDeltas); + pose = pose.exp(twist); + omegaRadsPerSec = twist.dtheta / dtSeconds; + } + + /** + * Reset the simulated swerve drive state. This effectively teleports the robot and should only be + * used during the setup of the simulation world. + * + * @param pose The new pose of the simulated swerve drive. + * @param preserveMotion If true, the current module states will be preserved. Otherwise, they + * will be reset to zeros. + */ + public void reset(Pose2d pose, boolean preserveMotion) { + this.pose = pose; + if (!preserveMotion) { + for (int i = 0; i < numModules; i++) { + driveStates.set(i, VecBuilder.fill(0, 0)); + steerStates.set(i, VecBuilder.fill(0, 0)); + } + omegaRadsPerSec = 0; } - - /** - * Get the {@link SwerveModuleState} of each module. The returned array order matches the - * kinematics module order. This should be called after {@link #update(double)}. - */ - public SwerveModuleState[] getModuleStates() { - var positions = new SwerveModuleState[numModules]; - for (int i = 0; i < numModules; i++) { - positions[i] = - new SwerveModuleState( - driveStates.get(i).get(1, 0), new Rotation2d(steerStates.get(i).get(0, 0))); - } - return positions; + } + + /** + * Reset the simulated swerve drive state. This effectively teleports the robot and should only be + * used during the setup of the simulation world. + * + * @param pose The new pose of the simulated swerve drive. + * @param moduleDriveStates The new states of the modules' drive systems in [meters, meters/sec]. 
+ * These should match the module order used in the kinematics. + * @param moduleSteerStates The new states of the modules' steer systems in [radians, + * radians/sec]. These should match the module order used in the kinematics. + */ + public void reset( + Pose2d pose, List> moduleDriveStates, List> moduleSteerStates) { + if (moduleDriveStates.size() != driveStates.size() + || moduleSteerStates.size() != steerStates.size()) + throw new IllegalArgumentException("Provided module states do not match number of modules!"); + this.pose = pose; + for (int i = 0; i < numModules; i++) { + driveStates.set(i, moduleDriveStates.get(i).copy()); + steerStates.set(i, moduleSteerStates.get(i).copy()); } - - /** - * Get the state of each module's drive system in [meters, meters/sec]. The returned list order - * matches the kinematics module order. This should be called after {@link #update(double)}. - */ - public List> getDriveStates() { - List> states = new ArrayList<>(); - for (int i = 0; i < driveStates.size(); i++) { - states.add(driveStates.get(i).copy()); - } - return states; + omegaRadsPerSec = kinematics.toChassisSpeeds(getModuleStates()).omegaRadiansPerSecond; + } + + /** + * Get the pose of the simulated swerve drive. Note that this is the "actual" pose of the robot in + * the simulation world, without any noise. If you are simulating a pose estimator, this pose + * should only be used for visualization or camera simulation. This should be called after {@link + * #update(double)}. + */ + public Pose2d getPose() { + return pose; + } + + /** + * Get the {@link SwerveModulePosition} of each module. The returned array order matches the + * kinematics module order. This should be called after {@link #update(double)}. 
+ */ + public SwerveModulePosition[] getModulePositions() { + var positions = new SwerveModulePosition[numModules]; + for (int i = 0; i < numModules; i++) { + positions[i] = + new SwerveModulePosition( + driveStates.get(i).get(0, 0), new Rotation2d(steerStates.get(i).get(0, 0))); } - - /** - * Get the state of each module's steer system in [radians, radians/sec]. The returned list order - * matches the kinematics module order. This should be called after {@link #update(double)}. - */ - public List> getSteerStates() { - List> states = new ArrayList<>(); - for (int i = 0; i < steerStates.size(); i++) { - states.add(steerStates.get(i).copy()); - } - return states; + return positions; + } + + /** + * Get the {@link SwerveModulePosition} of each module with rudimentary noise simulation. The + * returned array order matches the kinematics module order. This should be called after {@link + * #update(double)}. + * + * @param driveStdDev The standard deviation in meters of the drive wheel position. + * @param steerStdDev The standard deviation in radians of the steer angle. + */ + public SwerveModulePosition[] getNoisyModulePositions(double driveStdDev, double steerStdDev) { + var positions = new SwerveModulePosition[numModules]; + for (int i = 0; i < numModules; i++) { + positions[i] = + new SwerveModulePosition( + driveStates.get(i).get(0, 0) + rand.nextGaussian() * driveStdDev, + new Rotation2d(steerStates.get(i).get(0, 0) + rand.nextGaussian() * steerStdDev)); } - - /** - * Get the angular velocity of the robot, which can be useful for gyro simulation. CCW positive. - * This should be called after {@link #update(double)}. - */ - public double getOmegaRadsPerSec() { - return omegaRadsPerSec; + return positions; + } + + /** + * Get the {@link SwerveModuleState} of each module. The returned array order matches the + * kinematics module order. This should be called after {@link #update(double)}. 
+ */ + public SwerveModuleState[] getModuleStates() { + var positions = new SwerveModuleState[numModules]; + for (int i = 0; i < numModules; i++) { + positions[i] = + new SwerveModuleState( + driveStates.get(i).get(1, 0), new Rotation2d(steerStates.get(i).get(0, 0))); } - - /** - * Calculates the current drawn from the battery by the motor(s). Ignores regenerative current - * from back-emf. - * - * @param motor The motor(s) used. - * @param radiansPerSec The velocity of the motor in radians per second. - * @param inputVolts The voltage commanded by the motor controller (battery voltage * duty cycle). - * @param battVolts The voltage of the battery. - */ - protected static double getCurrentDraw( - DCMotor motor, double radiansPerSec, double inputVolts, double battVolts) { - double effVolts = inputVolts - radiansPerSec / motor.KvRadPerSecPerVolt; - // ignore regeneration - if (inputVolts >= 0) effVolts = MathUtil.clamp(effVolts, 0, inputVolts); - else effVolts = MathUtil.clamp(effVolts, inputVolts, 0); - // calculate battery current - return (inputVolts / battVolts) * (effVolts / motor.rOhms); + return positions; + } + + /** + * Get the state of each module's drive system in [meters, meters/sec]. The returned list order + * matches the kinematics module order. This should be called after {@link #update(double)}. + */ + public List> getDriveStates() { + List> states = new ArrayList<>(); + for (int i = 0; i < driveStates.size(); i++) { + states.add(driveStates.get(i).copy()); } - - /** - * Get the current draw in amps for each module's drive motor(s). This should be called after - * {@link #update(double)}. The returned array order matches the kinematics module order. 
- */ - public double[] getDriveCurrentDraw() { - double[] currents = new double[numModules]; - for (int i = 0; i < numModules; i++) { - double radiansPerSec = driveStates.get(i).get(1, 0) * driveGearing / driveWheelRadius; - currents[i] = - getCurrentDraw( - driveMotor, radiansPerSec, driveInputs[i], RobotController.getBatteryVoltage()); - } - return currents; + return states; + } + + /** + * Get the state of each module's steer system in [radians, radians/sec]. The returned list order + * matches the kinematics module order. This should be called after {@link #update(double)}. + */ + public List> getSteerStates() { + List> states = new ArrayList<>(); + for (int i = 0; i < steerStates.size(); i++) { + states.add(steerStates.get(i).copy()); } - - /** - * Get the current draw in amps for each module's steer motor(s). This should be called after - * {@link #update(double)}. The returned array order matches the kinematics module order. - */ - public double[] getSteerCurrentDraw() { - double[] currents = new double[numModules]; - for (int i = 0; i < numModules; i++) { - double radiansPerSec = steerStates.get(i).get(1, 0) * steerGearing; - currents[i] = - getCurrentDraw( - steerMotor, radiansPerSec, steerInputs[i], RobotController.getBatteryVoltage()); - } - return currents; + return states; + } + + /** + * Get the angular velocity of the robot, which can be useful for gyro simulation. CCW positive. + * This should be called after {@link #update(double)}. + */ + public double getOmegaRadsPerSec() { + return omegaRadsPerSec; + } + + /** + * Calculates the current drawn from the battery by the motor(s). Ignores regenerative current + * from back-emf. + * + * @param motor The motor(s) used. + * @param radiansPerSec The velocity of the motor in radians per second. + * @param inputVolts The voltage commanded by the motor controller (battery voltage * duty cycle). + * @param battVolts The voltage of the battery. 
+ */ + protected static double getCurrentDraw( + DCMotor motor, double radiansPerSec, double inputVolts, double battVolts) { + double effVolts = inputVolts - radiansPerSec / motor.KvRadPerSecPerVolt; + // ignore regeneration + if (inputVolts >= 0) effVolts = MathUtil.clamp(effVolts, 0, inputVolts); + else effVolts = MathUtil.clamp(effVolts, inputVolts, 0); + // calculate battery current + return (inputVolts / battVolts) * (effVolts / motor.rOhms); + } + + /** + * Get the current draw in amps for each module's drive motor(s). This should be called after + * {@link #update(double)}. The returned array order matches the kinematics module order. + */ + public double[] getDriveCurrentDraw() { + double[] currents = new double[numModules]; + for (int i = 0; i < numModules; i++) { + double radiansPerSec = driveStates.get(i).get(1, 0) * driveGearing / driveWheelRadius; + currents[i] = + getCurrentDraw( + driveMotor, radiansPerSec, driveInputs[i], RobotController.getBatteryVoltage()); } - - /** - * Get the total current draw in amps of all swerve motors. This should be called after {@link - * #update(double)}. - */ - public double getTotalCurrentDraw() { - double sum = 0; - for (double val : getDriveCurrentDraw()) sum += val; - for (double val : getSteerCurrentDraw()) sum += val; - return sum; + return currents; + } + + /** + * Get the current draw in amps for each module's steer motor(s). This should be called after + * {@link #update(double)}. The returned array order matches the kinematics module order. + */ + public double[] getSteerCurrentDraw() { + double[] currents = new double[numModules]; + for (int i = 0; i < numModules; i++) { + double radiansPerSec = steerStates.get(i).get(1, 0) * steerGearing; + currents[i] = + getCurrentDraw( + steerMotor, radiansPerSec, steerInputs[i], RobotController.getBatteryVoltage()); } + return currents; + } + + /** + * Get the total current draw in amps of all swerve motors. This should be called after {@link + * #update(double)}. 
+ */ + public double getTotalCurrentDraw() { + double sum = 0; + for (double val : getDriveCurrentDraw()) sum += val; + for (double val : getSteerCurrentDraw()) sum += val; + return sum; + } } diff --git a/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/subsystems/drivetrain/SwerveModule.java b/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/subsystems/drivetrain/SwerveModule.java index 4615d29217..bafbd24068 100644 --- a/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/subsystems/drivetrain/SwerveModule.java +++ b/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/subsystems/drivetrain/SwerveModule.java @@ -39,154 +39,154 @@ import edu.wpi.first.wpilibj.smartdashboard.SmartDashboard; public class SwerveModule { - // --- Module Constants - private final ModuleConstants moduleConstants; - - // --- Hardware - private final PWMSparkMax driveMotor; - private final Encoder driveEncoder; - private final PWMSparkMax steerMotor; - private final Encoder steerEncoder; - - // --- Control - private SwerveModuleState desiredState = new SwerveModuleState(); - private boolean openLoop = false; - - // Simple PID feedback controllers run on the roborio - private PIDController drivePidController = new PIDController(kDriveKP, kDriveKI, kDriveKD); - // (A profiled steering PID controller may give better results by utilizing feedforward.) 
- private PIDController steerPidController = new PIDController(kSteerKP, kSteerKI, kSteerKD); + // --- Module Constants + private final ModuleConstants moduleConstants; + + // --- Hardware + private final PWMSparkMax driveMotor; + private final Encoder driveEncoder; + private final PWMSparkMax steerMotor; + private final Encoder steerEncoder; + + // --- Control + private SwerveModuleState desiredState = new SwerveModuleState(); + private boolean openLoop = false; + + // Simple PID feedback controllers run on the roborio + private PIDController drivePidController = new PIDController(kDriveKP, kDriveKI, kDriveKD); + // (A profiled steering PID controller may give better results by utilizing feedforward.) + private PIDController steerPidController = new PIDController(kSteerKP, kSteerKI, kSteerKD); + + // --- Simulation + private final EncoderSim driveEncoderSim; + private double driveCurrentSim = 0; + private final EncoderSim steerEncoderSim; + private double steerCurrentSim = 0; + + public SwerveModule(ModuleConstants moduleConstants) { + this.moduleConstants = moduleConstants; + + driveMotor = new PWMSparkMax(moduleConstants.driveMotorID); + driveEncoder = new Encoder(moduleConstants.driveEncoderA, moduleConstants.driveEncoderB); + driveEncoder.setDistancePerPulse(kDriveDistPerPulse); + steerMotor = new PWMSparkMax(moduleConstants.steerMotorID); + steerEncoder = new Encoder(moduleConstants.steerEncoderA, moduleConstants.steerEncoderB); + steerEncoder.setDistancePerPulse(kSteerRadPerPulse); + + steerPidController.enableContinuousInput(-Math.PI, Math.PI); // --- Simulation - private final EncoderSim driveEncoderSim; - private double driveCurrentSim = 0; - private final EncoderSim steerEncoderSim; - private double steerCurrentSim = 0; - - public SwerveModule(ModuleConstants moduleConstants) { - this.moduleConstants = moduleConstants; - - driveMotor = new PWMSparkMax(moduleConstants.driveMotorID); - driveEncoder = new Encoder(moduleConstants.driveEncoderA, 
moduleConstants.driveEncoderB); - driveEncoder.setDistancePerPulse(kDriveDistPerPulse); - steerMotor = new PWMSparkMax(moduleConstants.steerMotorID); - steerEncoder = new Encoder(moduleConstants.steerEncoderA, moduleConstants.steerEncoderB); - steerEncoder.setDistancePerPulse(kSteerRadPerPulse); - - steerPidController.enableContinuousInput(-Math.PI, Math.PI); - - // --- Simulation - driveEncoderSim = new EncoderSim(driveEncoder); - steerEncoderSim = new EncoderSim(steerEncoder); - } - - public void periodic() { - // Perform PID feedback control to steer the module to the target angle - double steerPid = - steerPidController.calculate( - getAbsoluteHeading().getRadians(), desiredState.angle.getRadians()); - steerMotor.setVoltage(steerPid); - - // Use feedforward model to translate target drive velocities into voltages - double driveFF = kDriveFF.calculate(desiredState.speedMetersPerSecond); - double drivePid = 0; - if (!openLoop) { - // Perform PID feedback control to compensate for disturbances - drivePid = - drivePidController.calculate(driveEncoder.getRate(), desiredState.speedMetersPerSecond); - } - - driveMotor.setVoltage(driveFF + drivePid); - } - - /** - * Command this swerve module to the desired angle and velocity. - * - * @param steerInPlace If modules should steer to target angle when target velocity is 0 - */ - public void setDesiredState( - SwerveModuleState desiredState, boolean openLoop, boolean steerInPlace) { - Rotation2d currentRotation = getAbsoluteHeading(); - // Avoid turning more than 90 degrees by inverting speed on large angle changes - desiredState = SwerveModuleState.optimize(desiredState, currentRotation); - - this.desiredState = desiredState; - } - - /** Module heading reported by steering encoder. */ - public Rotation2d getAbsoluteHeading() { - return Rotation2d.fromRadians(steerEncoder.getDistance()); - } - - /** - * {@link SwerveModuleState} describing absolute module rotation and velocity in meters per - * second. 
- */ - public SwerveModuleState getState() { - return new SwerveModuleState(driveEncoder.getRate(), getAbsoluteHeading()); - } - - /** {@link SwerveModulePosition} describing absolute module rotation and position in meters. */ - public SwerveModulePosition getPosition() { - return new SwerveModulePosition(driveEncoder.getDistance(), getAbsoluteHeading()); + driveEncoderSim = new EncoderSim(driveEncoder); + steerEncoderSim = new EncoderSim(steerEncoder); + } + + public void periodic() { + // Perform PID feedback control to steer the module to the target angle + double steerPid = + steerPidController.calculate( + getAbsoluteHeading().getRadians(), desiredState.angle.getRadians()); + steerMotor.setVoltage(steerPid); + + // Use feedforward model to translate target drive velocities into voltages + double driveFF = kDriveFF.calculate(desiredState.speedMetersPerSecond); + double drivePid = 0; + if (!openLoop) { + // Perform PID feedback control to compensate for disturbances + drivePid = + drivePidController.calculate(driveEncoder.getRate(), desiredState.speedMetersPerSecond); } - /** Voltage of the drive motor */ - public double getDriveVoltage() { - return driveMotor.get() * RobotController.getBatteryVoltage(); - } - - /** Voltage of the steer motor */ - public double getSteerVoltage() { - return steerMotor.get() * RobotController.getBatteryVoltage(); - } - - /** - * Constants about this module, like motor IDs, encoder angle offset, and translation from center. 
- */ - public ModuleConstants getModuleConstants() { - return moduleConstants; - } - - public void log() { - var state = getState(); - - String table = "Module " + moduleConstants.moduleNum + "/"; - SmartDashboard.putNumber( - table + "Steer Degrees", Math.toDegrees(MathUtil.angleModulus(state.angle.getRadians()))); - SmartDashboard.putNumber( - table + "Steer Target Degrees", Math.toDegrees(steerPidController.getSetpoint())); - SmartDashboard.putNumber( - table + "Drive Velocity Feet", Units.metersToFeet(state.speedMetersPerSecond)); - SmartDashboard.putNumber( - table + "Drive Velocity Target Feet", - Units.metersToFeet(desiredState.speedMetersPerSecond)); - SmartDashboard.putNumber(table + "Drive Current", driveCurrentSim); - SmartDashboard.putNumber(table + "Steer Current", steerCurrentSim); - } - - // ----- Simulation - - public void simulationUpdate( - double driveEncoderDist, - double driveEncoderRate, - double driveCurrent, - double steerEncoderDist, - double steerEncoderRate, - double steerCurrent) { - driveEncoderSim.setDistance(driveEncoderDist); - driveEncoderSim.setRate(driveEncoderRate); - this.driveCurrentSim = driveCurrent; - steerEncoderSim.setDistance(steerEncoderDist); - steerEncoderSim.setRate(steerEncoderRate); - this.steerCurrentSim = steerCurrent; - } - - public double getDriveCurrentSim() { - return driveCurrentSim; - } - - public double getSteerCurrentSim() { - return steerCurrentSim; - } + driveMotor.setVoltage(driveFF + drivePid); + } + + /** + * Command this swerve module to the desired angle and velocity. 
+ * + * @param steerInPlace If modules should steer to target angle when target velocity is 0 + */ + public void setDesiredState( + SwerveModuleState desiredState, boolean openLoop, boolean steerInPlace) { + Rotation2d currentRotation = getAbsoluteHeading(); + // Avoid turning more than 90 degrees by inverting speed on large angle changes + desiredState = SwerveModuleState.optimize(desiredState, currentRotation); + + this.desiredState = desiredState; + } + + /** Module heading reported by steering encoder. */ + public Rotation2d getAbsoluteHeading() { + return Rotation2d.fromRadians(steerEncoder.getDistance()); + } + + /** + * {@link SwerveModuleState} describing absolute module rotation and velocity in meters per + * second. + */ + public SwerveModuleState getState() { + return new SwerveModuleState(driveEncoder.getRate(), getAbsoluteHeading()); + } + + /** {@link SwerveModulePosition} describing absolute module rotation and position in meters. */ + public SwerveModulePosition getPosition() { + return new SwerveModulePosition(driveEncoder.getDistance(), getAbsoluteHeading()); + } + + /** Voltage of the drive motor */ + public double getDriveVoltage() { + return driveMotor.get() * RobotController.getBatteryVoltage(); + } + + /** Voltage of the steer motor */ + public double getSteerVoltage() { + return steerMotor.get() * RobotController.getBatteryVoltage(); + } + + /** + * Constants about this module, like motor IDs, encoder angle offset, and translation from center. 
+ */ + public ModuleConstants getModuleConstants() { + return moduleConstants; + } + + public void log() { + var state = getState(); + + String table = "Module " + moduleConstants.moduleNum + "/"; + SmartDashboard.putNumber( + table + "Steer Degrees", Math.toDegrees(MathUtil.angleModulus(state.angle.getRadians()))); + SmartDashboard.putNumber( + table + "Steer Target Degrees", Math.toDegrees(steerPidController.getSetpoint())); + SmartDashboard.putNumber( + table + "Drive Velocity Feet", Units.metersToFeet(state.speedMetersPerSecond)); + SmartDashboard.putNumber( + table + "Drive Velocity Target Feet", + Units.metersToFeet(desiredState.speedMetersPerSecond)); + SmartDashboard.putNumber(table + "Drive Current", driveCurrentSim); + SmartDashboard.putNumber(table + "Steer Current", steerCurrentSim); + } + + // ----- Simulation + + public void simulationUpdate( + double driveEncoderDist, + double driveEncoderRate, + double driveCurrent, + double steerEncoderDist, + double steerEncoderRate, + double steerCurrent) { + driveEncoderSim.setDistance(driveEncoderDist); + driveEncoderSim.setRate(driveEncoderRate); + this.driveCurrentSim = driveCurrent; + steerEncoderSim.setDistance(steerEncoderDist); + steerEncoderSim.setRate(steerEncoderRate); + this.steerCurrentSim = steerCurrent; + } + + public double getDriveCurrentSim() { + return driveCurrentSim; + } + + public double getSteerCurrentSim() { + return steerCurrentSim; + } } diff --git a/shared/common.gradle b/shared/common.gradle index cb4ce3f80b..408889d2ba 100644 --- a/shared/common.gradle +++ b/shared/common.gradle @@ -89,7 +89,7 @@ jacocoTestReport { afterEvaluate { classDirectories.setFrom(files(classDirectories.files.collect { fileTree(dir: it, - exclude: "edu/wpi/**" + exclude: "edu/wpi/**" ) })) } diff --git a/shared/config.gradle b/shared/config.gradle index 3d5f91b150..f7c7c33c6b 100644 --- a/shared/config.gradle +++ b/shared/config.gradle @@ -78,7 +78,7 @@ ext.createComponentZipTasks = { components, names, base, 
type, project, func -> if (it in NativeLibrarySpec && stringNames.contains(it.name)) { it.binaries.each { if (!it.buildable) return - def target = nativeUtils.getPublishClassifier(it) + def target = nativeUtils.getPublishClassifier(it) if (configMap.containsKey(target)) { configMap.get(target).add(it) } else { @@ -128,7 +128,7 @@ ext.createAllCombined = { list, name, base, type, project -> list.each { if (it.name.endsWith('debug')) return - from project.zipTree(it.archiveFile) + from project.zipTree(it.archiveFile) dependsOn it } } @@ -140,7 +140,6 @@ ext.createAllCombined = { list, name, base, type, project -> } return task - } // Create the standard ZIP format for the dependencies. diff --git a/versioningHelper.gradle b/versioningHelper.gradle index 4b085f3121..e9dd6e3d0a 100644 --- a/versioningHelper.gradle +++ b/versioningHelper.gradle @@ -3,7 +3,8 @@ import java.time.LocalDateTime import java.time.format.DateTimeFormatter gradle.allprojects { - ext.getCurrentVersion = { -> + ext.getCurrentVersion = { + -> def stdout = new ByteArrayOutputStream() String tagIsh try { From 4d516e3ad2e71ad5d80c42e8353127e44e3742a4 Mon Sep 17 00:00:00 2001 From: Sriman Achanta <68172138+srimanachanta@users.noreply.github.com> Date: Tue, 24 Oct 2023 22:57:30 -0400 Subject: [PATCH 3/3] Use 4 spaces --- build.gradle | 2 + .../photonvision/common/ProgramStatus.java | 8 +- .../configuration/CameraConfiguration.java | 282 ++-- .../common/configuration/ConfigManager.java | 482 +++--- .../common/configuration/ConfigProvider.java | 26 +- .../common/configuration/HardwareConfig.java | 330 ++-- .../configuration/HardwareSettings.java | 10 +- .../configuration/LegacyConfigProvider.java | 814 +++++----- .../common/configuration/NetworkConfig.java | 202 +-- .../configuration/PhotonConfiguration.java | 284 ++-- .../configuration/SqlConfigProvider.java | 868 +++++------ .../dataflow/DataChangeDestination.java | 16 +- .../common/dataflow/DataChangeService.java | 144 +- 
.../common/dataflow/DataChangeSource.java | 14 +- .../common/dataflow/DataChangeSubscriber.java | 46 +- .../dataflow/events/DataChangeEvent.java | 56 +- .../dataflow/events/HTTPRequestEvent.java | 14 +- .../events/IncomingWebSocketEvent.java | 72 +- .../dataflow/events/OutgoingUIEvent.java | 50 +- .../networktables/NTDataChangeListener.java | 32 +- .../networktables/NTDataPublisher.java | 284 ++-- .../networktables/NetworkTablesManager.java | 310 ++-- .../dataflow/websocket/UIDataPublisher.java | 70 +- .../common/hardware/GPIO/CustomGPIO.java | 126 +- .../common/hardware/GPIO/GPIOBase.java | 108 +- .../hardware/GPIO/pi/PigpioCommand.java | 32 +- .../hardware/GPIO/pi/PigpioException.java | 596 ++++---- .../common/hardware/GPIO/pi/PigpioPin.java | 110 +- .../common/hardware/GPIO/pi/PigpioPulse.java | 34 +- .../common/hardware/GPIO/pi/PigpioSocket.java | 644 ++++---- .../hardware/GPIO/pi/PigpioSocketLock.java | 220 +-- .../common/hardware/HardwareManager.java | 258 ++-- .../common/hardware/PiVersion.java | 80 +- .../common/hardware/Platform.java | 344 ++--- .../common/hardware/StatusLED.java | 38 +- .../common/hardware/VisionLED.java | 322 ++-- .../hardware/metrics/MetricsManager.java | 214 +-- .../common/hardware/metrics/cmds/CmdBase.java | 32 +- .../hardware/metrics/cmds/FileCmds.java | 24 +- .../hardware/metrics/cmds/LinuxCmds.java | 24 +- .../common/hardware/metrics/cmds/PiCmds.java | 32 +- .../photonvision/common/logging/LogGroup.java | 12 +- .../photonvision/common/logging/LogLevel.java | 22 +- .../photonvision/common/logging/Logger.java | 622 ++++---- .../common/networking/NetworkInterface.java | 92 +- .../common/networking/NetworkManager.java | 220 +-- .../common/networking/NetworkMode.java | 12 +- .../common/networking/NetworkUtils.java | 178 +-- .../common/networking/RoborioFinder.java | 152 +- .../common/scripting/ScriptCommandType.java | 16 +- .../common/scripting/ScriptConfig.java | 26 +- .../common/scripting/ScriptEvent.java | 48 +- 
.../common/scripting/ScriptEventType.java | 30 +- .../common/scripting/ScriptManager.java | 170 +-- .../photonvision/common/util/ColorHelper.java | 6 +- .../common/util/MemoryManager.java | 100 +- .../common/util/NativeLibHelper.java | 24 +- .../common/util/ReflectionUtils.java | 62 +- .../common/util/SerializationUtils.java | 60 +- .../photonvision/common/util/ShellExec.java | 328 ++-- .../photonvision/common/util/TestUtils.java | 616 ++++---- .../common/util/TimedTaskManager.java | 86 +- .../common/util/file/FileUtils.java | 178 +-- .../common/util/file/JacksonUtils.java | 184 +-- .../util/file/ProgramDirectoryUtilities.java | 68 +- .../common/util/java/TriConsumer.java | 2 +- .../common/util/math/IPUtils.java | 60 +- .../common/util/math/MathUtils.java | 336 ++--- .../common/util/numbers/DoubleCouple.java | 48 +- .../common/util/numbers/IntegerCouple.java | 12 +- .../common/util/numbers/NumberCouple.java | 74 +- .../common/util/numbers/NumberListUtils.java | 168 +-- .../org/photonvision/raspi/LibCameraJNI.java | 268 ++-- .../vision/apriltag/AprilTagFamily.java | 24 +- .../vision/aruco/ArucoDetectionResult.java | 94 +- .../vision/aruco/ArucoDetectorParams.java | 74 +- .../vision/aruco/PhotonArucoDetector.java | 172 +-- .../CameraCalibrationCoefficients.java | 224 +-- .../vision/calibration/JsonMat.java | 184 +-- .../vision/camera/CameraQuirk.java | 24 +- .../vision/camera/CameraType.java | 6 +- .../vision/camera/FileVisionSource.java | 146 +- .../vision/camera/LibcameraGpuSettables.java | 372 ++--- .../vision/camera/LibcameraGpuSource.java | 92 +- .../vision/camera/QuirkyCamera.java | 196 +-- .../vision/camera/USBCameraSource.java | 568 +++---- .../org/photonvision/vision/frame/Frame.java | 84 +- .../vision/frame/FrameDivisor.java | 16 +- .../vision/frame/FrameProvider.java | 18 +- .../vision/frame/FrameStaticProperties.java | 142 +- .../vision/frame/FrameThresholdType.java | 6 +- .../frame/consumer/DummyFrameConsumer.java | 8 +- 
.../frame/consumer/FileSaveFrameConsumer.java | 116 +- .../frame/consumer/MJPGFrameConsumer.java | 390 ++--- .../frame/provider/CpuImageProcessor.java | 150 +- .../frame/provider/FileFrameProvider.java | 162 +- .../provider/LibcameraGpuFrameProvider.java | 162 +- .../frame/provider/USBFrameProvider.java | 58 +- .../org/photonvision/vision/opencv/CVMat.java | 116 +- .../photonvision/vision/opencv/CVShape.java | 58 +- .../photonvision/vision/opencv/Contour.java | 364 ++--- .../vision/opencv/ContourGroupingMode.java | 14 +- .../opencv/ContourIntersectionDirection.java | 10 +- .../vision/opencv/ContourShape.java | 32 +- .../vision/opencv/ContourSortMode.java | 40 +- .../photonvision/vision/opencv/DualMat.java | 4 +- .../vision/opencv/DualOffsetValues.java | 40 +- .../vision/opencv/ImageFlipMode.java | 16 +- .../vision/opencv/ImageRotationMode.java | 22 +- .../vision/opencv/Releasable.java | 2 +- .../org/photonvision/vision/pipe/CVPipe.java | 52 +- .../pipe/impl/AprilTagDetectionPipe.java | 52 +- .../impl/AprilTagDetectionPipeParams.java | 50 +- .../pipe/impl/AprilTagPoseEstimatorPipe.java | 176 +-- .../vision/pipe/impl/ArucoDetectionPipe.java | 36 +- .../pipe/impl/ArucoDetectionPipeParams.java | 56 +- .../vision/pipe/impl/BlurPipe.java | 62 +- .../vision/pipe/impl/CalculateFPSPipe.java | 30 +- .../vision/pipe/impl/Calibrate3dPipe.java | 240 +-- .../pipe/impl/Collect2dTargetsPipe.java | 90 +- .../vision/pipe/impl/CornerDetectionPipe.java | 332 ++-- .../vision/pipe/impl/Draw2dAprilTagsPipe.java | 18 +- .../vision/pipe/impl/Draw2dArucoPipe.java | 18 +- .../vision/pipe/impl/Draw2dCrosshairPipe.java | 166 +- .../vision/pipe/impl/Draw2dTargetsPipe.java | 408 ++--- .../vision/pipe/impl/Draw3dAprilTagsPipe.java | 20 +- .../vision/pipe/impl/Draw3dArucoPipe.java | 18 +- .../vision/pipe/impl/Draw3dTargetsPipe.java | 524 +++---- .../pipe/impl/DrawCornerDetectionPipe.java | 32 +- .../vision/pipe/impl/ErodeDilatePipe.java | 64 +- .../vision/pipe/impl/FilterContoursPipe.java | 212 
+-- .../vision/pipe/impl/FilterShapesPipe.java | 102 +- .../pipe/impl/FindBoardCornersPipe.java | 576 +++---- .../vision/pipe/impl/FindCirclesPipe.java | 210 +-- .../vision/pipe/impl/FindContoursPipe.java | 26 +- .../vision/pipe/impl/FindPolygonPipe.java | 98 +- .../pipe/impl/GPUAcceleratedHSVPipe.java | 972 ++++++------ .../vision/pipe/impl/GrayscalePipe.java | 22 +- .../vision/pipe/impl/GroupContoursPipe.java | 126 +- .../vision/pipe/impl/HSVPipe.java | 116 +- .../vision/pipe/impl/MultiTargetPNPPipe.java | 90 +- .../vision/pipe/impl/OutputMatPipe.java | 14 +- .../vision/pipe/impl/ResizeImagePipe.java | 40 +- .../vision/pipe/impl/RotateImagePipe.java | 60 +- .../vision/pipe/impl/SolvePNPPipe.java | 130 +- .../vision/pipe/impl/SortContoursPipe.java | 88 +- .../vision/pipe/impl/SpeckleRejectPipe.java | 64 +- .../pipeline/AdvancedPipelineSettings.java | 258 ++-- .../vision/pipeline/AprilTagPipeline.java | 358 ++--- .../pipeline/AprilTagPipelineSettings.java | 118 +- .../vision/pipeline/ArucoPipeline.java | 114 +- .../pipeline/ArucoPipelineSettings.java | 30 +- .../vision/pipeline/CVPipeline.java | 76 +- .../vision/pipeline/CVPipelineSettings.java | 214 +-- .../vision/pipeline/Calibrate3dPipeline.java | 346 ++--- .../Calibration3dPipelineSettings.java | 24 +- .../vision/pipeline/ColoredShapePipeline.java | 380 ++--- .../ColoredShapePipelineSettings.java | 126 +- .../vision/pipeline/DriverModePipeline.java | 136 +- .../pipeline/DriverModePipelineSettings.java | 18 +- .../vision/pipeline/OutputStreamPipeline.java | 378 ++--- .../vision/pipeline/PipelineProfiler.java | 124 +- .../vision/pipeline/PipelineType.java | 24 +- .../vision/pipeline/ReflectivePipeline.java | 318 ++-- .../pipeline/ReflectivePipelineSettings.java | 16 +- .../vision/pipeline/UICalibrationData.java | 124 +- .../vision/pipeline/result/BytePackable.java | 238 +-- .../pipeline/result/CVPipelineResult.java | 112 +- .../result/DriverModePipelineResult.java | 6 +- 
.../photonvision/vision/processes/Data.java | 10 +- .../vision/processes/PipelineManager.java | 768 +++++----- .../vision/processes/VisionModule.java | 1008 ++++++------- .../VisionModuleChangeSubscriber.java | 386 ++--- .../vision/processes/VisionModuleManager.java | 132 +- .../vision/processes/VisionRunner.java | 126 +- .../vision/processes/VisionSource.java | 20 +- .../vision/processes/VisionSourceManager.java | 588 ++++---- .../processes/VisionSourceSettables.java | 186 +-- .../vision/target/PotentialTarget.java | 62 +- .../vision/target/RobotOffsetPointMode.java | 6 +- .../target/RobotOffsetPointOperation.java | 42 +- .../vision/target/TargetCalculations.java | 264 ++-- .../vision/target/TargetModel.java | 326 ++-- .../vision/target/TargetOffsetPointEdge.java | 10 +- .../vision/target/TargetOrientation.java | 4 +- .../vision/target/TrackedTarget.java | 900 +++++------ .../vision/videoStream/SocketVideoStream.java | 140 +- .../videoStream/SocketVideoStreamManager.java | 116 +- .../photonvision/common/BenchmarkTest.java | 308 ++-- .../common/ShapeBenchmarkTest.java | 334 ++--- .../common/configuration/ConfigTest.java | 206 +-- .../configuration/NetworkConfigTest.java | 44 +- .../common/configuration/SQLConfigTest.java | 60 +- .../common/util/CoordinateConversionTest.java | 92 +- .../common/util/LogFileManagementTest.java | 80 +- .../common/util/TimedTaskManagerTest.java | 14 +- .../hardware/HardwareConfigTest.java | 30 +- .../hardware/HardwareManagerTest.java | 34 +- .../photonvision/hardware/HardwareTest.java | 92 +- .../photonvision/vision/QuirkyCameraTest.java | 56 +- .../frame/provider/FileFrameProviderTest.java | 102 +- .../vision/frame/provider/LibcameraTest.java | 28 +- .../vision/opencv/ContourTest.java | 74 +- .../vision/pipeline/AprilTagTest.java | 220 +-- .../vision/pipeline/ArucoPipelineTest.java | 92 +- .../vision/pipeline/Calibrate3dPipeTest.java | 576 +++---- .../vision/pipeline/CirclePNPTest.java | 258 ++-- 
.../pipeline/ColoredShapePipelineTest.java | 174 +-- .../vision/pipeline/PipelineProfilerTest.java | 26 +- .../pipeline/ReflectivePipelineTest.java | 198 +-- .../vision/pipeline/SolvePNPTest.java | 412 ++--- .../vision/processes/PipelineManagerTest.java | 54 +- .../processes/VisionModuleManagerTest.java | 276 ++-- .../processes/VisionSourceManagerTest.java | 54 +- .../vision/target/TargetCalculationsTest.java | 480 +++--- .../vision/target/TrackedTargetTest.java | 64 +- .../org/photonvision/EstimatedRobotPose.java | 48 +- .../java/org/photonvision/PhotonCamera.java | 712 ++++----- .../org/photonvision/PhotonPoseEstimator.java | 1240 +++++++-------- .../photonvision/PhotonTargetSortMode.java | 32 +- .../java/org/photonvision/PhotonUtils.java | 340 ++--- .../simulation/PhotonCameraSim.java | 1020 ++++++------- .../simulation/SimCameraProperties.java | 1332 ++++++++--------- .../simulation/SimPhotonCamera.java | 244 +-- .../simulation/SimVisionSystem.java | 446 +++--- .../simulation/SimVisionTarget.java | 40 +- .../photonvision/simulation/VideoSimUtil.java | 1052 ++++++------- .../simulation/VisionSystemSim.java | 696 ++++----- .../simulation/VisionTargetSim.java | 100 +- .../java/org/photonvision/OpenCVTest.java | 386 ++--- .../java/org/photonvision/PacketTest.java | 276 ++-- .../org/photonvision/PhotonCameraTest.java | 26 +- .../photonvision/PhotonPoseEstimatorTest.java | 1146 +++++++------- .../java/org/photonvision/PhotonUtilTest.java | 118 +- .../org/photonvision/PhotonVersionTest.java | 48 +- .../org/photonvision/VisionSystemSimTest.java | 948 ++++++------ .../estimation/ApriltagWorkbenchTest.java | 98 +- .../src/main/java/org/photonvision/Main.java | 616 ++++---- .../server/CameraSocketHandler.java | 198 +-- .../server/CameraSocketMessageType.java | 28 +- .../server/DataSocketHandler.java | 594 ++++---- .../server/DataSocketMessageType.java | 64 +- .../photonvision/server/RequestHandler.java | 1062 ++++++------- .../java/org/photonvision/server/Server.java | 
162 +- .../server/UIInboundSubscriber.java | 40 +- .../server/UIOutboundSubscriber.java | 40 +- .../org/photonvision/server/UISettings.java | 4 +- .../common/dataflow/structures/Packet.java | 388 ++--- .../common/hardware/VisionLEDMode.java | 42 +- .../common/networktables/NTTopicSet.java | 176 +-- .../estimation/CameraTargetRelation.java | 58 +- .../photonvision/estimation/OpenCVHelp.java | 1010 ++++++------- .../estimation/RotTrlTransform3d.java | 388 ++--- .../photonvision/estimation/TargetModel.java | 282 ++-- .../estimation/VisionEstimation.java | 180 +-- .../targeting/MultiTargetPNPResults.java | 116 +- .../photonvision/targeting/PNPResults.java | 270 ++-- .../targeting/PhotonPipelineResult.java | 424 +++--- .../targeting/PhotonTrackedTarget.java | 516 +++---- .../photonvision/targeting/TargetCorner.java | 42 +- .../org/photonvision/utils/PacketUtils.java | 42 +- .../src/main/java/frc/robot/Main.java | 18 +- .../src/main/java/frc/robot/Robot.java | 112 +- .../src/main/java/frc/robot/Main.java | 18 +- .../src/main/java/frc/robot/Robot.java | 90 +- .../src/main/java/frc/robot/Main.java | 18 +- .../src/main/java/frc/robot/Robot.java | 104 +- .../src/main/java/frc/robot/Constants.java | 92 +- .../src/main/java/frc/robot/Main.java | 18 +- .../src/main/java/frc/robot/Robot.java | 144 +- .../java/frc/robot/sim/DrivetrainSim.java | 100 +- .../main/java/frc/robot/sim/VisionSim.java | 136 +- .../src/main/java/frc/robot/Constants.java | 180 +-- .../src/main/java/frc/robot/Main.java | 8 +- .../src/main/java/frc/robot/Robot.java | 222 +-- .../src/main/java/frc/robot/Vision.java | 214 +-- .../subsystems/drivetrain/SwerveDrive.java | 544 +++---- .../subsystems/drivetrain/SwerveDriveSim.java | 832 +++++----- .../subsystems/drivetrain/SwerveModule.java | 292 ++-- 279 files changed, 27417 insertions(+), 27415 deletions(-) diff --git a/build.gradle b/build.gradle index 58f9d98a94..d6ddaeaad4 100644 --- a/build.gradle +++ b/build.gradle @@ -55,6 +55,8 @@ spotless { } 
toggleOffOn() googleJavaFormat() + indentWithTabs(2) + indentWithSpaces(4) removeUnusedImports() trimTrailingWhitespace() endWithNewline() diff --git a/photon-core/src/main/java/org/photonvision/common/ProgramStatus.java b/photon-core/src/main/java/org/photonvision/common/ProgramStatus.java index c86671081f..a1ba6d091d 100644 --- a/photon-core/src/main/java/org/photonvision/common/ProgramStatus.java +++ b/photon-core/src/main/java/org/photonvision/common/ProgramStatus.java @@ -18,8 +18,8 @@ package org.photonvision.common; public enum ProgramStatus { - UHOH, - RUNNING, - RUNNING_NT, - RUNNING_NT_TARGET + UHOH, + RUNNING, + RUNNING_NT, + RUNNING_NT_TARGET } diff --git a/photon-core/src/main/java/org/photonvision/common/configuration/CameraConfiguration.java b/photon-core/src/main/java/org/photonvision/common/configuration/CameraConfiguration.java index a72df617bc..b2f8cf0f70 100644 --- a/photon-core/src/main/java/org/photonvision/common/configuration/CameraConfiguration.java +++ b/photon-core/src/main/java/org/photonvision/common/configuration/CameraConfiguration.java @@ -32,151 +32,151 @@ import org.photonvision.vision.processes.PipelineManager; public class CameraConfiguration { - private static final Logger logger = new Logger(CameraConfiguration.class, LogGroup.Camera); - - /** Name as reported by CSCore */ - public String baseName = ""; - - /** Name used to title the subfolder of this config */ - public String uniqueName = ""; - - /** User-set nickname */ - public String nickname = ""; - - /** Can be either path (ex /dev/videoX) or index (ex 1). */ - public String path = ""; - - @JsonIgnore public String[] otherPaths = {}; - - public CameraType cameraType = CameraType.UsbCamera; - public double FOV = 70; - public final List calibrations; - public int currentPipelineIndex = 0; - - public int streamIndex = 0; // 0 index means ports [1181, 1182], 1 means [1183, 1184], etc... 
- - @JsonIgnore // this ignores the pipes as we serialize them to their own subfolder - public List pipelineSettings = new ArrayList<>(); - - @JsonIgnore - public DriverModePipelineSettings driveModeSettings = new DriverModePipelineSettings(); - - public CameraConfiguration(String baseName, String path) { - this(baseName, baseName, baseName, path, new String[0]); - } - - public CameraConfiguration( - String baseName, String uniqueName, String nickname, String path, String[] alternates) { - this.baseName = baseName; - this.uniqueName = uniqueName; - this.nickname = nickname; - this.path = path; - this.calibrations = new ArrayList<>(); - this.otherPaths = alternates; - - logger.debug( - "Creating USB camera configuration for " - + cameraType - + " " - + baseName - + " (AKA " - + nickname - + ") at " - + path); - } - - @JsonCreator - public CameraConfiguration( - @JsonProperty("baseName") String baseName, - @JsonProperty("uniqueName") String uniqueName, - @JsonProperty("nickname") String nickname, - @JsonProperty("FOV") double FOV, - @JsonProperty("path") String path, - @JsonProperty("cameraType") CameraType cameraType, - @JsonProperty("calibration") List calibrations, - @JsonProperty("currentPipelineIndex") int currentPipelineIndex) { - this.baseName = baseName; - this.uniqueName = uniqueName; - this.nickname = nickname; - this.FOV = FOV; - this.path = path; - this.cameraType = cameraType; - this.calibrations = calibrations != null ? 
calibrations : new ArrayList<>(); - this.currentPipelineIndex = currentPipelineIndex; - - logger.debug( - "Creating camera configuration for " - + cameraType - + " " - + baseName - + " (AKA " - + nickname - + ") at " - + path); - } - - public void addPipelineSettings(List settings) { - for (var setting : settings) { - addPipelineSetting(setting); + private static final Logger logger = new Logger(CameraConfiguration.class, LogGroup.Camera); + + /** Name as reported by CSCore */ + public String baseName = ""; + + /** Name used to title the subfolder of this config */ + public String uniqueName = ""; + + /** User-set nickname */ + public String nickname = ""; + + /** Can be either path (ex /dev/videoX) or index (ex 1). */ + public String path = ""; + + @JsonIgnore public String[] otherPaths = {}; + + public CameraType cameraType = CameraType.UsbCamera; + public double FOV = 70; + public final List calibrations; + public int currentPipelineIndex = 0; + + public int streamIndex = 0; // 0 index means ports [1181, 1182], 1 means [1183, 1184], etc... + + @JsonIgnore // this ignores the pipes as we serialize them to their own subfolder + public List pipelineSettings = new ArrayList<>(); + + @JsonIgnore + public DriverModePipelineSettings driveModeSettings = new DriverModePipelineSettings(); + + public CameraConfiguration(String baseName, String path) { + this(baseName, baseName, baseName, path, new String[0]); } - } - public void addPipelineSetting(CVPipelineSettings setting) { - if (pipelineSettings.stream() - .anyMatch(s -> s.pipelineNickname.equalsIgnoreCase(setting.pipelineNickname))) { - logger.error("Could not name two pipelines the same thing! 
Renaming"); - setting.pipelineNickname += "_1"; // TODO verify this logic + public CameraConfiguration( + String baseName, String uniqueName, String nickname, String path, String[] alternates) { + this.baseName = baseName; + this.uniqueName = uniqueName; + this.nickname = nickname; + this.path = path; + this.calibrations = new ArrayList<>(); + this.otherPaths = alternates; + + logger.debug( + "Creating USB camera configuration for " + + cameraType + + " " + + baseName + + " (AKA " + + nickname + + ") at " + + path); } - if (pipelineSettings.stream().anyMatch(s -> s.pipelineIndex == setting.pipelineIndex)) { - var newIndex = pipelineSettings.size(); - logger.error("Could not insert two pipelines at same index! Changing to " + newIndex); - setting.pipelineIndex = newIndex; // TODO verify this logic + @JsonCreator + public CameraConfiguration( + @JsonProperty("baseName") String baseName, + @JsonProperty("uniqueName") String uniqueName, + @JsonProperty("nickname") String nickname, + @JsonProperty("FOV") double FOV, + @JsonProperty("path") String path, + @JsonProperty("cameraType") CameraType cameraType, + @JsonProperty("calibration") List calibrations, + @JsonProperty("currentPipelineIndex") int currentPipelineIndex) { + this.baseName = baseName; + this.uniqueName = uniqueName; + this.nickname = nickname; + this.FOV = FOV; + this.path = path; + this.cameraType = cameraType; + this.calibrations = calibrations != null ? 
calibrations : new ArrayList<>(); + this.currentPipelineIndex = currentPipelineIndex; + + logger.debug( + "Creating camera configuration for " + + cameraType + + " " + + baseName + + " (AKA " + + nickname + + ") at " + + path); } - pipelineSettings.add(setting); - pipelineSettings.sort(PipelineManager.PipelineSettingsIndexComparator); - } - - public void setPipelineSettings(List settings) { - pipelineSettings = settings; - } - - public void addCalibration(CameraCalibrationCoefficients calibration) { - logger.info("adding calibration " + calibration.resolution); - calibrations.stream() - .filter(it -> it.resolution.equals(calibration.resolution)) - .findAny() - .ifPresent(calibrations::remove); - calibrations.add(calibration); - } - - @Override - public String toString() { - return "CameraConfiguration [baseName=" - + baseName - + ", uniqueName=" - + uniqueName - + ", nickname=" - + nickname - + ", path=" - + path - + ", otherPaths=" - + Arrays.toString(otherPaths) - + ", cameraType=" - + cameraType - + ", FOV=" - + FOV - + ", calibrations=" - + calibrations - + ", currentPipelineIndex=" - + currentPipelineIndex - + ", streamIndex=" - + streamIndex - + ", pipelineSettings=" - + pipelineSettings - + ", driveModeSettings=" - + driveModeSettings - + "]"; - } + public void addPipelineSettings(List settings) { + for (var setting : settings) { + addPipelineSetting(setting); + } + } + + public void addPipelineSetting(CVPipelineSettings setting) { + if (pipelineSettings.stream() + .anyMatch(s -> s.pipelineNickname.equalsIgnoreCase(setting.pipelineNickname))) { + logger.error("Could not name two pipelines the same thing! Renaming"); + setting.pipelineNickname += "_1"; // TODO verify this logic + } + + if (pipelineSettings.stream().anyMatch(s -> s.pipelineIndex == setting.pipelineIndex)) { + var newIndex = pipelineSettings.size(); + logger.error("Could not insert two pipelines at same index! 
Changing to " + newIndex); + setting.pipelineIndex = newIndex; // TODO verify this logic + } + + pipelineSettings.add(setting); + pipelineSettings.sort(PipelineManager.PipelineSettingsIndexComparator); + } + + public void setPipelineSettings(List settings) { + pipelineSettings = settings; + } + + public void addCalibration(CameraCalibrationCoefficients calibration) { + logger.info("adding calibration " + calibration.resolution); + calibrations.stream() + .filter(it -> it.resolution.equals(calibration.resolution)) + .findAny() + .ifPresent(calibrations::remove); + calibrations.add(calibration); + } + + @Override + public String toString() { + return "CameraConfiguration [baseName=" + + baseName + + ", uniqueName=" + + uniqueName + + ", nickname=" + + nickname + + ", path=" + + path + + ", otherPaths=" + + Arrays.toString(otherPaths) + + ", cameraType=" + + cameraType + + ", FOV=" + + FOV + + ", calibrations=" + + calibrations + + ", currentPipelineIndex=" + + currentPipelineIndex + + ", streamIndex=" + + streamIndex + + ", pipelineSettings=" + + pipelineSettings + + ", driveModeSettings=" + + driveModeSettings + + "]"; + } } diff --git a/photon-core/src/main/java/org/photonvision/common/configuration/ConfigManager.java b/photon-core/src/main/java/org/photonvision/common/configuration/ConfigManager.java index e9738111fc..1d87dd17ab 100644 --- a/photon-core/src/main/java/org/photonvision/common/configuration/ConfigManager.java +++ b/photon-core/src/main/java/org/photonvision/common/configuration/ConfigManager.java @@ -37,261 +37,261 @@ import org.zeroturnaround.zip.ZipUtil; public class ConfigManager { - private static final Logger logger = new Logger(ConfigManager.class, LogGroup.General); - private static ConfigManager INSTANCE; - - public static final String HW_CFG_FNAME = "hardwareConfig.json"; - public static final String HW_SET_FNAME = "hardwareSettings.json"; - public static final String NET_SET_FNAME = "networkSettings.json"; - - final File 
configDirectoryFile; - - private final ConfigProvider m_provider; - - private final Thread settingsSaveThread; - private long saveRequestTimestamp = -1; - - enum ConfigSaveStrategy { - SQL, - LEGACY, - ATOMIC_ZIP - } - - // This logic decides which kind of ConfigManager we load as the default. If we want - // to switch back to the legacy config manager, change this constant - private static final ConfigSaveStrategy m_saveStrat = ConfigSaveStrategy.SQL; - - public static ConfigManager getInstance() { - if (INSTANCE == null) { - switch (m_saveStrat) { - case SQL: - INSTANCE = new ConfigManager(getRootFolder(), new SqlConfigProvider(getRootFolder())); - break; - case LEGACY: - INSTANCE = new ConfigManager(getRootFolder(), new LegacyConfigProvider(getRootFolder())); - break; - case ATOMIC_ZIP: - // not yet done, fall through - default: - break; - } + private static final Logger logger = new Logger(ConfigManager.class, LogGroup.General); + private static ConfigManager INSTANCE; + + public static final String HW_CFG_FNAME = "hardwareConfig.json"; + public static final String HW_SET_FNAME = "hardwareSettings.json"; + public static final String NET_SET_FNAME = "networkSettings.json"; + + final File configDirectoryFile; + + private final ConfigProvider m_provider; + + private final Thread settingsSaveThread; + private long saveRequestTimestamp = -1; + + enum ConfigSaveStrategy { + SQL, + LEGACY, + ATOMIC_ZIP + } + + // This logic decides which kind of ConfigManager we load as the default. 
If we want + // to switch back to the legacy config manager, change this constant + private static final ConfigSaveStrategy m_saveStrat = ConfigSaveStrategy.SQL; + + public static ConfigManager getInstance() { + if (INSTANCE == null) { + switch (m_saveStrat) { + case SQL: + INSTANCE = new ConfigManager(getRootFolder(), new SqlConfigProvider(getRootFolder())); + break; + case LEGACY: + INSTANCE = new ConfigManager(getRootFolder(), new LegacyConfigProvider(getRootFolder())); + break; + case ATOMIC_ZIP: + // not yet done, fall through + default: + break; + } + } + return INSTANCE; + } + + private void translateLegacyIfPresent(Path folderPath) { + if (!(m_provider instanceof SqlConfigProvider)) { + // Cannot import into SQL if we aren't in SQL mode rn + return; + } + + var maybeCams = Path.of(folderPath.toAbsolutePath().toString(), "cameras").toFile(); + var maybeCamsBak = Path.of(folderPath.toAbsolutePath().toString(), "cameras_backup").toFile(); + + if (maybeCams.exists() && maybeCams.isDirectory()) { + logger.info("Translating settings zip!"); + var legacy = new LegacyConfigProvider(folderPath); + legacy.load(); + var loadedConfig = legacy.getConfig(); + + // yeet our current cameras directory, not needed anymore + if (maybeCamsBak.exists()) FileUtils.deleteDirectory(maybeCamsBak.toPath()); + if (!maybeCams.canWrite()) { + maybeCams.setWritable(true); + } + + try { + Files.move(maybeCams.toPath(), maybeCamsBak.toPath(), StandardCopyOption.REPLACE_EXISTING); + } catch (IOException e) { + logger.error("Exception moving cameras to cameras_bak!", e); + + // Try to just copy from cams to cams-bak instead of moving? Windows sometimes needs us to + // do that + try { + org.apache.commons.io.FileUtils.copyDirectory(maybeCams, maybeCamsBak); + } catch (IOException e1) { + // So we can't move to cams_bak, and we can't copy and delete either? 
We just have to give + // up here on preserving the old folder + logger.error("Exception while backup-copying cameras to cameras_bak!", e); + e1.printStackTrace(); + } + + // Delete the directory because we were successfully able to load the config but were unable + // to save or copy the folder. + if (maybeCams.exists()) FileUtils.deleteDirectory(maybeCams.toPath()); + } + + // Save the same config out using SQL loader + var sql = new SqlConfigProvider(getRootFolder()); + sql.setConfig(loadedConfig); + sql.saveToDisk(); + } } - return INSTANCE; - } - private void translateLegacyIfPresent(Path folderPath) { - if (!(m_provider instanceof SqlConfigProvider)) { - // Cannot import into SQL if we aren't in SQL mode rn - return; + public static boolean saveUploadedSettingsZip(File uploadPath) { + // Unpack to /tmp/something/photonvision + var folderPath = Path.of(System.getProperty("java.io.tmpdir"), "photonvision").toFile(); + folderPath.mkdirs(); + ZipUtil.unpack(uploadPath, folderPath); + + // Nuke the current settings directory + FileUtils.deleteDirectory(getRootFolder()); + + // If there's a cameras folder in the upload, we know we need to import from the + // old style + var maybeCams = Path.of(folderPath.getAbsolutePath(), "cameras").toFile(); + if (maybeCams.exists() && maybeCams.isDirectory()) { + var legacy = new LegacyConfigProvider(folderPath.toPath()); + legacy.load(); + var loadedConfig = legacy.getConfig(); + + var sql = new SqlConfigProvider(getRootFolder()); + sql.setConfig(loadedConfig); + return sql.saveToDisk(); + } else { + // new structure -- just copy and save like we used to + try { + org.apache.commons.io.FileUtils.copyDirectory(folderPath, getRootFolder().toFile()); + logger.info("Copied settings successfully!"); + return true; + } catch (IOException e) { + logger.error("Exception copying uploaded settings!", e); + return false; + } + } } - var maybeCams = Path.of(folderPath.toAbsolutePath().toString(), "cameras").toFile(); - var maybeCamsBak = 
Path.of(folderPath.toAbsolutePath().toString(), "cameras_backup").toFile(); + public PhotonConfiguration getConfig() { + return m_provider.getConfig(); + } - if (maybeCams.exists() && maybeCams.isDirectory()) { - logger.info("Translating settings zip!"); - var legacy = new LegacyConfigProvider(folderPath); - legacy.load(); - var loadedConfig = legacy.getConfig(); + private static Path getRootFolder() { + return Path.of("photonvision_config"); + } - // yeet our current cameras directory, not needed anymore - if (maybeCamsBak.exists()) FileUtils.deleteDirectory(maybeCamsBak.toPath()); - if (!maybeCams.canWrite()) { - maybeCams.setWritable(true); - } + ConfigManager(Path configDirectory, ConfigProvider provider) { + this.configDirectoryFile = new File(configDirectory.toUri()); + m_provider = provider; - try { - Files.move(maybeCams.toPath(), maybeCamsBak.toPath(), StandardCopyOption.REPLACE_EXISTING); - } catch (IOException e) { - logger.error("Exception moving cameras to cameras_bak!", e); + settingsSaveThread = new Thread(this::saveAndWriteTask); + settingsSaveThread.start(); + } - // Try to just copy from cams to cams-bak instead of moving? Windows sometimes needs us to - // do that + public void load() { + translateLegacyIfPresent(this.configDirectoryFile.toPath()); + m_provider.load(); + } + + public void addCameraConfigurations(List sources) { + getConfig().addCameraConfigs(sources); + requestSave(); + } + + public void saveModule(CameraConfiguration config, String uniqueName) { + getConfig().addCameraConfig(uniqueName, config); + requestSave(); + } + + public File getSettingsFolderAsZip() { + File out = Path.of(System.getProperty("java.io.tmpdir"), "photonvision-settings.zip").toFile(); try { - org.apache.commons.io.FileUtils.copyDirectory(maybeCams, maybeCamsBak); - } catch (IOException e1) { - // So we can't move to cams_bak, and we can't copy and delete either? 
We just have to give - // up here on preserving the old folder - logger.error("Exception while backup-copying cameras to cameras_bak!", e); - e1.printStackTrace(); + ZipUtil.pack(configDirectoryFile, out); + } catch (Exception e) { + e.printStackTrace(); } + return out; + } + + public void setNetworkSettings(NetworkConfig networkConfig) { + getConfig().setNetworkConfig(networkConfig); + requestSave(); + } - // Delete the directory because we were successfully able to load the config but were unable - // to save or copy the folder. - if (maybeCams.exists()) FileUtils.deleteDirectory(maybeCams.toPath()); - } + public Path getLogsDir() { + return Path.of(configDirectoryFile.toString(), "logs"); + } + + public Path getCalibDir() { + return Path.of(configDirectoryFile.toString(), "calibImgs"); + } + + public static final String LOG_PREFIX = "photonvision-"; + public static final String LOG_EXT = ".log"; + public static final String LOG_DATE_TIME_FORMAT = "yyyy-M-d_hh-mm-ss"; + + public String taToLogFname(TemporalAccessor date) { + var dateString = DateTimeFormatter.ofPattern(LOG_DATE_TIME_FORMAT).format(date); + return LOG_PREFIX + dateString + LOG_EXT; + } + + public Date logFnameToDate(String fname) throws ParseException { + // Strip away known unneeded portions of the log file name + fname = fname.replace(LOG_PREFIX, "").replace(LOG_EXT, ""); + DateFormat format = new SimpleDateFormat(LOG_DATE_TIME_FORMAT); + return format.parse(fname); + } + + public Path getLogPath() { + var logFile = Path.of(this.getLogsDir().toString(), taToLogFname(LocalDateTime.now())).toFile(); + if (!logFile.getParentFile().exists()) logFile.getParentFile().mkdirs(); + return logFile.toPath(); + } + + public Path getImageSavePath() { + var imgFilePath = Path.of(configDirectoryFile.toString(), "imgSaves").toFile(); + if (!imgFilePath.exists()) imgFilePath.mkdirs(); + return imgFilePath.toPath(); + } + + public boolean saveUploadedHardwareConfig(Path uploadPath) { + return 
m_provider.saveUploadedHardwareConfig(uploadPath); + } + + public boolean saveUploadedHardwareSettings(Path uploadPath) { + return m_provider.saveUploadedHardwareSettings(uploadPath); + } - // Save the same config out using SQL loader - var sql = new SqlConfigProvider(getRootFolder()); - sql.setConfig(loadedConfig); - sql.saveToDisk(); + public boolean saveUploadedNetworkConfig(Path uploadPath) { + return m_provider.saveUploadedNetworkConfig(uploadPath); } - } - - public static boolean saveUploadedSettingsZip(File uploadPath) { - // Unpack to /tmp/something/photonvision - var folderPath = Path.of(System.getProperty("java.io.tmpdir"), "photonvision").toFile(); - folderPath.mkdirs(); - ZipUtil.unpack(uploadPath, folderPath); - - // Nuke the current settings directory - FileUtils.deleteDirectory(getRootFolder()); - - // If there's a cameras folder in the upload, we know we need to import from the - // old style - var maybeCams = Path.of(folderPath.getAbsolutePath(), "cameras").toFile(); - if (maybeCams.exists() && maybeCams.isDirectory()) { - var legacy = new LegacyConfigProvider(folderPath.toPath()); - legacy.load(); - var loadedConfig = legacy.getConfig(); - - var sql = new SqlConfigProvider(getRootFolder()); - sql.setConfig(loadedConfig); - return sql.saveToDisk(); - } else { - // new structure -- just copy and save like we used to - try { - org.apache.commons.io.FileUtils.copyDirectory(folderPath, getRootFolder().toFile()); - logger.info("Copied settings successfully!"); - return true; - } catch (IOException e) { - logger.error("Exception copying uploaded settings!", e); - return false; - } + + public boolean saveUploadedAprilTagFieldLayout(Path uploadPath) { + return m_provider.saveUploadedAprilTagFieldLayout(uploadPath); } - } - - public PhotonConfiguration getConfig() { - return m_provider.getConfig(); - } - - private static Path getRootFolder() { - return Path.of("photonvision_config"); - } - - ConfigManager(Path configDirectory, ConfigProvider provider) { - 
this.configDirectoryFile = new File(configDirectory.toUri()); - m_provider = provider; - - settingsSaveThread = new Thread(this::saveAndWriteTask); - settingsSaveThread.start(); - } - - public void load() { - translateLegacyIfPresent(this.configDirectoryFile.toPath()); - m_provider.load(); - } - - public void addCameraConfigurations(List sources) { - getConfig().addCameraConfigs(sources); - requestSave(); - } - - public void saveModule(CameraConfiguration config, String uniqueName) { - getConfig().addCameraConfig(uniqueName, config); - requestSave(); - } - - public File getSettingsFolderAsZip() { - File out = Path.of(System.getProperty("java.io.tmpdir"), "photonvision-settings.zip").toFile(); - try { - ZipUtil.pack(configDirectoryFile, out); - } catch (Exception e) { - e.printStackTrace(); + + public void requestSave() { + logger.trace("Requesting save..."); + saveRequestTimestamp = System.currentTimeMillis(); + } + + public void unloadCameraConfigs() { + this.getConfig().getCameraConfigurations().clear(); } - return out; - } - - public void setNetworkSettings(NetworkConfig networkConfig) { - getConfig().setNetworkConfig(networkConfig); - requestSave(); - } - - public Path getLogsDir() { - return Path.of(configDirectoryFile.toString(), "logs"); - } - - public Path getCalibDir() { - return Path.of(configDirectoryFile.toString(), "calibImgs"); - } - - public static final String LOG_PREFIX = "photonvision-"; - public static final String LOG_EXT = ".log"; - public static final String LOG_DATE_TIME_FORMAT = "yyyy-M-d_hh-mm-ss"; - - public String taToLogFname(TemporalAccessor date) { - var dateString = DateTimeFormatter.ofPattern(LOG_DATE_TIME_FORMAT).format(date); - return LOG_PREFIX + dateString + LOG_EXT; - } - - public Date logFnameToDate(String fname) throws ParseException { - // Strip away known unneeded portions of the log file name - fname = fname.replace(LOG_PREFIX, "").replace(LOG_EXT, ""); - DateFormat format = new SimpleDateFormat(LOG_DATE_TIME_FORMAT); - 
return format.parse(fname); - } - - public Path getLogPath() { - var logFile = Path.of(this.getLogsDir().toString(), taToLogFname(LocalDateTime.now())).toFile(); - if (!logFile.getParentFile().exists()) logFile.getParentFile().mkdirs(); - return logFile.toPath(); - } - - public Path getImageSavePath() { - var imgFilePath = Path.of(configDirectoryFile.toString(), "imgSaves").toFile(); - if (!imgFilePath.exists()) imgFilePath.mkdirs(); - return imgFilePath.toPath(); - } - - public boolean saveUploadedHardwareConfig(Path uploadPath) { - return m_provider.saveUploadedHardwareConfig(uploadPath); - } - - public boolean saveUploadedHardwareSettings(Path uploadPath) { - return m_provider.saveUploadedHardwareSettings(uploadPath); - } - - public boolean saveUploadedNetworkConfig(Path uploadPath) { - return m_provider.saveUploadedNetworkConfig(uploadPath); - } - - public boolean saveUploadedAprilTagFieldLayout(Path uploadPath) { - return m_provider.saveUploadedAprilTagFieldLayout(uploadPath); - } - - public void requestSave() { - logger.trace("Requesting save..."); - saveRequestTimestamp = System.currentTimeMillis(); - } - - public void unloadCameraConfigs() { - this.getConfig().getCameraConfigurations().clear(); - } - - public void clearConfig() { - logger.info("Clearing configuration!"); - m_provider.clearConfig(); - m_provider.saveToDisk(); - } - - public void saveToDisk() { - m_provider.saveToDisk(); - } - - private void saveAndWriteTask() { - // Only save if 1 second has past since the request was made - while (!Thread.currentThread().isInterrupted()) { - if (saveRequestTimestamp > 0 && (System.currentTimeMillis() - saveRequestTimestamp) > 1000L) { - saveRequestTimestamp = -1; - logger.debug("Saving to disk..."); - saveToDisk(); - } - - try { - Thread.sleep(1000); - } catch (InterruptedException e) { - logger.error("Exception waiting for settings semaphore", e); - } + + public void clearConfig() { + logger.info("Clearing configuration!"); + m_provider.clearConfig(); + 
m_provider.saveToDisk(); + } + + public void saveToDisk() { + m_provider.saveToDisk(); + } + + private void saveAndWriteTask() { + // Only save if 1 second has past since the request was made + while (!Thread.currentThread().isInterrupted()) { + if (saveRequestTimestamp > 0 && (System.currentTimeMillis() - saveRequestTimestamp) > 1000L) { + saveRequestTimestamp = -1; + logger.debug("Saving to disk..."); + saveToDisk(); + } + + try { + Thread.sleep(1000); + } catch (InterruptedException e) { + logger.error("Exception waiting for settings semaphore", e); + } + } } - } } diff --git a/photon-core/src/main/java/org/photonvision/common/configuration/ConfigProvider.java b/photon-core/src/main/java/org/photonvision/common/configuration/ConfigProvider.java index 1655283bf5..9964d0ca93 100644 --- a/photon-core/src/main/java/org/photonvision/common/configuration/ConfigProvider.java +++ b/photon-core/src/main/java/org/photonvision/common/configuration/ConfigProvider.java @@ -20,25 +20,25 @@ import java.nio.file.Path; public abstract class ConfigProvider { - protected PhotonConfiguration config; + protected PhotonConfiguration config; - abstract void load(); + abstract void load(); - abstract boolean saveToDisk(); + abstract boolean saveToDisk(); - PhotonConfiguration getConfig() { - return config; - } + PhotonConfiguration getConfig() { + return config; + } - public void clearConfig() { - config = new PhotonConfiguration(); - } + public void clearConfig() { + config = new PhotonConfiguration(); + } - public abstract boolean saveUploadedHardwareConfig(Path uploadPath); + public abstract boolean saveUploadedHardwareConfig(Path uploadPath); - public abstract boolean saveUploadedHardwareSettings(Path uploadPath); + public abstract boolean saveUploadedHardwareSettings(Path uploadPath); - public abstract boolean saveUploadedNetworkConfig(Path uploadPath); + public abstract boolean saveUploadedNetworkConfig(Path uploadPath); - public abstract boolean 
saveUploadedAprilTagFieldLayout(Path uploadPath); + public abstract boolean saveUploadedAprilTagFieldLayout(Path uploadPath); } diff --git a/photon-core/src/main/java/org/photonvision/common/configuration/HardwareConfig.java b/photon-core/src/main/java/org/photonvision/common/configuration/HardwareConfig.java index 9651932e34..2c8323f159 100644 --- a/photon-core/src/main/java/org/photonvision/common/configuration/HardwareConfig.java +++ b/photon-core/src/main/java/org/photonvision/common/configuration/HardwareConfig.java @@ -24,179 +24,179 @@ @JsonIgnoreProperties(ignoreUnknown = true) public class HardwareConfig { - public final String deviceName; - public final String deviceLogoPath; - public final String supportURL; + public final String deviceName; + public final String deviceLogoPath; + public final String supportURL; - // LED control - public final ArrayList ledPins; - public final String ledSetCommand; - public final boolean ledsCanDim; - public final ArrayList ledBrightnessRange; - public final String ledDimCommand; - public final String ledBlinkCommand; - public final ArrayList statusRGBPins; + // LED control + public final ArrayList ledPins; + public final String ledSetCommand; + public final boolean ledsCanDim; + public final ArrayList ledBrightnessRange; + public final String ledDimCommand; + public final String ledBlinkCommand; + public final ArrayList statusRGBPins; - // Metrics - public final String cpuTempCommand; - public final String cpuMemoryCommand; - public final String cpuUtilCommand; - public final String cpuThrottleReasonCmd; - public final String cpuUptimeCommand; - public final String gpuMemoryCommand; - public final String ramUtilCommand; - public final String gpuMemUsageCommand; - public final String diskUsageCommand; + // Metrics + public final String cpuTempCommand; + public final String cpuMemoryCommand; + public final String cpuUtilCommand; + public final String cpuThrottleReasonCmd; + public final String cpuUptimeCommand; + public 
final String gpuMemoryCommand; + public final String ramUtilCommand; + public final String gpuMemUsageCommand; + public final String diskUsageCommand; - // Device stuff - public final String restartHardwareCommand; - public final double vendorFOV; // -1 for unmanaged - public final List blacklistedResIndices; // this happens before the defaults are applied + // Device stuff + public final String restartHardwareCommand; + public final double vendorFOV; // -1 for unmanaged + public final List blacklistedResIndices; // this happens before the defaults are applied - public HardwareConfig() { - deviceName = ""; - deviceLogoPath = ""; - supportURL = ""; - ledPins = new ArrayList<>(); - ledSetCommand = ""; - ledsCanDim = false; - ledBrightnessRange = new ArrayList<>(); - statusRGBPins = new ArrayList<>(); - ledDimCommand = ""; + public HardwareConfig() { + deviceName = ""; + deviceLogoPath = ""; + supportURL = ""; + ledPins = new ArrayList<>(); + ledSetCommand = ""; + ledsCanDim = false; + ledBrightnessRange = new ArrayList<>(); + statusRGBPins = new ArrayList<>(); + ledDimCommand = ""; - cpuTempCommand = ""; - cpuMemoryCommand = ""; - cpuUtilCommand = ""; - cpuThrottleReasonCmd = ""; - cpuUptimeCommand = ""; - gpuMemoryCommand = ""; - ramUtilCommand = ""; - ledBlinkCommand = ""; - gpuMemUsageCommand = ""; - diskUsageCommand = ""; + cpuTempCommand = ""; + cpuMemoryCommand = ""; + cpuUtilCommand = ""; + cpuThrottleReasonCmd = ""; + cpuUptimeCommand = ""; + gpuMemoryCommand = ""; + ramUtilCommand = ""; + ledBlinkCommand = ""; + gpuMemUsageCommand = ""; + diskUsageCommand = ""; - restartHardwareCommand = ""; - vendorFOV = -1; - blacklistedResIndices = Collections.emptyList(); - } + restartHardwareCommand = ""; + vendorFOV = -1; + blacklistedResIndices = Collections.emptyList(); + } - @SuppressWarnings("unused") - public HardwareConfig( - String deviceName, - String deviceLogoPath, - String supportURL, - ArrayList ledPins, - String ledSetCommand, - boolean ledsCanDim, - 
ArrayList ledBrightnessRange, - String ledDimCommand, - String ledBlinkCommand, - ArrayList statusRGBPins, - String cpuTempCommand, - String cpuMemoryCommand, - String cpuUtilCommand, - String cpuThrottleReasonCmd, - String cpuUptimeCommand, - String gpuMemoryCommand, - String ramUtilCommand, - String gpuMemUsageCommand, - String diskUsageCommand, - String restartHardwareCommand, - double vendorFOV, - List blacklistedResIndices) { - this.deviceName = deviceName; - this.deviceLogoPath = deviceLogoPath; - this.supportURL = supportURL; - this.ledPins = ledPins; - this.ledSetCommand = ledSetCommand; - this.ledsCanDim = ledsCanDim; - this.ledBrightnessRange = ledBrightnessRange; - this.ledDimCommand = ledDimCommand; - this.ledBlinkCommand = ledBlinkCommand; - this.statusRGBPins = statusRGBPins; - this.cpuTempCommand = cpuTempCommand; - this.cpuMemoryCommand = cpuMemoryCommand; - this.cpuUtilCommand = cpuUtilCommand; - this.cpuThrottleReasonCmd = cpuThrottleReasonCmd; - this.cpuUptimeCommand = cpuUptimeCommand; - this.gpuMemoryCommand = gpuMemoryCommand; - this.ramUtilCommand = ramUtilCommand; - this.gpuMemUsageCommand = gpuMemUsageCommand; - this.diskUsageCommand = diskUsageCommand; - this.restartHardwareCommand = restartHardwareCommand; - this.vendorFOV = vendorFOV; - this.blacklistedResIndices = blacklistedResIndices; - } + @SuppressWarnings("unused") + public HardwareConfig( + String deviceName, + String deviceLogoPath, + String supportURL, + ArrayList ledPins, + String ledSetCommand, + boolean ledsCanDim, + ArrayList ledBrightnessRange, + String ledDimCommand, + String ledBlinkCommand, + ArrayList statusRGBPins, + String cpuTempCommand, + String cpuMemoryCommand, + String cpuUtilCommand, + String cpuThrottleReasonCmd, + String cpuUptimeCommand, + String gpuMemoryCommand, + String ramUtilCommand, + String gpuMemUsageCommand, + String diskUsageCommand, + String restartHardwareCommand, + double vendorFOV, + List blacklistedResIndices) { + this.deviceName = deviceName; 
+ this.deviceLogoPath = deviceLogoPath; + this.supportURL = supportURL; + this.ledPins = ledPins; + this.ledSetCommand = ledSetCommand; + this.ledsCanDim = ledsCanDim; + this.ledBrightnessRange = ledBrightnessRange; + this.ledDimCommand = ledDimCommand; + this.ledBlinkCommand = ledBlinkCommand; + this.statusRGBPins = statusRGBPins; + this.cpuTempCommand = cpuTempCommand; + this.cpuMemoryCommand = cpuMemoryCommand; + this.cpuUtilCommand = cpuUtilCommand; + this.cpuThrottleReasonCmd = cpuThrottleReasonCmd; + this.cpuUptimeCommand = cpuUptimeCommand; + this.gpuMemoryCommand = gpuMemoryCommand; + this.ramUtilCommand = ramUtilCommand; + this.gpuMemUsageCommand = gpuMemUsageCommand; + this.diskUsageCommand = diskUsageCommand; + this.restartHardwareCommand = restartHardwareCommand; + this.vendorFOV = vendorFOV; + this.blacklistedResIndices = blacklistedResIndices; + } - /** - * @return True if the FOV has been preset to a sane value, false otherwise - */ - public final boolean hasPresetFOV() { - return vendorFOV > 0; - } + /** + * @return True if the FOV has been preset to a sane value, false otherwise + */ + public final boolean hasPresetFOV() { + return vendorFOV > 0; + } - /** - * @return True if any command has been configured to a non-default empty, false otherwise - */ - public final boolean hasCommandsConfigured() { - return cpuTempCommand != "" - || cpuMemoryCommand != "" - || cpuUtilCommand != "" - || cpuThrottleReasonCmd != "" - || cpuUptimeCommand != "" - || gpuMemoryCommand != "" - || ramUtilCommand != "" - || ledBlinkCommand != "" - || gpuMemUsageCommand != "" - || diskUsageCommand != ""; - } + /** + * @return True if any command has been configured to a non-default empty, false otherwise + */ + public final boolean hasCommandsConfigured() { + return cpuTempCommand != "" + || cpuMemoryCommand != "" + || cpuUtilCommand != "" + || cpuThrottleReasonCmd != "" + || cpuUptimeCommand != "" + || gpuMemoryCommand != "" + || ramUtilCommand != "" + || ledBlinkCommand != 
"" + || gpuMemUsageCommand != "" + || diskUsageCommand != ""; + } - @Override - public String toString() { - return "HardwareConfig [deviceName=" - + deviceName - + ", deviceLogoPath=" - + deviceLogoPath - + ", supportURL=" - + supportURL - + ", ledPins=" - + ledPins - + ", ledSetCommand=" - + ledSetCommand - + ", ledsCanDim=" - + ledsCanDim - + ", ledBrightnessRange=" - + ledBrightnessRange - + ", ledDimCommand=" - + ledDimCommand - + ", ledBlinkCommand=" - + ledBlinkCommand - + ", statusRGBPins=" - + statusRGBPins - + ", cpuTempCommand=" - + cpuTempCommand - + ", cpuMemoryCommand=" - + cpuMemoryCommand - + ", cpuUtilCommand=" - + cpuUtilCommand - + ", cpuThrottleReasonCmd=" - + cpuThrottleReasonCmd - + ", cpuUptimeCommand=" - + cpuUptimeCommand - + ", gpuMemoryCommand=" - + gpuMemoryCommand - + ", ramUtilCommand=" - + ramUtilCommand - + ", gpuMemUsageCommand=" - + gpuMemUsageCommand - + ", diskUsageCommand=" - + diskUsageCommand - + ", restartHardwareCommand=" - + restartHardwareCommand - + ", vendorFOV=" - + vendorFOV - + ", blacklistedResIndices=" - + blacklistedResIndices - + "]"; - } + @Override + public String toString() { + return "HardwareConfig [deviceName=" + + deviceName + + ", deviceLogoPath=" + + deviceLogoPath + + ", supportURL=" + + supportURL + + ", ledPins=" + + ledPins + + ", ledSetCommand=" + + ledSetCommand + + ", ledsCanDim=" + + ledsCanDim + + ", ledBrightnessRange=" + + ledBrightnessRange + + ", ledDimCommand=" + + ledDimCommand + + ", ledBlinkCommand=" + + ledBlinkCommand + + ", statusRGBPins=" + + statusRGBPins + + ", cpuTempCommand=" + + cpuTempCommand + + ", cpuMemoryCommand=" + + cpuMemoryCommand + + ", cpuUtilCommand=" + + cpuUtilCommand + + ", cpuThrottleReasonCmd=" + + cpuThrottleReasonCmd + + ", cpuUptimeCommand=" + + cpuUptimeCommand + + ", gpuMemoryCommand=" + + gpuMemoryCommand + + ", ramUtilCommand=" + + ramUtilCommand + + ", gpuMemUsageCommand=" + + gpuMemUsageCommand + + ", diskUsageCommand=" + + diskUsageCommand + + ", 
restartHardwareCommand=" + + restartHardwareCommand + + ", vendorFOV=" + + vendorFOV + + ", blacklistedResIndices=" + + blacklistedResIndices + + "]"; + } } diff --git a/photon-core/src/main/java/org/photonvision/common/configuration/HardwareSettings.java b/photon-core/src/main/java/org/photonvision/common/configuration/HardwareSettings.java index 82ba3442a1..3ef424dfe2 100644 --- a/photon-core/src/main/java/org/photonvision/common/configuration/HardwareSettings.java +++ b/photon-core/src/main/java/org/photonvision/common/configuration/HardwareSettings.java @@ -18,10 +18,10 @@ package org.photonvision.common.configuration; public class HardwareSettings { - public int ledBrightnessPercentage = 100; + public int ledBrightnessPercentage = 100; - @Override - public String toString() { - return "HardwareSettings [ledBrightnessPercentage=" + ledBrightnessPercentage + "]"; - } + @Override + public String toString() { + return "HardwareSettings [ledBrightnessPercentage=" + ledBrightnessPercentage + "]"; + } } diff --git a/photon-core/src/main/java/org/photonvision/common/configuration/LegacyConfigProvider.java b/photon-core/src/main/java/org/photonvision/common/configuration/LegacyConfigProvider.java index 60b38cb002..74d5d472fe 100644 --- a/photon-core/src/main/java/org/photonvision/common/configuration/LegacyConfigProvider.java +++ b/photon-core/src/main/java/org/photonvision/common/configuration/LegacyConfigProvider.java @@ -43,445 +43,445 @@ import org.zeroturnaround.zip.ZipUtil; class LegacyConfigProvider extends ConfigProvider { - private static final Logger logger = new Logger(LegacyConfigProvider.class, LogGroup.General); - - public static final String HW_CFG_FNAME = "hardwareConfig.json"; - public static final String HW_SET_FNAME = "hardwareSettings.json"; - public static final String NET_SET_FNAME = "networkSettings.json"; - public static final String ATFL_SET_FNAME = "apriltagFieldLayout.json"; - - private PhotonConfiguration config; - private final File 
hardwareConfigFile; - private final File hardwareSettingsFile; - private final File networkConfigFile; - private final File camerasFolder; - private final File apriltagFieldLayoutFile; - - final File configDirectoryFile; - - private long saveRequestTimestamp = -1; - private final Thread settingsSaveThread; - - public static void saveUploadedSettingsZip(File uploadPath) { - var folderPath = Path.of(System.getProperty("java.io.tmpdir"), "photonvision").toFile(); - folderPath.mkdirs(); - ZipUtil.unpack(uploadPath, folderPath); - FileUtils.deleteDirectory(getRootFolder()); - try { - org.apache.commons.io.FileUtils.copyDirectory(folderPath, getRootFolder().toFile()); - logger.info("Copied settings successfully!"); - } catch (IOException e) { - logger.error("Exception copying uploaded settings!", e); + private static final Logger logger = new Logger(LegacyConfigProvider.class, LogGroup.General); + + public static final String HW_CFG_FNAME = "hardwareConfig.json"; + public static final String HW_SET_FNAME = "hardwareSettings.json"; + public static final String NET_SET_FNAME = "networkSettings.json"; + public static final String ATFL_SET_FNAME = "apriltagFieldLayout.json"; + + private PhotonConfiguration config; + private final File hardwareConfigFile; + private final File hardwareSettingsFile; + private final File networkConfigFile; + private final File camerasFolder; + private final File apriltagFieldLayoutFile; + + final File configDirectoryFile; + + private long saveRequestTimestamp = -1; + private final Thread settingsSaveThread; + + public static void saveUploadedSettingsZip(File uploadPath) { + var folderPath = Path.of(System.getProperty("java.io.tmpdir"), "photonvision").toFile(); + folderPath.mkdirs(); + ZipUtil.unpack(uploadPath, folderPath); + FileUtils.deleteDirectory(getRootFolder()); + try { + org.apache.commons.io.FileUtils.copyDirectory(folderPath, getRootFolder().toFile()); + logger.info("Copied settings successfully!"); + } catch (IOException e) { + 
logger.error("Exception copying uploaded settings!", e); + } } - } - - public PhotonConfiguration getConfig() { - return config; - } - - private static Path getRootFolder() { - return Path.of("photonvision_config"); - } - - protected LegacyConfigProvider(Path configDirectoryFile) { - this.configDirectoryFile = new File(configDirectoryFile.toUri()); - this.hardwareConfigFile = - new File(Path.of(configDirectoryFile.toString(), HW_CFG_FNAME).toUri()); - this.hardwareSettingsFile = - new File(Path.of(configDirectoryFile.toString(), HW_SET_FNAME).toUri()); - this.networkConfigFile = - new File(Path.of(configDirectoryFile.toString(), NET_SET_FNAME).toUri()); - this.apriltagFieldLayoutFile = - new File(Path.of(configDirectoryFile.toString(), ATFL_SET_FNAME).toUri()); - this.camerasFolder = new File(Path.of(configDirectoryFile.toString(), "cameras").toUri()); - - settingsSaveThread = new Thread(this::saveAndWriteTask); - settingsSaveThread.start(); - } - - @Override - public void load() { - logger.info("Loading settings..."); - if (!configDirectoryFile.exists()) { - if (configDirectoryFile.mkdirs()) { - logger.debug("Root config folder did not exist. 
Created!"); - } else { - logger.error("Failed to create root config folder!"); - } + + public PhotonConfiguration getConfig() { + return config; } - if (!configDirectoryFile.canWrite()) { - logger.debug("Making root dir writeable..."); - try { - var success = configDirectoryFile.setWritable(true); - if (success) logger.debug("Set root dir writeable!"); - else logger.error("Could not make root dir writeable!"); - } catch (SecurityException e) { - logger.error("Could not make root dir writeable!", e); - } + + private static Path getRootFolder() { + return Path.of("photonvision_config"); } - HardwareConfig hardwareConfig; - HardwareSettings hardwareSettings; - NetworkConfig networkConfig; - AprilTagFieldLayout atfl = null; - - if (hardwareConfigFile.exists()) { - try { - hardwareConfig = - JacksonUtils.deserialize(hardwareConfigFile.toPath(), HardwareConfig.class); - if (hardwareConfig == null) { - logger.error("Could not deserialize hardware config! Loading defaults"); - hardwareConfig = new HardwareConfig(); - } - } catch (IOException e) { - logger.error("Could not deserialize hardware config! Loading defaults"); - hardwareConfig = new HardwareConfig(); - } - } else { - logger.info("Hardware config does not exist! 
Loading defaults"); - hardwareConfig = new HardwareConfig(); + protected LegacyConfigProvider(Path configDirectoryFile) { + this.configDirectoryFile = new File(configDirectoryFile.toUri()); + this.hardwareConfigFile = + new File(Path.of(configDirectoryFile.toString(), HW_CFG_FNAME).toUri()); + this.hardwareSettingsFile = + new File(Path.of(configDirectoryFile.toString(), HW_SET_FNAME).toUri()); + this.networkConfigFile = + new File(Path.of(configDirectoryFile.toString(), NET_SET_FNAME).toUri()); + this.apriltagFieldLayoutFile = + new File(Path.of(configDirectoryFile.toString(), ATFL_SET_FNAME).toUri()); + this.camerasFolder = new File(Path.of(configDirectoryFile.toString(), "cameras").toUri()); + + settingsSaveThread = new Thread(this::saveAndWriteTask); + settingsSaveThread.start(); } - if (hardwareSettingsFile.exists()) { - try { - hardwareSettings = - JacksonUtils.deserialize(hardwareSettingsFile.toPath(), HardwareSettings.class); - if (hardwareSettings == null) { - logger.error("Could not deserialize hardware settings! Loading defaults"); - hardwareSettings = new HardwareSettings(); + @Override + public void load() { + logger.info("Loading settings..."); + if (!configDirectoryFile.exists()) { + if (configDirectoryFile.mkdirs()) { + logger.debug("Root config folder did not exist. Created!"); + } else { + logger.error("Failed to create root config folder!"); + } + } + if (!configDirectoryFile.canWrite()) { + logger.debug("Making root dir writeable..."); + try { + var success = configDirectoryFile.setWritable(true); + if (success) logger.debug("Set root dir writeable!"); + else logger.error("Could not make root dir writeable!"); + } catch (SecurityException e) { + logger.error("Could not make root dir writeable!", e); + } } - } catch (IOException e) { - logger.error("Could not deserialize hardware settings! Loading defaults"); - hardwareSettings = new HardwareSettings(); - } - } else { - logger.info("Hardware settings does not exist! 
Loading defaults"); - hardwareSettings = new HardwareSettings(); - } - if (networkConfigFile.exists()) { - try { - networkConfig = JacksonUtils.deserialize(networkConfigFile.toPath(), NetworkConfig.class); - if (networkConfig == null) { - logger.error("Could not deserialize network config! Loading defaults"); - networkConfig = new NetworkConfig(); + HardwareConfig hardwareConfig; + HardwareSettings hardwareSettings; + NetworkConfig networkConfig; + AprilTagFieldLayout atfl = null; + + if (hardwareConfigFile.exists()) { + try { + hardwareConfig = + JacksonUtils.deserialize(hardwareConfigFile.toPath(), HardwareConfig.class); + if (hardwareConfig == null) { + logger.error("Could not deserialize hardware config! Loading defaults"); + hardwareConfig = new HardwareConfig(); + } + } catch (IOException e) { + logger.error("Could not deserialize hardware config! Loading defaults"); + hardwareConfig = new HardwareConfig(); + } + } else { + logger.info("Hardware config does not exist! Loading defaults"); + hardwareConfig = new HardwareConfig(); } - } catch (IOException e) { - logger.error("Could not deserialize network config! Loading defaults"); - networkConfig = new NetworkConfig(); - } - } else { - logger.info("Network config file does not exist! Loading defaults"); - networkConfig = new NetworkConfig(); - } - if (!camerasFolder.exists()) { - if (camerasFolder.mkdirs()) { - logger.debug("Cameras config folder did not exist. Created!"); - } else { - logger.error("Failed to create cameras config folder!"); - } - } + if (hardwareSettingsFile.exists()) { + try { + hardwareSettings = + JacksonUtils.deserialize(hardwareSettingsFile.toPath(), HardwareSettings.class); + if (hardwareSettings == null) { + logger.error("Could not deserialize hardware settings! Loading defaults"); + hardwareSettings = new HardwareSettings(); + } + } catch (IOException e) { + logger.error("Could not deserialize hardware settings! 
Loading defaults"); + hardwareSettings = new HardwareSettings(); + } + } else { + logger.info("Hardware settings does not exist! Loading defaults"); + hardwareSettings = new HardwareSettings(); + } - if (apriltagFieldLayoutFile.exists()) { - try { - atfl = - JacksonUtils.deserialize(apriltagFieldLayoutFile.toPath(), AprilTagFieldLayout.class); - if (atfl == null) { - logger.error("Could not deserialize apriltag field layout! (still null)"); + if (networkConfigFile.exists()) { + try { + networkConfig = JacksonUtils.deserialize(networkConfigFile.toPath(), NetworkConfig.class); + if (networkConfig == null) { + logger.error("Could not deserialize network config! Loading defaults"); + networkConfig = new NetworkConfig(); + } + } catch (IOException e) { + logger.error("Could not deserialize network config! Loading defaults"); + networkConfig = new NetworkConfig(); + } + } else { + logger.info("Network config file does not exist! Loading defaults"); + networkConfig = new NetworkConfig(); } - } catch (IOException e) { - logger.error("Could not deserialize apriltag field layout!", e); - atfl = null; // not required, nice to be explicit - } - } - if (atfl == null) { - logger.info("Loading default apriltags for 2023 field..."); - try { - atfl = AprilTagFields.kDefaultField.loadAprilTagLayoutField(); - } catch (UncheckedIOException e) { - logger.error("Error loading WPILib field", e); - atfl = null; - } - if (atfl == null) { - // what do we even do here lmao -- wpilib built-in should always work - logger.error("Field layout is *still* null??????"); - atfl = new AprilTagFieldLayout(List.of(), 1, 1); - } - } - HashMap cameraConfigurations = loadCameraConfigs(); + if (!camerasFolder.exists()) { + if (camerasFolder.mkdirs()) { + logger.debug("Cameras config folder did not exist. 
Created!"); + } else { + logger.error("Failed to create cameras config folder!"); + } + } - this.config = - new PhotonConfiguration( - hardwareConfig, hardwareSettings, networkConfig, atfl, cameraConfigurations); - } + if (apriltagFieldLayoutFile.exists()) { + try { + atfl = + JacksonUtils.deserialize(apriltagFieldLayoutFile.toPath(), AprilTagFieldLayout.class); + if (atfl == null) { + logger.error("Could not deserialize apriltag field layout! (still null)"); + } + } catch (IOException e) { + logger.error("Could not deserialize apriltag field layout!", e); + atfl = null; // not required, nice to be explicit + } + } + if (atfl == null) { + logger.info("Loading default apriltags for 2023 field..."); + try { + atfl = AprilTagFields.kDefaultField.loadAprilTagLayoutField(); + } catch (UncheckedIOException e) { + logger.error("Error loading WPILib field", e); + atfl = null; + } + if (atfl == null) { + // what do we even do here lmao -- wpilib built-in should always work + logger.error("Field layout is *still* null??????"); + atfl = new AprilTagFieldLayout(List.of(), 1, 1); + } + } - @Override - public boolean saveToDisk() { - // Delete old configs - FileUtils.deleteDirectory(camerasFolder.toPath()); + HashMap cameraConfigurations = loadCameraConfigs(); - try { - JacksonUtils.serialize(networkConfigFile.toPath(), config.getNetworkConfig()); - } catch (IOException e) { - logger.error("Could not save network config!", e); - } - try { - JacksonUtils.serialize(hardwareSettingsFile.toPath(), config.getHardwareSettings()); - } catch (IOException e) { - logger.error("Could not save hardware config!", e); + this.config = + new PhotonConfiguration( + hardwareConfig, hardwareSettings, networkConfig, atfl, cameraConfigurations); } - // save all of our cameras - var cameraConfigMap = config.getCameraConfigurations(); - for (var subdirName : cameraConfigMap.keySet()) { - var camConfig = cameraConfigMap.get(subdirName); - var subdir = Path.of(camerasFolder.toPath().toString(), 
subdirName); - - if (!subdir.toFile().exists()) { - // TODO: check for error - subdir.toFile().mkdirs(); - } - - try { - JacksonUtils.serialize(Path.of(subdir.toString(), "config.json"), camConfig); - } catch (IOException e) { - logger.error("Could not save config.json for " + subdir, e); - } - - try { - JacksonUtils.serialize( - Path.of(subdir.toString(), "drivermode.json"), camConfig.driveModeSettings); - } catch (IOException e) { - logger.error("Could not save drivermode.json for " + subdir, e); - } - - for (var pipe : camConfig.pipelineSettings) { - var pipePath = Path.of(subdir.toString(), "pipelines", pipe.pipelineNickname + ".json"); - - if (!pipePath.getParent().toFile().exists()) { - // TODO: check for error - pipePath.getParent().toFile().mkdirs(); - } + @Override + public boolean saveToDisk() { + // Delete old configs + FileUtils.deleteDirectory(camerasFolder.toPath()); try { - JacksonUtils.serialize(pipePath, pipe); + JacksonUtils.serialize(networkConfigFile.toPath(), config.getNetworkConfig()); } catch (IOException e) { - logger.error("Could not save " + pipe.pipelineNickname + ".json!", e); + logger.error("Could not save network config!", e); } - } - } - logger.info("Settings saved!"); - return false; // TODO, deal with this. Do I need to? 
- } - - private HashMap loadCameraConfigs() { - HashMap loadedConfigurations = new HashMap<>(); - try { - var subdirectories = - Files.list(camerasFolder.toPath()) - .filter(f -> f.toFile().isDirectory()) - .collect(Collectors.toList()); - - for (var subdir : subdirectories) { - var cameraConfigPath = Path.of(subdir.toString(), "config.json"); - CameraConfiguration loadedConfig = null; try { - loadedConfig = - JacksonUtils.deserialize( - cameraConfigPath.toAbsolutePath(), CameraConfiguration.class); - } catch (JsonProcessingException e) { - logger.error("Camera config deserialization failed!", e); - e.printStackTrace(); + JacksonUtils.serialize(hardwareSettingsFile.toPath(), config.getHardwareSettings()); + } catch (IOException e) { + logger.error("Could not save hardware config!", e); } - if (loadedConfig == null) { // If the file could not be deserialized - logger.warn("Could not load camera " + subdir + "'s config.json! Loading " + "default"); - continue; // TODO how do we later try to load this camera if it gets reconnected? 
+ + // save all of our cameras + var cameraConfigMap = config.getCameraConfigurations(); + for (var subdirName : cameraConfigMap.keySet()) { + var camConfig = cameraConfigMap.get(subdirName); + var subdir = Path.of(camerasFolder.toPath().toString(), subdirName); + + if (!subdir.toFile().exists()) { + // TODO: check for error + subdir.toFile().mkdirs(); + } + + try { + JacksonUtils.serialize(Path.of(subdir.toString(), "config.json"), camConfig); + } catch (IOException e) { + logger.error("Could not save config.json for " + subdir, e); + } + + try { + JacksonUtils.serialize( + Path.of(subdir.toString(), "drivermode.json"), camConfig.driveModeSettings); + } catch (IOException e) { + logger.error("Could not save drivermode.json for " + subdir, e); + } + + for (var pipe : camConfig.pipelineSettings) { + var pipePath = Path.of(subdir.toString(), "pipelines", pipe.pipelineNickname + ".json"); + + if (!pipePath.getParent().toFile().exists()) { + // TODO: check for error + pipePath.getParent().toFile().mkdirs(); + } + + try { + JacksonUtils.serialize(pipePath, pipe); + } catch (IOException e) { + logger.error("Could not save " + pipe.pipelineNickname + ".json!", e); + } + } } + logger.info("Settings saved!"); + return false; // TODO, deal with this. Do I need to? + } - // At this point we have only loaded the base stuff - // We still need to deserialize pipelines, as well as - // driver mode settings - var driverModeFile = Path.of(subdir.toString(), "drivermode.json"); - DriverModePipelineSettings driverMode; + private HashMap loadCameraConfigs() { + HashMap loadedConfigurations = new HashMap<>(); try { - driverMode = - JacksonUtils.deserialize( - driverModeFile.toAbsolutePath(), DriverModePipelineSettings.class); - } catch (JsonProcessingException e) { - logger.error("Could not deserialize drivermode.json! 
Loading defaults"); - logger.debug(Arrays.toString(e.getStackTrace())); - driverMode = new DriverModePipelineSettings(); + var subdirectories = + Files.list(camerasFolder.toPath()) + .filter(f -> f.toFile().isDirectory()) + .collect(Collectors.toList()); + + for (var subdir : subdirectories) { + var cameraConfigPath = Path.of(subdir.toString(), "config.json"); + CameraConfiguration loadedConfig = null; + try { + loadedConfig = + JacksonUtils.deserialize( + cameraConfigPath.toAbsolutePath(), CameraConfiguration.class); + } catch (JsonProcessingException e) { + logger.error("Camera config deserialization failed!", e); + e.printStackTrace(); + } + if (loadedConfig == null) { // If the file could not be deserialized + logger.warn("Could not load camera " + subdir + "'s config.json! Loading " + "default"); + continue; // TODO how do we later try to load this camera if it gets reconnected? + } + + // At this point we have only loaded the base stuff + // We still need to deserialize pipelines, as well as + // driver mode settings + var driverModeFile = Path.of(subdir.toString(), "drivermode.json"); + DriverModePipelineSettings driverMode; + try { + driverMode = + JacksonUtils.deserialize( + driverModeFile.toAbsolutePath(), DriverModePipelineSettings.class); + } catch (JsonProcessingException e) { + logger.error("Could not deserialize drivermode.json! Loading defaults"); + logger.debug(Arrays.toString(e.getStackTrace())); + driverMode = new DriverModePipelineSettings(); + } + if (driverMode == null) { + logger.warn( + "Could not load camera " + subdir + "'s drivermode.json! Loading" + " default"); + driverMode = new DriverModePipelineSettings(); + } + + // Load pipelines by mapping the files within the pipelines subdir + // to their deserialized equivalents + var pipelineSubdirectory = Path.of(subdir.toString(), "pipelines"); + List settings = + pipelineSubdirectory.toFile().exists() + ? 
Files.list(pipelineSubdirectory) + .filter(p -> p.toFile().isFile()) + .map( + p -> { + var relativizedFilePath = + configDirectoryFile + .toPath() + .toAbsolutePath() + .relativize(p) + .toString(); + try { + return JacksonUtils.deserialize(p, CVPipelineSettings.class); + } catch (JsonProcessingException e) { + logger.error("Exception while deserializing " + relativizedFilePath, e); + } catch (IOException e) { + logger.warn( + "Could not load pipeline at " + + relativizedFilePath + + "! Skipping..."); + } + return null; + }) + .filter(Objects::nonNull) + .collect(Collectors.toList()) + : Collections.emptyList(); + + loadedConfig.driveModeSettings = driverMode; + loadedConfig.addPipelineSettings(settings); + + loadedConfigurations.put(subdir.toFile().getName(), loadedConfig); + } + } catch (IOException e) { + logger.error("Error loading camera configs!", e); } - if (driverMode == null) { - logger.warn( - "Could not load camera " + subdir + "'s drivermode.json! Loading" + " default"); - driverMode = new DriverModePipelineSettings(); + return loadedConfigurations; + } + + public void addCameraConfigurations(List sources) { + getConfig().addCameraConfigs(sources); + requestSave(); + } + + public void saveModule(CameraConfiguration config, String uniqueName) { + getConfig().addCameraConfig(uniqueName, config); + requestSave(); + } + + public File getSettingsFolderAsZip() { + File out = Path.of(System.getProperty("java.io.tmpdir"), "photonvision-settings.zip").toFile(); + try { + ZipUtil.pack(configDirectoryFile, out); + } catch (Exception e) { + e.printStackTrace(); } + return out; + } - // Load pipelines by mapping the files within the pipelines subdir - // to their deserialized equivalents - var pipelineSubdirectory = Path.of(subdir.toString(), "pipelines"); - List settings = - pipelineSubdirectory.toFile().exists() - ? 
Files.list(pipelineSubdirectory) - .filter(p -> p.toFile().isFile()) - .map( - p -> { - var relativizedFilePath = - configDirectoryFile - .toPath() - .toAbsolutePath() - .relativize(p) - .toString(); - try { - return JacksonUtils.deserialize(p, CVPipelineSettings.class); - } catch (JsonProcessingException e) { - logger.error("Exception while deserializing " + relativizedFilePath, e); - } catch (IOException e) { - logger.warn( - "Could not load pipeline at " - + relativizedFilePath - + "! Skipping..."); - } - return null; - }) - .filter(Objects::nonNull) - .collect(Collectors.toList()) - : Collections.emptyList(); - - loadedConfig.driveModeSettings = driverMode; - loadedConfig.addPipelineSettings(settings); - - loadedConfigurations.put(subdir.toFile().getName(), loadedConfig); - } - } catch (IOException e) { - logger.error("Error loading camera configs!", e); + public void setNetworkSettings(NetworkConfig networkConfig) { + getConfig().setNetworkConfig(networkConfig); + requestSave(); } - return loadedConfigurations; - } - - public void addCameraConfigurations(List sources) { - getConfig().addCameraConfigs(sources); - requestSave(); - } - - public void saveModule(CameraConfiguration config, String uniqueName) { - getConfig().addCameraConfig(uniqueName, config); - requestSave(); - } - - public File getSettingsFolderAsZip() { - File out = Path.of(System.getProperty("java.io.tmpdir"), "photonvision-settings.zip").toFile(); - try { - ZipUtil.pack(configDirectoryFile, out); - } catch (Exception e) { - e.printStackTrace(); + + public Path getLogsDir() { + return Path.of(configDirectoryFile.toString(), "logs"); + } + + public Path getCalibDir() { + return Path.of(configDirectoryFile.toString(), "calibImgs"); } - return out; - } - - public void setNetworkSettings(NetworkConfig networkConfig) { - getConfig().setNetworkConfig(networkConfig); - requestSave(); - } - - public Path getLogsDir() { - return Path.of(configDirectoryFile.toString(), "logs"); - } - - public Path 
getCalibDir() { - return Path.of(configDirectoryFile.toString(), "calibImgs"); - } - - public static final String LOG_PREFIX = "photonvision-"; - public static final String LOG_EXT = ".log"; - public static final String LOG_DATE_TIME_FORMAT = "yyyy-M-d_hh-mm-ss"; - - public String taToLogFname(TemporalAccessor date) { - var dateString = DateTimeFormatter.ofPattern(LOG_DATE_TIME_FORMAT).format(date); - return LOG_PREFIX + dateString + LOG_EXT; - } - - public Date logFnameToDate(String fname) throws ParseException { - // Strip away known unneeded portions of the log file name - fname = fname.replace(LOG_PREFIX, "").replace(LOG_EXT, ""); - DateFormat format = new SimpleDateFormat(LOG_DATE_TIME_FORMAT); - return format.parse(fname); - } - - public Path getLogPath() { - var logFile = Path.of(this.getLogsDir().toString(), taToLogFname(LocalDateTime.now())).toFile(); - if (!logFile.getParentFile().exists()) logFile.getParentFile().mkdirs(); - return logFile.toPath(); - } - - public Path getImageSavePath() { - var imgFilePath = Path.of(configDirectoryFile.toString(), "imgSaves").toFile(); - if (!imgFilePath.exists()) imgFilePath.mkdirs(); - return imgFilePath.toPath(); - } - - public Path getHardwareConfigFile() { - return this.hardwareConfigFile.toPath(); - } - - public Path getHardwareSettingsFile() { - return this.hardwareSettingsFile.toPath(); - } - - public Path getNetworkConfigFile() { - return this.networkConfigFile.toPath(); - } - - public Path getAprilTagFieldLayoutFile() { - return this.apriltagFieldLayoutFile.toPath(); - } - - @Override - public boolean saveUploadedHardwareConfig(Path uploadPath) { - return FileUtils.replaceFile(uploadPath, this.getHardwareConfigFile()); - } - - @Override - public boolean saveUploadedHardwareSettings(Path uploadPath) { - return FileUtils.replaceFile(uploadPath, this.getHardwareSettingsFile()); - } - - @Override - public boolean saveUploadedNetworkConfig(Path uploadPath) { - return FileUtils.replaceFile(uploadPath, 
this.getNetworkConfigFile()); - } - - @Override - public boolean saveUploadedAprilTagFieldLayout(Path uploadPath) { - return FileUtils.replaceFile(uploadPath, this.getAprilTagFieldLayoutFile()); - } - - public void requestSave() { - logger.trace("Requesting save..."); - saveRequestTimestamp = System.currentTimeMillis(); - } - - private void saveAndWriteTask() { - // Only save if 1 second has past since the request was made - while (!Thread.currentThread().isInterrupted()) { - if (saveRequestTimestamp > 0 && (System.currentTimeMillis() - saveRequestTimestamp) > 1000L) { - saveRequestTimestamp = -1; - logger.debug("Saving to disk..."); - saveToDisk(); - } - - try { - Thread.sleep(1000); - } catch (InterruptedException e) { - logger.error("Exception waiting for settings semaphore", e); - } + + public static final String LOG_PREFIX = "photonvision-"; + public static final String LOG_EXT = ".log"; + public static final String LOG_DATE_TIME_FORMAT = "yyyy-M-d_hh-mm-ss"; + + public String taToLogFname(TemporalAccessor date) { + var dateString = DateTimeFormatter.ofPattern(LOG_DATE_TIME_FORMAT).format(date); + return LOG_PREFIX + dateString + LOG_EXT; + } + + public Date logFnameToDate(String fname) throws ParseException { + // Strip away known unneeded portions of the log file name + fname = fname.replace(LOG_PREFIX, "").replace(LOG_EXT, ""); + DateFormat format = new SimpleDateFormat(LOG_DATE_TIME_FORMAT); + return format.parse(fname); + } + + public Path getLogPath() { + var logFile = Path.of(this.getLogsDir().toString(), taToLogFname(LocalDateTime.now())).toFile(); + if (!logFile.getParentFile().exists()) logFile.getParentFile().mkdirs(); + return logFile.toPath(); + } + + public Path getImageSavePath() { + var imgFilePath = Path.of(configDirectoryFile.toString(), "imgSaves").toFile(); + if (!imgFilePath.exists()) imgFilePath.mkdirs(); + return imgFilePath.toPath(); + } + + public Path getHardwareConfigFile() { + return this.hardwareConfigFile.toPath(); + } + + public 
Path getHardwareSettingsFile() { + return this.hardwareSettingsFile.toPath(); + } + + public Path getNetworkConfigFile() { + return this.networkConfigFile.toPath(); } - } - public void unloadCameraConfigs() { - this.config.getCameraConfigurations().clear(); - } + public Path getAprilTagFieldLayoutFile() { + return this.apriltagFieldLayoutFile.toPath(); + } + + @Override + public boolean saveUploadedHardwareConfig(Path uploadPath) { + return FileUtils.replaceFile(uploadPath, this.getHardwareConfigFile()); + } + + @Override + public boolean saveUploadedHardwareSettings(Path uploadPath) { + return FileUtils.replaceFile(uploadPath, this.getHardwareSettingsFile()); + } + + @Override + public boolean saveUploadedNetworkConfig(Path uploadPath) { + return FileUtils.replaceFile(uploadPath, this.getNetworkConfigFile()); + } + + @Override + public boolean saveUploadedAprilTagFieldLayout(Path uploadPath) { + return FileUtils.replaceFile(uploadPath, this.getAprilTagFieldLayoutFile()); + } + + public void requestSave() { + logger.trace("Requesting save..."); + saveRequestTimestamp = System.currentTimeMillis(); + } + + private void saveAndWriteTask() { + // Only save if 1 second has past since the request was made + while (!Thread.currentThread().isInterrupted()) { + if (saveRequestTimestamp > 0 && (System.currentTimeMillis() - saveRequestTimestamp) > 1000L) { + saveRequestTimestamp = -1; + logger.debug("Saving to disk..."); + saveToDisk(); + } + + try { + Thread.sleep(1000); + } catch (InterruptedException e) { + logger.error("Exception waiting for settings semaphore", e); + } + } + } + + public void unloadCameraConfigs() { + this.config.getCameraConfigurations().clear(); + } } diff --git a/photon-core/src/main/java/org/photonvision/common/configuration/NetworkConfig.java b/photon-core/src/main/java/org/photonvision/common/configuration/NetworkConfig.java index 4af3317f14..36e1efe8e3 100644 --- a/photon-core/src/main/java/org/photonvision/common/configuration/NetworkConfig.java 
+++ b/photon-core/src/main/java/org/photonvision/common/configuration/NetworkConfig.java @@ -30,118 +30,118 @@ import org.photonvision.common.util.file.JacksonUtils; public class NetworkConfig { - // Can be an integer team number, or an IP address - public String ntServerAddress = "0"; - public NetworkMode connectionType = NetworkMode.DHCP; - public String staticIp = ""; - public String hostname = "photonvision"; - public boolean runNTServer = false; - public boolean shouldManage; + // Can be an integer team number, or an IP address + public String ntServerAddress = "0"; + public NetworkMode connectionType = NetworkMode.DHCP; + public String staticIp = ""; + public String hostname = "photonvision"; + public boolean runNTServer = false; + public boolean shouldManage; - @JsonIgnore public static final String NM_IFACE_STRING = "${interface}"; - @JsonIgnore public static final String NM_IP_STRING = "${ipaddr}"; + @JsonIgnore public static final String NM_IFACE_STRING = "${interface}"; + @JsonIgnore public static final String NM_IP_STRING = "${ipaddr}"; - public String networkManagerIface; - public String setStaticCommand = - "nmcli con mod ${interface} ipv4.addresses ${ipaddr}/8 ipv4.method \"manual\" ipv6.method \"disabled\""; - public String setDHCPcommand = - "nmcli con mod ${interface} ipv4.method \"auto\" ipv6.method \"disabled\""; + public String networkManagerIface; + public String setStaticCommand = + "nmcli con mod ${interface} ipv4.addresses ${ipaddr}/8 ipv4.method \"manual\" ipv6.method \"disabled\""; + public String setDHCPcommand = + "nmcli con mod ${interface} ipv4.method \"auto\" ipv6.method \"disabled\""; - public NetworkConfig() { - if (Platform.isLinux()) { - // Default to the name of the first Ethernet connection. 
Otherwise, "Wired connection 1" is a - // reasonable guess - this.networkManagerIface = - NetworkUtils.getAllWiredInterfaces().stream() - .map(it -> it.connName) - .findFirst() - .orElse("Wired connection 1"); - } + public NetworkConfig() { + if (Platform.isLinux()) { + // Default to the name of the first Ethernet connection. Otherwise, "Wired connection 1" is a + // reasonable guess + this.networkManagerIface = + NetworkUtils.getAllWiredInterfaces().stream() + .map(it -> it.connName) + .findFirst() + .orElse("Wired connection 1"); + } - // We can (usually) manage networking on Linux devices, and if we can, we should try to. Command - // line inhibitions happen at a level above this class - setShouldManage(deviceCanManageNetwork()); - } + // We can (usually) manage networking on Linux devices, and if we can, we should try to. Command + // line inhibitions happen at a level above this class + setShouldManage(deviceCanManageNetwork()); + } - @JsonCreator - public NetworkConfig( - @JsonProperty("ntServerAddress") @JsonAlias({"ntServerAddress", "teamNumber"}) - String ntServerAddress, - @JsonProperty("connectionType") NetworkMode connectionType, - @JsonProperty("staticIp") String staticIp, - @JsonProperty("hostname") String hostname, - @JsonProperty("runNTServer") boolean runNTServer, - @JsonProperty("shouldManage") boolean shouldManage, - @JsonProperty("networkManagerIface") String networkManagerIface, - @JsonProperty("setStaticCommand") String setStaticCommand, - @JsonProperty("setDHCPcommand") String setDHCPcommand) { - this.ntServerAddress = ntServerAddress; - this.connectionType = connectionType; - this.staticIp = staticIp; - this.hostname = hostname; - this.runNTServer = runNTServer; - this.networkManagerIface = networkManagerIface; - this.setStaticCommand = setStaticCommand; - this.setDHCPcommand = setDHCPcommand; - setShouldManage(shouldManage); - } + @JsonCreator + public NetworkConfig( + @JsonProperty("ntServerAddress") @JsonAlias({"ntServerAddress", 
"teamNumber"}) + String ntServerAddress, + @JsonProperty("connectionType") NetworkMode connectionType, + @JsonProperty("staticIp") String staticIp, + @JsonProperty("hostname") String hostname, + @JsonProperty("runNTServer") boolean runNTServer, + @JsonProperty("shouldManage") boolean shouldManage, + @JsonProperty("networkManagerIface") String networkManagerIface, + @JsonProperty("setStaticCommand") String setStaticCommand, + @JsonProperty("setDHCPcommand") String setDHCPcommand) { + this.ntServerAddress = ntServerAddress; + this.connectionType = connectionType; + this.staticIp = staticIp; + this.hostname = hostname; + this.runNTServer = runNTServer; + this.networkManagerIface = networkManagerIface; + this.setStaticCommand = setStaticCommand; + this.setDHCPcommand = setDHCPcommand; + setShouldManage(shouldManage); + } - public Map toHashMap() { - try { - var ret = new ObjectMapper().convertValue(this, JacksonUtils.UIMap.class); - ret.put("canManage", this.deviceCanManageNetwork()); - return ret; - } catch (Exception e) { - e.printStackTrace(); - return new HashMap<>(); + public Map toHashMap() { + try { + var ret = new ObjectMapper().convertValue(this, JacksonUtils.UIMap.class); + ret.put("canManage", this.deviceCanManageNetwork()); + return ret; + } catch (Exception e) { + e.printStackTrace(); + return new HashMap<>(); + } } - } - @JsonIgnore - public String getPhysicalInterfaceName() { - return NetworkUtils.getNMinfoForConnName(this.networkManagerIface).devName; - } + @JsonIgnore + public String getPhysicalInterfaceName() { + return NetworkUtils.getNMinfoForConnName(this.networkManagerIface).devName; + } - @JsonIgnore - public String getEscapedInterfaceName() { - return "\"" + networkManagerIface + "\""; - } + @JsonIgnore + public String getEscapedInterfaceName() { + return "\"" + networkManagerIface + "\""; + } - @JsonIgnore - public boolean shouldManage() { - return this.shouldManage; - } + @JsonIgnore + public boolean shouldManage() { + return 
this.shouldManage; + } - @JsonIgnore - public void setShouldManage(boolean shouldManage) { - this.shouldManage = shouldManage && this.deviceCanManageNetwork(); - } + @JsonIgnore + public void setShouldManage(boolean shouldManage) { + this.shouldManage = shouldManage && this.deviceCanManageNetwork(); + } - @JsonIgnore - private boolean deviceCanManageNetwork() { - return Platform.isLinux(); - } + @JsonIgnore + private boolean deviceCanManageNetwork() { + return Platform.isLinux(); + } - @Override - public String toString() { - return "NetworkConfig [serverAddr=" - + ntServerAddress - + ", connectionType=" - + connectionType - + ", staticIp=" - + staticIp - + ", hostname=" - + hostname - + ", runNTServer=" - + runNTServer - + ", networkManagerIface=" - + networkManagerIface - + ", setStaticCommand=" - + setStaticCommand - + ", setDHCPcommand=" - + setDHCPcommand - + ", shouldManage=" - + shouldManage - + "]"; - } + @Override + public String toString() { + return "NetworkConfig [serverAddr=" + + ntServerAddress + + ", connectionType=" + + connectionType + + ", staticIp=" + + staticIp + + ", hostname=" + + hostname + + ", runNTServer=" + + runNTServer + + ", networkManagerIface=" + + networkManagerIface + + ", setStaticCommand=" + + setStaticCommand + + ", setDHCPcommand=" + + setDHCPcommand + + ", shouldManage=" + + shouldManage + + "]"; + } } diff --git a/photon-core/src/main/java/org/photonvision/common/configuration/PhotonConfiguration.java b/photon-core/src/main/java/org/photonvision/common/configuration/PhotonConfiguration.java index a823ada5fb..fac672636c 100644 --- a/photon-core/src/main/java/org/photonvision/common/configuration/PhotonConfiguration.java +++ b/photon-core/src/main/java/org/photonvision/common/configuration/PhotonConfiguration.java @@ -33,146 +33,146 @@ import org.photonvision.vision.processes.VisionSource; public class PhotonConfiguration { - private final HardwareConfig hardwareConfig; - private final HardwareSettings hardwareSettings; - 
private NetworkConfig networkConfig; - private AprilTagFieldLayout atfl; - private HashMap cameraConfigurations; - - public PhotonConfiguration( - HardwareConfig hardwareConfig, - HardwareSettings hardwareSettings, - NetworkConfig networkConfig, - AprilTagFieldLayout atfl) { - this(hardwareConfig, hardwareSettings, networkConfig, atfl, new HashMap<>()); - } - - public PhotonConfiguration( - HardwareConfig hardwareConfig, - HardwareSettings hardwareSettings, - NetworkConfig networkConfig, - AprilTagFieldLayout atfl, - HashMap cameraConfigurations) { - this.hardwareConfig = hardwareConfig; - this.hardwareSettings = hardwareSettings; - this.networkConfig = networkConfig; - this.cameraConfigurations = cameraConfigurations; - this.atfl = atfl; - } - - public PhotonConfiguration() { - this( - new HardwareConfig(), - new HardwareSettings(), - new NetworkConfig(), - new AprilTagFieldLayout(List.of(), 0, 0)); - } - - public HardwareConfig getHardwareConfig() { - return hardwareConfig; - } - - public NetworkConfig getNetworkConfig() { - return networkConfig; - } - - public HardwareSettings getHardwareSettings() { - return hardwareSettings; - } - - public AprilTagFieldLayout getApriltagFieldLayout() { - return atfl; - } - - public void setApriltagFieldLayout(AprilTagFieldLayout atfl) { - this.atfl = atfl; - } - - public void setNetworkConfig(NetworkConfig networkConfig) { - this.networkConfig = networkConfig; - } - - public HashMap getCameraConfigurations() { - return cameraConfigurations; - } - - public void addCameraConfigs(Collection sources) { - for (var s : sources) { - addCameraConfig(s.getCameraConfiguration()); - } - } - - public void addCameraConfig(CameraConfiguration config) { - addCameraConfig(config.uniqueName, config); - } - - public void addCameraConfig(String name, CameraConfiguration config) { - cameraConfigurations.put(name, config); - } - - public Map toHashMap() { - Map map = new HashMap<>(); - var settingsSubmap = new HashMap(); - - // Hack active 
interfaces into networkSettings - var netConfigMap = networkConfig.toHashMap(); - netConfigMap.put("networkInterfaceNames", NetworkUtils.getAllWiredInterfaces()); - - settingsSubmap.put("networkSettings", netConfigMap); - - map.put( - "cameraSettings", - VisionModuleManager.getInstance().getModules().stream() - .map(VisionModule::toUICameraConfig) - .map(SerializationUtils::objectToHashMap) - .collect(Collectors.toList())); - - var lightingConfig = new UILightingConfig(); - lightingConfig.brightness = hardwareSettings.ledBrightnessPercentage; - lightingConfig.supported = !hardwareConfig.ledPins.isEmpty(); - settingsSubmap.put("lighting", SerializationUtils.objectToHashMap(lightingConfig)); - // General Settings - var generalSubmap = new HashMap(); - generalSubmap.put("version", PhotonVersion.versionString); - generalSubmap.put( - "gpuAcceleration", - LibCameraJNI.isSupported() - ? "Zerocopy Libcamera on " + LibCameraJNI.getSensorModel().getFriendlyName() - : ""); // TODO add support for other types of GPU accel - generalSubmap.put("hardwareModel", hardwareConfig.deviceName); - generalSubmap.put("hardwarePlatform", Platform.getPlatformName()); - settingsSubmap.put("general", generalSubmap); - // AprilTagFieldLayout - settingsSubmap.put("atfl", this.atfl); - - map.put( - "cameraSettings", - VisionModuleManager.getInstance().getModules().stream() - .map(VisionModule::toUICameraConfig) - .map(SerializationUtils::objectToHashMap) - .collect(Collectors.toList())); - map.put("settings", settingsSubmap); - - return map; - } - - public static class UILightingConfig { - public int brightness = 0; - public boolean supported = true; - } - - public static class UICameraConfiguration { - @SuppressWarnings("unused") - public double fov; - - public String nickname; - public HashMap currentPipelineSettings; - public int currentPipelineIndex; - public List pipelineNicknames; - public HashMap> videoFormatList; - public int outputStreamPort; - public int inputStreamPort; - public 
List> calibrations; - public boolean isFovConfigurable = true; - } + private final HardwareConfig hardwareConfig; + private final HardwareSettings hardwareSettings; + private NetworkConfig networkConfig; + private AprilTagFieldLayout atfl; + private HashMap cameraConfigurations; + + public PhotonConfiguration( + HardwareConfig hardwareConfig, + HardwareSettings hardwareSettings, + NetworkConfig networkConfig, + AprilTagFieldLayout atfl) { + this(hardwareConfig, hardwareSettings, networkConfig, atfl, new HashMap<>()); + } + + public PhotonConfiguration( + HardwareConfig hardwareConfig, + HardwareSettings hardwareSettings, + NetworkConfig networkConfig, + AprilTagFieldLayout atfl, + HashMap cameraConfigurations) { + this.hardwareConfig = hardwareConfig; + this.hardwareSettings = hardwareSettings; + this.networkConfig = networkConfig; + this.cameraConfigurations = cameraConfigurations; + this.atfl = atfl; + } + + public PhotonConfiguration() { + this( + new HardwareConfig(), + new HardwareSettings(), + new NetworkConfig(), + new AprilTagFieldLayout(List.of(), 0, 0)); + } + + public HardwareConfig getHardwareConfig() { + return hardwareConfig; + } + + public NetworkConfig getNetworkConfig() { + return networkConfig; + } + + public HardwareSettings getHardwareSettings() { + return hardwareSettings; + } + + public AprilTagFieldLayout getApriltagFieldLayout() { + return atfl; + } + + public void setApriltagFieldLayout(AprilTagFieldLayout atfl) { + this.atfl = atfl; + } + + public void setNetworkConfig(NetworkConfig networkConfig) { + this.networkConfig = networkConfig; + } + + public HashMap getCameraConfigurations() { + return cameraConfigurations; + } + + public void addCameraConfigs(Collection sources) { + for (var s : sources) { + addCameraConfig(s.getCameraConfiguration()); + } + } + + public void addCameraConfig(CameraConfiguration config) { + addCameraConfig(config.uniqueName, config); + } + + public void addCameraConfig(String name, CameraConfiguration config) { + 
cameraConfigurations.put(name, config); + } + + public Map toHashMap() { + Map map = new HashMap<>(); + var settingsSubmap = new HashMap(); + + // Hack active interfaces into networkSettings + var netConfigMap = networkConfig.toHashMap(); + netConfigMap.put("networkInterfaceNames", NetworkUtils.getAllWiredInterfaces()); + + settingsSubmap.put("networkSettings", netConfigMap); + + map.put( + "cameraSettings", + VisionModuleManager.getInstance().getModules().stream() + .map(VisionModule::toUICameraConfig) + .map(SerializationUtils::objectToHashMap) + .collect(Collectors.toList())); + + var lightingConfig = new UILightingConfig(); + lightingConfig.brightness = hardwareSettings.ledBrightnessPercentage; + lightingConfig.supported = !hardwareConfig.ledPins.isEmpty(); + settingsSubmap.put("lighting", SerializationUtils.objectToHashMap(lightingConfig)); + // General Settings + var generalSubmap = new HashMap(); + generalSubmap.put("version", PhotonVersion.versionString); + generalSubmap.put( + "gpuAcceleration", + LibCameraJNI.isSupported() + ? 
"Zerocopy Libcamera on " + LibCameraJNI.getSensorModel().getFriendlyName() + : ""); // TODO add support for other types of GPU accel + generalSubmap.put("hardwareModel", hardwareConfig.deviceName); + generalSubmap.put("hardwarePlatform", Platform.getPlatformName()); + settingsSubmap.put("general", generalSubmap); + // AprilTagFieldLayout + settingsSubmap.put("atfl", this.atfl); + + map.put( + "cameraSettings", + VisionModuleManager.getInstance().getModules().stream() + .map(VisionModule::toUICameraConfig) + .map(SerializationUtils::objectToHashMap) + .collect(Collectors.toList())); + map.put("settings", settingsSubmap); + + return map; + } + + public static class UILightingConfig { + public int brightness = 0; + public boolean supported = true; + } + + public static class UICameraConfiguration { + @SuppressWarnings("unused") + public double fov; + + public String nickname; + public HashMap currentPipelineSettings; + public int currentPipelineIndex; + public List pipelineNicknames; + public HashMap> videoFormatList; + public int outputStreamPort; + public int inputStreamPort; + public List> calibrations; + public boolean isFovConfigurable = true; + } } diff --git a/photon-core/src/main/java/org/photonvision/common/configuration/SqlConfigProvider.java b/photon-core/src/main/java/org/photonvision/common/configuration/SqlConfigProvider.java index 3b9253e088..8a0c755ad8 100644 --- a/photon-core/src/main/java/org/photonvision/common/configuration/SqlConfigProvider.java +++ b/photon-core/src/main/java/org/photonvision/common/configuration/SqlConfigProvider.java @@ -45,463 +45,463 @@ *

Global has one row per global config file (like hardware settings and network settings) */ public class SqlConfigProvider extends ConfigProvider { - private final Logger logger = new Logger(SqlConfigProvider.class, LogGroup.Config); - - static class TableKeys { - static final String CAM_UNIQUE_NAME = "unique_name"; - static final String CONFIG_JSON = "config_json"; - static final String DRIVERMODE_JSON = "drivermode_json"; - static final String PIPELINE_JSONS = "pipeline_jsons"; - - static final String NETWORK_CONFIG = "networkConfig"; - static final String HARDWARE_CONFIG = "hardwareConfig"; - static final String HARDWARE_SETTINGS = "hardwareSettings"; - static final String ATFL_CONFIG_FILE = "apriltagFieldLayout"; - } - - private static final String dbName = "photon.sqlite"; - private final String dbPath; - - private final Object m_mutex = new Object(); - private final File rootFolder; - - public SqlConfigProvider(Path rootFolder) { - this.rootFolder = rootFolder.toFile(); - dbPath = Path.of(rootFolder.toString(), dbName).toAbsolutePath().toString(); - logger.debug("Using database " + dbPath); - initDatabase(); - } - - public PhotonConfiguration getConfig() { - if (config == null) { - logger.warn("CONFIG IS NULL!"); + private final Logger logger = new Logger(SqlConfigProvider.class, LogGroup.Config); + + static class TableKeys { + static final String CAM_UNIQUE_NAME = "unique_name"; + static final String CONFIG_JSON = "config_json"; + static final String DRIVERMODE_JSON = "drivermode_json"; + static final String PIPELINE_JSONS = "pipeline_jsons"; + + static final String NETWORK_CONFIG = "networkConfig"; + static final String HARDWARE_CONFIG = "hardwareConfig"; + static final String HARDWARE_SETTINGS = "hardwareSettings"; + static final String ATFL_CONFIG_FILE = "apriltagFieldLayout"; } - return config; - } - - private Connection createConn() { - String url = "jdbc:sqlite:" + dbPath; - - try { - var conn = DriverManager.getConnection(url); - 
conn.setAutoCommit(false); - return conn; - } catch (SQLException e) { - logger.error("Error creating connection", e); - return null; + + private static final String dbName = "photon.sqlite"; + private final String dbPath; + + private final Object m_mutex = new Object(); + private final File rootFolder; + + public SqlConfigProvider(Path rootFolder) { + this.rootFolder = rootFolder.toFile(); + dbPath = Path.of(rootFolder.toString(), dbName).toAbsolutePath().toString(); + logger.debug("Using database " + dbPath); + initDatabase(); } - } - - private void tryCommit(Connection conn) { - try { - conn.commit(); - } catch (SQLException e) { - logger.error("Err committing changes: ", e); - try { - conn.rollback(); - } catch (SQLException e1) { - logger.error("Err rolling back changes: ", e); - } + + public PhotonConfiguration getConfig() { + if (config == null) { + logger.warn("CONFIG IS NULL!"); + } + return config; } - } - private void initDatabase() { - // Make sure root dir exists + private Connection createConn() { + String url = "jdbc:sqlite:" + dbPath; - if (!rootFolder.exists()) { - if (rootFolder.mkdirs()) { - logger.debug("Root config folder did not exist. Created!"); - } else { - logger.error("Failed to create root config folder!"); - } + try { + var conn = DriverManager.getConnection(url); + conn.setAutoCommit(false); + return conn; + } catch (SQLException e) { + logger.error("Error creating connection", e); + return null; + } } - Connection conn = null; - Statement createGlobalTableStatement = null, createCameraTableStatement = null; - try { - conn = createConn(); - if (conn == null) { - logger.error("No connection, cannot init db"); - return; - } - - // Create global settings table. 
Just a dumb table with list of jsons and their - // name - try { - createGlobalTableStatement = conn.createStatement(); - String sql = - "CREATE TABLE IF NOT EXISTS global (\n" - + " filename TINYTEXT PRIMARY KEY,\n" - + " contents mediumtext NOT NULL\n" - + ");"; - createGlobalTableStatement.execute(sql); - } catch (SQLException e) { - logger.error("Err creating global table", e); - } - - // Create cameras table, key is the camera unique name - try { - createCameraTableStatement = conn.createStatement(); - var sql = - "CREATE TABLE IF NOT EXISTS cameras (\n" - + " unique_name TINYTEXT PRIMARY KEY,\n" - + " config_json text NOT NULL,\n" - + " drivermode_json text NOT NULL,\n" - + " pipeline_jsons mediumtext NOT NULL\n" - + ");"; - createCameraTableStatement.execute(sql); - } catch (SQLException e) { - logger.error("Err creating cameras table", e); - } - - this.tryCommit(conn); - } finally { - try { - if (createGlobalTableStatement != null) createGlobalTableStatement.close(); - if (createCameraTableStatement != null) createCameraTableStatement.close(); - if (conn != null) conn.close(); - } catch (SQLException e) { - e.printStackTrace(); - } + private void tryCommit(Connection conn) { + try { + conn.commit(); + } catch (SQLException e) { + logger.error("Err committing changes: ", e); + try { + conn.rollback(); + } catch (SQLException e1) { + logger.error("Err rolling back changes: ", e); + } + } } - } - - @Override - public boolean saveToDisk() { - logger.debug("Saving to disk"); - var conn = createConn(); - if (conn == null) return false; - - synchronized (m_mutex) { - if (config == null) { - logger.error("Config null! Cannot save"); - return false; - } - - saveCameras(conn); - saveGlobal(conn); - tryCommit(conn); - - try { - conn.close(); - } catch (SQLException e) { - // TODO, does the file still save if the SQL connection isn't closed correctly? If so, - // return false here. 
- logger.error("SQL Err closing connection while saving to disk: ", e); - } + + private void initDatabase() { + // Make sure root dir exists + + if (!rootFolder.exists()) { + if (rootFolder.mkdirs()) { + logger.debug("Root config folder did not exist. Created!"); + } else { + logger.error("Failed to create root config folder!"); + } + } + + Connection conn = null; + Statement createGlobalTableStatement = null, createCameraTableStatement = null; + try { + conn = createConn(); + if (conn == null) { + logger.error("No connection, cannot init db"); + return; + } + + // Create global settings table. Just a dumb table with list of jsons and their + // name + try { + createGlobalTableStatement = conn.createStatement(); + String sql = + "CREATE TABLE IF NOT EXISTS global (\n" + + " filename TINYTEXT PRIMARY KEY,\n" + + " contents mediumtext NOT NULL\n" + + ");"; + createGlobalTableStatement.execute(sql); + } catch (SQLException e) { + logger.error("Err creating global table", e); + } + + // Create cameras table, key is the camera unique name + try { + createCameraTableStatement = conn.createStatement(); + var sql = + "CREATE TABLE IF NOT EXISTS cameras (\n" + + " unique_name TINYTEXT PRIMARY KEY,\n" + + " config_json text NOT NULL,\n" + + " drivermode_json text NOT NULL,\n" + + " pipeline_jsons mediumtext NOT NULL\n" + + ");"; + createCameraTableStatement.execute(sql); + } catch (SQLException e) { + logger.error("Err creating cameras table", e); + } + + this.tryCommit(conn); + } finally { + try { + if (createGlobalTableStatement != null) createGlobalTableStatement.close(); + if (createCameraTableStatement != null) createCameraTableStatement.close(); + if (conn != null) conn.close(); + } catch (SQLException e) { + e.printStackTrace(); + } + } + } + + @Override + public boolean saveToDisk() { + logger.debug("Saving to disk"); + var conn = createConn(); + if (conn == null) return false; + + synchronized (m_mutex) { + if (config == null) { + logger.error("Config null! 
Cannot save"); + return false; + } + + saveCameras(conn); + saveGlobal(conn); + tryCommit(conn); + + try { + conn.close(); + } catch (SQLException e) { + // TODO, does the file still save if the SQL connection isn't closed correctly? If so, + // return false here. + logger.error("SQL Err closing connection while saving to disk: ", e); + } + } + + logger.info("Settings saved!"); + return true; + } + + @Override + public void load() { + logger.debug("Loading config..."); + var conn = createConn(); + if (conn == null) return; + + synchronized (m_mutex) { + HardwareConfig hardwareConfig; + HardwareSettings hardwareSettings; + NetworkConfig networkConfig; + AprilTagFieldLayout atfl; + + try { + hardwareConfig = + JacksonUtils.deserialize( + getOneConfigFile(conn, TableKeys.HARDWARE_CONFIG), HardwareConfig.class); + } catch (IOException e) { + logger.error("Could not deserialize hardware config! Loading defaults"); + hardwareConfig = new HardwareConfig(); + } + + try { + hardwareSettings = + JacksonUtils.deserialize( + getOneConfigFile(conn, TableKeys.HARDWARE_SETTINGS), HardwareSettings.class); + } catch (IOException e) { + logger.error("Could not deserialize hardware settings! Loading defaults"); + hardwareSettings = new HardwareSettings(); + } + + try { + networkConfig = + JacksonUtils.deserialize( + getOneConfigFile(conn, TableKeys.NETWORK_CONFIG), NetworkConfig.class); + } catch (IOException e) { + logger.error("Could not deserialize network config! Loading defaults"); + networkConfig = new NetworkConfig(); + } + + try { + atfl = + JacksonUtils.deserialize( + getOneConfigFile(conn, TableKeys.ATFL_CONFIG_FILE), AprilTagFieldLayout.class); + } catch (IOException e) { + logger.error("Could not deserialize apriltag layout! 
Loading defaults"); + try { + atfl = AprilTagFields.kDefaultField.loadAprilTagLayoutField(); + } catch (UncheckedIOException e2) { + logger.error("Error loading WPILib field", e); + atfl = null; + } + if (atfl == null) { + // what do we even do here lmao -- wpilib should always work + logger.error("Field layout is *still* null??????"); + atfl = new AprilTagFieldLayout(List.of(), 1, 1); + } + } + + var cams = loadCameraConfigs(conn); + + try { + conn.close(); + } catch (SQLException e) { + logger.error("SQL Err closing connection while loading: ", e); + } + + this.config = + new PhotonConfiguration(hardwareConfig, hardwareSettings, networkConfig, atfl, cams); + } } - logger.info("Settings saved!"); - return true; - } - - @Override - public void load() { - logger.debug("Loading config..."); - var conn = createConn(); - if (conn == null) return; - - synchronized (m_mutex) { - HardwareConfig hardwareConfig; - HardwareSettings hardwareSettings; - NetworkConfig networkConfig; - AprilTagFieldLayout atfl; - - try { - hardwareConfig = - JacksonUtils.deserialize( - getOneConfigFile(conn, TableKeys.HARDWARE_CONFIG), HardwareConfig.class); - } catch (IOException e) { - logger.error("Could not deserialize hardware config! Loading defaults"); - hardwareConfig = new HardwareConfig(); - } - - try { - hardwareSettings = - JacksonUtils.deserialize( - getOneConfigFile(conn, TableKeys.HARDWARE_SETTINGS), HardwareSettings.class); - } catch (IOException e) { - logger.error("Could not deserialize hardware settings! Loading defaults"); - hardwareSettings = new HardwareSettings(); - } - - try { - networkConfig = - JacksonUtils.deserialize( - getOneConfigFile(conn, TableKeys.NETWORK_CONFIG), NetworkConfig.class); - } catch (IOException e) { - logger.error("Could not deserialize network config! 
Loading defaults"); - networkConfig = new NetworkConfig(); - } - - try { - atfl = - JacksonUtils.deserialize( - getOneConfigFile(conn, TableKeys.ATFL_CONFIG_FILE), AprilTagFieldLayout.class); - } catch (IOException e) { - logger.error("Could not deserialize apriltag layout! Loading defaults"); + private String getOneConfigFile(Connection conn, String filename) { + // Query every single row of the global settings db + PreparedStatement query = null; try { - atfl = AprilTagFields.kDefaultField.loadAprilTagLayoutField(); - } catch (UncheckedIOException e2) { - logger.error("Error loading WPILib field", e); - atfl = null; + query = + conn.prepareStatement("SELECT contents FROM global where filename=\"" + filename + "\""); + + var result = query.executeQuery(); + + while (result.next()) { + return result.getString("contents"); + } + } catch (SQLException e) { + logger.error("SQL Err getting file " + filename, e); + } finally { + try { + if (query != null) query.close(); + } catch (SQLException e) { + logger.error("SQL Err closing config file query " + filename, e); + } } - if (atfl == null) { - // what do we even do here lmao -- wpilib should always work - logger.error("Field layout is *still* null??????"); - atfl = new AprilTagFieldLayout(List.of(), 1, 1); + + return ""; + } + + private void saveCameras(Connection conn) { + try { + // Replace this camera's row with the new settings + var sqlString = + "REPLACE INTO cameras (unique_name, config_json, drivermode_json, pipeline_jsons) VALUES " + + "(?,?,?,?);"; + + for (var c : config.getCameraConfigurations().entrySet()) { + PreparedStatement statement = conn.prepareStatement(sqlString); + + var config = c.getValue(); + statement.setString(1, c.getKey()); + statement.setString(2, JacksonUtils.serializeToString(config)); + statement.setString(3, JacksonUtils.serializeToString(config.driveModeSettings)); + + // Serializing a list of abstract classes sucks. 
Instead, make it into an array + // of strings, which we can later unpack back into individual settings + List settings = + config.pipelineSettings.stream() + .map( + it -> { + try { + return JacksonUtils.serializeToString(it); + } catch (IOException e) { + e.printStackTrace(); + return null; + } + }) + .filter(Objects::nonNull) + .collect(Collectors.toList()); + statement.setString(4, JacksonUtils.serializeToString(settings)); + + statement.executeUpdate(); + } + } catch (SQLException | IOException e) { + logger.error("Err saving cameras", e); + try { + conn.rollback(); + } catch (SQLException e1) { + logger.error("Err rolling back changes: ", e); + } } - } + } - var cams = loadCameraConfigs(conn); + private void addFile(PreparedStatement ps, String key, String value) throws SQLException { + ps.setString(1, key); + ps.setString(2, value); + } + + private void saveGlobal(Connection conn) { + PreparedStatement statement1 = null; + PreparedStatement statement2 = null; + PreparedStatement statement3 = null; + try { + // Replace this camera's row with the new settings + var sqlString = "REPLACE INTO global (filename, contents) VALUES " + "(?,?);"; + + statement1 = conn.prepareStatement(sqlString); + addFile( + statement1, + TableKeys.HARDWARE_SETTINGS, + JacksonUtils.serializeToString(config.getHardwareSettings())); + statement1.executeUpdate(); + + statement2 = conn.prepareStatement(sqlString); + addFile( + statement2, + TableKeys.NETWORK_CONFIG, + JacksonUtils.serializeToString(config.getNetworkConfig())); + statement2.executeUpdate(); + statement2.close(); + + statement3 = conn.prepareStatement(sqlString); + addFile( + statement3, + TableKeys.HARDWARE_CONFIG, + JacksonUtils.serializeToString(config.getHardwareConfig())); + statement3.executeUpdate(); + statement3.close(); + + } catch (SQLException | IOException e) { + logger.error("Err saving global", e); + try { + conn.rollback(); + } catch (SQLException e1) { + logger.error("Err rolling back changes: ", e); + } + 
} finally { + try { + if (statement1 != null) statement1.close(); + if (statement2 != null) statement2.close(); + if (statement3 != null) statement3.close(); + } catch (SQLException e) { + logger.error("SQL Err closing global settings query ", e); + } + } + } - try { - conn.close(); - } catch (SQLException e) { - logger.error("SQL Err closing connection while loading: ", e); - } + private boolean saveOneFile(String fname, Path path) { + Connection conn = null; + PreparedStatement statement1 = null; - this.config = - new PhotonConfiguration(hardwareConfig, hardwareSettings, networkConfig, atfl, cams); + try { + conn = createConn(); + if (conn == null) { + return false; + } + + // Replace this camera's row with the new settings + var sqlString = "REPLACE INTO global (filename, contents) VALUES " + "(?,?);"; + + statement1 = conn.prepareStatement(sqlString); + addFile(statement1, fname, Files.readString(path)); + statement1.executeUpdate(); + + conn.commit(); + return true; + } catch (SQLException | IOException e) { + logger.error("Error while saving file to global: ", e); + try { + conn.rollback(); + } catch (SQLException e1) { + logger.error("Error rolling back changes: ", e); + } + return false; + } finally { + try { + if (statement1 != null) statement1.close(); + conn.close(); + } catch (SQLException e) { + logger.error("SQL Error saving file " + fname, e); + } + } } - } - - private String getOneConfigFile(Connection conn, String filename) { - // Query every single row of the global settings db - PreparedStatement query = null; - try { - query = - conn.prepareStatement("SELECT contents FROM global where filename=\"" + filename + "\""); - - var result = query.executeQuery(); - - while (result.next()) { - return result.getString("contents"); - } - } catch (SQLException e) { - logger.error("SQL Err getting file " + filename, e); - } finally { - try { - if (query != null) query.close(); - } catch (SQLException e) { - logger.error("SQL Err closing config file query " + 
filename, e); - } + + @Override + public boolean saveUploadedHardwareConfig(Path uploadPath) { + return saveOneFile(TableKeys.HARDWARE_CONFIG, uploadPath); } - return ""; - } - - private void saveCameras(Connection conn) { - try { - // Replace this camera's row with the new settings - var sqlString = - "REPLACE INTO cameras (unique_name, config_json, drivermode_json, pipeline_jsons) VALUES " - + "(?,?,?,?);"; - - for (var c : config.getCameraConfigurations().entrySet()) { - PreparedStatement statement = conn.prepareStatement(sqlString); - - var config = c.getValue(); - statement.setString(1, c.getKey()); - statement.setString(2, JacksonUtils.serializeToString(config)); - statement.setString(3, JacksonUtils.serializeToString(config.driveModeSettings)); - - // Serializing a list of abstract classes sucks. Instead, make it into an array - // of strings, which we can later unpack back into individual settings - List settings = - config.pipelineSettings.stream() - .map( - it -> { - try { - return JacksonUtils.serializeToString(it); - } catch (IOException e) { - e.printStackTrace(); - return null; - } - }) - .filter(Objects::nonNull) - .collect(Collectors.toList()); - statement.setString(4, JacksonUtils.serializeToString(settings)); - - statement.executeUpdate(); - } - } catch (SQLException | IOException e) { - logger.error("Err saving cameras", e); - try { - conn.rollback(); - } catch (SQLException e1) { - logger.error("Err rolling back changes: ", e); - } + @Override + public boolean saveUploadedHardwareSettings(Path uploadPath) { + return saveOneFile(TableKeys.HARDWARE_SETTINGS, uploadPath); } - } - - private void addFile(PreparedStatement ps, String key, String value) throws SQLException { - ps.setString(1, key); - ps.setString(2, value); - } - - private void saveGlobal(Connection conn) { - PreparedStatement statement1 = null; - PreparedStatement statement2 = null; - PreparedStatement statement3 = null; - try { - // Replace this camera's row with the new settings - 
var sqlString = "REPLACE INTO global (filename, contents) VALUES " + "(?,?);"; - - statement1 = conn.prepareStatement(sqlString); - addFile( - statement1, - TableKeys.HARDWARE_SETTINGS, - JacksonUtils.serializeToString(config.getHardwareSettings())); - statement1.executeUpdate(); - - statement2 = conn.prepareStatement(sqlString); - addFile( - statement2, - TableKeys.NETWORK_CONFIG, - JacksonUtils.serializeToString(config.getNetworkConfig())); - statement2.executeUpdate(); - statement2.close(); - - statement3 = conn.prepareStatement(sqlString); - addFile( - statement3, - TableKeys.HARDWARE_CONFIG, - JacksonUtils.serializeToString(config.getHardwareConfig())); - statement3.executeUpdate(); - statement3.close(); - - } catch (SQLException | IOException e) { - logger.error("Err saving global", e); - try { - conn.rollback(); - } catch (SQLException e1) { - logger.error("Err rolling back changes: ", e); - } - } finally { - try { - if (statement1 != null) statement1.close(); - if (statement2 != null) statement2.close(); - if (statement3 != null) statement3.close(); - } catch (SQLException e) { - logger.error("SQL Err closing global settings query ", e); - } + + @Override + public boolean saveUploadedNetworkConfig(Path uploadPath) { + return saveOneFile(TableKeys.NETWORK_CONFIG, uploadPath); } - } - - private boolean saveOneFile(String fname, Path path) { - Connection conn = null; - PreparedStatement statement1 = null; - - try { - conn = createConn(); - if (conn == null) { - return false; - } - - // Replace this camera's row with the new settings - var sqlString = "REPLACE INTO global (filename, contents) VALUES " + "(?,?);"; - - statement1 = conn.prepareStatement(sqlString); - addFile(statement1, fname, Files.readString(path)); - statement1.executeUpdate(); - - conn.commit(); - return true; - } catch (SQLException | IOException e) { - logger.error("Error while saving file to global: ", e); - try { - conn.rollback(); - } catch (SQLException e1) { - logger.error("Error 
rolling back changes: ", e); - } - return false; - } finally { - try { - if (statement1 != null) statement1.close(); - conn.close(); - } catch (SQLException e) { - logger.error("SQL Error saving file " + fname, e); - } + + @Override + public boolean saveUploadedAprilTagFieldLayout(Path uploadPath) { + return saveOneFile(TableKeys.ATFL_CONFIG_FILE, uploadPath); } - } - - @Override - public boolean saveUploadedHardwareConfig(Path uploadPath) { - return saveOneFile(TableKeys.HARDWARE_CONFIG, uploadPath); - } - - @Override - public boolean saveUploadedHardwareSettings(Path uploadPath) { - return saveOneFile(TableKeys.HARDWARE_SETTINGS, uploadPath); - } - - @Override - public boolean saveUploadedNetworkConfig(Path uploadPath) { - return saveOneFile(TableKeys.NETWORK_CONFIG, uploadPath); - } - - @Override - public boolean saveUploadedAprilTagFieldLayout(Path uploadPath) { - return saveOneFile(TableKeys.ATFL_CONFIG_FILE, uploadPath); - } - - private HashMap loadCameraConfigs(Connection conn) { - HashMap loadedConfigurations = new HashMap<>(); - - // Query every single row of the cameras db - PreparedStatement query = null; - try { - query = - conn.prepareStatement( - String.format( - "SELECT %s, %s, %s, %s FROM cameras", - TableKeys.CAM_UNIQUE_NAME, - TableKeys.CONFIG_JSON, - TableKeys.DRIVERMODE_JSON, - TableKeys.PIPELINE_JSONS)); - - var result = query.executeQuery(); - - // Iterate over every row/"camera" in the table - while (result.next()) { - List dummyList = new ArrayList<>(); - - var uniqueName = result.getString(TableKeys.CAM_UNIQUE_NAME); - var config = - JacksonUtils.deserialize( - result.getString(TableKeys.CONFIG_JSON), CameraConfiguration.class); - var driverMode = - JacksonUtils.deserialize( - result.getString(TableKeys.DRIVERMODE_JSON), DriverModePipelineSettings.class); - List pipelineSettings = - JacksonUtils.deserialize( - result.getString(TableKeys.PIPELINE_JSONS), dummyList.getClass()); - - List loadedSettings = new ArrayList<>(); - for (var str : 
pipelineSettings) { - if (str instanceof String) { - loadedSettings.add(JacksonUtils.deserialize((String) str, CVPipelineSettings.class)); - } - } - config.pipelineSettings = loadedSettings; - config.driveModeSettings = driverMode; - loadedConfigurations.put(uniqueName, config); - } - } catch (SQLException | IOException e) { - logger.error("Err loading cameras: ", e); - } finally { - try { - if (query != null) query.close(); - } catch (SQLException e) { - logger.error("SQL Err closing connection while loading cameras ", e); - } + private HashMap loadCameraConfigs(Connection conn) { + HashMap loadedConfigurations = new HashMap<>(); + + // Query every single row of the cameras db + PreparedStatement query = null; + try { + query = + conn.prepareStatement( + String.format( + "SELECT %s, %s, %s, %s FROM cameras", + TableKeys.CAM_UNIQUE_NAME, + TableKeys.CONFIG_JSON, + TableKeys.DRIVERMODE_JSON, + TableKeys.PIPELINE_JSONS)); + + var result = query.executeQuery(); + + // Iterate over every row/"camera" in the table + while (result.next()) { + List dummyList = new ArrayList<>(); + + var uniqueName = result.getString(TableKeys.CAM_UNIQUE_NAME); + var config = + JacksonUtils.deserialize( + result.getString(TableKeys.CONFIG_JSON), CameraConfiguration.class); + var driverMode = + JacksonUtils.deserialize( + result.getString(TableKeys.DRIVERMODE_JSON), DriverModePipelineSettings.class); + List pipelineSettings = + JacksonUtils.deserialize( + result.getString(TableKeys.PIPELINE_JSONS), dummyList.getClass()); + + List loadedSettings = new ArrayList<>(); + for (var str : pipelineSettings) { + if (str instanceof String) { + loadedSettings.add(JacksonUtils.deserialize((String) str, CVPipelineSettings.class)); + } + } + + config.pipelineSettings = loadedSettings; + config.driveModeSettings = driverMode; + loadedConfigurations.put(uniqueName, config); + } + } catch (SQLException | IOException e) { + logger.error("Err loading cameras: ", e); + } finally { + try { + if (query != null) 
query.close(); + } catch (SQLException e) { + logger.error("SQL Err closing connection while loading cameras ", e); + } + } + return loadedConfigurations; } - return loadedConfigurations; - } - public void setConfig(PhotonConfiguration config) { - this.config = config; - } + public void setConfig(PhotonConfiguration config) { + this.config = config; + } } diff --git a/photon-core/src/main/java/org/photonvision/common/dataflow/DataChangeDestination.java b/photon-core/src/main/java/org/photonvision/common/dataflow/DataChangeDestination.java index d046734463..16a69dfe4f 100644 --- a/photon-core/src/main/java/org/photonvision/common/dataflow/DataChangeDestination.java +++ b/photon-core/src/main/java/org/photonvision/common/dataflow/DataChangeDestination.java @@ -22,14 +22,14 @@ import java.util.List; public enum DataChangeDestination { - DCD_ACTIVEMODULE, - DCD_ACTIVEPIPELINESETTINGS, - DCD_GENSETTINGS, - DCD_UI, - DCD_OTHER; + DCD_ACTIVEMODULE, + DCD_ACTIVEPIPELINESETTINGS, + DCD_GENSETTINGS, + DCD_UI, + DCD_OTHER; - public static final List AllDestinations = - Arrays.asList(DataChangeDestination.values()); + public static final List AllDestinations = + Arrays.asList(DataChangeDestination.values()); - public static class DataChangeDestinationList extends ArrayList {} + public static class DataChangeDestinationList extends ArrayList {} } diff --git a/photon-core/src/main/java/org/photonvision/common/dataflow/DataChangeService.java b/photon-core/src/main/java/org/photonvision/common/dataflow/DataChangeService.java index 22b5ade244..9d49e94290 100644 --- a/photon-core/src/main/java/org/photonvision/common/dataflow/DataChangeService.java +++ b/photon-core/src/main/java/org/photonvision/common/dataflow/DataChangeService.java @@ -27,84 +27,84 @@ @SuppressWarnings("rawtypes") public class DataChangeService { - private static final Logger logger = new Logger(DataChangeService.class, LogGroup.WebServer); - - private static class ThreadSafeSingleton { - private static final 
DataChangeService INSTANCE = new DataChangeService(); - } - - public static DataChangeService getInstance() { - return ThreadSafeSingleton.INSTANCE; - } - - private final CopyOnWriteArrayList subscribers; - - @SuppressWarnings("FieldCanBeLocal") - private final Thread dispatchThread; - - private final BlockingQueue eventQueue = new LinkedBlockingQueue<>(); - - private DataChangeService() { - subscribers = new CopyOnWriteArrayList<>(); - dispatchThread = new Thread(this::dispatchFromQueue); - dispatchThread.setName("DataChangeEventDispatchThread"); - dispatchThread.start(); - } - - public boolean hasEvents() { - return !eventQueue.isEmpty(); - } - - private void dispatchFromQueue() { - while (true) { - try { - var taken = eventQueue.take(); - for (var sub : subscribers) { - if (sub.wantedSources.contains(taken.sourceType) - && sub.wantedDestinations.contains(taken.destType)) { - sub.onDataChangeEvent(taken); - } + private static final Logger logger = new Logger(DataChangeService.class, LogGroup.WebServer); + + private static class ThreadSafeSingleton { + private static final DataChangeService INSTANCE = new DataChangeService(); + } + + public static DataChangeService getInstance() { + return ThreadSafeSingleton.INSTANCE; + } + + private final CopyOnWriteArrayList subscribers; + + @SuppressWarnings("FieldCanBeLocal") + private final Thread dispatchThread; + + private final BlockingQueue eventQueue = new LinkedBlockingQueue<>(); + + private DataChangeService() { + subscribers = new CopyOnWriteArrayList<>(); + dispatchThread = new Thread(this::dispatchFromQueue); + dispatchThread.setName("DataChangeEventDispatchThread"); + dispatchThread.start(); + } + + public boolean hasEvents() { + return !eventQueue.isEmpty(); + } + + private void dispatchFromQueue() { + while (true) { + try { + var taken = eventQueue.take(); + for (var sub : subscribers) { + if (sub.wantedSources.contains(taken.sourceType) + && sub.wantedDestinations.contains(taken.destType)) { + 
sub.onDataChangeEvent(taken); + } + } + } catch (Exception e) { + logger.error("Exception when dispatching event!", e); + e.printStackTrace(); + } } - } catch (Exception e) { - logger.error("Exception when dispatching event!", e); - e.printStackTrace(); - } } - } - - public void addSubscriber(DataChangeSubscriber subscriber) { - if (!subscribers.addIfAbsent(subscriber)) { - logger.warn("Attempted to add already added subscriber!"); - } else { - logger.debug( - () -> { - var sources = - subscriber.wantedSources.stream() - .map(Enum::toString) - .collect(Collectors.joining(", ")); - var dests = - subscriber.wantedDestinations.stream() - .map(Enum::toString) - .collect(Collectors.joining(", ")); - - return "Added subscriber - " + "Sources: " + sources + ", Destinations: " + dests; - }); + + public void addSubscriber(DataChangeSubscriber subscriber) { + if (!subscribers.addIfAbsent(subscriber)) { + logger.warn("Attempted to add already added subscriber!"); + } else { + logger.debug( + () -> { + var sources = + subscriber.wantedSources.stream() + .map(Enum::toString) + .collect(Collectors.joining(", ")); + var dests = + subscriber.wantedDestinations.stream() + .map(Enum::toString) + .collect(Collectors.joining(", ")); + + return "Added subscriber - " + "Sources: " + sources + ", Destinations: " + dests; + }); + } } - } - public void addSubscribers(DataChangeSubscriber... subs) { - for (var sub : subs) { - addSubscriber(sub); + public void addSubscribers(DataChangeSubscriber... subs) { + for (var sub : subs) { + addSubscriber(sub); + } } - } - public void publishEvent(DataChangeEvent event) { - eventQueue.offer(event); - } + public void publishEvent(DataChangeEvent event) { + eventQueue.offer(event); + } - public void publishEvents(DataChangeEvent... events) { - for (var event : events) { - publishEvent(event); + public void publishEvents(DataChangeEvent... 
events) { + for (var event : events) { + publishEvent(event); + } } - } } diff --git a/photon-core/src/main/java/org/photonvision/common/dataflow/DataChangeSource.java b/photon-core/src/main/java/org/photonvision/common/dataflow/DataChangeSource.java index 966015ca74..c9ba1585fd 100644 --- a/photon-core/src/main/java/org/photonvision/common/dataflow/DataChangeSource.java +++ b/photon-core/src/main/java/org/photonvision/common/dataflow/DataChangeSource.java @@ -22,13 +22,13 @@ import java.util.List; public enum DataChangeSource { - DCS_WEBSOCKET, - DCS_HTTP, - DCS_NETWORKTABLES, - DCS_VISIONMODULE, - DCS_OTHER; + DCS_WEBSOCKET, + DCS_HTTP, + DCS_NETWORKTABLES, + DCS_VISIONMODULE, + DCS_OTHER; - public static final List AllSources = Arrays.asList(DataChangeSource.values()); + public static final List AllSources = Arrays.asList(DataChangeSource.values()); - public static class DataChangeSourceList extends ArrayList {} + public static class DataChangeSourceList extends ArrayList {} } diff --git a/photon-core/src/main/java/org/photonvision/common/dataflow/DataChangeSubscriber.java b/photon-core/src/main/java/org/photonvision/common/dataflow/DataChangeSubscriber.java index 7b44095c31..92a06fdb67 100644 --- a/photon-core/src/main/java/org/photonvision/common/dataflow/DataChangeSubscriber.java +++ b/photon-core/src/main/java/org/photonvision/common/dataflow/DataChangeSubscriber.java @@ -22,34 +22,34 @@ import org.photonvision.common.dataflow.events.DataChangeEvent; public abstract class DataChangeSubscriber { - public final List wantedSources; - public final List wantedDestinations; + public final List wantedSources; + public final List wantedDestinations; - private final int hash; + private final int hash; - public DataChangeSubscriber( - List wantedSources, List wantedDestinations) { - this.wantedSources = wantedSources; - this.wantedDestinations = wantedDestinations; - hash = Objects.hash(wantedSources, wantedDestinations); - } + public DataChangeSubscriber( + List 
wantedSources, List wantedDestinations) { + this.wantedSources = wantedSources; + this.wantedDestinations = wantedDestinations; + hash = Objects.hash(wantedSources, wantedDestinations); + } - public DataChangeSubscriber() { - this(DataChangeSource.AllSources, DataChangeDestination.AllDestinations); - } + public DataChangeSubscriber() { + this(DataChangeSource.AllSources, DataChangeDestination.AllDestinations); + } - public DataChangeSubscriber(DataChangeSource.DataChangeSourceList wantedSources) { - this(wantedSources, DataChangeDestination.AllDestinations); - } + public DataChangeSubscriber(DataChangeSource.DataChangeSourceList wantedSources) { + this(wantedSources, DataChangeDestination.AllDestinations); + } - public DataChangeSubscriber(DataChangeDestination.DataChangeDestinationList wantedDestinations) { - this(DataChangeSource.AllSources, wantedDestinations); - } + public DataChangeSubscriber(DataChangeDestination.DataChangeDestinationList wantedDestinations) { + this(DataChangeSource.AllSources, wantedDestinations); + } - public abstract void onDataChangeEvent(DataChangeEvent event); + public abstract void onDataChangeEvent(DataChangeEvent event); - @Override - public int hashCode() { - return hash; - } + @Override + public int hashCode() { + return hash; + } } diff --git a/photon-core/src/main/java/org/photonvision/common/dataflow/events/DataChangeEvent.java b/photon-core/src/main/java/org/photonvision/common/dataflow/events/DataChangeEvent.java index 7f7b993c04..3f40aba733 100644 --- a/photon-core/src/main/java/org/photonvision/common/dataflow/events/DataChangeEvent.java +++ b/photon-core/src/main/java/org/photonvision/common/dataflow/events/DataChangeEvent.java @@ -21,34 +21,34 @@ import org.photonvision.common.dataflow.DataChangeSource; public class DataChangeEvent { - public final DataChangeSource sourceType; - public final DataChangeDestination destType; - public final String propertyName; - public final T data; + public final DataChangeSource 
sourceType; + public final DataChangeDestination destType; + public final String propertyName; + public final T data; - public DataChangeEvent( - DataChangeSource sourceType, - DataChangeDestination destType, - String propertyName, - T newValue) { - this.sourceType = sourceType; - this.destType = destType; - this.propertyName = propertyName; - this.data = newValue; - } + public DataChangeEvent( + DataChangeSource sourceType, + DataChangeDestination destType, + String propertyName, + T newValue) { + this.sourceType = sourceType; + this.destType = destType; + this.propertyName = propertyName; + this.data = newValue; + } - @Override - public String toString() { - return "DataChangeEvent{" - + "sourceType=" - + sourceType - + ", destType=" - + destType - + ", propertyName='" - + propertyName - + '\'' - + ", data=" - + data - + '}'; - } + @Override + public String toString() { + return "DataChangeEvent{" + + "sourceType=" + + sourceType + + ", destType=" + + destType + + ", propertyName='" + + propertyName + + '\'' + + ", data=" + + data + + '}'; + } } diff --git a/photon-core/src/main/java/org/photonvision/common/dataflow/events/HTTPRequestEvent.java b/photon-core/src/main/java/org/photonvision/common/dataflow/events/HTTPRequestEvent.java index 393977a7a8..5843ba1cca 100644 --- a/photon-core/src/main/java/org/photonvision/common/dataflow/events/HTTPRequestEvent.java +++ b/photon-core/src/main/java/org/photonvision/common/dataflow/events/HTTPRequestEvent.java @@ -21,11 +21,11 @@ import org.photonvision.common.dataflow.DataChangeSource; public class HTTPRequestEvent extends DataChangeEvent { - public HTTPRequestEvent( - DataChangeSource sourceType, - DataChangeDestination destType, - String propertyName, - T newValue) { - super(sourceType, destType, propertyName, newValue); - } + public HTTPRequestEvent( + DataChangeSource sourceType, + DataChangeDestination destType, + String propertyName, + T newValue) { + super(sourceType, destType, propertyName, newValue); + } } diff 
--git a/photon-core/src/main/java/org/photonvision/common/dataflow/events/IncomingWebSocketEvent.java b/photon-core/src/main/java/org/photonvision/common/dataflow/events/IncomingWebSocketEvent.java index 87d3ca3cab..7f0fd6134e 100644 --- a/photon-core/src/main/java/org/photonvision/common/dataflow/events/IncomingWebSocketEvent.java +++ b/photon-core/src/main/java/org/photonvision/common/dataflow/events/IncomingWebSocketEvent.java @@ -23,44 +23,44 @@ import org.photonvision.common.dataflow.DataChangeSource; public class IncomingWebSocketEvent extends DataChangeEvent { - public final Integer cameraIndex; - public final WsContext originContext; + public final Integer cameraIndex; + public final WsContext originContext; - public IncomingWebSocketEvent(DataChangeDestination destType, String propertyName, T newValue) { - this(destType, propertyName, newValue, null, null); - } + public IncomingWebSocketEvent(DataChangeDestination destType, String propertyName, T newValue) { + this(destType, propertyName, newValue, null, null); + } - public IncomingWebSocketEvent( - DataChangeDestination destType, - String propertyName, - T newValue, - Integer cameraIndex, - WsContext originContext) { - super(DataChangeSource.DCS_WEBSOCKET, destType, propertyName, newValue); - this.cameraIndex = cameraIndex; - this.originContext = originContext; - } + public IncomingWebSocketEvent( + DataChangeDestination destType, + String propertyName, + T newValue, + Integer cameraIndex, + WsContext originContext) { + super(DataChangeSource.DCS_WEBSOCKET, destType, propertyName, newValue); + this.cameraIndex = cameraIndex; + this.originContext = originContext; + } - @SuppressWarnings("unchecked") - public IncomingWebSocketEvent( - DataChangeDestination destType, String dataKey, HashMap data) { - this(destType, dataKey, (T) data.get(dataKey)); - } + @SuppressWarnings("unchecked") + public IncomingWebSocketEvent( + DataChangeDestination destType, String dataKey, HashMap data) { + this(destType, dataKey, 
(T) data.get(dataKey)); + } - @Override - public String toString() { - return "IncomingWebSocketEvent{" - + "cameraIndex=" - + cameraIndex - + ", sourceType=" - + sourceType - + ", destType=" - + destType - + ", propertyName='" - + propertyName - + '\'' - + ", data=" - + data - + '}'; - } + @Override + public String toString() { + return "IncomingWebSocketEvent{" + + "cameraIndex=" + + cameraIndex + + ", sourceType=" + + sourceType + + ", destType=" + + destType + + ", propertyName='" + + propertyName + + '\'' + + ", data=" + + data + + '}'; + } } diff --git a/photon-core/src/main/java/org/photonvision/common/dataflow/events/OutgoingUIEvent.java b/photon-core/src/main/java/org/photonvision/common/dataflow/events/OutgoingUIEvent.java index 11feec070b..efb3308dc5 100644 --- a/photon-core/src/main/java/org/photonvision/common/dataflow/events/OutgoingUIEvent.java +++ b/photon-core/src/main/java/org/photonvision/common/dataflow/events/OutgoingUIEvent.java @@ -23,29 +23,29 @@ import org.photonvision.common.dataflow.DataChangeSource; public class OutgoingUIEvent extends DataChangeEvent { - public final WsContext originContext; - - public OutgoingUIEvent(String propertyName, T newValue) { - this(propertyName, newValue, null); - } - - public OutgoingUIEvent(String propertyName, T newValue, WsContext originContext) { - super(DataChangeSource.DCS_WEBSOCKET, DataChangeDestination.DCD_UI, propertyName, newValue); - this.originContext = originContext; - } - - public static OutgoingUIEvent> wrappedOf( - String commandName, Object value) { - HashMap data = new HashMap<>(); - data.put(commandName, value); - return new OutgoingUIEvent<>(commandName, data); - } - - public static OutgoingUIEvent> wrappedOf( - String commandName, String propertyName, Object value, WsContext originContext) { - HashMap data = new HashMap<>(); - data.put(propertyName, value); - - return new OutgoingUIEvent<>(commandName, data, originContext); - } + public final WsContext originContext; + + public 
OutgoingUIEvent(String propertyName, T newValue) { + this(propertyName, newValue, null); + } + + public OutgoingUIEvent(String propertyName, T newValue, WsContext originContext) { + super(DataChangeSource.DCS_WEBSOCKET, DataChangeDestination.DCD_UI, propertyName, newValue); + this.originContext = originContext; + } + + public static OutgoingUIEvent> wrappedOf( + String commandName, Object value) { + HashMap data = new HashMap<>(); + data.put(commandName, value); + return new OutgoingUIEvent<>(commandName, data); + } + + public static OutgoingUIEvent> wrappedOf( + String commandName, String propertyName, Object value, WsContext originContext) { + HashMap data = new HashMap<>(); + data.put(propertyName, value); + + return new OutgoingUIEvent<>(commandName, data, originContext); + } } diff --git a/photon-core/src/main/java/org/photonvision/common/dataflow/networktables/NTDataChangeListener.java b/photon-core/src/main/java/org/photonvision/common/dataflow/networktables/NTDataChangeListener.java index 9303b6c622..2bd172cadb 100644 --- a/photon-core/src/main/java/org/photonvision/common/dataflow/networktables/NTDataChangeListener.java +++ b/photon-core/src/main/java/org/photonvision/common/dataflow/networktables/NTDataChangeListener.java @@ -24,22 +24,22 @@ import java.util.function.Consumer; public class NTDataChangeListener { - private final NetworkTableInstance instance; - private final Subscriber watchedEntry; - private final int listenerID; + private final NetworkTableInstance instance; + private final Subscriber watchedEntry; + private final int listenerID; - public NTDataChangeListener( - NetworkTableInstance instance, - Subscriber watchedSubscriber, - Consumer dataChangeConsumer) { - this.watchedEntry = watchedSubscriber; - this.instance = instance; - listenerID = - this.instance.addListener( - watchedEntry, EnumSet.of(NetworkTableEvent.Kind.kValueAll), dataChangeConsumer); - } + public NTDataChangeListener( + NetworkTableInstance instance, + Subscriber 
watchedSubscriber, + Consumer dataChangeConsumer) { + this.watchedEntry = watchedSubscriber; + this.instance = instance; + listenerID = + this.instance.addListener( + watchedEntry, EnumSet.of(NetworkTableEvent.Kind.kValueAll), dataChangeConsumer); + } - public void remove() { - this.instance.removeListener(listenerID); - } + public void remove() { + this.instance.removeListener(listenerID); + } } diff --git a/photon-core/src/main/java/org/photonvision/common/dataflow/networktables/NTDataPublisher.java b/photon-core/src/main/java/org/photonvision/common/dataflow/networktables/NTDataPublisher.java index 306b6ae7a3..d985c710f5 100644 --- a/photon-core/src/main/java/org/photonvision/common/dataflow/networktables/NTDataPublisher.java +++ b/photon-core/src/main/java/org/photonvision/common/dataflow/networktables/NTDataPublisher.java @@ -32,166 +32,166 @@ import org.photonvision.vision.target.TrackedTarget; public class NTDataPublisher implements CVPipelineResultConsumer { - private final Logger logger = new Logger(NTDataPublisher.class, LogGroup.General); + private final Logger logger = new Logger(NTDataPublisher.class, LogGroup.General); - private final NetworkTable rootTable = NetworkTablesManager.getInstance().kRootTable; + private final NetworkTable rootTable = NetworkTablesManager.getInstance().kRootTable; - private final NTTopicSet ts = new NTTopicSet(); + private final NTTopicSet ts = new NTTopicSet(); - NTDataChangeListener pipelineIndexListener; - private final Supplier pipelineIndexSupplier; - private final Consumer pipelineIndexConsumer; + NTDataChangeListener pipelineIndexListener; + private final Supplier pipelineIndexSupplier; + private final Consumer pipelineIndexConsumer; - NTDataChangeListener driverModeListener; - private final BooleanSupplier driverModeSupplier; - private final Consumer driverModeConsumer; + NTDataChangeListener driverModeListener; + private final BooleanSupplier driverModeSupplier; + private final Consumer driverModeConsumer; - 
private long heartbeatCounter = 0; + private long heartbeatCounter = 0; - public NTDataPublisher( - String cameraNickname, - Supplier pipelineIndexSupplier, - Consumer pipelineIndexConsumer, - BooleanSupplier driverModeSupplier, - Consumer driverModeConsumer) { - this.pipelineIndexSupplier = pipelineIndexSupplier; - this.pipelineIndexConsumer = pipelineIndexConsumer; - this.driverModeSupplier = driverModeSupplier; - this.driverModeConsumer = driverModeConsumer; + public NTDataPublisher( + String cameraNickname, + Supplier pipelineIndexSupplier, + Consumer pipelineIndexConsumer, + BooleanSupplier driverModeSupplier, + Consumer driverModeConsumer) { + this.pipelineIndexSupplier = pipelineIndexSupplier; + this.pipelineIndexConsumer = pipelineIndexConsumer; + this.driverModeSupplier = driverModeSupplier; + this.driverModeConsumer = driverModeConsumer; - updateCameraNickname(cameraNickname); - updateEntries(); - } - - private void onPipelineIndexChange(NetworkTableEvent entryNotification) { - var newIndex = (int) entryNotification.valueData.value.getInteger(); - var originalIndex = pipelineIndexSupplier.get(); - - // ignore indexes below 0 - if (newIndex < 0) { - ts.pipelineIndexPublisher.set(originalIndex); - return; + updateCameraNickname(cameraNickname); + updateEntries(); } - if (newIndex == originalIndex) { - logger.debug("Pipeline index is already " + newIndex); - return; + private void onPipelineIndexChange(NetworkTableEvent entryNotification) { + var newIndex = (int) entryNotification.valueData.value.getInteger(); + var originalIndex = pipelineIndexSupplier.get(); + + // ignore indexes below 0 + if (newIndex < 0) { + ts.pipelineIndexPublisher.set(originalIndex); + return; + } + + if (newIndex == originalIndex) { + logger.debug("Pipeline index is already " + newIndex); + return; + } + + pipelineIndexConsumer.accept(newIndex); + var setIndex = pipelineIndexSupplier.get(); + if (newIndex != setIndex) { // set failed + ts.pipelineIndexPublisher.set(setIndex); + // 
TODO: Log + } + logger.debug("Set pipeline index to " + newIndex); } - pipelineIndexConsumer.accept(newIndex); - var setIndex = pipelineIndexSupplier.get(); - if (newIndex != setIndex) { // set failed - ts.pipelineIndexPublisher.set(setIndex); - // TODO: Log - } - logger.debug("Set pipeline index to " + newIndex); - } + private void onDriverModeChange(NetworkTableEvent entryNotification) { + var newDriverMode = entryNotification.valueData.value.getBoolean(); + var originalDriverMode = driverModeSupplier.getAsBoolean(); - private void onDriverModeChange(NetworkTableEvent entryNotification) { - var newDriverMode = entryNotification.valueData.value.getBoolean(); - var originalDriverMode = driverModeSupplier.getAsBoolean(); + if (newDriverMode == originalDriverMode) { + logger.debug("Driver mode is already " + newDriverMode); + return; + } - if (newDriverMode == originalDriverMode) { - logger.debug("Driver mode is already " + newDriverMode); - return; + driverModeConsumer.accept(newDriverMode); + logger.debug("Set driver mode to " + newDriverMode); } - driverModeConsumer.accept(newDriverMode); - logger.debug("Set driver mode to " + newDriverMode); - } - - private void removeEntries() { - if (pipelineIndexListener != null) pipelineIndexListener.remove(); - if (driverModeListener != null) driverModeListener.remove(); - ts.removeEntries(); - } - - private void updateEntries() { - if (pipelineIndexListener != null) pipelineIndexListener.remove(); - if (driverModeListener != null) driverModeListener.remove(); - - ts.updateEntries(); - - pipelineIndexListener = - new NTDataChangeListener( - ts.subTable.getInstance(), ts.pipelineIndexRequestSub, this::onPipelineIndexChange); - - driverModeListener = - new NTDataChangeListener( - ts.subTable.getInstance(), ts.driverModeSubscriber, this::onDriverModeChange); - } - - public void updateCameraNickname(String newCameraNickname) { - removeEntries(); - ts.subTable = rootTable.getSubTable(newCameraNickname); - updateEntries(); - } - - 
@Override - public void accept(CVPipelineResult result) { - var simplified = - new PhotonPipelineResult( - result.getLatencyMillis(), - TrackedTarget.simpleFromTrackedTargets(result.targets), - result.multiTagResult); - Packet packet = new Packet(simplified.getPacketSize()); - simplified.populatePacket(packet); - - ts.rawBytesEntry.set(packet.getData()); - - ts.pipelineIndexPublisher.set(pipelineIndexSupplier.get()); - ts.driverModePublisher.set(driverModeSupplier.getAsBoolean()); - ts.latencyMillisEntry.set(result.getLatencyMillis()); - ts.hasTargetEntry.set(result.hasTargets()); - - if (result.hasTargets()) { - var bestTarget = result.targets.get(0); - - ts.targetPitchEntry.set(bestTarget.getPitch()); - ts.targetYawEntry.set(bestTarget.getYaw()); - ts.targetAreaEntry.set(bestTarget.getArea()); - ts.targetSkewEntry.set(bestTarget.getSkew()); - - var pose = bestTarget.getBestCameraToTarget3d(); - ts.targetPoseEntry.set( - new double[] { - pose.getTranslation().getX(), - pose.getTranslation().getY(), - pose.getTranslation().getZ(), - pose.getRotation().getQuaternion().getW(), - pose.getRotation().getQuaternion().getX(), - pose.getRotation().getQuaternion().getY(), - pose.getRotation().getQuaternion().getZ() - }); - - var targetOffsetPoint = bestTarget.getTargetOffsetPoint(); - ts.bestTargetPosX.set(targetOffsetPoint.x); - ts.bestTargetPosY.set(targetOffsetPoint.y); - } else { - ts.targetPitchEntry.set(0); - ts.targetYawEntry.set(0); - ts.targetAreaEntry.set(0); - ts.targetSkewEntry.set(0); - ts.targetPoseEntry.set(new double[] {0, 0, 0}); - ts.bestTargetPosX.set(0); - ts.bestTargetPosY.set(0); + private void removeEntries() { + if (pipelineIndexListener != null) pipelineIndexListener.remove(); + if (driverModeListener != null) driverModeListener.remove(); + ts.removeEntries(); } - // Something in the result can sometimes be null -- so check probably too many things - if (result.inputAndOutputFrame != null - && result.inputAndOutputFrame.frameStaticProperties != null 
- && result.inputAndOutputFrame.frameStaticProperties.cameraCalibration != null) { - var fsp = result.inputAndOutputFrame.frameStaticProperties; - ts.cameraIntrinsicsPublisher.accept(fsp.cameraCalibration.getIntrinsicsArr()); - ts.cameraDistortionPublisher.accept(fsp.cameraCalibration.getExtrinsicsArr()); - } else { - ts.cameraIntrinsicsPublisher.accept(new double[] {}); - ts.cameraDistortionPublisher.accept(new double[] {}); + private void updateEntries() { + if (pipelineIndexListener != null) pipelineIndexListener.remove(); + if (driverModeListener != null) driverModeListener.remove(); + + ts.updateEntries(); + + pipelineIndexListener = + new NTDataChangeListener( + ts.subTable.getInstance(), ts.pipelineIndexRequestSub, this::onPipelineIndexChange); + + driverModeListener = + new NTDataChangeListener( + ts.subTable.getInstance(), ts.driverModeSubscriber, this::onDriverModeChange); } - ts.heartbeatPublisher.set(heartbeatCounter++); + public void updateCameraNickname(String newCameraNickname) { + removeEntries(); + ts.subTable = rootTable.getSubTable(newCameraNickname); + updateEntries(); + } - // TODO...nt4... is this needed? 
- rootTable.getInstance().flush(); - } + @Override + public void accept(CVPipelineResult result) { + var simplified = + new PhotonPipelineResult( + result.getLatencyMillis(), + TrackedTarget.simpleFromTrackedTargets(result.targets), + result.multiTagResult); + Packet packet = new Packet(simplified.getPacketSize()); + simplified.populatePacket(packet); + + ts.rawBytesEntry.set(packet.getData()); + + ts.pipelineIndexPublisher.set(pipelineIndexSupplier.get()); + ts.driverModePublisher.set(driverModeSupplier.getAsBoolean()); + ts.latencyMillisEntry.set(result.getLatencyMillis()); + ts.hasTargetEntry.set(result.hasTargets()); + + if (result.hasTargets()) { + var bestTarget = result.targets.get(0); + + ts.targetPitchEntry.set(bestTarget.getPitch()); + ts.targetYawEntry.set(bestTarget.getYaw()); + ts.targetAreaEntry.set(bestTarget.getArea()); + ts.targetSkewEntry.set(bestTarget.getSkew()); + + var pose = bestTarget.getBestCameraToTarget3d(); + ts.targetPoseEntry.set( + new double[] { + pose.getTranslation().getX(), + pose.getTranslation().getY(), + pose.getTranslation().getZ(), + pose.getRotation().getQuaternion().getW(), + pose.getRotation().getQuaternion().getX(), + pose.getRotation().getQuaternion().getY(), + pose.getRotation().getQuaternion().getZ() + }); + + var targetOffsetPoint = bestTarget.getTargetOffsetPoint(); + ts.bestTargetPosX.set(targetOffsetPoint.x); + ts.bestTargetPosY.set(targetOffsetPoint.y); + } else { + ts.targetPitchEntry.set(0); + ts.targetYawEntry.set(0); + ts.targetAreaEntry.set(0); + ts.targetSkewEntry.set(0); + ts.targetPoseEntry.set(new double[] {0, 0, 0}); + ts.bestTargetPosX.set(0); + ts.bestTargetPosY.set(0); + } + + // Something in the result can sometimes be null -- so check probably too many things + if (result.inputAndOutputFrame != null + && result.inputAndOutputFrame.frameStaticProperties != null + && result.inputAndOutputFrame.frameStaticProperties.cameraCalibration != null) { + var fsp = 
result.inputAndOutputFrame.frameStaticProperties; + ts.cameraIntrinsicsPublisher.accept(fsp.cameraCalibration.getIntrinsicsArr()); + ts.cameraDistortionPublisher.accept(fsp.cameraCalibration.getExtrinsicsArr()); + } else { + ts.cameraIntrinsicsPublisher.accept(new double[] {}); + ts.cameraDistortionPublisher.accept(new double[] {}); + } + + ts.heartbeatPublisher.set(heartbeatCounter++); + + // TODO...nt4... is this needed? + rootTable.getInstance().flush(); + } } diff --git a/photon-core/src/main/java/org/photonvision/common/dataflow/networktables/NetworkTablesManager.java b/photon-core/src/main/java/org/photonvision/common/dataflow/networktables/NetworkTablesManager.java index 74de5eb37e..a62e0f3b4c 100644 --- a/photon-core/src/main/java/org/photonvision/common/dataflow/networktables/NetworkTablesManager.java +++ b/photon-core/src/main/java/org/photonvision/common/dataflow/networktables/NetworkTablesManager.java @@ -40,168 +40,168 @@ import org.photonvision.common.util.file.JacksonUtils; public class NetworkTablesManager { - private final NetworkTableInstance ntInstance = NetworkTableInstance.getDefault(); - private final String kRootTableName = "/photonvision"; - private final String kFieldLayoutName = "apriltag_field_layout"; - public final NetworkTable kRootTable = ntInstance.getTable(kRootTableName); - - private final NTLogger m_ntLogger = new NTLogger(); - - private boolean m_isRetryingConnection = false; - - private StringSubscriber m_fieldLayoutSubscriber = - kRootTable.getStringTopic(kFieldLayoutName).subscribe(""); - - private NetworkTablesManager() { - ntInstance.addLogger(255, 255, (event) -> {}); // to hide error messages - ntInstance.addConnectionListener(true, m_ntLogger); // to hide error messages - - ntInstance.addListener( - m_fieldLayoutSubscriber, EnumSet.of(Kind.kValueAll), this::onFieldLayoutChanged); - - TimedTaskManager.getInstance().addTask("NTManager", this::ntTick, 5000); - - // Get the UI state in sync with the backend. 
NT should fire a callback when it first connects - // to the robot - broadcastConnectedStatus(); - } - - private static NetworkTablesManager INSTANCE; - - public static NetworkTablesManager getInstance() { - if (INSTANCE == null) INSTANCE = new NetworkTablesManager(); - return INSTANCE; - } - - private static final Logger logger = new Logger(NetworkTablesManager.class, LogGroup.General); - - private static class NTLogger implements Consumer { - private boolean hasReportedConnectionFailure = false; - - @Override - public void accept(NetworkTableEvent event) { - var isConnEvent = event.is(Kind.kConnected); - var isDisconnEvent = event.is(Kind.kDisconnected); - - if (!hasReportedConnectionFailure && isDisconnEvent) { - var msg = - String.format( - "NT lost connection to %s:%d! (NT version %d). Will retry in background.", - event.connInfo.remote_ip, - event.connInfo.remote_port, - event.connInfo.protocol_version); - logger.error(msg); - - hasReportedConnectionFailure = true; - getInstance().broadcastConnectedStatus(); - } else if (isConnEvent && event.connInfo != null) { - var msg = - String.format( - "NT connected to %s:%d! 
(NT version %d)", - event.connInfo.remote_ip, - event.connInfo.remote_port, - event.connInfo.protocol_version); - logger.info(msg); - - hasReportedConnectionFailure = false; - ScriptManager.queueEvent(ScriptEventType.kNTConnected); - getInstance().broadcastVersion(); - getInstance().broadcastConnectedStatus(); - } + private final NetworkTableInstance ntInstance = NetworkTableInstance.getDefault(); + private final String kRootTableName = "/photonvision"; + private final String kFieldLayoutName = "apriltag_field_layout"; + public final NetworkTable kRootTable = ntInstance.getTable(kRootTableName); + + private final NTLogger m_ntLogger = new NTLogger(); + + private boolean m_isRetryingConnection = false; + + private StringSubscriber m_fieldLayoutSubscriber = + kRootTable.getStringTopic(kFieldLayoutName).subscribe(""); + + private NetworkTablesManager() { + ntInstance.addLogger(255, 255, (event) -> {}); // to hide error messages + ntInstance.addConnectionListener(true, m_ntLogger); // to hide error messages + + ntInstance.addListener( + m_fieldLayoutSubscriber, EnumSet.of(Kind.kValueAll), this::onFieldLayoutChanged); + + TimedTaskManager.getInstance().addTask("NTManager", this::ntTick, 5000); + + // Get the UI state in sync with the backend. 
NT should fire a callback when it first connects + // to the robot + broadcastConnectedStatus(); + } + + private static NetworkTablesManager INSTANCE; + + public static NetworkTablesManager getInstance() { + if (INSTANCE == null) INSTANCE = new NetworkTablesManager(); + return INSTANCE; } - } - - private void onFieldLayoutChanged(NetworkTableEvent event) { - var atfl_json = event.valueData.value.getString(); - try { - System.out.println("Got new field layout!"); - var atfl = JacksonUtils.deserialize(atfl_json, AprilTagFieldLayout.class); - ConfigManager.getInstance().getConfig().setApriltagFieldLayout(atfl); - ConfigManager.getInstance().requestSave(); - DataChangeService.getInstance() - .publishEvent( - new OutgoingUIEvent<>( - "fullsettings", ConfigManager.getInstance().getConfig().toHashMap())); - } catch (IOException e) { - logger.error("Error deserializing atfl!"); - logger.error(atfl_json); + + private static final Logger logger = new Logger(NetworkTablesManager.class, LogGroup.General); + + private static class NTLogger implements Consumer { + private boolean hasReportedConnectionFailure = false; + + @Override + public void accept(NetworkTableEvent event) { + var isConnEvent = event.is(Kind.kConnected); + var isDisconnEvent = event.is(Kind.kDisconnected); + + if (!hasReportedConnectionFailure && isDisconnEvent) { + var msg = + String.format( + "NT lost connection to %s:%d! (NT version %d). Will retry in background.", + event.connInfo.remote_ip, + event.connInfo.remote_port, + event.connInfo.protocol_version); + logger.error(msg); + + hasReportedConnectionFailure = true; + getInstance().broadcastConnectedStatus(); + } else if (isConnEvent && event.connInfo != null) { + var msg = + String.format( + "NT connected to %s:%d! 
(NT version %d)", + event.connInfo.remote_ip, + event.connInfo.remote_port, + event.connInfo.protocol_version); + logger.info(msg); + + hasReportedConnectionFailure = false; + ScriptManager.queueEvent(ScriptEventType.kNTConnected); + getInstance().broadcastVersion(); + getInstance().broadcastConnectedStatus(); + } + } + } + + private void onFieldLayoutChanged(NetworkTableEvent event) { + var atfl_json = event.valueData.value.getString(); + try { + System.out.println("Got new field layout!"); + var atfl = JacksonUtils.deserialize(atfl_json, AprilTagFieldLayout.class); + ConfigManager.getInstance().getConfig().setApriltagFieldLayout(atfl); + ConfigManager.getInstance().requestSave(); + DataChangeService.getInstance() + .publishEvent( + new OutgoingUIEvent<>( + "fullsettings", ConfigManager.getInstance().getConfig().toHashMap())); + } catch (IOException e) { + logger.error("Error deserializing atfl!"); + logger.error(atfl_json); + } + } + + public void broadcastConnectedStatus() { + TimedTaskManager.getInstance().addOneShotTask(this::broadcastConnectedStatusImpl, 1000L); } - } - - public void broadcastConnectedStatus() { - TimedTaskManager.getInstance().addOneShotTask(this::broadcastConnectedStatusImpl, 1000L); - } - - private void broadcastConnectedStatusImpl() { - HashMap map = new HashMap<>(); - var subMap = new HashMap(); - - subMap.put("connected", ntInstance.isConnected()); - if (ntInstance.isConnected()) { - var connections = ntInstance.getConnections(); - if (connections.length > 0) { - subMap.put("address", connections[0].remote_ip + ":" + connections[0].remote_port); - } - subMap.put("clients", connections.length); + + private void broadcastConnectedStatusImpl() { + HashMap map = new HashMap<>(); + var subMap = new HashMap(); + + subMap.put("connected", ntInstance.isConnected()); + if (ntInstance.isConnected()) { + var connections = ntInstance.getConnections(); + if (connections.length > 0) { + subMap.put("address", connections[0].remote_ip + ":" + 
connections[0].remote_port); + } + subMap.put("clients", connections.length); + } + + map.put("ntConnectionInfo", subMap); + DataChangeService.getInstance() + .publishEvent(new OutgoingUIEvent<>("networkTablesConnected", map)); } - map.put("ntConnectionInfo", subMap); - DataChangeService.getInstance() - .publishEvent(new OutgoingUIEvent<>("networkTablesConnected", map)); - } - - private void broadcastVersion() { - kRootTable.getEntry("version").setString(PhotonVersion.versionString); - kRootTable.getEntry("buildDate").setString(PhotonVersion.buildDate); - } - - public void setConfig(NetworkConfig config) { - if (config.runNTServer) { - setServerMode(); - } else { - setClientMode(config.ntServerAddress); + private void broadcastVersion() { + kRootTable.getEntry("version").setString(PhotonVersion.versionString); + kRootTable.getEntry("buildDate").setString(PhotonVersion.buildDate); } - broadcastVersion(); - } - - private void setClientMode(String ntServerAddress) { - ntInstance.stopServer(); - ntInstance.startClient4("photonvision"); - try { - int t = Integer.parseInt(ntServerAddress); - if (!m_isRetryingConnection) logger.info("Starting NT Client, server team is " + t); - ntInstance.setServerTeam(t); - } catch (NumberFormatException e) { - if (!m_isRetryingConnection) - logger.info("Starting NT Client, server IP is \"" + ntServerAddress + "\""); - ntInstance.setServer(ntServerAddress); + + public void setConfig(NetworkConfig config) { + if (config.runNTServer) { + setServerMode(); + } else { + setClientMode(config.ntServerAddress); + } + broadcastVersion(); } - ntInstance.startDSClient(); - broadcastVersion(); - } - - private void setServerMode() { - logger.info("Starting NT Server"); - ntInstance.stopClient(); - ntInstance.startServer(); - broadcastVersion(); - } - - // So it seems like if Photon starts before the robot NT server does, and both aren't static IP, - // it'll never connect. 
This hack works around it by restarting the client/server while the nt - // instance - // isn't connected, same as clicking the save button in the settings menu (or restarting the - // service) - private void ntTick() { - if (!ntInstance.isConnected() - && !ConfigManager.getInstance().getConfig().getNetworkConfig().runNTServer) { - setConfig(ConfigManager.getInstance().getConfig().getNetworkConfig()); + + private void setClientMode(String ntServerAddress) { + ntInstance.stopServer(); + ntInstance.startClient4("photonvision"); + try { + int t = Integer.parseInt(ntServerAddress); + if (!m_isRetryingConnection) logger.info("Starting NT Client, server team is " + t); + ntInstance.setServerTeam(t); + } catch (NumberFormatException e) { + if (!m_isRetryingConnection) + logger.info("Starting NT Client, server IP is \"" + ntServerAddress + "\""); + ntInstance.setServer(ntServerAddress); + } + ntInstance.startDSClient(); + broadcastVersion(); + } + + private void setServerMode() { + logger.info("Starting NT Server"); + ntInstance.stopClient(); + ntInstance.startServer(); + broadcastVersion(); } - if (!ntInstance.isConnected() && !m_isRetryingConnection) { - m_isRetryingConnection = true; - logger.error( - "[NetworkTablesManager] Could not connect to the robot! Will retry in the background..."); + // So it seems like if Photon starts before the robot NT server does, and both aren't static IP, + // it'll never connect. 
This hack works around it by restarting the client/server while the nt + // instance + // isn't connected, same as clicking the save button in the settings menu (or restarting the + // service) + private void ntTick() { + if (!ntInstance.isConnected() + && !ConfigManager.getInstance().getConfig().getNetworkConfig().runNTServer) { + setConfig(ConfigManager.getInstance().getConfig().getNetworkConfig()); + } + + if (!ntInstance.isConnected() && !m_isRetryingConnection) { + m_isRetryingConnection = true; + logger.error( + "[NetworkTablesManager] Could not connect to the robot! Will retry in the background..."); + } } - } } diff --git a/photon-core/src/main/java/org/photonvision/common/dataflow/websocket/UIDataPublisher.java b/photon-core/src/main/java/org/photonvision/common/dataflow/websocket/UIDataPublisher.java index 4a56ecab3e..17ed240921 100644 --- a/photon-core/src/main/java/org/photonvision/common/dataflow/websocket/UIDataPublisher.java +++ b/photon-core/src/main/java/org/photonvision/common/dataflow/websocket/UIDataPublisher.java @@ -28,47 +28,47 @@ import org.photonvision.vision.pipeline.result.CVPipelineResult; public class UIDataPublisher implements CVPipelineResultConsumer { - private static final Logger logger = new Logger(UIDataPublisher.class, LogGroup.VisionModule); + private static final Logger logger = new Logger(UIDataPublisher.class, LogGroup.VisionModule); - private final int index; - private long lastUIResultUpdateTime = 0; + private final int index; + private long lastUIResultUpdateTime = 0; - public UIDataPublisher(int index) { - this.index = index; - } + public UIDataPublisher(int index) { + this.index = index; + } - @Override - public void accept(CVPipelineResult result) { - long now = System.currentTimeMillis(); + @Override + public void accept(CVPipelineResult result) { + long now = System.currentTimeMillis(); - // only update the UI at 15hz - if (lastUIResultUpdateTime + 1000.0 / 10.0 > now) return; + // only update the UI at 15hz + if 
(lastUIResultUpdateTime + 1000.0 / 10.0 > now) return; - var dataMap = new HashMap(); - dataMap.put("fps", result.fps); - dataMap.put("latency", result.getLatencyMillis()); - var uiTargets = new ArrayList>(result.targets.size()); - for (var t : result.targets) { - uiTargets.add(t.toHashMap()); - } - dataMap.put("targets", uiTargets); + var dataMap = new HashMap(); + dataMap.put("fps", result.fps); + dataMap.put("latency", result.getLatencyMillis()); + var uiTargets = new ArrayList>(result.targets.size()); + for (var t : result.targets) { + uiTargets.add(t.toHashMap()); + } + dataMap.put("targets", uiTargets); - // Only send Multitag Results if they are present, similar to 3d pose - if (result.multiTagResult.estimatedPose.isPresent) { - var multitagData = new HashMap(); - multitagData.put( - "bestTransform", - SerializationUtils.transformToHashMap(result.multiTagResult.estimatedPose.best)); - multitagData.put("bestReprojectionError", result.multiTagResult.estimatedPose.bestReprojErr); - multitagData.put("fiducialIDsUsed", result.multiTagResult.fiducialIDsUsed); - dataMap.put("multitagResult", multitagData); - } + // Only send Multitag Results if they are present, similar to 3d pose + if (result.multiTagResult.estimatedPose.isPresent) { + var multitagData = new HashMap(); + multitagData.put( + "bestTransform", + SerializationUtils.transformToHashMap(result.multiTagResult.estimatedPose.best)); + multitagData.put("bestReprojectionError", result.multiTagResult.estimatedPose.bestReprojErr); + multitagData.put("fiducialIDsUsed", result.multiTagResult.fiducialIDsUsed); + dataMap.put("multitagResult", multitagData); + } - var uiMap = new HashMap>(); - uiMap.put(index, dataMap); + var uiMap = new HashMap>(); + uiMap.put(index, dataMap); - DataChangeService.getInstance() - .publishEvent(OutgoingUIEvent.wrappedOf("updatePipelineResult", uiMap)); - lastUIResultUpdateTime = now; - } + DataChangeService.getInstance() + 
.publishEvent(OutgoingUIEvent.wrappedOf("updatePipelineResult", uiMap)); + lastUIResultUpdateTime = now; + } } diff --git a/photon-core/src/main/java/org/photonvision/common/hardware/GPIO/CustomGPIO.java b/photon-core/src/main/java/org/photonvision/common/hardware/GPIO/CustomGPIO.java index 8b3e282a35..f67beb97cc 100644 --- a/photon-core/src/main/java/org/photonvision/common/hardware/GPIO/CustomGPIO.java +++ b/photon-core/src/main/java/org/photonvision/common/hardware/GPIO/CustomGPIO.java @@ -21,79 +21,79 @@ import org.photonvision.common.hardware.Platform; public class CustomGPIO extends GPIOBase { - private boolean currentState; - private final int port; + private boolean currentState; + private final int port; - public CustomGPIO(int port) { - this.port = port; - } + public CustomGPIO(int port) { + this.port = port; + } - @Override - public void togglePin() { - if (this.port != -1) { - execute( - commands - .get("setState") - .replace("{s}", String.valueOf(!currentState)) - .replace("{p}", String.valueOf(this.port))); - currentState = !currentState; + @Override + public void togglePin() { + if (this.port != -1) { + execute( + commands + .get("setState") + .replace("{s}", String.valueOf(!currentState)) + .replace("{p}", String.valueOf(this.port))); + currentState = !currentState; + } } - } - @Override - public int getPinNumber() { - return port; - } + @Override + public int getPinNumber() { + return port; + } - @Override - public void setStateImpl(boolean state) { - if (this.port != -1) { - execute( - commands - .get("setState") - .replace("{s}", String.valueOf(state)) - .replace("{p}", String.valueOf(port))); - currentState = state; + @Override + public void setStateImpl(boolean state) { + if (this.port != -1) { + execute( + commands + .get("setState") + .replace("{s}", String.valueOf(state)) + .replace("{p}", String.valueOf(port))); + currentState = state; + } } - } - @Override - public boolean shutdown() { - if (this.port != -1) { - 
execute(commands.get("shutdown")); - return true; + @Override + public boolean shutdown() { + if (this.port != -1) { + execute(commands.get("shutdown")); + return true; + } + return false; } - return false; - } - @Override - public boolean getStateImpl() { - return currentState; - } + @Override + public boolean getStateImpl() { + return currentState; + } - @Override - public void blinkImpl(int pulseTimeMillis, int blinks) { - execute( - commands - .get("blink") - .replace("{pulseTime}", String.valueOf(pulseTimeMillis)) - .replace("{blinks}", String.valueOf(blinks)) - .replace("{p}", String.valueOf(this.port))); - } + @Override + public void blinkImpl(int pulseTimeMillis, int blinks) { + execute( + commands + .get("blink") + .replace("{pulseTime}", String.valueOf(pulseTimeMillis)) + .replace("{blinks}", String.valueOf(blinks)) + .replace("{p}", String.valueOf(this.port))); + } - @Override - public void setBrightnessImpl(int brightness) { - execute( - commands - .get("dim") - .replace("{p}", String.valueOf(port)) - .replace("{v}", String.valueOf(brightness))); - } + @Override + public void setBrightnessImpl(int brightness) { + execute( + commands + .get("dim") + .replace("{p}", String.valueOf(port)) + .replace("{v}", String.valueOf(brightness))); + } - public static void setConfig(HardwareConfig config) { - if (Platform.isRaspberryPi()) return; - commands.replace("setState", config.ledSetCommand); - commands.replace("dim", config.ledDimCommand); - commands.replace("blink", config.ledBlinkCommand); - } + public static void setConfig(HardwareConfig config) { + if (Platform.isRaspberryPi()) return; + commands.replace("setState", config.ledSetCommand); + commands.replace("dim", config.ledDimCommand); + commands.replace("blink", config.ledBlinkCommand); + } } diff --git a/photon-core/src/main/java/org/photonvision/common/hardware/GPIO/GPIOBase.java b/photon-core/src/main/java/org/photonvision/common/hardware/GPIO/GPIOBase.java index 5b9d6ba368..a1d25be98f 100644 --- 
a/photon-core/src/main/java/org/photonvision/common/hardware/GPIO/GPIOBase.java +++ b/photon-core/src/main/java/org/photonvision/common/hardware/GPIO/GPIOBase.java @@ -24,76 +24,76 @@ import org.photonvision.common.util.ShellExec; public abstract class GPIOBase { - private static final Logger logger = new Logger(GPIOBase.class, LogGroup.General); - private static final ShellExec runCommand = new ShellExec(true, true); - - protected static HashMap commands = - new HashMap<>() { - { - put("setState", ""); - put("shutdown", ""); - put("dim", ""); - put("blink", ""); + private static final Logger logger = new Logger(GPIOBase.class, LogGroup.General); + private static final ShellExec runCommand = new ShellExec(true, true); + + protected static HashMap commands = + new HashMap<>() { + { + put("setState", ""); + put("shutdown", ""); + put("dim", ""); + put("blink", ""); + } + }; + + protected static String execute(String command) { + try { + runCommand.executeBashCommand(command); + } catch (Exception e) { + logger.error(Arrays.toString(e.getStackTrace())); + return ""; } - }; - - protected static String execute(String command) { - try { - runCommand.executeBashCommand(command); - } catch (Exception e) { - logger.error(Arrays.toString(e.getStackTrace())); - return ""; + return runCommand.getOutput(); } - return runCommand.getOutput(); - } - public abstract int getPinNumber(); + public abstract int getPinNumber(); - public void setState(boolean state) { - if (getPinNumber() != -1) { - setStateImpl(state); + public void setState(boolean state) { + if (getPinNumber() != -1) { + setStateImpl(state); + } } - } - protected abstract void setStateImpl(boolean state); + protected abstract void setStateImpl(boolean state); - public final void setOff() { - setState(false); - } + public final void setOff() { + setState(false); + } - public final void setOn() { - setState(true); - } + public final void setOn() { + setState(true); + } - public void togglePin() { - 
setState(!getStateImpl()); - } + public void togglePin() { + setState(!getStateImpl()); + } - public abstract boolean shutdown(); + public abstract boolean shutdown(); - public final boolean getState() { - if (getPinNumber() != -1) { - return getStateImpl(); - } else return false; - } + public final boolean getState() { + if (getPinNumber() != -1) { + return getStateImpl(); + } else return false; + } - public abstract boolean getStateImpl(); + public abstract boolean getStateImpl(); - public final void blink(int pulseTimeMillis, int blinks) { - if (getPinNumber() != -1) { - blinkImpl(pulseTimeMillis, blinks); + public final void blink(int pulseTimeMillis, int blinks) { + if (getPinNumber() != -1) { + blinkImpl(pulseTimeMillis, blinks); + } } - } - protected abstract void blinkImpl(int pulseTimeMillis, int blinks); + protected abstract void blinkImpl(int pulseTimeMillis, int blinks); - public final void setBrightness(int brightness) { - if (getPinNumber() != -1) { - if (brightness > 100) brightness = 100; - if (brightness < 0) brightness = 0; - setBrightnessImpl(brightness); + public final void setBrightness(int brightness) { + if (getPinNumber() != -1) { + if (brightness > 100) brightness = 100; + if (brightness < 0) brightness = 0; + setBrightnessImpl(brightness); + } } - } - protected abstract void setBrightnessImpl(int brightness); + protected abstract void setBrightnessImpl(int brightness); } diff --git a/photon-core/src/main/java/org/photonvision/common/hardware/GPIO/pi/PigpioCommand.java b/photon-core/src/main/java/org/photonvision/common/hardware/GPIO/pi/PigpioCommand.java index 7ba3266ffe..932674bb9e 100644 --- a/photon-core/src/main/java/org/photonvision/common/hardware/GPIO/pi/PigpioCommand.java +++ b/photon-core/src/main/java/org/photonvision/common/hardware/GPIO/pi/PigpioCommand.java @@ -19,22 +19,22 @@ @SuppressWarnings("SpellCheckingInspection") public enum PigpioCommand { - PCMD_READ(3), // int gpio_read(unsigned gpio) - PCMD_WRITE(4), // int 
gpio_write(unsigned gpio, unsigned level) - PCMD_WVCLR(27), // int wave_clear(void) - PCMD_WVAG(28), // int wave_add_generic(unsigned numPulses, gpioPulse_t *pulses) - PCMD_WVHLT(33), // int wave_tx_stop(void) - PCMD_WVCRE(49), // int wave_create(void) - PCMD_WVDEL(50), // int wave_delete(unsigned wave_id) - PCMD_WVTX(51), // int wave_tx_send(unsigned wave_id) (once) - PCMD_WVTXR(52), // int wave_tx_send(unsigned wave_id) (repeat) - PCMD_GDC(83), // int get_duty_cyle(unsigned user_gpio) - PCMD_HP(86), // int hardware_pwm(unsigned gpio, unsigned PWMfreq, unsigned PWMduty) - PCMD_WVTXM(100); // int wave_tx_send(unsigned wave_id, unsigned wave_mode) + PCMD_READ(3), // int gpio_read(unsigned gpio) + PCMD_WRITE(4), // int gpio_write(unsigned gpio, unsigned level) + PCMD_WVCLR(27), // int wave_clear(void) + PCMD_WVAG(28), // int wave_add_generic(unsigned numPulses, gpioPulse_t *pulses) + PCMD_WVHLT(33), // int wave_tx_stop(void) + PCMD_WVCRE(49), // int wave_create(void) + PCMD_WVDEL(50), // int wave_delete(unsigned wave_id) + PCMD_WVTX(51), // int wave_tx_send(unsigned wave_id) (once) + PCMD_WVTXR(52), // int wave_tx_send(unsigned wave_id) (repeat) + PCMD_GDC(83), // int get_duty_cyle(unsigned user_gpio) + PCMD_HP(86), // int hardware_pwm(unsigned gpio, unsigned PWMfreq, unsigned PWMduty) + PCMD_WVTXM(100); // int wave_tx_send(unsigned wave_id, unsigned wave_mode) - public final int value; + public final int value; - PigpioCommand(int value) { - this.value = value; - } + PigpioCommand(int value) { + this.value = value; + } } diff --git a/photon-core/src/main/java/org/photonvision/common/hardware/GPIO/pi/PigpioException.java b/photon-core/src/main/java/org/photonvision/common/hardware/GPIO/pi/PigpioException.java index dfa2191206..3ad737777d 100644 --- a/photon-core/src/main/java/org/photonvision/common/hardware/GPIO/pi/PigpioException.java +++ b/photon-core/src/main/java/org/photonvision/common/hardware/GPIO/pi/PigpioException.java @@ -27,317 +27,317 @@ */ 
@SuppressWarnings({"SpellCheckingInspection", "unused", "RedundantSuppression"}) public class PigpioException extends Exception { - private int rc = -99999999; - private static final long serialVersionUID = 443595760654129068L; + private int rc = -99999999; + private static final long serialVersionUID = 443595760654129068L; - public PigpioException() { - super(); - } + public PigpioException() { + super(); + } - public PigpioException(int rc) { - super(); - this.rc = rc; - } + public PigpioException(int rc) { + super(); + this.rc = rc; + } - public PigpioException(int rc, String msg) { - super(msg); - this.rc = rc; - } + public PigpioException(int rc, String msg) { + super(msg); + this.rc = rc; + } - public PigpioException(String arg0, Throwable arg1, boolean arg2, boolean arg3) { - super(arg0, arg1, arg2, arg3); - } + public PigpioException(String arg0, Throwable arg1, boolean arg2, boolean arg3) { + super(arg0, arg1, arg2, arg3); + } - public PigpioException(String arg0, Throwable arg1) { - super(arg0, arg1); - } + public PigpioException(String arg0, Throwable arg1) { + super(arg0, arg1); + } - public PigpioException(String arg0) { - super(arg0); - } + public PigpioException(String arg0) { + super(arg0); + } - public PigpioException(Throwable arg0) { - super(arg0); - } + public PigpioException(Throwable arg0) { + super(arg0); + } - @Override - public String getMessage() { - return "(" + rc + ") " + getMessageForError(rc); - } + @Override + public String getMessage() { + return "(" + rc + ") " + getMessageForError(rc); + } - /** - * Retrieve the error code that was returned by the underlying Pigpio call. - * - * @return The error code that was returned by the underlying Pigpio call. - */ - public int getErrorCode() { - return rc; - } // End of getErrorCode + /** + * Retrieve the error code that was returned by the underlying Pigpio call. + * + * @return The error code that was returned by the underlying Pigpio call. 
+ */ + public int getErrorCode() { + return rc; + } // End of getErrorCode - // Public constants for the error codes that can be thrown by Pigpio - public static final int PI_INIT_FAILED = -1; // gpioInitialise failed - public static final int PI_BAD_USER_GPIO = -2; // gpio not 0-31 - public static final int PI_BAD_GPIO = -3; // gpio not 0-53 - public static final int PI_BAD_MODE = -4; // mode not 0-7 - public static final int PI_BAD_LEVEL = -5; // level not 0-1 - public static final int PI_BAD_PUD = -6; // pud not 0-2 - public static final int PI_BAD_PULSEWIDTH = -7; // pulsewidth not 0 or 500-2500 - public static final int PI_BAD_DUTYCYCLE = -8; // dutycycle outside set range - public static final int PI_BAD_TIMER = -9; // timer not 0-9 - public static final int PI_BAD_MS = -10; // ms not 10-60000 - public static final int PI_BAD_TIMETYPE = -11; // timetype not 0-1 - public static final int PI_BAD_SECONDS = -12; // seconds < 0 - public static final int PI_BAD_MICROS = -13; // micros not 0-999999 - public static final int PI_TIMER_FAILED = -14; // gpioSetTimerFunc failed - public static final int PI_BAD_WDOG_TIMEOUT = -15; // timeout not 0-60000 - public static final int PI_NO_ALERT_FUNC = -16; // DEPRECATED - public static final int PI_BAD_CLK_PERIPH = -17; // clock peripheral not 0-1 - public static final int PI_BAD_CLK_SOURCE = -18; // DEPRECATED - public static final int PI_BAD_CLK_MICROS = -19; // clock micros not 1, 2, 4, 5, 8, or 10 - public static final int PI_BAD_BUF_MILLIS = -20; // buf millis not 100-10000 - public static final int PI_BAD_DUTYRANGE = -21; // dutycycle range not 25-40000 - public static final int PI_BAD_DUTY_RANGE = -21; // DEPRECATED (use PI_BAD_DUTYRANGE) - public static final int PI_BAD_SIGNUM = -22; // signum not 0-63 - public static final int PI_BAD_PATHNAME = -23; // can't open pathname - public static final int PI_NO_HANDLE = -24; // no handle available - public static final int PI_BAD_HANDLE = -25; // unknown handle - public 
static final int PI_BAD_IF_FLAGS = -26; // ifFlags > 3 - public static final int PI_BAD_CHANNEL = -27; // DMA channel not 0-14 - public static final int PI_BAD_PRIM_CHANNEL = -27; // DMA primary channel not 0-14 - public static final int PI_BAD_SOCKET_PORT = -28; // socket port not 1024-32000 - public static final int PI_BAD_FIFO_COMMAND = -29; // unrecognized fifo command - public static final int PI_BAD_SECO_CHANNEL = -30; // DMA secondary channel not 0-6 - public static final int PI_NOT_INITIALISED = -31; // function called before gpioInitialise - public static final int PI_INITIALISED = -32; // function called after gpioInitialise - public static final int PI_BAD_WAVE_MODE = -33; // waveform mode not 0-1 - public static final int PI_BAD_CFG_INTERNAL = -34; // bad parameter in gpioCfgInternals call - public static final int PI_BAD_WAVE_BAUD = -35; // baud rate not 50-250K(RX)/50-1M(TX) - public static final int PI_TOO_MANY_PULSES = -36; // waveform has too many pulses - public static final int PI_TOO_MANY_CHARS = -37; // waveform has too many chars - public static final int PI_NOT_SERIAL_GPIO = -38; // no serial read in progress on gpio - public static final int PI_BAD_SERIAL_STRUC = -39; // bad (null) serial structure parameter - public static final int PI_BAD_SERIAL_BUF = -40; // bad (null) serial buf parameter - public static final int PI_NOT_PERMITTED = -41; // gpio operation not permitted - public static final int PI_SOME_PERMITTED = -42; // one or more gpios not permitted - public static final int PI_BAD_WVSC_COMMND = -43; // bad WVSC subcommand - public static final int PI_BAD_WVSM_COMMND = -44; // bad WVSM subcommand - public static final int PI_BAD_WVSP_COMMND = -45; // bad WVSP subcommand - public static final int PI_BAD_PULSELEN = -46; // trigger pulse length not 1-100 - public static final int PI_BAD_SCRIPT = -47; // invalid script - public static final int PI_BAD_SCRIPT_ID = -48; // unknown script id - public static final int PI_BAD_SER_OFFSET = 
-49; // add serial data offset > 30 minutes - public static final int PI_GPIO_IN_USE = -50; // gpio already in use - public static final int PI_BAD_SERIAL_COUNT = -51; // must read at least a byte at a time - public static final int PI_BAD_PARAM_NUM = -52; // script parameter id not 0-9 - public static final int PI_DUP_TAG = -53; // script has duplicate tag - public static final int PI_TOO_MANY_TAGS = -54; // script has too many tags - public static final int PI_BAD_SCRIPT_CMD = -55; // illegal script command - public static final int PI_BAD_VAR_NUM = -56; // script variable id not 0-149 - public static final int PI_NO_SCRIPT_ROOM = -57; // no more room for scripts - public static final int PI_NO_MEMORY = -58; // can't allocate temporary memory - public static final int PI_SOCK_READ_FAILED = -59; // socket read failed - public static final int PI_SOCK_WRIT_FAILED = -60; // socket write failed - public static final int PI_TOO_MANY_PARAM = -61; // too many script parameters (> 10) - public static final int PI_NOT_HALTED = -62; // script already running or failed - public static final int PI_BAD_TAG = -63; // script has unresolved tag - public static final int PI_BAD_MICS_DELAY = -64; // bad MICS delay (too large) - public static final int PI_BAD_MILS_DELAY = -65; // bad MILS delay (too large) - public static final int PI_BAD_WAVE_ID = -66; // non existent wave id - public static final int PI_TOO_MANY_CBS = -67; // No more CBs for waveform - public static final int PI_TOO_MANY_OOL = -68; // No more OOL for waveform - public static final int PI_EMPTY_WAVEFORM = -69; // attempt to create an empty waveform - public static final int PI_NO_WAVEFORM_ID = -70; // no more waveforms - public static final int PI_I2C_OPEN_FAILED = -71; // can't open I2C device - public static final int PI_SER_OPEN_FAILED = -72; // can't open serial device - public static final int PI_SPI_OPEN_FAILED = -73; // can't open SPI device - public static final int PI_BAD_I2C_BUS = -74; // bad I2C bus - 
public static final int PI_BAD_I2C_ADDR = -75; // bad I2C address - public static final int PI_BAD_SPI_CHANNEL = -76; // bad SPI channel - public static final int PI_BAD_FLAGS = -77; // bad i2c/spi/ser open flags - public static final int PI_BAD_SPI_SPEED = -78; // bad SPI speed - public static final int PI_BAD_SER_DEVICE = -79; // bad serial device name - public static final int PI_BAD_SER_SPEED = -80; // bad serial baud rate - public static final int PI_BAD_PARAM = -81; // bad i2c/spi/ser parameter - public static final int PI_I2C_WRITE_FAILED = -82; // i2c write failed - public static final int PI_I2C_READ_FAILED = -83; // i2c read failed - public static final int PI_BAD_SPI_COUNT = -84; // bad SPI count - public static final int PI_SER_WRITE_FAILED = -85; // ser write failed - public static final int PI_SER_READ_FAILED = -86; // ser read failed - public static final int PI_SER_READ_NO_DATA = -87; // ser read no data available - public static final int PI_UNKNOWN_COMMAND = -88; // unknown command - public static final int PI_SPI_XFER_FAILED = -89; // spi xfer/read/write failed - public static final int PI_BAD_POINTER = -90; // bad (NULL) pointer - public static final int PI_NO_AUX_SPI = -91; // need a A+/B+/Pi2 for auxiliary SPI - public static final int PI_NOT_PWM_GPIO = -92; // gpio is not in use for PWM - public static final int PI_NOT_SERVO_GPIO = -93; // gpio is not in use for servo pulses - public static final int PI_NOT_HCLK_GPIO = -94; // gpio has no hardware clock - public static final int PI_NOT_HPWM_GPIO = -95; // gpio has no hardware PWM - public static final int PI_BAD_HPWM_FREQ = -96; // hardware PWM frequency not 1-125M - public static final int PI_BAD_HPWM_DUTY = -97; // hardware PWM dutycycle not 0-1M - public static final int PI_BAD_HCLK_FREQ = -98; // hardware clock frequency not 4689-250M - public static final int PI_BAD_HCLK_PASS = -99; // need password to use hardware clock 1 - public static final int PI_HPWM_ILLEGAL = -100; // illegal, PWM 
in use for main clock - public static final int PI_BAD_DATABITS = -101; // serial data bits not 1-32 - public static final int PI_BAD_STOPBITS = -102; // serial (half) stop bits not 2-8 - public static final int PI_MSG_TOOBIG = -103; // socket/pipe message too big - public static final int PI_BAD_MALLOC_MODE = -104; // bad memory allocation mode - public static final int PI_TOO_MANY_SEGS = -105; // too many I2C transaction parts - public static final int PI_BAD_I2C_SEG = -106; // a combined I2C transaction failed - public static final int PI_BAD_SMBUS_CMD = -107; - public static final int PI_NOT_I2C_GPIO = -108; - public static final int PI_BAD_I2C_WLEN = -109; - public static final int PI_BAD_I2C_RLEN = -110; - public static final int PI_BAD_I2C_CMD = -111; - public static final int PI_BAD_I2C_BAUD = -112; - public static final int PI_CHAIN_LOOP_CNT = -113; - public static final int PI_BAD_CHAIN_LOOP = -114; - public static final int PI_CHAIN_COUNTER = -115; - public static final int PI_BAD_CHAIN_CMD = -116; - public static final int PI_BAD_CHAIN_DELAY = -117; - public static final int PI_CHAIN_NESTING = -118; - public static final int PI_CHAIN_TOO_BIG = -119; - public static final int PI_DEPRECATED = -120; - public static final int PI_BAD_SER_INVERT = -121; - public static final int PI_BAD_EDGE = -122; - public static final int PI_BAD_ISR_INIT = -123; - public static final int PI_BAD_FOREVER = -124; - public static final int PI_BAD_FILTER = -125; + // Public constants for the error codes that can be thrown by Pigpio + public static final int PI_INIT_FAILED = -1; // gpioInitialise failed + public static final int PI_BAD_USER_GPIO = -2; // gpio not 0-31 + public static final int PI_BAD_GPIO = -3; // gpio not 0-53 + public static final int PI_BAD_MODE = -4; // mode not 0-7 + public static final int PI_BAD_LEVEL = -5; // level not 0-1 + public static final int PI_BAD_PUD = -6; // pud not 0-2 + public static final int PI_BAD_PULSEWIDTH = -7; // pulsewidth not 0 or 
500-2500 + public static final int PI_BAD_DUTYCYCLE = -8; // dutycycle outside set range + public static final int PI_BAD_TIMER = -9; // timer not 0-9 + public static final int PI_BAD_MS = -10; // ms not 10-60000 + public static final int PI_BAD_TIMETYPE = -11; // timetype not 0-1 + public static final int PI_BAD_SECONDS = -12; // seconds < 0 + public static final int PI_BAD_MICROS = -13; // micros not 0-999999 + public static final int PI_TIMER_FAILED = -14; // gpioSetTimerFunc failed + public static final int PI_BAD_WDOG_TIMEOUT = -15; // timeout not 0-60000 + public static final int PI_NO_ALERT_FUNC = -16; // DEPRECATED + public static final int PI_BAD_CLK_PERIPH = -17; // clock peripheral not 0-1 + public static final int PI_BAD_CLK_SOURCE = -18; // DEPRECATED + public static final int PI_BAD_CLK_MICROS = -19; // clock micros not 1, 2, 4, 5, 8, or 10 + public static final int PI_BAD_BUF_MILLIS = -20; // buf millis not 100-10000 + public static final int PI_BAD_DUTYRANGE = -21; // dutycycle range not 25-40000 + public static final int PI_BAD_DUTY_RANGE = -21; // DEPRECATED (use PI_BAD_DUTYRANGE) + public static final int PI_BAD_SIGNUM = -22; // signum not 0-63 + public static final int PI_BAD_PATHNAME = -23; // can't open pathname + public static final int PI_NO_HANDLE = -24; // no handle available + public static final int PI_BAD_HANDLE = -25; // unknown handle + public static final int PI_BAD_IF_FLAGS = -26; // ifFlags > 3 + public static final int PI_BAD_CHANNEL = -27; // DMA channel not 0-14 + public static final int PI_BAD_PRIM_CHANNEL = -27; // DMA primary channel not 0-14 + public static final int PI_BAD_SOCKET_PORT = -28; // socket port not 1024-32000 + public static final int PI_BAD_FIFO_COMMAND = -29; // unrecognized fifo command + public static final int PI_BAD_SECO_CHANNEL = -30; // DMA secondary channel not 0-6 + public static final int PI_NOT_INITIALISED = -31; // function called before gpioInitialise + public static final int PI_INITIALISED = -32; 
// function called after gpioInitialise + public static final int PI_BAD_WAVE_MODE = -33; // waveform mode not 0-1 + public static final int PI_BAD_CFG_INTERNAL = -34; // bad parameter in gpioCfgInternals call + public static final int PI_BAD_WAVE_BAUD = -35; // baud rate not 50-250K(RX)/50-1M(TX) + public static final int PI_TOO_MANY_PULSES = -36; // waveform has too many pulses + public static final int PI_TOO_MANY_CHARS = -37; // waveform has too many chars + public static final int PI_NOT_SERIAL_GPIO = -38; // no serial read in progress on gpio + public static final int PI_BAD_SERIAL_STRUC = -39; // bad (null) serial structure parameter + public static final int PI_BAD_SERIAL_BUF = -40; // bad (null) serial buf parameter + public static final int PI_NOT_PERMITTED = -41; // gpio operation not permitted + public static final int PI_SOME_PERMITTED = -42; // one or more gpios not permitted + public static final int PI_BAD_WVSC_COMMND = -43; // bad WVSC subcommand + public static final int PI_BAD_WVSM_COMMND = -44; // bad WVSM subcommand + public static final int PI_BAD_WVSP_COMMND = -45; // bad WVSP subcommand + public static final int PI_BAD_PULSELEN = -46; // trigger pulse length not 1-100 + public static final int PI_BAD_SCRIPT = -47; // invalid script + public static final int PI_BAD_SCRIPT_ID = -48; // unknown script id + public static final int PI_BAD_SER_OFFSET = -49; // add serial data offset > 30 minutes + public static final int PI_GPIO_IN_USE = -50; // gpio already in use + public static final int PI_BAD_SERIAL_COUNT = -51; // must read at least a byte at a time + public static final int PI_BAD_PARAM_NUM = -52; // script parameter id not 0-9 + public static final int PI_DUP_TAG = -53; // script has duplicate tag + public static final int PI_TOO_MANY_TAGS = -54; // script has too many tags + public static final int PI_BAD_SCRIPT_CMD = -55; // illegal script command + public static final int PI_BAD_VAR_NUM = -56; // script variable id not 0-149 + public 
static final int PI_NO_SCRIPT_ROOM = -57; // no more room for scripts + public static final int PI_NO_MEMORY = -58; // can't allocate temporary memory + public static final int PI_SOCK_READ_FAILED = -59; // socket read failed + public static final int PI_SOCK_WRIT_FAILED = -60; // socket write failed + public static final int PI_TOO_MANY_PARAM = -61; // too many script parameters (> 10) + public static final int PI_NOT_HALTED = -62; // script already running or failed + public static final int PI_BAD_TAG = -63; // script has unresolved tag + public static final int PI_BAD_MICS_DELAY = -64; // bad MICS delay (too large) + public static final int PI_BAD_MILS_DELAY = -65; // bad MILS delay (too large) + public static final int PI_BAD_WAVE_ID = -66; // non existent wave id + public static final int PI_TOO_MANY_CBS = -67; // No more CBs for waveform + public static final int PI_TOO_MANY_OOL = -68; // No more OOL for waveform + public static final int PI_EMPTY_WAVEFORM = -69; // attempt to create an empty waveform + public static final int PI_NO_WAVEFORM_ID = -70; // no more waveforms + public static final int PI_I2C_OPEN_FAILED = -71; // can't open I2C device + public static final int PI_SER_OPEN_FAILED = -72; // can't open serial device + public static final int PI_SPI_OPEN_FAILED = -73; // can't open SPI device + public static final int PI_BAD_I2C_BUS = -74; // bad I2C bus + public static final int PI_BAD_I2C_ADDR = -75; // bad I2C address + public static final int PI_BAD_SPI_CHANNEL = -76; // bad SPI channel + public static final int PI_BAD_FLAGS = -77; // bad i2c/spi/ser open flags + public static final int PI_BAD_SPI_SPEED = -78; // bad SPI speed + public static final int PI_BAD_SER_DEVICE = -79; // bad serial device name + public static final int PI_BAD_SER_SPEED = -80; // bad serial baud rate + public static final int PI_BAD_PARAM = -81; // bad i2c/spi/ser parameter + public static final int PI_I2C_WRITE_FAILED = -82; // i2c write failed + public static final int 
PI_I2C_READ_FAILED = -83; // i2c read failed + public static final int PI_BAD_SPI_COUNT = -84; // bad SPI count + public static final int PI_SER_WRITE_FAILED = -85; // ser write failed + public static final int PI_SER_READ_FAILED = -86; // ser read failed + public static final int PI_SER_READ_NO_DATA = -87; // ser read no data available + public static final int PI_UNKNOWN_COMMAND = -88; // unknown command + public static final int PI_SPI_XFER_FAILED = -89; // spi xfer/read/write failed + public static final int PI_BAD_POINTER = -90; // bad (NULL) pointer + public static final int PI_NO_AUX_SPI = -91; // need a A+/B+/Pi2 for auxiliary SPI + public static final int PI_NOT_PWM_GPIO = -92; // gpio is not in use for PWM + public static final int PI_NOT_SERVO_GPIO = -93; // gpio is not in use for servo pulses + public static final int PI_NOT_HCLK_GPIO = -94; // gpio has no hardware clock + public static final int PI_NOT_HPWM_GPIO = -95; // gpio has no hardware PWM + public static final int PI_BAD_HPWM_FREQ = -96; // hardware PWM frequency not 1-125M + public static final int PI_BAD_HPWM_DUTY = -97; // hardware PWM dutycycle not 0-1M + public static final int PI_BAD_HCLK_FREQ = -98; // hardware clock frequency not 4689-250M + public static final int PI_BAD_HCLK_PASS = -99; // need password to use hardware clock 1 + public static final int PI_HPWM_ILLEGAL = -100; // illegal, PWM in use for main clock + public static final int PI_BAD_DATABITS = -101; // serial data bits not 1-32 + public static final int PI_BAD_STOPBITS = -102; // serial (half) stop bits not 2-8 + public static final int PI_MSG_TOOBIG = -103; // socket/pipe message too big + public static final int PI_BAD_MALLOC_MODE = -104; // bad memory allocation mode + public static final int PI_TOO_MANY_SEGS = -105; // too many I2C transaction parts + public static final int PI_BAD_I2C_SEG = -106; // a combined I2C transaction failed + public static final int PI_BAD_SMBUS_CMD = -107; + public static final int 
PI_NOT_I2C_GPIO = -108; + public static final int PI_BAD_I2C_WLEN = -109; + public static final int PI_BAD_I2C_RLEN = -110; + public static final int PI_BAD_I2C_CMD = -111; + public static final int PI_BAD_I2C_BAUD = -112; + public static final int PI_CHAIN_LOOP_CNT = -113; + public static final int PI_BAD_CHAIN_LOOP = -114; + public static final int PI_CHAIN_COUNTER = -115; + public static final int PI_BAD_CHAIN_CMD = -116; + public static final int PI_BAD_CHAIN_DELAY = -117; + public static final int PI_CHAIN_NESTING = -118; + public static final int PI_CHAIN_TOO_BIG = -119; + public static final int PI_DEPRECATED = -120; + public static final int PI_BAD_SER_INVERT = -121; + public static final int PI_BAD_EDGE = -122; + public static final int PI_BAD_ISR_INIT = -123; + public static final int PI_BAD_FOREVER = -124; + public static final int PI_BAD_FILTER = -125; - public static final int PI_PIGIF_ERR_0 = -2000; - public static final int PI_PIGIF_ERR_99 = -2099; + public static final int PI_PIGIF_ERR_0 = -2000; + public static final int PI_PIGIF_ERR_99 = -2099; - public static final int PI_CUSTOM_ERR_0 = -3000; - public static final int PI_CUSTOM_ERR_999 = -3999; + public static final int PI_CUSTOM_ERR_0 = -3000; + public static final int PI_CUSTOM_ERR_999 = -3999; - private static final HashMap errorMessages = new HashMap<>(); + private static final HashMap errorMessages = new HashMap<>(); - static { - errorMessages.put(PI_INIT_FAILED, "pigpio initialisation failed"); - errorMessages.put(PI_BAD_USER_GPIO, "GPIO not 0-31"); - errorMessages.put(PI_BAD_GPIO, "GPIO not 0-53"); - errorMessages.put(PI_BAD_MODE, "mode not 0-7"); - errorMessages.put(PI_BAD_LEVEL, "level not 0-1"); - errorMessages.put(PI_BAD_PUD, "pud not 0-2"); - errorMessages.put(PI_BAD_PULSEWIDTH, "pulsewidth not 0 or 500-2500"); - errorMessages.put(PI_BAD_DUTYCYCLE, "dutycycle not 0-range (default 255)"); - errorMessages.put(PI_BAD_TIMER, "timer not 0-9"); - errorMessages.put(PI_BAD_MS, "ms not 
10-60000"); - errorMessages.put(PI_BAD_TIMETYPE, "timetype not 0-1"); - errorMessages.put(PI_BAD_SECONDS, "seconds < 0"); - errorMessages.put(PI_BAD_MICROS, "micros not 0-999999"); - errorMessages.put(PI_TIMER_FAILED, "gpioSetTimerFunc failed"); - errorMessages.put(PI_BAD_WDOG_TIMEOUT, "timeout not 0-60000"); - errorMessages.put(PI_NO_ALERT_FUNC, "DEPRECATED"); - errorMessages.put(PI_BAD_CLK_PERIPH, "clock peripheral not 0-1"); - errorMessages.put(PI_BAD_CLK_SOURCE, "DEPRECATED"); - errorMessages.put(PI_BAD_CLK_MICROS, "clock micros not 1, 2, 4, 5, 8, or 10"); - errorMessages.put(PI_BAD_BUF_MILLIS, "buf millis not 100-10000"); - errorMessages.put(PI_BAD_DUTYRANGE, "dutycycle range not 25-40000"); - errorMessages.put(PI_BAD_SIGNUM, "signum not 0-63"); - errorMessages.put(PI_BAD_PATHNAME, "can't open pathname"); - errorMessages.put(PI_NO_HANDLE, "no handle available"); - errorMessages.put(PI_BAD_HANDLE, "unknown handle"); - errorMessages.put(PI_BAD_IF_FLAGS, "ifFlags > 3"); - errorMessages.put(PI_BAD_CHANNEL, "DMA channel not 0-14"); - errorMessages.put(PI_BAD_SOCKET_PORT, "socket port not 1024-30000"); - errorMessages.put(PI_BAD_FIFO_COMMAND, "unknown fifo command"); - errorMessages.put(PI_BAD_SECO_CHANNEL, "DMA secondary channel not 0-14"); - errorMessages.put(PI_NOT_INITIALISED, "function called before gpioInitialise"); - errorMessages.put(PI_INITIALISED, "function called after gpioInitialise"); - errorMessages.put(PI_BAD_WAVE_MODE, "waveform mode not 0-1"); - errorMessages.put(PI_BAD_CFG_INTERNAL, "bad parameter in gpioCfgInternals call"); - errorMessages.put(PI_BAD_WAVE_BAUD, "baud rate not 50-250000(RX)/1000000(TX)"); - errorMessages.put(PI_TOO_MANY_PULSES, "waveform has too many pulses"); - errorMessages.put(PI_TOO_MANY_CHARS, "waveform has too many chars"); - errorMessages.put(PI_NOT_SERIAL_GPIO, "no bit bang serial read in progress on GPIO"); - errorMessages.put(PI_NOT_PERMITTED, "no permission to update GPIO"); - errorMessages.put(PI_SOME_PERMITTED, "no 
permission to update one or more GPIO"); - errorMessages.put(PI_BAD_WVSC_COMMND, "bad WVSC subcommand"); - errorMessages.put(PI_BAD_WVSM_COMMND, "bad WVSM subcommand"); - errorMessages.put(PI_BAD_WVSP_COMMND, "bad WVSP subcommand"); - errorMessages.put(PI_BAD_PULSELEN, "trigger pulse length not 1-100"); - errorMessages.put(PI_BAD_SCRIPT, "invalid script"); - errorMessages.put(PI_BAD_SCRIPT_ID, "unknown script id"); - errorMessages.put(PI_BAD_SER_OFFSET, "add serial data offset > 30 minute"); - errorMessages.put(PI_GPIO_IN_USE, "GPIO already in use"); - errorMessages.put(PI_BAD_SERIAL_COUNT, "must read at least a byte at a time"); - errorMessages.put(PI_BAD_PARAM_NUM, "script parameter id not 0-9"); - errorMessages.put(PI_DUP_TAG, "script has duplicate tag"); - errorMessages.put(PI_TOO_MANY_TAGS, "script has too many tags"); - errorMessages.put(PI_BAD_SCRIPT_CMD, "illegal script command"); - errorMessages.put(PI_BAD_VAR_NUM, "script variable id not 0-149"); - errorMessages.put(PI_NO_SCRIPT_ROOM, "no more room for scripts"); - errorMessages.put(PI_NO_MEMORY, "can't allocate temporary memory"); - errorMessages.put(PI_SOCK_READ_FAILED, "socket read failed"); - errorMessages.put(PI_SOCK_WRIT_FAILED, "socket write failed"); - errorMessages.put(PI_TOO_MANY_PARAM, "too many script parameters (> 10)"); - errorMessages.put(PI_NOT_HALTED, "script already running or failed"); - errorMessages.put(PI_BAD_TAG, "script has unresolved tag"); - errorMessages.put(PI_BAD_MICS_DELAY, "bad MICS delay (too large)"); - errorMessages.put(PI_BAD_MILS_DELAY, "bad MILS delay (too large)"); - errorMessages.put(PI_BAD_WAVE_ID, "non existent wave id"); - errorMessages.put(PI_TOO_MANY_CBS, "No more CBs for waveform"); - errorMessages.put(PI_TOO_MANY_OOL, "No more OOL for waveform"); - errorMessages.put(PI_EMPTY_WAVEFORM, "attempt to create an empty waveform"); - errorMessages.put(PI_NO_WAVEFORM_ID, "No more waveform ids"); - errorMessages.put(PI_I2C_OPEN_FAILED, "can't open I2C device"); - 
errorMessages.put(PI_SER_OPEN_FAILED, "can't open serial device"); - errorMessages.put(PI_SPI_OPEN_FAILED, "can't open SPI device"); - errorMessages.put(PI_BAD_I2C_BUS, "bad I2C bus"); - errorMessages.put(PI_BAD_I2C_ADDR, "bad I2C address"); - errorMessages.put(PI_BAD_SPI_CHANNEL, "bad SPI channel"); - errorMessages.put(PI_BAD_FLAGS, "bad i2c/spi/ser open flags"); - errorMessages.put(PI_BAD_SPI_SPEED, "bad SPI speed"); - errorMessages.put(PI_BAD_SER_DEVICE, "bad serial device name"); - errorMessages.put(PI_BAD_SER_SPEED, "bad serial baud rate"); - errorMessages.put(PI_BAD_PARAM, "bad i2c/spi/ser parameter"); - errorMessages.put(PI_I2C_WRITE_FAILED, "I2C write failed"); - errorMessages.put(PI_I2C_READ_FAILED, "I2C read failed"); - errorMessages.put(PI_BAD_SPI_COUNT, "bad SPI count"); - errorMessages.put(PI_SER_WRITE_FAILED, "ser write failed"); - errorMessages.put(PI_SER_READ_FAILED, "ser read failed"); - errorMessages.put(PI_SER_READ_NO_DATA, "ser read no data available"); - errorMessages.put(PI_UNKNOWN_COMMAND, "unknown command"); - errorMessages.put(PI_SPI_XFER_FAILED, "SPI xfer/read/write failed"); - errorMessages.put(PI_BAD_POINTER, "bad (NULL) pointer"); - errorMessages.put(PI_NO_AUX_SPI, "no auxiliary SPI on Pi A or B"); - errorMessages.put(PI_NOT_PWM_GPIO, "GPIO is not in use for PWM"); - errorMessages.put(PI_NOT_SERVO_GPIO, "GPIO is not in use for servo pulses"); - errorMessages.put(PI_NOT_HCLK_GPIO, "GPIO has no hardware clock"); - errorMessages.put(PI_NOT_HPWM_GPIO, "GPIO has no hardware PWM"); - errorMessages.put(PI_BAD_HPWM_FREQ, "hardware PWM frequency not 1-125M"); - errorMessages.put(PI_BAD_HPWM_DUTY, "hardware PWM dutycycle not 0-1M"); - errorMessages.put(PI_BAD_HCLK_FREQ, "hardware clock frequency not 4689-250M"); - errorMessages.put(PI_BAD_HCLK_PASS, "need password to use hardware clock 1"); - errorMessages.put(PI_HPWM_ILLEGAL, "illegal, PWM in use for main clock"); - errorMessages.put(PI_BAD_DATABITS, "serial data bits not 1-32"); - 
errorMessages.put(PI_BAD_STOPBITS, "serial (half) stop bits not 2-8"); - errorMessages.put(PI_MSG_TOOBIG, "socket/pipe message too big"); - errorMessages.put(PI_BAD_MALLOC_MODE, "bad memory allocation mode"); - errorMessages.put(PI_TOO_MANY_SEGS, "too many I2C transaction segments"); - errorMessages.put(PI_BAD_I2C_SEG, "an I2C transaction segment failed"); - errorMessages.put(PI_BAD_SMBUS_CMD, "SMBus command not supported"); - errorMessages.put(PI_NOT_I2C_GPIO, "no bit bang I2C in progress on GPIO"); - errorMessages.put(PI_BAD_I2C_WLEN, "bad I2C write length"); - errorMessages.put(PI_BAD_I2C_RLEN, "bad I2C read length"); - errorMessages.put(PI_BAD_I2C_CMD, "bad I2C command"); - errorMessages.put(PI_BAD_I2C_BAUD, "bad I2C baud rate, not 50-500k"); - errorMessages.put(PI_CHAIN_LOOP_CNT, "bad chain loop count"); - errorMessages.put(PI_BAD_CHAIN_LOOP, "empty chain loop"); - errorMessages.put(PI_CHAIN_COUNTER, "too many chain counters"); - errorMessages.put(PI_BAD_CHAIN_CMD, "bad chain command"); - errorMessages.put(PI_BAD_CHAIN_DELAY, "bad chain delay micros"); - errorMessages.put(PI_CHAIN_NESTING, "chain counters nested too deeply"); - errorMessages.put(PI_CHAIN_TOO_BIG, "chain is too long"); - errorMessages.put(PI_DEPRECATED, "deprecated function removed"); - errorMessages.put(PI_BAD_SER_INVERT, "bit bang serial invert not 0 or 1"); - errorMessages.put(PI_BAD_EDGE, "bad ISR edge value, not 0-2"); - errorMessages.put(PI_BAD_ISR_INIT, "bad ISR initialisation"); - errorMessages.put(PI_BAD_FOREVER, "loop forever must be last chain command"); - errorMessages.put(PI_BAD_FILTER, "bad filter parameter"); - } + static { + errorMessages.put(PI_INIT_FAILED, "pigpio initialisation failed"); + errorMessages.put(PI_BAD_USER_GPIO, "GPIO not 0-31"); + errorMessages.put(PI_BAD_GPIO, "GPIO not 0-53"); + errorMessages.put(PI_BAD_MODE, "mode not 0-7"); + errorMessages.put(PI_BAD_LEVEL, "level not 0-1"); + errorMessages.put(PI_BAD_PUD, "pud not 0-2"); + 
errorMessages.put(PI_BAD_PULSEWIDTH, "pulsewidth not 0 or 500-2500"); + errorMessages.put(PI_BAD_DUTYCYCLE, "dutycycle not 0-range (default 255)"); + errorMessages.put(PI_BAD_TIMER, "timer not 0-9"); + errorMessages.put(PI_BAD_MS, "ms not 10-60000"); + errorMessages.put(PI_BAD_TIMETYPE, "timetype not 0-1"); + errorMessages.put(PI_BAD_SECONDS, "seconds < 0"); + errorMessages.put(PI_BAD_MICROS, "micros not 0-999999"); + errorMessages.put(PI_TIMER_FAILED, "gpioSetTimerFunc failed"); + errorMessages.put(PI_BAD_WDOG_TIMEOUT, "timeout not 0-60000"); + errorMessages.put(PI_NO_ALERT_FUNC, "DEPRECATED"); + errorMessages.put(PI_BAD_CLK_PERIPH, "clock peripheral not 0-1"); + errorMessages.put(PI_BAD_CLK_SOURCE, "DEPRECATED"); + errorMessages.put(PI_BAD_CLK_MICROS, "clock micros not 1, 2, 4, 5, 8, or 10"); + errorMessages.put(PI_BAD_BUF_MILLIS, "buf millis not 100-10000"); + errorMessages.put(PI_BAD_DUTYRANGE, "dutycycle range not 25-40000"); + errorMessages.put(PI_BAD_SIGNUM, "signum not 0-63"); + errorMessages.put(PI_BAD_PATHNAME, "can't open pathname"); + errorMessages.put(PI_NO_HANDLE, "no handle available"); + errorMessages.put(PI_BAD_HANDLE, "unknown handle"); + errorMessages.put(PI_BAD_IF_FLAGS, "ifFlags > 3"); + errorMessages.put(PI_BAD_CHANNEL, "DMA channel not 0-14"); + errorMessages.put(PI_BAD_SOCKET_PORT, "socket port not 1024-30000"); + errorMessages.put(PI_BAD_FIFO_COMMAND, "unknown fifo command"); + errorMessages.put(PI_BAD_SECO_CHANNEL, "DMA secondary channel not 0-14"); + errorMessages.put(PI_NOT_INITIALISED, "function called before gpioInitialise"); + errorMessages.put(PI_INITIALISED, "function called after gpioInitialise"); + errorMessages.put(PI_BAD_WAVE_MODE, "waveform mode not 0-1"); + errorMessages.put(PI_BAD_CFG_INTERNAL, "bad parameter in gpioCfgInternals call"); + errorMessages.put(PI_BAD_WAVE_BAUD, "baud rate not 50-250000(RX)/1000000(TX)"); + errorMessages.put(PI_TOO_MANY_PULSES, "waveform has too many pulses"); + 
errorMessages.put(PI_TOO_MANY_CHARS, "waveform has too many chars"); + errorMessages.put(PI_NOT_SERIAL_GPIO, "no bit bang serial read in progress on GPIO"); + errorMessages.put(PI_NOT_PERMITTED, "no permission to update GPIO"); + errorMessages.put(PI_SOME_PERMITTED, "no permission to update one or more GPIO"); + errorMessages.put(PI_BAD_WVSC_COMMND, "bad WVSC subcommand"); + errorMessages.put(PI_BAD_WVSM_COMMND, "bad WVSM subcommand"); + errorMessages.put(PI_BAD_WVSP_COMMND, "bad WVSP subcommand"); + errorMessages.put(PI_BAD_PULSELEN, "trigger pulse length not 1-100"); + errorMessages.put(PI_BAD_SCRIPT, "invalid script"); + errorMessages.put(PI_BAD_SCRIPT_ID, "unknown script id"); + errorMessages.put(PI_BAD_SER_OFFSET, "add serial data offset > 30 minute"); + errorMessages.put(PI_GPIO_IN_USE, "GPIO already in use"); + errorMessages.put(PI_BAD_SERIAL_COUNT, "must read at least a byte at a time"); + errorMessages.put(PI_BAD_PARAM_NUM, "script parameter id not 0-9"); + errorMessages.put(PI_DUP_TAG, "script has duplicate tag"); + errorMessages.put(PI_TOO_MANY_TAGS, "script has too many tags"); + errorMessages.put(PI_BAD_SCRIPT_CMD, "illegal script command"); + errorMessages.put(PI_BAD_VAR_NUM, "script variable id not 0-149"); + errorMessages.put(PI_NO_SCRIPT_ROOM, "no more room for scripts"); + errorMessages.put(PI_NO_MEMORY, "can't allocate temporary memory"); + errorMessages.put(PI_SOCK_READ_FAILED, "socket read failed"); + errorMessages.put(PI_SOCK_WRIT_FAILED, "socket write failed"); + errorMessages.put(PI_TOO_MANY_PARAM, "too many script parameters (> 10)"); + errorMessages.put(PI_NOT_HALTED, "script already running or failed"); + errorMessages.put(PI_BAD_TAG, "script has unresolved tag"); + errorMessages.put(PI_BAD_MICS_DELAY, "bad MICS delay (too large)"); + errorMessages.put(PI_BAD_MILS_DELAY, "bad MILS delay (too large)"); + errorMessages.put(PI_BAD_WAVE_ID, "non existent wave id"); + errorMessages.put(PI_TOO_MANY_CBS, "No more CBs for waveform"); + 
errorMessages.put(PI_TOO_MANY_OOL, "No more OOL for waveform"); + errorMessages.put(PI_EMPTY_WAVEFORM, "attempt to create an empty waveform"); + errorMessages.put(PI_NO_WAVEFORM_ID, "No more waveform ids"); + errorMessages.put(PI_I2C_OPEN_FAILED, "can't open I2C device"); + errorMessages.put(PI_SER_OPEN_FAILED, "can't open serial device"); + errorMessages.put(PI_SPI_OPEN_FAILED, "can't open SPI device"); + errorMessages.put(PI_BAD_I2C_BUS, "bad I2C bus"); + errorMessages.put(PI_BAD_I2C_ADDR, "bad I2C address"); + errorMessages.put(PI_BAD_SPI_CHANNEL, "bad SPI channel"); + errorMessages.put(PI_BAD_FLAGS, "bad i2c/spi/ser open flags"); + errorMessages.put(PI_BAD_SPI_SPEED, "bad SPI speed"); + errorMessages.put(PI_BAD_SER_DEVICE, "bad serial device name"); + errorMessages.put(PI_BAD_SER_SPEED, "bad serial baud rate"); + errorMessages.put(PI_BAD_PARAM, "bad i2c/spi/ser parameter"); + errorMessages.put(PI_I2C_WRITE_FAILED, "I2C write failed"); + errorMessages.put(PI_I2C_READ_FAILED, "I2C read failed"); + errorMessages.put(PI_BAD_SPI_COUNT, "bad SPI count"); + errorMessages.put(PI_SER_WRITE_FAILED, "ser write failed"); + errorMessages.put(PI_SER_READ_FAILED, "ser read failed"); + errorMessages.put(PI_SER_READ_NO_DATA, "ser read no data available"); + errorMessages.put(PI_UNKNOWN_COMMAND, "unknown command"); + errorMessages.put(PI_SPI_XFER_FAILED, "SPI xfer/read/write failed"); + errorMessages.put(PI_BAD_POINTER, "bad (NULL) pointer"); + errorMessages.put(PI_NO_AUX_SPI, "no auxiliary SPI on Pi A or B"); + errorMessages.put(PI_NOT_PWM_GPIO, "GPIO is not in use for PWM"); + errorMessages.put(PI_NOT_SERVO_GPIO, "GPIO is not in use for servo pulses"); + errorMessages.put(PI_NOT_HCLK_GPIO, "GPIO has no hardware clock"); + errorMessages.put(PI_NOT_HPWM_GPIO, "GPIO has no hardware PWM"); + errorMessages.put(PI_BAD_HPWM_FREQ, "hardware PWM frequency not 1-125M"); + errorMessages.put(PI_BAD_HPWM_DUTY, "hardware PWM dutycycle not 0-1M"); + errorMessages.put(PI_BAD_HCLK_FREQ, 
"hardware clock frequency not 4689-250M"); + errorMessages.put(PI_BAD_HCLK_PASS, "need password to use hardware clock 1"); + errorMessages.put(PI_HPWM_ILLEGAL, "illegal, PWM in use for main clock"); + errorMessages.put(PI_BAD_DATABITS, "serial data bits not 1-32"); + errorMessages.put(PI_BAD_STOPBITS, "serial (half) stop bits not 2-8"); + errorMessages.put(PI_MSG_TOOBIG, "socket/pipe message too big"); + errorMessages.put(PI_BAD_MALLOC_MODE, "bad memory allocation mode"); + errorMessages.put(PI_TOO_MANY_SEGS, "too many I2C transaction segments"); + errorMessages.put(PI_BAD_I2C_SEG, "an I2C transaction segment failed"); + errorMessages.put(PI_BAD_SMBUS_CMD, "SMBus command not supported"); + errorMessages.put(PI_NOT_I2C_GPIO, "no bit bang I2C in progress on GPIO"); + errorMessages.put(PI_BAD_I2C_WLEN, "bad I2C write length"); + errorMessages.put(PI_BAD_I2C_RLEN, "bad I2C read length"); + errorMessages.put(PI_BAD_I2C_CMD, "bad I2C command"); + errorMessages.put(PI_BAD_I2C_BAUD, "bad I2C baud rate, not 50-500k"); + errorMessages.put(PI_CHAIN_LOOP_CNT, "bad chain loop count"); + errorMessages.put(PI_BAD_CHAIN_LOOP, "empty chain loop"); + errorMessages.put(PI_CHAIN_COUNTER, "too many chain counters"); + errorMessages.put(PI_BAD_CHAIN_CMD, "bad chain command"); + errorMessages.put(PI_BAD_CHAIN_DELAY, "bad chain delay micros"); + errorMessages.put(PI_CHAIN_NESTING, "chain counters nested too deeply"); + errorMessages.put(PI_CHAIN_TOO_BIG, "chain is too long"); + errorMessages.put(PI_DEPRECATED, "deprecated function removed"); + errorMessages.put(PI_BAD_SER_INVERT, "bit bang serial invert not 0 or 1"); + errorMessages.put(PI_BAD_EDGE, "bad ISR edge value, not 0-2"); + errorMessages.put(PI_BAD_ISR_INIT, "bad ISR initialisation"); + errorMessages.put(PI_BAD_FOREVER, "loop forever must be last chain command"); + errorMessages.put(PI_BAD_FILTER, "bad filter parameter"); + } - public static String getMessageForError(int errorCode) { - return errorMessages.get(errorCode); - } + 
public static String getMessageForError(int errorCode) { + return errorMessages.get(errorCode); + } } diff --git a/photon-core/src/main/java/org/photonvision/common/hardware/GPIO/pi/PigpioPin.java b/photon-core/src/main/java/org/photonvision/common/hardware/GPIO/pi/PigpioPin.java index 288e950191..2e0cc6f0c5 100644 --- a/photon-core/src/main/java/org/photonvision/common/hardware/GPIO/pi/PigpioPin.java +++ b/photon-core/src/main/java/org/photonvision/common/hardware/GPIO/pi/PigpioPin.java @@ -22,72 +22,72 @@ import org.photonvision.common.logging.Logger; public class PigpioPin extends GPIOBase { - public static final Logger logger = new Logger(PigpioPin.class, LogGroup.General); - private static final PigpioSocket piSocket = new PigpioSocket(); + public static final Logger logger = new Logger(PigpioPin.class, LogGroup.General); + private static final PigpioSocket piSocket = new PigpioSocket(); - private final boolean isHardwarePWMPin; - private final int pinNo; + private final boolean isHardwarePWMPin; + private final int pinNo; - private boolean hasFailedHardwarePWM; + private boolean hasFailedHardwarePWM; - public PigpioPin(int pinNo) { - isHardwarePWMPin = pinNo == 12 || pinNo == 13 || pinNo == 17 || pinNo == 18; - this.pinNo = pinNo; - } + public PigpioPin(int pinNo) { + isHardwarePWMPin = pinNo == 12 || pinNo == 13 || pinNo == 17 || pinNo == 18; + this.pinNo = pinNo; + } - @Override - public int getPinNumber() { - return pinNo; - } + @Override + public int getPinNumber() { + return pinNo; + } - @Override - protected void setStateImpl(boolean state) { - try { - piSocket.gpioWrite(pinNo, state); - } catch (PigpioException e) { - logger.error("gpioWrite FAIL - " + e.getMessage()); + @Override + protected void setStateImpl(boolean state) { + try { + piSocket.gpioWrite(pinNo, state); + } catch (PigpioException e) { + logger.error("gpioWrite FAIL - " + e.getMessage()); + } } - } - @Override - public boolean shutdown() { - setState(false); - return true; - } + 
@Override + public boolean shutdown() { + setState(false); + return true; + } - @Override - public boolean getStateImpl() { - try { - return piSocket.gpioRead(pinNo); - } catch (PigpioException e) { - logger.error("gpioRead FAIL - " + e.getMessage()); - return false; + @Override + public boolean getStateImpl() { + try { + return piSocket.gpioRead(pinNo); + } catch (PigpioException e) { + logger.error("gpioRead FAIL - " + e.getMessage()); + return false; + } } - } - @Override - protected void blinkImpl(int pulseTimeMillis, int blinks) { - try { - piSocket.generateAndSendWaveform(pulseTimeMillis, blinks, pinNo); - } catch (PigpioException e) { - logger.error("Could not set blink - " + e.getMessage()); + @Override + protected void blinkImpl(int pulseTimeMillis, int blinks) { + try { + piSocket.generateAndSendWaveform(pulseTimeMillis, blinks, pinNo); + } catch (PigpioException e) { + logger.error("Could not set blink - " + e.getMessage()); + } } - } - @Override - protected void setBrightnessImpl(int brightness) { - if (isHardwarePWMPin) { - try { - piSocket.hardwarePWM(pinNo, 22000, (int) (1000000 * (brightness / 100.0))); - } catch (PigpioException e) { - logger.error("Failed to hardPWM - " + e.getMessage()); - } - } else if (!hasFailedHardwarePWM) { - logger.warn( - "Specified pin (" - + pinNo - + ") is not capable of hardware PWM - no action will be taken."); - hasFailedHardwarePWM = true; + @Override + protected void setBrightnessImpl(int brightness) { + if (isHardwarePWMPin) { + try { + piSocket.hardwarePWM(pinNo, 22000, (int) (1000000 * (brightness / 100.0))); + } catch (PigpioException e) { + logger.error("Failed to hardPWM - " + e.getMessage()); + } + } else if (!hasFailedHardwarePWM) { + logger.warn( + "Specified pin (" + + pinNo + + ") is not capable of hardware PWM - no action will be taken."); + hasFailedHardwarePWM = true; + } } - } } diff --git a/photon-core/src/main/java/org/photonvision/common/hardware/GPIO/pi/PigpioPulse.java 
b/photon-core/src/main/java/org/photonvision/common/hardware/GPIO/pi/PigpioPulse.java index 4f262aa34f..6d0ff0d1ea 100644 --- a/photon-core/src/main/java/org/photonvision/common/hardware/GPIO/pi/PigpioPulse.java +++ b/photon-core/src/main/java/org/photonvision/common/hardware/GPIO/pi/PigpioPulse.java @@ -18,22 +18,22 @@ package org.photonvision.common.hardware.GPIO.pi; public class PigpioPulse { - int gpioOn; - int gpioOff; - int delayMicros; + int gpioOn; + int gpioOff; + int delayMicros; - /** - * Initialises a pulse. - * - * @param gpioOn GPIO number to switch on at the start of the pulse. If zero, then no GPIO will be - * switched on. - * @param gpioOff GPIO number to switch off at the start of the pulse. If zero, then no GPIO will - * be switched off. - * @param delayMicros the delay in microseconds before the next pulse. - */ - public PigpioPulse(int gpioOn, int gpioOff, int delayMicros) { - this.gpioOn = gpioOn != 0 ? 1 << gpioOn : 0; - this.gpioOff = gpioOff != 0 ? 1 << gpioOff : 0; - this.delayMicros = delayMicros; - } + /** + * Initialises a pulse. + * + * @param gpioOn GPIO number to switch on at the start of the pulse. If zero, then no GPIO will be + * switched on. + * @param gpioOff GPIO number to switch off at the start of the pulse. If zero, then no GPIO will + * be switched off. + * @param delayMicros the delay in microseconds before the next pulse. + */ + public PigpioPulse(int gpioOn, int gpioOff, int delayMicros) { + this.gpioOn = gpioOn != 0 ? 1 << gpioOn : 0; + this.gpioOff = gpioOff != 0 ? 
1 << gpioOff : 0; + this.delayMicros = delayMicros; + } } diff --git a/photon-core/src/main/java/org/photonvision/common/hardware/GPIO/pi/PigpioSocket.java b/photon-core/src/main/java/org/photonvision/common/hardware/GPIO/pi/PigpioSocket.java index 9ec6f5d709..d6c2bd310d 100644 --- a/photon-core/src/main/java/org/photonvision/common/hardware/GPIO/pi/PigpioSocket.java +++ b/photon-core/src/main/java/org/photonvision/common/hardware/GPIO/pi/PigpioSocket.java @@ -28,345 +28,345 @@ @SuppressWarnings({"SpellCheckingInspection", "unused"}) public class PigpioSocket { - private static final Logger logger = new Logger(PigpioSocket.class, LogGroup.General); - private static final int PIGPIOD_MESSAGE_SIZE = 12; - - private PigpioSocketLock commandSocket; - private int activeWaveformID = -1; - - /** Creates and starts a socket connection to a pigpio daemon on localhost */ - public PigpioSocket() { - this("127.0.0.1", 8888); - } - - /** - * Creates and starts a socket connection to a pigpio daemon on a remote host with the specified - * address and port - * - * @param addr Address of remote pigpio daemon - * @param port Port of remote pigpio daemon - */ - public PigpioSocket(String addr, int port) { - try { - commandSocket = new PigpioSocketLock(addr, port); - } catch (IOException e) { - logger.error("Failed to create or connect to Pigpio Daemon socket", e); - } - } - - /** - * Reconnects to the pigpio daemon - * - * @throws PigpioException on failure - */ - public void reconnect() throws PigpioException { - try { - commandSocket.reconnect(); - } catch (IOException e) { - logger.error("Failed to reconnect to Pigpio Daemon socket", e); - throw new PigpioException("reconnect", e); - } - } - - /** - * Terminates the connection to the pigpio daemon - * - * @throws PigpioException on failure - */ - public void gpioTerminate() throws PigpioException { - try { - commandSocket.terminate(); - } catch (IOException e) { - logger.error("Failed to terminate connection to Pigpio Daemon 
socket", e); - throw new PigpioException("gpioTerminate", e); - } - } - - /** - * Read the GPIO level - * - * @param pin Pin to read from - * @return Value of the pin - * @throws PigpioException on failure - */ - public boolean gpioRead(int pin) throws PigpioException { - try { - int retCode = commandSocket.sendCmd(PigpioCommand.PCMD_READ.value, pin); - if (retCode < 0) throw new PigpioException(retCode); - return retCode != 0; - } catch (IOException e) { - logger.error("Failed to read GPIO pin: " + pin, e); - throw new PigpioException("gpioRead", e); - } - } - - /** - * Write the GPIO level - * - * @param pin Pin to write to - * @param value Value to write - * @throws PigpioException on failure - */ - public void gpioWrite(int pin, boolean value) throws PigpioException { - try { - int retCode = commandSocket.sendCmd(PigpioCommand.PCMD_WRITE.value, pin, value ? 1 : 0); - if (retCode < 0) throw new PigpioException(retCode); - } catch (IOException e) { - logger.error("Failed to write to GPIO pin: " + pin, e); - throw new PigpioException("gpioWrite", e); + private static final Logger logger = new Logger(PigpioSocket.class, LogGroup.General); + private static final int PIGPIOD_MESSAGE_SIZE = 12; + + private PigpioSocketLock commandSocket; + private int activeWaveformID = -1; + + /** Creates and starts a socket connection to a pigpio daemon on localhost */ + public PigpioSocket() { + this("127.0.0.1", 8888); } - } - - /** - * Clears all waveforms and any data added by calls to {@link #waveAddGeneric(ArrayList)} - * - * @throws PigpioException on failure - */ - public void waveClear() throws PigpioException { - try { - int retCode = commandSocket.sendCmd(PigpioCommand.PCMD_WVCLR.value); - if (retCode < 0) throw new PigpioException(retCode); - } catch (IOException e) { - logger.error("Failed to clear waveforms", e); - throw new PigpioException("waveClear", e); + + /** + * Creates and starts a socket connection to a pigpio daemon on a remote host with the specified + * 
address and port + * + * @param addr Address of remote pigpio daemon + * @param port Port of remote pigpio daemon + */ + public PigpioSocket(String addr, int port) { + try { + commandSocket = new PigpioSocketLock(addr, port); + } catch (IOException e) { + logger.error("Failed to create or connect to Pigpio Daemon socket", e); + } } - } - - /** - * Adds a number of pulses to the current waveform - * - * @param pulses ArrayList of pulses to add - * @return the new total number of pulses in the current waveform - * @throws PigpioException on failure - */ - private int waveAddGeneric(ArrayList pulses) throws PigpioException { - // pigpio wave message format - - // I p1 0 - // I p2 0 - // I p3 pulses * 12 - // ## extension ## - // III on/off/delay * pulses - - if (pulses == null || pulses.isEmpty()) return 0; - - try { - ByteBuffer bb = ByteBuffer.allocate(pulses.size() * 12); - bb.order(ByteOrder.LITTLE_ENDIAN); - for (var pulse : pulses) { - bb.putInt(pulse.gpioOn).putInt(pulse.gpioOff).putInt(pulse.delayMicros); - } - - int retCode = - commandSocket.sendCmd( - PigpioCommand.PCMD_WVAG.value, - 0, - 0, - pulses.size() * PIGPIOD_MESSAGE_SIZE, - bb.array()); - if (retCode < 0) throw new PigpioException(retCode); - - return retCode; - } catch (IOException e) { - logger.error("Failed to add pulse(s) to waveform", e); - throw new PigpioException("waveAddGeneric", e); + + /** + * Reconnects to the pigpio daemon + * + * @throws PigpioException on failure + */ + public void reconnect() throws PigpioException { + try { + commandSocket.reconnect(); + } catch (IOException e) { + logger.error("Failed to reconnect to Pigpio Daemon socket", e); + throw new PigpioException("reconnect", e); + } } - } - - /** - * Creates pulses and adds them to the current waveform - * - * @param pulseTimeMillis Pulse length in milliseconds - * @param blinks Number of times to pulse. 
-1 for repeat - * @param pinNo Pin to pulse - */ - private void addBlinkPulsesToWaveform(int pulseTimeMillis, int blinks, int pinNo) { - boolean repeat = blinks == -1; - - if (blinks == 0) return; - - if (repeat) { - blinks = 1; + + /** + * Terminates the connection to the pigpio daemon + * + * @throws PigpioException on failure + */ + public void gpioTerminate() throws PigpioException { + try { + commandSocket.terminate(); + } catch (IOException e) { + logger.error("Failed to terminate connection to Pigpio Daemon socket", e); + throw new PigpioException("gpioTerminate", e); + } } - try { - ArrayList pulses = new ArrayList<>(); - var startPulse = new PigpioPulse(pinNo, 0, pulseTimeMillis * 1000); - var endPulse = new PigpioPulse(0, pinNo, pulseTimeMillis * 1000); + /** + * Read the GPIO level + * + * @param pin Pin to read from + * @return Value of the pin + * @throws PigpioException on failure + */ + public boolean gpioRead(int pin) throws PigpioException { + try { + int retCode = commandSocket.sendCmd(PigpioCommand.PCMD_READ.value, pin); + if (retCode < 0) throw new PigpioException(retCode); + return retCode != 0; + } catch (IOException e) { + logger.error("Failed to read GPIO pin: " + pin, e); + throw new PigpioException("gpioRead", e); + } + } - for (int i = 0; i < blinks; i++) { - pulses.add(startPulse); - pulses.add(endPulse); - } + /** + * Write the GPIO level + * + * @param pin Pin to write to + * @param value Value to write + * @throws PigpioException on failure + */ + public void gpioWrite(int pin, boolean value) throws PigpioException { + try { + int retCode = commandSocket.sendCmd(PigpioCommand.PCMD_WRITE.value, pin, value ? 
1 : 0); + if (retCode < 0) throw new PigpioException(retCode); + } catch (IOException e) { + logger.error("Failed to write to GPIO pin: " + pin, e); + throw new PigpioException("gpioWrite", e); + } + } - waveAddGeneric(pulses); - pulses.clear(); - } catch (Exception e) { - e.printStackTrace(); + /** + * Clears all waveforms and any data added by calls to {@link #waveAddGeneric(ArrayList)} + * + * @throws PigpioException on failure + */ + public void waveClear() throws PigpioException { + try { + int retCode = commandSocket.sendCmd(PigpioCommand.PCMD_WVCLR.value); + if (retCode < 0) throw new PigpioException(retCode); + } catch (IOException e) { + logger.error("Failed to clear waveforms", e); + throw new PigpioException("waveClear", e); + } } - } - - /** - * Generates and sends a waveform to the given pins with the specified parameters. - * - * @param pulseTimeMillis Pulse length in milliseconds - * @param blinks Number of times to pulse. -1 for repeat - * @param pins Pins to pulse - * @throws PigpioException on failure - */ - public void generateAndSendWaveform(int pulseTimeMillis, int blinks, int... 
pins) - throws PigpioException { - if (pins.length == 0) return; - boolean repeat = blinks == -1; - if (blinks == 0) return; - - // stop any active waves - waveTxStop(); - waveClear(); - - if (activeWaveformID != -1) { - waveDelete(activeWaveformID); - activeWaveformID = -1; + + /** + * Adds a number of pulses to the current waveform + * + * @param pulses ArrayList of pulses to add + * @return the new total number of pulses in the current waveform + * @throws PigpioException on failure + */ + private int waveAddGeneric(ArrayList pulses) throws PigpioException { + // pigpio wave message format + + // I p1 0 + // I p2 0 + // I p3 pulses * 12 + // ## extension ## + // III on/off/delay * pulses + + if (pulses == null || pulses.isEmpty()) return 0; + + try { + ByteBuffer bb = ByteBuffer.allocate(pulses.size() * 12); + bb.order(ByteOrder.LITTLE_ENDIAN); + for (var pulse : pulses) { + bb.putInt(pulse.gpioOn).putInt(pulse.gpioOff).putInt(pulse.delayMicros); + } + + int retCode = + commandSocket.sendCmd( + PigpioCommand.PCMD_WVAG.value, + 0, + 0, + pulses.size() * PIGPIOD_MESSAGE_SIZE, + bb.array()); + if (retCode < 0) throw new PigpioException(retCode); + + return retCode; + } catch (IOException e) { + logger.error("Failed to add pulse(s) to waveform", e); + throw new PigpioException("waveAddGeneric", e); + } } - for (int pin : pins) { - addBlinkPulsesToWaveform(pulseTimeMillis, blinks, pin); + /** + * Creates pulses and adds them to the current waveform + * + * @param pulseTimeMillis Pulse length in milliseconds + * @param blinks Number of times to pulse. 
-1 for repeat + * @param pinNo Pin to pulse + */ + private void addBlinkPulsesToWaveform(int pulseTimeMillis, int blinks, int pinNo) { + boolean repeat = blinks == -1; + + if (blinks == 0) return; + + if (repeat) { + blinks = 1; + } + + try { + ArrayList pulses = new ArrayList<>(); + var startPulse = new PigpioPulse(pinNo, 0, pulseTimeMillis * 1000); + var endPulse = new PigpioPulse(0, pinNo, pulseTimeMillis * 1000); + + for (int i = 0; i < blinks; i++) { + pulses.add(startPulse); + pulses.add(endPulse); + } + + waveAddGeneric(pulses); + pulses.clear(); + } catch (Exception e) { + e.printStackTrace(); + } } - int waveformId = waveCreate(); - - if (waveformId >= 0) { - if (repeat) { - waveSendRepeat(waveformId); - } else { - waveSendOnce(waveformId); - } - } else { - String error = ""; - switch (waveformId) { - case PI_EMPTY_WAVEFORM: - error = "Waveform empty"; - break; - case PI_TOO_MANY_CBS: - error = "Too many CBS"; - break; - case PI_TOO_MANY_OOL: - error = "Too many OOL"; - break; - case PI_NO_WAVEFORM_ID: - error = "No waveform ID"; - break; - } - logger.error("Failed to send wave: " + error); + /** + * Generates and sends a waveform to the given pins with the specified parameters. + * + * @param pulseTimeMillis Pulse length in milliseconds + * @param blinks Number of times to pulse. -1 for repeat + * @param pins Pins to pulse + * @throws PigpioException on failure + */ + public void generateAndSendWaveform(int pulseTimeMillis, int blinks, int... 
pins) + throws PigpioException { + if (pins.length == 0) return; + boolean repeat = blinks == -1; + if (blinks == 0) return; + + // stop any active waves + waveTxStop(); + waveClear(); + + if (activeWaveformID != -1) { + waveDelete(activeWaveformID); + activeWaveformID = -1; + } + + for (int pin : pins) { + addBlinkPulsesToWaveform(pulseTimeMillis, blinks, pin); + } + + int waveformId = waveCreate(); + + if (waveformId >= 0) { + if (repeat) { + waveSendRepeat(waveformId); + } else { + waveSendOnce(waveformId); + } + } else { + String error = ""; + switch (waveformId) { + case PI_EMPTY_WAVEFORM: + error = "Waveform empty"; + break; + case PI_TOO_MANY_CBS: + error = "Too many CBS"; + break; + case PI_TOO_MANY_OOL: + error = "Too many OOL"; + break; + case PI_NO_WAVEFORM_ID: + error = "No waveform ID"; + break; + } + logger.error("Failed to send wave: " + error); + } } - } - - /** - * Stops the transmission of the current waveform - * - * @return success - * @throws PigpioException on failure - */ - public boolean waveTxStop() throws PigpioException { - try { - int retCode = commandSocket.sendCmd(PigpioCommand.PCMD_WVHLT.value); - if (retCode < 0) throw new PigpioException(retCode); - return retCode == 0; - } catch (IOException e) { - logger.error("Failed to stop waveform", e); - throw new PigpioException("waveTxStop", e); + + /** + * Stops the transmission of the current waveform + * + * @return success + * @throws PigpioException on failure + */ + public boolean waveTxStop() throws PigpioException { + try { + int retCode = commandSocket.sendCmd(PigpioCommand.PCMD_WVHLT.value); + if (retCode < 0) throw new PigpioException(retCode); + return retCode == 0; + } catch (IOException e) { + logger.error("Failed to stop waveform", e); + throw new PigpioException("waveTxStop", e); + } } - } - - /** - * Creates a waveform from the data provided by the prior calls to {@link - * #waveAddGeneric(ArrayList)} Upon success a wave ID greater than or equal to 0 is returned - * - * 
@return ID of the created waveform - * @throws PigpioException on failure - */ - public int waveCreate() throws PigpioException { - try { - int retCode = commandSocket.sendCmd(PigpioCommand.PCMD_WVCRE.value); - if (retCode < 0) throw new PigpioException(retCode); - return retCode; - } catch (IOException e) { - logger.error("Failed to create new waveform", e); - throw new PigpioException("waveCreate", e); + + /** + * Creates a waveform from the data provided by the prior calls to {@link + * #waveAddGeneric(ArrayList)} Upon success a wave ID greater than or equal to 0 is returned + * + * @return ID of the created waveform + * @throws PigpioException on failure + */ + public int waveCreate() throws PigpioException { + try { + int retCode = commandSocket.sendCmd(PigpioCommand.PCMD_WVCRE.value); + if (retCode < 0) throw new PigpioException(retCode); + return retCode; + } catch (IOException e) { + logger.error("Failed to create new waveform", e); + throw new PigpioException("waveCreate", e); + } } - } - - /** - * Deletes the waveform with specified wave ID - * - * @param waveId ID of the waveform to delete - * @throws PigpioException on failure - */ - public void waveDelete(int waveId) throws PigpioException { - try { - int retCode = commandSocket.sendCmd(PigpioCommand.PCMD_WVDEL.value, waveId); - if (retCode < 0) throw new PigpioException(retCode); - } catch (IOException e) { - logger.error("Failed to delete wave: " + waveId, e); - throw new PigpioException("waveDelete", e); + + /** + * Deletes the waveform with specified wave ID + * + * @param waveId ID of the waveform to delete + * @throws PigpioException on failure + */ + public void waveDelete(int waveId) throws PigpioException { + try { + int retCode = commandSocket.sendCmd(PigpioCommand.PCMD_WVDEL.value, waveId); + if (retCode < 0) throw new PigpioException(retCode); + } catch (IOException e) { + logger.error("Failed to delete wave: " + waveId, e); + throw new PigpioException("waveDelete", e); + } } - } - - /** - 
* Transmits the waveform with specified wave ID. The waveform is sent once - * - * @param waveId ID of the waveform to transmit - * @return The number of DMA control blocks in the waveform - * @throws PigpioException on failure - */ - public int waveSendOnce(int waveId) throws PigpioException { - try { - int retCode = commandSocket.sendCmd(PigpioCommand.PCMD_WVTX.value, waveId); - if (retCode < 0) throw new PigpioException(retCode); - return retCode; - } catch (IOException e) { - throw new PigpioException("waveSendOnce", e); + + /** + * Transmits the waveform with specified wave ID. The waveform is sent once + * + * @param waveId ID of the waveform to transmit + * @return The number of DMA control blocks in the waveform + * @throws PigpioException on failure + */ + public int waveSendOnce(int waveId) throws PigpioException { + try { + int retCode = commandSocket.sendCmd(PigpioCommand.PCMD_WVTX.value, waveId); + if (retCode < 0) throw new PigpioException(retCode); + return retCode; + } catch (IOException e) { + throw new PigpioException("waveSendOnce", e); + } } - } - - /** - * Transmits the waveform with specified wave ID. The waveform cycles until cancelled (either by - * the sending of a new waveform or {@link #waveTxStop()} - * - * @param waveId ID of the waveform to transmit - * @return The number of DMA control blocks in the waveform - * @throws PigpioException on failure - */ - public int waveSendRepeat(int waveId) throws PigpioException { - try { - int retCode = commandSocket.sendCmd(PigpioCommand.PCMD_WVTXR.value, waveId); - if (retCode < 0) throw new PigpioException(retCode); - return retCode; - } catch (IOException e) { - throw new PigpioException("waveSendRepeat", e); + + /** + * Transmits the waveform with specified wave ID. 
The waveform cycles until cancelled (either by + * the sending of a new waveform or {@link #waveTxStop()} + * + * @param waveId ID of the waveform to transmit + * @return The number of DMA control blocks in the waveform + * @throws PigpioException on failure + */ + public int waveSendRepeat(int waveId) throws PigpioException { + try { + int retCode = commandSocket.sendCmd(PigpioCommand.PCMD_WVTXR.value, waveId); + if (retCode < 0) throw new PigpioException(retCode); + return retCode; + } catch (IOException e) { + throw new PigpioException("waveSendRepeat", e); + } } - } - - /** - * Starts hardware PWM on a GPIO at the specified frequency and dutycycle - * - * @param pin GPIO pin to start PWM on - * @param pwmFrequency Frequency to run at (1Hz-125MHz). Frequencies above 30MHz are unlikely to - * work - * @param pwmDuty Duty cycle to run at (0-1,000,000) - * @throws PigpioException on failure - */ - public void hardwarePWM(int pin, int pwmFrequency, int pwmDuty) throws PigpioException { - try { - ByteBuffer bb = ByteBuffer.allocate(4); - bb.order(ByteOrder.LITTLE_ENDIAN); - bb.putInt(pwmDuty); - - int retCode = - commandSocket.sendCmd(PigpioCommand.PCMD_HP.value, pin, pwmFrequency, 4, bb.array()); - if (retCode < 0) throw new PigpioException(retCode); - } catch (IOException e) { - throw new PigpioException("hardwarePWM", e); + + /** + * Starts hardware PWM on a GPIO at the specified frequency and dutycycle + * + * @param pin GPIO pin to start PWM on + * @param pwmFrequency Frequency to run at (1Hz-125MHz). 
Frequencies above 30MHz are unlikely to + * work + * @param pwmDuty Duty cycle to run at (0-1,000,000) + * @throws PigpioException on failure + */ + public void hardwarePWM(int pin, int pwmFrequency, int pwmDuty) throws PigpioException { + try { + ByteBuffer bb = ByteBuffer.allocate(4); + bb.order(ByteOrder.LITTLE_ENDIAN); + bb.putInt(pwmDuty); + + int retCode = + commandSocket.sendCmd(PigpioCommand.PCMD_HP.value, pin, pwmFrequency, 4, bb.array()); + if (retCode < 0) throw new PigpioException(retCode); + } catch (IOException e) { + throw new PigpioException("hardwarePWM", e); + } } - } } diff --git a/photon-core/src/main/java/org/photonvision/common/hardware/GPIO/pi/PigpioSocketLock.java b/photon-core/src/main/java/org/photonvision/common/hardware/GPIO/pi/PigpioSocketLock.java index f65169874b..60e9d9ff39 100644 --- a/photon-core/src/main/java/org/photonvision/common/hardware/GPIO/pi/PigpioSocketLock.java +++ b/photon-core/src/main/java/org/photonvision/common/hardware/GPIO/pi/PigpioSocketLock.java @@ -28,120 +28,120 @@ * https://github.com/nkolban/jpigpio/blob/master/JPigpio/src/jpigpio/SocketLock.java */ final class PigpioSocketLock { - private static final int replyTimeoutMillis = 1000; - - private final String addr; - private final int port; - - private Socket socket; - private DataInputStream in; - private DataOutputStream out; - - public PigpioSocketLock(String addr, int port) throws IOException { - this.addr = addr; - this.port = port; - reconnect(); - } - - public void reconnect() throws IOException { - socket = new Socket(addr, port); - out = new DataOutputStream(socket.getOutputStream()); - in = new DataInputStream(socket.getInputStream()); - } - - public void terminate() throws IOException { - in.close(); - in = null; - - out.flush(); - out.close(); - out = null; - - socket.close(); - socket = null; - } - - public synchronized int sendCmd(int cmd) throws IOException { - byte[] b = {}; - return sendCmd(cmd, 0, 0, 0, b); - } - - public synchronized int 
sendCmd(int cmd, int p1) throws IOException { - byte[] b = {}; - return sendCmd(cmd, p1, 0, 0, b); - } - - public synchronized int sendCmd(int cmd, int p1, int p2) throws IOException { - byte[] b = {}; - return sendCmd(cmd, p1, p2, 0, b); - } - - public synchronized int sendCmd(int cmd, int p1, int p2, int p3) throws IOException { - byte[] b = {}; - return sendCmd(cmd, p1, p2, p3, b); - } - - /** - * Send extended command to pigpiod and return result code - * - * @param cmd Command to send - * @param p1 Command parameter 1 - * @param p2 Command parameter 2 - * @param p3 Command parameter 3 (usually length of extended data - see paramater ext) - * @param ext Array of bytes containing extended data - * @return Command result code - * @throws IOException in case of network connection error - */ - @SuppressWarnings("UnusedAssignment") - public synchronized int sendCmd(int cmd, int p1, int p2, int p3, byte[] ext) throws IOException { - ByteBuffer bb = ByteBuffer.allocate(16 + ext.length); - - bb.putInt(Integer.reverseBytes(cmd)); - bb.putInt(Integer.reverseBytes(p1)); - bb.putInt(Integer.reverseBytes(p2)); - bb.putInt(Integer.reverseBytes(p3)); - - if (ext.length > 0) { - bb.put(ext); + private static final int replyTimeoutMillis = 1000; + + private final String addr; + private final int port; + + private Socket socket; + private DataInputStream in; + private DataOutputStream out; + + public PigpioSocketLock(String addr, int port) throws IOException { + this.addr = addr; + this.port = port; + reconnect(); + } + + public void reconnect() throws IOException { + socket = new Socket(addr, port); + out = new DataOutputStream(socket.getOutputStream()); + in = new DataInputStream(socket.getInputStream()); + } + + public void terminate() throws IOException { + in.close(); + in = null; + + out.flush(); + out.close(); + out = null; + + socket.close(); + socket = null; + } + + public synchronized int sendCmd(int cmd) throws IOException { + byte[] b = {}; + return sendCmd(cmd, 0, 
0, 0, b); + } + + public synchronized int sendCmd(int cmd, int p1) throws IOException { + byte[] b = {}; + return sendCmd(cmd, p1, 0, 0, b); + } + + public synchronized int sendCmd(int cmd, int p1, int p2) throws IOException { + byte[] b = {}; + return sendCmd(cmd, p1, p2, 0, b); } - out.write(bb.array()); - out.flush(); - - int w = replyTimeoutMillis; - int a = in.available(); - - // if by any chance there is no response from pigpiod, then wait up to - // specified timeout - while (w > 0 && a < 16) { - w -= 10; - try { - Thread.sleep(10); - } catch (InterruptedException ignored) { - } - a = in.available(); + public synchronized int sendCmd(int cmd, int p1, int p2, int p3) throws IOException { + byte[] b = {}; + return sendCmd(cmd, p1, p2, p3, b); } - // throw exception if response from pigpiod has not arrived yet - if (in.available() < 16) { - throw new IOException( - "Timeout: No response from pigpio daemon within " + replyTimeoutMillis + " ms."); + /** + * Send extended command to pigpiod and return result code + * + * @param cmd Command to send + * @param p1 Command parameter 1 + * @param p2 Command parameter 2 + * @param p3 Command parameter 3 (usually length of extended data - see paramater ext) + * @param ext Array of bytes containing extended data + * @return Command result code + * @throws IOException in case of network connection error + */ + @SuppressWarnings("UnusedAssignment") + public synchronized int sendCmd(int cmd, int p1, int p2, int p3, byte[] ext) throws IOException { + ByteBuffer bb = ByteBuffer.allocate(16 + ext.length); + + bb.putInt(Integer.reverseBytes(cmd)); + bb.putInt(Integer.reverseBytes(p1)); + bb.putInt(Integer.reverseBytes(p2)); + bb.putInt(Integer.reverseBytes(p3)); + + if (ext.length > 0) { + bb.put(ext); + } + + out.write(bb.array()); + out.flush(); + + int w = replyTimeoutMillis; + int a = in.available(); + + // if by any chance there is no response from pigpiod, then wait up to + // specified timeout + while (w > 0 && a < 16) { 
+ w -= 10; + try { + Thread.sleep(10); + } catch (InterruptedException ignored) { + } + a = in.available(); + } + + // throw exception if response from pigpiod has not arrived yet + if (in.available() < 16) { + throw new IOException( + "Timeout: No response from pigpio daemon within " + replyTimeoutMillis + " ms."); + } + + int resp = Integer.reverseBytes(in.readInt()); // ignore response + resp = Integer.reverseBytes(in.readInt()); // ignore response + resp = Integer.reverseBytes(in.readInt()); // ignore response + resp = Integer.reverseBytes(in.readInt()); // contains error or response + return resp; } - int resp = Integer.reverseBytes(in.readInt()); // ignore response - resp = Integer.reverseBytes(in.readInt()); // ignore response - resp = Integer.reverseBytes(in.readInt()); // ignore response - resp = Integer.reverseBytes(in.readInt()); // contains error or response - return resp; - } - - /** - * Read all remaining bytes coming from pigpiod - * - * @param data Array to store read bytes. - * @throws IOException if unable to read from network - */ - public void readBytes(byte[] data) throws IOException { - in.readFully(data); - } + /** + * Read all remaining bytes coming from pigpiod + * + * @param data Array to store read bytes. 
+ * @throws IOException if unable to read from network + */ + public void readBytes(byte[] data) throws IOException { + in.readFully(data); + } } diff --git a/photon-core/src/main/java/org/photonvision/common/hardware/HardwareManager.java b/photon-core/src/main/java/org/photonvision/common/hardware/HardwareManager.java index 38c6f5cd81..2a049cccae 100644 --- a/photon-core/src/main/java/org/photonvision/common/hardware/HardwareManager.java +++ b/photon-core/src/main/java/org/photonvision/common/hardware/HardwareManager.java @@ -34,154 +34,154 @@ import org.photonvision.common.util.ShellExec; public class HardwareManager { - private static HardwareManager instance; + private static HardwareManager instance; - private final ShellExec shellExec = new ShellExec(true, false); - private final Logger logger = new Logger(HardwareManager.class, LogGroup.General); + private final ShellExec shellExec = new ShellExec(true, false); + private final Logger logger = new Logger(HardwareManager.class, LogGroup.General); - private final HardwareConfig hardwareConfig; - private final HardwareSettings hardwareSettings; + private final HardwareConfig hardwareConfig; + private final HardwareSettings hardwareSettings; - private final MetricsManager metricsManager; + private final MetricsManager metricsManager; - @SuppressWarnings({"FieldCanBeLocal", "unused"}) - private final StatusLED statusLED; + @SuppressWarnings({"FieldCanBeLocal", "unused"}) + private final StatusLED statusLED; - @SuppressWarnings("FieldCanBeLocal") - private final IntegerSubscriber ledModeRequest; + @SuppressWarnings("FieldCanBeLocal") + private final IntegerSubscriber ledModeRequest; - private final IntegerPublisher ledModeState; + private final IntegerPublisher ledModeState; - @SuppressWarnings({"FieldCanBeLocal", "unused"}) - private final NTDataChangeListener ledModeListener; + @SuppressWarnings({"FieldCanBeLocal", "unused"}) + private final NTDataChangeListener ledModeListener; - public final VisionLED 
visionLED; // May be null if no LED is specified + public final VisionLED visionLED; // May be null if no LED is specified - private final PigpioSocket pigpioSocket; // will be null unless on Raspi + private final PigpioSocket pigpioSocket; // will be null unless on Raspi - public static HardwareManager getInstance() { - if (instance == null) { - var conf = ConfigManager.getInstance().getConfig(); - instance = new HardwareManager(conf.getHardwareConfig(), conf.getHardwareSettings()); + public static HardwareManager getInstance() { + if (instance == null) { + var conf = ConfigManager.getInstance().getConfig(); + instance = new HardwareManager(conf.getHardwareConfig(), conf.getHardwareSettings()); + } + return instance; } - return instance; - } - - private HardwareManager(HardwareConfig hardwareConfig, HardwareSettings hardwareSettings) { - this.hardwareConfig = hardwareConfig; - this.hardwareSettings = hardwareSettings; - - this.metricsManager = new MetricsManager(); - this.metricsManager.setConfig(hardwareConfig); - - ledModeRequest = - NetworkTablesManager.getInstance() - .kRootTable - .getIntegerTopic("ledModeRequest") - .subscribe(-1); - ledModeState = - NetworkTablesManager.getInstance().kRootTable.getIntegerTopic("ledModeState").publish(); - ledModeState.set(VisionLEDMode.kDefault.value); - - CustomGPIO.setConfig(hardwareConfig); - - if (Platform.isRaspberryPi()) { - pigpioSocket = new PigpioSocket(); - } else { - pigpioSocket = null; + + private HardwareManager(HardwareConfig hardwareConfig, HardwareSettings hardwareSettings) { + this.hardwareConfig = hardwareConfig; + this.hardwareSettings = hardwareSettings; + + this.metricsManager = new MetricsManager(); + this.metricsManager.setConfig(hardwareConfig); + + ledModeRequest = + NetworkTablesManager.getInstance() + .kRootTable + .getIntegerTopic("ledModeRequest") + .subscribe(-1); + ledModeState = + NetworkTablesManager.getInstance().kRootTable.getIntegerTopic("ledModeState").publish(); + 
ledModeState.set(VisionLEDMode.kDefault.value); + + CustomGPIO.setConfig(hardwareConfig); + + if (Platform.isRaspberryPi()) { + pigpioSocket = new PigpioSocket(); + } else { + pigpioSocket = null; + } + + statusLED = + hardwareConfig.statusRGBPins.size() == 3 + ? new StatusLED(hardwareConfig.statusRGBPins) + : null; + + var hasBrightnessRange = hardwareConfig.ledBrightnessRange.size() == 2; + visionLED = + hardwareConfig.ledPins.isEmpty() + ? null + : new VisionLED( + hardwareConfig.ledPins, + hasBrightnessRange ? hardwareConfig.ledBrightnessRange.get(0) : 0, + hasBrightnessRange ? hardwareConfig.ledBrightnessRange.get(1) : 100, + pigpioSocket, + ledModeState::set); + + ledModeListener = + visionLED == null + ? null + : new NTDataChangeListener( + NetworkTablesManager.getInstance().kRootTable.getInstance(), + ledModeRequest, + visionLED::onLedModeChange); + + Runtime.getRuntime().addShutdownHook(new Thread(this::onJvmExit)); + + if (visionLED != null) { + visionLED.setBrightness(hardwareSettings.ledBrightnessPercentage); + visionLED.blink(85, 4); // bootup blink + } + + // Start hardware metrics thread (Disabled until implemented) + // if (Platform.isLinux()) MetricsPublisher.getInstance().startTask(); } - statusLED = - hardwareConfig.statusRGBPins.size() == 3 - ? new StatusLED(hardwareConfig.statusRGBPins) - : null; - - var hasBrightnessRange = hardwareConfig.ledBrightnessRange.size() == 2; - visionLED = - hardwareConfig.ledPins.isEmpty() - ? null - : new VisionLED( - hardwareConfig.ledPins, - hasBrightnessRange ? hardwareConfig.ledBrightnessRange.get(0) : 0, - hasBrightnessRange ? hardwareConfig.ledBrightnessRange.get(1) : 100, - pigpioSocket, - ledModeState::set); - - ledModeListener = - visionLED == null - ? 
null - : new NTDataChangeListener( - NetworkTablesManager.getInstance().kRootTable.getInstance(), - ledModeRequest, - visionLED::onLedModeChange); - - Runtime.getRuntime().addShutdownHook(new Thread(this::onJvmExit)); - - if (visionLED != null) { - visionLED.setBrightness(hardwareSettings.ledBrightnessPercentage); - visionLED.blink(85, 4); // bootup blink + public void setBrightnessPercent(int percent) { + if (percent != hardwareSettings.ledBrightnessPercentage) { + hardwareSettings.ledBrightnessPercentage = percent; + if (visionLED != null) visionLED.setBrightness(percent); + ConfigManager.getInstance().requestSave(); + logger.info("Setting led brightness to " + percent + "%"); + } } - // Start hardware metrics thread (Disabled until implemented) - // if (Platform.isLinux()) MetricsPublisher.getInstance().startTask(); - } + private void onJvmExit() { + logger.info("Shutting down LEDs..."); + if (visionLED != null) visionLED.setState(false); - public void setBrightnessPercent(int percent) { - if (percent != hardwareSettings.ledBrightnessPercentage) { - hardwareSettings.ledBrightnessPercentage = percent; - if (visionLED != null) visionLED.setBrightness(percent); - ConfigManager.getInstance().requestSave(); - logger.info("Setting led brightness to " + percent + "%"); - } - } - - private void onJvmExit() { - logger.info("Shutting down LEDs..."); - if (visionLED != null) visionLED.setState(false); - - logger.info("Force-flushing settings..."); - ConfigManager.getInstance().saveToDisk(); - } - - public boolean restartDevice() { - if (Platform.isLinux()) { - try { - return shellExec.executeBashCommand("reboot now") == 0; - } catch (IOException e) { - logger.error("Could not restart device!", e); - return false; - } + logger.info("Force-flushing settings..."); + ConfigManager.getInstance().saveToDisk(); } - try { - return shellExec.executeBashCommand(hardwareConfig.restartHardwareCommand) == 0; - } catch (IOException e) { - logger.error("Could not restart device!", e); - 
return false; + + public boolean restartDevice() { + if (Platform.isLinux()) { + try { + return shellExec.executeBashCommand("reboot now") == 0; + } catch (IOException e) { + logger.error("Could not restart device!", e); + return false; + } + } + try { + return shellExec.executeBashCommand(hardwareConfig.restartHardwareCommand) == 0; + } catch (IOException e) { + logger.error("Could not restart device!", e); + return false; + } } - } - - public void setStatus(ProgramStatus status) { - switch (status) { - case UHOH: - // red flashing, green off - break; - case RUNNING: - // red solid, green off - break; - case RUNNING_NT: - // red off, green solid - break; - case RUNNING_NT_TARGET: - // red off, green flashing - break; + + public void setStatus(ProgramStatus status) { + switch (status) { + case UHOH: + // red flashing, green off + break; + case RUNNING: + // red solid, green off + break; + case RUNNING_NT: + // red off, green solid + break; + case RUNNING_NT_TARGET: + // red off, green flashing + break; + } } - } - public HardwareConfig getConfig() { - return hardwareConfig; - } + public HardwareConfig getConfig() { + return hardwareConfig; + } - public void publishMetrics() { - metricsManager.publishMetrics(); - } + public void publishMetrics() { + metricsManager.publishMetrics(); + } } diff --git a/photon-core/src/main/java/org/photonvision/common/hardware/PiVersion.java b/photon-core/src/main/java/org/photonvision/common/hardware/PiVersion.java index 240f98779c..dbf23ac7e4 100644 --- a/photon-core/src/main/java/org/photonvision/common/hardware/PiVersion.java +++ b/photon-core/src/main/java/org/photonvision/common/hardware/PiVersion.java @@ -21,51 +21,51 @@ import org.photonvision.common.util.ShellExec; public enum PiVersion { - PI_B("Pi Model B"), - COMPUTE_MODULE("Compute Module Rev"), - ZERO_W("Pi Zero W Rev 1.1"), - ZERO_2_W("Raspberry Pi Zero 2"), - PI_3("Pi 3"), - PI_4("Pi 4"), - COMPUTE_MODULE_3("Compute Module 3"), - UNKNOWN("UNKNOWN"); + PI_B("Pi Model 
B"), + COMPUTE_MODULE("Compute Module Rev"), + ZERO_W("Pi Zero W Rev 1.1"), + ZERO_2_W("Raspberry Pi Zero 2"), + PI_3("Pi 3"), + PI_4("Pi 4"), + COMPUTE_MODULE_3("Compute Module 3"), + UNKNOWN("UNKNOWN"); - private final String identifier; - private static final ShellExec shell = new ShellExec(true, false); - private static final PiVersion currentPiVersion = calcPiVersion(); + private final String identifier; + private static final ShellExec shell = new ShellExec(true, false); + private static final PiVersion currentPiVersion = calcPiVersion(); - PiVersion(String s) { - this.identifier = s.toLowerCase(); - } - - public static PiVersion getPiVersion() { - return currentPiVersion; - } - - private static PiVersion calcPiVersion() { - if (!Platform.isRaspberryPi()) return PiVersion.UNKNOWN; - String piString = getPiVersionString(); - for (PiVersion p : PiVersion.values()) { - if (piString.toLowerCase().contains(p.identifier)) return p; + PiVersion(String s) { + this.identifier = s.toLowerCase(); } - return UNKNOWN; - } - // Query /proc/device-tree/model. This should return the model of the pi - // Versions here: - // https://github.com/raspberrypi/linux/blob/rpi-5.10.y/arch/arm/boot/dts/bcm2710-rpi-cm3.dts - private static String getPiVersionString() { - if (!Platform.isRaspberryPi()) return ""; - try { - shell.executeBashCommand("cat /proc/device-tree/model"); - } catch (IOException e) { - e.printStackTrace(); + public static PiVersion getPiVersion() { + return currentPiVersion; } - if (shell.getExitCode() == 0) { - // We expect it to be in the format "raspberry pi X model X" - return shell.getOutput(); + + private static PiVersion calcPiVersion() { + if (!Platform.isRaspberryPi()) return PiVersion.UNKNOWN; + String piString = getPiVersionString(); + for (PiVersion p : PiVersion.values()) { + if (piString.toLowerCase().contains(p.identifier)) return p; + } + return UNKNOWN; } - return ""; - } + // Query /proc/device-tree/model. 
This should return the model of the pi + // Versions here: + // https://github.com/raspberrypi/linux/blob/rpi-5.10.y/arch/arm/boot/dts/bcm2710-rpi-cm3.dts + private static String getPiVersionString() { + if (!Platform.isRaspberryPi()) return ""; + try { + shell.executeBashCommand("cat /proc/device-tree/model"); + } catch (IOException e) { + e.printStackTrace(); + } + if (shell.getExitCode() == 0) { + // We expect it to be in the format "raspberry pi X model X" + return shell.getOutput(); + } + + return ""; + } } diff --git a/photon-core/src/main/java/org/photonvision/common/hardware/Platform.java b/photon-core/src/main/java/org/photonvision/common/hardware/Platform.java index 1a0290cef2..1c500ab722 100644 --- a/photon-core/src/main/java/org/photonvision/common/hardware/Platform.java +++ b/photon-core/src/main/java/org/photonvision/common/hardware/Platform.java @@ -26,190 +26,190 @@ @SuppressWarnings("unused") public enum Platform { - // WPILib Supported (JNI) - WINDOWS_64("Windows x64", false, OSType.WINDOWS, true), - LINUX_32("Linux x86", false, OSType.LINUX, true), - LINUX_64("Linux x64", false, OSType.LINUX, true), - LINUX_RASPBIAN32( - "Linux Raspbian 32-bit", true, OSType.LINUX, true), // Raspberry Pi 3/4 with a 32-bit image - LINUX_RASPBIAN64( - "Linux Raspbian 64-bit", true, OSType.LINUX, true), // Raspberry Pi 3/4 with a 64-bit image - LINUX_AARCH64("Linux AARCH64", false, OSType.LINUX, true), // Jetson Nano, Jetson TX2 - - // PhotonVision Supported (Manual build/install) - LINUX_ARM32("Linux ARM32", false, OSType.LINUX, true), // ODROID XU4, C1+ - LINUX_ARM64("Linux ARM64", false, OSType.LINUX, true), // ODROID C2, N2 - - // Completely unsupported - WINDOWS_32("Windows x86", false, OSType.WINDOWS, false), - MACOS("Mac OS", false, OSType.MACOS, false), - UNKNOWN("Unsupported Platform", false, OSType.UNKNOWN, false); - - private enum OSType { - WINDOWS, - LINUX, - MACOS, - UNKNOWN - } - - private static final ShellExec shell = new ShellExec(true, false); - 
public final String description; - public final boolean isPi; - public final OSType osType; - public final boolean isSupported; - - // Set once at init, shouldn't be needed after. - private static final Platform currentPlatform = getCurrentPlatform(); - private static final boolean isRoot = checkForRoot(); - - Platform(String description, boolean isPi, OSType osType, boolean isSupported) { - this.description = description; - this.isPi = isPi; - this.osType = osType; - this.isSupported = isSupported; - } - - ////////////////////////////////////////////////////// - // Public API - - // Checks specifically if unix shell and API are supported - public static boolean isLinux() { - return currentPlatform.osType == OSType.LINUX; - } - - public static boolean isRaspberryPi() { - return currentPlatform.isPi; - } - - public static String getPlatformName() { - if (currentPlatform.equals(UNKNOWN)) { - return UnknownPlatformString; - } else { - return currentPlatform.description; + // WPILib Supported (JNI) + WINDOWS_64("Windows x64", false, OSType.WINDOWS, true), + LINUX_32("Linux x86", false, OSType.LINUX, true), + LINUX_64("Linux x64", false, OSType.LINUX, true), + LINUX_RASPBIAN32( + "Linux Raspbian 32-bit", true, OSType.LINUX, true), // Raspberry Pi 3/4 with a 32-bit image + LINUX_RASPBIAN64( + "Linux Raspbian 64-bit", true, OSType.LINUX, true), // Raspberry Pi 3/4 with a 64-bit image + LINUX_AARCH64("Linux AARCH64", false, OSType.LINUX, true), // Jetson Nano, Jetson TX2 + + // PhotonVision Supported (Manual build/install) + LINUX_ARM32("Linux ARM32", false, OSType.LINUX, true), // ODROID XU4, C1+ + LINUX_ARM64("Linux ARM64", false, OSType.LINUX, true), // ODROID C2, N2 + + // Completely unsupported + WINDOWS_32("Windows x86", false, OSType.WINDOWS, false), + MACOS("Mac OS", false, OSType.MACOS, false), + UNKNOWN("Unsupported Platform", false, OSType.UNKNOWN, false); + + private enum OSType { + WINDOWS, + LINUX, + MACOS, + UNKNOWN } - } - - public static boolean isRoot() { 
- return isRoot; - } - - ////////////////////////////////////////////////////// - - // Debug info related to unknown platforms for debug help - private static final String OS_NAME = System.getProperty("os.name"); - private static final String OS_ARCH = System.getProperty("os.arch"); - private static final String UnknownPlatformString = - String.format("Unknown Platform. OS: %s, Architecture: %s", OS_NAME, OS_ARCH); - - @SuppressWarnings("StatementWithEmptyBody") - private static boolean checkForRoot() { - if (isLinux()) { - try { - shell.executeBashCommand("id -u"); - } catch (IOException e) { - e.printStackTrace(); - } - - while (!shell.isOutputCompleted()) { - // TODO: add timeout - } - - if (shell.getExitCode() == 0) { - return shell.getOutput().split("\n")[0].equals("0"); - } - - } else { - return true; + + private static final ShellExec shell = new ShellExec(true, false); + public final String description; + public final boolean isPi; + public final OSType osType; + public final boolean isSupported; + + // Set once at init, shouldn't be needed after. 
+ private static final Platform currentPlatform = getCurrentPlatform(); + private static final boolean isRoot = checkForRoot(); + + Platform(String description, boolean isPi, OSType osType, boolean isSupported) { + this.description = description; + this.isPi = isPi; + this.osType = osType; + this.isSupported = isSupported; } - return false; - } - - private static Platform getCurrentPlatform() { - if (RuntimeDetector.isWindows()) { - if (RuntimeDetector.is32BitIntel()) { - return WINDOWS_32; - } else if (RuntimeDetector.is64BitIntel()) { - return WINDOWS_64; - } else { - // please don't try this - return UNKNOWN; - } + + ////////////////////////////////////////////////////// + // Public API + + // Checks specifically if unix shell and API are supported + public static boolean isLinux() { + return currentPlatform.osType == OSType.LINUX; } - if (RuntimeDetector.isMac()) { - // TODO - once we have real support, this might have to be more granular - return MACOS; + public static boolean isRaspberryPi() { + return currentPlatform.isPi; } - if (RuntimeDetector.isLinux()) { - if (isPiSBC()) { - if (RuntimeDetector.isArm32()) { - return LINUX_RASPBIAN32; - } else if (RuntimeDetector.isArm64()) { - return LINUX_RASPBIAN64; + public static String getPlatformName() { + if (currentPlatform.equals(UNKNOWN)) { + return UnknownPlatformString; } else { - // Unknown/exotic installation - return UNKNOWN; + return currentPlatform.description; } - } else if (isJetsonSBC()) { - if (RuntimeDetector.isArm64()) { - // TODO - do we need to check OS version? - return LINUX_AARCH64; + } + + public static boolean isRoot() { + return isRoot; + } + + ////////////////////////////////////////////////////// + + // Debug info related to unknown platforms for debug help + private static final String OS_NAME = System.getProperty("os.name"); + private static final String OS_ARCH = System.getProperty("os.arch"); + private static final String UnknownPlatformString = + String.format("Unknown Platform. 
OS: %s, Architecture: %s", OS_NAME, OS_ARCH); + + @SuppressWarnings("StatementWithEmptyBody") + private static boolean checkForRoot() { + if (isLinux()) { + try { + shell.executeBashCommand("id -u"); + } catch (IOException e) { + e.printStackTrace(); + } + + while (!shell.isOutputCompleted()) { + // TODO: add timeout + } + + if (shell.getExitCode() == 0) { + return shell.getOutput().split("\n")[0].equals("0"); + } + } else { - // Unknown/exotic installation - return UNKNOWN; + return true; + } + return false; + } + + private static Platform getCurrentPlatform() { + if (RuntimeDetector.isWindows()) { + if (RuntimeDetector.is32BitIntel()) { + return WINDOWS_32; + } else if (RuntimeDetector.is64BitIntel()) { + return WINDOWS_64; + } else { + // please don't try this + return UNKNOWN; + } } - } else if (RuntimeDetector.is64BitIntel()) { - return LINUX_64; - } else if (RuntimeDetector.is32BitIntel()) { - return LINUX_32; - } else if (RuntimeDetector.isArm64()) { - // TODO - os detection needed? - return LINUX_AARCH64; - } else { - // Unknown or otherwise unsupported platform + + if (RuntimeDetector.isMac()) { + // TODO - once we have real support, this might have to be more granular + return MACOS; + } + + if (RuntimeDetector.isLinux()) { + if (isPiSBC()) { + if (RuntimeDetector.isArm32()) { + return LINUX_RASPBIAN32; + } else if (RuntimeDetector.isArm64()) { + return LINUX_RASPBIAN64; + } else { + // Unknown/exotic installation + return UNKNOWN; + } + } else if (isJetsonSBC()) { + if (RuntimeDetector.isArm64()) { + // TODO - do we need to check OS version? + return LINUX_AARCH64; + } else { + // Unknown/exotic installation + return UNKNOWN; + } + } else if (RuntimeDetector.is64BitIntel()) { + return LINUX_64; + } else if (RuntimeDetector.is32BitIntel()) { + return LINUX_32; + } else if (RuntimeDetector.isArm64()) { + // TODO - os detection needed? 
+ return LINUX_AARCH64; + } else { + // Unknown or otherwise unsupported platform + return Platform.UNKNOWN; + } + } + + // If we fall through all the way to here, return Platform.UNKNOWN; - } } - // If we fall through all the way to here, - return Platform.UNKNOWN; - } - - // Check for various known SBC types - private static boolean isPiSBC() { - return fileHasText("/proc/cpuinfo", "Raspberry Pi"); - } - - private static boolean isJetsonSBC() { - // https://forums.developer.nvidia.com/t/how-to-recognize-jetson-nano-device/146624 - return fileHasText("/proc/device-tree/model", "NVIDIA Jetson"); - } - - // Checks for various names of linux OS - private static boolean isStretch() { - // TODO - this is a total guess - return fileHasText("/etc/os-release", "Stretch"); - } - - private static boolean isBuster() { - // TODO - this is a total guess - return fileHasText("/etc/os-release", "Buster"); - } - - private static boolean fileHasText(String filename, String text) { - try (BufferedReader reader = Files.newBufferedReader(Paths.get(filename))) { - while (true) { - String value = reader.readLine(); - if (value == null) { - return false; - - } else if (value.contains(text)) { - return true; - } // else, next line - } - } catch (IOException ex) { - return false; + // Check for various known SBC types + private static boolean isPiSBC() { + return fileHasText("/proc/cpuinfo", "Raspberry Pi"); + } + + private static boolean isJetsonSBC() { + // https://forums.developer.nvidia.com/t/how-to-recognize-jetson-nano-device/146624 + return fileHasText("/proc/device-tree/model", "NVIDIA Jetson"); + } + + // Checks for various names of linux OS + private static boolean isStretch() { + // TODO - this is a total guess + return fileHasText("/etc/os-release", "Stretch"); + } + + private static boolean isBuster() { + // TODO - this is a total guess + return fileHasText("/etc/os-release", "Buster"); + } + + private static boolean fileHasText(String filename, String text) { + try 
(BufferedReader reader = Files.newBufferedReader(Paths.get(filename))) { + while (true) { + String value = reader.readLine(); + if (value == null) { + return false; + + } else if (value.contains(text)) { + return true; + } // else, next line + } + } catch (IOException ex) { + return false; + } } - } } diff --git a/photon-core/src/main/java/org/photonvision/common/hardware/StatusLED.java b/photon-core/src/main/java/org/photonvision/common/hardware/StatusLED.java index 01a181e0a7..0c10b3548e 100644 --- a/photon-core/src/main/java/org/photonvision/common/hardware/StatusLED.java +++ b/photon-core/src/main/java/org/photonvision/common/hardware/StatusLED.java @@ -23,26 +23,26 @@ import org.photonvision.common.hardware.GPIO.pi.PigpioPin; public class StatusLED { - public final GPIOBase redLED; - public final GPIOBase greenLED; - public final GPIOBase blueLED; + public final GPIOBase redLED; + public final GPIOBase greenLED; + public final GPIOBase blueLED; - public StatusLED(List statusLedPins) { - // fill unassigned pins with -1 to disable - if (statusLedPins.size() != 3) { - for (int i = 0; i < 3 - statusLedPins.size(); i++) { - statusLedPins.add(-1); - } - } + public StatusLED(List statusLedPins) { + // fill unassigned pins with -1 to disable + if (statusLedPins.size() != 3) { + for (int i = 0; i < 3 - statusLedPins.size(); i++) { + statusLedPins.add(-1); + } + } - if (Platform.isRaspberryPi()) { - redLED = new PigpioPin(statusLedPins.get(0)); - greenLED = new PigpioPin(statusLedPins.get(1)); - blueLED = new PigpioPin(statusLedPins.get(2)); - } else { - redLED = new CustomGPIO(statusLedPins.get(0)); - greenLED = new CustomGPIO(statusLedPins.get(1)); - blueLED = new CustomGPIO(statusLedPins.get(2)); + if (Platform.isRaspberryPi()) { + redLED = new PigpioPin(statusLedPins.get(0)); + greenLED = new PigpioPin(statusLedPins.get(1)); + blueLED = new PigpioPin(statusLedPins.get(2)); + } else { + redLED = new CustomGPIO(statusLedPins.get(0)); + greenLED = new 
CustomGPIO(statusLedPins.get(1)); + blueLED = new CustomGPIO(statusLedPins.get(2)); + } } - } } diff --git a/photon-core/src/main/java/org/photonvision/common/hardware/VisionLED.java b/photon-core/src/main/java/org/photonvision/common/hardware/VisionLED.java index 6be153391f..7d1d5273b3 100644 --- a/photon-core/src/main/java/org/photonvision/common/hardware/VisionLED.java +++ b/photon-core/src/main/java/org/photonvision/common/hardware/VisionLED.java @@ -33,171 +33,171 @@ import org.photonvision.common.util.math.MathUtils; public class VisionLED { - private static final Logger logger = new Logger(VisionLED.class, LogGroup.VisionModule); - - private final int[] ledPins; - private final List visionLEDs = new ArrayList<>(); - private final int brightnessMin; - private final int brightnessMax; - private final PigpioSocket pigpioSocket; - - private VisionLEDMode currentLedMode = VisionLEDMode.kDefault; - private BooleanSupplier pipelineModeSupplier; - - private int mappedBrightnessPercentage; - - private final Consumer modeConsumer; - - public VisionLED( - List ledPins, - int brightnessMin, - int brightnessMax, - PigpioSocket pigpioSocket, - Consumer visionLEDmode) { - this.brightnessMin = brightnessMin; - this.brightnessMax = brightnessMax; - this.pigpioSocket = pigpioSocket; - this.modeConsumer = visionLEDmode; - this.ledPins = ledPins.stream().mapToInt(i -> i).toArray(); - ledPins.forEach( - pin -> { - if (Platform.isRaspberryPi()) { - visionLEDs.add(new PigpioPin(pin)); - } else { - visionLEDs.add(new CustomGPIO(pin)); - } - }); - pipelineModeSupplier = () -> false; - } - - public void setPipelineModeSupplier(BooleanSupplier pipelineModeSupplier) { - this.pipelineModeSupplier = pipelineModeSupplier; - } - - public void setBrightness(int percentage) { - mappedBrightnessPercentage = MathUtils.map(percentage, 0, 100, brightnessMin, brightnessMax); - setInternal(currentLedMode, false); - } - - public void blink(int pulseLengthMillis, int blinkCount) { - 
blinkImpl(pulseLengthMillis, blinkCount); - int blinkDuration = pulseLengthMillis * blinkCount * 2; - TimedTaskManager.getInstance() - .addOneShotTask(() -> setInternal(this.currentLedMode, false), blinkDuration + 150); - } - - private void blinkImpl(int pulseLengthMillis, int blinkCount) { - if (Platform.isRaspberryPi()) { - try { - setStateImpl(false); // hack to ensure hardware PWM has stopped before trying to blink - pigpioSocket.generateAndSendWaveform(pulseLengthMillis, blinkCount, ledPins); - } catch (PigpioException e) { - logger.error("Failed to blink!", e); - } catch (NullPointerException e) { - logger.error("Failed to blink, pigpio internal issue!", e); - } - } else { - for (GPIOBase led : visionLEDs) { - led.blink(pulseLengthMillis, blinkCount); - } + private static final Logger logger = new Logger(VisionLED.class, LogGroup.VisionModule); + + private final int[] ledPins; + private final List visionLEDs = new ArrayList<>(); + private final int brightnessMin; + private final int brightnessMax; + private final PigpioSocket pigpioSocket; + + private VisionLEDMode currentLedMode = VisionLEDMode.kDefault; + private BooleanSupplier pipelineModeSupplier; + + private int mappedBrightnessPercentage; + + private final Consumer modeConsumer; + + public VisionLED( + List ledPins, + int brightnessMin, + int brightnessMax, + PigpioSocket pigpioSocket, + Consumer visionLEDmode) { + this.brightnessMin = brightnessMin; + this.brightnessMax = brightnessMax; + this.pigpioSocket = pigpioSocket; + this.modeConsumer = visionLEDmode; + this.ledPins = ledPins.stream().mapToInt(i -> i).toArray(); + ledPins.forEach( + pin -> { + if (Platform.isRaspberryPi()) { + visionLEDs.add(new PigpioPin(pin)); + } else { + visionLEDs.add(new CustomGPIO(pin)); + } + }); + pipelineModeSupplier = () -> false; } - } - - private void setStateImpl(boolean state) { - if (Platform.isRaspberryPi()) { - try { - // stop any active blink - pigpioSocket.waveTxStop(); - } catch (PigpioException e) { - 
logger.error("Failed to stop blink!", e); - } catch (NullPointerException e) { - logger.error("Failed to blink, pigpio internal issue!", e); - } + + public void setPipelineModeSupplier(BooleanSupplier pipelineModeSupplier) { + this.pipelineModeSupplier = pipelineModeSupplier; + } + + public void setBrightness(int percentage) { + mappedBrightnessPercentage = MathUtils.map(percentage, 0, 100, brightnessMin, brightnessMax); + setInternal(currentLedMode, false); + } + + public void blink(int pulseLengthMillis, int blinkCount) { + blinkImpl(pulseLengthMillis, blinkCount); + int blinkDuration = pulseLengthMillis * blinkCount * 2; + TimedTaskManager.getInstance() + .addOneShotTask(() -> setInternal(this.currentLedMode, false), blinkDuration + 150); + } + + private void blinkImpl(int pulseLengthMillis, int blinkCount) { + if (Platform.isRaspberryPi()) { + try { + setStateImpl(false); // hack to ensure hardware PWM has stopped before trying to blink + pigpioSocket.generateAndSendWaveform(pulseLengthMillis, blinkCount, ledPins); + } catch (PigpioException e) { + logger.error("Failed to blink!", e); + } catch (NullPointerException e) { + logger.error("Failed to blink, pigpio internal issue!", e); + } + } else { + for (GPIOBase led : visionLEDs) { + led.blink(pulseLengthMillis, blinkCount); + } + } } - try { - // if the user has set an LED brightness other than 100%, use that instead - if (mappedBrightnessPercentage == 100 || !state) { - visionLEDs.forEach((led) -> led.setState(state)); - } else { - visionLEDs.forEach((led) -> led.setBrightness(mappedBrightnessPercentage)); - } - } catch (NullPointerException e) { - logger.error("Failed to blink, pigpio internal issue!", e); + + private void setStateImpl(boolean state) { + if (Platform.isRaspberryPi()) { + try { + // stop any active blink + pigpioSocket.waveTxStop(); + } catch (PigpioException e) { + logger.error("Failed to stop blink!", e); + } catch (NullPointerException e) { + logger.error("Failed to blink, pigpio internal 
issue!", e); + } + } + try { + // if the user has set an LED brightness other than 100%, use that instead + if (mappedBrightnessPercentage == 100 || !state) { + visionLEDs.forEach((led) -> led.setState(state)); + } else { + visionLEDs.forEach((led) -> led.setBrightness(mappedBrightnessPercentage)); + } + } catch (NullPointerException e) { + logger.error("Failed to blink, pigpio internal issue!", e); + } + } + + public void setState(boolean on) { + setInternal(on ? VisionLEDMode.kOn : VisionLEDMode.kOff, false); } - } - - public void setState(boolean on) { - setInternal(on ? VisionLEDMode.kOn : VisionLEDMode.kOff, false); - } - - void onLedModeChange(NetworkTableEvent entryNotification) { - var newLedModeRaw = (int) entryNotification.valueData.value.getInteger(); - logger.debug("Got LED mode " + newLedModeRaw); - if (newLedModeRaw != currentLedMode.value) { - VisionLEDMode newLedMode; - switch (newLedModeRaw) { - case -1: - newLedMode = VisionLEDMode.kDefault; - break; - case 0: - newLedMode = VisionLEDMode.kOff; - break; - case 1: - newLedMode = VisionLEDMode.kOn; - break; - case 2: - newLedMode = VisionLEDMode.kBlink; - break; - default: - logger.warn("User supplied invalid LED mode, falling back to Default"); - newLedMode = VisionLEDMode.kDefault; - break; - } - setInternal(newLedMode, true); - - if (modeConsumer != null) modeConsumer.accept(newLedMode.value); + + void onLedModeChange(NetworkTableEvent entryNotification) { + var newLedModeRaw = (int) entryNotification.valueData.value.getInteger(); + logger.debug("Got LED mode " + newLedModeRaw); + if (newLedModeRaw != currentLedMode.value) { + VisionLEDMode newLedMode; + switch (newLedModeRaw) { + case -1: + newLedMode = VisionLEDMode.kDefault; + break; + case 0: + newLedMode = VisionLEDMode.kOff; + break; + case 1: + newLedMode = VisionLEDMode.kOn; + break; + case 2: + newLedMode = VisionLEDMode.kBlink; + break; + default: + logger.warn("User supplied invalid LED mode, falling back to Default"); + newLedMode = 
VisionLEDMode.kDefault; + break; + } + setInternal(newLedMode, true); + + if (modeConsumer != null) modeConsumer.accept(newLedMode.value); + } } - } - - private void setInternal(VisionLEDMode newLedMode, boolean fromNT) { - var lastLedMode = currentLedMode; - - if (fromNT) { - switch (newLedMode) { - case kDefault: - setStateImpl(pipelineModeSupplier.getAsBoolean()); - break; - case kOff: - setStateImpl(false); - break; - case kOn: - setStateImpl(true); - break; - case kBlink: - blinkImpl(85, -1); - break; - } - currentLedMode = newLedMode; - logger.info( - "Changing LED mode from \"" + lastLedMode.toString() + "\" to \"" + newLedMode + "\""); - } else { - if (currentLedMode == VisionLEDMode.kDefault) { - switch (newLedMode) { - case kDefault: - setStateImpl(pipelineModeSupplier.getAsBoolean()); - break; - case kOff: - setStateImpl(false); - break; - case kOn: - setStateImpl(true); - break; - case kBlink: - blinkImpl(85, -1); - break; + + private void setInternal(VisionLEDMode newLedMode, boolean fromNT) { + var lastLedMode = currentLedMode; + + if (fromNT) { + switch (newLedMode) { + case kDefault: + setStateImpl(pipelineModeSupplier.getAsBoolean()); + break; + case kOff: + setStateImpl(false); + break; + case kOn: + setStateImpl(true); + break; + case kBlink: + blinkImpl(85, -1); + break; + } + currentLedMode = newLedMode; + logger.info( + "Changing LED mode from \"" + lastLedMode.toString() + "\" to \"" + newLedMode + "\""); + } else { + if (currentLedMode == VisionLEDMode.kDefault) { + switch (newLedMode) { + case kDefault: + setStateImpl(pipelineModeSupplier.getAsBoolean()); + break; + case kOff: + setStateImpl(false); + break; + case kOn: + setStateImpl(true); + break; + case kBlink: + blinkImpl(85, -1); + break; + } + } + logger.info("Changing LED internal state to " + newLedMode.toString()); } - } - logger.info("Changing LED internal state to " + newLedMode.toString()); } - } } diff --git 
a/photon-core/src/main/java/org/photonvision/common/hardware/metrics/MetricsManager.java b/photon-core/src/main/java/org/photonvision/common/hardware/metrics/MetricsManager.java index b68bd06623..d38b527187 100644 --- a/photon-core/src/main/java/org/photonvision/common/hardware/metrics/MetricsManager.java +++ b/photon-core/src/main/java/org/photonvision/common/hardware/metrics/MetricsManager.java @@ -33,129 +33,129 @@ import org.photonvision.common.util.ShellExec; public class MetricsManager { - final Logger logger = new Logger(MetricsManager.class, LogGroup.General); + final Logger logger = new Logger(MetricsManager.class, LogGroup.General); - CmdBase cmds; + CmdBase cmds; - private final ShellExec runCommand = new ShellExec(true, true); + private final ShellExec runCommand = new ShellExec(true, true); - public void setConfig(HardwareConfig config) { - if (config.hasCommandsConfigured()) { - cmds = new FileCmds(); - } else if (Platform.isRaspberryPi()) { - cmds = new PiCmds(); // Pi's can use a hardcoded command set - } else if (Platform.isLinux()) { - cmds = new LinuxCmds(); // Linux/Unix platforms assume a nominal command set - } else { - cmds = new CmdBase(); // default - base has no commands + public void setConfig(HardwareConfig config) { + if (config.hasCommandsConfigured()) { + cmds = new FileCmds(); + } else if (Platform.isRaspberryPi()) { + cmds = new PiCmds(); // Pi's can use a hardcoded command set + } else if (Platform.isLinux()) { + cmds = new LinuxCmds(); // Linux/Unix platforms assume a nominal command set + } else { + cmds = new CmdBase(); // default - base has no commands + } + + cmds.initCmds(config); + } + + public String safeExecute(String str) { + if (str.isEmpty()) return ""; + try { + return execute(str); + } catch (Exception e) { + return "****"; + } } - cmds.initCmds(config); - } + private String cpuMemSave = null; - public String safeExecute(String str) { - if (str.isEmpty()) return ""; - try { - return execute(str); - } catch (Exception 
e) { - return "****"; + public String getMemory() { + if (cmds.cpuMemoryCommand.isEmpty()) return ""; + if (cpuMemSave == null) { + // save the value and only run it once + cpuMemSave = execute(cmds.cpuMemoryCommand); + } + return cpuMemSave; } - } - private String cpuMemSave = null; + public String getTemp() { + return safeExecute(cmds.cpuTemperatureCommand); + } - public String getMemory() { - if (cmds.cpuMemoryCommand.isEmpty()) return ""; - if (cpuMemSave == null) { - // save the value and only run it once - cpuMemSave = execute(cmds.cpuMemoryCommand); + public String getUtilization() { + return safeExecute(cmds.cpuUtilizationCommand); } - return cpuMemSave; - } - public String getTemp() { - return safeExecute(cmds.cpuTemperatureCommand); - } + public String getUptime() { + return safeExecute(cmds.cpuUptimeCommand); + } - public String getUtilization() { - return safeExecute(cmds.cpuUtilizationCommand); - } + public String getThrottleReason() { + return safeExecute(cmds.cpuThrottleReasonCmd); + } - public String getUptime() { - return safeExecute(cmds.cpuUptimeCommand); - } + private String gpuMemSave = null; - public String getThrottleReason() { - return safeExecute(cmds.cpuThrottleReasonCmd); - } + public String getGPUMemorySplit() { + if (gpuMemSave == null) { + // only needs to run once + gpuMemSave = safeExecute(cmds.gpuMemoryCommand); + } + return gpuMemSave; + } - private String gpuMemSave = null; + public String getMallocedMemory() { + return safeExecute(cmds.gpuMemUsageCommand); + } + + public String getUsedDiskPct() { + return safeExecute(cmds.diskUsageCommand); + } - public String getGPUMemorySplit() { - if (gpuMemSave == null) { - // only needs to run once - gpuMemSave = safeExecute(cmds.gpuMemoryCommand); + // TODO: Output in MBs for consistency + public String getUsedRam() { + return safeExecute(cmds.ramUsageCommand); } - return gpuMemSave; - } - - public String getMallocedMemory() { - return safeExecute(cmds.gpuMemUsageCommand); - } - - public 
String getUsedDiskPct() { - return safeExecute(cmds.diskUsageCommand); - } - - // TODO: Output in MBs for consistency - public String getUsedRam() { - return safeExecute(cmds.ramUsageCommand); - } - - public void publishMetrics() { - logger.debug("Publishing Metrics..."); - final var metrics = new HashMap(); - - metrics.put("cpuTemp", this.getTemp()); - metrics.put("cpuUtil", this.getUtilization()); - metrics.put("cpuMem", this.getMemory()); - metrics.put("cpuThr", this.getThrottleReason()); - metrics.put("cpuUptime", this.getUptime()); - metrics.put("gpuMem", this.getGPUMemorySplit()); - metrics.put("ramUtil", this.getUsedRam()); - metrics.put("gpuMemUtil", this.getMallocedMemory()); - metrics.put("diskUtilPct", this.getUsedDiskPct()); - - DataChangeService.getInstance().publishEvent(OutgoingUIEvent.wrappedOf("metrics", metrics)); - } - - public synchronized String execute(String command) { - try { - runCommand.executeBashCommand(command); - return runCommand.getOutput(); - } catch (Exception e) { - StringWriter sw = new StringWriter(); - PrintWriter pw = new PrintWriter(sw); - e.printStackTrace(pw); - - logger.error( - "Command: \"" - + command - + "\" returned an error!" 
- + "\nOutput Received: " - + runCommand.getOutput() - + "\nStandard Error: " - + runCommand.getError() - + "\nCommand completed: " - + runCommand.isOutputCompleted() - + "\nError completed: " - + runCommand.isErrorCompleted() - + "\nExit code: " - + runCommand.getExitCode() - + "\n Exception: " - + e - + sw); - return ""; + + public void publishMetrics() { + logger.debug("Publishing Metrics..."); + final var metrics = new HashMap(); + + metrics.put("cpuTemp", this.getTemp()); + metrics.put("cpuUtil", this.getUtilization()); + metrics.put("cpuMem", this.getMemory()); + metrics.put("cpuThr", this.getThrottleReason()); + metrics.put("cpuUptime", this.getUptime()); + metrics.put("gpuMem", this.getGPUMemorySplit()); + metrics.put("ramUtil", this.getUsedRam()); + metrics.put("gpuMemUtil", this.getMallocedMemory()); + metrics.put("diskUtilPct", this.getUsedDiskPct()); + + DataChangeService.getInstance().publishEvent(OutgoingUIEvent.wrappedOf("metrics", metrics)); + } + + public synchronized String execute(String command) { + try { + runCommand.executeBashCommand(command); + return runCommand.getOutput(); + } catch (Exception e) { + StringWriter sw = new StringWriter(); + PrintWriter pw = new PrintWriter(sw); + e.printStackTrace(pw); + + logger.error( + "Command: \"" + + command + + "\" returned an error!" 
+ + "\nOutput Received: " + + runCommand.getOutput() + + "\nStandard Error: " + + runCommand.getError() + + "\nCommand completed: " + + runCommand.isOutputCompleted() + + "\nError completed: " + + runCommand.isErrorCompleted() + + "\nExit code: " + + runCommand.getExitCode() + + "\n Exception: " + + e + + sw); + return ""; + } } - } } diff --git a/photon-core/src/main/java/org/photonvision/common/hardware/metrics/cmds/CmdBase.java b/photon-core/src/main/java/org/photonvision/common/hardware/metrics/cmds/CmdBase.java index d22086be6b..69473ad9e1 100644 --- a/photon-core/src/main/java/org/photonvision/common/hardware/metrics/cmds/CmdBase.java +++ b/photon-core/src/main/java/org/photonvision/common/hardware/metrics/cmds/CmdBase.java @@ -20,21 +20,21 @@ import org.photonvision.common.configuration.HardwareConfig; public class CmdBase { - // CPU - public String cpuMemoryCommand = ""; - public String cpuTemperatureCommand = ""; - public String cpuUtilizationCommand = ""; - public String cpuThrottleReasonCmd = ""; - public String cpuUptimeCommand = ""; - // GPU - public String gpuMemoryCommand = ""; - public String gpuMemUsageCommand = ""; - // RAM - public String ramUsageCommand = ""; - // Disk - public String diskUsageCommand = ""; + // CPU + public String cpuMemoryCommand = ""; + public String cpuTemperatureCommand = ""; + public String cpuUtilizationCommand = ""; + public String cpuThrottleReasonCmd = ""; + public String cpuUptimeCommand = ""; + // GPU + public String gpuMemoryCommand = ""; + public String gpuMemUsageCommand = ""; + // RAM + public String ramUsageCommand = ""; + // Disk + public String diskUsageCommand = ""; - public void initCmds(HardwareConfig config) { - // default - do nothing - } + public void initCmds(HardwareConfig config) { + // default - do nothing + } } diff --git a/photon-core/src/main/java/org/photonvision/common/hardware/metrics/cmds/FileCmds.java b/photon-core/src/main/java/org/photonvision/common/hardware/metrics/cmds/FileCmds.java 
index f7942560d4..7a27469037 100644 --- a/photon-core/src/main/java/org/photonvision/common/hardware/metrics/cmds/FileCmds.java +++ b/photon-core/src/main/java/org/photonvision/common/hardware/metrics/cmds/FileCmds.java @@ -20,19 +20,19 @@ import org.photonvision.common.configuration.HardwareConfig; public class FileCmds extends CmdBase { - @Override - public void initCmds(HardwareConfig config) { - cpuMemoryCommand = config.cpuMemoryCommand; - cpuTemperatureCommand = config.cpuTempCommand; - cpuUtilizationCommand = config.cpuUtilCommand; - cpuThrottleReasonCmd = config.cpuThrottleReasonCmd; - cpuUptimeCommand = config.cpuUptimeCommand; + @Override + public void initCmds(HardwareConfig config) { + cpuMemoryCommand = config.cpuMemoryCommand; + cpuTemperatureCommand = config.cpuTempCommand; + cpuUtilizationCommand = config.cpuUtilCommand; + cpuThrottleReasonCmd = config.cpuThrottleReasonCmd; + cpuUptimeCommand = config.cpuUptimeCommand; - gpuMemoryCommand = config.gpuMemoryCommand; - gpuMemUsageCommand = config.gpuMemUsageCommand; + gpuMemoryCommand = config.gpuMemoryCommand; + gpuMemUsageCommand = config.gpuMemUsageCommand; - diskUsageCommand = config.diskUsageCommand; + diskUsageCommand = config.diskUsageCommand; - ramUsageCommand = config.ramUtilCommand; - } + ramUsageCommand = config.ramUtilCommand; + } } diff --git a/photon-core/src/main/java/org/photonvision/common/hardware/metrics/cmds/LinuxCmds.java b/photon-core/src/main/java/org/photonvision/common/hardware/metrics/cmds/LinuxCmds.java index 2acaee228e..1a7d18f35e 100644 --- a/photon-core/src/main/java/org/photonvision/common/hardware/metrics/cmds/LinuxCmds.java +++ b/photon-core/src/main/java/org/photonvision/common/hardware/metrics/cmds/LinuxCmds.java @@ -20,21 +20,21 @@ import org.photonvision.common.configuration.HardwareConfig; public class LinuxCmds extends CmdBase { - public void initCmds(HardwareConfig config) { - // CPU - cpuMemoryCommand = "awk '/MemTotal:/ {print int($2 / 1000);}' /proc/meminfo"; 
+ public void initCmds(HardwareConfig config) { + // CPU + cpuMemoryCommand = "awk '/MemTotal:/ {print int($2 / 1000);}' /proc/meminfo"; - // TODO: boards have lots of thermal devices. Hard to pick the CPU + // TODO: boards have lots of thermal devices. Hard to pick the CPU - cpuUtilizationCommand = - "top -bn1 | grep \"Cpu(s)\" | sed \"s/.*, *\\([0-9.]*\\)%* id.*/\\1/\" | awk '{print 100 - $1}'"; + cpuUtilizationCommand = + "top -bn1 | grep \"Cpu(s)\" | sed \"s/.*, *\\([0-9.]*\\)%* id.*/\\1/\" | awk '{print 100 - $1}'"; - cpuUptimeCommand = "uptime -p | cut -c 4-"; + cpuUptimeCommand = "uptime -p | cut -c 4-"; - // RAM - ramUsageCommand = "awk '/MemAvailable:/ {print int($2 / 1000);}' /proc/meminfo"; + // RAM + ramUsageCommand = "awk '/MemAvailable:/ {print int($2 / 1000);}' /proc/meminfo"; - // Disk - diskUsageCommand = "df ./ --output=pcent | tail -n +2"; - } + // Disk + diskUsageCommand = "df ./ --output=pcent | tail -n +2"; + } } diff --git a/photon-core/src/main/java/org/photonvision/common/hardware/metrics/cmds/PiCmds.java b/photon-core/src/main/java/org/photonvision/common/hardware/metrics/cmds/PiCmds.java index 6c11fc0489..6eb005acc5 100644 --- a/photon-core/src/main/java/org/photonvision/common/hardware/metrics/cmds/PiCmds.java +++ b/photon-core/src/main/java/org/photonvision/common/hardware/metrics/cmds/PiCmds.java @@ -20,22 +20,22 @@ import org.photonvision.common.configuration.HardwareConfig; public class PiCmds extends LinuxCmds { - /** Applies pi-specific commands, ignoring any input configuration */ - public void initCmds(HardwareConfig config) { - super.initCmds(config); + /** Applies pi-specific commands, ignoring any input configuration */ + public void initCmds(HardwareConfig config) { + super.initCmds(config); - // CPU - cpuMemoryCommand = "vcgencmd get_mem arm | grep -Eo '[0-9]+'"; - cpuTemperatureCommand = "sed 's/.\\{3\\}$/.&/' /sys/class/thermal/thermal_zone0/temp"; - cpuThrottleReasonCmd = - "if (( $(( $(vcgencmd get_throttled | grep -Eo 
0x[0-9a-fA-F]*) & 0x01 )) != 0x00 )); then echo \"LOW VOLTAGE\"; " - + " elif (( $(( $(vcgencmd get_throttled | grep -Eo 0x[0-9a-fA-F]*) & 0x08 )) != 0x00 )); then echo \"HIGH TEMP\"; " - + " elif (( $(( $(vcgencmd get_throttled | grep -Eo 0x[0-9a-fA-F]*) & 0x10000 )) != 0x00 )); then echo \"Prev. Low Voltage\"; " - + " elif (( $(( $(vcgencmd get_throttled | grep -Eo 0x[0-9a-fA-F]*) & 0x80000 )) != 0x00 )); then echo \"Prev. High Temp\"; " - + " else echo \"None\"; fi"; + // CPU + cpuMemoryCommand = "vcgencmd get_mem arm | grep -Eo '[0-9]+'"; + cpuTemperatureCommand = "sed 's/.\\{3\\}$/.&/' /sys/class/thermal/thermal_zone0/temp"; + cpuThrottleReasonCmd = + "if (( $(( $(vcgencmd get_throttled | grep -Eo 0x[0-9a-fA-F]*) & 0x01 )) != 0x00 )); then echo \"LOW VOLTAGE\"; " + + " elif (( $(( $(vcgencmd get_throttled | grep -Eo 0x[0-9a-fA-F]*) & 0x08 )) != 0x00 )); then echo \"HIGH TEMP\"; " + + " elif (( $(( $(vcgencmd get_throttled | grep -Eo 0x[0-9a-fA-F]*) & 0x10000 )) != 0x00 )); then echo \"Prev. Low Voltage\"; " + + " elif (( $(( $(vcgencmd get_throttled | grep -Eo 0x[0-9a-fA-F]*) & 0x80000 )) != 0x00 )); then echo \"Prev. 
High Temp\"; " + + " else echo \"None\"; fi"; - // GPU - gpuMemoryCommand = "vcgencmd get_mem gpu | grep -Eo '[0-9]+'"; - gpuMemUsageCommand = "vcgencmd get_mem malloc | grep -Eo '[0-9]+'"; - } + // GPU + gpuMemoryCommand = "vcgencmd get_mem gpu | grep -Eo '[0-9]+'"; + gpuMemUsageCommand = "vcgencmd get_mem malloc | grep -Eo '[0-9]+'"; + } } diff --git a/photon-core/src/main/java/org/photonvision/common/logging/LogGroup.java b/photon-core/src/main/java/org/photonvision/common/logging/LogGroup.java index d968e9f6f1..50524db63f 100644 --- a/photon-core/src/main/java/org/photonvision/common/logging/LogGroup.java +++ b/photon-core/src/main/java/org/photonvision/common/logging/LogGroup.java @@ -18,10 +18,10 @@ package org.photonvision.common.logging; public enum LogGroup { - Camera, - WebServer, - VisionModule, - Data, - General, - Config + Camera, + WebServer, + VisionModule, + Data, + General, + Config } diff --git a/photon-core/src/main/java/org/photonvision/common/logging/LogLevel.java b/photon-core/src/main/java/org/photonvision/common/logging/LogLevel.java index 890a803a37..5d96d3aeb4 100644 --- a/photon-core/src/main/java/org/photonvision/common/logging/LogLevel.java +++ b/photon-core/src/main/java/org/photonvision/common/logging/LogLevel.java @@ -18,17 +18,17 @@ package org.photonvision.common.logging; public enum LogLevel { - ERROR(0, Logger.ANSI_RED), - WARN(1, Logger.ANSI_YELLOW), - INFO(2, Logger.ANSI_GREEN), - DEBUG(3, Logger.ANSI_WHITE), - TRACE(4, Logger.ANSI_CYAN); + ERROR(0, Logger.ANSI_RED), + WARN(1, Logger.ANSI_YELLOW), + INFO(2, Logger.ANSI_GREEN), + DEBUG(3, Logger.ANSI_WHITE), + TRACE(4, Logger.ANSI_CYAN); - public final String colorCode; - public final int code; + public final String colorCode; + public final int code; - LogLevel(int code, String colorCode) { - this.code = code; - this.colorCode = colorCode; - } + LogLevel(int code, String colorCode) { + this.code = code; + this.colorCode = colorCode; + } } diff --git 
a/photon-core/src/main/java/org/photonvision/common/logging/Logger.java b/photon-core/src/main/java/org/photonvision/common/logging/Logger.java index d8338d4ec9..b06fea13fa 100644 --- a/photon-core/src/main/java/org/photonvision/common/logging/Logger.java +++ b/photon-core/src/main/java/org/photonvision/common/logging/Logger.java @@ -31,316 +31,316 @@ @SuppressWarnings("unused") public class Logger { - public static final String ANSI_RESET = "\u001B[0m"; - public static final String ANSI_BLACK = "\u001B[30m"; - public static final String ANSI_RED = "\u001B[31m"; - public static final String ANSI_GREEN = "\u001B[32m"; - public static final String ANSI_YELLOW = "\u001B[33m"; - public static final String ANSI_BLUE = "\u001B[34m"; - public static final String ANSI_PURPLE = "\u001B[35m"; - public static final String ANSI_CYAN = "\u001B[36m"; - public static final String ANSI_WHITE = "\u001B[37m"; - - public static final int MAX_LOGS_TO_KEEP = 100; - - private static final SimpleDateFormat simpleDateFormat = - new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); - - private static final List> uiBacklog = new ArrayList<>(); - private static boolean connected = false; - - private static final UILogAppender uiLogAppender = new UILogAppender(); - - private final String className; - private final LogGroup group; - - public Logger(Class clazz, LogGroup group) { - this.className = clazz.getSimpleName(); - this.group = group; - } - - public Logger(Class clazz, String suffix, LogGroup group) { - this.className = clazz.getSimpleName() + " - " + suffix; - this.group = group; - } - - public static String getDate() { - return simpleDateFormat.format(new Date()); - } - - public static String format( - String logMessage, LogLevel level, LogGroup group, String clazz, boolean color) { - var date = getDate(); - var builder = new StringBuilder(); - if (color) builder.append(level.colorCode); - builder - .append("[") - .append(date) - .append("] [") - .append(group) - .append(" - ") - 
.append(clazz) - .append("] [") - .append(level.name()) - .append("] ") - .append(logMessage); - if (color) builder.append(ANSI_RESET); - return builder.toString(); - } - - private static final HashMap levelMap = new HashMap<>(); - private static final List currentAppenders = new ArrayList<>(); - - static { - levelMap.put(LogGroup.Camera, LogLevel.INFO); - levelMap.put(LogGroup.General, LogLevel.INFO); - levelMap.put(LogGroup.WebServer, LogLevel.INFO); - levelMap.put(LogGroup.Data, LogLevel.INFO); - levelMap.put(LogGroup.VisionModule, LogLevel.INFO); - levelMap.put(LogGroup.Config, LogLevel.INFO); - } - - static { - currentAppenders.add(new ConsoleLogAppender()); - currentAppenders.add(uiLogAppender); - addFileAppender(ConfigManager.getInstance().getLogPath()); - cleanLogs(ConfigManager.getInstance().getLogsDir()); - } - - @SuppressWarnings("ResultOfMethodCallIgnored") - public static void addFileAppender(Path logFilePath) { - var file = logFilePath.toFile(); - if (!file.exists()) { - try { - file.getParentFile().mkdirs(); - file.createNewFile(); - } catch (IOException e) { - e.printStackTrace(); - } - } - currentAppenders.add(new FileLogAppender(logFilePath)); - } - - public static void cleanLogs(Path folderToClean) { - File[] logs = folderToClean.toFile().listFiles(); - if (logs == null) return; - LinkedList logFileList = new LinkedList<>(Arrays.asList(logs)); - HashMap logFileStartDateMap = new HashMap<>(); - - // Remove any files from the list for which we can't parse a start date from their name. - // Simultaneously populate our HashMap with Date objects representing the file-name - // indicated log start time. - logFileList.removeIf( - (File arg0) -> { - try { - logFileStartDateMap.put( - arg0, ConfigManager.getInstance().logFnameToDate(arg0.getName())); - return false; - } catch (ParseException e) { - return true; - } - }); - - // Execute a sort on the log file list by date in the filename. 
- logFileList.sort( - (File arg0, File arg1) -> { - Date date0 = logFileStartDateMap.get(arg0); - Date date1 = logFileStartDateMap.get(arg1); - return date1.compareTo(date0); - }); - - int logCounter = 0; - for (File file : logFileList) { - // Due to filtering above, everything in logFileList should be a log file - if (logCounter < MAX_LOGS_TO_KEEP) { - // Skip over the first MAX_LOGS_TO_KEEP files - logCounter++; - } else { - // Delete this file. - file.delete(); - } - } - } - - public static void setLevel(LogGroup group, LogLevel newLevel) { - levelMap.put(group, newLevel); - } - - // TODO: profile - private static void log(String message, LogLevel level, LogGroup group, String clazz) { - for (var a : currentAppenders) { - var shouldColor = a instanceof ConsoleLogAppender; - var formattedMessage = format(message, level, group, clazz, shouldColor); - a.log(formattedMessage, level); - } - if (!connected) { - synchronized (uiBacklog) { - uiBacklog.add(Pair.of(format(message, level, group, clazz, false), level)); - } - } - } - - public static void sendConnectedBacklog() { - connected = true; - synchronized (uiBacklog) { - for (var message : uiBacklog) { - uiLogAppender.log(message.getLeft(), message.getRight()); - } - uiBacklog.clear(); - } - } - - public boolean shouldLog(LogLevel logLevel) { - return logLevel.code <= levelMap.get(group).code; - } - - private void log(String message, LogLevel level) { - if (shouldLog(level)) { - log(message, level, group, className); - } - } - - private void log(String message, LogLevel messageLevel, LogLevel conditionalLevel) { - if (shouldLog(conditionalLevel)) { - log(message, messageLevel, group, className); - } - } - - private void log(Supplier messageSupplier, LogLevel level) { - if (shouldLog(level)) { - log(messageSupplier.get(), level, group, className); - } - } - - private void log( - Supplier messageSupplier, LogLevel messageLevel, LogLevel conditionalLevel) { - if (shouldLog(conditionalLevel)) { - 
log(messageSupplier.get(), messageLevel, group, className); - } - } - - public void error(Supplier messageSupplier) { - log(messageSupplier, LogLevel.ERROR); - } - - public void error(String message) { - log(message, LogLevel.ERROR); - } - - /** - * Logs an error message with the stack trace of a Throwable. The stacktrace will only be printed - * if the current LogLevel is TRACE - * - * @param message - * @param t - */ - public void error(String message, Throwable t) { - log(message + ": " + t.getMessage(), LogLevel.ERROR); - log(convertStackTraceToString(t), LogLevel.ERROR, LogLevel.DEBUG); - } - - public void warn(Supplier messageSupplier) { - log(messageSupplier, LogLevel.WARN); - } - - public void warn(String message) { - log(message, LogLevel.WARN); - } - - public void info(Supplier messageSupplier) { - log(messageSupplier, LogLevel.INFO); - } - - public void info(String message) { - log(message, LogLevel.INFO); - } - - public void debug(Supplier messageSupplier) { - log(messageSupplier, LogLevel.DEBUG); - } - - public void debug(String message) { - log(message, LogLevel.DEBUG); - } - - public void trace(Supplier messageSupplier) { - log(messageSupplier, LogLevel.TRACE); - } - - public void trace(String message) { - log(message, LogLevel.TRACE); - } - - private static String convertStackTraceToString(Throwable throwable) { - try (StringWriter sw = new StringWriter(); - PrintWriter pw = new PrintWriter(sw)) { - throwable.printStackTrace(pw); - return sw.toString(); - } catch (IOException ioe) { - throw new IllegalStateException(ioe); - } - } - - private interface LogAppender { - void log(String message, LogLevel level); - } - - private static class ConsoleLogAppender implements LogAppender { - @Override - public void log(String message, LogLevel level) { - System.out.println(message); - } - } - - private static class UILogAppender implements LogAppender { - @Override - public void log(String message, LogLevel level) { - var messageMap = new HashMap(); - 
messageMap.put("logMessage", message); - messageMap.put("logLevel", level.code); - var superMap = new HashMap(); - superMap.put("logMessage", messageMap); - DataChangeService.getInstance().publishEvent(OutgoingUIEvent.wrappedOf("log", superMap)); - } - } - - private static class FileLogAppender implements LogAppender { - private OutputStream out; - private boolean wantsFlush; - - public FileLogAppender(Path logFilePath) { - try { - this.out = new FileOutputStream(logFilePath.toFile()); - TimedTaskManager.getInstance() - .addTask( - "FileLogAppender", - () -> { - try { - if (wantsFlush) { - out.flush(); - wantsFlush = false; + public static final String ANSI_RESET = "\u001B[0m"; + public static final String ANSI_BLACK = "\u001B[30m"; + public static final String ANSI_RED = "\u001B[31m"; + public static final String ANSI_GREEN = "\u001B[32m"; + public static final String ANSI_YELLOW = "\u001B[33m"; + public static final String ANSI_BLUE = "\u001B[34m"; + public static final String ANSI_PURPLE = "\u001B[35m"; + public static final String ANSI_CYAN = "\u001B[36m"; + public static final String ANSI_WHITE = "\u001B[37m"; + + public static final int MAX_LOGS_TO_KEEP = 100; + + private static final SimpleDateFormat simpleDateFormat = + new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); + + private static final List> uiBacklog = new ArrayList<>(); + private static boolean connected = false; + + private static final UILogAppender uiLogAppender = new UILogAppender(); + + private final String className; + private final LogGroup group; + + public Logger(Class clazz, LogGroup group) { + this.className = clazz.getSimpleName(); + this.group = group; + } + + public Logger(Class clazz, String suffix, LogGroup group) { + this.className = clazz.getSimpleName() + " - " + suffix; + this.group = group; + } + + public static String getDate() { + return simpleDateFormat.format(new Date()); + } + + public static String format( + String logMessage, LogLevel level, LogGroup group, String clazz, 
boolean color) { + var date = getDate(); + var builder = new StringBuilder(); + if (color) builder.append(level.colorCode); + builder + .append("[") + .append(date) + .append("] [") + .append(group) + .append(" - ") + .append(clazz) + .append("] [") + .append(level.name()) + .append("] ") + .append(logMessage); + if (color) builder.append(ANSI_RESET); + return builder.toString(); + } + + private static final HashMap levelMap = new HashMap<>(); + private static final List currentAppenders = new ArrayList<>(); + + static { + levelMap.put(LogGroup.Camera, LogLevel.INFO); + levelMap.put(LogGroup.General, LogLevel.INFO); + levelMap.put(LogGroup.WebServer, LogLevel.INFO); + levelMap.put(LogGroup.Data, LogLevel.INFO); + levelMap.put(LogGroup.VisionModule, LogLevel.INFO); + levelMap.put(LogGroup.Config, LogLevel.INFO); + } + + static { + currentAppenders.add(new ConsoleLogAppender()); + currentAppenders.add(uiLogAppender); + addFileAppender(ConfigManager.getInstance().getLogPath()); + cleanLogs(ConfigManager.getInstance().getLogsDir()); + } + + @SuppressWarnings("ResultOfMethodCallIgnored") + public static void addFileAppender(Path logFilePath) { + var file = logFilePath.toFile(); + if (!file.exists()) { + try { + file.getParentFile().mkdirs(); + file.createNewFile(); + } catch (IOException e) { + e.printStackTrace(); + } + } + currentAppenders.add(new FileLogAppender(logFilePath)); + } + + public static void cleanLogs(Path folderToClean) { + File[] logs = folderToClean.toFile().listFiles(); + if (logs == null) return; + LinkedList logFileList = new LinkedList<>(Arrays.asList(logs)); + HashMap logFileStartDateMap = new HashMap<>(); + + // Remove any files from the list for which we can't parse a start date from their name. + // Simultaneously populate our HashMap with Date objects representing the file-name + // indicated log start time. 
+ logFileList.removeIf( + (File arg0) -> { + try { + logFileStartDateMap.put( + arg0, ConfigManager.getInstance().logFnameToDate(arg0.getName())); + return false; + } catch (ParseException e) { + return true; } - } catch (IOException ignored) { - } - }, - 3000L); - } catch (FileNotFoundException e) { - out = null; - System.err.println("Unable to log to file " + logFilePath); - } - } - - @Override - public void log(String message, LogLevel level) { - message += "\n"; - try { - out.write(message.getBytes()); - wantsFlush = true; - } catch (IOException e) { - e.printStackTrace(); - } catch (NullPointerException e) { - // Nothing to do - no stream available for writing - } - } - } + }); + + // Execute a sort on the log file list by date in the filename. + logFileList.sort( + (File arg0, File arg1) -> { + Date date0 = logFileStartDateMap.get(arg0); + Date date1 = logFileStartDateMap.get(arg1); + return date1.compareTo(date0); + }); + + int logCounter = 0; + for (File file : logFileList) { + // Due to filtering above, everything in logFileList should be a log file + if (logCounter < MAX_LOGS_TO_KEEP) { + // Skip over the first MAX_LOGS_TO_KEEP files + logCounter++; + } else { + // Delete this file. 
+ file.delete(); + } + } + } + + public static void setLevel(LogGroup group, LogLevel newLevel) { + levelMap.put(group, newLevel); + } + + // TODO: profile + private static void log(String message, LogLevel level, LogGroup group, String clazz) { + for (var a : currentAppenders) { + var shouldColor = a instanceof ConsoleLogAppender; + var formattedMessage = format(message, level, group, clazz, shouldColor); + a.log(formattedMessage, level); + } + if (!connected) { + synchronized (uiBacklog) { + uiBacklog.add(Pair.of(format(message, level, group, clazz, false), level)); + } + } + } + + public static void sendConnectedBacklog() { + connected = true; + synchronized (uiBacklog) { + for (var message : uiBacklog) { + uiLogAppender.log(message.getLeft(), message.getRight()); + } + uiBacklog.clear(); + } + } + + public boolean shouldLog(LogLevel logLevel) { + return logLevel.code <= levelMap.get(group).code; + } + + private void log(String message, LogLevel level) { + if (shouldLog(level)) { + log(message, level, group, className); + } + } + + private void log(String message, LogLevel messageLevel, LogLevel conditionalLevel) { + if (shouldLog(conditionalLevel)) { + log(message, messageLevel, group, className); + } + } + + private void log(Supplier messageSupplier, LogLevel level) { + if (shouldLog(level)) { + log(messageSupplier.get(), level, group, className); + } + } + + private void log( + Supplier messageSupplier, LogLevel messageLevel, LogLevel conditionalLevel) { + if (shouldLog(conditionalLevel)) { + log(messageSupplier.get(), messageLevel, group, className); + } + } + + public void error(Supplier messageSupplier) { + log(messageSupplier, LogLevel.ERROR); + } + + public void error(String message) { + log(message, LogLevel.ERROR); + } + + /** + * Logs an error message with the stack trace of a Throwable. 
The stacktrace will only be printed + * if the current LogLevel is TRACE + * + * @param message + * @param t + */ + public void error(String message, Throwable t) { + log(message + ": " + t.getMessage(), LogLevel.ERROR); + log(convertStackTraceToString(t), LogLevel.ERROR, LogLevel.DEBUG); + } + + public void warn(Supplier messageSupplier) { + log(messageSupplier, LogLevel.WARN); + } + + public void warn(String message) { + log(message, LogLevel.WARN); + } + + public void info(Supplier messageSupplier) { + log(messageSupplier, LogLevel.INFO); + } + + public void info(String message) { + log(message, LogLevel.INFO); + } + + public void debug(Supplier messageSupplier) { + log(messageSupplier, LogLevel.DEBUG); + } + + public void debug(String message) { + log(message, LogLevel.DEBUG); + } + + public void trace(Supplier messageSupplier) { + log(messageSupplier, LogLevel.TRACE); + } + + public void trace(String message) { + log(message, LogLevel.TRACE); + } + + private static String convertStackTraceToString(Throwable throwable) { + try (StringWriter sw = new StringWriter(); + PrintWriter pw = new PrintWriter(sw)) { + throwable.printStackTrace(pw); + return sw.toString(); + } catch (IOException ioe) { + throw new IllegalStateException(ioe); + } + } + + private interface LogAppender { + void log(String message, LogLevel level); + } + + private static class ConsoleLogAppender implements LogAppender { + @Override + public void log(String message, LogLevel level) { + System.out.println(message); + } + } + + private static class UILogAppender implements LogAppender { + @Override + public void log(String message, LogLevel level) { + var messageMap = new HashMap(); + messageMap.put("logMessage", message); + messageMap.put("logLevel", level.code); + var superMap = new HashMap(); + superMap.put("logMessage", messageMap); + DataChangeService.getInstance().publishEvent(OutgoingUIEvent.wrappedOf("log", superMap)); + } + } + + private static class FileLogAppender implements 
LogAppender { + private OutputStream out; + private boolean wantsFlush; + + public FileLogAppender(Path logFilePath) { + try { + this.out = new FileOutputStream(logFilePath.toFile()); + TimedTaskManager.getInstance() + .addTask( + "FileLogAppender", + () -> { + try { + if (wantsFlush) { + out.flush(); + wantsFlush = false; + } + } catch (IOException ignored) { + } + }, + 3000L); + } catch (FileNotFoundException e) { + out = null; + System.err.println("Unable to log to file " + logFilePath); + } + } + + @Override + public void log(String message, LogLevel level) { + message += "\n"; + try { + out.write(message.getBytes()); + wantsFlush = true; + } catch (IOException e) { + e.printStackTrace(); + } catch (NullPointerException e) { + // Nothing to do - no stream available for writing + } + } + } } diff --git a/photon-core/src/main/java/org/photonvision/common/networking/NetworkInterface.java b/photon-core/src/main/java/org/photonvision/common/networking/NetworkInterface.java index 8bd8ba182d..8130dccb62 100644 --- a/photon-core/src/main/java/org/photonvision/common/networking/NetworkInterface.java +++ b/photon-core/src/main/java/org/photonvision/common/networking/NetworkInterface.java @@ -23,56 +23,56 @@ @SuppressWarnings("WeakerAccess") public class NetworkInterface { - private static final Logger logger = new Logger(NetworkInterface.class, LogGroup.General); + private static final Logger logger = new Logger(NetworkInterface.class, LogGroup.General); - public final String name; - public final String displayName; - public final String ipAddress; - public final String netmask; - public final String broadcast; + public final String name; + public final String displayName; + public final String ipAddress; + public final String netmask; + public final String broadcast; - public NetworkInterface(java.net.NetworkInterface inetface, InterfaceAddress ifaceAddress) { - name = inetface.getName(); - displayName = inetface.getDisplayName(); + public 
NetworkInterface(java.net.NetworkInterface inetface, InterfaceAddress ifaceAddress) { + name = inetface.getName(); + displayName = inetface.getDisplayName(); - var inetAddress = ifaceAddress.getAddress(); - ipAddress = inetAddress.getHostAddress(); - netmask = getIPv4LocalNetMask(ifaceAddress); + var inetAddress = ifaceAddress.getAddress(); + ipAddress = inetAddress.getHostAddress(); + netmask = getIPv4LocalNetMask(ifaceAddress); - // TODO: (low) hack to "get" gateway, this is gross and bad, pls fix - var splitIPAddr = ipAddress.split("\\."); - splitIPAddr[3] = "1"; - splitIPAddr[3] = "255"; - broadcast = String.join(".", splitIPAddr); - } + // TODO: (low) hack to "get" gateway, this is gross and bad, pls fix + var splitIPAddr = ipAddress.split("\\."); + splitIPAddr[3] = "1"; + splitIPAddr[3] = "255"; + broadcast = String.join(".", splitIPAddr); + } - private static String getIPv4LocalNetMask(InterfaceAddress interfaceAddress) { - var netPrefix = interfaceAddress.getNetworkPrefixLength(); - try { - // Since this is for IPv4, it's 32 bits, so set the sign value of - // the int to "negative"... - int shiftby = (1 << 31); - // For the number of bits of the prefix -1 (we already set the sign bit) - for (int i = netPrefix - 1; i > 0; i--) { - // Shift the sign right... Java makes the sign bit sticky on a shift... - // So no need to "set it back up"... - shiftby = (shiftby >> 1); - } - // Transform the resulting value in xxx.xxx.xxx.xxx format, like if - /// it was a standard address... - // Return the address thus created... - return ((shiftby >> 24) & 255) - + "." - + ((shiftby >> 16) & 255) - + "." - + ((shiftby >> 8) & 255) - + "." 
- + (shiftby & 255); - // return InetAddress.getByName(maskString); - } catch (Exception e) { - logger.error("Failed to get netmask!", e); + private static String getIPv4LocalNetMask(InterfaceAddress interfaceAddress) { + var netPrefix = interfaceAddress.getNetworkPrefixLength(); + try { + // Since this is for IPv4, it's 32 bits, so set the sign value of + // the int to "negative"... + int shiftby = (1 << 31); + // For the number of bits of the prefix -1 (we already set the sign bit) + for (int i = netPrefix - 1; i > 0; i--) { + // Shift the sign right... Java makes the sign bit sticky on a shift... + // So no need to "set it back up"... + shiftby = (shiftby >> 1); + } + // Transform the resulting value in xxx.xxx.xxx.xxx format, like if + /// it was a standard address... + // Return the address thus created... + return ((shiftby >> 24) & 255) + + "." + + ((shiftby >> 16) & 255) + + "." + + ((shiftby >> 8) & 255) + + "." + + (shiftby & 255); + // return InetAddress.getByName(maskString); + } catch (Exception e) { + logger.error("Failed to get netmask!", e); + } + // Something went wrong here... + return null; } - // Something went wrong here... 
- return null; - } } diff --git a/photon-core/src/main/java/org/photonvision/common/networking/NetworkManager.java b/photon-core/src/main/java/org/photonvision/common/networking/NetworkManager.java index c09649bc91..22543c4bb3 100644 --- a/photon-core/src/main/java/org/photonvision/common/networking/NetworkManager.java +++ b/photon-core/src/main/java/org/photonvision/common/networking/NetworkManager.java @@ -25,127 +25,127 @@ import org.photonvision.common.util.ShellExec; public class NetworkManager { - private static final Logger logger = new Logger(NetworkManager.class, LogGroup.General); + private static final Logger logger = new Logger(NetworkManager.class, LogGroup.General); - private NetworkManager() {} + private NetworkManager() {} - private static class SingletonHolder { - private static final NetworkManager INSTANCE = new NetworkManager(); - } - - public static NetworkManager getInstance() { - return SingletonHolder.INSTANCE; - } - - private boolean isManaged = false; - public boolean networkingIsDisabled = false; // Passed in via CLI + private static class SingletonHolder { + private static final NetworkManager INSTANCE = new NetworkManager(); + } - public void initialize(boolean shouldManage) { - isManaged = shouldManage && !networkingIsDisabled; - if (!isManaged) { - return; + public static NetworkManager getInstance() { + return SingletonHolder.INSTANCE; } - var config = ConfigManager.getInstance().getConfig().getNetworkConfig(); - logger.info("Setting " + config.connectionType + " with team " + config.ntServerAddress); - if (Platform.isLinux()) { - if (!Platform.isRoot()) { - logger.error("Cannot manage hostname without root!"); - } - - // always set hostname - if (!config.hostname.isEmpty()) { - try { - var shell = new ShellExec(true, false); - shell.executeBashCommand("cat /etc/hostname | tr -d \" \\t\\n\\r\""); - var oldHostname = shell.getOutput().replace("\n", ""); - - var setHostnameRetCode = - shell.executeBashCommand( - "echo $NEW_HOSTNAME > 
/etc/hostname".replace("$NEW_HOSTNAME", config.hostname)); - setHostnameRetCode = - shell.executeBashCommand("hostnamectl set-hostname " + config.hostname); - - // Add to /etc/hosts - var addHostRetCode = - shell.executeBashCommand( - String.format( - "sed -i \"s/127.0.1.1.*%s/127.0.1.1\\t%s/g\" /etc/hosts", - oldHostname, config.hostname)); - - shell.executeBashCommand("sudo service avahi-daemon restart"); - - var success = setHostnameRetCode == 0 && addHostRetCode == 0; - if (!success) { - logger.error( - "Setting hostname returned non-zero codes (hostname/hosts) " - + setHostnameRetCode - + "|" - + addHostRetCode - + "!"); - } else { - logger.info("Set hostname to " + config.hostname); - } - } catch (Exception e) { - logger.error("Failed to set hostname!", e); - } + private boolean isManaged = false; + public boolean networkingIsDisabled = false; // Passed in via CLI - } else { - logger.warn("Got empty hostname?"); - } - - if (config.connectionType == NetworkMode.DHCP) { - var shell = new ShellExec(); - try { - // set nmcli back to DHCP, and re-run dhclient -- this ought to grab a new IP address - shell.executeBashCommand( - config.setDHCPcommand.replace( - NetworkConfig.NM_IFACE_STRING, config.getEscapedInterfaceName())); - shell.executeBashCommand("dhclient " + config.getPhysicalInterfaceName(), false); - } catch (Exception e) { - logger.error("Exception while setting DHCP!"); + public void initialize(boolean shouldManage) { + isManaged = shouldManage && !networkingIsDisabled; + if (!isManaged) { + return; } - } else if (config.connectionType == NetworkMode.STATIC) { - var shell = new ShellExec(); - if (!config.staticIp.isEmpty()) { - try { - shell.executeBashCommand( - config - .setStaticCommand - .replace(NetworkConfig.NM_IFACE_STRING, config.getEscapedInterfaceName()) - .replace(NetworkConfig.NM_IP_STRING, config.staticIp)); - - if (Platform.isRaspberryPi()) { - // Pi's need to manually have their interface adjusted?? 
and the 5-second sleep is - // integral in my testing (Matt) - shell.executeBashCommand( - "sh -c 'nmcli con down " - + config.getEscapedInterfaceName() - + "; nmcli con up " - + config.getEscapedInterfaceName() - + "'"); + + var config = ConfigManager.getInstance().getConfig().getNetworkConfig(); + logger.info("Setting " + config.connectionType + " with team " + config.ntServerAddress); + if (Platform.isLinux()) { + if (!Platform.isRoot()) { + logger.error("Cannot manage hostname without root!"); + } + + // always set hostname + if (!config.hostname.isEmpty()) { + try { + var shell = new ShellExec(true, false); + shell.executeBashCommand("cat /etc/hostname | tr -d \" \\t\\n\\r\""); + var oldHostname = shell.getOutput().replace("\n", ""); + + var setHostnameRetCode = + shell.executeBashCommand( + "echo $NEW_HOSTNAME > /etc/hostname".replace("$NEW_HOSTNAME", config.hostname)); + setHostnameRetCode = + shell.executeBashCommand("hostnamectl set-hostname " + config.hostname); + + // Add to /etc/hosts + var addHostRetCode = + shell.executeBashCommand( + String.format( + "sed -i \"s/127.0.1.1.*%s/127.0.1.1\\t%s/g\" /etc/hosts", + oldHostname, config.hostname)); + + shell.executeBashCommand("sudo service avahi-daemon restart"); + + var success = setHostnameRetCode == 0 && addHostRetCode == 0; + if (!success) { + logger.error( + "Setting hostname returned non-zero codes (hostname/hosts) " + + setHostnameRetCode + + "|" + + addHostRetCode + + "!"); + } else { + logger.info("Set hostname to " + config.hostname); + } + } catch (Exception e) { + logger.error("Failed to set hostname!", e); + } + } else { - // for now just bring down /up -- more testing needed on beelink et al. 
- shell.executeBashCommand( - "sh -c 'nmcli con down " - + config.getEscapedInterfaceName() - + "; nmcli con up " - + config.getEscapedInterfaceName() - + "'"); + logger.warn("Got empty hostname?"); + } + + if (config.connectionType == NetworkMode.DHCP) { + var shell = new ShellExec(); + try { + // set nmcli back to DHCP, and re-run dhclient -- this ought to grab a new IP address + shell.executeBashCommand( + config.setDHCPcommand.replace( + NetworkConfig.NM_IFACE_STRING, config.getEscapedInterfaceName())); + shell.executeBashCommand("dhclient " + config.getPhysicalInterfaceName(), false); + } catch (Exception e) { + logger.error("Exception while setting DHCP!"); + } + } else if (config.connectionType == NetworkMode.STATIC) { + var shell = new ShellExec(); + if (!config.staticIp.isEmpty()) { + try { + shell.executeBashCommand( + config + .setStaticCommand + .replace(NetworkConfig.NM_IFACE_STRING, config.getEscapedInterfaceName()) + .replace(NetworkConfig.NM_IP_STRING, config.staticIp)); + + if (Platform.isRaspberryPi()) { + // Pi's need to manually have their interface adjusted?? and the 5-second sleep is + // integral in my testing (Matt) + shell.executeBashCommand( + "sh -c 'nmcli con down " + + config.getEscapedInterfaceName() + + "; nmcli con up " + + config.getEscapedInterfaceName() + + "'"); + } else { + // for now just bring down /up -- more testing needed on beelink et al. 
+ shell.executeBashCommand( + "sh -c 'nmcli con down " + + config.getEscapedInterfaceName() + + "; nmcli con up " + + config.getEscapedInterfaceName() + + "'"); + } + } catch (Exception e) { + logger.error("Error while setting static IP!", e); + } + } else { + logger.warn("Got empty static IP?"); + } } - } catch (Exception e) { - logger.error("Error while setting static IP!", e); - } } else { - logger.warn("Got empty static IP?"); + logger.info("Not managing network on non-Linux platforms"); } - } - } else { - logger.info("Not managing network on non-Linux platforms"); } - } - public void reinitialize() { - initialize(ConfigManager.getInstance().getConfig().getNetworkConfig().shouldManage()); - } + public void reinitialize() { + initialize(ConfigManager.getInstance().getConfig().getNetworkConfig().shouldManage()); + } } diff --git a/photon-core/src/main/java/org/photonvision/common/networking/NetworkMode.java b/photon-core/src/main/java/org/photonvision/common/networking/NetworkMode.java index 0aa6ef14da..c491e86625 100644 --- a/photon-core/src/main/java/org/photonvision/common/networking/NetworkMode.java +++ b/photon-core/src/main/java/org/photonvision/common/networking/NetworkMode.java @@ -20,11 +20,11 @@ import com.fasterxml.jackson.annotation.JsonValue; public enum NetworkMode { - DHCP, - STATIC; + DHCP, + STATIC; - @JsonValue - public int toValue() { - return ordinal(); - } + @JsonValue + public int toValue() { + return ordinal(); + } } diff --git a/photon-core/src/main/java/org/photonvision/common/networking/NetworkUtils.java b/photon-core/src/main/java/org/photonvision/common/networking/NetworkUtils.java index 84467eea1a..cf84309e57 100644 --- a/photon-core/src/main/java/org/photonvision/common/networking/NetworkUtils.java +++ b/photon-core/src/main/java/org/photonvision/common/networking/NetworkUtils.java @@ -29,111 +29,111 @@ import org.photonvision.common.util.ShellExec; public class NetworkUtils { - private static final Logger logger = new 
Logger(NetworkUtils.class, LogGroup.General); + private static final Logger logger = new Logger(NetworkUtils.class, LogGroup.General); - public enum NMType { - NMTYPE_ETHERNET("ethernet"), - NMTYPE_WIFI("wifi"), - NMTYPE_UNKNOWN(""); + public enum NMType { + NMTYPE_ETHERNET("ethernet"), + NMTYPE_WIFI("wifi"), + NMTYPE_UNKNOWN(""); - NMType(String id) { - identifier = id; - } + NMType(String id) { + identifier = id; + } - private final String identifier; + private final String identifier; - public static NMType typeForString(String s) { - for (var t : NMType.values()) { - if (t.identifier.equals(s)) { - return t; + public static NMType typeForString(String s) { + for (var t : NMType.values()) { + if (t.identifier.equals(s)) { + return t; + } + } + return NMTYPE_UNKNOWN; } - } - return NMTYPE_UNKNOWN; } - } - public static class NMDeviceInfo { - public NMDeviceInfo(String c, String d, String type) { - connName = c; - devName = d; - nmType = NMType.typeForString(type); - } + public static class NMDeviceInfo { + public NMDeviceInfo(String c, String d, String type) { + connName = c; + devName = d; + nmType = NMType.typeForString(type); + } - public final String connName; // Human-readable name used by "nmcli con" - public final String devName; // underlying device, used by dhclient - public final NMType nmType; - - @Override - public String toString() { - return "NMDeviceInfo [connName=" - + connName - + ", devName=" - + devName - + ", nmType=" - + nmType - + "]"; + public final String connName; // Human-readable name used by "nmcli con" + public final String devName; // underlying device, used by dhclient + public final NMType nmType; + + @Override + public String toString() { + return "NMDeviceInfo [connName=" + + connName + + ", devName=" + + devName + + ", nmType=" + + nmType + + "]"; + } } - } - private static List allInterfaces = new ArrayList<>(); - private static long lastReadTimestamp = 0; + private static List allInterfaces = new ArrayList<>(); + private 
static long lastReadTimestamp = 0; + + public static List getAllInterfaces() { + long now = System.currentTimeMillis(); + if (now - lastReadTimestamp < 5000) return allInterfaces; + else lastReadTimestamp = now; - public static List getAllInterfaces() { - long now = System.currentTimeMillis(); - if (now - lastReadTimestamp < 5000) return allInterfaces; - else lastReadTimestamp = now; + var ret = new ArrayList(); - var ret = new ArrayList(); + if (!Platform.isLinux()) { + // Can only determine interface name on Linux, give up + return ret; + } - if (!Platform.isLinux()) { - // Can only determine interface name on Linux, give up - return ret; + try { + var shell = new ShellExec(true, false); + shell.executeBashCommand( + "nmcli -t -f GENERAL.CONNECTION,GENERAL.DEVICE,GENERAL.TYPE device show"); + String out = shell.getOutput(); + if (out == null) { + return new ArrayList<>(); + } + Pattern pattern = + Pattern.compile("GENERAL.CONNECTION:(.*)\nGENERAL.DEVICE:(.*)\nGENERAL.TYPE:(.*)"); + Matcher matcher = pattern.matcher(out); + while (matcher.find()) { + ret.add(new NMDeviceInfo(matcher.group(1), matcher.group(2), matcher.group(3))); + } + } catch (IOException e) { + logger.error("Could not get active NM ifaces!", e); + } + + logger.debug("Found network interfaces:\n" + ret); + + allInterfaces = ret; + return ret; } - try { - var shell = new ShellExec(true, false); - shell.executeBashCommand( - "nmcli -t -f GENERAL.CONNECTION,GENERAL.DEVICE,GENERAL.TYPE device show"); - String out = shell.getOutput(); - if (out == null) { - return new ArrayList<>(); - } - Pattern pattern = - Pattern.compile("GENERAL.CONNECTION:(.*)\nGENERAL.DEVICE:(.*)\nGENERAL.TYPE:(.*)"); - Matcher matcher = pattern.matcher(out); - while (matcher.find()) { - ret.add(new NMDeviceInfo(matcher.group(1), matcher.group(2), matcher.group(3))); - } - } catch (IOException e) { - logger.error("Could not get active NM ifaces!", e); + public static List getAllActiveInterfaces() { + // Seems like if an 
interface exists but isn't actually connected, the connection name will be + // an empty string. Check here and only return connections with non-empty names + return getAllInterfaces().stream() + .filter(it -> !it.connName.trim().isEmpty()) + .collect(Collectors.toList()); } - logger.debug("Found network interfaces:\n" + ret); - - allInterfaces = ret; - return ret; - } - - public static List getAllActiveInterfaces() { - // Seems like if an interface exists but isn't actually connected, the connection name will be - // an empty string. Check here and only return connections with non-empty names - return getAllInterfaces().stream() - .filter(it -> !it.connName.trim().isEmpty()) - .collect(Collectors.toList()); - } - - public static List getAllWiredInterfaces() { - return getAllActiveInterfaces().stream() - .filter(it -> it.nmType == NMType.NMTYPE_ETHERNET) - .collect(Collectors.toList()); - } - - public static NMDeviceInfo getNMinfoForConnName(String connName) { - for (NMDeviceInfo info : getAllActiveInterfaces()) { - if (info.connName.equals(connName)) { - return info; - } + public static List getAllWiredInterfaces() { + return getAllActiveInterfaces().stream() + .filter(it -> it.nmType == NMType.NMTYPE_ETHERNET) + .collect(Collectors.toList()); + } + + public static NMDeviceInfo getNMinfoForConnName(String connName) { + for (NMDeviceInfo info : getAllActiveInterfaces()) { + if (info.connName.equals(connName)) { + return info; + } + } + return null; } - return null; - } } diff --git a/photon-core/src/main/java/org/photonvision/common/networking/RoborioFinder.java b/photon-core/src/main/java/org/photonvision/common/networking/RoborioFinder.java index 7e068c5a7c..605480e296 100644 --- a/photon-core/src/main/java/org/photonvision/common/networking/RoborioFinder.java +++ b/photon-core/src/main/java/org/photonvision/common/networking/RoborioFinder.java @@ -31,88 +31,88 @@ import org.photonvision.common.logging.Logger; public class RoborioFinder { - private static 
RoborioFinder instance; - private static final Logger logger = new Logger(RoborioFinder.class, LogGroup.General); + private static RoborioFinder instance; + private static final Logger logger = new Logger(RoborioFinder.class, LogGroup.General); - public static RoborioFinder getInstance() { - if (instance == null) instance = new RoborioFinder(); - return instance; - } - - public void findRios() { - HashMap map = new HashMap<>(); - var subMap = new HashMap(); - // Separate from the above so we don't hold stuff up - System.setProperty("java.net.preferIPv4Stack", "true"); - subMap.put( - "deviceIps", - Arrays.stream(CameraServerJNI.getNetworkInterfaces()) - .filter(it -> !it.equals("0.0.0.0")) - .toArray()); - logger.info("Searching for rios"); - List possibleRioList = new ArrayList<>(); - for (var ip : CameraServerJNI.getNetworkInterfaces()) { - logger.info("Trying " + ip); - var possibleRioAddr = getPossibleRioAddress(ip); - if (possibleRioAddr != null) { - logger.info("Maybe found " + ip); - searchForHost(possibleRioList, possibleRioAddr); - } else { - logger.info("Didn't match RIO IP"); - } + public static RoborioFinder getInstance() { + if (instance == null) instance = new RoborioFinder(); + return instance; } - // String name = - // "roboRIO-" - // + - // ConfigManager.getInstance().getConfig().getNetworkConfig().teamNumber - // + "-FRC.local"; - // searchForHost(possibleRioList, name); - // name = - // "roboRIO-" - // + - // ConfigManager.getInstance().getConfig().getNetworkConfig().teamNumber - // + "-FRC.lan"; - // searchForHost(possibleRioList, name); - // name = - // "roboRIO-" - // + - // ConfigManager.getInstance().getConfig().getNetworkConfig().teamNumber - // + "-FRC.frc-field.local"; - // searchForHost(possibleRioList, name); - // subMap.put("possibleRios", possibleRioList.toArray()); + public void findRios() { + HashMap map = new HashMap<>(); + var subMap = new HashMap(); + // Separate from the above so we don't hold stuff up + 
System.setProperty("java.net.preferIPv4Stack", "true"); + subMap.put( + "deviceIps", + Arrays.stream(CameraServerJNI.getNetworkInterfaces()) + .filter(it -> !it.equals("0.0.0.0")) + .toArray()); + logger.info("Searching for rios"); + List possibleRioList = new ArrayList<>(); + for (var ip : CameraServerJNI.getNetworkInterfaces()) { + logger.info("Trying " + ip); + var possibleRioAddr = getPossibleRioAddress(ip); + if (possibleRioAddr != null) { + logger.info("Maybe found " + ip); + searchForHost(possibleRioList, possibleRioAddr); + } else { + logger.info("Didn't match RIO IP"); + } + } + + // String name = + // "roboRIO-" + // + + // ConfigManager.getInstance().getConfig().getNetworkConfig().teamNumber + // + "-FRC.local"; + // searchForHost(possibleRioList, name); + // name = + // "roboRIO-" + // + + // ConfigManager.getInstance().getConfig().getNetworkConfig().teamNumber + // + "-FRC.lan"; + // searchForHost(possibleRioList, name); + // name = + // "roboRIO-" + // + + // ConfigManager.getInstance().getConfig().getNetworkConfig().teamNumber + // + "-FRC.frc-field.local"; + // searchForHost(possibleRioList, name); + // subMap.put("possibleRios", possibleRioList.toArray()); - subMap.put("possibleRios", possibleRioList.toArray()); - map.put("networkInfo", subMap); - DataChangeService.getInstance().publishEvent(new OutgoingUIEvent<>("deviceIpInfo", map)); - } + subMap.put("possibleRios", possibleRioList.toArray()); + map.put("networkInfo", subMap); + DataChangeService.getInstance().publishEvent(new OutgoingUIEvent<>("deviceIpInfo", map)); + } - String getPossibleRioAddress(String ip) { - try { - InetAddress addr = InetAddress.getByName(ip); - var address = addr.getAddress(); - if (address[0] != (byte) (10 & 0xff)) return null; - address[3] = (byte) (2 & 0xff); - return InetAddress.getByAddress(address).getHostAddress(); - } catch (UnknownHostException e) { - e.printStackTrace(); + String getPossibleRioAddress(String ip) { + try { + InetAddress addr = 
InetAddress.getByName(ip); + var address = addr.getAddress(); + if (address[0] != (byte) (10 & 0xff)) return null; + address[3] = (byte) (2 & 0xff); + return InetAddress.getByAddress(address).getHostAddress(); + } catch (UnknownHostException e) { + e.printStackTrace(); + } + return null; } - return null; - } - void searchForHost(List list, String hostname) { - try { - logger.info("Looking up " + hostname); - InetAddress testAddr = InetAddress.getByName(hostname); - logger.info("Pinging " + hostname); - var canContact = testAddr.isReachable(500); - if (canContact) { - logger.info("Was able to connect to " + hostname); - if (!list.contains(hostname)) list.add(hostname); - } else { - logger.info("Unable to reach " + hostname); - } - } catch (IOException ignored) { + void searchForHost(List list, String hostname) { + try { + logger.info("Looking up " + hostname); + InetAddress testAddr = InetAddress.getByName(hostname); + logger.info("Pinging " + hostname); + var canContact = testAddr.isReachable(500); + if (canContact) { + logger.info("Was able to connect to " + hostname); + if (!list.contains(hostname)) list.add(hostname); + } else { + logger.info("Unable to reach " + hostname); + } + } catch (IOException ignored) { + } } - } } diff --git a/photon-core/src/main/java/org/photonvision/common/scripting/ScriptCommandType.java b/photon-core/src/main/java/org/photonvision/common/scripting/ScriptCommandType.java index 8c005e62ef..debc05c1b8 100644 --- a/photon-core/src/main/java/org/photonvision/common/scripting/ScriptCommandType.java +++ b/photon-core/src/main/java/org/photonvision/common/scripting/ScriptCommandType.java @@ -18,14 +18,14 @@ package org.photonvision.common.scripting; public enum ScriptCommandType { - kDefault(""), - kBashScript("bash"), - kPythonScript("python"), - kPython3Script("python3"); + kDefault(""), + kBashScript("bash"), + kPythonScript("python"), + kPython3Script("python3"); - public final String value; + public final String value; - 
ScriptCommandType(String value) { - this.value = value; - } + ScriptCommandType(String value) { + this.value = value; + } } diff --git a/photon-core/src/main/java/org/photonvision/common/scripting/ScriptConfig.java b/photon-core/src/main/java/org/photonvision/common/scripting/ScriptConfig.java index e83ac12143..684a1a882c 100644 --- a/photon-core/src/main/java/org/photonvision/common/scripting/ScriptConfig.java +++ b/photon-core/src/main/java/org/photonvision/common/scripting/ScriptConfig.java @@ -21,19 +21,19 @@ import com.fasterxml.jackson.annotation.JsonProperty; public class ScriptConfig { - public final ScriptEventType eventType; - public final String command; + public final ScriptEventType eventType; + public final String command; - public ScriptConfig(ScriptEventType eventType) { - this.eventType = eventType; - this.command = ""; - } + public ScriptConfig(ScriptEventType eventType) { + this.eventType = eventType; + this.command = ""; + } - @JsonCreator - public ScriptConfig( - @JsonProperty("eventType") ScriptEventType eventType, - @JsonProperty("command") String command) { - this.eventType = eventType; - this.command = command; - } + @JsonCreator + public ScriptConfig( + @JsonProperty("eventType") ScriptEventType eventType, + @JsonProperty("command") String command) { + this.eventType = eventType; + this.command = command; + } } diff --git a/photon-core/src/main/java/org/photonvision/common/scripting/ScriptEvent.java b/photon-core/src/main/java/org/photonvision/common/scripting/ScriptEvent.java index 0d998f1e9f..a60dd566a4 100644 --- a/photon-core/src/main/java/org/photonvision/common/scripting/ScriptEvent.java +++ b/photon-core/src/main/java/org/photonvision/common/scripting/ScriptEvent.java @@ -23,32 +23,32 @@ import org.photonvision.common.util.ShellExec; public class ScriptEvent { - private static final ShellExec executor = new ShellExec(true, true); + private static final ShellExec executor = new ShellExec(true, true); - public final ScriptConfig 
config; - private final Logger logger = new Logger(ScriptEvent.class, LogGroup.General); + public final ScriptConfig config; + private final Logger logger = new Logger(ScriptEvent.class, LogGroup.General); - public ScriptEvent(ScriptConfig config) { - this.config = config; - } - - public int run() throws IOException { - int retVal = executor.executeBashCommand(config.command); - - String output = executor.getOutput(); - String error = executor.getError(); + public ScriptEvent(ScriptConfig config) { + this.config = config; + } - if (!error.isEmpty()) { - logger.error("Error when running \"" + config.eventType.name() + "\" script: " + error); - } else if (!output.isEmpty()) { - logger.info( - String.format("Output from \"%s\" script: %s\n", config.eventType.name(), output)); + public int run() throws IOException { + int retVal = executor.executeBashCommand(config.command); + + String output = executor.getOutput(); + String error = executor.getError(); + + if (!error.isEmpty()) { + logger.error("Error when running \"" + config.eventType.name() + "\" script: " + error); + } else if (!output.isEmpty()) { + logger.info( + String.format("Output from \"%s\" script: %s\n", config.eventType.name(), output)); + } + logger.info( + String.format( + "Script for %s ran with command line: \"%s\", exit code: %d, output: %s, " + + "error: %s\n", + config.eventType.name(), config.command, retVal, output, error)); + return retVal; } - logger.info( - String.format( - "Script for %s ran with command line: \"%s\", exit code: %d, output: %s, " - + "error: %s\n", - config.eventType.name(), config.command, retVal, output, error)); - return retVal; - } } diff --git a/photon-core/src/main/java/org/photonvision/common/scripting/ScriptEventType.java b/photon-core/src/main/java/org/photonvision/common/scripting/ScriptEventType.java index 1bbcf2008b..3fe98bf447 100644 --- a/photon-core/src/main/java/org/photonvision/common/scripting/ScriptEventType.java +++ 
b/photon-core/src/main/java/org/photonvision/common/scripting/ScriptEventType.java @@ -18,21 +18,21 @@ package org.photonvision.common.scripting; public enum ScriptEventType { - kProgramInit("Program Init"), - kProgramExit("Program Exit"), - kNTConnected("NT Connected"), - kLEDOn("LED On"), - kLEDOff("LED Off"), - kEnterDriverMode("Enter Driver Mode"), - kExitDriverMode("Exit Driver Mode"), - kFoundTarget("Found Target"), - kFoundMultipleTarget("Found Multiple Target"), - kLostTarget("Lost Target"), - kPipelineLag("Pipeline Lag"); + kProgramInit("Program Init"), + kProgramExit("Program Exit"), + kNTConnected("NT Connected"), + kLEDOn("LED On"), + kLEDOff("LED Off"), + kEnterDriverMode("Enter Driver Mode"), + kExitDriverMode("Exit Driver Mode"), + kFoundTarget("Found Target"), + kFoundMultipleTarget("Found Multiple Target"), + kLostTarget("Lost Target"), + kPipelineLag("Pipeline Lag"); - public final String value; + public final String value; - ScriptEventType(String value) { - this.value = value; - } + ScriptEventType(String value) { + this.value = value; + } } diff --git a/photon-core/src/main/java/org/photonvision/common/scripting/ScriptManager.java b/photon-core/src/main/java/org/photonvision/common/scripting/ScriptManager.java index b9c8e04033..a47684cc5c 100644 --- a/photon-core/src/main/java/org/photonvision/common/scripting/ScriptManager.java +++ b/photon-core/src/main/java/org/photonvision/common/scripting/ScriptManager.java @@ -31,110 +31,110 @@ import org.photonvision.common.util.file.JacksonUtils; public class ScriptManager { - private static final Logger logger = new Logger(ScriptManager.class, LogGroup.General); + private static final Logger logger = new Logger(ScriptManager.class, LogGroup.General); - private ScriptManager() {} + private ScriptManager() {} - private static final List events = new ArrayList<>(); - private static final LinkedBlockingDeque queuedEvents = - new LinkedBlockingDeque<>(25); + private static final List events = new 
ArrayList<>(); + private static final LinkedBlockingDeque queuedEvents = + new LinkedBlockingDeque<>(25); - public static void initialize() { - ScriptConfigManager.initialize(); - if (ScriptConfigManager.fileExists()) { - for (ScriptConfig scriptConfig : ScriptConfigManager.loadConfig()) { - ScriptEvent scriptEvent = new ScriptEvent(scriptConfig); - events.add(scriptEvent); - } + public static void initialize() { + ScriptConfigManager.initialize(); + if (ScriptConfigManager.fileExists()) { + for (ScriptConfig scriptConfig : ScriptConfigManager.loadConfig()) { + ScriptEvent scriptEvent = new ScriptEvent(scriptConfig); + events.add(scriptEvent); + } - TimedTaskManager.getInstance().addTask("ScriptRunner", new ScriptRunner(), 10); + TimedTaskManager.getInstance().addTask("ScriptRunner", new ScriptRunner(), 10); - } else { - logger.error("Something went wrong initializing scripts! Events will not run."); - } - } - - private static class ScriptRunner implements Runnable { - @Override - public void run() { - try { - handleEvent(queuedEvents.takeFirst()); - } catch (InterruptedException e) { - logger.error("ScriptRunner queue interrupted!", e); - } + } else { + logger.error("Something went wrong initializing scripts! 
Events will not run."); + } } - private void handleEvent(ScriptEventType eventType) { - var toRun = - events.parallelStream() - .filter(e -> e.config.eventType == eventType) - .findFirst() - .orElse(null); - if (toRun != null) { - try { - toRun.run(); - } catch (IOException e) { - logger.error("Failed to run script for event \"" + eventType.name() + "\"", e); + private static class ScriptRunner implements Runnable { + @Override + public void run() { + try { + handleEvent(queuedEvents.takeFirst()); + } catch (InterruptedException e) { + logger.error("ScriptRunner queue interrupted!", e); + } + } + + private void handleEvent(ScriptEventType eventType) { + var toRun = + events.parallelStream() + .filter(e -> e.config.eventType == eventType) + .findFirst() + .orElse(null); + if (toRun != null) { + try { + toRun.run(); + } catch (IOException e) { + logger.error("Failed to run script for event \"" + eventType.name() + "\"", e); + } + } } - } } - } - protected static class ScriptConfigManager { - // protected static final Path scriptConfigPath = - // Paths.get(ConfigManager.SettingsPath.toString(), "scripts.json"); - static final Path scriptConfigPath = Paths.get(""); // TODO: Waiting on config + protected static class ScriptConfigManager { + // protected static final Path scriptConfigPath = + // Paths.get(ConfigManager.SettingsPath.toString(), "scripts.json"); + static final Path scriptConfigPath = Paths.get(""); // TODO: Waiting on config - private ScriptConfigManager() {} + private ScriptConfigManager() {} - static boolean fileExists() { - return Files.exists(scriptConfigPath); - } + static boolean fileExists() { + return Files.exists(scriptConfigPath); + } - public static void initialize() { - if (!fileExists()) { - List eventsConfig = new ArrayList<>(); - for (var eventType : ScriptEventType.values()) { - eventsConfig.add(new ScriptConfig(eventType)); + public static void initialize() { + if (!fileExists()) { + List eventsConfig = new ArrayList<>(); + for (var 
eventType : ScriptEventType.values()) { + eventsConfig.add(new ScriptConfig(eventType)); + } + + try { + JacksonUtils.serialize(scriptConfigPath, eventsConfig.toArray(new ScriptConfig[0]), true); + } catch (IOException e) { + logger.error("Failed to initialize!", e); + } + } } - try { - JacksonUtils.serialize(scriptConfigPath, eventsConfig.toArray(new ScriptConfig[0]), true); - } catch (IOException e) { - logger.error("Failed to initialize!", e); + static List loadConfig() { + try { + var raw = JacksonUtils.deserialize(scriptConfigPath, ScriptConfig[].class); + if (raw != null) { + return List.of(raw); + } + } catch (IOException e) { + logger.error("Failed to load scripting config!", e); + } + return new ArrayList<>(); } - } - } - static List loadConfig() { - try { - var raw = JacksonUtils.deserialize(scriptConfigPath, ScriptConfig[].class); - if (raw != null) { - return List.of(raw); + protected static void deleteConfig() { + try { + Files.delete(scriptConfigPath); + } catch (IOException e) { + // + } } - } catch (IOException e) { - logger.error("Failed to load scripting config!", e); - } - return new ArrayList<>(); } - protected static void deleteConfig() { - try { - Files.delete(scriptConfigPath); - } catch (IOException e) { - // - } - } - } - - public static void queueEvent(ScriptEventType eventType) { - if (Platform.isLinux()) { - try { - queuedEvents.putLast(eventType); - logger.info("Queued event: " + eventType.name()); - } catch (InterruptedException e) { - logger.error("Failed to add event to queue: " + eventType.name(), e); - } + public static void queueEvent(ScriptEventType eventType) { + if (Platform.isLinux()) { + try { + queuedEvents.putLast(eventType); + logger.info("Queued event: " + eventType.name()); + } catch (InterruptedException e) { + logger.error("Failed to add event to queue: " + eventType.name(), e); + } + } } - } } diff --git a/photon-core/src/main/java/org/photonvision/common/util/ColorHelper.java 
b/photon-core/src/main/java/org/photonvision/common/util/ColorHelper.java index f4b7b58627..1ce4c55f18 100644 --- a/photon-core/src/main/java/org/photonvision/common/util/ColorHelper.java +++ b/photon-core/src/main/java/org/photonvision/common/util/ColorHelper.java @@ -21,7 +21,7 @@ import org.opencv.core.Scalar; public class ColorHelper { - public static Scalar colorToScalar(Color color) { - return new Scalar(color.getBlue(), color.getGreen(), color.getRed()); - } + public static Scalar colorToScalar(Color color) { + return new Scalar(color.getBlue(), color.getGreen(), color.getRed()); + } } diff --git a/photon-core/src/main/java/org/photonvision/common/util/MemoryManager.java b/photon-core/src/main/java/org/photonvision/common/util/MemoryManager.java index 7bf426320b..d84536e0a4 100644 --- a/photon-core/src/main/java/org/photonvision/common/util/MemoryManager.java +++ b/photon-core/src/main/java/org/photonvision/common/util/MemoryManager.java @@ -18,67 +18,67 @@ package org.photonvision.common.util; public class MemoryManager { - private static final long MEGABYTE_FACTOR = 1024L * 1024L; + private static final long MEGABYTE_FACTOR = 1024L * 1024L; - private int collectionThreshold; - private long collectionPeriodMillis = -1; + private int collectionThreshold; + private long collectionPeriodMillis = -1; - private double lastUsedMb = 0; - private long lastCollectionMillis = 0; + private double lastUsedMb = 0; + private long lastCollectionMillis = 0; - public MemoryManager(int collectionThreshold) { - this.collectionThreshold = collectionThreshold; - } - - public MemoryManager(int collectionThreshold, long collectionPeriodMillis) { - this.collectionThreshold = collectionThreshold; - this.collectionPeriodMillis = collectionPeriodMillis; - } - - public void setCollectionThreshold(int collectionThreshold) { - this.collectionThreshold = collectionThreshold; - } + public MemoryManager(int collectionThreshold) { + this.collectionThreshold = collectionThreshold; + } - 
public void setCollectionPeriodMillis(long collectionPeriodMillis) { - this.collectionPeriodMillis = collectionPeriodMillis; - } + public MemoryManager(int collectionThreshold, long collectionPeriodMillis) { + this.collectionThreshold = collectionThreshold; + this.collectionPeriodMillis = collectionPeriodMillis; + } - private static long getUsedMemory() { - return Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory(); - } + public void setCollectionThreshold(int collectionThreshold) { + this.collectionThreshold = collectionThreshold; + } - private static double getUsedMemoryMB() { - return ((double) getUsedMemory() / MEGABYTE_FACTOR); - } + public void setCollectionPeriodMillis(long collectionPeriodMillis) { + this.collectionPeriodMillis = collectionPeriodMillis; + } - private void collect() { - System.gc(); - System.runFinalization(); - } + private static long getUsedMemory() { + return Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory(); + } - public void run() { - run(false); - } + private static double getUsedMemoryMB() { + return ((double) getUsedMemory() / MEGABYTE_FACTOR); + } - public void run(boolean print) { - var usedMem = getUsedMemoryMB(); + private void collect() { + System.gc(); + System.runFinalization(); + } - if (usedMem != lastUsedMb) { - lastUsedMb = usedMem; - if (print) System.out.printf("Memory usage: %.2fMB\n", usedMem); + public void run() { + run(false); } - boolean collectionThresholdPassed = usedMem >= collectionThreshold; - boolean collectionPeriodPassed = - collectionPeriodMillis != -1 - && (System.currentTimeMillis() - lastCollectionMillis >= collectionPeriodMillis); - - if (collectionThresholdPassed || collectionPeriodPassed) { - collect(); - lastCollectionMillis = System.currentTimeMillis(); - if (print) { - System.out.printf("Garbage collected at %.2fMB\n", usedMem); - } + public void run(boolean print) { + var usedMem = getUsedMemoryMB(); + + if (usedMem != lastUsedMb) { + lastUsedMb = usedMem; 
+ if (print) System.out.printf("Memory usage: %.2fMB\n", usedMem); + } + + boolean collectionThresholdPassed = usedMem >= collectionThreshold; + boolean collectionPeriodPassed = + collectionPeriodMillis != -1 + && (System.currentTimeMillis() - lastCollectionMillis >= collectionPeriodMillis); + + if (collectionThresholdPassed || collectionPeriodPassed) { + collect(); + lastCollectionMillis = System.currentTimeMillis(); + if (print) { + System.out.printf("Garbage collected at %.2fMB\n", usedMem); + } + } } - } } diff --git a/photon-core/src/main/java/org/photonvision/common/util/NativeLibHelper.java b/photon-core/src/main/java/org/photonvision/common/util/NativeLibHelper.java index fec7523936..a8d7dd5ef0 100644 --- a/photon-core/src/main/java/org/photonvision/common/util/NativeLibHelper.java +++ b/photon-core/src/main/java/org/photonvision/common/util/NativeLibHelper.java @@ -21,20 +21,20 @@ import java.nio.file.Paths; public class NativeLibHelper { - private static NativeLibHelper INSTANCE; + private static NativeLibHelper INSTANCE; - public static NativeLibHelper getInstance() { - if (INSTANCE == null) { - INSTANCE = new NativeLibHelper(); - } + public static NativeLibHelper getInstance() { + if (INSTANCE == null) { + INSTANCE = new NativeLibHelper(); + } - return INSTANCE; - } + return INSTANCE; + } - public final Path NativeLibPath; + public final Path NativeLibPath; - private NativeLibHelper() { - String home = System.getProperty("user.home"); - NativeLibPath = Paths.get(home, ".pvlibs", "nativecache"); - } + private NativeLibHelper() { + String home = System.getProperty("user.home"); + NativeLibPath = Paths.get(home, ".pvlibs", "nativecache"); + } } diff --git a/photon-core/src/main/java/org/photonvision/common/util/ReflectionUtils.java b/photon-core/src/main/java/org/photonvision/common/util/ReflectionUtils.java index 3289d45ae6..cda97c3f8f 100644 --- a/photon-core/src/main/java/org/photonvision/common/util/ReflectionUtils.java +++ 
b/photon-core/src/main/java/org/photonvision/common/util/ReflectionUtils.java @@ -18,41 +18,41 @@ package org.photonvision.common.util; public class ReflectionUtils { - public static StackTraceElement[] getFullStackTrace() { - return Thread.currentThread().getStackTrace(); - } + public static StackTraceElement[] getFullStackTrace() { + return Thread.currentThread().getStackTrace(); + } - public static StackTraceElement getNthCaller(int n) { - if (n < 0) n = 0; - return Thread.currentThread().getStackTrace()[n]; - } + public static StackTraceElement getNthCaller(int n) { + if (n < 0) n = 0; + return Thread.currentThread().getStackTrace()[n]; + } - public static String getCallerClassName() { - StackTraceElement[] stElements = Thread.currentThread().getStackTrace(); - for (int i = 1; i < stElements.length; i++) { - StackTraceElement ste = stElements[i]; - if (!ste.getClassName().equals(ReflectionUtils.class.getName()) - && ste.getClassName().indexOf("java.lang.Thread") != 0) { - return ste.getClassName(); - } + public static String getCallerClassName() { + StackTraceElement[] stElements = Thread.currentThread().getStackTrace(); + for (int i = 1; i < stElements.length; i++) { + StackTraceElement ste = stElements[i]; + if (!ste.getClassName().equals(ReflectionUtils.class.getName()) + && ste.getClassName().indexOf("java.lang.Thread") != 0) { + return ste.getClassName(); + } + } + return null; } - return null; - } - public static String getCallerCallerClassName() { - StackTraceElement[] stElements = Thread.currentThread().getStackTrace(); - String callerClassName = null; - for (int i = 1; i < stElements.length; i++) { - StackTraceElement ste = stElements[i]; - if (!ste.getClassName().equals(ReflectionUtils.class.getName()) - && ste.getClassName().indexOf("java.lang.Thread") != 0) { - if (callerClassName == null) { - callerClassName = ste.getClassName(); - } else if (!callerClassName.equals(ste.getClassName())) { - return ste.getClassName(); + public static String 
getCallerCallerClassName() { + StackTraceElement[] stElements = Thread.currentThread().getStackTrace(); + String callerClassName = null; + for (int i = 1; i < stElements.length; i++) { + StackTraceElement ste = stElements[i]; + if (!ste.getClassName().equals(ReflectionUtils.class.getName()) + && ste.getClassName().indexOf("java.lang.Thread") != 0) { + if (callerClassName == null) { + callerClassName = ste.getClassName(); + } else if (!callerClassName.equals(ste.getClassName())) { + return ste.getClassName(); + } + } } - } + return null; } - return null; - } } diff --git a/photon-core/src/main/java/org/photonvision/common/util/SerializationUtils.java b/photon-core/src/main/java/org/photonvision/common/util/SerializationUtils.java index 783b7b62e7..6fc3ea9c30 100644 --- a/photon-core/src/main/java/org/photonvision/common/util/SerializationUtils.java +++ b/photon-core/src/main/java/org/photonvision/common/util/SerializationUtils.java @@ -23,39 +23,39 @@ import org.photonvision.common.logging.Logger; public final class SerializationUtils { - private static final Logger logger = new Logger(SerializationUtils.class, LogGroup.General); + private static final Logger logger = new Logger(SerializationUtils.class, LogGroup.General); - public static HashMap objectToHashMap(Object src) { - var ret = new HashMap(); - for (var field : src.getClass().getFields()) { - try { - field.setAccessible(true); - if (!field - .getType() - .isEnum()) { // if the field is not an enum, get it based on the current pipeline - ret.put(field.getName(), field.get(src)); - } else { - var ordinal = (Enum) field.get(src); - ret.put(field.getName(), ordinal.ordinal()); + public static HashMap objectToHashMap(Object src) { + var ret = new HashMap(); + for (var field : src.getClass().getFields()) { + try { + field.setAccessible(true); + if (!field + .getType() + .isEnum()) { // if the field is not an enum, get it based on the current pipeline + ret.put(field.getName(), field.get(src)); + } else { + var 
ordinal = (Enum) field.get(src); + ret.put(field.getName(), ordinal.ordinal()); + } + } catch (IllegalArgumentException | IllegalAccessException e) { + logger.error("Could not serialize " + src.getClass().getSimpleName(), e); + } } - } catch (IllegalArgumentException | IllegalAccessException e) { - logger.error("Could not serialize " + src.getClass().getSimpleName(), e); - } + return ret; } - return ret; - } - public static HashMap transformToHashMap(Transform3d transform) { - var ret = new HashMap(); - ret.put("x", transform.getTranslation().getX()); - ret.put("y", transform.getTranslation().getY()); - ret.put("z", transform.getTranslation().getZ()); - ret.put("qw", transform.getRotation().getQuaternion().getW()); - ret.put("qx", transform.getRotation().getQuaternion().getX()); - ret.put("qy", transform.getRotation().getQuaternion().getY()); - ret.put("qz", transform.getRotation().getQuaternion().getZ()); + public static HashMap transformToHashMap(Transform3d transform) { + var ret = new HashMap(); + ret.put("x", transform.getTranslation().getX()); + ret.put("y", transform.getTranslation().getY()); + ret.put("z", transform.getTranslation().getZ()); + ret.put("qw", transform.getRotation().getQuaternion().getW()); + ret.put("qx", transform.getRotation().getQuaternion().getX()); + ret.put("qy", transform.getRotation().getQuaternion().getY()); + ret.put("qz", transform.getRotation().getQuaternion().getZ()); - ret.put("angle_z", transform.getRotation().getZ()); - return ret; - } + ret.put("angle_z", transform.getRotation().getZ()); + return ret; + } } diff --git a/photon-core/src/main/java/org/photonvision/common/util/ShellExec.java b/photon-core/src/main/java/org/photonvision/common/util/ShellExec.java index b6e83d18fb..c8433ec158 100644 --- a/photon-core/src/main/java/org/photonvision/common/util/ShellExec.java +++ b/photon-core/src/main/java/org/photonvision/common/util/ShellExec.java @@ -24,185 +24,185 @@ /** Execute external process and optionally read output 
buffer. */ @SuppressWarnings({"unused"}) public class ShellExec { - private static final Logger logger = new Logger(ShellExec.class, LogGroup.General); - - private int exitCode; - private final boolean readOutput; - private final boolean readError; - private StreamGobbler errorGobbler, outputGobbler; - - public ShellExec() { - this(false, false); - } - - public ShellExec(boolean readOutput, boolean readError) { - this.readOutput = readOutput; - this.readError = readError; - } - - public int executeBashCommand(String command) throws IOException { - return executeBashCommand(command, true); - } - - /** - * Execute a bash command. We can handle complex bash commands including multiple executions (; | - * and ||), quotes, expansions ($), escapes (\), e.g.: "cd /abc/def; mv ghi 'older ghi '$(whoami)" - * - * @param command Bash command to execute - * @return true if bash got started, but your command may have failed. - */ - public int executeBashCommand(String command, boolean wait) throws IOException { - logger.debug("Executing \"" + command + "\""); - - boolean success = false; - Runtime r = Runtime.getRuntime(); - // Use bash -c, so we can handle things like multi commands separated by ; and - // things like quotes, $, |, and \. My tests show that command comes as - // one argument to bash, so we do not need to quote it to make it one thing. - // Also, exec may object if it does not have an executable file as the first thing, - // so having bash here makes it happy provided bash is installed and in path. - String[] commands = {"bash", "-c", command}; - - Process process = r.exec(commands); - - // Consume streams, older jvm's had a memory leak if streams were not read, - // some other jvm+OS combinations may block unless streams are consumed. 
- int retcode = doProcess(wait, process); - logger.debug("Got exit code " + retcode); - return retcode; - } - - /** - * Execute a command in current folder, and wait for process to end - * - * @param command command ("c:/some/folder/script.bat" or "some/folder/script.sh") - * @param args 0..n command line arguments - * @return process exit code - */ - public int execute(String command, String... args) throws IOException { - return execute(command, null, true, args); - } - - /** - * Execute a command. - * - * @param command command ("c:/some/folder/script.bat" or "some/folder/script.sh") - * @param workdir working directory or NULL to use command folder - * @param wait wait for process to end - * @param args 0..n command line arguments - * @return process exit code - */ - public int execute(String command, String workdir, boolean wait, String... args) - throws IOException { - String[] cmdArr; - if (args != null && args.length > 0) { - cmdArr = new String[1 + args.length]; - cmdArr[0] = command; - System.arraycopy(args, 0, cmdArr, 1, args.length); - } else { - cmdArr = new String[] {command}; + private static final Logger logger = new Logger(ShellExec.class, LogGroup.General); + + private int exitCode; + private final boolean readOutput; + private final boolean readError; + private StreamGobbler errorGobbler, outputGobbler; + + public ShellExec() { + this(false, false); } - ProcessBuilder pb = new ProcessBuilder(cmdArr); - File workingDir = (workdir == null ? new File(command).getParentFile() : new File(workdir)); - pb.directory(workingDir); - - Process process = pb.start(); - - // Consume streams, older jvm's had a memory leak if streams were not read, - // some other jvm+OS combinations may block unless streams are consumed. 
- return doProcess(wait, process); - } - - private int doProcess(boolean wait, Process process) { - errorGobbler = new StreamGobbler(process.getErrorStream(), readError); - outputGobbler = new StreamGobbler(process.getInputStream(), readOutput); - errorGobbler.start(); - outputGobbler.start(); - - exitCode = 0; - if (wait) { - try { - process.waitFor(); - exitCode = process.exitValue(); - } catch (InterruptedException ignored) { - } + public ShellExec(boolean readOutput, boolean readError) { + this.readOutput = readOutput; + this.readError = readError; } - return exitCode; - } - - public int getExitCode() { - return exitCode; - } - - public boolean isOutputCompleted() { - return (outputGobbler != null && outputGobbler.isCompleted()); - } - - public boolean isErrorCompleted() { - return (errorGobbler != null && errorGobbler.isCompleted()); - } - - public String getOutput() { - return (outputGobbler != null ? outputGobbler.getOutput() : null); - } - - public String getError() { - return (errorGobbler != null ? errorGobbler.getOutput() : null); - } - - // ******************************************** - // ******************************************** - - /** - * StreamGobbler reads inputstream to "gobble" it. This is used by Executor class when running a - * commandline applications. Gobblers must read/purge INSTR and ERRSTR process streams. - * http://www.javaworld.com/javaworld/jw-12-2000/jw-1229-traps.html?page=4 - */ - @SuppressWarnings("WeakerAccess") - private static class StreamGobbler extends Thread { - private final InputStream is; - private final StringBuilder output; - private volatile boolean completed; // mark volatile to guarantee a thread safety - - public StreamGobbler(InputStream is, boolean readStream) { - this.is = is; - this.output = (readStream ? 
new StringBuilder(256) : null); + + public int executeBashCommand(String command) throws IOException { + return executeBashCommand(command, true); } - public void run() { - completed = false; - try { - String NL = System.getProperty("line.separator", "\r\n"); + /** + * Execute a bash command. We can handle complex bash commands including multiple executions (; | + * and ||), quotes, expansions ($), escapes (\), e.g.: "cd /abc/def; mv ghi 'older ghi '$(whoami)" + * + * @param command Bash command to execute + * @return true if bash got started, but your command may have failed. + */ + public int executeBashCommand(String command, boolean wait) throws IOException { + logger.debug("Executing \"" + command + "\""); + + boolean success = false; + Runtime r = Runtime.getRuntime(); + // Use bash -c, so we can handle things like multi commands separated by ; and + // things like quotes, $, |, and \. My tests show that command comes as + // one argument to bash, so we do not need to quote it to make it one thing. + // Also, exec may object if it does not have an executable file as the first thing, + // so having bash here makes it happy provided bash is installed and in path. + String[] commands = {"bash", "-c", command}; + + Process process = r.exec(commands); + + // Consume streams, older jvm's had a memory leak if streams were not read, + // some other jvm+OS combinations may block unless streams are consumed. 
+ int retcode = doProcess(wait, process); + logger.debug("Got exit code " + retcode); + return retcode; + } - InputStreamReader isr = new InputStreamReader(is); - BufferedReader br = new BufferedReader(isr); - String line; - while ((line = br.readLine()) != null) { - if (output != null) output.append(line).append(NL); - } - } catch (IOException ex) { - // ex.printStackTrace(); - } - completed = true; + /** + * Execute a command in current folder, and wait for process to end + * + * @param command command ("c:/some/folder/script.bat" or "some/folder/script.sh") + * @param args 0..n command line arguments + * @return process exit code + */ + public int execute(String command, String... args) throws IOException { + return execute(command, null, true, args); } /** - * Get inputstream buffer or null if stream was not consumed. + * Execute a command. * - * @return Output stream + * @param command command ("c:/some/folder/script.bat" or "some/folder/script.sh") + * @param workdir working directory or NULL to use command folder + * @param wait wait for process to end + * @param args 0..n command line arguments + * @return process exit code */ + public int execute(String command, String workdir, boolean wait, String... args) + throws IOException { + String[] cmdArr; + if (args != null && args.length > 0) { + cmdArr = new String[1 + args.length]; + cmdArr[0] = command; + System.arraycopy(args, 0, cmdArr, 1, args.length); + } else { + cmdArr = new String[] {command}; + } + + ProcessBuilder pb = new ProcessBuilder(cmdArr); + File workingDir = (workdir == null ? new File(command).getParentFile() : new File(workdir)); + pb.directory(workingDir); + + Process process = pb.start(); + + // Consume streams, older jvm's had a memory leak if streams were not read, + // some other jvm+OS combinations may block unless streams are consumed. 
+ return doProcess(wait, process); + } + + private int doProcess(boolean wait, Process process) { + errorGobbler = new StreamGobbler(process.getErrorStream(), readError); + outputGobbler = new StreamGobbler(process.getInputStream(), readOutput); + errorGobbler.start(); + outputGobbler.start(); + + exitCode = 0; + if (wait) { + try { + process.waitFor(); + exitCode = process.exitValue(); + } catch (InterruptedException ignored) { + } + } + return exitCode; + } + + public int getExitCode() { + return exitCode; + } + + public boolean isOutputCompleted() { + return (outputGobbler != null && outputGobbler.isCompleted()); + } + + public boolean isErrorCompleted() { + return (errorGobbler != null && errorGobbler.isCompleted()); + } + public String getOutput() { - return (output != null ? output.toString() : null); + return (outputGobbler != null ? outputGobbler.getOutput() : null); + } + + public String getError() { + return (errorGobbler != null ? errorGobbler.getOutput() : null); } + // ******************************************** + // ******************************************** + /** - * Is input stream completed. - * - * @return if input stream is completed + * StreamGobbler reads inputstream to "gobble" it. This is used by Executor class when running a + * commandline applications. Gobblers must read/purge INSTR and ERRSTR process streams. + * http://www.javaworld.com/javaworld/jw-12-2000/jw-1229-traps.html?page=4 */ - public boolean isCompleted() { - return completed; + @SuppressWarnings("WeakerAccess") + private static class StreamGobbler extends Thread { + private final InputStream is; + private final StringBuilder output; + private volatile boolean completed; // mark volatile to guarantee a thread safety + + public StreamGobbler(InputStream is, boolean readStream) { + this.is = is; + this.output = (readStream ? 
new StringBuilder(256) : null); + } + + public void run() { + completed = false; + try { + String NL = System.getProperty("line.separator", "\r\n"); + + InputStreamReader isr = new InputStreamReader(is); + BufferedReader br = new BufferedReader(isr); + String line; + while ((line = br.readLine()) != null) { + if (output != null) output.append(line).append(NL); + } + } catch (IOException ex) { + // ex.printStackTrace(); + } + completed = true; + } + + /** + * Get inputstream buffer or null if stream was not consumed. + * + * @return Output stream + */ + public String getOutput() { + return (output != null ? output.toString() : null); + } + + /** + * Is input stream completed. + * + * @return if input stream is completed + */ + public boolean isCompleted() { + return completed; + } } - } } diff --git a/photon-core/src/main/java/org/photonvision/common/util/TestUtils.java b/photon-core/src/main/java/org/photonvision/common/util/TestUtils.java index 4497350897..d109f97834 100644 --- a/photon-core/src/main/java/org/photonvision/common/util/TestUtils.java +++ b/photon-core/src/main/java/org/photonvision/common/util/TestUtils.java @@ -39,360 +39,360 @@ import org.photonvision.vision.calibration.CameraCalibrationCoefficients; public class TestUtils { - public static boolean loadLibraries() { - JNIWrapper.Helper.setExtractOnStaticLoad(false); - WPIUtilJNI.Helper.setExtractOnStaticLoad(false); - NetworkTablesJNI.Helper.setExtractOnStaticLoad(false); - WPINetJNI.Helper.setExtractOnStaticLoad(false); - CameraServerJNI.Helper.setExtractOnStaticLoad(false); - CameraServerCvJNI.Helper.setExtractOnStaticLoad(false); - AprilTagJNI.Helper.setExtractOnStaticLoad(false); - - try { - var loader = - new RuntimeLoader<>( - Core.NATIVE_LIBRARY_NAME, RuntimeLoader.getDefaultExtractionRoot(), Core.class); - loader.loadLibrary(); - - CombinedRuntimeLoader.loadLibraries( - TestUtils.class, - "wpiutiljni", - "ntcorejni", - "wpinetjni", - "wpiHaljni", - "cscorejni", - "cscorejnicvstatic", - 
"apriltagjni"); - return true; - } catch (IOException e) { - e.printStackTrace(); - return false; - } - } - - @SuppressWarnings("unused") - public enum WPI2019Image { - kCargoAngledDark48in(1.2192), - kCargoSideStraightDark36in(0.9144), - kCargoSideStraightDark60in(1.524), - kCargoSideStraightDark72in(1.8288), - kCargoSideStraightPanelDark36in(0.9144), - kCargoStraightDark19in(0.4826), - kCargoStraightDark24in(0.6096), - kCargoStraightDark48in(1.2192), - kCargoStraightDark72in(1.8288), - kCargoStraightDark72in_HighRes(1.8288), - kCargoStraightDark90in(2.286), - kRocketPanelAngleDark48in(1.2192), - kRocketPanelAngleDark60in(1.524); - - public static double FOV = 68.5; - - public final double distanceMeters; - public final Path path; - - Path getPath() { - var filename = this.toString().substring(1); - return Path.of("2019", "WPI", filename + ".jpg"); - } - - WPI2019Image(double distanceMeters) { - this.distanceMeters = distanceMeters; - this.path = getPath(); - } - } - - @SuppressWarnings("unused") - public enum WPI2020Image { - kBlueGoal_060in_Center(1.524), - kBlueGoal_084in_Center(2.1336), - kBlueGoal_084in_Center_720p(2.1336), - kBlueGoal_108in_Center(2.7432), - kBlueGoal_132in_Center(3.3528), - kBlueGoal_156in_Center(3.9624), - kBlueGoal_180in_Center(4.572), - kBlueGoal_156in_Left(3.9624), - kBlueGoal_224in_Left(5.6896), - kBlueGoal_228in_ProtectedZone(5.7912), - kBlueGoal_330in_ProtectedZone(8.382), - kBlueGoal_Far_ProtectedZone(10.668), // TODO: find a more accurate distance - kRedLoading_016in_Down(0.4064), - kRedLoading_030in_Down(0.762), - kRedLoading_048in_Down(1.2192), - kRedLoading_048in(1.2192), - kRedLoading_060in(1.524), - kRedLoading_084in(2.1336), - kRedLoading_108in(2.7432); + public static boolean loadLibraries() { + JNIWrapper.Helper.setExtractOnStaticLoad(false); + WPIUtilJNI.Helper.setExtractOnStaticLoad(false); + NetworkTablesJNI.Helper.setExtractOnStaticLoad(false); + WPINetJNI.Helper.setExtractOnStaticLoad(false); + 
CameraServerJNI.Helper.setExtractOnStaticLoad(false); + CameraServerCvJNI.Helper.setExtractOnStaticLoad(false); + AprilTagJNI.Helper.setExtractOnStaticLoad(false); + + try { + var loader = + new RuntimeLoader<>( + Core.NATIVE_LIBRARY_NAME, RuntimeLoader.getDefaultExtractionRoot(), Core.class); + loader.loadLibrary(); + + CombinedRuntimeLoader.loadLibraries( + TestUtils.class, + "wpiutiljni", + "ntcorejni", + "wpinetjni", + "wpiHaljni", + "cscorejni", + "cscorejnicvstatic", + "apriltagjni"); + return true; + } catch (IOException e) { + e.printStackTrace(); + return false; + } + } + + @SuppressWarnings("unused") + public enum WPI2019Image { + kCargoAngledDark48in(1.2192), + kCargoSideStraightDark36in(0.9144), + kCargoSideStraightDark60in(1.524), + kCargoSideStraightDark72in(1.8288), + kCargoSideStraightPanelDark36in(0.9144), + kCargoStraightDark19in(0.4826), + kCargoStraightDark24in(0.6096), + kCargoStraightDark48in(1.2192), + kCargoStraightDark72in(1.8288), + kCargoStraightDark72in_HighRes(1.8288), + kCargoStraightDark90in(2.286), + kRocketPanelAngleDark48in(1.2192), + kRocketPanelAngleDark60in(1.524); + + public static double FOV = 68.5; + + public final double distanceMeters; + public final Path path; + + Path getPath() { + var filename = this.toString().substring(1); + return Path.of("2019", "WPI", filename + ".jpg"); + } + + WPI2019Image(double distanceMeters) { + this.distanceMeters = distanceMeters; + this.path = getPath(); + } + } + + @SuppressWarnings("unused") + public enum WPI2020Image { + kBlueGoal_060in_Center(1.524), + kBlueGoal_084in_Center(2.1336), + kBlueGoal_084in_Center_720p(2.1336), + kBlueGoal_108in_Center(2.7432), + kBlueGoal_132in_Center(3.3528), + kBlueGoal_156in_Center(3.9624), + kBlueGoal_180in_Center(4.572), + kBlueGoal_156in_Left(3.9624), + kBlueGoal_224in_Left(5.6896), + kBlueGoal_228in_ProtectedZone(5.7912), + kBlueGoal_330in_ProtectedZone(8.382), + kBlueGoal_Far_ProtectedZone(10.668), // TODO: find a more accurate distance + 
kRedLoading_016in_Down(0.4064), + kRedLoading_030in_Down(0.762), + kRedLoading_048in_Down(1.2192), + kRedLoading_048in(1.2192), + kRedLoading_060in(1.524), + kRedLoading_084in(2.1336), + kRedLoading_108in(2.7432); + + public static double FOV = 68.5; + + public final double distanceMeters; + public final Path path; + + Path getPath() { + var filename = this.toString().substring(1).replace('_', '-'); + return Path.of("2020", "WPI", filename + ".jpg"); + } + + WPI2020Image(double distanceMeters) { + this.distanceMeters = distanceMeters; + this.path = getPath(); + } + } - public static double FOV = 68.5; + public enum WPI2023Apriltags { + k162_36_Angle, + k162_36_Straight, + k383_60_Angle2; - public final double distanceMeters; - public final Path path; + public static double FOV = 68.5; - Path getPath() { - var filename = this.toString().substring(1).replace('_', '-'); - return Path.of("2020", "WPI", filename + ".jpg"); + public final Translation2d approxPose; + public final Path path; + + Path getPath() { + var filename = this.toString().substring(1); + return Path.of("2023", "AprilTags", filename + ".png"); + } + + Translation2d getPose() { + var names = this.toString().substring(1).split("_"); + var x = Units.inchesToMeters(Integer.parseInt(names[0])); + var y = Units.inchesToMeters(Integer.parseInt(names[1])); + return new Translation2d(x, y); + } + + WPI2023Apriltags() { + this.approxPose = getPose(); + this.path = getPath(); + } } - WPI2020Image(double distanceMeters) { - this.distanceMeters = distanceMeters; - this.path = getPath(); + public enum WPI2022Image { + kTerminal12ft6in(Units.feetToMeters(12.5)), + kTerminal22ft6in(Units.feetToMeters(22.5)); + + public static double FOV = 68.5; + + public final double distanceMeters; + public final Path path; + + Path getPath() { + var filename = this.toString().substring(1).replace('_', '-'); + return Path.of("2022", "WPI", filename + ".png"); + } + + WPI2022Image(double distanceMeters) { + this.distanceMeters = 
distanceMeters; + this.path = getPath(); + } } - } - public enum WPI2023Apriltags { - k162_36_Angle, - k162_36_Straight, - k383_60_Angle2; + public enum PolygonTestImages { + kPolygons; - public static double FOV = 68.5; + public final Path path; - public final Translation2d approxPose; - public final Path path; + Path getPath() { + var filename = this.toString().substring(1).toLowerCase(); + return Path.of("polygons", filename + ".png"); + } - Path getPath() { - var filename = this.toString().substring(1); - return Path.of("2023", "AprilTags", filename + ".png"); + PolygonTestImages() { + this.path = getPath(); + } } - Translation2d getPose() { - var names = this.toString().substring(1).split("_"); - var x = Units.inchesToMeters(Integer.parseInt(names[0])); - var y = Units.inchesToMeters(Integer.parseInt(names[1])); - return new Translation2d(x, y); + public enum PowercellTestImages { + kPowercell_test_1, + kPowercell_test_2, + kPowercell_test_3, + kPowercell_test_4, + kPowercell_test_5, + kPowercell_test_6; + + public final Path path; + + Path getPath() { + var filename = this.toString().substring(1).toLowerCase(); + return Path.of(filename + ".png"); + } + + PowercellTestImages() { + this.path = getPath(); + } } - WPI2023Apriltags() { - this.approxPose = getPose(); - this.path = getPath(); + public enum ApriltagTestImages { + kRobots, + kTag1_640_480, + kTag1_16h5_1280, + kTag_corner_1280; + + public final Path path; + + Path getPath() { + // Strip leading k + var filename = this.toString().substring(1).toLowerCase(); + var extension = ".jpg"; + if (filename.equals("tag1_16h5_1280")) extension = ".png"; + return Path.of("apriltag", filename + extension); + } + + ApriltagTestImages() { + this.path = getPath(); + } } - } - public enum WPI2022Image { - kTerminal12ft6in(Units.feetToMeters(12.5)), - kTerminal22ft6in(Units.feetToMeters(22.5)); + public static Path getResourcesFolderPath(boolean testMode) { + System.out.println("CWD: " + Path.of("").toAbsolutePath()); 
+ + // VSCode likes to make this path relative to the wrong root directory, so a fun hack to tell + // if it's wrong + Path ret = Path.of("test-resources").toAbsolutePath(); + if (Path.of("test-resources") + .toAbsolutePath() + .toString() + .replace("/", "") + .replace("\\", "") + .toLowerCase() + .matches(".*photon-[a-z]*test-resources")) { + ret = Path.of("../test-resources").toAbsolutePath(); + } + return ret; + } - public static double FOV = 68.5; + public static Path getTestMode2019ImagePath() { + return getResourcesFolderPath(true) + .resolve("testimages") + .resolve(WPI2019Image.kRocketPanelAngleDark60in.path); + } - public final double distanceMeters; - public final Path path; + public static Path getTestMode2020ImagePath() { + return getResourcesFolderPath(true) + .resolve("testimages") + .resolve(WPI2020Image.kBlueGoal_156in_Left.path); + } - Path getPath() { - var filename = this.toString().substring(1).replace('_', '-'); - return Path.of("2022", "WPI", filename + ".png"); + public static Path getTestMode2022ImagePath() { + return getResourcesFolderPath(true) + .resolve("testimages") + .resolve(WPI2022Image.kTerminal22ft6in.path); } - WPI2022Image(double distanceMeters) { - this.distanceMeters = distanceMeters; - this.path = getPath(); + public static Path getTestModeApriltagPath() { + return getResourcesFolderPath(true) + .resolve("testimages") + .resolve(ApriltagTestImages.kRobots.path); } - } - public enum PolygonTestImages { - kPolygons; + public static Path getTestImagesPath(boolean testMode) { + return getResourcesFolderPath(testMode).resolve("testimages"); + } - public final Path path; + public static Path getCalibrationPath(boolean testMode) { + return getResourcesFolderPath(testMode).resolve("calibration"); + } - Path getPath() { - var filename = this.toString().substring(1).toLowerCase(); - return Path.of("polygons", filename + ".png"); + public static Path getPowercellPath(boolean testMode) { + return 
getTestImagesPath(testMode).resolve("polygons").resolve("powercells"); } - PolygonTestImages() { - this.path = getPath(); + public static Path getWPIImagePath(WPI2020Image image, boolean testMode) { + return getTestImagesPath(testMode).resolve(image.path); } - } - public enum PowercellTestImages { - kPowercell_test_1, - kPowercell_test_2, - kPowercell_test_3, - kPowercell_test_4, - kPowercell_test_5, - kPowercell_test_6; + public static Path getWPIImagePath(WPI2019Image image, boolean testMode) { + return getTestImagesPath(testMode).resolve(image.path); + } - public final Path path; + public static Path getPolygonImagePath(PolygonTestImages image, boolean testMode) { + return getTestImagesPath(testMode).resolve(image.path); + } - Path getPath() { - var filename = this.toString().substring(1).toLowerCase(); - return Path.of(filename + ".png"); + public static Path getApriltagImagePath(ApriltagTestImages image, boolean testMode) { + return getTestImagesPath(testMode).resolve(image.path); } - PowercellTestImages() { - this.path = getPath(); + public static Path getPowercellImagePath(PowercellTestImages image, boolean testMode) { + return getPowercellPath(testMode).resolve(image.path); } - } - public enum ApriltagTestImages { - kRobots, - kTag1_640_480, - kTag1_16h5_1280, - kTag_corner_1280; + public static Path getDotBoardImagesPath() { + return getResourcesFolderPath(false).resolve("calibrationBoardImages"); + } - public final Path path; + public static Path getSquaresBoardImagesPath() { + return getResourcesFolderPath(false).resolve("calibrationSquaresImg"); + } - Path getPath() { - // Strip leading k - var filename = this.toString().substring(1).toLowerCase(); - var extension = ".jpg"; - if (filename.equals("tag1_16h5_1280")) extension = ".png"; - return Path.of("apriltag", filename + extension); + public static File getHardwareConfigJson() { + return getResourcesFolderPath(false) + .resolve("hardware") + .resolve("HardwareConfig.json") + .toFile(); } - 
ApriltagTestImages() { - this.path = getPath(); + private static final String LIFECAM_240P_CAL_FILE = "lifecam240p.json"; + private static final String LIFECAM_480P_CAL_FILE = "lifecam480p.json"; + public static final String LIFECAM_1280P_CAL_FILE = "lifecam_1280.json"; + public static final String LIMELIGHT_480P_CAL_FILE = "limelight_1280_720.json"; + + public static CameraCalibrationCoefficients getCoeffs(String filename, boolean testMode) { + try { + return new ObjectMapper() + .readValue( + (Path.of(getCalibrationPath(testMode).toString(), filename).toFile()), + CameraCalibrationCoefficients.class); + } catch (IOException e) { + e.printStackTrace(); + return null; + } } - } - public static Path getResourcesFolderPath(boolean testMode) { - System.out.println("CWD: " + Path.of("").toAbsolutePath()); + public static CameraCalibrationCoefficients get2019LifeCamCoeffs(boolean testMode) { + return getCoeffs(LIFECAM_240P_CAL_FILE, testMode); + } - // VSCode likes to make this path relative to the wrong root directory, so a fun hack to tell - // if it's wrong - Path ret = Path.of("test-resources").toAbsolutePath(); - if (Path.of("test-resources") - .toAbsolutePath() - .toString() - .replace("/", "") - .replace("\\", "") - .toLowerCase() - .matches(".*photon-[a-z]*test-resources")) { - ret = Path.of("../test-resources").toAbsolutePath(); + public static CameraCalibrationCoefficients get2020LifeCamCoeffs(boolean testMode) { + return getCoeffs(LIFECAM_480P_CAL_FILE, testMode); } - return ret; - } - public static Path getTestMode2019ImagePath() { - return getResourcesFolderPath(true) - .resolve("testimages") - .resolve(WPI2019Image.kRocketPanelAngleDark60in.path); - } + public static CameraCalibrationCoefficients getLaptop() { + return getCoeffs("laptop.json", true); + } - public static Path getTestMode2020ImagePath() { - return getResourcesFolderPath(true) - .resolve("testimages") - .resolve(WPI2020Image.kBlueGoal_156in_Left.path); - } + private static final int 
DefaultTimeoutMillis = 5000; - public static Path getTestMode2022ImagePath() { - return getResourcesFolderPath(true) - .resolve("testimages") - .resolve(WPI2022Image.kTerminal22ft6in.path); - } + public static void showImage(Mat frame, String title, int timeoutMs) { + if (frame.empty()) return; + try { + HighGui.imshow(title, frame); + HighGui.waitKey(timeoutMs); + HighGui.destroyAllWindows(); + } catch (HeadlessException ignored) { + } + } - public static Path getTestModeApriltagPath() { - return getResourcesFolderPath(true) - .resolve("testimages") - .resolve(ApriltagTestImages.kRobots.path); - } + public static void showImage(Mat frame, int timeoutMs) { + showImage(frame, "", timeoutMs); + } + + public static void showImage(Mat frame, String title) { + showImage(frame, title, DefaultTimeoutMillis); + } + + public static void showImage(Mat frame) { + showImage(frame, DefaultTimeoutMillis); + } + + public static Path getTestMode2023ImagePath() { + return getResourcesFolderPath(true) + .resolve("testimages") + .resolve(WPI2022Image.kTerminal22ft6in.path); + } - public static Path getTestImagesPath(boolean testMode) { - return getResourcesFolderPath(testMode).resolve("testimages"); - } - - public static Path getCalibrationPath(boolean testMode) { - return getResourcesFolderPath(testMode).resolve("calibration"); - } - - public static Path getPowercellPath(boolean testMode) { - return getTestImagesPath(testMode).resolve("polygons").resolve("powercells"); - } - - public static Path getWPIImagePath(WPI2020Image image, boolean testMode) { - return getTestImagesPath(testMode).resolve(image.path); - } - - public static Path getWPIImagePath(WPI2019Image image, boolean testMode) { - return getTestImagesPath(testMode).resolve(image.path); - } - - public static Path getPolygonImagePath(PolygonTestImages image, boolean testMode) { - return getTestImagesPath(testMode).resolve(image.path); - } - - public static Path getApriltagImagePath(ApriltagTestImages image, boolean testMode) 
{ - return getTestImagesPath(testMode).resolve(image.path); - } - - public static Path getPowercellImagePath(PowercellTestImages image, boolean testMode) { - return getPowercellPath(testMode).resolve(image.path); - } - - public static Path getDotBoardImagesPath() { - return getResourcesFolderPath(false).resolve("calibrationBoardImages"); - } - - public static Path getSquaresBoardImagesPath() { - return getResourcesFolderPath(false).resolve("calibrationSquaresImg"); - } - - public static File getHardwareConfigJson() { - return getResourcesFolderPath(false) - .resolve("hardware") - .resolve("HardwareConfig.json") - .toFile(); - } - - private static final String LIFECAM_240P_CAL_FILE = "lifecam240p.json"; - private static final String LIFECAM_480P_CAL_FILE = "lifecam480p.json"; - public static final String LIFECAM_1280P_CAL_FILE = "lifecam_1280.json"; - public static final String LIMELIGHT_480P_CAL_FILE = "limelight_1280_720.json"; - - public static CameraCalibrationCoefficients getCoeffs(String filename, boolean testMode) { - try { - return new ObjectMapper() - .readValue( - (Path.of(getCalibrationPath(testMode).toString(), filename).toFile()), - CameraCalibrationCoefficients.class); - } catch (IOException e) { - e.printStackTrace(); - return null; - } - } - - public static CameraCalibrationCoefficients get2019LifeCamCoeffs(boolean testMode) { - return getCoeffs(LIFECAM_240P_CAL_FILE, testMode); - } - - public static CameraCalibrationCoefficients get2020LifeCamCoeffs(boolean testMode) { - return getCoeffs(LIFECAM_480P_CAL_FILE, testMode); - } - - public static CameraCalibrationCoefficients getLaptop() { - return getCoeffs("laptop.json", true); - } - - private static final int DefaultTimeoutMillis = 5000; - - public static void showImage(Mat frame, String title, int timeoutMs) { - if (frame.empty()) return; - try { - HighGui.imshow(title, frame); - HighGui.waitKey(timeoutMs); - HighGui.destroyAllWindows(); - } catch (HeadlessException ignored) { - } - } - - public 
static void showImage(Mat frame, int timeoutMs) { - showImage(frame, "", timeoutMs); - } - - public static void showImage(Mat frame, String title) { - showImage(frame, title, DefaultTimeoutMillis); - } - - public static void showImage(Mat frame) { - showImage(frame, DefaultTimeoutMillis); - } - - public static Path getTestMode2023ImagePath() { - return getResourcesFolderPath(true) - .resolve("testimages") - .resolve(WPI2022Image.kTerminal22ft6in.path); - } - - public static CameraCalibrationCoefficients get2023LifeCamCoeffs(boolean testMode) { - return getCoeffs(LIFECAM_1280P_CAL_FILE, testMode); - } + public static CameraCalibrationCoefficients get2023LifeCamCoeffs(boolean testMode) { + return getCoeffs(LIFECAM_1280P_CAL_FILE, testMode); + } } diff --git a/photon-core/src/main/java/org/photonvision/common/util/TimedTaskManager.java b/photon-core/src/main/java/org/photonvision/common/util/TimedTaskManager.java index e17d3c8b3d..7107faae32 100644 --- a/photon-core/src/main/java/org/photonvision/common/util/TimedTaskManager.java +++ b/photon-core/src/main/java/org/photonvision/common/util/TimedTaskManager.java @@ -23,60 +23,60 @@ import org.photonvision.common.logging.Logger; public class TimedTaskManager { - private static final Logger logger = new Logger(TimedTaskManager.class, LogGroup.General); + private static final Logger logger = new Logger(TimedTaskManager.class, LogGroup.General); - private static class Singleton { - public static final TimedTaskManager INSTANCE = new TimedTaskManager(); - } + private static class Singleton { + public static final TimedTaskManager INSTANCE = new TimedTaskManager(); + } - public static TimedTaskManager getInstance() { - return Singleton.INSTANCE; - } + public static TimedTaskManager getInstance() { + return Singleton.INSTANCE; + } - private static class CaughtThreadFactory implements ThreadFactory { - private static final ThreadFactory defaultThreadFactory = Executors.defaultThreadFactory(); + private static class 
CaughtThreadFactory implements ThreadFactory { + private static final ThreadFactory defaultThreadFactory = Executors.defaultThreadFactory(); - @Override - public Thread newThread(@NotNull Runnable r) { - Thread thread = defaultThreadFactory.newThread(r); - thread.setUncaughtExceptionHandler( - (t, e) -> logger.error("TimedTask threw uncaught exception!", e)); - return thread; + @Override + public Thread newThread(@NotNull Runnable r) { + Thread thread = defaultThreadFactory.newThread(r); + thread.setUncaughtExceptionHandler( + (t, e) -> logger.error("TimedTask threw uncaught exception!", e)); + return thread; + } } - } - private final ScheduledExecutorService timedTaskExecutorPool = - new ScheduledThreadPoolExecutor(2, new CaughtThreadFactory()); - private final ConcurrentHashMap> activeTasks = new ConcurrentHashMap<>(); + private final ScheduledExecutorService timedTaskExecutorPool = + new ScheduledThreadPoolExecutor(2, new CaughtThreadFactory()); + private final ConcurrentHashMap> activeTasks = new ConcurrentHashMap<>(); - public void addTask(String identifier, Runnable runnable, long millisInterval) { - if (!activeTasks.containsKey(identifier)) { - var future = - timedTaskExecutorPool.scheduleAtFixedRate( - runnable, 0, millisInterval, TimeUnit.MILLISECONDS); - activeTasks.put(identifier, future); + public void addTask(String identifier, Runnable runnable, long millisInterval) { + if (!activeTasks.containsKey(identifier)) { + var future = + timedTaskExecutorPool.scheduleAtFixedRate( + runnable, 0, millisInterval, TimeUnit.MILLISECONDS); + activeTasks.put(identifier, future); + } } - } - public void addTask( - String identifier, Runnable runnable, long millisStartDelay, long millisInterval) { - if (!activeTasks.containsKey(identifier)) { - var future = - timedTaskExecutorPool.scheduleAtFixedRate( - runnable, millisStartDelay, millisInterval, TimeUnit.MILLISECONDS); - activeTasks.put(identifier, future); + public void addTask( + String identifier, Runnable 
runnable, long millisStartDelay, long millisInterval) { + if (!activeTasks.containsKey(identifier)) { + var future = + timedTaskExecutorPool.scheduleAtFixedRate( + runnable, millisStartDelay, millisInterval, TimeUnit.MILLISECONDS); + activeTasks.put(identifier, future); + } } - } - public void addOneShotTask(Runnable runnable, long millisStartDelay) { - timedTaskExecutorPool.schedule(runnable, millisStartDelay, TimeUnit.MILLISECONDS); - } + public void addOneShotTask(Runnable runnable, long millisStartDelay) { + timedTaskExecutorPool.schedule(runnable, millisStartDelay, TimeUnit.MILLISECONDS); + } - public void cancelTask(String identifier) { - var future = activeTasks.getOrDefault(identifier, null); - if (future != null) { - future.cancel(true); - activeTasks.remove(identifier); + public void cancelTask(String identifier) { + var future = activeTasks.getOrDefault(identifier, null); + if (future != null) { + future.cancel(true); + activeTasks.remove(identifier); + } } - } } diff --git a/photon-core/src/main/java/org/photonvision/common/util/file/FileUtils.java b/photon-core/src/main/java/org/photonvision/common/util/file/FileUtils.java index 1da1b3b054..9d4cce6569 100644 --- a/photon-core/src/main/java/org/photonvision/common/util/file/FileUtils.java +++ b/photon-core/src/main/java/org/photonvision/common/util/file/FileUtils.java @@ -34,109 +34,109 @@ import org.photonvision.common.logging.Logger; public class FileUtils { - private FileUtils() {} + private FileUtils() {} - private static final Logger logger = new Logger(FileUtils.class, LogGroup.General); - private static final Set allReadWriteExecutePerms = - new HashSet<>(Arrays.asList(PosixFilePermission.values())); + private static final Logger logger = new Logger(FileUtils.class, LogGroup.General); + private static final Set allReadWriteExecutePerms = + new HashSet<>(Arrays.asList(PosixFilePermission.values())); - public static void deleteDirectory(Path path) { - try { - var files = Files.walk(path); + public 
static void deleteDirectory(Path path) { + try { + var files = Files.walk(path); - // delete directory including files and sub-folders - files - .sorted(Comparator.reverseOrder()) - .map(Path::toFile) - // .filter(File::isFile) // we want to delete directories and sub-dirs, too - .forEach((var file) -> deleteFile(file.toPath())); + // delete directory including files and sub-folders + files + .sorted(Comparator.reverseOrder()) + .map(Path::toFile) + // .filter(File::isFile) // we want to delete directories and sub-dirs, too + .forEach((var file) -> deleteFile(file.toPath())); - // close the stream - files.close(); - } catch (IOException e) { - logger.error("Exception deleting files in " + path + "!", e); + // close the stream + files.close(); + } catch (IOException e) { + logger.error("Exception deleting files in " + path + "!", e); + } } - } - /** - * Delete the file at the path. - * - * @param path file path to delete. - * @return whether the operation was successful. - */ - public static boolean deleteFile(Path path) { - try { - Files.delete(path); - return true; - } catch (FileNotFoundException | NoSuchFileException fe) { - logger.warn("Tried to delete file \"" + path + "\" but it did not exist"); - return false; - } catch (IOException e) { - logger.error("Exception deleting file \"" + path + "\"!", e); - return false; + /** + * Delete the file at the path. + * + * @param path file path to delete. + * @return whether the operation was successful. + */ + public static boolean deleteFile(Path path) { + try { + Files.delete(path); + return true; + } catch (FileNotFoundException | NoSuchFileException fe) { + logger.warn("Tried to delete file \"" + path + "\" but it did not exist"); + return false; + } catch (IOException e) { + logger.error("Exception deleting file \"" + path + "\"!", e); + return false; + } } - } - /** - * Copy a file from a source to a new destination. - * - * @param src the file path to copy. - * @param dst the file path to replace. 
- * @return whether the operation was successful. - */ - public static boolean copyFile(Path src, Path dst) { - try { - Files.copy(src, dst); - return true; - } catch (IOException e) { - logger.error("Exception copying file " + src + " to " + dst + "!", e); - return false; + /** + * Copy a file from a source to a new destination. + * + * @param src the file path to copy. + * @param dst the file path to replace. + * @return whether the operation was successful. + */ + public static boolean copyFile(Path src, Path dst) { + try { + Files.copy(src, dst); + return true; + } catch (IOException e) { + logger.error("Exception copying file " + src + " to " + dst + "!", e); + return false; + } } - } - /** - * Replace the destination file with a new source. - * - * @param src the file path to replace with. - * @param dst the file path to replace. - * @return whether the operation was successful. - */ - public static boolean replaceFile(Path src, Path dst) { - boolean fileDeleted = deleteFile(dst); - boolean fileCopied = copyFile(src, dst); - return fileDeleted && fileCopied; - } + /** + * Replace the destination file with a new source. + * + * @param src the file path to replace with. + * @param dst the file path to replace. + * @return whether the operation was successful. 
+ */ + public static boolean replaceFile(Path src, Path dst) { + boolean fileDeleted = deleteFile(dst); + boolean fileCopied = copyFile(src, dst); + return fileDeleted && fileCopied; + } - public static void setFilePerms(Path path) throws IOException { - if (Platform.isLinux()) { - File thisFile = path.toFile(); - Set perms = - Files.readAttributes(path, PosixFileAttributes.class).permissions(); - if (!perms.equals(allReadWriteExecutePerms)) { - logger.info("Setting perms on" + path); - Files.setPosixFilePermissions(path, perms); - var theseFiles = thisFile.listFiles(); - if (thisFile.isDirectory() && theseFiles != null) { - for (File subfile : theseFiles) { - setFilePerms(subfile.toPath()); - } + public static void setFilePerms(Path path) throws IOException { + if (Platform.isLinux()) { + File thisFile = path.toFile(); + Set perms = + Files.readAttributes(path, PosixFileAttributes.class).permissions(); + if (!perms.equals(allReadWriteExecutePerms)) { + logger.info("Setting perms on" + path); + Files.setPosixFilePermissions(path, perms); + var theseFiles = thisFile.listFiles(); + if (thisFile.isDirectory() && theseFiles != null) { + for (File subfile : theseFiles) { + setFilePerms(subfile.toPath()); + } + } + } } - } } - } - public static void setAllPerms(Path path) { - if (Platform.isLinux()) { - String command = String.format("chmod 777 -R %s", path.toString()); - try { - Process p = Runtime.getRuntime().exec(command); - p.waitFor(); + public static void setAllPerms(Path path) { + if (Platform.isLinux()) { + String command = String.format("chmod 777 -R %s", path.toString()); + try { + Process p = Runtime.getRuntime().exec(command); + p.waitFor(); - } catch (Exception e) { - logger.error("Setting perms failed!", e); - } - } else { - logger.info("Cannot set directory permissions on Windows!"); + } catch (Exception e) { + logger.error("Setting perms failed!", e); + } + } else { + logger.info("Cannot set directory permissions on Windows!"); + } } - } } diff --git 
a/photon-core/src/main/java/org/photonvision/common/util/file/JacksonUtils.java b/photon-core/src/main/java/org/photonvision/common/util/file/JacksonUtils.java index a891c14fa7..818a0e51ec 100644 --- a/photon-core/src/main/java/org/photonvision/common/util/file/JacksonUtils.java +++ b/photon-core/src/main/java/org/photonvision/common/util/file/JacksonUtils.java @@ -34,110 +34,110 @@ import java.util.HashMap; public class JacksonUtils { - public static class UIMap extends HashMap {} + public static class UIMap extends HashMap {} - public static void serialize(Path path, T object) throws IOException { - serialize(path, object, true); - } - - public static String serializeToString(T object) throws IOException { - PolymorphicTypeValidator ptv = - BasicPolymorphicTypeValidator.builder().allowIfBaseType(object.getClass()).build(); - ObjectMapper objectMapper = - JsonMapper.builder() - .activateDefaultTyping(ptv, ObjectMapper.DefaultTyping.JAVA_LANG_OBJECT) - .build(); - return objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(object); - } + public static void serialize(Path path, T object) throws IOException { + serialize(path, object, true); + } - public static void serialize(Path path, T object, boolean forceSync) throws IOException { - PolymorphicTypeValidator ptv = - BasicPolymorphicTypeValidator.builder().allowIfBaseType(object.getClass()).build(); - ObjectMapper objectMapper = - JsonMapper.builder() - .activateDefaultTyping(ptv, ObjectMapper.DefaultTyping.JAVA_LANG_OBJECT) - .build(); - String json = objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(object); - saveJsonString(json, path, forceSync); - } + public static String serializeToString(T object) throws IOException { + PolymorphicTypeValidator ptv = + BasicPolymorphicTypeValidator.builder().allowIfBaseType(object.getClass()).build(); + ObjectMapper objectMapper = + JsonMapper.builder() + .activateDefaultTyping(ptv, ObjectMapper.DefaultTyping.JAVA_LANG_OBJECT) + .build(); + return 
objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(object); + } - public static T deserialize(String s, Class ref) throws IOException { - PolymorphicTypeValidator ptv = - BasicPolymorphicTypeValidator.builder().allowIfBaseType(ref).build(); - ObjectMapper objectMapper = - JsonMapper.builder() - .configure(JsonReadFeature.ALLOW_JAVA_COMMENTS, true) - .configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false) - .activateDefaultTyping(ptv, ObjectMapper.DefaultTyping.JAVA_LANG_OBJECT) - .build(); + public static void serialize(Path path, T object, boolean forceSync) throws IOException { + PolymorphicTypeValidator ptv = + BasicPolymorphicTypeValidator.builder().allowIfBaseType(object.getClass()).build(); + ObjectMapper objectMapper = + JsonMapper.builder() + .activateDefaultTyping(ptv, ObjectMapper.DefaultTyping.JAVA_LANG_OBJECT) + .build(); + String json = objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(object); + saveJsonString(json, path, forceSync); + } - return objectMapper.readValue(s, ref); - } + public static T deserialize(String s, Class ref) throws IOException { + PolymorphicTypeValidator ptv = + BasicPolymorphicTypeValidator.builder().allowIfBaseType(ref).build(); + ObjectMapper objectMapper = + JsonMapper.builder() + .configure(JsonReadFeature.ALLOW_JAVA_COMMENTS, true) + .configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false) + .activateDefaultTyping(ptv, ObjectMapper.DefaultTyping.JAVA_LANG_OBJECT) + .build(); - public static T deserialize(Path path, Class ref) throws IOException { - PolymorphicTypeValidator ptv = - BasicPolymorphicTypeValidator.builder().allowIfBaseType(ref).build(); - ObjectMapper objectMapper = - JsonMapper.builder() - .configure(JsonReadFeature.ALLOW_JAVA_COMMENTS, true) - .configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false) - .activateDefaultTyping(ptv, ObjectMapper.DefaultTyping.JAVA_LANG_OBJECT) - .build(); - File jsonFile = new File(path.toString()); - if 
(jsonFile.exists() && jsonFile.length() > 0) { - return objectMapper.readValue(jsonFile, ref); + return objectMapper.readValue(s, ref); } - return null; - } - public static T deserialize(Path path, Class ref, StdDeserializer deserializer) - throws IOException { - ObjectMapper objectMapper = new ObjectMapper(); - SimpleModule module = new SimpleModule(); - module.addDeserializer(ref, deserializer); - objectMapper.registerModule(module); - - File jsonFile = new File(path.toString()); - if (jsonFile.exists() && jsonFile.length() > 0) { - return objectMapper.readValue(jsonFile, ref); + public static T deserialize(Path path, Class ref) throws IOException { + PolymorphicTypeValidator ptv = + BasicPolymorphicTypeValidator.builder().allowIfBaseType(ref).build(); + ObjectMapper objectMapper = + JsonMapper.builder() + .configure(JsonReadFeature.ALLOW_JAVA_COMMENTS, true) + .configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false) + .activateDefaultTyping(ptv, ObjectMapper.DefaultTyping.JAVA_LANG_OBJECT) + .build(); + File jsonFile = new File(path.toString()); + if (jsonFile.exists() && jsonFile.length() > 0) { + return objectMapper.readValue(jsonFile, ref); + } + return null; } - return null; - } - public static void serialize(Path path, T object, Class ref, StdSerializer serializer) - throws IOException { - serialize(path, object, ref, serializer, true); - } + public static T deserialize(Path path, Class ref, StdDeserializer deserializer) + throws IOException { + ObjectMapper objectMapper = new ObjectMapper(); + SimpleModule module = new SimpleModule(); + module.addDeserializer(ref, deserializer); + objectMapper.registerModule(module); - public static void serialize( - Path path, T object, Class ref, StdSerializer serializer, boolean forceSync) - throws IOException { - ObjectMapper objectMapper = new ObjectMapper(); - SimpleModule module = new SimpleModule(); - module.addSerializer(ref, serializer); - objectMapper.registerModule(module); - String json = 
objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(object); - saveJsonString(json, path, forceSync); - } + File jsonFile = new File(path.toString()); + if (jsonFile.exists() && jsonFile.length() > 0) { + return objectMapper.readValue(jsonFile, ref); + } + return null; + } - private static void saveJsonString(String json, Path path, boolean forceSync) throws IOException { - var file = path.toFile(); - if (file.getParentFile() != null && !file.getParentFile().exists()) { - file.getParentFile().mkdirs(); + public static void serialize(Path path, T object, Class ref, StdSerializer serializer) + throws IOException { + serialize(path, object, ref, serializer, true); } - if (!file.exists()) { - if (!file.canWrite()) { - file.setWritable(true); - } - file.createNewFile(); + + public static void serialize( + Path path, T object, Class ref, StdSerializer serializer, boolean forceSync) + throws IOException { + ObjectMapper objectMapper = new ObjectMapper(); + SimpleModule module = new SimpleModule(); + module.addSerializer(ref, serializer); + objectMapper.registerModule(module); + String json = objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(object); + saveJsonString(json, path, forceSync); } - FileOutputStream fileOutputStream = new FileOutputStream(file); - fileOutputStream.write(json.getBytes()); - fileOutputStream.flush(); - if (forceSync) { - FileDescriptor fileDescriptor = fileOutputStream.getFD(); - fileDescriptor.sync(); + + private static void saveJsonString(String json, Path path, boolean forceSync) throws IOException { + var file = path.toFile(); + if (file.getParentFile() != null && !file.getParentFile().exists()) { + file.getParentFile().mkdirs(); + } + if (!file.exists()) { + if (!file.canWrite()) { + file.setWritable(true); + } + file.createNewFile(); + } + FileOutputStream fileOutputStream = new FileOutputStream(file); + fileOutputStream.write(json.getBytes()); + fileOutputStream.flush(); + if (forceSync) { + FileDescriptor 
fileDescriptor = fileOutputStream.getFD(); + fileDescriptor.sync(); + } + fileOutputStream.close(); } - fileOutputStream.close(); - } } diff --git a/photon-core/src/main/java/org/photonvision/common/util/file/ProgramDirectoryUtilities.java b/photon-core/src/main/java/org/photonvision/common/util/file/ProgramDirectoryUtilities.java index 541b2a42cb..e9e244386d 100644 --- a/photon-core/src/main/java/org/photonvision/common/util/file/ProgramDirectoryUtilities.java +++ b/photon-core/src/main/java/org/photonvision/common/util/file/ProgramDirectoryUtilities.java @@ -21,43 +21,43 @@ import java.net.URISyntaxException; public class ProgramDirectoryUtilities { - private static String getJarName() { - return new File( - ProgramDirectoryUtilities.class - .getProtectionDomain() - .getCodeSource() - .getLocation() - .getPath()) - .getName(); - } - - private static boolean runningFromJAR() { - String jarName = getJarName(); - return jarName.contains(".jar"); - } + private static String getJarName() { + return new File( + ProgramDirectoryUtilities.class + .getProtectionDomain() + .getCodeSource() + .getLocation() + .getPath()) + .getName(); + } - public static String getProgramDirectory() { - if (runningFromJAR()) { - return getCurrentJARDirectory(); - } else { - return System.getProperty("user.dir"); + private static boolean runningFromJAR() { + String jarName = getJarName(); + return jarName.contains(".jar"); } - } - private static String getCurrentJARDirectory() { - try { - return new File( - ProgramDirectoryUtilities.class - .getProtectionDomain() - .getCodeSource() - .getLocation() - .toURI() - .getPath()) - .getParent(); - } catch (URISyntaxException exception) { - exception.printStackTrace(); + public static String getProgramDirectory() { + if (runningFromJAR()) { + return getCurrentJARDirectory(); + } else { + return System.getProperty("user.dir"); + } } - return null; - } + private static String getCurrentJARDirectory() { + try { + return new File( + 
ProgramDirectoryUtilities.class + .getProtectionDomain() + .getCodeSource() + .getLocation() + .toURI() + .getPath()) + .getParent(); + } catch (URISyntaxException exception) { + exception.printStackTrace(); + } + + return null; + } } diff --git a/photon-core/src/main/java/org/photonvision/common/util/java/TriConsumer.java b/photon-core/src/main/java/org/photonvision/common/util/java/TriConsumer.java index 3d599f00bd..6f1dc67fbc 100644 --- a/photon-core/src/main/java/org/photonvision/common/util/java/TriConsumer.java +++ b/photon-core/src/main/java/org/photonvision/common/util/java/TriConsumer.java @@ -18,5 +18,5 @@ package org.photonvision.common.util.java; public interface TriConsumer { - void accept(T t, U u, V v); + void accept(T t, U u, V v); } diff --git a/photon-core/src/main/java/org/photonvision/common/util/math/IPUtils.java b/photon-core/src/main/java/org/photonvision/common/util/math/IPUtils.java index ecd2d93caf..f4127e782c 100644 --- a/photon-core/src/main/java/org/photonvision/common/util/math/IPUtils.java +++ b/photon-core/src/main/java/org/photonvision/common/util/math/IPUtils.java @@ -21,36 +21,36 @@ import java.util.List; public class IPUtils { - public static boolean isValidIPV4(final String ip) { - String PATTERN = - "^((0|1\\d?\\d?|2[0-4]?\\d?|25[0-5]?|[3-9]\\d?)\\.){3}(0|1\\d?\\d?|2[0-4]?\\d?|25[0-5]?|[3-9]\\d?)$"; - - return ip.matches(PATTERN); - } - - public static List getDigitBytes(int num) { - List digits = new ArrayList<>(); - collectDigitBytes(num, digits); - return digits; - } - - private static void collectDigitBytes(int num, List digits) { - if (num / 10 > 0) { - collectDigitBytes(num / 10, digits); + public static boolean isValidIPV4(final String ip) { + String PATTERN = + "^((0|1\\d?\\d?|2[0-4]?\\d?|25[0-5]?|[3-9]\\d?)\\.){3}(0|1\\d?\\d?|2[0-4]?\\d?|25[0-5]?|[3-9]\\d?)$"; + + return ip.matches(PATTERN); + } + + public static List getDigitBytes(int num) { + List digits = new ArrayList<>(); + collectDigitBytes(num, digits); + return 
digits; + } + + private static void collectDigitBytes(int num, List digits) { + if (num / 10 > 0) { + collectDigitBytes(num / 10, digits); + } + digits.add((byte) (num % 10)); + } + + public static List getDigits(int num) { + List digits = new ArrayList<>(); + collectDigits(num, digits); + return digits; } - digits.add((byte) (num % 10)); - } - - public static List getDigits(int num) { - List digits = new ArrayList<>(); - collectDigits(num, digits); - return digits; - } - - private static void collectDigits(int num, List digits) { - if (num / 10 > 0) { - collectDigits(num / 10, digits); + + private static void collectDigits(int num, List digits) { + if (num / 10 > 0) { + collectDigits(num / 10, digits); + } + digits.add(num % 10); } - digits.add(num % 10); - } } diff --git a/photon-core/src/main/java/org/photonvision/common/util/math/MathUtils.java b/photon-core/src/main/java/org/photonvision/common/util/math/MathUtils.java index c85875fa89..2b6bb31ae3 100644 --- a/photon-core/src/main/java/org/photonvision/common/util/math/MathUtils.java +++ b/photon-core/src/main/java/org/photonvision/common/util/math/MathUtils.java @@ -30,172 +30,172 @@ import org.opencv.core.Mat; public class MathUtils { - MathUtils() {} - - public static double toSlope(Number angle) { - return Math.atan(Math.toRadians(angle.doubleValue() - 90)); - } - - public static int safeDivide(int quotient, int divisor) { - if (divisor == 0) { - return 0; - } else { - return quotient / divisor; - } - } - - public static double roundTo(double value, int to) { - double toMult = Math.pow(10, to); - return (double) Math.round(value * toMult) / toMult; - } - - public static double nanosToMillis(long nanos) { - return nanos / 1000000.0; - } - - public static double nanosToMillis(double nanos) { - return nanos / 1000000.0; - } - - public static long millisToNanos(long millis) { - return millis * 1000000; - } - - public static long microsToNanos(long micros) { - return micros * 1000; - } - - public static double 
map( - double value, double in_min, double in_max, double out_min, double out_max) { - return (value - in_min) * (out_max - out_min) / (in_max - in_min) + out_min; - } - - public static int map(int value, int inMin, int inMax, int outMin, int outMax) { - return (int) Math.floor(map((double) value, inMin, inMax, outMin, outMax) + 0.5); - } - - public static long wpiNanoTime() { - return microsToNanos(WPIUtilJNI.now()); - } - - /** - * Get the value of the nTh percentile of a list - * - * @param list The list to evaluate - * @param p The percentile, in [0,100] - * @return - */ - public static double getPercentile(List list, double p) { - if ((p > 100) || (p <= 0)) { - throw new IllegalArgumentException("invalid quantile value: " + p); - } - - if (list.isEmpty()) { - return Double.NaN; - } - if (list.size() == 1) { - return list.get(0); // always return single value for n = 1 - } - - // Sort array. We avoid a third copy here by just creating the - // list directly. - double[] sorted = new double[list.size()]; - for (int i = 0; i < list.size(); i++) { - sorted[i] = list.get(i); - } - Arrays.sort(sorted); - - return evaluateSorted(sorted, p); - } - - private static double evaluateSorted(final double[] sorted, final double p) { - double n = sorted.length; - double pos = p * (n + 1) / 100; - double fpos = Math.floor(pos); - int intPos = (int) fpos; - double dif = pos - fpos; - - if (pos < 1) { - return sorted[0]; - } - if (pos >= n) { - return sorted[sorted.length - 1]; - } - double lower = sorted[intPos - 1]; - double upper = sorted[intPos]; - return lower + dif * (upper - lower); - } - - /** - * Linearly interpolates between two values. - * - * @param startValue The start value. - * @param endValue The end value. - * @param t The fraction for interpolation. - * @return The interpolated value. 
- */ - @SuppressWarnings("ParameterName") - public static double lerp(double startValue, double endValue, double t) { - return startValue + (endValue - startValue) * t; - } - - /** - * OpenCV uses the EDN coordinate system, but WPILib uses NWU. Converts a camera-to-target - * transformation from EDN to NWU. - * - *

Note: The detected target's rvec and tvec perform a rotation-translation transformation - * which converts points in the target's coordinate system to the camera's. This means applying - * the transformation to the target point (0,0,0) for example would give the target's center - * relative to the camera. Conveniently, if we make a translation-rotation transformation out of - * these components instead, we get the transformation from the camera to the target. - * - * @param cameraToTarget3d A camera-to-target Transform3d in EDN. - * @return A camera-to-target Transform3d in NWU. - */ - public static Transform3d convertOpenCVtoPhotonTransform(Transform3d cameraToTarget3d) { - // TODO: Refactor into new pipe? - return CoordinateSystem.convert( - cameraToTarget3d, CoordinateSystem.EDN(), CoordinateSystem.NWU()); - } - - /* - * From the AprilTag repo: - * "The coordinate system has the origin at the camera center. The z-axis points from the camera - * center out the camera lens. The x-axis is to the right in the image taken by the camera, and - * y is down. The tag's coordinate frame is centered at the center of the tag, with x-axis to the - * right, y-axis down, and z-axis into the tag." - * - * This means our detected transformation will be in EDN. Our subsequent uses of the transformation, - * however, assume the tag's z-axis point away from the tag instead of into it. This means we - * have to correct the transformation's rotation. - */ - private static final Rotation3d APRILTAG_BASE_ROTATION = - new Rotation3d(VecBuilder.fill(0, 1, 0), Units.degreesToRadians(180)); - - /** - * AprilTag returns a camera-to-tag transform in EDN, but the tag's z-axis points into the tag - * instead of away from it and towards the camera. This means we have to correct the - * transformation's rotation. - * - * @param pose The Transform3d with translation and rotation directly from the {@link - * AprilTagPoseEstimate}. 
- */ - public static Transform3d convertApriltagtoOpenCV(Transform3d pose) { - var ocvRotation = APRILTAG_BASE_ROTATION.rotateBy(pose.getRotation()); - return new Transform3d(pose.getTranslation(), ocvRotation); - } - - public static Pose3d convertArucotoOpenCV(Transform3d pose) { - var ocvRotation = - APRILTAG_BASE_ROTATION.rotateBy( - new Rotation3d(VecBuilder.fill(0, 0, 1), Units.degreesToRadians(180)) - .rotateBy(pose.getRotation())); - return new Pose3d(pose.getTranslation(), ocvRotation); - } - - public static void rotationToOpencvRvec(Rotation3d rotation, Mat rvecOutput) { - var angle = rotation.getAngle(); - var axis = rotation.getAxis().times(angle); - rvecOutput.put(0, 0, axis.getData()); - } + MathUtils() {} + + public static double toSlope(Number angle) { + return Math.atan(Math.toRadians(angle.doubleValue() - 90)); + } + + public static int safeDivide(int quotient, int divisor) { + if (divisor == 0) { + return 0; + } else { + return quotient / divisor; + } + } + + public static double roundTo(double value, int to) { + double toMult = Math.pow(10, to); + return (double) Math.round(value * toMult) / toMult; + } + + public static double nanosToMillis(long nanos) { + return nanos / 1000000.0; + } + + public static double nanosToMillis(double nanos) { + return nanos / 1000000.0; + } + + public static long millisToNanos(long millis) { + return millis * 1000000; + } + + public static long microsToNanos(long micros) { + return micros * 1000; + } + + public static double map( + double value, double in_min, double in_max, double out_min, double out_max) { + return (value - in_min) * (out_max - out_min) / (in_max - in_min) + out_min; + } + + public static int map(int value, int inMin, int inMax, int outMin, int outMax) { + return (int) Math.floor(map((double) value, inMin, inMax, outMin, outMax) + 0.5); + } + + public static long wpiNanoTime() { + return microsToNanos(WPIUtilJNI.now()); + } + + /** + * Get the value of the nTh percentile of a list + * + * @param 
list The list to evaluate + * @param p The percentile, in [0,100] + * @return + */ + public static double getPercentile(List list, double p) { + if ((p > 100) || (p <= 0)) { + throw new IllegalArgumentException("invalid quantile value: " + p); + } + + if (list.isEmpty()) { + return Double.NaN; + } + if (list.size() == 1) { + return list.get(0); // always return single value for n = 1 + } + + // Sort array. We avoid a third copy here by just creating the + // list directly. + double[] sorted = new double[list.size()]; + for (int i = 0; i < list.size(); i++) { + sorted[i] = list.get(i); + } + Arrays.sort(sorted); + + return evaluateSorted(sorted, p); + } + + private static double evaluateSorted(final double[] sorted, final double p) { + double n = sorted.length; + double pos = p * (n + 1) / 100; + double fpos = Math.floor(pos); + int intPos = (int) fpos; + double dif = pos - fpos; + + if (pos < 1) { + return sorted[0]; + } + if (pos >= n) { + return sorted[sorted.length - 1]; + } + double lower = sorted[intPos - 1]; + double upper = sorted[intPos]; + return lower + dif * (upper - lower); + } + + /** + * Linearly interpolates between two values. + * + * @param startValue The start value. + * @param endValue The end value. + * @param t The fraction for interpolation. + * @return The interpolated value. + */ + @SuppressWarnings("ParameterName") + public static double lerp(double startValue, double endValue, double t) { + return startValue + (endValue - startValue) * t; + } + + /** + * OpenCV uses the EDN coordinate system, but WPILib uses NWU. Converts a camera-to-target + * transformation from EDN to NWU. + * + *

Note: The detected target's rvec and tvec perform a rotation-translation transformation + * which converts points in the target's coordinate system to the camera's. This means applying + * the transformation to the target point (0,0,0) for example would give the target's center + * relative to the camera. Conveniently, if we make a translation-rotation transformation out of + * these components instead, we get the transformation from the camera to the target. + * + * @param cameraToTarget3d A camera-to-target Transform3d in EDN. + * @return A camera-to-target Transform3d in NWU. + */ + public static Transform3d convertOpenCVtoPhotonTransform(Transform3d cameraToTarget3d) { + // TODO: Refactor into new pipe? + return CoordinateSystem.convert( + cameraToTarget3d, CoordinateSystem.EDN(), CoordinateSystem.NWU()); + } + + /* + * From the AprilTag repo: + * "The coordinate system has the origin at the camera center. The z-axis points from the camera + * center out the camera lens. The x-axis is to the right in the image taken by the camera, and + * y is down. The tag's coordinate frame is centered at the center of the tag, with x-axis to the + * right, y-axis down, and z-axis into the tag." + * + * This means our detected transformation will be in EDN. Our subsequent uses of the transformation, + * however, assume the tag's z-axis point away from the tag instead of into it. This means we + * have to correct the transformation's rotation. + */ + private static final Rotation3d APRILTAG_BASE_ROTATION = + new Rotation3d(VecBuilder.fill(0, 1, 0), Units.degreesToRadians(180)); + + /** + * AprilTag returns a camera-to-tag transform in EDN, but the tag's z-axis points into the tag + * instead of away from it and towards the camera. This means we have to correct the + * transformation's rotation. + * + * @param pose The Transform3d with translation and rotation directly from the {@link + * AprilTagPoseEstimate}. 
+ */ + public static Transform3d convertApriltagtoOpenCV(Transform3d pose) { + var ocvRotation = APRILTAG_BASE_ROTATION.rotateBy(pose.getRotation()); + return new Transform3d(pose.getTranslation(), ocvRotation); + } + + public static Pose3d convertArucotoOpenCV(Transform3d pose) { + var ocvRotation = + APRILTAG_BASE_ROTATION.rotateBy( + new Rotation3d(VecBuilder.fill(0, 0, 1), Units.degreesToRadians(180)) + .rotateBy(pose.getRotation())); + return new Pose3d(pose.getTranslation(), ocvRotation); + } + + public static void rotationToOpencvRvec(Rotation3d rotation, Mat rvecOutput) { + var angle = rotation.getAngle(); + var axis = rotation.getAxis().times(angle); + rvecOutput.put(0, 0, axis.getData()); + } } diff --git a/photon-core/src/main/java/org/photonvision/common/util/numbers/DoubleCouple.java b/photon-core/src/main/java/org/photonvision/common/util/numbers/DoubleCouple.java index 6294e561ff..d8503503e0 100644 --- a/photon-core/src/main/java/org/photonvision/common/util/numbers/DoubleCouple.java +++ b/photon-core/src/main/java/org/photonvision/common/util/numbers/DoubleCouple.java @@ -20,28 +20,28 @@ import org.opencv.core.Point; public class DoubleCouple extends NumberCouple { - public DoubleCouple() { - super(0.0, 0.0); - } - - public DoubleCouple(Number first, Number second) { - super(first.doubleValue(), second.doubleValue()); - } - - public DoubleCouple(Double first, Double second) { - super(first, second); - } - - public DoubleCouple(Point point) { - super(point.x, point.y); - } - - public Point toPoint() { - return new Point(first, second); - } - - public void fromPoint(Point point) { - first = point.x; - second = point.y; - } + public DoubleCouple() { + super(0.0, 0.0); + } + + public DoubleCouple(Number first, Number second) { + super(first.doubleValue(), second.doubleValue()); + } + + public DoubleCouple(Double first, Double second) { + super(first, second); + } + + public DoubleCouple(Point point) { + super(point.x, point.y); + } + + public Point 
toPoint() { + return new Point(first, second); + } + + public void fromPoint(Point point) { + first = point.x; + second = point.y; + } } diff --git a/photon-core/src/main/java/org/photonvision/common/util/numbers/IntegerCouple.java b/photon-core/src/main/java/org/photonvision/common/util/numbers/IntegerCouple.java index 196776b068..0cd834cc23 100644 --- a/photon-core/src/main/java/org/photonvision/common/util/numbers/IntegerCouple.java +++ b/photon-core/src/main/java/org/photonvision/common/util/numbers/IntegerCouple.java @@ -18,11 +18,11 @@ package org.photonvision.common.util.numbers; public class IntegerCouple extends NumberCouple { - public IntegerCouple() { - super(0, 0); - } + public IntegerCouple() { + super(0, 0); + } - public IntegerCouple(Integer first, Integer second) { - super(first, second); - } + public IntegerCouple(Integer first, Integer second) { + super(first, second); + } } diff --git a/photon-core/src/main/java/org/photonvision/common/util/numbers/NumberCouple.java b/photon-core/src/main/java/org/photonvision/common/util/numbers/NumberCouple.java index 26c40fe984..9a6047c797 100644 --- a/photon-core/src/main/java/org/photonvision/common/util/numbers/NumberCouple.java +++ b/photon-core/src/main/java/org/photonvision/common/util/numbers/NumberCouple.java @@ -20,51 +20,51 @@ import com.fasterxml.jackson.annotation.JsonIgnore; public abstract class NumberCouple { - protected T first; - protected T second; + protected T first; + protected T second; - public NumberCouple(T first, T second) { - this.first = first; - this.second = second; - } - - public void setFirst(T first) { - this.first = first; - } - - public T getFirst() { - return first; - } + public NumberCouple(T first, T second) { + this.first = first; + this.second = second; + } - public void setSecond(T second) { - this.second = second; - } + public void setFirst(T first) { + this.first = first; + } - public T getSecond() { - return second; - } + public T getFirst() { + return first; + } - 
public void set(T first, T second) { - this.first = first; - this.second = second; - } + public void setSecond(T second) { + this.second = second; + } - @Override - public boolean equals(Object obj) { - if (!(obj instanceof NumberCouple)) { - return false; + public T getSecond() { + return second; } - var couple = (NumberCouple) obj; - if (!couple.first.equals(first)) { - return false; + public void set(T first, T second) { + this.first = first; + this.second = second; } - return couple.second.equals(second); - } + @Override + public boolean equals(Object obj) { + if (!(obj instanceof NumberCouple)) { + return false; + } + + var couple = (NumberCouple) obj; + if (!couple.first.equals(first)) { + return false; + } - @JsonIgnore - public boolean isEmpty() { - return first.intValue() == 0 && second.intValue() == 0; - } + return couple.second.equals(second); + } + + @JsonIgnore + public boolean isEmpty() { + return first.intValue() == 0 && second.intValue() == 0; + } } diff --git a/photon-core/src/main/java/org/photonvision/common/util/numbers/NumberListUtils.java b/photon-core/src/main/java/org/photonvision/common/util/numbers/NumberListUtils.java index 1823c33e21..bd1272745c 100644 --- a/photon-core/src/main/java/org/photonvision/common/util/numbers/NumberListUtils.java +++ b/photon-core/src/main/java/org/photonvision/common/util/numbers/NumberListUtils.java @@ -25,90 +25,90 @@ @SuppressWarnings("unused") public class NumberListUtils { - /** - * @param collection an ArrayList of Comparable objects - * @return the median of collection - */ - public static double median(List collection, Comparator comp) { - double result; - int n = collection.size() / 2; - - if (collection.size() % 2 == 0) // even number of items; find the middle two and average them - result = - (nthSmallest(collection, n - 1, comp).doubleValue() - + nthSmallest(collection, n, comp).doubleValue()) - / 2.0; - else // odd number of items; return the one in the middle - result = nthSmallest(collection, 
n, comp).doubleValue(); - - return result; - } - - public static String toString(List collection) { - return toString(collection, ""); - } - - public static String toString(List collection, String suffix) { - StringJoiner joiner = new StringJoiner(", "); - for (T x : collection) { - String s = x.doubleValue() + suffix; - joiner.add(s); + /** + * @param collection an ArrayList of Comparable objects + * @return the median of collection + */ + public static double median(List collection, Comparator comp) { + double result; + int n = collection.size() / 2; + + if (collection.size() % 2 == 0) // even number of items; find the middle two and average them + result = + (nthSmallest(collection, n - 1, comp).doubleValue() + + nthSmallest(collection, n, comp).doubleValue()) + / 2.0; + else // odd number of items; return the one in the middle + result = nthSmallest(collection, n, comp).doubleValue(); + + return result; } - return joiner.toString(); - } - - /** - * @param collection an ArrayList of Numbers - * @return the mean of collection - */ - public static double mean(final List collection) { - BigDecimal sum = BigDecimal.ZERO; - for (final Number number : collection) { - sum = sum.add(BigDecimal.valueOf(number.doubleValue())); + + public static String toString(List collection) { + return toString(collection, ""); + } + + public static String toString(List collection, String suffix) { + StringJoiner joiner = new StringJoiner(", "); + for (T x : collection) { + String s = x.doubleValue() + suffix; + joiner.add(s); + } + return joiner.toString(); + } + + /** + * @param collection an ArrayList of Numbers + * @return the mean of collection + */ + public static double mean(final List collection) { + BigDecimal sum = BigDecimal.ZERO; + for (final Number number : collection) { + sum = sum.add(BigDecimal.valueOf(number.doubleValue())); + } + return (sum.doubleValue() / collection.size()); + } + + /** + * @param collection a collection of Comparable objects + * @param n the 
position of the desired object, using the ordering defined on the collection + * elements + * @return the nth smallest object + */ + public static T nthSmallest(List collection, int n, Comparator comp) { + T result, pivot; + ArrayList underPivot = new ArrayList<>(), + overPivot = new ArrayList<>(), + equalPivot = new ArrayList<>(); + + // choosing a pivot is a whole topic in itself. + // this implementation uses the simple strategy of grabbing something from the middle of the + // ArrayList. + + pivot = collection.get(n / 2); + + // split collection into 3 lists based on comparison with the pivot + + for (T obj : collection) { + int order = comp.compare(obj, pivot); + + if (order < 0) // obj < pivot + underPivot.add(obj); + else if (order > 0) // obj > pivot + overPivot.add(obj); + else // obj = pivot + equalPivot.add(obj); + } // for each obj in collection + + // recurse on the appropriate collection + + if (n < underPivot.size()) result = nthSmallest(underPivot, n, comp); + else if (n < underPivot.size() + equalPivot.size()) // equal to pivot; just return it + result = pivot; + else // everything in underPivot and equalPivot is too small. Adjust n accordingly in the + // recursion. + result = nthSmallest(overPivot, n - underPivot.size() - equalPivot.size(), comp); + + return result; } - return (sum.doubleValue() / collection.size()); - } - - /** - * @param collection a collection of Comparable objects - * @param n the position of the desired object, using the ordering defined on the collection - * elements - * @return the nth smallest object - */ - public static T nthSmallest(List collection, int n, Comparator comp) { - T result, pivot; - ArrayList underPivot = new ArrayList<>(), - overPivot = new ArrayList<>(), - equalPivot = new ArrayList<>(); - - // choosing a pivot is a whole topic in itself. - // this implementation uses the simple strategy of grabbing something from the middle of the - // ArrayList. 
- - pivot = collection.get(n / 2); - - // split collection into 3 lists based on comparison with the pivot - - for (T obj : collection) { - int order = comp.compare(obj, pivot); - - if (order < 0) // obj < pivot - underPivot.add(obj); - else if (order > 0) // obj > pivot - overPivot.add(obj); - else // obj = pivot - equalPivot.add(obj); - } // for each obj in collection - - // recurse on the appropriate collection - - if (n < underPivot.size()) result = nthSmallest(underPivot, n, comp); - else if (n < underPivot.size() + equalPivot.size()) // equal to pivot; just return it - result = pivot; - else // everything in underPivot and equalPivot is too small. Adjust n accordingly in the - // recursion. - result = nthSmallest(overPivot, n - underPivot.size() - equalPivot.size(), comp); - - return result; - } } diff --git a/photon-core/src/main/java/org/photonvision/raspi/LibCameraJNI.java b/photon-core/src/main/java/org/photonvision/raspi/LibCameraJNI.java index f4588d81c7..a75ae2e579 100644 --- a/photon-core/src/main/java/org/photonvision/raspi/LibCameraJNI.java +++ b/photon-core/src/main/java/org/photonvision/raspi/LibCameraJNI.java @@ -27,165 +27,165 @@ import org.photonvision.common.logging.Logger; public class LibCameraJNI { - private static boolean libraryLoaded = false; - private static final Logger logger = new Logger(LibCameraJNI.class, LogGroup.Camera); - - public static final Object CAMERA_LOCK = new Object(); - - public static synchronized void forceLoad() throws IOException { - if (libraryLoaded) return; - - try { - File libDirectory = Path.of("lib/").toFile(); - if (!libDirectory.exists()) { - Files.createDirectory(libDirectory.toPath()).toFile(); - } - - // We always extract the shared object (we could hash each so, but that's a lot of work) - URL resourceURL = LibCameraJNI.class.getResource("/nativelibraries/libphotonlibcamera.so"); - File libFile = Path.of("lib/libphotonlibcamera.so").toFile(); - try (InputStream in = resourceURL.openStream()) { - if 
(libFile.exists()) Files.delete(libFile.toPath()); - Files.copy(in, libFile.toPath()); - } catch (Exception e) { - logger.error("Could not extract the native library!"); - } - System.load(libFile.getAbsolutePath()); - - libraryLoaded = true; - logger.info("Successfully loaded libpicam shared object"); - } catch (UnsatisfiedLinkError e) { - logger.error("Couldn't load libpicam shared object"); - e.printStackTrace(); + private static boolean libraryLoaded = false; + private static final Logger logger = new Logger(LibCameraJNI.class, LogGroup.Camera); + + public static final Object CAMERA_LOCK = new Object(); + + public static synchronized void forceLoad() throws IOException { + if (libraryLoaded) return; + + try { + File libDirectory = Path.of("lib/").toFile(); + if (!libDirectory.exists()) { + Files.createDirectory(libDirectory.toPath()).toFile(); + } + + // We always extract the shared object (we could hash each so, but that's a lot of work) + URL resourceURL = LibCameraJNI.class.getResource("/nativelibraries/libphotonlibcamera.so"); + File libFile = Path.of("lib/libphotonlibcamera.so").toFile(); + try (InputStream in = resourceURL.openStream()) { + if (libFile.exists()) Files.delete(libFile.toPath()); + Files.copy(in, libFile.toPath()); + } catch (Exception e) { + logger.error("Could not extract the native library!"); + } + System.load(libFile.getAbsolutePath()); + + libraryLoaded = true; + logger.info("Successfully loaded libpicam shared object"); + } catch (UnsatisfiedLinkError e) { + logger.error("Couldn't load libpicam shared object"); + e.printStackTrace(); + } } - } - - public enum SensorModel { - Disconnected, - OV5647, // Picam v1 - IMX219, // Picam v2 - IMX477, // Picam HQ - OV9281, - OV7251, - Unknown; - - public String getFriendlyName() { - switch (this) { - case Disconnected: - return "Disconnected Camera"; - case OV5647: - return "Camera Module v1"; - case IMX219: - return "Camera Module v2"; - case IMX477: - return "HQ Camera"; - case OV9281: - 
return "OV9281"; - case OV7251: - return "OV7251"; - case Unknown: - default: - return "Unknown Camera"; - } + + public enum SensorModel { + Disconnected, + OV5647, // Picam v1 + IMX219, // Picam v2 + IMX477, // Picam HQ + OV9281, + OV7251, + Unknown; + + public String getFriendlyName() { + switch (this) { + case Disconnected: + return "Disconnected Camera"; + case OV5647: + return "Camera Module v1"; + case IMX219: + return "Camera Module v2"; + case IMX477: + return "HQ Camera"; + case OV9281: + return "OV9281"; + case OV7251: + return "OV7251"; + case Unknown: + default: + return "Unknown Camera"; + } + } } - } - public static SensorModel getSensorModel() { - int model = getSensorModelRaw(); - return SensorModel.values()[model]; - } + public static SensorModel getSensorModel() { + int model = getSensorModelRaw(); + return SensorModel.values()[model]; + } - public static boolean isSupported() { - return libraryLoaded - // && getSensorModel() != PicamJNI.SensorModel.Disconnected - // && Platform.isRaspberryPi() - && isLibraryWorking(); - } + public static boolean isSupported() { + return libraryLoaded + // && getSensorModel() != PicamJNI.SensorModel.Disconnected + // && Platform.isRaspberryPi() + && isLibraryWorking(); + } - private static native boolean isLibraryWorking(); + private static native boolean isLibraryWorking(); - public static native int getSensorModelRaw(); + public static native int getSensorModelRaw(); - // ======================================================== // + // ======================================================== // - /** - * Creates a new runner with a given width/height/fps - * - * @param width Camera video mode width in pixels - * @param height Camera video mode height in pixels - * @param fps Camera video mode FPS - * @return success of creating a camera object - */ - public static native boolean createCamera(int width, int height, int rotation); + /** + * Creates a new runner with a given width/height/fps + * + * @param width 
Camera video mode width in pixels + * @param height Camera video mode height in pixels + * @param fps Camera video mode FPS + * @return success of creating a camera object + */ + public static native boolean createCamera(int width, int height, int rotation); - /** - * Starts the camera thresholder and display threads running. Make sure that this function is - * called synchronously with stopCamera and returnFrame! - */ - public static native boolean startCamera(); + /** + * Starts the camera thresholder and display threads running. Make sure that this function is + * called synchronously with stopCamera and returnFrame! + */ + public static native boolean startCamera(); - /** Stops the camera runner. Make sure to call prior to destroying the camera! */ - public static native boolean stopCamera(); + /** Stops the camera runner. Make sure to call prior to destroying the camera! */ + public static native boolean stopCamera(); - // Destroy all native resources associated with a camera. Ensure stop is called prior! - public static native boolean destroyCamera(); + // Destroy all native resources associated with a camera. Ensure stop is called prior! 
+ public static native boolean destroyCamera(); - // ======================================================== // + // ======================================================== // - // Set thresholds on [0..1] - public static native boolean setThresholds( - double hl, double sl, double vl, double hu, double su, double vu, boolean hueInverted); + // Set thresholds on [0..1] + public static native boolean setThresholds( + double hl, double sl, double vl, double hu, double su, double vu, boolean hueInverted); - public static native boolean setAutoExposure(boolean doAutoExposure); + public static native boolean setAutoExposure(boolean doAutoExposure); - // Exposure time, in microseconds - public static native boolean setExposure(int exposureUs); + // Exposure time, in microseconds + public static native boolean setExposure(int exposureUs); - // Set brightness on [-1, 1] - public static native boolean setBrightness(double brightness); + // Set brightness on [-1, 1] + public static native boolean setBrightness(double brightness); - // Unknown ranges for red and blue AWB gain - public static native boolean setAwbGain(double red, double blue); + // Unknown ranges for red and blue AWB gain + public static native boolean setAwbGain(double red, double blue); - /** - * Get the time when the first pixel exposure was started, in the same timebase as libcamera gives - * the frame capture time. Units are nanoseconds. - */ - public static native long getFrameCaptureTime(); + /** + * Get the time when the first pixel exposure was started, in the same timebase as libcamera gives + * the frame capture time. Units are nanoseconds. + */ + public static native long getFrameCaptureTime(); - /** - * Get the current time, in the same timebase as libcamera gives the frame capture time. Units are - * nanoseconds. - */ - public static native long getLibcameraTimestamp(); + /** + * Get the current time, in the same timebase as libcamera gives the frame capture time. Units are + * nanoseconds. 
+ */ + public static native long getLibcameraTimestamp(); - public static native long setFramesToCopy(boolean copyIn, boolean copyOut); + public static native long setFramesToCopy(boolean copyIn, boolean copyOut); - // Analog gain multiplier to apply to all color channels, on [1, Big Number] - public static native boolean setAnalogGain(double analog); + // Analog gain multiplier to apply to all color channels, on [1, Big Number] + public static native boolean setAnalogGain(double analog); - /** Block until a new frame is available from native code. */ - public static native boolean awaitNewFrame(); + /** Block until a new frame is available from native code. */ + public static native boolean awaitNewFrame(); - /** - * Get a pointer to the most recent color mat generated. Call this immediately after - * awaitNewFrame, and call only once per new frame! - */ - public static native long takeColorFrame(); + /** + * Get a pointer to the most recent color mat generated. Call this immediately after + * awaitNewFrame, and call only once per new frame! + */ + public static native long takeColorFrame(); - /** - * Get a pointer to the most recent processed mat generated. Call this immediately after - * awaitNewFrame, and call only once per new frame! - */ - public static native long takeProcessedFrame(); + /** + * Get a pointer to the most recent processed mat generated. Call this immediately after + * awaitNewFrame, and call only once per new frame! + */ + public static native long takeProcessedFrame(); - /** - * Set the GPU processing type we should do. Enum of [none, HSV, greyscale, adaptive threshold]. - */ - public static native boolean setGpuProcessType(int type); + /** + * Set the GPU processing type we should do. Enum of [none, HSV, greyscale, adaptive threshold]. 
+ */ + public static native boolean setGpuProcessType(int type); - public static native int getGpuProcessType(); + public static native int getGpuProcessType(); - // Release a frame pointer back to the libcamera driver code to be filled again */ - // public static native long returnFrame(long frame); + // Release a frame pointer back to the libcamera driver code to be filled again */ + // public static native long returnFrame(long frame); } diff --git a/photon-core/src/main/java/org/photonvision/vision/apriltag/AprilTagFamily.java b/photon-core/src/main/java/org/photonvision/vision/apriltag/AprilTagFamily.java index 40042080ef..8fbda02683 100644 --- a/photon-core/src/main/java/org/photonvision/vision/apriltag/AprilTagFamily.java +++ b/photon-core/src/main/java/org/photonvision/vision/apriltag/AprilTagFamily.java @@ -18,17 +18,17 @@ package org.photonvision.vision.apriltag; public enum AprilTagFamily { - kTag36h11, - kTag25h9, - kTag16h5, - kTagCircle21h7, - kTagCircle49h12, - kTagStandard41h12, - kTagStandard52h13, - kTagCustom48h11; + kTag36h11, + kTag25h9, + kTag16h5, + kTagCircle21h7, + kTagCircle49h12, + kTagStandard41h12, + kTagStandard52h13, + kTagCustom48h11; - public String getNativeName() { - // We want to strip the leading kT and replace with "t" - return this.name().replaceFirst("kT", "t"); - } + public String getNativeName() { + // We want to strip the leading kT and replace with "t" + return this.name().replaceFirst("kT", "t"); + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/aruco/ArucoDetectionResult.java b/photon-core/src/main/java/org/photonvision/vision/aruco/ArucoDetectionResult.java index bf8916d30f..b59ae9e9e4 100644 --- a/photon-core/src/main/java/org/photonvision/vision/aruco/ArucoDetectionResult.java +++ b/photon-core/src/main/java/org/photonvision/vision/aruco/ArucoDetectionResult.java @@ -22,62 +22,62 @@ import org.photonvision.common.logging.Logger; public class ArucoDetectionResult { - private static final Logger 
logger = - new Logger(ArucoDetectionResult.class, LogGroup.VisionModule); - double[] xCorners; - double[] yCorners; + private static final Logger logger = + new Logger(ArucoDetectionResult.class, LogGroup.VisionModule); + double[] xCorners; + double[] yCorners; - int id; + int id; - double[] tvec, rvec; + double[] tvec, rvec; - public ArucoDetectionResult( - double[] xCorners, double[] yCorners, int id, double[] tvec, double[] rvec) { - this.xCorners = xCorners; - this.yCorners = yCorners; - this.id = id; - this.tvec = tvec; - this.rvec = rvec; - // logger.debug("Creating a new detection result: " + this.toString()); - } + public ArucoDetectionResult( + double[] xCorners, double[] yCorners, int id, double[] tvec, double[] rvec) { + this.xCorners = xCorners; + this.yCorners = yCorners; + this.id = id; + this.tvec = tvec; + this.rvec = rvec; + // logger.debug("Creating a new detection result: " + this.toString()); + } - public double[] getTvec() { - return tvec; - } + public double[] getTvec() { + return tvec; + } - public double[] getRvec() { - return rvec; - } + public double[] getRvec() { + return rvec; + } - public double[] getxCorners() { - return xCorners; - } + public double[] getxCorners() { + return xCorners; + } - public double[] getyCorners() { - return yCorners; - } + public double[] getyCorners() { + return yCorners; + } - public int getId() { - return id; - } + public int getId() { + return id; + } - public double getCenterX() { - return (xCorners[0] + xCorners[1] + xCorners[2] + xCorners[3]) * .25; - } + public double getCenterX() { + return (xCorners[0] + xCorners[1] + xCorners[2] + xCorners[3]) * .25; + } - public double getCenterY() { - return (yCorners[0] + yCorners[1] + yCorners[2] + yCorners[3]) * .25; - } + public double getCenterY() { + return (yCorners[0] + yCorners[1] + yCorners[2] + yCorners[3]) * .25; + } - @Override - public String toString() { - return "ArucoDetectionResult{" - + "xCorners=" - + Arrays.toString(xCorners) - + ", yCorners=" 
- + Arrays.toString(yCorners) - + ", id=" - + id - + '}'; - } + @Override + public String toString() { + return "ArucoDetectionResult{" + + "xCorners=" + + Arrays.toString(xCorners) + + ", yCorners=" + + Arrays.toString(yCorners) + + ", id=" + + id + + '}'; + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/aruco/ArucoDetectorParams.java b/photon-core/src/main/java/org/photonvision/vision/aruco/ArucoDetectorParams.java index 2a69eecd22..24ea8dafab 100644 --- a/photon-core/src/main/java/org/photonvision/vision/aruco/ArucoDetectorParams.java +++ b/photon-core/src/main/java/org/photonvision/vision/aruco/ArucoDetectorParams.java @@ -24,54 +24,54 @@ import org.photonvision.common.logging.Logger; public class ArucoDetectorParams { - private static final Logger logger = new Logger(PhotonArucoDetector.class, LogGroup.VisionModule); + private static final Logger logger = new Logger(PhotonArucoDetector.class, LogGroup.VisionModule); - private float m_decimate = -1; - private int m_iterations = -1; - private double m_accuracy = -1; + private float m_decimate = -1; + private int m_iterations = -1; + private double m_accuracy = -1; - DetectorParameters parameters = new DetectorParameters(); - ArucoDetector detector; + DetectorParameters parameters = new DetectorParameters(); + ArucoDetector detector; - public ArucoDetectorParams() { - setDecimation(1); - setCornerAccuracy(25); - setCornerRefinementMaxIterations(100); + public ArucoDetectorParams() { + setDecimation(1); + setCornerAccuracy(25); + setCornerRefinementMaxIterations(100); - detector = - new ArucoDetector( - Objdetect.getPredefinedDictionary(Objdetect.DICT_APRILTAG_16h5), parameters); - } + detector = + new ArucoDetector( + Objdetect.getPredefinedDictionary(Objdetect.DICT_APRILTAG_16h5), parameters); + } - public void setDecimation(float decimate) { - if (decimate == m_decimate) return; + public void setDecimation(float decimate) { + if (decimate == m_decimate) return; - logger.info("Setting 
decimation from " + m_decimate + " to " + decimate); + logger.info("Setting decimation from " + m_decimate + " to " + decimate); - // We only need to mutate the parameters -- the detector keeps a pointer to the parameters - // object internally, so it should automatically update - parameters.set_aprilTagQuadDecimate(decimate); - m_decimate = decimate; - } + // We only need to mutate the parameters -- the detector keeps a pointer to the parameters + // object internally, so it should automatically update + parameters.set_aprilTagQuadDecimate(decimate); + m_decimate = decimate; + } - public void setCornerRefinementMaxIterations(int iters) { - if (iters == m_iterations || iters <= 0) return; + public void setCornerRefinementMaxIterations(int iters) { + if (iters == m_iterations || iters <= 0) return; - parameters.set_cornerRefinementMethod(Objdetect.CORNER_REFINE_SUBPIX); - parameters.set_cornerRefinementMaxIterations(iters); // 200 + parameters.set_cornerRefinementMethod(Objdetect.CORNER_REFINE_SUBPIX); + parameters.set_cornerRefinementMaxIterations(iters); // 200 - m_iterations = iters; - } + m_iterations = iters; + } - public void setCornerAccuracy(double accuracy) { - if (accuracy == m_accuracy || accuracy <= 0) return; + public void setCornerAccuracy(double accuracy) { + if (accuracy == m_accuracy || accuracy <= 0) return; - parameters.set_cornerRefinementMinAccuracy( - accuracy / 1000.0); // divides by 1000 because the UI multiplies it by 1000 - m_accuracy = accuracy; - } + parameters.set_cornerRefinementMinAccuracy( + accuracy / 1000.0); // divides by 1000 because the UI multiplies it by 1000 + m_accuracy = accuracy; + } - public ArucoDetector getDetector() { - return detector; - } + public ArucoDetector getDetector() { + return detector; + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/aruco/PhotonArucoDetector.java b/photon-core/src/main/java/org/photonvision/vision/aruco/PhotonArucoDetector.java index f73694b514..6bb10f7baa 100644 --- 
a/photon-core/src/main/java/org/photonvision/vision/aruco/PhotonArucoDetector.java +++ b/photon-core/src/main/java/org/photonvision/vision/aruco/PhotonArucoDetector.java @@ -31,102 +31,102 @@ import org.photonvision.vision.calibration.CameraCalibrationCoefficients; public class PhotonArucoDetector { - private static final Logger logger = new Logger(PhotonArucoDetector.class, LogGroup.VisionModule); + private static final Logger logger = new Logger(PhotonArucoDetector.class, LogGroup.VisionModule); - private static final Rotation3d ARUCO_BASE_ROTATION = - new Rotation3d(VecBuilder.fill(0, 0, 1), Units.degreesToRadians(180)); + private static final Rotation3d ARUCO_BASE_ROTATION = + new Rotation3d(VecBuilder.fill(0, 0, 1), Units.degreesToRadians(180)); - Mat ids; + Mat ids; - Mat tvecs; - Mat rvecs; - ArrayList corners; + Mat tvecs; + Mat rvecs; + ArrayList corners; - Mat cornerMat; - Translation3d translation; - Rotation3d rotation; - double timeStartDetect; - double timeEndDetect; - Pose3d tagPose; - double timeStartProcess; - double timeEndProcess; - double[] xCorners = new double[4]; - double[] yCorners = new double[4]; + Mat cornerMat; + Translation3d translation; + Rotation3d rotation; + double timeStartDetect; + double timeEndDetect; + Pose3d tagPose; + double timeStartProcess; + double timeEndProcess; + double[] xCorners = new double[4]; + double[] yCorners = new double[4]; - public PhotonArucoDetector() { - logger.debug("New Aruco Detector"); - ids = new Mat(); - tvecs = new Mat(); - rvecs = new Mat(); - corners = new ArrayList<>(); - tagPose = new Pose3d(); - translation = new Translation3d(); - rotation = new Rotation3d(); - } - - public ArucoDetectionResult[] detect( - Mat grayscaleImg, - float tagSize, - CameraCalibrationCoefficients coeffs, - ArucoDetector detector) { - detector.detectMarkers(grayscaleImg, corners, ids); - if (coeffs != null) { - Aruco.estimatePoseSingleMarkers( - corners, - tagSize, - coeffs.getCameraIntrinsicsMat(), - 
coeffs.getDistCoeffsMat(), - rvecs, - tvecs); + public PhotonArucoDetector() { + logger.debug("New Aruco Detector"); + ids = new Mat(); + tvecs = new Mat(); + rvecs = new Mat(); + corners = new ArrayList<>(); + tagPose = new Pose3d(); + translation = new Translation3d(); + rotation = new Rotation3d(); } - ArucoDetectionResult[] toReturn = new ArucoDetectionResult[corners.size()]; - timeStartProcess = System.currentTimeMillis(); - for (int i = 0; i < corners.size(); i++) { - cornerMat = corners.get(i); - // logger.debug(cornerMat.dump()); - xCorners = - new double[] { - cornerMat.get(0, 0)[0], - cornerMat.get(0, 1)[0], - cornerMat.get(0, 2)[0], - cornerMat.get(0, 3)[0] - }; - yCorners = - new double[] { - cornerMat.get(0, 0)[1], - cornerMat.get(0, 1)[1], - cornerMat.get(0, 2)[1], - cornerMat.get(0, 3)[1] - }; - cornerMat.release(); + public ArucoDetectionResult[] detect( + Mat grayscaleImg, + float tagSize, + CameraCalibrationCoefficients coeffs, + ArucoDetector detector) { + detector.detectMarkers(grayscaleImg, corners, ids); + if (coeffs != null) { + Aruco.estimatePoseSingleMarkers( + corners, + tagSize, + coeffs.getCameraIntrinsicsMat(), + coeffs.getDistCoeffsMat(), + rvecs, + tvecs); + } - double[] tvec; - double[] rvec; - if (coeffs != null) { - // Need to apply a 180 rotation about Z - var origRvec = rvecs.get(i, 0); - var axisangle = VecBuilder.fill(origRvec[0], origRvec[1], origRvec[2]); - Rotation3d rotation = new Rotation3d(axisangle, axisangle.normF()); - var ocvRotation = ARUCO_BASE_ROTATION.rotateBy(rotation); + ArucoDetectionResult[] toReturn = new ArucoDetectionResult[corners.size()]; + timeStartProcess = System.currentTimeMillis(); + for (int i = 0; i < corners.size(); i++) { + cornerMat = corners.get(i); + // logger.debug(cornerMat.dump()); + xCorners = + new double[] { + cornerMat.get(0, 0)[0], + cornerMat.get(0, 1)[0], + cornerMat.get(0, 2)[0], + cornerMat.get(0, 3)[0] + }; + yCorners = + new double[] { + cornerMat.get(0, 0)[1], + cornerMat.get(0, 
1)[1], + cornerMat.get(0, 2)[1], + cornerMat.get(0, 3)[1] + }; + cornerMat.release(); - var angle = ocvRotation.getAngle(); - var finalAxisAngle = ocvRotation.getAxis().times(angle); + double[] tvec; + double[] rvec; + if (coeffs != null) { + // Need to apply a 180 rotation about Z + var origRvec = rvecs.get(i, 0); + var axisangle = VecBuilder.fill(origRvec[0], origRvec[1], origRvec[2]); + Rotation3d rotation = new Rotation3d(axisangle, axisangle.normF()); + var ocvRotation = ARUCO_BASE_ROTATION.rotateBy(rotation); - tvec = tvecs.get(i, 0); - rvec = finalAxisAngle.getData(); - } else { - tvec = new double[] {0, 0, 0}; - rvec = new double[] {0, 0, 0}; - } + var angle = ocvRotation.getAngle(); + var finalAxisAngle = ocvRotation.getAxis().times(angle); - toReturn[i] = - new ArucoDetectionResult(xCorners, yCorners, (int) ids.get(i, 0)[0], tvec, rvec); - } - rvecs.release(); - tvecs.release(); - ids.release(); + tvec = tvecs.get(i, 0); + rvec = finalAxisAngle.getData(); + } else { + tvec = new double[] {0, 0, 0}; + rvec = new double[] {0, 0, 0}; + } - return toReturn; - } + toReturn[i] = + new ArucoDetectionResult(xCorners, yCorners, (int) ids.get(i, 0)[0], tvec, rvec); + } + rvecs.release(); + tvecs.release(); + ids.release(); + + return toReturn; + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/calibration/CameraCalibrationCoefficients.java b/photon-core/src/main/java/org/photonvision/vision/calibration/CameraCalibrationCoefficients.java index 08f064cd75..4b4ae5a8cc 100644 --- a/photon-core/src/main/java/org/photonvision/vision/calibration/CameraCalibrationCoefficients.java +++ b/photon-core/src/main/java/org/photonvision/vision/calibration/CameraCalibrationCoefficients.java @@ -28,116 +28,116 @@ import org.photonvision.vision.opencv.Releasable; public class CameraCalibrationCoefficients implements Releasable { - @JsonProperty("resolution") - public final Size resolution; - - @JsonProperty("cameraIntrinsics") - public final JsonMat 
cameraIntrinsics; - - @JsonProperty("cameraExtrinsics") - @JsonAlias({"cameraExtrinsics", "distCoeffs"}) - public final JsonMat distCoeffs; - - @JsonProperty("perViewErrors") - public final double[] perViewErrors; - - @JsonProperty("standardDeviation") - public final double standardDeviation; - - @JsonIgnore private final double[] intrinsicsArr = new double[9]; - - @JsonIgnore private final double[] extrinsicsArr = new double[5]; - - @JsonCreator - public CameraCalibrationCoefficients( - @JsonProperty("resolution") Size resolution, - @JsonProperty("cameraIntrinsics") JsonMat cameraIntrinsics, - @JsonProperty("cameraExtrinsics") JsonMat distCoeffs, - @JsonProperty("perViewErrors") double[] perViewErrors, - @JsonProperty("standardDeviation") double standardDeviation) { - this.resolution = resolution; - this.cameraIntrinsics = cameraIntrinsics; - this.distCoeffs = distCoeffs; - this.perViewErrors = perViewErrors; - this.standardDeviation = standardDeviation; - - // do this once so gets are quick - getCameraIntrinsicsMat().get(0, 0, intrinsicsArr); - getDistCoeffsMat().get(0, 0, extrinsicsArr); - } - - @JsonIgnore - public Mat getCameraIntrinsicsMat() { - return cameraIntrinsics.getAsMat(); - } - - @JsonIgnore - public MatOfDouble getDistCoeffsMat() { - return distCoeffs.getAsMatOfDouble(); - } - - @JsonIgnore - public double[] getIntrinsicsArr() { - return intrinsicsArr; - } - - @JsonIgnore - public double[] getExtrinsicsArr() { - return extrinsicsArr; - } - - @JsonIgnore - public double[] getPerViewErrors() { - return perViewErrors; - } - - @JsonIgnore - public double getStandardDeviation() { - return standardDeviation; - } - - @Override - public void release() { - cameraIntrinsics.release(); - distCoeffs.release(); - } - - public static CameraCalibrationCoefficients parseFromCalibdbJson(JsonNode json) { - // camera_matrix is a row major, array of arrays - var cam_matrix = json.get("camera_matrix"); - - double[] cam_arr = - new double[] { - 
cam_matrix.get(0).get(0).doubleValue(), - cam_matrix.get(0).get(1).doubleValue(), - cam_matrix.get(0).get(2).doubleValue(), - cam_matrix.get(1).get(0).doubleValue(), - cam_matrix.get(1).get(1).doubleValue(), - cam_matrix.get(1).get(2).doubleValue(), - cam_matrix.get(2).get(0).doubleValue(), - cam_matrix.get(2).get(1).doubleValue(), - cam_matrix.get(2).get(2).doubleValue() - }; - - var dist_coefs = json.get("distortion_coefficients"); - - double[] dist_array = - new double[] { - dist_coefs.get(0).doubleValue(), - dist_coefs.get(1).doubleValue(), - dist_coefs.get(2).doubleValue(), - dist_coefs.get(3).doubleValue(), - dist_coefs.get(4).doubleValue(), - }; - - var cam_jsonmat = new JsonMat(3, 3, cam_arr); - var distortion_jsonmat = new JsonMat(1, 5, dist_array); - - var error = json.get("avg_reprojection_error").asDouble(); - var width = json.get("img_size").get(0).doubleValue(); - var height = json.get("img_size").get(1).doubleValue(); - - return new CameraCalibrationCoefficients( - new Size(width, height), cam_jsonmat, distortion_jsonmat, new double[] {error}, 0); - } + @JsonProperty("resolution") + public final Size resolution; + + @JsonProperty("cameraIntrinsics") + public final JsonMat cameraIntrinsics; + + @JsonProperty("cameraExtrinsics") + @JsonAlias({"cameraExtrinsics", "distCoeffs"}) + public final JsonMat distCoeffs; + + @JsonProperty("perViewErrors") + public final double[] perViewErrors; + + @JsonProperty("standardDeviation") + public final double standardDeviation; + + @JsonIgnore private final double[] intrinsicsArr = new double[9]; + + @JsonIgnore private final double[] extrinsicsArr = new double[5]; + + @JsonCreator + public CameraCalibrationCoefficients( + @JsonProperty("resolution") Size resolution, + @JsonProperty("cameraIntrinsics") JsonMat cameraIntrinsics, + @JsonProperty("cameraExtrinsics") JsonMat distCoeffs, + @JsonProperty("perViewErrors") double[] perViewErrors, + @JsonProperty("standardDeviation") double standardDeviation) { + 
this.resolution = resolution; + this.cameraIntrinsics = cameraIntrinsics; + this.distCoeffs = distCoeffs; + this.perViewErrors = perViewErrors; + this.standardDeviation = standardDeviation; + + // do this once so gets are quick + getCameraIntrinsicsMat().get(0, 0, intrinsicsArr); + getDistCoeffsMat().get(0, 0, extrinsicsArr); + } + + @JsonIgnore + public Mat getCameraIntrinsicsMat() { + return cameraIntrinsics.getAsMat(); + } + + @JsonIgnore + public MatOfDouble getDistCoeffsMat() { + return distCoeffs.getAsMatOfDouble(); + } + + @JsonIgnore + public double[] getIntrinsicsArr() { + return intrinsicsArr; + } + + @JsonIgnore + public double[] getExtrinsicsArr() { + return extrinsicsArr; + } + + @JsonIgnore + public double[] getPerViewErrors() { + return perViewErrors; + } + + @JsonIgnore + public double getStandardDeviation() { + return standardDeviation; + } + + @Override + public void release() { + cameraIntrinsics.release(); + distCoeffs.release(); + } + + public static CameraCalibrationCoefficients parseFromCalibdbJson(JsonNode json) { + // camera_matrix is a row major, array of arrays + var cam_matrix = json.get("camera_matrix"); + + double[] cam_arr = + new double[] { + cam_matrix.get(0).get(0).doubleValue(), + cam_matrix.get(0).get(1).doubleValue(), + cam_matrix.get(0).get(2).doubleValue(), + cam_matrix.get(1).get(0).doubleValue(), + cam_matrix.get(1).get(1).doubleValue(), + cam_matrix.get(1).get(2).doubleValue(), + cam_matrix.get(2).get(0).doubleValue(), + cam_matrix.get(2).get(1).doubleValue(), + cam_matrix.get(2).get(2).doubleValue() + }; + + var dist_coefs = json.get("distortion_coefficients"); + + double[] dist_array = + new double[] { + dist_coefs.get(0).doubleValue(), + dist_coefs.get(1).doubleValue(), + dist_coefs.get(2).doubleValue(), + dist_coefs.get(3).doubleValue(), + dist_coefs.get(4).doubleValue(), + }; + + var cam_jsonmat = new JsonMat(3, 3, cam_arr); + var distortion_jsonmat = new JsonMat(1, 5, dist_array); + + var error = 
json.get("avg_reprojection_error").asDouble(); + var width = json.get("img_size").get(0).doubleValue(); + var height = json.get("img_size").get(1).doubleValue(); + + return new CameraCalibrationCoefficients( + new Size(width, height), cam_jsonmat, distortion_jsonmat, new double[] {error}, 0); + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/calibration/JsonMat.java b/photon-core/src/main/java/org/photonvision/vision/calibration/JsonMat.java index 79a3aec9c6..15a6d0b9e0 100644 --- a/photon-core/src/main/java/org/photonvision/vision/calibration/JsonMat.java +++ b/photon-core/src/main/java/org/photonvision/vision/calibration/JsonMat.java @@ -30,100 +30,100 @@ import org.photonvision.vision.opencv.Releasable; public class JsonMat implements Releasable { - public final int rows; - public final int cols; - public final int type; - public final double[] data; - - // Cached matrices to avoid object recreation - @JsonIgnore private Mat wrappedMat = null; - @JsonIgnore private Matrix wpilibMat = null; - - private MatOfDouble wrappedMatOfDouble; - - public JsonMat(int rows, int cols, double[] data) { - this(rows, cols, CvType.CV_64FC1, data); - } - - public JsonMat( - @JsonProperty("rows") int rows, - @JsonProperty("cols") int cols, - @JsonProperty("type") int type, - @JsonProperty("data") double[] data) { - this.rows = rows; - this.cols = cols; - this.type = type; - this.data = data; - } - - private static boolean isCameraMatrixMat(Mat mat) { - return mat.type() == CvType.CV_64FC1 && mat.cols() == 3 && mat.rows() == 3; - } - - private static boolean isDistortionCoeffsMat(Mat mat) { - return mat.type() == CvType.CV_64FC1 && mat.cols() == 5 && mat.rows() == 1; - } - - private static boolean isCalibrationMat(Mat mat) { - return isDistortionCoeffsMat(mat) || isCameraMatrixMat(mat); - } - - @JsonIgnore - public static double[] getDataFromMat(Mat mat) { - if (!isCalibrationMat(mat)) return null; - - double[] data = new double[(int) (mat.total() * 
mat.elemSize())]; - mat.get(0, 0, data); - - int dataLen = -1; - - if (isCameraMatrixMat(mat)) dataLen = 9; - if (isDistortionCoeffsMat(mat)) dataLen = 5; - - // truncate Mat data to correct number data points. - return Arrays.copyOfRange(data, 0, dataLen); - } - - public static JsonMat fromMat(Mat mat) { - if (!isCalibrationMat(mat)) return null; - return new JsonMat(mat.rows(), mat.cols(), getDataFromMat(mat)); - } - - @JsonIgnore - public Mat getAsMat() { - if (this.type != CvType.CV_64FC1) return null; - - if (wrappedMat == null) { - this.wrappedMat = new Mat(this.rows, this.cols, this.type); - this.wrappedMat.put(0, 0, this.data); + public final int rows; + public final int cols; + public final int type; + public final double[] data; + + // Cached matrices to avoid object recreation + @JsonIgnore private Mat wrappedMat = null; + @JsonIgnore private Matrix wpilibMat = null; + + private MatOfDouble wrappedMatOfDouble; + + public JsonMat(int rows, int cols, double[] data) { + this(rows, cols, CvType.CV_64FC1, data); + } + + public JsonMat( + @JsonProperty("rows") int rows, + @JsonProperty("cols") int cols, + @JsonProperty("type") int type, + @JsonProperty("data") double[] data) { + this.rows = rows; + this.cols = cols; + this.type = type; + this.data = data; + } + + private static boolean isCameraMatrixMat(Mat mat) { + return mat.type() == CvType.CV_64FC1 && mat.cols() == 3 && mat.rows() == 3; } - return this.wrappedMat; - } - - @JsonIgnore - public MatOfDouble getAsMatOfDouble() { - if (this.wrappedMatOfDouble == null) { - this.wrappedMatOfDouble = new MatOfDouble(); - getAsMat().convertTo(wrappedMatOfDouble, CvType.CV_64F); + + private static boolean isDistortionCoeffsMat(Mat mat) { + return mat.type() == CvType.CV_64FC1 && mat.cols() == 5 && mat.rows() == 1; + } + + private static boolean isCalibrationMat(Mat mat) { + return isDistortionCoeffsMat(mat) || isCameraMatrixMat(mat); + } + + @JsonIgnore + public static double[] getDataFromMat(Mat mat) { + if 
(!isCalibrationMat(mat)) return null; + + double[] data = new double[(int) (mat.total() * mat.elemSize())]; + mat.get(0, 0, data); + + int dataLen = -1; + + if (isCameraMatrixMat(mat)) dataLen = 9; + if (isDistortionCoeffsMat(mat)) dataLen = 5; + + // truncate Mat data to correct number data points. + return Arrays.copyOfRange(data, 0, dataLen); + } + + public static JsonMat fromMat(Mat mat) { + if (!isCalibrationMat(mat)) return null; + return new JsonMat(mat.rows(), mat.cols(), getDataFromMat(mat)); + } + + @JsonIgnore + public Mat getAsMat() { + if (this.type != CvType.CV_64FC1) return null; + + if (wrappedMat == null) { + this.wrappedMat = new Mat(this.rows, this.cols, this.type); + this.wrappedMat.put(0, 0, this.data); + } + return this.wrappedMat; + } + + @JsonIgnore + public MatOfDouble getAsMatOfDouble() { + if (this.wrappedMatOfDouble == null) { + this.wrappedMatOfDouble = new MatOfDouble(); + getAsMat().convertTo(wrappedMatOfDouble, CvType.CV_64F); + } + return this.wrappedMatOfDouble; + } + + @JsonIgnore + public Matrix getAsWpilibMat() { + if (wpilibMat == null) { + wpilibMat = new Matrix(new SimpleMatrix(rows, cols, true, data)); + } + return (Matrix) wpilibMat; + } + + @Override + public void release() { + getAsMat().release(); } - return this.wrappedMatOfDouble; - } - @JsonIgnore - public Matrix getAsWpilibMat() { - if (wpilibMat == null) { - wpilibMat = new Matrix(new SimpleMatrix(rows, cols, true, data)); + public Packet populatePacket(Packet packet) { + packet.encode(this.data); + return packet; } - return (Matrix) wpilibMat; - } - - @Override - public void release() { - getAsMat().release(); - } - - public Packet populatePacket(Packet packet) { - packet.encode(this.data); - return packet; - } } diff --git a/photon-core/src/main/java/org/photonvision/vision/camera/CameraQuirk.java b/photon-core/src/main/java/org/photonvision/vision/camera/CameraQuirk.java index c28620d532..69f9c564cb 100644 --- 
a/photon-core/src/main/java/org/photonvision/vision/camera/CameraQuirk.java +++ b/photon-core/src/main/java/org/photonvision/vision/camera/CameraQuirk.java @@ -18,16 +18,16 @@ package org.photonvision.vision.camera; public enum CameraQuirk { - /** Camera settable for controllable image gain */ - Gain, - /** For the Raspberry Pi Camera */ - PiCam, - /** Cap at 100FPS for high-bandwidth cameras */ - FPSCap100, - /** Separate red/blue gain controls available */ - AWBGain, - /** Will not work with photonvision - Logitec C270 at least */ - CompletelyBroken, - /** Has adjustable focus and autofocus switch */ - AdjustableFocus, + /** Camera settable for controllable image gain */ + Gain, + /** For the Raspberry Pi Camera */ + PiCam, + /** Cap at 100FPS for high-bandwidth cameras */ + FPSCap100, + /** Separate red/blue gain controls available */ + AWBGain, + /** Will not work with photonvision - Logitec C270 at least */ + CompletelyBroken, + /** Has adjustable focus and autofocus switch */ + AdjustableFocus, } diff --git a/photon-core/src/main/java/org/photonvision/vision/camera/CameraType.java b/photon-core/src/main/java/org/photonvision/vision/camera/CameraType.java index d63221e1bd..8ce16ca275 100644 --- a/photon-core/src/main/java/org/photonvision/vision/camera/CameraType.java +++ b/photon-core/src/main/java/org/photonvision/vision/camera/CameraType.java @@ -18,7 +18,7 @@ package org.photonvision.vision.camera; public enum CameraType { - UsbCamera, - HttpCamera, - ZeroCopyPicam + UsbCamera, + HttpCamera, + ZeroCopyPicam } diff --git a/photon-core/src/main/java/org/photonvision/vision/camera/FileVisionSource.java b/photon-core/src/main/java/org/photonvision/vision/camera/FileVisionSource.java index 2aeb57787b..951135fcf4 100644 --- a/photon-core/src/main/java/org/photonvision/vision/camera/FileVisionSource.java +++ b/photon-core/src/main/java/org/photonvision/vision/camera/FileVisionSource.java @@ -29,89 +29,89 @@ import 
org.photonvision.vision.processes.VisionSourceSettables; public class FileVisionSource extends VisionSource { - private final FileFrameProvider frameProvider; - private final FileSourceSettables settables; - - public FileVisionSource(CameraConfiguration cameraConfiguration) { - super(cameraConfiguration); - var calibration = - !cameraConfiguration.calibrations.isEmpty() - ? cameraConfiguration.calibrations.get(0) - : null; - frameProvider = - new FileFrameProvider( - Path.of(cameraConfiguration.path), - cameraConfiguration.FOV, - FileFrameProvider.MAX_FPS, - calibration); - settables = - new FileSourceSettables(cameraConfiguration, frameProvider.get().frameStaticProperties); - } - - public FileVisionSource(String name, String imagePath, double fov) { - super(new CameraConfiguration(name, imagePath)); - frameProvider = new FileFrameProvider(imagePath, fov); - settables = - new FileSourceSettables(cameraConfiguration, frameProvider.get().frameStaticProperties); - } - - @Override - public FrameProvider getFrameProvider() { - return frameProvider; - } - - @Override - public VisionSourceSettables getSettables() { - return settables; - } - - @Override - public boolean isVendorCamera() { - return false; - } - - private static class FileSourceSettables extends VisionSourceSettables { - private final VideoMode videoMode; - - private final HashMap videoModes = new HashMap<>(); - - FileSourceSettables( - CameraConfiguration cameraConfiguration, FrameStaticProperties frameStaticProperties) { - super(cameraConfiguration); - this.frameStaticProperties = frameStaticProperties; - videoMode = - new VideoMode( - PixelFormat.kMJPEG, - frameStaticProperties.imageWidth, - frameStaticProperties.imageHeight, - 30); - videoModes.put(0, videoMode); + private final FileFrameProvider frameProvider; + private final FileSourceSettables settables; + + public FileVisionSource(CameraConfiguration cameraConfiguration) { + super(cameraConfiguration); + var calibration = + 
!cameraConfiguration.calibrations.isEmpty() + ? cameraConfiguration.calibrations.get(0) + : null; + frameProvider = + new FileFrameProvider( + Path.of(cameraConfiguration.path), + cameraConfiguration.FOV, + FileFrameProvider.MAX_FPS, + calibration); + settables = + new FileSourceSettables(cameraConfiguration, frameProvider.get().frameStaticProperties); } - @Override - public void setExposure(double exposure) {} - - public void setAutoExposure(boolean cameraAutoExposure) {} - - @Override - public void setBrightness(int brightness) {} + public FileVisionSource(String name, String imagePath, double fov) { + super(new CameraConfiguration(name, imagePath)); + frameProvider = new FileFrameProvider(imagePath, fov); + settables = + new FileSourceSettables(cameraConfiguration, frameProvider.get().frameStaticProperties); + } @Override - public void setGain(int gain) {} + public FrameProvider getFrameProvider() { + return frameProvider; + } @Override - public VideoMode getCurrentVideoMode() { - return videoMode; + public VisionSourceSettables getSettables() { + return settables; } @Override - protected void setVideoModeInternal(VideoMode videoMode) { - // Do nothing + public boolean isVendorCamera() { + return false; } - @Override - public HashMap getAllVideoModes() { - return videoModes; + private static class FileSourceSettables extends VisionSourceSettables { + private final VideoMode videoMode; + + private final HashMap videoModes = new HashMap<>(); + + FileSourceSettables( + CameraConfiguration cameraConfiguration, FrameStaticProperties frameStaticProperties) { + super(cameraConfiguration); + this.frameStaticProperties = frameStaticProperties; + videoMode = + new VideoMode( + PixelFormat.kMJPEG, + frameStaticProperties.imageWidth, + frameStaticProperties.imageHeight, + 30); + videoModes.put(0, videoMode); + } + + @Override + public void setExposure(double exposure) {} + + public void setAutoExposure(boolean cameraAutoExposure) {} + + @Override + public void 
setBrightness(int brightness) {} + + @Override + public void setGain(int gain) {} + + @Override + public VideoMode getCurrentVideoMode() { + return videoMode; + } + + @Override + protected void setVideoModeInternal(VideoMode videoMode) { + // Do nothing + } + + @Override + public HashMap getAllVideoModes() { + return videoModes; + } } - } } diff --git a/photon-core/src/main/java/org/photonvision/vision/camera/LibcameraGpuSettables.java b/photon-core/src/main/java/org/photonvision/vision/camera/LibcameraGpuSettables.java index cd052dbb96..7e0bd5c5c7 100644 --- a/photon-core/src/main/java/org/photonvision/vision/camera/LibcameraGpuSettables.java +++ b/photon-core/src/main/java/org/photonvision/vision/camera/LibcameraGpuSettables.java @@ -28,216 +28,216 @@ import org.photonvision.vision.processes.VisionSourceSettables; public class LibcameraGpuSettables extends VisionSourceSettables { - private FPSRatedVideoMode currentVideoMode; - private double lastManualExposure = 50; - private int lastBrightness = 50; - private boolean lastAutoExposureActive; - private int lastGain = 50; - private Pair lastAwbGains = new Pair<>(18, 18); - private boolean m_initialized = false; + private FPSRatedVideoMode currentVideoMode; + private double lastManualExposure = 50; + private int lastBrightness = 50; + private boolean lastAutoExposureActive; + private int lastGain = 50; + private Pair lastAwbGains = new Pair<>(18, 18); + private boolean m_initialized = false; - private final LibCameraJNI.SensorModel sensorModel; + private final LibCameraJNI.SensorModel sensorModel; - private ImageRotationMode m_rotationMode; + private ImageRotationMode m_rotationMode; - public void setRotation(ImageRotationMode rotationMode) { - if (rotationMode != m_rotationMode) { - m_rotationMode = rotationMode; + public void setRotation(ImageRotationMode rotationMode) { + if (rotationMode != m_rotationMode) { + m_rotationMode = rotationMode; - setVideoModeInternal(getCurrentVideoMode()); - } - } - - public 
LibcameraGpuSettables(CameraConfiguration configuration) { - super(configuration); - - videoModes = new HashMap<>(); - - sensorModel = LibCameraJNI.getSensorModel(); - - if (sensorModel == LibCameraJNI.SensorModel.IMX219) { - // Settings for the IMX219 sensor, which is used on the Pi Camera Module v2 - videoModes.put( - 0, new FPSRatedVideoMode(VideoMode.PixelFormat.kUnknown, 320, 240, 120, 120, .39)); - videoModes.put( - 1, new FPSRatedVideoMode(VideoMode.PixelFormat.kUnknown, 320, 240, 30, 30, .39)); - videoModes.put( - 2, new FPSRatedVideoMode(VideoMode.PixelFormat.kUnknown, 640, 480, 65, 90, .39)); - videoModes.put( - 3, new FPSRatedVideoMode(VideoMode.PixelFormat.kUnknown, 640, 480, 30, 30, .39)); - // TODO: fix 1280x720 in the native code and re-add it - videoModes.put( - 4, new FPSRatedVideoMode(VideoMode.PixelFormat.kUnknown, 1920, 1080, 15, 20, .53)); - videoModes.put( - 5, new FPSRatedVideoMode(VideoMode.PixelFormat.kUnknown, 3280 / 2, 2464 / 2, 15, 20, 1)); - videoModes.put( - 6, new FPSRatedVideoMode(VideoMode.PixelFormat.kUnknown, 3280 / 4, 2464 / 4, 15, 20, 1)); - } else if (sensorModel == LibCameraJNI.SensorModel.OV9281) { - videoModes.put( - 0, new FPSRatedVideoMode(VideoMode.PixelFormat.kUnknown, 320, 240, 30, 30, .39)); - videoModes.put( - 1, new FPSRatedVideoMode(VideoMode.PixelFormat.kUnknown, 1280 / 2, 800 / 2, 60, 60, 1)); - videoModes.put( - 2, new FPSRatedVideoMode(VideoMode.PixelFormat.kUnknown, 640, 480, 65, 90, .39)); - videoModes.put( - 3, new FPSRatedVideoMode(VideoMode.PixelFormat.kUnknown, 1280, 800, 60, 60, 1)); - - } else { - if (sensorModel == LibCameraJNI.SensorModel.IMX477) { - LibcameraGpuSource.logger.warn( - "It appears you are using a Pi HQ Camera. This camera is not officially supported. You will have to set your camera FOV differently based on resolution."); - } else if (sensorModel == LibCameraJNI.SensorModel.Unknown) { - LibcameraGpuSource.logger.warn( - "You have an unknown sensor connected to your Pi over CSI! 
This is likely a bug. If it is not, then you will have to set your camera FOV differently based on resolution."); - } - - // Settings for the OV5647 sensor, which is used by the Pi Camera Module v1 - videoModes.put(0, new FPSRatedVideoMode(VideoMode.PixelFormat.kUnknown, 320, 240, 90, 90, 1)); - videoModes.put(1, new FPSRatedVideoMode(VideoMode.PixelFormat.kUnknown, 640, 480, 85, 90, 1)); - videoModes.put( - 2, new FPSRatedVideoMode(VideoMode.PixelFormat.kUnknown, 960, 720, 45, 49, 0.74)); - // Half the size of the active areas on the OV5647 - videoModes.put( - 3, new FPSRatedVideoMode(VideoMode.PixelFormat.kUnknown, 2592 / 2, 1944 / 2, 20, 20, 1)); - videoModes.put( - 4, new FPSRatedVideoMode(VideoMode.PixelFormat.kUnknown, 1280, 720, 30, 45, 0.91)); - videoModes.put( - 5, new FPSRatedVideoMode(VideoMode.PixelFormat.kUnknown, 1920, 1080, 15, 20, 0.72)); + setVideoModeInternal(getCurrentVideoMode()); + } } - // TODO need to add more video modes for new sensors here + public LibcameraGpuSettables(CameraConfiguration configuration) { + super(configuration); + + videoModes = new HashMap<>(); + + sensorModel = LibCameraJNI.getSensorModel(); + + if (sensorModel == LibCameraJNI.SensorModel.IMX219) { + // Settings for the IMX219 sensor, which is used on the Pi Camera Module v2 + videoModes.put( + 0, new FPSRatedVideoMode(VideoMode.PixelFormat.kUnknown, 320, 240, 120, 120, .39)); + videoModes.put( + 1, new FPSRatedVideoMode(VideoMode.PixelFormat.kUnknown, 320, 240, 30, 30, .39)); + videoModes.put( + 2, new FPSRatedVideoMode(VideoMode.PixelFormat.kUnknown, 640, 480, 65, 90, .39)); + videoModes.put( + 3, new FPSRatedVideoMode(VideoMode.PixelFormat.kUnknown, 640, 480, 30, 30, .39)); + // TODO: fix 1280x720 in the native code and re-add it + videoModes.put( + 4, new FPSRatedVideoMode(VideoMode.PixelFormat.kUnknown, 1920, 1080, 15, 20, .53)); + videoModes.put( + 5, new FPSRatedVideoMode(VideoMode.PixelFormat.kUnknown, 3280 / 2, 2464 / 2, 15, 20, 1)); + videoModes.put( + 6, new 
FPSRatedVideoMode(VideoMode.PixelFormat.kUnknown, 3280 / 4, 2464 / 4, 15, 20, 1)); + } else if (sensorModel == LibCameraJNI.SensorModel.OV9281) { + videoModes.put( + 0, new FPSRatedVideoMode(VideoMode.PixelFormat.kUnknown, 320, 240, 30, 30, .39)); + videoModes.put( + 1, new FPSRatedVideoMode(VideoMode.PixelFormat.kUnknown, 1280 / 2, 800 / 2, 60, 60, 1)); + videoModes.put( + 2, new FPSRatedVideoMode(VideoMode.PixelFormat.kUnknown, 640, 480, 65, 90, .39)); + videoModes.put( + 3, new FPSRatedVideoMode(VideoMode.PixelFormat.kUnknown, 1280, 800, 60, 60, 1)); + + } else { + if (sensorModel == LibCameraJNI.SensorModel.IMX477) { + LibcameraGpuSource.logger.warn( + "It appears you are using a Pi HQ Camera. This camera is not officially supported. You will have to set your camera FOV differently based on resolution."); + } else if (sensorModel == LibCameraJNI.SensorModel.Unknown) { + LibcameraGpuSource.logger.warn( + "You have an unknown sensor connected to your Pi over CSI! This is likely a bug. 
If it is not, then you will have to set your camera FOV differently based on resolution."); + } + + // Settings for the OV5647 sensor, which is used by the Pi Camera Module v1 + videoModes.put(0, new FPSRatedVideoMode(VideoMode.PixelFormat.kUnknown, 320, 240, 90, 90, 1)); + videoModes.put(1, new FPSRatedVideoMode(VideoMode.PixelFormat.kUnknown, 640, 480, 85, 90, 1)); + videoModes.put( + 2, new FPSRatedVideoMode(VideoMode.PixelFormat.kUnknown, 960, 720, 45, 49, 0.74)); + // Half the size of the active areas on the OV5647 + videoModes.put( + 3, new FPSRatedVideoMode(VideoMode.PixelFormat.kUnknown, 2592 / 2, 1944 / 2, 20, 20, 1)); + videoModes.put( + 4, new FPSRatedVideoMode(VideoMode.PixelFormat.kUnknown, 1280, 720, 30, 45, 0.91)); + videoModes.put( + 5, new FPSRatedVideoMode(VideoMode.PixelFormat.kUnknown, 1920, 1080, 15, 20, 0.72)); + } - currentVideoMode = (FPSRatedVideoMode) videoModes.get(0); - } + // TODO need to add more video modes for new sensors here - @Override - public double getFOV() { - return getCurrentVideoMode().fovMultiplier * getConfiguration().FOV; - } + currentVideoMode = (FPSRatedVideoMode) videoModes.get(0); + } - @Override - public void setAutoExposure(boolean cameraAutoExposure) { - lastAutoExposureActive = cameraAutoExposure; - LibCameraJNI.setAutoExposure(cameraAutoExposure); - } + @Override + public double getFOV() { + return getCurrentVideoMode().fovMultiplier * getConfiguration().FOV; + } - @Override - public void setExposure(double exposure) { - if (exposure < 0.0 || lastAutoExposureActive) { - // Auto-exposure is active right now, don't set anything. - return; + @Override + public void setAutoExposure(boolean cameraAutoExposure) { + lastAutoExposureActive = cameraAutoExposure; + LibCameraJNI.setAutoExposure(cameraAutoExposure); } - // HACKS! 
- // If we set exposure too low, libcamera crashes or slows down - // Very weird and smelly - // For now, band-aid this by just not setting it lower than the "it breaks" limit - // is different depending on camera. - if (sensorModel == LibCameraJNI.SensorModel.OV9281) { - if (exposure < 6.0) { - exposure = 6.0; - } - } else if (sensorModel == LibCameraJNI.SensorModel.OV5647) { - if (exposure < 0.7) { - exposure = 0.7; - } + @Override + public void setExposure(double exposure) { + if (exposure < 0.0 || lastAutoExposureActive) { + // Auto-exposure is active right now, don't set anything. + return; + } + + // HACKS! + // If we set exposure too low, libcamera crashes or slows down + // Very weird and smelly + // For now, band-aid this by just not setting it lower than the "it breaks" limit + // is different depending on camera. + if (sensorModel == LibCameraJNI.SensorModel.OV9281) { + if (exposure < 6.0) { + exposure = 6.0; + } + } else if (sensorModel == LibCameraJNI.SensorModel.OV5647) { + if (exposure < 0.7) { + exposure = 0.7; + } + } + + lastManualExposure = exposure; + var success = LibCameraJNI.setExposure((int) Math.round(exposure) * 800); + if (!success) LibcameraGpuSource.logger.warn("Couldn't set Pi Camera exposure"); } - lastManualExposure = exposure; - var success = LibCameraJNI.setExposure((int) Math.round(exposure) * 800); - if (!success) LibcameraGpuSource.logger.warn("Couldn't set Pi Camera exposure"); - } - - @Override - public void setBrightness(int brightness) { - lastBrightness = brightness; - double realBrightness = MathUtils.map(brightness, 0.0, 100.0, -1.0, 1.0); - var success = LibCameraJNI.setBrightness(realBrightness); - if (!success) LibcameraGpuSource.logger.warn("Couldn't set Pi Camera brightness"); - } - - @Override - public void setGain(int gain) { - lastGain = gain; - // TODO units here seem odd -- 5ish seems legit? 
So divide by 10 - var success = LibCameraJNI.setAnalogGain(gain / 10.0); - if (!success) LibcameraGpuSource.logger.warn("Couldn't set Pi Camera gain"); - } - - @Override - public void setRedGain(int red) { - if (sensorModel != LibCameraJNI.SensorModel.OV9281) { - lastAwbGains = Pair.of(red, lastAwbGains.getSecond()); - setAwbGain(lastAwbGains.getFirst(), lastAwbGains.getSecond()); + @Override + public void setBrightness(int brightness) { + lastBrightness = brightness; + double realBrightness = MathUtils.map(brightness, 0.0, 100.0, -1.0, 1.0); + var success = LibCameraJNI.setBrightness(realBrightness); + if (!success) LibcameraGpuSource.logger.warn("Couldn't set Pi Camera brightness"); } - } - @Override - public void setBlueGain(int blue) { - if (sensorModel != LibCameraJNI.SensorModel.OV9281) { - lastAwbGains = Pair.of(lastAwbGains.getFirst(), blue); - setAwbGain(lastAwbGains.getFirst(), lastAwbGains.getSecond()); + @Override + public void setGain(int gain) { + lastGain = gain; + // TODO units here seem odd -- 5ish seems legit? 
So divide by 10 + var success = LibCameraJNI.setAnalogGain(gain / 10.0); + if (!success) LibcameraGpuSource.logger.warn("Couldn't set Pi Camera gain"); } - } - public void setAwbGain(int red, int blue) { - if (sensorModel != LibCameraJNI.SensorModel.OV9281) { - var success = LibCameraJNI.setAwbGain(red / 10.0, blue / 10.0); - if (!success) LibcameraGpuSource.logger.warn("Couldn't set Pi Camera AWB gains"); + @Override + public void setRedGain(int red) { + if (sensorModel != LibCameraJNI.SensorModel.OV9281) { + lastAwbGains = Pair.of(red, lastAwbGains.getSecond()); + setAwbGain(lastAwbGains.getFirst(), lastAwbGains.getSecond()); + } } - } - - @Override - public FPSRatedVideoMode getCurrentVideoMode() { - return currentVideoMode; - } - - @Override - protected void setVideoModeInternal(VideoMode videoMode) { - var mode = (FPSRatedVideoMode) videoMode; - - // We need to make sure that other threads don't try to do anything funny while we're recreating - // the camera - synchronized (LibCameraJNI.CAMERA_LOCK) { - if (m_initialized) { - logger.debug("Stopping libcamera"); - if (!LibCameraJNI.stopCamera()) { - logger.error("Couldn't stop a zero copy Pi Camera while switching video modes"); + + @Override + public void setBlueGain(int blue) { + if (sensorModel != LibCameraJNI.SensorModel.OV9281) { + lastAwbGains = Pair.of(lastAwbGains.getFirst(), blue); + setAwbGain(lastAwbGains.getFirst(), lastAwbGains.getSecond()); } - logger.debug("Destroying libcamera"); - if (!LibCameraJNI.destroyCamera()) { - logger.error("Couldn't destroy a zero copy Pi Camera while switching video modes"); + } + + public void setAwbGain(int red, int blue) { + if (sensorModel != LibCameraJNI.SensorModel.OV9281) { + var success = LibCameraJNI.setAwbGain(red / 10.0, blue / 10.0); + if (!success) LibcameraGpuSource.logger.warn("Couldn't set Pi Camera AWB gains"); } - } - - logger.debug("Creating libcamera"); - if (!LibCameraJNI.createCamera( - mode.width, mode.height, (m_rotationMode == 
ImageRotationMode.DEG_180 ? 180 : 0))) { - logger.error("Couldn't create a zero copy Pi Camera while switching video modes"); - } - logger.debug("Starting libcamera"); - if (!LibCameraJNI.startCamera()) { - logger.error("Couldn't start a zero copy Pi Camera while switching video modes"); - } - - m_initialized = true; } - // We don't store last settings on the native side, and when you change video mode these get - // reset on MMAL's end - setExposure(lastManualExposure); - setAutoExposure(lastAutoExposureActive); - setBrightness(lastBrightness); - setGain(lastGain); - setAwbGain(lastAwbGains.getFirst(), lastAwbGains.getSecond()); + @Override + public FPSRatedVideoMode getCurrentVideoMode() { + return currentVideoMode; + } - LibCameraJNI.setFramesToCopy(true, true); + @Override + protected void setVideoModeInternal(VideoMode videoMode) { + var mode = (FPSRatedVideoMode) videoMode; + + // We need to make sure that other threads don't try to do anything funny while we're recreating + // the camera + synchronized (LibCameraJNI.CAMERA_LOCK) { + if (m_initialized) { + logger.debug("Stopping libcamera"); + if (!LibCameraJNI.stopCamera()) { + logger.error("Couldn't stop a zero copy Pi Camera while switching video modes"); + } + logger.debug("Destroying libcamera"); + if (!LibCameraJNI.destroyCamera()) { + logger.error("Couldn't destroy a zero copy Pi Camera while switching video modes"); + } + } + + logger.debug("Creating libcamera"); + if (!LibCameraJNI.createCamera( + mode.width, mode.height, (m_rotationMode == ImageRotationMode.DEG_180 ? 
180 : 0))) { + logger.error("Couldn't create a zero copy Pi Camera while switching video modes"); + } + logger.debug("Starting libcamera"); + if (!LibCameraJNI.startCamera()) { + logger.error("Couldn't start a zero copy Pi Camera while switching video modes"); + } + + m_initialized = true; + } - currentVideoMode = mode; - } + // We don't store last settings on the native side, and when you change video mode these get + // reset on MMAL's end + setExposure(lastManualExposure); + setAutoExposure(lastAutoExposureActive); + setBrightness(lastBrightness); + setGain(lastGain); + setAwbGain(lastAwbGains.getFirst(), lastAwbGains.getSecond()); - @Override - public HashMap getAllVideoModes() { - return videoModes; - } + LibCameraJNI.setFramesToCopy(true, true); + + currentVideoMode = mode; + } + + @Override + public HashMap getAllVideoModes() { + return videoModes; + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/camera/LibcameraGpuSource.java b/photon-core/src/main/java/org/photonvision/vision/camera/LibcameraGpuSource.java index 5ab1ecb312..7e4330e4d7 100644 --- a/photon-core/src/main/java/org/photonvision/vision/camera/LibcameraGpuSource.java +++ b/photon-core/src/main/java/org/photonvision/vision/camera/LibcameraGpuSource.java @@ -28,60 +28,60 @@ import org.photonvision.vision.processes.VisionSourceSettables; public class LibcameraGpuSource extends VisionSource { - static final Logger logger = new Logger(LibcameraGpuSource.class, LogGroup.Camera); + static final Logger logger = new Logger(LibcameraGpuSource.class, LogGroup.Camera); - private final LibcameraGpuSettables settables; - private final LibcameraGpuFrameProvider frameProvider; + private final LibcameraGpuSettables settables; + private final LibcameraGpuFrameProvider frameProvider; - public LibcameraGpuSource(CameraConfiguration configuration) { - super(configuration); - if (configuration.cameraType != CameraType.ZeroCopyPicam) { - throw new IllegalArgumentException( - 
"GPUAcceleratedPicamSource only accepts CameraConfigurations with type Picam"); - } + public LibcameraGpuSource(CameraConfiguration configuration) { + super(configuration); + if (configuration.cameraType != CameraType.ZeroCopyPicam) { + throw new IllegalArgumentException( + "GPUAcceleratedPicamSource only accepts CameraConfigurations with type Picam"); + } - settables = new LibcameraGpuSettables(configuration); - frameProvider = new LibcameraGpuFrameProvider(settables); - } + settables = new LibcameraGpuSettables(configuration); + frameProvider = new LibcameraGpuFrameProvider(settables); + } - @Override - public FrameProvider getFrameProvider() { - return frameProvider; - } + @Override + public FrameProvider getFrameProvider() { + return frameProvider; + } - @Override - public VisionSourceSettables getSettables() { - return settables; - } + @Override + public VisionSourceSettables getSettables() { + return settables; + } - /** - * On the OV5649 the actual FPS we want to request from the GPU can be higher than the FPS that we - * can do after processing. On the IMX219 these FPSes match pretty closely, except for the - * 1280x720 mode. We use this to present a rated FPS to the user that's lower than the actual FPS - * we request from the GPU. This is important for setting user expectations, and is also used by - * the frontend to detect and explain FPS drops. This class should ONLY be used by Picam video - * modes! This is to make sure it shows up nice in the frontend - */ - public static class FPSRatedVideoMode extends VideoMode { - public final int fpsActual; - public final double fovMultiplier; + /** + * On the OV5649 the actual FPS we want to request from the GPU can be higher than the FPS that we + * can do after processing. On the IMX219 these FPSes match pretty closely, except for the + * 1280x720 mode. We use this to present a rated FPS to the user that's lower than the actual FPS + * we request from the GPU. 
This is important for setting user expectations, and is also used by + * the frontend to detect and explain FPS drops. This class should ONLY be used by Picam video + * modes! This is to make sure it shows up nice in the frontend + */ + public static class FPSRatedVideoMode extends VideoMode { + public final int fpsActual; + public final double fovMultiplier; - public FPSRatedVideoMode( - PixelFormat pixelFormat, - int width, - int height, - int ratedFPS, - int actualFPS, - double fovMultiplier) { - super(pixelFormat, width, height, ratedFPS); + public FPSRatedVideoMode( + PixelFormat pixelFormat, + int width, + int height, + int ratedFPS, + int actualFPS, + double fovMultiplier) { + super(pixelFormat, width, height, ratedFPS); - this.fpsActual = actualFPS; - this.fovMultiplier = fovMultiplier; + this.fpsActual = actualFPS; + this.fovMultiplier = fovMultiplier; + } } - } - @Override - public boolean isVendorCamera() { - return ConfigManager.getInstance().getConfig().getHardwareConfig().hasPresetFOV(); - } + @Override + public boolean isVendorCamera() { + return ConfigManager.getInstance().getConfig().getHardwareConfig().hasPresetFOV(); + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/camera/QuirkyCamera.java b/photon-core/src/main/java/org/photonvision/vision/camera/QuirkyCamera.java index fa5bbb2d0b..da2dc835cb 100644 --- a/photon-core/src/main/java/org/photonvision/vision/camera/QuirkyCamera.java +++ b/photon-core/src/main/java/org/photonvision/vision/camera/QuirkyCamera.java @@ -22,115 +22,115 @@ import java.util.Objects; public class QuirkyCamera { - private static final List quirkyCameras = - List.of( - new QuirkyCamera( - 0x9331, - 0x5A3, - CameraQuirk.CompletelyBroken), // Chris's older generic "Logitec HD Webcam" - new QuirkyCamera(0x825, 0x46D, CameraQuirk.CompletelyBroken), // Logitec C270 - new QuirkyCamera( - 0x0bda, - 0x5510, - CameraQuirk.CompletelyBroken), // A laptop internal camera someone found broken - new QuirkyCamera( - -1, 
-1, "Snap Camera", CameraQuirk.CompletelyBroken), // SnapCamera on Windows - new QuirkyCamera( - -1, - -1, - "FaceTime HD Camera", - CameraQuirk.CompletelyBroken), // Mac Facetime Camera shared into Windows in Bootcamp - new QuirkyCamera(0x2000, 0x1415, CameraQuirk.Gain, CameraQuirk.FPSCap100), // PS3Eye - new QuirkyCamera( - -1, -1, "mmal service 16.1", CameraQuirk.PiCam), // PiCam (via V4L2, not zerocopy) - new QuirkyCamera(-1, -1, "unicam", CameraQuirk.PiCam), // PiCam (via V4L2, not zerocopy) - new QuirkyCamera(0x85B, 0x46D, CameraQuirk.AdjustableFocus) // Logitech C925-e - ); + private static final List quirkyCameras = + List.of( + new QuirkyCamera( + 0x9331, + 0x5A3, + CameraQuirk.CompletelyBroken), // Chris's older generic "Logitec HD Webcam" + new QuirkyCamera(0x825, 0x46D, CameraQuirk.CompletelyBroken), // Logitec C270 + new QuirkyCamera( + 0x0bda, + 0x5510, + CameraQuirk.CompletelyBroken), // A laptop internal camera someone found broken + new QuirkyCamera( + -1, -1, "Snap Camera", CameraQuirk.CompletelyBroken), // SnapCamera on Windows + new QuirkyCamera( + -1, + -1, + "FaceTime HD Camera", + CameraQuirk.CompletelyBroken), // Mac Facetime Camera shared into Windows in Bootcamp + new QuirkyCamera(0x2000, 0x1415, CameraQuirk.Gain, CameraQuirk.FPSCap100), // PS3Eye + new QuirkyCamera( + -1, -1, "mmal service 16.1", CameraQuirk.PiCam), // PiCam (via V4L2, not zerocopy) + new QuirkyCamera(-1, -1, "unicam", CameraQuirk.PiCam), // PiCam (via V4L2, not zerocopy) + new QuirkyCamera(0x85B, 0x46D, CameraQuirk.AdjustableFocus) // Logitech C925-e + ); - public static final QuirkyCamera DefaultCamera = new QuirkyCamera(0, 0, ""); - public static final QuirkyCamera ZeroCopyPiCamera = - new QuirkyCamera( - -1, - -1, - "mmal service 16.1", - CameraQuirk.PiCam, - CameraQuirk.Gain, - CameraQuirk.AWBGain); // PiCam (special zerocopy version) + public static final QuirkyCamera DefaultCamera = new QuirkyCamera(0, 0, ""); + public static final QuirkyCamera ZeroCopyPiCamera = + 
new QuirkyCamera( + -1, + -1, + "mmal service 16.1", + CameraQuirk.PiCam, + CameraQuirk.Gain, + CameraQuirk.AWBGain); // PiCam (special zerocopy version) - public final String baseName; - public final int usbVid; - public final int usbPid; - public final HashMap quirks; + public final String baseName; + public final int usbVid; + public final int usbPid; + public final HashMap quirks; - /** - * Creates a QuirkyCamera that matches by USB VID/PID - * - * @param usbVid USB VID of camera - * @param usbPid USB PID of camera - * @param quirks Camera quirks - */ - private QuirkyCamera(int usbVid, int usbPid, CameraQuirk... quirks) { - this(usbVid, usbPid, "", quirks); - } + /** + * Creates a QuirkyCamera that matches by USB VID/PID + * + * @param usbVid USB VID of camera + * @param usbPid USB PID of camera + * @param quirks Camera quirks + */ + private QuirkyCamera(int usbVid, int usbPid, CameraQuirk... quirks) { + this(usbVid, usbPid, "", quirks); + } - /** - * Creates a QuirkyCamera that matches by USB VID/PID and name - * - * @param usbVid USB VID of camera - * @param usbPid USB PID of camera - * @param baseName CSCore name of camera - * @param quirks Camera quirks - */ - private QuirkyCamera(int usbVid, int usbPid, String baseName, CameraQuirk... quirks) { - this.usbVid = usbVid; - this.usbPid = usbPid; - this.baseName = baseName; + /** + * Creates a QuirkyCamera that matches by USB VID/PID and name + * + * @param usbVid USB VID of camera + * @param usbPid USB PID of camera + * @param baseName CSCore name of camera + * @param quirks Camera quirks + */ + private QuirkyCamera(int usbVid, int usbPid, String baseName, CameraQuirk... 
quirks) { + this.usbVid = usbVid; + this.usbPid = usbPid; + this.baseName = baseName; - this.quirks = new HashMap<>(); - for (var q : quirks) { - this.quirks.put(q, true); - } - for (var q : CameraQuirk.values()) { - this.quirks.putIfAbsent(q, false); + this.quirks = new HashMap<>(); + for (var q : quirks) { + this.quirks.put(q, true); + } + for (var q : CameraQuirk.values()) { + this.quirks.putIfAbsent(q, false); + } } - } - public boolean hasQuirk(CameraQuirk quirk) { - return quirks.get(quirk); - } + public boolean hasQuirk(CameraQuirk quirk) { + return quirks.get(quirk); + } - public static QuirkyCamera getQuirkyCamera(int usbVid, int usbPid) { - return getQuirkyCamera(usbVid, usbPid, ""); - } + public static QuirkyCamera getQuirkyCamera(int usbVid, int usbPid) { + return getQuirkyCamera(usbVid, usbPid, ""); + } - public static QuirkyCamera getQuirkyCamera(int usbVid, int usbPid, String baseName) { - for (var qc : quirkyCameras) { - boolean hasBaseName = !qc.baseName.isEmpty(); - boolean matchesBaseName = qc.baseName.equals(baseName) || !hasBaseName; - if (qc.usbVid == usbVid && qc.usbPid == usbPid && matchesBaseName) { - return qc; - } + public static QuirkyCamera getQuirkyCamera(int usbVid, int usbPid, String baseName) { + for (var qc : quirkyCameras) { + boolean hasBaseName = !qc.baseName.isEmpty(); + boolean matchesBaseName = qc.baseName.equals(baseName) || !hasBaseName; + if (qc.usbVid == usbVid && qc.usbPid == usbPid && matchesBaseName) { + return qc; + } + } + return new QuirkyCamera(usbVid, usbPid, baseName); } - return new QuirkyCamera(usbVid, usbPid, baseName); - } - public boolean hasQuirks() { - return quirks.containsValue(true); - } + public boolean hasQuirks() { + return quirks.containsValue(true); + } - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - QuirkyCamera that = (QuirkyCamera) o; - return usbVid == that.usbVid - && usbPid == that.usbPid - && 
Objects.equals(baseName, that.baseName) - && Objects.equals(quirks, that.quirks); - } + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + QuirkyCamera that = (QuirkyCamera) o; + return usbVid == that.usbVid + && usbPid == that.usbPid + && Objects.equals(baseName, that.baseName) + && Objects.equals(quirks, that.quirks); + } - @Override - public int hashCode() { - return Objects.hash(usbVid, usbPid, baseName, quirks); - } + @Override + public int hashCode() { + return Objects.hash(usbVid, usbPid, baseName, quirks); + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/camera/USBCameraSource.java b/photon-core/src/main/java/org/photonvision/vision/camera/USBCameraSource.java index 69fbc18805..ad235da26a 100644 --- a/photon-core/src/main/java/org/photonvision/vision/camera/USBCameraSource.java +++ b/photon-core/src/main/java/org/photonvision/vision/camera/USBCameraSource.java @@ -34,324 +34,324 @@ import org.photonvision.vision.processes.VisionSourceSettables; public class USBCameraSource extends VisionSource { - private final Logger logger; - private final UsbCamera camera; - private final USBCameraSettables usbCameraSettables; - private final USBFrameProvider usbFrameProvider; - private final CvSink cvSink; + private final Logger logger; + private final UsbCamera camera; + private final USBCameraSettables usbCameraSettables; + private final USBFrameProvider usbFrameProvider; + private final CvSink cvSink; - public final QuirkyCamera cameraQuirks; + public final QuirkyCamera cameraQuirks; - public USBCameraSource(CameraConfiguration config) { - super(config); + public USBCameraSource(CameraConfiguration config) { + super(config); - logger = new Logger(USBCameraSource.class, config.nickname, LogGroup.Camera); - camera = new UsbCamera(config.nickname, config.path); - cvSink = CameraServer.getVideo(this.camera); + logger = new Logger(USBCameraSource.class, 
config.nickname, LogGroup.Camera); + camera = new UsbCamera(config.nickname, config.path); + cvSink = CameraServer.getVideo(this.camera); - cameraQuirks = - QuirkyCamera.getQuirkyCamera( - camera.getInfo().productId, camera.getInfo().vendorId, config.baseName); + cameraQuirks = + QuirkyCamera.getQuirkyCamera( + camera.getInfo().productId, camera.getInfo().vendorId, config.baseName); - if (cameraQuirks.hasQuirks()) { - logger.info("Quirky camera detected: " + cameraQuirks.baseName); - } - - if (cameraQuirks.hasQuirk(CameraQuirk.CompletelyBroken)) { - // set some defaults, as these should never be used. - logger.info("Camera " + cameraQuirks.baseName + " is not supported for PhotonVision"); - usbCameraSettables = null; - usbFrameProvider = null; - } else { - // Normal init - // auto exposure/brightness/gain will be set by the visionmodule later - disableAutoFocus(); - - usbCameraSettables = new USBCameraSettables(config); - if (usbCameraSettables.getAllVideoModes().isEmpty()) { - logger.info("Camera " + camera.getPath() + " has no video modes supported by PhotonVision"); - usbFrameProvider = null; - } else { - usbFrameProvider = new USBFrameProvider(cvSink, usbCameraSettables); - } - } - } - - void disableAutoFocus() { - if (cameraQuirks.hasQuirk(CameraQuirk.AdjustableFocus)) { - try { - camera.getProperty("focus_auto").set(0); - camera.getProperty("focus_absolute").set(0); // Focus into infinity - } catch (VideoException e) { - logger.error("Unable to disable autofocus!", e); - } - } - } - - @Override - public FrameProvider getFrameProvider() { - return usbFrameProvider; - } - - @Override - public VisionSourceSettables getSettables() { - return this.usbCameraSettables; - } - - public class USBCameraSettables extends VisionSourceSettables { - protected USBCameraSettables(CameraConfiguration configuration) { - super(configuration); - getAllVideoModes(); - setVideoMode(videoModes.get(0)); - } - - public void setAutoExposure(boolean cameraAutoExposure) { - 
logger.debug("Setting auto exposure to " + cameraAutoExposure); - - if (cameraQuirks.hasQuirk(CameraQuirk.PiCam)) { - // Case, we know this is a picam. Go through v4l2-ctl interface directly - - // Common settings - camera - .getProperty("image_stabilization") - .set(0); // No image stabilization, as this will throw off odometry - camera.getProperty("power_line_frequency").set(2); // Assume 60Hz USA - camera.getProperty("scene_mode").set(0); // no presets - camera.getProperty("exposure_metering_mode").set(0); - camera.getProperty("exposure_dynamic_framerate").set(0); - - if (!cameraAutoExposure) { - // Pick a bunch of reasonable setting defaults for vision processing retroreflective - camera.getProperty("auto_exposure_bias").set(0); - camera.getProperty("iso_sensitivity_auto").set(0); // Disable auto ISO adjustment - camera.getProperty("iso_sensitivity").set(0); // Manual ISO adjustment - camera.getProperty("white_balance_auto_preset").set(2); // Auto white-balance disabled - camera.getProperty("auto_exposure").set(1); // auto exposure disabled - } else { - // Pick a bunch of reasonable setting defaults for driver, fiducials, or otherwise - // nice-for-humans - camera.getProperty("auto_exposure_bias").set(12); - camera.getProperty("iso_sensitivity_auto").set(1); - camera.getProperty("iso_sensitivity").set(1); // Manual ISO adjustment by default - camera.getProperty("white_balance_auto_preset").set(1); // Auto white-balance enabled - camera.getProperty("auto_exposure").set(0); // auto exposure enabled + if (cameraQuirks.hasQuirks()) { + logger.info("Quirky camera detected: " + cameraQuirks.baseName); } - } else { - // Case - this is some other USB cam. 
Default to wpilib's implementation - - var canSetWhiteBalance = !cameraQuirks.hasQuirk(CameraQuirk.Gain); - - if (!cameraAutoExposure) { - // Pick a bunch of reasonable setting defaults for vision processing retroreflective - if (canSetWhiteBalance) { - camera.setWhiteBalanceManual(4000); // Auto white-balance disabled, 4000K preset - } + if (cameraQuirks.hasQuirk(CameraQuirk.CompletelyBroken)) { + // set some defaults, as these should never be used. + logger.info("Camera " + cameraQuirks.baseName + " is not supported for PhotonVision"); + usbCameraSettables = null; + usbFrameProvider = null; } else { - // Pick a bunch of reasonable setting defaults for driver, fiducials, or otherwise - // nice-for-humans - if (canSetWhiteBalance) { - camera.setWhiteBalanceAuto(); // Auto white-balance enabled - } - camera.setExposureAuto(); // auto exposure enabled + // Normal init + // auto exposure/brightness/gain will be set by the visionmodule later + disableAutoFocus(); + + usbCameraSettables = new USBCameraSettables(config); + if (usbCameraSettables.getAllVideoModes().isEmpty()) { + logger.info("Camera " + camera.getPath() + " has no video modes supported by PhotonVision"); + usbFrameProvider = null; + } else { + usbFrameProvider = new USBFrameProvider(cvSink, usbCameraSettables); + } } - } - } - - private int timeToPiCamRawExposure(double time_us) { - int retVal = - (int) - Math.round( - time_us / 100.0); // Pi Cam's (both v1 and v2) need exposure time in units of - // 100us/bit - return Math.min(Math.max(retVal, 1), 10000); // Cap to allowable range for parameter } - private double pctToExposureTimeUs(double pct_in) { - // Mirror the photonvision raspicam driver's algorithm for picking an exposure time - // from a 0-100% input - final double PADDING_LOW_US = 10; - final double PADDING_HIGH_US = 10; - return PADDING_LOW_US - + (pct_in / 100.0) * ((1e6 / (double) camera.getVideoMode().fps) - PADDING_HIGH_US); + void disableAutoFocus() { + if 
(cameraQuirks.hasQuirk(CameraQuirk.AdjustableFocus)) { + try { + camera.getProperty("focus_auto").set(0); + camera.getProperty("focus_absolute").set(0); // Focus into infinity + } catch (VideoException e) { + logger.error("Unable to disable autofocus!", e); + } + } } @Override - public void setExposure(double exposure) { - if (exposure >= 0.0) { - try { - int scaledExposure = 1; - if (cameraQuirks.hasQuirk(CameraQuirk.PiCam)) { - scaledExposure = Math.round(timeToPiCamRawExposure(pctToExposureTimeUs(exposure))); - logger.debug("Setting camera raw exposure to " + scaledExposure); - camera.getProperty("raw_exposure_time_absolute").set(scaledExposure); - camera.getProperty("raw_exposure_time_absolute").set(scaledExposure); - - } else { - scaledExposure = (int) Math.round(exposure); - logger.debug("Setting camera exposure to " + scaledExposure); - camera.setExposureManual(scaledExposure); - camera.setExposureManual(scaledExposure); - } - } catch (VideoException e) { - logger.error("Failed to set camera exposure!", e); - } - } + public FrameProvider getFrameProvider() { + return usbFrameProvider; } @Override - public void setBrightness(int brightness) { - try { - camera.setBrightness(brightness); - camera.setBrightness(brightness); - } catch (VideoException e) { - logger.error("Failed to set camera brightness!", e); - } + public VisionSourceSettables getSettables() { + return this.usbCameraSettables; } - @Override - public void setGain(int gain) { - try { - if (cameraQuirks.hasQuirk(CameraQuirk.Gain)) { - camera.getProperty("gain_automatic").set(0); - camera.getProperty("gain").set(gain); + public class USBCameraSettables extends VisionSourceSettables { + protected USBCameraSettables(CameraConfiguration configuration) { + super(configuration); + getAllVideoModes(); + setVideoMode(videoModes.get(0)); } - } catch (VideoException e) { - logger.error("Failed to set camera gain!", e); - } - } - @Override - public VideoMode getCurrentVideoMode() { - return 
camera.isConnected() ? camera.getVideoMode() : null; - } + public void setAutoExposure(boolean cameraAutoExposure) { + logger.debug("Setting auto exposure to " + cameraAutoExposure); - @Override - public void setVideoModeInternal(VideoMode videoMode) { - try { - if (videoMode == null) { - logger.error("Got a null video mode! Doing nothing..."); - return; + if (cameraQuirks.hasQuirk(CameraQuirk.PiCam)) { + // Case, we know this is a picam. Go through v4l2-ctl interface directly + + // Common settings + camera + .getProperty("image_stabilization") + .set(0); // No image stabilization, as this will throw off odometry + camera.getProperty("power_line_frequency").set(2); // Assume 60Hz USA + camera.getProperty("scene_mode").set(0); // no presets + camera.getProperty("exposure_metering_mode").set(0); + camera.getProperty("exposure_dynamic_framerate").set(0); + + if (!cameraAutoExposure) { + // Pick a bunch of reasonable setting defaults for vision processing retroreflective + camera.getProperty("auto_exposure_bias").set(0); + camera.getProperty("iso_sensitivity_auto").set(0); // Disable auto ISO adjustment + camera.getProperty("iso_sensitivity").set(0); // Manual ISO adjustment + camera.getProperty("white_balance_auto_preset").set(2); // Auto white-balance disabled + camera.getProperty("auto_exposure").set(1); // auto exposure disabled + } else { + // Pick a bunch of reasonable setting defaults for driver, fiducials, or otherwise + // nice-for-humans + camera.getProperty("auto_exposure_bias").set(12); + camera.getProperty("iso_sensitivity_auto").set(1); + camera.getProperty("iso_sensitivity").set(1); // Manual ISO adjustment by default + camera.getProperty("white_balance_auto_preset").set(1); // Auto white-balance enabled + camera.getProperty("auto_exposure").set(0); // auto exposure enabled + } + + } else { + // Case - this is some other USB cam. 
Default to wpilib's implementation + + var canSetWhiteBalance = !cameraQuirks.hasQuirk(CameraQuirk.Gain); + + if (!cameraAutoExposure) { + // Pick a bunch of reasonable setting defaults for vision processing retroreflective + if (canSetWhiteBalance) { + camera.setWhiteBalanceManual(4000); // Auto white-balance disabled, 4000K preset + } + } else { + // Pick a bunch of reasonable setting defaults for driver, fiducials, or otherwise + // nice-for-humans + if (canSetWhiteBalance) { + camera.setWhiteBalanceAuto(); // Auto white-balance enabled + } + camera.setExposureAuto(); // auto exposure enabled + } + } } - camera.setVideoMode(videoMode); - } catch (Exception e) { - logger.error("Failed to set video mode!", e); - } - } - @Override - public HashMap getAllVideoModes() { - if (videoModes == null) { - videoModes = new HashMap<>(); - List videoModesList = new ArrayList<>(); - try { - VideoMode[] modes; - if (cameraQuirks.hasQuirk(CameraQuirk.PiCam)) { - modes = - new VideoMode[] { - new VideoMode(VideoMode.PixelFormat.kBGR, 320, 240, 90), - new VideoMode(VideoMode.PixelFormat.kBGR, 320, 240, 30), - new VideoMode(VideoMode.PixelFormat.kBGR, 320, 240, 15), - new VideoMode(VideoMode.PixelFormat.kBGR, 320, 240, 10), - new VideoMode(VideoMode.PixelFormat.kBGR, 640, 480, 90), - new VideoMode(VideoMode.PixelFormat.kBGR, 640, 480, 45), - new VideoMode(VideoMode.PixelFormat.kBGR, 640, 480, 30), - new VideoMode(VideoMode.PixelFormat.kBGR, 640, 480, 15), - new VideoMode(VideoMode.PixelFormat.kBGR, 640, 480, 10), - new VideoMode(VideoMode.PixelFormat.kBGR, 960, 720, 60), - new VideoMode(VideoMode.PixelFormat.kBGR, 960, 720, 10), - new VideoMode(VideoMode.PixelFormat.kBGR, 1280, 720, 45), - new VideoMode(VideoMode.PixelFormat.kBGR, 1920, 1080, 20), - }; - } else { - modes = camera.enumerateVideoModes(); - } - for (VideoMode videoMode : modes) { - // Filter grey modes - if (videoMode.pixelFormat == VideoMode.PixelFormat.kGray - || videoMode.pixelFormat == 
VideoMode.PixelFormat.kUnknown) { - continue; + private int timeToPiCamRawExposure(double time_us) { + int retVal = + (int) + Math.round( + time_us / 100.0); // Pi Cam's (both v1 and v2) need exposure time in units of + // 100us/bit + return Math.min(Math.max(retVal, 1), 10000); // Cap to allowable range for parameter + } + + private double pctToExposureTimeUs(double pct_in) { + // Mirror the photonvision raspicam driver's algorithm for picking an exposure time + // from a 0-100% input + final double PADDING_LOW_US = 10; + final double PADDING_HIGH_US = 10; + return PADDING_LOW_US + + (pct_in / 100.0) * ((1e6 / (double) camera.getVideoMode().fps) - PADDING_HIGH_US); + } + + @Override + public void setExposure(double exposure) { + if (exposure >= 0.0) { + try { + int scaledExposure = 1; + if (cameraQuirks.hasQuirk(CameraQuirk.PiCam)) { + scaledExposure = Math.round(timeToPiCamRawExposure(pctToExposureTimeUs(exposure))); + logger.debug("Setting camera raw exposure to " + scaledExposure); + camera.getProperty("raw_exposure_time_absolute").set(scaledExposure); + camera.getProperty("raw_exposure_time_absolute").set(scaledExposure); + + } else { + scaledExposure = (int) Math.round(exposure); + logger.debug("Setting camera exposure to " + scaledExposure); + camera.setExposureManual(scaledExposure); + camera.setExposureManual(scaledExposure); + } + } catch (VideoException e) { + logger.error("Failed to set camera exposure!", e); + } } + } - // On picam, filter non-bgr modes for performance - if (cameraQuirks.hasQuirk(CameraQuirk.PiCam)) { - if (videoMode.pixelFormat != VideoMode.PixelFormat.kBGR) { - continue; - } + @Override + public void setBrightness(int brightness) { + try { + camera.setBrightness(brightness); + camera.setBrightness(brightness); + } catch (VideoException e) { + logger.error("Failed to set camera brightness!", e); } + } - if (cameraQuirks.hasQuirk(CameraQuirk.FPSCap100)) { - if (videoMode.fps > 100) { - continue; - } + @Override + public void 
setGain(int gain) { + try { + if (cameraQuirks.hasQuirk(CameraQuirk.Gain)) { + camera.getProperty("gain_automatic").set(0); + camera.getProperty("gain").set(gain); + } + } catch (VideoException e) { + logger.error("Failed to set camera gain!", e); } + } - videoModesList.add(videoMode); - - // TODO - do we want to trim down FPS modes? in cases where the camera has no gain - // control, - // lower FPS might be needed to ensure total exposure is acceptable. - // We look for modes with the same height/width/pixelformat as this mode - // and remove all the ones that are slower. This is sorted low to high. - // So we remove the last element (the fastest FPS) from the duplicate list, - // and remove all remaining elements from the final list - // var duplicateModes = - // videoModesList.stream() - // .filter( - // it -> - // it.height == videoMode.height - // && it.width == videoMode.width - // && it.pixelFormat == videoMode.pixelFormat) - // .sorted(Comparator.comparingDouble(it -> it.fps)) - // .collect(Collectors.toList()); - // duplicateModes.remove(duplicateModes.size() - 1); - // videoModesList.removeAll(duplicateModes); - } - } catch (Exception e) { - logger.error("Exception while enumerating video modes!", e); - videoModesList = List.of(); + @Override + public VideoMode getCurrentVideoMode() { + return camera.isConnected() ? camera.getVideoMode() : null; } - // Sort by resolution - var sortedList = - videoModesList.stream() - .sorted(((a, b) -> (b.width + b.height) - (a.width + a.height))) - .collect(Collectors.toList()); - Collections.reverse(sortedList); - - // On vendor cameras, respect blacklisted indices - var indexBlacklist = - ConfigManager.getInstance().getConfig().getHardwareConfig().blacklistedResIndices; - for (int badIdx : indexBlacklist) { - sortedList.remove(badIdx); + @Override + public void setVideoModeInternal(VideoMode videoMode) { + try { + if (videoMode == null) { + logger.error("Got a null video mode! 
Doing nothing..."); + return; + } + camera.setVideoMode(videoMode); + } catch (Exception e) { + logger.error("Failed to set video mode!", e); + } } - for (VideoMode videoMode : sortedList) { - videoModes.put(sortedList.indexOf(videoMode), videoMode); + @Override + public HashMap getAllVideoModes() { + if (videoModes == null) { + videoModes = new HashMap<>(); + List videoModesList = new ArrayList<>(); + try { + VideoMode[] modes; + if (cameraQuirks.hasQuirk(CameraQuirk.PiCam)) { + modes = + new VideoMode[] { + new VideoMode(VideoMode.PixelFormat.kBGR, 320, 240, 90), + new VideoMode(VideoMode.PixelFormat.kBGR, 320, 240, 30), + new VideoMode(VideoMode.PixelFormat.kBGR, 320, 240, 15), + new VideoMode(VideoMode.PixelFormat.kBGR, 320, 240, 10), + new VideoMode(VideoMode.PixelFormat.kBGR, 640, 480, 90), + new VideoMode(VideoMode.PixelFormat.kBGR, 640, 480, 45), + new VideoMode(VideoMode.PixelFormat.kBGR, 640, 480, 30), + new VideoMode(VideoMode.PixelFormat.kBGR, 640, 480, 15), + new VideoMode(VideoMode.PixelFormat.kBGR, 640, 480, 10), + new VideoMode(VideoMode.PixelFormat.kBGR, 960, 720, 60), + new VideoMode(VideoMode.PixelFormat.kBGR, 960, 720, 10), + new VideoMode(VideoMode.PixelFormat.kBGR, 1280, 720, 45), + new VideoMode(VideoMode.PixelFormat.kBGR, 1920, 1080, 20), + }; + } else { + modes = camera.enumerateVideoModes(); + } + for (VideoMode videoMode : modes) { + // Filter grey modes + if (videoMode.pixelFormat == VideoMode.PixelFormat.kGray + || videoMode.pixelFormat == VideoMode.PixelFormat.kUnknown) { + continue; + } + + // On picam, filter non-bgr modes for performance + if (cameraQuirks.hasQuirk(CameraQuirk.PiCam)) { + if (videoMode.pixelFormat != VideoMode.PixelFormat.kBGR) { + continue; + } + } + + if (cameraQuirks.hasQuirk(CameraQuirk.FPSCap100)) { + if (videoMode.fps > 100) { + continue; + } + } + + videoModesList.add(videoMode); + + // TODO - do we want to trim down FPS modes? 
in cases where the camera has no gain + // control, + // lower FPS might be needed to ensure total exposure is acceptable. + // We look for modes with the same height/width/pixelformat as this mode + // and remove all the ones that are slower. This is sorted low to high. + // So we remove the last element (the fastest FPS) from the duplicate list, + // and remove all remaining elements from the final list + // var duplicateModes = + // videoModesList.stream() + // .filter( + // it -> + // it.height == videoMode.height + // && it.width == videoMode.width + // && it.pixelFormat == videoMode.pixelFormat) + // .sorted(Comparator.comparingDouble(it -> it.fps)) + // .collect(Collectors.toList()); + // duplicateModes.remove(duplicateModes.size() - 1); + // videoModesList.removeAll(duplicateModes); + } + } catch (Exception e) { + logger.error("Exception while enumerating video modes!", e); + videoModesList = List.of(); + } + + // Sort by resolution + var sortedList = + videoModesList.stream() + .sorted(((a, b) -> (b.width + b.height) - (a.width + a.height))) + .collect(Collectors.toList()); + Collections.reverse(sortedList); + + // On vendor cameras, respect blacklisted indices + var indexBlacklist = + ConfigManager.getInstance().getConfig().getHardwareConfig().blacklistedResIndices; + for (int badIdx : indexBlacklist) { + sortedList.remove(badIdx); + } + + for (VideoMode videoMode : sortedList) { + videoModes.put(sortedList.indexOf(videoMode), videoMode); + } + } + return videoModes; } - } - return videoModes; } - } - - // TODO improve robustness of this detection - @Override - public boolean isVendorCamera() { - return ConfigManager.getInstance().getConfig().getHardwareConfig().hasPresetFOV() - && cameraQuirks.hasQuirk(CameraQuirk.PiCam); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - USBCameraSource that = (USBCameraSource) o; - return 
cameraQuirks.equals(that.cameraQuirks); - } - - @Override - public int hashCode() { - return Objects.hash( - camera, usbCameraSettables, usbFrameProvider, cameraConfiguration, cvSink, cameraQuirks); - } + + // TODO improve robustness of this detection + @Override + public boolean isVendorCamera() { + return ConfigManager.getInstance().getConfig().getHardwareConfig().hasPresetFOV() + && cameraQuirks.hasQuirk(CameraQuirk.PiCam); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + USBCameraSource that = (USBCameraSource) o; + return cameraQuirks.equals(that.cameraQuirks); + } + + @Override + public int hashCode() { + return Objects.hash( + camera, usbCameraSettables, usbFrameProvider, cameraConfiguration, cvSink, cameraQuirks); + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/frame/Frame.java b/photon-core/src/main/java/org/photonvision/vision/frame/Frame.java index b4f2988825..d4202f77a8 100644 --- a/photon-core/src/main/java/org/photonvision/vision/frame/Frame.java +++ b/photon-core/src/main/java/org/photonvision/vision/frame/Frame.java @@ -22,53 +22,53 @@ import org.photonvision.vision.opencv.Releasable; public class Frame implements Releasable { - public final long timestampNanos; + public final long timestampNanos; - // Frame should at _least_ contain the thresholded frame, and sometimes the color image - public final CVMat colorImage; - public final CVMat processedImage; - public final FrameThresholdType type; + // Frame should at _least_ contain the thresholded frame, and sometimes the color image + public final CVMat colorImage; + public final CVMat processedImage; + public final FrameThresholdType type; - public final FrameStaticProperties frameStaticProperties; + public final FrameStaticProperties frameStaticProperties; - public Frame( - CVMat color, - CVMat processed, - FrameThresholdType type, - long timestampNanos, - FrameStaticProperties 
frameStaticProperties) { - this.colorImage = color; - this.processedImage = processed; - this.type = type; - this.timestampNanos = timestampNanos; - this.frameStaticProperties = frameStaticProperties; - } + public Frame( + CVMat color, + CVMat processed, + FrameThresholdType type, + long timestampNanos, + FrameStaticProperties frameStaticProperties) { + this.colorImage = color; + this.processedImage = processed; + this.type = type; + this.timestampNanos = timestampNanos; + this.frameStaticProperties = frameStaticProperties; + } - public Frame( - CVMat color, - CVMat processed, - FrameThresholdType processType, - FrameStaticProperties frameStaticProperties) { - this(color, processed, processType, MathUtils.wpiNanoTime(), frameStaticProperties); - } + public Frame( + CVMat color, + CVMat processed, + FrameThresholdType processType, + FrameStaticProperties frameStaticProperties) { + this(color, processed, processType, MathUtils.wpiNanoTime(), frameStaticProperties); + } - public Frame() { - this( - new CVMat(), - new CVMat(), - FrameThresholdType.NONE, - MathUtils.wpiNanoTime(), - new FrameStaticProperties(0, 0, 0, null)); - } + public Frame() { + this( + new CVMat(), + new CVMat(), + FrameThresholdType.NONE, + MathUtils.wpiNanoTime(), + new FrameStaticProperties(0, 0, 0, null)); + } - public void copyTo(Frame destFrame) { - colorImage.getMat().copyTo(destFrame.colorImage.getMat()); - processedImage.getMat().copyTo(destFrame.processedImage.getMat()); - } + public void copyTo(Frame destFrame) { + colorImage.getMat().copyTo(destFrame.colorImage.getMat()); + processedImage.getMat().copyTo(destFrame.processedImage.getMat()); + } - @Override - public void release() { - colorImage.release(); - processedImage.release(); - } + @Override + public void release() { + colorImage.release(); + processedImage.release(); + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/frame/FrameDivisor.java 
b/photon-core/src/main/java/org/photonvision/vision/frame/FrameDivisor.java index 7d24df7993..b44ca78d10 100644 --- a/photon-core/src/main/java/org/photonvision/vision/frame/FrameDivisor.java +++ b/photon-core/src/main/java/org/photonvision/vision/frame/FrameDivisor.java @@ -18,14 +18,14 @@ package org.photonvision.vision.frame; public enum FrameDivisor { - NONE(1), - HALF(2), - QUARTER(4), - SIXTH(6); + NONE(1), + HALF(2), + QUARTER(4), + SIXTH(6); - public final Integer value; + public final Integer value; - FrameDivisor(int value) { - this.value = value; - } + FrameDivisor(int value) { + this.value = value; + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/frame/FrameProvider.java b/photon-core/src/main/java/org/photonvision/vision/frame/FrameProvider.java index 8121ab262e..70de9c5115 100644 --- a/photon-core/src/main/java/org/photonvision/vision/frame/FrameProvider.java +++ b/photon-core/src/main/java/org/photonvision/vision/frame/FrameProvider.java @@ -22,17 +22,17 @@ import org.photonvision.vision.pipe.impl.HSVPipe; public interface FrameProvider extends Supplier { - String getName(); + String getName(); - /** Ask the camera to produce a certain kind of processed image (e.g. HSV or greyscale) */ - void requestFrameThresholdType(FrameThresholdType type); + /** Ask the camera to produce a certain kind of processed image (e.g. HSV or greyscale) */ + void requestFrameThresholdType(FrameThresholdType type); - /** Ask the camera to rotate frames it outputs */ - void requestFrameRotation(ImageRotationMode rotationMode); + /** Ask the camera to rotate frames it outputs */ + void requestFrameRotation(ImageRotationMode rotationMode); - /** Ask the camera to provide either the input, output, or both frames. */ - void requestFrameCopies(boolean copyInput, boolean copyOutput); + /** Ask the camera to provide either the input, output, or both frames. 
*/ + void requestFrameCopies(boolean copyInput, boolean copyOutput); - /** Ask the camera to rotate frames it outputs */ - void requestHsvSettings(HSVPipe.HSVParams params); + /** Ask the camera to rotate frames it outputs */ + void requestHsvSettings(HSVPipe.HSVParams params); } diff --git a/photon-core/src/main/java/org/photonvision/vision/frame/FrameStaticProperties.java b/photon-core/src/main/java/org/photonvision/vision/frame/FrameStaticProperties.java index eb709f38eb..cedda04d22 100644 --- a/photon-core/src/main/java/org/photonvision/vision/frame/FrameStaticProperties.java +++ b/photon-core/src/main/java/org/photonvision/vision/frame/FrameStaticProperties.java @@ -24,84 +24,84 @@ /** Represents the properties of a frame. */ public class FrameStaticProperties { - public final int imageWidth; - public final int imageHeight; - public final double fov; - public final double imageArea; - public final double centerX; - public final double centerY; - public final Point centerPoint; - public final double horizontalFocalLength; - public final double verticalFocalLength; - public CameraCalibrationCoefficients cameraCalibration; + public final int imageWidth; + public final int imageHeight; + public final double fov; + public final double imageArea; + public final double centerX; + public final double centerY; + public final Point centerPoint; + public final double horizontalFocalLength; + public final double verticalFocalLength; + public CameraCalibrationCoefficients cameraCalibration; - /** - * Instantiates a new Frame static properties. - * - * @param mode The Video Mode of the camera. - * @param fov The FOV (Field Of Vision) of the image in degrees. - */ - public FrameStaticProperties(VideoMode mode, double fov, CameraCalibrationCoefficients cal) { - this(mode != null ? mode.width : 1, mode != null ? mode.height : 1, fov, cal); - } + /** + * Instantiates a new Frame static properties. + * + * @param mode The Video Mode of the camera. 
+ * @param fov The FOV (Field Of Vision) of the image in degrees. + */ + public FrameStaticProperties(VideoMode mode, double fov, CameraCalibrationCoefficients cal) { + this(mode != null ? mode.width : 1, mode != null ? mode.height : 1, fov, cal); + } - /** - * Instantiates a new Frame static properties. - * - * @param imageWidth The width of the image in pixels. - * @param imageHeight The width of the image in pixels. - * @param fov The FOV (Field Of Vision) of the image in degrees. - */ - public FrameStaticProperties( - int imageWidth, int imageHeight, double fov, CameraCalibrationCoefficients cal) { - this.imageWidth = imageWidth; - this.imageHeight = imageHeight; - this.fov = fov; - this.cameraCalibration = cal; + /** + * Instantiates a new Frame static properties. + * + * @param imageWidth The width of the image in pixels. + * @param imageHeight The width of the image in pixels. + * @param fov The FOV (Field Of Vision) of the image in degrees. + */ + public FrameStaticProperties( + int imageWidth, int imageHeight, double fov, CameraCalibrationCoefficients cal) { + this.imageWidth = imageWidth; + this.imageHeight = imageHeight; + this.fov = fov; + this.cameraCalibration = cal; - imageArea = this.imageWidth * this.imageHeight; + imageArea = this.imageWidth * this.imageHeight; - // pinhole model calculations - if (cameraCalibration != null && cameraCalibration.getCameraIntrinsicsMat() != null) { - // Use calibration data - var camIntrinsics = cameraCalibration.getCameraIntrinsicsMat(); - centerX = camIntrinsics.get(0, 2)[0]; - centerY = camIntrinsics.get(1, 2)[0]; - centerPoint = new Point(centerX, centerY); - horizontalFocalLength = camIntrinsics.get(0, 0)[0]; - verticalFocalLength = camIntrinsics.get(1, 1)[0]; - } else { - // No calibration data. 
Calculate from user provided diagonal FOV - centerX = (this.imageWidth / 2.0) - 0.5; - centerY = (this.imageHeight / 2.0) - 0.5; - centerPoint = new Point(centerX, centerY); + // pinhole model calculations + if (cameraCalibration != null && cameraCalibration.getCameraIntrinsicsMat() != null) { + // Use calibration data + var camIntrinsics = cameraCalibration.getCameraIntrinsicsMat(); + centerX = camIntrinsics.get(0, 2)[0]; + centerY = camIntrinsics.get(1, 2)[0]; + centerPoint = new Point(centerX, centerY); + horizontalFocalLength = camIntrinsics.get(0, 0)[0]; + verticalFocalLength = camIntrinsics.get(1, 1)[0]; + } else { + // No calibration data. Calculate from user provided diagonal FOV + centerX = (this.imageWidth / 2.0) - 0.5; + centerY = (this.imageHeight / 2.0) - 0.5; + centerPoint = new Point(centerX, centerY); - DoubleCouple horizVertViews = - calculateHorizontalVerticalFoV(this.fov, this.imageWidth, this.imageHeight); - double horizFOV = Math.toRadians(horizVertViews.getFirst()); - double vertFOV = Math.toRadians(horizVertViews.getSecond()); - horizontalFocalLength = (this.imageWidth / 2.0) / Math.tan(horizFOV / 2.0); - verticalFocalLength = (this.imageHeight / 2.0) / Math.tan(vertFOV / 2.0); + DoubleCouple horizVertViews = + calculateHorizontalVerticalFoV(this.fov, this.imageWidth, this.imageHeight); + double horizFOV = Math.toRadians(horizVertViews.getFirst()); + double vertFOV = Math.toRadians(horizVertViews.getSecond()); + horizontalFocalLength = (this.imageWidth / 2.0) / Math.tan(horizFOV / 2.0); + verticalFocalLength = (this.imageHeight / 2.0) / Math.tan(vertFOV / 2.0); + } } - } - /** - * Calculates the horizontal and vertical FOV components from a given diagonal FOV and image size. 
- * - * @param diagonalFoV Diagonal FOV in degrees - * @param imageWidth Image width in pixels - * @param imageHeight Image height in pixels - * @return Horizontal and vertical FOV in degrees - */ - public static DoubleCouple calculateHorizontalVerticalFoV( - double diagonalFoV, int imageWidth, int imageHeight) { - diagonalFoV = Math.toRadians(diagonalFoV); - double diagonalAspect = Math.hypot(imageWidth, imageHeight); + /** + * Calculates the horizontal and vertical FOV components from a given diagonal FOV and image size. + * + * @param diagonalFoV Diagonal FOV in degrees + * @param imageWidth Image width in pixels + * @param imageHeight Image height in pixels + * @return Horizontal and vertical FOV in degrees + */ + public static DoubleCouple calculateHorizontalVerticalFoV( + double diagonalFoV, int imageWidth, int imageHeight) { + diagonalFoV = Math.toRadians(diagonalFoV); + double diagonalAspect = Math.hypot(imageWidth, imageHeight); - double horizontalView = - Math.atan(Math.tan(diagonalFoV / 2) * (imageWidth / diagonalAspect)) * 2; - double verticalView = Math.atan(Math.tan(diagonalFoV / 2) * (imageHeight / diagonalAspect)) * 2; + double horizontalView = + Math.atan(Math.tan(diagonalFoV / 2) * (imageWidth / diagonalAspect)) * 2; + double verticalView = Math.atan(Math.tan(diagonalFoV / 2) * (imageHeight / diagonalAspect)) * 2; - return new DoubleCouple(Math.toDegrees(horizontalView), Math.toDegrees(verticalView)); - } + return new DoubleCouple(Math.toDegrees(horizontalView), Math.toDegrees(verticalView)); + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/frame/FrameThresholdType.java b/photon-core/src/main/java/org/photonvision/vision/frame/FrameThresholdType.java index 3a85b675f2..4e779f3c02 100644 --- a/photon-core/src/main/java/org/photonvision/vision/frame/FrameThresholdType.java +++ b/photon-core/src/main/java/org/photonvision/vision/frame/FrameThresholdType.java @@ -18,7 +18,7 @@ package org.photonvision.vision.frame; public enum 
FrameThresholdType { - NONE, - HSV, - GREYSCALE, + NONE, + HSV, + GREYSCALE, } diff --git a/photon-core/src/main/java/org/photonvision/vision/frame/consumer/DummyFrameConsumer.java b/photon-core/src/main/java/org/photonvision/vision/frame/consumer/DummyFrameConsumer.java index 12f06b837c..1e9449a355 100644 --- a/photon-core/src/main/java/org/photonvision/vision/frame/consumer/DummyFrameConsumer.java +++ b/photon-core/src/main/java/org/photonvision/vision/frame/consumer/DummyFrameConsumer.java @@ -21,8 +21,8 @@ import org.photonvision.vision.frame.FrameConsumer; public class DummyFrameConsumer implements FrameConsumer { - @Override - public void accept(Frame frame) { - frame.release(); // lol ez - } + @Override + public void accept(Frame frame) { + frame.release(); // lol ez + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/frame/consumer/FileSaveFrameConsumer.java b/photon-core/src/main/java/org/photonvision/vision/frame/consumer/FileSaveFrameConsumer.java index 326e38a776..57bc312b2f 100644 --- a/photon-core/src/main/java/org/photonvision/vision/frame/consumer/FileSaveFrameConsumer.java +++ b/photon-core/src/main/java/org/photonvision/vision/frame/consumer/FileSaveFrameConsumer.java @@ -32,78 +32,78 @@ import org.photonvision.vision.opencv.CVMat; public class FileSaveFrameConsumer implements Consumer { - private final Logger logger = new Logger(FileSaveFrameConsumer.class, LogGroup.General); + private final Logger logger = new Logger(FileSaveFrameConsumer.class, LogGroup.General); - // Formatters to generate unique, timestamped file names - private static final String FILE_PATH = ConfigManager.getInstance().getImageSavePath().toString(); - private static final String FILE_EXTENSION = ".jpg"; - private static final String NT_SUFFIX = "SaveImgCmd"; + // Formatters to generate unique, timestamped file names + private static final String FILE_PATH = ConfigManager.getInstance().getImageSavePath().toString(); + private static final String 
FILE_EXTENSION = ".jpg"; + private static final String NT_SUFFIX = "SaveImgCmd"; - DateFormat df = new SimpleDateFormat("yyyy-MM-dd"); - DateFormat tf = new SimpleDateFormat("hhmmssSS"); + DateFormat df = new SimpleDateFormat("yyyy-MM-dd"); + DateFormat tf = new SimpleDateFormat("hhmmssSS"); - private final NetworkTable rootTable; - private NetworkTable subTable; - private final String ntEntryName; - private IntegerEntry saveFrameEntry; + private final NetworkTable rootTable; + private NetworkTable subTable; + private final String ntEntryName; + private IntegerEntry saveFrameEntry; - private String cameraNickname; - private final String streamType; + private String cameraNickname; + private final String streamType; - private long savedImagesCount = 0; + private long savedImagesCount = 0; - public FileSaveFrameConsumer(String camNickname, String streamPrefix) { - this.ntEntryName = streamPrefix + NT_SUFFIX; - this.cameraNickname = camNickname; - this.streamType = streamPrefix; + public FileSaveFrameConsumer(String camNickname, String streamPrefix) { + this.ntEntryName = streamPrefix + NT_SUFFIX; + this.cameraNickname = camNickname; + this.streamType = streamPrefix; - this.rootTable = NetworkTablesManager.getInstance().kRootTable; - updateCameraNickname(camNickname); - } + this.rootTable = NetworkTablesManager.getInstance().kRootTable; + updateCameraNickname(camNickname); + } - public void accept(CVMat image) { - if (image != null && image.getMat() != null && !image.getMat().empty()) { - long currentCount = saveFrameEntry.get(); + public void accept(CVMat image) { + if (image != null && image.getMat() != null && !image.getMat().empty()) { + long currentCount = saveFrameEntry.get(); - // Await save request - if (currentCount == -1) return; + // Await save request + if (currentCount == -1) return; - // The requested count is greater than the actual count - if (savedImagesCount < currentCount) { - Date now = new Date(); + // The requested count is greater than the 
actual count + if (savedImagesCount < currentCount) { + Date now = new Date(); - String fileName = - cameraNickname + "_" + streamType + "_" + df.format(now) + "T" + tf.format(now); - String saveFilePath = FILE_PATH + File.separator + fileName + FILE_EXTENSION; + String fileName = + cameraNickname + "_" + streamType + "_" + df.format(now) + "T" + tf.format(now); + String saveFilePath = FILE_PATH + File.separator + fileName + FILE_EXTENSION; - Imgcodecs.imwrite(saveFilePath, image.getMat()); + Imgcodecs.imwrite(saveFilePath, image.getMat()); - savedImagesCount++; - logger.info("Saved new image at " + saveFilePath); - } else if (savedImagesCount > currentCount) { - // Reset local value with NT value in case of de-sync - savedImagesCount = currentCount; - } + savedImagesCount++; + logger.info("Saved new image at " + saveFilePath); + } else if (savedImagesCount > currentCount) { + // Reset local value with NT value in case of de-sync + savedImagesCount = currentCount; + } + } } - } - - public void updateCameraNickname(String newCameraNickname) { - // Remove existing entries - if (this.subTable != null) { - if (this.subTable.containsKey(ntEntryName)) { - this.subTable.getEntry(ntEntryName).close(); - } + + public void updateCameraNickname(String newCameraNickname) { + // Remove existing entries + if (this.subTable != null) { + if (this.subTable.containsKey(ntEntryName)) { + this.subTable.getEntry(ntEntryName).close(); + } + } + + // Recreate and re-init network tables structure + this.cameraNickname = newCameraNickname; + this.subTable = rootTable.getSubTable(this.cameraNickname); + this.subTable.getEntry(ntEntryName).setInteger(savedImagesCount); + this.saveFrameEntry = subTable.getIntegerTopic(ntEntryName).getEntry(-1); // Default negative } - // Recreate and re-init network tables structure - this.cameraNickname = newCameraNickname; - this.subTable = rootTable.getSubTable(this.cameraNickname); - this.subTable.getEntry(ntEntryName).setInteger(savedImagesCount); - 
this.saveFrameEntry = subTable.getIntegerTopic(ntEntryName).getEntry(-1); // Default negative - } - - public void overrideTakeSnapshot() { - // Simulate NT change - saveFrameEntry.set(saveFrameEntry.get() + 1); - } + public void overrideTakeSnapshot() { + // Simulate NT change + saveFrameEntry.set(saveFrameEntry.get() + 1); + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/frame/consumer/MJPGFrameConsumer.java b/photon-core/src/main/java/org/photonvision/vision/frame/consumer/MJPGFrameConsumer.java index be40d3b0f8..06f9bf986b 100644 --- a/photon-core/src/main/java/org/photonvision/vision/frame/consumer/MJPGFrameConsumer.java +++ b/photon-core/src/main/java/org/photonvision/vision/frame/consumer/MJPGFrameConsumer.java @@ -31,215 +31,215 @@ import org.photonvision.vision.opencv.CVMat; public class MJPGFrameConsumer { - public static final Mat EMPTY_MAT = new Mat(60, 15 * 7, CvType.CV_8UC3); - private static final double EMPTY_FRAMERATE = 2; - private long lastEmptyTime; - - static { - EMPTY_MAT.setTo(ColorHelper.colorToScalar(Color.BLACK)); - var col = 0; - Imgproc.rectangle( - EMPTY_MAT, - new Rect(col, 0, 15, EMPTY_MAT.height()), - ColorHelper.colorToScalar(new Color(0xa2a2a2)), - -1); - col += 15; - Imgproc.rectangle( - EMPTY_MAT, - new Rect(col, 0, 15, EMPTY_MAT.height()), - ColorHelper.colorToScalar(new Color(0xa2a300)), - -1); - col += 15; - Imgproc.rectangle( - EMPTY_MAT, - new Rect(col, 0, 15, EMPTY_MAT.height()), - ColorHelper.colorToScalar(new Color(0x00a3a2)), - -1); - col += 15; - Imgproc.rectangle( - EMPTY_MAT, - new Rect(col, 0, 15, EMPTY_MAT.height()), - ColorHelper.colorToScalar(new Color(0x00a200)), - -1); - col += 15; - Imgproc.rectangle( - EMPTY_MAT, - new Rect(col, 0, 15, EMPTY_MAT.height()), - ColorHelper.colorToScalar(new Color(0x440045)), - -1); - col += 15; - Imgproc.rectangle( - EMPTY_MAT, - new Rect(col, 0, 15, EMPTY_MAT.height()), - ColorHelper.colorToScalar(new Color(0x0000a2)), - -1); - col += 15; - Imgproc.rectangle( 
- EMPTY_MAT, - new Rect(col, 0, 15, EMPTY_MAT.height()), - ColorHelper.colorToScalar(new Color(0)), - -1); - Imgproc.rectangle( - EMPTY_MAT, - new Rect(0, 50, EMPTY_MAT.width(), 10), - ColorHelper.colorToScalar(new Color(0)), - -1); - Imgproc.rectangle( - EMPTY_MAT, new Rect(15, 50, 30, 10), ColorHelper.colorToScalar(Color.WHITE), -1); - - Imgproc.putText( - EMPTY_MAT, "Stream", new Point(14, 20), 0, 0.6, ColorHelper.colorToScalar(Color.white), 2); - Imgproc.putText( - EMPTY_MAT, - "Disabled", - new Point(14, 45), - 0, - 0.6, - ColorHelper.colorToScalar(Color.white), - 2); - Imgproc.putText( - EMPTY_MAT, "Stream", new Point(14, 20), 0, 0.6, ColorHelper.colorToScalar(Color.RED), 1); - Imgproc.putText( - EMPTY_MAT, "Disabled", new Point(14, 45), 0, 0.6, ColorHelper.colorToScalar(Color.RED), 1); - } - - private CvSource cvSource; - private MjpegServer mjpegServer; - - private VideoListener listener; - - private final NetworkTable table; - boolean isDisabled = false; - - public MJPGFrameConsumer(String sourceName, int width, int height, int port) { - this.cvSource = new CvSource(sourceName, VideoMode.PixelFormat.kMJPEG, width, height, 30); - this.table = - NetworkTableInstance.getDefault().getTable("/CameraPublisher").getSubTable(sourceName); - - this.mjpegServer = new MjpegServer("serve_" + cvSource.getName(), port); - mjpegServer.setSource(cvSource); - mjpegServer.setCompression(75); - - listener = - new VideoListener( - event -> { - if (event.kind == VideoEvent.Kind.kNetworkInterfacesChanged) { - table.getEntry("source").setString("cv:"); - table.getEntry("streams"); - table.getEntry("connected").setBoolean(true); - table.getEntry("mode").setString(videoModeToString(cvSource.getVideoMode())); - table.getEntry("modes").setStringArray(getSourceModeValues(cvSource.getHandle())); - updateStreamValues(); - } - }, - 0x4fff, - true); - } - - private synchronized void updateStreamValues() { - // Get port - int port = mjpegServer.getPort(); - - // Generate values - var 
addresses = CameraServerJNI.getNetworkInterfaces(); - ArrayList values = new ArrayList<>(addresses.length + 1); - String listenAddress = CameraServerJNI.getMjpegServerListenAddress(mjpegServer.getHandle()); - if (!listenAddress.isEmpty()) { - // If a listen address is specified, only use that - values.add(makeStreamValue(listenAddress, port)); - } else { - // Otherwise generate for hostname and all interface addresses - values.add(makeStreamValue(CameraServerJNI.getHostname() + ".local", port)); - for (String addr : addresses) { - if ("127.0.0.1".equals(addr)) { - continue; // ignore localhost + public static final Mat EMPTY_MAT = new Mat(60, 15 * 7, CvType.CV_8UC3); + private static final double EMPTY_FRAMERATE = 2; + private long lastEmptyTime; + + static { + EMPTY_MAT.setTo(ColorHelper.colorToScalar(Color.BLACK)); + var col = 0; + Imgproc.rectangle( + EMPTY_MAT, + new Rect(col, 0, 15, EMPTY_MAT.height()), + ColorHelper.colorToScalar(new Color(0xa2a2a2)), + -1); + col += 15; + Imgproc.rectangle( + EMPTY_MAT, + new Rect(col, 0, 15, EMPTY_MAT.height()), + ColorHelper.colorToScalar(new Color(0xa2a300)), + -1); + col += 15; + Imgproc.rectangle( + EMPTY_MAT, + new Rect(col, 0, 15, EMPTY_MAT.height()), + ColorHelper.colorToScalar(new Color(0x00a3a2)), + -1); + col += 15; + Imgproc.rectangle( + EMPTY_MAT, + new Rect(col, 0, 15, EMPTY_MAT.height()), + ColorHelper.colorToScalar(new Color(0x00a200)), + -1); + col += 15; + Imgproc.rectangle( + EMPTY_MAT, + new Rect(col, 0, 15, EMPTY_MAT.height()), + ColorHelper.colorToScalar(new Color(0x440045)), + -1); + col += 15; + Imgproc.rectangle( + EMPTY_MAT, + new Rect(col, 0, 15, EMPTY_MAT.height()), + ColorHelper.colorToScalar(new Color(0x0000a2)), + -1); + col += 15; + Imgproc.rectangle( + EMPTY_MAT, + new Rect(col, 0, 15, EMPTY_MAT.height()), + ColorHelper.colorToScalar(new Color(0)), + -1); + Imgproc.rectangle( + EMPTY_MAT, + new Rect(0, 50, EMPTY_MAT.width(), 10), + ColorHelper.colorToScalar(new Color(0)), + -1); + 
Imgproc.rectangle( + EMPTY_MAT, new Rect(15, 50, 30, 10), ColorHelper.colorToScalar(Color.WHITE), -1); + + Imgproc.putText( + EMPTY_MAT, "Stream", new Point(14, 20), 0, 0.6, ColorHelper.colorToScalar(Color.white), 2); + Imgproc.putText( + EMPTY_MAT, + "Disabled", + new Point(14, 45), + 0, + 0.6, + ColorHelper.colorToScalar(Color.white), + 2); + Imgproc.putText( + EMPTY_MAT, "Stream", new Point(14, 20), 0, 0.6, ColorHelper.colorToScalar(Color.RED), 1); + Imgproc.putText( + EMPTY_MAT, "Disabled", new Point(14, 45), 0, 0.6, ColorHelper.colorToScalar(Color.RED), 1); + } + + private CvSource cvSource; + private MjpegServer mjpegServer; + + private VideoListener listener; + + private final NetworkTable table; + boolean isDisabled = false; + + public MJPGFrameConsumer(String sourceName, int width, int height, int port) { + this.cvSource = new CvSource(sourceName, VideoMode.PixelFormat.kMJPEG, width, height, 30); + this.table = + NetworkTableInstance.getDefault().getTable("/CameraPublisher").getSubTable(sourceName); + + this.mjpegServer = new MjpegServer("serve_" + cvSource.getName(), port); + mjpegServer.setSource(cvSource); + mjpegServer.setCompression(75); + + listener = + new VideoListener( + event -> { + if (event.kind == VideoEvent.Kind.kNetworkInterfacesChanged) { + table.getEntry("source").setString("cv:"); + table.getEntry("streams"); + table.getEntry("connected").setBoolean(true); + table.getEntry("mode").setString(videoModeToString(cvSource.getVideoMode())); + table.getEntry("modes").setStringArray(getSourceModeValues(cvSource.getHandle())); + updateStreamValues(); + } + }, + 0x4fff, + true); + } + + private synchronized void updateStreamValues() { + // Get port + int port = mjpegServer.getPort(); + + // Generate values + var addresses = CameraServerJNI.getNetworkInterfaces(); + ArrayList values = new ArrayList<>(addresses.length + 1); + String listenAddress = CameraServerJNI.getMjpegServerListenAddress(mjpegServer.getHandle()); + if (!listenAddress.isEmpty()) { 
+ // If a listen address is specified, only use that + values.add(makeStreamValue(listenAddress, port)); + } else { + // Otherwise generate for hostname and all interface addresses + values.add(makeStreamValue(CameraServerJNI.getHostname() + ".local", port)); + for (String addr : addresses) { + if ("127.0.0.1".equals(addr)) { + continue; // ignore localhost + } + values.add(makeStreamValue(addr, port)); + } } - values.add(makeStreamValue(addr, port)); - } + + String[] streamAddresses = values.toArray(new String[0]); + table.getEntry("streams").setStringArray(streamAddresses); + } + + public MJPGFrameConsumer(String name, int port) { + this(name, 320, 240, port); } - String[] streamAddresses = values.toArray(new String[0]); - table.getEntry("streams").setStringArray(streamAddresses); - } + public void accept(CVMat image) { + if (image != null && !image.getMat().empty()) { + cvSource.putFrame(image.getMat()); - public MJPGFrameConsumer(String name, int port) { - this(name, 320, 240, port); - } + // Make sure our disabled framerate limiting doesn't get confused + isDisabled = false; + lastEmptyTime = 0; + } + } - public void accept(CVMat image) { - if (image != null && !image.getMat().empty()) { - cvSource.putFrame(image.getMat()); + public void disabledTick() { + if (!isDisabled) { + cvSource.setVideoMode(VideoMode.PixelFormat.kMJPEG, EMPTY_MAT.width(), EMPTY_MAT.height(), 0); + isDisabled = true; + } - // Make sure our disabled framerate limiting doesn't get confused - isDisabled = false; - lastEmptyTime = 0; + if (System.currentTimeMillis() - lastEmptyTime > 1000.0 / EMPTY_FRAMERATE) { + cvSource.putFrame(EMPTY_MAT); + lastEmptyTime = System.currentTimeMillis(); + } } - } - public void disabledTick() { - if (!isDisabled) { - cvSource.setVideoMode(VideoMode.PixelFormat.kMJPEG, EMPTY_MAT.width(), EMPTY_MAT.height(), 0); - isDisabled = true; + public int getCurrentStreamPort() { + return mjpegServer.getPort(); } - if (System.currentTimeMillis() - lastEmptyTime > 
1000.0 / EMPTY_FRAMERATE) { - cvSource.putFrame(EMPTY_MAT); - lastEmptyTime = System.currentTimeMillis(); + private static String makeStreamValue(String address, int port) { + return "mjpg:http://" + address + ":" + port + "/?action=stream"; } - } - public int getCurrentStreamPort() { - return mjpegServer.getPort(); - } + private static String[] getSourceModeValues(int sourceHandle) { + VideoMode[] modes = CameraServerJNI.enumerateSourceVideoModes(sourceHandle); + String[] modeStrings = new String[modes.length]; + for (int i = 0; i < modes.length; i++) { + modeStrings[i] = videoModeToString(modes[i]); + } + return modeStrings; + } - private static String makeStreamValue(String address, int port) { - return "mjpg:http://" + address + ":" + port + "/?action=stream"; - } + private static String videoModeToString(VideoMode mode) { + return mode.width + + "x" + + mode.height + + " " + + pixelFormatToString(mode.pixelFormat) + + " " + + mode.fps + + " fps"; + } - private static String[] getSourceModeValues(int sourceHandle) { - VideoMode[] modes = CameraServerJNI.enumerateSourceVideoModes(sourceHandle); - String[] modeStrings = new String[modes.length]; - for (int i = 0; i < modes.length; i++) { - modeStrings[i] = videoModeToString(modes[i]); + private static String pixelFormatToString(VideoMode.PixelFormat pixelFormat) { + switch (pixelFormat) { + case kMJPEG: + return "MJPEG"; + case kYUYV: + return "YUYV"; + case kRGB565: + return "RGB565"; + case kBGR: + return "BGR"; + case kGray: + return "Gray"; + default: + return "Unknown"; + } } - return modeStrings; - } - - private static String videoModeToString(VideoMode mode) { - return mode.width - + "x" - + mode.height - + " " - + pixelFormatToString(mode.pixelFormat) - + " " - + mode.fps - + " fps"; - } - - private static String pixelFormatToString(VideoMode.PixelFormat pixelFormat) { - switch (pixelFormat) { - case kMJPEG: - return "MJPEG"; - case kYUYV: - return "YUYV"; - case kRGB565: - return "RGB565"; - case kBGR: - 
return "BGR"; - case kGray: - return "Gray"; - default: - return "Unknown"; + + public void close() { + table.getEntry("connected").setBoolean(false); + mjpegServer.close(); + cvSource.close(); + listener.close(); + mjpegServer = null; + cvSource = null; + listener = null; } - } - - public void close() { - table.getEntry("connected").setBoolean(false); - mjpegServer.close(); - cvSource.close(); - listener.close(); - mjpegServer = null; - cvSource = null; - listener = null; - } } diff --git a/photon-core/src/main/java/org/photonvision/vision/frame/provider/CpuImageProcessor.java b/photon-core/src/main/java/org/photonvision/vision/frame/provider/CpuImageProcessor.java index c84284306e..034ae6615e 100644 --- a/photon-core/src/main/java/org/photonvision/vision/frame/provider/CpuImageProcessor.java +++ b/photon-core/src/main/java/org/photonvision/vision/frame/provider/CpuImageProcessor.java @@ -30,94 +30,94 @@ import org.photonvision.vision.pipe.impl.RotateImagePipe; public abstract class CpuImageProcessor implements FrameProvider { - protected static class CapturedFrame { - CVMat colorImage; - FrameStaticProperties staticProps; - long captureTimestamp; - - public CapturedFrame( - CVMat colorImage, FrameStaticProperties staticProps, long captureTimestampNanos) { - this.colorImage = colorImage; - this.staticProps = staticProps; - this.captureTimestamp = captureTimestampNanos; + protected static class CapturedFrame { + CVMat colorImage; + FrameStaticProperties staticProps; + long captureTimestamp; + + public CapturedFrame( + CVMat colorImage, FrameStaticProperties staticProps, long captureTimestampNanos) { + this.colorImage = colorImage; + this.staticProps = staticProps; + this.captureTimestamp = captureTimestampNanos; + } } - } - private final HSVPipe m_hsvPipe = new HSVPipe(); - private final RotateImagePipe m_rImagePipe = new RotateImagePipe(); - private final GrayscalePipe m_grayPipe = new GrayscalePipe(); - FrameThresholdType m_processType; + private final HSVPipe 
m_hsvPipe = new HSVPipe(); + private final RotateImagePipe m_rImagePipe = new RotateImagePipe(); + private final GrayscalePipe m_grayPipe = new GrayscalePipe(); + FrameThresholdType m_processType; - private final Object m_mutex = new Object(); + private final Object m_mutex = new Object(); - abstract CapturedFrame getInputMat(); + abstract CapturedFrame getInputMat(); - public CpuImageProcessor() { - m_hsvPipe.setParams( - new HSVPipe.HSVParams( - new IntegerCouple(0, 180), - new IntegerCouple(0, 255), - new IntegerCouple(0, 255), - false)); - } - - @Override - public final Frame get() { - // TODO Auto-generated method stub - var input = getInputMat(); - - CVMat outputMat = null; - long sumNanos = 0; - - { - CVPipeResult out = m_rImagePipe.run(input.colorImage.getMat()); - sumNanos += out.nanosElapsed; + public CpuImageProcessor() { + m_hsvPipe.setParams( + new HSVPipe.HSVParams( + new IntegerCouple(0, 180), + new IntegerCouple(0, 255), + new IntegerCouple(0, 255), + false)); } - if (!input.colorImage.getMat().empty()) { - if (m_processType == FrameThresholdType.HSV) { - var hsvResult = m_hsvPipe.run(input.colorImage.getMat()); - outputMat = new CVMat(hsvResult.output); - sumNanos += hsvResult.nanosElapsed; - } else if (m_processType == FrameThresholdType.GREYSCALE) { - var result = m_grayPipe.run(input.colorImage.getMat()); - outputMat = new CVMat(result.output); - sumNanos += result.nanosElapsed; - } else { - outputMat = new CVMat(); - } - } else { - System.out.println("Input was empty!"); - outputMat = new CVMat(); + @Override + public final Frame get() { + // TODO Auto-generated method stub + var input = getInputMat(); + + CVMat outputMat = null; + long sumNanos = 0; + + { + CVPipeResult out = m_rImagePipe.run(input.colorImage.getMat()); + sumNanos += out.nanosElapsed; + } + + if (!input.colorImage.getMat().empty()) { + if (m_processType == FrameThresholdType.HSV) { + var hsvResult = m_hsvPipe.run(input.colorImage.getMat()); + outputMat = new 
CVMat(hsvResult.output); + sumNanos += hsvResult.nanosElapsed; + } else if (m_processType == FrameThresholdType.GREYSCALE) { + var result = m_grayPipe.run(input.colorImage.getMat()); + outputMat = new CVMat(result.output); + sumNanos += result.nanosElapsed; + } else { + outputMat = new CVMat(); + } + } else { + System.out.println("Input was empty!"); + outputMat = new CVMat(); + } + + return new Frame( + input.colorImage, outputMat, m_processType, input.captureTimestamp, input.staticProps); } - return new Frame( - input.colorImage, outputMat, m_processType, input.captureTimestamp, input.staticProps); - } - - @Override - public void requestFrameThresholdType(FrameThresholdType type) { - synchronized (m_mutex) { - this.m_processType = type; + @Override + public void requestFrameThresholdType(FrameThresholdType type) { + synchronized (m_mutex) { + this.m_processType = type; + } } - } - @Override - public void requestFrameRotation(ImageRotationMode rotationMode) { - synchronized (m_mutex) { - m_rImagePipe.setParams(new RotateImagePipe.RotateImageParams(rotationMode)); + @Override + public void requestFrameRotation(ImageRotationMode rotationMode) { + synchronized (m_mutex) { + m_rImagePipe.setParams(new RotateImagePipe.RotateImageParams(rotationMode)); + } } - } - /** Ask the camera to rotate frames it outputs */ - public void requestHsvSettings(HSVPipe.HSVParams params) { - synchronized (m_mutex) { - m_hsvPipe.setParams(params); + /** Ask the camera to rotate frames it outputs */ + public void requestHsvSettings(HSVPipe.HSVParams params) { + synchronized (m_mutex) { + m_hsvPipe.setParams(params); + } } - } - @Override - public void requestFrameCopies(boolean copyInput, boolean copyOutput) { - // We don't actually do zero-copy, so this method is a no-op - } + @Override + public void requestFrameCopies(boolean copyInput, boolean copyOutput) { + // We don't actually do zero-copy, so this method is a no-op + } } diff --git 
a/photon-core/src/main/java/org/photonvision/vision/frame/provider/FileFrameProvider.java b/photon-core/src/main/java/org/photonvision/vision/frame/provider/FileFrameProvider.java index f6e77c4ffd..315e4e3de3 100644 --- a/photon-core/src/main/java/org/photonvision/vision/frame/provider/FileFrameProvider.java +++ b/photon-core/src/main/java/org/photonvision/vision/frame/provider/FileFrameProvider.java @@ -33,89 +33,89 @@ * path}. */ public class FileFrameProvider extends CpuImageProcessor { - public static final int MAX_FPS = 5; - private static int count = 0; - - private final int thisIndex = count++; - private final Path path; - private final int millisDelay; - private final CVMat originalFrame; - - private final FrameStaticProperties properties; - - private long lastGetMillis = System.currentTimeMillis(); - - /** - * Instantiates a new FileFrameProvider. - * - * @param path The path of the image to read from. - * @param fov The fov of the image. - * @param maxFPS The max framerate to provide the image at. 
- */ - public FileFrameProvider(Path path, double fov, int maxFPS) { - this(path, fov, maxFPS, null); - } - - public FileFrameProvider(Path path, double fov, CameraCalibrationCoefficients calibration) { - this(path, fov, MAX_FPS, calibration); - } - - public FileFrameProvider( - Path path, double fov, int maxFPS, CameraCalibrationCoefficients calibration) { - if (!Files.exists(path)) - throw new RuntimeException("Invalid path for image: " + path.toAbsolutePath()); - this.path = path; - this.millisDelay = 1000 / maxFPS; - - Mat rawImage = Imgcodecs.imread(path.toString()); - if (rawImage.cols() > 0 && rawImage.rows() > 0) { - properties = new FrameStaticProperties(rawImage.width(), rawImage.height(), fov, calibration); - originalFrame = new CVMat(rawImage); - } else { - throw new RuntimeException("Image loading failed!"); + public static final int MAX_FPS = 5; + private static int count = 0; + + private final int thisIndex = count++; + private final Path path; + private final int millisDelay; + private final CVMat originalFrame; + + private final FrameStaticProperties properties; + + private long lastGetMillis = System.currentTimeMillis(); + + /** + * Instantiates a new FileFrameProvider. + * + * @param path The path of the image to read from. + * @param fov The fov of the image. + * @param maxFPS The max framerate to provide the image at. + */ + public FileFrameProvider(Path path, double fov, int maxFPS) { + this(path, fov, maxFPS, null); } - } - - /** - * Instantiates a new File frame provider. - * - * @param pathAsString The path of the image to read from as a string. - * @param fov The fov of the image. - */ - public FileFrameProvider(String pathAsString, double fov) { - this(Paths.get(pathAsString), fov, MAX_FPS); - } - - /** - * Instantiates a new File frame provider. - * - * @param path The path of the image to read from. - * @param fov The fov of the image. 
- */ - public FileFrameProvider(Path path, double fov) { - this(path, fov, MAX_FPS); - } - - @Override - public CapturedFrame getInputMat() { - var out = new CVMat(); - out.copyTo(originalFrame); - - // block to keep FPS at a defined rate - if (System.currentTimeMillis() - lastGetMillis < millisDelay) { - try { - Thread.sleep(millisDelay); - } catch (InterruptedException e) { - e.printStackTrace(); - } + + public FileFrameProvider(Path path, double fov, CameraCalibrationCoefficients calibration) { + this(path, fov, MAX_FPS, calibration); } - lastGetMillis = System.currentTimeMillis(); - return new CapturedFrame(out, properties, MathUtils.wpiNanoTime()); - } + public FileFrameProvider( + Path path, double fov, int maxFPS, CameraCalibrationCoefficients calibration) { + if (!Files.exists(path)) + throw new RuntimeException("Invalid path for image: " + path.toAbsolutePath()); + this.path = path; + this.millisDelay = 1000 / maxFPS; + + Mat rawImage = Imgcodecs.imread(path.toString()); + if (rawImage.cols() > 0 && rawImage.rows() > 0) { + properties = new FrameStaticProperties(rawImage.width(), rawImage.height(), fov, calibration); + originalFrame = new CVMat(rawImage); + } else { + throw new RuntimeException("Image loading failed!"); + } + } - @Override - public String getName() { - return "FileFrameProvider" + thisIndex + " - " + path.getFileName(); - } + /** + * Instantiates a new File frame provider. + * + * @param pathAsString The path of the image to read from as a string. + * @param fov The fov of the image. + */ + public FileFrameProvider(String pathAsString, double fov) { + this(Paths.get(pathAsString), fov, MAX_FPS); + } + + /** + * Instantiates a new File frame provider. + * + * @param path The path of the image to read from. + * @param fov The fov of the image. 
+ */ + public FileFrameProvider(Path path, double fov) { + this(path, fov, MAX_FPS); + } + + @Override + public CapturedFrame getInputMat() { + var out = new CVMat(); + out.copyTo(originalFrame); + + // block to keep FPS at a defined rate + if (System.currentTimeMillis() - lastGetMillis < millisDelay) { + try { + Thread.sleep(millisDelay); + } catch (InterruptedException e) { + e.printStackTrace(); + } + } + + lastGetMillis = System.currentTimeMillis(); + return new CapturedFrame(out, properties, MathUtils.wpiNanoTime()); + } + + @Override + public String getName() { + return "FileFrameProvider" + thisIndex + " - " + path.getFileName(); + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/frame/provider/LibcameraGpuFrameProvider.java b/photon-core/src/main/java/org/photonvision/vision/frame/provider/LibcameraGpuFrameProvider.java index 6ed0229315..7b76f658ad 100644 --- a/photon-core/src/main/java/org/photonvision/vision/frame/provider/LibcameraGpuFrameProvider.java +++ b/photon-core/src/main/java/org/photonvision/vision/frame/provider/LibcameraGpuFrameProvider.java @@ -29,86 +29,86 @@ import org.photonvision.vision.pipe.impl.HSVPipe.HSVParams; public class LibcameraGpuFrameProvider implements FrameProvider { - private final LibcameraGpuSettables settables; - - public LibcameraGpuFrameProvider(LibcameraGpuSettables visionSettables) { - this.settables = visionSettables; - - var vidMode = settables.getCurrentVideoMode(); - settables.setVideoMode(vidMode); - } - - @Override - public String getName() { - return "AcceleratedPicamFrameProvider"; - } - - int i = 0; - - @Override - public Frame get() { - // We need to make sure that other threads don't try to change video modes while we're waiting - // for a frame - // System.out.println("GET!"); - synchronized (LibCameraJNI.CAMERA_LOCK) { - var success = LibCameraJNI.awaitNewFrame(); - - if (!success) { - System.out.println("No new frame"); - return new Frame(); - } - - var colorMat = new CVMat(new 
Mat(LibCameraJNI.takeColorFrame())); - var processedMat = new CVMat(new Mat(LibCameraJNI.takeProcessedFrame())); - - // System.out.println("Color mat: " + colorMat.getMat().size()); - - // Imgcodecs.imwrite("color" + i + ".jpg", colorMat.getMat()); - // Imgcodecs.imwrite("processed" + (i) + ".jpg", processedMat.getMat()); - - int itype = LibCameraJNI.getGpuProcessType(); - FrameThresholdType type = FrameThresholdType.NONE; - if (itype < FrameThresholdType.values().length && itype >= 0) { - type = FrameThresholdType.values()[itype]; - } - - var now = LibCameraJNI.getLibcameraTimestamp(); - var capture = LibCameraJNI.getFrameCaptureTime(); - var latency = (now - capture); - - return new Frame( - colorMat, - processedMat, - type, - MathUtils.wpiNanoTime() - latency, - settables.getFrameStaticProperties()); + private final LibcameraGpuSettables settables; + + public LibcameraGpuFrameProvider(LibcameraGpuSettables visionSettables) { + this.settables = visionSettables; + + var vidMode = settables.getCurrentVideoMode(); + settables.setVideoMode(vidMode); + } + + @Override + public String getName() { + return "AcceleratedPicamFrameProvider"; + } + + int i = 0; + + @Override + public Frame get() { + // We need to make sure that other threads don't try to change video modes while we're waiting + // for a frame + // System.out.println("GET!"); + synchronized (LibCameraJNI.CAMERA_LOCK) { + var success = LibCameraJNI.awaitNewFrame(); + + if (!success) { + System.out.println("No new frame"); + return new Frame(); + } + + var colorMat = new CVMat(new Mat(LibCameraJNI.takeColorFrame())); + var processedMat = new CVMat(new Mat(LibCameraJNI.takeProcessedFrame())); + + // System.out.println("Color mat: " + colorMat.getMat().size()); + + // Imgcodecs.imwrite("color" + i + ".jpg", colorMat.getMat()); + // Imgcodecs.imwrite("processed" + (i) + ".jpg", processedMat.getMat()); + + int itype = LibCameraJNI.getGpuProcessType(); + FrameThresholdType type = FrameThresholdType.NONE; + if 
(itype < FrameThresholdType.values().length && itype >= 0) { + type = FrameThresholdType.values()[itype]; + } + + var now = LibCameraJNI.getLibcameraTimestamp(); + var capture = LibCameraJNI.getFrameCaptureTime(); + var latency = (now - capture); + + return new Frame( + colorMat, + processedMat, + type, + MathUtils.wpiNanoTime() - latency, + settables.getFrameStaticProperties()); + } + } + + @Override + public void requestFrameThresholdType(FrameThresholdType type) { + LibCameraJNI.setGpuProcessType(type.ordinal()); + } + + @Override + public void requestFrameRotation(ImageRotationMode rotationMode) { + this.settables.setRotation(rotationMode); + } + + @Override + public void requestHsvSettings(HSVParams params) { + LibCameraJNI.setThresholds( + params.getHsvLower().val[0] / 180.0, + params.getHsvLower().val[1] / 255.0, + params.getHsvLower().val[2] / 255.0, + params.getHsvUpper().val[0] / 180.0, + params.getHsvUpper().val[1] / 255.0, + params.getHsvUpper().val[2] / 255.0, + params.getHueInverted()); + } + + @Override + public void requestFrameCopies(boolean copyInput, boolean copyOutput) { + LibCameraJNI.setFramesToCopy(copyInput, copyOutput); } - } - - @Override - public void requestFrameThresholdType(FrameThresholdType type) { - LibCameraJNI.setGpuProcessType(type.ordinal()); - } - - @Override - public void requestFrameRotation(ImageRotationMode rotationMode) { - this.settables.setRotation(rotationMode); - } - - @Override - public void requestHsvSettings(HSVParams params) { - LibCameraJNI.setThresholds( - params.getHsvLower().val[0] / 180.0, - params.getHsvLower().val[1] / 255.0, - params.getHsvLower().val[2] / 255.0, - params.getHsvUpper().val[0] / 180.0, - params.getHsvUpper().val[1] / 255.0, - params.getHsvUpper().val[2] / 255.0, - params.getHueInverted()); - } - - @Override - public void requestFrameCopies(boolean copyInput, boolean copyOutput) { - LibCameraJNI.setFramesToCopy(copyInput, copyOutput); - } } diff --git 
a/photon-core/src/main/java/org/photonvision/vision/frame/provider/USBFrameProvider.java b/photon-core/src/main/java/org/photonvision/vision/frame/provider/USBFrameProvider.java index f89111cf18..03459c197d 100644 --- a/photon-core/src/main/java/org/photonvision/vision/frame/provider/USBFrameProvider.java +++ b/photon-core/src/main/java/org/photonvision/vision/frame/provider/USBFrameProvider.java @@ -23,36 +23,36 @@ import org.photonvision.vision.processes.VisionSourceSettables; public class USBFrameProvider extends CpuImageProcessor { - private final CvSink cvSink; - - @SuppressWarnings("SpellCheckingInspection") - private final VisionSourceSettables settables; - - @SuppressWarnings("SpellCheckingInspection") - public USBFrameProvider(CvSink sink, VisionSourceSettables visionSettables) { - cvSink = sink; - cvSink.setEnabled(true); - this.settables = visionSettables; - } - - @Override - public CapturedFrame getInputMat() { - var mat = new CVMat(); // We do this so that we don't fill a Mat in use by another thread - // This is from wpi::Now, or WPIUtilJNI.now() - long time = - cvSink.grabFrame(mat.getMat()) - * 1000; // Units are microseconds, epoch is the same as the Unix epoch - - // Sometimes CSCore gives us a zero frametime. 
- if (time <= 1e-6) { - time = MathUtils.wpiNanoTime(); + private final CvSink cvSink; + + @SuppressWarnings("SpellCheckingInspection") + private final VisionSourceSettables settables; + + @SuppressWarnings("SpellCheckingInspection") + public USBFrameProvider(CvSink sink, VisionSourceSettables visionSettables) { + cvSink = sink; + cvSink.setEnabled(true); + this.settables = visionSettables; } - return new CapturedFrame(mat, settables.getFrameStaticProperties(), time); - } + @Override + public CapturedFrame getInputMat() { + var mat = new CVMat(); // We do this so that we don't fill a Mat in use by another thread + // This is from wpi::Now, or WPIUtilJNI.now() + long time = + cvSink.grabFrame(mat.getMat()) + * 1000; // Units are microseconds, epoch is the same as the Unix epoch + + // Sometimes CSCore gives us a zero frametime. + if (time <= 1e-6) { + time = MathUtils.wpiNanoTime(); + } - @Override - public String getName() { - return "USBFrameProvider - " + cvSink.getName(); - } + return new CapturedFrame(mat, settables.getFrameStaticProperties(), time); + } + + @Override + public String getName() { + return "USBFrameProvider - " + cvSink.getName(); + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/opencv/CVMat.java b/photon-core/src/main/java/org/photonvision/vision/opencv/CVMat.java index c48d78094e..e58f0d11d7 100644 --- a/photon-core/src/main/java/org/photonvision/vision/opencv/CVMat.java +++ b/photon-core/src/main/java/org/photonvision/vision/opencv/CVMat.java @@ -23,80 +23,80 @@ import org.photonvision.common.logging.Logger; public class CVMat implements Releasable { - private static final Logger logger = new Logger(CVMat.class, LogGroup.General); + private static final Logger logger = new Logger(CVMat.class, LogGroup.General); - private static int allMatCounter = 0; - private static final HashMap allMats = new HashMap<>(); + private static int allMatCounter = 0; + private static final HashMap allMats = new HashMap<>(); - private static 
boolean shouldPrint; + private static boolean shouldPrint; - private final Mat mat; + private final Mat mat; - public CVMat() { - this(new Mat()); - } + public CVMat() { + this(new Mat()); + } - public void copyTo(CVMat srcMat) { - copyTo(srcMat.getMat()); - } + public void copyTo(CVMat srcMat) { + copyTo(srcMat.getMat()); + } - public void copyTo(Mat srcMat) { - srcMat.copyTo(mat); - } + public void copyTo(Mat srcMat) { + srcMat.copyTo(mat); + } - private StringBuilder getStackTraceBuilder() { - var trace = Thread.currentThread().getStackTrace(); + private StringBuilder getStackTraceBuilder() { + var trace = Thread.currentThread().getStackTrace(); - final int STACK_FRAMES_TO_SKIP = 3; - final var traceStr = new StringBuilder(); - for (int idx = STACK_FRAMES_TO_SKIP; idx < trace.length; idx++) { - traceStr.append("\t\n").append(trace[idx]); + final int STACK_FRAMES_TO_SKIP = 3; + final var traceStr = new StringBuilder(); + for (int idx = STACK_FRAMES_TO_SKIP; idx < trace.length; idx++) { + traceStr.append("\t\n").append(trace[idx]); + } + traceStr.append("\n"); + return traceStr; } - traceStr.append("\n"); - return traceStr; - } - - public CVMat(Mat mat) { - this.mat = mat; - allMatCounter++; - allMats.put(mat, allMatCounter); - - if (shouldPrint) { - logger.trace(() -> "CVMat" + allMatCounter + " alloc - new count: " + allMats.size()); - logger.trace(getStackTraceBuilder()::toString); + + public CVMat(Mat mat) { + this.mat = mat; + allMatCounter++; + allMats.put(mat, allMatCounter); + + if (shouldPrint) { + logger.trace(() -> "CVMat" + allMatCounter + " alloc - new count: " + allMats.size()); + logger.trace(getStackTraceBuilder()::toString); + } } - } - @Override - public void release() { - // If this mat is empty, all we can do is return - if (mat.empty()) return; + @Override + public void release() { + // If this mat is empty, all we can do is return + if (mat.empty()) return; - // If the mat isn't in the hashmap, we can't remove it - Integer matNo = 
allMats.get(mat); - if (matNo != null) allMats.remove(mat); - mat.release(); + // If the mat isn't in the hashmap, we can't remove it + Integer matNo = allMats.get(mat); + if (matNo != null) allMats.remove(mat); + mat.release(); - if (shouldPrint) { - logger.trace(() -> "CVMat" + matNo + " de-alloc - new count: " + allMats.size()); - logger.trace(getStackTraceBuilder()::toString); + if (shouldPrint) { + logger.trace(() -> "CVMat" + matNo + " de-alloc - new count: " + allMats.size()); + logger.trace(getStackTraceBuilder()::toString); + } } - } - public Mat getMat() { - return mat; - } + public Mat getMat() { + return mat; + } - @Override - public String toString() { - return "CVMat{" + mat.toString() + '}'; - } + @Override + public String toString() { + return "CVMat{" + mat.toString() + '}'; + } - public static int getMatCount() { - return allMats.size(); - } + public static int getMatCount() { + return allMats.size(); + } - public static void enablePrint(boolean enabled) { - shouldPrint = enabled; - } + public static void enablePrint(boolean enabled) { + shouldPrint = enabled; + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/opencv/CVShape.java b/photon-core/src/main/java/org/photonvision/vision/opencv/CVShape.java index 2b4f083ea0..99b8c2f107 100644 --- a/photon-core/src/main/java/org/photonvision/vision/opencv/CVShape.java +++ b/photon-core/src/main/java/org/photonvision/vision/opencv/CVShape.java @@ -23,42 +23,42 @@ import org.opencv.core.Point; public class CVShape implements Releasable { - public final Contour contour; + public final Contour contour; - @Nullable public final ContourShape shape; + @Nullable public final ContourShape shape; - public double radius = 0; - public Point center = null; + public double radius = 0; + public Point center = null; - private MatOfPoint3f customTarget = null; + private MatOfPoint3f customTarget = null; - private final MatOfPoint2f approxCurve = new MatOfPoint2f(); + private final MatOfPoint2f 
approxCurve = new MatOfPoint2f(); - public CVShape(Contour contour, ContourShape shape) { - this.contour = contour; - this.shape = shape; - } + public CVShape(Contour contour, ContourShape shape) { + this.contour = contour; + this.shape = shape; + } - public CVShape(Contour contour, Point center, double radius) { - this(contour, ContourShape.Circle); - this.radius = radius; - this.center = center; - } + public CVShape(Contour contour, Point center, double radius) { + this(contour, ContourShape.Circle); + this.radius = radius; + this.center = center; + } - public CVShape(Contour contour, MatOfPoint3f targetPoints) { - this.contour = contour; - this.shape = ContourShape.Custom; - customTarget = targetPoints; - } + public CVShape(Contour contour, MatOfPoint3f targetPoints) { + this.contour = contour; + this.shape = ContourShape.Custom; + customTarget = targetPoints; + } - public Contour getContour() { - return contour; - } + public Contour getContour() { + return contour; + } - @Override - public void release() { - if (customTarget != null) customTarget.release(); - approxCurve.release(); - contour.release(); - } + @Override + public void release() { + if (customTarget != null) customTarget.release(); + approxCurve.release(); + contour.release(); + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/opencv/Contour.java b/photon-core/src/main/java/org/photonvision/vision/opencv/Contour.java index ab13160607..181de0b3b3 100644 --- a/photon-core/src/main/java/org/photonvision/vision/opencv/Contour.java +++ b/photon-core/src/main/java/org/photonvision/vision/opencv/Contour.java @@ -27,204 +27,204 @@ import org.photonvision.common.util.math.MathUtils; public class Contour implements Releasable { - public static final Comparator SortByMomentsX = - Comparator.comparingDouble( - (contour) -> contour.getMoments().get_m10() / contour.getMoments().get_m00()); - - public final MatOfPoint mat; + public static final Comparator SortByMomentsX = + 
Comparator.comparingDouble( + (contour) -> contour.getMoments().get_m10() / contour.getMoments().get_m00()); - private Double area = Double.NaN; - private Double perimeter = Double.NaN; - private MatOfPoint2f mat2f = null; - private RotatedRect minAreaRect = null; - private Rect boundingRect = null; - private Moments moments = null; - - private MatOfPoint2f convexHull = null; - private MatOfPoint2f approxPolyDp = null; - - public Contour(MatOfPoint mat) { - this.mat = mat; - } - - public MatOfPoint2f getMat2f() { - if (mat2f == null) { - mat2f = new MatOfPoint2f(mat.toArray()); - mat.convertTo(mat2f, CvType.CV_32F); - } - return mat2f; - } - - public MatOfPoint2f getConvexHull() { - if (this.convexHull == null) { - var ints = new MatOfInt(); - Imgproc.convexHull(mat, ints); - this.convexHull = Contour.convertIndexesToPoints(mat, ints); - ints.release(); - } - return convexHull; - } - - public MatOfPoint2f getApproxPolyDp(double epsilon, boolean closed) { - if (this.approxPolyDp == null) { - approxPolyDp = new MatOfPoint2f(); - Imgproc.approxPolyDP(getConvexHull(), approxPolyDp, epsilon, closed); - } - return approxPolyDp; - } - - @Nullable - public MatOfPoint2f getApproxPolyDp() { - return this.approxPolyDp; - } - - public double getArea() { - if (Double.isNaN(area)) { - area = Imgproc.contourArea(mat); - } - return area; - } - - public double getPerimeter() { - if (Double.isNaN(perimeter)) { - perimeter = Imgproc.arcLength(getMat2f(), true); - } - return perimeter; - } - - public RotatedRect getMinAreaRect() { - if (minAreaRect == null) { - minAreaRect = Imgproc.minAreaRect(getMat2f()); - } - return minAreaRect; - } - - public Rect getBoundingRect() { - if (boundingRect == null) { - boundingRect = Imgproc.boundingRect(mat); - } - return boundingRect; - } - - public Moments getMoments() { - if (moments == null) { - moments = Imgproc.moments(mat); - } - return moments; - } - - public Point getCenterPoint() { - return getMinAreaRect().center; - } - - public boolean 
isEmpty() { - return mat.empty(); - } - - public boolean isIntersecting( - Contour secondContour, ContourIntersectionDirection intersectionDirection) { - boolean isIntersecting = false; - - if (intersectionDirection == ContourIntersectionDirection.None) { - isIntersecting = true; - } else { - try { - MatOfPoint2f intersectMatA = new MatOfPoint2f(); - MatOfPoint2f intersectMatB = new MatOfPoint2f(); - - mat.convertTo(intersectMatA, CvType.CV_32F); - secondContour.mat.convertTo(intersectMatB, CvType.CV_32F); - - RotatedRect a = Imgproc.fitEllipse(intersectMatA); - RotatedRect b = Imgproc.fitEllipse(intersectMatB); - double mA = MathUtils.toSlope(a.angle); - double mB = MathUtils.toSlope(b.angle); - double x0A = a.center.x; - double y0A = a.center.y; - double x0B = b.center.x; - double y0B = b.center.y; - double intersectionX = ((mA * x0A) - y0A - (mB * x0B) + y0B) / (mA - mB); - double intersectionY = (mA * (intersectionX - x0A)) + y0A; - double massX = (x0A + x0B) / 2; - double massY = (y0A + y0B) / 2; - switch (intersectionDirection) { - case Up: - if (intersectionY < massY) isIntersecting = true; - break; - case Down: - if (intersectionY > massY) isIntersecting = true; - break; - case Left: - if (intersectionX < massX) isIntersecting = true; - break; - case Right: - if (intersectionX > massX) isIntersecting = true; - break; + public final MatOfPoint mat; + + private Double area = Double.NaN; + private Double perimeter = Double.NaN; + private MatOfPoint2f mat2f = null; + private RotatedRect minAreaRect = null; + private Rect boundingRect = null; + private Moments moments = null; + + private MatOfPoint2f convexHull = null; + private MatOfPoint2f approxPolyDp = null; + + public Contour(MatOfPoint mat) { + this.mat = mat; + } + + public MatOfPoint2f getMat2f() { + if (mat2f == null) { + mat2f = new MatOfPoint2f(mat.toArray()); + mat.convertTo(mat2f, CvType.CV_32F); } - intersectMatA.release(); - intersectMatB.release(); - } catch (Exception e) { - // defaults to false 
- } + return mat2f; } - return isIntersecting; - } + public MatOfPoint2f getConvexHull() { + if (this.convexHull == null) { + var ints = new MatOfInt(); + Imgproc.convexHull(mat, ints); + this.convexHull = Contour.convertIndexesToPoints(mat, ints); + ints.release(); + } + return convexHull; + } - // TODO: refactor to do "infinite" contours ??????? - public static Contour groupContoursByIntersection( - Contour firstContour, Contour secondContour, ContourIntersectionDirection intersection) { - if (areIntersecting(firstContour, secondContour, intersection)) { - return combineContours(firstContour, secondContour); - } else { - return null; + public MatOfPoint2f getApproxPolyDp(double epsilon, boolean closed) { + if (this.approxPolyDp == null) { + approxPolyDp = new MatOfPoint2f(); + Imgproc.approxPolyDP(getConvexHull(), approxPolyDp, epsilon, closed); + } + return approxPolyDp; } - } - public static boolean areIntersecting( - Contour firstContour, - Contour secondContour, - ContourIntersectionDirection intersectionDirection) { - return firstContour.isIntersecting(secondContour, intersectionDirection) - || secondContour.isIntersecting(firstContour, intersectionDirection); - } + @Nullable + public MatOfPoint2f getApproxPolyDp() { + return this.approxPolyDp; + } - public static Contour combineContours(Contour... 
contours) { - return combineContourList(Arrays.asList(contours)); - } + public double getArea() { + if (Double.isNaN(area)) { + area = Imgproc.contourArea(mat); + } + return area; + } - public static Contour combineContourList(Collection contours) { - var points = new MatOfPoint(); + public double getPerimeter() { + if (Double.isNaN(perimeter)) { + perimeter = Imgproc.arcLength(getMat2f(), true); + } + return perimeter; + } - for (var contour : contours) { - points.push_back(contour.mat); + public RotatedRect getMinAreaRect() { + if (minAreaRect == null) { + minAreaRect = Imgproc.minAreaRect(getMat2f()); + } + return minAreaRect; } - var finalContour = new Contour(points); + public Rect getBoundingRect() { + if (boundingRect == null) { + boundingRect = Imgproc.boundingRect(mat); + } + return boundingRect; + } - boolean contourEmpty = finalContour.isEmpty(); - return contourEmpty ? null : finalContour; - } + public Moments getMoments() { + if (moments == null) { + moments = Imgproc.moments(mat); + } + return moments; + } - @Override - public void release() { - if (mat != null) mat.release(); - if (mat2f != null) mat2f.release(); - if (convexHull != null) convexHull.release(); - if (approxPolyDp != null) approxPolyDp.release(); - } + public Point getCenterPoint() { + return getMinAreaRect().center; + } - public static MatOfPoint2f convertIndexesToPoints(MatOfPoint contour, MatOfInt indexes) { - int[] arrIndex = indexes.toArray(); - Point[] arrContour = contour.toArray(); - Point[] arrPoints = new Point[arrIndex.length]; + public boolean isEmpty() { + return mat.empty(); + } - for (int i = 0; i < arrIndex.length; i++) { - arrPoints[i] = arrContour[arrIndex[i]]; + public boolean isIntersecting( + Contour secondContour, ContourIntersectionDirection intersectionDirection) { + boolean isIntersecting = false; + + if (intersectionDirection == ContourIntersectionDirection.None) { + isIntersecting = true; + } else { + try { + MatOfPoint2f intersectMatA = new MatOfPoint2f(); + 
MatOfPoint2f intersectMatB = new MatOfPoint2f(); + + mat.convertTo(intersectMatA, CvType.CV_32F); + secondContour.mat.convertTo(intersectMatB, CvType.CV_32F); + + RotatedRect a = Imgproc.fitEllipse(intersectMatA); + RotatedRect b = Imgproc.fitEllipse(intersectMatB); + double mA = MathUtils.toSlope(a.angle); + double mB = MathUtils.toSlope(b.angle); + double x0A = a.center.x; + double y0A = a.center.y; + double x0B = b.center.x; + double y0B = b.center.y; + double intersectionX = ((mA * x0A) - y0A - (mB * x0B) + y0B) / (mA - mB); + double intersectionY = (mA * (intersectionX - x0A)) + y0A; + double massX = (x0A + x0B) / 2; + double massY = (y0A + y0B) / 2; + switch (intersectionDirection) { + case Up: + if (intersectionY < massY) isIntersecting = true; + break; + case Down: + if (intersectionY > massY) isIntersecting = true; + break; + case Left: + if (intersectionX < massX) isIntersecting = true; + break; + case Right: + if (intersectionX > massX) isIntersecting = true; + break; + } + intersectMatA.release(); + intersectMatB.release(); + } catch (Exception e) { + // defaults to false + } + } + + return isIntersecting; + } + + // TODO: refactor to do "infinite" contours ??????? + public static Contour groupContoursByIntersection( + Contour firstContour, Contour secondContour, ContourIntersectionDirection intersection) { + if (areIntersecting(firstContour, secondContour, intersection)) { + return combineContours(firstContour, secondContour); + } else { + return null; + } + } + + public static boolean areIntersecting( + Contour firstContour, + Contour secondContour, + ContourIntersectionDirection intersectionDirection) { + return firstContour.isIntersecting(secondContour, intersectionDirection) + || secondContour.isIntersecting(firstContour, intersectionDirection); + } + + public static Contour combineContours(Contour... 
contours) { + return combineContourList(Arrays.asList(contours)); + } + + public static Contour combineContourList(Collection contours) { + var points = new MatOfPoint(); + + for (var contour : contours) { + points.push_back(contour.mat); + } + + var finalContour = new Contour(points); + + boolean contourEmpty = finalContour.isEmpty(); + return contourEmpty ? null : finalContour; + } + + @Override + public void release() { + if (mat != null) mat.release(); + if (mat2f != null) mat2f.release(); + if (convexHull != null) convexHull.release(); + if (approxPolyDp != null) approxPolyDp.release(); } - var hull = new MatOfPoint2f(); - hull.fromArray(arrPoints); - return hull; - } + public static MatOfPoint2f convertIndexesToPoints(MatOfPoint contour, MatOfInt indexes) { + int[] arrIndex = indexes.toArray(); + Point[] arrContour = contour.toArray(); + Point[] arrPoints = new Point[arrIndex.length]; + + for (int i = 0; i < arrIndex.length; i++) { + arrPoints[i] = arrContour[arrIndex[i]]; + } + + var hull = new MatOfPoint2f(); + hull.fromArray(arrPoints); + return hull; + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/opencv/ContourGroupingMode.java b/photon-core/src/main/java/org/photonvision/vision/opencv/ContourGroupingMode.java index 42500feb87..1929c058fb 100644 --- a/photon-core/src/main/java/org/photonvision/vision/opencv/ContourGroupingMode.java +++ b/photon-core/src/main/java/org/photonvision/vision/opencv/ContourGroupingMode.java @@ -18,13 +18,13 @@ package org.photonvision.vision.opencv; public enum ContourGroupingMode { - Single(1), - Dual(2), - TwoOrMore(2); + Single(1), + Dual(2), + TwoOrMore(2); - public final int count; + public final int count; - ContourGroupingMode(int count) { - this.count = count; - } + ContourGroupingMode(int count) { + this.count = count; + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/opencv/ContourIntersectionDirection.java 
b/photon-core/src/main/java/org/photonvision/vision/opencv/ContourIntersectionDirection.java index 5aa701c978..d3f97d61e4 100644 --- a/photon-core/src/main/java/org/photonvision/vision/opencv/ContourIntersectionDirection.java +++ b/photon-core/src/main/java/org/photonvision/vision/opencv/ContourIntersectionDirection.java @@ -18,9 +18,9 @@ package org.photonvision.vision.opencv; public enum ContourIntersectionDirection { - None, - Up, - Down, - Left, - Right + None, + Up, + Down, + Left, + Right } diff --git a/photon-core/src/main/java/org/photonvision/vision/opencv/ContourShape.java b/photon-core/src/main/java/org/photonvision/vision/opencv/ContourShape.java index bf77b2b0c9..4eac4d847d 100644 --- a/photon-core/src/main/java/org/photonvision/vision/opencv/ContourShape.java +++ b/photon-core/src/main/java/org/photonvision/vision/opencv/ContourShape.java @@ -21,26 +21,26 @@ import java.util.HashMap; public enum ContourShape { - Circle(0), - Custom(-1), - Triangle(3), - Quadrilateral(4); + Circle(0), + Custom(-1), + Triangle(3), + Quadrilateral(4); - public final int sides; + public final int sides; - ContourShape(int sides) { - this.sides = sides; - } + ContourShape(int sides) { + this.sides = sides; + } - private static final HashMap sidesToValueMap = new HashMap<>(); + private static final HashMap sidesToValueMap = new HashMap<>(); - static { - for (var value : EnumSet.allOf(ContourShape.class)) { - sidesToValueMap.put(value.sides, value); + static { + for (var value : EnumSet.allOf(ContourShape.class)) { + sidesToValueMap.put(value.sides, value); + } } - } - public static ContourShape fromSides(int sides) { - return sidesToValueMap.get(sides); - } + public static ContourShape fromSides(int sides) { + return sidesToValueMap.get(sides); + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/opencv/ContourSortMode.java b/photon-core/src/main/java/org/photonvision/vision/opencv/ContourSortMode.java index 52b31811ca..2ceff908ec 100644 --- 
a/photon-core/src/main/java/org/photonvision/vision/opencv/ContourSortMode.java +++ b/photon-core/src/main/java/org/photonvision/vision/opencv/ContourSortMode.java @@ -21,27 +21,27 @@ import org.photonvision.vision.target.PotentialTarget; public enum ContourSortMode { - Largest( - Comparator.comparingDouble(PotentialTarget::getArea) - .reversed()), // reversed so that zero index has the largest size - Smallest(Largest.getComparator().reversed()), - Highest(Comparator.comparingDouble(rect -> rect.getMinAreaRect().center.y)), - Lowest(Highest.getComparator().reversed()), - Leftmost(Comparator.comparingDouble(target -> target.getMinAreaRect().center.x * -1)), - Rightmost(Leftmost.getComparator().reversed()), - Centermost( - Comparator.comparingDouble( - rect -> - (Math.pow(rect.getMinAreaRect().center.y, 2) - + Math.pow(rect.getMinAreaRect().center.x, 2)))); + Largest( + Comparator.comparingDouble(PotentialTarget::getArea) + .reversed()), // reversed so that zero index has the largest size + Smallest(Largest.getComparator().reversed()), + Highest(Comparator.comparingDouble(rect -> rect.getMinAreaRect().center.y)), + Lowest(Highest.getComparator().reversed()), + Leftmost(Comparator.comparingDouble(target -> target.getMinAreaRect().center.x * -1)), + Rightmost(Leftmost.getComparator().reversed()), + Centermost( + Comparator.comparingDouble( + rect -> + (Math.pow(rect.getMinAreaRect().center.y, 2) + + Math.pow(rect.getMinAreaRect().center.x, 2)))); - private final Comparator m_comparator; + private final Comparator m_comparator; - ContourSortMode(Comparator comparator) { - m_comparator = comparator; - } + ContourSortMode(Comparator comparator) { + m_comparator = comparator; + } - public Comparator getComparator() { - return m_comparator; - } + public Comparator getComparator() { + return m_comparator; + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/opencv/DualMat.java b/photon-core/src/main/java/org/photonvision/vision/opencv/DualMat.java index 
7efe9fcd87..f2adb4a92f 100644 --- a/photon-core/src/main/java/org/photonvision/vision/opencv/DualMat.java +++ b/photon-core/src/main/java/org/photonvision/vision/opencv/DualMat.java @@ -20,6 +20,6 @@ import org.opencv.core.Mat; public class DualMat { - public Mat first; - public Mat second; + public Mat first; + public Mat second; } diff --git a/photon-core/src/main/java/org/photonvision/vision/opencv/DualOffsetValues.java b/photon-core/src/main/java/org/photonvision/vision/opencv/DualOffsetValues.java index 1039ccff85..c7b2ac04f2 100644 --- a/photon-core/src/main/java/org/photonvision/vision/opencv/DualOffsetValues.java +++ b/photon-core/src/main/java/org/photonvision/vision/opencv/DualOffsetValues.java @@ -22,27 +22,27 @@ import org.photonvision.vision.target.TargetCalculations; public class DualOffsetValues { - public final Point firstPoint; - public final double firstPointArea; - public final Point secondPoint; - public final double secondPointArea; + public final Point firstPoint; + public final double firstPointArea; + public final Point secondPoint; + public final double secondPointArea; - public DualOffsetValues() { - firstPoint = new Point(); - firstPointArea = 0; - secondPoint = new Point(); - secondPointArea = 0; - } + public DualOffsetValues() { + firstPoint = new Point(); + firstPointArea = 0; + secondPoint = new Point(); + secondPointArea = 0; + } - public DualOffsetValues( - Point firstPoint, double firstPointArea, Point secondPoint, double secondPointArea) { - this.firstPoint = firstPoint; - this.firstPointArea = firstPointArea; - this.secondPoint = secondPoint; - this.secondPointArea = secondPointArea; - } + public DualOffsetValues( + Point firstPoint, double firstPointArea, Point secondPoint, double secondPointArea) { + this.firstPoint = firstPoint; + this.firstPointArea = firstPointArea; + this.secondPoint = secondPoint; + this.secondPointArea = secondPointArea; + } - public DoubleCouple getLineValues() { - return 
TargetCalculations.getLineFromPoints(firstPoint, secondPoint); - } + public DoubleCouple getLineValues() { + return TargetCalculations.getLineFromPoints(firstPoint, secondPoint); + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/opencv/ImageFlipMode.java b/photon-core/src/main/java/org/photonvision/vision/opencv/ImageFlipMode.java index d156c25e1f..6fa456e7a0 100644 --- a/photon-core/src/main/java/org/photonvision/vision/opencv/ImageFlipMode.java +++ b/photon-core/src/main/java/org/photonvision/vision/opencv/ImageFlipMode.java @@ -18,14 +18,14 @@ package org.photonvision.vision.opencv; public enum ImageFlipMode { - NONE(Integer.MIN_VALUE), - VERTICAL(1), - HORIZONTAL(0), - BOTH(-1); + NONE(Integer.MIN_VALUE), + VERTICAL(1), + HORIZONTAL(0), + BOTH(-1); - public final int value; + public final int value; - ImageFlipMode(int value) { - this.value = value; - } + ImageFlipMode(int value) { + this.value = value; + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/opencv/ImageRotationMode.java b/photon-core/src/main/java/org/photonvision/vision/opencv/ImageRotationMode.java index a86ffdab6d..e72dd50e27 100644 --- a/photon-core/src/main/java/org/photonvision/vision/opencv/ImageRotationMode.java +++ b/photon-core/src/main/java/org/photonvision/vision/opencv/ImageRotationMode.java @@ -18,18 +18,18 @@ package org.photonvision.vision.opencv; public enum ImageRotationMode { - DEG_0(-1), - DEG_90(0), - DEG_180(1), - DEG_270(2); + DEG_0(-1), + DEG_90(0), + DEG_180(1), + DEG_270(2); - public final int value; + public final int value; - ImageRotationMode(int value) { - this.value = value; - } + ImageRotationMode(int value) { + this.value = value; + } - public boolean isRotated() { - return this.value == DEG_90.value || this.value == DEG_270.value; - } + public boolean isRotated() { + return this.value == DEG_90.value || this.value == DEG_270.value; + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/opencv/Releasable.java 
b/photon-core/src/main/java/org/photonvision/vision/opencv/Releasable.java index 91fead4705..dc76678884 100644 --- a/photon-core/src/main/java/org/photonvision/vision/opencv/Releasable.java +++ b/photon-core/src/main/java/org/photonvision/vision/opencv/Releasable.java @@ -18,5 +18,5 @@ package org.photonvision.vision.opencv; public interface Releasable { - void release(); + void release(); } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipe/CVPipe.java b/photon-core/src/main/java/org/photonvision/vision/pipe/CVPipe.java index 23040b1191..d941f51cd0 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipe/CVPipe.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipe/CVPipe.java @@ -26,34 +26,34 @@ * @param

Parameters type for the pipe */ public abstract class CVPipe { - protected CVPipeResult result = new CVPipeResult<>(); - protected P params; + protected CVPipeResult result = new CVPipeResult<>(); + protected P params; - public void setParams(P params) { - this.params = params; - } + public void setParams(P params) { + this.params = params; + } - /** - * Runs the process for the pipe. - * - * @param in Input for pipe processing. - * @return Result of processing. - */ - protected abstract O process(I in); + /** + * Runs the process for the pipe. + * + * @param in Input for pipe processing. + * @return Result of processing. + */ + protected abstract O process(I in); - /** - * @param in Input for pipe processing. - * @return Result of processing. - */ - public CVPipeResult run(I in) { - long pipeStartNanos = System.nanoTime(); - result.output = process(in); - result.nanosElapsed = System.nanoTime() - pipeStartNanos; - return result; - } + /** + * @param in Input for pipe processing. + * @return Result of processing. 
+ */ + public CVPipeResult run(I in) { + long pipeStartNanos = System.nanoTime(); + result.output = process(in); + result.nanosElapsed = System.nanoTime() - pipeStartNanos; + return result; + } - public static class CVPipeResult { - public O output; - public long nanosElapsed; - } + public static class CVPipeResult { + public O output; + public long nanosElapsed; + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/AprilTagDetectionPipe.java b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/AprilTagDetectionPipe.java index 18f95acb5e..5c1497db3e 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/AprilTagDetectionPipe.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/AprilTagDetectionPipe.java @@ -24,40 +24,40 @@ import org.photonvision.vision.pipe.CVPipe; public class AprilTagDetectionPipe - extends CVPipe, AprilTagDetectionPipeParams> { - private final AprilTagDetector m_detector = new AprilTagDetector(); + extends CVPipe, AprilTagDetectionPipeParams> { + private final AprilTagDetector m_detector = new AprilTagDetector(); - public AprilTagDetectionPipe() { - super(); + public AprilTagDetectionPipe() { + super(); - m_detector.addFamily("tag16h5"); - m_detector.addFamily("tag36h11"); - } - - @Override - protected List process(CVMat in) { - if (in.getMat().empty()) { - return List.of(); + m_detector.addFamily("tag16h5"); + m_detector.addFamily("tag36h11"); } - var ret = m_detector.detect(in.getMat()); + @Override + protected List process(CVMat in) { + if (in.getMat().empty()) { + return List.of(); + } + + var ret = m_detector.detect(in.getMat()); - if (ret == null) { - return List.of(); + if (ret == null) { + return List.of(); + } + + return List.of(ret); } - return List.of(ret); - } + @Override + public void setParams(AprilTagDetectionPipeParams newParams) { + if (this.params == null || !this.params.equals(newParams)) { + m_detector.setConfig(newParams.detectorParams); - @Override - 
public void setParams(AprilTagDetectionPipeParams newParams) { - if (this.params == null || !this.params.equals(newParams)) { - m_detector.setConfig(newParams.detectorParams); + m_detector.clearFamilies(); + m_detector.addFamily(newParams.family.getNativeName()); + } - m_detector.clearFamilies(); - m_detector.addFamily(newParams.family.getNativeName()); + super.setParams(newParams); } - - super.setParams(newParams); - } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/AprilTagDetectionPipeParams.java b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/AprilTagDetectionPipeParams.java index 7a1cd08452..ab52d7bcfb 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/AprilTagDetectionPipeParams.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/AprilTagDetectionPipeParams.java @@ -21,32 +21,32 @@ import org.photonvision.vision.apriltag.AprilTagFamily; public class AprilTagDetectionPipeParams { - public final AprilTagFamily family; - public final AprilTagDetector.Config detectorParams; + public final AprilTagFamily family; + public final AprilTagDetector.Config detectorParams; - public AprilTagDetectionPipeParams(AprilTagFamily tagFamily, AprilTagDetector.Config config) { - this.family = tagFamily; - this.detectorParams = config; - } + public AprilTagDetectionPipeParams(AprilTagFamily tagFamily, AprilTagDetector.Config config) { + this.family = tagFamily; + this.detectorParams = config; + } - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + ((family == null) ? 0 : family.hashCode()); - result = prime * result + ((detectorParams == null) ? 0 : detectorParams.hashCode()); - return result; - } + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + ((family == null) ? 0 : family.hashCode()); + result = prime * result + ((detectorParams == null) ? 
0 : detectorParams.hashCode()); + return result; + } - @Override - public boolean equals(Object obj) { - if (this == obj) return true; - if (obj == null) return false; - if (getClass() != obj.getClass()) return false; - AprilTagDetectionPipeParams other = (AprilTagDetectionPipeParams) obj; - if (family != other.family) return false; - if (detectorParams == null) { - return other.detectorParams == null; - } else return detectorParams.equals(other.detectorParams); - } + @Override + public boolean equals(Object obj) { + if (this == obj) return true; + if (obj == null) return false; + if (getClass() != obj.getClass()) return false; + AprilTagDetectionPipeParams other = (AprilTagDetectionPipeParams) obj; + if (family != other.family) return false; + if (detectorParams == null) { + return other.detectorParams == null; + } else return detectorParams.equals(other.detectorParams); + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/AprilTagPoseEstimatorPipe.java b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/AprilTagPoseEstimatorPipe.java index 388dd63325..3c6563e182 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/AprilTagPoseEstimatorPipe.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/AprilTagPoseEstimatorPipe.java @@ -28,101 +28,101 @@ import org.photonvision.vision.pipe.CVPipe; public class AprilTagPoseEstimatorPipe - extends CVPipe< - AprilTagDetection, - AprilTagPoseEstimate, - AprilTagPoseEstimatorPipe.AprilTagPoseEstimatorPipeParams> { - private final AprilTagPoseEstimator m_poseEstimator = - new AprilTagPoseEstimator(new AprilTagPoseEstimator.Config(0, 0, 0, 0, 0)); - - public AprilTagPoseEstimatorPipe() { - super(); - } - - MatOfPoint2f temp = new MatOfPoint2f(); - - @Override - protected AprilTagPoseEstimate process(AprilTagDetection in) { - // Save the corner points of our detection to an array - Point[] corners = new Point[4]; - for (int i = 0; i < 4; i++) { - corners[i] = 
new Point(in.getCornerX(i), in.getCornerY(i)); + extends CVPipe< + AprilTagDetection, + AprilTagPoseEstimate, + AprilTagPoseEstimatorPipe.AprilTagPoseEstimatorPipeParams> { + private final AprilTagPoseEstimator m_poseEstimator = + new AprilTagPoseEstimator(new AprilTagPoseEstimator.Config(0, 0, 0, 0, 0)); + + public AprilTagPoseEstimatorPipe() { + super(); } - // And shove into our matofpoints - temp.fromArray(corners); - - // Probably overwrites what was in temp before. I hope - Calib3d.undistortImagePoints( - temp, - temp, - params.calibration.getCameraIntrinsicsMat(), - params.calibration.getDistCoeffsMat()); - - // Save out undistorted corners - corners = temp.toArray(); - - // Apriltagdetection expects an array in form [x1 y1 x2 y2 ...] - var fixedCorners = new double[8]; - for (int i = 0; i < 4; i++) { - fixedCorners[i * 2] = corners[i].x; - fixedCorners[i * 2 + 1] = corners[i].y; - } - - // Create a new Detection with the fixed corners - var corrected = - new AprilTagDetection( - in.getFamily(), - in.getId(), - in.getHamming(), - in.getDecisionMargin(), - in.getHomography(), - in.getCenterX(), - in.getCenterY(), - fixedCorners); - - return m_poseEstimator.estimateOrthogonalIteration(corrected, params.nIters); - } - - @Override - public void setParams(AprilTagPoseEstimatorPipe.AprilTagPoseEstimatorPipeParams newParams) { - if (this.params == null || !this.params.equals(newParams)) { - m_poseEstimator.setConfig(newParams.config); - } - - super.setParams(newParams); - } - public static class AprilTagPoseEstimatorPipeParams { - final AprilTagPoseEstimator.Config config; - final CameraCalibrationCoefficients calibration; - final int nIters; + MatOfPoint2f temp = new MatOfPoint2f(); - public AprilTagPoseEstimatorPipeParams( - Config config, CameraCalibrationCoefficients cal, int nIters) { - this.config = config; - this.nIters = nIters; - this.calibration = cal; + @Override + protected AprilTagPoseEstimate process(AprilTagDetection in) { + // Save the corner points 
of our detection to an array + Point[] corners = new Point[4]; + for (int i = 0; i < 4; i++) { + corners[i] = new Point(in.getCornerX(i), in.getCornerY(i)); + } + // And shove into our matofpoints + temp.fromArray(corners); + + // Probably overwrites what was in temp before. I hope + Calib3d.undistortImagePoints( + temp, + temp, + params.calibration.getCameraIntrinsicsMat(), + params.calibration.getDistCoeffsMat()); + + // Save out undistorted corners + corners = temp.toArray(); + + // Apriltagdetection expects an array in form [x1 y1 x2 y2 ...] + var fixedCorners = new double[8]; + for (int i = 0; i < 4; i++) { + fixedCorners[i * 2] = corners[i].x; + fixedCorners[i * 2 + 1] = corners[i].y; + } + + // Create a new Detection with the fixed corners + var corrected = + new AprilTagDetection( + in.getFamily(), + in.getId(), + in.getHamming(), + in.getDecisionMargin(), + in.getHomography(), + in.getCenterX(), + in.getCenterY(), + fixedCorners); + + return m_poseEstimator.estimateOrthogonalIteration(corrected, params.nIters); } @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + ((config == null) ? 
0 : config.hashCode()); - result = prime * result + nIters; - return result; + public void setParams(AprilTagPoseEstimatorPipe.AprilTagPoseEstimatorPipeParams newParams) { + if (this.params == null || !this.params.equals(newParams)) { + m_poseEstimator.setConfig(newParams.config); + } + + super.setParams(newParams); } - @Override - public boolean equals(Object obj) { - if (this == obj) return true; - if (obj == null) return false; - if (getClass() != obj.getClass()) return false; - AprilTagPoseEstimatorPipeParams other = (AprilTagPoseEstimatorPipeParams) obj; - if (config == null) { - if (other.config != null) return false; - } else if (!config.equals(other.config)) return false; - return nIters == other.nIters; + public static class AprilTagPoseEstimatorPipeParams { + final AprilTagPoseEstimator.Config config; + final CameraCalibrationCoefficients calibration; + final int nIters; + + public AprilTagPoseEstimatorPipeParams( + Config config, CameraCalibrationCoefficients cal, int nIters) { + this.config = config; + this.nIters = nIters; + this.calibration = cal; + } + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + ((config == null) ? 
0 : config.hashCode()); + result = prime * result + nIters; + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) return true; + if (obj == null) return false; + if (getClass() != obj.getClass()) return false; + AprilTagPoseEstimatorPipeParams other = (AprilTagPoseEstimatorPipeParams) obj; + if (config == null) { + if (other.config != null) return false; + } else if (!config.equals(other.config)) return false; + return nIters == other.nIters; + } } - } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/ArucoDetectionPipe.java b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/ArucoDetectionPipe.java index 73d60cd7c8..b18978c912 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/ArucoDetectionPipe.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/ArucoDetectionPipe.java @@ -26,25 +26,25 @@ import org.photonvision.vision.pipe.CVPipe; public class ArucoDetectionPipe - extends CVPipe, ArucoDetectionPipeParams> { - PhotonArucoDetector detector = new PhotonArucoDetector(); + extends CVPipe, ArucoDetectionPipeParams> { + PhotonArucoDetector detector = new PhotonArucoDetector(); - @Override - protected List process(Mat in) { - return List.of( - detector.detect( - in, - (float) Units.inchesToMeters(6), - params.cameraCalibrationCoefficients, - params.detectorParams)); - } + @Override + protected List process(Mat in) { + return List.of( + detector.detect( + in, + (float) Units.inchesToMeters(6), + params.cameraCalibrationCoefficients, + params.detectorParams)); + } - @Override - public void setParams(ArucoDetectionPipeParams params) { - super.setParams(params); - } + @Override + public void setParams(ArucoDetectionPipeParams params) { + super.setParams(params); + } - public DetectorParameters getParameters() { - return params == null ? 
null : params.detectorParams.getDetectorParameters(); - } + public DetectorParameters getParameters() { + return params == null ? null : params.detectorParams.getDetectorParameters(); + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/ArucoDetectionPipeParams.java b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/ArucoDetectionPipeParams.java index 26dfd8c50b..62d3214030 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/ArucoDetectionPipeParams.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/ArucoDetectionPipeParams.java @@ -22,36 +22,36 @@ import org.photonvision.vision.calibration.CameraCalibrationCoefficients; public class ArucoDetectionPipeParams { - public ArucoDetector detectorParams; - public final CameraCalibrationCoefficients cameraCalibrationCoefficients; + public ArucoDetector detectorParams; + public final CameraCalibrationCoefficients cameraCalibrationCoefficients; - public ArucoDetectionPipeParams( - ArucoDetector detector, CameraCalibrationCoefficients cameraCalibrationCoefficients) { - this.detectorParams = detector; - this.cameraCalibrationCoefficients = cameraCalibrationCoefficients; - } + public ArucoDetectionPipeParams( + ArucoDetector detector, CameraCalibrationCoefficients cameraCalibrationCoefficients) { + this.detectorParams = detector; + this.cameraCalibrationCoefficients = cameraCalibrationCoefficients; + } - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - ArucoDetectionPipeParams that = (ArucoDetectionPipeParams) o; - return Objects.equals(detectorParams, that.detectorParams) - && Objects.equals(cameraCalibrationCoefficients, that.cameraCalibrationCoefficients); - } + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ArucoDetectionPipeParams that = (ArucoDetectionPipeParams) o; 
+ return Objects.equals(detectorParams, that.detectorParams) + && Objects.equals(cameraCalibrationCoefficients, that.cameraCalibrationCoefficients); + } - @Override - public int hashCode() { - return Objects.hash(detectorParams, cameraCalibrationCoefficients); - } + @Override + public int hashCode() { + return Objects.hash(detectorParams, cameraCalibrationCoefficients); + } - @Override - public String toString() { - return "ArucoDetectionPipeParams{" - + "detectorParams=" - + detectorParams - + ", cameraCalibrationCoefficients=" - + cameraCalibrationCoefficients - + '}'; - } + @Override + public String toString() { + return "ArucoDetectionPipeParams{" + + "detectorParams=" + + detectorParams + + ", cameraCalibrationCoefficients=" + + cameraCalibrationCoefficients + + '}'; + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/BlurPipe.java b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/BlurPipe.java index ed22cf415c..11eff2e21c 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/BlurPipe.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/BlurPipe.java @@ -24,41 +24,41 @@ /** Represents a pipeline that blurs the image. */ public class BlurPipe extends MutatingPipe { - /** - * Processes this pipe. - * - * @param in Input for pipe processing. - * @return The processed frame. - */ - @Override - protected Void process(Mat in) { - Imgproc.blur(in, in, params.getBlurSize()); - return null; - } - - public static class BlurParams { - // Default BlurImagePrams with zero blur. - public static BlurParams DEFAULT = new BlurParams(0); - - // Member to store the blur size. - private final int m_blurSize; - /** - * Constructs a new BlurImageParams. + * Processes this pipe. * - * @param blurSize The blur size. + * @param in Input for pipe processing. + * @return The processed frame. 
*/ - public BlurParams(int blurSize) { - m_blurSize = blurSize; + @Override + protected Void process(Mat in) { + Imgproc.blur(in, in, params.getBlurSize()); + return null; } - /** - * Returns the blur size. - * - * @return The blur size. - */ - public Size getBlurSize() { - return new Size(m_blurSize, m_blurSize); + public static class BlurParams { + // Default BlurImagePrams with zero blur. + public static BlurParams DEFAULT = new BlurParams(0); + + // Member to store the blur size. + private final int m_blurSize; + + /** + * Constructs a new BlurImageParams. + * + * @param blurSize The blur size. + */ + public BlurParams(int blurSize) { + m_blurSize = blurSize; + } + + /** + * Returns the blur size. + * + * @return The blur size. + */ + public Size getBlurSize() { + return new Size(m_blurSize, m_blurSize); + } } - } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/CalculateFPSPipe.java b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/CalculateFPSPipe.java index b0c90efc3d..113da4d357 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/CalculateFPSPipe.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/CalculateFPSPipe.java @@ -22,22 +22,22 @@ import org.photonvision.vision.pipe.CVPipe; public class CalculateFPSPipe - extends CVPipe { - private final LinearFilter fpsFilter = LinearFilter.movingAverage(20); - StopWatch clock = new StopWatch(); + extends CVPipe { + private final LinearFilter fpsFilter = LinearFilter.movingAverage(20); + StopWatch clock = new StopWatch(); - @Override - protected Integer process(Void in) { - if (!clock.isStarted()) { - clock.reset(); - clock.start(); + @Override + protected Integer process(Void in) { + if (!clock.isStarted()) { + clock.reset(); + clock.start(); + } + clock.stop(); + var fps = (int) fpsFilter.calculate(1000.0 / clock.getTime()); + clock.reset(); + clock.start(); + return fps; } - clock.stop(); - var fps = (int) fpsFilter.calculate(1000.0 / 
clock.getTime()); - clock.reset(); - clock.start(); - return fps; - } - public static class CalculateFPSPipeParams {} + public static class CalculateFPSPipeParams {} } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/Calibrate3dPipe.java b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/Calibrate3dPipe.java index 131e2c6dcf..bfca09d6fd 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/Calibrate3dPipe.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/Calibrate3dPipe.java @@ -34,133 +34,133 @@ import org.photonvision.vision.pipe.CVPipe; public class Calibrate3dPipe - extends CVPipe< - List>, - CameraCalibrationCoefficients, - Calibrate3dPipe.CalibratePipeParams> { - // Camera matrix stores the center of the image and focal length across the x and y-axis in a 3x3 - // matrix - private final Mat cameraMatrix = new Mat(); - // Stores the radical and tangential distortion in a 5x1 matrix - private final MatOfDouble distortionCoefficients = new MatOfDouble(); - - // For logging - private static final Logger logger = new Logger(Calibrate3dPipe.class, LogGroup.General); - - // Translational and rotational matrices - private final List rvecs = new ArrayList<>(); - private final List tvecs = new ArrayList<>(); - - // The Standard deviation of the estimated parameters - private final Mat stdDeviationsIntrinsics = new Mat(); - private final Mat stdDeviationsExtrinsics = new Mat(); - - // Contains the re projection error of each snapshot by re projecting the corners we found and - // finding the Euclidean distance between the actual corners. - private final Mat perViewErrors = new Mat(); - - // RMS of the calibration - private double calibrationAccuracy; - - /** - * Runs the process for the pipe. - * - * @param in Input for pipe processing. In the format (Input image, object points, image points) - * @return Result of processing. 
- */ - @Override - protected CameraCalibrationCoefficients process(List> in) { - in = - in.stream() - .filter( - it -> - it != null - && it.getLeft() != null - && it.getMiddle() != null - && it.getRight() != null) - .collect(Collectors.toList()); - try { - // FindBoardCorners pipe outputs all the image points, object points, and frames to calculate - // imageSize from, other parameters are output Mats - - var objPoints = in.stream().map(Triple::getMiddle).collect(Collectors.toList()); - var imgPts = in.stream().map(Triple::getRight).collect(Collectors.toList()); - if (objPoints.size() != imgPts.size()) { - logger.error("objpts.size != imgpts.size"); - return null; - } - calibrationAccuracy = - Calib3d.calibrateCameraExtended( - objPoints, - imgPts, - new Size(in.get(0).getLeft().width, in.get(0).getLeft().height), - cameraMatrix, - distortionCoefficients, - rvecs, - tvecs, - stdDeviationsIntrinsics, - stdDeviationsExtrinsics, - perViewErrors); - } catch (Exception e) { - logger.error("Calibration failed!", e); - e.printStackTrace(); - return null; + extends CVPipe< + List>, + CameraCalibrationCoefficients, + Calibrate3dPipe.CalibratePipeParams> { + // Camera matrix stores the center of the image and focal length across the x and y-axis in a 3x3 + // matrix + private final Mat cameraMatrix = new Mat(); + // Stores the radical and tangential distortion in a 5x1 matrix + private final MatOfDouble distortionCoefficients = new MatOfDouble(); + + // For logging + private static final Logger logger = new Logger(Calibrate3dPipe.class, LogGroup.General); + + // Translational and rotational matrices + private final List rvecs = new ArrayList<>(); + private final List tvecs = new ArrayList<>(); + + // The Standard deviation of the estimated parameters + private final Mat stdDeviationsIntrinsics = new Mat(); + private final Mat stdDeviationsExtrinsics = new Mat(); + + // Contains the re projection error of each snapshot by re projecting the corners we found and + // finding 
the Euclidean distance between the actual corners. + private final Mat perViewErrors = new Mat(); + + // RMS of the calibration + private double calibrationAccuracy; + + /** + * Runs the process for the pipe. + * + * @param in Input for pipe processing. In the format (Input image, object points, image points) + * @return Result of processing. + */ + @Override + protected CameraCalibrationCoefficients process(List> in) { + in = + in.stream() + .filter( + it -> + it != null + && it.getLeft() != null + && it.getMiddle() != null + && it.getRight() != null) + .collect(Collectors.toList()); + try { + // FindBoardCorners pipe outputs all the image points, object points, and frames to calculate + // imageSize from, other parameters are output Mats + + var objPoints = in.stream().map(Triple::getMiddle).collect(Collectors.toList()); + var imgPts = in.stream().map(Triple::getRight).collect(Collectors.toList()); + if (objPoints.size() != imgPts.size()) { + logger.error("objpts.size != imgpts.size"); + return null; + } + calibrationAccuracy = + Calib3d.calibrateCameraExtended( + objPoints, + imgPts, + new Size(in.get(0).getLeft().width, in.get(0).getLeft().height), + cameraMatrix, + distortionCoefficients, + rvecs, + tvecs, + stdDeviationsIntrinsics, + stdDeviationsExtrinsics, + perViewErrors); + } catch (Exception e) { + logger.error("Calibration failed!", e); + e.printStackTrace(); + return null; + } + JsonMat cameraMatrixMat = JsonMat.fromMat(cameraMatrix); + JsonMat distortionCoefficientsMat = JsonMat.fromMat(distortionCoefficients); + // Create a new CameraCalibrationCoefficients object to pass onto SolvePnP + double[] perViewErrorsArray = + new double[(int) perViewErrors.total() * perViewErrors.channels()]; + perViewErrors.get(0, 0, perViewErrorsArray); + + // Standard deviation of results + double stdDev = calculateSD(perViewErrorsArray); + try { + // Print calibration successful + logger.info( + "CALIBRATION SUCCESS for res " + + params.resolution + + " (with accuracy " 
+ + calibrationAccuracy + + ")! camMatrix: \n" + + new ObjectMapper().writeValueAsString(cameraMatrixMat) + + "\ndistortionCoeffs:\n" + + new ObjectMapper().writeValueAsString(distortionCoefficientsMat) + + "\nWith Standard Deviation Of\n" + + stdDev + + "\n"); + } catch (JsonProcessingException e) { + logger.error("Failed to parse calibration data to json!", e); + } + return new CameraCalibrationCoefficients( + params.resolution, cameraMatrixMat, distortionCoefficientsMat, perViewErrorsArray, stdDev); } - JsonMat cameraMatrixMat = JsonMat.fromMat(cameraMatrix); - JsonMat distortionCoefficientsMat = JsonMat.fromMat(distortionCoefficients); - // Create a new CameraCalibrationCoefficients object to pass onto SolvePnP - double[] perViewErrorsArray = - new double[(int) perViewErrors.total() * perViewErrors.channels()]; - perViewErrors.get(0, 0, perViewErrorsArray); - - // Standard deviation of results - double stdDev = calculateSD(perViewErrorsArray); - try { - // Print calibration successful - logger.info( - "CALIBRATION SUCCESS for res " - + params.resolution - + " (with accuracy " - + calibrationAccuracy - + ")! 
camMatrix: \n" - + new ObjectMapper().writeValueAsString(cameraMatrixMat) - + "\ndistortionCoeffs:\n" - + new ObjectMapper().writeValueAsString(distortionCoefficientsMat) - + "\nWith Standard Deviation Of\n" - + stdDev - + "\n"); - } catch (JsonProcessingException e) { - logger.error("Failed to parse calibration data to json!", e); - } - return new CameraCalibrationCoefficients( - params.resolution, cameraMatrixMat, distortionCoefficientsMat, perViewErrorsArray, stdDev); - } - // Calculate standard deviation of the RMS error of the snapshots - private static double calculateSD(double[] numArray) { - double sum = 0.0, standardDeviation = 0.0; - int length = numArray.length; + // Calculate standard deviation of the RMS error of the snapshots + private static double calculateSD(double[] numArray) { + double sum = 0.0, standardDeviation = 0.0; + int length = numArray.length; - for (double num : numArray) { - sum += num; - } + for (double num : numArray) { + sum += num; + } - double mean = sum / length; + double mean = sum / length; - for (double num : numArray) { - standardDeviation += Math.pow(num - mean, 2); - } + for (double num : numArray) { + standardDeviation += Math.pow(num - mean, 2); + } - return Math.sqrt(standardDeviation / length); - } + return Math.sqrt(standardDeviation / length); + } - public static class CalibratePipeParams { - // Only needs resolution to pass onto CameraCalibrationCoefficients object. - private final Size resolution; + public static class CalibratePipeParams { + // Only needs resolution to pass onto CameraCalibrationCoefficients object. 
+ private final Size resolution; - public CalibratePipeParams(Size resolution) { - // logger.info("res: " + resolution.toString()); - this.resolution = resolution; + public CalibratePipeParams(Size resolution) { + // logger.info("res: " + resolution.toString()); + this.resolution = resolution; + } } - } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/Collect2dTargetsPipe.java b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/Collect2dTargetsPipe.java index 437ef1ec5a..d5e4bda995 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/Collect2dTargetsPipe.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/Collect2dTargetsPipe.java @@ -27,55 +27,55 @@ /** Represents a pipe that collects available 2d targets. */ public class Collect2dTargetsPipe - extends CVPipe< - List, List, Collect2dTargetsPipe.Collect2dTargetsParams> { - /** - * Processes this pipeline. - * - * @param in Input for pipe processing. - * @return A list of tracked targets. - */ - @Override - protected List process(List in) { - List targets = new ArrayList<>(); + extends CVPipe< + List, List, Collect2dTargetsPipe.Collect2dTargetsParams> { + /** + * Processes this pipeline. + * + * @param in Input for pipe processing. + * @return A list of tracked targets. 
+ */ + @Override + protected List process(List in) { + List targets = new ArrayList<>(); - var calculationParams = - new TrackedTarget.TargetCalculationParameters( - params.targetOrientation == TargetOrientation.Landscape, - params.targetOffsetPointEdge, - params.robotOffsetPointMode, - params.robotOffsetSinglePoint, - params.dualOffsetValues, - params.frameStaticProperties); + var calculationParams = + new TrackedTarget.TargetCalculationParameters( + params.targetOrientation == TargetOrientation.Landscape, + params.targetOffsetPointEdge, + params.robotOffsetPointMode, + params.robotOffsetSinglePoint, + params.dualOffsetValues, + params.frameStaticProperties); - for (PotentialTarget target : in) { - targets.add(new TrackedTarget(target, calculationParams, target.shape)); - } + for (PotentialTarget target : in) { + targets.add(new TrackedTarget(target, calculationParams, target.shape)); + } - return targets; - } + return targets; + } - public static class Collect2dTargetsParams { - private final RobotOffsetPointMode robotOffsetPointMode; - private final Point robotOffsetSinglePoint; - private final DualOffsetValues dualOffsetValues; - private final TargetOffsetPointEdge targetOffsetPointEdge; - private final TargetOrientation targetOrientation; - private final FrameStaticProperties frameStaticProperties; + public static class Collect2dTargetsParams { + private final RobotOffsetPointMode robotOffsetPointMode; + private final Point robotOffsetSinglePoint; + private final DualOffsetValues dualOffsetValues; + private final TargetOffsetPointEdge targetOffsetPointEdge; + private final TargetOrientation targetOrientation; + private final FrameStaticProperties frameStaticProperties; - public Collect2dTargetsParams( - RobotOffsetPointMode robotOffsetPointMode, - Point robotOffsetSinglePoint, - DualOffsetValues dualOffsetValues, - TargetOffsetPointEdge targetOffsetPointEdge, - TargetOrientation orientation, - FrameStaticProperties frameStaticProperties) { - 
this.frameStaticProperties = frameStaticProperties; - this.robotOffsetPointMode = robotOffsetPointMode; - this.robotOffsetSinglePoint = robotOffsetSinglePoint; - this.dualOffsetValues = dualOffsetValues; - this.targetOffsetPointEdge = targetOffsetPointEdge; - targetOrientation = orientation; + public Collect2dTargetsParams( + RobotOffsetPointMode robotOffsetPointMode, + Point robotOffsetSinglePoint, + DualOffsetValues dualOffsetValues, + TargetOffsetPointEdge targetOffsetPointEdge, + TargetOrientation orientation, + FrameStaticProperties frameStaticProperties) { + this.frameStaticProperties = frameStaticProperties; + this.robotOffsetPointMode = robotOffsetPointMode; + this.robotOffsetSinglePoint = robotOffsetSinglePoint; + this.dualOffsetValues = dualOffsetValues; + this.targetOffsetPointEdge = targetOffsetPointEdge; + targetOrientation = orientation; + } } - } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/CornerDetectionPipe.java b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/CornerDetectionPipe.java index 538403c333..bed3c6b5ef 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/CornerDetectionPipe.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/CornerDetectionPipe.java @@ -29,179 +29,179 @@ * CornerDetectionPipeParameters} affect how these corners are calculated. */ public class CornerDetectionPipe - extends CVPipe< - List, - List, - CornerDetectionPipe.CornerDetectionPipeParameters> { - Comparator leftRightComparator = Comparator.comparingDouble(point -> point.x); - Comparator verticalComparator = Comparator.comparingDouble(point -> point.y); - MatOfPoint2f polyOutput = new MatOfPoint2f(); - - @Override - protected List process(List targetList) { - for (var target : targetList) { - // detect corners. 
Might implement more algorithms later but - // APPROX_POLY_DP_AND_EXTREME_CORNERS should be year agnostic - if (Objects.requireNonNull(params.cornerDetectionStrategy) - == DetectionStrategy.APPROX_POLY_DP_AND_EXTREME_CORNERS) { - var targetCorners = detectExtremeCornersByApproxPolyDp(target, params.calculateConvexHulls); - target.setTargetCorners(targetCorners); - } + extends CVPipe< + List, + List, + CornerDetectionPipe.CornerDetectionPipeParameters> { + Comparator leftRightComparator = Comparator.comparingDouble(point -> point.x); + Comparator verticalComparator = Comparator.comparingDouble(point -> point.y); + MatOfPoint2f polyOutput = new MatOfPoint2f(); + + @Override + protected List process(List targetList) { + for (var target : targetList) { + // detect corners. Might implement more algorithms later but + // APPROX_POLY_DP_AND_EXTREME_CORNERS should be year agnostic + if (Objects.requireNonNull(params.cornerDetectionStrategy) + == DetectionStrategy.APPROX_POLY_DP_AND_EXTREME_CORNERS) { + var targetCorners = detectExtremeCornersByApproxPolyDp(target, params.calculateConvexHulls); + target.setTargetCorners(targetCorners); + } + } + return targetList; } - return targetList; - } - - /** - * @param target The target to find the corners of. 
- * @return Corners: (bottom-left, bottom-right, top-right, top-left) - */ - private List findBoundingBoxCorners(TrackedTarget target) { - // extract the corners - var points = new Point[4]; - target.m_mainContour.getMinAreaRect().points(points); - - // find the bl/br/tr/tl corners - // first, min by left/right - var list_ = Arrays.asList(points); - list_.sort(leftRightComparator); - // of this, we now have left and right - // sort to get top and bottom - var left = new ArrayList<>(List.of(list_.get(0), list_.get(1))); - left.sort(verticalComparator); - var right = new ArrayList<>(List.of(list_.get(2), list_.get(3))); - right.sort(verticalComparator); - - var tl = left.get(0); - var bl = left.get(1); - var tr = right.get(0); - var br = right.get(1); - - return List.of(bl, br, tr, tl); - } - - /** - * @param a First point. - * @param b Second point. - * @return The straight line distance between them. - */ - private static double distanceBetween(Point a, Point b) { - double xDelta = a.x - b.x; - double yDelta = a.y - b.y; - return Math.sqrt(xDelta * xDelta + yDelta * yDelta); - } - - /** - * Find the 4 most extreme corners of the target's contour. - * - * @param target The target to track. - * @param convexHull Whether to use the convex hull of the contour instead. - * @return The 4 extreme corners of the contour: (bottom-left, bottom-right, top-right, top-left) - */ - private List detectExtremeCornersByApproxPolyDp(TrackedTarget target, boolean convexHull) { - var centroid = target.getMinAreaRect().center; - Comparator compareCenterDist = - Comparator.comparingDouble((Point point) -> distanceBetween(centroid, point)); - - MatOfPoint2f targetContour; - if (convexHull) { - targetContour = target.m_mainContour.getConvexHull(); - } else { - targetContour = target.m_mainContour.getMat2f(); + + /** + * @param target The target to find the corners of. 
+ * @return Corners: (bottom-left, bottom-right, top-right, top-left) + */ + private List findBoundingBoxCorners(TrackedTarget target) { + // extract the corners + var points = new Point[4]; + target.m_mainContour.getMinAreaRect().points(points); + + // find the bl/br/tr/tl corners + // first, min by left/right + var list_ = Arrays.asList(points); + list_.sort(leftRightComparator); + // of this, we now have left and right + // sort to get top and bottom + var left = new ArrayList<>(List.of(list_.get(0), list_.get(1))); + left.sort(verticalComparator); + var right = new ArrayList<>(List.of(list_.get(2), list_.get(3))); + right.sort(verticalComparator); + + var tl = left.get(0); + var bl = left.get(1); + var tr = right.get(0); + var br = right.get(1); + + return List.of(bl, br, tr, tl); } - /* - approximating a shape around the contours - Can be tuned to allow/disallow hulls - we want a number between 0 and 0.16 out of a percentage from 0 to 100 - so take accuracy and divide by 600 - - Furthermore, we know that the contour is open if we haven't done convex hulls, - and it has subcontours. - */ - var isOpen = !convexHull && target.hasSubContours(); - var peri = Imgproc.arcLength(targetContour, true); - Imgproc.approxPolyDP( - targetContour, polyOutput, params.accuracyPercentage / 600.0 * peri, !isOpen); - - // we must have at least 4 corners for this strategy to work. - // If we are looking for an exact side count that is handled here too. 
- var pointList = new ArrayList<>(polyOutput.toList()); - if (pointList.size() < 4 || (params.exactSideCount && params.sideCount != pointList.size())) - return null; - - target.setApproximateBoundingPolygon(polyOutput); - - // left top, left bottom, right bottom, right top - var boundingBoxCorners = findBoundingBoxCorners(target); - - var compareDistToTl = - Comparator.comparingDouble((Point p) -> distanceBetween(p, boundingBoxCorners.get(3))); - - var compareDistToTr = - Comparator.comparingDouble((Point p) -> distanceBetween(p, boundingBoxCorners.get(2))); - - // top left and top right are the poly corners closest to the bouding box tl and tr - pointList.sort(compareDistToTl); - var tl = pointList.get(0); - pointList.remove(tl); - pointList.sort(compareDistToTr); - var tr = pointList.get(0); - pointList.remove(tr); - - // at this point we look for points on the left/right of the center of the remaining points - // and maximize their distance from the center of the min area rectangle - var leftList = new ArrayList(); - var rightList = new ArrayList(); - double averageXCoordinate = 0.0; - for (var p : pointList) { - averageXCoordinate += p.x; + /** + * @param a First point. + * @param b Second point. + * @return The straight line distance between them. + */ + private static double distanceBetween(Point a, Point b) { + double xDelta = a.x - b.x; + double yDelta = a.y - b.y; + return Math.sqrt(xDelta * xDelta + yDelta * yDelta); } - averageXCoordinate /= pointList.size(); - - // add points that are below the center of the min area rectangle of the target - for (var p : pointList) { - if (p.y - > target.m_mainContour.getBoundingRect().y - + target.m_mainContour.getBoundingRect().height / 2.0) - if (p.x < averageXCoordinate) { - leftList.add(p); + + /** + * Find the 4 most extreme corners of the target's contour. + * + * @param target The target to track. + * @param convexHull Whether to use the convex hull of the contour instead. 
+ * @return The 4 extreme corners of the contour: (bottom-left, bottom-right, top-right, top-left) + */ + private List detectExtremeCornersByApproxPolyDp(TrackedTarget target, boolean convexHull) { + var centroid = target.getMinAreaRect().center; + Comparator compareCenterDist = + Comparator.comparingDouble((Point point) -> distanceBetween(centroid, point)); + + MatOfPoint2f targetContour; + if (convexHull) { + targetContour = target.m_mainContour.getConvexHull(); } else { - rightList.add(p); + targetContour = target.m_mainContour.getMat2f(); + } + + /* + approximating a shape around the contours + Can be tuned to allow/disallow hulls + we want a number between 0 and 0.16 out of a percentage from 0 to 100 + so take accuracy and divide by 600 + + Furthermore, we know that the contour is open if we haven't done convex hulls, + and it has subcontours. + */ + var isOpen = !convexHull && target.hasSubContours(); + var peri = Imgproc.arcLength(targetContour, true); + Imgproc.approxPolyDP( + targetContour, polyOutput, params.accuracyPercentage / 600.0 * peri, !isOpen); + + // we must have at least 4 corners for this strategy to work. + // If we are looking for an exact side count that is handled here too. 
+ var pointList = new ArrayList<>(polyOutput.toList()); + if (pointList.size() < 4 || (params.exactSideCount && params.sideCount != pointList.size())) + return null; + + target.setApproximateBoundingPolygon(polyOutput); + + // left top, left bottom, right bottom, right top + var boundingBoxCorners = findBoundingBoxCorners(target); + + var compareDistToTl = + Comparator.comparingDouble((Point p) -> distanceBetween(p, boundingBoxCorners.get(3))); + + var compareDistToTr = + Comparator.comparingDouble((Point p) -> distanceBetween(p, boundingBoxCorners.get(2))); + + // top left and top right are the poly corners closest to the bouding box tl and tr + pointList.sort(compareDistToTl); + var tl = pointList.get(0); + pointList.remove(tl); + pointList.sort(compareDistToTr); + var tr = pointList.get(0); + pointList.remove(tr); + + // at this point we look for points on the left/right of the center of the remaining points + // and maximize their distance from the center of the min area rectangle + var leftList = new ArrayList(); + var rightList = new ArrayList(); + double averageXCoordinate = 0.0; + for (var p : pointList) { + averageXCoordinate += p.x; } + averageXCoordinate /= pointList.size(); + + // add points that are below the center of the min area rectangle of the target + for (var p : pointList) { + if (p.y + > target.m_mainContour.getBoundingRect().y + + target.m_mainContour.getBoundingRect().height / 2.0) + if (p.x < averageXCoordinate) { + leftList.add(p); + } else { + rightList.add(p); + } + } + if (leftList.isEmpty() || rightList.isEmpty()) return null; + leftList.sort(compareCenterDist); + rightList.sort(compareCenterDist); + var bl = leftList.get(leftList.size() - 1); + var br = rightList.get(rightList.size() - 1); + return List.of(bl, br, tr, tl); } - if (leftList.isEmpty() || rightList.isEmpty()) return null; - leftList.sort(compareCenterDist); - rightList.sort(compareCenterDist); - var bl = leftList.get(leftList.size() - 1); - var br = 
rightList.get(rightList.size() - 1); - return List.of(bl, br, tr, tl); - } - - public static class CornerDetectionPipeParameters { - private final DetectionStrategy cornerDetectionStrategy; - - private final boolean calculateConvexHulls; - private final boolean exactSideCount; - private final int sideCount; - - /** This number can be changed to change how "accurate" our approximate polygon must be. */ - private final double accuracyPercentage; - - public CornerDetectionPipeParameters( - DetectionStrategy cornerDetectionStrategy, - boolean calculateConvexHulls, - boolean exactSideCount, - int sideCount, - double accuracyPercentage) { - this.cornerDetectionStrategy = cornerDetectionStrategy; - this.calculateConvexHulls = calculateConvexHulls; - this.exactSideCount = exactSideCount; - this.sideCount = sideCount; - this.accuracyPercentage = accuracyPercentage; + + public static class CornerDetectionPipeParameters { + private final DetectionStrategy cornerDetectionStrategy; + + private final boolean calculateConvexHulls; + private final boolean exactSideCount; + private final int sideCount; + + /** This number can be changed to change how "accurate" our approximate polygon must be. 
*/ + private final double accuracyPercentage; + + public CornerDetectionPipeParameters( + DetectionStrategy cornerDetectionStrategy, + boolean calculateConvexHulls, + boolean exactSideCount, + int sideCount, + double accuracyPercentage) { + this.cornerDetectionStrategy = cornerDetectionStrategy; + this.calculateConvexHulls = calculateConvexHulls; + this.exactSideCount = exactSideCount; + this.sideCount = sideCount; + this.accuracyPercentage = accuracyPercentage; + } } - } - public enum DetectionStrategy { - APPROX_POLY_DP_AND_EXTREME_CORNERS - } + public enum DetectionStrategy { + APPROX_POLY_DP_AND_EXTREME_CORNERS + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/Draw2dAprilTagsPipe.java b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/Draw2dAprilTagsPipe.java index 5ae2b59200..ec88e06fe3 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/Draw2dAprilTagsPipe.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/Draw2dAprilTagsPipe.java @@ -21,14 +21,14 @@ import org.photonvision.vision.frame.FrameDivisor; public class Draw2dAprilTagsPipe extends Draw2dTargetsPipe { - public static class Draw2dAprilTagsParams extends Draw2dTargetsPipe.Draw2dTargetsParams { - public Draw2dAprilTagsParams( - boolean shouldDraw, boolean showMultipleTargets, FrameDivisor divisor) { - super(shouldDraw, showMultipleTargets, divisor); - // We want to show the polygon, not the rotated box - this.showRotatedBox = false; - this.showMaximumBox = false; - this.rotatedBoxColor = Color.RED; + public static class Draw2dAprilTagsParams extends Draw2dTargetsPipe.Draw2dTargetsParams { + public Draw2dAprilTagsParams( + boolean shouldDraw, boolean showMultipleTargets, FrameDivisor divisor) { + super(shouldDraw, showMultipleTargets, divisor); + // We want to show the polygon, not the rotated box + this.showRotatedBox = false; + this.showMaximumBox = false; + this.rotatedBoxColor = Color.RED; + } } - } } diff --git 
a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/Draw2dArucoPipe.java b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/Draw2dArucoPipe.java index 036c9ecdce..85f4ef1801 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/Draw2dArucoPipe.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/Draw2dArucoPipe.java @@ -21,14 +21,14 @@ import org.photonvision.vision.frame.FrameDivisor; public class Draw2dArucoPipe extends Draw2dTargetsPipe { - public static class Draw2dArucoParams extends Draw2dTargetsPipe.Draw2dTargetsParams { - public Draw2dArucoParams( - boolean shouldDraw, boolean showMultipleTargets, FrameDivisor divisor) { - super(shouldDraw, showMultipleTargets, divisor); - // We want to show the polygon, not the rotated box - this.showRotatedBox = false; - this.showMaximumBox = false; - this.rotatedBoxColor = Color.RED; + public static class Draw2dArucoParams extends Draw2dTargetsPipe.Draw2dTargetsParams { + public Draw2dArucoParams( + boolean shouldDraw, boolean showMultipleTargets, FrameDivisor divisor) { + super(shouldDraw, showMultipleTargets, divisor); + // We want to show the polygon, not the rotated box + this.showRotatedBox = false; + this.showMaximumBox = false; + this.rotatedBoxColor = Color.RED; + } } - } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/Draw2dCrosshairPipe.java b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/Draw2dCrosshairPipe.java index c9fc3ba280..53f5852e72 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/Draw2dCrosshairPipe.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/Draw2dCrosshairPipe.java @@ -34,101 +34,101 @@ import org.photonvision.vision.target.TrackedTarget; public class Draw2dCrosshairPipe - extends MutatingPipe< - Pair>, Draw2dCrosshairPipe.Draw2dCrosshairParams> { - @Override - protected Void process(Pair> in) { - if (!params.shouldDraw) return null; + extends 
MutatingPipe< + Pair>, Draw2dCrosshairPipe.Draw2dCrosshairParams> { + @Override + protected Void process(Pair> in) { + if (!params.shouldDraw) return null; - var image = in.getLeft(); + var image = in.getLeft(); - if (params.showCrosshair) { - double x = params.frameStaticProperties.centerX; - double y = params.frameStaticProperties.centerY; - double scale = params.frameStaticProperties.imageWidth / (double) params.divisor.value / 32.0; + if (params.showCrosshair) { + double x = params.frameStaticProperties.centerX; + double y = params.frameStaticProperties.centerY; + double scale = params.frameStaticProperties.imageWidth / (double) params.divisor.value / 32.0; - if (this.params.rotMode == ImageRotationMode.DEG_270 - || this.params.rotMode == ImageRotationMode.DEG_90) { - var tmp = x; - x = y; - y = tmp; - } + if (this.params.rotMode == ImageRotationMode.DEG_270 + || this.params.rotMode == ImageRotationMode.DEG_90) { + var tmp = x; + x = y; + y = tmp; + } - switch (params.robotOffsetPointMode) { - case Single: - if (params.singleOffsetPoint.x != 0 && params.singleOffsetPoint.y != 0) { - x = params.singleOffsetPoint.x; - y = params.singleOffsetPoint.y; - } - break; - case Dual: - if (!in.getRight().isEmpty()) { - var target = in.getRight().get(0); - if (target != null) { - var area = target.getArea(); - var offsetCrosshair = - TargetCalculations.calculateDualOffsetCrosshair(params.dualOffsetValues, area); - x = offsetCrosshair.x; - y = offsetCrosshair.y; + switch (params.robotOffsetPointMode) { + case Single: + if (params.singleOffsetPoint.x != 0 && params.singleOffsetPoint.y != 0) { + x = params.singleOffsetPoint.x; + y = params.singleOffsetPoint.y; + } + break; + case Dual: + if (!in.getRight().isEmpty()) { + var target = in.getRight().get(0); + if (target != null) { + var area = target.getArea(); + var offsetCrosshair = + TargetCalculations.calculateDualOffsetCrosshair(params.dualOffsetValues, area); + x = offsetCrosshair.x; + y = offsetCrosshair.y; + } + } + 
break; } - } - break; - } - x /= (double) params.divisor.value; - y /= (double) params.divisor.value; + x /= (double) params.divisor.value; + y /= (double) params.divisor.value; - Point xMax = new Point(x + scale, y); - Point xMin = new Point(x - scale, y); - Point yMax = new Point(x, y + scale); - Point yMin = new Point(x, y - scale); + Point xMax = new Point(x + scale, y); + Point xMin = new Point(x - scale, y); + Point yMax = new Point(x, y + scale); + Point yMin = new Point(x, y - scale); - Imgproc.line(image, xMax, xMin, ColorHelper.colorToScalar(params.crosshairColor)); - Imgproc.line(image, yMax, yMin, ColorHelper.colorToScalar(params.crosshairColor)); + Imgproc.line(image, xMax, xMin, ColorHelper.colorToScalar(params.crosshairColor)); + Imgproc.line(image, yMax, yMin, ColorHelper.colorToScalar(params.crosshairColor)); + } + return null; } - return null; - } - public static class Draw2dCrosshairParams { - public boolean showCrosshair = true; - public Color crosshairColor = Color.GREEN; + public static class Draw2dCrosshairParams { + public boolean showCrosshair = true; + public Color crosshairColor = Color.GREEN; - public final boolean shouldDraw; - public final FrameStaticProperties frameStaticProperties; - public final ImageRotationMode rotMode; - public final RobotOffsetPointMode robotOffsetPointMode; - public final Point singleOffsetPoint; - public final DualOffsetValues dualOffsetValues; - private final FrameDivisor divisor; + public final boolean shouldDraw; + public final FrameStaticProperties frameStaticProperties; + public final ImageRotationMode rotMode; + public final RobotOffsetPointMode robotOffsetPointMode; + public final Point singleOffsetPoint; + public final DualOffsetValues dualOffsetValues; + private final FrameDivisor divisor; - public Draw2dCrosshairParams( - FrameStaticProperties frameStaticProperties, - FrameDivisor divisor, - ImageRotationMode rotMode) { - shouldDraw = true; - this.frameStaticProperties = frameStaticProperties; - 
this.rotMode = rotMode; - robotOffsetPointMode = RobotOffsetPointMode.None; - singleOffsetPoint = new Point(); - dualOffsetValues = new DualOffsetValues(); - this.divisor = divisor; - } + public Draw2dCrosshairParams( + FrameStaticProperties frameStaticProperties, + FrameDivisor divisor, + ImageRotationMode rotMode) { + shouldDraw = true; + this.frameStaticProperties = frameStaticProperties; + this.rotMode = rotMode; + robotOffsetPointMode = RobotOffsetPointMode.None; + singleOffsetPoint = new Point(); + dualOffsetValues = new DualOffsetValues(); + this.divisor = divisor; + } - public Draw2dCrosshairParams( - boolean shouldDraw, - RobotOffsetPointMode robotOffsetPointMode, - Point singleOffsetPoint, - DualOffsetValues dualOffsetValues, - FrameStaticProperties frameStaticProperties, - FrameDivisor divisor, - ImageRotationMode rotMode) { - this.shouldDraw = shouldDraw; - this.frameStaticProperties = frameStaticProperties; - this.robotOffsetPointMode = robotOffsetPointMode; - this.singleOffsetPoint = singleOffsetPoint; - this.dualOffsetValues = dualOffsetValues; - this.divisor = divisor; - this.rotMode = rotMode; + public Draw2dCrosshairParams( + boolean shouldDraw, + RobotOffsetPointMode robotOffsetPointMode, + Point singleOffsetPoint, + DualOffsetValues dualOffsetValues, + FrameStaticProperties frameStaticProperties, + FrameDivisor divisor, + ImageRotationMode rotMode) { + this.shouldDraw = shouldDraw; + this.frameStaticProperties = frameStaticProperties; + this.robotOffsetPointMode = robotOffsetPointMode; + this.singleOffsetPoint = singleOffsetPoint; + this.dualOffsetValues = dualOffsetValues; + this.divisor = divisor; + this.rotMode = rotMode; + } } - } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/Draw2dTargetsPipe.java b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/Draw2dTargetsPipe.java index 159698ac1e..3365a5cf3e 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/Draw2dTargetsPipe.java +++ 
b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/Draw2dTargetsPipe.java @@ -33,225 +33,225 @@ import org.photonvision.vision.target.TrackedTarget; public class Draw2dTargetsPipe - extends MutatingPipe>, Draw2dTargetsPipe.Draw2dTargetsParams> { - MatOfPoint tempMat = new MatOfPoint(); - private static final Logger logger = new Logger(Draw2dTargetsPipe.class, LogGroup.General); - - @Override - protected Void process(Pair> in) { - var imRows = in.getLeft().rows(); - var imCols = in.getLeft().cols(); - var imageSize = Math.sqrt(imRows * imCols); - var textSize = params.kPixelsToText * imageSize; - var thickness = params.kPixelsToThickness * imageSize; - - if (!params.shouldDraw) return null; - - if (!in.getRight().isEmpty() - && (params.showCentroid - || params.showMaximumBox - || params.showRotatedBox - || params.showShape)) { - var centroidColour = ColorHelper.colorToScalar(params.centroidColor); - var maximumBoxColour = ColorHelper.colorToScalar(params.maximumBoxColor); - var rotatedBoxColour = ColorHelper.colorToScalar(params.rotatedBoxColor); - var circleColor = ColorHelper.colorToScalar(params.circleColor); - var shapeColour = ColorHelper.colorToScalar(params.shapeOutlineColour); - - for (int i = 0; i < (params.showMultipleTargets ? 
in.getRight().size() : 1); i++) { - Point[] vertices = new Point[4]; - MatOfPoint contour = new MatOfPoint(); - - if (i != 0 && !params.showMultipleTargets) { - break; + extends MutatingPipe>, Draw2dTargetsPipe.Draw2dTargetsParams> { + MatOfPoint tempMat = new MatOfPoint(); + private static final Logger logger = new Logger(Draw2dTargetsPipe.class, LogGroup.General); + + @Override + protected Void process(Pair> in) { + var imRows = in.getLeft().rows(); + var imCols = in.getLeft().cols(); + var imageSize = Math.sqrt(imRows * imCols); + var textSize = params.kPixelsToText * imageSize; + var thickness = params.kPixelsToThickness * imageSize; + + if (!params.shouldDraw) return null; + + if (!in.getRight().isEmpty() + && (params.showCentroid + || params.showMaximumBox + || params.showRotatedBox + || params.showShape)) { + var centroidColour = ColorHelper.colorToScalar(params.centroidColor); + var maximumBoxColour = ColorHelper.colorToScalar(params.maximumBoxColor); + var rotatedBoxColour = ColorHelper.colorToScalar(params.rotatedBoxColor); + var circleColor = ColorHelper.colorToScalar(params.circleColor); + var shapeColour = ColorHelper.colorToScalar(params.shapeOutlineColour); + + for (int i = 0; i < (params.showMultipleTargets ? 
in.getRight().size() : 1); i++) { + Point[] vertices = new Point[4]; + MatOfPoint contour = new MatOfPoint(); + + if (i != 0 && !params.showMultipleTargets) { + break; + } + + TrackedTarget target = in.getRight().get(i); + RotatedRect r = target.getMinAreaRect(); + + if (r == null) continue; + + r.points(vertices); + dividePointArray(vertices); + contour.fromArray(vertices); + + if (params.shouldShowRotatedBox(target.getShape())) { + Imgproc.drawContours( + in.getLeft(), + List.of(contour), + 0, + rotatedBoxColour, + (int) Math.ceil(imageSize * params.kPixelsToBoxThickness)); + } else if (params.shouldShowCircle(target.getShape())) { + Imgproc.circle( + in.getLeft(), + target.getShape().center, + (int) target.getShape().radius, + circleColor, + (int) Math.ceil(imageSize * params.kPixelsToBoxThickness)); + } else { + // draw approximate polygon + var poly = target.getApproximateBoundingPolygon(); + + // fall back on the shape's approx poly dp + if (poly == null && target.getShape() != null) + poly = target.getShape().getContour().getApproxPolyDp(); + if (poly != null) { + var mat = new MatOfPoint(); + mat.fromArray(poly.toArray()); + divideMat(mat, mat); + Imgproc.drawContours( + in.getLeft(), + List.of(mat), + -1, + ColorHelper.colorToScalar(params.rotatedBoxColor), + 2); + mat.release(); + } + } + + if (params.showMaximumBox) { + Rect box = Imgproc.boundingRect(contour); + Imgproc.rectangle( + in.getLeft(), + new Point(box.x, box.y), + new Point(box.x + box.width, box.y + box.height), + maximumBoxColour, + (int) Math.ceil(imageSize * params.kPixelsToBoxThickness)); + } + + if (params.showShape) { + divideMat(target.m_mainContour.mat, tempMat); + Imgproc.drawContours( + in.getLeft(), + List.of(tempMat), + -1, + shapeColour, + (int) Math.ceil(imageSize * params.kPixelsToBoxThickness)); + } + + if (params.showContourNumber) { + var center = target.m_mainContour.getCenterPoint(); + var textPos = + new Point( + center.x + params.kPixelsToOffset * imageSize, + center.y 
- params.kPixelsToOffset * imageSize); + dividePoint(textPos); + + int id = target.getFiducialId(); + var contourNumber = String.valueOf(id == -1 ? i : id); + + Imgproc.putText( + in.getLeft(), + contourNumber, + textPos, + 0, + textSize, + ColorHelper.colorToScalar(params.textColor), + (int) thickness); + } + + if (params.showCentroid) { + Point centroid = target.getTargetOffsetPoint().clone(); + dividePoint(centroid); + var crosshairRadius = (int) (imageSize * params.kPixelsToCentroidRadius); + var x = centroid.x; + var y = centroid.y; + Point xMax = new Point(x + crosshairRadius, y); + Point xMin = new Point(x - crosshairRadius, y); + Point yMax = new Point(x, y + crosshairRadius); + Point yMin = new Point(x, y - crosshairRadius); + + Imgproc.line( + in.getLeft(), + xMax, + xMin, + centroidColour, + (int) Math.ceil(imageSize * params.kPixelsToBoxThickness)); + Imgproc.line( + in.getLeft(), + yMax, + yMin, + centroidColour, + (int) Math.ceil(imageSize * params.kPixelsToBoxThickness)); + } + } } - TrackedTarget target = in.getRight().get(i); - RotatedRect r = target.getMinAreaRect(); - - if (r == null) continue; - - r.points(vertices); - dividePointArray(vertices); - contour.fromArray(vertices); - - if (params.shouldShowRotatedBox(target.getShape())) { - Imgproc.drawContours( - in.getLeft(), - List.of(contour), - 0, - rotatedBoxColour, - (int) Math.ceil(imageSize * params.kPixelsToBoxThickness)); - } else if (params.shouldShowCircle(target.getShape())) { - Imgproc.circle( - in.getLeft(), - target.getShape().center, - (int) target.getShape().radius, - circleColor, - (int) Math.ceil(imageSize * params.kPixelsToBoxThickness)); - } else { - // draw approximate polygon - var poly = target.getApproximateBoundingPolygon(); - - // fall back on the shape's approx poly dp - if (poly == null && target.getShape() != null) - poly = target.getShape().getContour().getApproxPolyDp(); - if (poly != null) { - var mat = new MatOfPoint(); - mat.fromArray(poly.toArray()); - 
divideMat(mat, mat); - Imgproc.drawContours( - in.getLeft(), - List.of(mat), - -1, - ColorHelper.colorToScalar(params.rotatedBoxColor), - 2); - mat.release(); - } - } - - if (params.showMaximumBox) { - Rect box = Imgproc.boundingRect(contour); - Imgproc.rectangle( - in.getLeft(), - new Point(box.x, box.y), - new Point(box.x + box.width, box.y + box.height), - maximumBoxColour, - (int) Math.ceil(imageSize * params.kPixelsToBoxThickness)); - } - - if (params.showShape) { - divideMat(target.m_mainContour.mat, tempMat); - Imgproc.drawContours( - in.getLeft(), - List.of(tempMat), - -1, - shapeColour, - (int) Math.ceil(imageSize * params.kPixelsToBoxThickness)); - } + return null; + } - if (params.showContourNumber) { - var center = target.m_mainContour.getCenterPoint(); - var textPos = - new Point( - center.x + params.kPixelsToOffset * imageSize, - center.y - params.kPixelsToOffset * imageSize); - dividePoint(textPos); - - int id = target.getFiducialId(); - var contourNumber = String.valueOf(id == -1 ? 
i : id); - - Imgproc.putText( - in.getLeft(), - contourNumber, - textPos, - 0, - textSize, - ColorHelper.colorToScalar(params.textColor), - (int) thickness); + private void divideMat(MatOfPoint src, MatOfPoint dst) { + var hull = src.toArray(); + for (Point point : hull) { + dividePoint(point); } + dst.fromArray(hull); + } - if (params.showCentroid) { - Point centroid = target.getTargetOffsetPoint().clone(); - dividePoint(centroid); - var crosshairRadius = (int) (imageSize * params.kPixelsToCentroidRadius); - var x = centroid.x; - var y = centroid.y; - Point xMax = new Point(x + crosshairRadius, y); - Point xMin = new Point(x - crosshairRadius, y); - Point yMax = new Point(x, y + crosshairRadius); - Point yMin = new Point(x, y - crosshairRadius); - - Imgproc.line( - in.getLeft(), - xMax, - xMin, - centroidColour, - (int) Math.ceil(imageSize * params.kPixelsToBoxThickness)); - Imgproc.line( - in.getLeft(), - yMax, - yMin, - centroidColour, - (int) Math.ceil(imageSize * params.kPixelsToBoxThickness)); + private void divideMat(MatOfPoint2f src, MatOfPoint dst) { + var hull = src.toArray(); + for (Point point : hull) { + dividePoint(point); } - } + dst.fromArray(hull); } - return null; - } - - private void divideMat(MatOfPoint src, MatOfPoint dst) { - var hull = src.toArray(); - for (Point point : hull) { - dividePoint(point); + /** Scale a given point list by the current frame divisor. the point list is mutated! */ + private void dividePointList(List points) { + for (var p : points) { + dividePoint(p); + } } - dst.fromArray(hull); - } - private void divideMat(MatOfPoint2f src, MatOfPoint dst) { - var hull = src.toArray(); - for (Point point : hull) { - dividePoint(point); + /** Scale a given point array by the current frame divisor. the point list is mutated! */ + private void dividePointArray(Point[] points) { + for (var p : points) { + dividePoint(p); + } } - dst.fromArray(hull); - } - /** Scale a given point list by the current frame divisor. 
the point list is mutated! */ - private void dividePointList(List points) { - for (var p : points) { - dividePoint(p); + private void dividePoint(Point p) { + p.x = p.x / (double) params.divisor.value; + p.y = p.y / (double) params.divisor.value; } - } - /** Scale a given point array by the current frame divisor. the point list is mutated! */ - private void dividePointArray(Point[] points) { - for (var p : points) { - dividePoint(p); - } - } - - private void dividePoint(Point p) { - p.x = p.x / (double) params.divisor.value; - p.y = p.y / (double) params.divisor.value; - } - - public static class Draw2dTargetsParams { - public double kPixelsToText = 0.0025; - public double kPixelsToThickness = 0.008; - public double kPixelsToOffset = 0.04; - public double kPixelsToBoxThickness = 0.007; - public double kPixelsToCentroidRadius = 0.03; - public boolean showCentroid = true; - public boolean showRotatedBox = true; - public boolean showShape = false; - public boolean showMaximumBox = true; - public boolean showContourNumber = true; - public Color centroidColor = Color.GREEN; // Color.decode("#ff5ebf"); - public Color rotatedBoxColor = Color.BLUE; - public Color maximumBoxColor = Color.RED; - public Color shapeOutlineColour = Color.MAGENTA; - public Color textColor = Color.GREEN; - public Color circleColor = Color.RED; - - public final boolean showMultipleTargets; - public final boolean shouldDraw; - - public final FrameDivisor divisor; - - public boolean shouldShowRotatedBox(CVShape shape) { - return showRotatedBox && (shape == null || shape.shape.equals(ContourShape.Quadrilateral)); - } + public static class Draw2dTargetsParams { + public double kPixelsToText = 0.0025; + public double kPixelsToThickness = 0.008; + public double kPixelsToOffset = 0.04; + public double kPixelsToBoxThickness = 0.007; + public double kPixelsToCentroidRadius = 0.03; + public boolean showCentroid = true; + public boolean showRotatedBox = true; + public boolean showShape = false; + public 
boolean showMaximumBox = true; + public boolean showContourNumber = true; + public Color centroidColor = Color.GREEN; // Color.decode("#ff5ebf"); + public Color rotatedBoxColor = Color.BLUE; + public Color maximumBoxColor = Color.RED; + public Color shapeOutlineColour = Color.MAGENTA; + public Color textColor = Color.GREEN; + public Color circleColor = Color.RED; + + public final boolean showMultipleTargets; + public final boolean shouldDraw; + + public final FrameDivisor divisor; + + public boolean shouldShowRotatedBox(CVShape shape) { + return showRotatedBox && (shape == null || shape.shape.equals(ContourShape.Quadrilateral)); + } - public boolean shouldShowCircle(CVShape shape) { - return shape != null && shape.shape.equals(ContourShape.Circle); - } + public boolean shouldShowCircle(CVShape shape) { + return shape != null && shape.shape.equals(ContourShape.Circle); + } - public Draw2dTargetsParams( - boolean shouldDraw, boolean showMultipleTargets, FrameDivisor divisor) { - this.shouldDraw = shouldDraw; - this.showMultipleTargets = showMultipleTargets; - this.divisor = divisor; + public Draw2dTargetsParams( + boolean shouldDraw, boolean showMultipleTargets, FrameDivisor divisor) { + this.shouldDraw = shouldDraw; + this.showMultipleTargets = showMultipleTargets; + this.divisor = divisor; + } } - } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/Draw3dAprilTagsPipe.java b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/Draw3dAprilTagsPipe.java index 26f1939b74..017e973372 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/Draw3dAprilTagsPipe.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/Draw3dAprilTagsPipe.java @@ -22,15 +22,15 @@ import org.photonvision.vision.target.TargetModel; public class Draw3dAprilTagsPipe extends Draw3dTargetsPipe { - public static class Draw3dAprilTagsParams extends Draw3dContoursParams { - public Draw3dAprilTagsParams( - boolean shouldDraw, - 
CameraCalibrationCoefficients cameraCalibrationCoefficients, - TargetModel targetModel, - FrameDivisor divisor) { - super(shouldDraw, cameraCalibrationCoefficients, targetModel, divisor); - this.shouldDrawHull = false; - this.redistortPoints = true; + public static class Draw3dAprilTagsParams extends Draw3dContoursParams { + public Draw3dAprilTagsParams( + boolean shouldDraw, + CameraCalibrationCoefficients cameraCalibrationCoefficients, + TargetModel targetModel, + FrameDivisor divisor) { + super(shouldDraw, cameraCalibrationCoefficients, targetModel, divisor); + this.shouldDrawHull = false; + this.redistortPoints = true; + } } - } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/Draw3dArucoPipe.java b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/Draw3dArucoPipe.java index e048b80e30..0fdd719358 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/Draw3dArucoPipe.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/Draw3dArucoPipe.java @@ -22,14 +22,14 @@ import org.photonvision.vision.target.TargetModel; public class Draw3dArucoPipe extends Draw3dTargetsPipe { - public static class Draw3dArucoParams extends Draw3dContoursParams { - public Draw3dArucoParams( - boolean shouldDraw, - CameraCalibrationCoefficients cameraCalibrationCoefficients, - TargetModel targetModel, - FrameDivisor divisor) { - super(shouldDraw, cameraCalibrationCoefficients, targetModel, divisor); - this.shouldDrawHull = false; + public static class Draw3dArucoParams extends Draw3dContoursParams { + public Draw3dArucoParams( + boolean shouldDraw, + CameraCalibrationCoefficients cameraCalibrationCoefficients, + TargetModel targetModel, + FrameDivisor divisor) { + super(shouldDraw, cameraCalibrationCoefficients, targetModel, divisor); + this.shouldDrawHull = false; + } } - } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/Draw3dTargetsPipe.java 
b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/Draw3dTargetsPipe.java index 11f8794a86..99927ea0f1 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/Draw3dTargetsPipe.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/Draw3dTargetsPipe.java @@ -35,279 +35,279 @@ import org.photonvision.vision.target.TrackedTarget; public class Draw3dTargetsPipe - extends MutatingPipe>, Draw3dTargetsPipe.Draw3dContoursParams> { - Logger logger = new Logger(Draw3dTargetsPipe.class, LogGroup.VisionModule); - - @Override - protected Void process(Pair> in) { - if (!params.shouldDraw) return null; - if (params.cameraCalibrationCoefficients == null - || params.cameraCalibrationCoefficients.getCameraIntrinsicsMat() == null - || params.cameraCalibrationCoefficients.getDistCoeffsMat() == null) { - return null; - } - - for (var target : in.getRight()) { - // draw convex hull - if (params.shouldDrawHull(target)) { - var pointMat = new MatOfPoint(); - divideMat2f(target.m_mainContour.getConvexHull(), pointMat); - if (pointMat.size().empty()) { - logger.error("Convex hull is empty?"); - logger.debug( - "Orig. 
Convex Hull: " + target.m_mainContour.getConvexHull().size().toString()); - continue; - } - Imgproc.drawContours( - in.getLeft(), List.of(pointMat), -1, ColorHelper.colorToScalar(Color.green), 1); - - // draw approximate polygon - var poly = target.getApproximateBoundingPolygon(); - if (poly != null) { - divideMat2f(poly, pointMat); - Imgproc.drawContours( - in.getLeft(), List.of(pointMat), -1, ColorHelper.colorToScalar(Color.blue), 2); - } - pointMat.release(); - } - - // Draw floor and top - if (target.getCameraRelativeRvec() != null - && target.getCameraRelativeTvec() != null - && params.shouldDrawBox) { - var tempMat = new MatOfPoint2f(); - - var jac = new Mat(); - var bottomModel = params.targetModel.getVisualizationBoxBottom(); - var topModel = params.targetModel.getVisualizationBoxTop(); - - Calib3d.projectPoints( - bottomModel, - target.getCameraRelativeRvec(), - target.getCameraRelativeTvec(), - params.cameraCalibrationCoefficients.getCameraIntrinsicsMat(), - params.cameraCalibrationCoefficients.getDistCoeffsMat(), - tempMat, - jac); - - if (params.redistortPoints) { - // Distort the points, so they match the image they're being overlaid on - distortPoints(tempMat, tempMat); + extends MutatingPipe>, Draw3dTargetsPipe.Draw3dContoursParams> { + Logger logger = new Logger(Draw3dTargetsPipe.class, LogGroup.VisionModule); + + @Override + protected Void process(Pair> in) { + if (!params.shouldDraw) return null; + if (params.cameraCalibrationCoefficients == null + || params.cameraCalibrationCoefficients.getCameraIntrinsicsMat() == null + || params.cameraCalibrationCoefficients.getDistCoeffsMat() == null) { + return null; } - var bottomPoints = tempMat.toList(); + for (var target : in.getRight()) { + // draw convex hull + if (params.shouldDrawHull(target)) { + var pointMat = new MatOfPoint(); + divideMat2f(target.m_mainContour.getConvexHull(), pointMat); + if (pointMat.size().empty()) { + logger.error("Convex hull is empty?"); + logger.debug( + "Orig. 
Convex Hull: " + target.m_mainContour.getConvexHull().size().toString()); + continue; + } + Imgproc.drawContours( + in.getLeft(), List.of(pointMat), -1, ColorHelper.colorToScalar(Color.green), 1); + + // draw approximate polygon + var poly = target.getApproximateBoundingPolygon(); + if (poly != null) { + divideMat2f(poly, pointMat); + Imgproc.drawContours( + in.getLeft(), List.of(pointMat), -1, ColorHelper.colorToScalar(Color.blue), 2); + } + pointMat.release(); + } + + // Draw floor and top + if (target.getCameraRelativeRvec() != null + && target.getCameraRelativeTvec() != null + && params.shouldDrawBox) { + var tempMat = new MatOfPoint2f(); + + var jac = new Mat(); + var bottomModel = params.targetModel.getVisualizationBoxBottom(); + var topModel = params.targetModel.getVisualizationBoxTop(); + + Calib3d.projectPoints( + bottomModel, + target.getCameraRelativeRvec(), + target.getCameraRelativeTvec(), + params.cameraCalibrationCoefficients.getCameraIntrinsicsMat(), + params.cameraCalibrationCoefficients.getDistCoeffsMat(), + tempMat, + jac); + + if (params.redistortPoints) { + // Distort the points, so they match the image they're being overlaid on + distortPoints(tempMat, tempMat); + } + + var bottomPoints = tempMat.toList(); + + Calib3d.projectPoints( + topModel, + target.getCameraRelativeRvec(), + target.getCameraRelativeTvec(), + params.cameraCalibrationCoefficients.getCameraIntrinsicsMat(), + params.cameraCalibrationCoefficients.getDistCoeffsMat(), + tempMat, + jac); + + if (params.redistortPoints) { + // Distort the points, so they match the image they're being overlaid on + distortPoints(tempMat, tempMat); + } + var topPoints = tempMat.toList(); + + dividePointList(bottomPoints); + dividePointList(topPoints); + + // floor, then pillars, then top + for (int i = 0; i < bottomPoints.size(); i++) { + Imgproc.line( + in.getLeft(), + bottomPoints.get(i), + bottomPoints.get((i + 1) % (bottomPoints.size())), + ColorHelper.colorToScalar(Color.green), + 3); + } + + 
// Draw X, Y and Z axis + MatOfPoint3f pointMat = new MatOfPoint3f(); + // OpenCV expects coordinates in EDN, but we want to visualize in NWU + // NWU | EDN + // X: Z + // Y: -X + // Z: -Y + final double AXIS_LEN = 0.2; + var list = + List.of( + new Point3(0, 0, 0), + new Point3(0, 0, AXIS_LEN), // x-axis + new Point3(-AXIS_LEN, 0, 0), // y-axis + new Point3(0, -AXIS_LEN, 0)); // z-axis + pointMat.fromList(list); + + // The detected target's rvec and tvec perform a rotation-translation transformation which + // converts points in the target's coordinate system to the camera's. This means applying + // the transformation to the target point (0,0,0) for example would give the target's center + // relative to the camera. + Calib3d.projectPoints( + pointMat, + target.getCameraRelativeRvec(), + target.getCameraRelativeTvec(), + params.cameraCalibrationCoefficients.getCameraIntrinsicsMat(), + params.cameraCalibrationCoefficients.getDistCoeffsMat(), + tempMat, + jac); + var axisPoints = tempMat.toList(); + dividePointList(axisPoints); + + // XYZ is RGB + // y-axis = green + Imgproc.line( + in.getLeft(), + axisPoints.get(0), + axisPoints.get(2), + ColorHelper.colorToScalar(Color.GREEN), + 3); + // z-axis = blue + Imgproc.line( + in.getLeft(), + axisPoints.get(0), + axisPoints.get(3), + ColorHelper.colorToScalar(Color.BLUE), + 3); + // x-axis = red + Imgproc.line( + in.getLeft(), + axisPoints.get(0), + axisPoints.get(1), + ColorHelper.colorToScalar(Color.RED), + 3); + + // box edges perpendicular to tag + for (int i = 0; i < bottomPoints.size(); i++) { + Imgproc.line( + in.getLeft(), + bottomPoints.get(i), + topPoints.get(i), + ColorHelper.colorToScalar(Color.blue), + 3); + } + // box edges parallel to tag + for (int i = 0; i < topPoints.size(); i++) { + Imgproc.line( + in.getLeft(), + topPoints.get(i), + topPoints.get((i + 1) % (bottomPoints.size())), + ColorHelper.colorToScalar(Color.orange), + 3); + } + + tempMat.release(); + jac.release(); + pointMat.release(); + } + + 
// draw corners + var corners = target.getTargetCorners(); + if (corners != null && !corners.isEmpty()) { + for (var corner : corners) { + var x = corner.x / (double) params.divisor.value; + var y = corner.y / (double) params.divisor.value; + + Imgproc.circle( + in.getLeft(), + new Point(x, y), + params.radius, + ColorHelper.colorToScalar(params.color), + params.radius); + } + } + } - Calib3d.projectPoints( - topModel, - target.getCameraRelativeRvec(), - target.getCameraRelativeTvec(), - params.cameraCalibrationCoefficients.getCameraIntrinsicsMat(), - params.cameraCalibrationCoefficients.getDistCoeffsMat(), - tempMat, - jac); + return null; + } - if (params.redistortPoints) { - // Distort the points, so they match the image they're being overlaid on - distortPoints(tempMat, tempMat); - } - var topPoints = tempMat.toList(); - - dividePointList(bottomPoints); - dividePointList(topPoints); - - // floor, then pillars, then top - for (int i = 0; i < bottomPoints.size(); i++) { - Imgproc.line( - in.getLeft(), - bottomPoints.get(i), - bottomPoints.get((i + 1) % (bottomPoints.size())), - ColorHelper.colorToScalar(Color.green), - 3); + private void distortPoints(MatOfPoint2f src, MatOfPoint2f dst) { + var pointsList = src.toList(); + var dstList = new ArrayList(); + final Mat cameraMatrix = params.cameraCalibrationCoefficients.getCameraIntrinsicsMat(); + // k1, k2, p1, p2, k3 + final Mat distCoeffs = params.cameraCalibrationCoefficients.getDistCoeffsMat(); + var cx = cameraMatrix.get(0, 2)[0]; + var cy = cameraMatrix.get(1, 2)[0]; + var fx = cameraMatrix.get(0, 0)[0]; + var fy = cameraMatrix.get(1, 1)[0]; + var k1 = distCoeffs.get(0, 0)[0]; + var k2 = distCoeffs.get(0, 1)[0]; + var k3 = distCoeffs.get(0, 4)[0]; + var p1 = distCoeffs.get(0, 2)[0]; + var p2 = distCoeffs.get(0, 3)[0]; + + for (Point point : pointsList) { + // To relative coordinates <- this is the step you are missing. 
+ double x = (point.x - cx) / fx; // cx, cy is the center of distortion + double y = (point.y - cy) / fy; + + double r2 = x * x + y * y; // square of the radius from center + + // Radial distortion + double xDistort = x * (1 + k1 * r2 + k2 * r2 * r2 + k3 * r2 * r2 * r2); + double yDistort = y * (1 + k1 * r2 + k2 * r2 * r2 + k3 * r2 * r2 * r2); + + // Tangential distortion + xDistort = xDistort + (2 * p1 * x * y + p2 * (r2 + 2 * x * x)); + yDistort = yDistort + (p1 * (r2 + 2 * y * y) + 2 * p2 * x * y); + + // Back to absolute coordinates. + xDistort = xDistort * fx + cx; + yDistort = yDistort * fy + cy; + dstList.add(new Point(xDistort, yDistort)); } + dst.fromList(dstList); + } - // Draw X, Y and Z axis - MatOfPoint3f pointMat = new MatOfPoint3f(); - // OpenCV expects coordinates in EDN, but we want to visualize in NWU - // NWU | EDN - // X: Z - // Y: -X - // Z: -Y - final double AXIS_LEN = 0.2; - var list = - List.of( - new Point3(0, 0, 0), - new Point3(0, 0, AXIS_LEN), // x-axis - new Point3(-AXIS_LEN, 0, 0), // y-axis - new Point3(0, -AXIS_LEN, 0)); // z-axis - pointMat.fromList(list); - - // The detected target's rvec and tvec perform a rotation-translation transformation which - // converts points in the target's coordinate system to the camera's. This means applying - // the transformation to the target point (0,0,0) for example would give the target's center - // relative to the camera. 
- Calib3d.projectPoints( - pointMat, - target.getCameraRelativeRvec(), - target.getCameraRelativeTvec(), - params.cameraCalibrationCoefficients.getCameraIntrinsicsMat(), - params.cameraCalibrationCoefficients.getDistCoeffsMat(), - tempMat, - jac); - var axisPoints = tempMat.toList(); - dividePointList(axisPoints); - - // XYZ is RGB - // y-axis = green - Imgproc.line( - in.getLeft(), - axisPoints.get(0), - axisPoints.get(2), - ColorHelper.colorToScalar(Color.GREEN), - 3); - // z-axis = blue - Imgproc.line( - in.getLeft(), - axisPoints.get(0), - axisPoints.get(3), - ColorHelper.colorToScalar(Color.BLUE), - 3); - // x-axis = red - Imgproc.line( - in.getLeft(), - axisPoints.get(0), - axisPoints.get(1), - ColorHelper.colorToScalar(Color.RED), - 3); - - // box edges perpendicular to tag - for (int i = 0; i < bottomPoints.size(); i++) { - Imgproc.line( - in.getLeft(), - bottomPoints.get(i), - topPoints.get(i), - ColorHelper.colorToScalar(Color.blue), - 3); - } - // box edges parallel to tag - for (int i = 0; i < topPoints.size(); i++) { - Imgproc.line( - in.getLeft(), - topPoints.get(i), - topPoints.get((i + 1) % (bottomPoints.size())), - ColorHelper.colorToScalar(Color.orange), - 3); + private void divideMat2f(MatOfPoint2f src, MatOfPoint dst) { + var hull = src.toArray(); + var pointArray = new Point[hull.length]; + for (int i = 0; i < hull.length; i++) { + var hullAtI = hull[i]; + pointArray[i] = + new Point( + hullAtI.x / (double) params.divisor.value, hullAtI.y / (double) params.divisor.value); } + dst.fromArray(pointArray); + } - tempMat.release(); - jac.release(); - pointMat.release(); - } - - // draw corners - var corners = target.getTargetCorners(); - if (corners != null && !corners.isEmpty()) { - for (var corner : corners) { - var x = corner.x / (double) params.divisor.value; - var y = corner.y / (double) params.divisor.value; - - Imgproc.circle( - in.getLeft(), - new Point(x, y), - params.radius, - ColorHelper.colorToScalar(params.color), - params.radius); + 
/** Scale a given point list by the current frame divisor. the point list is mutated! */ + private void dividePointList(List points) { + for (var p : points) { + p.x = p.x / (double) params.divisor.value; + p.y = p.y / (double) params.divisor.value; } - } } - return null; - } - - private void distortPoints(MatOfPoint2f src, MatOfPoint2f dst) { - var pointsList = src.toList(); - var dstList = new ArrayList(); - final Mat cameraMatrix = params.cameraCalibrationCoefficients.getCameraIntrinsicsMat(); - // k1, k2, p1, p2, k3 - final Mat distCoeffs = params.cameraCalibrationCoefficients.getDistCoeffsMat(); - var cx = cameraMatrix.get(0, 2)[0]; - var cy = cameraMatrix.get(1, 2)[0]; - var fx = cameraMatrix.get(0, 0)[0]; - var fy = cameraMatrix.get(1, 1)[0]; - var k1 = distCoeffs.get(0, 0)[0]; - var k2 = distCoeffs.get(0, 1)[0]; - var k3 = distCoeffs.get(0, 4)[0]; - var p1 = distCoeffs.get(0, 2)[0]; - var p2 = distCoeffs.get(0, 3)[0]; - - for (Point point : pointsList) { - // To relative coordinates <- this is the step you are missing. - double x = (point.x - cx) / fx; // cx, cy is the center of distortion - double y = (point.y - cy) / fy; - - double r2 = x * x + y * y; // square of the radius from center - - // Radial distortion - double xDistort = x * (1 + k1 * r2 + k2 * r2 * r2 + k3 * r2 * r2 * r2); - double yDistort = y * (1 + k1 * r2 + k2 * r2 * r2 + k3 * r2 * r2 * r2); - - // Tangential distortion - xDistort = xDistort + (2 * p1 * x * y + p2 * (r2 + 2 * x * x)); - yDistort = yDistort + (p1 * (r2 + 2 * y * y) + 2 * p2 * x * y); - - // Back to absolute coordinates. 
- xDistort = xDistort * fx + cx; - yDistort = yDistort * fy + cy; - dstList.add(new Point(xDistort, yDistort)); - } - dst.fromList(dstList); - } - - private void divideMat2f(MatOfPoint2f src, MatOfPoint dst) { - var hull = src.toArray(); - var pointArray = new Point[hull.length]; - for (int i = 0; i < hull.length; i++) { - var hullAtI = hull[i]; - pointArray[i] = - new Point( - hullAtI.x / (double) params.divisor.value, hullAtI.y / (double) params.divisor.value); - } - dst.fromArray(pointArray); - } - - /** Scale a given point list by the current frame divisor. the point list is mutated! */ - private void dividePointList(List points) { - for (var p : points) { - p.x = p.x / (double) params.divisor.value; - p.y = p.y / (double) params.divisor.value; - } - } - - public static class Draw3dContoursParams { - public int radius = 2; - public Color color = Color.RED; - - public final boolean shouldDraw; - public boolean shouldDrawHull = true; - public boolean shouldDrawBox = true; - public final TargetModel targetModel; - public final CameraCalibrationCoefficients cameraCalibrationCoefficients; - public final FrameDivisor divisor; - - public boolean redistortPoints = false; - - public Draw3dContoursParams( - boolean shouldDraw, - CameraCalibrationCoefficients cameraCalibrationCoefficients, - TargetModel targetModel, - FrameDivisor divisor) { - this.shouldDraw = shouldDraw; - this.cameraCalibrationCoefficients = cameraCalibrationCoefficients; - this.targetModel = targetModel; - this.divisor = divisor; - } + public static class Draw3dContoursParams { + public int radius = 2; + public Color color = Color.RED; + + public final boolean shouldDraw; + public boolean shouldDrawHull = true; + public boolean shouldDrawBox = true; + public final TargetModel targetModel; + public final CameraCalibrationCoefficients cameraCalibrationCoefficients; + public final FrameDivisor divisor; + + public boolean redistortPoints = false; + + public Draw3dContoursParams( + boolean shouldDraw, + 
CameraCalibrationCoefficients cameraCalibrationCoefficients, + TargetModel targetModel, + FrameDivisor divisor) { + this.shouldDraw = shouldDraw; + this.cameraCalibrationCoefficients = cameraCalibrationCoefficients; + this.targetModel = targetModel; + this.divisor = divisor; + } - public boolean shouldDrawHull(TrackedTarget t) { - return !t.isFiducial() && this.shouldDrawHull; + public boolean shouldDrawHull(TrackedTarget t) { + return !t.isFiducial() && this.shouldDrawHull; + } } - } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/DrawCornerDetectionPipe.java b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/DrawCornerDetectionPipe.java index f7a88c6227..9ef05ad99f 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/DrawCornerDetectionPipe.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/DrawCornerDetectionPipe.java @@ -26,23 +26,23 @@ import org.photonvision.vision.target.TrackedTarget; public class DrawCornerDetectionPipe - extends MutatingPipe>, DrawCornerDetectionPipe.DrawCornerParams> { - @Override - protected Void process(Pair> in) { - Mat image = in.getLeft(); + extends MutatingPipe>, DrawCornerDetectionPipe.DrawCornerParams> { + @Override + protected Void process(Pair> in) { + Mat image = in.getLeft(); - for (var target : in.getRight()) { - var corners = target.getTargetCorners(); - for (var corner : corners) { - Imgproc.circle(image, corner, params.dotRadius, params.dotColor); - } - } + for (var target : in.getRight()) { + var corners = target.getTargetCorners(); + for (var corner : corners) { + Imgproc.circle(image, corner, params.dotRadius, params.dotColor); + } + } - return null; - } + return null; + } - public static class DrawCornerParams { - int dotRadius; - Scalar dotColor; - } + public static class DrawCornerParams { + int dotRadius; + Scalar dotColor; + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/ErodeDilatePipe.java 
b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/ErodeDilatePipe.java index 35781f7b4f..9d09a5d198 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/ErodeDilatePipe.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/ErodeDilatePipe.java @@ -23,39 +23,39 @@ import org.photonvision.vision.pipe.MutatingPipe; public class ErodeDilatePipe extends MutatingPipe { - @Override - protected Void process(Mat in) { - if (params.shouldErode()) { - Imgproc.erode(in, in, params.getKernel()); - } - if (params.shouldDilate()) { - Imgproc.dilate(in, in, params.getKernel()); - } - return null; - } - - public static class ErodeDilateParams { - private final boolean m_erode; - private final boolean m_dilate; - private final Mat m_kernel; - - public ErodeDilateParams(boolean erode, boolean dilate, int kernelSize) { - m_erode = erode; - m_dilate = dilate; - m_kernel = - Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(kernelSize, kernelSize)); - } - - public boolean shouldErode() { - return m_erode; - } - - public boolean shouldDilate() { - return m_dilate; + @Override + protected Void process(Mat in) { + if (params.shouldErode()) { + Imgproc.erode(in, in, params.getKernel()); + } + if (params.shouldDilate()) { + Imgproc.dilate(in, in, params.getKernel()); + } + return null; } - public Mat getKernel() { - return m_kernel; + public static class ErodeDilateParams { + private final boolean m_erode; + private final boolean m_dilate; + private final Mat m_kernel; + + public ErodeDilateParams(boolean erode, boolean dilate, int kernelSize) { + m_erode = erode; + m_dilate = dilate; + m_kernel = + Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(kernelSize, kernelSize)); + } + + public boolean shouldErode() { + return m_erode; + } + + public boolean shouldDilate() { + return m_dilate; + } + + public Mat getKernel() { + return m_kernel; + } } - } } diff --git 
a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/FilterContoursPipe.java b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/FilterContoursPipe.java index 25af4d3ac3..1e7616c44f 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/FilterContoursPipe.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/FilterContoursPipe.java @@ -27,120 +27,120 @@ import org.photonvision.vision.target.TargetCalculations; public class FilterContoursPipe - extends CVPipe, List, FilterContoursPipe.FilterContoursParams> { - List m_filteredContours = new ArrayList<>(); - - @Override - protected List process(List in) { - m_filteredContours.clear(); - for (Contour contour : in) { - filterContour(contour); - } - - // we need the whole list for outlier rejection - rejectOutliers(m_filteredContours, params.xTol, params.yTol); - - return m_filteredContours; - } - - private void rejectOutliers(List list, double xTol, double yTol) { - if (list.size() < 2) return; // Must have at least 2 points to reject outliers - - double meanX = list.stream().mapToDouble(it -> it.getCenterPoint().x).sum() / list.size(); + extends CVPipe, List, FilterContoursPipe.FilterContoursParams> { + List m_filteredContours = new ArrayList<>(); - double stdDevX = - list.stream().mapToDouble(it -> Math.pow(it.getCenterPoint().x - meanX, 2.0)).sum(); - stdDevX /= (list.size() - 1); - stdDevX = Math.sqrt(stdDevX); + @Override + protected List process(List in) { + m_filteredContours.clear(); + for (Contour contour : in) { + filterContour(contour); + } - double meanY = list.stream().mapToDouble(it -> it.getCenterPoint().y).sum() / list.size(); - - double stdDevY = - list.stream().mapToDouble(it -> Math.pow(it.getCenterPoint().y - meanY, 2.0)).sum(); - stdDevY /= (list.size() - 1); - stdDevY = Math.sqrt(stdDevY); - - for (var it = list.iterator(); it.hasNext(); ) { - // Reject points more than N standard devs above/below median - // That is, |point - median| > std dev 
* tol - Contour c = it.next(); - double x = c.getCenterPoint().x; - double y = c.getCenterPoint().y; - - if (Math.abs(x - meanX) > stdDevX * xTol) { - it.remove(); - } else if (Math.abs(y - meanY) > stdDevY * yTol) { - it.remove(); - } - // Otherwise we're good! Keep it in - } - } - - private void filterContour(Contour contour) { - RotatedRect minAreaRect = contour.getMinAreaRect(); - - // Area Filtering. - double areaPercentage = - minAreaRect.size.area() / params.getFrameStaticProperties().imageArea * 100.0; - double minAreaPercentage = params.getArea().getFirst(); - double maxAreaPercentage = params.getArea().getSecond(); - if (areaPercentage < minAreaPercentage || areaPercentage > maxAreaPercentage) return; - - // Fullness Filtering. - double contourArea = contour.getArea(); - double minFullness = params.getFullness().getFirst() * minAreaRect.size.area() / 100.0; - double maxFullness = params.getFullness().getSecond() * minAreaRect.size.area() / 100.0; - if (contourArea <= minFullness || contourArea >= maxFullness) return; - - // Aspect Ratio Filtering. 
- double aspectRatio = - TargetCalculations.getAspectRatio(contour.getMinAreaRect(), params.isLandscape); - if (aspectRatio < params.getRatio().getFirst() || aspectRatio > params.getRatio().getSecond()) - return; - - m_filteredContours.add(contour); - } - - public static class FilterContoursParams { - private final DoubleCouple m_area; - private final DoubleCouple m_ratio; - private final DoubleCouple m_fullness; - private final FrameStaticProperties m_frameStaticProperties; - private final double xTol; // IQR tolerance for x - private final double yTol; // IQR tolerance for x - public final boolean isLandscape; - - public FilterContoursParams( - DoubleCouple area, - DoubleCouple ratio, - DoubleCouple extent, - FrameStaticProperties camProperties, - double xTol, - double yTol, - boolean isLandscape) { - this.m_area = area; - this.m_ratio = ratio; - this.m_fullness = extent; - this.m_frameStaticProperties = camProperties; - this.xTol = xTol; - this.yTol = yTol; - this.isLandscape = isLandscape; - } + // we need the whole list for outlier rejection + rejectOutliers(m_filteredContours, params.xTol, params.yTol); - public DoubleCouple getArea() { - return m_area; + return m_filteredContours; } - public DoubleCouple getRatio() { - return m_ratio; + private void rejectOutliers(List list, double xTol, double yTol) { + if (list.size() < 2) return; // Must have at least 2 points to reject outliers + + double meanX = list.stream().mapToDouble(it -> it.getCenterPoint().x).sum() / list.size(); + + double stdDevX = + list.stream().mapToDouble(it -> Math.pow(it.getCenterPoint().x - meanX, 2.0)).sum(); + stdDevX /= (list.size() - 1); + stdDevX = Math.sqrt(stdDevX); + + double meanY = list.stream().mapToDouble(it -> it.getCenterPoint().y).sum() / list.size(); + + double stdDevY = + list.stream().mapToDouble(it -> Math.pow(it.getCenterPoint().y - meanY, 2.0)).sum(); + stdDevY /= (list.size() - 1); + stdDevY = Math.sqrt(stdDevY); + + for (var it = list.iterator(); it.hasNext(); ) { 
+ // Reject points more than N standard devs above/below median + // That is, |point - median| > std dev * tol + Contour c = it.next(); + double x = c.getCenterPoint().x; + double y = c.getCenterPoint().y; + + if (Math.abs(x - meanX) > stdDevX * xTol) { + it.remove(); + } else if (Math.abs(y - meanY) > stdDevY * yTol) { + it.remove(); + } + // Otherwise we're good! Keep it in + } } - public DoubleCouple getFullness() { - return m_fullness; + private void filterContour(Contour contour) { + RotatedRect minAreaRect = contour.getMinAreaRect(); + + // Area Filtering. + double areaPercentage = + minAreaRect.size.area() / params.getFrameStaticProperties().imageArea * 100.0; + double minAreaPercentage = params.getArea().getFirst(); + double maxAreaPercentage = params.getArea().getSecond(); + if (areaPercentage < minAreaPercentage || areaPercentage > maxAreaPercentage) return; + + // Fullness Filtering. + double contourArea = contour.getArea(); + double minFullness = params.getFullness().getFirst() * minAreaRect.size.area() / 100.0; + double maxFullness = params.getFullness().getSecond() * minAreaRect.size.area() / 100.0; + if (contourArea <= minFullness || contourArea >= maxFullness) return; + + // Aspect Ratio Filtering. 
+ double aspectRatio = + TargetCalculations.getAspectRatio(contour.getMinAreaRect(), params.isLandscape); + if (aspectRatio < params.getRatio().getFirst() || aspectRatio > params.getRatio().getSecond()) + return; + + m_filteredContours.add(contour); } - public FrameStaticProperties getFrameStaticProperties() { - return m_frameStaticProperties; + public static class FilterContoursParams { + private final DoubleCouple m_area; + private final DoubleCouple m_ratio; + private final DoubleCouple m_fullness; + private final FrameStaticProperties m_frameStaticProperties; + private final double xTol; // IQR tolerance for x + private final double yTol; // IQR tolerance for x + public final boolean isLandscape; + + public FilterContoursParams( + DoubleCouple area, + DoubleCouple ratio, + DoubleCouple extent, + FrameStaticProperties camProperties, + double xTol, + double yTol, + boolean isLandscape) { + this.m_area = area; + this.m_ratio = ratio; + this.m_fullness = extent; + this.m_frameStaticProperties = camProperties; + this.xTol = xTol; + this.yTol = yTol; + this.isLandscape = isLandscape; + } + + public DoubleCouple getArea() { + return m_area; + } + + public DoubleCouple getRatio() { + return m_ratio; + } + + public DoubleCouple getFullness() { + return m_fullness; + } + + public FrameStaticProperties getFrameStaticProperties() { + return m_frameStaticProperties; + } } - } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/FilterShapesPipe.java b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/FilterShapesPipe.java index c2c95a51d7..1802d6971f 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/FilterShapesPipe.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/FilterShapesPipe.java @@ -25,63 +25,63 @@ import org.photonvision.vision.pipe.CVPipe; public class FilterShapesPipe - extends CVPipe, List, FilterShapesPipe.FilterShapesPipeParams> { - List outputList = new ArrayList<>(); + extends 
CVPipe, List, FilterShapesPipe.FilterShapesPipeParams> { + List outputList = new ArrayList<>(); - /** - * Runs the process for the pipe. - * - * @param in Input for pipe processing. - * @return Result of processing. - */ - @Override - protected List process(List in) { - outputList.forEach(CVShape::release); - outputList.clear(); - outputList = new ArrayList<>(); + /** + * Runs the process for the pipe. + * + * @param in Input for pipe processing. + * @return Result of processing. + */ + @Override + protected List process(List in) { + outputList.forEach(CVShape::release); + outputList.clear(); + outputList = new ArrayList<>(); - for (var shape : in) { - if (!shouldRemove(shape)) outputList.add(shape); - } + for (var shape : in) { + if (!shouldRemove(shape)) outputList.add(shape); + } - return outputList; - } + return outputList; + } - private boolean shouldRemove(CVShape shape) { - return shape.shape != params.desiredShape - || shape.contour.getArea() / params.getFrameStaticProperties().imageArea * 100.0 - > params.maxArea - || shape.contour.getArea() / params.getFrameStaticProperties().imageArea * 100.0 - < params.minArea - || shape.contour.getPerimeter() > params.maxPeri - || shape.contour.getPerimeter() < params.minPeri; - } + private boolean shouldRemove(CVShape shape) { + return shape.shape != params.desiredShape + || shape.contour.getArea() / params.getFrameStaticProperties().imageArea * 100.0 + > params.maxArea + || shape.contour.getArea() / params.getFrameStaticProperties().imageArea * 100.0 + < params.minArea + || shape.contour.getPerimeter() > params.maxPeri + || shape.contour.getPerimeter() < params.minPeri; + } - public static class FilterShapesPipeParams { - private final ContourShape desiredShape; - private final FrameStaticProperties frameStaticProperties; - private final double minArea; - private final double maxArea; - private final double minPeri; - private final double maxPeri; + public static class FilterShapesPipeParams { + private final 
ContourShape desiredShape; + private final FrameStaticProperties frameStaticProperties; + private final double minArea; + private final double maxArea; + private final double minPeri; + private final double maxPeri; - public FilterShapesPipeParams( - ContourShape desiredShape, - double minArea, - double maxArea, - double minPeri, - double maxPeri, - FrameStaticProperties frameStaticProperties) { - this.desiredShape = desiredShape; - this.minArea = minArea; - this.maxArea = maxArea; - this.minPeri = minPeri; - this.maxPeri = maxPeri; - this.frameStaticProperties = frameStaticProperties; - } + public FilterShapesPipeParams( + ContourShape desiredShape, + double minArea, + double maxArea, + double minPeri, + double maxPeri, + FrameStaticProperties frameStaticProperties) { + this.desiredShape = desiredShape; + this.minArea = minArea; + this.maxArea = maxArea; + this.minPeri = minPeri; + this.maxPeri = maxPeri; + this.frameStaticProperties = frameStaticProperties; + } - public FrameStaticProperties getFrameStaticProperties() { - return frameStaticProperties; + public FrameStaticProperties getFrameStaticProperties() { + return frameStaticProperties; + } } - } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/FindBoardCornersPipe.java b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/FindBoardCornersPipe.java index 279c3bb819..678566c707 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/FindBoardCornersPipe.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/FindBoardCornersPipe.java @@ -29,306 +29,306 @@ import org.photonvision.vision.pipeline.UICalibrationData; public class FindBoardCornersPipe - extends CVPipe< - Pair, Triple, FindBoardCornersPipe.FindCornersPipeParams> { - private static final Logger logger = - new Logger(FindBoardCornersPipe.class, LogGroup.VisionModule); - - MatOfPoint3f objectPoints = new MatOfPoint3f(); - - Size imageSize; - Size patternSize; - - // Configure the 
optimizations used while using OpenCV's find corners algorithm - // Since we return results in real-time, we want to ensure it goes as fast as possible - // and fails as fast as possible. - final int findChessboardFlags = - Calib3d.CALIB_CB_NORMALIZE_IMAGE - | Calib3d.CALIB_CB_ADAPTIVE_THRESH - | Calib3d.CALIB_CB_FILTER_QUADS - | Calib3d.CALIB_CB_FAST_CHECK; - - private final MatOfPoint2f boardCorners = new MatOfPoint2f(); - - // Intermediate result mat's - Mat smallerInFrame = new Mat(); - MatOfPoint2f smallerBoardCorners = new MatOfPoint2f(); - - // SubCornerPix params - private final Size zeroZone = new Size(-1, -1); - private final TermCriteria criteria = new TermCriteria(3, 30, 0.001); - - private FindCornersPipeParams lastParams = null; - - public void createObjectPoints() { - if (this.lastParams != null && this.lastParams.equals(this.params)) return; - this.lastParams = this.params; - - this.objectPoints.release(); - this.objectPoints = null; - this.objectPoints = new MatOfPoint3f(); - - /*If using a chessboard, then the pattern size if the inner corners of the board. For example, the pattern size of a 9x9 chessboard would be 8x8 - If using a dot board, then the pattern size width is the sum of the bottom 2 rows and the height is the left or right most column - For example, a 5x4 dot board would have a pattern size of 11x4 - We subtract 1 for chessboard because the UI prompts users for the number of squares, not the - number of corners. - * */ - this.patternSize = - params.type == UICalibrationData.BoardType.CHESSBOARD - ? 
new Size(params.boardWidth - 1, params.boardHeight - 1) - : new Size(params.boardWidth, params.boardHeight); - - // Chessboard and dot board have different 3D points to project as a dot board has alternating - // dots per column - if (params.type == UICalibrationData.BoardType.CHESSBOARD) { - // Here we can create an NxN grid since a chessboard is rectangular - for (int heightIdx = 0; heightIdx < patternSize.height; heightIdx++) { - for (int widthIdx = 0; widthIdx < patternSize.width; widthIdx++) { - double boardYCoord = heightIdx * params.gridSize; - double boardXCoord = widthIdx * params.gridSize; - objectPoints.push_back(new MatOfPoint3f(new Point3(boardXCoord, boardYCoord, 0.0))); + extends CVPipe< + Pair, Triple, FindBoardCornersPipe.FindCornersPipeParams> { + private static final Logger logger = + new Logger(FindBoardCornersPipe.class, LogGroup.VisionModule); + + MatOfPoint3f objectPoints = new MatOfPoint3f(); + + Size imageSize; + Size patternSize; + + // Configure the optimizations used while using OpenCV's find corners algorithm + // Since we return results in real-time, we want to ensure it goes as fast as possible + // and fails as fast as possible. 
+ final int findChessboardFlags = + Calib3d.CALIB_CB_NORMALIZE_IMAGE + | Calib3d.CALIB_CB_ADAPTIVE_THRESH + | Calib3d.CALIB_CB_FILTER_QUADS + | Calib3d.CALIB_CB_FAST_CHECK; + + private final MatOfPoint2f boardCorners = new MatOfPoint2f(); + + // Intermediate result mat's + Mat smallerInFrame = new Mat(); + MatOfPoint2f smallerBoardCorners = new MatOfPoint2f(); + + // SubCornerPix params + private final Size zeroZone = new Size(-1, -1); + private final TermCriteria criteria = new TermCriteria(3, 30, 0.001); + + private FindCornersPipeParams lastParams = null; + + public void createObjectPoints() { + if (this.lastParams != null && this.lastParams.equals(this.params)) return; + this.lastParams = this.params; + + this.objectPoints.release(); + this.objectPoints = null; + this.objectPoints = new MatOfPoint3f(); + + /*If using a chessboard, then the pattern size if the inner corners of the board. For example, the pattern size of a 9x9 chessboard would be 8x8 + If using a dot board, then the pattern size width is the sum of the bottom 2 rows and the height is the left or right most column + For example, a 5x4 dot board would have a pattern size of 11x4 + We subtract 1 for chessboard because the UI prompts users for the number of squares, not the + number of corners. + * */ + this.patternSize = + params.type == UICalibrationData.BoardType.CHESSBOARD + ? 
new Size(params.boardWidth - 1, params.boardHeight - 1) + : new Size(params.boardWidth, params.boardHeight); + + // Chessboard and dot board have different 3D points to project as a dot board has alternating + // dots per column + if (params.type == UICalibrationData.BoardType.CHESSBOARD) { + // Here we can create an NxN grid since a chessboard is rectangular + for (int heightIdx = 0; heightIdx < patternSize.height; heightIdx++) { + for (int widthIdx = 0; widthIdx < patternSize.width; widthIdx++) { + double boardYCoord = heightIdx * params.gridSize; + double boardXCoord = widthIdx * params.gridSize; + objectPoints.push_back(new MatOfPoint3f(new Point3(boardXCoord, boardYCoord, 0.0))); + } + } + } else if (params.type == UICalibrationData.BoardType.DOTBOARD) { + // Here we need to alternate the amount of dots per column since a dot board is not + // rectangular and also by taking in account the grid size which should be in mm + for (int i = 0; i < patternSize.height; i++) { + for (int j = 0; j < patternSize.width; j++) { + objectPoints.push_back( + new MatOfPoint3f( + new Point3((2 * j + i % 2) * params.gridSize, i * params.gridSize, 0.0d))); + } + } + } else { + logger.error("Can't create pattern for unknown board type " + params.type); } - } - } else if (params.type == UICalibrationData.BoardType.DOTBOARD) { - // Here we need to alternate the amount of dots per column since a dot board is not - // rectangular and also by taking in account the grid size which should be in mm - for (int i = 0; i < patternSize.height; i++) { - for (int j = 0; j < patternSize.width; j++) { - objectPoints.push_back( - new MatOfPoint3f( - new Point3((2 * j + i % 2) * params.gridSize, i * params.gridSize, 0.0d))); - } - } - } else { - logger.error("Can't create pattern for unknown board type " + params.type); } - } - - /** - * Finds the corners in a given image and returns them - * - * @param in Input for pipe processing. 
Pair of input and output mat - * @return All valid Mats for camera calibration - */ - @Override - protected Triple process(Pair in) { - // Create the object points - createObjectPoints(); - - return findBoardCorners(in); - } - - /** - * Figures out how much a frame or point cloud must be scaled down by to match the desired size at - * which to run FindCorners. Should usually be > 1. - * - * @param inFrame - * @return - */ - private double getFindCornersScaleFactor(Mat inFrame) { - return 1.0 / params.divisor.value; - } - - /** - * Finds the minimum spacing between a set of x/y points Currently only considers points whose - * index is next to each other Which, currently, means it traverses one dimension. This is a rough - * heuristic approach which could be refined in the future. - * - *

Note that the current implementation can be fooled under the following conditions: (1) The - * width of the image is an odd number, and the smallest distance was actually on the between the - * last two points in a given row and (2) The smallest distance was actually in the direction - * orthogonal to that which was getting traversed by iterating through the MatOfPoint2f in order. - * - *

I've chosen not to handle these for speed's sake, and because, really, you don't need the - * exact answer for "min distance". you just need something fairly reasonable. - * - * @param inPoints point set to analyze. Must be a "tall" matrix. - * @return min spacing between neighbors - */ - private double getApproxMinSpacing(MatOfPoint2f inPoints) { - double minSpacing = Double.MAX_VALUE; - for (int pointIdx = 0; pointIdx < inPoints.height() - 1; pointIdx += 2) { - // +1 idx Neighbor distance - double[] startPoint = inPoints.get(pointIdx, 0); - double[] endPoint = inPoints.get(pointIdx + 1, 0); - double deltaX = startPoint[0] - endPoint[0]; - double deltaY = startPoint[1] - endPoint[1]; - double distToNext = Math.sqrt(deltaX * deltaX + deltaY * deltaY); - - minSpacing = Math.min(distToNext, minSpacing); + + /** + * Finds the corners in a given image and returns them + * + * @param in Input for pipe processing. Pair of input and output mat + * @return All valid Mats for camera calibration + */ + @Override + protected Triple process(Pair in) { + // Create the object points + createObjectPoints(); + + return findBoardCorners(in); } - return minSpacing; - } - - /** - * @param inFrame Full-size mat that is going to get scaled down before passing to - * findBoardCorners - * @return the size to scale the input mat to - */ - private Size getFindCornersImgSize(Mat in) { - int width = in.cols() / params.divisor.value; - int height = in.rows() / params.divisor.value; - return new Size(width, height); - } - - /** - * Given an input frame and a set of points from the "smaller" findChessboardCorner analysis, - * re-scale the points back to where they would have been in the input frame - * - * @param inPoints set of points derived from a call to findChessboardCorner on a shrunken mat. - * Must be a "tall" matrix. 
- * @param origFrame Original frame we're rescaling points back to - * @param outPoints mat into which the output rescaled points get placed - */ - private void rescalePointsToOrigFrame( - MatOfPoint2f inPoints, Mat origFrame, MatOfPoint2f outPoints) { - // Rescale boardCorners back up to the inproc image size - Point[] outPointsArr = new Point[inPoints.height()]; - double sf = getFindCornersScaleFactor(origFrame); - for (int pointIdx = 0; pointIdx < inPoints.height(); pointIdx++) { - double[] pointCoords = inPoints.get(pointIdx, 0); - double outXCoord = pointCoords[0] / sf; - double outYCoord = pointCoords[1] / sf; - outPointsArr[pointIdx] = new Point(outXCoord, outYCoord); + + /** + * Figures out how much a frame or point cloud must be scaled down by to match the desired size at + * which to run FindCorners. Should usually be > 1. + * + * @param inFrame + * @return + */ + private double getFindCornersScaleFactor(Mat inFrame) { + return 1.0 / params.divisor.value; } - outPoints.fromArray(outPointsArr); - } - - /** - * Picks a window size for doing subpixel optimization based on the board type and spacing - * observed between the corners or points in the image - * - * @param inPoints - * @return - */ - private Size getWindowSize(MatOfPoint2f inPoints) { - double windowHalfWidth = 11; // Dot board uses fixed-size window half-width - if (params.type == UICalibrationData.BoardType.CHESSBOARD) { - // Chessboard uses a dynamic sized window based on how far apart the corners are - windowHalfWidth = Math.floor(getApproxMinSpacing(inPoints) * 0.50); - windowHalfWidth = Math.max(1, windowHalfWidth); + + /** + * Finds the minimum spacing between a set of x/y points Currently only considers points whose + * index is next to each other Which, currently, means it traverses one dimension. This is a rough + * heuristic approach which could be refined in the future. + * + *

Note that the current implementation can be fooled under the following conditions: (1) The + * width of the image is an odd number, and the smallest distance was actually on the between the + * last two points in a given row and (2) The smallest distance was actually in the direction + * orthogonal to that which was getting traversed by iterating through the MatOfPoint2f in order. + * + *

I've chosen not to handle these for speed's sake, and because, really, you don't need the + * exact answer for "min distance". you just need something fairly reasonable. + * + * @param inPoints point set to analyze. Must be a "tall" matrix. + * @return min spacing between neighbors + */ + private double getApproxMinSpacing(MatOfPoint2f inPoints) { + double minSpacing = Double.MAX_VALUE; + for (int pointIdx = 0; pointIdx < inPoints.height() - 1; pointIdx += 2) { + // +1 idx Neighbor distance + double[] startPoint = inPoints.get(pointIdx, 0); + double[] endPoint = inPoints.get(pointIdx + 1, 0); + double deltaX = startPoint[0] - endPoint[0]; + double deltaY = startPoint[1] - endPoint[1]; + double distToNext = Math.sqrt(deltaX * deltaX + deltaY * deltaY); + + minSpacing = Math.min(distToNext, minSpacing); + } + return minSpacing; } - return new Size(windowHalfWidth, windowHalfWidth); - } - - /** - * Find chessboard corners given an input mat and output mat to draw on - * - * @return Frame resolution, object points, board corners - */ - private Triple findBoardCorners(Pair in) { - createObjectPoints(); - - var inFrame = in.getLeft(); - var outFrame = in.getRight(); - - // Convert the inFrame too grayscale to increase contrast - Imgproc.cvtColor(inFrame, inFrame, Imgproc.COLOR_BGR2GRAY); - boolean boardFound = false; - - if (params.type == UICalibrationData.BoardType.CHESSBOARD) { - // This is for chessboards - - // Reduce the image size to be much more manageable - Imgproc.resize(inFrame, smallerInFrame, getFindCornersImgSize(inFrame)); - - // Run the chessboard corner finder on the smaller image - boardFound = - Calib3d.findChessboardCorners( - smallerInFrame, patternSize, smallerBoardCorners, findChessboardFlags); - - // Rescale back to original pixel locations - if (boardFound) { - rescalePointsToOrigFrame(smallerBoardCorners, inFrame, boardCorners); - } - - } else if (params.type == UICalibrationData.BoardType.DOTBOARD) { - // For dot boards - boardFound = - 
Calib3d.findCirclesGrid( - inFrame, patternSize, boardCorners, Calib3d.CALIB_CB_ASYMMETRIC_GRID); + + /** + * @param inFrame Full-size mat that is going to get scaled down before passing to + * findBoardCorners + * @return the size to scale the input mat to + */ + private Size getFindCornersImgSize(Mat in) { + int width = in.cols() / params.divisor.value; + int height = in.rows() / params.divisor.value; + return new Size(width, height); } - if (!boardFound) { - // If we can't find a chessboard/dot board, just return - return null; + /** + * Given an input frame and a set of points from the "smaller" findChessboardCorner analysis, + * re-scale the points back to where they would have been in the input frame + * + * @param inPoints set of points derived from a call to findChessboardCorner on a shrunken mat. + * Must be a "tall" matrix. + * @param origFrame Original frame we're rescaling points back to + * @param outPoints mat into which the output rescaled points get placed + */ + private void rescalePointsToOrigFrame( + MatOfPoint2f inPoints, Mat origFrame, MatOfPoint2f outPoints) { + // Rescale boardCorners back up to the inproc image size + Point[] outPointsArr = new Point[inPoints.height()]; + double sf = getFindCornersScaleFactor(origFrame); + for (int pointIdx = 0; pointIdx < inPoints.height(); pointIdx++) { + double[] pointCoords = inPoints.get(pointIdx, 0); + double outXCoord = pointCoords[0] / sf; + double outYCoord = pointCoords[1] / sf; + outPointsArr[pointIdx] = new Point(outXCoord, outYCoord); + } + outPoints.fromArray(outPointsArr); } - var outBoardCorners = new MatOfPoint2f(); - boardCorners.copyTo(outBoardCorners); - - var objPts = new MatOfPoint2f(); - objectPoints.copyTo(objPts); - - // Get the size of the inFrame - this.imageSize = new Size(inFrame.width(), inFrame.height()); - - // Do sub corner pix for drawing chessboard - Imgproc.cornerSubPix( - inFrame, outBoardCorners, getWindowSize(outBoardCorners), zeroZone, criteria); - - // convert back to 
BGR - // Imgproc.cvtColor(inFrame, inFrame, Imgproc.COLOR_GRAY2BGR); - // draw the chessboard, doesn't have to be different for a dot board since it just re projects - // the corners we found - Calib3d.drawChessboardCorners(outFrame, patternSize, outBoardCorners, true); - - // // Add the 3D points and the points of the corners found - // if (addToSnapList) { - // this.listOfObjectPoints.add(objectPoints); - // this.listOfImagePoints.add(boardCorners); - // } - - return Triple.of(inFrame.size(), objPts, outBoardCorners); - } - - public static class FindCornersPipeParams { - private final int boardHeight; - private final int boardWidth; - private final UICalibrationData.BoardType type; - private final double gridSize; - private final FrameDivisor divisor; - - public FindCornersPipeParams( - int boardHeight, - int boardWidth, - UICalibrationData.BoardType type, - double gridSize, - FrameDivisor divisor) { - this.boardHeight = boardHeight; - this.boardWidth = boardWidth; - this.type = type; - this.gridSize = gridSize; // mm - this.divisor = divisor; + /** + * Picks a window size for doing subpixel optimization based on the board type and spacing + * observed between the corners or points in the image + * + * @param inPoints + * @return + */ + private Size getWindowSize(MatOfPoint2f inPoints) { + double windowHalfWidth = 11; // Dot board uses fixed-size window half-width + if (params.type == UICalibrationData.BoardType.CHESSBOARD) { + // Chessboard uses a dynamic sized window based on how far apart the corners are + windowHalfWidth = Math.floor(getApproxMinSpacing(inPoints) * 0.50); + windowHalfWidth = Math.max(1, windowHalfWidth); + } + return new Size(windowHalfWidth, windowHalfWidth); } - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + boardHeight; - result = prime * result + boardWidth; - result = prime * result + ((type == null) ? 
0 : type.hashCode()); - long temp; - temp = Double.doubleToLongBits(gridSize); - result = prime * result + (int) (temp ^ (temp >>> 32)); - result = prime * result + ((divisor == null) ? 0 : divisor.hashCode()); - return result; + /** + * Find chessboard corners given an input mat and output mat to draw on + * + * @return Frame resolution, object points, board corners + */ + private Triple findBoardCorners(Pair in) { + createObjectPoints(); + + var inFrame = in.getLeft(); + var outFrame = in.getRight(); + + // Convert the inFrame too grayscale to increase contrast + Imgproc.cvtColor(inFrame, inFrame, Imgproc.COLOR_BGR2GRAY); + boolean boardFound = false; + + if (params.type == UICalibrationData.BoardType.CHESSBOARD) { + // This is for chessboards + + // Reduce the image size to be much more manageable + Imgproc.resize(inFrame, smallerInFrame, getFindCornersImgSize(inFrame)); + + // Run the chessboard corner finder on the smaller image + boardFound = + Calib3d.findChessboardCorners( + smallerInFrame, patternSize, smallerBoardCorners, findChessboardFlags); + + // Rescale back to original pixel locations + if (boardFound) { + rescalePointsToOrigFrame(smallerBoardCorners, inFrame, boardCorners); + } + + } else if (params.type == UICalibrationData.BoardType.DOTBOARD) { + // For dot boards + boardFound = + Calib3d.findCirclesGrid( + inFrame, patternSize, boardCorners, Calib3d.CALIB_CB_ASYMMETRIC_GRID); + } + + if (!boardFound) { + // If we can't find a chessboard/dot board, just return + return null; + } + + var outBoardCorners = new MatOfPoint2f(); + boardCorners.copyTo(outBoardCorners); + + var objPts = new MatOfPoint2f(); + objectPoints.copyTo(objPts); + + // Get the size of the inFrame + this.imageSize = new Size(inFrame.width(), inFrame.height()); + + // Do sub corner pix for drawing chessboard + Imgproc.cornerSubPix( + inFrame, outBoardCorners, getWindowSize(outBoardCorners), zeroZone, criteria); + + // convert back to BGR + // Imgproc.cvtColor(inFrame, inFrame, 
Imgproc.COLOR_GRAY2BGR); + // draw the chessboard, doesn't have to be different for a dot board since it just re projects + // the corners we found + Calib3d.drawChessboardCorners(outFrame, patternSize, outBoardCorners, true); + + // // Add the 3D points and the points of the corners found + // if (addToSnapList) { + // this.listOfObjectPoints.add(objectPoints); + // this.listOfImagePoints.add(boardCorners); + // } + + return Triple.of(inFrame.size(), objPts, outBoardCorners); } - @Override - public boolean equals(Object obj) { - if (this == obj) return true; - if (obj == null) return false; - if (getClass() != obj.getClass()) return false; - FindCornersPipeParams other = (FindCornersPipeParams) obj; - if (boardHeight != other.boardHeight) return false; - if (boardWidth != other.boardWidth) return false; - if (type != other.type) return false; - if (Double.doubleToLongBits(gridSize) != Double.doubleToLongBits(other.gridSize)) - return false; - return divisor == other.divisor; + public static class FindCornersPipeParams { + private final int boardHeight; + private final int boardWidth; + private final UICalibrationData.BoardType type; + private final double gridSize; + private final FrameDivisor divisor; + + public FindCornersPipeParams( + int boardHeight, + int boardWidth, + UICalibrationData.BoardType type, + double gridSize, + FrameDivisor divisor) { + this.boardHeight = boardHeight; + this.boardWidth = boardWidth; + this.type = type; + this.gridSize = gridSize; // mm + this.divisor = divisor; + } + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + boardHeight; + result = prime * result + boardWidth; + result = prime * result + ((type == null) ? 0 : type.hashCode()); + long temp; + temp = Double.doubleToLongBits(gridSize); + result = prime * result + (int) (temp ^ (temp >>> 32)); + result = prime * result + ((divisor == null) ? 
0 : divisor.hashCode()); + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) return true; + if (obj == null) return false; + if (getClass() != obj.getClass()) return false; + FindCornersPipeParams other = (FindCornersPipeParams) obj; + if (boardHeight != other.boardHeight) return false; + if (boardWidth != other.boardWidth) return false; + if (type != other.type) return false; + if (Double.doubleToLongBits(gridSize) != Double.doubleToLongBits(other.gridSize)) + return false; + return divisor == other.divisor; + } } - } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/FindCirclesPipe.java b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/FindCirclesPipe.java index c7f8761dc2..07ec9ecd82 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/FindCirclesPipe.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/FindCirclesPipe.java @@ -29,119 +29,119 @@ import org.photonvision.vision.pipe.CVPipe; public class FindCirclesPipe - extends CVPipe>, List, FindCirclesPipe.FindCirclePipeParams> { - // Output vector of found circles. Each vector is encoded as 3 or 4 element floating-point vector - // (x,y,radius) or (x,y,radius,votes) . - private final Mat circles = new Mat(); + extends CVPipe>, List, FindCirclesPipe.FindCirclePipeParams> { + // Output vector of found circles. Each vector is encoded as 3 or 4 element floating-point vector + // (x,y,radius) or (x,y,radius,votes) . + private final Mat circles = new Mat(); - /** - * Runs the process for the pipe. The reason we need a separate pipe for circles is because if we - * were to use the FindShapes pipe, we would have to assume that any shape more than 10-20+ sides - * is a circle. Only issue with such approximation is that the user would no longer be able to - * track shapes with 10-20+ sides. 
And hence, in order to overcome this edge case, we can use - * HoughCircles which is more flexible and accurate for finding circles. - * - * @param in Input for pipe processing. 8-bit, single-channel, grayscale input image. - * @return Result of processing. - */ - @Override - protected List process(Pair> in) { - circles.release(); - List output = new ArrayList<>(); + /** + * Runs the process for the pipe. The reason we need a separate pipe for circles is because if we + * were to use the FindShapes pipe, we would have to assume that any shape more than 10-20+ sides + * is a circle. Only issue with such approximation is that the user would no longer be able to + * track shapes with 10-20+ sides. And hence, in order to overcome this edge case, we can use + * HoughCircles which is more flexible and accurate for finding circles. + * + * @param in Input for pipe processing. 8-bit, single-channel, grayscale input image. + * @return Result of processing. + */ + @Override + protected List process(Pair> in) { + circles.release(); + List output = new ArrayList<>(); - var diag = params.diagonalLengthPx; - var minRadius = (int) (params.minRadius * diag / 100.0); - var maxRadius = (int) (params.maxRadius * diag / 100.0); + var diag = params.diagonalLengthPx; + var minRadius = (int) (params.minRadius * diag / 100.0); + var maxRadius = (int) (params.maxRadius * diag / 100.0); - Imgproc.HoughCircles( - in.getLeft(), - circles, - // Detection method, see #HoughModes. The available methods are #HOUGH_GRADIENT and - // #HOUGH_GRADIENT_ALT. - Imgproc.HOUGH_GRADIENT, - /*Inverse ratio of the accumulator resolution to the image resolution. - For example, if dp=1 , the accumulator has the same resolution as the input image. - If dp=2 , the accumulator has half as big width and height. For #HOUGH_GRADIENT_ALT the recommended value is dp=1.5, - unless some small very circles need to be detected. 
- */ - 1.0, - params.minDist, - params.maxCannyThresh, - Math.max(1.0, params.accuracy), - minRadius, - maxRadius); - // Great, we now found the center point of the circle, and it's radius, but we have no idea what - // contour it corresponds to - // Each contour can only match to one circle, so we keep a list of unmatched contours around and - // only match against them - // This does mean that contours closer than allowableThreshold aren't matched to anything if - // there's a 'better' option - var unmatchedContours = in.getRight(); - for (int x = 0; x < circles.cols(); x++) { - // Grab the current circle we are looking at - double[] c = circles.get(0, x); - // Find the center points of that circle - double x_center = c[0]; - double y_center = c[1]; + Imgproc.HoughCircles( + in.getLeft(), + circles, + // Detection method, see #HoughModes. The available methods are #HOUGH_GRADIENT and + // #HOUGH_GRADIENT_ALT. + Imgproc.HOUGH_GRADIENT, + /*Inverse ratio of the accumulator resolution to the image resolution. + For example, if dp=1 , the accumulator has the same resolution as the input image. + If dp=2 , the accumulator has half as big width and height. For #HOUGH_GRADIENT_ALT the recommended value is dp=1.5, + unless some small very circles need to be detected. 
+ */ + 1.0, + params.minDist, + params.maxCannyThresh, + Math.max(1.0, params.accuracy), + minRadius, + maxRadius); + // Great, we now found the center point of the circle, and it's radius, but we have no idea what + // contour it corresponds to + // Each contour can only match to one circle, so we keep a list of unmatched contours around and + // only match against them + // This does mean that contours closer than allowableThreshold aren't matched to anything if + // there's a 'better' option + var unmatchedContours = in.getRight(); + for (int x = 0; x < circles.cols(); x++) { + // Grab the current circle we are looking at + double[] c = circles.get(0, x); + // Find the center points of that circle + double x_center = c[0]; + double y_center = c[1]; - for (Contour contour : unmatchedContours) { - // Grab the moments of the current contour - Moments mu = contour.getMoments(); - // Determine if the contour is within the threshold of the detected circle - // NOTE: This means that the centroid of the contour must be within the "allowable - // threshold" - // of the center of the circle - if (Math.abs(x_center - (mu.m10 / mu.m00)) <= params.allowableThreshold - && Math.abs(y_center - (mu.m01 / mu.m00)) <= params.allowableThreshold) { - // If it is, then add it to the output array - output.add(new CVShape(contour, new Point(c[0], c[1]), c[2])); - unmatchedContours.remove(contour); - break; + for (Contour contour : unmatchedContours) { + // Grab the moments of the current contour + Moments mu = contour.getMoments(); + // Determine if the contour is within the threshold of the detected circle + // NOTE: This means that the centroid of the contour must be within the "allowable + // threshold" + // of the center of the circle + if (Math.abs(x_center - (mu.m10 / mu.m00)) <= params.allowableThreshold + && Math.abs(y_center - (mu.m01 / mu.m00)) <= params.allowableThreshold) { + // If it is, then add it to the output array + output.add(new CVShape(contour, new Point(c[0], 
c[1]), c[2])); + unmatchedContours.remove(contour); + break; + } + } } - } - } - // Release everything we don't use - for (var c : unmatchedContours) c.release(); + // Release everything we don't use + for (var c : unmatchedContours) c.release(); - return output; - } + return output; + } - public static class FindCirclePipeParams { - private final int allowableThreshold; - private final int minRadius; - private final int maxRadius; - private final int minDist; - private final int maxCannyThresh; - private final int accuracy; - private final double diagonalLengthPx; + public static class FindCirclePipeParams { + private final int allowableThreshold; + private final int minRadius; + private final int maxRadius; + private final int minDist; + private final int maxCannyThresh; + private final int accuracy; + private final double diagonalLengthPx; - /* - * @params minDist - Minimum distance between the centers of the detected circles. - * If the parameter is too small, multiple neighbor circles may be falsely detected in addition to a true one. If it is too large, some circles may be missed. - * - * @param maxCannyThresh -First method-specific parameter. In case of #HOUGH_GRADIENT and #HOUGH_GRADIENT_ALT, it is the higher threshold of the two passed to the Canny edge detector (the lower one is twice smaller). - * Note that #HOUGH_GRADIENT_ALT uses #Scharr algorithm to compute image derivatives, so the threshold value should normally be higher, such as 300 or normally exposed and contrasty images. - * - * - * @param allowableThreshold - When finding the corresponding contour, this is used to see how close a center should be to a contour for it to be considered THAT contour. 
- * Should be increased with lower resolutions and decreased with higher resolution - * */ - public FindCirclePipeParams( - int allowableThreshold, - int minRadius, - int minDist, - int maxRadius, - int maxCannyThresh, - int accuracy, - double diagonalLengthPx) { - this.allowableThreshold = allowableThreshold; - this.minRadius = minRadius; - this.maxRadius = maxRadius; - this.minDist = minDist; - this.maxCannyThresh = maxCannyThresh; - this.accuracy = accuracy; - this.diagonalLengthPx = diagonalLengthPx; + /* + * @params minDist - Minimum distance between the centers of the detected circles. + * If the parameter is too small, multiple neighbor circles may be falsely detected in addition to a true one. If it is too large, some circles may be missed. + * + * @param maxCannyThresh -First method-specific parameter. In case of #HOUGH_GRADIENT and #HOUGH_GRADIENT_ALT, it is the higher threshold of the two passed to the Canny edge detector (the lower one is twice smaller). + * Note that #HOUGH_GRADIENT_ALT uses #Scharr algorithm to compute image derivatives, so the threshold value should normally be higher, such as 300 or normally exposed and contrasty images. + * + * + * @param allowableThreshold - When finding the corresponding contour, this is used to see how close a center should be to a contour for it to be considered THAT contour. 
+ * Should be increased with lower resolutions and decreased with higher resolution + * */ + public FindCirclePipeParams( + int allowableThreshold, + int minRadius, + int minDist, + int maxRadius, + int maxCannyThresh, + int accuracy, + double diagonalLengthPx) { + this.allowableThreshold = allowableThreshold; + this.minRadius = minRadius; + this.maxRadius = maxRadius; + this.minDist = minDist; + this.maxCannyThresh = maxCannyThresh; + this.accuracy = accuracy; + this.diagonalLengthPx = diagonalLengthPx; + } } - } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/FindContoursPipe.java b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/FindContoursPipe.java index 4f05f67532..d5564d63fc 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/FindContoursPipe.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/FindContoursPipe.java @@ -27,21 +27,21 @@ import org.photonvision.vision.pipe.CVPipe; public class FindContoursPipe - extends CVPipe, FindContoursPipe.FindContoursParams> { - private final List m_foundContours = new ArrayList<>(); + extends CVPipe, FindContoursPipe.FindContoursParams> { + private final List m_foundContours = new ArrayList<>(); - @Override - protected List process(Mat in) { - for (var m : m_foundContours) { - m.release(); // necessary? - } - m_foundContours.clear(); + @Override + protected List process(Mat in) { + for (var m : m_foundContours) { + m.release(); // necessary? 
+ } + m_foundContours.clear(); - Imgproc.findContours( - in, m_foundContours, new Mat(), Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_TC89_KCOS); + Imgproc.findContours( + in, m_foundContours, new Mat(), Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_TC89_KCOS); - return m_foundContours.stream().map(Contour::new).collect(Collectors.toList()); - } + return m_foundContours.stream().map(Contour::new).collect(Collectors.toList()); + } - public static class FindContoursParams {} + public static class FindContoursParams {} } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/FindPolygonPipe.java b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/FindPolygonPipe.java index d291ccb33a..dfadfda53f 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/FindPolygonPipe.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/FindPolygonPipe.java @@ -26,65 +26,65 @@ import org.photonvision.vision.pipe.CVPipe; public class FindPolygonPipe - extends CVPipe, List, FindPolygonPipe.FindPolygonPipeParams> { - List shapeList = new ArrayList<>(); + extends CVPipe, List, FindPolygonPipe.FindPolygonPipeParams> { + List shapeList = new ArrayList<>(); - /* - * Runs the process for the pipe. - * - * @param in Input for pipe processing. - * @return Result of processing. - */ - @Override - protected List process(List in) { - shapeList.forEach(CVShape::release); - shapeList.clear(); - shapeList = new ArrayList<>(); + /* + * Runs the process for the pipe. + * + * @param in Input for pipe processing. + * @return Result of processing. 
+ */ + @Override + protected List process(List in) { + shapeList.forEach(CVShape::release); + shapeList.clear(); + shapeList = new ArrayList<>(); - for (Contour contour : in) { - shapeList.add(getShape(contour)); + for (Contour contour : in) { + shapeList.add(getShape(contour)); + } + + return shapeList; } - return shapeList; - } + private CVShape getShape(Contour in) { + int corners = getCorners(in); - private CVShape getShape(Contour in) { - int corners = getCorners(in); + /*The contourShape enum has predefined shapes for Circles, Triangles, and Quads + meaning any shape not fitting in those predefined shapes must be a custom shape. + */ + if (ContourShape.fromSides(corners) == null) { + return new CVShape(in, ContourShape.Custom); + } + switch (ContourShape.fromSides(corners)) { + case Circle: + return new CVShape(in, ContourShape.Circle); + case Triangle: + return new CVShape(in, ContourShape.Triangle); + case Quadrilateral: + return new CVShape(in, ContourShape.Quadrilateral); + } - /*The contourShape enum has predefined shapes for Circles, Triangles, and Quads - meaning any shape not fitting in those predefined shapes must be a custom shape. 
- */ - if (ContourShape.fromSides(corners) == null) { - return new CVShape(in, ContourShape.Custom); - } - switch (ContourShape.fromSides(corners)) { - case Circle: - return new CVShape(in, ContourShape.Circle); - case Triangle: - return new CVShape(in, ContourShape.Triangle); - case Quadrilateral: - return new CVShape(in, ContourShape.Quadrilateral); + return new CVShape(in, ContourShape.Custom); } - return new CVShape(in, ContourShape.Custom); - } + private int getCorners(Contour contour) { + var approx = + contour.getApproxPolyDp( + (100 - params.accuracyPercentage) / 100.0 * Imgproc.arcLength(contour.getMat2f(), true), + true); - private int getCorners(Contour contour) { - var approx = - contour.getApproxPolyDp( - (100 - params.accuracyPercentage) / 100.0 * Imgproc.arcLength(contour.getMat2f(), true), - true); - - // The height of the resultant approximation is the number of vertices - return (int) approx.size().height; - } + // The height of the resultant approximation is the number of vertices + return (int) approx.size().height; + } - public static class FindPolygonPipeParams { - private final double accuracyPercentage; + public static class FindPolygonPipeParams { + private final double accuracyPercentage; - // Should be a value between 0-100 - public FindPolygonPipeParams(double accuracyPercentage) { - this.accuracyPercentage = accuracyPercentage; + // Should be a value between 0-100 + public FindPolygonPipeParams(double accuracyPercentage) { + this.accuracyPercentage = accuracyPercentage; + } } - } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/GPUAcceleratedHSVPipe.java b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/GPUAcceleratedHSVPipe.java index c25b351a97..5b49faf641 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/GPUAcceleratedHSVPipe.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/GPUAcceleratedHSVPipe.java @@ -35,517 +35,517 @@ import 
org.photonvision.vision.pipe.CVPipe; public class GPUAcceleratedHSVPipe extends CVPipe { - private static final String k_vertexShader = - String.join( - "\n", - "#version 100", - "", - "attribute vec4 position;", - "", - "void main() {", - " gl_Position = position;", - "}"); - private static final String k_fragmentShader = - String.join( - "\n", - "#version 100", - "", - "precision highp float;", - "precision highp int;", - "", - "uniform vec3 lowerThresh;", - "uniform vec3 upperThresh;", - "uniform vec2 resolution;", - "uniform sampler2D texture0;", - "", - "vec3 rgb2hsv(vec3 c) {", - " vec4 K = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);", - " vec4 p = mix(vec4(c.bg, K.wz), vec4(c.gb, K.xy), step(c.b, c.g));", - " vec4 q = mix(vec4(p.xyw, c.r), vec4(c.r, p.yzx), step(p.x, c.r));", - "", - " float d = q.x - min(q.w, q.y);", - " float e = 1.0e-10;", - " return vec3(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + e), q.x);", - "}", - "", - "bool inRange(vec3 hsv) {", - " bvec3 botBool = greaterThanEqual(hsv, lowerThresh);", - " bvec3 topBool = lessThanEqual(hsv, upperThresh);", - " return all(botBool) && all(topBool);", - "}", - "", - "void main() {", - " vec2 uv = gl_FragCoord.xy/resolution;", - // Important! We do this .bgr swizzle because the image comes in as BGR, but we pretend - // it's RGB for convenience+speed - " vec3 col = texture2D(texture0, uv).bgr;", - // Only the first value in the vec4 gets used for GL_RED, and only the last value gets - // used for GL_ALPHA - " gl_FragColor = inRange(rgb2hsv(col)) ? 
vec4(1.0, 0.0, 0.0, 1.0) : vec4(0.0, 0.0, 0.0, 0.0);", - "}"); - private static final int k_startingWidth = 1280, k_startingHeight = 720; - private static final float[] k_vertexPositions = { - // Set up a quad that covers the screen - -1f, +1f, +1f, +1f, -1f, -1f, +1f, -1f - }; - private static final int k_positionVertexAttribute = - 0; // ID for the vertex shader position variable - - public enum PBOMode { - NONE, - SINGLE_BUFFERED, - DOUBLE_BUFFERED - } - - private final IntBuffer vertexVBOIds = GLBuffers.newDirectIntBuffer(1), - unpackPBOIds = GLBuffers.newDirectIntBuffer(2), - packPBOIds = GLBuffers.newDirectIntBuffer(2); - - private final GL2ES2 gl; - private final GLProfile profile; - private final int outputFormat; - private final PBOMode pboMode; - private final GLOffscreenAutoDrawableImpl.FBOImpl drawable; - private final Texture texture; - // The texture uniform holds the image that's being processed - // The resolution uniform holds the current image resolution - // The lower and upper uniforms hold the lower and upper HSV limits for thresholding - private final int textureUniformId, resolutionUniformId, lowerUniformId, upperUniformId; - - private final Logger logger = new Logger(GPUAcceleratedHSVPipe.class, LogGroup.General); - - private byte[] inputBytes, outputBytes; - private Mat outputMat = new Mat(k_startingHeight, k_startingWidth, CvType.CV_8UC1); - private int previousWidth = -1, previousHeight = -1; - private int unpackIndex = 0, unpackNextIndex = 0, packIndex = 0, packNextIndex = 0; - - public GPUAcceleratedHSVPipe(PBOMode pboMode) { - this.pboMode = pboMode; - - // Set up GL profile and ask for specific capabilities - profile = GLProfile.get(pboMode == PBOMode.NONE ? 
GLProfile.GLES2 : GLProfile.GL4ES3); - final var capabilities = new GLCapabilities(profile); - capabilities.setHardwareAccelerated(true); - capabilities.setFBO(true); - capabilities.setDoubleBuffered(false); - capabilities.setOnscreen(false); - capabilities.setRedBits(8); - capabilities.setBlueBits(0); - capabilities.setGreenBits(0); - capabilities.setAlphaBits(0); - - // Set up the offscreen area we're going to draw to - final var factory = GLDrawableFactory.getFactory(profile); - drawable = - (GLOffscreenAutoDrawableImpl.FBOImpl) - factory.createOffscreenAutoDrawable( - factory.getDefaultDevice(), - capabilities, - new DefaultGLCapabilitiesChooser(), - k_startingWidth, - k_startingHeight); - drawable.display(); - drawable.getContext().makeCurrent(); - - // Get an OpenGL context; OpenGL ES 2.0 and OpenGL 2.0 are compatible with all the coprocs we - // care about but not compatible with PBOs. Open GL ES 3.0 and OpenGL 4.0 are compatible with - // select coprocs *and* PBOs - gl = pboMode == PBOMode.NONE ? 
drawable.getGL().getGLES2() : drawable.getGL().getGL4ES3(); - final int programId = gl.glCreateProgram(); - - if (pboMode == PBOMode.NONE && !gl.glGetString(GL_EXTENSIONS).contains("GL_EXT_texture_rg")) { - logger.warn( - "OpenGL ES 2.0 implementation does not have the 'GL_EXT_texture_rg' extension, falling back to GL_ALPHA instead of GL_RED output format"); - outputFormat = GL_ALPHA; - } else { - outputFormat = GL_RED; + private static final String k_vertexShader = + String.join( + "\n", + "#version 100", + "", + "attribute vec4 position;", + "", + "void main() {", + " gl_Position = position;", + "}"); + private static final String k_fragmentShader = + String.join( + "\n", + "#version 100", + "", + "precision highp float;", + "precision highp int;", + "", + "uniform vec3 lowerThresh;", + "uniform vec3 upperThresh;", + "uniform vec2 resolution;", + "uniform sampler2D texture0;", + "", + "vec3 rgb2hsv(vec3 c) {", + " vec4 K = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);", + " vec4 p = mix(vec4(c.bg, K.wz), vec4(c.gb, K.xy), step(c.b, c.g));", + " vec4 q = mix(vec4(p.xyw, c.r), vec4(c.r, p.yzx), step(p.x, c.r));", + "", + " float d = q.x - min(q.w, q.y);", + " float e = 1.0e-10;", + " return vec3(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + e), q.x);", + "}", + "", + "bool inRange(vec3 hsv) {", + " bvec3 botBool = greaterThanEqual(hsv, lowerThresh);", + " bvec3 topBool = lessThanEqual(hsv, upperThresh);", + " return all(botBool) && all(topBool);", + "}", + "", + "void main() {", + " vec2 uv = gl_FragCoord.xy/resolution;", + // Important! We do this .bgr swizzle because the image comes in as BGR, but we pretend + // it's RGB for convenience+speed + " vec3 col = texture2D(texture0, uv).bgr;", + // Only the first value in the vec4 gets used for GL_RED, and only the last value gets + // used for GL_ALPHA + " gl_FragColor = inRange(rgb2hsv(col)) ? 
vec4(1.0, 0.0, 0.0, 1.0) : vec4(0.0, 0.0, 0.0, 0.0);", + "}"); + private static final int k_startingWidth = 1280, k_startingHeight = 720; + private static final float[] k_vertexPositions = { + // Set up a quad that covers the screen + -1f, +1f, +1f, +1f, -1f, -1f, +1f, -1f + }; + private static final int k_positionVertexAttribute = + 0; // ID for the vertex shader position variable + + public enum PBOMode { + NONE, + SINGLE_BUFFERED, + DOUBLE_BUFFERED } - // JOGL creates a framebuffer color attachment that has RGB set as the format, which is not - // appropriate for us because we want a single-channel format - // We make ourown FBO color attachment to remedy this - // Detach and destroy the FBO color attachment that JOGL made for us - drawable.getFBObject(GL_FRONT).detachColorbuffer(gl, 0, true); - // Equivalent to calling glBindFramebuffer - drawable.getFBObject(GL_FRONT).bind(gl); - if (true) { // For now renderbuffers are disabled - // Create a color attachment texture to hold our rendered output - var colorBufferIds = GLBuffers.newDirectIntBuffer(1); - gl.glGenTextures(1, colorBufferIds); - gl.glBindTexture(GL_TEXTURE_2D, colorBufferIds.get(0)); - gl.glTexImage2D( - GL_TEXTURE_2D, - 0, - outputFormat == GL_RED ? 
GL_R8 : GL_ALPHA8, - k_startingWidth, - k_startingHeight, - 0, - outputFormat, - GL_UNSIGNED_BYTE, - null); - gl.glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); - gl.glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); - // Attach the texture to the framebuffer - gl.glBindTexture(GL_TEXTURE_2D, 0); - gl.glFramebufferTexture2D( - GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, colorBufferIds.get(0), 0); - // Cleanup - gl.glBindTexture(GL_TEXTURE_2D, 0); - } else { - // Create a color attachment texture to hold our rendered output - var renderBufferIds = GLBuffers.newDirectIntBuffer(1); - gl.glGenRenderbuffers(1, renderBufferIds); - gl.glBindRenderbuffer(GL_RENDERBUFFER, renderBufferIds.get(0)); - gl.glRenderbufferStorage( - GL_RENDERBUFFER, - outputFormat == GL_RED ? GL_R8 : GL_ALPHA8, - k_startingWidth, - k_startingHeight); - // Attach the texture to the framebuffer - gl.glFramebufferRenderbuffer( - GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, renderBufferIds.get(0)); - // Cleanup - gl.glBindRenderbuffer(GL_RENDERBUFFER, 0); - } - drawable.getFBObject(GL_FRONT).unbind(gl); - - // Check that the FBO is attached - int fboStatus = gl.glCheckFramebufferStatus(GL_FRAMEBUFFER); - if (fboStatus == GL_FRAMEBUFFER_UNSUPPORTED) { - throw new RuntimeException( - "GL implementation does not support rendering to internal format '" - + String.format("0x%08X", outputFormat == GL_RED ? 
GL_R8 : GL_ALPHA8) - + "' with type '" - + String.format("0x%08X", GL_UNSIGNED_BYTE) - + "'"); - } else if (fboStatus != GL_FRAMEBUFFER_COMPLETE) { - throw new RuntimeException( - "Framebuffer is not complete; framebuffer status is " - + String.format("0x%08X", fboStatus)); - } + private final IntBuffer vertexVBOIds = GLBuffers.newDirectIntBuffer(1), + unpackPBOIds = GLBuffers.newDirectIntBuffer(2), + packPBOIds = GLBuffers.newDirectIntBuffer(2); + + private final GL2ES2 gl; + private final GLProfile profile; + private final int outputFormat; + private final PBOMode pboMode; + private final GLOffscreenAutoDrawableImpl.FBOImpl drawable; + private final Texture texture; + // The texture uniform holds the image that's being processed + // The resolution uniform holds the current image resolution + // The lower and upper uniforms hold the lower and upper HSV limits for thresholding + private final int textureUniformId, resolutionUniformId, lowerUniformId, upperUniformId; + + private final Logger logger = new Logger(GPUAcceleratedHSVPipe.class, LogGroup.General); + + private byte[] inputBytes, outputBytes; + private Mat outputMat = new Mat(k_startingHeight, k_startingWidth, CvType.CV_8UC1); + private int previousWidth = -1, previousHeight = -1; + private int unpackIndex = 0, unpackNextIndex = 0, packIndex = 0, packNextIndex = 0; + + public GPUAcceleratedHSVPipe(PBOMode pboMode) { + this.pboMode = pboMode; + + // Set up GL profile and ask for specific capabilities + profile = GLProfile.get(pboMode == PBOMode.NONE ? 
GLProfile.GLES2 : GLProfile.GL4ES3); + final var capabilities = new GLCapabilities(profile); + capabilities.setHardwareAccelerated(true); + capabilities.setFBO(true); + capabilities.setDoubleBuffered(false); + capabilities.setOnscreen(false); + capabilities.setRedBits(8); + capabilities.setBlueBits(0); + capabilities.setGreenBits(0); + capabilities.setAlphaBits(0); + + // Set up the offscreen area we're going to draw to + final var factory = GLDrawableFactory.getFactory(profile); + drawable = + (GLOffscreenAutoDrawableImpl.FBOImpl) + factory.createOffscreenAutoDrawable( + factory.getDefaultDevice(), + capabilities, + new DefaultGLCapabilitiesChooser(), + k_startingWidth, + k_startingHeight); + drawable.display(); + drawable.getContext().makeCurrent(); + + // Get an OpenGL context; OpenGL ES 2.0 and OpenGL 2.0 are compatible with all the coprocs we + // care about but not compatible with PBOs. Open GL ES 3.0 and OpenGL 4.0 are compatible with + // select coprocs *and* PBOs + gl = pboMode == PBOMode.NONE ? 
drawable.getGL().getGLES2() : drawable.getGL().getGL4ES3(); + final int programId = gl.glCreateProgram(); + + if (pboMode == PBOMode.NONE && !gl.glGetString(GL_EXTENSIONS).contains("GL_EXT_texture_rg")) { + logger.warn( + "OpenGL ES 2.0 implementation does not have the 'GL_EXT_texture_rg' extension, falling back to GL_ALPHA instead of GL_RED output format"); + outputFormat = GL_ALPHA; + } else { + outputFormat = GL_RED; + } - logger.debug( - "Created an OpenGL context with renderer '" - + gl.glGetString(GL_RENDERER) - + "', version '" - + gl.glGetString(GL.GL_VERSION) - + "', and profile '" - + profile - + "'"); - - var fmt = GLBuffers.newDirectIntBuffer(1); - gl.glGetIntegerv(GLES3.GL_IMPLEMENTATION_COLOR_READ_FORMAT, fmt); - var type = GLBuffers.newDirectIntBuffer(1); - gl.glGetIntegerv(GLES3.GL_IMPLEMENTATION_COLOR_READ_TYPE, type); - - // Tell OpenGL that the attribute in the vertex shader named position is bound to index 0 (the - // index for the generic position input) - gl.glBindAttribLocation(programId, 0, "position"); - - // Compile and set up our two shaders with our program - final int vertexId = createShader(gl, programId, k_vertexShader, GL_VERTEX_SHADER); - final int fragmentId = createShader(gl, programId, k_fragmentShader, GL_FRAGMENT_SHADER); - - // Link our program together and check for errors - gl.glLinkProgram(programId); - IntBuffer status = GLBuffers.newDirectIntBuffer(1); - gl.glGetProgramiv(programId, GL_LINK_STATUS, status); - if (status.get(0) == GL_FALSE) { - IntBuffer infoLogLength = GLBuffers.newDirectIntBuffer(1); - gl.glGetProgramiv(programId, GL_INFO_LOG_LENGTH, infoLogLength); - - ByteBuffer bufferInfoLog = GLBuffers.newDirectByteBuffer(infoLogLength.get(0)); - gl.glGetProgramInfoLog(programId, infoLogLength.get(0), null, bufferInfoLog); - byte[] bytes = new byte[infoLogLength.get(0)]; - bufferInfoLog.get(bytes); - String strInfoLog = new String(bytes); - - throw new RuntimeException("Linker failure: " + strInfoLog); - } - 
gl.glValidateProgram(programId); - - // Cleanup shaders that are now compiled in - gl.glDetachShader(programId, vertexId); - gl.glDetachShader(programId, fragmentId); - gl.glDeleteShader(vertexId); - gl.glDeleteShader(fragmentId); - - // Tell OpenGL to use our program - gl.glUseProgram(programId); - - // Set up our texture - texture = new Texture(GL_TEXTURE_2D); - texture.setTexParameteri(gl, GL_TEXTURE_MIN_FILTER, GL_LINEAR); - texture.setTexParameteri(gl, GL_TEXTURE_MAG_FILTER, GL_LINEAR); - texture.setTexParameteri(gl, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); - texture.setTexParameteri(gl, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); - - // Set up a uniform holding our image as a texture - textureUniformId = gl.glGetUniformLocation(programId, "texture0"); - gl.glUniform1i(textureUniformId, 0); - - // Set up a uniform to hold image resolution - resolutionUniformId = gl.glGetUniformLocation(programId, "resolution"); - - // Set up uniforms for the HSV thresholds - lowerUniformId = gl.glGetUniformLocation(programId, "lowerThresh"); - upperUniformId = gl.glGetUniformLocation(programId, "upperThresh"); - - // Set up a quad that covers the entire screen so that our fragment shader draws onto the entire - // screen - gl.glGenBuffers(1, vertexVBOIds); - - FloatBuffer vertexBuffer = GLBuffers.newDirectFloatBuffer(k_vertexPositions); - gl.glBindBuffer(GL_ARRAY_BUFFER, vertexVBOIds.get(0)); - gl.glBufferData( - GL_ARRAY_BUFFER, - (long) vertexBuffer.capacity() * Float.BYTES, - vertexBuffer, - GL_STATIC_DRAW); - - // Set up pixel unpack buffer (a PBO to transfer image data to the GPU) - if (pboMode != PBOMode.NONE) { - gl.glGenBuffers(2, unpackPBOIds); - - gl.glBindBuffer(GLES3.GL_PIXEL_UNPACK_BUFFER, unpackPBOIds.get(0)); - gl.glBufferData( - GLES3.GL_PIXEL_UNPACK_BUFFER, - k_startingHeight * k_startingWidth * 3, - null, - GLES3.GL_STREAM_DRAW); - if (pboMode == PBOMode.DOUBLE_BUFFERED) { - gl.glBindBuffer(GLES3.GL_PIXEL_UNPACK_BUFFER, unpackPBOIds.get(1)); - gl.glBufferData( - 
GLES3.GL_PIXEL_UNPACK_BUFFER, - k_startingHeight * k_startingWidth * 3, - null, - GLES3.GL_STREAM_DRAW); - } - gl.glBindBuffer(GLES3.GL_PIXEL_UNPACK_BUFFER, 0); - } + // JOGL creates a framebuffer color attachment that has RGB set as the format, which is not + // appropriate for us because we want a single-channel format + // We make ourown FBO color attachment to remedy this + // Detach and destroy the FBO color attachment that JOGL made for us + drawable.getFBObject(GL_FRONT).detachColorbuffer(gl, 0, true); + // Equivalent to calling glBindFramebuffer + drawable.getFBObject(GL_FRONT).bind(gl); + if (true) { // For now renderbuffers are disabled + // Create a color attachment texture to hold our rendered output + var colorBufferIds = GLBuffers.newDirectIntBuffer(1); + gl.glGenTextures(1, colorBufferIds); + gl.glBindTexture(GL_TEXTURE_2D, colorBufferIds.get(0)); + gl.glTexImage2D( + GL_TEXTURE_2D, + 0, + outputFormat == GL_RED ? GL_R8 : GL_ALPHA8, + k_startingWidth, + k_startingHeight, + 0, + outputFormat, + GL_UNSIGNED_BYTE, + null); + gl.glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); + gl.glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); + // Attach the texture to the framebuffer + gl.glBindTexture(GL_TEXTURE_2D, 0); + gl.glFramebufferTexture2D( + GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, colorBufferIds.get(0), 0); + // Cleanup + gl.glBindTexture(GL_TEXTURE_2D, 0); + } else { + // Create a color attachment texture to hold our rendered output + var renderBufferIds = GLBuffers.newDirectIntBuffer(1); + gl.glGenRenderbuffers(1, renderBufferIds); + gl.glBindRenderbuffer(GL_RENDERBUFFER, renderBufferIds.get(0)); + gl.glRenderbufferStorage( + GL_RENDERBUFFER, + outputFormat == GL_RED ? 
GL_R8 : GL_ALPHA8, + k_startingWidth, + k_startingHeight); + // Attach the texture to the framebuffer + gl.glFramebufferRenderbuffer( + GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, renderBufferIds.get(0)); + // Cleanup + gl.glBindRenderbuffer(GL_RENDERBUFFER, 0); + } + drawable.getFBObject(GL_FRONT).unbind(gl); + + // Check that the FBO is attached + int fboStatus = gl.glCheckFramebufferStatus(GL_FRAMEBUFFER); + if (fboStatus == GL_FRAMEBUFFER_UNSUPPORTED) { + throw new RuntimeException( + "GL implementation does not support rendering to internal format '" + + String.format("0x%08X", outputFormat == GL_RED ? GL_R8 : GL_ALPHA8) + + "' with type '" + + String.format("0x%08X", GL_UNSIGNED_BYTE) + + "'"); + } else if (fboStatus != GL_FRAMEBUFFER_COMPLETE) { + throw new RuntimeException( + "Framebuffer is not complete; framebuffer status is " + + String.format("0x%08X", fboStatus)); + } - // Set up pixel pack buffer (a PBO to transfer the processed image back from the GPU) - if (pboMode != PBOMode.NONE) { - gl.glGenBuffers(2, packPBOIds); - - gl.glBindBuffer(GLES3.GL_PIXEL_PACK_BUFFER, packPBOIds.get(0)); - gl.glBufferData( - GLES3.GL_PIXEL_PACK_BUFFER, - k_startingHeight * k_startingWidth, - null, - GLES3.GL_STREAM_READ); - if (pboMode == PBOMode.DOUBLE_BUFFERED) { - gl.glBindBuffer(GLES3.GL_PIXEL_PACK_BUFFER, packPBOIds.get(1)); - gl.glBufferData( - GLES3.GL_PIXEL_PACK_BUFFER, - k_startingHeight * k_startingWidth, - null, - GLES3.GL_STREAM_READ); - } - gl.glBindBuffer(GLES3.GL_PIXEL_PACK_BUFFER, 0); - } - } - - private static int createShader(GL2ES2 gl, int programId, String glslCode, int shaderType) { - int shaderId = gl.glCreateShader(shaderType); - if (shaderId == 0) throw new RuntimeException("Shader ID is zero"); - - IntBuffer length = GLBuffers.newDirectIntBuffer(new int[] {glslCode.length()}); - gl.glShaderSource(shaderId, 1, new String[] {glslCode}, length); - gl.glCompileShader(shaderId); - - IntBuffer intBuffer = IntBuffer.allocate(1); - 
gl.glGetShaderiv(shaderId, GL_COMPILE_STATUS, intBuffer); - - if (intBuffer.get(0) != 1) { - gl.glGetShaderiv(shaderId, GL_INFO_LOG_LENGTH, intBuffer); - int size = intBuffer.get(0); - if (size > 0) { - ByteBuffer byteBuffer = ByteBuffer.allocate(size); - gl.glGetShaderInfoLog(shaderId, size, intBuffer, byteBuffer); - System.err.println(new String(byteBuffer.array())); - } - throw new RuntimeException("Couldn't compile shader"); - } + logger.debug( + "Created an OpenGL context with renderer '" + + gl.glGetString(GL_RENDERER) + + "', version '" + + gl.glGetString(GL.GL_VERSION) + + "', and profile '" + + profile + + "'"); + + var fmt = GLBuffers.newDirectIntBuffer(1); + gl.glGetIntegerv(GLES3.GL_IMPLEMENTATION_COLOR_READ_FORMAT, fmt); + var type = GLBuffers.newDirectIntBuffer(1); + gl.glGetIntegerv(GLES3.GL_IMPLEMENTATION_COLOR_READ_TYPE, type); + + // Tell OpenGL that the attribute in the vertex shader named position is bound to index 0 (the + // index for the generic position input) + gl.glBindAttribLocation(programId, 0, "position"); + + // Compile and set up our two shaders with our program + final int vertexId = createShader(gl, programId, k_vertexShader, GL_VERTEX_SHADER); + final int fragmentId = createShader(gl, programId, k_fragmentShader, GL_FRAGMENT_SHADER); + + // Link our program together and check for errors + gl.glLinkProgram(programId); + IntBuffer status = GLBuffers.newDirectIntBuffer(1); + gl.glGetProgramiv(programId, GL_LINK_STATUS, status); + if (status.get(0) == GL_FALSE) { + IntBuffer infoLogLength = GLBuffers.newDirectIntBuffer(1); + gl.glGetProgramiv(programId, GL_INFO_LOG_LENGTH, infoLogLength); + + ByteBuffer bufferInfoLog = GLBuffers.newDirectByteBuffer(infoLogLength.get(0)); + gl.glGetProgramInfoLog(programId, infoLogLength.get(0), null, bufferInfoLog); + byte[] bytes = new byte[infoLogLength.get(0)]; + bufferInfoLog.get(bytes); + String strInfoLog = new String(bytes); + + throw new RuntimeException("Linker failure: " + strInfoLog); + } + 
gl.glValidateProgram(programId); - gl.glAttachShader(programId, shaderId); + // Cleanup shaders that are now compiled in + gl.glDetachShader(programId, vertexId); + gl.glDetachShader(programId, fragmentId); + gl.glDeleteShader(vertexId); + gl.glDeleteShader(fragmentId); - return shaderId; - } + // Tell OpenGL to use our program + gl.glUseProgram(programId); - @Override - protected Mat process(Mat in) { - if (in.width() != previousWidth && in.height() != previousHeight) { - logger.debug("Resizing OpenGL viewport, byte buffers, and PBOs"); + // Set up our texture + texture = new Texture(GL_TEXTURE_2D); + texture.setTexParameteri(gl, GL_TEXTURE_MIN_FILTER, GL_LINEAR); + texture.setTexParameteri(gl, GL_TEXTURE_MAG_FILTER, GL_LINEAR); + texture.setTexParameteri(gl, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); + texture.setTexParameteri(gl, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); - drawable.setSurfaceSize(in.width(), in.height()); - gl.glViewport(0, 0, in.width(), in.height()); + // Set up a uniform holding our image as a texture + textureUniformId = gl.glGetUniformLocation(programId, "texture0"); + gl.glUniform1i(textureUniformId, 0); - previousWidth = in.width(); - previousHeight = in.height(); + // Set up a uniform to hold image resolution + resolutionUniformId = gl.glGetUniformLocation(programId, "resolution"); - inputBytes = new byte[in.width() * in.height() * 3]; + // Set up uniforms for the HSV thresholds + lowerUniformId = gl.glGetUniformLocation(programId, "lowerThresh"); + upperUniformId = gl.glGetUniformLocation(programId, "upperThresh"); - outputBytes = new byte[in.width() * in.height()]; - outputMat = new Mat(k_startingHeight, k_startingWidth, CvType.CV_8UC1); + // Set up a quad that covers the entire screen so that our fragment shader draws onto the entire + // screen + gl.glGenBuffers(1, vertexVBOIds); - if (pboMode != PBOMode.NONE) { - gl.glBindBuffer(GLES3.GL_PIXEL_PACK_BUFFER, packPBOIds.get(0)); + FloatBuffer vertexBuffer = 
GLBuffers.newDirectFloatBuffer(k_vertexPositions); + gl.glBindBuffer(GL_ARRAY_BUFFER, vertexVBOIds.get(0)); gl.glBufferData( - GLES3.GL_PIXEL_PACK_BUFFER, - (long) in.width() * in.height(), - null, - GLES3.GL_STREAM_READ); + GL_ARRAY_BUFFER, + (long) vertexBuffer.capacity() * Float.BYTES, + vertexBuffer, + GL_STATIC_DRAW); + + // Set up pixel unpack buffer (a PBO to transfer image data to the GPU) + if (pboMode != PBOMode.NONE) { + gl.glGenBuffers(2, unpackPBOIds); + + gl.glBindBuffer(GLES3.GL_PIXEL_UNPACK_BUFFER, unpackPBOIds.get(0)); + gl.glBufferData( + GLES3.GL_PIXEL_UNPACK_BUFFER, + k_startingHeight * k_startingWidth * 3, + null, + GLES3.GL_STREAM_DRAW); + if (pboMode == PBOMode.DOUBLE_BUFFERED) { + gl.glBindBuffer(GLES3.GL_PIXEL_UNPACK_BUFFER, unpackPBOIds.get(1)); + gl.glBufferData( + GLES3.GL_PIXEL_UNPACK_BUFFER, + k_startingHeight * k_startingWidth * 3, + null, + GLES3.GL_STREAM_DRAW); + } + gl.glBindBuffer(GLES3.GL_PIXEL_UNPACK_BUFFER, 0); + } - if (pboMode == PBOMode.DOUBLE_BUFFERED) { - gl.glBindBuffer(GLES3.GL_PIXEL_PACK_BUFFER, packPBOIds.get(1)); - gl.glBufferData( - GLES3.GL_PIXEL_PACK_BUFFER, - (long) in.width() * in.height(), - null, - GLES3.GL_STREAM_READ); + // Set up pixel pack buffer (a PBO to transfer the processed image back from the GPU) + if (pboMode != PBOMode.NONE) { + gl.glGenBuffers(2, packPBOIds); + + gl.glBindBuffer(GLES3.GL_PIXEL_PACK_BUFFER, packPBOIds.get(0)); + gl.glBufferData( + GLES3.GL_PIXEL_PACK_BUFFER, + k_startingHeight * k_startingWidth, + null, + GLES3.GL_STREAM_READ); + if (pboMode == PBOMode.DOUBLE_BUFFERED) { + gl.glBindBuffer(GLES3.GL_PIXEL_PACK_BUFFER, packPBOIds.get(1)); + gl.glBufferData( + GLES3.GL_PIXEL_PACK_BUFFER, + k_startingHeight * k_startingWidth, + null, + GLES3.GL_STREAM_READ); + } + gl.glBindBuffer(GLES3.GL_PIXEL_PACK_BUFFER, 0); } - } } - if (pboMode == PBOMode.DOUBLE_BUFFERED) { - unpackIndex = (unpackIndex + 1) % 2; - unpackNextIndex = (unpackIndex + 1) % 2; - } + private static int 
createShader(GL2ES2 gl, int programId, String glslCode, int shaderType) { + int shaderId = gl.glCreateShader(shaderType); + if (shaderId == 0) throw new RuntimeException("Shader ID is zero"); + + IntBuffer length = GLBuffers.newDirectIntBuffer(new int[] {glslCode.length()}); + gl.glShaderSource(shaderId, 1, new String[] {glslCode}, length); + gl.glCompileShader(shaderId); + + IntBuffer intBuffer = IntBuffer.allocate(1); + gl.glGetShaderiv(shaderId, GL_COMPILE_STATUS, intBuffer); + + if (intBuffer.get(0) != 1) { + gl.glGetShaderiv(shaderId, GL_INFO_LOG_LENGTH, intBuffer); + int size = intBuffer.get(0); + if (size > 0) { + ByteBuffer byteBuffer = ByteBuffer.allocate(size); + gl.glGetShaderInfoLog(shaderId, size, intBuffer, byteBuffer); + System.err.println(new String(byteBuffer.array())); + } + throw new RuntimeException("Couldn't compile shader"); + } + + gl.glAttachShader(programId, shaderId); - // Reset the fullscreen quad - gl.glBindBuffer(GL_ARRAY_BUFFER, vertexVBOIds.get(0)); - gl.glEnableVertexAttribArray(k_positionVertexAttribute); - gl.glVertexAttribPointer(0, 2, GL_FLOAT, false, 0, 0); - gl.glBindBuffer(GL_ARRAY_BUFFER, 0); - - // Load and bind our image as a 2D texture - gl.glActiveTexture(GL_TEXTURE0); - texture.enable(gl); - texture.bind(gl); - - // Load our image into the texture - in.get(0, 0, inputBytes); - if (pboMode == PBOMode.NONE) { - ByteBuffer buf = ByteBuffer.wrap(inputBytes); - // (We're actually taking in BGR even though this says RGB; it's much easier and faster to - // switch it around in the fragment shader) - texture.updateImage( - gl, - new TextureData( - profile, - GL_RGB8, - in.width(), - in.height(), - 0, - GL_RGB, - GL_UNSIGNED_BYTE, - false, - false, - false, - buf, - null)); - } else { - // Bind the PBO to the texture - gl.glBindBuffer(GLES3.GL_PIXEL_UNPACK_BUFFER, unpackPBOIds.get(unpackIndex)); - - // Copy pixels from the PBO to the texture object - gl.glTexSubImage2D( - GLES3.GL_TEXTURE_2D, - 0, - 0, - 0, - in.width(), - 
in.height(), - GLES3.GL_RGB8, - GLES3.GL_UNSIGNED_BYTE, - 0); - - // Bind (potentially) another PBO to update the texture source - gl.glBindBuffer(GLES3.GL_PIXEL_UNPACK_BUFFER, unpackPBOIds.get(unpackNextIndex)); - - // This call with a nullptr for the data arg tells OpenGL *not* to wait to be in sync with the - // GPU - // This causes the previous data in the PBO to be discarded - gl.glBufferData( - GLES3.GL_PIXEL_UNPACK_BUFFER, - (long) in.width() * in.height() * 3, - null, - GLES3.GL_STREAM_DRAW); - - // Map the buffer of GPU memory into a place that's accessible by us - var buf = - gl.glMapBufferRange( - GLES3.GL_PIXEL_UNPACK_BUFFER, - 0, - (long) in.width() * in.height() * 3, - GLES3.GL_MAP_WRITE_BIT); - buf.put(inputBytes); - - gl.glUnmapBuffer(GLES3.GL_PIXEL_UNPACK_BUFFER); - gl.glBindBuffer(GLES3.GL_PIXEL_UNPACK_BUFFER, 0); + return shaderId; } - // Put values in a uniform holding the image resolution - gl.glUniform2f(resolutionUniformId, in.width(), in.height()); + @Override + protected Mat process(Mat in) { + if (in.width() != previousWidth && in.height() != previousHeight) { + logger.debug("Resizing OpenGL viewport, byte buffers, and PBOs"); + + drawable.setSurfaceSize(in.width(), in.height()); + gl.glViewport(0, 0, in.width(), in.height()); + + previousWidth = in.width(); + previousHeight = in.height(); + + inputBytes = new byte[in.width() * in.height() * 3]; + + outputBytes = new byte[in.width() * in.height()]; + outputMat = new Mat(k_startingHeight, k_startingWidth, CvType.CV_8UC1); + + if (pboMode != PBOMode.NONE) { + gl.glBindBuffer(GLES3.GL_PIXEL_PACK_BUFFER, packPBOIds.get(0)); + gl.glBufferData( + GLES3.GL_PIXEL_PACK_BUFFER, + (long) in.width() * in.height(), + null, + GLES3.GL_STREAM_READ); + + if (pboMode == PBOMode.DOUBLE_BUFFERED) { + gl.glBindBuffer(GLES3.GL_PIXEL_PACK_BUFFER, packPBOIds.get(1)); + gl.glBufferData( + GLES3.GL_PIXEL_PACK_BUFFER, + (long) in.width() * in.height(), + null, + GLES3.GL_STREAM_READ); + } + } + } + + if (pboMode == 
PBOMode.DOUBLE_BUFFERED) { + unpackIndex = (unpackIndex + 1) % 2; + unpackNextIndex = (unpackIndex + 1) % 2; + } + + // Reset the fullscreen quad + gl.glBindBuffer(GL_ARRAY_BUFFER, vertexVBOIds.get(0)); + gl.glEnableVertexAttribArray(k_positionVertexAttribute); + gl.glVertexAttribPointer(0, 2, GL_FLOAT, false, 0, 0); + gl.glBindBuffer(GL_ARRAY_BUFFER, 0); + + // Load and bind our image as a 2D texture + gl.glActiveTexture(GL_TEXTURE0); + texture.enable(gl); + texture.bind(gl); + + // Load our image into the texture + in.get(0, 0, inputBytes); + if (pboMode == PBOMode.NONE) { + ByteBuffer buf = ByteBuffer.wrap(inputBytes); + // (We're actually taking in BGR even though this says RGB; it's much easier and faster to + // switch it around in the fragment shader) + texture.updateImage( + gl, + new TextureData( + profile, + GL_RGB8, + in.width(), + in.height(), + 0, + GL_RGB, + GL_UNSIGNED_BYTE, + false, + false, + false, + buf, + null)); + } else { + // Bind the PBO to the texture + gl.glBindBuffer(GLES3.GL_PIXEL_UNPACK_BUFFER, unpackPBOIds.get(unpackIndex)); + + // Copy pixels from the PBO to the texture object + gl.glTexSubImage2D( + GLES3.GL_TEXTURE_2D, + 0, + 0, + 0, + in.width(), + in.height(), + GLES3.GL_RGB8, + GLES3.GL_UNSIGNED_BYTE, + 0); + + // Bind (potentially) another PBO to update the texture source + gl.glBindBuffer(GLES3.GL_PIXEL_UNPACK_BUFFER, unpackPBOIds.get(unpackNextIndex)); + + // This call with a nullptr for the data arg tells OpenGL *not* to wait to be in sync with the + // GPU + // This causes the previous data in the PBO to be discarded + gl.glBufferData( + GLES3.GL_PIXEL_UNPACK_BUFFER, + (long) in.width() * in.height() * 3, + null, + GLES3.GL_STREAM_DRAW); + + // Map the buffer of GPU memory into a place that's accessible by us + var buf = + gl.glMapBufferRange( + GLES3.GL_PIXEL_UNPACK_BUFFER, + 0, + (long) in.width() * in.height() * 3, + GLES3.GL_MAP_WRITE_BIT); + buf.put(inputBytes); + + gl.glUnmapBuffer(GLES3.GL_PIXEL_UNPACK_BUFFER); + 
gl.glBindBuffer(GLES3.GL_PIXEL_UNPACK_BUFFER, 0); + } - // Put values in threshold uniforms - var lowr = params.getHsvLower().val; - var upr = params.getHsvUpper().val; - gl.glUniform3f(lowerUniformId, (float) lowr[0], (float) lowr[1], (float) lowr[2]); - gl.glUniform3f(upperUniformId, (float) upr[0], (float) upr[1], (float) upr[2]); + // Put values in a uniform holding the image resolution + gl.glUniform2f(resolutionUniformId, in.width(), in.height()); - // Draw the fullscreen quad - gl.glDrawArrays(GL_TRIANGLE_STRIP, 0, k_vertexPositions.length); + // Put values in threshold uniforms + var lowr = params.getHsvLower().val; + var upr = params.getHsvUpper().val; + gl.glUniform3f(lowerUniformId, (float) lowr[0], (float) lowr[1], (float) lowr[2]); + gl.glUniform3f(upperUniformId, (float) upr[0], (float) upr[1], (float) upr[2]); - // Cleanup - texture.disable(gl); - gl.glDisableVertexAttribArray(k_positionVertexAttribute); - gl.glUseProgram(0); + // Draw the fullscreen quad + gl.glDrawArrays(GL_TRIANGLE_STRIP, 0, k_vertexPositions.length); - if (pboMode == PBOMode.NONE) { - return saveMatNoPBO(gl, in.width(), in.height()); - } else { - return saveMatPBO((GLES3) gl, in.width(), in.height(), pboMode == PBOMode.DOUBLE_BUFFERED); + // Cleanup + texture.disable(gl); + gl.glDisableVertexAttribArray(k_positionVertexAttribute); + gl.glUseProgram(0); + + if (pboMode == PBOMode.NONE) { + return saveMatNoPBO(gl, in.width(), in.height()); + } else { + return saveMatPBO((GLES3) gl, in.width(), in.height(), pboMode == PBOMode.DOUBLE_BUFFERED); + } } - } - - private Mat saveMatNoPBO(GL2ES2 gl, int width, int height) { - ByteBuffer buffer = GLBuffers.newDirectByteBuffer(width * height); - // We use GL_RED/GL_ALPHA to get things in a single-channel format - // Note that which pixel format you use is *very* important to performance - // E.g. 
GL_ALPHA is super slow in this case - gl.glReadPixels(0, 0, width, height, outputFormat, GL_UNSIGNED_BYTE, buffer); - - return new Mat(height, width, CvType.CV_8UC1, buffer); - } - - private Mat saveMatPBO(GLES3 gl, int width, int height, boolean doubleBuffered) { - if (doubleBuffered) { - packIndex = (packIndex + 1) % 2; - packNextIndex = (packIndex + 1) % 2; + + private Mat saveMatNoPBO(GL2ES2 gl, int width, int height) { + ByteBuffer buffer = GLBuffers.newDirectByteBuffer(width * height); + // We use GL_RED/GL_ALPHA to get things in a single-channel format + // Note that which pixel format you use is *very* important to performance + // E.g. GL_ALPHA is super slow in this case + gl.glReadPixels(0, 0, width, height, outputFormat, GL_UNSIGNED_BYTE, buffer); + + return new Mat(height, width, CvType.CV_8UC1, buffer); } - // Set the target framebuffer attachment to read - gl.glReadBuffer(GLES3.GL_COLOR_ATTACHMENT0); - - // Read pixels from the framebuffer to the PBO - gl.glBindBuffer(GLES3.GL_PIXEL_PACK_BUFFER, packPBOIds.get(packIndex)); - // We use GL_RED (which is always supported in GLES3) to get things in a single-channel format - // Note that which pixel format you use is *very* important to performance - // E.g. 
GL_ALPHA is super slow in this case - gl.glReadPixels(0, 0, width, height, GLES3.GL_RED, GLES3.GL_UNSIGNED_BYTE, 0); - - // Map the PBO into the CPU's memory - gl.glBindBuffer(GLES3.GL_PIXEL_PACK_BUFFER, packPBOIds.get(packNextIndex)); - var buf = - gl.glMapBufferRange( - GLES3.GL_PIXEL_PACK_BUFFER, 0, (long) width * height, GLES3.GL_MAP_READ_BIT); - buf.get(outputBytes); - outputMat.put(0, 0, outputBytes); - gl.glUnmapBuffer(GLES3.GL_PIXEL_PACK_BUFFER); - gl.glBindBuffer(GLES3.GL_PIXEL_PACK_BUFFER, 0); - - return outputMat; - } + private Mat saveMatPBO(GLES3 gl, int width, int height, boolean doubleBuffered) { + if (doubleBuffered) { + packIndex = (packIndex + 1) % 2; + packNextIndex = (packIndex + 1) % 2; + } + + // Set the target framebuffer attachment to read + gl.glReadBuffer(GLES3.GL_COLOR_ATTACHMENT0); + + // Read pixels from the framebuffer to the PBO + gl.glBindBuffer(GLES3.GL_PIXEL_PACK_BUFFER, packPBOIds.get(packIndex)); + // We use GL_RED (which is always supported in GLES3) to get things in a single-channel format + // Note that which pixel format you use is *very* important to performance + // E.g. 
GL_ALPHA is super slow in this case + gl.glReadPixels(0, 0, width, height, GLES3.GL_RED, GLES3.GL_UNSIGNED_BYTE, 0); + + // Map the PBO into the CPU's memory + gl.glBindBuffer(GLES3.GL_PIXEL_PACK_BUFFER, packPBOIds.get(packNextIndex)); + var buf = + gl.glMapBufferRange( + GLES3.GL_PIXEL_PACK_BUFFER, 0, (long) width * height, GLES3.GL_MAP_READ_BIT); + buf.get(outputBytes); + outputMat.put(0, 0, outputBytes); + gl.glUnmapBuffer(GLES3.GL_PIXEL_PACK_BUFFER); + gl.glBindBuffer(GLES3.GL_PIXEL_PACK_BUFFER, 0); + + return outputMat; + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/GrayscalePipe.java b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/GrayscalePipe.java index cc8404ec70..04b1bf774a 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/GrayscalePipe.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/GrayscalePipe.java @@ -22,17 +22,17 @@ import org.photonvision.vision.pipe.CVPipe; public class GrayscalePipe extends CVPipe { - @Override - protected Mat process(Mat in) { - var outputMat = new Mat(); - // We can save a copy here by sending the output of cvtcolor to outputMat directly - // rather than copying. Free performance! - Imgproc.cvtColor(in, outputMat, Imgproc.COLOR_BGR2GRAY, 3); + @Override + protected Mat process(Mat in) { + var outputMat = new Mat(); + // We can save a copy here by sending the output of cvtcolor to outputMat directly + // rather than copying. Free performance! 
+ Imgproc.cvtColor(in, outputMat, Imgproc.COLOR_BGR2GRAY, 3); - return outputMat; - } + return outputMat; + } - public static class GrayscaleParams { - public GrayscaleParams() {} - } + public static class GrayscaleParams { + public GrayscaleParams() {} + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/GroupContoursPipe.java b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/GroupContoursPipe.java index 84b6e819df..7711467dd2 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/GroupContoursPipe.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/GroupContoursPipe.java @@ -27,83 +27,83 @@ import org.photonvision.vision.target.PotentialTarget; public class GroupContoursPipe - extends CVPipe, List, GroupContoursPipe.GroupContoursParams> { - private final List m_targets = new ArrayList<>(); + extends CVPipe, List, GroupContoursPipe.GroupContoursParams> { + private final List m_targets = new ArrayList<>(); - @Override - protected List process(List input) { - for (var target : m_targets) { - target.release(); - } + @Override + protected List process(List input) { + for (var target : m_targets) { + target.release(); + } - m_targets.clear(); + m_targets.clear(); - if (params.getGroup() == ContourGroupingMode.Single) { - for (var contour : input) { - m_targets.add(new PotentialTarget(contour)); - } - } - // Check if we have at least 2 targets for 2 or more - // This will only ever return 1 contour! 
- else if (params.getGroup() == ContourGroupingMode.TwoOrMore - && input.size() >= ContourGroupingMode.TwoOrMore.count) { - // Just blob everything together - Contour groupedContour = Contour.combineContourList(input); - if (groupedContour != null) { - m_targets.add(new PotentialTarget(groupedContour, input)); - } - } else { - int groupingCount = params.getGroup().count; + if (params.getGroup() == ContourGroupingMode.Single) { + for (var contour : input) { + m_targets.add(new PotentialTarget(contour)); + } + } + // Check if we have at least 2 targets for 2 or more + // This will only ever return 1 contour! + else if (params.getGroup() == ContourGroupingMode.TwoOrMore + && input.size() >= ContourGroupingMode.TwoOrMore.count) { + // Just blob everything together + Contour groupedContour = Contour.combineContourList(input); + if (groupedContour != null) { + m_targets.add(new PotentialTarget(groupedContour, input)); + } + } else { + int groupingCount = params.getGroup().count; - if (input.size() >= groupingCount) { - input.sort(Contour.SortByMomentsX); - // also why reverse? shouldn't the sort comparator just get reversed? - // TODO: Matt, see this - Collections.reverse(input); + if (input.size() >= groupingCount) { + input.sort(Contour.SortByMomentsX); + // also why reverse? shouldn't the sort comparator just get reversed? 
+ // TODO: Matt, see this + Collections.reverse(input); - for (int i = 0; i < input.size() - 1; i++) { - // make a list of the desired count of contours to group - // (Just make sure we don't get an index out of bounds exception - if (i < 0 || i + groupingCount > input.size()) continue; + for (int i = 0; i < input.size() - 1; i++) { + // make a list of the desired count of contours to group + // (Just make sure we don't get an index out of bounds exception + if (i < 0 || i + groupingCount > input.size()) continue; - // If we're in two or more mode, just try to group everything - List groupingSet = input.subList(i, i + groupingCount); + // If we're in two or more mode, just try to group everything + List groupingSet = input.subList(i, i + groupingCount); - try { - // FYI: This method only takes 2 contours! - Contour groupedContour = - Contour.groupContoursByIntersection( - groupingSet.get(0), groupingSet.get(1), params.getIntersection()); + try { + // FYI: This method only takes 2 contours! 
+ Contour groupedContour = + Contour.groupContoursByIntersection( + groupingSet.get(0), groupingSet.get(1), params.getIntersection()); - if (groupedContour != null) { - m_targets.add(new PotentialTarget(groupedContour, groupingSet)); - i += (groupingCount - 1); + if (groupedContour != null) { + m_targets.add(new PotentialTarget(groupedContour, groupingSet)); + i += (groupingCount - 1); + } + } catch (Exception ex) { + ex.printStackTrace(); + } + } } - } catch (Exception ex) { - ex.printStackTrace(); - } } - } + return m_targets; } - return m_targets; - } - public static class GroupContoursParams { - private final ContourGroupingMode m_group; - private final ContourIntersectionDirection m_intersection; + public static class GroupContoursParams { + private final ContourGroupingMode m_group; + private final ContourIntersectionDirection m_intersection; - public GroupContoursParams( - ContourGroupingMode group, ContourIntersectionDirection intersectionDirection) { - m_group = group; - m_intersection = intersectionDirection; - } + public GroupContoursParams( + ContourGroupingMode group, ContourIntersectionDirection intersectionDirection) { + m_group = group; + m_intersection = intersectionDirection; + } - public ContourGroupingMode getGroup() { - return m_group; - } + public ContourGroupingMode getGroup() { + return m_group; + } - public ContourIntersectionDirection getIntersection() { - return m_intersection; + public ContourIntersectionDirection getIntersection() { + return m_intersection; + } } - } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/HSVPipe.java b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/HSVPipe.java index c9ac07fa54..e0ba36ce0e 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/HSVPipe.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/HSVPipe.java @@ -25,70 +25,70 @@ import org.photonvision.vision.pipe.CVPipe; public class HSVPipe extends CVPipe { - @Override - 
protected Mat process(Mat in) { - var outputMat = new Mat(); - // We can save a copy here by sending the output of cvtcolor to outputMat directly - // rather than copying. Free performance! - Imgproc.cvtColor(in, outputMat, Imgproc.COLOR_BGR2HSV, 3); - - if (params.getHueInverted()) { - // In Java code we do this by taking an image thresholded - // from [0, minHue] and ORing it with [maxHue, 180] - - // we want hue from the end of the slider to max hue - Scalar firstLower = params.getHsvLower().clone(); - Scalar firstUpper = params.getHsvUpper().clone(); - firstLower.val[0] = params.getHsvUpper().val[0]; - firstUpper.val[0] = 180; - - var lowerThresholdMat = new Mat(); - Core.inRange(outputMat, firstLower, firstUpper, lowerThresholdMat); - - // We want hue from 0 to the start of the slider - var secondLower = params.getHsvLower().clone(); - var secondUpper = params.getHsvUpper().clone(); - secondLower.val[0] = 0; - secondUpper.val[0] = params.getHsvLower().val[0]; - - // Now that the output mat's been used by the first inRange, it's fine to mutate it - Core.inRange(outputMat, secondLower, secondUpper, outputMat); - - // Now OR the two images together to make a mat that combines the lower and upper bounds - // outputMat holds the second half of the range - Core.bitwise_or(lowerThresholdMat, outputMat, outputMat); - - lowerThresholdMat.release(); - } else { - Core.inRange(outputMat, params.getHsvLower(), params.getHsvUpper(), outputMat); + @Override + protected Mat process(Mat in) { + var outputMat = new Mat(); + // We can save a copy here by sending the output of cvtcolor to outputMat directly + // rather than copying. Free performance! 
+ Imgproc.cvtColor(in, outputMat, Imgproc.COLOR_BGR2HSV, 3); + + if (params.getHueInverted()) { + // In Java code we do this by taking an image thresholded + // from [0, minHue] and ORing it with [maxHue, 180] + + // we want hue from the end of the slider to max hue + Scalar firstLower = params.getHsvLower().clone(); + Scalar firstUpper = params.getHsvUpper().clone(); + firstLower.val[0] = params.getHsvUpper().val[0]; + firstUpper.val[0] = 180; + + var lowerThresholdMat = new Mat(); + Core.inRange(outputMat, firstLower, firstUpper, lowerThresholdMat); + + // We want hue from 0 to the start of the slider + var secondLower = params.getHsvLower().clone(); + var secondUpper = params.getHsvUpper().clone(); + secondLower.val[0] = 0; + secondUpper.val[0] = params.getHsvLower().val[0]; + + // Now that the output mat's been used by the first inRange, it's fine to mutate it + Core.inRange(outputMat, secondLower, secondUpper, outputMat); + + // Now OR the two images together to make a mat that combines the lower and upper bounds + // outputMat holds the second half of the range + Core.bitwise_or(lowerThresholdMat, outputMat, outputMat); + + lowerThresholdMat.release(); + } else { + Core.inRange(outputMat, params.getHsvLower(), params.getHsvUpper(), outputMat); + } + + return outputMat; } - return outputMat; - } + public static class HSVParams { + private final Scalar m_hsvLower; + private final Scalar m_hsvUpper; + private final boolean m_hueInverted; - public static class HSVParams { - private final Scalar m_hsvLower; - private final Scalar m_hsvUpper; - private final boolean m_hueInverted; + public HSVParams( + IntegerCouple hue, IntegerCouple saturation, IntegerCouple value, boolean hueInverted) { + m_hsvLower = new Scalar(hue.getFirst(), saturation.getFirst(), value.getFirst()); + m_hsvUpper = new Scalar(hue.getSecond(), saturation.getSecond(), value.getSecond()); - public HSVParams( - IntegerCouple hue, IntegerCouple saturation, IntegerCouple value, boolean hueInverted) 
{ - m_hsvLower = new Scalar(hue.getFirst(), saturation.getFirst(), value.getFirst()); - m_hsvUpper = new Scalar(hue.getSecond(), saturation.getSecond(), value.getSecond()); + this.m_hueInverted = hueInverted; + } - this.m_hueInverted = hueInverted; - } - - public Scalar getHsvLower() { - return m_hsvLower; - } + public Scalar getHsvLower() { + return m_hsvLower; + } - public Scalar getHsvUpper() { - return m_hsvUpper; - } + public Scalar getHsvUpper() { + return m_hsvUpper; + } - public boolean getHueInverted() { - return m_hueInverted; + public boolean getHueInverted() { + return m_hueInverted; + } } - } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/MultiTargetPNPPipe.java b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/MultiTargetPNPPipe.java index 17b5e7ae06..5ce9f124cc 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/MultiTargetPNPPipe.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/MultiTargetPNPPipe.java @@ -30,59 +30,59 @@ /** Estimate the camera pose given multiple Apriltag observations */ public class MultiTargetPNPPipe - extends CVPipe< - List, MultiTargetPNPResults, MultiTargetPNPPipe.MultiTargetPNPPipeParams> { - private static final Logger logger = new Logger(MultiTargetPNPPipe.class, LogGroup.VisionModule); + extends CVPipe< + List, MultiTargetPNPResults, MultiTargetPNPPipe.MultiTargetPNPPipeParams> { + private static final Logger logger = new Logger(MultiTargetPNPPipe.class, LogGroup.VisionModule); - private boolean hasWarned = false; + private boolean hasWarned = false; - @Override - protected MultiTargetPNPResults process(List targetList) { - if (params.cameraCoefficients == null - || params.cameraCoefficients.getCameraIntrinsicsMat() == null - || params.cameraCoefficients.getDistCoeffsMat() == null) { - if (!hasWarned) { - logger.warn( - "Cannot perform solvePNP an uncalibrated camera! 
Please calibrate this resolution..."); - hasWarned = true; - } - return new MultiTargetPNPResults(); - } - - return calculateCameraInField(targetList); - } + @Override + protected MultiTargetPNPResults process(List targetList) { + if (params.cameraCoefficients == null + || params.cameraCoefficients.getCameraIntrinsicsMat() == null + || params.cameraCoefficients.getDistCoeffsMat() == null) { + if (!hasWarned) { + logger.warn( + "Cannot perform solvePNP an uncalibrated camera! Please calibrate this resolution..."); + hasWarned = true; + } + return new MultiTargetPNPResults(); + } - private MultiTargetPNPResults calculateCameraInField(List targetList) { - // Find tag IDs that exist in the tag layout - var tagIDsUsed = new ArrayList(); - for (var target : targetList) { - int id = target.getFiducialId(); - if (params.atfl.getTagPose(id).isPresent()) tagIDsUsed.add(id); + return calculateCameraInField(targetList); } - // Only run with multiple targets - if (tagIDsUsed.size() < 2) { - return new MultiTargetPNPResults(); - } + private MultiTargetPNPResults calculateCameraInField(List targetList) { + // Find tag IDs that exist in the tag layout + var tagIDsUsed = new ArrayList(); + for (var target : targetList) { + int id = target.getFiducialId(); + if (params.atfl.getTagPose(id).isPresent()) tagIDsUsed.add(id); + } - var estimatedPose = - VisionEstimation.estimateCamPosePNP( - params.cameraCoefficients.cameraIntrinsics.getAsWpilibMat(), - params.cameraCoefficients.distCoeffs.getAsWpilibMat(), - TrackedTarget.simpleFromTrackedTargets(targetList), - params.atfl); + // Only run with multiple targets + if (tagIDsUsed.size() < 2) { + return new MultiTargetPNPResults(); + } - return new MultiTargetPNPResults(estimatedPose, tagIDsUsed); - } + var estimatedPose = + VisionEstimation.estimateCamPosePNP( + params.cameraCoefficients.cameraIntrinsics.getAsWpilibMat(), + params.cameraCoefficients.distCoeffs.getAsWpilibMat(), + TrackedTarget.simpleFromTrackedTargets(targetList), + 
params.atfl); + + return new MultiTargetPNPResults(estimatedPose, tagIDsUsed); + } - public static class MultiTargetPNPPipeParams { - private final CameraCalibrationCoefficients cameraCoefficients; - private final AprilTagFieldLayout atfl; + public static class MultiTargetPNPPipeParams { + private final CameraCalibrationCoefficients cameraCoefficients; + private final AprilTagFieldLayout atfl; - public MultiTargetPNPPipeParams( - CameraCalibrationCoefficients cameraCoefficients, AprilTagFieldLayout atfl) { - this.cameraCoefficients = cameraCoefficients; - this.atfl = atfl; + public MultiTargetPNPPipeParams( + CameraCalibrationCoefficients cameraCoefficients, AprilTagFieldLayout atfl) { + this.cameraCoefficients = cameraCoefficients; + this.atfl = atfl; + } } - } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/OutputMatPipe.java b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/OutputMatPipe.java index 56fa001955..944addf95f 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/OutputMatPipe.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/OutputMatPipe.java @@ -22,12 +22,12 @@ import org.photonvision.vision.pipe.MutatingPipe; public class OutputMatPipe extends MutatingPipe { - @Override - protected Void process(Mat in) { - // convert input mat - Imgproc.cvtColor(in, in, Imgproc.COLOR_GRAY2BGR, 3); - return null; - } + @Override + protected Void process(Mat in) { + // convert input mat + Imgproc.cvtColor(in, in, Imgproc.COLOR_GRAY2BGR, 3); + return null; + } - public static class OutputMatParams {} + public static class OutputMatParams {} } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/ResizeImagePipe.java b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/ResizeImagePipe.java index f3ad99a61c..3d9c030434 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/ResizeImagePipe.java +++ 
b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/ResizeImagePipe.java @@ -25,29 +25,29 @@ /** Pipe that resizes an image to a given resolution */ public class ResizeImagePipe extends MutatingPipe { - /** - * Process this pipe - * - * @param in {@link Mat} to be resized - */ - @Override - protected Void process(Mat in) { - int width = in.cols() / params.getDivisor().value; - int height = in.rows() / params.getDivisor().value; - Imgproc.resize(in, in, new Size(width, height)); + /** + * Process this pipe + * + * @param in {@link Mat} to be resized + */ + @Override + protected Void process(Mat in) { + int width = in.cols() / params.getDivisor().value; + int height = in.rows() / params.getDivisor().value; + Imgproc.resize(in, in, new Size(width, height)); - return null; - } + return null; + } - public static class ResizeImageParams { - private final FrameDivisor divisor; + public static class ResizeImageParams { + private final FrameDivisor divisor; - public ResizeImageParams(FrameDivisor divisor) { - this.divisor = divisor; - } + public ResizeImageParams(FrameDivisor divisor) { + this.divisor = divisor; + } - public FrameDivisor getDivisor() { - return divisor; + public FrameDivisor getDivisor() { + return divisor; + } } - } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/RotateImagePipe.java b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/RotateImagePipe.java index 5cbaa67f5d..172eda5e01 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/RotateImagePipe.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/RotateImagePipe.java @@ -24,37 +24,37 @@ /** Pipe that rotates an image to a given orientation */ public class RotateImagePipe extends MutatingPipe { - public RotateImagePipe() { - setParams(RotateImageParams.DEFAULT); - } - - public RotateImagePipe(RotateImageParams params) { - setParams(params); - } - - /** - * Process this pipe - * - * @param in {@link Mat} to be rotated 
- * @return Rotated {@link Mat} - */ - @Override - protected Void process(Mat in) { - Core.rotate(in, in, params.rotation.value); - return null; - } - - public static class RotateImageParams { - public static RotateImageParams DEFAULT = new RotateImageParams(ImageRotationMode.DEG_0); - - public ImageRotationMode rotation; - - public RotateImageParams() { - rotation = DEFAULT.rotation; + public RotateImagePipe() { + setParams(RotateImageParams.DEFAULT); } - public RotateImageParams(ImageRotationMode rotation) { - this.rotation = rotation; + public RotateImagePipe(RotateImageParams params) { + setParams(params); + } + + /** + * Process this pipe + * + * @param in {@link Mat} to be rotated + * @return Rotated {@link Mat} + */ + @Override + protected Void process(Mat in) { + Core.rotate(in, in, params.rotation.value); + return null; + } + + public static class RotateImageParams { + public static RotateImageParams DEFAULT = new RotateImageParams(ImageRotationMode.DEG_0); + + public ImageRotationMode rotation; + + public RotateImageParams() { + rotation = DEFAULT.rotation; + } + + public RotateImageParams(ImageRotationMode rotation) { + this.rotation = rotation; + } } - } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/SolvePNPPipe.java b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/SolvePNPPipe.java index 92ecd0a29f..6a887d3a18 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/SolvePNPPipe.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/SolvePNPPipe.java @@ -35,82 +35,82 @@ import org.photonvision.vision.target.TrackedTarget; public class SolvePNPPipe - extends CVPipe, List, SolvePNPPipe.SolvePNPPipeParams> { - private static final Logger logger = new Logger(SolvePNPPipe.class, LogGroup.VisionModule); + extends CVPipe, List, SolvePNPPipe.SolvePNPPipeParams> { + private static final Logger logger = new Logger(SolvePNPPipe.class, LogGroup.VisionModule); - private final MatOfPoint2f 
imagePoints = new MatOfPoint2f(); + private final MatOfPoint2f imagePoints = new MatOfPoint2f(); - private boolean hasWarned = false; + private boolean hasWarned = false; - @Override - protected List process(List targetList) { - if (params.cameraCoefficients == null - || params.cameraCoefficients.getCameraIntrinsicsMat() == null - || params.cameraCoefficients.getDistCoeffsMat() == null) { - if (!hasWarned) { - logger.warn( - "Cannot perform solvePNP an uncalibrated camera! Please calibrate this resolution..."); - hasWarned = true; - } - return targetList; - } + @Override + protected List process(List targetList) { + if (params.cameraCoefficients == null + || params.cameraCoefficients.getCameraIntrinsicsMat() == null + || params.cameraCoefficients.getDistCoeffsMat() == null) { + if (!hasWarned) { + logger.warn( + "Cannot perform solvePNP an uncalibrated camera! Please calibrate this resolution..."); + hasWarned = true; + } + return targetList; + } - for (var target : targetList) { - calculateTargetPose(target); + for (var target : targetList) { + calculateTargetPose(target); + } + return targetList; } - return targetList; - } - private void calculateTargetPose(TrackedTarget target) { - var corners = target.getTargetCorners(); - if (corners == null - || corners.isEmpty() - || params.cameraCoefficients == null - || params.cameraCoefficients.getCameraIntrinsicsMat() == null - || params.cameraCoefficients.getDistCoeffsMat() == null) { - return; - } - this.imagePoints.fromList(corners); + private void calculateTargetPose(TrackedTarget target) { + var corners = target.getTargetCorners(); + if (corners == null + || corners.isEmpty() + || params.cameraCoefficients == null + || params.cameraCoefficients.getCameraIntrinsicsMat() == null + || params.cameraCoefficients.getDistCoeffsMat() == null) { + return; + } + this.imagePoints.fromList(corners); - var rVec = new Mat(); - var tVec = new Mat(); - try { - Calib3d.solvePnP( - params.targetModel.getRealWorldTargetCoordinates(), 
- imagePoints, - params.cameraCoefficients.getCameraIntrinsicsMat(), - params.cameraCoefficients.getDistCoeffsMat(), - rVec, - tVec); - } catch (Exception e) { - logger.error("Exception when attempting solvePnP!", e); - return; - } + var rVec = new Mat(); + var tVec = new Mat(); + try { + Calib3d.solvePnP( + params.targetModel.getRealWorldTargetCoordinates(), + imagePoints, + params.cameraCoefficients.getCameraIntrinsicsMat(), + params.cameraCoefficients.getDistCoeffsMat(), + rVec, + tVec); + } catch (Exception e) { + logger.error("Exception when attempting solvePnP!", e); + return; + } - target.setCameraRelativeTvec(tVec); - target.setCameraRelativeRvec(rVec); + target.setCameraRelativeTvec(tVec); + target.setCameraRelativeRvec(rVec); - Translation3d translation = - new Translation3d(tVec.get(0, 0)[0], tVec.get(1, 0)[0], tVec.get(2, 0)[0]); - Rotation3d rotation = - new Rotation3d( - VecBuilder.fill(rVec.get(0, 0)[0], rVec.get(1, 0)[0], rVec.get(2, 0)[0]), - Core.norm(rVec)); + Translation3d translation = + new Translation3d(tVec.get(0, 0)[0], tVec.get(1, 0)[0], tVec.get(2, 0)[0]); + Rotation3d rotation = + new Rotation3d( + VecBuilder.fill(rVec.get(0, 0)[0], rVec.get(1, 0)[0], rVec.get(2, 0)[0]), + Core.norm(rVec)); - Transform3d camToTarget = - MathUtils.convertOpenCVtoPhotonTransform(new Transform3d(translation, rotation)); - target.setBestCameraToTarget3d(camToTarget); - target.setAltCameraToTarget3d(new Transform3d()); - } + Transform3d camToTarget = + MathUtils.convertOpenCVtoPhotonTransform(new Transform3d(translation, rotation)); + target.setBestCameraToTarget3d(camToTarget); + target.setAltCameraToTarget3d(new Transform3d()); + } - public static class SolvePNPPipeParams { - private final CameraCalibrationCoefficients cameraCoefficients; - private final TargetModel targetModel; + public static class SolvePNPPipeParams { + private final CameraCalibrationCoefficients cameraCoefficients; + private final TargetModel targetModel; - public SolvePNPPipeParams( - 
CameraCalibrationCoefficients cameraCoefficients, TargetModel targetModel) { - this.cameraCoefficients = cameraCoefficients; - this.targetModel = targetModel; + public SolvePNPPipeParams( + CameraCalibrationCoefficients cameraCoefficients, TargetModel targetModel) { + this.cameraCoefficients = cameraCoefficients; + this.targetModel = targetModel; + } } - } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/SortContoursPipe.java b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/SortContoursPipe.java index 7c9eef2001..7bdc9bf0d6 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/SortContoursPipe.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/SortContoursPipe.java @@ -26,58 +26,58 @@ import org.photonvision.vision.target.PotentialTarget; public class SortContoursPipe - extends CVPipe< - List, List, SortContoursPipe.SortContoursParams> { - private final List m_sortedContours = new ArrayList<>(); + extends CVPipe< + List, List, SortContoursPipe.SortContoursParams> { + private final List m_sortedContours = new ArrayList<>(); - @Override - protected List process(List in) { - for (var oldTarget : m_sortedContours) { - oldTarget.release(); - } - m_sortedContours.clear(); + @Override + protected List process(List in) { + for (var oldTarget : m_sortedContours) { + oldTarget.release(); + } + m_sortedContours.clear(); + + if (!in.isEmpty()) { + m_sortedContours.addAll(in); + if (params.getSortMode() != ContourSortMode.Centermost) { + m_sortedContours.sort(params.getSortMode().getComparator()); + } else { + m_sortedContours.sort(Comparator.comparingDouble(this::calcSquareCenterDistance)); + } + } - if (!in.isEmpty()) { - m_sortedContours.addAll(in); - if (params.getSortMode() != ContourSortMode.Centermost) { - m_sortedContours.sort(params.getSortMode().getComparator()); - } else { - m_sortedContours.sort(Comparator.comparingDouble(this::calcSquareCenterDistance)); - } + return new ArrayList<>( + 
m_sortedContours.subList(0, Math.min(in.size(), params.getMaxTargets()))); } - return new ArrayList<>( - m_sortedContours.subList(0, Math.min(in.size(), params.getMaxTargets()))); - } + private double calcSquareCenterDistance(PotentialTarget rect) { + return Math.sqrt( + Math.pow(params.getCamProperties().centerX - rect.getMinAreaRect().center.x, 2) + + Math.pow(params.getCamProperties().centerY - rect.getMinAreaRect().center.y, 2)); + } - private double calcSquareCenterDistance(PotentialTarget rect) { - return Math.sqrt( - Math.pow(params.getCamProperties().centerX - rect.getMinAreaRect().center.x, 2) - + Math.pow(params.getCamProperties().centerY - rect.getMinAreaRect().center.y, 2)); - } + public static class SortContoursParams { + private final ContourSortMode m_sortMode; + private final int m_maxTargets; + private final FrameStaticProperties m_frameStaticProperties; - public static class SortContoursParams { - private final ContourSortMode m_sortMode; - private final int m_maxTargets; - private final FrameStaticProperties m_frameStaticProperties; + public SortContoursParams( + ContourSortMode sortMode, int maxTargets, FrameStaticProperties camProperties) { + m_sortMode = sortMode; + m_maxTargets = maxTargets; + m_frameStaticProperties = camProperties; + } - public SortContoursParams( - ContourSortMode sortMode, int maxTargets, FrameStaticProperties camProperties) { - m_sortMode = sortMode; - m_maxTargets = maxTargets; - m_frameStaticProperties = camProperties; - } + public ContourSortMode getSortMode() { + return m_sortMode; + } - public ContourSortMode getSortMode() { - return m_sortMode; - } - - public FrameStaticProperties getCamProperties() { - return m_frameStaticProperties; - } + public FrameStaticProperties getCamProperties() { + return m_frameStaticProperties; + } - public int getMaxTargets() { - return m_maxTargets; + public int getMaxTargets() { + return m_maxTargets; + } } - } } diff --git 
a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/SpeckleRejectPipe.java b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/SpeckleRejectPipe.java index a7aec39693..4f4c5ce547 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipe/impl/SpeckleRejectPipe.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipe/impl/SpeckleRejectPipe.java @@ -23,45 +23,45 @@ import org.photonvision.vision.pipe.CVPipe; public class SpeckleRejectPipe - extends CVPipe, List, SpeckleRejectPipe.SpeckleRejectParams> { - private final List m_despeckledContours = new ArrayList<>(); + extends CVPipe, List, SpeckleRejectPipe.SpeckleRejectParams> { + private final List m_despeckledContours = new ArrayList<>(); - @Override - protected List process(List in) { - for (var c : m_despeckledContours) { - c.mat.release(); - } - m_despeckledContours.clear(); + @Override + protected List process(List in) { + for (var c : m_despeckledContours) { + c.mat.release(); + } + m_despeckledContours.clear(); - if (!in.isEmpty()) { - double averageArea = 0.0; - for (Contour c : in) { - averageArea += c.getArea(); - } - averageArea /= in.size(); + if (!in.isEmpty()) { + double averageArea = 0.0; + for (Contour c : in) { + averageArea += c.getArea(); + } + averageArea /= in.size(); - double minAllowedArea = params.getMinPercentOfAvg() / 100.0 * averageArea; - for (Contour c : in) { - if (c.getArea() >= minAllowedArea) { - m_despeckledContours.add(c); - } else { - c.release(); + double minAllowedArea = params.getMinPercentOfAvg() / 100.0 * averageArea; + for (Contour c : in) { + if (c.getArea() >= minAllowedArea) { + m_despeckledContours.add(c); + } else { + c.release(); + } + } } - } - } - return m_despeckledContours; - } + return m_despeckledContours; + } - public static class SpeckleRejectParams { - private final double m_minPercentOfAvg; + public static class SpeckleRejectParams { + private final double m_minPercentOfAvg; - public SpeckleRejectParams(double 
minPercentOfAvg) { - m_minPercentOfAvg = minPercentOfAvg; - } + public SpeckleRejectParams(double minPercentOfAvg) { + m_minPercentOfAvg = minPercentOfAvg; + } - public double getMinPercentOfAvg() { - return m_minPercentOfAvg; + public double getMinPercentOfAvg() { + return m_minPercentOfAvg; + } } - } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipeline/AdvancedPipelineSettings.java b/photon-core/src/main/java/org/photonvision/vision/pipeline/AdvancedPipelineSettings.java index b6c080f6d5..411642c89b 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipeline/AdvancedPipelineSettings.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipeline/AdvancedPipelineSettings.java @@ -31,133 +31,133 @@ import org.photonvision.vision.target.TargetOrientation; public class AdvancedPipelineSettings extends CVPipelineSettings { - public AdvancedPipelineSettings() { - ledMode = true; - } - - public IntegerCouple hsvHue = new IntegerCouple(50, 180); - public IntegerCouple hsvSaturation = new IntegerCouple(50, 255); - public IntegerCouple hsvValue = new IntegerCouple(50, 255); - public boolean hueInverted = false; - - public boolean outputShouldDraw = true; - public boolean outputShowMultipleTargets = false; - - public DoubleCouple contourArea = new DoubleCouple(0.0, 100.0); - public DoubleCouple contourRatio = new DoubleCouple(0.0, 20.0); - public DoubleCouple contourFullness = new DoubleCouple(0.0, 100.0); - public int contourSpecklePercentage = 5; - - // the order in which to sort contours to find the most desirable - public ContourSortMode contourSortMode = ContourSortMode.Largest; - - // the edge (or not) of the target to consider the center point (Top, Bottom, Left, Right, - // Center) - public TargetOffsetPointEdge contourTargetOffsetPointEdge = TargetOffsetPointEdge.Center; - - // orientation of the target in terms of aspect ratio - public TargetOrientation contourTargetOrientation = TargetOrientation.Landscape; - - // the mode 
in which to offset target center point based on the camera being offset on the - // robot - // (None, Single Point, Dual Point) - public RobotOffsetPointMode offsetRobotOffsetMode = RobotOffsetPointMode.None; - - // the point set by the user in Single Point Offset mode (maybe double too? idr) - public Point offsetSinglePoint = new Point(); - - // the two values that define the line of the Dual Point Offset calibration (think y=mx+b) - public Point offsetDualPointA = new Point(); - public double offsetDualPointAArea = 0; - public Point offsetDualPointB = new Point(); - public double offsetDualPointBArea = 0; - - // how many contours to attempt to group (Single, Dual) - public ContourGroupingMode contourGroupingMode = ContourGroupingMode.Single; - - // the direction in which contours must intersect to be considered intersecting - public ContourIntersectionDirection contourIntersection = ContourIntersectionDirection.Up; - - // 3d settings - public boolean solvePNPEnabled = false; - public TargetModel targetModel = TargetModel.k2020HighGoalOuter; - - // Corner detection settings - public CornerDetectionPipe.DetectionStrategy cornerDetectionStrategy = - CornerDetectionPipe.DetectionStrategy.APPROX_POLY_DP_AND_EXTREME_CORNERS; - public boolean cornerDetectionUseConvexHulls = true; - public boolean cornerDetectionExactSideCount = false; - public int cornerDetectionSideCount = 4; - public double cornerDetectionAccuracyPercentage = 10; - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (!(o instanceof AdvancedPipelineSettings)) return false; - if (!super.equals(o)) return false; - AdvancedPipelineSettings that = (AdvancedPipelineSettings) o; - return outputShouldDraw == that.outputShouldDraw - && outputShowMultipleTargets == that.outputShowMultipleTargets - && contourSpecklePercentage == that.contourSpecklePercentage - && Double.compare(that.offsetDualPointAArea, offsetDualPointAArea) == 0 - && Double.compare(that.offsetDualPointBArea, 
offsetDualPointBArea) == 0 - && solvePNPEnabled == that.solvePNPEnabled - && cornerDetectionUseConvexHulls == that.cornerDetectionUseConvexHulls - && cornerDetectionExactSideCount == that.cornerDetectionExactSideCount - && cornerDetectionSideCount == that.cornerDetectionSideCount - && Double.compare(that.cornerDetectionAccuracyPercentage, cornerDetectionAccuracyPercentage) - == 0 - && Objects.equals(hsvHue, that.hsvHue) - && Objects.equals(hsvSaturation, that.hsvSaturation) - && Objects.equals(hsvValue, that.hsvValue) - && Objects.equals(hueInverted, that.hueInverted) - && Objects.equals(contourArea, that.contourArea) - && Objects.equals(contourRatio, that.contourRatio) - && Objects.equals(contourFullness, that.contourFullness) - && contourSortMode == that.contourSortMode - && contourTargetOffsetPointEdge == that.contourTargetOffsetPointEdge - && contourTargetOrientation == that.contourTargetOrientation - && offsetRobotOffsetMode == that.offsetRobotOffsetMode - && Objects.equals(offsetSinglePoint, that.offsetSinglePoint) - && Objects.equals(offsetDualPointA, that.offsetDualPointA) - && Objects.equals(offsetDualPointB, that.offsetDualPointB) - && contourGroupingMode == that.contourGroupingMode - && contourIntersection == that.contourIntersection - && Objects.equals(targetModel, that.targetModel) - && cornerDetectionStrategy == that.cornerDetectionStrategy; - } - - @Override - public int hashCode() { - return Objects.hash( - super.hashCode(), - hsvHue, - hsvSaturation, - hsvValue, - hueInverted, - outputShouldDraw, - outputShowMultipleTargets, - contourArea, - contourRatio, - contourFullness, - contourSpecklePercentage, - contourSortMode, - contourTargetOffsetPointEdge, - contourTargetOrientation, - offsetRobotOffsetMode, - offsetSinglePoint, - offsetDualPointA, - offsetDualPointAArea, - offsetDualPointB, - offsetDualPointBArea, - contourGroupingMode, - contourIntersection, - solvePNPEnabled, - targetModel, - cornerDetectionStrategy, - cornerDetectionUseConvexHulls, 
- cornerDetectionExactSideCount, - cornerDetectionSideCount, - cornerDetectionAccuracyPercentage); - } + public AdvancedPipelineSettings() { + ledMode = true; + } + + public IntegerCouple hsvHue = new IntegerCouple(50, 180); + public IntegerCouple hsvSaturation = new IntegerCouple(50, 255); + public IntegerCouple hsvValue = new IntegerCouple(50, 255); + public boolean hueInverted = false; + + public boolean outputShouldDraw = true; + public boolean outputShowMultipleTargets = false; + + public DoubleCouple contourArea = new DoubleCouple(0.0, 100.0); + public DoubleCouple contourRatio = new DoubleCouple(0.0, 20.0); + public DoubleCouple contourFullness = new DoubleCouple(0.0, 100.0); + public int contourSpecklePercentage = 5; + + // the order in which to sort contours to find the most desirable + public ContourSortMode contourSortMode = ContourSortMode.Largest; + + // the edge (or not) of the target to consider the center point (Top, Bottom, Left, Right, + // Center) + public TargetOffsetPointEdge contourTargetOffsetPointEdge = TargetOffsetPointEdge.Center; + + // orientation of the target in terms of aspect ratio + public TargetOrientation contourTargetOrientation = TargetOrientation.Landscape; + + // the mode in which to offset target center point based on the camera being offset on the + // robot + // (None, Single Point, Dual Point) + public RobotOffsetPointMode offsetRobotOffsetMode = RobotOffsetPointMode.None; + + // the point set by the user in Single Point Offset mode (maybe double too? 
idr) + public Point offsetSinglePoint = new Point(); + + // the two values that define the line of the Dual Point Offset calibration (think y=mx+b) + public Point offsetDualPointA = new Point(); + public double offsetDualPointAArea = 0; + public Point offsetDualPointB = new Point(); + public double offsetDualPointBArea = 0; + + // how many contours to attempt to group (Single, Dual) + public ContourGroupingMode contourGroupingMode = ContourGroupingMode.Single; + + // the direction in which contours must intersect to be considered intersecting + public ContourIntersectionDirection contourIntersection = ContourIntersectionDirection.Up; + + // 3d settings + public boolean solvePNPEnabled = false; + public TargetModel targetModel = TargetModel.k2020HighGoalOuter; + + // Corner detection settings + public CornerDetectionPipe.DetectionStrategy cornerDetectionStrategy = + CornerDetectionPipe.DetectionStrategy.APPROX_POLY_DP_AND_EXTREME_CORNERS; + public boolean cornerDetectionUseConvexHulls = true; + public boolean cornerDetectionExactSideCount = false; + public int cornerDetectionSideCount = 4; + public double cornerDetectionAccuracyPercentage = 10; + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (!(o instanceof AdvancedPipelineSettings)) return false; + if (!super.equals(o)) return false; + AdvancedPipelineSettings that = (AdvancedPipelineSettings) o; + return outputShouldDraw == that.outputShouldDraw + && outputShowMultipleTargets == that.outputShowMultipleTargets + && contourSpecklePercentage == that.contourSpecklePercentage + && Double.compare(that.offsetDualPointAArea, offsetDualPointAArea) == 0 + && Double.compare(that.offsetDualPointBArea, offsetDualPointBArea) == 0 + && solvePNPEnabled == that.solvePNPEnabled + && cornerDetectionUseConvexHulls == that.cornerDetectionUseConvexHulls + && cornerDetectionExactSideCount == that.cornerDetectionExactSideCount + && cornerDetectionSideCount == that.cornerDetectionSideCount + && 
Double.compare(that.cornerDetectionAccuracyPercentage, cornerDetectionAccuracyPercentage) + == 0 + && Objects.equals(hsvHue, that.hsvHue) + && Objects.equals(hsvSaturation, that.hsvSaturation) + && Objects.equals(hsvValue, that.hsvValue) + && Objects.equals(hueInverted, that.hueInverted) + && Objects.equals(contourArea, that.contourArea) + && Objects.equals(contourRatio, that.contourRatio) + && Objects.equals(contourFullness, that.contourFullness) + && contourSortMode == that.contourSortMode + && contourTargetOffsetPointEdge == that.contourTargetOffsetPointEdge + && contourTargetOrientation == that.contourTargetOrientation + && offsetRobotOffsetMode == that.offsetRobotOffsetMode + && Objects.equals(offsetSinglePoint, that.offsetSinglePoint) + && Objects.equals(offsetDualPointA, that.offsetDualPointA) + && Objects.equals(offsetDualPointB, that.offsetDualPointB) + && contourGroupingMode == that.contourGroupingMode + && contourIntersection == that.contourIntersection + && Objects.equals(targetModel, that.targetModel) + && cornerDetectionStrategy == that.cornerDetectionStrategy; + } + + @Override + public int hashCode() { + return Objects.hash( + super.hashCode(), + hsvHue, + hsvSaturation, + hsvValue, + hueInverted, + outputShouldDraw, + outputShowMultipleTargets, + contourArea, + contourRatio, + contourFullness, + contourSpecklePercentage, + contourSortMode, + contourTargetOffsetPointEdge, + contourTargetOrientation, + offsetRobotOffsetMode, + offsetSinglePoint, + offsetDualPointA, + offsetDualPointAArea, + offsetDualPointB, + offsetDualPointBArea, + contourGroupingMode, + contourIntersection, + solvePNPEnabled, + targetModel, + cornerDetectionStrategy, + cornerDetectionUseConvexHulls, + cornerDetectionExactSideCount, + cornerDetectionSideCount, + cornerDetectionAccuracyPercentage); + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipeline/AprilTagPipeline.java b/photon-core/src/main/java/org/photonvision/vision/pipeline/AprilTagPipeline.java 
index 0bc91b5adb..d1837ff770 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipeline/AprilTagPipeline.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipeline/AprilTagPipeline.java @@ -46,197 +46,197 @@ import org.photonvision.vision.target.TrackedTarget.TargetCalculationParameters; public class AprilTagPipeline extends CVPipeline { - private final AprilTagDetectionPipe aprilTagDetectionPipe = new AprilTagDetectionPipe(); - private final AprilTagPoseEstimatorPipe singleTagPoseEstimatorPipe = - new AprilTagPoseEstimatorPipe(); - private final MultiTargetPNPPipe multiTagPNPPipe = new MultiTargetPNPPipe(); - private final CalculateFPSPipe calculateFPSPipe = new CalculateFPSPipe(); - - private static final FrameThresholdType PROCESSING_TYPE = FrameThresholdType.GREYSCALE; - - public AprilTagPipeline() { - super(PROCESSING_TYPE); - settings = new AprilTagPipelineSettings(); - } - - public AprilTagPipeline(AprilTagPipelineSettings settings) { - super(PROCESSING_TYPE); - this.settings = settings; - } - - @Override - protected void setPipeParamsImpl() { - // Sanitize thread count - not supported to have fewer than 1 threads - settings.threads = Math.max(1, settings.threads); - - // if (cameraQuirks.hasQuirk(CameraQuirk.PiCam) && LibCameraJNI.isSupported()) { - // // TODO: Picam grayscale - // LibCameraJNI.setRotation(settings.inputImageRotationMode.value); - // // LibCameraJNI.setShouldCopyColor(true); // need the color image to grayscale - // } - - // TODO (HACK): tag width is Fun because it really belongs in the "target model" - // We need the tag width for the JNI to figure out target pose, but we need a - // target model for the draw 3d targets pipeline to work... - - // for now, hard code tag width based on enum value - double tagWidth = Units.inchesToMeters(3 * 2); // for 6in 16h5 tag. 
- - // AprilTagDetectorParams aprilTagDetectionParams = - // new AprilTagDetectorParams( - // settings.tagFamily, - // settings.decimate, - // settings.blur, - // settings.threads, - // settings.debug, - // settings.refineEdges); - - var config = new AprilTagDetector.Config(); - config.numThreads = settings.threads; - config.refineEdges = settings.refineEdges; - config.quadSigma = (float) settings.blur; - config.quadDecimate = settings.decimate; - aprilTagDetectionPipe.setParams(new AprilTagDetectionPipeParams(settings.tagFamily, config)); - - if (frameStaticProperties.cameraCalibration != null) { - var cameraMatrix = frameStaticProperties.cameraCalibration.getCameraIntrinsicsMat(); - if (cameraMatrix != null && cameraMatrix.rows() > 0) { - var cx = cameraMatrix.get(0, 2)[0]; - var cy = cameraMatrix.get(1, 2)[0]; - var fx = cameraMatrix.get(0, 0)[0]; - var fy = cameraMatrix.get(1, 1)[0]; - - singleTagPoseEstimatorPipe.setParams( - new AprilTagPoseEstimatorPipeParams( - new Config(tagWidth, fx, fy, cx, cy), - frameStaticProperties.cameraCalibration, - settings.numIterations)); - - // TODO global state ew - var atfl = ConfigManager.getInstance().getConfig().getApriltagFieldLayout(); - multiTagPNPPipe.setParams( - new MultiTargetPNPPipeParams(frameStaticProperties.cameraCalibration, atfl)); - } - } - } + private final AprilTagDetectionPipe aprilTagDetectionPipe = new AprilTagDetectionPipe(); + private final AprilTagPoseEstimatorPipe singleTagPoseEstimatorPipe = + new AprilTagPoseEstimatorPipe(); + private final MultiTargetPNPPipe multiTagPNPPipe = new MultiTargetPNPPipe(); + private final CalculateFPSPipe calculateFPSPipe = new CalculateFPSPipe(); - @Override - protected CVPipelineResult process(Frame frame, AprilTagPipelineSettings settings) { - long sumPipeNanosElapsed = 0L; + private static final FrameThresholdType PROCESSING_TYPE = FrameThresholdType.GREYSCALE; - if (frame.type != FrameThresholdType.GREYSCALE) { - // TODO so all cameras should give us GREYSCALE -- 
how should we handle if not? - // Right now, we just return nothing - return new CVPipelineResult(0, 0, List.of(), frame); + public AprilTagPipeline() { + super(PROCESSING_TYPE); + settings = new AprilTagPipelineSettings(); } - CVPipeResult> tagDetectionPipeResult; - tagDetectionPipeResult = aprilTagDetectionPipe.run(frame.processedImage); - sumPipeNanosElapsed += tagDetectionPipeResult.nanosElapsed; - - List detections = tagDetectionPipeResult.output; - List usedDetections = new ArrayList<>(); - List targetList = new ArrayList<>(); - - // Filter out detections based on pipeline settings - for (AprilTagDetection detection : detections) { - // TODO this should be in a pipe, not in the top level here (Matt) - if (detection.getDecisionMargin() < settings.decisionMargin) continue; - if (detection.getHamming() > settings.hammingDist) continue; - - usedDetections.add(detection); - - // Populate target list for multitag - // (TODO: Address circular dependencies. Multitag only requires corners and IDs, this should - // not be necessary.) 
- TrackedTarget target = - new TrackedTarget( - detection, - null, - new TargetCalculationParameters( - false, null, null, null, null, frameStaticProperties)); - - targetList.add(target); + public AprilTagPipeline(AprilTagPipelineSettings settings) { + super(PROCESSING_TYPE); + this.settings = settings; } - // Do multi-tag pose estimation - MultiTargetPNPResults multiTagResult = new MultiTargetPNPResults(); - if (settings.solvePNPEnabled && settings.doMultiTarget) { - var multiTagOutput = multiTagPNPPipe.run(targetList); - sumPipeNanosElapsed += multiTagOutput.nanosElapsed; - multiTagResult = multiTagOutput.output; + @Override + protected void setPipeParamsImpl() { + // Sanitize thread count - not supported to have fewer than 1 threads + settings.threads = Math.max(1, settings.threads); + + // if (cameraQuirks.hasQuirk(CameraQuirk.PiCam) && LibCameraJNI.isSupported()) { + // // TODO: Picam grayscale + // LibCameraJNI.setRotation(settings.inputImageRotationMode.value); + // // LibCameraJNI.setShouldCopyColor(true); // need the color image to grayscale + // } + + // TODO (HACK): tag width is Fun because it really belongs in the "target model" + // We need the tag width for the JNI to figure out target pose, but we need a + // target model for the draw 3d targets pipeline to work... + + // for now, hard code tag width based on enum value + double tagWidth = Units.inchesToMeters(3 * 2); // for 6in 16h5 tag. 
+ + // AprilTagDetectorParams aprilTagDetectionParams = + // new AprilTagDetectorParams( + // settings.tagFamily, + // settings.decimate, + // settings.blur, + // settings.threads, + // settings.debug, + // settings.refineEdges); + + var config = new AprilTagDetector.Config(); + config.numThreads = settings.threads; + config.refineEdges = settings.refineEdges; + config.quadSigma = (float) settings.blur; + config.quadDecimate = settings.decimate; + aprilTagDetectionPipe.setParams(new AprilTagDetectionPipeParams(settings.tagFamily, config)); + + if (frameStaticProperties.cameraCalibration != null) { + var cameraMatrix = frameStaticProperties.cameraCalibration.getCameraIntrinsicsMat(); + if (cameraMatrix != null && cameraMatrix.rows() > 0) { + var cx = cameraMatrix.get(0, 2)[0]; + var cy = cameraMatrix.get(1, 2)[0]; + var fx = cameraMatrix.get(0, 0)[0]; + var fy = cameraMatrix.get(1, 1)[0]; + + singleTagPoseEstimatorPipe.setParams( + new AprilTagPoseEstimatorPipeParams( + new Config(tagWidth, fx, fy, cx, cy), + frameStaticProperties.cameraCalibration, + settings.numIterations)); + + // TODO global state ew + var atfl = ConfigManager.getInstance().getConfig().getApriltagFieldLayout(); + multiTagPNPPipe.setParams( + new MultiTargetPNPPipeParams(frameStaticProperties.cameraCalibration, atfl)); + } + } } - // Do single-tag pose estimation - if (settings.solvePNPEnabled) { - // Clear target list that was used for multitag so we can add target transforms - targetList.clear(); - // TODO global state again ew - var atfl = ConfigManager.getInstance().getConfig().getApriltagFieldLayout(); - - for (AprilTagDetection detection : usedDetections) { - AprilTagPoseEstimate tagPoseEstimate = null; - // Do single-tag estimation when "always enabled" or if a tag was not used for multitag - if (settings.doSingleTargetAlways - || !multiTagResult.fiducialIDsUsed.contains(Integer.valueOf(detection.getId()))) { - var poseResult = singleTagPoseEstimatorPipe.run(detection); - 
sumPipeNanosElapsed += poseResult.nanosElapsed; - tagPoseEstimate = poseResult.output; + @Override + protected CVPipelineResult process(Frame frame, AprilTagPipelineSettings settings) { + long sumPipeNanosElapsed = 0L; + + if (frame.type != FrameThresholdType.GREYSCALE) { + // TODO so all cameras should give us GREYSCALE -- how should we handle if not? + // Right now, we just return nothing + return new CVPipelineResult(0, 0, List.of(), frame); } - // If single-tag estimation was not done, this is a multi-target tag from the layout - if (tagPoseEstimate == null) { - // compute this tag's camera-to-tag transform using the multitag result - var tagPose = atfl.getTagPose(detection.getId()); - if (tagPose.isPresent()) { - var camToTag = - new Transform3d( - new Pose3d().plus(multiTagResult.estimatedPose.best), tagPose.get()); - // match expected AprilTag coordinate system - // TODO cleanup coordinate systems in wpilib 2024 - var apriltagTrl = - CoordinateSystem.convert( - camToTag.getTranslation(), CoordinateSystem.NWU(), CoordinateSystem.EDN()); - var apriltagRot = - CoordinateSystem.convert( - new Rotation3d(), CoordinateSystem.EDN(), CoordinateSystem.NWU()) - .plus( - CoordinateSystem.convert( - camToTag.getRotation(), - CoordinateSystem.NWU(), - CoordinateSystem.EDN())); - apriltagRot = new Rotation3d(0, Math.PI, 0).plus(apriltagRot); - camToTag = new Transform3d(apriltagTrl, apriltagRot); - tagPoseEstimate = new AprilTagPoseEstimate(camToTag, camToTag, 0, 0); - } + CVPipeResult> tagDetectionPipeResult; + tagDetectionPipeResult = aprilTagDetectionPipe.run(frame.processedImage); + sumPipeNanosElapsed += tagDetectionPipeResult.nanosElapsed; + + List detections = tagDetectionPipeResult.output; + List usedDetections = new ArrayList<>(); + List targetList = new ArrayList<>(); + + // Filter out detections based on pipeline settings + for (AprilTagDetection detection : detections) { + // TODO this should be in a pipe, not in the top level here (Matt) + if 
(detection.getDecisionMargin() < settings.decisionMargin) continue; + if (detection.getHamming() > settings.hammingDist) continue; + + usedDetections.add(detection); + + // Populate target list for multitag + // (TODO: Address circular dependencies. Multitag only requires corners and IDs, this should + // not be necessary.) + TrackedTarget target = + new TrackedTarget( + detection, + null, + new TargetCalculationParameters( + false, null, null, null, null, frameStaticProperties)); + + targetList.add(target); } - // populate the target list - // Challenge here is that TrackedTarget functions with OpenCV Contour - TrackedTarget target = - new TrackedTarget( - detection, - tagPoseEstimate, - new TargetCalculationParameters( - false, null, null, null, null, frameStaticProperties)); - - var correctedBestPose = - MathUtils.convertOpenCVtoPhotonTransform(target.getBestCameraToTarget3d()); - var correctedAltPose = - MathUtils.convertOpenCVtoPhotonTransform(target.getAltCameraToTarget3d()); - - target.setBestCameraToTarget3d( - new Transform3d(correctedBestPose.getTranslation(), correctedBestPose.getRotation())); - target.setAltCameraToTarget3d( - new Transform3d(correctedAltPose.getTranslation(), correctedAltPose.getRotation())); - - targetList.add(target); - } - } + // Do multi-tag pose estimation + MultiTargetPNPResults multiTagResult = new MultiTargetPNPResults(); + if (settings.solvePNPEnabled && settings.doMultiTarget) { + var multiTagOutput = multiTagPNPPipe.run(targetList); + sumPipeNanosElapsed += multiTagOutput.nanosElapsed; + multiTagResult = multiTagOutput.output; + } + + // Do single-tag pose estimation + if (settings.solvePNPEnabled) { + // Clear target list that was used for multitag so we can add target transforms + targetList.clear(); + // TODO global state again ew + var atfl = ConfigManager.getInstance().getConfig().getApriltagFieldLayout(); + + for (AprilTagDetection detection : usedDetections) { + AprilTagPoseEstimate tagPoseEstimate = null; + // Do 
single-tag estimation when "always enabled" or if a tag was not used for multitag + if (settings.doSingleTargetAlways + || !multiTagResult.fiducialIDsUsed.contains(Integer.valueOf(detection.getId()))) { + var poseResult = singleTagPoseEstimatorPipe.run(detection); + sumPipeNanosElapsed += poseResult.nanosElapsed; + tagPoseEstimate = poseResult.output; + } + + // If single-tag estimation was not done, this is a multi-target tag from the layout + if (tagPoseEstimate == null) { + // compute this tag's camera-to-tag transform using the multitag result + var tagPose = atfl.getTagPose(detection.getId()); + if (tagPose.isPresent()) { + var camToTag = + new Transform3d( + new Pose3d().plus(multiTagResult.estimatedPose.best), tagPose.get()); + // match expected AprilTag coordinate system + // TODO cleanup coordinate systems in wpilib 2024 + var apriltagTrl = + CoordinateSystem.convert( + camToTag.getTranslation(), CoordinateSystem.NWU(), CoordinateSystem.EDN()); + var apriltagRot = + CoordinateSystem.convert( + new Rotation3d(), CoordinateSystem.EDN(), CoordinateSystem.NWU()) + .plus( + CoordinateSystem.convert( + camToTag.getRotation(), + CoordinateSystem.NWU(), + CoordinateSystem.EDN())); + apriltagRot = new Rotation3d(0, Math.PI, 0).plus(apriltagRot); + camToTag = new Transform3d(apriltagTrl, apriltagRot); + tagPoseEstimate = new AprilTagPoseEstimate(camToTag, camToTag, 0, 0); + } + } + + // populate the target list + // Challenge here is that TrackedTarget functions with OpenCV Contour + TrackedTarget target = + new TrackedTarget( + detection, + tagPoseEstimate, + new TargetCalculationParameters( + false, null, null, null, null, frameStaticProperties)); + + var correctedBestPose = + MathUtils.convertOpenCVtoPhotonTransform(target.getBestCameraToTarget3d()); + var correctedAltPose = + MathUtils.convertOpenCVtoPhotonTransform(target.getAltCameraToTarget3d()); + + target.setBestCameraToTarget3d( + new Transform3d(correctedBestPose.getTranslation(), 
correctedBestPose.getRotation())); + target.setAltCameraToTarget3d( + new Transform3d(correctedAltPose.getTranslation(), correctedAltPose.getRotation())); + + targetList.add(target); + } + } - var fpsResult = calculateFPSPipe.run(null); - var fps = fpsResult.output; + var fpsResult = calculateFPSPipe.run(null); + var fps = fpsResult.output; - return new CVPipelineResult(sumPipeNanosElapsed, fps, targetList, multiTagResult, frame); - } + return new CVPipelineResult(sumPipeNanosElapsed, fps, targetList, multiTagResult, frame); + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipeline/AprilTagPipelineSettings.java b/photon-core/src/main/java/org/photonvision/vision/pipeline/AprilTagPipelineSettings.java index 3e5038db0e..47faf82e29 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipeline/AprilTagPipelineSettings.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipeline/AprilTagPipelineSettings.java @@ -23,67 +23,67 @@ @JsonTypeName("AprilTagPipelineSettings") public class AprilTagPipelineSettings extends AdvancedPipelineSettings { - public AprilTagFamily tagFamily = AprilTagFamily.kTag16h5; - public int decimate = 1; - public double blur = 0; - public int threads = 4; // Multiple threads seems to be better performance on most platforms - public boolean debug = false; - public boolean refineEdges = true; - public int numIterations = 40; - public int hammingDist = 0; - public int decisionMargin = 35; - public boolean doMultiTarget = true; - public boolean doSingleTargetAlways = false; + public AprilTagFamily tagFamily = AprilTagFamily.kTag16h5; + public int decimate = 1; + public double blur = 0; + public int threads = 4; // Multiple threads seems to be better performance on most platforms + public boolean debug = false; + public boolean refineEdges = true; + public int numIterations = 40; + public int hammingDist = 0; + public int decisionMargin = 35; + public boolean doMultiTarget = true; + public boolean 
doSingleTargetAlways = false; - // 3d settings + // 3d settings - public AprilTagPipelineSettings() { - super(); - pipelineType = PipelineType.AprilTag; - outputShowMultipleTargets = true; - targetModel = TargetModel.k6in_16h5; - cameraExposure = 20; - cameraAutoExposure = false; - ledMode = false; - } + public AprilTagPipelineSettings() { + super(); + pipelineType = PipelineType.AprilTag; + outputShowMultipleTargets = true; + targetModel = TargetModel.k6in_16h5; + cameraExposure = 20; + cameraAutoExposure = false; + ledMode = false; + } - @Override - public int hashCode() { - final int prime = 31; - int result = super.hashCode(); - result = prime * result + ((tagFamily == null) ? 0 : tagFamily.hashCode()); - result = prime * result + decimate; - long temp; - temp = Double.doubleToLongBits(blur); - result = prime * result + (int) (temp ^ (temp >>> 32)); - result = prime * result + threads; - result = prime * result + (debug ? 1231 : 1237); - result = prime * result + (refineEdges ? 1231 : 1237); - result = prime * result + numIterations; - result = prime * result + hammingDist; - result = prime * result + decisionMargin; - result = prime * result + (doMultiTarget ? 1231 : 1237); - result = prime * result + (doSingleTargetAlways ? 1231 : 1237); - return result; - } + @Override + public int hashCode() { + final int prime = 31; + int result = super.hashCode(); + result = prime * result + ((tagFamily == null) ? 0 : tagFamily.hashCode()); + result = prime * result + decimate; + long temp; + temp = Double.doubleToLongBits(blur); + result = prime * result + (int) (temp ^ (temp >>> 32)); + result = prime * result + threads; + result = prime * result + (debug ? 1231 : 1237); + result = prime * result + (refineEdges ? 1231 : 1237); + result = prime * result + numIterations; + result = prime * result + hammingDist; + result = prime * result + decisionMargin; + result = prime * result + (doMultiTarget ? 1231 : 1237); + result = prime * result + (doSingleTargetAlways ? 
1231 : 1237); + return result; + } - @Override - public boolean equals(Object obj) { - if (this == obj) return true; - if (!super.equals(obj)) return false; - if (getClass() != obj.getClass()) return false; - AprilTagPipelineSettings other = (AprilTagPipelineSettings) obj; - if (tagFamily != other.tagFamily) return false; - if (decimate != other.decimate) return false; - if (Double.doubleToLongBits(blur) != Double.doubleToLongBits(other.blur)) return false; - if (threads != other.threads) return false; - if (debug != other.debug) return false; - if (refineEdges != other.refineEdges) return false; - if (numIterations != other.numIterations) return false; - if (hammingDist != other.hammingDist) return false; - if (decisionMargin != other.decisionMargin) return false; - if (doMultiTarget != other.doMultiTarget) return false; - if (doSingleTargetAlways != other.doSingleTargetAlways) return false; - return true; - } + @Override + public boolean equals(Object obj) { + if (this == obj) return true; + if (!super.equals(obj)) return false; + if (getClass() != obj.getClass()) return false; + AprilTagPipelineSettings other = (AprilTagPipelineSettings) obj; + if (tagFamily != other.tagFamily) return false; + if (decimate != other.decimate) return false; + if (Double.doubleToLongBits(blur) != Double.doubleToLongBits(other.blur)) return false; + if (threads != other.threads) return false; + if (debug != other.debug) return false; + if (refineEdges != other.refineEdges) return false; + if (numIterations != other.numIterations) return false; + if (hammingDist != other.hammingDist) return false; + if (decisionMargin != other.decisionMargin) return false; + if (doMultiTarget != other.doMultiTarget) return false; + if (doSingleTargetAlways != other.doSingleTargetAlways) return false; + return true; + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipeline/ArucoPipeline.java b/photon-core/src/main/java/org/photonvision/vision/pipeline/ArucoPipeline.java index 
65dbaf10d4..26a5c60151 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipeline/ArucoPipeline.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipeline/ArucoPipeline.java @@ -49,79 +49,79 @@ import org.photonvision.vision.target.TrackedTarget.TargetCalculationParameters; public class ArucoPipeline extends CVPipeline { - private final RotateImagePipe rotateImagePipe = new RotateImagePipe(); - private final GrayscalePipe grayscalePipe = new GrayscalePipe(); + private final RotateImagePipe rotateImagePipe = new RotateImagePipe(); + private final GrayscalePipe grayscalePipe = new GrayscalePipe(); - private final ArucoDetectionPipe arucoDetectionPipe = new ArucoDetectionPipe(); - private final CalculateFPSPipe calculateFPSPipe = new CalculateFPSPipe(); + private final ArucoDetectionPipe arucoDetectionPipe = new ArucoDetectionPipe(); + private final CalculateFPSPipe calculateFPSPipe = new CalculateFPSPipe(); - ArucoDetectorParams m_arucoDetectorParams = new ArucoDetectorParams(); + ArucoDetectorParams m_arucoDetectorParams = new ArucoDetectorParams(); - public ArucoPipeline() { - super(FrameThresholdType.GREYSCALE); - settings = new ArucoPipelineSettings(); - } + public ArucoPipeline() { + super(FrameThresholdType.GREYSCALE); + settings = new ArucoPipelineSettings(); + } - public ArucoPipeline(ArucoPipelineSettings settings) { - super(FrameThresholdType.GREYSCALE); - this.settings = settings; - } + public ArucoPipeline(ArucoPipelineSettings settings) { + super(FrameThresholdType.GREYSCALE); + this.settings = settings; + } - @Override - protected void setPipeParamsImpl() { - // Sanitize thread count - not supported to have fewer than 1 threads - settings.threads = Math.max(1, settings.threads); + @Override + protected void setPipeParamsImpl() { + // Sanitize thread count - not supported to have fewer than 1 threads + settings.threads = Math.max(1, settings.threads); - RotateImagePipe.RotateImageParams rotateImageParams = - new 
RotateImagePipe.RotateImageParams(settings.inputImageRotationMode); - rotateImagePipe.setParams(rotateImageParams); + RotateImagePipe.RotateImageParams rotateImageParams = + new RotateImagePipe.RotateImageParams(settings.inputImageRotationMode); + rotateImagePipe.setParams(rotateImageParams); - m_arucoDetectorParams.setDecimation((float) settings.decimate); - m_arucoDetectorParams.setCornerRefinementMaxIterations(settings.numIterations); - m_arucoDetectorParams.setCornerAccuracy(settings.cornerAccuracy); + m_arucoDetectorParams.setDecimation((float) settings.decimate); + m_arucoDetectorParams.setCornerRefinementMaxIterations(settings.numIterations); + m_arucoDetectorParams.setCornerAccuracy(settings.cornerAccuracy); - arucoDetectionPipe.setParams( - new ArucoDetectionPipeParams( - m_arucoDetectorParams.getDetector(), frameStaticProperties.cameraCalibration)); - } + arucoDetectionPipe.setParams( + new ArucoDetectionPipeParams( + m_arucoDetectorParams.getDetector(), frameStaticProperties.cameraCalibration)); + } - @Override - protected CVPipelineResult process(Frame frame, ArucoPipelineSettings settings) { - long sumPipeNanosElapsed = 0L; - Mat rawInputMat; - rawInputMat = frame.colorImage.getMat(); + @Override + protected CVPipelineResult process(Frame frame, ArucoPipelineSettings settings) { + long sumPipeNanosElapsed = 0L; + Mat rawInputMat; + rawInputMat = frame.colorImage.getMat(); - List targetList; - CVPipeResult> tagDetectionPipeResult; + List targetList; + CVPipeResult> tagDetectionPipeResult; - if (rawInputMat.empty()) { - return new CVPipelineResult(sumPipeNanosElapsed, 0, List.of(), frame); - } + if (rawInputMat.empty()) { + return new CVPipelineResult(sumPipeNanosElapsed, 0, List.of(), frame); + } - tagDetectionPipeResult = arucoDetectionPipe.run(rawInputMat); - targetList = new ArrayList<>(); - for (ArucoDetectionResult detection : tagDetectionPipeResult.output) { - // TODO this should be in a pipe, not in the top level here (Matt) + 
tagDetectionPipeResult = arucoDetectionPipe.run(rawInputMat); + targetList = new ArrayList<>(); + for (ArucoDetectionResult detection : tagDetectionPipeResult.output) { + // TODO this should be in a pipe, not in the top level here (Matt) - // populate the target list - // Challenge here is that TrackedTarget functions with OpenCV Contour - TrackedTarget target = - new TrackedTarget( - detection, - new TargetCalculationParameters( - false, null, null, null, null, frameStaticProperties)); + // populate the target list + // Challenge here is that TrackedTarget functions with OpenCV Contour + TrackedTarget target = + new TrackedTarget( + detection, + new TargetCalculationParameters( + false, null, null, null, null, frameStaticProperties)); - var correctedBestPose = target.getBestCameraToTarget3d(); + var correctedBestPose = target.getBestCameraToTarget3d(); - target.setBestCameraToTarget3d( - new Transform3d(correctedBestPose.getTranslation(), correctedBestPose.getRotation())); + target.setBestCameraToTarget3d( + new Transform3d(correctedBestPose.getTranslation(), correctedBestPose.getRotation())); - targetList.add(target); - } + targetList.add(target); + } - var fpsResult = calculateFPSPipe.run(null); - var fps = fpsResult.output; + var fpsResult = calculateFPSPipe.run(null); + var fps = fpsResult.output; - return new CVPipelineResult(sumPipeNanosElapsed, fps, targetList, frame); - } + return new CVPipelineResult(sumPipeNanosElapsed, fps, targetList, frame); + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipeline/ArucoPipelineSettings.java b/photon-core/src/main/java/org/photonvision/vision/pipeline/ArucoPipelineSettings.java index 103a459634..265a8d202e 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipeline/ArucoPipelineSettings.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipeline/ArucoPipelineSettings.java @@ -22,21 +22,21 @@ @JsonTypeName("ArucoPipelineSettings") public class ArucoPipelineSettings extends 
AdvancedPipelineSettings { - public double decimate = 1; - public int threads = 2; - public int numIterations = 100; - public double cornerAccuracy = 25.0; - public boolean useAruco3 = true; + public double decimate = 1; + public int threads = 2; + public int numIterations = 100; + public double cornerAccuracy = 25.0; + public boolean useAruco3 = true; - // 3d settings + // 3d settings - public ArucoPipelineSettings() { - super(); - pipelineType = PipelineType.Aruco; - outputShowMultipleTargets = true; - targetModel = TargetModel.kAruco6in_16h5; - cameraExposure = -1; - cameraAutoExposure = true; - ledMode = false; - } + public ArucoPipelineSettings() { + super(); + pipelineType = PipelineType.Aruco; + outputShowMultipleTargets = true; + targetModel = TargetModel.kAruco6in_16h5; + cameraExposure = -1; + cameraAutoExposure = true; + ledMode = false; + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipeline/CVPipeline.java b/photon-core/src/main/java/org/photonvision/vision/pipeline/CVPipeline.java index d1621a63e5..3df5b268f7 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipeline/CVPipeline.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipeline/CVPipeline.java @@ -24,55 +24,55 @@ import org.photonvision.vision.pipeline.result.CVPipelineResult; public abstract class CVPipeline { - protected S settings; - protected FrameStaticProperties frameStaticProperties; - protected QuirkyCamera cameraQuirks; + protected S settings; + protected FrameStaticProperties frameStaticProperties; + protected QuirkyCamera cameraQuirks; - private final FrameThresholdType thresholdType; + private final FrameThresholdType thresholdType; - public CVPipeline(FrameThresholdType thresholdType) { - this.thresholdType = thresholdType; - } - - public FrameThresholdType getThresholdType() { - return thresholdType; - } + public CVPipeline(FrameThresholdType thresholdType) { + this.thresholdType = thresholdType; + } - protected void setPipeParams( - 
FrameStaticProperties frameStaticProperties, S settings, QuirkyCamera cameraQuirks) { - this.settings = settings; - this.frameStaticProperties = frameStaticProperties; - this.cameraQuirks = cameraQuirks; + public FrameThresholdType getThresholdType() { + return thresholdType; + } - setPipeParamsImpl(); - } + protected void setPipeParams( + FrameStaticProperties frameStaticProperties, S settings, QuirkyCamera cameraQuirks) { + this.settings = settings; + this.frameStaticProperties = frameStaticProperties; + this.cameraQuirks = cameraQuirks; - protected abstract void setPipeParamsImpl(); + setPipeParamsImpl(); + } - protected abstract R process(Frame frame, S settings); + protected abstract void setPipeParamsImpl(); - public S getSettings() { - return settings; - } + protected abstract R process(Frame frame, S settings); - public void setSettings(S s) { - this.settings = s; - } + public S getSettings() { + return settings; + } - public R run(Frame frame, QuirkyCamera cameraQuirks) { - if (settings == null) { - throw new RuntimeException("No settings provided for pipeline!"); + public void setSettings(S s) { + this.settings = s; } - setPipeParams(frame.frameStaticProperties, settings, cameraQuirks); - // if (frame.image.getMat().empty()) { - // //noinspection unchecked - // return (R) new CVPipelineResult(0, 0, List.of(), frame); - // } - R result = process(frame, settings); + public R run(Frame frame, QuirkyCamera cameraQuirks) { + if (settings == null) { + throw new RuntimeException("No settings provided for pipeline!"); + } + setPipeParams(frame.frameStaticProperties, settings, cameraQuirks); + + // if (frame.image.getMat().empty()) { + // //noinspection unchecked + // return (R) new CVPipelineResult(0, 0, List.of(), frame); + // } + R result = process(frame, settings); - result.setImageCaptureTimestampNanos(frame.timestampNanos); + result.setImageCaptureTimestampNanos(frame.timestampNanos); - return result; - } + return result; + } } diff --git 
a/photon-core/src/main/java/org/photonvision/vision/pipeline/CVPipelineSettings.java b/photon-core/src/main/java/org/photonvision/vision/pipeline/CVPipelineSettings.java index 60b259af25..f836e30999 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipeline/CVPipelineSettings.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipeline/CVPipelineSettings.java @@ -24,119 +24,119 @@ import org.photonvision.vision.opencv.ImageRotationMode; @JsonTypeInfo( - use = JsonTypeInfo.Id.NAME, - include = JsonTypeInfo.As.WRAPPER_ARRAY, - property = "type") + use = JsonTypeInfo.Id.NAME, + include = JsonTypeInfo.As.WRAPPER_ARRAY, + property = "type") @JsonSubTypes({ - @JsonSubTypes.Type(value = ColoredShapePipelineSettings.class), - @JsonSubTypes.Type(value = ReflectivePipelineSettings.class), - @JsonSubTypes.Type(value = DriverModePipelineSettings.class), - @JsonSubTypes.Type(value = AprilTagPipelineSettings.class), - @JsonSubTypes.Type(value = ArucoPipelineSettings.class) + @JsonSubTypes.Type(value = ColoredShapePipelineSettings.class), + @JsonSubTypes.Type(value = ReflectivePipelineSettings.class), + @JsonSubTypes.Type(value = DriverModePipelineSettings.class), + @JsonSubTypes.Type(value = AprilTagPipelineSettings.class), + @JsonSubTypes.Type(value = ArucoPipelineSettings.class) }) public class CVPipelineSettings implements Cloneable { - public int pipelineIndex = 0; - public PipelineType pipelineType = PipelineType.DriverMode; - public ImageRotationMode inputImageRotationMode = ImageRotationMode.DEG_0; - public String pipelineNickname = "New Pipeline"; - public boolean cameraAutoExposure = false; - // manual exposure only used if cameraAutoExposure is false - public double cameraExposure = 20; - public int cameraBrightness = 50; - // Currently only used by a few cameras (notably the zero-copy Pi Camera driver) with the Gain - // quirk - public int cameraGain = 75; - // Currently only used by the zero-copy Pi Camera driver - public int cameraRedGain = 11; 
- public int cameraBlueGain = 20; - public int cameraVideoModeIndex = 0; - public FrameDivisor streamingFrameDivisor = FrameDivisor.NONE; - public boolean ledMode = false; - public boolean inputShouldShow = false; - public boolean outputShouldShow = true; + public int pipelineIndex = 0; + public PipelineType pipelineType = PipelineType.DriverMode; + public ImageRotationMode inputImageRotationMode = ImageRotationMode.DEG_0; + public String pipelineNickname = "New Pipeline"; + public boolean cameraAutoExposure = false; + // manual exposure only used if cameraAutoExposure is false + public double cameraExposure = 20; + public int cameraBrightness = 50; + // Currently only used by a few cameras (notably the zero-copy Pi Camera driver) with the Gain + // quirk + public int cameraGain = 75; + // Currently only used by the zero-copy Pi Camera driver + public int cameraRedGain = 11; + public int cameraBlueGain = 20; + public int cameraVideoModeIndex = 0; + public FrameDivisor streamingFrameDivisor = FrameDivisor.NONE; + public boolean ledMode = false; + public boolean inputShouldShow = false; + public boolean outputShouldShow = true; - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - CVPipelineSettings that = (CVPipelineSettings) o; - return pipelineIndex == that.pipelineIndex - && Double.compare(that.cameraExposure, cameraExposure) == 0 - && Double.compare(that.cameraBrightness, cameraBrightness) == 0 - && Double.compare(that.cameraGain, cameraGain) == 0 - && Double.compare(that.cameraRedGain, cameraRedGain) == 0 - && Double.compare(that.cameraBlueGain, cameraBlueGain) == 0 - && cameraVideoModeIndex == that.cameraVideoModeIndex - && ledMode == that.ledMode - && pipelineType == that.pipelineType - && inputImageRotationMode == that.inputImageRotationMode - && pipelineNickname.equals(that.pipelineNickname) - && streamingFrameDivisor == that.streamingFrameDivisor - && inputShouldShow == 
that.inputShouldShow - && outputShouldShow == that.outputShouldShow; - } + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CVPipelineSettings that = (CVPipelineSettings) o; + return pipelineIndex == that.pipelineIndex + && Double.compare(that.cameraExposure, cameraExposure) == 0 + && Double.compare(that.cameraBrightness, cameraBrightness) == 0 + && Double.compare(that.cameraGain, cameraGain) == 0 + && Double.compare(that.cameraRedGain, cameraRedGain) == 0 + && Double.compare(that.cameraBlueGain, cameraBlueGain) == 0 + && cameraVideoModeIndex == that.cameraVideoModeIndex + && ledMode == that.ledMode + && pipelineType == that.pipelineType + && inputImageRotationMode == that.inputImageRotationMode + && pipelineNickname.equals(that.pipelineNickname) + && streamingFrameDivisor == that.streamingFrameDivisor + && inputShouldShow == that.inputShouldShow + && outputShouldShow == that.outputShouldShow; + } - @Override - public int hashCode() { - return Objects.hash( - pipelineIndex, - pipelineType, - inputImageRotationMode, - pipelineNickname, - cameraExposure, - cameraBrightness, - cameraGain, - cameraRedGain, - cameraBlueGain, - cameraVideoModeIndex, - streamingFrameDivisor, - ledMode, - inputShouldShow, - outputShouldShow); - } + @Override + public int hashCode() { + return Objects.hash( + pipelineIndex, + pipelineType, + inputImageRotationMode, + pipelineNickname, + cameraExposure, + cameraBrightness, + cameraGain, + cameraRedGain, + cameraBlueGain, + cameraVideoModeIndex, + streamingFrameDivisor, + ledMode, + inputShouldShow, + outputShouldShow); + } - @Override - public CVPipelineSettings clone() { - try { - return (CVPipelineSettings) super.clone(); - } catch (CloneNotSupportedException e) { - e.printStackTrace(); - return null; + @Override + public CVPipelineSettings clone() { + try { + return (CVPipelineSettings) super.clone(); + } catch (CloneNotSupportedException e) { + 
e.printStackTrace(); + return null; + } } - } - @Override - public String toString() { - return "CVPipelineSettings{" - + "pipelineIndex=" - + pipelineIndex - + ", pipelineType=" - + pipelineType - + ", inputImageRotationMode=" - + inputImageRotationMode - + ", pipelineNickname='" - + pipelineNickname - + '\'' - + ", cameraExposure=" - + cameraExposure - + ", cameraBrightness=" - + cameraBrightness - + ", cameraGain=" - + cameraGain - + ", cameraRedGain=" - + cameraRedGain - + ", cameraBlueGain=" - + cameraBlueGain - + ", cameraVideoModeIndex=" - + cameraVideoModeIndex - + ", streamingFrameDivisor=" - + streamingFrameDivisor - + ", ledMode=" - + ledMode - + ", inputShouldShow=" - + inputShouldShow - + ", outputShouldShow=" - + outputShouldShow - + '}'; - } + @Override + public String toString() { + return "CVPipelineSettings{" + + "pipelineIndex=" + + pipelineIndex + + ", pipelineType=" + + pipelineType + + ", inputImageRotationMode=" + + inputImageRotationMode + + ", pipelineNickname='" + + pipelineNickname + + '\'' + + ", cameraExposure=" + + cameraExposure + + ", cameraBrightness=" + + cameraBrightness + + ", cameraGain=" + + cameraGain + + ", cameraRedGain=" + + cameraRedGain + + ", cameraBlueGain=" + + cameraBlueGain + + ", cameraVideoModeIndex=" + + cameraVideoModeIndex + + ", streamingFrameDivisor=" + + streamingFrameDivisor + + ", ledMode=" + + ledMode + + ", inputShouldShow=" + + inputShouldShow + + ", outputShouldShow=" + + outputShouldShow + + '}'; + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipeline/Calibrate3dPipeline.java b/photon-core/src/main/java/org/photonvision/vision/pipeline/Calibrate3dPipeline.java index 933a269041..95008e77f9 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipeline/Calibrate3dPipeline.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipeline/Calibrate3dPipeline.java @@ -46,191 +46,191 @@ import org.photonvision.vision.pipeline.result.CVPipelineResult; public class 
Calibrate3dPipeline - extends CVPipeline { - // For logging - private static final Logger logger = new Logger(Calibrate3dPipeline.class, LogGroup.General); - - // Only 2 pipes needed, one for finding the board corners and one for actually calibrating - private final FindBoardCornersPipe findBoardCornersPipe = new FindBoardCornersPipe(); - private final Calibrate3dPipe calibrate3dPipe = new Calibrate3dPipe(); - private final CalculateFPSPipe calculateFPSPipe = new CalculateFPSPipe(); - - // Getter methods have been set for calibrate and takeSnapshot - private boolean takeSnapshot = false; - - // Output of the corners - final List> foundCornersList; - - /// Output of the calibration, getter method is set for this. - private CVPipeResult calibrationOutput; - - private final int minSnapshots; - - private boolean calibrating = false; - - // Path to save images - private final Path imageDir = ConfigManager.getInstance().getCalibDir(); - - private static final FrameThresholdType PROCESSING_TYPE = FrameThresholdType.NONE; - - public Calibrate3dPipeline() { - this(12); - } - - public Calibrate3dPipeline(int minSnapshots) { - super(PROCESSING_TYPE); - this.settings = new Calibration3dPipelineSettings(); - this.foundCornersList = new ArrayList<>(); - this.minSnapshots = minSnapshots; - } - - @Override - protected void setPipeParamsImpl() { - FindBoardCornersPipe.FindCornersPipeParams findCornersPipeParams = - new FindBoardCornersPipe.FindCornersPipeParams( - settings.boardHeight, - settings.boardWidth, - settings.boardType, - settings.gridSize, - settings.streamingFrameDivisor); - findBoardCornersPipe.setParams(findCornersPipeParams); - - Calibrate3dPipe.CalibratePipeParams calibratePipeParams = - new Calibrate3dPipe.CalibratePipeParams( - new Size(frameStaticProperties.imageWidth, frameStaticProperties.imageHeight)); - calibrate3dPipe.setParams(calibratePipeParams); - - // if (cameraQuirks.hasQuirk(CameraQuirk.PiCam) && LibCameraJNI.isSupported()) { - // 
LibCameraJNI.setRotation(settings.inputImageRotationMode.value); - // // LibCameraJNI.setShouldCopyColor(true); - // } - } - - @Override - protected CVPipelineResult process(Frame frame, Calibration3dPipelineSettings settings) { - Mat inputColorMat = frame.colorImage.getMat(); - - if (this.calibrating || inputColorMat.empty()) { - return new CVPipelineResult(0, 0, null, frame); + extends CVPipeline { + // For logging + private static final Logger logger = new Logger(Calibrate3dPipeline.class, LogGroup.General); + + // Only 2 pipes needed, one for finding the board corners and one for actually calibrating + private final FindBoardCornersPipe findBoardCornersPipe = new FindBoardCornersPipe(); + private final Calibrate3dPipe calibrate3dPipe = new Calibrate3dPipe(); + private final CalculateFPSPipe calculateFPSPipe = new CalculateFPSPipe(); + + // Getter methods have been set for calibrate and takeSnapshot + private boolean takeSnapshot = false; + + // Output of the corners + final List> foundCornersList; + + /// Output of the calibration, getter method is set for this. 
+ private CVPipeResult calibrationOutput; + + private final int minSnapshots; + + private boolean calibrating = false; + + // Path to save images + private final Path imageDir = ConfigManager.getInstance().getCalibDir(); + + private static final FrameThresholdType PROCESSING_TYPE = FrameThresholdType.NONE; + + public Calibrate3dPipeline() { + this(12); + } + + public Calibrate3dPipeline(int minSnapshots) { + super(PROCESSING_TYPE); + this.settings = new Calibration3dPipelineSettings(); + this.foundCornersList = new ArrayList<>(); + this.minSnapshots = minSnapshots; + } + + @Override + protected void setPipeParamsImpl() { + FindBoardCornersPipe.FindCornersPipeParams findCornersPipeParams = + new FindBoardCornersPipe.FindCornersPipeParams( + settings.boardHeight, + settings.boardWidth, + settings.boardType, + settings.gridSize, + settings.streamingFrameDivisor); + findBoardCornersPipe.setParams(findCornersPipeParams); + + Calibrate3dPipe.CalibratePipeParams calibratePipeParams = + new Calibrate3dPipe.CalibratePipeParams( + new Size(frameStaticProperties.imageWidth, frameStaticProperties.imageHeight)); + calibrate3dPipe.setParams(calibratePipeParams); + + // if (cameraQuirks.hasQuirk(CameraQuirk.PiCam) && LibCameraJNI.isSupported()) { + // LibCameraJNI.setRotation(settings.inputImageRotationMode.value); + // // LibCameraJNI.setShouldCopyColor(true); + // } } - long sumPipeNanosElapsed = 0L; + @Override + protected CVPipelineResult process(Frame frame, Calibration3dPipelineSettings settings) { + Mat inputColorMat = frame.colorImage.getMat(); + + if (this.calibrating || inputColorMat.empty()) { + return new CVPipelineResult(0, 0, null, frame); + } + + long sumPipeNanosElapsed = 0L; + + // Check if the frame has chessboard corners + var outputColorCVMat = new CVMat(); + inputColorMat.copyTo(outputColorCVMat.getMat()); + var findBoardResult = + findBoardCornersPipe.run(Pair.of(inputColorMat, outputColorCVMat.getMat())).output; + + var fpsResult = 
calculateFPSPipe.run(null); + var fps = fpsResult.output; + + if (takeSnapshot) { + // Set snapshot to false even if we don't find a board + takeSnapshot = false; + + if (findBoardResult != null) { + foundCornersList.add(findBoardResult); + Imgcodecs.imwrite( + Path.of(imageDir.toString(), "img" + foundCornersList.size() + ".jpg").toString(), + inputColorMat); + + // update the UI + broadcastState(); + } + } + + frame.release(); + + // Return the drawn chessboard if corners are found, if not, then return the input image. + return new CVPipelineResult( + sumPipeNanosElapsed, + fps, // Unused but here in case + Collections.emptyList(), + new MultiTargetPNPResults(), + new Frame( + new CVMat(), outputColorCVMat, FrameThresholdType.NONE, frame.frameStaticProperties)); + } + + public void deleteSavedImages() { + imageDir.toFile().mkdirs(); + imageDir.toFile().mkdir(); + FileUtils.deleteDirectory(imageDir); + } - // Check if the frame has chessboard corners - var outputColorCVMat = new CVMat(); - inputColorMat.copyTo(outputColorCVMat.getMat()); - var findBoardResult = - findBoardCornersPipe.run(Pair.of(inputColorMat, outputColorCVMat.getMat())).output; + public boolean hasEnough() { + return foundCornersList.size() >= minSnapshots; + } - var fpsResult = calculateFPSPipe.run(null); - var fps = fpsResult.output; + public CameraCalibrationCoefficients tryCalibration() { + if (!hasEnough()) { + logger.info( + "Not enough snapshots! 
Only got " + + foundCornersList.size() + + " of " + + minSnapshots + + " -- returning null.."); + return null; + } - if (takeSnapshot) { - // Set snapshot to false even if we don't find a board - takeSnapshot = false; + this.calibrating = true; - if (findBoardResult != null) { - foundCornersList.add(findBoardResult); - Imgcodecs.imwrite( - Path.of(imageDir.toString(), "img" + foundCornersList.size() + ".jpg").toString(), - inputColorMat); + /*Pass the board corners to the pipe, which will check again to see if all boards are valid + and returns the corresponding image and object points*/ + calibrationOutput = calibrate3dPipe.run(foundCornersList); + + this.calibrating = false; + + return calibrationOutput.output; + } + + public void takeSnapshot() { + takeSnapshot = true; + } + + public double[] perViewErrors() { + return calibrationOutput.output.perViewErrors; + } + + public void finishCalibration() { + foundCornersList.forEach( + it -> { + it.getMiddle().release(); + it.getRight().release(); + }); + foundCornersList.clear(); - // update the UI broadcastState(); - } } - frame.release(); - - // Return the drawn chessboard if corners are found, if not, then return the input image. - return new CVPipelineResult( - sumPipeNanosElapsed, - fps, // Unused but here in case - Collections.emptyList(), - new MultiTargetPNPResults(), - new Frame( - new CVMat(), outputColorCVMat, FrameThresholdType.NONE, frame.frameStaticProperties)); - } - - public void deleteSavedImages() { - imageDir.toFile().mkdirs(); - imageDir.toFile().mkdir(); - FileUtils.deleteDirectory(imageDir); - } - - public boolean hasEnough() { - return foundCornersList.size() >= minSnapshots; - } - - public CameraCalibrationCoefficients tryCalibration() { - if (!hasEnough()) { - logger.info( - "Not enough snapshots! 
Only got " - + foundCornersList.size() - + " of " - + minSnapshots - + " -- returning null.."); - return null; + private void broadcastState() { + var state = + SerializationUtils.objectToHashMap( + new UICalibrationData( + foundCornersList.size(), + settings.cameraVideoModeIndex, + minSnapshots, + hasEnough(), + Units.metersToInches(settings.gridSize), + settings.boardWidth, + settings.boardHeight, + settings.boardType)); + + DataChangeService.getInstance() + .publishEvent(OutgoingUIEvent.wrappedOf("calibrationData", state)); } - this.calibrating = true; - - /*Pass the board corners to the pipe, which will check again to see if all boards are valid - and returns the corresponding image and object points*/ - calibrationOutput = calibrate3dPipe.run(foundCornersList); - - this.calibrating = false; - - return calibrationOutput.output; - } - - public void takeSnapshot() { - takeSnapshot = true; - } - - public double[] perViewErrors() { - return calibrationOutput.output.perViewErrors; - } - - public void finishCalibration() { - foundCornersList.forEach( - it -> { - it.getMiddle().release(); - it.getRight().release(); - }); - foundCornersList.clear(); - - broadcastState(); - } - - private void broadcastState() { - var state = - SerializationUtils.objectToHashMap( - new UICalibrationData( - foundCornersList.size(), - settings.cameraVideoModeIndex, - minSnapshots, - hasEnough(), - Units.metersToInches(settings.gridSize), - settings.boardWidth, - settings.boardHeight, - settings.boardType)); - - DataChangeService.getInstance() - .publishEvent(OutgoingUIEvent.wrappedOf("calibrationData", state)); - } - - public boolean removeSnapshot(int index) { - try { - foundCornersList.remove(index); - return true; - } catch (ArrayIndexOutOfBoundsException e) { - logger.error("Could not remove snapshot at index " + index, e); - return false; + public boolean removeSnapshot(int index) { + try { + foundCornersList.remove(index); + return true; + } catch (ArrayIndexOutOfBoundsException e) { 
+ logger.error("Could not remove snapshot at index " + index, e); + return false; + } } - } - public CameraCalibrationCoefficients cameraCalibrationCoefficients() { - return calibrationOutput.output; - } + public CameraCalibrationCoefficients cameraCalibrationCoefficients() { + return calibrationOutput.output; + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipeline/Calibration3dPipelineSettings.java b/photon-core/src/main/java/org/photonvision/vision/pipeline/Calibration3dPipelineSettings.java index 4f2b3c314e..1d5e57df2f 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipeline/Calibration3dPipelineSettings.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipeline/Calibration3dPipelineSettings.java @@ -22,18 +22,18 @@ import org.photonvision.vision.frame.FrameDivisor; public class Calibration3dPipelineSettings extends AdvancedPipelineSettings { - public int boardHeight = 8; - public int boardWidth = 8; - public UICalibrationData.BoardType boardType = UICalibrationData.BoardType.CHESSBOARD; - public double gridSize = Units.inchesToMeters(1.0); + public int boardHeight = 8; + public int boardWidth = 8; + public UICalibrationData.BoardType boardType = UICalibrationData.BoardType.CHESSBOARD; + public double gridSize = Units.inchesToMeters(1.0); - public Size resolution = new Size(640, 480); + public Size resolution = new Size(640, 480); - public Calibration3dPipelineSettings() { - super(); - this.cameraAutoExposure = true; - this.inputShouldShow = true; - this.outputShouldShow = true; - this.streamingFrameDivisor = FrameDivisor.HALF; - } + public Calibration3dPipelineSettings() { + super(); + this.cameraAutoExposure = true; + this.inputShouldShow = true; + this.outputShouldShow = true; + this.streamingFrameDivisor = FrameDivisor.HALF; + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipeline/ColoredShapePipeline.java 
b/photon-core/src/main/java/org/photonvision/vision/pipeline/ColoredShapePipeline.java index 8b8b2269cd..190de9e68b 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipeline/ColoredShapePipeline.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipeline/ColoredShapePipeline.java @@ -35,199 +35,199 @@ import org.photonvision.vision.target.TrackedTarget; public class ColoredShapePipeline - extends CVPipeline { - private final SpeckleRejectPipe speckleRejectPipe = new SpeckleRejectPipe(); - private final FindContoursPipe findContoursPipe = new FindContoursPipe(); - private final FindPolygonPipe findPolygonPipe = new FindPolygonPipe(); - private final FindCirclesPipe findCirclesPipe = new FindCirclesPipe(); - private final FilterShapesPipe filterShapesPipe = new FilterShapesPipe(); - private final SortContoursPipe sortContoursPipe = new SortContoursPipe(); - private final Collect2dTargetsPipe collect2dTargetsPipe = new Collect2dTargetsPipe(); - private final CornerDetectionPipe cornerDetectionPipe = new CornerDetectionPipe(); - private final SolvePNPPipe solvePNPPipe = new SolvePNPPipe(); - private final Draw2dCrosshairPipe draw2dCrosshairPipe = new Draw2dCrosshairPipe(); - private final Draw2dTargetsPipe draw2DTargetsPipe = new Draw2dTargetsPipe(); - private final Draw3dTargetsPipe draw3dTargetsPipe = new Draw3dTargetsPipe(); - private final CalculateFPSPipe calculateFPSPipe = new CalculateFPSPipe(); - - private final Point[] rectPoints = new Point[4]; - - private static final FrameThresholdType PROCESSING_TYPE = FrameThresholdType.HSV; - - public ColoredShapePipeline() { - super(PROCESSING_TYPE); - settings = new ColoredShapePipelineSettings(); - } - - public ColoredShapePipeline(ColoredShapePipelineSettings settings) { - super(PROCESSING_TYPE); - this.settings = settings; - } - - @Override - protected void setPipeParamsImpl() { - DualOffsetValues dualOffsetValues = - new DualOffsetValues( - settings.offsetDualPointA, - 
settings.offsetDualPointAArea, - settings.offsetDualPointB, - settings.offsetDualPointBArea); - - SpeckleRejectPipe.SpeckleRejectParams speckleRejectParams = - new SpeckleRejectPipe.SpeckleRejectParams(settings.contourSpecklePercentage); - speckleRejectPipe.setParams(speckleRejectParams); - - FindContoursPipe.FindContoursParams findContoursParams = - new FindContoursPipe.FindContoursParams(); - findContoursPipe.setParams(findContoursParams); - - FindPolygonPipe.FindPolygonPipeParams findPolygonPipeParams = - new FindPolygonPipe.FindPolygonPipeParams(settings.accuracyPercentage); - findPolygonPipe.setParams(findPolygonPipeParams); - - FindCirclesPipe.FindCirclePipeParams findCirclePipeParams = - new FindCirclesPipe.FindCirclePipeParams( - settings.circleDetectThreshold, - settings.contourRadius.getFirst(), - settings.minDist, - settings.contourRadius.getSecond(), - settings.maxCannyThresh, - settings.circleAccuracy, - Math.hypot(frameStaticProperties.imageWidth, frameStaticProperties.imageHeight)); - findCirclesPipe.setParams(findCirclePipeParams); - - FilterShapesPipe.FilterShapesPipeParams filterShapesPipeParams = - new FilterShapesPipe.FilterShapesPipeParams( - settings.contourShape, - settings.contourArea.getFirst(), - settings.contourArea.getSecond(), - settings.contourPerimeter.getFirst(), - settings.contourPerimeter.getSecond(), - frameStaticProperties); - filterShapesPipe.setParams(filterShapesPipeParams); - - SortContoursPipe.SortContoursParams sortContoursParams = - new SortContoursPipe.SortContoursParams( - settings.contourSortMode, - settings.outputShowMultipleTargets ? 5 : 1, - frameStaticProperties); // TODO don't hardcode? 
- sortContoursPipe.setParams(sortContoursParams); - - Collect2dTargetsPipe.Collect2dTargetsParams collect2dTargetsParams = - new Collect2dTargetsPipe.Collect2dTargetsParams( - settings.offsetRobotOffsetMode, - settings.offsetSinglePoint, - dualOffsetValues, - settings.contourTargetOffsetPointEdge, - settings.contourTargetOrientation, - frameStaticProperties); - collect2dTargetsPipe.setParams(collect2dTargetsParams); - - var params = - new CornerDetectionPipe.CornerDetectionPipeParameters( - settings.cornerDetectionStrategy, - settings.cornerDetectionUseConvexHulls, - settings.cornerDetectionExactSideCount, - settings.cornerDetectionSideCount, - settings.cornerDetectionAccuracyPercentage); - cornerDetectionPipe.setParams(params); - - var solvePNPParams = - new SolvePNPPipe.SolvePNPPipeParams( - frameStaticProperties.cameraCalibration, settings.targetModel); - solvePNPPipe.setParams(solvePNPParams); - - Draw2dTargetsPipe.Draw2dTargetsParams draw2DTargetsParams = - new Draw2dTargetsPipe.Draw2dTargetsParams( - settings.outputShouldDraw, - settings.outputShowMultipleTargets, - settings.streamingFrameDivisor); - draw2DTargetsParams.showShape = true; - draw2DTargetsParams.showMaximumBox = false; - draw2DTargetsParams.showRotatedBox = false; - draw2DTargetsPipe.setParams(draw2DTargetsParams); - - Draw2dCrosshairPipe.Draw2dCrosshairParams draw2dCrosshairParams = - new Draw2dCrosshairPipe.Draw2dCrosshairParams( - settings.outputShouldDraw, - settings.offsetRobotOffsetMode, - settings.offsetSinglePoint, - dualOffsetValues, - frameStaticProperties, - settings.streamingFrameDivisor, - settings.inputImageRotationMode); - draw2dCrosshairPipe.setParams(draw2dCrosshairParams); - - var draw3dTargetsParams = - new Draw3dTargetsPipe.Draw3dContoursParams( - settings.outputShouldDraw, - frameStaticProperties.cameraCalibration, - settings.targetModel, - settings.streamingFrameDivisor); - draw3dTargetsPipe.setParams(draw3dTargetsParams); - } - - @Override - protected CVPipelineResult 
process(Frame frame, ColoredShapePipelineSettings settings) { - long sumPipeNanosElapsed = 0L; - - CVPipeResult> findContoursResult = - findContoursPipe.run(frame.processedImage.getMat()); - sumPipeNanosElapsed += findContoursResult.nanosElapsed; - - CVPipeResult> speckleRejectResult = - speckleRejectPipe.run(findContoursResult.output); - sumPipeNanosElapsed += speckleRejectResult.nanosElapsed; - - List shapes = null; - if (settings.contourShape == ContourShape.Circle) { - CVPipeResult> findCirclesResult = - findCirclesPipe.run(Pair.of(frame.processedImage.getMat(), speckleRejectResult.output)); - sumPipeNanosElapsed += findCirclesResult.nanosElapsed; - shapes = findCirclesResult.output; - } else { - CVPipeResult> findPolygonsResult = - findPolygonPipe.run(speckleRejectResult.output); - sumPipeNanosElapsed += findPolygonsResult.nanosElapsed; - shapes = findPolygonsResult.output; + extends CVPipeline { + private final SpeckleRejectPipe speckleRejectPipe = new SpeckleRejectPipe(); + private final FindContoursPipe findContoursPipe = new FindContoursPipe(); + private final FindPolygonPipe findPolygonPipe = new FindPolygonPipe(); + private final FindCirclesPipe findCirclesPipe = new FindCirclesPipe(); + private final FilterShapesPipe filterShapesPipe = new FilterShapesPipe(); + private final SortContoursPipe sortContoursPipe = new SortContoursPipe(); + private final Collect2dTargetsPipe collect2dTargetsPipe = new Collect2dTargetsPipe(); + private final CornerDetectionPipe cornerDetectionPipe = new CornerDetectionPipe(); + private final SolvePNPPipe solvePNPPipe = new SolvePNPPipe(); + private final Draw2dCrosshairPipe draw2dCrosshairPipe = new Draw2dCrosshairPipe(); + private final Draw2dTargetsPipe draw2DTargetsPipe = new Draw2dTargetsPipe(); + private final Draw3dTargetsPipe draw3dTargetsPipe = new Draw3dTargetsPipe(); + private final CalculateFPSPipe calculateFPSPipe = new CalculateFPSPipe(); + + private final Point[] rectPoints = new Point[4]; + + private static 
final FrameThresholdType PROCESSING_TYPE = FrameThresholdType.HSV; + + public ColoredShapePipeline() { + super(PROCESSING_TYPE); + settings = new ColoredShapePipelineSettings(); } - CVPipeResult> filterShapeResult = filterShapesPipe.run(shapes); - sumPipeNanosElapsed += filterShapeResult.nanosElapsed; - - CVPipeResult> sortContoursResult = - sortContoursPipe.run( - filterShapeResult.output.stream() - .map(shape -> new PotentialTarget(shape.getContour(), shape)) - .collect(Collectors.toList())); - sumPipeNanosElapsed += sortContoursResult.nanosElapsed; - - CVPipeResult> collect2dTargetsResult = - collect2dTargetsPipe.run(sortContoursResult.output); - sumPipeNanosElapsed += collect2dTargetsResult.nanosElapsed; - - List targetList; - - if (settings.solvePNPEnabled && settings.contourShape == ContourShape.Circle) { - var cornerDetectionResult = cornerDetectionPipe.run(collect2dTargetsResult.output); - collect2dTargetsResult.output.forEach( - shape -> { - shape.getMinAreaRect().points(rectPoints); - shape.setTargetCorners(Arrays.asList(rectPoints)); - }); - sumPipeNanosElapsed += cornerDetectionResult.nanosElapsed; - - var solvePNPResult = solvePNPPipe.run(cornerDetectionResult.output); - sumPipeNanosElapsed += solvePNPResult.nanosElapsed; - - targetList = solvePNPResult.output; - } else { - targetList = collect2dTargetsResult.output; + public ColoredShapePipeline(ColoredShapePipelineSettings settings) { + super(PROCESSING_TYPE); + this.settings = settings; } - var fpsResult = calculateFPSPipe.run(null); - var fps = fpsResult.output; + @Override + protected void setPipeParamsImpl() { + DualOffsetValues dualOffsetValues = + new DualOffsetValues( + settings.offsetDualPointA, + settings.offsetDualPointAArea, + settings.offsetDualPointB, + settings.offsetDualPointBArea); + + SpeckleRejectPipe.SpeckleRejectParams speckleRejectParams = + new SpeckleRejectPipe.SpeckleRejectParams(settings.contourSpecklePercentage); + speckleRejectPipe.setParams(speckleRejectParams); + + 
FindContoursPipe.FindContoursParams findContoursParams = + new FindContoursPipe.FindContoursParams(); + findContoursPipe.setParams(findContoursParams); + + FindPolygonPipe.FindPolygonPipeParams findPolygonPipeParams = + new FindPolygonPipe.FindPolygonPipeParams(settings.accuracyPercentage); + findPolygonPipe.setParams(findPolygonPipeParams); + + FindCirclesPipe.FindCirclePipeParams findCirclePipeParams = + new FindCirclesPipe.FindCirclePipeParams( + settings.circleDetectThreshold, + settings.contourRadius.getFirst(), + settings.minDist, + settings.contourRadius.getSecond(), + settings.maxCannyThresh, + settings.circleAccuracy, + Math.hypot(frameStaticProperties.imageWidth, frameStaticProperties.imageHeight)); + findCirclesPipe.setParams(findCirclePipeParams); + + FilterShapesPipe.FilterShapesPipeParams filterShapesPipeParams = + new FilterShapesPipe.FilterShapesPipeParams( + settings.contourShape, + settings.contourArea.getFirst(), + settings.contourArea.getSecond(), + settings.contourPerimeter.getFirst(), + settings.contourPerimeter.getSecond(), + frameStaticProperties); + filterShapesPipe.setParams(filterShapesPipeParams); + + SortContoursPipe.SortContoursParams sortContoursParams = + new SortContoursPipe.SortContoursParams( + settings.contourSortMode, + settings.outputShowMultipleTargets ? 5 : 1, + frameStaticProperties); // TODO don't hardcode? 
+ sortContoursPipe.setParams(sortContoursParams); + + Collect2dTargetsPipe.Collect2dTargetsParams collect2dTargetsParams = + new Collect2dTargetsPipe.Collect2dTargetsParams( + settings.offsetRobotOffsetMode, + settings.offsetSinglePoint, + dualOffsetValues, + settings.contourTargetOffsetPointEdge, + settings.contourTargetOrientation, + frameStaticProperties); + collect2dTargetsPipe.setParams(collect2dTargetsParams); + + var params = + new CornerDetectionPipe.CornerDetectionPipeParameters( + settings.cornerDetectionStrategy, + settings.cornerDetectionUseConvexHulls, + settings.cornerDetectionExactSideCount, + settings.cornerDetectionSideCount, + settings.cornerDetectionAccuracyPercentage); + cornerDetectionPipe.setParams(params); + + var solvePNPParams = + new SolvePNPPipe.SolvePNPPipeParams( + frameStaticProperties.cameraCalibration, settings.targetModel); + solvePNPPipe.setParams(solvePNPParams); + + Draw2dTargetsPipe.Draw2dTargetsParams draw2DTargetsParams = + new Draw2dTargetsPipe.Draw2dTargetsParams( + settings.outputShouldDraw, + settings.outputShowMultipleTargets, + settings.streamingFrameDivisor); + draw2DTargetsParams.showShape = true; + draw2DTargetsParams.showMaximumBox = false; + draw2DTargetsParams.showRotatedBox = false; + draw2DTargetsPipe.setParams(draw2DTargetsParams); + + Draw2dCrosshairPipe.Draw2dCrosshairParams draw2dCrosshairParams = + new Draw2dCrosshairPipe.Draw2dCrosshairParams( + settings.outputShouldDraw, + settings.offsetRobotOffsetMode, + settings.offsetSinglePoint, + dualOffsetValues, + frameStaticProperties, + settings.streamingFrameDivisor, + settings.inputImageRotationMode); + draw2dCrosshairPipe.setParams(draw2dCrosshairParams); + + var draw3dTargetsParams = + new Draw3dTargetsPipe.Draw3dContoursParams( + settings.outputShouldDraw, + frameStaticProperties.cameraCalibration, + settings.targetModel, + settings.streamingFrameDivisor); + draw3dTargetsPipe.setParams(draw3dTargetsParams); + } - return new 
CVPipelineResult(sumPipeNanosElapsed, fps, targetList, frame); - } + @Override + protected CVPipelineResult process(Frame frame, ColoredShapePipelineSettings settings) { + long sumPipeNanosElapsed = 0L; + + CVPipeResult> findContoursResult = + findContoursPipe.run(frame.processedImage.getMat()); + sumPipeNanosElapsed += findContoursResult.nanosElapsed; + + CVPipeResult> speckleRejectResult = + speckleRejectPipe.run(findContoursResult.output); + sumPipeNanosElapsed += speckleRejectResult.nanosElapsed; + + List shapes = null; + if (settings.contourShape == ContourShape.Circle) { + CVPipeResult> findCirclesResult = + findCirclesPipe.run(Pair.of(frame.processedImage.getMat(), speckleRejectResult.output)); + sumPipeNanosElapsed += findCirclesResult.nanosElapsed; + shapes = findCirclesResult.output; + } else { + CVPipeResult> findPolygonsResult = + findPolygonPipe.run(speckleRejectResult.output); + sumPipeNanosElapsed += findPolygonsResult.nanosElapsed; + shapes = findPolygonsResult.output; + } + + CVPipeResult> filterShapeResult = filterShapesPipe.run(shapes); + sumPipeNanosElapsed += filterShapeResult.nanosElapsed; + + CVPipeResult> sortContoursResult = + sortContoursPipe.run( + filterShapeResult.output.stream() + .map(shape -> new PotentialTarget(shape.getContour(), shape)) + .collect(Collectors.toList())); + sumPipeNanosElapsed += sortContoursResult.nanosElapsed; + + CVPipeResult> collect2dTargetsResult = + collect2dTargetsPipe.run(sortContoursResult.output); + sumPipeNanosElapsed += collect2dTargetsResult.nanosElapsed; + + List targetList; + + if (settings.solvePNPEnabled && settings.contourShape == ContourShape.Circle) { + var cornerDetectionResult = cornerDetectionPipe.run(collect2dTargetsResult.output); + collect2dTargetsResult.output.forEach( + shape -> { + shape.getMinAreaRect().points(rectPoints); + shape.setTargetCorners(Arrays.asList(rectPoints)); + }); + sumPipeNanosElapsed += cornerDetectionResult.nanosElapsed; + + var solvePNPResult = 
solvePNPPipe.run(cornerDetectionResult.output); + sumPipeNanosElapsed += solvePNPResult.nanosElapsed; + + targetList = solvePNPResult.output; + } else { + targetList = collect2dTargetsResult.output; + } + + var fpsResult = calculateFPSPipe.run(null); + var fps = fpsResult.output; + + return new CVPipelineResult(sumPipeNanosElapsed, fps, targetList, frame); + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipeline/ColoredShapePipelineSettings.java b/photon-core/src/main/java/org/photonvision/vision/pipeline/ColoredShapePipelineSettings.java index a5e69f493e..62db7bc9ee 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipeline/ColoredShapePipelineSettings.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipeline/ColoredShapePipelineSettings.java @@ -26,71 +26,71 @@ @JsonTypeName("ColoredShapePipelineSettings") public class ColoredShapePipelineSettings extends AdvancedPipelineSettings { - public ContourShape contourShape = ContourShape.Triangle; - public DoubleCouple contourPerimeter = new DoubleCouple(0, Double.MAX_VALUE); - public double accuracyPercentage = 10.0; - // Circle detection - public int circleDetectThreshold = 5; - public IntegerCouple contourRadius = new IntegerCouple(0, 100); - public int minDist = 20; - public int maxCannyThresh = 90; - public int circleAccuracy = 20; + public ContourShape contourShape = ContourShape.Triangle; + public DoubleCouple contourPerimeter = new DoubleCouple(0, Double.MAX_VALUE); + public double accuracyPercentage = 10.0; + // Circle detection + public int circleDetectThreshold = 5; + public IntegerCouple contourRadius = new IntegerCouple(0, 100); + public int minDist = 20; + public int maxCannyThresh = 90; + public int circleAccuracy = 20; - // 3d settings - public CameraCalibrationCoefficients cameraCalibration; + // 3d settings + public CameraCalibrationCoefficients cameraCalibration; - public ColoredShapePipelineSettings() { - super(); - pipelineType = 
PipelineType.ColoredShape; - cameraExposure = 20; - } + public ColoredShapePipelineSettings() { + super(); + pipelineType = PipelineType.ColoredShape; + cameraExposure = 20; + } - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - if (!super.equals(o)) return false; - ColoredShapePipelineSettings that = (ColoredShapePipelineSettings) o; - return Double.compare(that.accuracyPercentage, accuracyPercentage) == 0 - && circleDetectThreshold == that.circleDetectThreshold - && minDist == that.minDist - && maxCannyThresh == that.maxCannyThresh - && circleAccuracy == that.circleAccuracy - && cornerDetectionUseConvexHulls == that.cornerDetectionUseConvexHulls - && cornerDetectionExactSideCount == that.cornerDetectionExactSideCount - && cornerDetectionSideCount == that.cornerDetectionSideCount - && Double.compare(that.cornerDetectionAccuracyPercentage, cornerDetectionAccuracyPercentage) - == 0 - && contourShape == that.contourShape - && Objects.equals(contourArea, that.contourArea) - && Objects.equals(contourPerimeter, that.contourPerimeter) - && Objects.equals(contourRadius, that.contourRadius) - && contourGroupingMode == that.contourGroupingMode - && contourIntersection == that.contourIntersection - && Objects.equals(cameraCalibration, that.cameraCalibration) - && cornerDetectionStrategy == that.cornerDetectionStrategy; - } + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + if (!super.equals(o)) return false; + ColoredShapePipelineSettings that = (ColoredShapePipelineSettings) o; + return Double.compare(that.accuracyPercentage, accuracyPercentage) == 0 + && circleDetectThreshold == that.circleDetectThreshold + && minDist == that.minDist + && maxCannyThresh == that.maxCannyThresh + && circleAccuracy == that.circleAccuracy + && cornerDetectionUseConvexHulls == that.cornerDetectionUseConvexHulls + && 
cornerDetectionExactSideCount == that.cornerDetectionExactSideCount + && cornerDetectionSideCount == that.cornerDetectionSideCount + && Double.compare(that.cornerDetectionAccuracyPercentage, cornerDetectionAccuracyPercentage) + == 0 + && contourShape == that.contourShape + && Objects.equals(contourArea, that.contourArea) + && Objects.equals(contourPerimeter, that.contourPerimeter) + && Objects.equals(contourRadius, that.contourRadius) + && contourGroupingMode == that.contourGroupingMode + && contourIntersection == that.contourIntersection + && Objects.equals(cameraCalibration, that.cameraCalibration) + && cornerDetectionStrategy == that.cornerDetectionStrategy; + } - @Override - public int hashCode() { - return Objects.hash( - super.hashCode(), - contourShape, - contourArea, - contourPerimeter, - accuracyPercentage, - circleDetectThreshold, - contourRadius, - minDist, - maxCannyThresh, - circleAccuracy, - contourGroupingMode, - contourIntersection, - cameraCalibration, - cornerDetectionStrategy, - cornerDetectionUseConvexHulls, - cornerDetectionExactSideCount, - cornerDetectionSideCount, - cornerDetectionAccuracyPercentage); - } + @Override + public int hashCode() { + return Objects.hash( + super.hashCode(), + contourShape, + contourArea, + contourPerimeter, + accuracyPercentage, + circleDetectThreshold, + contourRadius, + minDist, + maxCannyThresh, + circleAccuracy, + contourGroupingMode, + contourIntersection, + cameraCalibration, + cornerDetectionStrategy, + cornerDetectionUseConvexHulls, + cornerDetectionExactSideCount, + cornerDetectionSideCount, + cornerDetectionAccuracyPercentage); + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipeline/DriverModePipeline.java b/photon-core/src/main/java/org/photonvision/vision/pipeline/DriverModePipeline.java index aeec75ddb6..f29cb2be3d 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipeline/DriverModePipeline.java +++ 
b/photon-core/src/main/java/org/photonvision/vision/pipeline/DriverModePipeline.java @@ -31,75 +31,75 @@ import org.photonvision.vision.pipeline.result.DriverModePipelineResult; public class DriverModePipeline - extends CVPipeline { - private final RotateImagePipe rotateImagePipe = new RotateImagePipe(); - private final Draw2dCrosshairPipe draw2dCrosshairPipe = new Draw2dCrosshairPipe(); - private final CalculateFPSPipe calculateFPSPipe = new CalculateFPSPipe(); - private final ResizeImagePipe resizeImagePipe = new ResizeImagePipe(); - - private static final FrameThresholdType PROCESSING_TYPE = FrameThresholdType.NONE; - - public DriverModePipeline() { - super(PROCESSING_TYPE); - settings = new DriverModePipelineSettings(); - } - - public DriverModePipeline(DriverModePipelineSettings settings) { - super(PROCESSING_TYPE); - this.settings = settings; - } - - @Override - protected void setPipeParamsImpl() { - RotateImagePipe.RotateImageParams rotateImageParams = - new RotateImagePipe.RotateImageParams(settings.inputImageRotationMode); - rotateImagePipe.setParams(rotateImageParams); - - Draw2dCrosshairPipe.Draw2dCrosshairParams draw2dCrosshairParams = - new Draw2dCrosshairPipe.Draw2dCrosshairParams( - frameStaticProperties, settings.streamingFrameDivisor, settings.inputImageRotationMode); - draw2dCrosshairPipe.setParams(draw2dCrosshairParams); - - resizeImagePipe.setParams( - new ResizeImagePipe.ResizeImageParams(settings.streamingFrameDivisor)); - - // if (LibCameraJNI.isSupported() && cameraQuirks.hasQuirk(CameraQuirk.PiCam)) { - // LibCameraJNI.setRotation(settings.inputImageRotationMode.value); - // LibCameraJNI.setShouldCopyColor(true); - // } - } - - @Override - public DriverModePipelineResult process(Frame frame, DriverModePipelineSettings settings) { - long totalNanos = 0; - boolean accelerated = LibCameraJNI.isSupported() && cameraQuirks.hasQuirk(CameraQuirk.PiCam); - - // apply pipes - var inputMat = frame.colorImage.getMat(); - - boolean emptyIn = 
inputMat.empty(); - - if (!emptyIn) { - if (!accelerated) { - var rotateImageResult = rotateImagePipe.run(inputMat); - totalNanos += rotateImageResult.nanosElapsed; - } - - totalNanos += resizeImagePipe.run(inputMat).nanosElapsed; - - var draw2dCrosshairResult = draw2dCrosshairPipe.run(Pair.of(inputMat, List.of())); - - // calculate elapsed nanoseconds - totalNanos += draw2dCrosshairResult.nanosElapsed; + extends CVPipeline { + private final RotateImagePipe rotateImagePipe = new RotateImagePipe(); + private final Draw2dCrosshairPipe draw2dCrosshairPipe = new Draw2dCrosshairPipe(); + private final CalculateFPSPipe calculateFPSPipe = new CalculateFPSPipe(); + private final ResizeImagePipe resizeImagePipe = new ResizeImagePipe(); + + private static final FrameThresholdType PROCESSING_TYPE = FrameThresholdType.NONE; + + public DriverModePipeline() { + super(PROCESSING_TYPE); + settings = new DriverModePipelineSettings(); + } + + public DriverModePipeline(DriverModePipelineSettings settings) { + super(PROCESSING_TYPE); + this.settings = settings; + } + + @Override + protected void setPipeParamsImpl() { + RotateImagePipe.RotateImageParams rotateImageParams = + new RotateImagePipe.RotateImageParams(settings.inputImageRotationMode); + rotateImagePipe.setParams(rotateImageParams); + + Draw2dCrosshairPipe.Draw2dCrosshairParams draw2dCrosshairParams = + new Draw2dCrosshairPipe.Draw2dCrosshairParams( + frameStaticProperties, settings.streamingFrameDivisor, settings.inputImageRotationMode); + draw2dCrosshairPipe.setParams(draw2dCrosshairParams); + + resizeImagePipe.setParams( + new ResizeImagePipe.ResizeImageParams(settings.streamingFrameDivisor)); + + // if (LibCameraJNI.isSupported() && cameraQuirks.hasQuirk(CameraQuirk.PiCam)) { + // LibCameraJNI.setRotation(settings.inputImageRotationMode.value); + // LibCameraJNI.setShouldCopyColor(true); + // } } - var fpsResult = calculateFPSPipe.run(null); - var fps = fpsResult.output; + @Override + public DriverModePipelineResult 
process(Frame frame, DriverModePipelineSettings settings) { + long totalNanos = 0; + boolean accelerated = LibCameraJNI.isSupported() && cameraQuirks.hasQuirk(CameraQuirk.PiCam); - // Flip-flop input and output in the Frame - return new DriverModePipelineResult( - MathUtils.nanosToMillis(totalNanos), - fps, - new Frame(frame.processedImage, frame.colorImage, frame.type, frame.frameStaticProperties)); - } + // apply pipes + var inputMat = frame.colorImage.getMat(); + + boolean emptyIn = inputMat.empty(); + + if (!emptyIn) { + if (!accelerated) { + var rotateImageResult = rotateImagePipe.run(inputMat); + totalNanos += rotateImageResult.nanosElapsed; + } + + totalNanos += resizeImagePipe.run(inputMat).nanosElapsed; + + var draw2dCrosshairResult = draw2dCrosshairPipe.run(Pair.of(inputMat, List.of())); + + // calculate elapsed nanoseconds + totalNanos += draw2dCrosshairResult.nanosElapsed; + } + + var fpsResult = calculateFPSPipe.run(null); + var fps = fpsResult.output; + + // Flip-flop input and output in the Frame + return new DriverModePipelineResult( + MathUtils.nanosToMillis(totalNanos), + fps, + new Frame(frame.processedImage, frame.colorImage, frame.type, frame.frameStaticProperties)); + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipeline/DriverModePipelineSettings.java b/photon-core/src/main/java/org/photonvision/vision/pipeline/DriverModePipelineSettings.java index 7afb3bc169..70beebf7a5 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipeline/DriverModePipelineSettings.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipeline/DriverModePipelineSettings.java @@ -23,14 +23,14 @@ @JsonTypeName("DriverModePipelineSettings") public class DriverModePipelineSettings extends CVPipelineSettings { - public DoubleCouple offsetPoint = new DoubleCouple(); + public DoubleCouple offsetPoint = new DoubleCouple(); - public DriverModePipelineSettings() { - super(); - pipelineNickname = "Driver Mode"; - pipelineIndex = 
PipelineManager.DRIVERMODE_INDEX; - pipelineType = PipelineType.DriverMode; - inputShouldShow = true; - cameraAutoExposure = true; - } + public DriverModePipelineSettings() { + super(); + pipelineNickname = "Driver Mode"; + pipelineIndex = PipelineManager.DRIVERMODE_INDEX; + pipelineType = PipelineType.DriverMode; + inputShouldShow = true; + cameraAutoExposure = true; + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipeline/OutputStreamPipeline.java b/photon-core/src/main/java/org/photonvision/vision/pipeline/OutputStreamPipeline.java index 53372b864b..0474b6b562 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipeline/OutputStreamPipeline.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipeline/OutputStreamPipeline.java @@ -32,198 +32,198 @@ * shall not get its settings saved, nor shall it be managed by PipelineManager */ public class OutputStreamPipeline { - private final OutputMatPipe outputMatPipe = new OutputMatPipe(); - private final Draw2dCrosshairPipe draw2dCrosshairPipe = new Draw2dCrosshairPipe(); - private final Draw2dTargetsPipe draw2dTargetsPipe = new Draw2dTargetsPipe(); - private final Draw3dTargetsPipe draw3dTargetsPipe = new Draw3dTargetsPipe(); - private final Draw2dAprilTagsPipe draw2dAprilTagsPipe = new Draw2dAprilTagsPipe(); - private final Draw3dAprilTagsPipe draw3dAprilTagsPipe = new Draw3dAprilTagsPipe(); - - private final Draw2dArucoPipe draw2dArucoPipe = new Draw2dArucoPipe(); - private final Draw3dArucoPipe draw3dArucoPipe = new Draw3dArucoPipe(); - private final CalculateFPSPipe calculateFPSPipe = new CalculateFPSPipe(); - private final ResizeImagePipe resizeImagePipe = new ResizeImagePipe(); - - private final long[] pipeProfileNanos = new long[12]; - - protected void setPipeParams( - FrameStaticProperties frameStaticProperties, AdvancedPipelineSettings settings) { - var dualOffsetValues = - new DualOffsetValues( - settings.offsetDualPointA, - settings.offsetDualPointAArea, - 
settings.offsetDualPointB, - settings.offsetDualPointBArea); - - var draw2DTargetsParams = - new Draw2dTargetsPipe.Draw2dTargetsParams( - settings.outputShouldDraw, - settings.outputShowMultipleTargets, - settings.streamingFrameDivisor); - draw2dTargetsPipe.setParams(draw2DTargetsParams); - - var draw2DAprilTagsParams = - new Draw2dAprilTagsPipe.Draw2dAprilTagsParams( - settings.outputShouldDraw, - settings.outputShowMultipleTargets, - settings.streamingFrameDivisor); - draw2dAprilTagsPipe.setParams(draw2DAprilTagsParams); - - var draw2DArucoParams = - new Draw2dArucoPipe.Draw2dArucoParams( - settings.outputShouldDraw, - settings.outputShowMultipleTargets, - settings.streamingFrameDivisor); - draw2dArucoPipe.setParams(draw2DArucoParams); - - var draw2dCrosshairParams = - new Draw2dCrosshairPipe.Draw2dCrosshairParams( - settings.outputShouldDraw, - settings.offsetRobotOffsetMode, - settings.offsetSinglePoint, - dualOffsetValues, - frameStaticProperties, - settings.streamingFrameDivisor, - settings.inputImageRotationMode); - draw2dCrosshairPipe.setParams(draw2dCrosshairParams); - - var draw3dTargetsParams = - new Draw3dTargetsPipe.Draw3dContoursParams( - settings.outputShouldDraw, - frameStaticProperties.cameraCalibration, - settings.targetModel, - settings.streamingFrameDivisor); - draw3dTargetsPipe.setParams(draw3dTargetsParams); - - var draw3dAprilTagsParams = - new Draw3dAprilTagsPipe.Draw3dAprilTagsParams( - settings.outputShouldDraw, - frameStaticProperties.cameraCalibration, - settings.targetModel, - settings.streamingFrameDivisor); - draw3dAprilTagsPipe.setParams(draw3dAprilTagsParams); - - var draw3dArucoParams = - new Draw3dArucoPipe.Draw3dArucoParams( - settings.outputShouldDraw, - frameStaticProperties.cameraCalibration, - TargetModel.k6in_16h5, - settings.streamingFrameDivisor); - draw3dArucoPipe.setParams(draw3dArucoParams); - - resizeImagePipe.setParams( - new ResizeImagePipe.ResizeImageParams(settings.streamingFrameDivisor)); - } - - public 
CVPipelineResult process( - Frame inputAndOutputFrame, - AdvancedPipelineSettings settings, - List targetsToDraw) { - setPipeParams(inputAndOutputFrame.frameStaticProperties, settings); - var inMat = inputAndOutputFrame.colorImage.getMat(); - var outMat = inputAndOutputFrame.processedImage.getMat(); - - long sumPipeNanosElapsed = 0L; - - // Resize both in place before doing any conversion - boolean inEmpty = inMat.empty(); - if (!inEmpty) - sumPipeNanosElapsed += pipeProfileNanos[0] = resizeImagePipe.run(inMat).nanosElapsed; - - boolean outEmpty = outMat.empty(); - if (!outEmpty) - sumPipeNanosElapsed += pipeProfileNanos[1] = resizeImagePipe.run(outMat).nanosElapsed; - - // Only attempt drawing on a non-empty frame - if (!outEmpty) { - // Convert single-channel HSV output mat to 3-channel BGR in preparation for streaming - if (outMat.channels() == 1) { - var outputMatPipeResult = outputMatPipe.run(outMat); - sumPipeNanosElapsed += pipeProfileNanos[2] = outputMatPipeResult.nanosElapsed; - } else { - pipeProfileNanos[2] = 0; - } - - // Draw 2D Crosshair on output - var draw2dCrosshairResultOnInput = draw2dCrosshairPipe.run(Pair.of(inMat, targetsToDraw)); - sumPipeNanosElapsed += pipeProfileNanos[3] = draw2dCrosshairResultOnInput.nanosElapsed; - - if (!(settings instanceof AprilTagPipelineSettings) - && !(settings instanceof ArucoPipelineSettings)) { - // If we're processing anything other than Apriltags... 
- var draw2dCrosshairResultOnOutput = draw2dCrosshairPipe.run(Pair.of(outMat, targetsToDraw)); - sumPipeNanosElapsed += pipeProfileNanos[4] = draw2dCrosshairResultOnOutput.nanosElapsed; - - if (settings.solvePNPEnabled) { - // Draw 3D Targets on input and output if possible - pipeProfileNanos[5] = 0; - pipeProfileNanos[6] = 0; - pipeProfileNanos[7] = 0; - - var drawOnOutputResult = draw3dTargetsPipe.run(Pair.of(outMat, targetsToDraw)); - sumPipeNanosElapsed += pipeProfileNanos[8] = drawOnOutputResult.nanosElapsed; - } else { - // Only draw 2d targets - pipeProfileNanos[5] = 0; - - var draw2dTargetsOnOutput = draw2dTargetsPipe.run(Pair.of(outMat, targetsToDraw)); - sumPipeNanosElapsed += pipeProfileNanos[6] = draw2dTargetsOnOutput.nanosElapsed; - - pipeProfileNanos[7] = 0; - pipeProfileNanos[8] = 0; - } - - } else if (settings instanceof AprilTagPipelineSettings) { - // If we are doing apriltags... - if (settings.solvePNPEnabled) { - // Draw 3d Apriltag markers (camera is calibrated and running in 3d mode) - pipeProfileNanos[5] = 0; - pipeProfileNanos[6] = 0; - - var drawOnInputResult = draw3dAprilTagsPipe.run(Pair.of(outMat, targetsToDraw)); - sumPipeNanosElapsed += pipeProfileNanos[7] = drawOnInputResult.nanosElapsed; - - pipeProfileNanos[8] = 0; - - } else { - // Draw 2d apriltag markers - var draw2dTargetsOnInput = draw2dAprilTagsPipe.run(Pair.of(outMat, targetsToDraw)); - sumPipeNanosElapsed += pipeProfileNanos[5] = draw2dTargetsOnInput.nanosElapsed; + private final OutputMatPipe outputMatPipe = new OutputMatPipe(); + private final Draw2dCrosshairPipe draw2dCrosshairPipe = new Draw2dCrosshairPipe(); + private final Draw2dTargetsPipe draw2dTargetsPipe = new Draw2dTargetsPipe(); + private final Draw3dTargetsPipe draw3dTargetsPipe = new Draw3dTargetsPipe(); + private final Draw2dAprilTagsPipe draw2dAprilTagsPipe = new Draw2dAprilTagsPipe(); + private final Draw3dAprilTagsPipe draw3dAprilTagsPipe = new Draw3dAprilTagsPipe(); + + private final Draw2dArucoPipe 
draw2dArucoPipe = new Draw2dArucoPipe(); + private final Draw3dArucoPipe draw3dArucoPipe = new Draw3dArucoPipe(); + private final CalculateFPSPipe calculateFPSPipe = new CalculateFPSPipe(); + private final ResizeImagePipe resizeImagePipe = new ResizeImagePipe(); + + private final long[] pipeProfileNanos = new long[12]; + + protected void setPipeParams( + FrameStaticProperties frameStaticProperties, AdvancedPipelineSettings settings) { + var dualOffsetValues = + new DualOffsetValues( + settings.offsetDualPointA, + settings.offsetDualPointAArea, + settings.offsetDualPointB, + settings.offsetDualPointBArea); + + var draw2DTargetsParams = + new Draw2dTargetsPipe.Draw2dTargetsParams( + settings.outputShouldDraw, + settings.outputShowMultipleTargets, + settings.streamingFrameDivisor); + draw2dTargetsPipe.setParams(draw2DTargetsParams); + + var draw2DAprilTagsParams = + new Draw2dAprilTagsPipe.Draw2dAprilTagsParams( + settings.outputShouldDraw, + settings.outputShowMultipleTargets, + settings.streamingFrameDivisor); + draw2dAprilTagsPipe.setParams(draw2DAprilTagsParams); + + var draw2DArucoParams = + new Draw2dArucoPipe.Draw2dArucoParams( + settings.outputShouldDraw, + settings.outputShowMultipleTargets, + settings.streamingFrameDivisor); + draw2dArucoPipe.setParams(draw2DArucoParams); + + var draw2dCrosshairParams = + new Draw2dCrosshairPipe.Draw2dCrosshairParams( + settings.outputShouldDraw, + settings.offsetRobotOffsetMode, + settings.offsetSinglePoint, + dualOffsetValues, + frameStaticProperties, + settings.streamingFrameDivisor, + settings.inputImageRotationMode); + draw2dCrosshairPipe.setParams(draw2dCrosshairParams); + + var draw3dTargetsParams = + new Draw3dTargetsPipe.Draw3dContoursParams( + settings.outputShouldDraw, + frameStaticProperties.cameraCalibration, + settings.targetModel, + settings.streamingFrameDivisor); + draw3dTargetsPipe.setParams(draw3dTargetsParams); + + var draw3dAprilTagsParams = + new Draw3dAprilTagsPipe.Draw3dAprilTagsParams( + 
settings.outputShouldDraw, + frameStaticProperties.cameraCalibration, + settings.targetModel, + settings.streamingFrameDivisor); + draw3dAprilTagsPipe.setParams(draw3dAprilTagsParams); + + var draw3dArucoParams = + new Draw3dArucoPipe.Draw3dArucoParams( + settings.outputShouldDraw, + frameStaticProperties.cameraCalibration, + TargetModel.k6in_16h5, + settings.streamingFrameDivisor); + draw3dArucoPipe.setParams(draw3dArucoParams); + + resizeImagePipe.setParams( + new ResizeImagePipe.ResizeImageParams(settings.streamingFrameDivisor)); + } - pipeProfileNanos[6] = 0; - pipeProfileNanos[7] = 0; - pipeProfileNanos[8] = 0; + public CVPipelineResult process( + Frame inputAndOutputFrame, + AdvancedPipelineSettings settings, + List targetsToDraw) { + setPipeParams(inputAndOutputFrame.frameStaticProperties, settings); + var inMat = inputAndOutputFrame.colorImage.getMat(); + var outMat = inputAndOutputFrame.processedImage.getMat(); + + long sumPipeNanosElapsed = 0L; + + // Resize both in place before doing any conversion + boolean inEmpty = inMat.empty(); + if (!inEmpty) + sumPipeNanosElapsed += pipeProfileNanos[0] = resizeImagePipe.run(inMat).nanosElapsed; + + boolean outEmpty = outMat.empty(); + if (!outEmpty) + sumPipeNanosElapsed += pipeProfileNanos[1] = resizeImagePipe.run(outMat).nanosElapsed; + + // Only attempt drawing on a non-empty frame + if (!outEmpty) { + // Convert single-channel HSV output mat to 3-channel BGR in preparation for streaming + if (outMat.channels() == 1) { + var outputMatPipeResult = outputMatPipe.run(outMat); + sumPipeNanosElapsed += pipeProfileNanos[2] = outputMatPipeResult.nanosElapsed; + } else { + pipeProfileNanos[2] = 0; + } + + // Draw 2D Crosshair on output + var draw2dCrosshairResultOnInput = draw2dCrosshairPipe.run(Pair.of(inMat, targetsToDraw)); + sumPipeNanosElapsed += pipeProfileNanos[3] = draw2dCrosshairResultOnInput.nanosElapsed; + + if (!(settings instanceof AprilTagPipelineSettings) + && !(settings instanceof 
ArucoPipelineSettings)) { + // If we're processing anything other than Apriltags... + var draw2dCrosshairResultOnOutput = draw2dCrosshairPipe.run(Pair.of(outMat, targetsToDraw)); + sumPipeNanosElapsed += pipeProfileNanos[4] = draw2dCrosshairResultOnOutput.nanosElapsed; + + if (settings.solvePNPEnabled) { + // Draw 3D Targets on input and output if possible + pipeProfileNanos[5] = 0; + pipeProfileNanos[6] = 0; + pipeProfileNanos[7] = 0; + + var drawOnOutputResult = draw3dTargetsPipe.run(Pair.of(outMat, targetsToDraw)); + sumPipeNanosElapsed += pipeProfileNanos[8] = drawOnOutputResult.nanosElapsed; + } else { + // Only draw 2d targets + pipeProfileNanos[5] = 0; + + var draw2dTargetsOnOutput = draw2dTargetsPipe.run(Pair.of(outMat, targetsToDraw)); + sumPipeNanosElapsed += pipeProfileNanos[6] = draw2dTargetsOnOutput.nanosElapsed; + + pipeProfileNanos[7] = 0; + pipeProfileNanos[8] = 0; + } + + } else if (settings instanceof AprilTagPipelineSettings) { + // If we are doing apriltags... + if (settings.solvePNPEnabled) { + // Draw 3d Apriltag markers (camera is calibrated and running in 3d mode) + pipeProfileNanos[5] = 0; + pipeProfileNanos[6] = 0; + + var drawOnInputResult = draw3dAprilTagsPipe.run(Pair.of(outMat, targetsToDraw)); + sumPipeNanosElapsed += pipeProfileNanos[7] = drawOnInputResult.nanosElapsed; + + pipeProfileNanos[8] = 0; + + } else { + // Draw 2d apriltag markers + var draw2dTargetsOnInput = draw2dAprilTagsPipe.run(Pair.of(outMat, targetsToDraw)); + sumPipeNanosElapsed += pipeProfileNanos[5] = draw2dTargetsOnInput.nanosElapsed; + + pipeProfileNanos[6] = 0; + pipeProfileNanos[7] = 0; + pipeProfileNanos[8] = 0; + } + } else if (settings instanceof ArucoPipelineSettings) { + if (settings.solvePNPEnabled) { + // Draw 3d Apriltag markers (camera is calibrated and running in 3d mode) + pipeProfileNanos[5] = 0; + pipeProfileNanos[6] = 0; + + var drawOnInputResult = draw3dArucoPipe.run(Pair.of(outMat, targetsToDraw)); + sumPipeNanosElapsed += pipeProfileNanos[7] = 
drawOnInputResult.nanosElapsed; + + pipeProfileNanos[8] = 0; + + } else { + // Draw 2d apriltag markers + var draw2dTargetsOnInput = draw2dArucoPipe.run(Pair.of(outMat, targetsToDraw)); + sumPipeNanosElapsed += pipeProfileNanos[5] = draw2dTargetsOnInput.nanosElapsed; + + pipeProfileNanos[6] = 0; + pipeProfileNanos[7] = 0; + pipeProfileNanos[8] = 0; + } + } } - } else if (settings instanceof ArucoPipelineSettings) { - if (settings.solvePNPEnabled) { - // Draw 3d Apriltag markers (camera is calibrated and running in 3d mode) - pipeProfileNanos[5] = 0; - pipeProfileNanos[6] = 0; - var drawOnInputResult = draw3dArucoPipe.run(Pair.of(outMat, targetsToDraw)); - sumPipeNanosElapsed += pipeProfileNanos[7] = drawOnInputResult.nanosElapsed; + var fpsResult = calculateFPSPipe.run(null); + var fps = fpsResult.output; - pipeProfileNanos[8] = 0; - - } else { - // Draw 2d apriltag markers - var draw2dTargetsOnInput = draw2dArucoPipe.run(Pair.of(outMat, targetsToDraw)); - sumPipeNanosElapsed += pipeProfileNanos[5] = draw2dTargetsOnInput.nanosElapsed; - - pipeProfileNanos[6] = 0; - pipeProfileNanos[7] = 0; - pipeProfileNanos[8] = 0; - } - } + return new CVPipelineResult( + sumPipeNanosElapsed, + fps, // Unused but here just in case + targetsToDraw, + inputAndOutputFrame); } - - var fpsResult = calculateFPSPipe.run(null); - var fps = fpsResult.output; - - return new CVPipelineResult( - sumPipeNanosElapsed, - fps, // Unused but here just in case - targetsToDraw, - inputAndOutputFrame); - } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipeline/PipelineProfiler.java b/photon-core/src/main/java/org/photonvision/vision/pipeline/PipelineProfiler.java index 93a1c27c56..57c94b0640 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipeline/PipelineProfiler.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipeline/PipelineProfiler.java @@ -22,77 +22,77 @@ import org.photonvision.common.util.math.MathUtils; public class PipelineProfiler { - private 
static boolean shouldLog; + private static boolean shouldLog; - private static final Logger reflectiveLogger = - new Logger(ReflectivePipeline.class, LogGroup.VisionModule); - private static final Logger coloredShapeLogger = - new Logger(ColoredShapePipeline.class, LogGroup.VisionModule); + private static final Logger reflectiveLogger = + new Logger(ReflectivePipeline.class, LogGroup.VisionModule); + private static final Logger coloredShapeLogger = + new Logger(ColoredShapePipeline.class, LogGroup.VisionModule); - /** - * Indices for Reflective profiling 0 - rotateImagePipe 1 - inputCopy (not a pipe) 2 - hsvPipe 3 - - * findContoursPipe 4 - speckleRejectPipe 5 - filterContoursPipe 6 - groupContoursPipe 7 - - * sortContoursPipe 8 - collect2dTargetsPipe 9 - cornerDetectionPipe 10 - solvePNPPipe (OPTIONAL) - * 11 - outputMatPipe (OPTIONAL) 12 - draw2dCrosshairPipe (on input) 13 - draw2dCrosshairPipe (on - * output) 14 - draw2dTargetsPipe (on input) 15 - draw2dTargetsPipe (on output) 16 - - * draw3dTargetsPipe (OPTIONAL, on input) 17 - draw3dTargetsPipe (OPTIONAL, on output) - */ - private static final String[] ReflectivePipeNames = - new String[] { - "RotateImage", - "HSV", - "FindContours", - "SpeckleReject", - "FilterContours", - "GroupContours", - "SortContours", - "Collect2dTargets", - "CornerDetection", - "SolvePNP", - }; + /** + * Indices for Reflective profiling 0 - rotateImagePipe 1 - inputCopy (not a pipe) 2 - hsvPipe 3 - + * findContoursPipe 4 - speckleRejectPipe 5 - filterContoursPipe 6 - groupContoursPipe 7 - + * sortContoursPipe 8 - collect2dTargetsPipe 9 - cornerDetectionPipe 10 - solvePNPPipe (OPTIONAL) + * 11 - outputMatPipe (OPTIONAL) 12 - draw2dCrosshairPipe (on input) 13 - draw2dCrosshairPipe (on + * output) 14 - draw2dTargetsPipe (on input) 15 - draw2dTargetsPipe (on output) 16 - + * draw3dTargetsPipe (OPTIONAL, on input) 17 - draw3dTargetsPipe (OPTIONAL, on output) + */ + private static final String[] ReflectivePipeNames = + new String[] { + 
"RotateImage", + "HSV", + "FindContours", + "SpeckleReject", + "FilterContours", + "GroupContours", + "SortContours", + "Collect2dTargets", + "CornerDetection", + "SolvePNP", + }; - public static final int ReflectivePipeCount = ReflectivePipeNames.length; + public static final int ReflectivePipeCount = ReflectivePipeNames.length; - protected static String getReflectiveProfileString(long[] nanos) { - if (nanos.length != ReflectivePipeCount) { - return "Invalid data"; - } + protected static String getReflectiveProfileString(long[] nanos) { + if (nanos.length != ReflectivePipeCount) { + return "Invalid data"; + } - var sb = new StringBuilder("Profiling - "); - double totalMs = 0; - for (int i = 0; i < nanos.length; i++) { - if ((i == 10 || i == 11 || i == 17 || i == 18) && nanos[i] == 0) { - continue; // skip empty pipe profiles - } + var sb = new StringBuilder("Profiling - "); + double totalMs = 0; + for (int i = 0; i < nanos.length; i++) { + if ((i == 10 || i == 11 || i == 17 || i == 18) && nanos[i] == 0) { + continue; // skip empty pipe profiles + } - sb.append(ReflectivePipeNames[i]); - sb.append(": "); - var ms = MathUtils.roundTo(nanos[i] / 1e+6, 3); - totalMs += ms; - sb.append(ms); - sb.append("ms"); - // boolean isLast = (i + 1 == 17 && nanos[i+1] == 0) || i == 18; - // if (isLast) { - // var foo = "bar"; - // } else { - sb.append(", "); - // } - } + sb.append(ReflectivePipeNames[i]); + sb.append(": "); + var ms = MathUtils.roundTo(nanos[i] / 1e+6, 3); + totalMs += ms; + sb.append(ms); + sb.append("ms"); + // boolean isLast = (i + 1 == 17 && nanos[i+1] == 0) || i == 18; + // if (isLast) { + // var foo = "bar"; + // } else { + sb.append(", "); + // } + } - sb.append("Total: "); - sb.append(MathUtils.roundTo(totalMs, 3)); - sb.append("ms"); + sb.append("Total: "); + sb.append(MathUtils.roundTo(totalMs, 3)); + sb.append("ms"); - return sb.toString(); - } + return sb.toString(); + } - public static void printReflectiveProfile(long[] nanos) { - if (shouldLog) { - 
reflectiveLogger.trace(() -> getReflectiveProfileString(nanos)); + public static void printReflectiveProfile(long[] nanos) { + if (shouldLog) { + reflectiveLogger.trace(() -> getReflectiveProfileString(nanos)); + } } - } - public static void enablePrint(boolean enable) { - shouldLog = enable; - } + public static void enablePrint(boolean enable) { + shouldLog = enable; + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipeline/PipelineType.java b/photon-core/src/main/java/org/photonvision/vision/pipeline/PipelineType.java index 58cf44615c..a2f6346b89 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipeline/PipelineType.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipeline/PipelineType.java @@ -19,18 +19,18 @@ @SuppressWarnings("rawtypes") public enum PipelineType { - Calib3d(-2, Calibrate3dPipeline.class), - DriverMode(-1, DriverModePipeline.class), - Reflective(0, ReflectivePipeline.class), - ColoredShape(1, ColoredShapePipeline.class), - AprilTag(2, AprilTagPipeline.class), - Aruco(3, ArucoPipeline.class); + Calib3d(-2, Calibrate3dPipeline.class), + DriverMode(-1, DriverModePipeline.class), + Reflective(0, ReflectivePipeline.class), + ColoredShape(1, ColoredShapePipeline.class), + AprilTag(2, AprilTagPipeline.class), + Aruco(3, ArucoPipeline.class); - public final int baseIndex; - public final Class clazz; + public final int baseIndex; + public final Class clazz; - PipelineType(int baseIndex, Class clazz) { - this.baseIndex = baseIndex; - this.clazz = clazz; - } + PipelineType(int baseIndex, Class clazz) { + this.baseIndex = baseIndex; + this.clazz = clazz; + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipeline/ReflectivePipeline.java b/photon-core/src/main/java/org/photonvision/vision/pipeline/ReflectivePipeline.java index 40e38af8b5..e961488b78 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipeline/ReflectivePipeline.java +++ 
b/photon-core/src/main/java/org/photonvision/vision/pipeline/ReflectivePipeline.java @@ -31,167 +31,167 @@ /** Represents a pipeline for tracking retro-reflective targets. */ public class ReflectivePipeline extends CVPipeline { - private final FindContoursPipe findContoursPipe = new FindContoursPipe(); - private final SpeckleRejectPipe speckleRejectPipe = new SpeckleRejectPipe(); - private final FilterContoursPipe filterContoursPipe = new FilterContoursPipe(); - private final GroupContoursPipe groupContoursPipe = new GroupContoursPipe(); - private final SortContoursPipe sortContoursPipe = new SortContoursPipe(); - private final Collect2dTargetsPipe collect2dTargetsPipe = new Collect2dTargetsPipe(); - private final CornerDetectionPipe cornerDetectionPipe = new CornerDetectionPipe(); - private final SolvePNPPipe solvePNPPipe = new SolvePNPPipe(); - private final CalculateFPSPipe calculateFPSPipe = new CalculateFPSPipe(); - - private final long[] pipeProfileNanos = new long[PipelineProfiler.ReflectivePipeCount]; - - private static final FrameThresholdType PROCESSING_TYPE = FrameThresholdType.HSV; - - public ReflectivePipeline() { - super(PROCESSING_TYPE); - settings = new ReflectivePipelineSettings(); - } - - public ReflectivePipeline(ReflectivePipelineSettings settings) { - super(PROCESSING_TYPE); - this.settings = settings; - } - - @Override - protected void setPipeParamsImpl() { - var dualOffsetValues = - new DualOffsetValues( - settings.offsetDualPointA, - settings.offsetDualPointAArea, - settings.offsetDualPointB, - settings.offsetDualPointBArea); - - // var rotateImageParams = new - // RotateImagePipe.RotateImageParams(settings.inputImageRotationMode); - // rotateImagePipe.setParams(rotateImageParams); - - // if (cameraQuirks.hasQuirk(CameraQuirk.PiCam) && LibCameraJNI.isSupported()) { - // LibCameraJNI.setThresholds( - // settings.hsvHue.getFirst() / 180d, - // settings.hsvSaturation.getFirst() / 255d, - // settings.hsvValue.getFirst() / 255d, - // 
settings.hsvHue.getSecond() / 180d, - // settings.hsvSaturation.getSecond() / 255d, - // settings.hsvValue.getSecond() / 255d); - // // LibCameraJNI.setInvertHue(settings.hueInverted); - // LibCameraJNI.setRotation(settings.inputImageRotationMode.value); - // // LibCameraJNI.setShouldCopyColor(settings.inputShouldShow); - // } else { - // var hsvParams = - // new HSVPipe.HSVParams( - // settings.hsvHue, settings.hsvSaturation, settings.hsvValue, - // settings.hueInverted); - // hsvPipe.setParams(hsvParams); - // } - - var findContoursParams = new FindContoursPipe.FindContoursParams(); - findContoursPipe.setParams(findContoursParams); - - var speckleRejectParams = - new SpeckleRejectPipe.SpeckleRejectParams(settings.contourSpecklePercentage); - speckleRejectPipe.setParams(speckleRejectParams); - - var filterContoursParams = - new FilterContoursPipe.FilterContoursParams( - settings.contourArea, - settings.contourRatio, - settings.contourFullness, - frameStaticProperties, - settings.contourFilterRangeX, - settings.contourFilterRangeY, - settings.contourTargetOrientation == TargetOrientation.Landscape); - filterContoursPipe.setParams(filterContoursParams); - - var groupContoursParams = - new GroupContoursPipe.GroupContoursParams( - settings.contourGroupingMode, settings.contourIntersection); - groupContoursPipe.setParams(groupContoursParams); - - var sortContoursParams = - new SortContoursPipe.SortContoursParams( - settings.contourSortMode, - settings.outputShowMultipleTargets ? 8 : 1, // TODO don't hardcode? 
- frameStaticProperties); - sortContoursPipe.setParams(sortContoursParams); - - var collect2dTargetsParams = - new Collect2dTargetsPipe.Collect2dTargetsParams( - settings.offsetRobotOffsetMode, - settings.offsetSinglePoint, - dualOffsetValues, - settings.contourTargetOffsetPointEdge, - settings.contourTargetOrientation, - frameStaticProperties); - collect2dTargetsPipe.setParams(collect2dTargetsParams); - - var cornerDetectionPipeParams = - new CornerDetectionPipe.CornerDetectionPipeParameters( - settings.cornerDetectionStrategy, - settings.cornerDetectionUseConvexHulls, - settings.cornerDetectionExactSideCount, - settings.cornerDetectionSideCount, - settings.cornerDetectionAccuracyPercentage); - cornerDetectionPipe.setParams(cornerDetectionPipeParams); - - var solvePNPParams = - new SolvePNPPipe.SolvePNPPipeParams( - frameStaticProperties.cameraCalibration, settings.targetModel); - solvePNPPipe.setParams(solvePNPParams); - } - - @Override - public CVPipelineResult process(Frame frame, ReflectivePipelineSettings settings) { - long sumPipeNanosElapsed = 0L; - - CVPipeResult> findContoursResult = - findContoursPipe.run(frame.processedImage.getMat()); - sumPipeNanosElapsed += pipeProfileNanos[2] = findContoursResult.nanosElapsed; - - CVPipeResult> speckleRejectResult = - speckleRejectPipe.run(findContoursResult.output); - sumPipeNanosElapsed += pipeProfileNanos[3] = speckleRejectResult.nanosElapsed; - - CVPipeResult> filterContoursResult = - filterContoursPipe.run(speckleRejectResult.output); - sumPipeNanosElapsed += pipeProfileNanos[4] = filterContoursResult.nanosElapsed; - - CVPipeResult> groupContoursResult = - groupContoursPipe.run(filterContoursResult.output); - sumPipeNanosElapsed += pipeProfileNanos[5] = groupContoursResult.nanosElapsed; - - CVPipeResult> sortContoursResult = - sortContoursPipe.run(groupContoursResult.output); - sumPipeNanosElapsed += pipeProfileNanos[6] = sortContoursResult.nanosElapsed; - - CVPipeResult> collect2dTargetsResult = - 
collect2dTargetsPipe.run(sortContoursResult.output); - sumPipeNanosElapsed += pipeProfileNanos[7] = collect2dTargetsResult.nanosElapsed; - - List targetList; - - // 3d stuff - if (settings.solvePNPEnabled) { - var cornerDetectionResult = cornerDetectionPipe.run(collect2dTargetsResult.output); - sumPipeNanosElapsed += pipeProfileNanos[8] = cornerDetectionResult.nanosElapsed; - - var solvePNPResult = solvePNPPipe.run(cornerDetectionResult.output); - sumPipeNanosElapsed += pipeProfileNanos[9] = solvePNPResult.nanosElapsed; - - targetList = solvePNPResult.output; - } else { - pipeProfileNanos[8] = 0; - pipeProfileNanos[9] = 0; - targetList = collect2dTargetsResult.output; + private final FindContoursPipe findContoursPipe = new FindContoursPipe(); + private final SpeckleRejectPipe speckleRejectPipe = new SpeckleRejectPipe(); + private final FilterContoursPipe filterContoursPipe = new FilterContoursPipe(); + private final GroupContoursPipe groupContoursPipe = new GroupContoursPipe(); + private final SortContoursPipe sortContoursPipe = new SortContoursPipe(); + private final Collect2dTargetsPipe collect2dTargetsPipe = new Collect2dTargetsPipe(); + private final CornerDetectionPipe cornerDetectionPipe = new CornerDetectionPipe(); + private final SolvePNPPipe solvePNPPipe = new SolvePNPPipe(); + private final CalculateFPSPipe calculateFPSPipe = new CalculateFPSPipe(); + + private final long[] pipeProfileNanos = new long[PipelineProfiler.ReflectivePipeCount]; + + private static final FrameThresholdType PROCESSING_TYPE = FrameThresholdType.HSV; + + public ReflectivePipeline() { + super(PROCESSING_TYPE); + settings = new ReflectivePipelineSettings(); } - var fpsResult = calculateFPSPipe.run(null); - var fps = fpsResult.output; + public ReflectivePipeline(ReflectivePipelineSettings settings) { + super(PROCESSING_TYPE); + this.settings = settings; + } + + @Override + protected void setPipeParamsImpl() { + var dualOffsetValues = + new DualOffsetValues( + 
settings.offsetDualPointA, + settings.offsetDualPointAArea, + settings.offsetDualPointB, + settings.offsetDualPointBArea); + + // var rotateImageParams = new + // RotateImagePipe.RotateImageParams(settings.inputImageRotationMode); + // rotateImagePipe.setParams(rotateImageParams); + + // if (cameraQuirks.hasQuirk(CameraQuirk.PiCam) && LibCameraJNI.isSupported()) { + // LibCameraJNI.setThresholds( + // settings.hsvHue.getFirst() / 180d, + // settings.hsvSaturation.getFirst() / 255d, + // settings.hsvValue.getFirst() / 255d, + // settings.hsvHue.getSecond() / 180d, + // settings.hsvSaturation.getSecond() / 255d, + // settings.hsvValue.getSecond() / 255d); + // // LibCameraJNI.setInvertHue(settings.hueInverted); + // LibCameraJNI.setRotation(settings.inputImageRotationMode.value); + // // LibCameraJNI.setShouldCopyColor(settings.inputShouldShow); + // } else { + // var hsvParams = + // new HSVPipe.HSVParams( + // settings.hsvHue, settings.hsvSaturation, settings.hsvValue, + // settings.hueInverted); + // hsvPipe.setParams(hsvParams); + // } + + var findContoursParams = new FindContoursPipe.FindContoursParams(); + findContoursPipe.setParams(findContoursParams); + + var speckleRejectParams = + new SpeckleRejectPipe.SpeckleRejectParams(settings.contourSpecklePercentage); + speckleRejectPipe.setParams(speckleRejectParams); + + var filterContoursParams = + new FilterContoursPipe.FilterContoursParams( + settings.contourArea, + settings.contourRatio, + settings.contourFullness, + frameStaticProperties, + settings.contourFilterRangeX, + settings.contourFilterRangeY, + settings.contourTargetOrientation == TargetOrientation.Landscape); + filterContoursPipe.setParams(filterContoursParams); + + var groupContoursParams = + new GroupContoursPipe.GroupContoursParams( + settings.contourGroupingMode, settings.contourIntersection); + groupContoursPipe.setParams(groupContoursParams); + + var sortContoursParams = + new SortContoursPipe.SortContoursParams( + settings.contourSortMode, + 
settings.outputShowMultipleTargets ? 8 : 1, // TODO don't hardcode? + frameStaticProperties); + sortContoursPipe.setParams(sortContoursParams); + + var collect2dTargetsParams = + new Collect2dTargetsPipe.Collect2dTargetsParams( + settings.offsetRobotOffsetMode, + settings.offsetSinglePoint, + dualOffsetValues, + settings.contourTargetOffsetPointEdge, + settings.contourTargetOrientation, + frameStaticProperties); + collect2dTargetsPipe.setParams(collect2dTargetsParams); + + var cornerDetectionPipeParams = + new CornerDetectionPipe.CornerDetectionPipeParameters( + settings.cornerDetectionStrategy, + settings.cornerDetectionUseConvexHulls, + settings.cornerDetectionExactSideCount, + settings.cornerDetectionSideCount, + settings.cornerDetectionAccuracyPercentage); + cornerDetectionPipe.setParams(cornerDetectionPipeParams); + + var solvePNPParams = + new SolvePNPPipe.SolvePNPPipeParams( + frameStaticProperties.cameraCalibration, settings.targetModel); + solvePNPPipe.setParams(solvePNPParams); + } + + @Override + public CVPipelineResult process(Frame frame, ReflectivePipelineSettings settings) { + long sumPipeNanosElapsed = 0L; + + CVPipeResult> findContoursResult = + findContoursPipe.run(frame.processedImage.getMat()); + sumPipeNanosElapsed += pipeProfileNanos[2] = findContoursResult.nanosElapsed; + + CVPipeResult> speckleRejectResult = + speckleRejectPipe.run(findContoursResult.output); + sumPipeNanosElapsed += pipeProfileNanos[3] = speckleRejectResult.nanosElapsed; + + CVPipeResult> filterContoursResult = + filterContoursPipe.run(speckleRejectResult.output); + sumPipeNanosElapsed += pipeProfileNanos[4] = filterContoursResult.nanosElapsed; + + CVPipeResult> groupContoursResult = + groupContoursPipe.run(filterContoursResult.output); + sumPipeNanosElapsed += pipeProfileNanos[5] = groupContoursResult.nanosElapsed; - PipelineProfiler.printReflectiveProfile(pipeProfileNanos); + CVPipeResult> sortContoursResult = + sortContoursPipe.run(groupContoursResult.output); + 
sumPipeNanosElapsed += pipeProfileNanos[6] = sortContoursResult.nanosElapsed; - return new CVPipelineResult(sumPipeNanosElapsed, fps, targetList, frame); - } + CVPipeResult> collect2dTargetsResult = + collect2dTargetsPipe.run(sortContoursResult.output); + sumPipeNanosElapsed += pipeProfileNanos[7] = collect2dTargetsResult.nanosElapsed; + + List targetList; + + // 3d stuff + if (settings.solvePNPEnabled) { + var cornerDetectionResult = cornerDetectionPipe.run(collect2dTargetsResult.output); + sumPipeNanosElapsed += pipeProfileNanos[8] = cornerDetectionResult.nanosElapsed; + + var solvePNPResult = solvePNPPipe.run(cornerDetectionResult.output); + sumPipeNanosElapsed += pipeProfileNanos[9] = solvePNPResult.nanosElapsed; + + targetList = solvePNPResult.output; + } else { + pipeProfileNanos[8] = 0; + pipeProfileNanos[9] = 0; + targetList = collect2dTargetsResult.output; + } + + var fpsResult = calculateFPSPipe.run(null); + var fps = fpsResult.output; + + PipelineProfiler.printReflectiveProfile(pipeProfileNanos); + + return new CVPipelineResult(sumPipeNanosElapsed, fps, targetList, frame); + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipeline/ReflectivePipelineSettings.java b/photon-core/src/main/java/org/photonvision/vision/pipeline/ReflectivePipelineSettings.java index 7ac8353d8f..b06dcaddef 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipeline/ReflectivePipelineSettings.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipeline/ReflectivePipelineSettings.java @@ -21,13 +21,13 @@ @JsonTypeName("ReflectivePipelineSettings") public class ReflectivePipelineSettings extends AdvancedPipelineSettings { - public double contourFilterRangeX = 2; - public double contourFilterRangeY = 2; + public double contourFilterRangeX = 2; + public double contourFilterRangeY = 2; - public ReflectivePipelineSettings() { - super(); - pipelineType = PipelineType.Reflective; - cameraExposure = 6; - cameraGain = 20; - } + public 
ReflectivePipelineSettings() { + super(); + pipelineType = PipelineType.Reflective; + cameraExposure = 6; + cameraGain = 20; + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipeline/UICalibrationData.java b/photon-core/src/main/java/org/photonvision/vision/pipeline/UICalibrationData.java index ed90581d14..287e271974 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipeline/UICalibrationData.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipeline/UICalibrationData.java @@ -20,70 +20,70 @@ import java.util.Map; public class UICalibrationData { - public final int videoModeIndex; - public int count; - public final int minCount; - public final boolean hasEnough; - public final double squareSizeIn; - public final int patternWidth; - public final int patternHeight; - public final BoardType boardType; // + public final int videoModeIndex; + public int count; + public final int minCount; + public final boolean hasEnough; + public final double squareSizeIn; + public final int patternWidth; + public final int patternHeight; + public final BoardType boardType; // - public UICalibrationData( - int count, - int videoModeIndex, - int minCount, - boolean hasEnough, - double squareSizeIn, - int patternWidth, - int patternHeight, - BoardType boardType) { - this.count = count; - this.minCount = minCount; - this.videoModeIndex = videoModeIndex; - this.hasEnough = hasEnough; - this.squareSizeIn = squareSizeIn; - this.patternWidth = patternWidth; - this.patternHeight = patternHeight; - this.boardType = boardType; - } + public UICalibrationData( + int count, + int videoModeIndex, + int minCount, + boolean hasEnough, + double squareSizeIn, + int patternWidth, + int patternHeight, + BoardType boardType) { + this.count = count; + this.minCount = minCount; + this.videoModeIndex = videoModeIndex; + this.hasEnough = hasEnough; + this.squareSizeIn = squareSizeIn; + this.patternWidth = patternWidth; + this.patternHeight = patternHeight; + 
this.boardType = boardType; + } - public enum BoardType { - CHESSBOARD, - DOTBOARD - } + public enum BoardType { + CHESSBOARD, + DOTBOARD + } - public static UICalibrationData fromMap(Map map) { - return new UICalibrationData( - ((Number) map.get("count")).intValue(), - ((Number) map.get("videoModeIndex")).intValue(), - ((Number) map.get("minCount")).intValue(), - (boolean) map.get("hasEnough"), - ((Number) map.get("squareSizeIn")).doubleValue(), - ((Number) map.get("patternWidth")).intValue(), - ((Number) map.get("patternHeight")).intValue(), - BoardType.values()[(int) map.get("boardType")]); - } + public static UICalibrationData fromMap(Map map) { + return new UICalibrationData( + ((Number) map.get("count")).intValue(), + ((Number) map.get("videoModeIndex")).intValue(), + ((Number) map.get("minCount")).intValue(), + (boolean) map.get("hasEnough"), + ((Number) map.get("squareSizeIn")).doubleValue(), + ((Number) map.get("patternWidth")).intValue(), + ((Number) map.get("patternHeight")).intValue(), + BoardType.values()[(int) map.get("boardType")]); + } - @Override - public String toString() { - return "UICalibrationData{" - + "videoModeIndex=" - + videoModeIndex - + ", count=" - + count - + ", minCount=" - + minCount - + ", hasEnough=" - + hasEnough - + ", squareSizeIn=" - + squareSizeIn - + ", patternWidth=" - + patternWidth - + ", patternHeight=" - + patternHeight - + ", boardType=" - + boardType - + '}'; - } + @Override + public String toString() { + return "UICalibrationData{" + + "videoModeIndex=" + + videoModeIndex + + ", count=" + + count + + ", minCount=" + + minCount + + ", hasEnough=" + + hasEnough + + ", squareSizeIn=" + + squareSizeIn + + ", patternWidth=" + + patternWidth + + ", patternHeight=" + + patternHeight + + ", boardType=" + + boardType + + '}'; + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipeline/result/BytePackable.java b/photon-core/src/main/java/org/photonvision/vision/pipeline/result/BytePackable.java index 
dd600990d3..c729d35f65 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipeline/result/BytePackable.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipeline/result/BytePackable.java @@ -19,123 +19,123 @@ @SuppressWarnings("PointlessBitwiseExpression") public abstract class BytePackable { - public abstract byte[] toByteArray(); - - public abstract void fromByteArray(byte[] src); - - protected int bufferPosition = 0; - - public int getBufferPosition() { - return bufferPosition; - } - - public void resetBufferPosition() { - bufferPosition = 0; - } - - protected void bufferData(byte[] src, byte[] dest) { - System.arraycopy(src, 0, dest, bufferPosition, src.length); - bufferPosition += src.length; - } - - protected void bufferData(byte src, byte[] dest) { - System.arraycopy(new byte[] {src}, 0, dest, bufferPosition, 1); - bufferPosition++; - } - - protected void bufferData(int src, byte[] dest) { - System.arraycopy(toBytes(src), 0, dest, bufferPosition, Integer.BYTES); - bufferPosition += Integer.BYTES; - } - - protected void bufferData(double src, byte[] dest) { - System.arraycopy(toBytes(src), 0, dest, bufferPosition, Double.BYTES); - bufferPosition += Double.BYTES; - } - - protected void bufferData(boolean src, byte[] dest) { - System.arraycopy(toBytes(src), 0, dest, bufferPosition, 1); - bufferPosition += 1; - } - - protected boolean unbufferBoolean(byte[] src) { - return toBoolean(src, bufferPosition++); - } - - protected byte unbufferByte(byte[] src) { - var value = src[bufferPosition]; - bufferPosition++; - return value; - } - - protected byte[] unbufferBytes(byte[] src, int len) { - var bytes = new byte[len]; - System.arraycopy(src, bufferPosition, bytes, 0, len); - return bytes; - } - - protected int unbufferInt(byte[] src) { - var value = toInt(src, bufferPosition); - bufferPosition += Integer.BYTES; - return value; - } - - protected double unbufferDouble(byte[] src) { - var value = toDouble(src, bufferPosition); - bufferPosition += 
Double.BYTES; - return value; - } - - private static boolean toBoolean(byte[] src, int pos) { - return src[pos] != 0; - } - - private static int toInt(byte[] src, int pos) { - return (0xff & src[pos]) << 24 - | (0xff & src[pos + 1]) << 16 - | (0xff & src[pos + 2]) << 8 - | (0xff & src[pos + 3]) << 0; - } - - private static long toLong(byte[] src, int pos) { - return (long) (0xff & src[pos]) << 56 - | (long) (0xff & src[pos + 1]) << 48 - | (long) (0xff & src[pos + 2]) << 40 - | (long) (0xff & src[pos + 3]) << 32 - | (long) (0xff & src[pos + 4]) << 24 - | (long) (0xff & src[pos + 5]) << 16 - | (long) (0xff & src[pos + 6]) << 8 - | (long) (0xff & src[pos + 7]) << 0; - } - - private static double toDouble(byte[] src, int pos) { - return Double.longBitsToDouble(toLong(src, pos)); - } - - protected byte[] toBytes(double value) { - long data = Double.doubleToRawLongBits(value); - return new byte[] { - (byte) ((data >> 56) & 0xff), - (byte) ((data >> 48) & 0xff), - (byte) ((data >> 40) & 0xff), - (byte) ((data >> 32) & 0xff), - (byte) ((data >> 24) & 0xff), - (byte) ((data >> 16) & 0xff), - (byte) ((data >> 8) & 0xff), - (byte) ((data >> 0) & 0xff), - }; - } - - protected byte[] toBytes(int value) { - return new byte[] { - (byte) ((value >> 24) & 0xff), - (byte) ((value >> 16) & 0xff), - (byte) ((value >> 8) & 0xff), - (byte) ((value >> 0) & 0xff) - }; - } - - protected byte[] toBytes(boolean value) { - return new byte[] {(byte) (value ? 
1 : 0)}; - } + public abstract byte[] toByteArray(); + + public abstract void fromByteArray(byte[] src); + + protected int bufferPosition = 0; + + public int getBufferPosition() { + return bufferPosition; + } + + public void resetBufferPosition() { + bufferPosition = 0; + } + + protected void bufferData(byte[] src, byte[] dest) { + System.arraycopy(src, 0, dest, bufferPosition, src.length); + bufferPosition += src.length; + } + + protected void bufferData(byte src, byte[] dest) { + System.arraycopy(new byte[] {src}, 0, dest, bufferPosition, 1); + bufferPosition++; + } + + protected void bufferData(int src, byte[] dest) { + System.arraycopy(toBytes(src), 0, dest, bufferPosition, Integer.BYTES); + bufferPosition += Integer.BYTES; + } + + protected void bufferData(double src, byte[] dest) { + System.arraycopy(toBytes(src), 0, dest, bufferPosition, Double.BYTES); + bufferPosition += Double.BYTES; + } + + protected void bufferData(boolean src, byte[] dest) { + System.arraycopy(toBytes(src), 0, dest, bufferPosition, 1); + bufferPosition += 1; + } + + protected boolean unbufferBoolean(byte[] src) { + return toBoolean(src, bufferPosition++); + } + + protected byte unbufferByte(byte[] src) { + var value = src[bufferPosition]; + bufferPosition++; + return value; + } + + protected byte[] unbufferBytes(byte[] src, int len) { + var bytes = new byte[len]; + System.arraycopy(src, bufferPosition, bytes, 0, len); + return bytes; + } + + protected int unbufferInt(byte[] src) { + var value = toInt(src, bufferPosition); + bufferPosition += Integer.BYTES; + return value; + } + + protected double unbufferDouble(byte[] src) { + var value = toDouble(src, bufferPosition); + bufferPosition += Double.BYTES; + return value; + } + + private static boolean toBoolean(byte[] src, int pos) { + return src[pos] != 0; + } + + private static int toInt(byte[] src, int pos) { + return (0xff & src[pos]) << 24 + | (0xff & src[pos + 1]) << 16 + | (0xff & src[pos + 2]) << 8 + | (0xff & src[pos + 3]) << 0; + 
} + + private static long toLong(byte[] src, int pos) { + return (long) (0xff & src[pos]) << 56 + | (long) (0xff & src[pos + 1]) << 48 + | (long) (0xff & src[pos + 2]) << 40 + | (long) (0xff & src[pos + 3]) << 32 + | (long) (0xff & src[pos + 4]) << 24 + | (long) (0xff & src[pos + 5]) << 16 + | (long) (0xff & src[pos + 6]) << 8 + | (long) (0xff & src[pos + 7]) << 0; + } + + private static double toDouble(byte[] src, int pos) { + return Double.longBitsToDouble(toLong(src, pos)); + } + + protected byte[] toBytes(double value) { + long data = Double.doubleToRawLongBits(value); + return new byte[] { + (byte) ((data >> 56) & 0xff), + (byte) ((data >> 48) & 0xff), + (byte) ((data >> 40) & 0xff), + (byte) ((data >> 32) & 0xff), + (byte) ((data >> 24) & 0xff), + (byte) ((data >> 16) & 0xff), + (byte) ((data >> 8) & 0xff), + (byte) ((data >> 0) & 0xff), + }; + } + + protected byte[] toBytes(int value) { + return new byte[] { + (byte) ((value >> 24) & 0xff), + (byte) ((value >> 16) & 0xff), + (byte) ((value >> 8) & 0xff), + (byte) ((value >> 0) & 0xff) + }; + } + + protected byte[] toBytes(boolean value) { + return new byte[] {(byte) (value ? 
1 : 0)}; + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipeline/result/CVPipelineResult.java b/photon-core/src/main/java/org/photonvision/vision/pipeline/result/CVPipelineResult.java index 000611461e..5993cc9531 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipeline/result/CVPipelineResult.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipeline/result/CVPipelineResult.java @@ -26,71 +26,71 @@ import org.photonvision.vision.target.TrackedTarget; public class CVPipelineResult implements Releasable { - private long imageCaptureTimestampNanos; - public final double processingNanos; - public final double fps; - public final List targets; - public final Frame inputAndOutputFrame; - public MultiTargetPNPResults multiTagResult; + private long imageCaptureTimestampNanos; + public final double processingNanos; + public final double fps; + public final List targets; + public final Frame inputAndOutputFrame; + public MultiTargetPNPResults multiTagResult; - public CVPipelineResult( - double processingNanos, double fps, List targets, Frame inputFrame) { - this(processingNanos, fps, targets, new MultiTargetPNPResults(), inputFrame); - } + public CVPipelineResult( + double processingNanos, double fps, List targets, Frame inputFrame) { + this(processingNanos, fps, targets, new MultiTargetPNPResults(), inputFrame); + } - public CVPipelineResult( - double processingNanos, - double fps, - List targets, - MultiTargetPNPResults multiTagResults, - Frame inputFrame) { - this.processingNanos = processingNanos; - this.fps = fps; - this.targets = targets != null ? targets : Collections.emptyList(); - this.multiTagResult = multiTagResults; + public CVPipelineResult( + double processingNanos, + double fps, + List targets, + MultiTargetPNPResults multiTagResults, + Frame inputFrame) { + this.processingNanos = processingNanos; + this.fps = fps; + this.targets = targets != null ? 
targets : Collections.emptyList(); + this.multiTagResult = multiTagResults; - this.inputAndOutputFrame = inputFrame; - } + this.inputAndOutputFrame = inputFrame; + } - public CVPipelineResult( - double processingNanos, - double fps, - List targets, - MultiTargetPNPResults multiTagResults) { - this(processingNanos, fps, targets, multiTagResults, null); - } + public CVPipelineResult( + double processingNanos, + double fps, + List targets, + MultiTargetPNPResults multiTagResults) { + this(processingNanos, fps, targets, multiTagResults, null); + } - public boolean hasTargets() { - return !targets.isEmpty(); - } + public boolean hasTargets() { + return !targets.isEmpty(); + } - public void release() { - for (TrackedTarget tt : targets) { - tt.release(); + public void release() { + for (TrackedTarget tt : targets) { + tt.release(); + } + if (inputAndOutputFrame != null) inputAndOutputFrame.release(); } - if (inputAndOutputFrame != null) inputAndOutputFrame.release(); - } - /** - * Get the latency between now (wpi::Now) and the time at which the image was captured. FOOTGUN: - * the latency is relative to the time at which this method is called. Waiting to call this method - * will change the latency this method returns. - */ - @Deprecated - public double getLatencyMillis() { - var now = MathUtils.wpiNanoTime(); - return MathUtils.nanosToMillis(now - imageCaptureTimestampNanos); - } + /** + * Get the latency between now (wpi::Now) and the time at which the image was captured. FOOTGUN: + * the latency is relative to the time at which this method is called. Waiting to call this method + * will change the latency this method returns. 
+ */ + @Deprecated + public double getLatencyMillis() { + var now = MathUtils.wpiNanoTime(); + return MathUtils.nanosToMillis(now - imageCaptureTimestampNanos); + } - public double getProcessingMillis() { - return MathUtils.nanosToMillis(processingNanos); - } + public double getProcessingMillis() { + return MathUtils.nanosToMillis(processingNanos); + } - public long getImageCaptureTimestampNanos() { - return imageCaptureTimestampNanos; - } + public long getImageCaptureTimestampNanos() { + return imageCaptureTimestampNanos; + } - public void setImageCaptureTimestampNanos(long imageCaptureTimestampNanos) { - this.imageCaptureTimestampNanos = imageCaptureTimestampNanos; - } + public void setImageCaptureTimestampNanos(long imageCaptureTimestampNanos) { + this.imageCaptureTimestampNanos = imageCaptureTimestampNanos; + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/pipeline/result/DriverModePipelineResult.java b/photon-core/src/main/java/org/photonvision/vision/pipeline/result/DriverModePipelineResult.java index c87f0b746e..609a3f4483 100644 --- a/photon-core/src/main/java/org/photonvision/vision/pipeline/result/DriverModePipelineResult.java +++ b/photon-core/src/main/java/org/photonvision/vision/pipeline/result/DriverModePipelineResult.java @@ -21,7 +21,7 @@ import org.photonvision.vision.frame.Frame; public class DriverModePipelineResult extends CVPipelineResult { - public DriverModePipelineResult(double latencyNanos, double fps, Frame outputFrame) { - super(latencyNanos, fps, List.of(), outputFrame); - } + public DriverModePipelineResult(double latencyNanos, double fps, Frame outputFrame) { + super(latencyNanos, fps, List.of(), outputFrame); + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/processes/Data.java b/photon-core/src/main/java/org/photonvision/vision/processes/Data.java index b4f6e3d8f5..c146a0e275 100644 --- a/photon-core/src/main/java/org/photonvision/vision/processes/Data.java +++ 
b/photon-core/src/main/java/org/photonvision/vision/processes/Data.java @@ -22,10 +22,10 @@ // TODO replace with CTT's data class public class Data implements Releasable { - public CVPipelineResult result; + public CVPipelineResult result; - @Override - public void release() { - result.release(); - } + @Override + public void release() { + result.release(); + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/processes/PipelineManager.java b/photon-core/src/main/java/org/photonvision/vision/processes/PipelineManager.java index 76932ae277..e4f6df3a97 100644 --- a/photon-core/src/main/java/org/photonvision/vision/processes/PipelineManager.java +++ b/photon-core/src/main/java/org/photonvision/vision/processes/PipelineManager.java @@ -31,420 +31,420 @@ @SuppressWarnings({"rawtypes", "unused"}) public class PipelineManager { - private static final Logger logger = new Logger(PipelineManager.class, LogGroup.VisionModule); - - public static final int DRIVERMODE_INDEX = -1; - public static final int CAL_3D_INDEX = -2; - - protected final List userPipelineSettings; - protected final Calibrate3dPipeline calibration3dPipeline = new Calibrate3dPipeline(); - protected final DriverModePipeline driverModePipeline = new DriverModePipeline(); - - /** Index of the currently active pipeline. Defaults to 0. */ - private int currentPipelineIndex = 0; - - /** The currently active pipeline. */ - private CVPipeline currentUserPipeline = driverModePipeline; - - /** - * Index of the last active user-created pipeline.
- *
- * Used only when switching from any of the built-in pipelines back to a user-created pipeline. - */ - private int lastUserPipelineIdx; - - /** - * Creates a PipelineManager with a DriverModePipeline, a Calibration3dPipeline, and all provided - * pipelines. - * - * @param userPipelines Pipelines to add to the manager. - */ - public PipelineManager( - DriverModePipelineSettings driverSettings, List userPipelines) { - this.userPipelineSettings = new ArrayList<>(userPipelines); - // This is to respect the default res idx for vendor cameras - - this.driverModePipeline.setSettings(driverSettings); - - if (userPipelines.isEmpty()) addPipeline(PipelineType.Reflective); - } - - public PipelineManager(CameraConfiguration config) { - this(config.driveModeSettings, config.pipelineSettings); - } - - /** - * Get the settings for a pipeline by index. - * - * @param index Index of pipeline whose settings need getting. - * @return The gotten settings of the pipeline whose index was provided. - */ - public CVPipelineSettings getPipelineSettings(int index) { - if (index < 0) { - switch (index) { - case DRIVERMODE_INDEX: - return driverModePipeline.getSettings(); - case CAL_3D_INDEX: - return calibration3dPipeline.getSettings(); - } + private static final Logger logger = new Logger(PipelineManager.class, LogGroup.VisionModule); + + public static final int DRIVERMODE_INDEX = -1; + public static final int CAL_3D_INDEX = -2; + + protected final List userPipelineSettings; + protected final Calibrate3dPipeline calibration3dPipeline = new Calibrate3dPipeline(); + protected final DriverModePipeline driverModePipeline = new DriverModePipeline(); + + /** Index of the currently active pipeline. Defaults to 0. */ + private int currentPipelineIndex = 0; + + /** The currently active pipeline. */ + private CVPipeline currentUserPipeline = driverModePipeline; + + /** + * Index of the last active user-created pipeline.
+ *
+ * Used only when switching from any of the built-in pipelines back to a user-created pipeline. + */ + private int lastUserPipelineIdx; + + /** + * Creates a PipelineManager with a DriverModePipeline, a Calibration3dPipeline, and all provided + * pipelines. + * + * @param userPipelines Pipelines to add to the manager. + */ + public PipelineManager( + DriverModePipelineSettings driverSettings, List userPipelines) { + this.userPipelineSettings = new ArrayList<>(userPipelines); + // This is to respect the default res idx for vendor cameras + + this.driverModePipeline.setSettings(driverSettings); + + if (userPipelines.isEmpty()) addPipeline(PipelineType.Reflective); } - for (var setting : userPipelineSettings) { - if (setting.pipelineIndex == index) return setting; + public PipelineManager(CameraConfiguration config) { + this(config.driveModeSettings, config.pipelineSettings); } - return null; - } - - /** - * Get the settings for a pipeline by index. - * - * @param index Index of pipeline whose nickname needs getting. - * @return the nickname of the pipeline whose index was provided. - */ - public String getPipelineNickname(int index) { - if (index < 0) { - switch (index) { - case DRIVERMODE_INDEX: - return driverModePipeline.getSettings().pipelineNickname; - case CAL_3D_INDEX: - return calibration3dPipeline.getSettings().pipelineNickname; - } + + /** + * Get the settings for a pipeline by index. + * + * @param index Index of pipeline whose settings need getting. + * @return The gotten settings of the pipeline whose index was provided. 
+ */ + public CVPipelineSettings getPipelineSettings(int index) { + if (index < 0) { + switch (index) { + case DRIVERMODE_INDEX: + return driverModePipeline.getSettings(); + case CAL_3D_INDEX: + return calibration3dPipeline.getSettings(); + } + } + + for (var setting : userPipelineSettings) { + if (setting.pipelineIndex == index) return setting; + } + return null; } - for (var setting : userPipelineSettings) { - if (setting.pipelineIndex == index) return setting.pipelineNickname; + /** + * Get the settings for a pipeline by index. + * + * @param index Index of pipeline whose nickname needs getting. + * @return the nickname of the pipeline whose index was provided. + */ + public String getPipelineNickname(int index) { + if (index < 0) { + switch (index) { + case DRIVERMODE_INDEX: + return driverModePipeline.getSettings().pipelineNickname; + case CAL_3D_INDEX: + return calibration3dPipeline.getSettings().pipelineNickname; + } + } + + for (var setting : userPipelineSettings) { + if (setting.pipelineIndex == index) return setting.pipelineNickname; + } + return null; } - return null; - } - - /** - * Gets a list of nicknames for all user pipelines - * - * @return The list of nicknames for all user pipelines - */ - public List getPipelineNicknames() { - List ret = new ArrayList<>(); - for (var p : userPipelineSettings) { - ret.add(p.pipelineNickname); + + /** + * Gets a list of nicknames for all user pipelines + * + * @return The list of nicknames for all user pipelines + */ + public List getPipelineNicknames() { + List ret = new ArrayList<>(); + for (var p : userPipelineSettings) { + ret.add(p.pipelineNickname); + } + return ret; } - return ret; - } - - /** - * Gets the index of the currently active pipeline - * - * @return The index of the currently active pipeline - */ - public int getCurrentPipelineIndex() { - return currentPipelineIndex; - } - - /** - * Get the currently active pipeline. - * - * @return The currently active pipeline. 
- */ - public CVPipeline getCurrentPipeline() { - if (currentPipelineIndex < 0) { - switch (currentPipelineIndex) { - case CAL_3D_INDEX: - return calibration3dPipeline; - case DRIVERMODE_INDEX: - return driverModePipeline; - } + + /** + * Gets the index of the currently active pipeline + * + * @return The index of the currently active pipeline + */ + public int getCurrentPipelineIndex() { + return currentPipelineIndex; } - // Just return the current user pipeline, we're not on aa built-in one - return currentUserPipeline; - } - - /** - * Get the currently active pipelines settings - * - * @return The currently active pipelines settings - */ - public CVPipelineSettings getCurrentPipelineSettings() { - return getPipelineSettings(currentPipelineIndex); - } - - /** - * Internal method for setting the active pipeline.
- *
- * All externally accessible methods that intend to change the active pipeline MUST go through - * here to ensure all proper steps are taken. - * - * @param newIndex Index of pipeline to be active - */ - private void setPipelineInternal(int newIndex) { - if (newIndex < 0 && currentPipelineIndex >= 0) { - // Transitioning to a built-in pipe, save off the current user one - lastUserPipelineIdx = currentPipelineIndex; + /** + * Get the currently active pipeline. + * + * @return The currently active pipeline. + */ + public CVPipeline getCurrentPipeline() { + if (currentPipelineIndex < 0) { + switch (currentPipelineIndex) { + case CAL_3D_INDEX: + return calibration3dPipeline; + case DRIVERMODE_INDEX: + return driverModePipeline; + } + } + + // Just return the current user pipeline, we're not on aa built-in one + return currentUserPipeline; } - if (userPipelineSettings.size() - 1 < newIndex) { - logger.warn("User attempted to set index to non-existent pipeline!"); - return; + /** + * Get the currently active pipelines settings + * + * @return The currently active pipelines settings + */ + public CVPipelineSettings getCurrentPipelineSettings() { + return getPipelineSettings(currentPipelineIndex); } - currentPipelineIndex = newIndex; - if (newIndex >= 0) { - var desiredPipelineSettings = userPipelineSettings.get(currentPipelineIndex); - switch (desiredPipelineSettings.pipelineType) { - case Reflective: - logger.debug("Creating Reflective pipeline"); - currentUserPipeline = - new ReflectivePipeline((ReflectivePipelineSettings) desiredPipelineSettings); - break; - case ColoredShape: - logger.debug("Creating ColoredShape pipeline"); - currentUserPipeline = - new ColoredShapePipeline((ColoredShapePipelineSettings) desiredPipelineSettings); - break; - case AprilTag: - logger.debug("Creating AprilTag pipeline"); - currentUserPipeline = - new AprilTagPipeline((AprilTagPipelineSettings) desiredPipelineSettings); - break; - - case Aruco: - logger.debug("Creating Aruco Pipeline"); 
- currentUserPipeline = new ArucoPipeline((ArucoPipelineSettings) desiredPipelineSettings); - break; - default: - // Can be calib3d or drivermode, both of which are special cases - break; - } + /** + * Internal method for setting the active pipeline.
+ *
+ * All externally accessible methods that intend to change the active pipeline MUST go through + * here to ensure all proper steps are taken. + * + * @param newIndex Index of pipeline to be active + */ + private void setPipelineInternal(int newIndex) { + if (newIndex < 0 && currentPipelineIndex >= 0) { + // Transitioning to a built-in pipe, save off the current user one + lastUserPipelineIdx = currentPipelineIndex; + } + + if (userPipelineSettings.size() - 1 < newIndex) { + logger.warn("User attempted to set index to non-existent pipeline!"); + return; + } + + currentPipelineIndex = newIndex; + if (newIndex >= 0) { + var desiredPipelineSettings = userPipelineSettings.get(currentPipelineIndex); + switch (desiredPipelineSettings.pipelineType) { + case Reflective: + logger.debug("Creating Reflective pipeline"); + currentUserPipeline = + new ReflectivePipeline((ReflectivePipelineSettings) desiredPipelineSettings); + break; + case ColoredShape: + logger.debug("Creating ColoredShape pipeline"); + currentUserPipeline = + new ColoredShapePipeline((ColoredShapePipelineSettings) desiredPipelineSettings); + break; + case AprilTag: + logger.debug("Creating AprilTag pipeline"); + currentUserPipeline = + new AprilTagPipeline((AprilTagPipelineSettings) desiredPipelineSettings); + break; + + case Aruco: + logger.debug("Creating Aruco Pipeline"); + currentUserPipeline = new ArucoPipeline((ArucoPipelineSettings) desiredPipelineSettings); + break; + default: + // Can be calib3d or drivermode, both of which are special cases + break; + } + } + + DataChangeService.getInstance() + .publishEvent( + new OutgoingUIEvent<>( + "fullsettings", ConfigManager.getInstance().getConfig().toHashMap())); } - DataChangeService.getInstance() - .publishEvent( - new OutgoingUIEvent<>( - "fullsettings", ConfigManager.getInstance().getConfig().toHashMap())); - } - - /** - * Enters or exits calibration mode based on the parameter.
- *
- * Exiting returns to the last used user pipeline. - * - * @param wantsCalibration True to enter calibration mode, false to exit calibration mode. - */ - public void setCalibrationMode(boolean wantsCalibration) { - if (!wantsCalibration) calibration3dPipeline.finishCalibration(); - setPipelineInternal(wantsCalibration ? CAL_3D_INDEX : lastUserPipelineIdx); - } - - /** - * Enters or exits driver mode based on the parameter.
- *
- * Exiting returns to the last used user pipeline. - * - * @param state True to enter driver mode, false to exit driver mode. - */ - public void setDriverMode(boolean state) { - setPipelineInternal(state ? DRIVERMODE_INDEX : lastUserPipelineIdx); - } - - /** - * Returns whether driver mode is active. - * - * @return Whether driver mode is active. - */ - public boolean getDriverMode() { - return currentPipelineIndex == DRIVERMODE_INDEX; - } - - public static final Comparator PipelineSettingsIndexComparator = - Comparator.comparingInt(o -> o.pipelineIndex); - - /** - * Sorts the pipeline list by index, and reassigns their indexes to match the new order.
- *
- * I don't like this, but I have no other ideas, and it works so - */ - private void reassignIndexes() { - userPipelineSettings.sort(PipelineSettingsIndexComparator); - for (int i = 0; i < userPipelineSettings.size(); i++) { - userPipelineSettings.get(i).pipelineIndex = i; + /** + * Enters or exits calibration mode based on the parameter.
+ *
+ * Exiting returns to the last used user pipeline. + * + * @param wantsCalibration True to enter calibration mode, false to exit calibration mode. + */ + public void setCalibrationMode(boolean wantsCalibration) { + if (!wantsCalibration) calibration3dPipeline.finishCalibration(); + setPipelineInternal(wantsCalibration ? CAL_3D_INDEX : lastUserPipelineIdx); } - } - public CVPipelineSettings addPipeline(PipelineType type) { - return addPipeline(type, "New Pipeline"); - } + /** + * Enters or exits driver mode based on the parameter.
+ *
+ * Exiting returns to the last used user pipeline. + * + * @param state True to enter driver mode, false to exit driver mode. + */ + public void setDriverMode(boolean state) { + setPipelineInternal(state ? DRIVERMODE_INDEX : lastUserPipelineIdx); + } - public CVPipelineSettings addPipeline(PipelineType type, String nickname) { - var added = createSettingsForType(type, nickname); - if (added == null) { - logger.error("Cannot add null pipeline!"); - return null; + /** + * Returns whether driver mode is active. + * + * @return Whether driver mode is active. + */ + public boolean getDriverMode() { + return currentPipelineIndex == DRIVERMODE_INDEX; } - addPipelineInternal(added); - reassignIndexes(); - return added; - } - - private CVPipelineSettings createSettingsForType(PipelineType type, String nickname) { - CVPipelineSettings newSettings; - switch (type) { - case Reflective: - { - var added = new ReflectivePipelineSettings(); - added.pipelineNickname = nickname; - return added; - } - case ColoredShape: - { - var added = new ColoredShapePipelineSettings(); - added.pipelineNickname = nickname; - return added; - } - case AprilTag: - { - var added = new AprilTagPipelineSettings(); - added.pipelineNickname = nickname; - return added; + + public static final Comparator PipelineSettingsIndexComparator = + Comparator.comparingInt(o -> o.pipelineIndex); + + /** + * Sorts the pipeline list by index, and reassigns their indexes to match the new order.
+ *
+ * I don't like this, but I have no other ideas, and it works so + */ + private void reassignIndexes() { + userPipelineSettings.sort(PipelineSettingsIndexComparator); + for (int i = 0; i < userPipelineSettings.size(); i++) { + userPipelineSettings.get(i).pipelineIndex = i; } - case Aruco: - { - var added = new ArucoPipelineSettings(); - added.pipelineNickname = nickname; - return added; + } + + public CVPipelineSettings addPipeline(PipelineType type) { + return addPipeline(type, "New Pipeline"); + } + + public CVPipelineSettings addPipeline(PipelineType type, String nickname) { + var added = createSettingsForType(type, nickname); + if (added == null) { + logger.error("Cannot add null pipeline!"); + return null; } - default: - { - logger.error("Got invalid pipeline type: " + type); - return null; + addPipelineInternal(added); + reassignIndexes(); + return added; + } + + private CVPipelineSettings createSettingsForType(PipelineType type, String nickname) { + CVPipelineSettings newSettings; + switch (type) { + case Reflective: + { + var added = new ReflectivePipelineSettings(); + added.pipelineNickname = nickname; + return added; + } + case ColoredShape: + { + var added = new ColoredShapePipelineSettings(); + added.pipelineNickname = nickname; + return added; + } + case AprilTag: + { + var added = new AprilTagPipelineSettings(); + added.pipelineNickname = nickname; + return added; + } + case Aruco: + { + var added = new ArucoPipelineSettings(); + added.pipelineNickname = nickname; + return added; + } + default: + { + logger.error("Got invalid pipeline type: " + type); + return null; + } } } - } - - private void addPipelineInternal(CVPipelineSettings settings) { - settings.pipelineIndex = userPipelineSettings.size(); - userPipelineSettings.add(settings); - reassignIndexes(); - } - - /** - * Remove a pipeline settings at the given index and return the new current index - * - * @param index The idx to remove - */ - private int removePipelineInternal(int index) { - 
userPipelineSettings.remove(index); - currentPipelineIndex = Math.min(index, userPipelineSettings.size() - 1); - reassignIndexes(); - return currentPipelineIndex; - } - - public void setIndex(int index) { - this.setPipelineInternal(index); - } - - public int removePipeline(int index) { - if (index < 0) { - return currentPipelineIndex; + + private void addPipelineInternal(CVPipelineSettings settings) { + settings.pipelineIndex = userPipelineSettings.size(); + userPipelineSettings.add(settings); + reassignIndexes(); } - // TODO should we block/lock on a mutex? - return removePipelineInternal(index); - } - - public void renameCurrentPipeline(String newName) { - getCurrentPipelineSettings().pipelineNickname = newName; - } - - /** - * Duplicate a pipeline at a given index - * - * @param index the index of the target pipeline - * @return The new index - */ - public int duplicatePipeline(int index) { - var settings = userPipelineSettings.get(index); - var newSettings = settings.clone(); - newSettings.pipelineNickname = - createUniqueName(settings.pipelineNickname, userPipelineSettings); - newSettings.pipelineIndex = Integer.MAX_VALUE; - logger.debug("Duplicating pipe " + index + " to " + newSettings.pipelineNickname); - userPipelineSettings.add(newSettings); - reassignIndexes(); - - // Now we look for the index of the new pipeline and return it - return userPipelineSettings.indexOf(newSettings); - } - - private static String createUniqueName( - String nickname, List existingSettings) { - StringBuilder uniqueName = new StringBuilder(nickname); - while (true) { - String finalUniqueName = uniqueName.toString(); // To get around lambda capture - var conflictingName = - existingSettings.stream().anyMatch(it -> it.pipelineNickname.equals(finalUniqueName)); - - if (!conflictingName) { - // If no conflict, we're done - return uniqueName.toString(); - } else { - // Otherwise, we need to add a suffix to the name - // If the string doesn't already end in "([0-9]*)", we'll add it - 
// If it does, we'll increment the number in the suffix - - if (uniqueName.toString().matches(".*\\([0-9]*\\)")) { - // Because java strings are immutable, we have to do this curstedness - // This is like doing "New pipeline (" + 2 + ")" - - var parenStart = uniqueName.toString().lastIndexOf('('); - var parenEnd = uniqueName.length() - 1; - var number = Integer.parseInt(uniqueName.substring(parenStart + 1, parenEnd)) + 1; - - uniqueName = new StringBuilder(uniqueName.substring(0, parenStart + 1) + number + ")"); - } else { - uniqueName.append(" (1)"); - } - } + + /** + * Remove a pipeline settings at the given index and return the new current index + * + * @param index The idx to remove + */ + private int removePipelineInternal(int index) { + userPipelineSettings.remove(index); + currentPipelineIndex = Math.min(index, userPipelineSettings.size() - 1); + reassignIndexes(); + return currentPipelineIndex; } - } - - public void changePipelineType(int newType) { - // Find the PipelineType proposed - // To do this we look at all the PipelineType entries and look for one with matching - // base indexes - PipelineType type = - Arrays.stream(PipelineType.values()) - .filter(it -> it.baseIndex == newType) - .findAny() - .orElse(null); - if (type == null) { - logger.error("Could not match type " + newType + " to a PipelineType!"); - return; + + public void setIndex(int index) { + this.setPipelineInternal(index); + } + + public int removePipeline(int index) { + if (index < 0) { + return currentPipelineIndex; + } + // TODO should we block/lock on a mutex? 
+ return removePipelineInternal(index); } - if (type.baseIndex == getCurrentPipelineSettings().pipelineType.baseIndex) { - logger.debug( - "Not changing settings as " - + type - + " and " - + getCurrentPipelineSettings().pipelineType - + " are identical!"); - return; + public void renameCurrentPipeline(String newName) { + getCurrentPipelineSettings().pipelineNickname = newName; } - // Our new settings will be totally nuked, but that's ok - // We *could* set things in common between the two, if we want - // But they're different enough it shouldn't be an issue - var name = getCurrentPipelineSettings().pipelineNickname; - var newSettings = createSettingsForType(type, name); + /** + * Duplicate a pipeline at a given index + * + * @param index the index of the target pipeline + * @return The new index + */ + public int duplicatePipeline(int index) { + var settings = userPipelineSettings.get(index); + var newSettings = settings.clone(); + newSettings.pipelineNickname = + createUniqueName(settings.pipelineNickname, userPipelineSettings); + newSettings.pipelineIndex = Integer.MAX_VALUE; + logger.debug("Duplicating pipe " + index + " to " + newSettings.pipelineNickname); + userPipelineSettings.add(newSettings); + reassignIndexes(); + + // Now we look for the index of the new pipeline and return it + return userPipelineSettings.indexOf(newSettings); + } - var idx = currentPipelineIndex; - if (idx < 0) { - logger.error("Cannot replace non-user pipeline!"); - return; + private static String createUniqueName( + String nickname, List existingSettings) { + StringBuilder uniqueName = new StringBuilder(nickname); + while (true) { + String finalUniqueName = uniqueName.toString(); // To get around lambda capture + var conflictingName = + existingSettings.stream().anyMatch(it -> it.pipelineNickname.equals(finalUniqueName)); + + if (!conflictingName) { + // If no conflict, we're done + return uniqueName.toString(); + } else { + // Otherwise, we need to add a suffix to the name + // If 
the string doesn't already end in "([0-9]*)", we'll add it + // If it does, we'll increment the number in the suffix + + if (uniqueName.toString().matches(".*\\([0-9]*\\)")) { + // Because java strings are immutable, we have to do this curstedness + // This is like doing "New pipeline (" + 2 + ")" + + var parenStart = uniqueName.toString().lastIndexOf('('); + var parenEnd = uniqueName.length() - 1; + var number = Integer.parseInt(uniqueName.substring(parenStart + 1, parenEnd)) + 1; + + uniqueName = new StringBuilder(uniqueName.substring(0, parenStart + 1) + number + ")"); + } else { + uniqueName.append(" (1)"); + } + } + } } - logger.info("Adding new pipe of type " + type + " at idx " + idx); - newSettings.pipelineIndex = idx; - userPipelineSettings.set(idx, newSettings); - setPipelineInternal(idx); - reassignIndexes(); - } + public void changePipelineType(int newType) { + // Find the PipelineType proposed + // To do this we look at all the PipelineType entries and look for one with matching + // base indexes + PipelineType type = + Arrays.stream(PipelineType.values()) + .filter(it -> it.baseIndex == newType) + .findAny() + .orElse(null); + if (type == null) { + logger.error("Could not match type " + newType + " to a PipelineType!"); + return; + } + + if (type.baseIndex == getCurrentPipelineSettings().pipelineType.baseIndex) { + logger.debug( + "Not changing settings as " + + type + + " and " + + getCurrentPipelineSettings().pipelineType + + " are identical!"); + return; + } + + // Our new settings will be totally nuked, but that's ok + // We *could* set things in common between the two, if we want + // But they're different enough it shouldn't be an issue + var name = getCurrentPipelineSettings().pipelineNickname; + var newSettings = createSettingsForType(type, name); + + var idx = currentPipelineIndex; + if (idx < 0) { + logger.error("Cannot replace non-user pipeline!"); + return; + } + + logger.info("Adding new pipe of type " + type + " at idx " + idx); + 
newSettings.pipelineIndex = idx; + userPipelineSettings.set(idx, newSettings); + setPipelineInternal(idx); + reassignIndexes(); + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/processes/VisionModule.java b/photon-core/src/main/java/org/photonvision/vision/processes/VisionModule.java index 92c137baf7..75978739e7 100644 --- a/photon-core/src/main/java/org/photonvision/vision/processes/VisionModule.java +++ b/photon-core/src/main/java/org/photonvision/vision/processes/VisionModule.java @@ -61,559 +61,559 @@ * provide info on settings changes. VisionModuleManager holds a list of all current vision modules. */ public class VisionModule { - private final Logger logger; - protected final PipelineManager pipelineManager; - protected final VisionSource visionSource; - private final VisionRunner visionRunner; - private final StreamRunnable streamRunnable; - private final LinkedList resultConsumers = new LinkedList<>(); - // Raw result consumers run before any drawing has been done by the OutputStreamPipeline - private final LinkedList>> streamResultConsumers = - new LinkedList<>(); - private final NTDataPublisher ntConsumer; - private final UIDataPublisher uiDataConsumer; - protected final int moduleIndex; - protected final QuirkyCamera cameraQuirks; - - protected TrackedTarget lastPipelineResultBestTarget; - - private int inputStreamPort = -1; - private int outputStreamPort = -1; - - FileSaveFrameConsumer inputFrameSaver; - FileSaveFrameConsumer outputFrameSaver; - - SocketVideoStream inputVideoStreamer; - SocketVideoStream outputVideoStreamer; - - public VisionModule(PipelineManager pipelineManager, VisionSource visionSource, int index) { - logger = - new Logger( - VisionModule.class, - visionSource.getSettables().getConfiguration().nickname, - LogGroup.VisionModule); - - // Find quirks for the current camera - if (visionSource instanceof USBCameraSource) { - cameraQuirks = ((USBCameraSource) visionSource).cameraQuirks; - } else if (visionSource 
instanceof LibcameraGpuSource) { - cameraQuirks = QuirkyCamera.ZeroCopyPiCamera; - } else { - cameraQuirks = QuirkyCamera.DefaultCamera; + private final Logger logger; + protected final PipelineManager pipelineManager; + protected final VisionSource visionSource; + private final VisionRunner visionRunner; + private final StreamRunnable streamRunnable; + private final LinkedList resultConsumers = new LinkedList<>(); + // Raw result consumers run before any drawing has been done by the OutputStreamPipeline + private final LinkedList>> streamResultConsumers = + new LinkedList<>(); + private final NTDataPublisher ntConsumer; + private final UIDataPublisher uiDataConsumer; + protected final int moduleIndex; + protected final QuirkyCamera cameraQuirks; + + protected TrackedTarget lastPipelineResultBestTarget; + + private int inputStreamPort = -1; + private int outputStreamPort = -1; + + FileSaveFrameConsumer inputFrameSaver; + FileSaveFrameConsumer outputFrameSaver; + + SocketVideoStream inputVideoStreamer; + SocketVideoStream outputVideoStreamer; + + public VisionModule(PipelineManager pipelineManager, VisionSource visionSource, int index) { + logger = + new Logger( + VisionModule.class, + visionSource.getSettables().getConfiguration().nickname, + LogGroup.VisionModule); + + // Find quirks for the current camera + if (visionSource instanceof USBCameraSource) { + cameraQuirks = ((USBCameraSource) visionSource).cameraQuirks; + } else if (visionSource instanceof LibcameraGpuSource) { + cameraQuirks = QuirkyCamera.ZeroCopyPiCamera; + } else { + cameraQuirks = QuirkyCamera.DefaultCamera; + } + + // We don't show gain if the config says it's -1. 
So check here to make sure it's non-negative + // if it _is_ supported + if (cameraQuirks.hasQuirk(CameraQuirk.Gain)) { + pipelineManager.userPipelineSettings.forEach( + it -> { + if (it.cameraGain == -1) it.cameraGain = 75; // Sane default + }); + } + if (cameraQuirks.hasQuirk(CameraQuirk.AWBGain)) { + pipelineManager.userPipelineSettings.forEach( + it -> { + if (it.cameraRedGain == -1) it.cameraRedGain = 11; // Sane defaults + if (it.cameraBlueGain == -1) it.cameraBlueGain = 20; + }); + } + + this.pipelineManager = pipelineManager; + this.visionSource = visionSource; + this.visionRunner = + new VisionRunner( + this.visionSource.getFrameProvider(), + this.pipelineManager::getCurrentPipeline, + this::consumeResult, + this.cameraQuirks); + this.streamRunnable = new StreamRunnable(new OutputStreamPipeline()); + this.moduleIndex = index; + + DataChangeService.getInstance().addSubscriber(new VisionModuleChangeSubscriber(this)); + + createStreams(); + + recreateStreamResultConsumers(); + + ntConsumer = + new NTDataPublisher( + visionSource.getSettables().getConfiguration().nickname, + pipelineManager::getCurrentPipelineIndex, + this::setPipeline, + pipelineManager::getDriverMode, + this::setDriverMode); + uiDataConsumer = new UIDataPublisher(index); + addResultConsumer(ntConsumer); + addResultConsumer(uiDataConsumer); + addResultConsumer( + (result) -> + lastPipelineResultBestTarget = result.hasTargets() ? 
result.targets.get(0) : null); + + setPipeline(visionSource.getSettables().getConfiguration().currentPipelineIndex); + + // Set vendor FOV + if (isVendorCamera()) { + var fov = ConfigManager.getInstance().getConfig().getHardwareConfig().vendorFOV; + logger.info("Setting FOV of vendor camera to " + fov); + visionSource.getSettables().setFOV(fov); + } + + // Configure LED's if supported by the underlying hardware + if (HardwareManager.getInstance().visionLED != null && this.camShouldControlLEDs()) { + HardwareManager.getInstance() + .visionLED + .setPipelineModeSupplier(() -> pipelineManager.getCurrentPipelineSettings().ledMode); + setVisionLEDs(pipelineManager.getCurrentPipelineSettings().ledMode); + } + + saveAndBroadcastAll(); } - // We don't show gain if the config says it's -1. So check here to make sure it's non-negative - // if it _is_ supported - if (cameraQuirks.hasQuirk(CameraQuirk.Gain)) { - pipelineManager.userPipelineSettings.forEach( - it -> { - if (it.cameraGain == -1) it.cameraGain = 75; // Sane default - }); + private void destroyStreams() { + SocketVideoStreamManager.getInstance().removeStream(inputVideoStreamer); + SocketVideoStreamManager.getInstance().removeStream(outputVideoStreamer); } - if (cameraQuirks.hasQuirk(CameraQuirk.AWBGain)) { - pipelineManager.userPipelineSettings.forEach( - it -> { - if (it.cameraRedGain == -1) it.cameraRedGain = 11; // Sane defaults - if (it.cameraBlueGain == -1) it.cameraBlueGain = 20; - }); + + private void createStreams() { + var camStreamIdx = visionSource.getSettables().getConfiguration().streamIndex; + // If idx = 0, we want (1181, 1182) + this.inputStreamPort = 1181 + (camStreamIdx * 2); + this.outputStreamPort = 1181 + (camStreamIdx * 2) + 1; + + inputFrameSaver = + new FileSaveFrameConsumer(visionSource.getSettables().getConfiguration().nickname, "input"); + outputFrameSaver = + new FileSaveFrameConsumer( + visionSource.getSettables().getConfiguration().nickname, "output"); + + inputVideoStreamer = new 
SocketVideoStream(this.inputStreamPort); + outputVideoStreamer = new SocketVideoStream(this.outputStreamPort); + SocketVideoStreamManager.getInstance().addStream(inputVideoStreamer); + SocketVideoStreamManager.getInstance().addStream(outputVideoStreamer); } - this.pipelineManager = pipelineManager; - this.visionSource = visionSource; - this.visionRunner = - new VisionRunner( - this.visionSource.getFrameProvider(), - this.pipelineManager::getCurrentPipeline, - this::consumeResult, - this.cameraQuirks); - this.streamRunnable = new StreamRunnable(new OutputStreamPipeline()); - this.moduleIndex = index; - - DataChangeService.getInstance().addSubscriber(new VisionModuleChangeSubscriber(this)); - - createStreams(); - - recreateStreamResultConsumers(); - - ntConsumer = - new NTDataPublisher( - visionSource.getSettables().getConfiguration().nickname, - pipelineManager::getCurrentPipelineIndex, - this::setPipeline, - pipelineManager::getDriverMode, - this::setDriverMode); - uiDataConsumer = new UIDataPublisher(index); - addResultConsumer(ntConsumer); - addResultConsumer(uiDataConsumer); - addResultConsumer( - (result) -> - lastPipelineResultBestTarget = result.hasTargets() ? 
result.targets.get(0) : null); - - setPipeline(visionSource.getSettables().getConfiguration().currentPipelineIndex); - - // Set vendor FOV - if (isVendorCamera()) { - var fov = ConfigManager.getInstance().getConfig().getHardwareConfig().vendorFOV; - logger.info("Setting FOV of vendor camera to " + fov); - visionSource.getSettables().setFOV(fov); + private void recreateStreamResultConsumers() { + streamResultConsumers.add( + (frame, tgts) -> { + if (frame != null) inputFrameSaver.accept(frame.colorImage); + }); + streamResultConsumers.add( + (frame, tgts) -> { + if (frame != null) outputFrameSaver.accept(frame.processedImage); + }); + streamResultConsumers.add( + (frame, tgts) -> { + if (frame != null) inputVideoStreamer.accept(frame.colorImage); + }); + streamResultConsumers.add( + (frame, tgts) -> { + if (frame != null) outputVideoStreamer.accept(frame.processedImage); + }); } - // Configure LED's if supported by the underlying hardware - if (HardwareManager.getInstance().visionLED != null && this.camShouldControlLEDs()) { - HardwareManager.getInstance() - .visionLED - .setPipelineModeSupplier(() -> pipelineManager.getCurrentPipelineSettings().ledMode); - setVisionLEDs(pipelineManager.getCurrentPipelineSettings().ledMode); + private class StreamRunnable extends Thread { + private final OutputStreamPipeline outputStreamPipeline; + + private final Object frameLock = new Object(); + private Frame latestFrame; + private AdvancedPipelineSettings settings = new AdvancedPipelineSettings(); + private List targets = new ArrayList<>(); + + private boolean shouldRun = false; + + public StreamRunnable(OutputStreamPipeline outputStreamPipeline) { + this.outputStreamPipeline = outputStreamPipeline; + } + + public void updateData( + Frame inputOutputFrame, AdvancedPipelineSettings settings, List targets) { + synchronized (frameLock) { + if (shouldRun && this.latestFrame != null) { + logger.trace("Fell behind; releasing last unused Mats"); + this.latestFrame.release(); + } + + 
this.latestFrame = inputOutputFrame; + this.settings = settings; + this.targets = targets; + + shouldRun = inputOutputFrame != null; + // && inputOutputFrame.colorImage != null + // && !inputOutputFrame.colorImage.getMat().empty() + // && inputOutputFrame.processedImage != null + // && !inputOutputFrame.processedImage.getMat().empty(); + } + } + + @Override + public void run() { + while (true) { + final Frame m_frame; + final AdvancedPipelineSettings settings; + final List targets; + final boolean shouldRun; + synchronized (frameLock) { + m_frame = this.latestFrame; + this.latestFrame = null; + + settings = this.settings; + targets = this.targets; + shouldRun = this.shouldRun; + + this.shouldRun = false; + } + if (shouldRun) { + try { + CVPipelineResult osr = outputStreamPipeline.process(m_frame, settings, targets); + consumeResults(m_frame, targets); + + } catch (Exception e) { + // Never die + logger.error("Exception while running stream runnable!", e); + } + try { + m_frame.release(); + } catch (Exception e) { + logger.error("Exception freeing frames", e); + } + } else { + // busy wait! hurray! 
+ try { + Thread.sleep(1); + } catch (InterruptedException e) { + e.printStackTrace(); + } + } + } + } } - saveAndBroadcastAll(); - } - - private void destroyStreams() { - SocketVideoStreamManager.getInstance().removeStream(inputVideoStreamer); - SocketVideoStreamManager.getInstance().removeStream(outputVideoStreamer); - } - - private void createStreams() { - var camStreamIdx = visionSource.getSettables().getConfiguration().streamIndex; - // If idx = 0, we want (1181, 1182) - this.inputStreamPort = 1181 + (camStreamIdx * 2); - this.outputStreamPort = 1181 + (camStreamIdx * 2) + 1; - - inputFrameSaver = - new FileSaveFrameConsumer(visionSource.getSettables().getConfiguration().nickname, "input"); - outputFrameSaver = - new FileSaveFrameConsumer( - visionSource.getSettables().getConfiguration().nickname, "output"); - - inputVideoStreamer = new SocketVideoStream(this.inputStreamPort); - outputVideoStreamer = new SocketVideoStream(this.outputStreamPort); - SocketVideoStreamManager.getInstance().addStream(inputVideoStreamer); - SocketVideoStreamManager.getInstance().addStream(outputVideoStreamer); - } - - private void recreateStreamResultConsumers() { - streamResultConsumers.add( - (frame, tgts) -> { - if (frame != null) inputFrameSaver.accept(frame.colorImage); - }); - streamResultConsumers.add( - (frame, tgts) -> { - if (frame != null) outputFrameSaver.accept(frame.processedImage); - }); - streamResultConsumers.add( - (frame, tgts) -> { - if (frame != null) inputVideoStreamer.accept(frame.colorImage); - }); - streamResultConsumers.add( - (frame, tgts) -> { - if (frame != null) outputVideoStreamer.accept(frame.processedImage); - }); - } - - private class StreamRunnable extends Thread { - private final OutputStreamPipeline outputStreamPipeline; - - private final Object frameLock = new Object(); - private Frame latestFrame; - private AdvancedPipelineSettings settings = new AdvancedPipelineSettings(); - private List targets = new ArrayList<>(); - - private boolean 
shouldRun = false; - - public StreamRunnable(OutputStreamPipeline outputStreamPipeline) { - this.outputStreamPipeline = outputStreamPipeline; + public void start() { + visionRunner.startProcess(); + streamRunnable.start(); } - public void updateData( - Frame inputOutputFrame, AdvancedPipelineSettings settings, List targets) { - synchronized (frameLock) { - if (shouldRun && this.latestFrame != null) { - logger.trace("Fell behind; releasing last unused Mats"); - this.latestFrame.release(); + public void setFov(double fov) { + var settables = visionSource.getSettables(); + logger.trace(() -> "Setting " + settables.getConfiguration().nickname + ") FOV (" + fov + ")"); + + // Only set FOV if we have no vendor JSON, and we aren't using a PiCAM + if (isVendorCamera()) { + logger.info("Cannot set FOV on a vendor device! Ignoring..."); + } else { + settables.setFOV(fov); } + } - this.latestFrame = inputOutputFrame; - this.settings = settings; - this.targets = targets; + private boolean isVendorCamera() { + return visionSource.isVendorCamera(); + } - shouldRun = inputOutputFrame != null; - // && inputOutputFrame.colorImage != null - // && !inputOutputFrame.colorImage.getMat().empty() - // && inputOutputFrame.processedImage != null - // && !inputOutputFrame.processedImage.getMat().empty(); - } + void changePipelineType(int newType) { + pipelineManager.changePipelineType(newType); + setPipeline(pipelineManager.getCurrentPipelineIndex()); + saveAndBroadcastAll(); } - @Override - public void run() { - while (true) { - final Frame m_frame; - final AdvancedPipelineSettings settings; - final List targets; - final boolean shouldRun; - synchronized (frameLock) { - m_frame = this.latestFrame; - this.latestFrame = null; - - settings = this.settings; - targets = this.targets; - shouldRun = this.shouldRun; - - this.shouldRun = false; + void setDriverMode(boolean isDriverMode) { + pipelineManager.setDriverMode(isDriverMode); + setVisionLEDs(!isDriverMode); + setPipeline( + isDriverMode + 
? PipelineManager.DRIVERMODE_INDEX + : pipelineManager.getCurrentPipelineIndex()); + saveAndBroadcastAll(); + } + + public void startCalibration(UICalibrationData data) { + var settings = pipelineManager.calibration3dPipeline.getSettings(); + pipelineManager.calibration3dPipeline.deleteSavedImages(); + settings.cameraVideoModeIndex = data.videoModeIndex; + visionSource.getSettables().setVideoModeIndex(data.videoModeIndex); + logger.info( + "Starting calibration at resolution index " + + data.videoModeIndex + + " and settings " + + data); + settings.gridSize = Units.inchesToMeters(data.squareSizeIn); + settings.boardHeight = data.patternHeight; + settings.boardWidth = data.patternWidth; + settings.boardType = data.boardType; + + // Disable gain if not applicable + if (!cameraQuirks.hasQuirk(CameraQuirk.Gain)) { + settings.cameraGain = -1; } - if (shouldRun) { - try { - CVPipelineResult osr = outputStreamPipeline.process(m_frame, settings, targets); - consumeResults(m_frame, targets); - - } catch (Exception e) { - // Never die - logger.error("Exception while running stream runnable!", e); - } - try { - m_frame.release(); - } catch (Exception e) { - logger.error("Exception freeing frames", e); - } - } else { - // busy wait! hurray! - try { - Thread.sleep(1); - } catch (InterruptedException e) { - e.printStackTrace(); - } + if (!cameraQuirks.hasQuirk(CameraQuirk.AWBGain)) { + settings.cameraRedGain = -1; + settings.cameraBlueGain = -1; } - } + + settings.cameraAutoExposure = true; + + setPipeline(PipelineManager.CAL_3D_INDEX); } - } - - public void start() { - visionRunner.startProcess(); - streamRunnable.start(); - } - - public void setFov(double fov) { - var settables = visionSource.getSettables(); - logger.trace(() -> "Setting " + settables.getConfiguration().nickname + ") FOV (" + fov + ")"); - - // Only set FOV if we have no vendor JSON, and we aren't using a PiCAM - if (isVendorCamera()) { - logger.info("Cannot set FOV on a vendor device! 
Ignoring..."); - } else { - settables.setFOV(fov); + + public void saveInputSnapshot() { + inputFrameSaver.overrideTakeSnapshot(); } - } - - private boolean isVendorCamera() { - return visionSource.isVendorCamera(); - } - - void changePipelineType(int newType) { - pipelineManager.changePipelineType(newType); - setPipeline(pipelineManager.getCurrentPipelineIndex()); - saveAndBroadcastAll(); - } - - void setDriverMode(boolean isDriverMode) { - pipelineManager.setDriverMode(isDriverMode); - setVisionLEDs(!isDriverMode); - setPipeline( - isDriverMode - ? PipelineManager.DRIVERMODE_INDEX - : pipelineManager.getCurrentPipelineIndex()); - saveAndBroadcastAll(); - } - - public void startCalibration(UICalibrationData data) { - var settings = pipelineManager.calibration3dPipeline.getSettings(); - pipelineManager.calibration3dPipeline.deleteSavedImages(); - settings.cameraVideoModeIndex = data.videoModeIndex; - visionSource.getSettables().setVideoModeIndex(data.videoModeIndex); - logger.info( - "Starting calibration at resolution index " - + data.videoModeIndex - + " and settings " - + data); - settings.gridSize = Units.inchesToMeters(data.squareSizeIn); - settings.boardHeight = data.patternHeight; - settings.boardWidth = data.patternWidth; - settings.boardType = data.boardType; - - // Disable gain if not applicable - if (!cameraQuirks.hasQuirk(CameraQuirk.Gain)) { - settings.cameraGain = -1; + + public void saveOutputSnapshot() { + outputFrameSaver.overrideTakeSnapshot(); } - if (!cameraQuirks.hasQuirk(CameraQuirk.AWBGain)) { - settings.cameraRedGain = -1; - settings.cameraBlueGain = -1; + + public void takeCalibrationSnapshot() { + pipelineManager.calibration3dPipeline.takeSnapshot(); + } + + public CameraCalibrationCoefficients endCalibration() { + var ret = pipelineManager.calibration3dPipeline.tryCalibration(); + pipelineManager.setCalibrationMode(false); + + setPipeline(pipelineManager.getCurrentPipelineIndex()); + + if (ret != null) { + logger.debug("Saving 
calibration..."); + visionSource.getSettables().addCalibration(ret); + } else { + logger.error("Calibration failed..."); + } + saveAndBroadcastAll(); + return ret; } - settings.cameraAutoExposure = true; + boolean setPipeline(int index) { + logger.info("Setting pipeline to " + index); + logger.info("Pipeline name: " + pipelineManager.getPipelineNickname(index)); + pipelineManager.setIndex(index); + var pipelineSettings = pipelineManager.getPipelineSettings(index); - setPipeline(PipelineManager.CAL_3D_INDEX); - } + if (pipelineSettings == null) { + logger.error("Config for index " + index + " was null! Not changing pipelines"); + return false; + } + + visionSource.getSettables().setVideoModeInternal(pipelineSettings.cameraVideoModeIndex); + visionSource.getSettables().setBrightness(pipelineSettings.cameraBrightness); + visionSource.getSettables().setGain(pipelineSettings.cameraGain); - public void saveInputSnapshot() { - inputFrameSaver.overrideTakeSnapshot(); - } + // If manual exposure, force exposure slider to be valid + if (!pipelineSettings.cameraAutoExposure) { + if (pipelineSettings.cameraExposure < 0) + pipelineSettings.cameraExposure = 10; // reasonable default + } - public void saveOutputSnapshot() { - outputFrameSaver.overrideTakeSnapshot(); - } + visionSource.getSettables().setExposure(pipelineSettings.cameraExposure); + try { + visionSource.getSettables().setAutoExposure(pipelineSettings.cameraAutoExposure); + } catch (VideoException e) { + logger.error("Unable to set camera auto exposure!"); + logger.error(e.toString()); + } + if (cameraQuirks.hasQuirk(CameraQuirk.Gain)) { + // If the gain is disabled for some reason, re-enable it + if (pipelineSettings.cameraGain == -1) pipelineSettings.cameraGain = 75; + visionSource.getSettables().setGain(Math.max(0, pipelineSettings.cameraGain)); + } else { + pipelineSettings.cameraGain = -1; + } - public void takeCalibrationSnapshot() { - pipelineManager.calibration3dPipeline.takeSnapshot(); - } + if 
(cameraQuirks.hasQuirk(CameraQuirk.AWBGain)) { + // If the AWB gains are disabled for some reason, re-enable it + if (pipelineSettings.cameraRedGain == -1) pipelineSettings.cameraRedGain = 11; + if (pipelineSettings.cameraBlueGain == -1) pipelineSettings.cameraBlueGain = 20; + visionSource.getSettables().setRedGain(Math.max(0, pipelineSettings.cameraRedGain)); + visionSource.getSettables().setBlueGain(Math.max(0, pipelineSettings.cameraBlueGain)); + } else { + pipelineSettings.cameraRedGain = -1; + pipelineSettings.cameraBlueGain = -1; + } - public CameraCalibrationCoefficients endCalibration() { - var ret = pipelineManager.calibration3dPipeline.tryCalibration(); - pipelineManager.setCalibrationMode(false); + setVisionLEDs(pipelineSettings.ledMode); - setPipeline(pipelineManager.getCurrentPipelineIndex()); + visionSource.getSettables().getConfiguration().currentPipelineIndex = + pipelineManager.getCurrentPipelineIndex(); - if (ret != null) { - logger.debug("Saving calibration..."); - visionSource.getSettables().addCalibration(ret); - } else { - logger.error("Calibration failed..."); + return true; } - saveAndBroadcastAll(); - return ret; - } - - boolean setPipeline(int index) { - logger.info("Setting pipeline to " + index); - logger.info("Pipeline name: " + pipelineManager.getPipelineNickname(index)); - pipelineManager.setIndex(index); - var pipelineSettings = pipelineManager.getPipelineSettings(index); - - if (pipelineSettings == null) { - logger.error("Config for index " + index + " was null! Not changing pipelines"); - return false; + + private boolean camShouldControlLEDs() { + // Heuristic - if the camera has a known FOV or is a piCam, assume it's in use for + // vision processing, and should command stuff to the LED's. + // TODO: Make LED control a property of the camera itself and controllable in the UI. 
+ return isVendorCamera() || cameraQuirks.hasQuirk(CameraQuirk.PiCam); } - visionSource.getSettables().setVideoModeInternal(pipelineSettings.cameraVideoModeIndex); - visionSource.getSettables().setBrightness(pipelineSettings.cameraBrightness); - visionSource.getSettables().setGain(pipelineSettings.cameraGain); + private void setVisionLEDs(boolean on) { + if (camShouldControlLEDs() && HardwareManager.getInstance().visionLED != null) + HardwareManager.getInstance().visionLED.setState(on); + } - // If manual exposure, force exposure slider to be valid - if (!pipelineSettings.cameraAutoExposure) { - if (pipelineSettings.cameraExposure < 0) - pipelineSettings.cameraExposure = 10; // reasonable default + public void saveModule() { + ConfigManager.getInstance() + .saveModule( + getStateAsCameraConfig(), visionSource.getSettables().getConfiguration().uniqueName); } - visionSource.getSettables().setExposure(pipelineSettings.cameraExposure); - try { - visionSource.getSettables().setAutoExposure(pipelineSettings.cameraAutoExposure); - } catch (VideoException e) { - logger.error("Unable to set camera auto exposure!"); - logger.error(e.toString()); + void saveAndBroadcastAll() { + saveModule(); + DataChangeService.getInstance() + .publishEvent( + new OutgoingUIEvent<>( + "fullsettings", ConfigManager.getInstance().getConfig().toHashMap())); } - if (cameraQuirks.hasQuirk(CameraQuirk.Gain)) { - // If the gain is disabled for some reason, re-enable it - if (pipelineSettings.cameraGain == -1) pipelineSettings.cameraGain = 75; - visionSource.getSettables().setGain(Math.max(0, pipelineSettings.cameraGain)); - } else { - pipelineSettings.cameraGain = -1; + + void saveAndBroadcastSelective(WsContext originContext, String propertyName, Object value) { + logger.trace("Broadcasting PSC mutation - " + propertyName + ": " + value); + saveModule(); + + HashMap map = new HashMap<>(); + HashMap subMap = new HashMap<>(); + subMap.put(propertyName, value); + map.put("cameraIndex", 
this.moduleIndex); + map.put("mutatePipelineSettings", subMap); + + DataChangeService.getInstance() + .publishEvent(new OutgoingUIEvent<>("mutatePipeline", map, originContext)); } - if (cameraQuirks.hasQuirk(CameraQuirk.AWBGain)) { - // If the AWB gains are disabled for some reason, re-enable it - if (pipelineSettings.cameraRedGain == -1) pipelineSettings.cameraRedGain = 11; - if (pipelineSettings.cameraBlueGain == -1) pipelineSettings.cameraBlueGain = 20; - visionSource.getSettables().setRedGain(Math.max(0, pipelineSettings.cameraRedGain)); - visionSource.getSettables().setBlueGain(Math.max(0, pipelineSettings.cameraBlueGain)); - } else { - pipelineSettings.cameraRedGain = -1; - pipelineSettings.cameraBlueGain = -1; + public void setCameraNickname(String newName) { + visionSource.getSettables().getConfiguration().nickname = newName; + ntConsumer.updateCameraNickname(newName); + inputFrameSaver.updateCameraNickname(newName); + outputFrameSaver.updateCameraNickname(newName); + + // Rename streams + streamResultConsumers.clear(); + + // Teardown and recreate streams + destroyStreams(); + createStreams(); + + // Rebuild streamers + recreateStreamResultConsumers(); + + // Push new data to the UI + saveAndBroadcastAll(); } - setVisionLEDs(pipelineSettings.ledMode); - - visionSource.getSettables().getConfiguration().currentPipelineIndex = - pipelineManager.getCurrentPipelineIndex(); - - return true; - } - - private boolean camShouldControlLEDs() { - // Heuristic - if the camera has a known FOV or is a piCam, assume it's in use for - // vision processing, and should command stuff to the LED's. - // TODO: Make LED control a property of the camera itself and controllable in the UI. 
- return isVendorCamera() || cameraQuirks.hasQuirk(CameraQuirk.PiCam); - } - - private void setVisionLEDs(boolean on) { - if (camShouldControlLEDs() && HardwareManager.getInstance().visionLED != null) - HardwareManager.getInstance().visionLED.setState(on); - } - - public void saveModule() { - ConfigManager.getInstance() - .saveModule( - getStateAsCameraConfig(), visionSource.getSettables().getConfiguration().uniqueName); - } - - void saveAndBroadcastAll() { - saveModule(); - DataChangeService.getInstance() - .publishEvent( - new OutgoingUIEvent<>( - "fullsettings", ConfigManager.getInstance().getConfig().toHashMap())); - } - - void saveAndBroadcastSelective(WsContext originContext, String propertyName, Object value) { - logger.trace("Broadcasting PSC mutation - " + propertyName + ": " + value); - saveModule(); - - HashMap map = new HashMap<>(); - HashMap subMap = new HashMap<>(); - subMap.put(propertyName, value); - map.put("cameraIndex", this.moduleIndex); - map.put("mutatePipelineSettings", subMap); - - DataChangeService.getInstance() - .publishEvent(new OutgoingUIEvent<>("mutatePipeline", map, originContext)); - } - - public void setCameraNickname(String newName) { - visionSource.getSettables().getConfiguration().nickname = newName; - ntConsumer.updateCameraNickname(newName); - inputFrameSaver.updateCameraNickname(newName); - outputFrameSaver.updateCameraNickname(newName); - - // Rename streams - streamResultConsumers.clear(); - - // Teardown and recreate streams - destroyStreams(); - createStreams(); - - // Rebuild streamers - recreateStreamResultConsumers(); - - // Push new data to the UI - saveAndBroadcastAll(); - } - - public PhotonConfiguration.UICameraConfiguration toUICameraConfig() { - var ret = new PhotonConfiguration.UICameraConfiguration(); - - ret.fov = visionSource.getSettables().getFOV(); - ret.nickname = visionSource.getSettables().getConfiguration().nickname; - ret.currentPipelineSettings = - 
SerializationUtils.objectToHashMap(pipelineManager.getCurrentPipelineSettings()); - ret.currentPipelineIndex = pipelineManager.getCurrentPipelineIndex(); - ret.pipelineNicknames = pipelineManager.getPipelineNicknames(); - - // TODO refactor into helper method - var temp = new HashMap>(); - var videoModes = visionSource.getSettables().getAllVideoModes(); - for (var k : videoModes.keySet()) { - var internalMap = new HashMap(); - - internalMap.put("width", videoModes.get(k).width); - internalMap.put("height", videoModes.get(k).height); - internalMap.put("fps", videoModes.get(k).fps); - internalMap.put( - "pixelFormat", - ((videoModes.get(k) instanceof LibcameraGpuSource.FPSRatedVideoMode) - ? "kPicam" - : videoModes.get(k).pixelFormat.toString()) - .substring(1)); // Remove the k prefix - - temp.put(k, internalMap); + public PhotonConfiguration.UICameraConfiguration toUICameraConfig() { + var ret = new PhotonConfiguration.UICameraConfiguration(); + + ret.fov = visionSource.getSettables().getFOV(); + ret.nickname = visionSource.getSettables().getConfiguration().nickname; + ret.currentPipelineSettings = + SerializationUtils.objectToHashMap(pipelineManager.getCurrentPipelineSettings()); + ret.currentPipelineIndex = pipelineManager.getCurrentPipelineIndex(); + ret.pipelineNicknames = pipelineManager.getPipelineNicknames(); + + // TODO refactor into helper method + var temp = new HashMap>(); + var videoModes = visionSource.getSettables().getAllVideoModes(); + for (var k : videoModes.keySet()) { + var internalMap = new HashMap(); + + internalMap.put("width", videoModes.get(k).width); + internalMap.put("height", videoModes.get(k).height); + internalMap.put("fps", videoModes.get(k).fps); + internalMap.put( + "pixelFormat", + ((videoModes.get(k) instanceof LibcameraGpuSource.FPSRatedVideoMode) + ? 
"kPicam" + : videoModes.get(k).pixelFormat.toString()) + .substring(1)); // Remove the k prefix + + temp.put(k, internalMap); + } + ret.videoFormatList = temp; + ret.outputStreamPort = this.outputStreamPort; + ret.inputStreamPort = this.inputStreamPort; + + var calList = new ArrayList>(); + for (var c : visionSource.getSettables().getConfiguration().calibrations) { + var internalMap = new HashMap(); + + internalMap.put("perViewErrors", c.perViewErrors); + internalMap.put("standardDeviation", c.standardDeviation); + internalMap.put("width", c.resolution.width); + internalMap.put("height", c.resolution.height); + internalMap.put("intrinsics", c.cameraIntrinsics.data); + internalMap.put("distCoeffs", c.distCoeffs.data); + + calList.add(internalMap); + } + ret.calibrations = calList; + + ret.isFovConfigurable = + !(ConfigManager.getInstance().getConfig().getHardwareConfig().hasPresetFOV() + && cameraQuirks.hasQuirk(CameraQuirk.PiCam)); + + return ret; } - ret.videoFormatList = temp; - ret.outputStreamPort = this.outputStreamPort; - ret.inputStreamPort = this.inputStreamPort; - - var calList = new ArrayList>(); - for (var c : visionSource.getSettables().getConfiguration().calibrations) { - var internalMap = new HashMap(); - - internalMap.put("perViewErrors", c.perViewErrors); - internalMap.put("standardDeviation", c.standardDeviation); - internalMap.put("width", c.resolution.width); - internalMap.put("height", c.resolution.height); - internalMap.put("intrinsics", c.cameraIntrinsics.data); - internalMap.put("distCoeffs", c.distCoeffs.data); - - calList.add(internalMap); + + public CameraConfiguration getStateAsCameraConfig() { + var config = visionSource.getSettables().getConfiguration(); + config.setPipelineSettings(pipelineManager.userPipelineSettings); + config.driveModeSettings = pipelineManager.driverModePipeline.getSettings(); + config.currentPipelineIndex = Math.max(pipelineManager.getCurrentPipelineIndex(), -1); + + return config; } - ret.calibrations = calList; 
- - ret.isFovConfigurable = - !(ConfigManager.getInstance().getConfig().getHardwareConfig().hasPresetFOV() - && cameraQuirks.hasQuirk(CameraQuirk.PiCam)); - - return ret; - } - - public CameraConfiguration getStateAsCameraConfig() { - var config = visionSource.getSettables().getConfiguration(); - config.setPipelineSettings(pipelineManager.userPipelineSettings); - config.driveModeSettings = pipelineManager.driverModePipeline.getSettings(); - config.currentPipelineIndex = Math.max(pipelineManager.getCurrentPipelineIndex(), -1); - - return config; - } - - public void addResultConsumer(CVPipelineResultConsumer dataConsumer) { - resultConsumers.add(dataConsumer); - } - - private void consumeResult(CVPipelineResult result) { - consumePipelineResult(result); - - // Pipelines like DriverMode and Calibrate3dPipeline have null output frames - if (result.inputAndOutputFrame != null - && (pipelineManager.getCurrentPipelineSettings() instanceof AdvancedPipelineSettings)) { - streamRunnable.updateData( - result.inputAndOutputFrame, - (AdvancedPipelineSettings) pipelineManager.getCurrentPipelineSettings(), - result.targets); - // The streamRunnable manages releasing in this case - } else { - consumeResults(result.inputAndOutputFrame, result.targets); - - result.release(); - // In this case we don't bother with a separate streaming thread and we release + + public void addResultConsumer(CVPipelineResultConsumer dataConsumer) { + resultConsumers.add(dataConsumer); } - } - private void consumePipelineResult(CVPipelineResult result) { - for (var dataConsumer : resultConsumers) { - dataConsumer.accept(result); + private void consumeResult(CVPipelineResult result) { + consumePipelineResult(result); + + // Pipelines like DriverMode and Calibrate3dPipeline have null output frames + if (result.inputAndOutputFrame != null + && (pipelineManager.getCurrentPipelineSettings() instanceof AdvancedPipelineSettings)) { + streamRunnable.updateData( + result.inputAndOutputFrame, + 
(AdvancedPipelineSettings) pipelineManager.getCurrentPipelineSettings(), + result.targets); + // The streamRunnable manages releasing in this case + } else { + consumeResults(result.inputAndOutputFrame, result.targets); + + result.release(); + // In this case we don't bother with a separate streaming thread and we release + } } - } - /** Consume stream/target results, no rate limiting applied */ - private void consumeResults(Frame frame, List targets) { - for (var c : streamResultConsumers) { - c.accept(frame, targets); + private void consumePipelineResult(CVPipelineResult result) { + for (var dataConsumer : resultConsumers) { + dataConsumer.accept(result); + } } - } - - public void setTargetModel(TargetModel targetModel) { - var settings = pipelineManager.getCurrentPipeline().getSettings(); - if (settings instanceof ReflectivePipelineSettings) { - ((ReflectivePipelineSettings) settings).targetModel = targetModel; - saveAndBroadcastAll(); - } else { - logger.error("Cannot set target model of non-reflective pipe! Ignoring..."); + + /** Consume stream/target results, no rate limiting applied */ + private void consumeResults(Frame frame, List targets) { + for (var c : streamResultConsumers) { + c.accept(frame, targets); + } } - } - - public void addCalibrationToConfig(CameraCalibrationCoefficients newCalibration) { - if (newCalibration != null) { - logger.info("Got new calibration for " + newCalibration.resolution); - visionSource.getSettables().getConfiguration().addCalibration(newCalibration); - } else { - logger.error("Got null calibration?"); + + public void setTargetModel(TargetModel targetModel) { + var settings = pipelineManager.getCurrentPipeline().getSettings(); + if (settings instanceof ReflectivePipelineSettings) { + ((ReflectivePipelineSettings) settings).targetModel = targetModel; + saveAndBroadcastAll(); + } else { + logger.error("Cannot set target model of non-reflective pipe! 
Ignoring..."); + } } - saveAndBroadcastAll(); - } + public void addCalibrationToConfig(CameraCalibrationCoefficients newCalibration) { + if (newCalibration != null) { + logger.info("Got new calibration for " + newCalibration.resolution); + visionSource.getSettables().getConfiguration().addCalibration(newCalibration); + } else { + logger.error("Got null calibration?"); + } + + saveAndBroadcastAll(); + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/processes/VisionModuleChangeSubscriber.java b/photon-core/src/main/java/org/photonvision/vision/processes/VisionModuleChangeSubscriber.java index 9e89074939..e824ce89dc 100644 --- a/photon-core/src/main/java/org/photonvision/vision/processes/VisionModuleChangeSubscriber.java +++ b/photon-core/src/main/java/org/photonvision/vision/processes/VisionModuleChangeSubscriber.java @@ -36,205 +36,205 @@ @SuppressWarnings("unchecked") public class VisionModuleChangeSubscriber extends DataChangeSubscriber { - private final VisionModule parentModule; - private final Logger logger; - - public VisionModuleChangeSubscriber(VisionModule parentModule) { - this.parentModule = parentModule; - logger = - new Logger( - VisionModuleChangeSubscriber.class, - parentModule.visionSource.getSettables().getConfiguration().nickname, - LogGroup.VisionModule); - } - - @Override - public void onDataChangeEvent(DataChangeEvent event) { - if (event instanceof IncomingWebSocketEvent) { - var wsEvent = (IncomingWebSocketEvent) event; - - // Camera index -1 means a "multicast event" (i.e. 
the event is received by all cameras) - if (wsEvent.cameraIndex != null - && (wsEvent.cameraIndex == parentModule.moduleIndex || wsEvent.cameraIndex == -1)) { - logger.trace("Got PSC event - propName: " + wsEvent.propertyName); - - var propName = wsEvent.propertyName; - var newPropValue = wsEvent.data; - var currentSettings = parentModule.pipelineManager.getCurrentPipeline().getSettings(); - - // special case for non-PipelineSetting changes - switch (propName) { - // case "cameraNickname": // rename camera - // var newNickname = (String) newPropValue; - // logger.info("Changing nickname to " + newNickname); - // parentModule.setCameraNickname(newNickname); - // return; - case "pipelineName": // rename current pipeline - logger.info("Changing nick to " + newPropValue); - parentModule.pipelineManager.getCurrentPipelineSettings().pipelineNickname = - (String) newPropValue; - parentModule.saveAndBroadcastAll(); - return; - case "newPipelineInfo": // add new pipeline - var typeName = (Pair) newPropValue; - var type = typeName.getRight(); - var name = typeName.getLeft(); - - logger.info("Adding a " + type + " pipeline with name " + name); - - var addedSettings = parentModule.pipelineManager.addPipeline(type); - addedSettings.pipelineNickname = name; - parentModule.saveAndBroadcastAll(); - return; - case "deleteCurrPipeline": - var indexToDelete = parentModule.pipelineManager.getCurrentPipelineIndex(); - logger.info("Deleting current pipe at index " + indexToDelete); - int newIndex = parentModule.pipelineManager.removePipeline(indexToDelete); - parentModule.setPipeline(newIndex); - parentModule.saveAndBroadcastAll(); - return; - case "changePipeline": // change active pipeline - var index = (Integer) newPropValue; - if (index == parentModule.pipelineManager.getCurrentPipelineIndex()) { - logger.debug("Skipping pipeline change, index " + index + " already active"); - return; - } - parentModule.setPipeline(index); - parentModule.saveAndBroadcastAll(); - return; - case 
"startCalibration": - var data = UICalibrationData.fromMap((Map) newPropValue); - parentModule.startCalibration(data); - parentModule.saveAndBroadcastAll(); - return; - case "saveInputSnapshot": - parentModule.saveInputSnapshot(); - return; - case "saveOutputSnapshot": - parentModule.saveOutputSnapshot(); - return; - case "takeCalSnapshot": - parentModule.takeCalibrationSnapshot(); - return; - case "duplicatePipeline": - int idx = parentModule.pipelineManager.duplicatePipeline((Integer) newPropValue); - parentModule.setPipeline(idx); - parentModule.saveAndBroadcastAll(); - return; - case "calibrationUploaded": - if (newPropValue instanceof CameraCalibrationCoefficients) - parentModule.addCalibrationToConfig((CameraCalibrationCoefficients) newPropValue); - return; - case "robotOffsetPoint": - if (currentSettings instanceof AdvancedPipelineSettings) { - var curAdvSettings = (AdvancedPipelineSettings) currentSettings; - var offsetOperation = RobotOffsetPointOperation.fromIndex((int) newPropValue); - var latestTarget = parentModule.lastPipelineResultBestTarget; - - if (latestTarget != null) { - var newPoint = latestTarget.getTargetOffsetPoint(); - - switch (curAdvSettings.offsetRobotOffsetMode) { - case Single: - if (offsetOperation == RobotOffsetPointOperation.ROPO_CLEAR) { - curAdvSettings.offsetSinglePoint = new Point(); - } else if (offsetOperation == RobotOffsetPointOperation.ROPO_TAKESINGLE) { - curAdvSettings.offsetSinglePoint = newPoint; + private final VisionModule parentModule; + private final Logger logger; + + public VisionModuleChangeSubscriber(VisionModule parentModule) { + this.parentModule = parentModule; + logger = + new Logger( + VisionModuleChangeSubscriber.class, + parentModule.visionSource.getSettables().getConfiguration().nickname, + LogGroup.VisionModule); + } + + @Override + public void onDataChangeEvent(DataChangeEvent event) { + if (event instanceof IncomingWebSocketEvent) { + var wsEvent = (IncomingWebSocketEvent) event; + + // Camera index 
-1 means a "multicast event" (i.e. the event is received by all cameras) + if (wsEvent.cameraIndex != null + && (wsEvent.cameraIndex == parentModule.moduleIndex || wsEvent.cameraIndex == -1)) { + logger.trace("Got PSC event - propName: " + wsEvent.propertyName); + + var propName = wsEvent.propertyName; + var newPropValue = wsEvent.data; + var currentSettings = parentModule.pipelineManager.getCurrentPipeline().getSettings(); + + // special case for non-PipelineSetting changes + switch (propName) { + // case "cameraNickname": // rename camera + // var newNickname = (String) newPropValue; + // logger.info("Changing nickname to " + newNickname); + // parentModule.setCameraNickname(newNickname); + // return; + case "pipelineName": // rename current pipeline + logger.info("Changing nick to " + newPropValue); + parentModule.pipelineManager.getCurrentPipelineSettings().pipelineNickname = + (String) newPropValue; + parentModule.saveAndBroadcastAll(); + return; + case "newPipelineInfo": // add new pipeline + var typeName = (Pair) newPropValue; + var type = typeName.getRight(); + var name = typeName.getLeft(); + + logger.info("Adding a " + type + " pipeline with name " + name); + + var addedSettings = parentModule.pipelineManager.addPipeline(type); + addedSettings.pipelineNickname = name; + parentModule.saveAndBroadcastAll(); + return; + case "deleteCurrPipeline": + var indexToDelete = parentModule.pipelineManager.getCurrentPipelineIndex(); + logger.info("Deleting current pipe at index " + indexToDelete); + int newIndex = parentModule.pipelineManager.removePipeline(indexToDelete); + parentModule.setPipeline(newIndex); + parentModule.saveAndBroadcastAll(); + return; + case "changePipeline": // change active pipeline + var index = (Integer) newPropValue; + if (index == parentModule.pipelineManager.getCurrentPipelineIndex()) { + logger.debug("Skipping pipeline change, index " + index + " already active"); + return; + } + parentModule.setPipeline(index); + 
parentModule.saveAndBroadcastAll(); + return; + case "startCalibration": + var data = UICalibrationData.fromMap((Map) newPropValue); + parentModule.startCalibration(data); + parentModule.saveAndBroadcastAll(); + return; + case "saveInputSnapshot": + parentModule.saveInputSnapshot(); + return; + case "saveOutputSnapshot": + parentModule.saveOutputSnapshot(); + return; + case "takeCalSnapshot": + parentModule.takeCalibrationSnapshot(); + return; + case "duplicatePipeline": + int idx = parentModule.pipelineManager.duplicatePipeline((Integer) newPropValue); + parentModule.setPipeline(idx); + parentModule.saveAndBroadcastAll(); + return; + case "calibrationUploaded": + if (newPropValue instanceof CameraCalibrationCoefficients) + parentModule.addCalibrationToConfig((CameraCalibrationCoefficients) newPropValue); + return; + case "robotOffsetPoint": + if (currentSettings instanceof AdvancedPipelineSettings) { + var curAdvSettings = (AdvancedPipelineSettings) currentSettings; + var offsetOperation = RobotOffsetPointOperation.fromIndex((int) newPropValue); + var latestTarget = parentModule.lastPipelineResultBestTarget; + + if (latestTarget != null) { + var newPoint = latestTarget.getTargetOffsetPoint(); + + switch (curAdvSettings.offsetRobotOffsetMode) { + case Single: + if (offsetOperation == RobotOffsetPointOperation.ROPO_CLEAR) { + curAdvSettings.offsetSinglePoint = new Point(); + } else if (offsetOperation == RobotOffsetPointOperation.ROPO_TAKESINGLE) { + curAdvSettings.offsetSinglePoint = newPoint; + } + break; + case Dual: + if (offsetOperation == RobotOffsetPointOperation.ROPO_CLEAR) { + curAdvSettings.offsetDualPointA = new Point(); + curAdvSettings.offsetDualPointAArea = 0; + curAdvSettings.offsetDualPointB = new Point(); + curAdvSettings.offsetDualPointBArea = 0; + } else { + // update point and area + switch (offsetOperation) { + case ROPO_TAKEFIRSTDUAL: + curAdvSettings.offsetDualPointA = newPoint; + curAdvSettings.offsetDualPointAArea = latestTarget.getArea(); + 
break; + case ROPO_TAKESECONDDUAL: + curAdvSettings.offsetDualPointB = newPoint; + curAdvSettings.offsetDualPointBArea = latestTarget.getArea(); + break; + default: + break; + } + } + break; + default: + break; + } + } + } + return; + case "changePipelineType": + parentModule.changePipelineType((Integer) newPropValue); + parentModule.saveAndBroadcastAll(); + return; + } + + // special case for camera settables + if (propName.startsWith("camera")) { + var propMethodName = "set" + propName.replace("camera", ""); + var methods = parentModule.visionSource.getSettables().getClass().getMethods(); + for (var method : methods) { + if (method.getName().equalsIgnoreCase(propMethodName)) { + try { + method.invoke(parentModule.visionSource.getSettables(), newPropValue); + } catch (Exception e) { + logger.error("Failed to invoke camera settable method: " + method.getName(), e); + } + } } - break; - case Dual: - if (offsetOperation == RobotOffsetPointOperation.ROPO_CLEAR) { - curAdvSettings.offsetDualPointA = new Point(); - curAdvSettings.offsetDualPointAArea = 0; - curAdvSettings.offsetDualPointB = new Point(); - curAdvSettings.offsetDualPointBArea = 0; + } + + try { + var propField = currentSettings.getClass().getField(propName); + var propType = propField.getType(); + + if (propType.isEnum()) { + var actual = propType.getEnumConstants()[(int) newPropValue]; + propField.set(currentSettings, actual); + } else if (propType.isAssignableFrom(DoubleCouple.class)) { + var orig = (ArrayList) newPropValue; + var actual = new DoubleCouple(orig.get(0), orig.get(1)); + propField.set(currentSettings, actual); + } else if (propType.isAssignableFrom(IntegerCouple.class)) { + var orig = (ArrayList) newPropValue; + var actual = new IntegerCouple(orig.get(0).intValue(), orig.get(1).intValue()); + propField.set(currentSettings, actual); + } else if (propType.equals(Double.TYPE)) { + propField.setDouble(currentSettings, ((Number) newPropValue).doubleValue()); + } else if 
(propType.equals(Integer.TYPE)) { + propField.setInt(currentSettings, (Integer) newPropValue); + } else if (propType.equals(Boolean.TYPE)) { + if (newPropValue instanceof Integer) { + propField.setBoolean(currentSettings, (Integer) newPropValue != 0); + } else { + propField.setBoolean(currentSettings, (Boolean) newPropValue); + } } else { - // update point and area - switch (offsetOperation) { - case ROPO_TAKEFIRSTDUAL: - curAdvSettings.offsetDualPointA = newPoint; - curAdvSettings.offsetDualPointAArea = latestTarget.getArea(); - break; - case ROPO_TAKESECONDDUAL: - curAdvSettings.offsetDualPointB = newPoint; - curAdvSettings.offsetDualPointBArea = latestTarget.getArea(); - break; - default: - break; - } + propField.set(newPropValue, newPropValue); } - break; - default: - break; + logger.trace("Set prop " + propName + " to value " + newPropValue); + } catch (NoSuchFieldException | IllegalAccessException e) { + logger.error( + "Could not set prop " + + propName + + " with value " + + newPropValue + + " on " + + currentSettings, + e); + } catch (Exception e) { + logger.error("Unknown exception when setting PSC prop!", e); } - } - } - return; - case "changePipelineType": - parentModule.changePipelineType((Integer) newPropValue); - parentModule.saveAndBroadcastAll(); - return; - } - - // special case for camera settables - if (propName.startsWith("camera")) { - var propMethodName = "set" + propName.replace("camera", ""); - var methods = parentModule.visionSource.getSettables().getClass().getMethods(); - for (var method : methods) { - if (method.getName().equalsIgnoreCase(propMethodName)) { - try { - method.invoke(parentModule.visionSource.getSettables(), newPropValue); - } catch (Exception e) { - logger.error("Failed to invoke camera settable method: " + method.getName(), e); - } - } - } - } - try { - var propField = currentSettings.getClass().getField(propName); - var propType = propField.getType(); - - if (propType.isEnum()) { - var actual = 
propType.getEnumConstants()[(int) newPropValue]; - propField.set(currentSettings, actual); - } else if (propType.isAssignableFrom(DoubleCouple.class)) { - var orig = (ArrayList) newPropValue; - var actual = new DoubleCouple(orig.get(0), orig.get(1)); - propField.set(currentSettings, actual); - } else if (propType.isAssignableFrom(IntegerCouple.class)) { - var orig = (ArrayList) newPropValue; - var actual = new IntegerCouple(orig.get(0).intValue(), orig.get(1).intValue()); - propField.set(currentSettings, actual); - } else if (propType.equals(Double.TYPE)) { - propField.setDouble(currentSettings, ((Number) newPropValue).doubleValue()); - } else if (propType.equals(Integer.TYPE)) { - propField.setInt(currentSettings, (Integer) newPropValue); - } else if (propType.equals(Boolean.TYPE)) { - if (newPropValue instanceof Integer) { - propField.setBoolean(currentSettings, (Integer) newPropValue != 0); - } else { - propField.setBoolean(currentSettings, (Boolean) newPropValue); + parentModule.saveAndBroadcastSelective(wsEvent.originContext, propName, newPropValue); } - } else { - propField.set(newPropValue, newPropValue); - } - logger.trace("Set prop " + propName + " to value " + newPropValue); - } catch (NoSuchFieldException | IllegalAccessException e) { - logger.error( - "Could not set prop " - + propName - + " with value " - + newPropValue - + " on " - + currentSettings, - e); - } catch (Exception e) { - logger.error("Unknown exception when setting PSC prop!", e); } - - parentModule.saveAndBroadcastSelective(wsEvent.originContext, propName, newPropValue); - } } - } } diff --git a/photon-core/src/main/java/org/photonvision/vision/processes/VisionModuleManager.java b/photon-core/src/main/java/org/photonvision/vision/processes/VisionModuleManager.java index cb932f5e31..d75e62cb7e 100644 --- a/photon-core/src/main/java/org/photonvision/vision/processes/VisionModuleManager.java +++ b/photon-core/src/main/java/org/photonvision/vision/processes/VisionModuleManager.java @@ -24,85 
+24,85 @@ /** VisionModuleManager has many VisionModules, and provides camera configuration data to them. */ public class VisionModuleManager { - private final Logger logger = new Logger(VisionModuleManager.class, LogGroup.VisionModule); + private final Logger logger = new Logger(VisionModuleManager.class, LogGroup.VisionModule); - private static class ThreadSafeSingleton { - private static final VisionModuleManager INSTANCE = new VisionModuleManager(); - } + private static class ThreadSafeSingleton { + private static final VisionModuleManager INSTANCE = new VisionModuleManager(); + } - public static VisionModuleManager getInstance() { - return VisionModuleManager.ThreadSafeSingleton.INSTANCE; - } + public static VisionModuleManager getInstance() { + return VisionModuleManager.ThreadSafeSingleton.INSTANCE; + } - protected final List visionModules = new ArrayList<>(); + protected final List visionModules = new ArrayList<>(); - VisionModuleManager() {} + VisionModuleManager() {} - public List getModules() { - return visionModules; - } + public List getModules() { + return visionModules; + } - public VisionModule getModule(String nickname) { - for (var module : visionModules) { - if (module.getStateAsCameraConfig().nickname.equals(nickname)) return module; + public VisionModule getModule(String nickname) { + for (var module : visionModules) { + if (module.getStateAsCameraConfig().nickname.equals(nickname)) return module; + } + return null; } - return null; - } - public VisionModule getModule(int i) { - return visionModules.get(i); - } + public VisionModule getModule(int i) { + return visionModules.get(i); + } + + public List addSources(List visionSources) { + var addedModules = new HashMap(); - public List addSources(List visionSources) { - var addedModules = new HashMap(); + assignCameraIndex(visionSources); + for (var visionSource : visionSources) { + var pipelineManager = new PipelineManager(visionSource.getCameraConfiguration()); - 
assignCameraIndex(visionSources); - for (var visionSource : visionSources) { - var pipelineManager = new PipelineManager(visionSource.getCameraConfiguration()); + var module = new VisionModule(pipelineManager, visionSource, visionModules.size()); + visionModules.add(module); + addedModules.put(visionSource.getCameraConfiguration().streamIndex, module); + } - var module = new VisionModule(pipelineManager, visionSource, visionModules.size()); - visionModules.add(module); - addedModules.put(visionSource.getCameraConfiguration().streamIndex, module); + return addedModules.entrySet().stream() + .sorted(Comparator.comparingInt(Map.Entry::getKey)) // sort by stream index + .map(Map.Entry::getValue) // map to Stream of VisionModule + .collect(Collectors.toList()); // collect in a List } - return addedModules.entrySet().stream() - .sorted(Comparator.comparingInt(Map.Entry::getKey)) // sort by stream index - .map(Map.Entry::getValue) // map to Stream of VisionModule - .collect(Collectors.toList()); // collect in a List - } - - private void assignCameraIndex(List config) { - // We won't necessarily have already added all the cameras we need to at this point - // But by operating on the list, we have a fairly good idea of which we need to change, - // but it's not guaranteed that we change the correct one - // The best we can do is try to avoid a case where the stream index runs away to infinity - // since we can only stream 5 cameras at once - - // Big list, which should contain every vision source (currently loaded plus the new ones being - // added) - var bigList = new ArrayList(); - bigList.addAll( - this.getModules().stream().map(it -> it.visionSource).collect(Collectors.toList())); - bigList.addAll(config); - - for (var v : config) { - var listNoV = new ArrayList<>(bigList); - listNoV.remove(v); - if (listNoV.stream() - .anyMatch( - it -> - it.getCameraConfiguration().streamIndex - == v.getCameraConfiguration().streamIndex)) { - int idx = 0; - while (listNoV.stream() - 
.map(it -> it.getCameraConfiguration().streamIndex) - .collect(Collectors.toList()) - .contains(idx)) { - idx++; + private void assignCameraIndex(List config) { + // We won't necessarily have already added all the cameras we need to at this point + // But by operating on the list, we have a fairly good idea of which we need to change, + // but it's not guaranteed that we change the correct one + // The best we can do is try to avoid a case where the stream index runs away to infinity + // since we can only stream 5 cameras at once + + // Big list, which should contain every vision source (currently loaded plus the new ones being + // added) + var bigList = new ArrayList(); + bigList.addAll( + this.getModules().stream().map(it -> it.visionSource).collect(Collectors.toList())); + bigList.addAll(config); + + for (var v : config) { + var listNoV = new ArrayList<>(bigList); + listNoV.remove(v); + if (listNoV.stream() + .anyMatch( + it -> + it.getCameraConfiguration().streamIndex + == v.getCameraConfiguration().streamIndex)) { + int idx = 0; + while (listNoV.stream() + .map(it -> it.getCameraConfiguration().streamIndex) + .collect(Collectors.toList()) + .contains(idx)) { + idx++; + } + logger.debug("Assigning idx " + idx); + v.getCameraConfiguration().streamIndex = idx; + } } - logger.debug("Assigning idx " + idx); - v.getCameraConfiguration().streamIndex = idx; - } } - } } diff --git a/photon-core/src/main/java/org/photonvision/vision/processes/VisionRunner.java b/photon-core/src/main/java/org/photonvision/vision/processes/VisionRunner.java index 41ec35b5ff..60acc0d391 100644 --- a/photon-core/src/main/java/org/photonvision/vision/processes/VisionRunner.java +++ b/photon-core/src/main/java/org/photonvision/vision/processes/VisionRunner.java @@ -31,77 +31,77 @@ /** VisionRunner has a frame supplier, a pipeline supplier, and a result consumer */ @SuppressWarnings("rawtypes") public class VisionRunner { - private final Logger logger; - private final Thread 
visionProcessThread; - private final FrameProvider frameSupplier; - private final Supplier pipelineSupplier; - private final Consumer pipelineResultConsumer; - private final QuirkyCamera cameraQuirks; + private final Logger logger; + private final Thread visionProcessThread; + private final FrameProvider frameSupplier; + private final Supplier pipelineSupplier; + private final Consumer pipelineResultConsumer; + private final QuirkyCamera cameraQuirks; - private long loopCount; + private long loopCount; - /** - * VisionRunner contains a thread to run a pipeline, given a frame, and will give the result to - * the consumer. - * - * @param frameSupplier The supplier of the latest frame. - * @param pipelineSupplier The supplier of the current pipeline. - * @param pipelineResultConsumer The consumer of the latest result. - */ - public VisionRunner( - FrameProvider frameSupplier, - Supplier pipelineSupplier, - Consumer pipelineResultConsumer, - QuirkyCamera cameraQuirks) { - this.frameSupplier = frameSupplier; - this.pipelineSupplier = pipelineSupplier; - this.pipelineResultConsumer = pipelineResultConsumer; - this.cameraQuirks = cameraQuirks; + /** + * VisionRunner contains a thread to run a pipeline, given a frame, and will give the result to + * the consumer. + * + * @param frameSupplier The supplier of the latest frame. + * @param pipelineSupplier The supplier of the current pipeline. + * @param pipelineResultConsumer The consumer of the latest result. 
+ */ + public VisionRunner( + FrameProvider frameSupplier, + Supplier pipelineSupplier, + Consumer pipelineResultConsumer, + QuirkyCamera cameraQuirks) { + this.frameSupplier = frameSupplier; + this.pipelineSupplier = pipelineSupplier; + this.pipelineResultConsumer = pipelineResultConsumer; + this.cameraQuirks = cameraQuirks; - visionProcessThread = new Thread(this::update); - visionProcessThread.setName("VisionRunner - " + frameSupplier.getName()); - logger = new Logger(VisionRunner.class, frameSupplier.getName(), LogGroup.VisionModule); - } + visionProcessThread = new Thread(this::update); + visionProcessThread.setName("VisionRunner - " + frameSupplier.getName()); + logger = new Logger(VisionRunner.class, frameSupplier.getName(), LogGroup.VisionModule); + } - public void startProcess() { - visionProcessThread.start(); - } + public void startProcess() { + visionProcessThread.start(); + } - private void update() { - while (!Thread.interrupted()) { - var pipeline = pipelineSupplier.get(); + private void update() { + while (!Thread.interrupted()) { + var pipeline = pipelineSupplier.get(); - // Tell our camera implementation here what kind of pre-processing we need it to be doing - // (pipeline-dependent). I kinda hate how much leak this has... - // TODO would a callback object be a better fit? - var wantedProcessType = pipeline.getThresholdType(); - frameSupplier.requestFrameThresholdType(wantedProcessType); - var settings = pipeline.getSettings(); - if (settings instanceof AdvancedPipelineSettings) { - var advanced = (AdvancedPipelineSettings) settings; - var hsvParams = - new HSVPipe.HSVParams( - advanced.hsvHue, advanced.hsvSaturation, advanced.hsvValue, advanced.hueInverted); - // TODO who should deal with preventing this from happening _every single loop_? 
- frameSupplier.requestHsvSettings(hsvParams); - } - frameSupplier.requestFrameRotation(settings.inputImageRotationMode); - frameSupplier.requestFrameCopies(settings.inputShouldShow, settings.outputShouldShow); + // Tell our camera implementation here what kind of pre-processing we need it to be doing + // (pipeline-dependent). I kinda hate how much leak this has... + // TODO would a callback object be a better fit? + var wantedProcessType = pipeline.getThresholdType(); + frameSupplier.requestFrameThresholdType(wantedProcessType); + var settings = pipeline.getSettings(); + if (settings instanceof AdvancedPipelineSettings) { + var advanced = (AdvancedPipelineSettings) settings; + var hsvParams = + new HSVPipe.HSVParams( + advanced.hsvHue, advanced.hsvSaturation, advanced.hsvValue, advanced.hueInverted); + // TODO who should deal with preventing this from happening _every single loop_? + frameSupplier.requestHsvSettings(hsvParams); + } + frameSupplier.requestFrameRotation(settings.inputImageRotationMode); + frameSupplier.requestFrameCopies(settings.inputShouldShow, settings.outputShouldShow); - // Grab the new camera frame - var frame = frameSupplier.get(); + // Grab the new camera frame + var frame = frameSupplier.get(); - // There's no guarantee the processing type change will occur this tick, so pipelines should - // check themselves - try { - var pipelineResult = pipeline.run(frame, cameraQuirks); - pipelineResultConsumer.accept(pipelineResult); - } catch (Exception ex) { - logger.error("Exception on loop " + loopCount); - ex.printStackTrace(); - } + // There's no guarantee the processing type change will occur this tick, so pipelines should + // check themselves + try { + var pipelineResult = pipeline.run(frame, cameraQuirks); + pipelineResultConsumer.accept(pipelineResult); + } catch (Exception ex) { + logger.error("Exception on loop " + loopCount); + ex.printStackTrace(); + } - loopCount++; + loopCount++; + } } - } } diff --git 
a/photon-core/src/main/java/org/photonvision/vision/processes/VisionSource.java b/photon-core/src/main/java/org/photonvision/vision/processes/VisionSource.java index b81a2928cc..65caf8bd0f 100644 --- a/photon-core/src/main/java/org/photonvision/vision/processes/VisionSource.java +++ b/photon-core/src/main/java/org/photonvision/vision/processes/VisionSource.java @@ -21,19 +21,19 @@ import org.photonvision.vision.frame.FrameProvider; public abstract class VisionSource { - protected final CameraConfiguration cameraConfiguration; + protected final CameraConfiguration cameraConfiguration; - protected VisionSource(CameraConfiguration cameraConfiguration) { - this.cameraConfiguration = cameraConfiguration; - } + protected VisionSource(CameraConfiguration cameraConfiguration) { + this.cameraConfiguration = cameraConfiguration; + } - public CameraConfiguration getCameraConfiguration() { - return cameraConfiguration; - } + public CameraConfiguration getCameraConfiguration() { + return cameraConfiguration; + } - public abstract FrameProvider getFrameProvider(); + public abstract FrameProvider getFrameProvider(); - public abstract VisionSourceSettables getSettables(); + public abstract VisionSourceSettables getSettables(); - public abstract boolean isVendorCamera(); + public abstract boolean isVendorCamera(); } diff --git a/photon-core/src/main/java/org/photonvision/vision/processes/VisionSourceManager.java b/photon-core/src/main/java/org/photonvision/vision/processes/VisionSourceManager.java index aee2ab026d..4b31ba0691 100644 --- a/photon-core/src/main/java/org/photonvision/vision/processes/VisionSourceManager.java +++ b/photon-core/src/main/java/org/photonvision/vision/processes/VisionSourceManager.java @@ -41,326 +41,326 @@ import org.photonvision.vision.camera.USBCameraSource; public class VisionSourceManager { - private static final Logger logger = new Logger(VisionSourceManager.class, LogGroup.Camera); - private static final List deviceBlacklist = 
List.of("bcm2835-isp"); - - final List knownUsbCameras = new CopyOnWriteArrayList<>(); - final List unmatchedLoadedConfigs = new CopyOnWriteArrayList<>(); - private boolean hasWarned; - private String ignoredCamerasRegex = ""; - - private static class SingletonHolder { - private static final VisionSourceManager INSTANCE = new VisionSourceManager(); - } - - public static VisionSourceManager getInstance() { - return SingletonHolder.INSTANCE; - } - - VisionSourceManager() {} - - public void registerTimedTask() { - TimedTaskManager.getInstance().addTask("VisionSourceManager", this::tryMatchUSBCams, 3000); - } - - public void registerLoadedConfigs(CameraConfiguration... configs) { - registerLoadedConfigs(Arrays.asList(configs)); - } - - /** - * Register new camera configs loaded from disk. This will add them to the list of configs to try - * to match, and also automatically spawn new vision processes as necessary. - * - * @param configs The loaded camera configs. - */ - public void registerLoadedConfigs(Collection configs) { - unmatchedLoadedConfigs.addAll(configs); - } - - protected Supplier> cameraInfoSupplier = - () -> List.of(UsbCamera.enumerateUsbCameras()); - - protected void tryMatchUSBCams() { - var visionSourceList = tryMatchUSBCamImpl(); - if (visionSourceList == null) return; - - logger.info("Adding " + visionSourceList.size() + " configs to VMM."); - ConfigManager.getInstance().addCameraConfigurations(visionSourceList); - var addedSources = VisionModuleManager.getInstance().addSources(visionSourceList); - addedSources.forEach(VisionModule::start); - DataChangeService.getInstance() - .publishEvent( - new OutgoingUIEvent<>( - "fullsettings", ConfigManager.getInstance().getConfig().toHashMap())); - } - - protected List tryMatchUSBCamImpl() { - return tryMatchUSBCamImpl(true); - } - - protected List tryMatchUSBCamImpl(boolean createSources) { - // Detect cameras using CSCore - List connectedCameras = - new 
ArrayList<>(filterAllowedDevices(cameraInfoSupplier.get())); - - // Remove all known devices - var notYetLoadedCams = new ArrayList(); - for (var connectedCam : connectedCameras) { - boolean cameraIsUnknown = true; - for (UsbCameraInfo knownCam : this.knownUsbCameras) { - if (usbCamEquals(knownCam, connectedCam)) { - cameraIsUnknown = false; - break; - } - } - if (cameraIsUnknown) { - notYetLoadedCams.add(connectedCam); - } + private static final Logger logger = new Logger(VisionSourceManager.class, LogGroup.Camera); + private static final List deviceBlacklist = List.of("bcm2835-isp"); + + final List knownUsbCameras = new CopyOnWriteArrayList<>(); + final List unmatchedLoadedConfigs = new CopyOnWriteArrayList<>(); + private boolean hasWarned; + private String ignoredCamerasRegex = ""; + + private static class SingletonHolder { + private static final VisionSourceManager INSTANCE = new VisionSourceManager(); + } + + public static VisionSourceManager getInstance() { + return SingletonHolder.INSTANCE; } - if (notYetLoadedCams.isEmpty()) return null; + VisionSourceManager() {} - if (connectedCameras.isEmpty()) { - logger.warn( - "No USB cameras were detected! Check that all cameras are connected, and that the path is correct."); - return null; + public void registerTimedTask() { + TimedTaskManager.getInstance().addTask("VisionSourceManager", this::tryMatchUSBCams, 3000); } - logger.debug("Matching " + notYetLoadedCams.size() + " new cameras!"); - // Sort out just the USB cams - var usbCamConfigs = new ArrayList<>(); - for (var config : unmatchedLoadedConfigs) { - if (config.cameraType == CameraType.UsbCamera) usbCamConfigs.add(config); + public void registerLoadedConfigs(CameraConfiguration... configs) { + registerLoadedConfigs(Arrays.asList(configs)); } - // Debug prints - for (var info : notYetLoadedCams) { - logger.info("Adding local video device - \"" + info.name + "\" at \"" + info.path + "\""); + /** + * Register new camera configs loaded from disk. 
This will add them to the list of configs to try + * to match, and also automatically spawn new vision processes as necessary. + * + * @param configs The loaded camera configs. + */ + public void registerLoadedConfigs(Collection configs) { + unmatchedLoadedConfigs.addAll(configs); } - if (!usbCamConfigs.isEmpty()) - logger.debug("Trying to match " + usbCamConfigs.size() + " unmatched configs..."); - - // Match camera configs to physical cameras - var matchedCameras = matchUSBCameras(notYetLoadedCams, unmatchedLoadedConfigs); - unmatchedLoadedConfigs.removeAll(matchedCameras); - if (!unmatchedLoadedConfigs.isEmpty() && !hasWarned) { - logger.warn( - () -> - "After matching, " - + unmatchedLoadedConfigs.size() - + " configs remained unmatched. Is your camera disconnected?"); - logger.warn( - "Unloaded configs: " - + unmatchedLoadedConfigs.stream() - .map(it -> it.nickname) - .collect(Collectors.joining())); - hasWarned = true; + protected Supplier> cameraInfoSupplier = + () -> List.of(UsbCamera.enumerateUsbCameras()); + + protected void tryMatchUSBCams() { + var visionSourceList = tryMatchUSBCamImpl(); + if (visionSourceList == null) return; + + logger.info("Adding " + visionSourceList.size() + " configs to VMM."); + ConfigManager.getInstance().addCameraConfigurations(visionSourceList); + var addedSources = VisionModuleManager.getInstance().addSources(visionSourceList); + addedSources.forEach(VisionModule::start); + DataChangeService.getInstance() + .publishEvent( + new OutgoingUIEvent<>( + "fullsettings", ConfigManager.getInstance().getConfig().toHashMap())); } - // We add the matched cameras to the known camera list - for (var cam : notYetLoadedCams) { - if (this.knownUsbCameras.stream().noneMatch(it -> usbCamEquals(it, cam))) { - this.knownUsbCameras.add(cam); - } + protected List tryMatchUSBCamImpl() { + return tryMatchUSBCamImpl(true); + } + + protected List tryMatchUSBCamImpl(boolean createSources) { + // Detect cameras using CSCore + List connectedCameras = + 
new ArrayList<>(filterAllowedDevices(cameraInfoSupplier.get())); + + // Remove all known devices + var notYetLoadedCams = new ArrayList(); + for (var connectedCam : connectedCameras) { + boolean cameraIsUnknown = true; + for (UsbCameraInfo knownCam : this.knownUsbCameras) { + if (usbCamEquals(knownCam, connectedCam)) { + cameraIsUnknown = false; + break; + } + } + if (cameraIsUnknown) { + notYetLoadedCams.add(connectedCam); + } + } + + if (notYetLoadedCams.isEmpty()) return null; + + if (connectedCameras.isEmpty()) { + logger.warn( + "No USB cameras were detected! Check that all cameras are connected, and that the path is correct."); + return null; + } + logger.debug("Matching " + notYetLoadedCams.size() + " new cameras!"); + + // Sort out just the USB cams + var usbCamConfigs = new ArrayList<>(); + for (var config : unmatchedLoadedConfigs) { + if (config.cameraType == CameraType.UsbCamera) usbCamConfigs.add(config); + } + + // Debug prints + for (var info : notYetLoadedCams) { + logger.info("Adding local video device - \"" + info.name + "\" at \"" + info.path + "\""); + } + + if (!usbCamConfigs.isEmpty()) + logger.debug("Trying to match " + usbCamConfigs.size() + " unmatched configs..."); + + // Match camera configs to physical cameras + var matchedCameras = matchUSBCameras(notYetLoadedCams, unmatchedLoadedConfigs); + unmatchedLoadedConfigs.removeAll(matchedCameras); + if (!unmatchedLoadedConfigs.isEmpty() && !hasWarned) { + logger.warn( + () -> + "After matching, " + + unmatchedLoadedConfigs.size() + + " configs remained unmatched. 
Is your camera disconnected?"); + logger.warn( + "Unloaded configs: " + + unmatchedLoadedConfigs.stream() + .map(it -> it.nickname) + .collect(Collectors.joining())); + hasWarned = true; + } + + // We add the matched cameras to the known camera list + for (var cam : notYetLoadedCams) { + if (this.knownUsbCameras.stream().noneMatch(it -> usbCamEquals(it, cam))) { + this.knownUsbCameras.add(cam); + } + } + if (matchedCameras.isEmpty()) return null; + + // for unit tests only! + if (!createSources) { + return List.of(); + } + + // Turn these camera configs into vision sources + var sources = loadVisionSourcesFromCamConfigs(matchedCameras); + + // Print info about each vision source + for (var src : sources) { + logger.debug( + () -> + "Matched config for camera \"" + + src.getFrameProvider().getName() + + "\" and loaded " + + src.getCameraConfiguration().pipelineSettings.size() + + " pipelines"); + } + + return sources; } - if (matchedCameras.isEmpty()) return null; - // for unit tests only! - if (!createSources) { - return List.of(); + /** + * Create {@link CameraConfiguration}s based on a list of detected USB cameras and the configs on + * disk. + * + * @param detectedCamInfos Information about currently connected USB cameras. + * @param loadedUsbCamConfigs The USB {@link CameraConfiguration}s loaded from disk. + * @return the matched configurations. 
+ */ + private List matchUSBCameras( + List detectedCamInfos, List loadedUsbCamConfigs) { + var detectedCameraList = new ArrayList<>(detectedCamInfos); + ArrayList cameraConfigurations = new ArrayList<>(); + + // loop over all the configs loaded from disk + for (CameraConfiguration config : loadedUsbCamConfigs) { + UsbCameraInfo cameraInfo; + + // attempt matching by path and basename + logger.debug( + "Trying to find a match for loaded camera " + + config.baseName + + " with path " + + config.path); + cameraInfo = + detectedCameraList.stream() + .filter( + usbCameraInfo -> + usbCameraInfo.path.equals(config.path) + && cameraNameToBaseName(usbCameraInfo.name).equals(config.baseName)) + .findFirst() + .orElse(null); + + // if path based fails, attempt basename only match + if (cameraInfo == null) { + logger.debug("Failed to match by path and name, falling back to name-only match"); + cameraInfo = + detectedCameraList.stream() + .filter( + usbCameraInfo -> + cameraNameToBaseName(usbCameraInfo.name).equals(config.baseName)) + .findFirst() + .orElse(null); + } + + // If we actually matched a camera to a config, remove that camera from the list and add it to + // the output + if (cameraInfo != null) { + logger.debug("Matched the config for " + config.baseName + " to a physical camera!"); + detectedCameraList.remove(cameraInfo); + cameraConfigurations.add(mergeInfoIntoConfig(config, cameraInfo)); + } + } + + // If we have any unmatched cameras left, create a new CameraConfiguration for them here. 
+ logger.debug( + "After matching loaded configs " + detectedCameraList.size() + " cameras were unmatched."); + for (UsbCameraInfo info : detectedCameraList) { + // create new camera config for all new cameras + String baseName = cameraNameToBaseName(info.name); + String uniqueName = baseNameToUniqueName(baseName); + + int suffix = 0; + while (containsName(cameraConfigurations, uniqueName)) { + suffix++; + uniqueName = String.format("%s (%d)", uniqueName, suffix); + } + + logger.info("Creating a new camera config for camera " + uniqueName); + + // HACK -- for picams, we want to use the camera model + String nickname = uniqueName; + if (isCsiCamera(info)) { + nickname = LibCameraJNI.getSensorModel().toString(); + } + + CameraConfiguration configuration = + new CameraConfiguration(baseName, uniqueName, nickname, info.path, info.otherPaths); + cameraConfigurations.add(configuration); + } + + logger.debug("Matched or created " + cameraConfigurations.size() + " camera configs!"); + return cameraConfigurations; } - // Turn these camera configs into vision sources - var sources = loadVisionSourcesFromCamConfigs(matchedCameras); - - // Print info about each vision source - for (var src : sources) { - logger.debug( - () -> - "Matched config for camera \"" - + src.getFrameProvider().getName() - + "\" and loaded " - + src.getCameraConfiguration().pipelineSettings.size() - + " pipelines"); + private boolean isCsiCamera(UsbCameraInfo configuration) { + return (Arrays.stream(configuration.otherPaths).anyMatch(it -> it.contains("csi-video")) + || cameraNameToBaseName(configuration.name).equals("unicam")); } - return sources; - } - - /** - * Create {@link CameraConfiguration}s based on a list of detected USB cameras and the configs on - * disk. - * - * @param detectedCamInfos Information about currently connected USB cameras. - * @param loadedUsbCamConfigs The USB {@link CameraConfiguration}s loaded from disk. - * @return the matched configurations. 
- */ - private List matchUSBCameras( - List detectedCamInfos, List loadedUsbCamConfigs) { - var detectedCameraList = new ArrayList<>(detectedCamInfos); - ArrayList cameraConfigurations = new ArrayList<>(); - - // loop over all the configs loaded from disk - for (CameraConfiguration config : loadedUsbCamConfigs) { - UsbCameraInfo cameraInfo; - - // attempt matching by path and basename - logger.debug( - "Trying to find a match for loaded camera " - + config.baseName - + " with path " - + config.path); - cameraInfo = - detectedCameraList.stream() - .filter( - usbCameraInfo -> - usbCameraInfo.path.equals(config.path) - && cameraNameToBaseName(usbCameraInfo.name).equals(config.baseName)) - .findFirst() - .orElse(null); - - // if path based fails, attempt basename only match - if (cameraInfo == null) { - logger.debug("Failed to match by path and name, falling back to name-only match"); - cameraInfo = - detectedCameraList.stream() - .filter( - usbCameraInfo -> - cameraNameToBaseName(usbCameraInfo.name).equals(config.baseName)) - .findFirst() - .orElse(null); - } - - // If we actually matched a camera to a config, remove that camera from the list and add it to - // the output - if (cameraInfo != null) { - logger.debug("Matched the config for " + config.baseName + " to a physical camera!"); - detectedCameraList.remove(cameraInfo); - cameraConfigurations.add(mergeInfoIntoConfig(config, cameraInfo)); - } + private CameraConfiguration mergeInfoIntoConfig(CameraConfiguration cfg, UsbCameraInfo info) { + if (!cfg.path.equals(info.path)) { + logger.debug("Updating path config from " + cfg.path + " to " + info.path); + cfg.path = info.path; + } + + return cfg; } - // If we have any unmatched cameras left, create a new CameraConfiguration for them here. 
- logger.debug( - "After matching loaded configs " + detectedCameraList.size() + " cameras were unmatched."); - for (UsbCameraInfo info : detectedCameraList) { - // create new camera config for all new cameras - String baseName = cameraNameToBaseName(info.name); - String uniqueName = baseNameToUniqueName(baseName); - - int suffix = 0; - while (containsName(cameraConfigurations, uniqueName)) { - suffix++; - uniqueName = String.format("%s (%d)", uniqueName, suffix); - } - - logger.info("Creating a new camera config for camera " + uniqueName); - - // HACK -- for picams, we want to use the camera model - String nickname = uniqueName; - if (isCsiCamera(info)) { - nickname = LibCameraJNI.getSensorModel().toString(); - } - - CameraConfiguration configuration = - new CameraConfiguration(baseName, uniqueName, nickname, info.path, info.otherPaths); - cameraConfigurations.add(configuration); + public void setIgnoredCamerasRegex(String ignoredCamerasRegex) { + this.ignoredCamerasRegex = ignoredCamerasRegex; } - logger.debug("Matched or created " + cameraConfigurations.size() + " camera configs!"); - return cameraConfigurations; - } + private List filterAllowedDevices(List allDevices) { + List filteredDevices = new ArrayList<>(); + for (var device : allDevices) { + if (deviceBlacklist.contains(device.name)) { + logger.trace( + "Skipping blacklisted device: \"" + device.name + "\" at \"" + device.path + "\""); + } else if (device.name.matches(ignoredCamerasRegex)) { + logger.trace("Skipping ignored device: \"" + device.name + "\" at \"" + device.path); + } else { + filteredDevices.add(device); + logger.trace( + "Adding local video device - \"" + device.name + "\" at \"" + device.path + "\""); + } + } + return filteredDevices; + } - private boolean isCsiCamera(UsbCameraInfo configuration) { - return (Arrays.stream(configuration.otherPaths).anyMatch(it -> it.contains("csi-video")) - || cameraNameToBaseName(configuration.name).equals("unicam")); - } + private boolean 
usbCamEquals(UsbCameraInfo a, UsbCameraInfo b) { + return a.path.equals(b.path) + && a.dev == b.dev + && a.name.equals(b.name) + && a.productId == b.productId + && a.vendorId == b.vendorId; + } - private CameraConfiguration mergeInfoIntoConfig(CameraConfiguration cfg, UsbCameraInfo info) { - if (!cfg.path.equals(info.path)) { - logger.debug("Updating path config from " + cfg.path + " to " + info.path); - cfg.path = info.path; + // Remove all non-ASCII characters + private static String cameraNameToBaseName(String cameraName) { + return cameraName.replaceAll("[^\\x00-\\x7F]", ""); } - return cfg; - } - - public void setIgnoredCamerasRegex(String ignoredCamerasRegex) { - this.ignoredCamerasRegex = ignoredCamerasRegex; - } - - private List filterAllowedDevices(List allDevices) { - List filteredDevices = new ArrayList<>(); - for (var device : allDevices) { - if (deviceBlacklist.contains(device.name)) { - logger.trace( - "Skipping blacklisted device: \"" + device.name + "\" at \"" + device.path + "\""); - } else if (device.name.matches(ignoredCamerasRegex)) { - logger.trace("Skipping ignored device: \"" + device.name + "\" at \"" + device.path); - } else { - filteredDevices.add(device); - logger.trace( - "Adding local video device - \"" + device.name + "\" at \"" + device.path + "\""); - } + // Replace spaces with underscores + private static String baseNameToUniqueName(String baseName) { + return baseName.replaceAll(" ", "_"); } - return filteredDevices; - } - - private boolean usbCamEquals(UsbCameraInfo a, UsbCameraInfo b) { - return a.path.equals(b.path) - && a.dev == b.dev - && a.name.equals(b.name) - && a.productId == b.productId - && a.vendorId == b.vendorId; - } - - // Remove all non-ASCII characters - private static String cameraNameToBaseName(String cameraName) { - return cameraName.replaceAll("[^\\x00-\\x7F]", ""); - } - - // Replace spaces with underscores - private static String baseNameToUniqueName(String baseName) { - return baseName.replaceAll(" ", "_"); 
- } - - private static List loadVisionSourcesFromCamConfigs( - List camConfigs) { - var cameraSources = new ArrayList(); - for (var configuration : camConfigs) { - logger.debug("Creating VisionSource for " + configuration); - - // Picams should have csi-video in the path - boolean is_picam = - (Arrays.stream(configuration.otherPaths).anyMatch(it -> it.contains("csi-video")) - || configuration.baseName.equals("unicam")); - boolean is_pi = Platform.isRaspberryPi(); - if (is_picam && is_pi) { - configuration.cameraType = CameraType.ZeroCopyPicam; - var piCamSrc = new LibcameraGpuSource(configuration); - cameraSources.add(piCamSrc); - } else { - var newCam = new USBCameraSource(configuration); - if (!newCam.cameraQuirks.hasQuirk(CameraQuirk.CompletelyBroken) - && !newCam.getSettables().videoModes.isEmpty()) { - cameraSources.add(newCam); + + private static List loadVisionSourcesFromCamConfigs( + List camConfigs) { + var cameraSources = new ArrayList(); + for (var configuration : camConfigs) { + logger.debug("Creating VisionSource for " + configuration); + + // Picams should have csi-video in the path + boolean is_picam = + (Arrays.stream(configuration.otherPaths).anyMatch(it -> it.contains("csi-video")) + || configuration.baseName.equals("unicam")); + boolean is_pi = Platform.isRaspberryPi(); + if (is_picam && is_pi) { + configuration.cameraType = CameraType.ZeroCopyPicam; + var piCamSrc = new LibcameraGpuSource(configuration); + cameraSources.add(piCamSrc); + } else { + var newCam = new USBCameraSource(configuration); + if (!newCam.cameraQuirks.hasQuirk(CameraQuirk.CompletelyBroken) + && !newCam.getSettables().videoModes.isEmpty()) { + cameraSources.add(newCam); + } + } } - } + return cameraSources; + } + + /** + * Check if a given config list contains the given unique name. + * + * @param configList A list of camera configs. + * @param uniqueName The unique name. + * @return If the list of configs contains the unique name. 
+ */ + private boolean containsName( + final List configList, final String uniqueName) { + return configList.stream() + .anyMatch(configuration -> configuration.uniqueName.equals(uniqueName)); } - return cameraSources; - } - - /** - * Check if a given config list contains the given unique name. - * - * @param configList A list of camera configs. - * @param uniqueName The unique name. - * @return If the list of configs contains the unique name. - */ - private boolean containsName( - final List configList, final String uniqueName) { - return configList.stream() - .anyMatch(configuration -> configuration.uniqueName.equals(uniqueName)); - } } diff --git a/photon-core/src/main/java/org/photonvision/vision/processes/VisionSourceSettables.java b/photon-core/src/main/java/org/photonvision/vision/processes/VisionSourceSettables.java index 8de085ffaf..cda7d08e6f 100644 --- a/photon-core/src/main/java/org/photonvision/vision/processes/VisionSourceSettables.java +++ b/photon-core/src/main/java/org/photonvision/vision/processes/VisionSourceSettables.java @@ -26,97 +26,97 @@ import org.photonvision.vision.frame.FrameStaticProperties; public abstract class VisionSourceSettables { - protected static final Logger logger = - new Logger(VisionSourceSettables.class, LogGroup.VisionModule); - - private final CameraConfiguration configuration; - - protected VisionSourceSettables(CameraConfiguration configuration) { - this.configuration = configuration; - } - - protected FrameStaticProperties frameStaticProperties; - protected HashMap videoModes; - - public CameraConfiguration getConfiguration() { - return configuration; - } - - public abstract void setExposure(double exposure); - - public abstract void setAutoExposure(boolean cameraAutoExposure); - - public abstract void setBrightness(int brightness); - - public abstract void setGain(int gain); - - // Pretty uncommon so instead of abstract this is just a no-op by default - // Overriden by cameras with AWB gain support - public void 
setRedGain(int red) {} - - public void setBlueGain(int blue) {} - - public abstract VideoMode getCurrentVideoMode(); - - public void setVideoModeInternal(int index) { - setVideoMode(getAllVideoModes().get(index)); - } - - public void setVideoMode(VideoMode mode) { - logger.info( - "Setting video mode to " - + "FPS: " - + mode.fps - + " Width: " - + mode.width - + " Height: " - + mode.height - + " Pixel Format: " - + mode.pixelFormat); - setVideoModeInternal(mode); - calculateFrameStaticProps(); - } - - protected abstract void setVideoModeInternal(VideoMode videoMode); - - @SuppressWarnings("unused") - public void setVideoModeIndex(int index) { - setVideoMode(videoModes.get(index)); - } - - public abstract HashMap getAllVideoModes(); - - public double getFOV() { - return configuration.FOV; - } - - public void setFOV(double fov) { - logger.info("Setting FOV to " + fov); - configuration.FOV = fov; - calculateFrameStaticProps(); - } - - public void addCalibration(CameraCalibrationCoefficients calibrationCoefficients) { - configuration.addCalibration(calibrationCoefficients); - calculateFrameStaticProps(); - } - - private void calculateFrameStaticProps() { - var videoMode = getCurrentVideoMode(); - this.frameStaticProperties = - new FrameStaticProperties( - videoMode, - getFOV(), - configuration.calibrations.stream() - .filter( - it -> - it.resolution.width == videoMode.width - && it.resolution.height == videoMode.height) - .findFirst() - .orElse(null)); - } - - public FrameStaticProperties getFrameStaticProperties() { - return frameStaticProperties; - } + protected static final Logger logger = + new Logger(VisionSourceSettables.class, LogGroup.VisionModule); + + private final CameraConfiguration configuration; + + protected VisionSourceSettables(CameraConfiguration configuration) { + this.configuration = configuration; + } + + protected FrameStaticProperties frameStaticProperties; + protected HashMap videoModes; + + public CameraConfiguration getConfiguration() { + 
return configuration; + } + + public abstract void setExposure(double exposure); + + public abstract void setAutoExposure(boolean cameraAutoExposure); + + public abstract void setBrightness(int brightness); + + public abstract void setGain(int gain); + + // Pretty uncommon so instead of abstract this is just a no-op by default + // Overriden by cameras with AWB gain support + public void setRedGain(int red) {} + + public void setBlueGain(int blue) {} + + public abstract VideoMode getCurrentVideoMode(); + + public void setVideoModeInternal(int index) { + setVideoMode(getAllVideoModes().get(index)); + } + + public void setVideoMode(VideoMode mode) { + logger.info( + "Setting video mode to " + + "FPS: " + + mode.fps + + " Width: " + + mode.width + + " Height: " + + mode.height + + " Pixel Format: " + + mode.pixelFormat); + setVideoModeInternal(mode); + calculateFrameStaticProps(); + } + + protected abstract void setVideoModeInternal(VideoMode videoMode); + + @SuppressWarnings("unused") + public void setVideoModeIndex(int index) { + setVideoMode(videoModes.get(index)); + } + + public abstract HashMap getAllVideoModes(); + + public double getFOV() { + return configuration.FOV; + } + + public void setFOV(double fov) { + logger.info("Setting FOV to " + fov); + configuration.FOV = fov; + calculateFrameStaticProps(); + } + + public void addCalibration(CameraCalibrationCoefficients calibrationCoefficients) { + configuration.addCalibration(calibrationCoefficients); + calculateFrameStaticProps(); + } + + private void calculateFrameStaticProps() { + var videoMode = getCurrentVideoMode(); + this.frameStaticProperties = + new FrameStaticProperties( + videoMode, + getFOV(), + configuration.calibrations.stream() + .filter( + it -> + it.resolution.width == videoMode.width + && it.resolution.height == videoMode.height) + .findFirst() + .orElse(null)); + } + + public FrameStaticProperties getFrameStaticProperties() { + return frameStaticProperties; + } } diff --git 
a/photon-core/src/main/java/org/photonvision/vision/target/PotentialTarget.java b/photon-core/src/main/java/org/photonvision/vision/target/PotentialTarget.java index f4306070b8..deb3de569d 100644 --- a/photon-core/src/main/java/org/photonvision/vision/target/PotentialTarget.java +++ b/photon-core/src/main/java/org/photonvision/vision/target/PotentialTarget.java @@ -25,43 +25,43 @@ public class PotentialTarget implements Releasable { - public final Contour m_mainContour; - public final List m_subContours; - public final CVShape shape; + public final Contour m_mainContour; + public final List m_subContours; + public final CVShape shape; - public PotentialTarget(Contour inputContour) { - this(inputContour, List.of()); - } + public PotentialTarget(Contour inputContour) { + this(inputContour, List.of()); + } - public PotentialTarget(Contour inputContour, List subContours) { - this(inputContour, subContours, null); - } + public PotentialTarget(Contour inputContour, List subContours) { + this(inputContour, subContours, null); + } - public PotentialTarget(Contour inputContour, List subContours, CVShape shape) { - m_mainContour = inputContour; - m_subContours = new ArrayList<>(subContours); - this.shape = shape; - } + public PotentialTarget(Contour inputContour, List subContours, CVShape shape) { + m_mainContour = inputContour; + m_subContours = new ArrayList<>(subContours); + this.shape = shape; + } - public PotentialTarget(Contour inputContour, CVShape shape) { - this(inputContour, List.of(), shape); - } + public PotentialTarget(Contour inputContour, CVShape shape) { + this(inputContour, List.of(), shape); + } - public RotatedRect getMinAreaRect() { - return m_mainContour.getMinAreaRect(); - } + public RotatedRect getMinAreaRect() { + return m_mainContour.getMinAreaRect(); + } - public double getArea() { - return m_mainContour.getArea(); - } + public double getArea() { + return m_mainContour.getArea(); + } - @Override - public void release() { - m_mainContour.release(); - 
for (var sc : m_subContours) { - sc.release(); + @Override + public void release() { + m_mainContour.release(); + for (var sc : m_subContours) { + sc.release(); + } + m_subContours.clear(); + if (shape != null) shape.release(); } - m_subContours.clear(); - if (shape != null) shape.release(); - } } diff --git a/photon-core/src/main/java/org/photonvision/vision/target/RobotOffsetPointMode.java b/photon-core/src/main/java/org/photonvision/vision/target/RobotOffsetPointMode.java index 17c8478dfb..434e2de6ff 100644 --- a/photon-core/src/main/java/org/photonvision/vision/target/RobotOffsetPointMode.java +++ b/photon-core/src/main/java/org/photonvision/vision/target/RobotOffsetPointMode.java @@ -17,7 +17,7 @@ package org.photonvision.vision.target; public enum RobotOffsetPointMode { - None, - Single, - Dual + None, + Single, + Dual } diff --git a/photon-core/src/main/java/org/photonvision/vision/target/RobotOffsetPointOperation.java b/photon-core/src/main/java/org/photonvision/vision/target/RobotOffsetPointOperation.java index f1f1eee86b..0e69fe9d8f 100644 --- a/photon-core/src/main/java/org/photonvision/vision/target/RobotOffsetPointOperation.java +++ b/photon-core/src/main/java/org/photonvision/vision/target/RobotOffsetPointOperation.java @@ -17,29 +17,29 @@ package org.photonvision.vision.target; public enum RobotOffsetPointOperation { - ROPO_CLEAR(0), - ROPO_TAKESINGLE(1), - ROPO_TAKEFIRSTDUAL(2), - ROPO_TAKESECONDDUAL(3); + ROPO_CLEAR(0), + ROPO_TAKESINGLE(1), + ROPO_TAKEFIRSTDUAL(2), + ROPO_TAKESECONDDUAL(3); - public final int index; + public final int index; - RobotOffsetPointOperation(int index) { - this.index = index; - } + RobotOffsetPointOperation(int index) { + this.index = index; + } - public static RobotOffsetPointOperation fromIndex(int index) { - switch (index) { - case 0: - return ROPO_CLEAR; - case 1: - return ROPO_TAKESINGLE; - case 2: - return ROPO_TAKEFIRSTDUAL; - case 3: - return ROPO_TAKESECONDDUAL; - default: - return ROPO_CLEAR; + public static 
RobotOffsetPointOperation fromIndex(int index) { + switch (index) { + case 0: + return ROPO_CLEAR; + case 1: + return ROPO_TAKESINGLE; + case 2: + return ROPO_TAKEFIRSTDUAL; + case 3: + return ROPO_TAKESECONDDUAL; + default: + return ROPO_CLEAR; + } } - } } diff --git a/photon-core/src/main/java/org/photonvision/vision/target/TargetCalculations.java b/photon-core/src/main/java/org/photonvision/vision/target/TargetCalculations.java index 8b93b37f37..5e64237443 100644 --- a/photon-core/src/main/java/org/photonvision/vision/target/TargetCalculations.java +++ b/photon-core/src/main/java/org/photonvision/vision/target/TargetCalculations.java @@ -23,145 +23,145 @@ import org.photonvision.vision.opencv.DualOffsetValues; public class TargetCalculations { - /** - * Calculates the yaw and pitch of a point in the image. Yaw and pitch must be calculated together - * to account for perspective distortion. Yaw is positive right, and pitch is positive up. - * - * @param offsetCenterX The X value of the offset principal point (cx) in pixels - * @param targetCenterX The X value of the target's center point in pixels - * @param horizontalFocalLength The horizontal focal length (fx) in pixels - * @param offsetCenterY The Y value of the offset principal point (cy) in pixels - * @param targetCenterY The Y value of the target's center point in pixels - * @param verticalFocalLength The vertical focal length (fy) in pixels - * @return The yaw and pitch from the principal axis to the target center, in degrees. 
- */ - public static DoubleCouple calculateYawPitch( - double offsetCenterX, - double targetCenterX, - double horizontalFocalLength, - double offsetCenterY, - double targetCenterY, - double verticalFocalLength) { - double yaw = Math.atan((targetCenterX - offsetCenterX) / horizontalFocalLength); - double pitch = - Math.atan((offsetCenterY - targetCenterY) / (verticalFocalLength / Math.cos(yaw))); - return new DoubleCouple(Math.toDegrees(yaw), Math.toDegrees(pitch)); - } - - public static double calculateSkew(boolean isLandscape, RotatedRect minAreaRect) { - // https://namkeenman.wordpress.com/2015/12/18/open-cv-determine-angle-of-rotatedrect-minarearect/ - var angle = minAreaRect.angle; - - if (isLandscape && minAreaRect.size.width < minAreaRect.size.height) angle += 90; - else if (!isLandscape && minAreaRect.size.height < minAreaRect.size.width) angle += 90; - - // Ensure skew is bounded on [-90, 90] - while (angle > 90) angle -= 180; - while (angle < -90) angle += 180; - - return angle; - } - - public static Point calculateTargetOffsetPoint( - boolean isLandscape, TargetOffsetPointEdge offsetRegion, RotatedRect minAreaRect) { - Point[] vertices = new Point[4]; - - minAreaRect.points(vertices); - - Point bottom = getMiddle(vertices[0], vertices[1]); - Point left = getMiddle(vertices[1], vertices[2]); - Point top = getMiddle(vertices[2], vertices[3]); - Point right = getMiddle(vertices[3], vertices[0]); - - boolean orientationCorrect = minAreaRect.size.width > minAreaRect.size.height; - if (!isLandscape) orientationCorrect = !orientationCorrect; - - switch (offsetRegion) { - case Top: - if (orientationCorrect) return (left.y < right.y) ? left : right; - else return (top.y < bottom.y) ? top : bottom; - case Bottom: - if (orientationCorrect) return (left.y > right.y) ? left : right; - else return (top.y > bottom.y) ? top : bottom; - case Left: - if (orientationCorrect) return (top.x < bottom.x) ? top : bottom; - else return (left.x < right.x) ? 
left : right; - case Right: - if (orientationCorrect) return (top.x > bottom.x) ? top : bottom; - else return (left.x > right.x) ? left : right; - default: - return minAreaRect.center; + /** + * Calculates the yaw and pitch of a point in the image. Yaw and pitch must be calculated together + * to account for perspective distortion. Yaw is positive right, and pitch is positive up. + * + * @param offsetCenterX The X value of the offset principal point (cx) in pixels + * @param targetCenterX The X value of the target's center point in pixels + * @param horizontalFocalLength The horizontal focal length (fx) in pixels + * @param offsetCenterY The Y value of the offset principal point (cy) in pixels + * @param targetCenterY The Y value of the target's center point in pixels + * @param verticalFocalLength The vertical focal length (fy) in pixels + * @return The yaw and pitch from the principal axis to the target center, in degrees. + */ + public static DoubleCouple calculateYawPitch( + double offsetCenterX, + double targetCenterX, + double horizontalFocalLength, + double offsetCenterY, + double targetCenterY, + double verticalFocalLength) { + double yaw = Math.atan((targetCenterX - offsetCenterX) / horizontalFocalLength); + double pitch = + Math.atan((offsetCenterY - targetCenterY) / (verticalFocalLength / Math.cos(yaw))); + return new DoubleCouple(Math.toDegrees(yaw), Math.toDegrees(pitch)); } - } - - private static Point getMiddle(Point p1, Point p2) { - return new Point(((p1.x + p2.x) / 2), ((p1.y + p2.y) / 2)); - } - - public static Point calculateRobotOffsetPoint( - Point offsetPoint, - Point camCenterPoint, - DualOffsetValues dualOffsetValues, - RobotOffsetPointMode offsetMode) { - switch (offsetMode) { - case None: - default: - return camCenterPoint; - case Single: - if (offsetPoint.x == 0 && offsetPoint.y == 0) { - return camCenterPoint; - } else { - return offsetPoint; + + public static double calculateSkew(boolean isLandscape, RotatedRect minAreaRect) { + // 
https://namkeenman.wordpress.com/2015/12/18/open-cv-determine-angle-of-rotatedrect-minarearect/ + var angle = minAreaRect.angle; + + if (isLandscape && minAreaRect.size.width < minAreaRect.size.height) angle += 90; + else if (!isLandscape && minAreaRect.size.height < minAreaRect.size.width) angle += 90; + + // Ensure skew is bounded on [-90, 90] + while (angle > 90) angle -= 180; + while (angle < -90) angle += 180; + + return angle; + } + + public static Point calculateTargetOffsetPoint( + boolean isLandscape, TargetOffsetPointEdge offsetRegion, RotatedRect minAreaRect) { + Point[] vertices = new Point[4]; + + minAreaRect.points(vertices); + + Point bottom = getMiddle(vertices[0], vertices[1]); + Point left = getMiddle(vertices[1], vertices[2]); + Point top = getMiddle(vertices[2], vertices[3]); + Point right = getMiddle(vertices[3], vertices[0]); + + boolean orientationCorrect = minAreaRect.size.width > minAreaRect.size.height; + if (!isLandscape) orientationCorrect = !orientationCorrect; + + switch (offsetRegion) { + case Top: + if (orientationCorrect) return (left.y < right.y) ? left : right; + else return (top.y < bottom.y) ? top : bottom; + case Bottom: + if (orientationCorrect) return (left.y > right.y) ? left : right; + else return (top.y > bottom.y) ? top : bottom; + case Left: + if (orientationCorrect) return (top.x < bottom.x) ? top : bottom; + else return (left.x < right.x) ? left : right; + case Right: + if (orientationCorrect) return (top.x > bottom.x) ? top : bottom; + else return (left.x > right.x) ? 
left : right; + default: + return minAreaRect.center; + } + } + + private static Point getMiddle(Point p1, Point p2) { + return new Point(((p1.x + p2.x) / 2), ((p1.y + p2.y) / 2)); + } + + public static Point calculateRobotOffsetPoint( + Point offsetPoint, + Point camCenterPoint, + DualOffsetValues dualOffsetValues, + RobotOffsetPointMode offsetMode) { + switch (offsetMode) { + case None: + default: + return camCenterPoint; + case Single: + if (offsetPoint.x == 0 && offsetPoint.y == 0) { + return camCenterPoint; + } else { + return offsetPoint; + } + case Dual: + var resultPoint = new Point(); + var lineValues = dualOffsetValues.getLineValues(); + var offsetSlope = lineValues.getFirst(); + var offsetIntercept = lineValues.getSecond(); + + resultPoint.x = (offsetPoint.x - offsetIntercept) / offsetSlope; + resultPoint.y = (offsetPoint.y * offsetSlope) + offsetIntercept; + return resultPoint; } - case Dual: - var resultPoint = new Point(); - var lineValues = dualOffsetValues.getLineValues(); - var offsetSlope = lineValues.getFirst(); - var offsetIntercept = lineValues.getSecond(); - - resultPoint.x = (offsetPoint.x - offsetIntercept) / offsetSlope; - resultPoint.y = (offsetPoint.y * offsetSlope) + offsetIntercept; - return resultPoint; } - } - public static double getAspectRatio(RotatedRect rect, boolean isLandscape) { - if (rect.size.width == 0 || rect.size.height == 0) return 0; - double ratio = rect.size.width / rect.size.height; + public static double getAspectRatio(RotatedRect rect, boolean isLandscape) { + if (rect.size.width == 0 || rect.size.height == 0) return 0; + double ratio = rect.size.width / rect.size.height; + + // In landscape, we should be shorter than we are wide (that is, aspect ratio should be >1) + if (isLandscape && ratio < 1) { + ratio = 1.0 / ratio; + } + + // If portrait, should always be taller than wide (ratio < 1) + else if (!isLandscape && ratio > 1) { + ratio = 1.0 / ratio; + } - // In landscape, we should be shorter than we are wide 
(that is, aspect ratio should be >1) - if (isLandscape && ratio < 1) { - ratio = 1.0 / ratio; + return ratio; } - // If portrait, should always be taller than wide (ratio < 1) - else if (!isLandscape && ratio > 1) { - ratio = 1.0 / ratio; + public static Point calculateDualOffsetCrosshair( + DualOffsetValues dualOffsetValues, double currentArea) { + boolean firstLarger = dualOffsetValues.firstPointArea >= dualOffsetValues.secondPointArea; + double upperArea = + firstLarger ? dualOffsetValues.secondPointArea : dualOffsetValues.firstPointArea; + double lowerArea = + firstLarger ? dualOffsetValues.firstPointArea : dualOffsetValues.secondPointArea; + + var areaFraction = MathUtils.map(currentArea, lowerArea, upperArea, 0, 1); + var xLerp = + MathUtils.lerp(dualOffsetValues.firstPoint.x, dualOffsetValues.secondPoint.x, areaFraction); + var yLerp = + MathUtils.lerp(dualOffsetValues.firstPoint.y, dualOffsetValues.secondPoint.y, areaFraction); + + return new Point(xLerp, yLerp); } - return ratio; - } - - public static Point calculateDualOffsetCrosshair( - DualOffsetValues dualOffsetValues, double currentArea) { - boolean firstLarger = dualOffsetValues.firstPointArea >= dualOffsetValues.secondPointArea; - double upperArea = - firstLarger ? dualOffsetValues.secondPointArea : dualOffsetValues.firstPointArea; - double lowerArea = - firstLarger ? 
dualOffsetValues.firstPointArea : dualOffsetValues.secondPointArea; - - var areaFraction = MathUtils.map(currentArea, lowerArea, upperArea, 0, 1); - var xLerp = - MathUtils.lerp(dualOffsetValues.firstPoint.x, dualOffsetValues.secondPoint.x, areaFraction); - var yLerp = - MathUtils.lerp(dualOffsetValues.firstPoint.y, dualOffsetValues.secondPoint.y, areaFraction); - - return new Point(xLerp, yLerp); - } - - public static DoubleCouple getLineFromPoints(Point firstPoint, Point secondPoint) { - var offsetLineSlope = (secondPoint.y - firstPoint.y) / (secondPoint.x - firstPoint.x); - var offsetLineIntercept = firstPoint.y - (offsetLineSlope * firstPoint.x); - return new DoubleCouple(offsetLineSlope, offsetLineIntercept); - } + public static DoubleCouple getLineFromPoints(Point firstPoint, Point secondPoint) { + var offsetLineSlope = (secondPoint.y - firstPoint.y) / (secondPoint.x - firstPoint.x); + var offsetLineIntercept = firstPoint.y - (offsetLineSlope * firstPoint.x); + return new DoubleCouple(offsetLineSlope, offsetLineIntercept); + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/target/TargetModel.java b/photon-core/src/main/java/org/photonvision/vision/target/TargetModel.java index 36bd2b0485..198cbd5139 100644 --- a/photon-core/src/main/java/org/photonvision/vision/target/TargetModel.java +++ b/photon-core/src/main/java/org/photonvision/vision/target/TargetModel.java @@ -48,169 +48,169 @@ *

AprilTag models are currently only used for drawing on the output stream. */ public enum TargetModel implements Releasable { - k2016HighGoal( - List.of( - new Point3(Units.inchesToMeters(10), Units.inchesToMeters(6), 0), - new Point3(Units.inchesToMeters(-10), Units.inchesToMeters(6), 0), - new Point3(Units.inchesToMeters(-10), Units.inchesToMeters(-6), 0), - new Point3(Units.inchesToMeters(10), Units.inchesToMeters(-6), 0)), - Units.inchesToMeters(6)), - k2019DualTarget( - List.of( - new Point3(Units.inchesToMeters(7.313), Units.inchesToMeters(2.662), 0), - new Point3(Units.inchesToMeters(-7.313), Units.inchesToMeters(2.662), 0), - new Point3(Units.inchesToMeters(-5.936), Units.inchesToMeters(-2.662), 0), - new Point3(Units.inchesToMeters(5.936), Units.inchesToMeters(-2.662), 0)), - 0.1), - k2020HighGoalOuter( - List.of( - new Point3(Units.inchesToMeters(9.819867), Units.inchesToMeters(8.5), 0), - new Point3(Units.inchesToMeters(-9.819867), Units.inchesToMeters(8.5), 0), - new Point3(Units.inchesToMeters(-19.625), Units.inchesToMeters(-8.5), 0), - new Point3(Units.inchesToMeters(19.625), Units.inchesToMeters(-8.5), 0)), - Units.inchesToMeters(12)), - kCircularPowerCell7in( - List.of( - new Point3( - -Units.inchesToMeters(7) / 2, - -Units.inchesToMeters(7) / 2, - -Units.inchesToMeters(7) / 2), - new Point3( - -Units.inchesToMeters(7) / 2, - Units.inchesToMeters(7) / 2, - -Units.inchesToMeters(7) / 2), - new Point3( - Units.inchesToMeters(7) / 2, - Units.inchesToMeters(7) / 2, - -Units.inchesToMeters(7) / 2), - new Point3( - Units.inchesToMeters(7) / 2, - -Units.inchesToMeters(7) / 2, - -Units.inchesToMeters(7) / 2)), - 0), - k2022CircularCargoBall( - List.of( - new Point3( - -Units.inchesToMeters(9.5) / 2, - -Units.inchesToMeters(9.5) / 2, - -Units.inchesToMeters(9.5) / 2), - new Point3( - -Units.inchesToMeters(9.5) / 2, - Units.inchesToMeters(9.5) / 2, - -Units.inchesToMeters(9.5) / 2), - new Point3( - Units.inchesToMeters(9.5) / 2, - Units.inchesToMeters(9.5) / 
2, - -Units.inchesToMeters(9.5) / 2), - new Point3( - Units.inchesToMeters(9.5) / 2, - -Units.inchesToMeters(9.5) / 2, - -Units.inchesToMeters(9.5) / 2)), - 0), - k200mmAprilTag( // Corners of the tag's inner black square (excluding white border) - List.of( - new Point3(Units.inchesToMeters(3.25), Units.inchesToMeters(3.25), 0), - new Point3(-Units.inchesToMeters(3.25), Units.inchesToMeters(3.25), 0), - new Point3(-Units.inchesToMeters(3.25), -Units.inchesToMeters(3.25), 0), - new Point3(Units.inchesToMeters(3.25), -Units.inchesToMeters(3.25), 0)), - Units.inchesToMeters(3.25 * 2)), - kAruco6in_16h5( // Corners of the tag's inner black square (excluding white border) - List.of( - new Point3(Units.inchesToMeters(3), Units.inchesToMeters(3), 0), - new Point3(Units.inchesToMeters(3), -Units.inchesToMeters(3), 0), - new Point3(-Units.inchesToMeters(3), -Units.inchesToMeters(3), 0), - new Point3(Units.inchesToMeters(3), -Units.inchesToMeters(3), 0)), - Units.inchesToMeters(3 * 2)), - k6in_16h5( // Corners of the tag's inner black square (excluding white border) - List.of( - new Point3(Units.inchesToMeters(3), Units.inchesToMeters(3), 0), - new Point3(-Units.inchesToMeters(3), Units.inchesToMeters(3), 0), - new Point3(-Units.inchesToMeters(3), -Units.inchesToMeters(3), 0), - new Point3(Units.inchesToMeters(3), -Units.inchesToMeters(3), 0)), - Units.inchesToMeters(3 * 2)); - - @JsonIgnore private MatOfPoint3f realWorldTargetCoordinates; - @JsonIgnore private final MatOfPoint3f visualizationBoxBottom = new MatOfPoint3f(); - @JsonIgnore private final MatOfPoint3f visualizationBoxTop = new MatOfPoint3f(); - - @JsonProperty("realWorldCoordinatesArray") - private List realWorldCoordinatesArray; - - @JsonProperty("boxHeight") - private double boxHeight; - - TargetModel() {} - - TargetModel(MatOfPoint3f realWorldTargetCoordinates, double boxHeight) { - this.realWorldTargetCoordinates = realWorldTargetCoordinates; - this.realWorldCoordinatesArray = 
realWorldTargetCoordinates.toList(); - this.boxHeight = boxHeight; - - var bottomList = realWorldTargetCoordinates.toList(); - var topList = new ArrayList(); - for (var c : bottomList) { - topList.add(new Point3(c.x, c.y, c.z + boxHeight)); + k2016HighGoal( + List.of( + new Point3(Units.inchesToMeters(10), Units.inchesToMeters(6), 0), + new Point3(Units.inchesToMeters(-10), Units.inchesToMeters(6), 0), + new Point3(Units.inchesToMeters(-10), Units.inchesToMeters(-6), 0), + new Point3(Units.inchesToMeters(10), Units.inchesToMeters(-6), 0)), + Units.inchesToMeters(6)), + k2019DualTarget( + List.of( + new Point3(Units.inchesToMeters(7.313), Units.inchesToMeters(2.662), 0), + new Point3(Units.inchesToMeters(-7.313), Units.inchesToMeters(2.662), 0), + new Point3(Units.inchesToMeters(-5.936), Units.inchesToMeters(-2.662), 0), + new Point3(Units.inchesToMeters(5.936), Units.inchesToMeters(-2.662), 0)), + 0.1), + k2020HighGoalOuter( + List.of( + new Point3(Units.inchesToMeters(9.819867), Units.inchesToMeters(8.5), 0), + new Point3(Units.inchesToMeters(-9.819867), Units.inchesToMeters(8.5), 0), + new Point3(Units.inchesToMeters(-19.625), Units.inchesToMeters(-8.5), 0), + new Point3(Units.inchesToMeters(19.625), Units.inchesToMeters(-8.5), 0)), + Units.inchesToMeters(12)), + kCircularPowerCell7in( + List.of( + new Point3( + -Units.inchesToMeters(7) / 2, + -Units.inchesToMeters(7) / 2, + -Units.inchesToMeters(7) / 2), + new Point3( + -Units.inchesToMeters(7) / 2, + Units.inchesToMeters(7) / 2, + -Units.inchesToMeters(7) / 2), + new Point3( + Units.inchesToMeters(7) / 2, + Units.inchesToMeters(7) / 2, + -Units.inchesToMeters(7) / 2), + new Point3( + Units.inchesToMeters(7) / 2, + -Units.inchesToMeters(7) / 2, + -Units.inchesToMeters(7) / 2)), + 0), + k2022CircularCargoBall( + List.of( + new Point3( + -Units.inchesToMeters(9.5) / 2, + -Units.inchesToMeters(9.5) / 2, + -Units.inchesToMeters(9.5) / 2), + new Point3( + -Units.inchesToMeters(9.5) / 2, + Units.inchesToMeters(9.5) / 
2, + -Units.inchesToMeters(9.5) / 2), + new Point3( + Units.inchesToMeters(9.5) / 2, + Units.inchesToMeters(9.5) / 2, + -Units.inchesToMeters(9.5) / 2), + new Point3( + Units.inchesToMeters(9.5) / 2, + -Units.inchesToMeters(9.5) / 2, + -Units.inchesToMeters(9.5) / 2)), + 0), + k200mmAprilTag( // Corners of the tag's inner black square (excluding white border) + List.of( + new Point3(Units.inchesToMeters(3.25), Units.inchesToMeters(3.25), 0), + new Point3(-Units.inchesToMeters(3.25), Units.inchesToMeters(3.25), 0), + new Point3(-Units.inchesToMeters(3.25), -Units.inchesToMeters(3.25), 0), + new Point3(Units.inchesToMeters(3.25), -Units.inchesToMeters(3.25), 0)), + Units.inchesToMeters(3.25 * 2)), + kAruco6in_16h5( // Corners of the tag's inner black square (excluding white border) + List.of( + new Point3(Units.inchesToMeters(3), Units.inchesToMeters(3), 0), + new Point3(Units.inchesToMeters(3), -Units.inchesToMeters(3), 0), + new Point3(-Units.inchesToMeters(3), -Units.inchesToMeters(3), 0), + new Point3(Units.inchesToMeters(3), -Units.inchesToMeters(3), 0)), + Units.inchesToMeters(3 * 2)), + k6in_16h5( // Corners of the tag's inner black square (excluding white border) + List.of( + new Point3(Units.inchesToMeters(3), Units.inchesToMeters(3), 0), + new Point3(-Units.inchesToMeters(3), Units.inchesToMeters(3), 0), + new Point3(-Units.inchesToMeters(3), -Units.inchesToMeters(3), 0), + new Point3(Units.inchesToMeters(3), -Units.inchesToMeters(3), 0)), + Units.inchesToMeters(3 * 2)); + + @JsonIgnore private MatOfPoint3f realWorldTargetCoordinates; + @JsonIgnore private final MatOfPoint3f visualizationBoxBottom = new MatOfPoint3f(); + @JsonIgnore private final MatOfPoint3f visualizationBoxTop = new MatOfPoint3f(); + + @JsonProperty("realWorldCoordinatesArray") + private List realWorldCoordinatesArray; + + @JsonProperty("boxHeight") + private double boxHeight; + + TargetModel() {} + + TargetModel(MatOfPoint3f realWorldTargetCoordinates, double boxHeight) { + 
this.realWorldTargetCoordinates = realWorldTargetCoordinates; + this.realWorldCoordinatesArray = realWorldTargetCoordinates.toList(); + this.boxHeight = boxHeight; + + var bottomList = realWorldTargetCoordinates.toList(); + var topList = new ArrayList(); + for (var c : bottomList) { + topList.add(new Point3(c.x, c.y, c.z + boxHeight)); + } + + this.visualizationBoxBottom.fromList(bottomList); + this.visualizationBoxTop.fromList(topList); } - this.visualizationBoxBottom.fromList(bottomList); - this.visualizationBoxTop.fromList(topList); - } - - @JsonCreator - TargetModel( - @JsonProperty(value = "realWorldCoordinatesArray") List points, - @JsonProperty(value = "boxHeight") double boxHeight) { - this(listToMat(points), boxHeight); - } - - public List getRealWorldCoordinatesArray() { - return this.realWorldCoordinatesArray; - } - - public double getBoxHeight() { - return boxHeight; - } - - public void setRealWorldCoordinatesArray(List realWorldCoordinatesArray) { - this.realWorldCoordinatesArray = realWorldCoordinatesArray; - } - - public void setBoxHeight(double boxHeight) { - this.boxHeight = boxHeight; - } - - private static MatOfPoint3f listToMat(List points) { - var mat = new MatOfPoint3f(); - mat.fromList(points); - return mat; - } - - public MatOfPoint3f getRealWorldTargetCoordinates() { - return realWorldTargetCoordinates; - } - - public MatOfPoint3f getVisualizationBoxBottom() { - return visualizationBoxBottom; - } - - public MatOfPoint3f getVisualizationBoxTop() { - return visualizationBoxTop; - } - - // public static TargetModel getCircleTarget(double Units.inchesToMeters(7)) { - // var corners = - // List.of( - // new Point3(-Units.inchesToMeters(7) / 2, -radius / 2, -radius / 2), - // new Point3(-Units.inchesToMeters(7) / 2, radius / 2, -radius / 2), - // new Point3(Units.inchesToMeters(7) / 2, radius / 2, -radius / 2), - // new Point3(Units.inchesToMeters(7) / 2, -radius / 2, -radius / 2)); - // return new TargetModel(corners, 0); - // } - - @Override - 
public void release() { - realWorldTargetCoordinates.release(); - visualizationBoxBottom.release(); - visualizationBoxTop.release(); - } + @JsonCreator + TargetModel( + @JsonProperty(value = "realWorldCoordinatesArray") List points, + @JsonProperty(value = "boxHeight") double boxHeight) { + this(listToMat(points), boxHeight); + } + + public List getRealWorldCoordinatesArray() { + return this.realWorldCoordinatesArray; + } + + public double getBoxHeight() { + return boxHeight; + } + + public void setRealWorldCoordinatesArray(List realWorldCoordinatesArray) { + this.realWorldCoordinatesArray = realWorldCoordinatesArray; + } + + public void setBoxHeight(double boxHeight) { + this.boxHeight = boxHeight; + } + + private static MatOfPoint3f listToMat(List points) { + var mat = new MatOfPoint3f(); + mat.fromList(points); + return mat; + } + + public MatOfPoint3f getRealWorldTargetCoordinates() { + return realWorldTargetCoordinates; + } + + public MatOfPoint3f getVisualizationBoxBottom() { + return visualizationBoxBottom; + } + + public MatOfPoint3f getVisualizationBoxTop() { + return visualizationBoxTop; + } + + // public static TargetModel getCircleTarget(double Units.inchesToMeters(7)) { + // var corners = + // List.of( + // new Point3(-Units.inchesToMeters(7) / 2, -radius / 2, -radius / 2), + // new Point3(-Units.inchesToMeters(7) / 2, radius / 2, -radius / 2), + // new Point3(Units.inchesToMeters(7) / 2, radius / 2, -radius / 2), + // new Point3(Units.inchesToMeters(7) / 2, -radius / 2, -radius / 2)); + // return new TargetModel(corners, 0); + // } + + @Override + public void release() { + realWorldTargetCoordinates.release(); + visualizationBoxBottom.release(); + visualizationBoxTop.release(); + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/target/TargetOffsetPointEdge.java b/photon-core/src/main/java/org/photonvision/vision/target/TargetOffsetPointEdge.java index 1c166ed136..eccf287ec5 100644 --- 
a/photon-core/src/main/java/org/photonvision/vision/target/TargetOffsetPointEdge.java +++ b/photon-core/src/main/java/org/photonvision/vision/target/TargetOffsetPointEdge.java @@ -17,9 +17,9 @@ package org.photonvision.vision.target; public enum TargetOffsetPointEdge { - Center, - Top, - Bottom, - Left, - Right + Center, + Top, + Bottom, + Left, + Right } diff --git a/photon-core/src/main/java/org/photonvision/vision/target/TargetOrientation.java b/photon-core/src/main/java/org/photonvision/vision/target/TargetOrientation.java index 4f88382487..61b19c7f89 100644 --- a/photon-core/src/main/java/org/photonvision/vision/target/TargetOrientation.java +++ b/photon-core/src/main/java/org/photonvision/vision/target/TargetOrientation.java @@ -17,6 +17,6 @@ package org.photonvision.vision.target; public enum TargetOrientation { - Portrait, - Landscape + Portrait, + Landscape } diff --git a/photon-core/src/main/java/org/photonvision/vision/target/TrackedTarget.java b/photon-core/src/main/java/org/photonvision/vision/target/TrackedTarget.java index db1411f57a..9be1e9162c 100644 --- a/photon-core/src/main/java/org/photonvision/vision/target/TrackedTarget.java +++ b/photon-core/src/main/java/org/photonvision/vision/target/TrackedTarget.java @@ -43,456 +43,456 @@ import org.photonvision.vision.opencv.Releasable; public class TrackedTarget implements Releasable { - public final Contour m_mainContour; - public List m_subContours; // can be empty - - private MatOfPoint2f m_approximateBoundingPolygon; - - private List m_targetCorners = List.of(); - - private Point m_targetOffsetPoint; - private Point m_robotOffsetPoint; - - private double m_pitch; - private double m_yaw; - private double m_area; - private double m_skew; - - private Transform3d m_bestCameraToTarget3d = new Transform3d(); - private Transform3d m_altCameraToTarget3d = new Transform3d(); - - private CVShape m_shape; - - private int m_fiducialId = -1; - private double m_poseAmbiguity = -1; - - private Mat 
m_cameraRelativeTvec, m_cameraRelativeRvec; - - public TrackedTarget( - PotentialTarget origTarget, TargetCalculationParameters params, CVShape shape) { - this.m_mainContour = origTarget.m_mainContour; - this.m_subContours = origTarget.m_subContours; - this.m_shape = shape; - calculateValues(params); - } - - public TrackedTarget( - AprilTagDetection tagDetection, - AprilTagPoseEstimate tagPose, - TargetCalculationParameters params) { - m_targetOffsetPoint = new Point(tagDetection.getCenterX(), tagDetection.getCenterY()); - m_robotOffsetPoint = new Point(); - var yawPitch = - TargetCalculations.calculateYawPitch( - params.cameraCenterPoint.x, - tagDetection.getCenterX(), - params.horizontalFocalLength, - params.cameraCenterPoint.y, - tagDetection.getCenterY(), - params.verticalFocalLength); - m_yaw = yawPitch.getFirst(); - m_pitch = yawPitch.getSecond(); - var bestPose = new Transform3d(); - var altPose = new Transform3d(); - - if (tagPose != null) { - if (tagPose.error1 <= tagPose.error2) { - bestPose = tagPose.pose1; - altPose = tagPose.pose2; - } else { - bestPose = tagPose.pose2; - altPose = tagPose.pose1; - } - - bestPose = MathUtils.convertApriltagtoOpenCV(bestPose); - altPose = MathUtils.convertApriltagtoOpenCV(altPose); - - m_bestCameraToTarget3d = bestPose; - m_altCameraToTarget3d = altPose; - - m_poseAmbiguity = tagPose.getAmbiguity(); - - var tvec = new Mat(3, 1, CvType.CV_64FC1); - tvec.put( - 0, - 0, - new double[] { - bestPose.getTranslation().getX(), - bestPose.getTranslation().getY(), - bestPose.getTranslation().getZ() - }); - setCameraRelativeTvec(tvec); - - // Opencv expects a 3d vector with norm = angle and direction = axis - var rvec = new Mat(3, 1, CvType.CV_64FC1); - MathUtils.rotationToOpencvRvec(bestPose.getRotation(), rvec); - setCameraRelativeRvec(rvec); - } - - double[] corners = tagDetection.getCorners(); - Point[] cornerPoints = - new Point[] { - new Point(corners[0], corners[1]), - new Point(corners[2], corners[3]), - new 
Point(corners[4], corners[5]), - new Point(corners[6], corners[7]) - }; - m_targetCorners = List.of(cornerPoints); - MatOfPoint contourMat = new MatOfPoint(cornerPoints); - m_approximateBoundingPolygon = new MatOfPoint2f(cornerPoints); - m_mainContour = new Contour(contourMat); - m_area = m_mainContour.getArea() / params.imageArea * 100; - m_fiducialId = tagDetection.getId(); - m_shape = null; - - // TODO implement skew? or just yeet - m_skew = 0; - - var tvec = new Mat(3, 1, CvType.CV_64FC1); - tvec.put( - 0, - 0, - new double[] { - bestPose.getTranslation().getX(), - bestPose.getTranslation().getY(), - bestPose.getTranslation().getZ() - }); - setCameraRelativeTvec(tvec); - - // Opencv expects a 3d vector with norm = angle and direction = axis - var rvec = new Mat(3, 1, CvType.CV_64FC1); - MathUtils.rotationToOpencvRvec(bestPose.getRotation(), rvec); - setCameraRelativeRvec(rvec); - } - - public TrackedTarget(ArucoDetectionResult result, TargetCalculationParameters params) { - m_targetOffsetPoint = new Point(result.getCenterX(), result.getCenterY()); - m_robotOffsetPoint = new Point(); - var yawPitch = - TargetCalculations.calculateYawPitch( - params.cameraCenterPoint.x, - result.getCenterX(), - params.horizontalFocalLength, - params.cameraCenterPoint.y, - result.getCenterY(), - params.verticalFocalLength); - m_yaw = yawPitch.getFirst(); - m_pitch = yawPitch.getSecond(); - - double[] xCorners = result.getxCorners(); - double[] yCorners = result.getyCorners(); - - Point[] cornerPoints = - new Point[] { - new Point(xCorners[0], yCorners[0]), - new Point(xCorners[1], yCorners[1]), - new Point(xCorners[2], yCorners[2]), - new Point(xCorners[3], yCorners[3]) - }; - m_targetCorners = List.of(cornerPoints); - MatOfPoint contourMat = new MatOfPoint(cornerPoints); - m_approximateBoundingPolygon = new MatOfPoint2f(cornerPoints); - m_mainContour = new Contour(contourMat); - m_area = m_mainContour.getArea() / params.imageArea * 100; - m_fiducialId = result.getId(); - m_shape 
= null; - - // TODO implement skew? or just yeet - - var tvec = new Mat(3, 1, CvType.CV_64FC1); - tvec.put(0, 0, result.getTvec()); - setCameraRelativeTvec(tvec); - - var rvec = new Mat(3, 1, CvType.CV_64FC1); - rvec.put(0, 0, result.getRvec()); - setCameraRelativeRvec(rvec); - - { - Translation3d translation = - // new Translation3d(tVec.get(0, 0)[0], tVec.get(1, 0)[0], tVec.get(2, 0)[0]); - new Translation3d(result.getTvec()[0], result.getTvec()[1], result.getTvec()[2]); - var axisangle = - VecBuilder.fill(result.getRvec()[0], result.getRvec()[1], result.getRvec()[2]); - Rotation3d rotation = new Rotation3d(axisangle, axisangle.normF()); - - m_bestCameraToTarget3d = - MathUtils.convertOpenCVtoPhotonTransform(new Transform3d(translation, rotation)); - } - } - - public void setFiducialId(int id) { - m_fiducialId = id; - } - - public int getFiducialId() { - return m_fiducialId; - } - - public void setPoseAmbiguity(double ambiguity) { - m_poseAmbiguity = ambiguity; - } - - public double getPoseAmbiguity() { - return m_poseAmbiguity; - } - - /** - * Set the approximate bounding polygon. - * - * @param boundingPolygon List of points to copy. Not modified. 
- */ - public void setApproximateBoundingPolygon(MatOfPoint2f boundingPolygon) { - if (m_approximateBoundingPolygon == null) m_approximateBoundingPolygon = new MatOfPoint2f(); - boundingPolygon.copyTo(m_approximateBoundingPolygon); - } - - public Point getTargetOffsetPoint() { - return m_targetOffsetPoint; - } - - public Point getRobotOffsetPoint() { - return m_robotOffsetPoint; - } - - public double getPitch() { - return m_pitch; - } - - public double getYaw() { - return m_yaw; - } - - public double getSkew() { - return m_skew; - } - - public double getArea() { - return m_area; - } - - public RotatedRect getMinAreaRect() { - return m_mainContour.getMinAreaRect(); - } - - public MatOfPoint2f getApproximateBoundingPolygon() { - return m_approximateBoundingPolygon; - } - - public void calculateValues(TargetCalculationParameters params) { - // this MUST happen in this exact order! (TODO: document why) - m_targetOffsetPoint = - TargetCalculations.calculateTargetOffsetPoint( - params.isLandscape, params.targetOffsetPointEdge, getMinAreaRect()); - m_robotOffsetPoint = - TargetCalculations.calculateRobotOffsetPoint( - params.robotOffsetSinglePoint, - params.cameraCenterPoint, - params.dualOffsetValues, - params.robotOffsetPointMode); - - // order of this stuff doesnt matter though - var yawPitch = - TargetCalculations.calculateYawPitch( - m_robotOffsetPoint.x, - m_targetOffsetPoint.x, - params.horizontalFocalLength, - m_robotOffsetPoint.y, - m_targetOffsetPoint.y, - params.verticalFocalLength); - m_yaw = yawPitch.getFirst(); - m_pitch = yawPitch.getSecond(); - - m_area = m_mainContour.getMinAreaRect().size.area() / params.imageArea * 100; - - m_skew = TargetCalculations.calculateSkew(params.isLandscape, getMinAreaRect()); - } - - @Override - public void release() { - m_mainContour.release(); - - // TODO how can this check fail? 
- if (m_subContours != null) { - for (var sc : m_subContours) { - sc.release(); - } - } - - if (m_cameraRelativeTvec != null) m_cameraRelativeTvec.release(); - if (m_cameraRelativeRvec != null) m_cameraRelativeRvec.release(); - } - - public void setTargetCorners(List targetCorners) { - this.m_targetCorners = targetCorners; - } - - public List getTargetCorners() { - return m_targetCorners; - } - - public boolean hasSubContours() { - return !m_subContours.isEmpty(); - } - - public Transform3d getBestCameraToTarget3d() { - return m_bestCameraToTarget3d; - } - - public Transform3d getAltCameraToTarget3d() { - return m_altCameraToTarget3d; - } - - public void setBestCameraToTarget3d(Transform3d pose) { - this.m_bestCameraToTarget3d = pose; - } - - public void setAltCameraToTarget3d(Transform3d pose) { - this.m_altCameraToTarget3d = pose; - } - - public Mat getCameraRelativeTvec() { - return m_cameraRelativeTvec; - } - - public void setCameraRelativeTvec(Mat cameraRelativeTvec) { - if (this.m_cameraRelativeTvec == null) m_cameraRelativeTvec = new Mat(); - cameraRelativeTvec.copyTo(this.m_cameraRelativeTvec); - } - - public Mat getCameraRelativeRvec() { - return m_cameraRelativeRvec; - } - - public void setCameraRelativeRvec(Mat cameraRelativeRvec) { - if (this.m_cameraRelativeRvec == null) m_cameraRelativeRvec = new Mat(); - cameraRelativeRvec.copyTo(this.m_cameraRelativeRvec); - } - - public CVShape getShape() { - return m_shape; - } - - public void setShape(CVShape shape) { - this.m_shape = shape; - } - - public HashMap toHashMap() { - var ret = new HashMap(); - ret.put("pitch", getPitch()); - ret.put("yaw", getYaw()); - ret.put("skew", getSkew()); - ret.put("area", getArea()); - ret.put("ambiguity", getPoseAmbiguity()); - - var bestCameraToTarget3d = getBestCameraToTarget3d(); - if (bestCameraToTarget3d != null) { - ret.put("pose", SerializationUtils.transformToHashMap(bestCameraToTarget3d)); - } - ret.put("fiducialId", getFiducialId()); - return ret; - } - - public 
boolean isFiducial() { - return this.m_fiducialId >= 0; - } - - public static List simpleFromTrackedTargets(List targets) { - var ret = new ArrayList(); - for (var t : targets) { - var minAreaRectCorners = new ArrayList(); - var detectedCorners = new ArrayList(); - { - var points = new Point[4]; - t.getMinAreaRect().points(points); - for (int i = 0; i < 4; i++) { - minAreaRectCorners.add(new TargetCorner(points[i].x, points[i].y)); + public final Contour m_mainContour; + public List m_subContours; // can be empty + + private MatOfPoint2f m_approximateBoundingPolygon; + + private List m_targetCorners = List.of(); + + private Point m_targetOffsetPoint; + private Point m_robotOffsetPoint; + + private double m_pitch; + private double m_yaw; + private double m_area; + private double m_skew; + + private Transform3d m_bestCameraToTarget3d = new Transform3d(); + private Transform3d m_altCameraToTarget3d = new Transform3d(); + + private CVShape m_shape; + + private int m_fiducialId = -1; + private double m_poseAmbiguity = -1; + + private Mat m_cameraRelativeTvec, m_cameraRelativeRvec; + + public TrackedTarget( + PotentialTarget origTarget, TargetCalculationParameters params, CVShape shape) { + this.m_mainContour = origTarget.m_mainContour; + this.m_subContours = origTarget.m_subContours; + this.m_shape = shape; + calculateValues(params); + } + + public TrackedTarget( + AprilTagDetection tagDetection, + AprilTagPoseEstimate tagPose, + TargetCalculationParameters params) { + m_targetOffsetPoint = new Point(tagDetection.getCenterX(), tagDetection.getCenterY()); + m_robotOffsetPoint = new Point(); + var yawPitch = + TargetCalculations.calculateYawPitch( + params.cameraCenterPoint.x, + tagDetection.getCenterX(), + params.horizontalFocalLength, + params.cameraCenterPoint.y, + tagDetection.getCenterY(), + params.verticalFocalLength); + m_yaw = yawPitch.getFirst(); + m_pitch = yawPitch.getSecond(); + var bestPose = new Transform3d(); + var altPose = new Transform3d(); + + if 
(tagPose != null) { + if (tagPose.error1 <= tagPose.error2) { + bestPose = tagPose.pose1; + altPose = tagPose.pose2; + } else { + bestPose = tagPose.pose2; + altPose = tagPose.pose1; + } + + bestPose = MathUtils.convertApriltagtoOpenCV(bestPose); + altPose = MathUtils.convertApriltagtoOpenCV(altPose); + + m_bestCameraToTarget3d = bestPose; + m_altCameraToTarget3d = altPose; + + m_poseAmbiguity = tagPose.getAmbiguity(); + + var tvec = new Mat(3, 1, CvType.CV_64FC1); + tvec.put( + 0, + 0, + new double[] { + bestPose.getTranslation().getX(), + bestPose.getTranslation().getY(), + bestPose.getTranslation().getZ() + }); + setCameraRelativeTvec(tvec); + + // Opencv expects a 3d vector with norm = angle and direction = axis + var rvec = new Mat(3, 1, CvType.CV_64FC1); + MathUtils.rotationToOpencvRvec(bestPose.getRotation(), rvec); + setCameraRelativeRvec(rvec); } - } - { - var points = t.getTargetCorners(); - for (int i = 0; i < points.size(); i++) { - detectedCorners.add(new TargetCorner(points.get(i).x, points.get(i).y)); + + double[] corners = tagDetection.getCorners(); + Point[] cornerPoints = + new Point[] { + new Point(corners[0], corners[1]), + new Point(corners[2], corners[3]), + new Point(corners[4], corners[5]), + new Point(corners[6], corners[7]) + }; + m_targetCorners = List.of(cornerPoints); + MatOfPoint contourMat = new MatOfPoint(cornerPoints); + m_approximateBoundingPolygon = new MatOfPoint2f(cornerPoints); + m_mainContour = new Contour(contourMat); + m_area = m_mainContour.getArea() / params.imageArea * 100; + m_fiducialId = tagDetection.getId(); + m_shape = null; + + // TODO implement skew? 
or just yeet + m_skew = 0; + + var tvec = new Mat(3, 1, CvType.CV_64FC1); + tvec.put( + 0, + 0, + new double[] { + bestPose.getTranslation().getX(), + bestPose.getTranslation().getY(), + bestPose.getTranslation().getZ() + }); + setCameraRelativeTvec(tvec); + + // Opencv expects a 3d vector with norm = angle and direction = axis + var rvec = new Mat(3, 1, CvType.CV_64FC1); + MathUtils.rotationToOpencvRvec(bestPose.getRotation(), rvec); + setCameraRelativeRvec(rvec); + } + + public TrackedTarget(ArucoDetectionResult result, TargetCalculationParameters params) { + m_targetOffsetPoint = new Point(result.getCenterX(), result.getCenterY()); + m_robotOffsetPoint = new Point(); + var yawPitch = + TargetCalculations.calculateYawPitch( + params.cameraCenterPoint.x, + result.getCenterX(), + params.horizontalFocalLength, + params.cameraCenterPoint.y, + result.getCenterY(), + params.verticalFocalLength); + m_yaw = yawPitch.getFirst(); + m_pitch = yawPitch.getSecond(); + + double[] xCorners = result.getxCorners(); + double[] yCorners = result.getyCorners(); + + Point[] cornerPoints = + new Point[] { + new Point(xCorners[0], yCorners[0]), + new Point(xCorners[1], yCorners[1]), + new Point(xCorners[2], yCorners[2]), + new Point(xCorners[3], yCorners[3]) + }; + m_targetCorners = List.of(cornerPoints); + MatOfPoint contourMat = new MatOfPoint(cornerPoints); + m_approximateBoundingPolygon = new MatOfPoint2f(cornerPoints); + m_mainContour = new Contour(contourMat); + m_area = m_mainContour.getArea() / params.imageArea * 100; + m_fiducialId = result.getId(); + m_shape = null; + + // TODO implement skew? 
or just yeet + + var tvec = new Mat(3, 1, CvType.CV_64FC1); + tvec.put(0, 0, result.getTvec()); + setCameraRelativeTvec(tvec); + + var rvec = new Mat(3, 1, CvType.CV_64FC1); + rvec.put(0, 0, result.getRvec()); + setCameraRelativeRvec(rvec); + + { + Translation3d translation = + // new Translation3d(tVec.get(0, 0)[0], tVec.get(1, 0)[0], tVec.get(2, 0)[0]); + new Translation3d(result.getTvec()[0], result.getTvec()[1], result.getTvec()[2]); + var axisangle = + VecBuilder.fill(result.getRvec()[0], result.getRvec()[1], result.getRvec()[2]); + Rotation3d rotation = new Rotation3d(axisangle, axisangle.normF()); + + m_bestCameraToTarget3d = + MathUtils.convertOpenCVtoPhotonTransform(new Transform3d(translation, rotation)); } - } - - ret.add( - new PhotonTrackedTarget( - t.getYaw(), - t.getPitch(), - t.getArea(), - t.getSkew(), - t.getFiducialId(), - t.getBestCameraToTarget3d(), - t.getAltCameraToTarget3d(), - t.getPoseAmbiguity(), - minAreaRectCorners, - detectedCorners)); - } - return ret; - } - - public static class TargetCalculationParameters { - // TargetOffset calculation values - final boolean isLandscape; - final TargetOffsetPointEdge targetOffsetPointEdge; - - // RobotOffset calculation values - final RobotOffsetPointMode robotOffsetPointMode; - final Point robotOffsetSinglePoint; - final DualOffsetValues dualOffsetValues; - - // center point of image - final Point cameraCenterPoint; - - // yaw calculation values - final double horizontalFocalLength; - - // pitch calculation values - final double verticalFocalLength; - - // area calculation values - final double imageArea; - - public TargetCalculationParameters( - boolean isLandscape, - TargetOffsetPointEdge targetOffsetPointEdge, - RobotOffsetPointMode robotOffsetPointMode, - Point robotOffsetSinglePoint, - DualOffsetValues dualOffsetValues, - Point cameraCenterPoint, - double horizontalFocalLength, - double verticalFocalLength, - double imageArea) { - - this.isLandscape = isLandscape; - this.targetOffsetPointEdge 
= targetOffsetPointEdge; - this.robotOffsetPointMode = robotOffsetPointMode; - this.robotOffsetSinglePoint = robotOffsetSinglePoint; - this.dualOffsetValues = dualOffsetValues; - this.cameraCenterPoint = cameraCenterPoint; - this.horizontalFocalLength = horizontalFocalLength; - this.verticalFocalLength = verticalFocalLength; - this.imageArea = imageArea; - } - - public TargetCalculationParameters( - boolean isLandscape, - TargetOffsetPointEdge targetOffsetPointEdge, - RobotOffsetPointMode robotOffsetPointMode, - Point robotOffsetSinglePoint, - DualOffsetValues dualOffsetValues, - FrameStaticProperties frameStaticProperties) { - - this.isLandscape = isLandscape; - this.targetOffsetPointEdge = targetOffsetPointEdge; - this.robotOffsetPointMode = robotOffsetPointMode; - this.robotOffsetSinglePoint = robotOffsetSinglePoint; - this.dualOffsetValues = dualOffsetValues; - - this.cameraCenterPoint = frameStaticProperties.centerPoint; - this.horizontalFocalLength = frameStaticProperties.horizontalFocalLength; - this.verticalFocalLength = frameStaticProperties.verticalFocalLength; - this.imageArea = frameStaticProperties.imageArea; - } - } + } + + public void setFiducialId(int id) { + m_fiducialId = id; + } + + public int getFiducialId() { + return m_fiducialId; + } + + public void setPoseAmbiguity(double ambiguity) { + m_poseAmbiguity = ambiguity; + } + + public double getPoseAmbiguity() { + return m_poseAmbiguity; + } + + /** + * Set the approximate bounding polygon. + * + * @param boundingPolygon List of points to copy. Not modified. 
+ */ + public void setApproximateBoundingPolygon(MatOfPoint2f boundingPolygon) { + if (m_approximateBoundingPolygon == null) m_approximateBoundingPolygon = new MatOfPoint2f(); + boundingPolygon.copyTo(m_approximateBoundingPolygon); + } + + public Point getTargetOffsetPoint() { + return m_targetOffsetPoint; + } + + public Point getRobotOffsetPoint() { + return m_robotOffsetPoint; + } + + public double getPitch() { + return m_pitch; + } + + public double getYaw() { + return m_yaw; + } + + public double getSkew() { + return m_skew; + } + + public double getArea() { + return m_area; + } + + public RotatedRect getMinAreaRect() { + return m_mainContour.getMinAreaRect(); + } + + public MatOfPoint2f getApproximateBoundingPolygon() { + return m_approximateBoundingPolygon; + } + + public void calculateValues(TargetCalculationParameters params) { + // this MUST happen in this exact order! (TODO: document why) + m_targetOffsetPoint = + TargetCalculations.calculateTargetOffsetPoint( + params.isLandscape, params.targetOffsetPointEdge, getMinAreaRect()); + m_robotOffsetPoint = + TargetCalculations.calculateRobotOffsetPoint( + params.robotOffsetSinglePoint, + params.cameraCenterPoint, + params.dualOffsetValues, + params.robotOffsetPointMode); + + // order of this stuff doesnt matter though + var yawPitch = + TargetCalculations.calculateYawPitch( + m_robotOffsetPoint.x, + m_targetOffsetPoint.x, + params.horizontalFocalLength, + m_robotOffsetPoint.y, + m_targetOffsetPoint.y, + params.verticalFocalLength); + m_yaw = yawPitch.getFirst(); + m_pitch = yawPitch.getSecond(); + + m_area = m_mainContour.getMinAreaRect().size.area() / params.imageArea * 100; + + m_skew = TargetCalculations.calculateSkew(params.isLandscape, getMinAreaRect()); + } + + @Override + public void release() { + m_mainContour.release(); + + // TODO how can this check fail? 
+ if (m_subContours != null) { + for (var sc : m_subContours) { + sc.release(); + } + } + + if (m_cameraRelativeTvec != null) m_cameraRelativeTvec.release(); + if (m_cameraRelativeRvec != null) m_cameraRelativeRvec.release(); + } + + public void setTargetCorners(List targetCorners) { + this.m_targetCorners = targetCorners; + } + + public List getTargetCorners() { + return m_targetCorners; + } + + public boolean hasSubContours() { + return !m_subContours.isEmpty(); + } + + public Transform3d getBestCameraToTarget3d() { + return m_bestCameraToTarget3d; + } + + public Transform3d getAltCameraToTarget3d() { + return m_altCameraToTarget3d; + } + + public void setBestCameraToTarget3d(Transform3d pose) { + this.m_bestCameraToTarget3d = pose; + } + + public void setAltCameraToTarget3d(Transform3d pose) { + this.m_altCameraToTarget3d = pose; + } + + public Mat getCameraRelativeTvec() { + return m_cameraRelativeTvec; + } + + public void setCameraRelativeTvec(Mat cameraRelativeTvec) { + if (this.m_cameraRelativeTvec == null) m_cameraRelativeTvec = new Mat(); + cameraRelativeTvec.copyTo(this.m_cameraRelativeTvec); + } + + public Mat getCameraRelativeRvec() { + return m_cameraRelativeRvec; + } + + public void setCameraRelativeRvec(Mat cameraRelativeRvec) { + if (this.m_cameraRelativeRvec == null) m_cameraRelativeRvec = new Mat(); + cameraRelativeRvec.copyTo(this.m_cameraRelativeRvec); + } + + public CVShape getShape() { + return m_shape; + } + + public void setShape(CVShape shape) { + this.m_shape = shape; + } + + public HashMap toHashMap() { + var ret = new HashMap(); + ret.put("pitch", getPitch()); + ret.put("yaw", getYaw()); + ret.put("skew", getSkew()); + ret.put("area", getArea()); + ret.put("ambiguity", getPoseAmbiguity()); + + var bestCameraToTarget3d = getBestCameraToTarget3d(); + if (bestCameraToTarget3d != null) { + ret.put("pose", SerializationUtils.transformToHashMap(bestCameraToTarget3d)); + } + ret.put("fiducialId", getFiducialId()); + return ret; + } + + public 
boolean isFiducial() { + return this.m_fiducialId >= 0; + } + + public static List simpleFromTrackedTargets(List targets) { + var ret = new ArrayList(); + for (var t : targets) { + var minAreaRectCorners = new ArrayList(); + var detectedCorners = new ArrayList(); + { + var points = new Point[4]; + t.getMinAreaRect().points(points); + for (int i = 0; i < 4; i++) { + minAreaRectCorners.add(new TargetCorner(points[i].x, points[i].y)); + } + } + { + var points = t.getTargetCorners(); + for (int i = 0; i < points.size(); i++) { + detectedCorners.add(new TargetCorner(points.get(i).x, points.get(i).y)); + } + } + + ret.add( + new PhotonTrackedTarget( + t.getYaw(), + t.getPitch(), + t.getArea(), + t.getSkew(), + t.getFiducialId(), + t.getBestCameraToTarget3d(), + t.getAltCameraToTarget3d(), + t.getPoseAmbiguity(), + minAreaRectCorners, + detectedCorners)); + } + return ret; + } + + public static class TargetCalculationParameters { + // TargetOffset calculation values + final boolean isLandscape; + final TargetOffsetPointEdge targetOffsetPointEdge; + + // RobotOffset calculation values + final RobotOffsetPointMode robotOffsetPointMode; + final Point robotOffsetSinglePoint; + final DualOffsetValues dualOffsetValues; + + // center point of image + final Point cameraCenterPoint; + + // yaw calculation values + final double horizontalFocalLength; + + // pitch calculation values + final double verticalFocalLength; + + // area calculation values + final double imageArea; + + public TargetCalculationParameters( + boolean isLandscape, + TargetOffsetPointEdge targetOffsetPointEdge, + RobotOffsetPointMode robotOffsetPointMode, + Point robotOffsetSinglePoint, + DualOffsetValues dualOffsetValues, + Point cameraCenterPoint, + double horizontalFocalLength, + double verticalFocalLength, + double imageArea) { + + this.isLandscape = isLandscape; + this.targetOffsetPointEdge = targetOffsetPointEdge; + this.robotOffsetPointMode = robotOffsetPointMode; + this.robotOffsetSinglePoint = 
robotOffsetSinglePoint; + this.dualOffsetValues = dualOffsetValues; + this.cameraCenterPoint = cameraCenterPoint; + this.horizontalFocalLength = horizontalFocalLength; + this.verticalFocalLength = verticalFocalLength; + this.imageArea = imageArea; + } + + public TargetCalculationParameters( + boolean isLandscape, + TargetOffsetPointEdge targetOffsetPointEdge, + RobotOffsetPointMode robotOffsetPointMode, + Point robotOffsetSinglePoint, + DualOffsetValues dualOffsetValues, + FrameStaticProperties frameStaticProperties) { + + this.isLandscape = isLandscape; + this.targetOffsetPointEdge = targetOffsetPointEdge; + this.robotOffsetPointMode = robotOffsetPointMode; + this.robotOffsetSinglePoint = robotOffsetSinglePoint; + this.dualOffsetValues = dualOffsetValues; + + this.cameraCenterPoint = frameStaticProperties.centerPoint; + this.horizontalFocalLength = frameStaticProperties.horizontalFocalLength; + this.verticalFocalLength = frameStaticProperties.verticalFocalLength; + this.imageArea = frameStaticProperties.imageArea; + } + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/videoStream/SocketVideoStream.java b/photon-core/src/main/java/org/photonvision/vision/videoStream/SocketVideoStream.java index c5802c500d..dae270e9f6 100644 --- a/photon-core/src/main/java/org/photonvision/vision/videoStream/SocketVideoStream.java +++ b/photon-core/src/main/java/org/photonvision/vision/videoStream/SocketVideoStream.java @@ -30,88 +30,88 @@ import org.photonvision.vision.opencv.CVMat; public class SocketVideoStream implements Consumer { - int portID = 0; // Align with cscore's port for unique identification of stream - MatOfByte jpegBytes = null; + int portID = 0; // Align with cscore's port for unique identification of stream + MatOfByte jpegBytes = null; - // Gets set to true when another class reads out valid jpeg bytes at least once - // Set back to false when another frame is freshly converted - // Should eliminate synchronization issues of differing rates of 
putting frames in - // and taking them back out - boolean frameWasConsumed = false; + // Gets set to true when another class reads out valid jpeg bytes at least once + // Set back to false when another frame is freshly converted + // Should eliminate synchronization issues of differing rates of putting frames in + // and taking them back out + boolean frameWasConsumed = false; - // Synclock around manipulating the jpeg bytes from multiple threads - Lock jpegBytesLock = new ReentrantLock(); - private int userCount = 0; + // Synclock around manipulating the jpeg bytes from multiple threads + Lock jpegBytesLock = new ReentrantLock(); + private int userCount = 0; - // FPS-limited MJPEG sender - private final double FPS_MAX = 30.0; - private final long minFramePeriodNanos = Math.round(1000000000.0 / FPS_MAX); - private long nextFrameSendTime = MathUtils.wpiNanoTime() + minFramePeriodNanos; - MJPGFrameConsumer oldSchoolServer; + // FPS-limited MJPEG sender + private final double FPS_MAX = 30.0; + private final long minFramePeriodNanos = Math.round(1000000000.0 / FPS_MAX); + private long nextFrameSendTime = MathUtils.wpiNanoTime() + minFramePeriodNanos; + MJPGFrameConsumer oldSchoolServer; - public SocketVideoStream(int portID) { - this.portID = portID; - oldSchoolServer = - new MJPGFrameConsumer( - CameraServerJNI.getHostname() + "_Port_" + portID + "_MJPEG_Server", portID); - } + public SocketVideoStream(int portID) { + this.portID = portID; + oldSchoolServer = + new MJPGFrameConsumer( + CameraServerJNI.getHostname() + "_Port_" + portID + "_MJPEG_Server", portID); + } - @Override - public void accept(CVMat image) { - if (userCount > 0) { - if (jpegBytesLock - .tryLock()) { // we assume frames are coming in frequently. Just skip this frame if we're - // locked doing something else. 
- try { - // Does a single-shot frame receive and convert to JPEG for efficiency - // Will not capture/convert again until convertNextFrame() is called - if (image != null && !image.getMat().empty() && jpegBytes == null) { - frameWasConsumed = false; - jpegBytes = new MatOfByte(); - Imgcodecs.imencode( - ".jpg", - image.getMat(), - jpegBytes, - new MatOfInt(Imgcodecs.IMWRITE_JPEG_QUALITY, 75)); - } - } finally { - jpegBytesLock.unlock(); + @Override + public void accept(CVMat image) { + if (userCount > 0) { + if (jpegBytesLock + .tryLock()) { // we assume frames are coming in frequently. Just skip this frame if we're + // locked doing something else. + try { + // Does a single-shot frame receive and convert to JPEG for efficiency + // Will not capture/convert again until convertNextFrame() is called + if (image != null && !image.getMat().empty() && jpegBytes == null) { + frameWasConsumed = false; + jpegBytes = new MatOfByte(); + Imgcodecs.imencode( + ".jpg", + image.getMat(), + jpegBytes, + new MatOfInt(Imgcodecs.IMWRITE_JPEG_QUALITY, 75)); + } + } finally { + jpegBytesLock.unlock(); + } + } } - } - } - // Send the frame in an FPS-limited fashion - var now = MathUtils.wpiNanoTime(); - if (now > nextFrameSendTime) { - oldSchoolServer.accept(image); - nextFrameSendTime = now + minFramePeriodNanos; + // Send the frame in an FPS-limited fashion + var now = MathUtils.wpiNanoTime(); + if (now > nextFrameSendTime) { + oldSchoolServer.accept(image); + nextFrameSendTime = now + minFramePeriodNanos; + } } - } - public ByteBuffer getJPEGByteBuffer() { - ByteBuffer sendStr = null; - jpegBytesLock.lock(); - if (jpegBytes != null) { - sendStr = ByteBuffer.wrap(jpegBytes.toArray()); + public ByteBuffer getJPEGByteBuffer() { + ByteBuffer sendStr = null; + jpegBytesLock.lock(); + if (jpegBytes != null) { + sendStr = ByteBuffer.wrap(jpegBytes.toArray()); + } + jpegBytesLock.unlock(); + return sendStr; } - jpegBytesLock.unlock(); - return sendStr; - } - public void convertNextFrame() 
{ - jpegBytesLock.lock(); - if (jpegBytes != null) { - jpegBytes.release(); - jpegBytes = null; + public void convertNextFrame() { + jpegBytesLock.lock(); + if (jpegBytes != null) { + jpegBytes.release(); + jpegBytes = null; + } + jpegBytesLock.unlock(); } - jpegBytesLock.unlock(); - } - public void addUser() { - userCount++; - } + public void addUser() { + userCount++; + } - public void removeUser() { - userCount--; - } + public void removeUser() { + userCount--; + } } diff --git a/photon-core/src/main/java/org/photonvision/vision/videoStream/SocketVideoStreamManager.java b/photon-core/src/main/java/org/photonvision/vision/videoStream/SocketVideoStreamManager.java index 9c4b71f791..ceb84feab6 100644 --- a/photon-core/src/main/java/org/photonvision/vision/videoStream/SocketVideoStreamManager.java +++ b/photon-core/src/main/java/org/photonvision/vision/videoStream/SocketVideoStreamManager.java @@ -25,77 +25,77 @@ import org.photonvision.common.logging.Logger; public class SocketVideoStreamManager { - private static final int NO_STREAM_PORT = -1; + private static final int NO_STREAM_PORT = -1; - private final Logger logger = new Logger(SocketVideoStreamManager.class, LogGroup.Camera); + private final Logger logger = new Logger(SocketVideoStreamManager.class, LogGroup.Camera); - private final Map streams = new Hashtable<>(); - private final Map userSubscriptions = new Hashtable<>(); + private final Map streams = new Hashtable<>(); + private final Map userSubscriptions = new Hashtable<>(); - private static class ThreadSafeSingleton { - private static final SocketVideoStreamManager INSTANCE = new SocketVideoStreamManager(); - } + private static class ThreadSafeSingleton { + private static final SocketVideoStreamManager INSTANCE = new SocketVideoStreamManager(); + } - public static SocketVideoStreamManager getInstance() { - return ThreadSafeSingleton.INSTANCE; - } + public static SocketVideoStreamManager getInstance() { + return ThreadSafeSingleton.INSTANCE; + } - 
private SocketVideoStreamManager() {} + private SocketVideoStreamManager() {} - // Register a new available camera stream - public void addStream(SocketVideoStream newStream) { - streams.put(newStream.portID, newStream); - logger.debug("Added new stream for port " + newStream.portID); - } + // Register a new available camera stream + public void addStream(SocketVideoStream newStream) { + streams.put(newStream.portID, newStream); + logger.debug("Added new stream for port " + newStream.portID); + } - // Remove a previously-added camera stream, and unsubscribe all users - public void removeStream(SocketVideoStream oldStream) { - streams.remove(oldStream.portID); - logger.debug("Removed stream for port " + oldStream.portID); - } + // Remove a previously-added camera stream, and unsubscribe all users + public void removeStream(SocketVideoStream oldStream) { + streams.remove(oldStream.portID); + logger.debug("Removed stream for port " + oldStream.portID); + } - // Indicate a user would like to subscribe to a camera stream and get frames from it periodically - public void addSubscription(WsContext user, int streamPortID) { - var stream = streams.get(streamPortID); - if (stream != null) { - userSubscriptions.put(user, streamPortID); - stream.addUser(); - } else { - logger.error("User attempted to subscribe to non-existent port " + streamPortID); + // Indicate a user would like to subscribe to a camera stream and get frames from it periodically + public void addSubscription(WsContext user, int streamPortID) { + var stream = streams.get(streamPortID); + if (stream != null) { + userSubscriptions.put(user, streamPortID); + stream.addUser(); + } else { + logger.error("User attempted to subscribe to non-existent port " + streamPortID); + } } - } - // Indicate a user would like to stop receiving one camera stream - public void removeSubscription(WsContext user) { - var port = userSubscriptions.get(user); - if (port != null && port != NO_STREAM_PORT) { - var stream = 
streams.get(port); - userSubscriptions.put(user, NO_STREAM_PORT); - if (stream != null) { - stream.removeUser(); - } - } else { - logger.error( - "User attempted to unsubscribe, but had not yet previously subscribed successfully."); + // Indicate a user would like to stop receiving one camera stream + public void removeSubscription(WsContext user) { + var port = userSubscriptions.get(user); + if (port != null && port != NO_STREAM_PORT) { + var stream = streams.get(port); + userSubscriptions.put(user, NO_STREAM_PORT); + if (stream != null) { + stream.removeUser(); + } + } else { + logger.error( + "User attempted to unsubscribe, but had not yet previously subscribed successfully."); + } } - } - // For a given user, return the jpeg bytes (or null) for the most recent frame - public ByteBuffer getSendFrame(WsContext user) { - var port = userSubscriptions.get(user); - if (port != null && port != NO_STREAM_PORT) { - var stream = streams.get(port); - return stream.getJPEGByteBuffer(); - } else { - return null; + // For a given user, return the jpeg bytes (or null) for the most recent frame + public ByteBuffer getSendFrame(WsContext user) { + var port = userSubscriptions.get(user); + if (port != null && port != NO_STREAM_PORT) { + var stream = streams.get(port); + return stream.getJPEGByteBuffer(); + } else { + return null; + } } - } - // Causes all streams to "re-trigger" and receive and convert their next mjpeg frame - // Only invoke this after all returned jpeg Strings have been used. - public void allStreamConvertNextFrame() { - for (SocketVideoStream stream : streams.values()) { - stream.convertNextFrame(); + // Causes all streams to "re-trigger" and receive and convert their next mjpeg frame + // Only invoke this after all returned jpeg Strings have been used. 
+ public void allStreamConvertNextFrame() { + for (SocketVideoStream stream : streams.values()) { + stream.convertNextFrame(); + } } - } } diff --git a/photon-core/src/test/java/org/photonvision/common/BenchmarkTest.java b/photon-core/src/test/java/org/photonvision/common/BenchmarkTest.java index 37822c4524..bbf83c6e6f 100644 --- a/photon-core/src/test/java/org/photonvision/common/BenchmarkTest.java +++ b/photon-core/src/test/java/org/photonvision/common/BenchmarkTest.java @@ -38,160 +38,160 @@ /** Various tests that check performance on long-running tasks (i.e. a pipeline) */ public class BenchmarkTest { - @BeforeAll - public static void init() { - TestUtils.loadLibraries(); - } - - @Test - @Order(1) - public void Reflective240pBenchmark() { - var pipeline = new ReflectivePipeline(); - pipeline.getSettings().hsvHue.set(60, 100); - pipeline.getSettings().hsvSaturation.set(100, 255); - pipeline.getSettings().hsvValue.set(190, 255); - pipeline.getSettings().outputShouldDraw = true; - pipeline.getSettings().outputShowMultipleTargets = true; - pipeline.getSettings().contourGroupingMode = ContourGroupingMode.Dual; - pipeline.getSettings().contourIntersection = ContourIntersectionDirection.Up; - - var frameProvider = - new FileFrameProvider( - TestUtils.getWPIImagePath(TestUtils.WPI2019Image.kCargoSideStraightDark72in, false), - TestUtils.WPI2019Image.FOV); - - benchmarkPipeline(frameProvider, pipeline, 5); - } - - @Test - @Order(1) - public void Reflective480pBenchmark() { - var pipeline = new ReflectivePipeline(); - pipeline.getSettings().hsvHue.set(60, 100); - pipeline.getSettings().hsvSaturation.set(200, 255); - pipeline.getSettings().hsvValue.set(200, 255); - pipeline.getSettings().outputShouldDraw = true; - - var frameProvider = - new FileFrameProvider( - TestUtils.getWPIImagePath(TestUtils.WPI2020Image.kBlueGoal_084in_Center, false), - TestUtils.WPI2020Image.FOV); - - benchmarkPipeline(frameProvider, pipeline, 5); - } - - @Test - @Order(3) - public void 
Reflective720pBenchmark() { - var pipeline = new ReflectivePipeline(); - pipeline.getSettings().hsvHue.set(60, 100); - pipeline.getSettings().hsvSaturation.set(200, 255); - pipeline.getSettings().hsvValue.set(200, 255); - pipeline.getSettings().outputShouldDraw = true; - - var frameProvider = - new FileFrameProvider( - TestUtils.getWPIImagePath(TestUtils.WPI2020Image.kBlueGoal_084in_Center_720p, false), - TestUtils.WPI2020Image.FOV); - - benchmarkPipeline(frameProvider, pipeline, 5); - } - - @Test - @Order(4) - public void Reflective1920x1440Benchmark() { - var pipeline = new ReflectivePipeline(); - pipeline.getSettings().hsvHue.set(60, 100); - pipeline.getSettings().hsvSaturation.set(100, 255); - pipeline.getSettings().hsvValue.set(190, 255); - pipeline.getSettings().outputShouldDraw = true; - pipeline.getSettings().outputShowMultipleTargets = true; - pipeline.getSettings().contourGroupingMode = ContourGroupingMode.Dual; - pipeline.getSettings().contourIntersection = ContourIntersectionDirection.Up; - - var frameProvider = - new FileFrameProvider( - TestUtils.getWPIImagePath(TestUtils.WPI2019Image.kCargoStraightDark72in_HighRes, false), - TestUtils.WPI2019Image.FOV); - - benchmarkPipeline(frameProvider, pipeline, 5); - } - - private static

void benchmarkPipeline( - FrameProvider frameProvider, P pipeline, int secondsToRun) { - CVMat.enablePrint(false); - // warmup for 5 loops. - System.out.println("Warming up for 5 loops..."); - for (int i = 0; i < 5; i++) { - pipeline.run(frameProvider.get(), QuirkyCamera.DefaultCamera); + @BeforeAll + public static void init() { + TestUtils.loadLibraries(); } - final List processingTimes = new ArrayList<>(); - final List latencyTimes = new ArrayList<>(); - - var frameProps = frameProvider.get().frameStaticProperties; - - // begin benchmark - System.out.println( - "Beginning " - + secondsToRun - + " second benchmark at resolution " - + frameProps.imageWidth - + "x" - + frameProps.imageHeight); - var benchmarkStartMillis = System.currentTimeMillis(); - do { - CVPipelineResult pipelineResult = - pipeline.run(frameProvider.get(), QuirkyCamera.DefaultCamera); - pipelineResult.release(); - processingTimes.add(pipelineResult.getProcessingMillis()); - latencyTimes.add(pipelineResult.getLatencyMillis()); - } while (System.currentTimeMillis() - benchmarkStartMillis < secondsToRun * 1000.0); - System.out.println("Benchmark complete."); - - var processingMin = Collections.min(processingTimes); - var processingMean = NumberListUtils.mean(processingTimes); - var processingMax = Collections.max(processingTimes); - - var latencyMin = Collections.min(latencyTimes); - var latencyMean = NumberListUtils.mean(latencyTimes); - var latencyMax = Collections.max(latencyTimes); - - String processingResult = - "Processing times - " - + "Min: " - + MathUtils.roundTo(processingMin, 3) - + "ms (" - + MathUtils.roundTo(1000 / processingMin, 3) - + " FPS), " - + "Mean: " - + MathUtils.roundTo(processingMean, 3) - + "ms (" - + MathUtils.roundTo(1000 / processingMean, 3) - + " FPS), " - + "Max: " - + MathUtils.roundTo(processingMax, 3) - + "ms (" - + MathUtils.roundTo(1000 / processingMax, 3) - + " FPS)"; - System.out.println(processingResult); - String latencyResult = - "Latency times - " - + 
"Min: " - + MathUtils.roundTo(latencyMin, 3) - + "ms (" - + MathUtils.roundTo(1000 / latencyMin, 3) - + " FPS), " - + "Mean: " - + MathUtils.roundTo(latencyMean, 3) - + "ms (" - + MathUtils.roundTo(1000 / latencyMean, 3) - + " FPS), " - + "Max: " - + MathUtils.roundTo(latencyMax, 3) - + "ms (" - + MathUtils.roundTo(1000 / latencyMax, 3) - + " FPS)"; - System.out.println(latencyResult); - } + @Test + @Order(1) + public void Reflective240pBenchmark() { + var pipeline = new ReflectivePipeline(); + pipeline.getSettings().hsvHue.set(60, 100); + pipeline.getSettings().hsvSaturation.set(100, 255); + pipeline.getSettings().hsvValue.set(190, 255); + pipeline.getSettings().outputShouldDraw = true; + pipeline.getSettings().outputShowMultipleTargets = true; + pipeline.getSettings().contourGroupingMode = ContourGroupingMode.Dual; + pipeline.getSettings().contourIntersection = ContourIntersectionDirection.Up; + + var frameProvider = + new FileFrameProvider( + TestUtils.getWPIImagePath(TestUtils.WPI2019Image.kCargoSideStraightDark72in, false), + TestUtils.WPI2019Image.FOV); + + benchmarkPipeline(frameProvider, pipeline, 5); + } + + @Test + @Order(1) + public void Reflective480pBenchmark() { + var pipeline = new ReflectivePipeline(); + pipeline.getSettings().hsvHue.set(60, 100); + pipeline.getSettings().hsvSaturation.set(200, 255); + pipeline.getSettings().hsvValue.set(200, 255); + pipeline.getSettings().outputShouldDraw = true; + + var frameProvider = + new FileFrameProvider( + TestUtils.getWPIImagePath(TestUtils.WPI2020Image.kBlueGoal_084in_Center, false), + TestUtils.WPI2020Image.FOV); + + benchmarkPipeline(frameProvider, pipeline, 5); + } + + @Test + @Order(3) + public void Reflective720pBenchmark() { + var pipeline = new ReflectivePipeline(); + pipeline.getSettings().hsvHue.set(60, 100); + pipeline.getSettings().hsvSaturation.set(200, 255); + pipeline.getSettings().hsvValue.set(200, 255); + pipeline.getSettings().outputShouldDraw = true; + + var frameProvider = + new 
FileFrameProvider( + TestUtils.getWPIImagePath(TestUtils.WPI2020Image.kBlueGoal_084in_Center_720p, false), + TestUtils.WPI2020Image.FOV); + + benchmarkPipeline(frameProvider, pipeline, 5); + } + + @Test + @Order(4) + public void Reflective1920x1440Benchmark() { + var pipeline = new ReflectivePipeline(); + pipeline.getSettings().hsvHue.set(60, 100); + pipeline.getSettings().hsvSaturation.set(100, 255); + pipeline.getSettings().hsvValue.set(190, 255); + pipeline.getSettings().outputShouldDraw = true; + pipeline.getSettings().outputShowMultipleTargets = true; + pipeline.getSettings().contourGroupingMode = ContourGroupingMode.Dual; + pipeline.getSettings().contourIntersection = ContourIntersectionDirection.Up; + + var frameProvider = + new FileFrameProvider( + TestUtils.getWPIImagePath(TestUtils.WPI2019Image.kCargoStraightDark72in_HighRes, false), + TestUtils.WPI2019Image.FOV); + + benchmarkPipeline(frameProvider, pipeline, 5); + } + + private static

void benchmarkPipeline( + FrameProvider frameProvider, P pipeline, int secondsToRun) { + CVMat.enablePrint(false); + // warmup for 5 loops. + System.out.println("Warming up for 5 loops..."); + for (int i = 0; i < 5; i++) { + pipeline.run(frameProvider.get(), QuirkyCamera.DefaultCamera); + } + + final List processingTimes = new ArrayList<>(); + final List latencyTimes = new ArrayList<>(); + + var frameProps = frameProvider.get().frameStaticProperties; + + // begin benchmark + System.out.println( + "Beginning " + + secondsToRun + + " second benchmark at resolution " + + frameProps.imageWidth + + "x" + + frameProps.imageHeight); + var benchmarkStartMillis = System.currentTimeMillis(); + do { + CVPipelineResult pipelineResult = + pipeline.run(frameProvider.get(), QuirkyCamera.DefaultCamera); + pipelineResult.release(); + processingTimes.add(pipelineResult.getProcessingMillis()); + latencyTimes.add(pipelineResult.getLatencyMillis()); + } while (System.currentTimeMillis() - benchmarkStartMillis < secondsToRun * 1000.0); + System.out.println("Benchmark complete."); + + var processingMin = Collections.min(processingTimes); + var processingMean = NumberListUtils.mean(processingTimes); + var processingMax = Collections.max(processingTimes); + + var latencyMin = Collections.min(latencyTimes); + var latencyMean = NumberListUtils.mean(latencyTimes); + var latencyMax = Collections.max(latencyTimes); + + String processingResult = + "Processing times - " + + "Min: " + + MathUtils.roundTo(processingMin, 3) + + "ms (" + + MathUtils.roundTo(1000 / processingMin, 3) + + " FPS), " + + "Mean: " + + MathUtils.roundTo(processingMean, 3) + + "ms (" + + MathUtils.roundTo(1000 / processingMean, 3) + + " FPS), " + + "Max: " + + MathUtils.roundTo(processingMax, 3) + + "ms (" + + MathUtils.roundTo(1000 / processingMax, 3) + + " FPS)"; + System.out.println(processingResult); + String latencyResult = + "Latency times - " + + "Min: " + + MathUtils.roundTo(latencyMin, 3) + + "ms (" + + 
MathUtils.roundTo(1000 / latencyMin, 3) + + " FPS), " + + "Mean: " + + MathUtils.roundTo(latencyMean, 3) + + "ms (" + + MathUtils.roundTo(1000 / latencyMean, 3) + + " FPS), " + + "Max: " + + MathUtils.roundTo(latencyMax, 3) + + "ms (" + + MathUtils.roundTo(1000 / latencyMax, 3) + + " FPS)"; + System.out.println(latencyResult); + } } diff --git a/photon-core/src/test/java/org/photonvision/common/ShapeBenchmarkTest.java b/photon-core/src/test/java/org/photonvision/common/ShapeBenchmarkTest.java index 988a9d1f90..6ce4781112 100644 --- a/photon-core/src/test/java/org/photonvision/common/ShapeBenchmarkTest.java +++ b/photon-core/src/test/java/org/photonvision/common/ShapeBenchmarkTest.java @@ -54,173 +54,173 @@ /** Various tests that check performance on long-running tasks (i.e. a pipeline) */ public class ShapeBenchmarkTest { - @BeforeAll - public static void init() { - TestUtils.loadLibraries(); - } - - @Test - public void Shape240pBenchmark() { - var pipeline = new ColoredShapePipeline(); - pipeline.getSettings().hsvHue.set(60, 100); - pipeline.getSettings().hsvSaturation.set(100, 255); - pipeline.getSettings().hsvValue.set(190, 255); - pipeline.getSettings().outputShouldDraw = true; - pipeline.getSettings().outputShowMultipleTargets = true; - pipeline.getSettings().contourGroupingMode = ContourGroupingMode.Single; - pipeline.getSettings().contourIntersection = ContourIntersectionDirection.Up; - pipeline.getSettings().contourShape = ContourShape.Custom; - pipeline.getSettings().circleDetectThreshold = 10; - pipeline.getSettings().accuracyPercentage = 30.0; - var frameProvider = - new FileFrameProvider( - TestUtils.getWPIImagePath(TestUtils.WPI2019Image.kCargoSideStraightDark72in, false), - TestUtils.WPI2019Image.FOV); - - benchmarkPipeline(frameProvider, pipeline, 5); - } - - @Test - public void Shape480pBenchmark() { - var pipeline = new ColoredShapePipeline(); - pipeline.getSettings().hsvHue.set(60, 100); - pipeline.getSettings().hsvSaturation.set(100, 255); - 
pipeline.getSettings().hsvValue.set(190, 255); - pipeline.getSettings().outputShouldDraw = true; - pipeline.getSettings().outputShowMultipleTargets = true; - pipeline.getSettings().contourGroupingMode = ContourGroupingMode.Single; - pipeline.getSettings().contourIntersection = ContourIntersectionDirection.Up; - pipeline.getSettings().contourShape = ContourShape.Custom; - pipeline.getSettings().circleDetectThreshold = 10; - pipeline.getSettings().accuracyPercentage = 30.0; - - var frameProvider = - new FileFrameProvider( - TestUtils.getWPIImagePath(TestUtils.WPI2020Image.kBlueGoal_084in_Center, false), - TestUtils.WPI2020Image.FOV); - - benchmarkPipeline(frameProvider, pipeline, 5); - } - - @Test - public void Shape720pBenchmark() { - var pipeline = new ColoredShapePipeline(); - pipeline.getSettings().hsvHue.set(60, 100); - pipeline.getSettings().hsvSaturation.set(100, 255); - pipeline.getSettings().hsvValue.set(190, 255); - pipeline.getSettings().outputShouldDraw = true; - pipeline.getSettings().outputShowMultipleTargets = true; - pipeline.getSettings().contourGroupingMode = ContourGroupingMode.Single; - pipeline.getSettings().contourIntersection = ContourIntersectionDirection.Up; - pipeline.getSettings().contourShape = ContourShape.Custom; - pipeline.getSettings().circleDetectThreshold = 10; - pipeline.getSettings().accuracyPercentage = 30.0; - - var frameProvider = - new FileFrameProvider( - TestUtils.getWPIImagePath(TestUtils.WPI2020Image.kBlueGoal_084in_Center_720p, false), - TestUtils.WPI2020Image.FOV); - - benchmarkPipeline(frameProvider, pipeline, 5); - } - - @Test - public void Shape1920x1440Benchmark() { - var pipeline = new ColoredShapePipeline(); - pipeline.getSettings().hsvHue.set(60, 100); - pipeline.getSettings().hsvSaturation.set(100, 255); - pipeline.getSettings().hsvValue.set(190, 255); - pipeline.getSettings().outputShouldDraw = true; - pipeline.getSettings().outputShowMultipleTargets = true; - pipeline.getSettings().contourGroupingMode = 
ContourGroupingMode.Single; - pipeline.getSettings().contourIntersection = ContourIntersectionDirection.Up; - pipeline.getSettings().contourShape = ContourShape.Custom; - pipeline.getSettings().circleDetectThreshold = 10; - pipeline.getSettings().accuracyPercentage = 30.0; - - var frameProvider = - new FileFrameProvider( - TestUtils.getWPIImagePath(TestUtils.WPI2019Image.kCargoStraightDark72in_HighRes, false), - TestUtils.WPI2019Image.FOV); - - benchmarkPipeline(frameProvider, pipeline, 5); - } - - private static

void benchmarkPipeline( - FrameProvider frameProvider, P pipeline, int secondsToRun) { - CVMat.enablePrint(false); - // warmup for 5 loops. - System.out.println("Warming up for 5 loops..."); - for (int i = 0; i < 5; i++) { - pipeline.run(frameProvider.get(), QuirkyCamera.DefaultCamera); + @BeforeAll + public static void init() { + TestUtils.loadLibraries(); } - final List processingTimes = new ArrayList<>(); - final List latencyTimes = new ArrayList<>(); - - var frameProps = frameProvider.get().frameStaticProperties; - - // begin benchmark - System.out.println( - "Beginning " - + secondsToRun - + " second benchmark at resolution " - + frameProps.imageWidth - + "x" - + frameProps.imageHeight); - var benchmarkStartMillis = System.currentTimeMillis(); - do { - CVPipelineResult pipelineResult = - pipeline.run(frameProvider.get(), QuirkyCamera.DefaultCamera); - pipelineResult.release(); - processingTimes.add(pipelineResult.getProcessingMillis()); - latencyTimes.add(pipelineResult.getLatencyMillis()); - } while (System.currentTimeMillis() - benchmarkStartMillis < secondsToRun * 1000L); - System.out.println("Benchmark complete."); - - var processingMin = Collections.min(processingTimes); - var processingMean = NumberListUtils.mean(processingTimes); - var processingMax = Collections.max(processingTimes); - - var latencyMin = Collections.min(latencyTimes); - var latencyMean = NumberListUtils.mean(latencyTimes); - var latencyMax = Collections.max(latencyTimes); - - String processingResult = - "Processing times - " - + "Min: " - + MathUtils.roundTo(processingMin, 3) - + "ms (" - + MathUtils.roundTo(1000 / processingMin, 3) - + " FPS), " - + "Mean: " - + MathUtils.roundTo(processingMean, 3) - + "ms (" - + MathUtils.roundTo(1000 / processingMean, 3) - + " FPS), " - + "Max: " - + MathUtils.roundTo(processingMax, 3) - + "ms (" - + MathUtils.roundTo(1000 / processingMax, 3) - + " FPS)"; - System.out.println(processingResult); - String latencyResult = - "Latency times - " - + "Min: 
" - + MathUtils.roundTo(latencyMin, 3) - + "ms (" - + MathUtils.roundTo(1000 / latencyMin, 3) - + " FPS), " - + "Mean: " - + MathUtils.roundTo(latencyMean, 3) - + "ms (" - + MathUtils.roundTo(1000 / latencyMean, 3) - + " FPS), " - + "Max: " - + MathUtils.roundTo(latencyMax, 3) - + "ms (" - + MathUtils.roundTo(1000 / latencyMax, 3) - + " FPS)"; - System.out.println(latencyResult); - } + @Test + public void Shape240pBenchmark() { + var pipeline = new ColoredShapePipeline(); + pipeline.getSettings().hsvHue.set(60, 100); + pipeline.getSettings().hsvSaturation.set(100, 255); + pipeline.getSettings().hsvValue.set(190, 255); + pipeline.getSettings().outputShouldDraw = true; + pipeline.getSettings().outputShowMultipleTargets = true; + pipeline.getSettings().contourGroupingMode = ContourGroupingMode.Single; + pipeline.getSettings().contourIntersection = ContourIntersectionDirection.Up; + pipeline.getSettings().contourShape = ContourShape.Custom; + pipeline.getSettings().circleDetectThreshold = 10; + pipeline.getSettings().accuracyPercentage = 30.0; + var frameProvider = + new FileFrameProvider( + TestUtils.getWPIImagePath(TestUtils.WPI2019Image.kCargoSideStraightDark72in, false), + TestUtils.WPI2019Image.FOV); + + benchmarkPipeline(frameProvider, pipeline, 5); + } + + @Test + public void Shape480pBenchmark() { + var pipeline = new ColoredShapePipeline(); + pipeline.getSettings().hsvHue.set(60, 100); + pipeline.getSettings().hsvSaturation.set(100, 255); + pipeline.getSettings().hsvValue.set(190, 255); + pipeline.getSettings().outputShouldDraw = true; + pipeline.getSettings().outputShowMultipleTargets = true; + pipeline.getSettings().contourGroupingMode = ContourGroupingMode.Single; + pipeline.getSettings().contourIntersection = ContourIntersectionDirection.Up; + pipeline.getSettings().contourShape = ContourShape.Custom; + pipeline.getSettings().circleDetectThreshold = 10; + pipeline.getSettings().accuracyPercentage = 30.0; + + var frameProvider = + new FileFrameProvider( + 
TestUtils.getWPIImagePath(TestUtils.WPI2020Image.kBlueGoal_084in_Center, false), + TestUtils.WPI2020Image.FOV); + + benchmarkPipeline(frameProvider, pipeline, 5); + } + + @Test + public void Shape720pBenchmark() { + var pipeline = new ColoredShapePipeline(); + pipeline.getSettings().hsvHue.set(60, 100); + pipeline.getSettings().hsvSaturation.set(100, 255); + pipeline.getSettings().hsvValue.set(190, 255); + pipeline.getSettings().outputShouldDraw = true; + pipeline.getSettings().outputShowMultipleTargets = true; + pipeline.getSettings().contourGroupingMode = ContourGroupingMode.Single; + pipeline.getSettings().contourIntersection = ContourIntersectionDirection.Up; + pipeline.getSettings().contourShape = ContourShape.Custom; + pipeline.getSettings().circleDetectThreshold = 10; + pipeline.getSettings().accuracyPercentage = 30.0; + + var frameProvider = + new FileFrameProvider( + TestUtils.getWPIImagePath(TestUtils.WPI2020Image.kBlueGoal_084in_Center_720p, false), + TestUtils.WPI2020Image.FOV); + + benchmarkPipeline(frameProvider, pipeline, 5); + } + + @Test + public void Shape1920x1440Benchmark() { + var pipeline = new ColoredShapePipeline(); + pipeline.getSettings().hsvHue.set(60, 100); + pipeline.getSettings().hsvSaturation.set(100, 255); + pipeline.getSettings().hsvValue.set(190, 255); + pipeline.getSettings().outputShouldDraw = true; + pipeline.getSettings().outputShowMultipleTargets = true; + pipeline.getSettings().contourGroupingMode = ContourGroupingMode.Single; + pipeline.getSettings().contourIntersection = ContourIntersectionDirection.Up; + pipeline.getSettings().contourShape = ContourShape.Custom; + pipeline.getSettings().circleDetectThreshold = 10; + pipeline.getSettings().accuracyPercentage = 30.0; + + var frameProvider = + new FileFrameProvider( + TestUtils.getWPIImagePath(TestUtils.WPI2019Image.kCargoStraightDark72in_HighRes, false), + TestUtils.WPI2019Image.FOV); + + benchmarkPipeline(frameProvider, pipeline, 5); + } + + private static

void benchmarkPipeline( + FrameProvider frameProvider, P pipeline, int secondsToRun) { + CVMat.enablePrint(false); + // warmup for 5 loops. + System.out.println("Warming up for 5 loops..."); + for (int i = 0; i < 5; i++) { + pipeline.run(frameProvider.get(), QuirkyCamera.DefaultCamera); + } + + final List processingTimes = new ArrayList<>(); + final List latencyTimes = new ArrayList<>(); + + var frameProps = frameProvider.get().frameStaticProperties; + + // begin benchmark + System.out.println( + "Beginning " + + secondsToRun + + " second benchmark at resolution " + + frameProps.imageWidth + + "x" + + frameProps.imageHeight); + var benchmarkStartMillis = System.currentTimeMillis(); + do { + CVPipelineResult pipelineResult = + pipeline.run(frameProvider.get(), QuirkyCamera.DefaultCamera); + pipelineResult.release(); + processingTimes.add(pipelineResult.getProcessingMillis()); + latencyTimes.add(pipelineResult.getLatencyMillis()); + } while (System.currentTimeMillis() - benchmarkStartMillis < secondsToRun * 1000L); + System.out.println("Benchmark complete."); + + var processingMin = Collections.min(processingTimes); + var processingMean = NumberListUtils.mean(processingTimes); + var processingMax = Collections.max(processingTimes); + + var latencyMin = Collections.min(latencyTimes); + var latencyMean = NumberListUtils.mean(latencyTimes); + var latencyMax = Collections.max(latencyTimes); + + String processingResult = + "Processing times - " + + "Min: " + + MathUtils.roundTo(processingMin, 3) + + "ms (" + + MathUtils.roundTo(1000 / processingMin, 3) + + " FPS), " + + "Mean: " + + MathUtils.roundTo(processingMean, 3) + + "ms (" + + MathUtils.roundTo(1000 / processingMean, 3) + + " FPS), " + + "Max: " + + MathUtils.roundTo(processingMax, 3) + + "ms (" + + MathUtils.roundTo(1000 / processingMax, 3) + + " FPS)"; + System.out.println(processingResult); + String latencyResult = + "Latency times - " + + "Min: " + + MathUtils.roundTo(latencyMin, 3) + + "ms (" + + 
MathUtils.roundTo(1000 / latencyMin, 3) + + " FPS), " + + "Mean: " + + MathUtils.roundTo(latencyMean, 3) + + "ms (" + + MathUtils.roundTo(1000 / latencyMean, 3) + + " FPS), " + + "Max: " + + MathUtils.roundTo(latencyMax, 3) + + "ms (" + + MathUtils.roundTo(1000 / latencyMax, 3) + + " FPS)"; + System.out.println(latencyResult); + } } diff --git a/photon-core/src/test/java/org/photonvision/common/configuration/ConfigTest.java b/photon-core/src/test/java/org/photonvision/common/configuration/ConfigTest.java index 91145f2f37..beaae0e4d9 100644 --- a/photon-core/src/test/java/org/photonvision/common/configuration/ConfigTest.java +++ b/photon-core/src/test/java/org/photonvision/common/configuration/ConfigTest.java @@ -36,109 +36,109 @@ import org.photonvision.vision.target.TargetModel; public class ConfigTest { - private static ConfigManager configMgr; - private static final CameraConfiguration cameraConfig = - new CameraConfiguration("TestCamera", "/dev/video420"); - private static ReflectivePipelineSettings REFLECTIVE_PIPELINE_SETTINGS; - private static ColoredShapePipelineSettings COLORED_SHAPE_PIPELINE_SETTINGS; - private static AprilTagPipelineSettings APRIL_TAG_PIPELINE_SETTINGS; - - @BeforeAll - public static void init() { - TestUtils.loadLibraries(); - var path = Path.of("testconfigdir"); - configMgr = new ConfigManager(path, new LegacyConfigProvider(path)); - configMgr.load(); - - Logger.setLevel(LogGroup.General, LogLevel.TRACE); - - REFLECTIVE_PIPELINE_SETTINGS = new ReflectivePipelineSettings(); - COLORED_SHAPE_PIPELINE_SETTINGS = new ColoredShapePipelineSettings(); - APRIL_TAG_PIPELINE_SETTINGS = new AprilTagPipelineSettings(); - - REFLECTIVE_PIPELINE_SETTINGS.pipelineNickname = "2019Tape"; - REFLECTIVE_PIPELINE_SETTINGS.targetModel = TargetModel.k2019DualTarget; - - COLORED_SHAPE_PIPELINE_SETTINGS.pipelineNickname = "2019Cargo"; - COLORED_SHAPE_PIPELINE_SETTINGS.pipelineIndex = 1; - - APRIL_TAG_PIPELINE_SETTINGS.pipelineNickname = "apriltag"; - 
APRIL_TAG_PIPELINE_SETTINGS.pipelineIndex = 2; - - cameraConfig.addPipelineSetting(REFLECTIVE_PIPELINE_SETTINGS); - cameraConfig.addPipelineSetting(COLORED_SHAPE_PIPELINE_SETTINGS); - cameraConfig.addPipelineSetting(APRIL_TAG_PIPELINE_SETTINGS); - } - - @Test - @Order(1) - public void serializeConfig() { - TestUtils.loadLibraries(); - - Logger.setLevel(LogGroup.General, LogLevel.TRACE); - configMgr.getConfig().addCameraConfig(cameraConfig); - configMgr.saveToDisk(); - - var camConfDir = - new File( - Path.of(configMgr.configDirectoryFile.toString(), "cameras", "TestCamera") - .toAbsolutePath() - .toString()); - Assertions.assertTrue(camConfDir.exists(), "TestCamera config folder not found!"); - - Assertions.assertTrue( - Files.exists(Path.of(configMgr.configDirectoryFile.toString(), "networkSettings.json")), - "networkSettings.json file not found!"); - } - - @Test - @Order(2) - public void deserializeConfig() { - var reflectivePipelineSettings = - configMgr.getConfig().getCameraConfigurations().get("TestCamera").pipelineSettings.get(0); - var coloredShapePipelineSettings = - configMgr.getConfig().getCameraConfigurations().get("TestCamera").pipelineSettings.get(1); - var apriltagPipelineSettings = - configMgr.getConfig().getCameraConfigurations().get("TestCamera").pipelineSettings.get(2); - - Assertions.assertEquals(REFLECTIVE_PIPELINE_SETTINGS, reflectivePipelineSettings); - Assertions.assertEquals(COLORED_SHAPE_PIPELINE_SETTINGS, coloredShapePipelineSettings); - Assertions.assertEquals(APRIL_TAG_PIPELINE_SETTINGS, apriltagPipelineSettings); - - Assertions.assertTrue( - reflectivePipelineSettings instanceof ReflectivePipelineSettings, - "Config loaded pipeline settings for index 0 not of expected type ReflectivePipelineSettings!"); - Assertions.assertTrue( - coloredShapePipelineSettings instanceof ColoredShapePipelineSettings, - "Config loaded pipeline settings for index 1 not of expected type ColoredShapePipelineSettings!"); - Assertions.assertTrue( - 
apriltagPipelineSettings instanceof AprilTagPipelineSettings, - "Config loaded pipeline settings for index 2 not of expected type AprilTagPipelineSettings!"); - } - - @AfterAll - public static void cleanup() throws IOException { - try { - Files.deleteIfExists(Paths.get("settings.json")); - } catch (IOException e) { - e.printStackTrace(); + private static ConfigManager configMgr; + private static final CameraConfiguration cameraConfig = + new CameraConfiguration("TestCamera", "/dev/video420"); + private static ReflectivePipelineSettings REFLECTIVE_PIPELINE_SETTINGS; + private static ColoredShapePipelineSettings COLORED_SHAPE_PIPELINE_SETTINGS; + private static AprilTagPipelineSettings APRIL_TAG_PIPELINE_SETTINGS; + + @BeforeAll + public static void init() { + TestUtils.loadLibraries(); + var path = Path.of("testconfigdir"); + configMgr = new ConfigManager(path, new LegacyConfigProvider(path)); + configMgr.load(); + + Logger.setLevel(LogGroup.General, LogLevel.TRACE); + + REFLECTIVE_PIPELINE_SETTINGS = new ReflectivePipelineSettings(); + COLORED_SHAPE_PIPELINE_SETTINGS = new ColoredShapePipelineSettings(); + APRIL_TAG_PIPELINE_SETTINGS = new AprilTagPipelineSettings(); + + REFLECTIVE_PIPELINE_SETTINGS.pipelineNickname = "2019Tape"; + REFLECTIVE_PIPELINE_SETTINGS.targetModel = TargetModel.k2019DualTarget; + + COLORED_SHAPE_PIPELINE_SETTINGS.pipelineNickname = "2019Cargo"; + COLORED_SHAPE_PIPELINE_SETTINGS.pipelineIndex = 1; + + APRIL_TAG_PIPELINE_SETTINGS.pipelineNickname = "apriltag"; + APRIL_TAG_PIPELINE_SETTINGS.pipelineIndex = 2; + + cameraConfig.addPipelineSetting(REFLECTIVE_PIPELINE_SETTINGS); + cameraConfig.addPipelineSetting(COLORED_SHAPE_PIPELINE_SETTINGS); + cameraConfig.addPipelineSetting(APRIL_TAG_PIPELINE_SETTINGS); } - FileUtils.cleanDirectory(configMgr.configDirectoryFile); - configMgr.configDirectoryFile.delete(); - } - - @Test - public void testJacksonHandlesOldVersions() throws IOException { - var str = - 
"{\"baseName\":\"aaaaaa\",\"uniqueName\":\"aaaaaa\",\"nickname\":\"aaaaaa\",\"FOV\":70.0,\"path\":\"dev/vid\",\"cameraType\":\"UsbCamera\",\"currentPipelineIndex\":0,\"camPitch\":{\"radians\":0.0},\"calibrations\":[], \"cameraLEDs\":[]}"; - var writer = new FileWriter("test.json"); - writer.write(str); - writer.flush(); - writer.close(); - Assertions.assertDoesNotThrow( - () -> JacksonUtils.deserialize(Path.of("test.json"), CameraConfiguration.class)); - - new File("test.json").delete(); - } + @Test + @Order(1) + public void serializeConfig() { + TestUtils.loadLibraries(); + + Logger.setLevel(LogGroup.General, LogLevel.TRACE); + configMgr.getConfig().addCameraConfig(cameraConfig); + configMgr.saveToDisk(); + + var camConfDir = + new File( + Path.of(configMgr.configDirectoryFile.toString(), "cameras", "TestCamera") + .toAbsolutePath() + .toString()); + Assertions.assertTrue(camConfDir.exists(), "TestCamera config folder not found!"); + + Assertions.assertTrue( + Files.exists(Path.of(configMgr.configDirectoryFile.toString(), "networkSettings.json")), + "networkSettings.json file not found!"); + } + + @Test + @Order(2) + public void deserializeConfig() { + var reflectivePipelineSettings = + configMgr.getConfig().getCameraConfigurations().get("TestCamera").pipelineSettings.get(0); + var coloredShapePipelineSettings = + configMgr.getConfig().getCameraConfigurations().get("TestCamera").pipelineSettings.get(1); + var apriltagPipelineSettings = + configMgr.getConfig().getCameraConfigurations().get("TestCamera").pipelineSettings.get(2); + + Assertions.assertEquals(REFLECTIVE_PIPELINE_SETTINGS, reflectivePipelineSettings); + Assertions.assertEquals(COLORED_SHAPE_PIPELINE_SETTINGS, coloredShapePipelineSettings); + Assertions.assertEquals(APRIL_TAG_PIPELINE_SETTINGS, apriltagPipelineSettings); + + Assertions.assertTrue( + reflectivePipelineSettings instanceof ReflectivePipelineSettings, + "Config loaded pipeline settings for index 0 not of expected type 
ReflectivePipelineSettings!"); + Assertions.assertTrue( + coloredShapePipelineSettings instanceof ColoredShapePipelineSettings, + "Config loaded pipeline settings for index 1 not of expected type ColoredShapePipelineSettings!"); + Assertions.assertTrue( + apriltagPipelineSettings instanceof AprilTagPipelineSettings, + "Config loaded pipeline settings for index 2 not of expected type AprilTagPipelineSettings!"); + } + + @AfterAll + public static void cleanup() throws IOException { + try { + Files.deleteIfExists(Paths.get("settings.json")); + } catch (IOException e) { + e.printStackTrace(); + } + + FileUtils.cleanDirectory(configMgr.configDirectoryFile); + configMgr.configDirectoryFile.delete(); + } + + @Test + public void testJacksonHandlesOldVersions() throws IOException { + var str = + "{\"baseName\":\"aaaaaa\",\"uniqueName\":\"aaaaaa\",\"nickname\":\"aaaaaa\",\"FOV\":70.0,\"path\":\"dev/vid\",\"cameraType\":\"UsbCamera\",\"currentPipelineIndex\":0,\"camPitch\":{\"radians\":0.0},\"calibrations\":[], \"cameraLEDs\":[]}"; + var writer = new FileWriter("test.json"); + writer.write(str); + writer.flush(); + writer.close(); + Assertions.assertDoesNotThrow( + () -> JacksonUtils.deserialize(Path.of("test.json"), CameraConfiguration.class)); + + new File("test.json").delete(); + } } diff --git a/photon-core/src/test/java/org/photonvision/common/configuration/NetworkConfigTest.java b/photon-core/src/test/java/org/photonvision/common/configuration/NetworkConfigTest.java index 87f185e4c0..7c9d6d6f42 100644 --- a/photon-core/src/test/java/org/photonvision/common/configuration/NetworkConfigTest.java +++ b/photon-core/src/test/java/org/photonvision/common/configuration/NetworkConfigTest.java @@ -25,28 +25,28 @@ import org.junit.jupiter.api.Test; public class NetworkConfigTest { - @Test - public void testSerialization() throws IOException { - var mapper = new ObjectMapper(); - var path = Path.of("netTest.json"); - mapper.writeValue(path.toFile(), new NetworkConfig()); - 
Assertions.assertDoesNotThrow(() -> mapper.readValue(path.toFile(), NetworkConfig.class)); - new File("netTest.json").delete(); - } - - @Test - public void testDeserializeTeamNumberOrNtServerAddress() { - { - var folder = Path.of("test-resources/network-old-team-number"); - var configMgr = new ConfigManager(folder, new LegacyConfigProvider(folder)); - configMgr.load(); - Assertions.assertEquals("9999", configMgr.getConfig().getNetworkConfig().ntServerAddress); + @Test + public void testSerialization() throws IOException { + var mapper = new ObjectMapper(); + var path = Path.of("netTest.json"); + mapper.writeValue(path.toFile(), new NetworkConfig()); + Assertions.assertDoesNotThrow(() -> mapper.readValue(path.toFile(), NetworkConfig.class)); + new File("netTest.json").delete(); } - { - var folder = Path.of("test-resources/network-new-team-number"); - var configMgr = new ConfigManager(folder, new LegacyConfigProvider(folder)); - configMgr.load(); - Assertions.assertEquals("9999", configMgr.getConfig().getNetworkConfig().ntServerAddress); + + @Test + public void testDeserializeTeamNumberOrNtServerAddress() { + { + var folder = Path.of("test-resources/network-old-team-number"); + var configMgr = new ConfigManager(folder, new LegacyConfigProvider(folder)); + configMgr.load(); + Assertions.assertEquals("9999", configMgr.getConfig().getNetworkConfig().ntServerAddress); + } + { + var folder = Path.of("test-resources/network-new-team-number"); + var configMgr = new ConfigManager(folder, new LegacyConfigProvider(folder)); + configMgr.load(); + Assertions.assertEquals("9999", configMgr.getConfig().getNetworkConfig().ntServerAddress); + } } - } } diff --git a/photon-core/src/test/java/org/photonvision/common/configuration/SQLConfigTest.java b/photon-core/src/test/java/org/photonvision/common/configuration/SQLConfigTest.java index 0dd61dfcc9..18d4ae5b6a 100644 --- a/photon-core/src/test/java/org/photonvision/common/configuration/SQLConfigTest.java +++ 
b/photon-core/src/test/java/org/photonvision/common/configuration/SQLConfigTest.java @@ -30,40 +30,40 @@ import org.photonvision.vision.pipeline.ReflectivePipelineSettings; public class SQLConfigTest { - @BeforeAll - public static void init() { - TestUtils.loadLibraries(); - } + @BeforeAll + public static void init() { + TestUtils.loadLibraries(); + } - @Test - public void testLoad() { - var cfgLoader = new SqlConfigProvider(Path.of("jdbc_test")); + @Test + public void testLoad() { + var cfgLoader = new SqlConfigProvider(Path.of("jdbc_test")); - cfgLoader.load(); + cfgLoader.load(); - var testcamcfg = - new CameraConfiguration( - "basename", - "a_unique_name", - "a_nick_name", - 69, - "a/path/idk", - CameraType.UsbCamera, - List.of(), - 0); - testcamcfg.pipelineSettings = - List.of( - new ReflectivePipelineSettings(), - new AprilTagPipelineSettings(), - new ColoredShapePipelineSettings()); + var testcamcfg = + new CameraConfiguration( + "basename", + "a_unique_name", + "a_nick_name", + 69, + "a/path/idk", + CameraType.UsbCamera, + List.of(), + 0); + testcamcfg.pipelineSettings = + List.of( + new ReflectivePipelineSettings(), + new AprilTagPipelineSettings(), + new ColoredShapePipelineSettings()); - cfgLoader.getConfig().addCameraConfig(testcamcfg); - cfgLoader.getConfig().getNetworkConfig().ntServerAddress = "5940"; - cfgLoader.saveToDisk(); + cfgLoader.getConfig().addCameraConfig(testcamcfg); + cfgLoader.getConfig().getNetworkConfig().ntServerAddress = "5940"; + cfgLoader.saveToDisk(); - cfgLoader.load(); - System.out.println(cfgLoader.getConfig()); + cfgLoader.load(); + System.out.println(cfgLoader.getConfig()); - assertEquals(cfgLoader.getConfig().getNetworkConfig().ntServerAddress, "5940"); - } + assertEquals(cfgLoader.getConfig().getNetworkConfig().ntServerAddress, "5940"); + } } diff --git a/photon-core/src/test/java/org/photonvision/common/util/CoordinateConversionTest.java 
b/photon-core/src/test/java/org/photonvision/common/util/CoordinateConversionTest.java index 2573fff455..a452f517f5 100644 --- a/photon-core/src/test/java/org/photonvision/common/util/CoordinateConversionTest.java +++ b/photon-core/src/test/java/org/photonvision/common/util/CoordinateConversionTest.java @@ -28,52 +28,52 @@ import org.photonvision.common.util.math.MathUtils; public class CoordinateConversionTest { - @BeforeAll - public static void Init() { - TestUtils.loadLibraries(); - } + @BeforeAll + public static void Init() { + TestUtils.loadLibraries(); + } - @Test - public void testAprilTagToOpenCV() { - // AprilTag and OpenCV both use the EDN coordinate system. AprilTag, however, assumes the tag's - // z-axis points away from the camera while we expect it to point towards the camera. - var apriltag = - new Transform3d( - new Translation3d(1, 2, 3), - new Rotation3d(Math.toRadians(5), Math.toRadians(10), Math.toRadians(15))); - var opencv = MathUtils.convertApriltagtoOpenCV(apriltag); - final var expectedTrl = new Translation3d(1, 2, 3); - assertEquals( - expectedTrl, opencv.getTranslation(), "AprilTag to OpenCV translation conversion failed"); - var apriltagXaxis = new Translation3d(1, 0, 0).rotateBy(apriltag.getRotation()); - var apriltagYaxis = new Translation3d(0, 1, 0).rotateBy(apriltag.getRotation()); - var apriltagZaxis = new Translation3d(0, 0, 1).rotateBy(apriltag.getRotation()); - var opencvXaxis = new Translation3d(1, 0, 0).rotateBy(opencv.getRotation()); - var opencvYaxis = new Translation3d(0, 1, 0).rotateBy(opencv.getRotation()); - var opencvZaxis = new Translation3d(0, 0, 1).rotateBy(opencv.getRotation()); - assertEquals( - apriltagXaxis.unaryMinus(), - opencvXaxis, - "AprilTag to OpenCV rotation conversion failed(X-axis)"); - assertEquals( - apriltagYaxis, opencvYaxis, "AprilTag to OpenCV rotation conversion failed(Y-axis)"); - assertEquals( - apriltagZaxis.unaryMinus(), - opencvZaxis, - "AprilTag to OpenCV rotation conversion 
failed(Z-axis)"); - } + @Test + public void testAprilTagToOpenCV() { + // AprilTag and OpenCV both use the EDN coordinate system. AprilTag, however, assumes the tag's + // z-axis points away from the camera while we expect it to point towards the camera. + var apriltag = + new Transform3d( + new Translation3d(1, 2, 3), + new Rotation3d(Math.toRadians(5), Math.toRadians(10), Math.toRadians(15))); + var opencv = MathUtils.convertApriltagtoOpenCV(apriltag); + final var expectedTrl = new Translation3d(1, 2, 3); + assertEquals( + expectedTrl, opencv.getTranslation(), "AprilTag to OpenCV translation conversion failed"); + var apriltagXaxis = new Translation3d(1, 0, 0).rotateBy(apriltag.getRotation()); + var apriltagYaxis = new Translation3d(0, 1, 0).rotateBy(apriltag.getRotation()); + var apriltagZaxis = new Translation3d(0, 0, 1).rotateBy(apriltag.getRotation()); + var opencvXaxis = new Translation3d(1, 0, 0).rotateBy(opencv.getRotation()); + var opencvYaxis = new Translation3d(0, 1, 0).rotateBy(opencv.getRotation()); + var opencvZaxis = new Translation3d(0, 0, 1).rotateBy(opencv.getRotation()); + assertEquals( + apriltagXaxis.unaryMinus(), + opencvXaxis, + "AprilTag to OpenCV rotation conversion failed(X-axis)"); + assertEquals( + apriltagYaxis, opencvYaxis, "AprilTag to OpenCV rotation conversion failed(Y-axis)"); + assertEquals( + apriltagZaxis.unaryMinus(), + opencvZaxis, + "AprilTag to OpenCV rotation conversion failed(Z-axis)"); + } - @Test - public void testOpenCVToPhoton() { - // OpenCV uses the EDN coordinate system while wpilib is in NWU. 
- var opencv = - new Transform3d( - new Translation3d(1, 2, 3), new Rotation3d(VecBuilder.fill(1, 2, 3), Math.PI / 8)); - var wpilib = MathUtils.convertOpenCVtoPhotonTransform(opencv); - final var expectedTrl = new Translation3d(3, -1, -2); - assertEquals( - expectedTrl, wpilib.getTranslation(), "OpenCV to WPILib translation conversion failed"); - var expectedRot = new Rotation3d(VecBuilder.fill(3, -1, -2), Math.PI / 8); - assertEquals(expectedRot, wpilib.getRotation(), "OpenCV to WPILib rotation conversion failed"); - } + @Test + public void testOpenCVToPhoton() { + // OpenCV uses the EDN coordinate system while wpilib is in NWU. + var opencv = + new Transform3d( + new Translation3d(1, 2, 3), new Rotation3d(VecBuilder.fill(1, 2, 3), Math.PI / 8)); + var wpilib = MathUtils.convertOpenCVtoPhotonTransform(opencv); + final var expectedTrl = new Translation3d(3, -1, -2); + assertEquals( + expectedTrl, wpilib.getTranslation(), "OpenCV to WPILib translation conversion failed"); + var expectedRot = new Rotation3d(VecBuilder.fill(3, -1, -2), Math.PI / 8); + assertEquals(expectedRot, wpilib.getRotation(), "OpenCV to WPILib rotation conversion failed"); + } } diff --git a/photon-core/src/test/java/org/photonvision/common/util/LogFileManagementTest.java b/photon-core/src/test/java/org/photonvision/common/util/LogFileManagementTest.java index ff11970f6a..e834874641 100644 --- a/photon-core/src/test/java/org/photonvision/common/util/LogFileManagementTest.java +++ b/photon-core/src/test/java/org/photonvision/common/util/LogFileManagementTest.java @@ -32,53 +32,53 @@ import org.photonvision.common.logging.Logger; public class LogFileManagementTest { - @Test - public void fileCleanupTest() { - // Ensure we instantiate the new log correctly - ConfigManager.getInstance(); + @Test + public void fileCleanupTest() { + // Ensure we instantiate the new log correctly + ConfigManager.getInstance(); - String testDir = ConfigManager.getInstance().getLogsDir().toString() + "/test"; + String 
testDir = ConfigManager.getInstance().getLogsDir().toString() + "/test"; - Assertions.assertDoesNotThrow(() -> Files.createDirectories(Path.of(testDir))); + Assertions.assertDoesNotThrow(() -> Files.createDirectories(Path.of(testDir))); - // Create a bunch of log files with dummy contents. - for (int fileIdx = 0; fileIdx < Logger.MAX_LOGS_TO_KEEP + 5; fileIdx++) { - String fname = - ConfigManager.getInstance() - .taToLogFname( - LocalDateTime.ofEpochSecond(1500000000 + fileIdx * 60, 0, ZoneOffset.UTC)); - try { - FileWriter testLogWriter = new FileWriter(Path.of(testDir, fname).toString()); - testLogWriter.write("Test log contents created for testing purposes only"); - testLogWriter.close(); - } catch (IOException e) { - Assertions.fail("Could not create test files"); - } - } + // Create a bunch of log files with dummy contents. + for (int fileIdx = 0; fileIdx < Logger.MAX_LOGS_TO_KEEP + 5; fileIdx++) { + String fname = + ConfigManager.getInstance() + .taToLogFname( + LocalDateTime.ofEpochSecond(1500000000 + fileIdx * 60, 0, ZoneOffset.UTC)); + try { + FileWriter testLogWriter = new FileWriter(Path.of(testDir, fname).toString()); + testLogWriter.write("Test log contents created for testing purposes only"); + testLogWriter.close(); + } catch (IOException e) { + Assertions.fail("Could not create test files"); + } + } - // Confirm new log files were created - Assertions.assertTrue( - Logger.MAX_LOGS_TO_KEEP + 5 <= countLogFiles(testDir), "Not enough log files discovered"); + // Confirm new log files were created + Assertions.assertTrue( + Logger.MAX_LOGS_TO_KEEP + 5 <= countLogFiles(testDir), "Not enough log files discovered"); - // Run the log cleanup routine - Logger.cleanLogs(Path.of(testDir)); + // Run the log cleanup routine + Logger.cleanLogs(Path.of(testDir)); - // Confirm we deleted log files - Assertions.assertEquals( - Logger.MAX_LOGS_TO_KEEP, countLogFiles(testDir), "Not enough log files deleted"); + // Confirm we deleted log files + 
Assertions.assertEquals( + Logger.MAX_LOGS_TO_KEEP, countLogFiles(testDir), "Not enough log files deleted"); - // Clean uptest directory - org.photonvision.common.util.file.FileUtils.deleteDirectory(Path.of(testDir)); - try { - Files.delete(Path.of(testDir)); - } catch (IOException e) { - // it's OK if this fails + // Clean uptest directory + org.photonvision.common.util.file.FileUtils.deleteDirectory(Path.of(testDir)); + try { + Files.delete(Path.of(testDir)); + } catch (IOException e) { + // it's OK if this fails + } } - } - private int countLogFiles(String testDir) { - return FileUtils.listFiles( - new File(testDir), new WildcardFileFilter("photonvision-*.log"), null) - .size(); - } + private int countLogFiles(String testDir) { + return FileUtils.listFiles( + new File(testDir), new WildcardFileFilter("photonvision-*.log"), null) + .size(); + } } diff --git a/photon-core/src/test/java/org/photonvision/common/util/TimedTaskManagerTest.java b/photon-core/src/test/java/org/photonvision/common/util/TimedTaskManagerTest.java index 78c1347688..7b7567d671 100644 --- a/photon-core/src/test/java/org/photonvision/common/util/TimedTaskManagerTest.java +++ b/photon-core/src/test/java/org/photonvision/common/util/TimedTaskManagerTest.java @@ -22,11 +22,11 @@ import org.junit.jupiter.api.Test; public class TimedTaskManagerTest { - @Test - public void atomicIntegerIncrementTest() throws InterruptedException { - AtomicInteger i = new AtomicInteger(); - TimedTaskManager.getInstance().addTask("TaskManagerTest", i::getAndIncrement, 2); - Thread.sleep(400); - Assertions.assertEquals(200, i.get(), 15); - } + @Test + public void atomicIntegerIncrementTest() throws InterruptedException { + AtomicInteger i = new AtomicInteger(); + TimedTaskManager.getInstance().addTask("TaskManagerTest", i::getAndIncrement, 2); + Thread.sleep(400); + Assertions.assertEquals(200, i.get(), 15); + } } diff --git a/photon-core/src/test/java/org/photonvision/hardware/HardwareConfigTest.java 
b/photon-core/src/test/java/org/photonvision/hardware/HardwareConfigTest.java index 264007da2f..9cd13e5dda 100644 --- a/photon-core/src/test/java/org/photonvision/hardware/HardwareConfigTest.java +++ b/photon-core/src/test/java/org/photonvision/hardware/HardwareConfigTest.java @@ -28,21 +28,21 @@ import org.photonvision.common.util.TestUtils; public class HardwareConfigTest { - @Test - public void loadJson() { - try { - System.out.println("Loading Hardware configs..."); - var config = - new ObjectMapper().readValue(TestUtils.getHardwareConfigJson(), HardwareConfig.class); - assertEquals(config.deviceName, "PhotonVision"); - assertEquals(config.deviceLogoPath, "photonvision.png"); - assertEquals(config.supportURL, "https://support.photonvision.com"); - Assertions.assertArrayEquals( - config.ledPins.stream().mapToInt(i -> i).toArray(), new int[] {2, 13}); - CustomGPIO.setConfig(config); + @Test + public void loadJson() { + try { + System.out.println("Loading Hardware configs..."); + var config = + new ObjectMapper().readValue(TestUtils.getHardwareConfigJson(), HardwareConfig.class); + assertEquals(config.deviceName, "PhotonVision"); + assertEquals(config.deviceLogoPath, "photonvision.png"); + assertEquals(config.supportURL, "https://support.photonvision.com"); + Assertions.assertArrayEquals( + config.ledPins.stream().mapToInt(i -> i).toArray(), new int[] {2, 13}); + CustomGPIO.setConfig(config); - } catch (IOException e) { - e.printStackTrace(); + } catch (IOException e) { + e.printStackTrace(); + } } - } } diff --git a/photon-core/src/test/java/org/photonvision/hardware/HardwareManagerTest.java b/photon-core/src/test/java/org/photonvision/hardware/HardwareManagerTest.java index 77e6b0e30c..6273696058 100644 --- a/photon-core/src/test/java/org/photonvision/hardware/HardwareManagerTest.java +++ b/photon-core/src/test/java/org/photonvision/hardware/HardwareManagerTest.java @@ -27,23 +27,23 @@ import org.photonvision.common.logging.Logger; public class 
HardwareManagerTest { - public static final Logger logger = new Logger(HardwareManager.class, LogGroup.General); + public static final Logger logger = new Logger(HardwareManager.class, LogGroup.General); - @Test - public void managementTest() throws InterruptedException { - Assumptions.assumeTrue(Platform.isRaspberryPi()); - var socket = new PigpioSocket(); - try { - socket.gpioWrite(18, false); - socket.gpioWrite(13, false); - Thread.sleep(500); - for (int i = 0; i < 1000000; i++) { - int duty = 1000000 - i; - socket.hardwarePWM(18, 1000000, duty); - Thread.sleep(0, 25); - } - } catch (PigpioException e) { - logger.error("error", e); + @Test + public void managementTest() throws InterruptedException { + Assumptions.assumeTrue(Platform.isRaspberryPi()); + var socket = new PigpioSocket(); + try { + socket.gpioWrite(18, false); + socket.gpioWrite(13, false); + Thread.sleep(500); + for (int i = 0; i < 1000000; i++) { + int duty = 1000000 - i; + socket.hardwarePWM(18, 1000000, duty); + Thread.sleep(0, 25); + } + } catch (PigpioException e) { + logger.error("error", e); + } } - } } diff --git a/photon-core/src/test/java/org/photonvision/hardware/HardwareTest.java b/photon-core/src/test/java/org/photonvision/hardware/HardwareTest.java index 002d16bf63..9d371d25f8 100644 --- a/photon-core/src/test/java/org/photonvision/hardware/HardwareTest.java +++ b/photon-core/src/test/java/org/photonvision/hardware/HardwareTest.java @@ -28,65 +28,65 @@ import org.photonvision.common.hardware.metrics.MetricsManager; public class HardwareTest { - @Test - public void testHardware() { - MetricsManager mm = new MetricsManager(); + @Test + public void testHardware() { + MetricsManager mm = new MetricsManager(); - if (!Platform.isRaspberryPi()) return; + if (!Platform.isRaspberryPi()) return; - System.out.println("Testing on platform: " + Platform.getPlatformName()); + System.out.println("Testing on platform: " + Platform.getPlatformName()); - System.out.println("Printing CPU Info:"); - 
System.out.println("Memory: " + mm.getMemory() + "MB"); - System.out.println("Temperature: " + mm.getTemp() + "C"); - System.out.println("Utilization: : " + mm.getUtilization() + "%"); + System.out.println("Printing CPU Info:"); + System.out.println("Memory: " + mm.getMemory() + "MB"); + System.out.println("Temperature: " + mm.getTemp() + "C"); + System.out.println("Utilization: : " + mm.getUtilization() + "%"); - System.out.println("Printing GPU Info:"); - System.out.println("Memory: " + mm.getGPUMemorySplit() + "MB"); + System.out.println("Printing GPU Info:"); + System.out.println("Memory: " + mm.getGPUMemorySplit() + "MB"); - System.out.println("Printing RAM Info: "); - System.out.println("Used RAM: : " + mm.getUsedRam() + "MB"); - } - - @Test - public void testGPIO() { - GPIOBase gpio; - if (Platform.isRaspberryPi()) { - gpio = new PigpioPin(18); - } else { - gpio = new CustomGPIO(18); + System.out.println("Printing RAM Info: "); + System.out.println("Used RAM: : " + mm.getUsedRam() + "MB"); } - gpio.setOn(); // HIGH - assertTrue(gpio.getState()); + @Test + public void testGPIO() { + GPIOBase gpio; + if (Platform.isRaspberryPi()) { + gpio = new PigpioPin(18); + } else { + gpio = new CustomGPIO(18); + } + + gpio.setOn(); // HIGH + assertTrue(gpio.getState()); - gpio.setOff(); // LOW - assertFalse(gpio.getState()); + gpio.setOff(); // LOW + assertFalse(gpio.getState()); - gpio.togglePin(); // HIGH - assertTrue(gpio.getState()); + gpio.togglePin(); // HIGH + assertTrue(gpio.getState()); - gpio.togglePin(); // LOW - assertFalse(gpio.getState()); + gpio.togglePin(); // LOW + assertFalse(gpio.getState()); - gpio.setState(true); // HIGH - assertTrue(gpio.getState()); + gpio.setState(true); // HIGH + assertTrue(gpio.getState()); - gpio.setState(false); // LOW - assertFalse(gpio.getState()); + gpio.setState(false); // LOW + assertFalse(gpio.getState()); - var success = gpio.shutdown(); - assertTrue(success); - } + var success = gpio.shutdown(); + assertTrue(success); + 
} - @Test - public void testBlink() { - if (!Platform.isRaspberryPi()) return; - GPIOBase pwm = new PigpioPin(18); - pwm.blink(125, 3); - var startms = System.currentTimeMillis(); - while (true) { - if (System.currentTimeMillis() - startms > 4500) break; + @Test + public void testBlink() { + if (!Platform.isRaspberryPi()) return; + GPIOBase pwm = new PigpioPin(18); + pwm.blink(125, 3); + var startms = System.currentTimeMillis(); + while (true) { + if (System.currentTimeMillis() - startms > 4500) break; + } } - } } diff --git a/photon-core/src/test/java/org/photonvision/vision/QuirkyCameraTest.java b/photon-core/src/test/java/org/photonvision/vision/QuirkyCameraTest.java index 19c5645181..45517ee029 100644 --- a/photon-core/src/test/java/org/photonvision/vision/QuirkyCameraTest.java +++ b/photon-core/src/test/java/org/photonvision/vision/QuirkyCameraTest.java @@ -24,39 +24,39 @@ import org.photonvision.vision.camera.QuirkyCamera; public class QuirkyCameraTest { - @Test - public void ps3EyeTest() { - HashMap ps3EyeQuirks = new HashMap<>(); - ps3EyeQuirks.put(CameraQuirk.Gain, true); - ps3EyeQuirks.put(CameraQuirk.FPSCap100, true); - for (var q : CameraQuirk.values()) { - ps3EyeQuirks.putIfAbsent(q, false); + @Test + public void ps3EyeTest() { + HashMap ps3EyeQuirks = new HashMap<>(); + ps3EyeQuirks.put(CameraQuirk.Gain, true); + ps3EyeQuirks.put(CameraQuirk.FPSCap100, true); + for (var q : CameraQuirk.values()) { + ps3EyeQuirks.putIfAbsent(q, false); + } + + QuirkyCamera psEye = QuirkyCamera.getQuirkyCamera(0x2000, 0x1415); + Assertions.assertEquals(psEye.quirks, ps3EyeQuirks); } - QuirkyCamera psEye = QuirkyCamera.getQuirkyCamera(0x2000, 0x1415); - Assertions.assertEquals(psEye.quirks, ps3EyeQuirks); - } + @Test + public void picamTest() { + HashMap picamQuirks = new HashMap<>(); + picamQuirks.put(CameraQuirk.PiCam, true); + for (var q : CameraQuirk.values()) { + picamQuirks.putIfAbsent(q, false); + } - @Test - public void picamTest() { - HashMap picamQuirks = new 
HashMap<>(); - picamQuirks.put(CameraQuirk.PiCam, true); - for (var q : CameraQuirk.values()) { - picamQuirks.putIfAbsent(q, false); + QuirkyCamera picam = QuirkyCamera.getQuirkyCamera(-1, -1, "mmal service 16.1"); + Assertions.assertEquals(picam.quirks, picamQuirks); } - QuirkyCamera picam = QuirkyCamera.getQuirkyCamera(-1, -1, "mmal service 16.1"); - Assertions.assertEquals(picam.quirks, picamQuirks); - } + @Test + public void quirklessCameraTest() { + HashMap noQuirks = new HashMap<>(); + for (var q : CameraQuirk.values()) { + noQuirks.put(q, false); + } - @Test - public void quirklessCameraTest() { - HashMap noQuirks = new HashMap<>(); - for (var q : CameraQuirk.values()) { - noQuirks.put(q, false); + QuirkyCamera quirkless = QuirkyCamera.getQuirkyCamera(1234, 8888); + Assertions.assertEquals(quirkless.quirks, noQuirks); } - - QuirkyCamera quirkless = QuirkyCamera.getQuirkyCamera(1234, 8888); - Assertions.assertEquals(quirkless.quirks, noQuirks); - } } diff --git a/photon-core/src/test/java/org/photonvision/vision/frame/provider/FileFrameProviderTest.java b/photon-core/src/test/java/org/photonvision/vision/frame/provider/FileFrameProviderTest.java index 742542b885..05cddeb9d7 100644 --- a/photon-core/src/test/java/org/photonvision/vision/frame/provider/FileFrameProviderTest.java +++ b/photon-core/src/test/java/org/photonvision/vision/frame/provider/FileFrameProviderTest.java @@ -27,79 +27,79 @@ import org.photonvision.vision.frame.Frame; public class FileFrameProviderTest { - @BeforeAll - public static void initPath() { - TestUtils.loadLibraries(); - } + @BeforeAll + public static void initPath() { + TestUtils.loadLibraries(); + } + + @Test + public void TestFilesExist() { + assertTrue(Files.exists(TestUtils.getTestImagesPath(false))); + } - @Test - public void TestFilesExist() { - assertTrue(Files.exists(TestUtils.getTestImagesPath(false))); - } + @Test + public void Load2019ImageOnceTest() { + var goodFilePath = + 
TestUtils.getWPIImagePath(TestUtils.WPI2019Image.kCargoStraightDark72in, false); - @Test - public void Load2019ImageOnceTest() { - var goodFilePath = - TestUtils.getWPIImagePath(TestUtils.WPI2019Image.kCargoStraightDark72in, false); + assertTrue(Files.exists(goodFilePath)); - assertTrue(Files.exists(goodFilePath)); + FileFrameProvider goodFrameProvider = new FileFrameProvider(goodFilePath, 68.5); - FileFrameProvider goodFrameProvider = new FileFrameProvider(goodFilePath, 68.5); + Frame goodFrame = goodFrameProvider.get(); - Frame goodFrame = goodFrameProvider.get(); + int goodFrameCols = goodFrame.colorImage.getMat().cols(); + int goodFrameRows = goodFrame.colorImage.getMat().rows(); - int goodFrameCols = goodFrame.colorImage.getMat().cols(); - int goodFrameRows = goodFrame.colorImage.getMat().rows(); + // 2019 Images are at 320x240 + assertEquals(320, goodFrameCols); + assertEquals(240, goodFrameRows); - // 2019 Images are at 320x240 - assertEquals(320, goodFrameCols); - assertEquals(240, goodFrameRows); + TestUtils.showImage(goodFrame.colorImage.getMat(), "2019"); - TestUtils.showImage(goodFrame.colorImage.getMat(), "2019"); + var badFilePath = Paths.get("bad.jpg"); // this file does not exist - var badFilePath = Paths.get("bad.jpg"); // this file does not exist + FileFrameProvider badFrameProvider = null; - FileFrameProvider badFrameProvider = null; + try { + badFrameProvider = new FileFrameProvider(badFilePath, 68.5); + } catch (Exception e) { + // ignored + } - try { - badFrameProvider = new FileFrameProvider(badFilePath, 68.5); - } catch (Exception e) { - // ignored + assertNull(badFrameProvider); } - assertNull(badFrameProvider); - } + @Test + public void Load2020ImageOnceTest() { + var goodFilePath = + TestUtils.getWPIImagePath(TestUtils.WPI2020Image.kBlueGoal_108in_Center, false); - @Test - public void Load2020ImageOnceTest() { - var goodFilePath = - TestUtils.getWPIImagePath(TestUtils.WPI2020Image.kBlueGoal_108in_Center, false); + 
assertTrue(Files.exists(goodFilePath)); - assertTrue(Files.exists(goodFilePath)); + FileFrameProvider goodFrameProvider = new FileFrameProvider(goodFilePath, 68.5); - FileFrameProvider goodFrameProvider = new FileFrameProvider(goodFilePath, 68.5); + Frame goodFrame = goodFrameProvider.get(); - Frame goodFrame = goodFrameProvider.get(); + int goodFrameCols = goodFrame.colorImage.getMat().cols(); + int goodFrameRows = goodFrame.colorImage.getMat().rows(); - int goodFrameCols = goodFrame.colorImage.getMat().cols(); - int goodFrameRows = goodFrame.colorImage.getMat().rows(); + // 2020 Images are at 640x480 + assertEquals(640, goodFrameCols); + assertEquals(480, goodFrameRows); - // 2020 Images are at 640x480 - assertEquals(640, goodFrameCols); - assertEquals(480, goodFrameRows); + TestUtils.showImage(goodFrame.colorImage.getMat(), "2020"); - TestUtils.showImage(goodFrame.colorImage.getMat(), "2020"); + var badFilePath = Paths.get("bad.jpg"); // this file does not exist - var badFilePath = Paths.get("bad.jpg"); // this file does not exist + FileFrameProvider badFrameProvider = null; - FileFrameProvider badFrameProvider = null; + try { + badFrameProvider = new FileFrameProvider(badFilePath, 68.5); + } catch (Exception e) { + // ignored + } - try { - badFrameProvider = new FileFrameProvider(badFilePath, 68.5); - } catch (Exception e) { - // ignored + assertNull(badFrameProvider); } - - assertNull(badFrameProvider); - } } diff --git a/photon-core/src/test/java/org/photonvision/vision/frame/provider/LibcameraTest.java b/photon-core/src/test/java/org/photonvision/vision/frame/provider/LibcameraTest.java index 989868d888..b23a1cd9bd 100644 --- a/photon-core/src/test/java/org/photonvision/vision/frame/provider/LibcameraTest.java +++ b/photon-core/src/test/java/org/photonvision/vision/frame/provider/LibcameraTest.java @@ -22,18 +22,18 @@ // import org.photonvision.raspi.LibCameraJNI; public class LibcameraTest { - @Test - public void testBasic() { - // 
System.load("/home/pi/photon-libcamera-gl-driver/build/libphotonlibcamera.so"); - // LibCameraJNI.createCamera(1920, 1080, 60); - // try { - // Thread.sleep(1000); - // } catch (InterruptedException e) { - // } - // LibCameraJNI.startCamera(); - // try { - // Thread.sleep(5000); - // } catch (InterruptedException e) { - // } - } + @Test + public void testBasic() { + // System.load("/home/pi/photon-libcamera-gl-driver/build/libphotonlibcamera.so"); + // LibCameraJNI.createCamera(1920, 1080, 60); + // try { + // Thread.sleep(1000); + // } catch (InterruptedException e) { + // } + // LibCameraJNI.startCamera(); + // try { + // Thread.sleep(5000); + // } catch (InterruptedException e) { + // } + } } diff --git a/photon-core/src/test/java/org/photonvision/vision/opencv/ContourTest.java b/photon-core/src/test/java/org/photonvision/vision/opencv/ContourTest.java index e84a1edd43..6e6c6fb178 100644 --- a/photon-core/src/test/java/org/photonvision/vision/opencv/ContourTest.java +++ b/photon-core/src/test/java/org/photonvision/vision/opencv/ContourTest.java @@ -28,44 +28,44 @@ import org.photonvision.common.util.TestUtils; public class ContourTest { - @BeforeEach - public void Init() { - TestUtils.loadLibraries(); - } + @BeforeEach + public void Init() { + TestUtils.loadLibraries(); + } - @Test - public void simpleContourTest() { - var mat = new MatOfPoint(); - mat.fromList(List.of(new Point(0, 0), new Point(10, 0), new Point(10, 10), new Point(0, 10))); - var contour = new Contour(mat); - assertEquals(100, contour.getArea()); - assertEquals(40, contour.getPerimeter()); - assertEquals(new Point(5, 5), contour.getCenterPoint()); - } + @Test + public void simpleContourTest() { + var mat = new MatOfPoint(); + mat.fromList(List.of(new Point(0, 0), new Point(10, 0), new Point(10, 10), new Point(0, 10))); + var contour = new Contour(mat); + assertEquals(100, contour.getArea()); + assertEquals(40, contour.getPerimeter()); + assertEquals(new Point(5, 5), contour.getCenterPoint()); + 
} - @Test - public void test2019() { - var firstMat = new MatOfPoint(); - // contour 0 and 1 data from kCargoStraightDark72in_HighRes - firstMat.fromList( - List.of( - new Point(1328, 976), - new Point(1272, 985), - new Point(1230, 832), - new Point(1326, 948), - new Point(1328, 971))); + @Test + public void test2019() { + var firstMat = new MatOfPoint(); + // contour 0 and 1 data from kCargoStraightDark72in_HighRes + firstMat.fromList( + List.of( + new Point(1328, 976), + new Point(1272, 985), + new Point(1230, 832), + new Point(1326, 948), + new Point(1328, 971))); - var secondMat = new MatOfPoint(); - secondMat.fromList( - List.of( - new Point(956, 832), - new Point(882, 978), - new Point(927, 810), - new Point(954, 821), - new Point(956, 825))); - var firstContour = new Contour(firstMat); - var secondContour = new Contour(secondMat); - boolean result = firstContour.isIntersecting(secondContour, ContourIntersectionDirection.Up); - assertTrue(result); - } + var secondMat = new MatOfPoint(); + secondMat.fromList( + List.of( + new Point(956, 832), + new Point(882, 978), + new Point(927, 810), + new Point(954, 821), + new Point(956, 825))); + var firstContour = new Contour(firstMat); + var secondContour = new Contour(secondMat); + boolean result = firstContour.isIntersecting(secondContour, ContourIntersectionDirection.Up); + assertTrue(result); + } } diff --git a/photon-core/src/test/java/org/photonvision/vision/pipeline/AprilTagTest.java b/photon-core/src/test/java/org/photonvision/vision/pipeline/AprilTagTest.java index fd4ce75035..fe1b7b9d9c 100644 --- a/photon-core/src/test/java/org/photonvision/vision/pipeline/AprilTagTest.java +++ b/photon-core/src/test/java/org/photonvision/vision/pipeline/AprilTagTest.java @@ -31,118 +31,118 @@ import org.photonvision.vision.target.TrackedTarget; public class AprilTagTest { - @BeforeEach - public void Init() { - TestUtils.loadLibraries(); - } - - @Test - public void testApriltagFacingCamera() { - var pipeline = new 
AprilTagPipeline(); - - pipeline.getSettings().inputShouldShow = true; - pipeline.getSettings().outputShouldDraw = true; - pipeline.getSettings().solvePNPEnabled = true; - pipeline.getSettings().cornerDetectionAccuracyPercentage = 4; - pipeline.getSettings().cornerDetectionUseConvexHulls = true; - pipeline.getSettings().targetModel = TargetModel.k200mmAprilTag; - pipeline.getSettings().tagFamily = AprilTagFamily.kTag36h11; - - var frameProvider = - new FileFrameProvider( - TestUtils.getApriltagImagePath(TestUtils.ApriltagTestImages.kTag1_640_480, false), - TestUtils.WPI2020Image.FOV, - TestUtils.get2020LifeCamCoeffs(false)); - frameProvider.requestFrameThresholdType(pipeline.getThresholdType()); - - CVPipelineResult pipelineResult; - try { - pipelineResult = pipeline.run(frameProvider.get(), QuirkyCamera.DefaultCamera); - printTestResults(pipelineResult); - } catch (RuntimeException e) { - // For now, will throw because of the Rotation3d ctor - return; + @BeforeEach + public void Init() { + TestUtils.loadLibraries(); } - // Draw on input - var outputPipe = new OutputStreamPipeline(); - var ret = - outputPipe.process( - pipelineResult.inputAndOutputFrame, pipeline.getSettings(), pipelineResult.targets); - - TestUtils.showImage(ret.inputAndOutputFrame.processedImage.getMat(), "Pipeline output", 999999); - - // these numbers are not *accurate*, but they are known and expected - var pose = pipelineResult.targets.get(0).getBestCameraToTarget3d(); - Assertions.assertEquals(2, pose.getTranslation().getX(), 0.2); - Assertions.assertEquals(0.1, pose.getTranslation().getY(), 0.2); - Assertions.assertEquals(0.0, pose.getTranslation().getZ(), 0.2); - - // We expect the object axes to be in NWU, with the x-axis coming out of the tag - // This visible tag is facing the camera almost parallel, so in world space: - - // The object's X axis should be (-1, 0, 0) - Assertions.assertEquals( - -1, new Translation3d(1, 0, 0).rotateBy(pose.getRotation()).getX(), 0.1); - // The object's Y 
axis should be (0, -1, 0) - Assertions.assertEquals( - -1, new Translation3d(0, 1, 0).rotateBy(pose.getRotation()).getY(), 0.1); - // The object's Z axis should be (0, 0, 1) - Assertions.assertEquals(1, new Translation3d(0, 0, 1).rotateBy(pose.getRotation()).getZ(), 0.1); - } - - @Test - public void testApriltagDistorted() { - var pipeline = new AprilTagPipeline(); - - pipeline.getSettings().inputShouldShow = true; - pipeline.getSettings().outputShouldDraw = true; - pipeline.getSettings().solvePNPEnabled = true; - pipeline.getSettings().cornerDetectionAccuracyPercentage = 4; - pipeline.getSettings().cornerDetectionUseConvexHulls = true; - pipeline.getSettings().targetModel = TargetModel.k200mmAprilTag; - pipeline.getSettings().tagFamily = AprilTagFamily.kTag16h5; - - var frameProvider = - new FileFrameProvider( - TestUtils.getApriltagImagePath(TestUtils.ApriltagTestImages.kTag_corner_1280, false), - TestUtils.WPI2020Image.FOV, - TestUtils.getCoeffs(TestUtils.LIMELIGHT_480P_CAL_FILE, false)); - frameProvider.requestFrameThresholdType(pipeline.getThresholdType()); - - CVPipelineResult pipelineResult; - try { - pipelineResult = pipeline.run(frameProvider.get(), QuirkyCamera.DefaultCamera); - printTestResults(pipelineResult); - } catch (RuntimeException e) { - // For now, will throw because of the Rotation3d ctor - return; + @Test + public void testApriltagFacingCamera() { + var pipeline = new AprilTagPipeline(); + + pipeline.getSettings().inputShouldShow = true; + pipeline.getSettings().outputShouldDraw = true; + pipeline.getSettings().solvePNPEnabled = true; + pipeline.getSettings().cornerDetectionAccuracyPercentage = 4; + pipeline.getSettings().cornerDetectionUseConvexHulls = true; + pipeline.getSettings().targetModel = TargetModel.k200mmAprilTag; + pipeline.getSettings().tagFamily = AprilTagFamily.kTag36h11; + + var frameProvider = + new FileFrameProvider( + TestUtils.getApriltagImagePath(TestUtils.ApriltagTestImages.kTag1_640_480, false), + 
TestUtils.WPI2020Image.FOV, + TestUtils.get2020LifeCamCoeffs(false)); + frameProvider.requestFrameThresholdType(pipeline.getThresholdType()); + + CVPipelineResult pipelineResult; + try { + pipelineResult = pipeline.run(frameProvider.get(), QuirkyCamera.DefaultCamera); + printTestResults(pipelineResult); + } catch (RuntimeException e) { + // For now, will throw because of the Rotation3d ctor + return; + } + + // Draw on input + var outputPipe = new OutputStreamPipeline(); + var ret = + outputPipe.process( + pipelineResult.inputAndOutputFrame, pipeline.getSettings(), pipelineResult.targets); + + TestUtils.showImage(ret.inputAndOutputFrame.processedImage.getMat(), "Pipeline output", 999999); + + // these numbers are not *accurate*, but they are known and expected + var pose = pipelineResult.targets.get(0).getBestCameraToTarget3d(); + Assertions.assertEquals(2, pose.getTranslation().getX(), 0.2); + Assertions.assertEquals(0.1, pose.getTranslation().getY(), 0.2); + Assertions.assertEquals(0.0, pose.getTranslation().getZ(), 0.2); + + // We expect the object axes to be in NWU, with the x-axis coming out of the tag + // This visible tag is facing the camera almost parallel, so in world space: + + // The object's X axis should be (-1, 0, 0) + Assertions.assertEquals( + -1, new Translation3d(1, 0, 0).rotateBy(pose.getRotation()).getX(), 0.1); + // The object's Y axis should be (0, -1, 0) + Assertions.assertEquals( + -1, new Translation3d(0, 1, 0).rotateBy(pose.getRotation()).getY(), 0.1); + // The object's Z axis should be (0, 0, 1) + Assertions.assertEquals(1, new Translation3d(0, 0, 1).rotateBy(pose.getRotation()).getZ(), 0.1); } - // Draw on input - var outputPipe = new OutputStreamPipeline(); - var ret = - outputPipe.process( - pipelineResult.inputAndOutputFrame, pipeline.getSettings(), pipelineResult.targets); - - TestUtils.showImage(ret.inputAndOutputFrame.processedImage.getMat(), "Pipeline output", 999999); - - // these numbers are not *accurate*, but they are known 
and expected - var pose = pipelineResult.targets.get(0).getBestCameraToTarget3d(); - Assertions.assertEquals(4.14, pose.getTranslation().getX(), 0.2); - Assertions.assertEquals(2, pose.getTranslation().getY(), 0.2); - Assertions.assertEquals(0.0, pose.getTranslation().getZ(), 0.2); - } - - private static void printTestResults(CVPipelineResult pipelineResult) { - double fps = 1000 / pipelineResult.getLatencyMillis(); - System.out.println( - "Pipeline ran in " + pipelineResult.getLatencyMillis() + "ms (" + fps + " " + "fps)"); - System.out.println("Found " + pipelineResult.targets.size() + " valid targets"); - System.out.println( - "Found targets at " - + pipelineResult.targets.stream() - .map(TrackedTarget::getBestCameraToTarget3d) - .collect(Collectors.toList())); - } + @Test + public void testApriltagDistorted() { + var pipeline = new AprilTagPipeline(); + + pipeline.getSettings().inputShouldShow = true; + pipeline.getSettings().outputShouldDraw = true; + pipeline.getSettings().solvePNPEnabled = true; + pipeline.getSettings().cornerDetectionAccuracyPercentage = 4; + pipeline.getSettings().cornerDetectionUseConvexHulls = true; + pipeline.getSettings().targetModel = TargetModel.k200mmAprilTag; + pipeline.getSettings().tagFamily = AprilTagFamily.kTag16h5; + + var frameProvider = + new FileFrameProvider( + TestUtils.getApriltagImagePath(TestUtils.ApriltagTestImages.kTag_corner_1280, false), + TestUtils.WPI2020Image.FOV, + TestUtils.getCoeffs(TestUtils.LIMELIGHT_480P_CAL_FILE, false)); + frameProvider.requestFrameThresholdType(pipeline.getThresholdType()); + + CVPipelineResult pipelineResult; + try { + pipelineResult = pipeline.run(frameProvider.get(), QuirkyCamera.DefaultCamera); + printTestResults(pipelineResult); + } catch (RuntimeException e) { + // For now, will throw because of the Rotation3d ctor + return; + } + + // Draw on input + var outputPipe = new OutputStreamPipeline(); + var ret = + outputPipe.process( + pipelineResult.inputAndOutputFrame, 
pipeline.getSettings(), pipelineResult.targets); + + TestUtils.showImage(ret.inputAndOutputFrame.processedImage.getMat(), "Pipeline output", 999999); + + // these numbers are not *accurate*, but they are known and expected + var pose = pipelineResult.targets.get(0).getBestCameraToTarget3d(); + Assertions.assertEquals(4.14, pose.getTranslation().getX(), 0.2); + Assertions.assertEquals(2, pose.getTranslation().getY(), 0.2); + Assertions.assertEquals(0.0, pose.getTranslation().getZ(), 0.2); + } + + private static void printTestResults(CVPipelineResult pipelineResult) { + double fps = 1000 / pipelineResult.getLatencyMillis(); + System.out.println( + "Pipeline ran in " + pipelineResult.getLatencyMillis() + "ms (" + fps + " " + "fps)"); + System.out.println("Found " + pipelineResult.targets.size() + " valid targets"); + System.out.println( + "Found targets at " + + pipelineResult.targets.stream() + .map(TrackedTarget::getBestCameraToTarget3d) + .collect(Collectors.toList())); + } } diff --git a/photon-core/src/test/java/org/photonvision/vision/pipeline/ArucoPipelineTest.java b/photon-core/src/test/java/org/photonvision/vision/pipeline/ArucoPipelineTest.java index 44e8af4a98..168b7b92e9 100644 --- a/photon-core/src/test/java/org/photonvision/vision/pipeline/ArucoPipelineTest.java +++ b/photon-core/src/test/java/org/photonvision/vision/pipeline/ArucoPipelineTest.java @@ -29,58 +29,58 @@ import org.photonvision.vision.target.TrackedTarget; public class ArucoPipelineTest { - @BeforeEach - public void Init() throws IOException { - TestUtils.loadLibraries(); - } + @BeforeEach + public void Init() throws IOException { + TestUtils.loadLibraries(); + } - @Test - public void testApriltagFacingCameraAruco() { - var pipeline = new ArucoPipeline(); + @Test + public void testApriltagFacingCameraAruco() { + var pipeline = new ArucoPipeline(); - pipeline.getSettings().inputShouldShow = true; - pipeline.getSettings().outputShouldDraw = true; - pipeline.getSettings().solvePNPEnabled = 
true; - pipeline.getSettings().cornerDetectionAccuracyPercentage = 4; - pipeline.getSettings().cornerDetectionUseConvexHulls = true; - pipeline.getSettings().targetModel = TargetModel.k200mmAprilTag; + pipeline.getSettings().inputShouldShow = true; + pipeline.getSettings().outputShouldDraw = true; + pipeline.getSettings().solvePNPEnabled = true; + pipeline.getSettings().cornerDetectionAccuracyPercentage = 4; + pipeline.getSettings().cornerDetectionUseConvexHulls = true; + pipeline.getSettings().targetModel = TargetModel.k200mmAprilTag; - // pipeline.getSettings().tagFamily = AprilTagFamily.kTag36h11; + // pipeline.getSettings().tagFamily = AprilTagFamily.kTag36h11; - var frameProvider = - new FileFrameProvider( - TestUtils.getApriltagImagePath(TestUtils.ApriltagTestImages.kTag1_16h5_1280, false), - 106, - TestUtils.getCoeffs("laptop_1280.json", false)); - frameProvider.requestFrameThresholdType(pipeline.getThresholdType()); + var frameProvider = + new FileFrameProvider( + TestUtils.getApriltagImagePath(TestUtils.ApriltagTestImages.kTag1_16h5_1280, false), + 106, + TestUtils.getCoeffs("laptop_1280.json", false)); + frameProvider.requestFrameThresholdType(pipeline.getThresholdType()); - CVPipelineResult pipelineResult; - try { - pipelineResult = pipeline.run(frameProvider.get(), QuirkyCamera.DefaultCamera); - printTestResults(pipelineResult); - } catch (RuntimeException e) { - // For now, will throw because of the Rotation3d ctor - return; - } + CVPipelineResult pipelineResult; + try { + pipelineResult = pipeline.run(frameProvider.get(), QuirkyCamera.DefaultCamera); + printTestResults(pipelineResult); + } catch (RuntimeException e) { + // For now, will throw because of the Rotation3d ctor + return; + } - // Draw on input - var outputPipe = new OutputStreamPipeline(); - outputPipe.process( - pipelineResult.inputAndOutputFrame, pipeline.getSettings(), pipelineResult.targets); + // Draw on input + var outputPipe = new OutputStreamPipeline(); + outputPipe.process( + 
pipelineResult.inputAndOutputFrame, pipeline.getSettings(), pipelineResult.targets); - TestUtils.showImage( - pipelineResult.inputAndOutputFrame.processedImage.getMat(), "Pipeline output", 999999); - } + TestUtils.showImage( + pipelineResult.inputAndOutputFrame.processedImage.getMat(), "Pipeline output", 999999); + } - private static void printTestResults(CVPipelineResult pipelineResult) { - double fps = 1000 / pipelineResult.getLatencyMillis(); - System.out.println( - "Pipeline ran in " + pipelineResult.getLatencyMillis() + "ms (" + fps + " " + "fps)"); - System.out.println("Found " + pipelineResult.targets.size() + " valid targets"); - System.out.println( - "Found targets at " - + pipelineResult.targets.stream() - .map(TrackedTarget::getBestCameraToTarget3d) - .collect(Collectors.toList())); - } + private static void printTestResults(CVPipelineResult pipelineResult) { + double fps = 1000 / pipelineResult.getLatencyMillis(); + System.out.println( + "Pipeline ran in " + pipelineResult.getLatencyMillis() + "ms (" + fps + " " + "fps)"); + System.out.println("Found " + pipelineResult.targets.size() + " valid targets"); + System.out.println( + "Found targets at " + + pipelineResult.targets.stream() + .map(TrackedTarget::getBestCameraToTarget3d) + .collect(Collectors.toList())); + } } diff --git a/photon-core/src/test/java/org/photonvision/vision/pipeline/Calibrate3dPipeTest.java b/photon-core/src/test/java/org/photonvision/vision/pipeline/Calibrate3dPipeTest.java index eb960db0a2..72547829f1 100644 --- a/photon-core/src/test/java/org/photonvision/vision/pipeline/Calibrate3dPipeTest.java +++ b/photon-core/src/test/java/org/photonvision/vision/pipeline/Calibrate3dPipeTest.java @@ -46,306 +46,306 @@ import org.photonvision.vision.pipe.impl.FindBoardCornersPipe; public class Calibrate3dPipeTest { - @BeforeAll - public static void init() { - TestUtils.loadLibraries(); - } - - @Test - public void perViewErrorsTest() { - List frames = new ArrayList<>(); - - File dir = new 
File(TestUtils.getDotBoardImagesPath().toAbsolutePath().toString()); - File[] directoryListing = dir.listFiles(); - for (var file : directoryListing) { - frames.add(Imgcodecs.imread(file.getAbsolutePath())); + @BeforeAll + public static void init() { + TestUtils.loadLibraries(); } - FindBoardCornersPipe findBoardCornersPipe = new FindBoardCornersPipe(); - findBoardCornersPipe.setParams( - new FindBoardCornersPipe.FindCornersPipeParams( - 11, 4, UICalibrationData.BoardType.DOTBOARD, 15, FrameDivisor.NONE)); + @Test + public void perViewErrorsTest() { + List frames = new ArrayList<>(); - List> foundCornersList = new ArrayList<>(); + File dir = new File(TestUtils.getDotBoardImagesPath().toAbsolutePath().toString()); + File[] directoryListing = dir.listFiles(); + for (var file : directoryListing) { + frames.add(Imgcodecs.imread(file.getAbsolutePath())); + } - for (var f : frames) { - var copy = new Mat(); - f.copyTo(copy); - foundCornersList.add(findBoardCornersPipe.run(Pair.of(f, copy)).output); - } + FindBoardCornersPipe findBoardCornersPipe = new FindBoardCornersPipe(); + findBoardCornersPipe.setParams( + new FindBoardCornersPipe.FindCornersPipeParams( + 11, 4, UICalibrationData.BoardType.DOTBOARD, 15, FrameDivisor.NONE)); - Calibrate3dPipe calibrate3dPipe = new Calibrate3dPipe(); - calibrate3dPipe.setParams(new Calibrate3dPipe.CalibratePipeParams(new Size(640, 480))); + List> foundCornersList = new ArrayList<>(); - var calibrate3dPipeOutput = calibrate3dPipe.run(foundCornersList); - assertTrue(calibrate3dPipeOutput.output.perViewErrors.length > 0); - System.out.println( - "Per View Errors: " + Arrays.toString(calibrate3dPipeOutput.output.perViewErrors)); + for (var f : frames) { + var copy = new Mat(); + f.copyTo(copy); + foundCornersList.add(findBoardCornersPipe.run(Pair.of(f, copy)).output); + } - for (var f : frames) { - f.release(); - } - } - - @Test - public void calibrationPipelineTest() { - int startMatCount = CVMat.getMatCount(); - - File dir = new 
File(TestUtils.getDotBoardImagesPath().toAbsolutePath().toString()); - File[] directoryListing = dir.listFiles(); - - Calibrate3dPipeline calibration3dPipeline = new Calibrate3dPipeline(20); - calibration3dPipeline.getSettings().boardHeight = 11; - calibration3dPipeline.getSettings().boardWidth = 4; - calibration3dPipeline.getSettings().boardType = UICalibrationData.BoardType.DOTBOARD; - calibration3dPipeline.getSettings().gridSize = 15; - calibration3dPipeline.getSettings().resolution = new Size(640, 480); - - for (var file : directoryListing) { - calibration3dPipeline.takeSnapshot(); - var frame = - new Frame( - new CVMat(Imgcodecs.imread(file.getAbsolutePath())), - new CVMat(), - FrameThresholdType.NONE, - new FrameStaticProperties(640, 480, 60, null)); - var output = calibration3dPipeline.run(frame, QuirkyCamera.DefaultCamera); - // TestUtils.showImage(output.inputAndOutputFrame.processedImage.getMat()); - output.release(); - frame.release(); + Calibrate3dPipe calibrate3dPipe = new Calibrate3dPipe(); + calibrate3dPipe.setParams(new Calibrate3dPipe.CalibratePipeParams(new Size(640, 480))); + + var calibrate3dPipeOutput = calibrate3dPipe.run(foundCornersList); + assertTrue(calibrate3dPipeOutput.output.perViewErrors.length > 0); + System.out.println( + "Per View Errors: " + Arrays.toString(calibrate3dPipeOutput.output.perViewErrors)); + + for (var f : frames) { + f.release(); + } } - assertTrue( - calibration3dPipeline.foundCornersList.stream() - .map(Triple::getRight) - .allMatch(it -> it.width() > 0 && it.height() > 0)); - - calibration3dPipeline.removeSnapshot(0); - var frame = - new Frame( - new CVMat(Imgcodecs.imread(directoryListing[0].getAbsolutePath())), - new CVMat(), - FrameThresholdType.NONE, - new FrameStaticProperties(640, 480, 60, null)); - calibration3dPipeline.run(frame, QuirkyCamera.DefaultCamera).release(); - frame.release(); - - assertTrue( - calibration3dPipeline.foundCornersList.stream() - .map(Triple::getRight) - .allMatch(it -> it.width() > 
0 && it.height() > 0)); - - var cal = calibration3dPipeline.tryCalibration(); - calibration3dPipeline.finishCalibration(); - - assertNotNull(cal); - assertNotNull(cal.perViewErrors); - System.out.println("Per View Errors: " + Arrays.toString(cal.perViewErrors)); - System.out.println("Camera Intrinsics: " + cal.cameraIntrinsics.toString()); - System.out.println("Dist Coeffs: " + cal.distCoeffs.toString()); - System.out.println("Standard Deviation: " + cal.standardDeviation); - System.out.println( - "Mean: " + Arrays.stream(calibration3dPipeline.perViewErrors()).average().toString()); - - // Confirm we didn't get leaky on our mat usage - // assertTrue(CVMat.getMatCount() == startMatCount); // TODO Figure out why this doesn't work in - // CI - System.out.println("CVMats left: " + CVMat.getMatCount() + " Start: " + startMatCount); - } - - @Test - public void calibrateSquares320x240_pi() { - // Pi3 and V1.3 camera - String base = TestUtils.getSquaresBoardImagesPath().toAbsolutePath().toString(); - File dir = Path.of(base, "piCam", "320_240_1").toFile(); - Size sz = new Size(320, 240); - calibrateSquaresCommon(sz, dir); - } - - @Test - public void calibrateSquares640x480_pi() { - // Pi3 and V1.3 camera - String base = TestUtils.getSquaresBoardImagesPath().toAbsolutePath().toString(); - File dir = Path.of(base, "piCam", "640_480_1").toFile(); - Size sz = new Size(640, 480); - calibrateSquaresCommon(sz, dir); - } - - @Test - public void calibrateSquares960x720_pi() { - // Pi3 and V1.3 camera - String base = TestUtils.getSquaresBoardImagesPath().toAbsolutePath().toString(); - File dir = Path.of(base, "piCam", "960_720_1").toFile(); - Size sz = new Size(960, 720); - calibrateSquaresCommon(sz, dir); - } - - @Test - public void calibrateSquares1920x1080_pi() { - // Pi3 and V1.3 camera - String base = TestUtils.getSquaresBoardImagesPath().toAbsolutePath().toString(); - File dir = Path.of(base, "piCam", "1920_1080_1").toFile(); - Size sz = new Size(1920, 1080); - 
calibrateSquaresCommon(sz, dir); - } - - @Test - public void calibrateSquares320x240_gloworm() { - // Gloworm Beta - String base = TestUtils.getSquaresBoardImagesPath().toAbsolutePath().toString(); - File dir = Path.of(base, "gloworm", "320_240_1").toFile(); - Size sz = new Size(320, 240); - Size boardDim = new Size(9, 7); - calibrateSquaresCommon(sz, dir, boardDim); - } - - @Test - public void calibrateSquares_960_720_gloworm() { - // Gloworm Beta - String base = TestUtils.getSquaresBoardImagesPath().toAbsolutePath().toString(); - File dir = Path.of(base, "gloworm", "960_720_1").toFile(); - Size sz = new Size(960, 720); - Size boardDim = new Size(9, 7); - calibrateSquaresCommon(sz, dir, boardDim); - } - - @Test - public void calibrateSquares_1280_720_gloworm() { - // Gloworm Beta - // This image set will return a fairly offset Y-pixel for the optical center point - String base = TestUtils.getSquaresBoardImagesPath().toAbsolutePath().toString(); - File dir = Path.of(base, "gloworm", "1280_720_1").toFile(); - Size sz = new Size(1280, 720); - Size boardDim = new Size(9, 7); - calibrateSquaresCommon(sz, dir, boardDim, 640, 192); - } - - @Test - public void calibrateSquares_1920_1080_gloworm() { - // Gloworm Beta - // This image set has most samples on the right, and is expected to return a slightly - // wonky calibration. 
- String base = TestUtils.getSquaresBoardImagesPath().toAbsolutePath().toString(); - File dir = Path.of(base, "gloworm", "1920_1080_1").toFile(); - Size sz = new Size(1920, 1080); - Size boardDim = new Size(9, 7); - calibrateSquaresCommon(sz, dir, boardDim, 1311, 540); - } - - public void calibrateSquaresCommon(Size imgRes, File rootFolder) { - calibrateSquaresCommon(imgRes, rootFolder, new Size(8, 8)); - } - - public void calibrateSquaresCommon(Size imgRes, File rootFolder, Size boardDim) { - calibrateSquaresCommon( - imgRes, rootFolder, boardDim, Units.inchesToMeters(1), imgRes.width / 2, imgRes.height / 2); - } - - public void calibrateSquaresCommon( - Size imgRes, File rootFolder, Size boardDim, double expectedXCenter, double expectedYCenter) { - calibrateSquaresCommon( - imgRes, rootFolder, boardDim, Units.inchesToMeters(1), expectedXCenter, expectedYCenter); - } - - public void calibrateSquaresCommon( - Size imgRes, - File rootFolder, - Size boardDim, - double boardGridSize_m, - double expectedXCenter, - double expectedYCenter) { - int startMatCount = CVMat.getMatCount(); - - File[] directoryListing = rootFolder.listFiles(); - - assertTrue(directoryListing.length >= 25); - - Calibrate3dPipeline calibration3dPipeline = new Calibrate3dPipeline(20); - calibration3dPipeline.getSettings().boardType = UICalibrationData.BoardType.CHESSBOARD; - calibration3dPipeline.getSettings().resolution = imgRes; - calibration3dPipeline.getSettings().boardHeight = (int) Math.round(boardDim.height); - calibration3dPipeline.getSettings().boardWidth = (int) Math.round(boardDim.width); - calibration3dPipeline.getSettings().gridSize = boardGridSize_m; - calibration3dPipeline.getSettings().streamingFrameDivisor = FrameDivisor.NONE; - - for (var file : directoryListing) { - if (file.isFile()) { - calibration3dPipeline.takeSnapshot(); + @Test + public void calibrationPipelineTest() { + int startMatCount = CVMat.getMatCount(); + + File dir = new 
File(TestUtils.getDotBoardImagesPath().toAbsolutePath().toString()); + File[] directoryListing = dir.listFiles(); + + Calibrate3dPipeline calibration3dPipeline = new Calibrate3dPipeline(20); + calibration3dPipeline.getSettings().boardHeight = 11; + calibration3dPipeline.getSettings().boardWidth = 4; + calibration3dPipeline.getSettings().boardType = UICalibrationData.BoardType.DOTBOARD; + calibration3dPipeline.getSettings().gridSize = 15; + calibration3dPipeline.getSettings().resolution = new Size(640, 480); + + for (var file : directoryListing) { + calibration3dPipeline.takeSnapshot(); + var frame = + new Frame( + new CVMat(Imgcodecs.imread(file.getAbsolutePath())), + new CVMat(), + FrameThresholdType.NONE, + new FrameStaticProperties(640, 480, 60, null)); + var output = calibration3dPipeline.run(frame, QuirkyCamera.DefaultCamera); + // TestUtils.showImage(output.inputAndOutputFrame.processedImage.getMat()); + output.release(); + frame.release(); + } + + assertTrue( + calibration3dPipeline.foundCornersList.stream() + .map(Triple::getRight) + .allMatch(it -> it.width() > 0 && it.height() > 0)); + + calibration3dPipeline.removeSnapshot(0); var frame = - new Frame( - new CVMat(Imgcodecs.imread(file.getAbsolutePath())), - new CVMat(), - FrameThresholdType.NONE, - new FrameStaticProperties((int) imgRes.width, (int) imgRes.height, 67, null)); - var output = calibration3dPipeline.run(frame, QuirkyCamera.DefaultCamera); - - // TestUtils.showImage(output.inputAndOutputFrame.processedImage.getMat(), file.getName(), - // 1); - output.release(); + new Frame( + new CVMat(Imgcodecs.imread(directoryListing[0].getAbsolutePath())), + new CVMat(), + FrameThresholdType.NONE, + new FrameStaticProperties(640, 480, 60, null)); + calibration3dPipeline.run(frame, QuirkyCamera.DefaultCamera).release(); frame.release(); - } + + assertTrue( + calibration3dPipeline.foundCornersList.stream() + .map(Triple::getRight) + .allMatch(it -> it.width() > 0 && it.height() > 0)); + + var cal = 
calibration3dPipeline.tryCalibration(); + calibration3dPipeline.finishCalibration(); + + assertNotNull(cal); + assertNotNull(cal.perViewErrors); + System.out.println("Per View Errors: " + Arrays.toString(cal.perViewErrors)); + System.out.println("Camera Intrinsics: " + cal.cameraIntrinsics.toString()); + System.out.println("Dist Coeffs: " + cal.distCoeffs.toString()); + System.out.println("Standard Deviation: " + cal.standardDeviation); + System.out.println( + "Mean: " + Arrays.stream(calibration3dPipeline.perViewErrors()).average().toString()); + + // Confirm we didn't get leaky on our mat usage + // assertTrue(CVMat.getMatCount() == startMatCount); // TODO Figure out why this doesn't work in + // CI + System.out.println("CVMats left: " + CVMat.getMatCount() + " Start: " + startMatCount); + } + + @Test + public void calibrateSquares320x240_pi() { + // Pi3 and V1.3 camera + String base = TestUtils.getSquaresBoardImagesPath().toAbsolutePath().toString(); + File dir = Path.of(base, "piCam", "320_240_1").toFile(); + Size sz = new Size(320, 240); + calibrateSquaresCommon(sz, dir); + } + + @Test + public void calibrateSquares640x480_pi() { + // Pi3 and V1.3 camera + String base = TestUtils.getSquaresBoardImagesPath().toAbsolutePath().toString(); + File dir = Path.of(base, "piCam", "640_480_1").toFile(); + Size sz = new Size(640, 480); + calibrateSquaresCommon(sz, dir); + } + + @Test + public void calibrateSquares960x720_pi() { + // Pi3 and V1.3 camera + String base = TestUtils.getSquaresBoardImagesPath().toAbsolutePath().toString(); + File dir = Path.of(base, "piCam", "960_720_1").toFile(); + Size sz = new Size(960, 720); + calibrateSquaresCommon(sz, dir); + } + + @Test + public void calibrateSquares1920x1080_pi() { + // Pi3 and V1.3 camera + String base = TestUtils.getSquaresBoardImagesPath().toAbsolutePath().toString(); + File dir = Path.of(base, "piCam", "1920_1080_1").toFile(); + Size sz = new Size(1920, 1080); + calibrateSquaresCommon(sz, dir); + } + + @Test + 
public void calibrateSquares320x240_gloworm() { + // Gloworm Beta + String base = TestUtils.getSquaresBoardImagesPath().toAbsolutePath().toString(); + File dir = Path.of(base, "gloworm", "320_240_1").toFile(); + Size sz = new Size(320, 240); + Size boardDim = new Size(9, 7); + calibrateSquaresCommon(sz, dir, boardDim); + } + + @Test + public void calibrateSquares_960_720_gloworm() { + // Gloworm Beta + String base = TestUtils.getSquaresBoardImagesPath().toAbsolutePath().toString(); + File dir = Path.of(base, "gloworm", "960_720_1").toFile(); + Size sz = new Size(960, 720); + Size boardDim = new Size(9, 7); + calibrateSquaresCommon(sz, dir, boardDim); + } + + @Test + public void calibrateSquares_1280_720_gloworm() { + // Gloworm Beta + // This image set will return a fairly offset Y-pixel for the optical center point + String base = TestUtils.getSquaresBoardImagesPath().toAbsolutePath().toString(); + File dir = Path.of(base, "gloworm", "1280_720_1").toFile(); + Size sz = new Size(1280, 720); + Size boardDim = new Size(9, 7); + calibrateSquaresCommon(sz, dir, boardDim, 640, 192); + } + + @Test + public void calibrateSquares_1920_1080_gloworm() { + // Gloworm Beta + // This image set has most samples on the right, and is expected to return a slightly + // wonky calibration. 
+ String base = TestUtils.getSquaresBoardImagesPath().toAbsolutePath().toString(); + File dir = Path.of(base, "gloworm", "1920_1080_1").toFile(); + Size sz = new Size(1920, 1080); + Size boardDim = new Size(9, 7); + calibrateSquaresCommon(sz, dir, boardDim, 1311, 540); + } + + public void calibrateSquaresCommon(Size imgRes, File rootFolder) { + calibrateSquaresCommon(imgRes, rootFolder, new Size(8, 8)); + } + + public void calibrateSquaresCommon(Size imgRes, File rootFolder, Size boardDim) { + calibrateSquaresCommon( + imgRes, rootFolder, boardDim, Units.inchesToMeters(1), imgRes.width / 2, imgRes.height / 2); + } + + public void calibrateSquaresCommon( + Size imgRes, File rootFolder, Size boardDim, double expectedXCenter, double expectedYCenter) { + calibrateSquaresCommon( + imgRes, rootFolder, boardDim, Units.inchesToMeters(1), expectedXCenter, expectedYCenter); + } + + public void calibrateSquaresCommon( + Size imgRes, + File rootFolder, + Size boardDim, + double boardGridSize_m, + double expectedXCenter, + double expectedYCenter) { + int startMatCount = CVMat.getMatCount(); + + File[] directoryListing = rootFolder.listFiles(); + + assertTrue(directoryListing.length >= 25); + + Calibrate3dPipeline calibration3dPipeline = new Calibrate3dPipeline(20); + calibration3dPipeline.getSettings().boardType = UICalibrationData.BoardType.CHESSBOARD; + calibration3dPipeline.getSettings().resolution = imgRes; + calibration3dPipeline.getSettings().boardHeight = (int) Math.round(boardDim.height); + calibration3dPipeline.getSettings().boardWidth = (int) Math.round(boardDim.width); + calibration3dPipeline.getSettings().gridSize = boardGridSize_m; + calibration3dPipeline.getSettings().streamingFrameDivisor = FrameDivisor.NONE; + + for (var file : directoryListing) { + if (file.isFile()) { + calibration3dPipeline.takeSnapshot(); + var frame = + new Frame( + new CVMat(Imgcodecs.imread(file.getAbsolutePath())), + new CVMat(), + FrameThresholdType.NONE, + new 
FrameStaticProperties((int) imgRes.width, (int) imgRes.height, 67, null)); + var output = calibration3dPipeline.run(frame, QuirkyCamera.DefaultCamera); + + // TestUtils.showImage(output.inputAndOutputFrame.processedImage.getMat(), file.getName(), + // 1); + output.release(); + frame.release(); + } + } + + assertTrue( + calibration3dPipeline.foundCornersList.stream() + .map(Triple::getRight) + .allMatch(it -> it.width() > 0 && it.height() > 0)); + + var cal = calibration3dPipeline.tryCalibration(); + calibration3dPipeline.finishCalibration(); + + // visuallyDebugDistortion(directoryListing, imgRes, cal ); + + // Confirm we have indeed gotten valid calibration objects + assertNotNull(cal); + assertNotNull(cal.perViewErrors); + + // Confirm the calibrated center pixel is fairly close to of the "expected" location at the + // center of the sensor. + // For all our data samples so far, this should be true. + double centerXErrPct = + Math.abs(cal.cameraIntrinsics.data[2] - expectedXCenter) / (expectedXCenter) * 100.0; + double centerYErrPct = + Math.abs(cal.cameraIntrinsics.data[5] - expectedYCenter) / (expectedYCenter) * 100.0; + assertTrue(centerXErrPct < 10.0); + assertTrue(centerYErrPct < 10.0); + + System.out.println("Per View Errors: " + Arrays.toString(cal.perViewErrors)); + System.out.println("Camera Intrinsics: " + cal.cameraIntrinsics); + System.out.println("Dist Coeffs: " + cal.distCoeffs.toString()); + System.out.println("Standard Deviation: " + cal.standardDeviation); + System.out.println( + "Mean: " + Arrays.stream(calibration3dPipeline.perViewErrors()).average().toString()); + + // Confirm we didn't get leaky on our mat usage + // assertEquals(startMatCount, CVMat.getMatCount()); // TODO Figure out why this doesn't + // work in CI + System.out.println("CVMats left: " + CVMat.getMatCount() + " Start: " + startMatCount); } - assertTrue( - calibration3dPipeline.foundCornersList.stream() - .map(Triple::getRight) - .allMatch(it -> it.width() > 0 && it.height() 
> 0)); - - var cal = calibration3dPipeline.tryCalibration(); - calibration3dPipeline.finishCalibration(); - - // visuallyDebugDistortion(directoryListing, imgRes, cal ); - - // Confirm we have indeed gotten valid calibration objects - assertNotNull(cal); - assertNotNull(cal.perViewErrors); - - // Confirm the calibrated center pixel is fairly close to of the "expected" location at the - // center of the sensor. - // For all our data samples so far, this should be true. - double centerXErrPct = - Math.abs(cal.cameraIntrinsics.data[2] - expectedXCenter) / (expectedXCenter) * 100.0; - double centerYErrPct = - Math.abs(cal.cameraIntrinsics.data[5] - expectedYCenter) / (expectedYCenter) * 100.0; - assertTrue(centerXErrPct < 10.0); - assertTrue(centerYErrPct < 10.0); - - System.out.println("Per View Errors: " + Arrays.toString(cal.perViewErrors)); - System.out.println("Camera Intrinsics: " + cal.cameraIntrinsics); - System.out.println("Dist Coeffs: " + cal.distCoeffs.toString()); - System.out.println("Standard Deviation: " + cal.standardDeviation); - System.out.println( - "Mean: " + Arrays.stream(calibration3dPipeline.perViewErrors()).average().toString()); - - // Confirm we didn't get leaky on our mat usage - // assertEquals(startMatCount, CVMat.getMatCount()); // TODO Figure out why this doesn't - // work in CI - System.out.println("CVMats left: " + CVMat.getMatCount() + " Start: " + startMatCount); - } - - /** - * Uses a given camera coefficents matrix set to "undistort" every image file found in a given - * directory and display them. Provides an easy way to visually debug the results of the - * calibration routine. Seems to play havoc with CI and takes a chunk of time, so shouldn't - * usually be left active in tests. 
- * - * @param directoryListing - * @param imgRes - * @param cal - */ - @SuppressWarnings("unused") - private void visuallyDebugDistortion( - File[] directoryListing, Size imgRes, CameraCalibrationCoefficients cal) { - for (var file : directoryListing) { - if (file.isFile()) { - Mat raw = Imgcodecs.imread(file.getAbsolutePath()); - Mat undistorted = new Mat(new Size(imgRes.width * 2, imgRes.height * 2), raw.type()); - Calib3d.undistort( - raw, undistorted, cal.cameraIntrinsics.getAsMat(), cal.distCoeffs.getAsMat()); - TestUtils.showImage(undistorted, "undistorted " + file.getName(), 1); - raw.release(); - undistorted.release(); - } + /** + * Uses a given camera coefficents matrix set to "undistort" every image file found in a given + * directory and display them. Provides an easy way to visually debug the results of the + * calibration routine. Seems to play havoc with CI and takes a chunk of time, so shouldn't + * usually be left active in tests. + * + * @param directoryListing + * @param imgRes + * @param cal + */ + @SuppressWarnings("unused") + private void visuallyDebugDistortion( + File[] directoryListing, Size imgRes, CameraCalibrationCoefficients cal) { + for (var file : directoryListing) { + if (file.isFile()) { + Mat raw = Imgcodecs.imread(file.getAbsolutePath()); + Mat undistorted = new Mat(new Size(imgRes.width * 2, imgRes.height * 2), raw.type()); + Calib3d.undistort( + raw, undistorted, cal.cameraIntrinsics.getAsMat(), cal.distCoeffs.getAsMat()); + TestUtils.showImage(undistorted, "undistorted " + file.getName(), 1); + raw.release(); + undistorted.release(); + } + } } - } } diff --git a/photon-core/src/test/java/org/photonvision/vision/pipeline/CirclePNPTest.java b/photon-core/src/test/java/org/photonvision/vision/pipeline/CirclePNPTest.java index 357fdc3306..5a4bc4fcd5 100644 --- a/photon-core/src/test/java/org/photonvision/vision/pipeline/CirclePNPTest.java +++ b/photon-core/src/test/java/org/photonvision/vision/pipeline/CirclePNPTest.java @@ -37,134 
+37,134 @@ import org.photonvision.vision.target.TrackedTarget; public class CirclePNPTest { - private static final String LIFECAM_240P_CAL_FILE = "lifecam240p.json"; - private static final String LIFECAM_480P_CAL_FILE = "lifecam480p.json"; - - @BeforeEach - public void Init() { - TestUtils.loadLibraries(); - } - - @Test - public void loadCameraIntrinsics() { - var lifecam240pCal = getCoeffs(LIFECAM_240P_CAL_FILE); - var lifecam480pCal = getCoeffs(LIFECAM_480P_CAL_FILE); - - assertNotNull(lifecam240pCal); - checkCameraCoefficients(lifecam240pCal); - assertNotNull(lifecam480pCal); - checkCameraCoefficients(lifecam480pCal); - } - - private CameraCalibrationCoefficients getCoeffs(String filename) { - var cameraCalibration = TestUtils.getCoeffs(filename, false); - checkCameraCoefficients(cameraCalibration); - return cameraCalibration; - } - - private void checkCameraCoefficients(CameraCalibrationCoefficients cameraCalibration) { - assertNotNull(cameraCalibration); - assertEquals(3, cameraCalibration.cameraIntrinsics.rows); - assertEquals(3, cameraCalibration.cameraIntrinsics.cols); - assertEquals(3, cameraCalibration.cameraIntrinsics.getAsMat().rows()); - assertEquals(3, cameraCalibration.cameraIntrinsics.getAsMat().cols()); - assertEquals(3, cameraCalibration.cameraIntrinsics.getAsMatOfDouble().rows()); - assertEquals(3, cameraCalibration.cameraIntrinsics.getAsMatOfDouble().cols()); - assertEquals(3, cameraCalibration.getCameraIntrinsicsMat().rows()); - assertEquals(3, cameraCalibration.getCameraIntrinsicsMat().cols()); - assertEquals(1, cameraCalibration.distCoeffs.rows); - assertEquals(5, cameraCalibration.distCoeffs.cols); - assertEquals(1, cameraCalibration.distCoeffs.getAsMat().rows()); - assertEquals(5, cameraCalibration.distCoeffs.getAsMat().cols()); - assertEquals(1, cameraCalibration.distCoeffs.getAsMatOfDouble().rows()); - assertEquals(5, cameraCalibration.distCoeffs.getAsMatOfDouble().cols()); - assertEquals(1, cameraCalibration.getDistCoeffsMat().rows()); 
- assertEquals(5, cameraCalibration.getDistCoeffsMat().cols()); - } - - @Test - public void testCircle() { - var pipeline = new ColoredShapePipeline(); - - pipeline.getSettings().hsvHue.set(0, 100); - pipeline.getSettings().hsvSaturation.set(100, 255); - pipeline.getSettings().hsvValue.set(100, 255); - pipeline.getSettings().outputShouldDraw = true; - pipeline.getSettings().maxCannyThresh = 50; - pipeline.getSettings().circleAccuracy = 15; - pipeline.getSettings().circleDetectThreshold = 5; - pipeline.getSettings().solvePNPEnabled = true; - pipeline.getSettings().cornerDetectionAccuracyPercentage = 4; - pipeline.getSettings().cornerDetectionUseConvexHulls = true; - pipeline.getSettings().cameraCalibration = getCoeffs(LIFECAM_480P_CAL_FILE); - pipeline.getSettings().targetModel = TargetModel.kCircularPowerCell7in; - pipeline.getSettings().outputShouldDraw = true; - pipeline.getSettings().outputShowMultipleTargets = false; - pipeline.getSettings().contourGroupingMode = ContourGroupingMode.Single; - pipeline.getSettings().contourIntersection = ContourIntersectionDirection.Up; - pipeline.getSettings().contourShape = ContourShape.Circle; - pipeline.getSettings().circleDetectThreshold = 10; - pipeline.getSettings().contourRadius.setFirst(30); - pipeline.getSettings().accuracyPercentage = 30.0; - - var frameProvider = - new FileFrameProvider( - TestUtils.getPowercellImagePath(TestUtils.PowercellTestImages.kPowercell_test_6, false), - TestUtils.WPI2020Image.FOV, - TestUtils.get2020LifeCamCoeffs(true)); - frameProvider.requestFrameThresholdType(pipeline.getThresholdType()); - - CVPipelineResult pipelineResult = pipeline.run(frameProvider.get(), QuirkyCamera.DefaultCamera); - printTestResults(pipelineResult); - - TestUtils.showImage( - pipelineResult.inputAndOutputFrame.colorImage.getMat(), "Pipeline output", 999999); - } - - private static void continuouslyRunPipeline(Frame frame, ReflectivePipelineSettings settings) { - var pipeline = new ReflectivePipeline(); - 
pipeline.settings = settings; - - while (true) { - CVPipelineResult pipelineResult = pipeline.run(frame, QuirkyCamera.DefaultCamera); - printTestResults(pipelineResult); - int preRelease = CVMat.getMatCount(); - pipelineResult.release(); - int postRelease = CVMat.getMatCount(); - - System.out.printf("Pre: %d, Post: %d\n", preRelease, postRelease); + private static final String LIFECAM_240P_CAL_FILE = "lifecam240p.json"; + private static final String LIFECAM_480P_CAL_FILE = "lifecam480p.json"; + + @BeforeEach + public void Init() { + TestUtils.loadLibraries(); + } + + @Test + public void loadCameraIntrinsics() { + var lifecam240pCal = getCoeffs(LIFECAM_240P_CAL_FILE); + var lifecam480pCal = getCoeffs(LIFECAM_480P_CAL_FILE); + + assertNotNull(lifecam240pCal); + checkCameraCoefficients(lifecam240pCal); + assertNotNull(lifecam480pCal); + checkCameraCoefficients(lifecam480pCal); + } + + private CameraCalibrationCoefficients getCoeffs(String filename) { + var cameraCalibration = TestUtils.getCoeffs(filename, false); + checkCameraCoefficients(cameraCalibration); + return cameraCalibration; + } + + private void checkCameraCoefficients(CameraCalibrationCoefficients cameraCalibration) { + assertNotNull(cameraCalibration); + assertEquals(3, cameraCalibration.cameraIntrinsics.rows); + assertEquals(3, cameraCalibration.cameraIntrinsics.cols); + assertEquals(3, cameraCalibration.cameraIntrinsics.getAsMat().rows()); + assertEquals(3, cameraCalibration.cameraIntrinsics.getAsMat().cols()); + assertEquals(3, cameraCalibration.cameraIntrinsics.getAsMatOfDouble().rows()); + assertEquals(3, cameraCalibration.cameraIntrinsics.getAsMatOfDouble().cols()); + assertEquals(3, cameraCalibration.getCameraIntrinsicsMat().rows()); + assertEquals(3, cameraCalibration.getCameraIntrinsicsMat().cols()); + assertEquals(1, cameraCalibration.distCoeffs.rows); + assertEquals(5, cameraCalibration.distCoeffs.cols); + assertEquals(1, cameraCalibration.distCoeffs.getAsMat().rows()); + assertEquals(5, 
cameraCalibration.distCoeffs.getAsMat().cols()); + assertEquals(1, cameraCalibration.distCoeffs.getAsMatOfDouble().rows()); + assertEquals(5, cameraCalibration.distCoeffs.getAsMatOfDouble().cols()); + assertEquals(1, cameraCalibration.getDistCoeffsMat().rows()); + assertEquals(5, cameraCalibration.getDistCoeffsMat().cols()); + } + + @Test + public void testCircle() { + var pipeline = new ColoredShapePipeline(); + + pipeline.getSettings().hsvHue.set(0, 100); + pipeline.getSettings().hsvSaturation.set(100, 255); + pipeline.getSettings().hsvValue.set(100, 255); + pipeline.getSettings().outputShouldDraw = true; + pipeline.getSettings().maxCannyThresh = 50; + pipeline.getSettings().circleAccuracy = 15; + pipeline.getSettings().circleDetectThreshold = 5; + pipeline.getSettings().solvePNPEnabled = true; + pipeline.getSettings().cornerDetectionAccuracyPercentage = 4; + pipeline.getSettings().cornerDetectionUseConvexHulls = true; + pipeline.getSettings().cameraCalibration = getCoeffs(LIFECAM_480P_CAL_FILE); + pipeline.getSettings().targetModel = TargetModel.kCircularPowerCell7in; + pipeline.getSettings().outputShouldDraw = true; + pipeline.getSettings().outputShowMultipleTargets = false; + pipeline.getSettings().contourGroupingMode = ContourGroupingMode.Single; + pipeline.getSettings().contourIntersection = ContourIntersectionDirection.Up; + pipeline.getSettings().contourShape = ContourShape.Circle; + pipeline.getSettings().circleDetectThreshold = 10; + pipeline.getSettings().contourRadius.setFirst(30); + pipeline.getSettings().accuracyPercentage = 30.0; + + var frameProvider = + new FileFrameProvider( + TestUtils.getPowercellImagePath(TestUtils.PowercellTestImages.kPowercell_test_6, false), + TestUtils.WPI2020Image.FOV, + TestUtils.get2020LifeCamCoeffs(true)); + frameProvider.requestFrameThresholdType(pipeline.getThresholdType()); + + CVPipelineResult pipelineResult = pipeline.run(frameProvider.get(), QuirkyCamera.DefaultCamera); + printTestResults(pipelineResult); + + 
TestUtils.showImage( + pipelineResult.inputAndOutputFrame.colorImage.getMat(), "Pipeline output", 999999); + } + + private static void continuouslyRunPipeline(Frame frame, ReflectivePipelineSettings settings) { + var pipeline = new ReflectivePipeline(); + pipeline.settings = settings; + + while (true) { + CVPipelineResult pipelineResult = pipeline.run(frame, QuirkyCamera.DefaultCamera); + printTestResults(pipelineResult); + int preRelease = CVMat.getMatCount(); + pipelineResult.release(); + int postRelease = CVMat.getMatCount(); + + System.out.printf("Pre: %d, Post: %d\n", preRelease, postRelease); + } + } + + // used to run VisualVM for profiling, which won't run on unit tests. + public static void main(String[] args) { + TestUtils.loadLibraries(); + var frameProvider = + new FileFrameProvider( + TestUtils.getWPIImagePath(TestUtils.WPI2019Image.kCargoStraightDark72in_HighRes, false), + TestUtils.WPI2019Image.FOV); + + var settings = new ReflectivePipelineSettings(); + settings.hsvHue.set(60, 100); + settings.hsvSaturation.set(100, 255); + settings.hsvValue.set(190, 255); + settings.outputShouldDraw = true; + settings.outputShowMultipleTargets = true; + settings.contourGroupingMode = ContourGroupingMode.Dual; + settings.contourIntersection = ContourIntersectionDirection.Up; + + continuouslyRunPipeline(frameProvider.get(), settings); + } + + private static void printTestResults(CVPipelineResult pipelineResult) { + double fps = 1000 / pipelineResult.getLatencyMillis(); + System.out.println( + "Pipeline ran in " + pipelineResult.getLatencyMillis() + "ms (" + fps + " " + "fps)"); + System.out.println("Found " + pipelineResult.targets.size() + " valid targets"); + System.out.println( + "Found targets at " + + pipelineResult.targets.stream() + .map(TrackedTarget::getBestCameraToTarget3d) + .collect(Collectors.toList())); } - } - - // used to run VisualVM for profiling, which won't run on unit tests. 
- public static void main(String[] args) { - TestUtils.loadLibraries(); - var frameProvider = - new FileFrameProvider( - TestUtils.getWPIImagePath(TestUtils.WPI2019Image.kCargoStraightDark72in_HighRes, false), - TestUtils.WPI2019Image.FOV); - - var settings = new ReflectivePipelineSettings(); - settings.hsvHue.set(60, 100); - settings.hsvSaturation.set(100, 255); - settings.hsvValue.set(190, 255); - settings.outputShouldDraw = true; - settings.outputShowMultipleTargets = true; - settings.contourGroupingMode = ContourGroupingMode.Dual; - settings.contourIntersection = ContourIntersectionDirection.Up; - - continuouslyRunPipeline(frameProvider.get(), settings); - } - - private static void printTestResults(CVPipelineResult pipelineResult) { - double fps = 1000 / pipelineResult.getLatencyMillis(); - System.out.println( - "Pipeline ran in " + pipelineResult.getLatencyMillis() + "ms (" + fps + " " + "fps)"); - System.out.println("Found " + pipelineResult.targets.size() + " valid targets"); - System.out.println( - "Found targets at " - + pipelineResult.targets.stream() - .map(TrackedTarget::getBestCameraToTarget3d) - .collect(Collectors.toList())); - } } diff --git a/photon-core/src/test/java/org/photonvision/vision/pipeline/ColoredShapePipelineTest.java b/photon-core/src/test/java/org/photonvision/vision/pipeline/ColoredShapePipelineTest.java index 22da41004f..b896185624 100644 --- a/photon-core/src/test/java/org/photonvision/vision/pipeline/ColoredShapePipelineTest.java +++ b/photon-core/src/test/java/org/photonvision/vision/pipeline/ColoredShapePipelineTest.java @@ -28,98 +28,98 @@ import org.photonvision.vision.pipeline.result.CVPipelineResult; public class ColoredShapePipelineTest { - public static void testTriangleDetection( - ColoredShapePipeline pipeline, ColoredShapePipelineSettings settings, Frame frame) { - pipeline.settings = settings; - CVPipelineResult colouredShapePipelineResult = pipeline.run(frame, QuirkyCamera.DefaultCamera); - TestUtils.showImage( - 
colouredShapePipelineResult.inputAndOutputFrame.processedImage.getMat(), - "Pipeline output: Triangle."); - printTestResults(colouredShapePipelineResult); - } + public static void testTriangleDetection( + ColoredShapePipeline pipeline, ColoredShapePipelineSettings settings, Frame frame) { + pipeline.settings = settings; + CVPipelineResult colouredShapePipelineResult = pipeline.run(frame, QuirkyCamera.DefaultCamera); + TestUtils.showImage( + colouredShapePipelineResult.inputAndOutputFrame.processedImage.getMat(), + "Pipeline output: Triangle."); + printTestResults(colouredShapePipelineResult); + } - public static void testQuadrilateralDetection( - ColoredShapePipeline pipeline, ColoredShapePipelineSettings settings, Frame frame) { - settings.contourShape = ContourShape.Quadrilateral; - pipeline.settings = settings; - CVPipelineResult colouredShapePipelineResult = pipeline.run(frame, QuirkyCamera.DefaultCamera); - TestUtils.showImage( - colouredShapePipelineResult.inputAndOutputFrame.processedImage.getMat(), - "Pipeline output: Quadrilateral."); - printTestResults(colouredShapePipelineResult); - } + public static void testQuadrilateralDetection( + ColoredShapePipeline pipeline, ColoredShapePipelineSettings settings, Frame frame) { + settings.contourShape = ContourShape.Quadrilateral; + pipeline.settings = settings; + CVPipelineResult colouredShapePipelineResult = pipeline.run(frame, QuirkyCamera.DefaultCamera); + TestUtils.showImage( + colouredShapePipelineResult.inputAndOutputFrame.processedImage.getMat(), + "Pipeline output: Quadrilateral."); + printTestResults(colouredShapePipelineResult); + } - public static void testCustomShapeDetection( - ColoredShapePipeline pipeline, ColoredShapePipelineSettings settings, Frame frame) { - settings.contourShape = ContourShape.Custom; - pipeline.settings = settings; - CVPipelineResult colouredShapePipelineResult = pipeline.run(frame, QuirkyCamera.DefaultCamera); - TestUtils.showImage( - 
colouredShapePipelineResult.inputAndOutputFrame.processedImage.getMat(), - "Pipeline output: Custom."); - printTestResults(colouredShapePipelineResult); - } + public static void testCustomShapeDetection( + ColoredShapePipeline pipeline, ColoredShapePipelineSettings settings, Frame frame) { + settings.contourShape = ContourShape.Custom; + pipeline.settings = settings; + CVPipelineResult colouredShapePipelineResult = pipeline.run(frame, QuirkyCamera.DefaultCamera); + TestUtils.showImage( + colouredShapePipelineResult.inputAndOutputFrame.processedImage.getMat(), + "Pipeline output: Custom."); + printTestResults(colouredShapePipelineResult); + } - @Test - public static void testCircleShapeDetection( - ColoredShapePipeline pipeline, ColoredShapePipelineSettings settings, Frame frame) { - settings.contourShape = ContourShape.Circle; - pipeline.settings = settings; - CVPipelineResult colouredShapePipelineResult = pipeline.run(frame, QuirkyCamera.DefaultCamera); - TestUtils.showImage( - colouredShapePipelineResult.inputAndOutputFrame.processedImage.getMat(), - "Pipeline output: Circle."); - printTestResults(colouredShapePipelineResult); - } + @Test + public static void testCircleShapeDetection( + ColoredShapePipeline pipeline, ColoredShapePipelineSettings settings, Frame frame) { + settings.contourShape = ContourShape.Circle; + pipeline.settings = settings; + CVPipelineResult colouredShapePipelineResult = pipeline.run(frame, QuirkyCamera.DefaultCamera); + TestUtils.showImage( + colouredShapePipelineResult.inputAndOutputFrame.processedImage.getMat(), + "Pipeline output: Circle."); + printTestResults(colouredShapePipelineResult); + } - @Test - public static void testPowercellDetection( - ColoredShapePipelineSettings settings, ColoredShapePipeline pipeline) { - settings.hsvHue.set(10, 40); - settings.hsvSaturation.set(100, 255); - settings.hsvValue.set(100, 255); - settings.maxCannyThresh = 50; - settings.circleAccuracy = 15; - settings.circleDetectThreshold = 5; - var 
frameProvider = - new FileFrameProvider( - TestUtils.getPowercellImagePath(TestUtils.PowercellTestImages.kPowercell_test_6, false), - TestUtils.WPI2019Image.FOV); - testCircleShapeDetection(pipeline, settings, frameProvider.get()); - } + @Test + public static void testPowercellDetection( + ColoredShapePipelineSettings settings, ColoredShapePipeline pipeline) { + settings.hsvHue.set(10, 40); + settings.hsvSaturation.set(100, 255); + settings.hsvValue.set(100, 255); + settings.maxCannyThresh = 50; + settings.circleAccuracy = 15; + settings.circleDetectThreshold = 5; + var frameProvider = + new FileFrameProvider( + TestUtils.getPowercellImagePath(TestUtils.PowercellTestImages.kPowercell_test_6, false), + TestUtils.WPI2019Image.FOV); + testCircleShapeDetection(pipeline, settings, frameProvider.get()); + } - public static void main(String[] args) { - TestUtils.loadLibraries(); - System.out.println( - TestUtils.getWPIImagePath(TestUtils.WPI2020Image.kBlueGoal_108in_Center, false)); - var frameProvider = - new FileFrameProvider( - TestUtils.getPolygonImagePath(TestUtils.PolygonTestImages.kPolygons, false), - TestUtils.WPI2019Image.FOV); - var settings = new ColoredShapePipelineSettings(); - settings.hsvHue.set(0, 100); - settings.hsvSaturation.set(100, 255); - settings.hsvValue.set(100, 255); - settings.outputShouldDraw = true; - settings.outputShowMultipleTargets = true; - settings.contourGroupingMode = ContourGroupingMode.Single; - settings.contourIntersection = ContourIntersectionDirection.Up; - settings.contourShape = ContourShape.Triangle; - settings.circleDetectThreshold = 10; - settings.accuracyPercentage = 30.0; + public static void main(String[] args) { + TestUtils.loadLibraries(); + System.out.println( + TestUtils.getWPIImagePath(TestUtils.WPI2020Image.kBlueGoal_108in_Center, false)); + var frameProvider = + new FileFrameProvider( + TestUtils.getPolygonImagePath(TestUtils.PolygonTestImages.kPolygons, false), + TestUtils.WPI2019Image.FOV); + var settings = new 
ColoredShapePipelineSettings(); + settings.hsvHue.set(0, 100); + settings.hsvSaturation.set(100, 255); + settings.hsvValue.set(100, 255); + settings.outputShouldDraw = true; + settings.outputShowMultipleTargets = true; + settings.contourGroupingMode = ContourGroupingMode.Single; + settings.contourIntersection = ContourIntersectionDirection.Up; + settings.contourShape = ContourShape.Triangle; + settings.circleDetectThreshold = 10; + settings.accuracyPercentage = 30.0; - ColoredShapePipeline pipeline = new ColoredShapePipeline(); - testTriangleDetection(pipeline, settings, frameProvider.get()); - testQuadrilateralDetection(pipeline, settings, frameProvider.get()); - testCustomShapeDetection(pipeline, settings, frameProvider.get()); - // testCircleShapeDetection(pipeline, settings, frameProvider.get()); - // testPowercellDetection(settings, pipeline); - } + ColoredShapePipeline pipeline = new ColoredShapePipeline(); + testTriangleDetection(pipeline, settings, frameProvider.get()); + testQuadrilateralDetection(pipeline, settings, frameProvider.get()); + testCustomShapeDetection(pipeline, settings, frameProvider.get()); + // testCircleShapeDetection(pipeline, settings, frameProvider.get()); + // testPowercellDetection(settings, pipeline); + } - private static void printTestResults(CVPipelineResult pipelineResult) { - double fps = 1000 / pipelineResult.getLatencyMillis(); - System.out.print( - "Pipeline ran in " + pipelineResult.getLatencyMillis() + "ms (" + fps + " fps), "); - System.out.println("Found " + pipelineResult.targets.size() + " valid targets"); - } + private static void printTestResults(CVPipelineResult pipelineResult) { + double fps = 1000 / pipelineResult.getLatencyMillis(); + System.out.print( + "Pipeline ran in " + pipelineResult.getLatencyMillis() + "ms (" + fps + " fps), "); + System.out.println("Found " + pipelineResult.targets.size() + " valid targets"); + } } diff --git 
a/photon-core/src/test/java/org/photonvision/vision/pipeline/PipelineProfilerTest.java b/photon-core/src/test/java/org/photonvision/vision/pipeline/PipelineProfilerTest.java index 79d75b5f1f..dbabd15e63 100644 --- a/photon-core/src/test/java/org/photonvision/vision/pipeline/PipelineProfilerTest.java +++ b/photon-core/src/test/java/org/photonvision/vision/pipeline/PipelineProfilerTest.java @@ -21,21 +21,21 @@ import org.junit.jupiter.api.Test; public class PipelineProfilerTest { - @Test - public void reflectiveProfile() { - long[] invalidNanos = new long[20]; - long[] validNanos = new long[PipelineProfiler.ReflectivePipeCount]; + @Test + public void reflectiveProfile() { + long[] invalidNanos = new long[20]; + long[] validNanos = new long[PipelineProfiler.ReflectivePipeCount]; - for (int i = 0; i < validNanos.length; i++) { - validNanos[i] = (long) (i * 1e+6); // fill data - } + for (int i = 0; i < validNanos.length; i++) { + validNanos[i] = (long) (i * 1e+6); // fill data + } - var invalidResult = PipelineProfiler.getReflectiveProfileString(invalidNanos); - var validResult = PipelineProfiler.getReflectiveProfileString(validNanos); + var invalidResult = PipelineProfiler.getReflectiveProfileString(invalidNanos); + var validResult = PipelineProfiler.getReflectiveProfileString(validNanos); - System.out.println(validResult); + System.out.println(validResult); - Assertions.assertEquals("Invalid data", invalidResult); - Assertions.assertTrue(validResult.contains("Total: 45.0ms")); - } + Assertions.assertEquals("Invalid data", invalidResult); + Assertions.assertTrue(validResult.contains("Total: 45.0ms")); + } } diff --git a/photon-core/src/test/java/org/photonvision/vision/pipeline/ReflectivePipelineTest.java b/photon-core/src/test/java/org/photonvision/vision/pipeline/ReflectivePipelineTest.java index 1c6f109b82..0817cf37dc 100644 --- a/photon-core/src/test/java/org/photonvision/vision/pipeline/ReflectivePipelineTest.java +++ 
b/photon-core/src/test/java/org/photonvision/vision/pipeline/ReflectivePipelineTest.java @@ -30,104 +30,104 @@ import org.photonvision.vision.pipeline.result.CVPipelineResult; public class ReflectivePipelineTest { - @Test - public void test2019() { - TestUtils.loadLibraries(); - var pipeline = new ReflectivePipeline(); - pipeline.getSettings().hsvHue.set(60, 100); - pipeline.getSettings().hsvSaturation.set(100, 255); - pipeline.getSettings().hsvValue.set(190, 255); - pipeline.getSettings().outputShouldDraw = true; - pipeline.getSettings().outputShowMultipleTargets = true; - pipeline.getSettings().contourGroupingMode = ContourGroupingMode.Dual; - pipeline.getSettings().contourIntersection = ContourIntersectionDirection.Up; - - var frameProvider = - new FileFrameProvider( - TestUtils.getWPIImagePath(TestUtils.WPI2019Image.kCargoStraightDark72in_HighRes, false), - TestUtils.WPI2019Image.FOV); - frameProvider.requestFrameThresholdType(pipeline.getThresholdType()); - var hsvParams = - new HSVPipe.HSVParams( - pipeline.getSettings().hsvHue, - pipeline.getSettings().hsvSaturation, - pipeline.getSettings().hsvValue, - pipeline.getSettings().hueInverted); - frameProvider.requestHsvSettings(hsvParams); - - TestUtils.showImage(frameProvider.get().colorImage.getMat(), "Pipeline input", 1); - - CVPipelineResult pipelineResult; - - pipelineResult = pipeline.run(frameProvider.get(), QuirkyCamera.DefaultCamera); - printTestResults(pipelineResult); - - Assertions.assertTrue(pipelineResult.hasTargets()); - Assertions.assertEquals(2, pipelineResult.targets.size(), "Target count wrong!"); - - TestUtils.showImage(pipelineResult.inputAndOutputFrame.colorImage.getMat(), "Pipeline output"); - } - - @Test - public void test2020() { - TestUtils.loadLibraries(); - var pipeline = new ReflectivePipeline(); - - pipeline.getSettings().hsvHue.set(60, 100); - pipeline.getSettings().hsvSaturation.set(200, 255); - pipeline.getSettings().hsvValue.set(200, 255); - 
pipeline.getSettings().outputShouldDraw = true; - - var frameProvider = - new FileFrameProvider( - TestUtils.getWPIImagePath(TestUtils.WPI2020Image.kBlueGoal_108in_Center, false), - TestUtils.WPI2020Image.FOV); - - CVPipelineResult pipelineResult = pipeline.run(frameProvider.get(), QuirkyCamera.DefaultCamera); - printTestResults(pipelineResult); - - TestUtils.showImage( - pipelineResult.inputAndOutputFrame.processedImage.getMat(), "Pipeline output"); - } - - private static void continuouslyRunPipeline(Frame frame, ReflectivePipelineSettings settings) { - var pipeline = new ReflectivePipeline(); - - while (true) { - CVPipelineResult pipelineResult = pipeline.run(frame, QuirkyCamera.DefaultCamera); - printTestResults(pipelineResult); - int preRelease = CVMat.getMatCount(); - pipelineResult.release(); - int postRelease = CVMat.getMatCount(); - - System.out.printf("Pre: %d, Post: %d\n", preRelease, postRelease); + @Test + public void test2019() { + TestUtils.loadLibraries(); + var pipeline = new ReflectivePipeline(); + pipeline.getSettings().hsvHue.set(60, 100); + pipeline.getSettings().hsvSaturation.set(100, 255); + pipeline.getSettings().hsvValue.set(190, 255); + pipeline.getSettings().outputShouldDraw = true; + pipeline.getSettings().outputShowMultipleTargets = true; + pipeline.getSettings().contourGroupingMode = ContourGroupingMode.Dual; + pipeline.getSettings().contourIntersection = ContourIntersectionDirection.Up; + + var frameProvider = + new FileFrameProvider( + TestUtils.getWPIImagePath(TestUtils.WPI2019Image.kCargoStraightDark72in_HighRes, false), + TestUtils.WPI2019Image.FOV); + frameProvider.requestFrameThresholdType(pipeline.getThresholdType()); + var hsvParams = + new HSVPipe.HSVParams( + pipeline.getSettings().hsvHue, + pipeline.getSettings().hsvSaturation, + pipeline.getSettings().hsvValue, + pipeline.getSettings().hueInverted); + frameProvider.requestHsvSettings(hsvParams); + + TestUtils.showImage(frameProvider.get().colorImage.getMat(), "Pipeline 
input", 1); + + CVPipelineResult pipelineResult; + + pipelineResult = pipeline.run(frameProvider.get(), QuirkyCamera.DefaultCamera); + printTestResults(pipelineResult); + + Assertions.assertTrue(pipelineResult.hasTargets()); + Assertions.assertEquals(2, pipelineResult.targets.size(), "Target count wrong!"); + + TestUtils.showImage(pipelineResult.inputAndOutputFrame.colorImage.getMat(), "Pipeline output"); + } + + @Test + public void test2020() { + TestUtils.loadLibraries(); + var pipeline = new ReflectivePipeline(); + + pipeline.getSettings().hsvHue.set(60, 100); + pipeline.getSettings().hsvSaturation.set(200, 255); + pipeline.getSettings().hsvValue.set(200, 255); + pipeline.getSettings().outputShouldDraw = true; + + var frameProvider = + new FileFrameProvider( + TestUtils.getWPIImagePath(TestUtils.WPI2020Image.kBlueGoal_108in_Center, false), + TestUtils.WPI2020Image.FOV); + + CVPipelineResult pipelineResult = pipeline.run(frameProvider.get(), QuirkyCamera.DefaultCamera); + printTestResults(pipelineResult); + + TestUtils.showImage( + pipelineResult.inputAndOutputFrame.processedImage.getMat(), "Pipeline output"); + } + + private static void continuouslyRunPipeline(Frame frame, ReflectivePipelineSettings settings) { + var pipeline = new ReflectivePipeline(); + + while (true) { + CVPipelineResult pipelineResult = pipeline.run(frame, QuirkyCamera.DefaultCamera); + printTestResults(pipelineResult); + int preRelease = CVMat.getMatCount(); + pipelineResult.release(); + int postRelease = CVMat.getMatCount(); + + System.out.printf("Pre: %d, Post: %d\n", preRelease, postRelease); + } + } + + // used to run VisualVM for profiling. It won't run on unit tests. 
+ public static void main(String[] args) { + TestUtils.loadLibraries(); + var frameProvider = + new FileFrameProvider( + TestUtils.getWPIImagePath(TestUtils.WPI2019Image.kCargoStraightDark72in_HighRes, false), + TestUtils.WPI2019Image.FOV); + + var settings = new ReflectivePipelineSettings(); + settings.hsvHue.set(60, 100); + settings.hsvSaturation.set(100, 255); + settings.hsvValue.set(190, 255); + settings.outputShouldDraw = true; + settings.outputShowMultipleTargets = true; + settings.contourGroupingMode = ContourGroupingMode.Dual; + settings.contourIntersection = ContourIntersectionDirection.Up; + + continuouslyRunPipeline(frameProvider.get(), settings); + } + + private static void printTestResults(CVPipelineResult pipelineResult) { + double fps = 1000 / pipelineResult.getLatencyMillis(); + System.out.print( + "Pipeline ran in " + pipelineResult.getLatencyMillis() + "ms (" + fps + " fps), "); + System.out.println("Found " + pipelineResult.targets.size() + " valid targets"); } - } - - // used to run VisualVM for profiling. It won't run on unit tests. 
- public static void main(String[] args) { - TestUtils.loadLibraries(); - var frameProvider = - new FileFrameProvider( - TestUtils.getWPIImagePath(TestUtils.WPI2019Image.kCargoStraightDark72in_HighRes, false), - TestUtils.WPI2019Image.FOV); - - var settings = new ReflectivePipelineSettings(); - settings.hsvHue.set(60, 100); - settings.hsvSaturation.set(100, 255); - settings.hsvValue.set(190, 255); - settings.outputShouldDraw = true; - settings.outputShowMultipleTargets = true; - settings.contourGroupingMode = ContourGroupingMode.Dual; - settings.contourIntersection = ContourIntersectionDirection.Up; - - continuouslyRunPipeline(frameProvider.get(), settings); - } - - private static void printTestResults(CVPipelineResult pipelineResult) { - double fps = 1000 / pipelineResult.getLatencyMillis(); - System.out.print( - "Pipeline ran in " + pipelineResult.getLatencyMillis() + "ms (" + fps + " fps), "); - System.out.println("Found " + pipelineResult.targets.size() + " valid targets"); - } } diff --git a/photon-core/src/test/java/org/photonvision/vision/pipeline/SolvePNPTest.java b/photon-core/src/test/java/org/photonvision/vision/pipeline/SolvePNPTest.java index 5bc2541187..03bcc2535d 100644 --- a/photon-core/src/test/java/org/photonvision/vision/pipeline/SolvePNPTest.java +++ b/photon-core/src/test/java/org/photonvision/vision/pipeline/SolvePNPTest.java @@ -41,211 +41,211 @@ import org.photonvision.vision.target.TrackedTarget; public class SolvePNPTest { - private static final String LIFECAM_240P_CAL_FILE = "lifecam240p.json"; - private static final String LIFECAM_480P_CAL_FILE = "lifecam480p.json"; - - @BeforeEach - public void Init() { - TestUtils.loadLibraries(); - } - - @Test - public void loadCameraIntrinsics() { - var lifecam240pCal = getCoeffs(LIFECAM_240P_CAL_FILE); - var lifecam480pCal = getCoeffs(LIFECAM_480P_CAL_FILE); - - assertNotNull(lifecam240pCal); - checkCameraCoefficients(lifecam240pCal); - assertNotNull(lifecam480pCal); - 
checkCameraCoefficients(lifecam480pCal); - } - - private CameraCalibrationCoefficients getCoeffs(String filename) { - var cameraCalibration = TestUtils.getCoeffs(filename, false); - checkCameraCoefficients(cameraCalibration); - return cameraCalibration; - } - - private void checkCameraCoefficients(CameraCalibrationCoefficients cameraCalibration) { - assertNotNull(cameraCalibration); - assertEquals(3, cameraCalibration.cameraIntrinsics.rows); - assertEquals(3, cameraCalibration.cameraIntrinsics.cols); - assertEquals(3, cameraCalibration.cameraIntrinsics.getAsMat().rows()); - assertEquals(3, cameraCalibration.cameraIntrinsics.getAsMat().cols()); - assertEquals(3, cameraCalibration.cameraIntrinsics.getAsMatOfDouble().rows()); - assertEquals(3, cameraCalibration.cameraIntrinsics.getAsMatOfDouble().cols()); - assertEquals(3, cameraCalibration.getCameraIntrinsicsMat().rows()); - assertEquals(3, cameraCalibration.getCameraIntrinsicsMat().cols()); - assertEquals(1, cameraCalibration.distCoeffs.rows); - assertEquals(5, cameraCalibration.distCoeffs.cols); - assertEquals(1, cameraCalibration.distCoeffs.getAsMat().rows()); - assertEquals(5, cameraCalibration.distCoeffs.getAsMat().cols()); - assertEquals(1, cameraCalibration.distCoeffs.getAsMatOfDouble().rows()); - assertEquals(5, cameraCalibration.distCoeffs.getAsMatOfDouble().cols()); - assertEquals(1, cameraCalibration.getDistCoeffsMat().rows()); - assertEquals(5, cameraCalibration.getDistCoeffsMat().cols()); - } - - @Test - public void test2019() { - var pipeline = new ReflectivePipeline(); - - pipeline.getSettings().hsvHue.set(60, 100); - pipeline.getSettings().hsvSaturation.set(100, 255); - pipeline.getSettings().hsvValue.set(190, 255); - pipeline.getSettings().outputShouldDraw = true; - pipeline.getSettings().outputShowMultipleTargets = true; - pipeline.getSettings().solvePNPEnabled = true; - pipeline.getSettings().contourGroupingMode = ContourGroupingMode.Dual; - pipeline.getSettings().contourIntersection = 
ContourIntersectionDirection.Up; - pipeline.getSettings().cornerDetectionUseConvexHulls = true; - pipeline.getSettings().targetModel = TargetModel.k2019DualTarget; - - var frameProvider = - new FileFrameProvider( - TestUtils.getWPIImagePath(TestUtils.WPI2019Image.kCargoStraightDark48in, false), - TestUtils.WPI2019Image.FOV, - TestUtils.get2019LifeCamCoeffs(false)); - - frameProvider.requestFrameThresholdType(pipeline.getThresholdType()); - var hsvParams = - new HSVPipe.HSVParams( - pipeline.getSettings().hsvHue, - pipeline.getSettings().hsvSaturation, - pipeline.getSettings().hsvValue, - pipeline.getSettings().hueInverted); - frameProvider.requestHsvSettings(hsvParams); - - CVPipelineResult pipelineResult = pipeline.run(frameProvider.get(), QuirkyCamera.DefaultCamera); - printTestResults(pipelineResult); - - // Draw on input - var outputPipe = new OutputStreamPipeline(); - outputPipe.process( - pipelineResult.inputAndOutputFrame, pipeline.getSettings(), pipelineResult.targets); - - TestUtils.showImage( - pipelineResult.inputAndOutputFrame.processedImage.getMat(), "Pipeline output", 999999); - - var pose = pipelineResult.targets.get(0).getBestCameraToTarget3d(); - // these numbers are not *accurate*, but they are known and expected - var expectedTrl = new Translation3d(1.1, -0.05, -0.05); - assertTrue( - expectedTrl.getDistance(pose.getTranslation()) < 0.05, - "SolvePNP translation estimation failed"); - // We expect the object axes to be in NWU, with the x-axis coming out of the tag - // This target is facing the camera almost parallel, so in world space: - // The object's X axis should be (-1, 0, 0) - assertEquals(-1, new Translation3d(1, 0, 0).rotateBy(pose.getRotation()).getX(), 0.05); - // The object's Y axis should be (0, -1, 0) - assertEquals(-1, new Translation3d(0, 1, 0).rotateBy(pose.getRotation()).getY(), 0.05); - // The object's Z axis should be (0, 0, 1) - assertEquals(1, new Translation3d(0, 0, 1).rotateBy(pose.getRotation()).getZ(), 0.05); - } - - 
@Test - public void test2020() { - var pipeline = new ReflectivePipeline(); - - pipeline.getSettings().hsvHue.set(60, 100); - pipeline.getSettings().hsvSaturation.set(100, 255); - pipeline.getSettings().hsvValue.set(60, 255); - pipeline.getSettings().outputShouldDraw = true; - pipeline.getSettings().solvePNPEnabled = true; - pipeline.getSettings().cornerDetectionAccuracyPercentage = 4; - pipeline.getSettings().cornerDetectionUseConvexHulls = true; - pipeline.getSettings().targetModel = TargetModel.k2020HighGoalOuter; - - var frameProvider = - new FileFrameProvider( - TestUtils.getWPIImagePath(TestUtils.WPI2020Image.kBlueGoal_224in_Left, false), - TestUtils.WPI2020Image.FOV, - TestUtils.get2020LifeCamCoeffs(false)); - - frameProvider.requestFrameThresholdType(pipeline.getThresholdType()); - var hsvParams = - new HSVPipe.HSVParams( - pipeline.getSettings().hsvHue, - pipeline.getSettings().hsvSaturation, - pipeline.getSettings().hsvValue, - pipeline.getSettings().hueInverted); - frameProvider.requestHsvSettings(hsvParams); - - CVPipelineResult pipelineResult = pipeline.run(frameProvider.get(), QuirkyCamera.DefaultCamera); - printTestResults(pipelineResult); - - // Draw on input - var outputPipe = new OutputStreamPipeline(); - outputPipe.process( - pipelineResult.inputAndOutputFrame, pipeline.getSettings(), pipelineResult.targets); - - TestUtils.showImage( - pipelineResult.inputAndOutputFrame.processedImage.getMat(), "Pipeline output", 999999); - - var pose = pipelineResult.targets.get(0).getBestCameraToTarget3d(); - // these numbers are not *accurate*, but they are known and expected - var expectedTrl = - new Translation3d( - Units.inchesToMeters(236), Units.inchesToMeters(36), Units.inchesToMeters(-53)); - assertTrue( - expectedTrl.getDistance(pose.getTranslation()) < 0.05, - "SolvePNP translation estimation failed"); - // We expect the object axes to be in NWU, with the x-axis coming out of the tag - // Rotation around Z axis (yaw) should be mostly facing us - var 
xAxis = new Translation3d(1, 0, 0); - var yAxis = new Translation3d(0, 1, 0); - var zAxis = new Translation3d(0, 0, 1); - var expectedRot = - new Rotation3d(Math.toRadians(-20), Math.toRadians(-20), Math.toRadians(-120)); - assertTrue(xAxis.rotateBy(expectedRot).getDistance(xAxis.rotateBy(pose.getRotation())) < 0.1); - assertTrue(yAxis.rotateBy(expectedRot).getDistance(yAxis.rotateBy(pose.getRotation())) < 0.1); - assertTrue(zAxis.rotateBy(expectedRot).getDistance(zAxis.rotateBy(pose.getRotation())) < 0.1); - } - - private static void continuouslyRunPipeline(Frame frame, ReflectivePipelineSettings settings) { - var pipeline = new ReflectivePipeline(); - pipeline.settings = settings; - - while (true) { - CVPipelineResult pipelineResult = pipeline.run(frame, QuirkyCamera.DefaultCamera); - printTestResults(pipelineResult); - int preRelease = CVMat.getMatCount(); - pipelineResult.release(); - int postRelease = CVMat.getMatCount(); - - System.out.printf("Pre: %d, Post: %d\n", preRelease, postRelease); + private static final String LIFECAM_240P_CAL_FILE = "lifecam240p.json"; + private static final String LIFECAM_480P_CAL_FILE = "lifecam480p.json"; + + @BeforeEach + public void Init() { + TestUtils.loadLibraries(); + } + + @Test + public void loadCameraIntrinsics() { + var lifecam240pCal = getCoeffs(LIFECAM_240P_CAL_FILE); + var lifecam480pCal = getCoeffs(LIFECAM_480P_CAL_FILE); + + assertNotNull(lifecam240pCal); + checkCameraCoefficients(lifecam240pCal); + assertNotNull(lifecam480pCal); + checkCameraCoefficients(lifecam480pCal); + } + + private CameraCalibrationCoefficients getCoeffs(String filename) { + var cameraCalibration = TestUtils.getCoeffs(filename, false); + checkCameraCoefficients(cameraCalibration); + return cameraCalibration; + } + + private void checkCameraCoefficients(CameraCalibrationCoefficients cameraCalibration) { + assertNotNull(cameraCalibration); + assertEquals(3, cameraCalibration.cameraIntrinsics.rows); + assertEquals(3, 
cameraCalibration.cameraIntrinsics.cols); + assertEquals(3, cameraCalibration.cameraIntrinsics.getAsMat().rows()); + assertEquals(3, cameraCalibration.cameraIntrinsics.getAsMat().cols()); + assertEquals(3, cameraCalibration.cameraIntrinsics.getAsMatOfDouble().rows()); + assertEquals(3, cameraCalibration.cameraIntrinsics.getAsMatOfDouble().cols()); + assertEquals(3, cameraCalibration.getCameraIntrinsicsMat().rows()); + assertEquals(3, cameraCalibration.getCameraIntrinsicsMat().cols()); + assertEquals(1, cameraCalibration.distCoeffs.rows); + assertEquals(5, cameraCalibration.distCoeffs.cols); + assertEquals(1, cameraCalibration.distCoeffs.getAsMat().rows()); + assertEquals(5, cameraCalibration.distCoeffs.getAsMat().cols()); + assertEquals(1, cameraCalibration.distCoeffs.getAsMatOfDouble().rows()); + assertEquals(5, cameraCalibration.distCoeffs.getAsMatOfDouble().cols()); + assertEquals(1, cameraCalibration.getDistCoeffsMat().rows()); + assertEquals(5, cameraCalibration.getDistCoeffsMat().cols()); + } + + @Test + public void test2019() { + var pipeline = new ReflectivePipeline(); + + pipeline.getSettings().hsvHue.set(60, 100); + pipeline.getSettings().hsvSaturation.set(100, 255); + pipeline.getSettings().hsvValue.set(190, 255); + pipeline.getSettings().outputShouldDraw = true; + pipeline.getSettings().outputShowMultipleTargets = true; + pipeline.getSettings().solvePNPEnabled = true; + pipeline.getSettings().contourGroupingMode = ContourGroupingMode.Dual; + pipeline.getSettings().contourIntersection = ContourIntersectionDirection.Up; + pipeline.getSettings().cornerDetectionUseConvexHulls = true; + pipeline.getSettings().targetModel = TargetModel.k2019DualTarget; + + var frameProvider = + new FileFrameProvider( + TestUtils.getWPIImagePath(TestUtils.WPI2019Image.kCargoStraightDark48in, false), + TestUtils.WPI2019Image.FOV, + TestUtils.get2019LifeCamCoeffs(false)); + + frameProvider.requestFrameThresholdType(pipeline.getThresholdType()); + var hsvParams = + new 
HSVPipe.HSVParams( + pipeline.getSettings().hsvHue, + pipeline.getSettings().hsvSaturation, + pipeline.getSettings().hsvValue, + pipeline.getSettings().hueInverted); + frameProvider.requestHsvSettings(hsvParams); + + CVPipelineResult pipelineResult = pipeline.run(frameProvider.get(), QuirkyCamera.DefaultCamera); + printTestResults(pipelineResult); + + // Draw on input + var outputPipe = new OutputStreamPipeline(); + outputPipe.process( + pipelineResult.inputAndOutputFrame, pipeline.getSettings(), pipelineResult.targets); + + TestUtils.showImage( + pipelineResult.inputAndOutputFrame.processedImage.getMat(), "Pipeline output", 999999); + + var pose = pipelineResult.targets.get(0).getBestCameraToTarget3d(); + // these numbers are not *accurate*, but they are known and expected + var expectedTrl = new Translation3d(1.1, -0.05, -0.05); + assertTrue( + expectedTrl.getDistance(pose.getTranslation()) < 0.05, + "SolvePNP translation estimation failed"); + // We expect the object axes to be in NWU, with the x-axis coming out of the tag + // This target is facing the camera almost parallel, so in world space: + // The object's X axis should be (-1, 0, 0) + assertEquals(-1, new Translation3d(1, 0, 0).rotateBy(pose.getRotation()).getX(), 0.05); + // The object's Y axis should be (0, -1, 0) + assertEquals(-1, new Translation3d(0, 1, 0).rotateBy(pose.getRotation()).getY(), 0.05); + // The object's Z axis should be (0, 0, 1) + assertEquals(1, new Translation3d(0, 0, 1).rotateBy(pose.getRotation()).getZ(), 0.05); + } + + @Test + public void test2020() { + var pipeline = new ReflectivePipeline(); + + pipeline.getSettings().hsvHue.set(60, 100); + pipeline.getSettings().hsvSaturation.set(100, 255); + pipeline.getSettings().hsvValue.set(60, 255); + pipeline.getSettings().outputShouldDraw = true; + pipeline.getSettings().solvePNPEnabled = true; + pipeline.getSettings().cornerDetectionAccuracyPercentage = 4; + pipeline.getSettings().cornerDetectionUseConvexHulls = true; + 
pipeline.getSettings().targetModel = TargetModel.k2020HighGoalOuter; + + var frameProvider = + new FileFrameProvider( + TestUtils.getWPIImagePath(TestUtils.WPI2020Image.kBlueGoal_224in_Left, false), + TestUtils.WPI2020Image.FOV, + TestUtils.get2020LifeCamCoeffs(false)); + + frameProvider.requestFrameThresholdType(pipeline.getThresholdType()); + var hsvParams = + new HSVPipe.HSVParams( + pipeline.getSettings().hsvHue, + pipeline.getSettings().hsvSaturation, + pipeline.getSettings().hsvValue, + pipeline.getSettings().hueInverted); + frameProvider.requestHsvSettings(hsvParams); + + CVPipelineResult pipelineResult = pipeline.run(frameProvider.get(), QuirkyCamera.DefaultCamera); + printTestResults(pipelineResult); + + // Draw on input + var outputPipe = new OutputStreamPipeline(); + outputPipe.process( + pipelineResult.inputAndOutputFrame, pipeline.getSettings(), pipelineResult.targets); + + TestUtils.showImage( + pipelineResult.inputAndOutputFrame.processedImage.getMat(), "Pipeline output", 999999); + + var pose = pipelineResult.targets.get(0).getBestCameraToTarget3d(); + // these numbers are not *accurate*, but they are known and expected + var expectedTrl = + new Translation3d( + Units.inchesToMeters(236), Units.inchesToMeters(36), Units.inchesToMeters(-53)); + assertTrue( + expectedTrl.getDistance(pose.getTranslation()) < 0.05, + "SolvePNP translation estimation failed"); + // We expect the object axes to be in NWU, with the x-axis coming out of the tag + // Rotation around Z axis (yaw) should be mostly facing us + var xAxis = new Translation3d(1, 0, 0); + var yAxis = new Translation3d(0, 1, 0); + var zAxis = new Translation3d(0, 0, 1); + var expectedRot = + new Rotation3d(Math.toRadians(-20), Math.toRadians(-20), Math.toRadians(-120)); + assertTrue(xAxis.rotateBy(expectedRot).getDistance(xAxis.rotateBy(pose.getRotation())) < 0.1); + assertTrue(yAxis.rotateBy(expectedRot).getDistance(yAxis.rotateBy(pose.getRotation())) < 0.1); + 
assertTrue(zAxis.rotateBy(expectedRot).getDistance(zAxis.rotateBy(pose.getRotation())) < 0.1); + } + + private static void continuouslyRunPipeline(Frame frame, ReflectivePipelineSettings settings) { + var pipeline = new ReflectivePipeline(); + pipeline.settings = settings; + + while (true) { + CVPipelineResult pipelineResult = pipeline.run(frame, QuirkyCamera.DefaultCamera); + printTestResults(pipelineResult); + int preRelease = CVMat.getMatCount(); + pipelineResult.release(); + int postRelease = CVMat.getMatCount(); + + System.out.printf("Pre: %d, Post: %d\n", preRelease, postRelease); + } + } + + // used to run VisualVM for profiling, which won't run on unit tests. + public static void main(String[] args) { + TestUtils.loadLibraries(); + var frameProvider = + new FileFrameProvider( + TestUtils.getWPIImagePath(TestUtils.WPI2019Image.kCargoStraightDark72in_HighRes, false), + TestUtils.WPI2019Image.FOV); + + var settings = new ReflectivePipelineSettings(); + settings.hsvHue.set(60, 100); + settings.hsvSaturation.set(100, 255); + settings.hsvValue.set(190, 255); + settings.outputShouldDraw = true; + settings.outputShowMultipleTargets = true; + settings.contourGroupingMode = ContourGroupingMode.Dual; + settings.contourIntersection = ContourIntersectionDirection.Up; + + continuouslyRunPipeline(frameProvider.get(), settings); + } + + private static void printTestResults(CVPipelineResult pipelineResult) { + double fps = 1000 / pipelineResult.getLatencyMillis(); + System.out.println( + "Pipeline ran in " + pipelineResult.getLatencyMillis() + "ms (" + fps + " " + "fps)"); + System.out.println("Found " + pipelineResult.targets.size() + " valid targets"); + System.out.println( + "Found targets at " + + pipelineResult.targets.stream() + .map(TrackedTarget::getBestCameraToTarget3d) + .collect(Collectors.toList())); } - } - - // used to run VisualVM for profiling, which won't run on unit tests. 
- public static void main(String[] args) { - TestUtils.loadLibraries(); - var frameProvider = - new FileFrameProvider( - TestUtils.getWPIImagePath(TestUtils.WPI2019Image.kCargoStraightDark72in_HighRes, false), - TestUtils.WPI2019Image.FOV); - - var settings = new ReflectivePipelineSettings(); - settings.hsvHue.set(60, 100); - settings.hsvSaturation.set(100, 255); - settings.hsvValue.set(190, 255); - settings.outputShouldDraw = true; - settings.outputShowMultipleTargets = true; - settings.contourGroupingMode = ContourGroupingMode.Dual; - settings.contourIntersection = ContourIntersectionDirection.Up; - - continuouslyRunPipeline(frameProvider.get(), settings); - } - - private static void printTestResults(CVPipelineResult pipelineResult) { - double fps = 1000 / pipelineResult.getLatencyMillis(); - System.out.println( - "Pipeline ran in " + pipelineResult.getLatencyMillis() + "ms (" + fps + " " + "fps)"); - System.out.println("Found " + pipelineResult.targets.size() + " valid targets"); - System.out.println( - "Found targets at " - + pipelineResult.targets.stream() - .map(TrackedTarget::getBestCameraToTarget3d) - .collect(Collectors.toList())); - } } diff --git a/photon-core/src/test/java/org/photonvision/vision/processes/PipelineManagerTest.java b/photon-core/src/test/java/org/photonvision/vision/processes/PipelineManagerTest.java index af1cf02cf5..7b90ce027d 100644 --- a/photon-core/src/test/java/org/photonvision/vision/processes/PipelineManagerTest.java +++ b/photon-core/src/test/java/org/photonvision/vision/processes/PipelineManagerTest.java @@ -26,36 +26,36 @@ import org.photonvision.vision.pipeline.PipelineType; public class PipelineManagerTest { - @Test - public void testUniqueName() { - TestUtils.loadLibraries(); - PipelineManager manager = new PipelineManager(new DriverModePipelineSettings(), List.of()); - manager.addPipeline(PipelineType.Reflective, "Another"); + @Test + public void testUniqueName() { + TestUtils.loadLibraries(); + PipelineManager manager = 
new PipelineManager(new DriverModePipelineSettings(), List.of()); + manager.addPipeline(PipelineType.Reflective, "Another"); - // We now have ["New Pipeline", "Another"] - // After we duplicate 0 and 1, we expect ["New Pipeline", "Another", "New Pipeline (1)", - // "Another (1)"] - manager.duplicatePipeline(0); - manager.duplicatePipeline(1); + // We now have ["New Pipeline", "Another"] + // After we duplicate 0 and 1, we expect ["New Pipeline", "Another", "New Pipeline (1)", + // "Another (1)"] + manager.duplicatePipeline(0); + manager.duplicatePipeline(1); - // Should add "Another (2)" - manager.duplicatePipeline(3); - // Should add "Another (3) - manager.duplicatePipeline(3); - // Should add "Another (4) - manager.duplicatePipeline(1); + // Should add "Another (2)" + manager.duplicatePipeline(3); + // Should add "Another (3) + manager.duplicatePipeline(3); + // Should add "Another (4) + manager.duplicatePipeline(1); - // Should add "Another (5)" through "Another (15)" - for (int i = 5; i < 15; i++) { - manager.duplicatePipeline(1); - } + // Should add "Another (5)" through "Another (15)" + for (int i = 5; i < 15; i++) { + manager.duplicatePipeline(1); + } - var nicks = manager.getPipelineNicknames(); - var expected = - new ArrayList<>(List.of("New Pipeline", "Another", "New Pipeline (1)", "Another (1)")); - for (int i = 2; i < 15; i++) { - expected.add("Another (" + i + ")"); + var nicks = manager.getPipelineNicknames(); + var expected = + new ArrayList<>(List.of("New Pipeline", "Another", "New Pipeline (1)", "Another (1)")); + for (int i = 2; i < 15; i++) { + expected.add("Another (" + i + ")"); + } + Assertions.assertEquals(expected, nicks); } - Assertions.assertEquals(expected, nicks); - } } diff --git a/photon-core/src/test/java/org/photonvision/vision/processes/VisionModuleManagerTest.java b/photon-core/src/test/java/org/photonvision/vision/processes/VisionModuleManagerTest.java index ce36cad574..e0a8376159 100644 --- 
a/photon-core/src/test/java/org/photonvision/vision/processes/VisionModuleManagerTest.java +++ b/photon-core/src/test/java/org/photonvision/vision/processes/VisionModuleManagerTest.java @@ -37,162 +37,162 @@ import org.photonvision.vision.pipeline.result.CVPipelineResult; public class VisionModuleManagerTest { - @BeforeAll - public static void init() { - TestUtils.loadLibraries(); - } + @BeforeAll + public static void init() { + TestUtils.loadLibraries(); + } - private static class TestSource extends VisionSource { - private final FrameProvider provider; + private static class TestSource extends VisionSource { + private final FrameProvider provider; - public TestSource(FrameProvider provider, CameraConfiguration cameraConfiguration) { - super(cameraConfiguration); - this.provider = provider; - } + public TestSource(FrameProvider provider, CameraConfiguration cameraConfiguration) { + super(cameraConfiguration); + this.provider = provider; + } - @Override - public FrameProvider getFrameProvider() { - return provider; - } + @Override + public FrameProvider getFrameProvider() { + return provider; + } - @Override - public VisionSourceSettables getSettables() { - return new TestSettables(getCameraConfiguration()); - } + @Override + public VisionSourceSettables getSettables() { + return new TestSettables(getCameraConfiguration()); + } - @Override - public boolean isVendorCamera() { - return false; + @Override + public boolean isVendorCamera() { + return false; + } } - } - private static class TestSettables extends VisionSourceSettables { - protected TestSettables(CameraConfiguration configuration) { - super(configuration); - } + private static class TestSettables extends VisionSourceSettables { + protected TestSettables(CameraConfiguration configuration) { + super(configuration); + } - @Override - public void setExposure(double exposure) {} + @Override + public void setExposure(double exposure) {} - @Override - public void setBrightness(int brightness) {} + @Override + 
public void setBrightness(int brightness) {} - @Override - public void setGain(int gain) {} + @Override + public void setGain(int gain) {} - @Override - public VideoMode getCurrentVideoMode() { - return new VideoMode(0, 320, 240, 254); - } + @Override + public VideoMode getCurrentVideoMode() { + return new VideoMode(0, 320, 240, 254); + } + + @Override + public void setVideoModeInternal(VideoMode videoMode) { + this.frameStaticProperties = new FrameStaticProperties(getCurrentVideoMode(), getFOV(), null); + } + + @Override + public HashMap getAllVideoModes() { + var ret = new HashMap(); + ret.put(0, getCurrentVideoMode()); + return ret; + } - @Override - public void setVideoModeInternal(VideoMode videoMode) { - this.frameStaticProperties = new FrameStaticProperties(getCurrentVideoMode(), getFOV(), null); + @Override + public void setAutoExposure(boolean cameraAutoExposure) {} } - @Override - public HashMap getAllVideoModes() { - var ret = new HashMap(); - ret.put(0, getCurrentVideoMode()); - return ret; + private static class TestDataConsumer implements CVPipelineResultConsumer { + private CVPipelineResult result; + + @Override + public void accept(CVPipelineResult result) { + this.result = result; + } } - @Override - public void setAutoExposure(boolean cameraAutoExposure) {} - } + @Test + public void setupManager() { + ConfigManager.getInstance().load(); + + var conf = new CameraConfiguration("Foo", "Bar"); + var ffp = + new FileFrameProvider( + TestUtils.getWPIImagePath(TestUtils.WPI2019Image.kCargoStraightDark72in_HighRes, false), + TestUtils.WPI2019Image.FOV); - private static class TestDataConsumer implements CVPipelineResultConsumer { - private CVPipelineResult result; + var testSource = new TestSource(ffp, conf); - @Override - public void accept(CVPipelineResult result) { - this.result = result; + var modules = VisionModuleManager.getInstance().addSources(List.of(testSource)); + var module0DataConsumer = new TestDataConsumer(); + + 
VisionModuleManager.getInstance().visionModules.get(0).addResultConsumer(module0DataConsumer); + + modules.forEach(VisionModule::start); + + sleep(1500); + + Assertions.assertNotNull(module0DataConsumer.result); + printTestResults(module0DataConsumer.result); } - } - - @Test - public void setupManager() { - ConfigManager.getInstance().load(); - - var conf = new CameraConfiguration("Foo", "Bar"); - var ffp = - new FileFrameProvider( - TestUtils.getWPIImagePath(TestUtils.WPI2019Image.kCargoStraightDark72in_HighRes, false), - TestUtils.WPI2019Image.FOV); - - var testSource = new TestSource(ffp, conf); - - var modules = VisionModuleManager.getInstance().addSources(List.of(testSource)); - var module0DataConsumer = new TestDataConsumer(); - - VisionModuleManager.getInstance().visionModules.get(0).addResultConsumer(module0DataConsumer); - - modules.forEach(VisionModule::start); - - sleep(1500); - - Assertions.assertNotNull(module0DataConsumer.result); - printTestResults(module0DataConsumer.result); - } - - @Test - public void testMultipleStreamIndex() { - ConfigManager.getInstance().load(); - - var vmm = new VisionModuleManager(); - - var conf = new CameraConfiguration("Foo", "Bar"); - conf.streamIndex = 1; - var ffp = - new FileFrameProvider( - TestUtils.getWPIImagePath(TestUtils.WPI2019Image.kCargoStraightDark72in_HighRes, false), - TestUtils.WPI2019Image.FOV); - var testSource = new TestSource(ffp, conf); - - var conf2 = new CameraConfiguration("Foo2", "Bar"); - conf2.streamIndex = 0; - var ffp2 = - new FileFrameProvider( - TestUtils.getWPIImagePath(TestUtils.WPI2019Image.kCargoStraightDark72in_HighRes, false), - TestUtils.WPI2019Image.FOV); - var testSource2 = new TestSource(ffp2, conf2); - - var conf3 = new CameraConfiguration("Foo3", "Bar"); - conf3.streamIndex = 0; - var ffp3 = - new FileFrameProvider( - TestUtils.getWPIImagePath(TestUtils.WPI2019Image.kCargoStraightDark72in_HighRes, false), - TestUtils.WPI2019Image.FOV); - var testSource3 = new TestSource(ffp3, 
conf3); - - var modules = vmm.addSources(List.of(testSource, testSource2, testSource3)); - - System.out.println( - Arrays.toString( - modules.stream() - .map(it -> it.visionSource.getCameraConfiguration().streamIndex) - .toArray())); - var idxs = - modules.stream() - .map(it -> it.visionSource.getCameraConfiguration().streamIndex) - .collect(Collectors.toList()); - assertTrue(idxs.contains(0)); - assertTrue(idxs.contains(1)); - assertTrue(idxs.contains(2)); - } - - private static void printTestResults(CVPipelineResult pipelineResult) { - double fps = 1000 / pipelineResult.getLatencyMillis(); - System.out.print( - "Pipeline ran in " + pipelineResult.getLatencyMillis() + "ms (" + fps + " fps), "); - System.out.println("Found " + pipelineResult.targets.size() + " valid targets"); - } - - private void sleep(int millis) { - try { - Thread.sleep(millis); - } catch (InterruptedException e) { - // ignored + + @Test + public void testMultipleStreamIndex() { + ConfigManager.getInstance().load(); + + var vmm = new VisionModuleManager(); + + var conf = new CameraConfiguration("Foo", "Bar"); + conf.streamIndex = 1; + var ffp = + new FileFrameProvider( + TestUtils.getWPIImagePath(TestUtils.WPI2019Image.kCargoStraightDark72in_HighRes, false), + TestUtils.WPI2019Image.FOV); + var testSource = new TestSource(ffp, conf); + + var conf2 = new CameraConfiguration("Foo2", "Bar"); + conf2.streamIndex = 0; + var ffp2 = + new FileFrameProvider( + TestUtils.getWPIImagePath(TestUtils.WPI2019Image.kCargoStraightDark72in_HighRes, false), + TestUtils.WPI2019Image.FOV); + var testSource2 = new TestSource(ffp2, conf2); + + var conf3 = new CameraConfiguration("Foo3", "Bar"); + conf3.streamIndex = 0; + var ffp3 = + new FileFrameProvider( + TestUtils.getWPIImagePath(TestUtils.WPI2019Image.kCargoStraightDark72in_HighRes, false), + TestUtils.WPI2019Image.FOV); + var testSource3 = new TestSource(ffp3, conf3); + + var modules = vmm.addSources(List.of(testSource, testSource2, testSource3)); + + 
System.out.println( + Arrays.toString( + modules.stream() + .map(it -> it.visionSource.getCameraConfiguration().streamIndex) + .toArray())); + var idxs = + modules.stream() + .map(it -> it.visionSource.getCameraConfiguration().streamIndex) + .collect(Collectors.toList()); + assertTrue(idxs.contains(0)); + assertTrue(idxs.contains(1)); + assertTrue(idxs.contains(2)); + } + + private static void printTestResults(CVPipelineResult pipelineResult) { + double fps = 1000 / pipelineResult.getLatencyMillis(); + System.out.print( + "Pipeline ran in " + pipelineResult.getLatencyMillis() + "ms (" + fps + " fps), "); + System.out.println("Found " + pipelineResult.targets.size() + " valid targets"); + } + + private void sleep(int millis) { + try { + Thread.sleep(millis); + } catch (InterruptedException e) { + // ignored + } } - } } diff --git a/photon-core/src/test/java/org/photonvision/vision/processes/VisionSourceManagerTest.java b/photon-core/src/test/java/org/photonvision/vision/processes/VisionSourceManagerTest.java index 33741f1579..2c64a9a622 100644 --- a/photon-core/src/test/java/org/photonvision/vision/processes/VisionSourceManagerTest.java +++ b/photon-core/src/test/java/org/photonvision/vision/processes/VisionSourceManagerTest.java @@ -27,31 +27,31 @@ import org.photonvision.common.configuration.ConfigManager; public class VisionSourceManagerTest { - @Test - public void visionSourceTest() { - var inst = new VisionSourceManager(); - var infoList = new ArrayList(); - inst.cameraInfoSupplier = () -> infoList; - ConfigManager.getInstance().load(); - - inst.tryMatchUSBCamImpl(); - var config = new CameraConfiguration("secondTestVideo", "dev/video1"); - UsbCameraInfo info1 = new UsbCameraInfo(0, "dev/video0", "testVideo", new String[0], 1, 2); - infoList.add(info1); - - inst.registerLoadedConfigs(config); - var sources = inst.tryMatchUSBCamImpl(false); - - assertTrue(inst.knownUsbCameras.contains(info1)); - assertEquals(1, inst.unmatchedLoadedConfigs.size()); - - 
UsbCameraInfo info2 = - new UsbCameraInfo(0, "dev/video1", "secondTestVideo", new String[0], 2, 1); - infoList.add(info2); - inst.tryMatchUSBCamImpl(false); - - assertTrue(inst.knownUsbCameras.contains(info2)); - assertEquals(2, inst.knownUsbCameras.size()); - assertEquals(0, inst.unmatchedLoadedConfigs.size()); - } + @Test + public void visionSourceTest() { + var inst = new VisionSourceManager(); + var infoList = new ArrayList(); + inst.cameraInfoSupplier = () -> infoList; + ConfigManager.getInstance().load(); + + inst.tryMatchUSBCamImpl(); + var config = new CameraConfiguration("secondTestVideo", "dev/video1"); + UsbCameraInfo info1 = new UsbCameraInfo(0, "dev/video0", "testVideo", new String[0], 1, 2); + infoList.add(info1); + + inst.registerLoadedConfigs(config); + var sources = inst.tryMatchUSBCamImpl(false); + + assertTrue(inst.knownUsbCameras.contains(info1)); + assertEquals(1, inst.unmatchedLoadedConfigs.size()); + + UsbCameraInfo info2 = + new UsbCameraInfo(0, "dev/video1", "secondTestVideo", new String[0], 2, 1); + infoList.add(info2); + inst.tryMatchUSBCamImpl(false); + + assertTrue(inst.knownUsbCameras.contains(info2)); + assertEquals(2, inst.knownUsbCameras.size()); + assertEquals(0, inst.unmatchedLoadedConfigs.size()); + } } diff --git a/photon-core/src/test/java/org/photonvision/vision/target/TargetCalculationsTest.java b/photon-core/src/test/java/org/photonvision/vision/target/TargetCalculationsTest.java index 5b93948fce..7b23d3a600 100644 --- a/photon-core/src/test/java/org/photonvision/vision/target/TargetCalculationsTest.java +++ b/photon-core/src/test/java/org/photonvision/vision/target/TargetCalculationsTest.java @@ -38,244 +38,244 @@ public class TargetCalculationsTest { - private static Size imageSize = new Size(800, 600); - private static Point imageCenterPoint = - new Point(imageSize.width / 2.0 - 0.5, imageSize.height / 2.0 - 0.5); - private static final double diagFOV = Math.toRadians(70.0); - - private static final FrameStaticProperties 
props = - new FrameStaticProperties((int) imageSize.width, (int) imageSize.height, diagFOV, null); - private static final TrackedTarget.TargetCalculationParameters params = - new TrackedTarget.TargetCalculationParameters( - true, - TargetOffsetPointEdge.Center, - RobotOffsetPointMode.None, - new Point(), - new DualOffsetValues(), - imageCenterPoint, - props.horizontalFocalLength, - props.verticalFocalLength, - imageSize.width * imageSize.height); - - @BeforeAll - public static void setup() { - TestUtils.loadLibraries(); - } - - @Test - public void testYawPitchBehavior() { - double targetPixelOffsetX = 100; - double targetPixelOffsetY = 100; - var targetCenterPoint = - new Point(imageCenterPoint.x + targetPixelOffsetX, imageCenterPoint.y + targetPixelOffsetY); - - var targetYawPitch = - TargetCalculations.calculateYawPitch( - imageCenterPoint.x, - targetCenterPoint.x, - params.horizontalFocalLength, - imageCenterPoint.y, - targetCenterPoint.y, - params.verticalFocalLength); - - assertTrue(targetYawPitch.getFirst() > 0, "Yaw is not positive right"); - assertTrue(targetYawPitch.getSecond() < 0, "Pitch is not positive up"); - - var fovs = - FrameStaticProperties.calculateHorizontalVerticalFoV( - diagFOV, (int) imageSize.width, (int) imageSize.height); - var maxYaw = - TargetCalculations.calculateYawPitch( - imageCenterPoint.x, - 2 * imageCenterPoint.x, - params.horizontalFocalLength, - imageCenterPoint.y, - imageCenterPoint.y, - params.verticalFocalLength); - assertEquals(fovs.getFirst() / 2.0, maxYaw.getFirst(), 0.025, "Horizontal FOV check failed"); - var maxPitch = - TargetCalculations.calculateYawPitch( - imageCenterPoint.x, - imageCenterPoint.x, - params.horizontalFocalLength, - imageCenterPoint.y, - 0, - params.verticalFocalLength); - assertEquals(fovs.getSecond() / 2.0, maxPitch.getSecond(), 0.025, "Vertical FOV check failed"); - } - - private static Stream testYawPitchCalcArgs() { - return Stream.of( - // (yaw, pitch) in degrees - Arguments.of(0, 0), - 
Arguments.of(10, 0), - Arguments.of(0, 10), - Arguments.of(10, 10), - Arguments.of(-10, -10), - Arguments.of(30, 45), - Arguments.of(-45, -20)); - } - - private static double[] testCameraMatrix = {240, 0, 320, 0, 240, 320, 0, 0, 1}; - - @ParameterizedTest - @MethodSource("testYawPitchCalcArgs") - public void testYawPitchCalc(double yawDeg, double pitchDeg) { - Mat testCameraMat = new Mat(3, 3, CvType.CV_64F); - testCameraMat.put(0, 0, testCameraMatrix); - // Since we create this translation using the given yaw/pitch, we should see the same angles - // calculated - var targetTrl = - new Translation3d(1, new Rotation3d(0, Math.toRadians(pitchDeg), Math.toRadians(yawDeg))); - // NWU to EDN - var objectPoints = - new MatOfPoint3f(new Point3(-targetTrl.getY(), -targetTrl.getZ(), targetTrl.getX())); - var imagePoints = new MatOfPoint2f(); - // Project translation into camera image - Calib3d.projectPoints( - objectPoints, - new MatOfDouble(0, 0, 0), - new MatOfDouble(0, 0, 0), - testCameraMat, - new MatOfDouble(0, 0, 0, 0, 0), - imagePoints); - var point = imagePoints.toArray()[0]; - // Test if the target yaw/pitch calculation matches what the target was created with - var yawPitch = - TargetCalculations.calculateYawPitch( - point.x, - testCameraMatrix[2], - testCameraMatrix[0], - point.y, - testCameraMatrix[5], - testCameraMatrix[4]); - assertEquals(yawDeg, yawPitch.getFirst(), 1e-3, "Yaw calculation incorrect"); - assertEquals(pitchDeg, yawPitch.getSecond(), 1e-3, "Pitch calculation incorrect"); - } - - @Test - public void testTargetOffset() { - Point center = new Point(0, 0); - Size rectSize = new Size(10, 5); - double angle = 30; - RotatedRect rect = new RotatedRect(center, rectSize, angle); - - // We pretend like x/y are in pixels, so the "top" is actually the bottom - var result = - TargetCalculations.calculateTargetOffsetPoint(true, TargetOffsetPointEdge.Top, rect); - assertEquals(1.25, result.x, 0.1, "Target offset x not as expected"); - assertEquals(-2.17, 
result.y, 0.1, "Target offset Y not as expected"); - result = - TargetCalculations.calculateTargetOffsetPoint(true, TargetOffsetPointEdge.Bottom, rect); - assertEquals(-1.25, result.x, 0.1, "Target offset x not as expected"); - assertEquals(2.17, result.y, 0.1, "Target offset Y not as expected"); - } - - @Test - public void testSkewCalculation() { - // Setup - var isLandscape = true; - var rect = new RotatedRect(new Point(), new Size(10, 5), 170); - - // Compute min area rect - var points = new Point[4]; - rect.points(points); - var mat2f = new MatOfPoint2f(points); - var minAreaRect = Imgproc.minAreaRect(mat2f); - - // Assert result - var result = TargetCalculations.calculateSkew(isLandscape, minAreaRect); - assertEquals(-10, result, 0.01); - - // Setup - isLandscape = true; - rect = new RotatedRect(new Point(), new Size(10, 5), -70); - - // Compute min area rect - points = new Point[4]; - rect.points(points); - mat2f.release(); - mat2f = new MatOfPoint2f(points); - minAreaRect = Imgproc.minAreaRect(mat2f); - - // Assert result - result = TargetCalculations.calculateSkew(isLandscape, minAreaRect); - assertEquals(-70, result, 0.01); - - // Setup - isLandscape = false; - rect = new RotatedRect(new Point(), new Size(5, 10), 10); - - // Compute min area rect - points = new Point[4]; - rect.points(points); - mat2f.release(); - mat2f = new MatOfPoint2f(points); - minAreaRect = Imgproc.minAreaRect(mat2f); - - // Assert result - result = TargetCalculations.calculateSkew(isLandscape, minAreaRect); - assertEquals(10, result, 0.01); - - // Setup - isLandscape = false; - rect = new RotatedRect(new Point(), new Size(5, 10), 70); - - // Compute min area rect - points = new Point[4]; - rect.points(points); - mat2f.release(); - mat2f = new MatOfPoint2f(points); - minAreaRect = Imgproc.minAreaRect(mat2f); - - // Assert result - result = TargetCalculations.calculateSkew(isLandscape, minAreaRect); - assertEquals(70, result, 0.01); - - // Setup - isLandscape = false; - rect = new 
RotatedRect(new Point(), new Size(5, 10), -70); - - // Compute min area rect - points = new Point[4]; - rect.points(points); - mat2f.release(); - mat2f = new MatOfPoint2f(points); - minAreaRect = Imgproc.minAreaRect(mat2f); - - // Assert result - result = TargetCalculations.calculateSkew(isLandscape, minAreaRect); - assertEquals(-70, result, 0.01); - } - - @Test - public void testCameraFOVCalculation() { - final DoubleCouple glowormHorizVert = - FrameStaticProperties.calculateHorizontalVerticalFoV(74.8, 640, 480); - var gwHorizDeg = glowormHorizVert.getFirst(); - var gwVertDeg = glowormHorizVert.getSecond(); - assertEquals(62.7, gwHorizDeg, .3); - assertEquals(49, gwVertDeg, .3); - } - - @Test - public void testDualOffsetCrosshair() { - final DualOffsetValues dualOffsetValues = - new DualOffsetValues( - new Point(400, 150), 10, - new Point(390, 260), 2); - - final Point expectedHalfway = new Point(393.75, 218.75); - final Point expectedOutside = new Point(388.75, 273.75); - - Point crosshairPointHalfway = - TargetCalculations.calculateDualOffsetCrosshair(dualOffsetValues, 5); - Point crosshairPointOutside = - TargetCalculations.calculateDualOffsetCrosshair(dualOffsetValues, 1); - - Assertions.assertEquals(expectedHalfway.x, crosshairPointHalfway.x); - Assertions.assertEquals(expectedHalfway.y, crosshairPointHalfway.y); - Assertions.assertEquals(expectedOutside.x, crosshairPointOutside.x); - Assertions.assertEquals(expectedOutside.y, crosshairPointOutside.y); - } + private static Size imageSize = new Size(800, 600); + private static Point imageCenterPoint = + new Point(imageSize.width / 2.0 - 0.5, imageSize.height / 2.0 - 0.5); + private static final double diagFOV = Math.toRadians(70.0); + + private static final FrameStaticProperties props = + new FrameStaticProperties((int) imageSize.width, (int) imageSize.height, diagFOV, null); + private static final TrackedTarget.TargetCalculationParameters params = + new TrackedTarget.TargetCalculationParameters( + true, + 
TargetOffsetPointEdge.Center, + RobotOffsetPointMode.None, + new Point(), + new DualOffsetValues(), + imageCenterPoint, + props.horizontalFocalLength, + props.verticalFocalLength, + imageSize.width * imageSize.height); + + @BeforeAll + public static void setup() { + TestUtils.loadLibraries(); + } + + @Test + public void testYawPitchBehavior() { + double targetPixelOffsetX = 100; + double targetPixelOffsetY = 100; + var targetCenterPoint = + new Point(imageCenterPoint.x + targetPixelOffsetX, imageCenterPoint.y + targetPixelOffsetY); + + var targetYawPitch = + TargetCalculations.calculateYawPitch( + imageCenterPoint.x, + targetCenterPoint.x, + params.horizontalFocalLength, + imageCenterPoint.y, + targetCenterPoint.y, + params.verticalFocalLength); + + assertTrue(targetYawPitch.getFirst() > 0, "Yaw is not positive right"); + assertTrue(targetYawPitch.getSecond() < 0, "Pitch is not positive up"); + + var fovs = + FrameStaticProperties.calculateHorizontalVerticalFoV( + diagFOV, (int) imageSize.width, (int) imageSize.height); + var maxYaw = + TargetCalculations.calculateYawPitch( + imageCenterPoint.x, + 2 * imageCenterPoint.x, + params.horizontalFocalLength, + imageCenterPoint.y, + imageCenterPoint.y, + params.verticalFocalLength); + assertEquals(fovs.getFirst() / 2.0, maxYaw.getFirst(), 0.025, "Horizontal FOV check failed"); + var maxPitch = + TargetCalculations.calculateYawPitch( + imageCenterPoint.x, + imageCenterPoint.x, + params.horizontalFocalLength, + imageCenterPoint.y, + 0, + params.verticalFocalLength); + assertEquals(fovs.getSecond() / 2.0, maxPitch.getSecond(), 0.025, "Vertical FOV check failed"); + } + + private static Stream testYawPitchCalcArgs() { + return Stream.of( + // (yaw, pitch) in degrees + Arguments.of(0, 0), + Arguments.of(10, 0), + Arguments.of(0, 10), + Arguments.of(10, 10), + Arguments.of(-10, -10), + Arguments.of(30, 45), + Arguments.of(-45, -20)); + } + + private static double[] testCameraMatrix = {240, 0, 320, 0, 240, 320, 0, 0, 1}; + + 
@ParameterizedTest + @MethodSource("testYawPitchCalcArgs") + public void testYawPitchCalc(double yawDeg, double pitchDeg) { + Mat testCameraMat = new Mat(3, 3, CvType.CV_64F); + testCameraMat.put(0, 0, testCameraMatrix); + // Since we create this translation using the given yaw/pitch, we should see the same angles + // calculated + var targetTrl = + new Translation3d(1, new Rotation3d(0, Math.toRadians(pitchDeg), Math.toRadians(yawDeg))); + // NWU to EDN + var objectPoints = + new MatOfPoint3f(new Point3(-targetTrl.getY(), -targetTrl.getZ(), targetTrl.getX())); + var imagePoints = new MatOfPoint2f(); + // Project translation into camera image + Calib3d.projectPoints( + objectPoints, + new MatOfDouble(0, 0, 0), + new MatOfDouble(0, 0, 0), + testCameraMat, + new MatOfDouble(0, 0, 0, 0, 0), + imagePoints); + var point = imagePoints.toArray()[0]; + // Test if the target yaw/pitch calculation matches what the target was created with + var yawPitch = + TargetCalculations.calculateYawPitch( + point.x, + testCameraMatrix[2], + testCameraMatrix[0], + point.y, + testCameraMatrix[5], + testCameraMatrix[4]); + assertEquals(yawDeg, yawPitch.getFirst(), 1e-3, "Yaw calculation incorrect"); + assertEquals(pitchDeg, yawPitch.getSecond(), 1e-3, "Pitch calculation incorrect"); + } + + @Test + public void testTargetOffset() { + Point center = new Point(0, 0); + Size rectSize = new Size(10, 5); + double angle = 30; + RotatedRect rect = new RotatedRect(center, rectSize, angle); + + // We pretend like x/y are in pixels, so the "top" is actually the bottom + var result = + TargetCalculations.calculateTargetOffsetPoint(true, TargetOffsetPointEdge.Top, rect); + assertEquals(1.25, result.x, 0.1, "Target offset x not as expected"); + assertEquals(-2.17, result.y, 0.1, "Target offset Y not as expected"); + result = + TargetCalculations.calculateTargetOffsetPoint(true, TargetOffsetPointEdge.Bottom, rect); + assertEquals(-1.25, result.x, 0.1, "Target offset x not as expected"); + 
assertEquals(2.17, result.y, 0.1, "Target offset Y not as expected"); + } + + @Test + public void testSkewCalculation() { + // Setup + var isLandscape = true; + var rect = new RotatedRect(new Point(), new Size(10, 5), 170); + + // Compute min area rect + var points = new Point[4]; + rect.points(points); + var mat2f = new MatOfPoint2f(points); + var minAreaRect = Imgproc.minAreaRect(mat2f); + + // Assert result + var result = TargetCalculations.calculateSkew(isLandscape, minAreaRect); + assertEquals(-10, result, 0.01); + + // Setup + isLandscape = true; + rect = new RotatedRect(new Point(), new Size(10, 5), -70); + + // Compute min area rect + points = new Point[4]; + rect.points(points); + mat2f.release(); + mat2f = new MatOfPoint2f(points); + minAreaRect = Imgproc.minAreaRect(mat2f); + + // Assert result + result = TargetCalculations.calculateSkew(isLandscape, minAreaRect); + assertEquals(-70, result, 0.01); + + // Setup + isLandscape = false; + rect = new RotatedRect(new Point(), new Size(5, 10), 10); + + // Compute min area rect + points = new Point[4]; + rect.points(points); + mat2f.release(); + mat2f = new MatOfPoint2f(points); + minAreaRect = Imgproc.minAreaRect(mat2f); + + // Assert result + result = TargetCalculations.calculateSkew(isLandscape, minAreaRect); + assertEquals(10, result, 0.01); + + // Setup + isLandscape = false; + rect = new RotatedRect(new Point(), new Size(5, 10), 70); + + // Compute min area rect + points = new Point[4]; + rect.points(points); + mat2f.release(); + mat2f = new MatOfPoint2f(points); + minAreaRect = Imgproc.minAreaRect(mat2f); + + // Assert result + result = TargetCalculations.calculateSkew(isLandscape, minAreaRect); + assertEquals(70, result, 0.01); + + // Setup + isLandscape = false; + rect = new RotatedRect(new Point(), new Size(5, 10), -70); + + // Compute min area rect + points = new Point[4]; + rect.points(points); + mat2f.release(); + mat2f = new MatOfPoint2f(points); + minAreaRect = Imgproc.minAreaRect(mat2f); + + // 
Assert result + result = TargetCalculations.calculateSkew(isLandscape, minAreaRect); + assertEquals(-70, result, 0.01); + } + + @Test + public void testCameraFOVCalculation() { + final DoubleCouple glowormHorizVert = + FrameStaticProperties.calculateHorizontalVerticalFoV(74.8, 640, 480); + var gwHorizDeg = glowormHorizVert.getFirst(); + var gwVertDeg = glowormHorizVert.getSecond(); + assertEquals(62.7, gwHorizDeg, .3); + assertEquals(49, gwVertDeg, .3); + } + + @Test + public void testDualOffsetCrosshair() { + final DualOffsetValues dualOffsetValues = + new DualOffsetValues( + new Point(400, 150), 10, + new Point(390, 260), 2); + + final Point expectedHalfway = new Point(393.75, 218.75); + final Point expectedOutside = new Point(388.75, 273.75); + + Point crosshairPointHalfway = + TargetCalculations.calculateDualOffsetCrosshair(dualOffsetValues, 5); + Point crosshairPointOutside = + TargetCalculations.calculateDualOffsetCrosshair(dualOffsetValues, 1); + + Assertions.assertEquals(expectedHalfway.x, crosshairPointHalfway.x); + Assertions.assertEquals(expectedHalfway.y, crosshairPointHalfway.y); + Assertions.assertEquals(expectedOutside.x, crosshairPointOutside.x); + Assertions.assertEquals(expectedOutside.y, crosshairPointOutside.y); + } } diff --git a/photon-core/src/test/java/org/photonvision/vision/target/TrackedTargetTest.java b/photon-core/src/test/java/org/photonvision/vision/target/TrackedTargetTest.java index c1f0620359..f58f355720 100644 --- a/photon-core/src/test/java/org/photonvision/vision/target/TrackedTargetTest.java +++ b/photon-core/src/test/java/org/photonvision/vision/target/TrackedTargetTest.java @@ -29,41 +29,41 @@ import org.photonvision.vision.opencv.DualOffsetValues; public class TrackedTargetTest { - @BeforeEach - public void Init() { - TestUtils.loadLibraries(); - } + @BeforeEach + public void Init() { + TestUtils.loadLibraries(); + } - @Test - void axisTest() { - MatOfPoint mat = new MatOfPoint(); - mat.fromList( - List.of( - new Point(400, 
298), - new Point(426.22, 298), - new Point(426.22, 302), - new Point(400, 302))); // gives contour with center of 426, 300 - Contour contour = new Contour(mat); + @Test + void axisTest() { + MatOfPoint mat = new MatOfPoint(); + mat.fromList( + List.of( + new Point(400, 298), + new Point(426.22, 298), + new Point(426.22, 302), + new Point(400, 302))); // gives contour with center of 426, 300 + Contour contour = new Contour(mat); - var pTarget = new PotentialTarget(contour); + var pTarget = new PotentialTarget(contour); - var imageSize = new Size(800, 600); + var imageSize = new Size(800, 600); - var setting = - new TrackedTarget.TargetCalculationParameters( - false, - TargetOffsetPointEdge.Center, - RobotOffsetPointMode.None, - new Point(0, 0), - new DualOffsetValues(), - new Point(imageSize.width / 2, imageSize.height / 2), - 61, - 34.3, - imageSize.area()); + var setting = + new TrackedTarget.TargetCalculationParameters( + false, + TargetOffsetPointEdge.Center, + RobotOffsetPointMode.None, + new Point(0, 0), + new DualOffsetValues(), + new Point(imageSize.width / 2, imageSize.height / 2), + 61, + 34.3, + imageSize.area()); - var trackedTarget = new TrackedTarget(pTarget, setting, null); - // TODO change these hardcoded values - assertEquals(12.0, trackedTarget.getYaw(), 0.05, "Yaw was incorrect"); - assertEquals(0, trackedTarget.getPitch(), 0.05, "Pitch was incorrect"); - } + var trackedTarget = new TrackedTarget(pTarget, setting, null); + // TODO change these hardcoded values + assertEquals(12.0, trackedTarget.getYaw(), 0.05, "Yaw was incorrect"); + assertEquals(0, trackedTarget.getPitch(), 0.05, "Pitch was incorrect"); + } } diff --git a/photon-lib/src/main/java/org/photonvision/EstimatedRobotPose.java b/photon-lib/src/main/java/org/photonvision/EstimatedRobotPose.java index 9df8da3467..a355ab00f3 100644 --- a/photon-lib/src/main/java/org/photonvision/EstimatedRobotPose.java +++ b/photon-lib/src/main/java/org/photonvision/EstimatedRobotPose.java @@ -31,32 
+31,32 @@ /** An estimated pose based on pipeline result */ public class EstimatedRobotPose { - /** The estimated pose */ - public final Pose3d estimatedPose; + /** The estimated pose */ + public final Pose3d estimatedPose; - /** The estimated time the frame used to derive the robot pose was taken */ - public final double timestampSeconds; + /** The estimated time the frame used to derive the robot pose was taken */ + public final double timestampSeconds; - /** A list of the targets used to compute this pose */ - public final List targetsUsed; + /** A list of the targets used to compute this pose */ + public final List targetsUsed; - /** The strategy actually used to produce this pose */ - public final PoseStrategy strategy; + /** The strategy actually used to produce this pose */ + public final PoseStrategy strategy; - /** - * Constructs an EstimatedRobotPose - * - * @param estimatedPose estimated pose - * @param timestampSeconds timestamp of the estimate - */ - public EstimatedRobotPose( - Pose3d estimatedPose, - double timestampSeconds, - List targetsUsed, - PoseStrategy strategy) { - this.estimatedPose = estimatedPose; - this.timestampSeconds = timestampSeconds; - this.targetsUsed = targetsUsed; - this.strategy = strategy; - } + /** + * Constructs an EstimatedRobotPose + * + * @param estimatedPose estimated pose + * @param timestampSeconds timestamp of the estimate + */ + public EstimatedRobotPose( + Pose3d estimatedPose, + double timestampSeconds, + List targetsUsed, + PoseStrategy strategy) { + this.estimatedPose = estimatedPose; + this.timestampSeconds = timestampSeconds; + this.targetsUsed = targetsUsed; + this.strategy = strategy; + } } diff --git a/photon-lib/src/main/java/org/photonvision/PhotonCamera.java b/photon-lib/src/main/java/org/photonvision/PhotonCamera.java index fd8e4f3b4c..9500629ca3 100644 --- a/photon-lib/src/main/java/org/photonvision/PhotonCamera.java +++ b/photon-lib/src/main/java/org/photonvision/PhotonCamera.java @@ -57,368 +57,368 @@ 
/** Represents a camera that is connected to PhotonVision. */ public class PhotonCamera implements AutoCloseable { - public static final String kTableName = "photonvision"; - - private final NetworkTable cameraTable; - RawSubscriber rawBytesEntry; - BooleanPublisher driverModePublisher; - BooleanSubscriber driverModeSubscriber; - DoublePublisher latencyMillisEntry; - BooleanPublisher hasTargetEntry; - DoublePublisher targetPitchEntry; - DoublePublisher targetYawEntry; - DoublePublisher targetAreaEntry; - DoubleArrayPublisher targetPoseEntry; - DoublePublisher targetSkewEntry; - StringSubscriber versionEntry; - IntegerEntry inputSaveImgEntry, outputSaveImgEntry; - IntegerPublisher pipelineIndexRequest, ledModeRequest; - IntegerSubscriber pipelineIndexState, ledModeState; - IntegerSubscriber heartbeatEntry; - private DoubleArraySubscriber cameraIntrinsicsSubscriber; - private DoubleArraySubscriber cameraDistortionSubscriber; - private StringPublisher atflPublisher; - - @Override - public void close() { - rawBytesEntry.close(); - driverModePublisher.close(); - driverModeSubscriber.close(); - latencyMillisEntry.close(); - hasTargetEntry.close(); - targetPitchEntry.close(); - targetYawEntry.close(); - targetAreaEntry.close(); - targetPoseEntry.close(); - targetSkewEntry.close(); - versionEntry.close(); - inputSaveImgEntry.close(); - outputSaveImgEntry.close(); - pipelineIndexRequest.close(); - pipelineIndexState.close(); - ledModeRequest.close(); - ledModeState.close(); - pipelineIndexRequest.close(); - cameraIntrinsicsSubscriber.close(); - cameraDistortionSubscriber.close(); - atflPublisher.close(); - } - - private final String path; - private final String name; - - private static boolean VERSION_CHECK_ENABLED = true; - private static long VERSION_CHECK_INTERVAL = 5; - private double lastVersionCheckTime = 0; - - private long prevHeartbeatValue = -1; - private double prevHeartbeatChangeTime = 0; - private static final double HEARBEAT_DEBOUNCE_SEC = 0.5; - - public 
static void setVersionCheckEnabled(boolean enabled) { - VERSION_CHECK_ENABLED = enabled; - } - - Packet packet = new Packet(1); - - // Existing is enough to make this multisubscriber do its thing - private final MultiSubscriber m_topicNameSubscriber; - - /** - * Constructs a PhotonCamera from a root table. - * - * @param instance The NetworkTableInstance to pull data from. This can be a custom instance in - * simulation, but should *usually* be the default NTInstance from - * NetworkTableInstance::getDefault - * @param cameraName The name of the camera, as seen in the UI. - */ - public PhotonCamera(NetworkTableInstance instance, String cameraName) { - name = cameraName; - var photonvision_root_table = instance.getTable(kTableName); - this.cameraTable = photonvision_root_table.getSubTable(cameraName); - path = cameraTable.getPath(); - rawBytesEntry = - cameraTable - .getRawTopic("rawBytes") - .subscribe( - "rawBytes", new byte[] {}, PubSubOption.periodic(0.01), PubSubOption.sendAll(true)); - driverModePublisher = cameraTable.getBooleanTopic("driverModeRequest").publish(); - driverModeSubscriber = cameraTable.getBooleanTopic("driverMode").subscribe(false); - inputSaveImgEntry = cameraTable.getIntegerTopic("inputSaveImgCmd").getEntry(0); - outputSaveImgEntry = cameraTable.getIntegerTopic("outputSaveImgCmd").getEntry(0); - pipelineIndexRequest = cameraTable.getIntegerTopic("pipelineIndexRequest").publish(); - pipelineIndexState = cameraTable.getIntegerTopic("pipelineIndexState").subscribe(0); - heartbeatEntry = cameraTable.getIntegerTopic("heartbeat").subscribe(-1); - cameraIntrinsicsSubscriber = - cameraTable.getDoubleArrayTopic("cameraIntrinsics").subscribe(null); - cameraDistortionSubscriber = - cameraTable.getDoubleArrayTopic("cameraDistortion").subscribe(null); - - ledModeRequest = photonvision_root_table.getIntegerTopic("ledModeRequest").publish(); - ledModeState = photonvision_root_table.getIntegerTopic("ledModeState").subscribe(-1); - versionEntry = 
photonvision_root_table.getStringTopic("version").subscribe(""); - - atflPublisher = photonvision_root_table.getStringTopic("apriltag_field_layout").publish(); - // Save the layout locally on Rio; on reboot, should be pushed out to NT clients - atflPublisher.getTopic().setPersistent(true); - - m_topicNameSubscriber = - new MultiSubscriber( - instance, new String[] {"/photonvision/"}, PubSubOption.topicsOnly(true)); - } - - /** - * Constructs a PhotonCamera from the name of the camera. - * - * @param cameraName The nickname of the camera (found in the PhotonVision UI). - */ - public PhotonCamera(String cameraName) { - this(NetworkTableInstance.getDefault(), cameraName); - } - - /** - * Returns the latest pipeline result. - * - * @return The latest pipeline result. - */ - public PhotonPipelineResult getLatestResult() { - verifyVersion(); - - // Clear the packet. - packet.clear(); - - // Create latest result. - var ret = new PhotonPipelineResult(); - - // Populate packet and create result. - packet.setData(rawBytesEntry.get(new byte[] {})); - - if (packet.getSize() < 1) return ret; - ret.createFromPacket(packet); - - // Set the timestamp of the result. - // getLatestChange returns in microseconds, so we divide by 1e6 to convert to seconds. - ret.setTimestampSeconds((rawBytesEntry.getLastChange() / 1e6) - ret.getLatencyMillis() / 1e3); - - // Return result. - return ret; - } - - /** - * Returns whether the camera is in driver mode. - * - * @return Whether the camera is in driver mode. - */ - public boolean getDriverMode() { - return driverModeSubscriber.get(); - } - - /** - * Toggles driver mode. - * - * @param driverMode Whether to set driver mode. - */ - public void setDriverMode(boolean driverMode) { - driverModePublisher.set(driverMode); - } - - /** - * Request the camera to save a new image file from the input camera stream with overlays. Images - * take up space in the filesystem of the PhotonCamera. 
Calling it frequently will fill up disk - * space and eventually cause the system to stop working. Clear out images in - * /opt/photonvision/photonvision_config/imgSaves frequently to prevent issues. - */ - public void takeInputSnapshot() { - inputSaveImgEntry.set(inputSaveImgEntry.get() + 1); - } - - /** - * Request the camera to save a new image file from the output stream with overlays. Images take - * up space in the filesystem of the PhotonCamera. Calling it frequently will fill up disk space - * and eventually cause the system to stop working. Clear out images in - * /opt/photonvision/photonvision_config/imgSaves frequently to prevent issues. - */ - public void takeOutputSnapshot() { - outputSaveImgEntry.set(outputSaveImgEntry.get() + 1); - } - - /** - * Returns the active pipeline index. - * - * @return The active pipeline index. - */ - public int getPipelineIndex() { - return (int) pipelineIndexState.get(0); - } - - /** - * Allows the user to select the active pipeline index. - * - * @param index The active pipeline index. - */ - public void setPipelineIndex(int index) { - pipelineIndexRequest.set(index); - } - - /** - * Returns the current LED mode. - * - * @return The current LED mode. 
- */ - public VisionLEDMode getLEDMode() { - int value = (int) ledModeState.get(-1); - switch (value) { - case 0: - return VisionLEDMode.kOff; - case 1: - return VisionLEDMode.kOn; - case 2: - return VisionLEDMode.kBlink; - case -1: - default: - return VisionLEDMode.kDefault; + public static final String kTableName = "photonvision"; + + private final NetworkTable cameraTable; + RawSubscriber rawBytesEntry; + BooleanPublisher driverModePublisher; + BooleanSubscriber driverModeSubscriber; + DoublePublisher latencyMillisEntry; + BooleanPublisher hasTargetEntry; + DoublePublisher targetPitchEntry; + DoublePublisher targetYawEntry; + DoublePublisher targetAreaEntry; + DoubleArrayPublisher targetPoseEntry; + DoublePublisher targetSkewEntry; + StringSubscriber versionEntry; + IntegerEntry inputSaveImgEntry, outputSaveImgEntry; + IntegerPublisher pipelineIndexRequest, ledModeRequest; + IntegerSubscriber pipelineIndexState, ledModeState; + IntegerSubscriber heartbeatEntry; + private DoubleArraySubscriber cameraIntrinsicsSubscriber; + private DoubleArraySubscriber cameraDistortionSubscriber; + private StringPublisher atflPublisher; + + @Override + public void close() { + rawBytesEntry.close(); + driverModePublisher.close(); + driverModeSubscriber.close(); + latencyMillisEntry.close(); + hasTargetEntry.close(); + targetPitchEntry.close(); + targetYawEntry.close(); + targetAreaEntry.close(); + targetPoseEntry.close(); + targetSkewEntry.close(); + versionEntry.close(); + inputSaveImgEntry.close(); + outputSaveImgEntry.close(); + pipelineIndexRequest.close(); + pipelineIndexState.close(); + ledModeRequest.close(); + ledModeState.close(); + pipelineIndexRequest.close(); + cameraIntrinsicsSubscriber.close(); + cameraDistortionSubscriber.close(); + atflPublisher.close(); } - } - - /** - * Sets the LED mode. - * - * @param led The mode to set to. 
- */ - public void setLED(VisionLEDMode led) { - ledModeRequest.set(led.value); - } - - /** - * Returns whether the latest target result has targets. - * - *

This method is deprecated; {@link PhotonPipelineResult#hasTargets()} should be used instead. - * - * @deprecated This method should be replaced with {@link PhotonPipelineResult#hasTargets()} - * @return Whether the latest target result has targets. - */ - @Deprecated - public boolean hasTargets() { - return getLatestResult().hasTargets(); - } - - /** - * Returns the name of the camera. This will return the same value that was given to the - * constructor as cameraName. - * - * @return The name of the camera. - */ - public String getName() { - return name; - } - - /** - * Returns whether the camera is connected and actively returning new data. Connection status is - * debounced. - * - * @return True if the camera is actively sending frame data, false otherwise. - */ - public boolean isConnected() { - var curHeartbeat = heartbeatEntry.get(); - var now = Timer.getFPGATimestamp(); - - if (curHeartbeat != prevHeartbeatValue) { - // New heartbeat value from the coprocessor - prevHeartbeatChangeTime = now; - prevHeartbeatValue = curHeartbeat; + + private final String path; + private final String name; + + private static boolean VERSION_CHECK_ENABLED = true; + private static long VERSION_CHECK_INTERVAL = 5; + private double lastVersionCheckTime = 0; + + private long prevHeartbeatValue = -1; + private double prevHeartbeatChangeTime = 0; + private static final double HEARBEAT_DEBOUNCE_SEC = 0.5; + + public static void setVersionCheckEnabled(boolean enabled) { + VERSION_CHECK_ENABLED = enabled; + } + + Packet packet = new Packet(1); + + // Existing is enough to make this multisubscriber do its thing + private final MultiSubscriber m_topicNameSubscriber; + + /** + * Constructs a PhotonCamera from a root table. + * + * @param instance The NetworkTableInstance to pull data from. This can be a custom instance in + * simulation, but should *usually* be the default NTInstance from + * NetworkTableInstance::getDefault + * @param cameraName The name of the camera, as seen in the UI. 
+ */ + public PhotonCamera(NetworkTableInstance instance, String cameraName) { + name = cameraName; + var photonvision_root_table = instance.getTable(kTableName); + this.cameraTable = photonvision_root_table.getSubTable(cameraName); + path = cameraTable.getPath(); + rawBytesEntry = + cameraTable + .getRawTopic("rawBytes") + .subscribe( + "rawBytes", new byte[] {}, PubSubOption.periodic(0.01), PubSubOption.sendAll(true)); + driverModePublisher = cameraTable.getBooleanTopic("driverModeRequest").publish(); + driverModeSubscriber = cameraTable.getBooleanTopic("driverMode").subscribe(false); + inputSaveImgEntry = cameraTable.getIntegerTopic("inputSaveImgCmd").getEntry(0); + outputSaveImgEntry = cameraTable.getIntegerTopic("outputSaveImgCmd").getEntry(0); + pipelineIndexRequest = cameraTable.getIntegerTopic("pipelineIndexRequest").publish(); + pipelineIndexState = cameraTable.getIntegerTopic("pipelineIndexState").subscribe(0); + heartbeatEntry = cameraTable.getIntegerTopic("heartbeat").subscribe(-1); + cameraIntrinsicsSubscriber = + cameraTable.getDoubleArrayTopic("cameraIntrinsics").subscribe(null); + cameraDistortionSubscriber = + cameraTable.getDoubleArrayTopic("cameraDistortion").subscribe(null); + + ledModeRequest = photonvision_root_table.getIntegerTopic("ledModeRequest").publish(); + ledModeState = photonvision_root_table.getIntegerTopic("ledModeState").subscribe(-1); + versionEntry = photonvision_root_table.getStringTopic("version").subscribe(""); + + atflPublisher = photonvision_root_table.getStringTopic("apriltag_field_layout").publish(); + // Save the layout locally on Rio; on reboot, should be pushed out to NT clients + atflPublisher.getTopic().setPersistent(true); + + m_topicNameSubscriber = + new MultiSubscriber( + instance, new String[] {"/photonvision/"}, PubSubOption.topicsOnly(true)); + } + + /** + * Constructs a PhotonCamera from the name of the camera. + * + * @param cameraName The nickname of the camera (found in the PhotonVision UI). 
+ */ + public PhotonCamera(String cameraName) { + this(NetworkTableInstance.getDefault(), cameraName); + } + + /** + * Returns the latest pipeline result. + * + * @return The latest pipeline result. + */ + public PhotonPipelineResult getLatestResult() { + verifyVersion(); + + // Clear the packet. + packet.clear(); + + // Create latest result. + var ret = new PhotonPipelineResult(); + + // Populate packet and create result. + packet.setData(rawBytesEntry.get(new byte[] {})); + + if (packet.getSize() < 1) return ret; + ret.createFromPacket(packet); + + // Set the timestamp of the result. + // getLatestChange returns in microseconds, so we divide by 1e6 to convert to seconds. + ret.setTimestampSeconds((rawBytesEntry.getLastChange() / 1e6) - ret.getLatencyMillis() / 1e3); + + // Return result. + return ret; + } + + /** + * Returns whether the camera is in driver mode. + * + * @return Whether the camera is in driver mode. + */ + public boolean getDriverMode() { + return driverModeSubscriber.get(); + } + + /** + * Toggles driver mode. + * + * @param driverMode Whether to set driver mode. + */ + public void setDriverMode(boolean driverMode) { + driverModePublisher.set(driverMode); + } + + /** + * Request the camera to save a new image file from the input camera stream with overlays. Images + * take up space in the filesystem of the PhotonCamera. Calling it frequently will fill up disk + * space and eventually cause the system to stop working. Clear out images in + * /opt/photonvision/photonvision_config/imgSaves frequently to prevent issues. + */ + public void takeInputSnapshot() { + inputSaveImgEntry.set(inputSaveImgEntry.get() + 1); + } + + /** + * Request the camera to save a new image file from the output stream with overlays. Images take + * up space in the filesystem of the PhotonCamera. Calling it frequently will fill up disk space + * and eventually cause the system to stop working. 
Clear out images in + * /opt/photonvision/photonvision_config/imgSaves frequently to prevent issues. + */ + public void takeOutputSnapshot() { + outputSaveImgEntry.set(outputSaveImgEntry.get() + 1); } - return (now - prevHeartbeatChangeTime) < HEARBEAT_DEBOUNCE_SEC; - } - - /** - * Set the Apriltag Field Layout used by all PhotonVision coprocessors that are (or might later) - * connect to this robot. The topic is marked as persistant, so even if you only call this once - * ever, it will be saved on the RoboRIO and pushed out to all NT clients when code reboots. - * PhotonVision will also store a copy of this layout locally on the coprocessor, but subscribes - * to this topic and the local copy will be updated when this function is called. - * - * @param layout The layout to use for *all* PhotonVision cameras - * @return Success of serializing the JSON. This does *not* mean that all PhotonVision clients - * have updated their internal layouts. - */ - public boolean setApriltagFieldLayout(AprilTagFieldLayout layout) { - try { - var layout_json = new ObjectMapper().writeValueAsString(layout); - atflPublisher.set(layout_json); - return true; - } catch (JsonProcessingException e) { - MathSharedStore.reportError("Error setting ATFL in " + this.getName(), e.getStackTrace()); + /** + * Returns the active pipeline index. + * + * @return The active pipeline index. 
+ */ + public int getPipelineIndex() { + return (int) pipelineIndexState.get(0); } - return false; - } - - public Optional> getCameraMatrix() { - var cameraMatrix = cameraIntrinsicsSubscriber.get(); - if (cameraMatrix != null && cameraMatrix.length == 9) { - return Optional.of(new MatBuilder<>(Nat.N3(), Nat.N3()).fill(cameraMatrix)); - } else return Optional.empty(); - } - - public Optional> getDistCoeffs() { - var distCoeffs = cameraDistortionSubscriber.get(); - if (distCoeffs != null && distCoeffs.length == 5) { - return Optional.of(new MatBuilder<>(Nat.N5(), Nat.N1()).fill(distCoeffs)); - } else return Optional.empty(); - } - - /** - * Gets the NetworkTable representing this camera's subtable. You probably don't ever need to call - * this. - */ - public final NetworkTable getCameraTable() { - return cameraTable; - } - - private void verifyVersion() { - if (!VERSION_CHECK_ENABLED) return; - - if ((Timer.getFPGATimestamp() - lastVersionCheckTime) < VERSION_CHECK_INTERVAL) return; - lastVersionCheckTime = Timer.getFPGATimestamp(); - - // Heartbeat entry is assumed to always be present. If it's not present, we - // assume that a camera with that name was never connected in the first place. - if (!heartbeatEntry.exists()) { - Set cameraNames = cameraTable.getInstance().getTable(kTableName).getSubTables(); - if (cameraNames.isEmpty()) { - DriverStation.reportError( - "Could not find any PhotonVision coprocessors on NetworkTables. Double check that PhotonVision is running, and that your camera is connected!", - false); - } else { - DriverStation.reportError( - "PhotonVision coprocessor at path " - + path - + " not found on NetworkTables. Double check that your camera names match!", - true); - DriverStation.reportError( - "Found the following PhotonVision cameras on NetworkTables:\n" - + String.join("\n", cameraNames), - false); - } + + /** + * Allows the user to select the active pipeline index. + * + * @param index The active pipeline index. 
+ */ + public void setPipelineIndex(int index) { + pipelineIndexRequest.set(index); + } + + /** + * Returns the current LED mode. + * + * @return The current LED mode. + */ + public VisionLEDMode getLEDMode() { + int value = (int) ledModeState.get(-1); + switch (value) { + case 0: + return VisionLEDMode.kOff; + case 1: + return VisionLEDMode.kOn; + case 2: + return VisionLEDMode.kBlink; + case -1: + default: + return VisionLEDMode.kDefault; + } } - // Check for connection status. Warn if disconnected. - else if (!isConnected()) { - DriverStation.reportWarning( - "PhotonVision coprocessor at path " + path + " is not sending new data.", true); + + /** + * Sets the LED mode. + * + * @param led The mode to set to. + */ + public void setLED(VisionLEDMode led) { + ledModeRequest.set(led.value); + } + + /** + * Returns whether the latest target result has targets. + * + *

This method is deprecated; {@link PhotonPipelineResult#hasTargets()} should be used instead. + * + * @deprecated This method should be replaced with {@link PhotonPipelineResult#hasTargets()} + * @return Whether the latest target result has targets. + */ + @Deprecated + public boolean hasTargets() { + return getLatestResult().hasTargets(); + } + + /** + * Returns the name of the camera. This will return the same value that was given to the + * constructor as cameraName. + * + * @return The name of the camera. + */ + public String getName() { + return name; + } + + /** + * Returns whether the camera is connected and actively returning new data. Connection status is + * debounced. + * + * @return True if the camera is actively sending frame data, false otherwise. + */ + public boolean isConnected() { + var curHeartbeat = heartbeatEntry.get(); + var now = Timer.getFPGATimestamp(); + + if (curHeartbeat != prevHeartbeatValue) { + // New heartbeat value from the coprocessor + prevHeartbeatChangeTime = now; + prevHeartbeatValue = curHeartbeat; + } + + return (now - prevHeartbeatChangeTime) < HEARBEAT_DEBOUNCE_SEC; + } + + /** + * Set the Apriltag Field Layout used by all PhotonVision coprocessors that are (or might later) + * connect to this robot. The topic is marked as persistant, so even if you only call this once + * ever, it will be saved on the RoboRIO and pushed out to all NT clients when code reboots. + * PhotonVision will also store a copy of this layout locally on the coprocessor, but subscribes + * to this topic and the local copy will be updated when this function is called. + * + * @param layout The layout to use for *all* PhotonVision cameras + * @return Success of serializing the JSON. This does *not* mean that all PhotonVision clients + * have updated their internal layouts. 
+ */ + public boolean setApriltagFieldLayout(AprilTagFieldLayout layout) { + try { + var layout_json = new ObjectMapper().writeValueAsString(layout); + atflPublisher.set(layout_json); + return true; + } catch (JsonProcessingException e) { + MathSharedStore.reportError("Error setting ATFL in " + this.getName(), e.getStackTrace()); + } + return false; + } + + public Optional> getCameraMatrix() { + var cameraMatrix = cameraIntrinsicsSubscriber.get(); + if (cameraMatrix != null && cameraMatrix.length == 9) { + return Optional.of(new MatBuilder<>(Nat.N3(), Nat.N3()).fill(cameraMatrix)); + } else return Optional.empty(); + } + + public Optional> getDistCoeffs() { + var distCoeffs = cameraDistortionSubscriber.get(); + if (distCoeffs != null && distCoeffs.length == 5) { + return Optional.of(new MatBuilder<>(Nat.N5(), Nat.N1()).fill(distCoeffs)); + } else return Optional.empty(); + } + + /** + * Gets the NetworkTable representing this camera's subtable. You probably don't ever need to call + * this. + */ + public final NetworkTable getCameraTable() { + return cameraTable; } - // Check for version. Warn if the versions aren't aligned. - String versionString = versionEntry.get(""); - if (!versionString.equals("") && !PhotonVersion.versionMatches(versionString)) { - // Error on a verified version mismatch - // But stay silent otherwise - DriverStation.reportWarning( - "Photon version " - + PhotonVersion.versionString - + " does not match coprocessor version " - + versionString - + "!", - true); + private void verifyVersion() { + if (!VERSION_CHECK_ENABLED) return; + + if ((Timer.getFPGATimestamp() - lastVersionCheckTime) < VERSION_CHECK_INTERVAL) return; + lastVersionCheckTime = Timer.getFPGATimestamp(); + + // Heartbeat entry is assumed to always be present. If it's not present, we + // assume that a camera with that name was never connected in the first place. 
+ if (!heartbeatEntry.exists()) { + Set cameraNames = cameraTable.getInstance().getTable(kTableName).getSubTables(); + if (cameraNames.isEmpty()) { + DriverStation.reportError( + "Could not find any PhotonVision coprocessors on NetworkTables. Double check that PhotonVision is running, and that your camera is connected!", + false); + } else { + DriverStation.reportError( + "PhotonVision coprocessor at path " + + path + + " not found on NetworkTables. Double check that your camera names match!", + true); + DriverStation.reportError( + "Found the following PhotonVision cameras on NetworkTables:\n" + + String.join("\n", cameraNames), + false); + } + } + // Check for connection status. Warn if disconnected. + else if (!isConnected()) { + DriverStation.reportWarning( + "PhotonVision coprocessor at path " + path + " is not sending new data.", true); + } + + // Check for version. Warn if the versions aren't aligned. + String versionString = versionEntry.get(""); + if (!versionString.equals("") && !PhotonVersion.versionMatches(versionString)) { + // Error on a verified version mismatch + // But stay silent otherwise + DriverStation.reportWarning( + "Photon version " + + PhotonVersion.versionString + + " does not match coprocessor version " + + versionString + + "!", + true); + } } - } } diff --git a/photon-lib/src/main/java/org/photonvision/PhotonPoseEstimator.java b/photon-lib/src/main/java/org/photonvision/PhotonPoseEstimator.java index e5c1388eab..ed29a9c265 100644 --- a/photon-lib/src/main/java/org/photonvision/PhotonPoseEstimator.java +++ b/photon-lib/src/main/java/org/photonvision/PhotonPoseEstimator.java @@ -51,672 +51,672 @@ * below. Example usage can be found in our apriltagExample example project. */ public class PhotonPoseEstimator { - /** Position estimation strategies that can be used by the {@link PhotonPoseEstimator} class. */ - public enum PoseStrategy { - /** Choose the Pose with the lowest ambiguity. 
*/ - LOWEST_AMBIGUITY, + /** Position estimation strategies that can be used by the {@link PhotonPoseEstimator} class. */ + public enum PoseStrategy { + /** Choose the Pose with the lowest ambiguity. */ + LOWEST_AMBIGUITY, + + /** Choose the Pose which is closest to the camera height. */ + CLOSEST_TO_CAMERA_HEIGHT, + + /** Choose the Pose which is closest to a set Reference position. */ + CLOSEST_TO_REFERENCE_POSE, + + /** Choose the Pose which is closest to the last pose calculated */ + CLOSEST_TO_LAST_POSE, + + /** Return the average of the best target poses using ambiguity as weight. */ + AVERAGE_BEST_TARGETS, + + /** + * Use all visible tags to compute a single pose estimate on coprocessor. This option needs to + * be enabled on the PhotonVision web UI as well. + */ + MULTI_TAG_PNP_ON_COPROCESSOR, + + /** + * Use all visible tags to compute a single pose estimate. This runs on the RoboRIO, and can + * take a lot of time. + */ + MULTI_TAG_PNP_ON_RIO + } - /** Choose the Pose which is closest to the camera height. */ - CLOSEST_TO_CAMERA_HEIGHT, + private AprilTagFieldLayout fieldTags; + private PoseStrategy primaryStrategy; + private PoseStrategy multiTagFallbackStrategy = PoseStrategy.LOWEST_AMBIGUITY; + private final PhotonCamera camera; + private Transform3d robotToCamera; - /** Choose the Pose which is closest to a set Reference position. */ - CLOSEST_TO_REFERENCE_POSE, + private Pose3d lastPose; + private Pose3d referencePose; + protected double poseCacheTimestampSeconds = -1; + private final Set reportedErrors = new HashSet<>(); - /** Choose the Pose which is closest to the last pose calculated */ - CLOSEST_TO_LAST_POSE, + /** + * Create a new PhotonPoseEstimator. + * + * @param fieldTags A WPILib {@link AprilTagFieldLayout} linking AprilTag IDs to Pose3d objects + * with respect to the FIRST field using the Field + * Coordinate System. + * @param strategy The strategy it should use to determine the best pose. 
+ * @param camera PhotonCamera + * @param robotToCamera Transform3d from the center of the robot to the camera mount position (ie, + * robot ➔ camera) in the Robot + * Coordinate System. + */ + public PhotonPoseEstimator( + AprilTagFieldLayout fieldTags, + PoseStrategy strategy, + PhotonCamera camera, + Transform3d robotToCamera) { + this.fieldTags = fieldTags; + this.primaryStrategy = strategy; + this.camera = camera; + this.robotToCamera = robotToCamera; + } - /** Return the average of the best target poses using ambiguity as weight. */ - AVERAGE_BEST_TARGETS, + public PhotonPoseEstimator( + AprilTagFieldLayout fieldTags, PoseStrategy strategy, Transform3d robotToCamera) { + this.fieldTags = fieldTags; + this.primaryStrategy = strategy; + this.camera = null; + this.robotToCamera = robotToCamera; + } + + /** Invalidates the pose cache. */ + private void invalidatePoseCache() { + poseCacheTimestampSeconds = -1; + } + + private void checkUpdate(Object oldObj, Object newObj) { + if (oldObj != newObj && oldObj != null && !oldObj.equals(newObj)) { + invalidatePoseCache(); + } + } /** - * Use all visible tags to compute a single pose estimate on coprocessor. This option needs to - * be enabled on the PhotonVision web UI as well. + * Get the AprilTagFieldLayout being used by the PositionEstimator. + * + * @return the AprilTagFieldLayout */ - MULTI_TAG_PNP_ON_COPROCESSOR, + public AprilTagFieldLayout getFieldTags() { + return fieldTags; + } /** - * Use all visible tags to compute a single pose estimate. This runs on the RoboRIO, and can - * take a lot of time. + * Set the AprilTagFieldLayout being used by the PositionEstimator. 
+ * + * @param fieldTags the AprilTagFieldLayout */ - MULTI_TAG_PNP_ON_RIO - } - - private AprilTagFieldLayout fieldTags; - private PoseStrategy primaryStrategy; - private PoseStrategy multiTagFallbackStrategy = PoseStrategy.LOWEST_AMBIGUITY; - private final PhotonCamera camera; - private Transform3d robotToCamera; - - private Pose3d lastPose; - private Pose3d referencePose; - protected double poseCacheTimestampSeconds = -1; - private final Set reportedErrors = new HashSet<>(); - - /** - * Create a new PhotonPoseEstimator. - * - * @param fieldTags A WPILib {@link AprilTagFieldLayout} linking AprilTag IDs to Pose3d objects - * with respect to the FIRST field using the Field - * Coordinate System. - * @param strategy The strategy it should use to determine the best pose. - * @param camera PhotonCamera - * @param robotToCamera Transform3d from the center of the robot to the camera mount position (ie, - * robot ➔ camera) in the Robot - * Coordinate System. - */ - public PhotonPoseEstimator( - AprilTagFieldLayout fieldTags, - PoseStrategy strategy, - PhotonCamera camera, - Transform3d robotToCamera) { - this.fieldTags = fieldTags; - this.primaryStrategy = strategy; - this.camera = camera; - this.robotToCamera = robotToCamera; - } - - public PhotonPoseEstimator( - AprilTagFieldLayout fieldTags, PoseStrategy strategy, Transform3d robotToCamera) { - this.fieldTags = fieldTags; - this.primaryStrategy = strategy; - this.camera = null; - this.robotToCamera = robotToCamera; - } - - /** Invalidates the pose cache. */ - private void invalidatePoseCache() { - poseCacheTimestampSeconds = -1; - } - - private void checkUpdate(Object oldObj, Object newObj) { - if (oldObj != newObj && oldObj != null && !oldObj.equals(newObj)) { - invalidatePoseCache(); - } - } - - /** - * Get the AprilTagFieldLayout being used by the PositionEstimator. 
- * - * @return the AprilTagFieldLayout - */ - public AprilTagFieldLayout getFieldTags() { - return fieldTags; - } - - /** - * Set the AprilTagFieldLayout being used by the PositionEstimator. - * - * @param fieldTags the AprilTagFieldLayout - */ - public void setFieldTags(AprilTagFieldLayout fieldTags) { - checkUpdate(this.fieldTags, fieldTags); - this.fieldTags = fieldTags; - } - - /** - * Get the Position Estimation Strategy being used by the Position Estimator. - * - * @return the strategy - */ - public PoseStrategy getPrimaryStrategy() { - return primaryStrategy; - } - - /** - * Set the Position Estimation Strategy used by the Position Estimator. - * - * @param strategy the strategy to set - */ - public void setPrimaryStrategy(PoseStrategy strategy) { - checkUpdate(this.primaryStrategy, strategy); - this.primaryStrategy = strategy; - } - - /** - * Set the Position Estimation Strategy used in multi-tag mode when only one tag can be seen. Must - * NOT be MULTI_TAG_PNP - * - * @param strategy the strategy to set - */ - public void setMultiTagFallbackStrategy(PoseStrategy strategy) { - checkUpdate(this.multiTagFallbackStrategy, strategy); - if (strategy == PoseStrategy.MULTI_TAG_PNP_ON_COPROCESSOR - || strategy == PoseStrategy.MULTI_TAG_PNP_ON_RIO) { - DriverStation.reportWarning( - "Fallback cannot be set to MULTI_TAG_PNP! Setting to lowest ambiguity", false); - strategy = PoseStrategy.LOWEST_AMBIGUITY; + public void setFieldTags(AprilTagFieldLayout fieldTags) { + checkUpdate(this.fieldTags, fieldTags); + this.fieldTags = fieldTags; } - this.multiTagFallbackStrategy = strategy; - } - - /** - * Return the reference position that is being used by the estimator. - * - * @return the referencePose - */ - public Pose3d getReferencePose() { - return referencePose; - } - - /** - * Update the stored reference pose for use when using the CLOSEST_TO_REFERENCE_POSE - * strategy. 
- * - * @param referencePose the referencePose to set - */ - public void setReferencePose(Pose3d referencePose) { - checkUpdate(this.referencePose, referencePose); - this.referencePose = referencePose; - } - - /** - * Update the stored reference pose for use when using the CLOSEST_TO_REFERENCE_POSE - * strategy. - * - * @param referencePose the referencePose to set - */ - public void setReferencePose(Pose2d referencePose) { - setReferencePose(new Pose3d(referencePose)); - } - - /** - * Update the stored last pose. Useful for setting the initial estimate when using the - * CLOSEST_TO_LAST_POSE strategy. - * - * @param lastPose the lastPose to set - */ - public void setLastPose(Pose3d lastPose) { - this.lastPose = lastPose; - } - - /** - * Update the stored last pose. Useful for setting the initial estimate when using the - * CLOSEST_TO_LAST_POSE strategy. - * - * @param lastPose the lastPose to set - */ - public void setLastPose(Pose2d lastPose) { - setLastPose(new Pose3d(lastPose)); - } - - /** - * @return The current transform from the center of the robot to the camera mount position - */ - public Transform3d getRobotToCameraTransform() { - return robotToCamera; - } - - /** - * Useful for pan and tilt mechanisms and such. - * - * @param robotToCamera The current transform from the center of the robot to the camera mount - * position - */ - public void setRobotToCameraTransform(Transform3d robotToCamera) { - this.robotToCamera = robotToCamera; - } - - /** - * Poll data from the configured cameras and update the estimated position of the robot. Returns - * empty if: - * - *

    - *
  • New data has not been received since the last call to {@code update()}. - *
  • No targets were found from the camera - *
  • There is no camera set - *
- * - * @return an {@link EstimatedRobotPose} with an estimated pose, timestamp, and targets used to - * create the estimate. - */ - public Optional update() { - if (camera == null) { - DriverStation.reportError("[PhotonPoseEstimator] Missing camera!", false); - return Optional.empty(); + + /** + * Get the Position Estimation Strategy being used by the Position Estimator. + * + * @return the strategy + */ + public PoseStrategy getPrimaryStrategy() { + return primaryStrategy; } - PhotonPipelineResult cameraResult = camera.getLatestResult(); - - return update(cameraResult, camera.getCameraMatrix(), camera.getDistCoeffs()); - } - - /** - * Updates the estimated position of the robot. Returns empty if: - * - *
    - *
  • The timestamp of the provided pipeline result is the same as in the previous call to - * {@code update()}. - *
  • No targets were found in the pipeline results. - *
- * - * @param cameraResult The latest pipeline result from the camera - * @return an {@link EstimatedRobotPose} with an estimated pose, timestamp, and targets used to - * create the estimate. - */ - public Optional update(PhotonPipelineResult cameraResult) { - if (camera == null) return update(cameraResult, Optional.empty(), Optional.empty()); - return update(cameraResult, camera.getCameraMatrix(), camera.getDistCoeffs()); - } - - /** - * Updates the estimated position of the robot. Returns empty if: - * - *
    - *
  • The timestamp of the provided pipeline result is the same as in the previous call to - * {@code update()}. - *
  • No targets were found in the pipeline results. - *
- * - * @param cameraMatrix Camera calibration data that can be used in the case of no assigned - * PhotonCamera. - * @param distCoeffs Camera calibration data that can be used in the case of no assigned - * PhotonCamera - * @return an {@link EstimatedRobotPose} with an estimated pose, timestamp, and targets used to - * create the estimate. - */ - public Optional update( - PhotonPipelineResult cameraResult, - Optional> cameraMatrix, - Optional> distCoeffs) { - // Time in the past -- give up, since the following if expects times > 0 - if (cameraResult.getTimestampSeconds() < 0) { - return Optional.empty(); + /** + * Set the Position Estimation Strategy used by the Position Estimator. + * + * @param strategy the strategy to set + */ + public void setPrimaryStrategy(PoseStrategy strategy) { + checkUpdate(this.primaryStrategy, strategy); + this.primaryStrategy = strategy; } - // If the pose cache timestamp was set, and the result is from the same - // timestamp, return an - // empty result - if (poseCacheTimestampSeconds > 0 - && Math.abs(poseCacheTimestampSeconds - cameraResult.getTimestampSeconds()) < 1e-6) { - return Optional.empty(); + /** + * Set the Position Estimation Strategy used in multi-tag mode when only one tag can be seen. Must + * NOT be MULTI_TAG_PNP + * + * @param strategy the strategy to set + */ + public void setMultiTagFallbackStrategy(PoseStrategy strategy) { + checkUpdate(this.multiTagFallbackStrategy, strategy); + if (strategy == PoseStrategy.MULTI_TAG_PNP_ON_COPROCESSOR + || strategy == PoseStrategy.MULTI_TAG_PNP_ON_RIO) { + DriverStation.reportWarning( + "Fallback cannot be set to MULTI_TAG_PNP! Setting to lowest ambiguity", false); + strategy = PoseStrategy.LOWEST_AMBIGUITY; + } + this.multiTagFallbackStrategy = strategy; } - // Remember the timestamp of the current result used - poseCacheTimestampSeconds = cameraResult.getTimestampSeconds(); + /** + * Return the reference position that is being used by the estimator. 
+ * + * @return the referencePose + */ + public Pose3d getReferencePose() { + return referencePose; + } - // If no targets seen, trivial case -- return empty result - if (!cameraResult.hasTargets()) { - return Optional.empty(); + /** + * Update the stored reference pose for use when using the CLOSEST_TO_REFERENCE_POSE + * strategy. + * + * @param referencePose the referencePose to set + */ + public void setReferencePose(Pose3d referencePose) { + checkUpdate(this.referencePose, referencePose); + this.referencePose = referencePose; } - return update(cameraResult, cameraMatrix, distCoeffs, this.primaryStrategy); - } - - private Optional update( - PhotonPipelineResult cameraResult, - Optional> cameraMatrix, - Optional> distCoeffs, - PoseStrategy strat) { - Optional estimatedPose; - switch (strat) { - case LOWEST_AMBIGUITY: - estimatedPose = lowestAmbiguityStrategy(cameraResult); - break; - case CLOSEST_TO_CAMERA_HEIGHT: - estimatedPose = closestToCameraHeightStrategy(cameraResult); - break; - case CLOSEST_TO_REFERENCE_POSE: - estimatedPose = closestToReferencePoseStrategy(cameraResult, referencePose); - break; - case CLOSEST_TO_LAST_POSE: - setReferencePose(lastPose); - estimatedPose = closestToReferencePoseStrategy(cameraResult, referencePose); - break; - case AVERAGE_BEST_TARGETS: - estimatedPose = averageBestTargetsStrategy(cameraResult); - break; - case MULTI_TAG_PNP_ON_RIO: - estimatedPose = multiTagOnRioStrategy(cameraResult, cameraMatrix, distCoeffs); - break; - case MULTI_TAG_PNP_ON_COPROCESSOR: - estimatedPose = multiTagOnCoprocStrategy(cameraResult, cameraMatrix, distCoeffs); - break; - default: - DriverStation.reportError( - "[PhotonPoseEstimator] Unknown Position Estimation Strategy!", false); - return Optional.empty(); + /** + * Update the stored reference pose for use when using the CLOSEST_TO_REFERENCE_POSE + * strategy. 
+ * + * @param referencePose the referencePose to set + */ + public void setReferencePose(Pose2d referencePose) { + setReferencePose(new Pose3d(referencePose)); } - if (estimatedPose.isEmpty()) { - lastPose = null; + /** + * Update the stored last pose. Useful for setting the initial estimate when using the + * CLOSEST_TO_LAST_POSE strategy. + * + * @param lastPose the lastPose to set + */ + public void setLastPose(Pose3d lastPose) { + this.lastPose = lastPose; } - return estimatedPose; - } - - private Optional multiTagOnCoprocStrategy( - PhotonPipelineResult result, - Optional> cameraMatrixOpt, - Optional> distCoeffsOpt) { - if (result.getMultiTagResult().estimatedPose.isPresent) { - var best_tf = result.getMultiTagResult().estimatedPose.best; - var best = - new Pose3d() - .plus(best_tf) // field-to-camera - .plus(robotToCamera.inverse()); // field-to-robot - return Optional.of( - new EstimatedRobotPose( - best, - result.getTimestampSeconds(), - result.getTargets(), - PoseStrategy.MULTI_TAG_PNP_ON_COPROCESSOR)); - } else { - return update(result, cameraMatrixOpt, distCoeffsOpt, this.multiTagFallbackStrategy); + /** + * Update the stored last pose. Useful for setting the initial estimate when using the + * CLOSEST_TO_LAST_POSE strategy. 
+ * + * @param lastPose the lastPose to set + */ + public void setLastPose(Pose2d lastPose) { + setLastPose(new Pose3d(lastPose)); } - } - - private Optional multiTagOnRioStrategy( - PhotonPipelineResult result, - Optional> cameraMatrixOpt, - Optional> distCoeffsOpt) { - boolean hasCalibData = cameraMatrixOpt.isPresent() && distCoeffsOpt.isPresent(); - // cannot run multitagPNP, use fallback strategy - if (!hasCalibData || result.getTargets().size() < 2) { - return update(result, cameraMatrixOpt, distCoeffsOpt, this.multiTagFallbackStrategy); + + /** + * @return The current transform from the center of the robot to the camera mount position + */ + public Transform3d getRobotToCameraTransform() { + return robotToCamera; } - var pnpResults = - VisionEstimation.estimateCamPosePNP( - cameraMatrixOpt.get(), distCoeffsOpt.get(), result.getTargets(), fieldTags); - // try fallback strategy if solvePNP fails for some reason - if (!pnpResults.isPresent) - return update(result, cameraMatrixOpt, distCoeffsOpt, this.multiTagFallbackStrategy); - var best = - new Pose3d() - .plus(pnpResults.best) // field-to-camera - .plus(robotToCamera.inverse()); // field-to-robot - - return Optional.of( - new EstimatedRobotPose( - best, - result.getTimestampSeconds(), - result.getTargets(), - PoseStrategy.MULTI_TAG_PNP_ON_RIO)); - } - - /** - * Return the estimated position of the robot with the lowest position ambiguity from a List of - * pipeline results. - * - * @param result pipeline result - * @return the estimated position of the robot in the FCS and the estimated timestamp of this - * estimation. - */ - private Optional lowestAmbiguityStrategy(PhotonPipelineResult result) { - PhotonTrackedTarget lowestAmbiguityTarget = null; - - double lowestAmbiguityScore = 10; - - for (PhotonTrackedTarget target : result.targets) { - double targetPoseAmbiguity = target.getPoseAmbiguity(); - // Make sure the target is a Fiducial target. 
- if (targetPoseAmbiguity != -1 && targetPoseAmbiguity < lowestAmbiguityScore) { - lowestAmbiguityScore = targetPoseAmbiguity; - lowestAmbiguityTarget = target; - } + /** + * Useful for pan and tilt mechanisms and such. + * + * @param robotToCamera The current transform from the center of the robot to the camera mount + * position + */ + public void setRobotToCameraTransform(Transform3d robotToCamera) { + this.robotToCamera = robotToCamera; } - // Although there are confirmed to be targets, none of them may be fiducial - // targets. - if (lowestAmbiguityTarget == null) return Optional.empty(); + /** + * Poll data from the configured cameras and update the estimated position of the robot. Returns + * empty if: + * + *
    + *
  • New data has not been received since the last call to {@code update()}. + *
  • No targets were found from the camera + *
  • There is no camera set + *
+ * + * @return an {@link EstimatedRobotPose} with an estimated pose, timestamp, and targets used to + * create the estimate. + */ + public Optional update() { + if (camera == null) { + DriverStation.reportError("[PhotonPoseEstimator] Missing camera!", false); + return Optional.empty(); + } - int targetFiducialId = lowestAmbiguityTarget.getFiducialId(); + PhotonPipelineResult cameraResult = camera.getLatestResult(); - Optional targetPosition = fieldTags.getTagPose(targetFiducialId); + return update(cameraResult, camera.getCameraMatrix(), camera.getDistCoeffs()); + } - if (targetPosition.isEmpty()) { - reportFiducialPoseError(targetFiducialId); - return Optional.empty(); + /** + * Updates the estimated position of the robot. Returns empty if: + * + *
    + *
  • The timestamp of the provided pipeline result is the same as in the previous call to + * {@code update()}. + *
  • No targets were found in the pipeline results. + *
+ * + * @param cameraResult The latest pipeline result from the camera + * @return an {@link EstimatedRobotPose} with an estimated pose, timestamp, and targets used to + * create the estimate. + */ + public Optional update(PhotonPipelineResult cameraResult) { + if (camera == null) return update(cameraResult, Optional.empty(), Optional.empty()); + return update(cameraResult, camera.getCameraMatrix(), camera.getDistCoeffs()); } - return Optional.of( - new EstimatedRobotPose( - targetPosition - .get() - .transformBy(lowestAmbiguityTarget.getBestCameraToTarget().inverse()) - .transformBy(robotToCamera.inverse()), - result.getTimestampSeconds(), - result.getTargets(), - PoseStrategy.LOWEST_AMBIGUITY)); - } - - /** - * Return the estimated position of the robot using the target with the lowest delta height - * difference between the estimated and actual height of the camera. - * - * @param result pipeline result - * @return the estimated position of the robot in the FCS and the estimated timestamp of this - * estimation. - */ - private Optional closestToCameraHeightStrategy(PhotonPipelineResult result) { - double smallestHeightDifference = 10e9; - EstimatedRobotPose closestHeightTarget = null; - - for (PhotonTrackedTarget target : result.targets) { - int targetFiducialId = target.getFiducialId(); - - // Don't report errors for non-fiducial targets. This could also be resolved by - // adding -1 to - // the initial HashSet. 
- if (targetFiducialId == -1) continue; - - Optional targetPosition = fieldTags.getTagPose(target.getFiducialId()); - - if (targetPosition.isEmpty()) { - reportFiducialPoseError(target.getFiducialId()); - continue; - } - - double alternateTransformDelta = - Math.abs( - robotToCamera.getZ() - - targetPosition - .get() - .transformBy(target.getAlternateCameraToTarget().inverse()) - .getZ()); - double bestTransformDelta = - Math.abs( - robotToCamera.getZ() - - targetPosition - .get() - .transformBy(target.getBestCameraToTarget().inverse()) - .getZ()); - - if (alternateTransformDelta < smallestHeightDifference) { - smallestHeightDifference = alternateTransformDelta; - closestHeightTarget = - new EstimatedRobotPose( - targetPosition - .get() - .transformBy(target.getAlternateCameraToTarget().inverse()) - .transformBy(robotToCamera.inverse()), - result.getTimestampSeconds(), - result.getTargets(), - PoseStrategy.CLOSEST_TO_CAMERA_HEIGHT); - } - - if (bestTransformDelta < smallestHeightDifference) { - smallestHeightDifference = bestTransformDelta; - closestHeightTarget = - new EstimatedRobotPose( - targetPosition - .get() - .transformBy(target.getBestCameraToTarget().inverse()) - .transformBy(robotToCamera.inverse()), - result.getTimestampSeconds(), - result.getTargets(), - PoseStrategy.CLOSEST_TO_CAMERA_HEIGHT); - } + /** + * Updates the estimated position of the robot. Returns empty if: + * + *
    + *
  • The timestamp of the provided pipeline result is the same as in the previous call to + * {@code update()}. + *
  • No targets were found in the pipeline results. + *
+ * + * @param cameraMatrix Camera calibration data that can be used in the case of no assigned + * PhotonCamera. + * @param distCoeffs Camera calibration data that can be used in the case of no assigned + * PhotonCamera + * @return an {@link EstimatedRobotPose} with an estimated pose, timestamp, and targets used to + * create the estimate. + */ + public Optional update( + PhotonPipelineResult cameraResult, + Optional> cameraMatrix, + Optional> distCoeffs) { + // Time in the past -- give up, since the following if expects times > 0 + if (cameraResult.getTimestampSeconds() < 0) { + return Optional.empty(); + } + + // If the pose cache timestamp was set, and the result is from the same + // timestamp, return an + // empty result + if (poseCacheTimestampSeconds > 0 + && Math.abs(poseCacheTimestampSeconds - cameraResult.getTimestampSeconds()) < 1e-6) { + return Optional.empty(); + } + + // Remember the timestamp of the current result used + poseCacheTimestampSeconds = cameraResult.getTimestampSeconds(); + + // If no targets seen, trivial case -- return empty result + if (!cameraResult.hasTargets()) { + return Optional.empty(); + } + + return update(cameraResult, cameraMatrix, distCoeffs, this.primaryStrategy); } - // Need to null check here in case none of the provided targets are fiducial. - return Optional.ofNullable(closestHeightTarget); - } - - /** - * Return the estimated position of the robot using the target with the lowest delta in the vector - * magnitude between it and the reference pose. - * - * @param result pipeline result - * @param referencePose reference pose to check vector magnitude difference against. - * @return the estimated position of the robot in the FCS and the estimated timestamp of this - * estimation. 
- */ - private Optional closestToReferencePoseStrategy( - PhotonPipelineResult result, Pose3d referencePose) { - if (referencePose == null) { - DriverStation.reportError( - "[PhotonPoseEstimator] Tried to use reference pose strategy without setting the reference!", - false); - return Optional.empty(); + private Optional update( + PhotonPipelineResult cameraResult, + Optional> cameraMatrix, + Optional> distCoeffs, + PoseStrategy strat) { + Optional estimatedPose; + switch (strat) { + case LOWEST_AMBIGUITY: + estimatedPose = lowestAmbiguityStrategy(cameraResult); + break; + case CLOSEST_TO_CAMERA_HEIGHT: + estimatedPose = closestToCameraHeightStrategy(cameraResult); + break; + case CLOSEST_TO_REFERENCE_POSE: + estimatedPose = closestToReferencePoseStrategy(cameraResult, referencePose); + break; + case CLOSEST_TO_LAST_POSE: + setReferencePose(lastPose); + estimatedPose = closestToReferencePoseStrategy(cameraResult, referencePose); + break; + case AVERAGE_BEST_TARGETS: + estimatedPose = averageBestTargetsStrategy(cameraResult); + break; + case MULTI_TAG_PNP_ON_RIO: + estimatedPose = multiTagOnRioStrategy(cameraResult, cameraMatrix, distCoeffs); + break; + case MULTI_TAG_PNP_ON_COPROCESSOR: + estimatedPose = multiTagOnCoprocStrategy(cameraResult, cameraMatrix, distCoeffs); + break; + default: + DriverStation.reportError( + "[PhotonPoseEstimator] Unknown Position Estimation Strategy!", false); + return Optional.empty(); + } + + if (estimatedPose.isEmpty()) { + lastPose = null; + } + + return estimatedPose; } - double smallestPoseDelta = 10e9; - EstimatedRobotPose lowestDeltaPose = null; - - for (PhotonTrackedTarget target : result.targets) { - int targetFiducialId = target.getFiducialId(); - - // Don't report errors for non-fiducial targets. This could also be resolved by - // adding -1 to - // the initial HashSet. 
- if (targetFiducialId == -1) continue; - - Optional targetPosition = fieldTags.getTagPose(target.getFiducialId()); - - if (targetPosition.isEmpty()) { - reportFiducialPoseError(targetFiducialId); - continue; - } - - Pose3d altTransformPosition = - targetPosition - .get() - .transformBy(target.getAlternateCameraToTarget().inverse()) - .transformBy(robotToCamera.inverse()); - Pose3d bestTransformPosition = - targetPosition - .get() - .transformBy(target.getBestCameraToTarget().inverse()) - .transformBy(robotToCamera.inverse()); - - double altDifference = Math.abs(calculateDifference(referencePose, altTransformPosition)); - double bestDifference = Math.abs(calculateDifference(referencePose, bestTransformPosition)); - - if (altDifference < smallestPoseDelta) { - smallestPoseDelta = altDifference; - lowestDeltaPose = - new EstimatedRobotPose( - altTransformPosition, - result.getTimestampSeconds(), - result.getTargets(), - PoseStrategy.CLOSEST_TO_REFERENCE_POSE); - } - if (bestDifference < smallestPoseDelta) { - smallestPoseDelta = bestDifference; - lowestDeltaPose = - new EstimatedRobotPose( - bestTransformPosition, - result.getTimestampSeconds(), - result.getTargets(), - PoseStrategy.CLOSEST_TO_REFERENCE_POSE); - } + private Optional multiTagOnCoprocStrategy( + PhotonPipelineResult result, + Optional> cameraMatrixOpt, + Optional> distCoeffsOpt) { + if (result.getMultiTagResult().estimatedPose.isPresent) { + var best_tf = result.getMultiTagResult().estimatedPose.best; + var best = + new Pose3d() + .plus(best_tf) // field-to-camera + .plus(robotToCamera.inverse()); // field-to-robot + return Optional.of( + new EstimatedRobotPose( + best, + result.getTimestampSeconds(), + result.getTargets(), + PoseStrategy.MULTI_TAG_PNP_ON_COPROCESSOR)); + } else { + return update(result, cameraMatrixOpt, distCoeffsOpt, this.multiTagFallbackStrategy); + } } - return Optional.ofNullable(lowestDeltaPose); - } - - /** - * Return the average of the best target poses using ambiguity as 
weight. - * - * @param result pipeline result - * @return the estimated position of the robot in the FCS and the estimated timestamp of this - * estimation. - */ - private Optional averageBestTargetsStrategy(PhotonPipelineResult result) { - List> estimatedRobotPoses = new ArrayList<>(); - double totalAmbiguity = 0; - - for (PhotonTrackedTarget target : result.targets) { - int targetFiducialId = target.getFiducialId(); - - // Don't report errors for non-fiducial targets. This could also be resolved by - // adding -1 to - // the initial HashSet. - if (targetFiducialId == -1) continue; - - Optional targetPosition = fieldTags.getTagPose(target.getFiducialId()); - - if (targetPosition.isEmpty()) { - reportFiducialPoseError(targetFiducialId); - continue; - } - - double targetPoseAmbiguity = target.getPoseAmbiguity(); - - // Pose ambiguity is 0, use that pose - if (targetPoseAmbiguity == 0) { + + private Optional multiTagOnRioStrategy( + PhotonPipelineResult result, + Optional> cameraMatrixOpt, + Optional> distCoeffsOpt) { + boolean hasCalibData = cameraMatrixOpt.isPresent() && distCoeffsOpt.isPresent(); + // cannot run multitagPNP, use fallback strategy + if (!hasCalibData || result.getTargets().size() < 2) { + return update(result, cameraMatrixOpt, distCoeffsOpt, this.multiTagFallbackStrategy); + } + + var pnpResults = + VisionEstimation.estimateCamPosePNP( + cameraMatrixOpt.get(), distCoeffsOpt.get(), result.getTargets(), fieldTags); + // try fallback strategy if solvePNP fails for some reason + if (!pnpResults.isPresent) + return update(result, cameraMatrixOpt, distCoeffsOpt, this.multiTagFallbackStrategy); + var best = + new Pose3d() + .plus(pnpResults.best) // field-to-camera + .plus(robotToCamera.inverse()); // field-to-robot + + return Optional.of( + new EstimatedRobotPose( + best, + result.getTimestampSeconds(), + result.getTargets(), + PoseStrategy.MULTI_TAG_PNP_ON_RIO)); + } + + /** + * Return the estimated position of the robot with the lowest position 
ambiguity from a List of + * pipeline results. + * + * @param result pipeline result + * @return the estimated position of the robot in the FCS and the estimated timestamp of this + * estimation. + */ + private Optional lowestAmbiguityStrategy(PhotonPipelineResult result) { + PhotonTrackedTarget lowestAmbiguityTarget = null; + + double lowestAmbiguityScore = 10; + + for (PhotonTrackedTarget target : result.targets) { + double targetPoseAmbiguity = target.getPoseAmbiguity(); + // Make sure the target is a Fiducial target. + if (targetPoseAmbiguity != -1 && targetPoseAmbiguity < lowestAmbiguityScore) { + lowestAmbiguityScore = targetPoseAmbiguity; + lowestAmbiguityTarget = target; + } + } + + // Although there are confirmed to be targets, none of them may be fiducial + // targets. + if (lowestAmbiguityTarget == null) return Optional.empty(); + + int targetFiducialId = lowestAmbiguityTarget.getFiducialId(); + + Optional targetPosition = fieldTags.getTagPose(targetFiducialId); + + if (targetPosition.isEmpty()) { + reportFiducialPoseError(targetFiducialId); + return Optional.empty(); + } + return Optional.of( - new EstimatedRobotPose( - targetPosition - .get() - .transformBy(target.getBestCameraToTarget().inverse()) - .transformBy(robotToCamera.inverse()), - result.getTimestampSeconds(), - result.getTargets(), - PoseStrategy.AVERAGE_BEST_TARGETS)); - } - - totalAmbiguity += 1.0 / target.getPoseAmbiguity(); - - estimatedRobotPoses.add( - new Pair<>( - target, - targetPosition - .get() - .transformBy(target.getBestCameraToTarget().inverse()) - .transformBy(robotToCamera.inverse()))); + new EstimatedRobotPose( + targetPosition + .get() + .transformBy(lowestAmbiguityTarget.getBestCameraToTarget().inverse()) + .transformBy(robotToCamera.inverse()), + result.getTimestampSeconds(), + result.getTargets(), + PoseStrategy.LOWEST_AMBIGUITY)); } - // Take the average + /** + * Return the estimated position of the robot using the target with the lowest delta height + * difference 
between the estimated and actual height of the camera. + * + * @param result pipeline result + * @return the estimated position of the robot in the FCS and the estimated timestamp of this + * estimation. + */ + private Optional closestToCameraHeightStrategy(PhotonPipelineResult result) { + double smallestHeightDifference = 10e9; + EstimatedRobotPose closestHeightTarget = null; + + for (PhotonTrackedTarget target : result.targets) { + int targetFiducialId = target.getFiducialId(); + + // Don't report errors for non-fiducial targets. This could also be resolved by + // adding -1 to + // the initial HashSet. + if (targetFiducialId == -1) continue; + + Optional targetPosition = fieldTags.getTagPose(target.getFiducialId()); + + if (targetPosition.isEmpty()) { + reportFiducialPoseError(target.getFiducialId()); + continue; + } + + double alternateTransformDelta = + Math.abs( + robotToCamera.getZ() + - targetPosition + .get() + .transformBy(target.getAlternateCameraToTarget().inverse()) + .getZ()); + double bestTransformDelta = + Math.abs( + robotToCamera.getZ() + - targetPosition + .get() + .transformBy(target.getBestCameraToTarget().inverse()) + .getZ()); + + if (alternateTransformDelta < smallestHeightDifference) { + smallestHeightDifference = alternateTransformDelta; + closestHeightTarget = + new EstimatedRobotPose( + targetPosition + .get() + .transformBy(target.getAlternateCameraToTarget().inverse()) + .transformBy(robotToCamera.inverse()), + result.getTimestampSeconds(), + result.getTargets(), + PoseStrategy.CLOSEST_TO_CAMERA_HEIGHT); + } + + if (bestTransformDelta < smallestHeightDifference) { + smallestHeightDifference = bestTransformDelta; + closestHeightTarget = + new EstimatedRobotPose( + targetPosition + .get() + .transformBy(target.getBestCameraToTarget().inverse()) + .transformBy(robotToCamera.inverse()), + result.getTimestampSeconds(), + result.getTargets(), + PoseStrategy.CLOSEST_TO_CAMERA_HEIGHT); + } + } + + // Need to null check here in case none of the 
provided targets are fiducial. + return Optional.ofNullable(closestHeightTarget); + } - Translation3d transform = new Translation3d(); - Rotation3d rotation = new Rotation3d(); + /** + * Return the estimated position of the robot using the target with the lowest delta in the vector + * magnitude between it and the reference pose. + * + * @param result pipeline result + * @param referencePose reference pose to check vector magnitude difference against. + * @return the estimated position of the robot in the FCS and the estimated timestamp of this + * estimation. + */ + private Optional closestToReferencePoseStrategy( + PhotonPipelineResult result, Pose3d referencePose) { + if (referencePose == null) { + DriverStation.reportError( + "[PhotonPoseEstimator] Tried to use reference pose strategy without setting the reference!", + false); + return Optional.empty(); + } + + double smallestPoseDelta = 10e9; + EstimatedRobotPose lowestDeltaPose = null; + + for (PhotonTrackedTarget target : result.targets) { + int targetFiducialId = target.getFiducialId(); + + // Don't report errors for non-fiducial targets. This could also be resolved by + // adding -1 to + // the initial HashSet. 
+ if (targetFiducialId == -1) continue; + + Optional targetPosition = fieldTags.getTagPose(target.getFiducialId()); + + if (targetPosition.isEmpty()) { + reportFiducialPoseError(targetFiducialId); + continue; + } + + Pose3d altTransformPosition = + targetPosition + .get() + .transformBy(target.getAlternateCameraToTarget().inverse()) + .transformBy(robotToCamera.inverse()); + Pose3d bestTransformPosition = + targetPosition + .get() + .transformBy(target.getBestCameraToTarget().inverse()) + .transformBy(robotToCamera.inverse()); + + double altDifference = Math.abs(calculateDifference(referencePose, altTransformPosition)); + double bestDifference = Math.abs(calculateDifference(referencePose, bestTransformPosition)); + + if (altDifference < smallestPoseDelta) { + smallestPoseDelta = altDifference; + lowestDeltaPose = + new EstimatedRobotPose( + altTransformPosition, + result.getTimestampSeconds(), + result.getTargets(), + PoseStrategy.CLOSEST_TO_REFERENCE_POSE); + } + if (bestDifference < smallestPoseDelta) { + smallestPoseDelta = bestDifference; + lowestDeltaPose = + new EstimatedRobotPose( + bestTransformPosition, + result.getTimestampSeconds(), + result.getTargets(), + PoseStrategy.CLOSEST_TO_REFERENCE_POSE); + } + } + return Optional.ofNullable(lowestDeltaPose); + } + + /** + * Return the average of the best target poses using ambiguity as weight. + * + * @param result pipeline result + * @return the estimated position of the robot in the FCS and the estimated timestamp of this + * estimation. + */ + private Optional averageBestTargetsStrategy(PhotonPipelineResult result) { + List> estimatedRobotPoses = new ArrayList<>(); + double totalAmbiguity = 0; + + for (PhotonTrackedTarget target : result.targets) { + int targetFiducialId = target.getFiducialId(); + + // Don't report errors for non-fiducial targets. This could also be resolved by + // adding -1 to + // the initial HashSet. 
+ if (targetFiducialId == -1) continue; + + Optional targetPosition = fieldTags.getTagPose(target.getFiducialId()); + + if (targetPosition.isEmpty()) { + reportFiducialPoseError(targetFiducialId); + continue; + } + + double targetPoseAmbiguity = target.getPoseAmbiguity(); + + // Pose ambiguity is 0, use that pose + if (targetPoseAmbiguity == 0) { + return Optional.of( + new EstimatedRobotPose( + targetPosition + .get() + .transformBy(target.getBestCameraToTarget().inverse()) + .transformBy(robotToCamera.inverse()), + result.getTimestampSeconds(), + result.getTargets(), + PoseStrategy.AVERAGE_BEST_TARGETS)); + } + + totalAmbiguity += 1.0 / target.getPoseAmbiguity(); + + estimatedRobotPoses.add( + new Pair<>( + target, + targetPosition + .get() + .transformBy(target.getBestCameraToTarget().inverse()) + .transformBy(robotToCamera.inverse()))); + } + + // Take the average + + Translation3d transform = new Translation3d(); + Rotation3d rotation = new Rotation3d(); + + if (estimatedRobotPoses.isEmpty()) return Optional.empty(); + + for (Pair pair : estimatedRobotPoses) { + // Total ambiguity is non-zero confirmed because if it was zero, that pose was + // returned. + double weight = (1.0 / pair.getFirst().getPoseAmbiguity()) / totalAmbiguity; + Pose3d estimatedPose = pair.getSecond(); + transform = transform.plus(estimatedPose.getTranslation().times(weight)); + rotation = rotation.plus(estimatedPose.getRotation().times(weight)); + } - if (estimatedRobotPoses.isEmpty()) return Optional.empty(); + return Optional.of( + new EstimatedRobotPose( + new Pose3d(transform, rotation), + result.getTimestampSeconds(), + result.getTargets(), + PoseStrategy.AVERAGE_BEST_TARGETS)); + } - for (Pair pair : estimatedRobotPoses) { - // Total ambiguity is non-zero confirmed because if it was zero, that pose was - // returned. 
- double weight = (1.0 / pair.getFirst().getPoseAmbiguity()) / totalAmbiguity; - Pose3d estimatedPose = pair.getSecond(); - transform = transform.plus(estimatedPose.getTranslation().times(weight)); - rotation = rotation.plus(estimatedPose.getRotation().times(weight)); + /** + * Difference is defined as the vector magnitude between the two poses + * + * @return The absolute "difference" (>=0) between two Pose3ds. + */ + private double calculateDifference(Pose3d x, Pose3d y) { + return x.getTranslation().getDistance(y.getTranslation()); } - return Optional.of( - new EstimatedRobotPose( - new Pose3d(transform, rotation), - result.getTimestampSeconds(), - result.getTargets(), - PoseStrategy.AVERAGE_BEST_TARGETS)); - } - - /** - * Difference is defined as the vector magnitude between the two poses - * - * @return The absolute "difference" (>=0) between two Pose3ds. - */ - private double calculateDifference(Pose3d x, Pose3d y) { - return x.getTranslation().getDistance(y.getTranslation()); - } - - private void reportFiducialPoseError(int fiducialId) { - if (!reportedErrors.contains(fiducialId)) { - DriverStation.reportError( - "[PhotonPoseEstimator] Tried to get pose of unknown AprilTag: " + fiducialId, false); - reportedErrors.add(fiducialId); + private void reportFiducialPoseError(int fiducialId) { + if (!reportedErrors.contains(fiducialId)) { + DriverStation.reportError( + "[PhotonPoseEstimator] Tried to get pose of unknown AprilTag: " + fiducialId, false); + reportedErrors.add(fiducialId); + } } - } } diff --git a/photon-lib/src/main/java/org/photonvision/PhotonTargetSortMode.java b/photon-lib/src/main/java/org/photonvision/PhotonTargetSortMode.java index 0cd5cb984c..bf22644645 100644 --- a/photon-lib/src/main/java/org/photonvision/PhotonTargetSortMode.java +++ b/photon-lib/src/main/java/org/photonvision/PhotonTargetSortMode.java @@ -28,23 +28,23 @@ import org.photonvision.targeting.PhotonTrackedTarget; public enum PhotonTargetSortMode { - 
Smallest(Comparator.comparingDouble(PhotonTrackedTarget::getArea)), - Largest(Smallest.m_comparator.reversed()), - Highest(Comparator.comparingDouble(PhotonTrackedTarget::getPitch)), - Lowest(Highest.m_comparator.reversed()), - Rightmost(Comparator.comparingDouble(PhotonTrackedTarget::getYaw)), - Leftmost(Rightmost.m_comparator.reversed()), - Centermost( - Comparator.comparingDouble( - target -> (Math.pow(target.getPitch(), 2) + Math.pow(target.getYaw(), 2)))); + Smallest(Comparator.comparingDouble(PhotonTrackedTarget::getArea)), + Largest(Smallest.m_comparator.reversed()), + Highest(Comparator.comparingDouble(PhotonTrackedTarget::getPitch)), + Lowest(Highest.m_comparator.reversed()), + Rightmost(Comparator.comparingDouble(PhotonTrackedTarget::getYaw)), + Leftmost(Rightmost.m_comparator.reversed()), + Centermost( + Comparator.comparingDouble( + target -> (Math.pow(target.getPitch(), 2) + Math.pow(target.getYaw(), 2)))); - private final Comparator m_comparator; + private final Comparator m_comparator; - PhotonTargetSortMode(Comparator comparator) { - m_comparator = comparator; - } + PhotonTargetSortMode(Comparator comparator) { + m_comparator = comparator; + } - public Comparator getComparator() { - return m_comparator; - } + public Comparator getComparator() { + return m_comparator; + } } diff --git a/photon-lib/src/main/java/org/photonvision/PhotonUtils.java b/photon-lib/src/main/java/org/photonvision/PhotonUtils.java index 0a29169645..4d35c5ac29 100644 --- a/photon-lib/src/main/java/org/photonvision/PhotonUtils.java +++ b/photon-lib/src/main/java/org/photonvision/PhotonUtils.java @@ -27,183 +27,183 @@ import edu.wpi.first.math.geometry.*; public final class PhotonUtils { - private PhotonUtils() { - // Utility class - } + private PhotonUtils() { + // Utility class + } - /** - * Algorithm from https://docs.limelightvision.io/en/latest/cs_estimating_distance.html Estimates - * range to a target using the target's elevation. 
This method can produce more stable results - * than SolvePNP when well tuned, if the full 6d robot pose is not required. Note that this method - * requires the camera to have 0 roll (not be skewed clockwise or CCW relative to the floor), and - * for there to exist a height differential between goal and camera. The larger this differential, - * the more accurate the distance estimate will be. - * - *

Units can be converted using the {@link edu.wpi.first.math.util.Units} class. - * - * @param cameraHeightMeters The physical height of the camera off the floor in meters. - * @param targetHeightMeters The physical height of the target off the floor in meters. This - * should be the height of whatever is being targeted (i.e. if the targeting region is set to - * top, this should be the height of the top of the target). - * @param cameraPitchRadians The pitch of the camera from the horizontal plane in radians. - * Positive values up. - * @param targetPitchRadians The pitch of the target in the camera's lens in radians. Positive - * values up. - * @return The estimated distance to the target in meters. - */ - public static double calculateDistanceToTargetMeters( - double cameraHeightMeters, - double targetHeightMeters, - double cameraPitchRadians, - double targetPitchRadians) { - return (targetHeightMeters - cameraHeightMeters) - / Math.tan(cameraPitchRadians + targetPitchRadians); - } + /** + * Algorithm from https://docs.limelightvision.io/en/latest/cs_estimating_distance.html Estimates + * range to a target using the target's elevation. This method can produce more stable results + * than SolvePNP when well tuned, if the full 6d robot pose is not required. Note that this method + * requires the camera to have 0 roll (not be skewed clockwise or CCW relative to the floor), and + * for there to exist a height differential between goal and camera. The larger this differential, + * the more accurate the distance estimate will be. + * + *

Units can be converted using the {@link edu.wpi.first.math.util.Units} class. + * + * @param cameraHeightMeters The physical height of the camera off the floor in meters. + * @param targetHeightMeters The physical height of the target off the floor in meters. This + * should be the height of whatever is being targeted (i.e. if the targeting region is set to + * top, this should be the height of the top of the target). + * @param cameraPitchRadians The pitch of the camera from the horizontal plane in radians. + * Positive values up. + * @param targetPitchRadians The pitch of the target in the camera's lens in radians. Positive + * values up. + * @return The estimated distance to the target in meters. + */ + public static double calculateDistanceToTargetMeters( + double cameraHeightMeters, + double targetHeightMeters, + double cameraPitchRadians, + double targetPitchRadians) { + return (targetHeightMeters - cameraHeightMeters) + / Math.tan(cameraPitchRadians + targetPitchRadians); + } - /** - * Estimate the {@link Translation2d} of the target relative to the camera. - * - * @param targetDistanceMeters The distance to the target in meters. - * @param yaw The observed yaw of the target. - * @return The target's camera-relative translation. - */ - public static Translation2d estimateCameraToTargetTranslation( - double targetDistanceMeters, Rotation2d yaw) { - return new Translation2d( - yaw.getCos() * targetDistanceMeters, yaw.getSin() * targetDistanceMeters); - } + /** + * Estimate the {@link Translation2d} of the target relative to the camera. + * + * @param targetDistanceMeters The distance to the target in meters. + * @param yaw The observed yaw of the target. + * @return The target's camera-relative translation. 
+ */ + public static Translation2d estimateCameraToTargetTranslation( + double targetDistanceMeters, Rotation2d yaw) { + return new Translation2d( + yaw.getCos() * targetDistanceMeters, yaw.getSin() * targetDistanceMeters); + } - /** - * Estimate the position of the robot in the field. - * - * @param cameraHeightMeters The physical height of the camera off the floor in meters. - * @param targetHeightMeters The physical height of the target off the floor in meters. This - * should be the height of whatever is being targeted (i.e. if the targeting region is set to - * top, this should be the height of the top of the target). - * @param cameraPitchRadians The pitch of the camera from the horizontal plane in radians. - * Positive values up. - * @param targetPitchRadians The pitch of the target in the camera's lens in radians. Positive - * values up. - * @param targetYaw The observed yaw of the target. Note that this *must* be CCW-positive, and - * Photon returns CW-positive. - * @param gyroAngle The current robot gyro angle, likely from odometry. - * @param fieldToTarget A Pose2d representing the target position in the field coordinate system. - * @param cameraToRobot The position of the robot relative to the camera. If the camera was - * mounted 3 inches behind the "origin" (usually physical center) of the robot, this would be - * Transform2d(3 inches, 0 inches, 0 degrees). - * @return The position of the robot in the field. 
- */ - public static Pose2d estimateFieldToRobot( - double cameraHeightMeters, - double targetHeightMeters, - double cameraPitchRadians, - double targetPitchRadians, - Rotation2d targetYaw, - Rotation2d gyroAngle, - Pose2d fieldToTarget, - Transform2d cameraToRobot) { - return PhotonUtils.estimateFieldToRobot( - PhotonUtils.estimateCameraToTarget( - PhotonUtils.estimateCameraToTargetTranslation( - PhotonUtils.calculateDistanceToTargetMeters( - cameraHeightMeters, targetHeightMeters, cameraPitchRadians, targetPitchRadians), - targetYaw), - fieldToTarget, - gyroAngle), - fieldToTarget, - cameraToRobot); - } + /** + * Estimate the position of the robot in the field. + * + * @param cameraHeightMeters The physical height of the camera off the floor in meters. + * @param targetHeightMeters The physical height of the target off the floor in meters. This + * should be the height of whatever is being targeted (i.e. if the targeting region is set to + * top, this should be the height of the top of the target). + * @param cameraPitchRadians The pitch of the camera from the horizontal plane in radians. + * Positive values up. + * @param targetPitchRadians The pitch of the target in the camera's lens in radians. Positive + * values up. + * @param targetYaw The observed yaw of the target. Note that this *must* be CCW-positive, and + * Photon returns CW-positive. + * @param gyroAngle The current robot gyro angle, likely from odometry. + * @param fieldToTarget A Pose2d representing the target position in the field coordinate system. + * @param cameraToRobot The position of the robot relative to the camera. If the camera was + * mounted 3 inches behind the "origin" (usually physical center) of the robot, this would be + * Transform2d(3 inches, 0 inches, 0 degrees). + * @return The position of the robot in the field. 
+ */ + public static Pose2d estimateFieldToRobot( + double cameraHeightMeters, + double targetHeightMeters, + double cameraPitchRadians, + double targetPitchRadians, + Rotation2d targetYaw, + Rotation2d gyroAngle, + Pose2d fieldToTarget, + Transform2d cameraToRobot) { + return PhotonUtils.estimateFieldToRobot( + PhotonUtils.estimateCameraToTarget( + PhotonUtils.estimateCameraToTargetTranslation( + PhotonUtils.calculateDistanceToTargetMeters( + cameraHeightMeters, targetHeightMeters, cameraPitchRadians, targetPitchRadians), + targetYaw), + fieldToTarget, + gyroAngle), + fieldToTarget, + cameraToRobot); + } - /** - * Estimates a {@link Transform2d} that maps the camera position to the target position, using the - * robot's gyro. Note that the gyro angle provided *must* line up with the field coordinate system - * -- that is, it should read zero degrees when pointed towards the opposing alliance station, and - * increase as the robot rotates CCW. - * - * @param cameraToTargetTranslation A Translation2d that encodes the x/y position of the target - * relative to the camera. - * @param fieldToTarget A Pose2d representing the target position in the field coordinate system. - * @param gyroAngle The current robot gyro angle, likely from odometry. - * @return A Transform2d that takes us from the camera to the target. - */ - public static Transform2d estimateCameraToTarget( - Translation2d cameraToTargetTranslation, Pose2d fieldToTarget, Rotation2d gyroAngle) { - // This pose maps our camera at the origin out to our target, in the robot - // reference frame - // The translation part of this Transform2d is from the above step, and the - // rotation uses our robot's - // gyro. - return new Transform2d( - cameraToTargetTranslation, gyroAngle.times(-1).minus(fieldToTarget.getRotation())); - } + /** + * Estimates a {@link Transform2d} that maps the camera position to the target position, using the + * robot's gyro. 
Note that the gyro angle provided *must* line up with the field coordinate system + * -- that is, it should read zero degrees when pointed towards the opposing alliance station, and + * increase as the robot rotates CCW. + * + * @param cameraToTargetTranslation A Translation2d that encodes the x/y position of the target + * relative to the camera. + * @param fieldToTarget A Pose2d representing the target position in the field coordinate system. + * @param gyroAngle The current robot gyro angle, likely from odometry. + * @return A Transform2d that takes us from the camera to the target. + */ + public static Transform2d estimateCameraToTarget( + Translation2d cameraToTargetTranslation, Pose2d fieldToTarget, Rotation2d gyroAngle) { + // This pose maps our camera at the origin out to our target, in the robot + // reference frame + // The translation part of this Transform2d is from the above step, and the + // rotation uses our robot's + // gyro. + return new Transform2d( + cameraToTargetTranslation, gyroAngle.times(-1).minus(fieldToTarget.getRotation())); + } - /** - * Estimates the pose of the robot in the field coordinate system, given the position of the - * target relative to the camera, the target relative to the field, and the robot relative to the - * camera. - * - * @param cameraToTarget The position of the target relative to the camera. - * @param fieldToTarget The position of the target in the field. - * @param cameraToRobot The position of the robot relative to the camera. If the camera was - * mounted 3 inches behind the "origin" (usually physical center) of the robot, this would be - * Transform2d(3 inches, 0 inches, 0 degrees). - * @return The position of the robot in the field. 
- */ - public static Pose2d estimateFieldToRobot( - Transform2d cameraToTarget, Pose2d fieldToTarget, Transform2d cameraToRobot) { - return estimateFieldToCamera(cameraToTarget, fieldToTarget).transformBy(cameraToRobot); - } + /** + * Estimates the pose of the robot in the field coordinate system, given the position of the + * target relative to the camera, the target relative to the field, and the robot relative to the + * camera. + * + * @param cameraToTarget The position of the target relative to the camera. + * @param fieldToTarget The position of the target in the field. + * @param cameraToRobot The position of the robot relative to the camera. If the camera was + * mounted 3 inches behind the "origin" (usually physical center) of the robot, this would be + * Transform2d(3 inches, 0 inches, 0 degrees). + * @return The position of the robot in the field. + */ + public static Pose2d estimateFieldToRobot( + Transform2d cameraToTarget, Pose2d fieldToTarget, Transform2d cameraToRobot) { + return estimateFieldToCamera(cameraToTarget, fieldToTarget).transformBy(cameraToRobot); + } - /** - * Estimates the pose of the camera in the field coordinate system, given the position of the - * target relative to the camera, and the target relative to the field. This *only* tracks the - * position of the camera, not the position of the robot itself. - * - * @param cameraToTarget The position of the target relative to the camera. - * @param fieldToTarget The position of the target in the field. - * @return The position of the camera in the field. - */ - public static Pose2d estimateFieldToCamera(Transform2d cameraToTarget, Pose2d fieldToTarget) { - var targetToCamera = cameraToTarget.inverse(); - return fieldToTarget.transformBy(targetToCamera); - } + /** + * Estimates the pose of the camera in the field coordinate system, given the position of the + * target relative to the camera, and the target relative to the field. 
This *only* tracks the + * position of the camera, not the position of the robot itself. + * + * @param cameraToTarget The position of the target relative to the camera. + * @param fieldToTarget The position of the target in the field. + * @return The position of the camera in the field. + */ + public static Pose2d estimateFieldToCamera(Transform2d cameraToTarget, Pose2d fieldToTarget) { + var targetToCamera = cameraToTarget.inverse(); + return fieldToTarget.transformBy(targetToCamera); + } - /** - * Estimates the pose of the robot in the field coordinate system, given the pose of the fiducial - * tag, the robot relative to the camera, and the target relative to the camera. - * - * @param fieldRelativeTagPose Pose3D the field relative pose of the target - * @param cameraToRobot Transform3D of the robot relative to the camera. Origin of the robot is - * defined as the center. - * @param cameraToTarget Transform3D of the target relative to the camera, returned by - * PhotonVision - * @return Transform3d Robot position relative to the field - */ - public static Pose3d estimateFieldToRobotAprilTag( - Transform3d cameraToTarget, Pose3d fieldRelativeTagPose, Transform3d cameraToRobot) { - return fieldRelativeTagPose.plus(cameraToTarget.inverse()).plus(cameraToRobot); - } + /** + * Estimates the pose of the robot in the field coordinate system, given the pose of the fiducial + * tag, the robot relative to the camera, and the target relative to the camera. + * + * @param fieldRelativeTagPose Pose3D the field relative pose of the target + * @param cameraToRobot Transform3D of the robot relative to the camera. Origin of the robot is + * defined as the center. 
+ * @param cameraToTarget Transform3D of the target relative to the camera, returned by + * PhotonVision + * @return Transform3d Robot position relative to the field + */ + public static Pose3d estimateFieldToRobotAprilTag( + Transform3d cameraToTarget, Pose3d fieldRelativeTagPose, Transform3d cameraToRobot) { + return fieldRelativeTagPose.plus(cameraToTarget.inverse()).plus(cameraToRobot); + } - /** - * Returns the yaw between your robot and a target. - * - * @param robotPose Current pose of the robot - * @param targetPose Pose of the target on the field - * @return double Yaw to the target - */ - public static Rotation2d getYawToPose(Pose2d robotPose, Pose2d targetPose) { - Translation2d relativeTrl = targetPose.relativeTo(robotPose).getTranslation(); - return new Rotation2d(relativeTrl.getX(), relativeTrl.getY()); - } + /** + * Returns the yaw between your robot and a target. + * + * @param robotPose Current pose of the robot + * @param targetPose Pose of the target on the field + * @return double Yaw to the target + */ + public static Rotation2d getYawToPose(Pose2d robotPose, Pose2d targetPose) { + Translation2d relativeTrl = targetPose.relativeTo(robotPose).getTranslation(); + return new Rotation2d(relativeTrl.getX(), relativeTrl.getY()); + } - /** - * Returns the distance between two poses - * - * @param robotPose Pose of the robot - * @param targetPose Pose of the target - * @return - */ - public static double getDistanceToPose(Pose2d robotPose, Pose2d targetPose) { - return robotPose.getTranslation().getDistance(targetPose.getTranslation()); - } + /** + * Returns the distance between two poses + * + * @param robotPose Pose of the robot + * @param targetPose Pose of the target + * @return + */ + public static double getDistanceToPose(Pose2d robotPose, Pose2d targetPose) { + return robotPose.getTranslation().getDistance(targetPose.getTranslation()); + } } diff --git a/photon-lib/src/main/java/org/photonvision/simulation/PhotonCameraSim.java 
b/photon-lib/src/main/java/org/photonvision/simulation/PhotonCameraSim.java index 8887e77e26..1c9ef2ce1e 100644 --- a/photon-lib/src/main/java/org/photonvision/simulation/PhotonCameraSim.java +++ b/photon-lib/src/main/java/org/photonvision/simulation/PhotonCameraSim.java @@ -62,525 +62,525 @@ */ @SuppressWarnings("unused") public class PhotonCameraSim implements AutoCloseable { - private final PhotonCamera cam; - - NTTopicSet ts = new NTTopicSet(); - private long heartbeatCounter = 0; - - /** This simulated camera's {@link SimCameraProperties} */ - public final SimCameraProperties prop; - - private long nextNTEntryTime = WPIUtilJNI.now(); - - private double maxSightRangeMeters = Double.MAX_VALUE; - private static final double kDefaultMinAreaPx = 100; - private double minTargetAreaPercent; - private PhotonTargetSortMode sortMode = PhotonTargetSortMode.Largest; - - // video stream simulation - private final CvSource videoSimRaw; - private final Mat videoSimFrameRaw = new Mat(); - private boolean videoSimRawEnabled = true; - private boolean videoSimWireframeEnabled = false; - private double videoSimWireframeResolution = 0.1; - private final CvSource videoSimProcessed; - private final Mat videoSimFrameProcessed = new Mat(); - private boolean videoSimProcEnabled = true; - - static { - try { - var loader = - new RuntimeLoader<>( - Core.NATIVE_LIBRARY_NAME, RuntimeLoader.getDefaultExtractionRoot(), Core.class); - loader.loadLibrary(); - } catch (Exception e) { - throw new RuntimeException("Failed to load native libraries!", e); + private final PhotonCamera cam; + + NTTopicSet ts = new NTTopicSet(); + private long heartbeatCounter = 0; + + /** This simulated camera's {@link SimCameraProperties} */ + public final SimCameraProperties prop; + + private long nextNTEntryTime = WPIUtilJNI.now(); + + private double maxSightRangeMeters = Double.MAX_VALUE; + private static final double kDefaultMinAreaPx = 100; + private double minTargetAreaPercent; + private PhotonTargetSortMode 
sortMode = PhotonTargetSortMode.Largest; + + // video stream simulation + private final CvSource videoSimRaw; + private final Mat videoSimFrameRaw = new Mat(); + private boolean videoSimRawEnabled = true; + private boolean videoSimWireframeEnabled = false; + private double videoSimWireframeResolution = 0.1; + private final CvSource videoSimProcessed; + private final Mat videoSimFrameProcessed = new Mat(); + private boolean videoSimProcEnabled = true; + + static { + try { + var loader = + new RuntimeLoader<>( + Core.NATIVE_LIBRARY_NAME, RuntimeLoader.getDefaultExtractionRoot(), Core.class); + loader.loadLibrary(); + } catch (Exception e) { + throw new RuntimeException("Failed to load native libraries!", e); + } } - } - - @Override - public void close() { - videoSimRaw.close(); - videoSimFrameRaw.release(); - videoSimProcessed.close(); - videoSimFrameProcessed.release(); - } - - /** - * Constructs a handle for simulating {@link PhotonCamera} values. Processing simulated targets - * through this class will change the associated PhotonCamera's results. - * - *

This constructor's camera has a 90 deg FOV with no simulated lag! - * - *

By default, the minimum target area is 100 pixels and there is no maximum sight range. - * - * @param camera The camera to be simulated - */ - public PhotonCameraSim(PhotonCamera camera) { - this(camera, SimCameraProperties.PERFECT_90DEG()); - } - - /** - * Constructs a handle for simulating {@link PhotonCamera} values. Processing simulated targets - * through this class will change the associated PhotonCamera's results. - * - *

By default, the minimum target area is 100 pixels and there is no maximum sight range. - * - * @param camera The camera to be simulated - * @param prop Properties of this camera such as FOV and FPS - */ - public PhotonCameraSim(PhotonCamera camera, SimCameraProperties prop) { - this.cam = camera; - this.prop = prop; - setMinTargetAreaPixels(kDefaultMinAreaPx); - - videoSimRaw = - CameraServer.putVideo(camera.getName() + "-raw", prop.getResWidth(), prop.getResHeight()); - videoSimRaw.setPixelFormat(PixelFormat.kGray); - videoSimProcessed = - CameraServer.putVideo( - camera.getName() + "-processed", prop.getResWidth(), prop.getResHeight()); - - ts.removeEntries(); - ts.subTable = camera.getCameraTable(); - ts.updateEntries(); - } - - /** - * Constructs a handle for simulating {@link PhotonCamera} values. Processing simulated targets - * through this class will change the associated PhotonCamera's results. - * - * @param camera The camera to be simulated - * @param prop Properties of this camera such as FOV and FPS - * @param minTargetAreaPercent The minimum percentage(0 - 100) a detected target must take up of - * the camera's image to be processed. Match this with your contour filtering settings in the - * PhotonVision GUI. - * @param maxSightRangeMeters Maximum distance at which the target is illuminated to your camera. - * Note that minimum target area of the image is separate from this. 
- */ - public PhotonCameraSim( - PhotonCamera camera, - SimCameraProperties prop, - double minTargetAreaPercent, - double maxSightRangeMeters) { - this(camera, prop); - this.minTargetAreaPercent = minTargetAreaPercent; - this.maxSightRangeMeters = maxSightRangeMeters; - } - - public PhotonCamera getCamera() { - return cam; - } - - public double getMinTargetAreaPercent() { - return minTargetAreaPercent; - } - - public double getMinTargetAreaPixels() { - return minTargetAreaPercent / 100.0 * prop.getResArea(); - } - - public double getMaxSightRangeMeters() { - return maxSightRangeMeters; - } - - public PhotonTargetSortMode getTargetSortMode() { - return sortMode; - } - - public CvSource getVideoSimRaw() { - return videoSimRaw; - } - - public Mat getVideoSimFrameRaw() { - return videoSimFrameRaw; - } - - /** - * Determines if this target's pose should be visible to the camera without considering its - * projected image points. Does not account for image area. - * - * @param camPose Camera's 3d pose - * @param target Vision target containing pose and shape - * @return If this vision target can be seen before image projection. 
- */ - public boolean canSeeTargetPose(Pose3d camPose, VisionTargetSim target) { - // var rel = new CameraTargetRelation(camPose, target.getPose()); - // TODO: removal awaiting wpilib Rotation3d performance improvements - var relTarget = RotTrlTransform3d.makeRelativeTo(camPose).apply(target.getPose()); - var camToTargYaw = new Rotation2d(relTarget.getX(), relTarget.getY()); - var camToTargPitch = - new Rotation2d(Math.hypot(relTarget.getX(), relTarget.getY()), -relTarget.getZ()); - var relCam = RotTrlTransform3d.makeRelativeTo(target.getPose()).apply(camPose); - var targToCamAngle = new Rotation2d(relCam.getX(), Math.hypot(relCam.getY(), relCam.getZ())); - - return ( - // target translation is outside of camera's FOV - (Math.abs(camToTargYaw.getDegrees()) < prop.getHorizFOV().getDegrees() / 2) - && (Math.abs(camToTargPitch.getDegrees()) < prop.getVertFOV().getDegrees() / 2) - && (!target.getModel().isPlanar - || Math.abs(targToCamAngle.getDegrees()) - < 90) // camera is behind planar target and it should be occluded - && (relTarget.getTranslation().getNorm() <= maxSightRangeMeters)); // target is too far - } - - /** - * Determines if all target points are inside the camera's image. - * - * @param points The target's 2d image points - */ - public boolean canSeeCorners(Point[] points) { - for (var point : points) { - if (MathUtil.clamp(point.x, 0, prop.getResWidth()) != point.x - || MathUtil.clamp(point.y, 0, prop.getResHeight()) != point.y) { - return false; // point is outside of resolution - } + + @Override + public void close() { + videoSimRaw.close(); + videoSimFrameRaw.release(); + videoSimProcessed.close(); + videoSimFrameProcessed.release(); } - return true; - } - - /** - * Determine if this camera should process a new frame based on performance metrics and the time - * since the last update. 
This returns an Optional which is either empty if no update should occur - * or a Long of the timestamp in microseconds of when the frame which should be received by NT. If - * a timestamp is returned, the last frame update time becomes that timestamp. - * - * @return Optional long which is empty while blocked or the NT entry timestamp in microseconds if - * ready - */ - public Optional consumeNextEntryTime() { - // check if this camera is ready for another frame update - long now = WPIUtilJNI.now(); - long timestamp = -1; - int iter = 0; - // prepare next latest update - while (now >= nextNTEntryTime) { - timestamp = nextNTEntryTime; - long frameTime = (long) (prop.estMsUntilNextFrame() * 1e3); - nextNTEntryTime += frameTime; - - // if frame time is very small, avoid blocking - if (iter++ > 50) { - timestamp = now; - nextNTEntryTime = now + frameTime; - break; - } + + /** + * Constructs a handle for simulating {@link PhotonCamera} values. Processing simulated targets + * through this class will change the associated PhotonCamera's results. + * + *

This constructor's camera has a 90 deg FOV with no simulated lag! + * + *

By default, the minimum target area is 100 pixels and there is no maximum sight range. + * + * @param camera The camera to be simulated + */ + public PhotonCameraSim(PhotonCamera camera) { + this(camera, SimCameraProperties.PERFECT_90DEG()); } - // return the timestamp of the latest update - if (timestamp >= 0) return Optional.of(timestamp); - // or this camera isn't ready to process yet - else return Optional.empty(); - } - - /** - * The minimum percentage(0 - 100) a detected target must take up of the camera's image to be - * processed. - */ - public void setMinTargetAreaPercent(double areaPercent) { - this.minTargetAreaPercent = areaPercent; - } - - /** - * The minimum number of pixels a detected target must take up in the camera's image to be - * processed. - */ - public void setMinTargetAreaPixels(double areaPx) { - this.minTargetAreaPercent = areaPx / prop.getResArea() * 100; - } - - /** - * Maximum distance at which the target is illuminated to your camera. Note that minimum target - * area of the image is separate from this. - */ - public void setMaxSightRange(double rangeMeters) { - this.maxSightRangeMeters = rangeMeters; - } - - /** Defines the order the targets are sorted in the pipeline result. */ - public void setTargetSortMode(PhotonTargetSortMode sortMode) { - if (sortMode != null) this.sortMode = sortMode; - } - - /** - * Sets whether the raw video stream simulation is enabled. - * - *

Note: This may increase loop times. - */ - public void enableRawStream(boolean enabled) { - videoSimRawEnabled = enabled; - } - - /** - * Sets whether a wireframe of the field is drawn to the raw video stream. - * - *

Note: This will dramatically increase loop times. - */ - public void enableDrawWireframe(boolean enabled) { - videoSimWireframeEnabled = enabled; - } - - /** - * Sets the resolution of the drawn wireframe if enabled. Drawn line segments will be subdivided - * into smaller segments based on a threshold set by the resolution. - * - * @param resolution Resolution as a fraction(0 - 1) of the video frame's diagonal length in - * pixels - */ - public void setWireframeResolution(double resolution) { - videoSimWireframeResolution = resolution; - } - - /** Sets whether the processed video stream simulation is enabled. */ - public void enableProcessedStream(boolean enabled) { - videoSimProcEnabled = enabled; - } - - public PhotonPipelineResult process( - double latencyMillis, Pose3d cameraPose, List targets) { - // sort targets by distance to camera - targets = new ArrayList<>(targets); - targets.sort( - (t1, t2) -> { - double dist1 = t1.getPose().getTranslation().getDistance(cameraPose.getTranslation()); - double dist2 = t2.getPose().getTranslation().getDistance(cameraPose.getTranslation()); - if (dist1 == dist2) return 0; - return dist1 < dist2 ? 
1 : -1; - }); - // all targets visible before noise - var visibleTgts = new ArrayList>(); - // all targets actually detected by camera (after noise) - var detectableTgts = new ArrayList(); - // basis change from world coordinates to camera coordinates - var camRt = RotTrlTransform3d.makeRelativeTo(cameraPose); - - // reset our frame - VideoSimUtil.updateVideoProp(videoSimRaw, prop); - VideoSimUtil.updateVideoProp(videoSimProcessed, prop); - Size videoFrameSize = new Size(prop.getResWidth(), prop.getResHeight()); - Mat.zeros(videoFrameSize, CvType.CV_8UC1).assignTo(videoSimFrameRaw); - - for (var tgt : targets) { - // pose isn't visible, skip to next - if (!canSeeTargetPose(cameraPose, tgt)) continue; - - // find target's 3d corner points - var fieldCorners = tgt.getFieldVertices(); - if (tgt.getModel().isSpherical) { // target is spherical - var model = tgt.getModel(); - // orient the model to the camera (like a sprite/decal) so it appears similar regardless of - // view - fieldCorners = - model.getFieldVertices( - TargetModel.getOrientedPose( - tgt.getPose().getTranslation(), cameraPose.getTranslation())); - } - // project 3d target points into 2d image points - var imagePoints = - OpenCVHelp.projectPoints(prop.getIntrinsics(), prop.getDistCoeffs(), camRt, fieldCorners); - // spherical targets need a rotated rectangle of their midpoints for visualization - if (tgt.getModel().isSpherical) { - var center = OpenCVHelp.avgPoint(imagePoints); - int l = 0, t, b, r = 0; - // reference point (left side midpoint) - for (int i = 1; i < 4; i++) { - if (imagePoints[i].x < imagePoints[l].x) l = i; - } - var lc = imagePoints[l]; - // determine top, right, bottom midpoints - double[] angles = new double[4]; - t = (l + 1) % 4; - b = (l + 1) % 4; - for (int i = 0; i < 4; i++) { - if (i == l) continue; - var ic = imagePoints[i]; - angles[i] = Math.atan2(lc.y - ic.y, ic.x - lc.x); - if (angles[i] >= angles[t]) t = i; - if (angles[i] <= angles[b]) b = i; + + /** + * Constructs a 
handle for simulating {@link PhotonCamera} values. Processing simulated targets + * through this class will change the associated PhotonCamera's results. + * + *

By default, the minimum target area is 100 pixels and there is no maximum sight range. + * + * @param camera The camera to be simulated + * @param prop Properties of this camera such as FOV and FPS + */ + public PhotonCameraSim(PhotonCamera camera, SimCameraProperties prop) { + this.cam = camera; + this.prop = prop; + setMinTargetAreaPixels(kDefaultMinAreaPx); + + videoSimRaw = + CameraServer.putVideo(camera.getName() + "-raw", prop.getResWidth(), prop.getResHeight()); + videoSimRaw.setPixelFormat(PixelFormat.kGray); + videoSimProcessed = + CameraServer.putVideo( + camera.getName() + "-processed", prop.getResWidth(), prop.getResHeight()); + + ts.removeEntries(); + ts.subTable = camera.getCameraTable(); + ts.updateEntries(); + } + + /** + * Constructs a handle for simulating {@link PhotonCamera} values. Processing simulated targets + * through this class will change the associated PhotonCamera's results. + * + * @param camera The camera to be simulated + * @param prop Properties of this camera such as FOV and FPS + * @param minTargetAreaPercent The minimum percentage(0 - 100) a detected target must take up of + * the camera's image to be processed. Match this with your contour filtering settings in the + * PhotonVision GUI. + * @param maxSightRangeMeters Maximum distance at which the target is illuminated to your camera. + * Note that minimum target area of the image is separate from this. 
+ */ + public PhotonCameraSim( + PhotonCamera camera, + SimCameraProperties prop, + double minTargetAreaPercent, + double maxSightRangeMeters) { + this(camera, prop); + this.minTargetAreaPercent = minTargetAreaPercent; + this.maxSightRangeMeters = maxSightRangeMeters; + } + + public PhotonCamera getCamera() { + return cam; + } + + public double getMinTargetAreaPercent() { + return minTargetAreaPercent; + } + + public double getMinTargetAreaPixels() { + return minTargetAreaPercent / 100.0 * prop.getResArea(); + } + + public double getMaxSightRangeMeters() { + return maxSightRangeMeters; + } + + public PhotonTargetSortMode getTargetSortMode() { + return sortMode; + } + + public CvSource getVideoSimRaw() { + return videoSimRaw; + } + + public Mat getVideoSimFrameRaw() { + return videoSimFrameRaw; + } + + /** + * Determines if this target's pose should be visible to the camera without considering its + * projected image points. Does not account for image area. + * + * @param camPose Camera's 3d pose + * @param target Vision target containing pose and shape + * @return If this vision target can be seen before image projection. 
+ */ + public boolean canSeeTargetPose(Pose3d camPose, VisionTargetSim target) { + // var rel = new CameraTargetRelation(camPose, target.getPose()); + // TODO: removal awaiting wpilib Rotation3d performance improvements + var relTarget = RotTrlTransform3d.makeRelativeTo(camPose).apply(target.getPose()); + var camToTargYaw = new Rotation2d(relTarget.getX(), relTarget.getY()); + var camToTargPitch = + new Rotation2d(Math.hypot(relTarget.getX(), relTarget.getY()), -relTarget.getZ()); + var relCam = RotTrlTransform3d.makeRelativeTo(target.getPose()).apply(camPose); + var targToCamAngle = new Rotation2d(relCam.getX(), Math.hypot(relCam.getY(), relCam.getZ())); + + return ( + // target translation is outside of camera's FOV + (Math.abs(camToTargYaw.getDegrees()) < prop.getHorizFOV().getDegrees() / 2) + && (Math.abs(camToTargPitch.getDegrees()) < prop.getVertFOV().getDegrees() / 2) + && (!target.getModel().isPlanar + || Math.abs(targToCamAngle.getDegrees()) + < 90) // camera is behind planar target and it should be occluded + && (relTarget.getTranslation().getNorm() <= maxSightRangeMeters)); // target is too far + } + + /** + * Determines if all target points are inside the camera's image. + * + * @param points The target's 2d image points + */ + public boolean canSeeCorners(Point[] points) { + for (var point : points) { + if (MathUtil.clamp(point.x, 0, prop.getResWidth()) != point.x + || MathUtil.clamp(point.y, 0, prop.getResHeight()) != point.y) { + return false; // point is outside of resolution + } } - for (int i = 0; i < 4; i++) { - if (i != t && i != l && i != b) r = i; + return true; + } + + /** + * Determine if this camera should process a new frame based on performance metrics and the time + * since the last update. This returns an Optional which is either empty if no update should occur + * or a Long of the timestamp in microseconds of when the frame which should be received by NT. 
If + * a timestamp is returned, the last frame update time becomes that timestamp. + * + * @return Optional long which is empty while blocked or the NT entry timestamp in microseconds if + * ready + */ + public Optional consumeNextEntryTime() { + // check if this camera is ready for another frame update + long now = WPIUtilJNI.now(); + long timestamp = -1; + int iter = 0; + // prepare next latest update + while (now >= nextNTEntryTime) { + timestamp = nextNTEntryTime; + long frameTime = (long) (prop.estMsUntilNextFrame() * 1e3); + nextNTEntryTime += frameTime; + + // if frame time is very small, avoid blocking + if (iter++ > 50) { + timestamp = now; + nextNTEntryTime = now + frameTime; + break; + } } - // create RotatedRect from midpoints - var rect = - new RotatedRect( - new Point(center.x, center.y), - new Size(imagePoints[r].x - lc.x, imagePoints[b].y - imagePoints[t].y), - Math.toDegrees(-angles[r])); - // set target corners to rect corners - Point[] points = new Point[4]; - rect.points(points); - imagePoints = points; - } - // save visible targets for raw video stream simulation - visibleTgts.add(new Pair<>(tgt, imagePoints)); - // estimate pixel noise - var noisyTargetCorners = prop.estPixelNoise(imagePoints); - // find the minimum area rectangle of target corners - var minAreaRect = OpenCVHelp.getMinAreaRect(noisyTargetCorners); - Point[] minAreaRectPts = new Point[4]; - minAreaRect.points(minAreaRectPts); - // find the (naive) 2d yaw/pitch - var centerPt = minAreaRect.center; - var centerRot = prop.getPixelRot(centerPt); - // find contour area - double areaPercent = prop.getContourAreaPercent(noisyTargetCorners); - - // projected target can't be detected, skip to next - if (!(canSeeCorners(noisyTargetCorners) && areaPercent >= minTargetAreaPercent)) continue; - - var pnpSim = new PNPResults(); - if (tgt.fiducialID >= 0 && tgt.getFieldVertices().size() == 4) { // single AprilTag solvePNP - pnpSim = - OpenCVHelp.solvePNP_SQUARE( - prop.getIntrinsics(), - 
prop.getDistCoeffs(), - tgt.getModel().vertices, - noisyTargetCorners); - } - - detectableTgts.add( - new PhotonTrackedTarget( - Math.toDegrees(centerRot.getZ()), - -Math.toDegrees(centerRot.getY()), - areaPercent, - Math.toDegrees(centerRot.getX()), - tgt.fiducialID, - pnpSim.best, - pnpSim.alt, - pnpSim.ambiguity, - OpenCVHelp.pointsToCorners(minAreaRectPts), - OpenCVHelp.pointsToCorners(noisyTargetCorners))); + // return the timestamp of the latest update + if (timestamp >= 0) return Optional.of(timestamp); + // or this camera isn't ready to process yet + else return Optional.empty(); + } + + /** + * The minimum percentage(0 - 100) a detected target must take up of the camera's image to be + * processed. + */ + public void setMinTargetAreaPercent(double areaPercent) { + this.minTargetAreaPercent = areaPercent; + } + + /** + * The minimum number of pixels a detected target must take up in the camera's image to be + * processed. + */ + public void setMinTargetAreaPixels(double areaPx) { + this.minTargetAreaPercent = areaPx / prop.getResArea() * 100; + } + + /** + * Maximum distance at which the target is illuminated to your camera. Note that minimum target + * area of the image is separate from this. + */ + public void setMaxSightRange(double rangeMeters) { + this.maxSightRangeMeters = rangeMeters; + } + + /** Defines the order the targets are sorted in the pipeline result. */ + public void setTargetSortMode(PhotonTargetSortMode sortMode) { + if (sortMode != null) this.sortMode = sortMode; + } + + /** + * Sets whether the raw video stream simulation is enabled. + * + *

Note: This may increase loop times. + */ + public void enableRawStream(boolean enabled) { + videoSimRawEnabled = enabled; } - // render visible tags to raw video frame - if (videoSimRawEnabled) { - // draw field wireframe - if (videoSimWireframeEnabled) { - VideoSimUtil.drawFieldWireframe( - camRt, - prop, - videoSimWireframeResolution, - 1.5, - new Scalar(80), - 6, - 1, - new Scalar(30), - videoSimFrameRaw); - } - - // draw targets - for (var pair : visibleTgts) { - var tgt = pair.getFirst(); - var corn = pair.getSecond(); - - if (tgt.fiducialID >= 0) { // apriltags - VideoSimUtil.warp16h5TagImage(tgt.fiducialID, corn, true, videoSimFrameRaw); - } else if (!tgt.getModel().isSpherical) { // non-spherical targets - var contour = corn; - if (!tgt.getModel() - .isPlanar) { // visualization cant handle non-convex projections of 3d models - contour = OpenCVHelp.getConvexHull(contour); - } - VideoSimUtil.drawPoly(contour, -1, new Scalar(255), true, videoSimFrameRaw); - } else { // spherical targets - VideoSimUtil.drawInscribedEllipse(corn, new Scalar(255), videoSimFrameRaw); + + /** + * Sets whether a wireframe of the field is drawn to the raw video stream. + * + *

Note: This will dramatically increase loop times. + */ + public void enableDrawWireframe(boolean enabled) { + videoSimWireframeEnabled = enabled; + } + + /** + * Sets the resolution of the drawn wireframe if enabled. Drawn line segments will be subdivided + * into smaller segments based on a threshold set by the resolution. + * + * @param resolution Resolution as a fraction(0 - 1) of the video frame's diagonal length in + * pixels + */ + public void setWireframeResolution(double resolution) { + videoSimWireframeResolution = resolution; + } + + /** Sets whether the processed video stream simulation is enabled. */ + public void enableProcessedStream(boolean enabled) { + videoSimProcEnabled = enabled; + } + + public PhotonPipelineResult process( + double latencyMillis, Pose3d cameraPose, List targets) { + // sort targets by distance to camera + targets = new ArrayList<>(targets); + targets.sort( + (t1, t2) -> { + double dist1 = t1.getPose().getTranslation().getDistance(cameraPose.getTranslation()); + double dist2 = t2.getPose().getTranslation().getDistance(cameraPose.getTranslation()); + if (dist1 == dist2) return 0; + return dist1 < dist2 ? 
1 : -1; + }); + // all targets visible before noise + var visibleTgts = new ArrayList>(); + // all targets actually detected by camera (after noise) + var detectableTgts = new ArrayList(); + // basis change from world coordinates to camera coordinates + var camRt = RotTrlTransform3d.makeRelativeTo(cameraPose); + + // reset our frame + VideoSimUtil.updateVideoProp(videoSimRaw, prop); + VideoSimUtil.updateVideoProp(videoSimProcessed, prop); + Size videoFrameSize = new Size(prop.getResWidth(), prop.getResHeight()); + Mat.zeros(videoFrameSize, CvType.CV_8UC1).assignTo(videoSimFrameRaw); + + for (var tgt : targets) { + // pose isn't visible, skip to next + if (!canSeeTargetPose(cameraPose, tgt)) continue; + + // find target's 3d corner points + var fieldCorners = tgt.getFieldVertices(); + if (tgt.getModel().isSpherical) { // target is spherical + var model = tgt.getModel(); + // orient the model to the camera (like a sprite/decal) so it appears similar regardless of + // view + fieldCorners = + model.getFieldVertices( + TargetModel.getOrientedPose( + tgt.getPose().getTranslation(), cameraPose.getTranslation())); + } + // project 3d target points into 2d image points + var imagePoints = + OpenCVHelp.projectPoints(prop.getIntrinsics(), prop.getDistCoeffs(), camRt, fieldCorners); + // spherical targets need a rotated rectangle of their midpoints for visualization + if (tgt.getModel().isSpherical) { + var center = OpenCVHelp.avgPoint(imagePoints); + int l = 0, t, b, r = 0; + // reference point (left side midpoint) + for (int i = 1; i < 4; i++) { + if (imagePoints[i].x < imagePoints[l].x) l = i; + } + var lc = imagePoints[l]; + // determine top, right, bottom midpoints + double[] angles = new double[4]; + t = (l + 1) % 4; + b = (l + 1) % 4; + for (int i = 0; i < 4; i++) { + if (i == l) continue; + var ic = imagePoints[i]; + angles[i] = Math.atan2(lc.y - ic.y, ic.x - lc.x); + if (angles[i] >= angles[t]) t = i; + if (angles[i] <= angles[b]) b = i; + } + for (int i = 0; i < 4; 
i++) { + if (i != t && i != l && i != b) r = i; + } + // create RotatedRect from midpoints + var rect = + new RotatedRect( + new Point(center.x, center.y), + new Size(imagePoints[r].x - lc.x, imagePoints[b].y - imagePoints[t].y), + Math.toDegrees(-angles[r])); + // set target corners to rect corners + Point[] points = new Point[4]; + rect.points(points); + imagePoints = points; + } + // save visible targets for raw video stream simulation + visibleTgts.add(new Pair<>(tgt, imagePoints)); + // estimate pixel noise + var noisyTargetCorners = prop.estPixelNoise(imagePoints); + // find the minimum area rectangle of target corners + var minAreaRect = OpenCVHelp.getMinAreaRect(noisyTargetCorners); + Point[] minAreaRectPts = new Point[4]; + minAreaRect.points(minAreaRectPts); + // find the (naive) 2d yaw/pitch + var centerPt = minAreaRect.center; + var centerRot = prop.getPixelRot(centerPt); + // find contour area + double areaPercent = prop.getContourAreaPercent(noisyTargetCorners); + + // projected target can't be detected, skip to next + if (!(canSeeCorners(noisyTargetCorners) && areaPercent >= minTargetAreaPercent)) continue; + + var pnpSim = new PNPResults(); + if (tgt.fiducialID >= 0 && tgt.getFieldVertices().size() == 4) { // single AprilTag solvePNP + pnpSim = + OpenCVHelp.solvePNP_SQUARE( + prop.getIntrinsics(), + prop.getDistCoeffs(), + tgt.getModel().vertices, + noisyTargetCorners); + } + + detectableTgts.add( + new PhotonTrackedTarget( + Math.toDegrees(centerRot.getZ()), + -Math.toDegrees(centerRot.getY()), + areaPercent, + Math.toDegrees(centerRot.getX()), + tgt.fiducialID, + pnpSim.best, + pnpSim.alt, + pnpSim.ambiguity, + OpenCVHelp.pointsToCorners(minAreaRectPts), + OpenCVHelp.pointsToCorners(noisyTargetCorners))); } - } - videoSimRaw.putFrame(videoSimFrameRaw); - } else videoSimRaw.setConnectionStrategy(ConnectionStrategy.kForceClose); - // draw/annotate target detection outline on processed view - if (videoSimProcEnabled) { - 
Imgproc.cvtColor(videoSimFrameRaw, videoSimFrameProcessed, Imgproc.COLOR_GRAY2BGR); - Imgproc.drawMarker( // crosshair - videoSimFrameProcessed, - new Point(prop.getResWidth() / 2.0, prop.getResHeight() / 2.0), - new Scalar(0, 255, 0), - Imgproc.MARKER_CROSS, - (int) VideoSimUtil.getScaledThickness(15, videoSimFrameProcessed), - (int) VideoSimUtil.getScaledThickness(1, videoSimFrameProcessed), - Imgproc.LINE_AA); - for (var tgt : detectableTgts) { - if (tgt.getFiducialId() >= 0) { // apriltags - VideoSimUtil.drawTagDetection( - tgt.getFiducialId(), - OpenCVHelp.cornersToPoints(tgt.getDetectedCorners()), - videoSimFrameProcessed); - } else { // other targets - // bounding rectangle - Imgproc.rectangle( - videoSimFrameProcessed, - OpenCVHelp.getBoundingRect(OpenCVHelp.cornersToPoints(tgt.getDetectedCorners())), - new Scalar(0, 0, 255), - (int) VideoSimUtil.getScaledThickness(1, videoSimFrameProcessed), - Imgproc.LINE_AA); - - VideoSimUtil.drawPoly( - OpenCVHelp.cornersToPoints(tgt.getMinAreaRectCorners()), - (int) VideoSimUtil.getScaledThickness(1, videoSimFrameProcessed), - new Scalar(255, 30, 30), - true, - videoSimFrameProcessed); + // render visible tags to raw video frame + if (videoSimRawEnabled) { + // draw field wireframe + if (videoSimWireframeEnabled) { + VideoSimUtil.drawFieldWireframe( + camRt, + prop, + videoSimWireframeResolution, + 1.5, + new Scalar(80), + 6, + 1, + new Scalar(30), + videoSimFrameRaw); + } + + // draw targets + for (var pair : visibleTgts) { + var tgt = pair.getFirst(); + var corn = pair.getSecond(); + + if (tgt.fiducialID >= 0) { // apriltags + VideoSimUtil.warp16h5TagImage(tgt.fiducialID, corn, true, videoSimFrameRaw); + } else if (!tgt.getModel().isSpherical) { // non-spherical targets + var contour = corn; + if (!tgt.getModel() + .isPlanar) { // visualization cant handle non-convex projections of 3d models + contour = OpenCVHelp.getConvexHull(contour); + } + VideoSimUtil.drawPoly(contour, -1, new Scalar(255), true, 
videoSimFrameRaw); + } else { // spherical targets + VideoSimUtil.drawInscribedEllipse(corn, new Scalar(255), videoSimFrameRaw); + } + } + videoSimRaw.putFrame(videoSimFrameRaw); + } else videoSimRaw.setConnectionStrategy(ConnectionStrategy.kForceClose); + // draw/annotate target detection outline on processed view + if (videoSimProcEnabled) { + Imgproc.cvtColor(videoSimFrameRaw, videoSimFrameProcessed, Imgproc.COLOR_GRAY2BGR); + Imgproc.drawMarker( // crosshair + videoSimFrameProcessed, + new Point(prop.getResWidth() / 2.0, prop.getResHeight() / 2.0), + new Scalar(0, 255, 0), + Imgproc.MARKER_CROSS, + (int) VideoSimUtil.getScaledThickness(15, videoSimFrameProcessed), + (int) VideoSimUtil.getScaledThickness(1, videoSimFrameProcessed), + Imgproc.LINE_AA); + for (var tgt : detectableTgts) { + if (tgt.getFiducialId() >= 0) { // apriltags + VideoSimUtil.drawTagDetection( + tgt.getFiducialId(), + OpenCVHelp.cornersToPoints(tgt.getDetectedCorners()), + videoSimFrameProcessed); + } else { // other targets + // bounding rectangle + Imgproc.rectangle( + videoSimFrameProcessed, + OpenCVHelp.getBoundingRect(OpenCVHelp.cornersToPoints(tgt.getDetectedCorners())), + new Scalar(0, 0, 255), + (int) VideoSimUtil.getScaledThickness(1, videoSimFrameProcessed), + Imgproc.LINE_AA); + + VideoSimUtil.drawPoly( + OpenCVHelp.cornersToPoints(tgt.getMinAreaRectCorners()), + (int) VideoSimUtil.getScaledThickness(1, videoSimFrameProcessed), + new Scalar(255, 30, 30), + true, + videoSimFrameProcessed); + } + } + videoSimProcessed.putFrame(videoSimFrameProcessed); + } else videoSimProcessed.setConnectionStrategy(ConnectionStrategy.kForceClose); + + // put this simulated data to NT + if (sortMode != null) { + detectableTgts.sort(sortMode.getComparator()); } - } - videoSimProcessed.putFrame(videoSimFrameProcessed); - } else videoSimProcessed.setConnectionStrategy(ConnectionStrategy.kForceClose); - - // put this simulated data to NT - if (sortMode != null) { - 
detectableTgts.sort(sortMode.getComparator()); + return new PhotonPipelineResult(latencyMillis, detectableTgts); } - return new PhotonPipelineResult(latencyMillis, detectableTgts); - } - - /** - * Simulate one processed frame of vision data, putting one result to NT at current timestamp. - * Image capture time is assumed be (current time - latency). - * - * @param result The pipeline result to submit - */ - public void submitProcessedFrame(PhotonPipelineResult result) { - submitProcessedFrame(result, WPIUtilJNI.now()); - } - - /** - * Simulate one processed frame of vision data, putting one result to NT. Image capture timestamp - * overrides {@link PhotonPipelineResult#getTimestampSeconds() getTimestampSeconds()} for more - * precise latency simulation. - * - * @param result The pipeline result to submit - * @param receiveTimestamp The (sim) timestamp when this result was read by NT in microseconds - */ - public void submitProcessedFrame(PhotonPipelineResult result, long receiveTimestamp) { - ts.latencyMillisEntry.set(result.getLatencyMillis(), receiveTimestamp); - - var newPacket = new Packet(result.getPacketSize()); - result.populatePacket(newPacket); - ts.rawBytesEntry.set(newPacket.getData(), receiveTimestamp); - - boolean hasTargets = result.hasTargets(); - ts.hasTargetEntry.set(hasTargets, receiveTimestamp); - if (!hasTargets) { - ts.targetPitchEntry.set(0.0, receiveTimestamp); - ts.targetYawEntry.set(0.0, receiveTimestamp); - ts.targetAreaEntry.set(0.0, receiveTimestamp); - ts.targetPoseEntry.set(new double[] {0.0, 0.0, 0.0}, receiveTimestamp); - ts.targetSkewEntry.set(0.0, receiveTimestamp); - } else { - var bestTarget = result.getBestTarget(); - - ts.targetPitchEntry.set(bestTarget.getPitch(), receiveTimestamp); - ts.targetYawEntry.set(bestTarget.getYaw(), receiveTimestamp); - ts.targetAreaEntry.set(bestTarget.getArea(), receiveTimestamp); - ts.targetSkewEntry.set(bestTarget.getSkew(), receiveTimestamp); - - var transform = 
bestTarget.getBestCameraToTarget(); - double[] poseData = { - transform.getX(), transform.getY(), transform.getRotation().toRotation2d().getDegrees() - }; - ts.targetPoseEntry.set(poseData, receiveTimestamp); + + /** + * Simulate one processed frame of vision data, putting one result to NT at current timestamp. + * Image capture time is assumed to be (current time - latency). + * + * @param result The pipeline result to submit + */ + public void submitProcessedFrame(PhotonPipelineResult result) { + submitProcessedFrame(result, WPIUtilJNI.now()); + } - ts.cameraIntrinsicsPublisher.set(prop.getIntrinsics().getData(), receiveTimestamp); - ts.cameraDistortionPublisher.set(prop.getDistCoeffs().getData(), receiveTimestamp); - ts.heartbeatPublisher.set(heartbeatCounter++, receiveTimestamp); - } + /** + * Simulate one processed frame of vision data, putting one result to NT. Image capture timestamp + * overrides {@link PhotonPipelineResult#getTimestampSeconds() getTimestampSeconds()} for more + * precise latency simulation. 
+ * + * @param result The pipeline result to submit + * @param receiveTimestamp The (sim) timestamp when this result was read by NT in microseconds + */ + public void submitProcessedFrame(PhotonPipelineResult result, long receiveTimestamp) { + ts.latencyMillisEntry.set(result.getLatencyMillis(), receiveTimestamp); + + var newPacket = new Packet(result.getPacketSize()); + result.populatePacket(newPacket); + ts.rawBytesEntry.set(newPacket.getData(), receiveTimestamp); + + boolean hasTargets = result.hasTargets(); + ts.hasTargetEntry.set(hasTargets, receiveTimestamp); + if (!hasTargets) { + ts.targetPitchEntry.set(0.0, receiveTimestamp); + ts.targetYawEntry.set(0.0, receiveTimestamp); + ts.targetAreaEntry.set(0.0, receiveTimestamp); + ts.targetPoseEntry.set(new double[] {0.0, 0.0, 0.0}, receiveTimestamp); + ts.targetSkewEntry.set(0.0, receiveTimestamp); + } else { + var bestTarget = result.getBestTarget(); + + ts.targetPitchEntry.set(bestTarget.getPitch(), receiveTimestamp); + ts.targetYawEntry.set(bestTarget.getYaw(), receiveTimestamp); + ts.targetAreaEntry.set(bestTarget.getArea(), receiveTimestamp); + ts.targetSkewEntry.set(bestTarget.getSkew(), receiveTimestamp); + + var transform = bestTarget.getBestCameraToTarget(); + double[] poseData = { + transform.getX(), transform.getY(), transform.getRotation().toRotation2d().getDegrees() + }; + ts.targetPoseEntry.set(poseData, receiveTimestamp); + } + + ts.cameraIntrinsicsPublisher.set(prop.getIntrinsics().getData(), receiveTimestamp); + ts.cameraDistortionPublisher.set(prop.getDistCoeffs().getData(), receiveTimestamp); + ts.heartbeatPublisher.set(heartbeatCounter++, receiveTimestamp); + } } diff --git a/photon-lib/src/main/java/org/photonvision/simulation/SimCameraProperties.java b/photon-lib/src/main/java/org/photonvision/simulation/SimCameraProperties.java index 56c18a1aad..35b0ad190f 100644 --- a/photon-lib/src/main/java/org/photonvision/simulation/SimCameraProperties.java +++ 
b/photon-lib/src/main/java/org/photonvision/simulation/SimCameraProperties.java @@ -65,675 +65,675 @@ * network latency and the latency reported will always be perfect. */ public class SimCameraProperties { - private final Random rand = new Random(); - // calibration - private int resWidth; - private int resHeight; - private Matrix camIntrinsics; - private Matrix distCoeffs; - private double avgErrorPx; - private double errorStdDevPx; - // performance - private double frameSpeedMs = 0; - private double exposureTimeMs = 0; - private double avgLatencyMs = 0; - private double latencyStdDevMs = 0; - // util - private List viewplanes = new ArrayList<>(); - - /** Default constructor which is the same as {@link #PERFECT_90DEG} */ - public SimCameraProperties() { - setCalibration(960, 720, Rotation2d.fromDegrees(90)); - } - - /** - * Reads camera properties from a photonvision config.json file. This is only the - * resolution, camera intrinsics, distortion coefficients, and average/std. dev. pixel error. - * Other camera properties must be set. - * - * @param path Path to the config.json file - * @param width The width of the desired resolution in the JSON - * @param height The height of the desired resolution in the JSON - * @throws IOException If reading the JSON fails, either from an invalid path or a missing/invalid - * calibrated resolution. - */ - public SimCameraProperties(String path, int width, int height) throws IOException { - this(Path.of(path), width, height); - } - - /** - * Reads camera properties from a photonvision config.json file. This is only the - * resolution, camera intrinsics, distortion coefficients, and average/std. dev. pixel error. - * Other camera properties must be set. 
- * - * @param path Path to the config.json file - * @param width The width of the desired resolution in the JSON - * @param height The height of the desired resolution in the JSON - * @throws IOException If reading the JSON fails, either from an invalid path or a missing/invalid - * calibrated resolution. - */ - public SimCameraProperties(Path path, int width, int height) throws IOException { - var mapper = new ObjectMapper(); - var json = mapper.readTree(path.toFile()); - json = json.get("calibrations"); - boolean success = false; - try { - for (int i = 0; i < json.size() && !success; i++) { - // check if this calibration entry is our desired resolution - var calib = json.get(i); - int jsonWidth = calib.get("resolution").get("width").asInt(); - int jsonHeight = calib.get("resolution").get("height").asInt(); - if (jsonWidth != width || jsonHeight != height) continue; - // get the relevant calibration values - var jsonIntrinsicsNode = calib.get("cameraIntrinsics").get("data"); - double[] jsonIntrinsics = new double[jsonIntrinsicsNode.size()]; - for (int j = 0; j < jsonIntrinsicsNode.size(); j++) { - jsonIntrinsics[j] = jsonIntrinsicsNode.get(j).asDouble(); + private final Random rand = new Random(); + // calibration + private int resWidth; + private int resHeight; + private Matrix camIntrinsics; + private Matrix distCoeffs; + private double avgErrorPx; + private double errorStdDevPx; + // performance + private double frameSpeedMs = 0; + private double exposureTimeMs = 0; + private double avgLatencyMs = 0; + private double latencyStdDevMs = 0; + // util + private List viewplanes = new ArrayList<>(); + + /** Default constructor which is the same as {@link #PERFECT_90DEG} */ + public SimCameraProperties() { + setCalibration(960, 720, Rotation2d.fromDegrees(90)); + } + + /** + * Reads camera properties from a photonvision config.json file. This is only the + * resolution, camera intrinsics, distortion coefficients, and average/std. dev. pixel error. 
+ * Other camera properties must be set. + * + * @param path Path to the config.json file + * @param width The width of the desired resolution in the JSON + * @param height The height of the desired resolution in the JSON + * @throws IOException If reading the JSON fails, either from an invalid path or a missing/invalid + * calibrated resolution. + */ + public SimCameraProperties(String path, int width, int height) throws IOException { + this(Path.of(path), width, height); + } + + /** + * Reads camera properties from a photonvision config.json file. This is only the + * resolution, camera intrinsics, distortion coefficients, and average/std. dev. pixel error. + * Other camera properties must be set. + * + * @param path Path to the config.json file + * @param width The width of the desired resolution in the JSON + * @param height The height of the desired resolution in the JSON + * @throws IOException If reading the JSON fails, either from an invalid path or a missing/invalid + * calibrated resolution. 
+ */ + public SimCameraProperties(Path path, int width, int height) throws IOException { + var mapper = new ObjectMapper(); + var json = mapper.readTree(path.toFile()); + json = json.get("calibrations"); + boolean success = false; + try { + for (int i = 0; i < json.size() && !success; i++) { + // check if this calibration entry is our desired resolution + var calib = json.get(i); + int jsonWidth = calib.get("resolution").get("width").asInt(); + int jsonHeight = calib.get("resolution").get("height").asInt(); + if (jsonWidth != width || jsonHeight != height) continue; + // get the relevant calibration values + var jsonIntrinsicsNode = calib.get("cameraIntrinsics").get("data"); + double[] jsonIntrinsics = new double[jsonIntrinsicsNode.size()]; + for (int j = 0; j < jsonIntrinsicsNode.size(); j++) { + jsonIntrinsics[j] = jsonIntrinsicsNode.get(j).asDouble(); + } + var jsonDistortNode = calib.get("cameraExtrinsics").get("data"); + double[] jsonDistortion = new double[jsonDistortNode.size()]; + for (int j = 0; j < jsonDistortNode.size(); j++) { + jsonDistortion[j] = jsonDistortNode.get(j).asDouble(); + } + var jsonViewErrors = calib.get("perViewErrors"); + double jsonAvgError = 0; + for (int j = 0; j < jsonViewErrors.size(); j++) { + jsonAvgError += jsonViewErrors.get(j).asDouble(); + } + jsonAvgError /= jsonViewErrors.size(); + double jsonErrorStdDev = calib.get("standardDeviation").asDouble(); + // assign the read JSON values to this CameraProperties + setCalibration( + jsonWidth, + jsonHeight, + Matrix.mat(Nat.N3(), Nat.N3()).fill(jsonIntrinsics), + Matrix.mat(Nat.N5(), Nat.N1()).fill(jsonDistortion)); + setCalibError(jsonAvgError, jsonErrorStdDev); + success = true; + } + } catch (Exception e) { + throw new IOException("Invalid calibration JSON"); + } + if (!success) throw new IOException("Requested resolution not found in calibration"); + } + + public void setRandomSeed(long seed) { + rand.setSeed(seed); + } + + public void setCalibration(int resWidth, int 
resHeight, Rotation2d fovDiag) { + if (fovDiag.getDegrees() < 1 || fovDiag.getDegrees() > 179) { + fovDiag = Rotation2d.fromDegrees(MathUtil.clamp(fovDiag.getDegrees(), 1, 179)); + DriverStation.reportError( + "Requested invalid FOV! Clamping between (1, 179) degrees...", false); + } + double resDiag = Math.sqrt(resWidth * resWidth + resHeight * resHeight); + double diagRatio = Math.tan(fovDiag.getRadians() / 2); + var fovWidth = new Rotation2d(Math.atan(diagRatio * (resWidth / resDiag)) * 2); + var fovHeight = new Rotation2d(Math.atan(diagRatio * (resHeight / resDiag)) * 2); + + // assume no distortion + var distCoeff = VecBuilder.fill(0, 0, 0, 0, 0); + + // assume centered principal point (pixels) + double cx = resWidth / 2.0 - 0.5; + double cy = resHeight / 2.0 - 0.5; + + // use given fov to determine focal point (pixels) + double fx = cx / Math.tan(fovWidth.getRadians() / 2.0); + double fy = cy / Math.tan(fovHeight.getRadians() / 2.0); + + // create camera intrinsics matrix + var camIntrinsics = Matrix.mat(Nat.N3(), Nat.N3()).fill(fx, 0, cx, 0, fy, cy, 0, 0, 1); + setCalibration(resWidth, resHeight, camIntrinsics, distCoeff); + } + + public void setCalibration( + int resWidth, int resHeight, Matrix camIntrinsics, Matrix distCoeffs) { + this.resWidth = resWidth; + this.resHeight = resHeight; + this.camIntrinsics = camIntrinsics; + this.distCoeffs = distCoeffs; + + // left, right, up, and down view planes + var p = + new Translation3d[] { + new Translation3d( + 1, + new Rotation3d(0, 0, getPixelYaw(0).plus(new Rotation2d(-Math.PI / 2)).getRadians())), + new Translation3d( + 1, + new Rotation3d( + 0, 0, getPixelYaw(resWidth).plus(new Rotation2d(Math.PI / 2)).getRadians())), + new Translation3d( + 1, + new Rotation3d( + 0, getPixelPitch(0).plus(new Rotation2d(Math.PI / 2)).getRadians(), 0)), + new Translation3d( + 1, + new Rotation3d( + 0, getPixelPitch(resHeight).plus(new Rotation2d(-Math.PI / 2)).getRadians(), 0)) + }; + viewplanes.clear(); + for (int i = 0; i < 
p.length; i++) { + viewplanes.add(new DMatrix3(p[i].getX(), p[i].getY(), p[i].getZ())); + } + } + + public void setCalibError(double avgErrorPx, double errorStdDevPx) { + this.avgErrorPx = avgErrorPx; + this.errorStdDevPx = errorStdDevPx; + } + + /** + * @param fps The average frames per second the camera should process at. Exposure time limits + * FPS if set! + */ + public void setFPS(double fps) { + frameSpeedMs = Math.max(1000.0 / fps, exposureTimeMs); + } + + /** + * @param exposureTimeMs The amount of time the "shutter" is open for one frame. Affects motion + * blur. Frame speed(from FPS) is limited to this! + */ + public void setExposureTimeMs(double exposureTimeMs) { + this.exposureTimeMs = exposureTimeMs; + frameSpeedMs = Math.max(frameSpeedMs, exposureTimeMs); + } + + /** + * @param avgLatencyMs The average latency (from image capture to data published) in milliseconds + * a frame should have + */ + public void setAvgLatencyMs(double avgLatencyMs) { + this.avgLatencyMs = avgLatencyMs; + } + + /** + * @param latencyStdDevMs The standard deviation in milliseconds of the latency + */ + public void setLatencyStdDevMs(double latencyStdDevMs) { + this.latencyStdDevMs = latencyStdDevMs; + } + + public int getResWidth() { + return resWidth; + } + + public int getResHeight() { + return resHeight; + } + + public int getResArea() { + return resWidth * resHeight; + } + + /** Width:height */ + public double getAspectRatio() { + return (double) resWidth / resHeight; + } + + public Matrix getIntrinsics() { + return camIntrinsics.copy(); + } + + public Vector getDistCoeffs() { + return new Vector<>(distCoeffs); + } + + public double getFPS() { + return 1000.0 / frameSpeedMs; + } + + public double getFrameSpeedMs() { + return frameSpeedMs; + } + + public double getExposureTimeMs() { + return exposureTimeMs; + } + + public double getAvgLatencyMs() { + return avgLatencyMs; + } + + public double getLatencyStdDevMs() { + return latencyStdDevMs; + } + + public 
SimCameraProperties copy() { + var newProp = new SimCameraProperties(); + newProp.setCalibration(resWidth, resHeight, camIntrinsics, distCoeffs); + newProp.setCalibError(avgErrorPx, errorStdDevPx); + newProp.setFPS(getFPS()); + newProp.setExposureTimeMs(exposureTimeMs); + newProp.setAvgLatencyMs(avgLatencyMs); + newProp.setLatencyStdDevMs(latencyStdDevMs); + return newProp; + } + + /** + * The percentage(0 - 100) of this camera's resolution the contour takes up in pixels of the + * image. + * + * @param points Points of the contour + */ + public double getContourAreaPercent(Point[] points) { + return Imgproc.contourArea(new MatOfPoint2f(OpenCVHelp.getConvexHull(points))) + / getResArea() + * 100; + } + + /** The yaw from the principal point of this camera to the pixel x value. Positive values left. */ + public Rotation2d getPixelYaw(double pixelX) { + double fx = camIntrinsics.get(0, 0); + // account for principal point not being centered + double cx = camIntrinsics.get(0, 2); + double xOffset = cx - pixelX; + return new Rotation2d(fx, xOffset); + } + + /** + * The pitch from the principal point of this camera to the pixel y value. Pitch is positive down. + * + *

Note that this angle is naively computed and may be incorrect. See {@link + * #getCorrectedPixelRot(Point)}. + */ + public Rotation2d getPixelPitch(double pixelY) { + double fy = camIntrinsics.get(1, 1); + // account for principal point not being centered + double cy = camIntrinsics.get(1, 2); + double yOffset = cy - pixelY; + return new Rotation2d(fy, -yOffset); + } + + /** + * Finds the yaw and pitch to the given image point. Yaw is positive left, and pitch is positive + * down. + * + *

Note that pitch is naively computed and may be incorrect. See {@link + * #getCorrectedPixelRot(Point)}. + */ + public Rotation3d getPixelRot(Point point) { + return new Rotation3d( + 0, getPixelPitch(point.y).getRadians(), getPixelYaw(point.x).getRadians()); + } + + /** + * Gives the yaw and pitch of the line intersecting the camera lens and the given pixel + * coordinates on the sensor. Yaw is positive left, and pitch positive down. + * + *

The pitch traditionally calculated from pixel offsets do not correctly account for non-zero + * values of yaw because of perspective distortion (not to be confused with lens distortion)-- for + * example, the pitch angle is naively calculated as: + * + *

pitch = arctan(pixel y offset / focal length y)
+ * + * However, using focal length as a side of the associated right triangle is not correct when the + * pixel x value is not 0, because the distance from this pixel (projected on the x-axis) to the + * camera lens increases. Projecting a line back out of the camera with these naive angles will + * not intersect the 3d point that was originally projected into this 2d pixel. Instead, this + * length should be: + * + *
focal length y ⟶ (focal length y / cos(arctan(pixel x offset / focal length x)))
+ * + * @return Rotation3d with yaw and pitch of the line projected out of the camera from the given + * pixel (roll is zero). + */ + public Rotation3d getCorrectedPixelRot(Point point) { + double fx = camIntrinsics.get(0, 0); + double cx = camIntrinsics.get(0, 2); + double xOffset = cx - point.x; + + double fy = camIntrinsics.get(1, 1); + double cy = camIntrinsics.get(1, 2); + double yOffset = cy - point.y; + + // calculate yaw normally + var yaw = new Rotation2d(fx, xOffset); + // correct pitch based on yaw + var pitch = new Rotation2d(fy / Math.cos(Math.atan(xOffset / fx)), -yOffset); + + return new Rotation3d(0, pitch.getRadians(), yaw.getRadians()); + } + + public Rotation2d getHorizFOV() { + // sum of FOV left and right principal point + var left = getPixelYaw(0); + var right = getPixelYaw(resWidth); + return left.minus(right); + } + + public Rotation2d getVertFOV() { + // sum of FOV above and below principal point + var above = getPixelPitch(0); + var below = getPixelPitch(resHeight); + return below.minus(above); + } + + public Rotation2d getDiagFOV() { + return new Rotation2d(Math.hypot(getHorizFOV().getRadians(), getVertFOV().getRadians())); + } + + /** + * Determines where the line segment defined by the two given translations intersects the camera's + * frustum/field-of-vision, if at all. + * + *

The line is parametrized so any of its points p = t * (b - a) + a. This method + * returns these values of t, minimum first, defining the region of the line segment which is + * visible in the frustum. If both ends of the line segment are visible, this simply returns {0, + * 1}. If, for example, point b is visible while a is not, and half of the line segment is inside + * the camera frustum, {0.5, 1} would be returned. + * + * @param camRt The change in basis from world coordinates to camera coordinates. See {@link + * RotTrlTransform3d#makeRelativeTo(Pose3d)}. + * @param a The initial translation of the line + * @param b The final translation of the line + * @return A Pair of Doubles. The values may be null: + *

    + *
  • {Double, Double} : Two parametrized values(t), minimum first, representing which + * segment of the line is visible in the camera frustum. + *
  • {Double, null} : One value(t) representing a single intersection point. For example, + * the line only intersects the intersection of two adjacent viewplanes. + *
  • {null, null} : No values. The line segment is not visible in the camera frustum. + *
+ */ + public Pair getVisibleLine( + RotTrlTransform3d camRt, Translation3d a, Translation3d b) { + // translations relative to the camera + var rela = camRt.apply(a); + var relb = camRt.apply(b); + + // check if both ends are behind camera + if (rela.getX() <= 0 && relb.getX() <= 0) return new Pair<>(null, null); + + var av = new DMatrix3(rela.getX(), rela.getY(), rela.getZ()); + var bv = new DMatrix3(relb.getX(), relb.getY(), relb.getZ()); + // a to b + var abv = new DMatrix3(); + CommonOps_DDF3.subtract(bv, av, abv); + + // check if the ends of the line segment are visible + boolean aVisible = true; + boolean bVisible = true; + for (int i = 0; i < viewplanes.size(); i++) { + var normal = viewplanes.get(i); + double aVisibility = CommonOps_DDF3.dot(av, normal); + if (aVisibility < 0) aVisible = false; + double bVisibility = CommonOps_DDF3.dot(bv, normal); + if (bVisibility < 0) bVisible = false; + // both ends are outside at least one of the same viewplane + if (aVisibility <= 0 && bVisibility <= 0) return new Pair<>(null, null); } - var jsonDistortNode = calib.get("cameraExtrinsics").get("data"); - double[] jsonDistortion = new double[jsonDistortNode.size()]; - for (int j = 0; j < jsonDistortNode.size(); j++) { - jsonDistortion[j] = jsonDistortNode.get(j).asDouble(); + // both ends are inside frustum + if (aVisible && bVisible) return new Pair<>(Double.valueOf(0), Double.valueOf(1)); + + // parametrized (t=0 at a, t=1 at b) intersections with viewplanes + double[] intersections = {Double.NaN, Double.NaN, Double.NaN, Double.NaN}; + // intersection points + List ipts = new ArrayList<>(); + for (double val : intersections) ipts.add(null); + + // find intersections + for (int i = 0; i < viewplanes.size(); i++) { + var normal = viewplanes.get(i); + + // we want to know the value of t when the line intercepts this plane + // parametrized: v = t * ab + a, where v lies on the plane + // we can find the projection of a onto the plane normal + // a_projn = 
normal.times(av.dot(normal) / normal.dot(normal)); + var a_projn = new DMatrix3(); + CommonOps_DDF3.scale( + CommonOps_DDF3.dot(av, normal) / CommonOps_DDF3.dot(normal, normal), normal, a_projn); + // this projection lets us determine the scalar multiple t of ab where + // (t * ab + a) is a vector which lies on the plane + if (Math.abs(CommonOps_DDF3.dot(abv, a_projn)) < 1e-5) continue; // line is parallel to plane + intersections[i] = CommonOps_DDF3.dot(a_projn, a_projn) / -CommonOps_DDF3.dot(abv, a_projn); + + // vector a to the viewplane + var apv = new DMatrix3(); + CommonOps_DDF3.scale(intersections[i], abv, apv); + // av + apv = intersection point + var intersectpt = new DMatrix3(); + CommonOps_DDF3.add(av, apv, intersectpt); + ipts.set(i, intersectpt); + + // discard intersections outside the camera frustum + for (int j = 1; j < viewplanes.size(); j++) { + int oi = (i + j) % viewplanes.size(); + var onormal = viewplanes.get(oi); + // if the dot of the intersection point with any plane normal is negative, it is outside + if (CommonOps_DDF3.dot(intersectpt, onormal) < 0) { + intersections[i] = Double.NaN; + ipts.set(i, null); + break; + } + } + + // discard duplicate intersections + if (ipts.get(i) == null) continue; + for (int j = i - 1; j >= 0; j--) { + var oipt = ipts.get(j); + if (oipt == null) continue; + var diff = new DMatrix3(); + CommonOps_DDF3.subtract(oipt, intersectpt, diff); + if (CommonOps_DDF3.elementMaxAbs(diff) < 1e-4) { + intersections[i] = Double.NaN; + ipts.set(i, null); + break; + } + } } - var jsonViewErrors = calib.get("perViewErrors"); - double jsonAvgError = 0; - for (int j = 0; j < jsonViewErrors.size(); j++) { - jsonAvgError += jsonViewErrors.get(j).asDouble(); + + // determine visible segment (minimum and maximum t) + double inter1 = Double.NaN; + double inter2 = Double.NaN; + for (double inter : intersections) { + if (!Double.isNaN(inter)) { + if (Double.isNaN(inter1)) inter1 = inter; + else inter2 = inter; + } + } + + // two 
viewplane intersections + if (!Double.isNaN(inter2)) { + double max = Math.max(inter1, inter2); + double min = Math.min(inter1, inter2); + if (aVisible) min = 0; + if (bVisible) max = 1; + return new Pair<>(min, max); } - jsonAvgError /= jsonViewErrors.size(); - double jsonErrorStdDev = calib.get("standardDeviation").asDouble(); - // assign the read JSON values to this CameraProperties - setCalibration( - jsonWidth, - jsonHeight, - Matrix.mat(Nat.N3(), Nat.N3()).fill(jsonIntrinsics), - Matrix.mat(Nat.N5(), Nat.N1()).fill(jsonDistortion)); - setCalibError(jsonAvgError, jsonErrorStdDev); - success = true; - } - } catch (Exception e) { - throw new IOException("Invalid calibration JSON"); - } - if (!success) throw new IOException("Requested resolution not found in calibration"); - } - - public void setRandomSeed(long seed) { - rand.setSeed(seed); - } - - public void setCalibration(int resWidth, int resHeight, Rotation2d fovDiag) { - if (fovDiag.getDegrees() < 1 || fovDiag.getDegrees() > 179) { - fovDiag = Rotation2d.fromDegrees(MathUtil.clamp(fovDiag.getDegrees(), 1, 179)); - DriverStation.reportError( - "Requested invalid FOV! 
Clamping between (1, 179) degrees...", false); - } - double resDiag = Math.sqrt(resWidth * resWidth + resHeight * resHeight); - double diagRatio = Math.tan(fovDiag.getRadians() / 2); - var fovWidth = new Rotation2d(Math.atan(diagRatio * (resWidth / resDiag)) * 2); - var fovHeight = new Rotation2d(Math.atan(diagRatio * (resHeight / resDiag)) * 2); - - // assume no distortion - var distCoeff = VecBuilder.fill(0, 0, 0, 0, 0); - - // assume centered principal point (pixels) - double cx = resWidth / 2.0 - 0.5; - double cy = resHeight / 2.0 - 0.5; - - // use given fov to determine focal point (pixels) - double fx = cx / Math.tan(fovWidth.getRadians() / 2.0); - double fy = cy / Math.tan(fovHeight.getRadians() / 2.0); - - // create camera intrinsics matrix - var camIntrinsics = Matrix.mat(Nat.N3(), Nat.N3()).fill(fx, 0, cx, 0, fy, cy, 0, 0, 1); - setCalibration(resWidth, resHeight, camIntrinsics, distCoeff); - } - - public void setCalibration( - int resWidth, int resHeight, Matrix camIntrinsics, Matrix distCoeffs) { - this.resWidth = resWidth; - this.resHeight = resHeight; - this.camIntrinsics = camIntrinsics; - this.distCoeffs = distCoeffs; - - // left, right, up, and down view planes - var p = - new Translation3d[] { - new Translation3d( - 1, - new Rotation3d(0, 0, getPixelYaw(0).plus(new Rotation2d(-Math.PI / 2)).getRadians())), - new Translation3d( - 1, - new Rotation3d( - 0, 0, getPixelYaw(resWidth).plus(new Rotation2d(Math.PI / 2)).getRadians())), - new Translation3d( - 1, - new Rotation3d( - 0, getPixelPitch(0).plus(new Rotation2d(Math.PI / 2)).getRadians(), 0)), - new Translation3d( - 1, - new Rotation3d( - 0, getPixelPitch(resHeight).plus(new Rotation2d(-Math.PI / 2)).getRadians(), 0)) - }; - viewplanes.clear(); - for (int i = 0; i < p.length; i++) { - viewplanes.add(new DMatrix3(p[i].getX(), p[i].getY(), p[i].getZ())); - } - } - - public void setCalibError(double avgErrorPx, double errorStdDevPx) { - this.avgErrorPx = avgErrorPx; - this.errorStdDevPx = 
errorStdDevPx; - } - - /** - * @param fps The average frames per second the camera should process at. Exposure time limits - * FPS if set! - */ - public void setFPS(double fps) { - frameSpeedMs = Math.max(1000.0 / fps, exposureTimeMs); - } - - /** - * @param exposureTimeMs The amount of time the "shutter" is open for one frame. Affects motion - * blur. Frame speed(from FPS) is limited to this! - */ - public void setExposureTimeMs(double exposureTimeMs) { - this.exposureTimeMs = exposureTimeMs; - frameSpeedMs = Math.max(frameSpeedMs, exposureTimeMs); - } - - /** - * @param avgLatencyMs The average latency (from image capture to data published) in milliseconds - * a frame should have - */ - public void setAvgLatencyMs(double avgLatencyMs) { - this.avgLatencyMs = avgLatencyMs; - } - - /** - * @param latencyStdDevMs The standard deviation in milliseconds of the latency - */ - public void setLatencyStdDevMs(double latencyStdDevMs) { - this.latencyStdDevMs = latencyStdDevMs; - } - - public int getResWidth() { - return resWidth; - } - - public int getResHeight() { - return resHeight; - } - - public int getResArea() { - return resWidth * resHeight; - } - - /** Width:height */ - public double getAspectRatio() { - return (double) resWidth / resHeight; - } - - public Matrix getIntrinsics() { - return camIntrinsics.copy(); - } - - public Vector getDistCoeffs() { - return new Vector<>(distCoeffs); - } - - public double getFPS() { - return 1000.0 / frameSpeedMs; - } - - public double getFrameSpeedMs() { - return frameSpeedMs; - } - - public double getExposureTimeMs() { - return exposureTimeMs; - } - - public double getAvgLatencyMs() { - return avgLatencyMs; - } - - public double getLatencyStdDevMs() { - return latencyStdDevMs; - } - - public SimCameraProperties copy() { - var newProp = new SimCameraProperties(); - newProp.setCalibration(resWidth, resHeight, camIntrinsics, distCoeffs); - newProp.setCalibError(avgErrorPx, errorStdDevPx); - newProp.setFPS(getFPS()); - 
newProp.setExposureTimeMs(exposureTimeMs); - newProp.setAvgLatencyMs(avgLatencyMs); - newProp.setLatencyStdDevMs(latencyStdDevMs); - return newProp; - } - - /** - * The percentage(0 - 100) of this camera's resolution the contour takes up in pixels of the - * image. - * - * @param points Points of the contour - */ - public double getContourAreaPercent(Point[] points) { - return Imgproc.contourArea(new MatOfPoint2f(OpenCVHelp.getConvexHull(points))) - / getResArea() - * 100; - } - - /** The yaw from the principal point of this camera to the pixel x value. Positive values left. */ - public Rotation2d getPixelYaw(double pixelX) { - double fx = camIntrinsics.get(0, 0); - // account for principal point not being centered - double cx = camIntrinsics.get(0, 2); - double xOffset = cx - pixelX; - return new Rotation2d(fx, xOffset); - } - - /** - * The pitch from the principal point of this camera to the pixel y value. Pitch is positive down. - * - *

Note that this angle is naively computed and may be incorrect. See {@link - * #getCorrectedPixelRot(Point)}. - */ - public Rotation2d getPixelPitch(double pixelY) { - double fy = camIntrinsics.get(1, 1); - // account for principal point not being centered - double cy = camIntrinsics.get(1, 2); - double yOffset = cy - pixelY; - return new Rotation2d(fy, -yOffset); - } - - /** - * Finds the yaw and pitch to the given image point. Yaw is positive left, and pitch is positive - * down. - * - *

Note that pitch is naively computed and may be incorrect. See {@link - * #getCorrectedPixelRot(Point)}. - */ - public Rotation3d getPixelRot(Point point) { - return new Rotation3d( - 0, getPixelPitch(point.y).getRadians(), getPixelYaw(point.x).getRadians()); - } - - /** - * Gives the yaw and pitch of the line intersecting the camera lens and the given pixel - * coordinates on the sensor. Yaw is positive left, and pitch positive down. - * - *

The pitch traditionally calculated from pixel offsets do not correctly account for non-zero - * values of yaw because of perspective distortion (not to be confused with lens distortion)-- for - * example, the pitch angle is naively calculated as: - * - *

pitch = arctan(pixel y offset / focal length y)
- * - * However, using focal length as a side of the associated right triangle is not correct when the - * pixel x value is not 0, because the distance from this pixel (projected on the x-axis) to the - * camera lens increases. Projecting a line back out of the camera with these naive angles will - * not intersect the 3d point that was originally projected into this 2d pixel. Instead, this - * length should be: - * - *
focal length y ⟶ (focal length y / cos(arctan(pixel x offset / focal length x)))
- * - * @return Rotation3d with yaw and pitch of the line projected out of the camera from the given - * pixel (roll is zero). - */ - public Rotation3d getCorrectedPixelRot(Point point) { - double fx = camIntrinsics.get(0, 0); - double cx = camIntrinsics.get(0, 2); - double xOffset = cx - point.x; - - double fy = camIntrinsics.get(1, 1); - double cy = camIntrinsics.get(1, 2); - double yOffset = cy - point.y; - - // calculate yaw normally - var yaw = new Rotation2d(fx, xOffset); - // correct pitch based on yaw - var pitch = new Rotation2d(fy / Math.cos(Math.atan(xOffset / fx)), -yOffset); - - return new Rotation3d(0, pitch.getRadians(), yaw.getRadians()); - } - - public Rotation2d getHorizFOV() { - // sum of FOV left and right principal point - var left = getPixelYaw(0); - var right = getPixelYaw(resWidth); - return left.minus(right); - } - - public Rotation2d getVertFOV() { - // sum of FOV above and below principal point - var above = getPixelPitch(0); - var below = getPixelPitch(resHeight); - return below.minus(above); - } - - public Rotation2d getDiagFOV() { - return new Rotation2d(Math.hypot(getHorizFOV().getRadians(), getVertFOV().getRadians())); - } - - /** - * Determines where the line segment defined by the two given translations intersects the camera's - * frustum/field-of-vision, if at all. - * - *

The line is parametrized so any of its points p = t * (b - a) + a. This method - * returns these values of t, minimum first, defining the region of the line segment which is - * visible in the frustum. If both ends of the line segment are visible, this simply returns {0, - * 1}. If, for example, point b is visible while a is not, and half of the line segment is inside - * the camera frustum, {0.5, 1} would be returned. - * - * @param camRt The change in basis from world coordinates to camera coordinates. See {@link - * RotTrlTransform3d#makeRelativeTo(Pose3d)}. - * @param a The initial translation of the line - * @param b The final translation of the line - * @return A Pair of Doubles. The values may be null: - *

    - *
  • {Double, Double} : Two parametrized values(t), minimum first, representing which - * segment of the line is visible in the camera frustum. - *
  • {Double, null} : One value(t) representing a single intersection point. For example, - * the line only intersects the intersection of two adjacent viewplanes. - *
  • {null, null} : No values. The line segment is not visible in the camera frustum. - *
- */ - public Pair getVisibleLine( - RotTrlTransform3d camRt, Translation3d a, Translation3d b) { - // translations relative to the camera - var rela = camRt.apply(a); - var relb = camRt.apply(b); - - // check if both ends are behind camera - if (rela.getX() <= 0 && relb.getX() <= 0) return new Pair<>(null, null); - - var av = new DMatrix3(rela.getX(), rela.getY(), rela.getZ()); - var bv = new DMatrix3(relb.getX(), relb.getY(), relb.getZ()); - // a to b - var abv = new DMatrix3(); - CommonOps_DDF3.subtract(bv, av, abv); - - // check if the ends of the line segment are visible - boolean aVisible = true; - boolean bVisible = true; - for (int i = 0; i < viewplanes.size(); i++) { - var normal = viewplanes.get(i); - double aVisibility = CommonOps_DDF3.dot(av, normal); - if (aVisibility < 0) aVisible = false; - double bVisibility = CommonOps_DDF3.dot(bv, normal); - if (bVisibility < 0) bVisible = false; - // both ends are outside at least one of the same viewplane - if (aVisibility <= 0 && bVisibility <= 0) return new Pair<>(null, null); - } - // both ends are inside frustum - if (aVisible && bVisible) return new Pair<>(Double.valueOf(0), Double.valueOf(1)); - - // parametrized (t=0 at a, t=1 at b) intersections with viewplanes - double[] intersections = {Double.NaN, Double.NaN, Double.NaN, Double.NaN}; - // intersection points - List ipts = new ArrayList<>(); - for (double val : intersections) ipts.add(null); - - // find intersections - for (int i = 0; i < viewplanes.size(); i++) { - var normal = viewplanes.get(i); - - // we want to know the value of t when the line intercepts this plane - // parametrized: v = t * ab + a, where v lies on the plane - // we can find the projection of a onto the plane normal - // a_projn = normal.times(av.dot(normal) / normal.dot(normal)); - var a_projn = new DMatrix3(); - CommonOps_DDF3.scale( - CommonOps_DDF3.dot(av, normal) / CommonOps_DDF3.dot(normal, normal), normal, a_projn); - // this projection lets us determine the scalar multiple 
t of ab where - // (t * ab + a) is a vector which lies on the plane - if (Math.abs(CommonOps_DDF3.dot(abv, a_projn)) < 1e-5) continue; // line is parallel to plane - intersections[i] = CommonOps_DDF3.dot(a_projn, a_projn) / -CommonOps_DDF3.dot(abv, a_projn); - - // vector a to the viewplane - var apv = new DMatrix3(); - CommonOps_DDF3.scale(intersections[i], abv, apv); - // av + apv = intersection point - var intersectpt = new DMatrix3(); - CommonOps_DDF3.add(av, apv, intersectpt); - ipts.set(i, intersectpt); - - // discard intersections outside the camera frustum - for (int j = 1; j < viewplanes.size(); j++) { - int oi = (i + j) % viewplanes.size(); - var onormal = viewplanes.get(oi); - // if the dot of the intersection point with any plane normal is negative, it is outside - if (CommonOps_DDF3.dot(intersectpt, onormal) < 0) { - intersections[i] = Double.NaN; - ipts.set(i, null); - break; + // one viewplane intersection + else if (!Double.isNaN(inter1)) { + if (aVisible) return new Pair<>(Double.valueOf(0), inter1); + if (bVisible) return new Pair<>(inter1, Double.valueOf(1)); + return new Pair<>(inter1, null); } - } - - // discard duplicate intersections - if (ipts.get(i) == null) continue; - for (int j = i - 1; j >= 0; j--) { - var oipt = ipts.get(j); - if (oipt == null) continue; - var diff = new DMatrix3(); - CommonOps_DDF3.subtract(oipt, intersectpt, diff); - if (CommonOps_DDF3.elementMaxAbs(diff) < 1e-4) { - intersections[i] = Double.NaN; - ipts.set(i, null); - break; + // no intersections + else return new Pair<>(null, null); + } + + /** Returns these points after applying this camera's estimated noise. 
*/ + public Point[] estPixelNoise(Point[] points) { + if (avgErrorPx == 0 && errorStdDevPx == 0) return points; + + Point[] noisyPts = new Point[points.length]; + for (int i = 0; i < points.length; i++) { + var p = points[i]; + // error pixels in random direction + double error = avgErrorPx + rand.nextGaussian() * errorStdDevPx; + double errorAngle = rand.nextDouble() * 2 * Math.PI - Math.PI; + noisyPts[i] = + new Point(p.x + error * Math.cos(errorAngle), p.y + error * Math.sin(errorAngle)); } - } - } - - // determine visible segment (minimum and maximum t) - double inter1 = Double.NaN; - double inter2 = Double.NaN; - for (double inter : intersections) { - if (!Double.isNaN(inter)) { - if (Double.isNaN(inter1)) inter1 = inter; - else inter2 = inter; - } - } - - // two viewplane intersections - if (!Double.isNaN(inter2)) { - double max = Math.max(inter1, inter2); - double min = Math.min(inter1, inter2); - if (aVisible) min = 0; - if (bVisible) max = 1; - return new Pair<>(min, max); - } - // one viewplane intersection - else if (!Double.isNaN(inter1)) { - if (aVisible) return new Pair<>(Double.valueOf(0), inter1); - if (bVisible) return new Pair<>(inter1, Double.valueOf(1)); - return new Pair<>(inter1, null); - } - // no intersections - else return new Pair<>(null, null); - } - - /** Returns these points after applying this camera's estimated noise. 
*/ - public Point[] estPixelNoise(Point[] points) { - if (avgErrorPx == 0 && errorStdDevPx == 0) return points; - - Point[] noisyPts = new Point[points.length]; - for (int i = 0; i < points.length; i++) { - var p = points[i]; - // error pixels in random direction - double error = avgErrorPx + rand.nextGaussian() * errorStdDevPx; - double errorAngle = rand.nextDouble() * 2 * Math.PI - Math.PI; - noisyPts[i] = - new Point(p.x + error * Math.cos(errorAngle), p.y + error * Math.sin(errorAngle)); - } - return noisyPts; - } - - /** - * @return Noisy estimation of a frame's processing latency in milliseconds - */ - public double estLatencyMs() { - return Math.max(avgLatencyMs + rand.nextGaussian() * latencyStdDevMs, 0); - } - - /** - * @return Estimate how long until the next frame should be processed in milliseconds - */ - public double estMsUntilNextFrame() { - // exceptional processing latency blocks the next frame - return frameSpeedMs + Math.max(0, estLatencyMs() - frameSpeedMs); - } - - // pre-calibrated example cameras - - /** 960x720 resolution, 90 degree FOV, "perfect" lagless camera */ - public static SimCameraProperties PERFECT_90DEG() { - return new SimCameraProperties(); - } - - public static SimCameraProperties PI4_LIFECAM_320_240() { - var prop = new SimCameraProperties(); - prop.setCalibration( - 320, - 240, - Matrix.mat(Nat.N3(), Nat.N3()) - .fill( // intrinsic - 328.2733242048587, - 0.0, - 164.8190261141906, - 0.0, - 318.0609794305216, - 123.8633838438093, - 0.0, - 0.0, - 1.0), - VecBuilder.fill( // distort - 0.09957946553445934, - -0.9166265114485799, - 0.0019519890627236526, - -0.0036071725380870333, - 1.5627234622420942)); - prop.setCalibError(0.21, 0.0124); - prop.setFPS(30); - prop.setAvgLatencyMs(30); - prop.setLatencyStdDevMs(10); - return prop; - } - - public static SimCameraProperties PI4_LIFECAM_640_480() { - var prop = new SimCameraProperties(); - prop.setCalibration( - 640, - 480, - Matrix.mat(Nat.N3(), Nat.N3()) - .fill( // intrinsic - 
669.1428078983059, - 0.0, - 322.53377249329213, - 0.0, - 646.9843137061716, - 241.26567383784163, - 0.0, - 0.0, - 1.0), - VecBuilder.fill( // distort - 0.12788470750464645, - -1.2350335805796528, - 0.0024990767286192732, - -0.0026958287600230705, - 2.2951386729115537)); - prop.setCalibError(0.26, 0.046); - prop.setFPS(15); - prop.setAvgLatencyMs(65); - prop.setLatencyStdDevMs(15); - return prop; - } - - public static SimCameraProperties LL2_640_480() { - var prop = new SimCameraProperties(); - prop.setCalibration( - 640, - 480, - Matrix.mat(Nat.N3(), Nat.N3()) - .fill( // intrinsic - 511.22843367007755, - 0.0, - 323.62049380211096, - 0.0, - 514.5452336723849, - 261.8827920543568, - 0.0, - 0.0, - 1.0), - VecBuilder.fill( // distort - 0.1917469998873756, - -0.5142936883324216, - 0.012461562046896614, - 0.0014084973492408186, - 0.35160648971214437)); - prop.setCalibError(0.25, 0.05); - prop.setFPS(15); - prop.setAvgLatencyMs(35); - prop.setLatencyStdDevMs(8); - return prop; - } - - public static SimCameraProperties LL2_960_720() { - var prop = new SimCameraProperties(); - prop.setCalibration( - 960, - 720, - Matrix.mat(Nat.N3(), Nat.N3()) - .fill( // intrinsic - 769.6873145148892, - 0.0, - 486.1096609458122, - 0.0, - 773.8164483705323, - 384.66071662358354, - 0.0, - 0.0, - 1.0), - VecBuilder.fill( // distort - 0.189462064814501, - -0.49903003669627627, - 0.007468423590519429, - 0.002496885298683693, - 0.3443122090208624)); - prop.setCalibError(0.35, 0.10); - prop.setFPS(10); - prop.setAvgLatencyMs(50); - prop.setLatencyStdDevMs(15); - return prop; - } - - public static SimCameraProperties LL2_1280_720() { - var prop = new SimCameraProperties(); - prop.setCalibration( - 1280, - 720, - Matrix.mat(Nat.N3(), Nat.N3()) - .fill( // intrinsic - 1011.3749416937393, - 0.0, - 645.4955139388737, - 0.0, - 1008.5391755084075, - 508.32877656020196, - 0.0, - 0.0, - 1.0), - VecBuilder.fill( // distort - 0.13730101577061535, - -0.2904345656989261, - 8.32475714507539E-4, - 
-3.694397782014239E-4, - 0.09487962227027584)); - prop.setCalibError(0.37, 0.06); - prop.setFPS(7); - prop.setAvgLatencyMs(60); - prop.setLatencyStdDevMs(20); - return prop; - } + return noisyPts; + } + + /** + * @return Noisy estimation of a frame's processing latency in milliseconds + */ + public double estLatencyMs() { + return Math.max(avgLatencyMs + rand.nextGaussian() * latencyStdDevMs, 0); + } + + /** + * @return Estimate how long until the next frame should be processed in milliseconds + */ + public double estMsUntilNextFrame() { + // exceptional processing latency blocks the next frame + return frameSpeedMs + Math.max(0, estLatencyMs() - frameSpeedMs); + } + + // pre-calibrated example cameras + + /** 960x720 resolution, 90 degree FOV, "perfect" lagless camera */ + public static SimCameraProperties PERFECT_90DEG() { + return new SimCameraProperties(); + } + + public static SimCameraProperties PI4_LIFECAM_320_240() { + var prop = new SimCameraProperties(); + prop.setCalibration( + 320, + 240, + Matrix.mat(Nat.N3(), Nat.N3()) + .fill( // intrinsic + 328.2733242048587, + 0.0, + 164.8190261141906, + 0.0, + 318.0609794305216, + 123.8633838438093, + 0.0, + 0.0, + 1.0), + VecBuilder.fill( // distort + 0.09957946553445934, + -0.9166265114485799, + 0.0019519890627236526, + -0.0036071725380870333, + 1.5627234622420942)); + prop.setCalibError(0.21, 0.0124); + prop.setFPS(30); + prop.setAvgLatencyMs(30); + prop.setLatencyStdDevMs(10); + return prop; + } + + public static SimCameraProperties PI4_LIFECAM_640_480() { + var prop = new SimCameraProperties(); + prop.setCalibration( + 640, + 480, + Matrix.mat(Nat.N3(), Nat.N3()) + .fill( // intrinsic + 669.1428078983059, + 0.0, + 322.53377249329213, + 0.0, + 646.9843137061716, + 241.26567383784163, + 0.0, + 0.0, + 1.0), + VecBuilder.fill( // distort + 0.12788470750464645, + -1.2350335805796528, + 0.0024990767286192732, + -0.0026958287600230705, + 2.2951386729115537)); + prop.setCalibError(0.26, 0.046); + prop.setFPS(15); + 
prop.setAvgLatencyMs(65); + prop.setLatencyStdDevMs(15); + return prop; + } + + public static SimCameraProperties LL2_640_480() { + var prop = new SimCameraProperties(); + prop.setCalibration( + 640, + 480, + Matrix.mat(Nat.N3(), Nat.N3()) + .fill( // intrinsic + 511.22843367007755, + 0.0, + 323.62049380211096, + 0.0, + 514.5452336723849, + 261.8827920543568, + 0.0, + 0.0, + 1.0), + VecBuilder.fill( // distort + 0.1917469998873756, + -0.5142936883324216, + 0.012461562046896614, + 0.0014084973492408186, + 0.35160648971214437)); + prop.setCalibError(0.25, 0.05); + prop.setFPS(15); + prop.setAvgLatencyMs(35); + prop.setLatencyStdDevMs(8); + return prop; + } + + public static SimCameraProperties LL2_960_720() { + var prop = new SimCameraProperties(); + prop.setCalibration( + 960, + 720, + Matrix.mat(Nat.N3(), Nat.N3()) + .fill( // intrinsic + 769.6873145148892, + 0.0, + 486.1096609458122, + 0.0, + 773.8164483705323, + 384.66071662358354, + 0.0, + 0.0, + 1.0), + VecBuilder.fill( // distort + 0.189462064814501, + -0.49903003669627627, + 0.007468423590519429, + 0.002496885298683693, + 0.3443122090208624)); + prop.setCalibError(0.35, 0.10); + prop.setFPS(10); + prop.setAvgLatencyMs(50); + prop.setLatencyStdDevMs(15); + return prop; + } + + public static SimCameraProperties LL2_1280_720() { + var prop = new SimCameraProperties(); + prop.setCalibration( + 1280, + 720, + Matrix.mat(Nat.N3(), Nat.N3()) + .fill( // intrinsic + 1011.3749416937393, + 0.0, + 645.4955139388737, + 0.0, + 1008.5391755084075, + 508.32877656020196, + 0.0, + 0.0, + 1.0), + VecBuilder.fill( // distort + 0.13730101577061535, + -0.2904345656989261, + 8.32475714507539E-4, + -3.694397782014239E-4, + 0.09487962227027584)); + prop.setCalibError(0.37, 0.06); + prop.setFPS(7); + prop.setAvgLatencyMs(60); + prop.setLatencyStdDevMs(20); + return prop; + } } diff --git a/photon-lib/src/main/java/org/photonvision/simulation/SimPhotonCamera.java 
b/photon-lib/src/main/java/org/photonvision/simulation/SimPhotonCamera.java index f9936dae65..58e3e2fcf7 100644 --- a/photon-lib/src/main/java/org/photonvision/simulation/SimPhotonCamera.java +++ b/photon-lib/src/main/java/org/photonvision/simulation/SimPhotonCamera.java @@ -45,137 +45,137 @@ @Deprecated @SuppressWarnings("unused") public class SimPhotonCamera { - NTTopicSet ts = new NTTopicSet(); - PhotonPipelineResult latestResult; - private long heartbeatCounter = 0; - - /** - * Constructs a Simulated PhotonCamera from a root table. - * - * @param instance The NetworkTableInstance to pull data from. This can be a custom instance in - * simulation, but should *usually* be the default NTInstance from - * NetworkTableInstance::getDefault - * @param cameraName The name of the camera, as seen in the UI. - */ - public SimPhotonCamera(NetworkTableInstance instance, String cameraName) { - ts.removeEntries(); - ts.subTable = instance.getTable(PhotonCamera.kTableName).getSubTable(cameraName); - ts.updateEntries(); - } - - /** - * Publishes the camera intrinsics matrix. The matrix should be in the form: spotless:off + NTTopicSet ts = new NTTopicSet(); + PhotonPipelineResult latestResult; + private long heartbeatCounter = 0; + + /** + * Constructs a Simulated PhotonCamera from a root table. + * + * @param instance The NetworkTableInstance to pull data from. This can be a custom instance in + * simulation, but should *usually* be the default NTInstance from + * NetworkTableInstance::getDefault + * @param cameraName The name of the camera, as seen in the UI. + */ + public SimPhotonCamera(NetworkTableInstance instance, String cameraName) { + ts.removeEntries(); + ts.subTable = instance.getTable(PhotonCamera.kTableName).getSubTable(cameraName); + ts.updateEntries(); + } + + /** + * Publishes the camera intrinsics matrix. 
The matrix should be in the form: spotless:off * fx 0 cx * 0 fy cy * 0 0 1 * @param cameraMatrix The cam matrix * spotless:on - */ - public void setCameraIntrinsicsMat(Matrix cameraMatrix) { - ts.cameraIntrinsicsPublisher.set(cameraMatrix.getData()); - } - - /** - * Publishes the camera distortion matrix. The matrix should be in the form [k1 k2 p1 p2 k3]. See - * more: https://docs.opencv.org/3.4/d4/d94/tutorial_camera_calibration.html - * - * @param distortionMat The distortion mat - */ - public void setCameraDistortionMat(Matrix distortionMat) { - ts.cameraDistortionPublisher.set(distortionMat.getData()); - } - - /** - * Constructs a Simulated PhotonCamera from the name of the camera. - * - * @param cameraName The nickname of the camera (found in the PhotonVision UI). - */ - public SimPhotonCamera(String cameraName) { - this(NetworkTableInstance.getDefault(), cameraName); - } - - /** - * Simulate one processed frame of vision data, putting one result to NT. - * - * @param latencyMillis Latency of the provided frame - * @param targets Each target detected - */ - public void submitProcessedFrame(double latencyMillis, PhotonTrackedTarget... targets) { - submitProcessedFrame(latencyMillis, Arrays.asList(targets)); - } - - /** - * Simulate one processed frame of vision data, putting one result to NT. - * - * @param latencyMillis Latency of the provided frame - * @param sortMode Order in which to sort targets - * @param targets Each target detected - */ - public void submitProcessedFrame( - double latencyMillis, PhotonTargetSortMode sortMode, PhotonTrackedTarget... targets) { - submitProcessedFrame(latencyMillis, sortMode, Arrays.asList(targets)); - } - - /** - * Simulate one processed frame of vision data, putting one result to NT. 
- * - * @param latencyMillis Latency of the provided frame - * @param targetList List of targets detected - */ - public void submitProcessedFrame(double latencyMillis, List targetList) { - submitProcessedFrame(latencyMillis, null, targetList); - } - - /** - * Simulate one processed frame of vision data, putting one result to NT. - * - * @param latencyMillis Latency of the provided frame - * @param sortMode Order in which to sort targets - * @param targetList List of targets detected - */ - public void submitProcessedFrame( - double latencyMillis, PhotonTargetSortMode sortMode, List targetList) { - ts.latencyMillisEntry.set(latencyMillis); - - if (sortMode != null) { - targetList.sort(sortMode.getComparator()); + */ + public void setCameraIntrinsicsMat(Matrix cameraMatrix) { + ts.cameraIntrinsicsPublisher.set(cameraMatrix.getData()); + } + + /** + * Publishes the camera distortion matrix. The matrix should be in the form [k1 k2 p1 p2 k3]. See + * more: https://docs.opencv.org/3.4/d4/d94/tutorial_camera_calibration.html + * + * @param distortionMat The distortion mat + */ + public void setCameraDistortionMat(Matrix distortionMat) { + ts.cameraDistortionPublisher.set(distortionMat.getData()); } - PhotonPipelineResult newResult = - new PhotonPipelineResult(latencyMillis, targetList, new MultiTargetPNPResults()); - var newPacket = new Packet(newResult.getPacketSize()); - newResult.populatePacket(newPacket); - ts.rawBytesEntry.set(newPacket.getData()); - - boolean hasTargets = newResult.hasTargets(); - ts.hasTargetEntry.set(hasTargets); - if (!hasTargets) { - ts.targetPitchEntry.set(0.0); - ts.targetYawEntry.set(0.0); - ts.targetAreaEntry.set(0.0); - ts.targetPoseEntry.set(new double[] {0.0, 0.0, 0.0}); - ts.targetSkewEntry.set(0.0); - } else { - var bestTarget = newResult.getBestTarget(); - - ts.targetPitchEntry.set(bestTarget.getPitch()); - ts.targetYawEntry.set(bestTarget.getYaw()); - ts.targetAreaEntry.set(bestTarget.getArea()); - 
ts.targetSkewEntry.set(bestTarget.getSkew()); - - var transform = bestTarget.getBestCameraToTarget(); - double[] poseData = { - transform.getX(), transform.getY(), transform.getRotation().toRotation2d().getDegrees() - }; - ts.targetPoseEntry.set(poseData); + /** + * Constructs a Simulated PhotonCamera from the name of the camera. + * + * @param cameraName The nickname of the camera (found in the PhotonVision UI). + */ + public SimPhotonCamera(String cameraName) { + this(NetworkTableInstance.getDefault(), cameraName); } - ts.heartbeatPublisher.set(heartbeatCounter++); + /** + * Simulate one processed frame of vision data, putting one result to NT. + * + * @param latencyMillis Latency of the provided frame + * @param targets Each target detected + */ + public void submitProcessedFrame(double latencyMillis, PhotonTrackedTarget... targets) { + submitProcessedFrame(latencyMillis, Arrays.asList(targets)); + } - latestResult = newResult; - } + /** + * Simulate one processed frame of vision data, putting one result to NT. + * + * @param latencyMillis Latency of the provided frame + * @param sortMode Order in which to sort targets + * @param targets Each target detected + */ + public void submitProcessedFrame( + double latencyMillis, PhotonTargetSortMode sortMode, PhotonTrackedTarget... targets) { + submitProcessedFrame(latencyMillis, sortMode, Arrays.asList(targets)); + } - PhotonPipelineResult getLatestResult() { - return latestResult; - } + /** + * Simulate one processed frame of vision data, putting one result to NT. + * + * @param latencyMillis Latency of the provided frame + * @param targetList List of targets detected + */ + public void submitProcessedFrame(double latencyMillis, List targetList) { + submitProcessedFrame(latencyMillis, null, targetList); + } + + /** + * Simulate one processed frame of vision data, putting one result to NT. 
+ * + * @param latencyMillis Latency of the provided frame + * @param sortMode Order in which to sort targets + * @param targetList List of targets detected + */ + public void submitProcessedFrame( + double latencyMillis, PhotonTargetSortMode sortMode, List targetList) { + ts.latencyMillisEntry.set(latencyMillis); + + if (sortMode != null) { + targetList.sort(sortMode.getComparator()); + } + + PhotonPipelineResult newResult = + new PhotonPipelineResult(latencyMillis, targetList, new MultiTargetPNPResults()); + var newPacket = new Packet(newResult.getPacketSize()); + newResult.populatePacket(newPacket); + ts.rawBytesEntry.set(newPacket.getData()); + + boolean hasTargets = newResult.hasTargets(); + ts.hasTargetEntry.set(hasTargets); + if (!hasTargets) { + ts.targetPitchEntry.set(0.0); + ts.targetYawEntry.set(0.0); + ts.targetAreaEntry.set(0.0); + ts.targetPoseEntry.set(new double[] {0.0, 0.0, 0.0}); + ts.targetSkewEntry.set(0.0); + } else { + var bestTarget = newResult.getBestTarget(); + + ts.targetPitchEntry.set(bestTarget.getPitch()); + ts.targetYawEntry.set(bestTarget.getYaw()); + ts.targetAreaEntry.set(bestTarget.getArea()); + ts.targetSkewEntry.set(bestTarget.getSkew()); + + var transform = bestTarget.getBestCameraToTarget(); + double[] poseData = { + transform.getX(), transform.getY(), transform.getRotation().toRotation2d().getDegrees() + }; + ts.targetPoseEntry.set(poseData); + } + + ts.heartbeatPublisher.set(heartbeatCounter++); + + latestResult = newResult; + } + + PhotonPipelineResult getLatestResult() { + return latestResult; + } } diff --git a/photon-lib/src/main/java/org/photonvision/simulation/SimVisionSystem.java b/photon-lib/src/main/java/org/photonvision/simulation/SimVisionSystem.java index c7fdf445b6..52756f4727 100644 --- a/photon-lib/src/main/java/org/photonvision/simulation/SimVisionSystem.java +++ b/photon-lib/src/main/java/org/photonvision/simulation/SimVisionSystem.java @@ -46,228 +46,228 @@ */ @Deprecated public class SimVisionSystem { - 
SimPhotonCamera cam; - - double camHorizFOVDegrees; - double camVertFOVDegrees; - double cameraHeightOffGroundMeters; - double maxLEDRangeMeters; - int cameraResWidth; - int cameraResHeight; - double minTargetArea; - Transform3d robotToCamera; - - Field2d dbgField; - FieldObject2d dbgRobot; - FieldObject2d dbgCamera; - - ArrayList tgtList; - - /** - * Create a simulated vision system involving a camera and coprocessor mounted on a mobile robot - * running PhotonVision, detecting one or more targets scattered around the field. This assumes a - * fairly simple and distortion-less pinhole camera model. - * - * @param camName Name of the PhotonVision camera to create. Align it with the settings you use in - * the PhotonVision GUI. - * @param camDiagFOVDegrees Diagonal Field of View of the camera used. Align it with the - * manufacturer specifications, and/or whatever is configured in the PhotonVision Setting - * page. - * @param robotToCamera Transform to move from the center of the robot to the camera's mount - * position - * @param maxLEDRangeMeters Maximum distance at which your camera can illuminate the target and - * make it visible. Set to 9000 or more if your vision system does not rely on LED's. - * @param cameraResWidth Width of your camera's image sensor in pixels - * @param cameraResHeight Height of your camera's image sensor in pixels - * @param minTargetArea Minimum area that that the target should be before it's recognized as a - * target by the camera. Match this with your contour filtering settings in the PhotonVision - * GUI. 
- */ - public SimVisionSystem( - String camName, - double camDiagFOVDegrees, - Transform3d robotToCamera, - double maxLEDRangeMeters, - int cameraResWidth, - int cameraResHeight, - double minTargetArea) { - this.robotToCamera = robotToCamera; - this.maxLEDRangeMeters = maxLEDRangeMeters; - this.cameraResWidth = cameraResWidth; - this.cameraResHeight = cameraResHeight; - this.minTargetArea = minTargetArea; - - // Calculate horizontal/vertical FOV by similar triangles - double hypotPixels = Math.hypot(cameraResWidth, cameraResHeight); - this.camHorizFOVDegrees = camDiagFOVDegrees * cameraResWidth / hypotPixels; - this.camVertFOVDegrees = camDiagFOVDegrees * cameraResHeight / hypotPixels; - - cam = new SimPhotonCamera(camName); - tgtList = new ArrayList<>(); - - dbgField = new Field2d(); - dbgRobot = dbgField.getRobotObject(); - dbgCamera = dbgField.getObject(camName + " Camera"); - SmartDashboard.putData(camName + " Sim Field", dbgField); - } - - /** - * Add a target on the field which your vision system is designed to detect. The PhotonCamera from - * this system will report the location of the robot relative to the subset of these targets which - * are visible from the given robot position. - * - * @param target Target to add to the simulated field - */ - public void addSimVisionTarget(SimVisionTarget target) { - tgtList.add(target); - dbgField - .getObject("Target " + Integer.toString(target.targetID)) - .setPose(target.targetPose.toPose2d()); - } - - /** - * Adds all apriltags from the provided {@link AprilTagFieldLayout} as sim vision targets. The - * poses added will preserve the tag layout's alliance origin at the time of calling this method. 
- * - * @param tagLayout The field tag layout to get Apriltag poses and IDs from - */ - public void addVisionTargets(AprilTagFieldLayout tagLayout) { - for (AprilTag tag : tagLayout.getTags()) { - addSimVisionTarget( - new SimVisionTarget( - tagLayout.getTagPose(tag.ID).get(), // preserve alliance rotation - Units.inchesToMeters(6), - Units.inchesToMeters(6), - tag.ID)); + SimPhotonCamera cam; + + double camHorizFOVDegrees; + double camVertFOVDegrees; + double cameraHeightOffGroundMeters; + double maxLEDRangeMeters; + int cameraResWidth; + int cameraResHeight; + double minTargetArea; + Transform3d robotToCamera; + + Field2d dbgField; + FieldObject2d dbgRobot; + FieldObject2d dbgCamera; + + ArrayList tgtList; + + /** + * Create a simulated vision system involving a camera and coprocessor mounted on a mobile robot + * running PhotonVision, detecting one or more targets scattered around the field. This assumes a + * fairly simple and distortion-less pinhole camera model. + * + * @param camName Name of the PhotonVision camera to create. Align it with the settings you use in + * the PhotonVision GUI. + * @param camDiagFOVDegrees Diagonal Field of View of the camera used. Align it with the + * manufacturer specifications, and/or whatever is configured in the PhotonVision Setting + * page. + * @param robotToCamera Transform to move from the center of the robot to the camera's mount + * position + * @param maxLEDRangeMeters Maximum distance at which your camera can illuminate the target and + * make it visible. Set to 9000 or more if your vision system does not rely on LED's. + * @param cameraResWidth Width of your camera's image sensor in pixels + * @param cameraResHeight Height of your camera's image sensor in pixels + * @param minTargetArea Minimum area that that the target should be before it's recognized as a + * target by the camera. Match this with your contour filtering settings in the PhotonVision + * GUI. 
+ */ + public SimVisionSystem( + String camName, + double camDiagFOVDegrees, + Transform3d robotToCamera, + double maxLEDRangeMeters, + int cameraResWidth, + int cameraResHeight, + double minTargetArea) { + this.robotToCamera = robotToCamera; + this.maxLEDRangeMeters = maxLEDRangeMeters; + this.cameraResWidth = cameraResWidth; + this.cameraResHeight = cameraResHeight; + this.minTargetArea = minTargetArea; + + // Calculate horizontal/vertical FOV by similar triangles + double hypotPixels = Math.hypot(cameraResWidth, cameraResHeight); + this.camHorizFOVDegrees = camDiagFOVDegrees * cameraResWidth / hypotPixels; + this.camVertFOVDegrees = camDiagFOVDegrees * cameraResHeight / hypotPixels; + + cam = new SimPhotonCamera(camName); + tgtList = new ArrayList<>(); + + dbgField = new Field2d(); + dbgRobot = dbgField.getRobotObject(); + dbgCamera = dbgField.getObject(camName + " Camera"); + SmartDashboard.putData(camName + " Sim Field", dbgField); + } + + /** + * Add a target on the field which your vision system is designed to detect. The PhotonCamera from + * this system will report the location of the robot relative to the subset of these targets which + * are visible from the given robot position. + * + * @param target Target to add to the simulated field + */ + public void addSimVisionTarget(SimVisionTarget target) { + tgtList.add(target); + dbgField + .getObject("Target " + Integer.toString(target.targetID)) + .setPose(target.targetPose.toPose2d()); + } + + /** + * Adds all apriltags from the provided {@link AprilTagFieldLayout} as sim vision targets. The + * poses added will preserve the tag layout's alliance origin at the time of calling this method. 
+ * + * @param tagLayout The field tag layout to get Apriltag poses and IDs from + */ + public void addVisionTargets(AprilTagFieldLayout tagLayout) { + for (AprilTag tag : tagLayout.getTags()) { + addSimVisionTarget( + new SimVisionTarget( + tagLayout.getTagPose(tag.ID).get(), // preserve alliance rotation + Units.inchesToMeters(6), + Units.inchesToMeters(6), + tag.ID)); + } + } + + /** + * Clears all sim vision targets. This is useful for switching alliances and needing to repopulate + * the sim targets. NOTE: Old targets will still show on the Field2d unless overwritten by new + * targets with the same ID + */ + public void clearVisionTargets() { + tgtList.clear(); + } + + /** + * Adjust the camera position relative to the robot. Use this if your camera is on a gimbal or + * turret or some other mobile platform. + * + * @param newRobotToCamera New Transform from the robot to the camera + */ + public void moveCamera(Transform3d newRobotToCamera) { + this.robotToCamera = newRobotToCamera; + } + + /** + * Periodic update. Call this once per frame of image data you wish to process and send to + * NetworkTables + * + * @param robotPoseMeters current pose of the robot on the field. Will be used to calculate which + * targets are actually in view, where they are at relative to the robot, and relevant + * PhotonVision parameters. + */ + public void processFrame(Pose2d robotPoseMeters) { + processFrame(new Pose3d(robotPoseMeters)); + } + + /** + * Periodic update. Call this once per frame of image data you wish to process and send to + * NetworkTables + * + * @param robotPoseMeters current pose of the robot in space. Will be used to calculate which + * targets are actually in view, where they are at relative to the robot, and relevant + * PhotonVision parameters. 
+ */ + public void processFrame(Pose3d robotPoseMeters) { + Pose3d cameraPose = robotPoseMeters.transformBy(robotToCamera); + + dbgRobot.setPose(robotPoseMeters.toPose2d()); + dbgCamera.setPose(cameraPose.toPose2d()); + + ArrayList visibleTgtList = new ArrayList<>(tgtList.size()); + + tgtList.forEach( + (tgt) -> { + var camToTargetTrans = new Transform3d(cameraPose, tgt.targetPose); + + // Generate a transformation from camera to target, + // ignoring rotation. + var t = camToTargetTrans.getTranslation(); + + // Rough approximation of the alternate solution, which is (so far) always incorrect. + var altTrans = + new Translation3d( + t.getX(), + -1.0 * t.getY(), + t.getZ()); // mirrored across camera axis in Y direction + var altRot = camToTargetTrans.getRotation().times(-1.0); // flipped + var camToTargetTransAlt = new Transform3d(altTrans, altRot); + + double distMeters = t.getNorm(); + + double area_px = tgt.tgtAreaMeters2 / getM2PerPx(distMeters); + + var translationAlongGround = + new Translation2d( + tgt.targetPose.toPose2d().getX() - cameraPose.toPose2d().getX(), + tgt.targetPose.toPose2d().getY() - cameraPose.toPose2d().getY()); + + var camAngle = cameraPose.getRotation().toRotation2d(); + var camToTgtRotation = + new Rotation2d(translationAlongGround.getX(), translationAlongGround.getY()); + double yawDegrees = camToTgtRotation.minus(camAngle).getDegrees(); + + double camHeightAboveGround = cameraPose.getZ(); + double tgtHeightAboveGround = tgt.targetPose.getZ(); + double camPitchDegrees = Units.radiansToDegrees(cameraPose.getRotation().getY()); + + double distAlongGround = translationAlongGround.getNorm(); + + double pitchDegrees = + Units.radiansToDegrees( + Math.atan2((tgtHeightAboveGround - camHeightAboveGround), distAlongGround)) + - camPitchDegrees; + + if (camCanSeeTarget(distMeters, yawDegrees, pitchDegrees, area_px)) { + // TODO simulate target corners + visibleTgtList.add( + new PhotonTrackedTarget( + yawDegrees, + pitchDegrees, + area_px, + 0.0, 
+ tgt.targetID, + camToTargetTrans, + camToTargetTransAlt, + 0.0, // TODO - simulate ambiguity when straight on? + List.of( + new TargetCorner(0, 0), new TargetCorner(0, 0), + new TargetCorner(0, 0), new TargetCorner(0, 0)), + List.of( + new TargetCorner(0, 0), new TargetCorner(0, 0), + new TargetCorner(0, 0), new TargetCorner(0, 0)))); + } + }); + + cam.submitProcessedFrame(0.0, visibleTgtList); + } + + double getM2PerPx(double dist) { + double widthMPerPx = + 2 * dist * Math.tan(Units.degreesToRadians(this.camHorizFOVDegrees) / 2) / cameraResWidth; + double heightMPerPx = + 2 * dist * Math.tan(Units.degreesToRadians(this.camVertFOVDegrees) / 2) / cameraResHeight; + return widthMPerPx * heightMPerPx; + } + + boolean camCanSeeTarget(double distMeters, double yaw, double pitch, double area) { + boolean inRange = (distMeters < this.maxLEDRangeMeters); + boolean inHorizAngle = Math.abs(yaw) < (this.camHorizFOVDegrees / 2); + boolean inVertAngle = Math.abs(pitch) < (this.camVertFOVDegrees / 2); + boolean targetBigEnough = area > this.minTargetArea; + return (inRange && inHorizAngle && inVertAngle && targetBigEnough); } - } - - /** - * Clears all sim vision targets. This is useful for switching alliances and needing to repopulate - * the sim targets. NOTE: Old targets will still show on the Field2d unless overwritten by new - * targets with the same ID - */ - public void clearVisionTargets() { - tgtList.clear(); - } - - /** - * Adjust the camera position relative to the robot. Use this if your camera is on a gimbal or - * turret or some other mobile platform. - * - * @param newRobotToCamera New Transform from the robot to the camera - */ - public void moveCamera(Transform3d newRobotToCamera) { - this.robotToCamera = newRobotToCamera; - } - - /** - * Periodic update. Call this once per frame of image data you wish to process and send to - * NetworkTables - * - * @param robotPoseMeters current pose of the robot on the field. 
Will be used to calculate which - * targets are actually in view, where they are at relative to the robot, and relevant - * PhotonVision parameters. - */ - public void processFrame(Pose2d robotPoseMeters) { - processFrame(new Pose3d(robotPoseMeters)); - } - - /** - * Periodic update. Call this once per frame of image data you wish to process and send to - * NetworkTables - * - * @param robotPoseMeters current pose of the robot in space. Will be used to calculate which - * targets are actually in view, where they are at relative to the robot, and relevant - * PhotonVision parameters. - */ - public void processFrame(Pose3d robotPoseMeters) { - Pose3d cameraPose = robotPoseMeters.transformBy(robotToCamera); - - dbgRobot.setPose(robotPoseMeters.toPose2d()); - dbgCamera.setPose(cameraPose.toPose2d()); - - ArrayList visibleTgtList = new ArrayList<>(tgtList.size()); - - tgtList.forEach( - (tgt) -> { - var camToTargetTrans = new Transform3d(cameraPose, tgt.targetPose); - - // Generate a transformation from camera to target, - // ignoring rotation. - var t = camToTargetTrans.getTranslation(); - - // Rough approximation of the alternate solution, which is (so far) always incorrect. 
- var altTrans = - new Translation3d( - t.getX(), - -1.0 * t.getY(), - t.getZ()); // mirrored across camera axis in Y direction - var altRot = camToTargetTrans.getRotation().times(-1.0); // flipped - var camToTargetTransAlt = new Transform3d(altTrans, altRot); - - double distMeters = t.getNorm(); - - double area_px = tgt.tgtAreaMeters2 / getM2PerPx(distMeters); - - var translationAlongGround = - new Translation2d( - tgt.targetPose.toPose2d().getX() - cameraPose.toPose2d().getX(), - tgt.targetPose.toPose2d().getY() - cameraPose.toPose2d().getY()); - - var camAngle = cameraPose.getRotation().toRotation2d(); - var camToTgtRotation = - new Rotation2d(translationAlongGround.getX(), translationAlongGround.getY()); - double yawDegrees = camToTgtRotation.minus(camAngle).getDegrees(); - - double camHeightAboveGround = cameraPose.getZ(); - double tgtHeightAboveGround = tgt.targetPose.getZ(); - double camPitchDegrees = Units.radiansToDegrees(cameraPose.getRotation().getY()); - - double distAlongGround = translationAlongGround.getNorm(); - - double pitchDegrees = - Units.radiansToDegrees( - Math.atan2((tgtHeightAboveGround - camHeightAboveGround), distAlongGround)) - - camPitchDegrees; - - if (camCanSeeTarget(distMeters, yawDegrees, pitchDegrees, area_px)) { - // TODO simulate target corners - visibleTgtList.add( - new PhotonTrackedTarget( - yawDegrees, - pitchDegrees, - area_px, - 0.0, - tgt.targetID, - camToTargetTrans, - camToTargetTransAlt, - 0.0, // TODO - simulate ambiguity when straight on? 
- List.of( - new TargetCorner(0, 0), new TargetCorner(0, 0), - new TargetCorner(0, 0), new TargetCorner(0, 0)), - List.of( - new TargetCorner(0, 0), new TargetCorner(0, 0), - new TargetCorner(0, 0), new TargetCorner(0, 0)))); - } - }); - - cam.submitProcessedFrame(0.0, visibleTgtList); - } - - double getM2PerPx(double dist) { - double widthMPerPx = - 2 * dist * Math.tan(Units.degreesToRadians(this.camHorizFOVDegrees) / 2) / cameraResWidth; - double heightMPerPx = - 2 * dist * Math.tan(Units.degreesToRadians(this.camVertFOVDegrees) / 2) / cameraResHeight; - return widthMPerPx * heightMPerPx; - } - - boolean camCanSeeTarget(double distMeters, double yaw, double pitch, double area) { - boolean inRange = (distMeters < this.maxLEDRangeMeters); - boolean inHorizAngle = Math.abs(yaw) < (this.camHorizFOVDegrees / 2); - boolean inVertAngle = Math.abs(pitch) < (this.camVertFOVDegrees / 2); - boolean targetBigEnough = area > this.minTargetArea; - return (inRange && inHorizAngle && inVertAngle && targetBigEnough); - } } diff --git a/photon-lib/src/main/java/org/photonvision/simulation/SimVisionTarget.java b/photon-lib/src/main/java/org/photonvision/simulation/SimVisionTarget.java index cd56c95fef..0d66154d9f 100644 --- a/photon-lib/src/main/java/org/photonvision/simulation/SimVisionTarget.java +++ b/photon-lib/src/main/java/org/photonvision/simulation/SimVisionTarget.java @@ -31,25 +31,25 @@ */ @Deprecated public class SimVisionTarget { - Pose3d targetPose; - double targetWidthMeters; - double targetHeightMeters; - double tgtAreaMeters2; - int targetID; + Pose3d targetPose; + double targetWidthMeters; + double targetHeightMeters; + double tgtAreaMeters2; + int targetID; - /** - * Describes a vision target located somewhere on the field that your SimVisionSystem can detect. - * - * @param targetPos Pose3d of the target in field-relative coordinates - * @param targetWidthMeters Width of the outer bounding box of the target in meters. 
- * @param targetHeightMeters Pair Height of the outer bounding box of the target in meters. - */ - public SimVisionTarget( - Pose3d targetPos, double targetWidthMeters, double targetHeightMeters, int targetID) { - this.targetPose = targetPos; - this.targetWidthMeters = targetWidthMeters; - this.targetHeightMeters = targetHeightMeters; - this.tgtAreaMeters2 = targetWidthMeters * targetHeightMeters; - this.targetID = targetID; - } + /** + * Describes a vision target located somewhere on the field that your SimVisionSystem can detect. + * + * @param targetPos Pose3d of the target in field-relative coordinates + * @param targetWidthMeters Width of the outer bounding box of the target in meters. + * @param targetHeightMeters Pair Height of the outer bounding box of the target in meters. + */ + public SimVisionTarget( + Pose3d targetPos, double targetWidthMeters, double targetHeightMeters, int targetID) { + this.targetPose = targetPos; + this.targetWidthMeters = targetWidthMeters; + this.targetHeightMeters = targetHeightMeters; + this.tgtAreaMeters2 = targetWidthMeters * targetHeightMeters; + this.targetID = targetID; + } } diff --git a/photon-lib/src/main/java/org/photonvision/simulation/VideoSimUtil.java b/photon-lib/src/main/java/org/photonvision/simulation/VideoSimUtil.java index ed44e864e3..7a32e413a3 100644 --- a/photon-lib/src/main/java/org/photonvision/simulation/VideoSimUtil.java +++ b/photon-lib/src/main/java/org/photonvision/simulation/VideoSimUtil.java @@ -52,551 +52,551 @@ import org.photonvision.estimation.RotTrlTransform3d; public class VideoSimUtil { - public static final String kLocalTagImagesPath = "./src/main/resources/images/apriltags/"; - public static final String kResourceTagImagesPath = "/images/apriltags/"; - public static final String kTag16h5ImageName = "tag16_05_00000"; - public static final int kNumTags16h5 = 30; - - // All 16h5 tag images - private static final Map kTag16h5Images = new HashMap<>(); - // Points corresponding to marker(black 
square) corners of 8x8 16h5 tag images - public static final Point[] kTag16h5MarkerPts; - - // field dimensions for wireframe - private static double fieldLength = 16.54175; - private static double fieldWidth = 8.0137; - - static { - try { - var loader = - new RuntimeLoader<>( - Core.NATIVE_LIBRARY_NAME, RuntimeLoader.getDefaultExtractionRoot(), Core.class); - loader.loadLibrary(); - } catch (Exception e) { - throw new RuntimeException("Failed to load native libraries!", e); + public static final String kLocalTagImagesPath = "./src/main/resources/images/apriltags/"; + public static final String kResourceTagImagesPath = "/images/apriltags/"; + public static final String kTag16h5ImageName = "tag16_05_00000"; + public static final int kNumTags16h5 = 30; + + // All 16h5 tag images + private static final Map kTag16h5Images = new HashMap<>(); + // Points corresponding to marker(black square) corners of 8x8 16h5 tag images + public static final Point[] kTag16h5MarkerPts; + + // field dimensions for wireframe + private static double fieldLength = 16.54175; + private static double fieldWidth = 8.0137; + + static { + try { + var loader = + new RuntimeLoader<>( + Core.NATIVE_LIBRARY_NAME, RuntimeLoader.getDefaultExtractionRoot(), Core.class); + loader.loadLibrary(); + } catch (Exception e) { + throw new RuntimeException("Failed to load native libraries!", e); + } + + // create Mats of 8x8 apriltag images + for (int i = 0; i < VideoSimUtil.kNumTags16h5; i++) { + Mat tagImage = VideoSimUtil.get16h5TagImage(i); + kTag16h5Images.put(i, tagImage); + } + + kTag16h5MarkerPts = get16h5MarkerPts(); } - // create Mats of 8x8 apriltag images - for (int i = 0; i < VideoSimUtil.kNumTags16h5; i++) { - Mat tagImage = VideoSimUtil.get16h5TagImage(i); - kTag16h5Images.put(i, tagImage); + /** Updates the properties of this CvSource video stream with the given camera properties. 
*/ + public static void updateVideoProp(CvSource video, SimCameraProperties prop) { + video.setResolution(prop.getResWidth(), prop.getResHeight()); + video.setFPS((int) prop.getFPS()); } - kTag16h5MarkerPts = get16h5MarkerPts(); - } - - /** Updates the properties of this CvSource video stream with the given camera properties. */ - public static void updateVideoProp(CvSource video, SimCameraProperties prop) { - video.setResolution(prop.getResWidth(), prop.getResHeight()); - video.setFPS((int) prop.getFPS()); - } - - /** - * Gets the points representing the corners of this image. Because image pixels are accessed - * through a Mat, the point (0,0) actually represents the center of the top-left pixel and not the - * actual top-left corner. - * - * @param size Size of image - */ - public static Point[] getImageCorners(Size size) { - return new Point[] { - new Point(-0.5, -0.5), - new Point(size.width - 0.5, -0.5), - new Point(size.width - 0.5, size.height - 0.5), - new Point(-0.5, size.height - 0.5) - }; - } - - /** - * Gets the 8x8 (grayscale) image of a specific 16h5 AprilTag. - * - * @param id The fiducial id of the desired tag - */ - public static Mat get16h5TagImage(int id) { - String name = kTag16h5ImageName; - String idString = String.valueOf(id); - name = name.substring(0, name.length() - idString.length()) + idString; - - var resource = VideoSimUtil.class.getResource(kResourceTagImagesPath + name + ".png"); - - Mat result = new Mat(); - // reading jar file - if (resource != null && resource.getPath().startsWith("file")) { - BufferedImage buf; - try { - buf = ImageIO.read(resource); - } catch (IOException e) { - System.err.println("Couldn't read tag image!"); + /** + * Gets the points representing the corners of this image. Because image pixels are accessed + * through a Mat, the point (0,0) actually represents the center of the top-left pixel and not the + * actual top-left corner. 
+ * + * @param size Size of image + */ + public static Point[] getImageCorners(Size size) { + return new Point[] { + new Point(-0.5, -0.5), + new Point(size.width - 0.5, -0.5), + new Point(size.width - 0.5, size.height - 0.5), + new Point(-0.5, size.height - 0.5) + }; + } + + /** + * Gets the 8x8 (grayscale) image of a specific 16h5 AprilTag. + * + * @param id The fiducial id of the desired tag + */ + public static Mat get16h5TagImage(int id) { + String name = kTag16h5ImageName; + String idString = String.valueOf(id); + name = name.substring(0, name.length() - idString.length()) + idString; + + var resource = VideoSimUtil.class.getResource(kResourceTagImagesPath + name + ".png"); + + Mat result = new Mat(); + // reading jar file + if (resource != null && resource.getPath().startsWith("file")) { + BufferedImage buf; + try { + buf = ImageIO.read(resource); + } catch (IOException e) { + System.err.println("Couldn't read tag image!"); + return result; + } + + result = new Mat(buf.getHeight(), buf.getWidth(), CvType.CV_8UC1); + + byte[] px = new byte[1]; + for (int y = 0; y < result.height(); y++) { + for (int x = 0; x < result.width(); x++) { + px[0] = (byte) (buf.getRGB(x, y) & 0xFF); + result.put(y, x, px); + } + } + } + // local IDE tests + else result = Imgcodecs.imread(kLocalTagImagesPath + name + ".png", Imgcodecs.IMREAD_GRAYSCALE); return result; - } + } - result = new Mat(buf.getHeight(), buf.getWidth(), CvType.CV_8UC1); + /** Gets the points representing the marker(black square) corners. */ + public static Point[] get16h5MarkerPts() { + return get16h5MarkerPts(1); + } - byte[] px = new byte[1]; - for (int y = 0; y < result.height(); y++) { - for (int x = 0; x < result.width(); x++) { - px[0] = (byte) (buf.getRGB(x, y) & 0xFF); - result.put(y, x, px); + /** + * Gets the points representing the marker(black square) corners. 
+ * + * @param scale The scale of the tag image (8*scale x 8*scale image) + */ + public static Point[] get16h5MarkerPts(int scale) { + var roi16h5 = new Rect(new Point(1, 1), new Size(6, 6)); + roi16h5.x *= scale; + roi16h5.y *= scale; + roi16h5.width *= scale; + roi16h5.height *= scale; + var pts = getImageCorners(roi16h5.size()); + for (int i = 0; i < pts.length; i++) { + var pt = pts[i]; + pts[i] = new Point(roi16h5.tl().x + pt.x, roi16h5.tl().y + pt.y); + } + return pts; + } + + /** + * Warps the image of a specific 16h5 AprilTag onto the destination image at the given points. + * + * @param tagId The id of the specific tag to warp onto the destination image + * @param dstPoints Points(4) in destination image where the tag marker(black square) corners + * should be warped onto. + * @param antialiasing If antialiasing should be performed by automatically + * supersampling/interpolating the warped image. This should be used if better stream quality + * is desired or target detection is being done on the stream, but can hurt performance. + * @param destination The destination image to place the warped tag image onto. 
+ */ + public static void warp16h5TagImage( + int tagId, Point[] dstPoints, boolean antialiasing, Mat destination) { + Mat tagImage = kTag16h5Images.get(tagId); + if (tagImage == null || tagImage.empty()) return; + var tagPoints = new MatOfPoint2f(kTag16h5MarkerPts); + // points of tag image corners + var tagImageCorners = new MatOfPoint2f(getImageCorners(tagImage.size())); + var dstPointMat = new MatOfPoint2f(dstPoints); + // the rectangle describing the rectangle-of-interest(ROI) + var boundingRect = Imgproc.boundingRect(dstPointMat); + // find the perspective transform from the tag image to the warped destination points + Mat perspecTrf = Imgproc.getPerspectiveTransform(tagPoints, dstPointMat); + // check extreme image corners after transform to check if we need to expand bounding rect + var extremeCorners = new MatOfPoint2f(); + Core.perspectiveTransform(tagImageCorners, extremeCorners, perspecTrf); + // dilate ROI to fit full tag + boundingRect = Imgproc.boundingRect(extremeCorners); + + // adjust interpolation strategy based on size of warped tag compared to tag image + var warpedContourArea = Imgproc.contourArea(extremeCorners); + double warpedTagUpscale = Math.sqrt(warpedContourArea) / Math.sqrt(tagImage.size().area()); + int warpStrategy = Imgproc.INTER_NEAREST; + // automatically determine the best supersampling of warped image and scale of tag image + /* + (warpPerspective does not properly resample, so this is used to avoid aliasing in the + warped image. Supersampling is used when the warped tag is small, but is very slow + when the warped tag is large-- scaling the tag image up and using linear interpolation + instead can be performant while still effectively antialiasing. Some combination of these + two can be used in between those extremes.) + + TODO: Simplify magic numbers to one or two variables, or use a more proper approach? 
+ */ + int supersampling = 6; + supersampling = (int) Math.ceil(supersampling / warpedTagUpscale); + supersampling = Math.max(Math.min(supersampling, 8), 1); + + Mat scaledTagImage = new Mat(); + if (warpedTagUpscale > 2.0) { + warpStrategy = Imgproc.INTER_LINEAR; + int scaleFactor = (int) (warpedTagUpscale / 3.0) + 2; + scaleFactor = Math.max(Math.min(scaleFactor, 40), 1); + scaleFactor *= supersampling; + Imgproc.resize( + tagImage, scaledTagImage, new Size(), scaleFactor, scaleFactor, Imgproc.INTER_NEAREST); + tagPoints.fromArray(get16h5MarkerPts(scaleFactor)); + } else tagImage.assignTo(scaledTagImage); + + // constrain the bounding rect inside of the destination image + boundingRect.x -= 1; + boundingRect.y -= 1; + boundingRect.width += 2; + boundingRect.height += 2; + if (boundingRect.x < 0) { + boundingRect.width += boundingRect.x; + boundingRect.x = 0; + } + if (boundingRect.y < 0) { + boundingRect.height += boundingRect.y; + boundingRect.y = 0; + } + boundingRect.width = Math.min(destination.width() - boundingRect.x, boundingRect.width); + boundingRect.height = Math.min(destination.height() - boundingRect.y, boundingRect.height); + if (boundingRect.width <= 0 || boundingRect.height <= 0) return; + + // upscale if supersampling + Mat scaledDstPts = new Mat(); + if (supersampling > 1) { + Core.multiply(dstPointMat, new Scalar(supersampling, supersampling), scaledDstPts); + boundingRect.x *= supersampling; + boundingRect.y *= supersampling; + boundingRect.width *= supersampling; + boundingRect.height *= supersampling; + } else dstPointMat.assignTo(scaledDstPts); + + // update transform relative to expanded, scaled bounding rect + Core.subtract(scaledDstPts, new Scalar(boundingRect.tl().x, boundingRect.tl().y), scaledDstPts); + perspecTrf = Imgproc.getPerspectiveTransform(tagPoints, scaledDstPts); + + // warp (scaled) tag image onto (scaled) ROI image representing the portion of + // the destination image encapsulated by boundingRect + Mat tempROI = new Mat(); 
+ Imgproc.warpPerspective(scaledTagImage, tempROI, perspecTrf, boundingRect.size(), warpStrategy); + + // downscale ROI with interpolation if supersampling + if (supersampling > 1) { + boundingRect.x /= supersampling; + boundingRect.y /= supersampling; + boundingRect.width /= supersampling; + boundingRect.height /= supersampling; + Imgproc.resize(tempROI, tempROI, boundingRect.size(), 0, 0, Imgproc.INTER_AREA); } - } + + // we want to copy ONLY the transformed tag to the result image, not the entire bounding rect + // using a mask only copies the source pixels which have an associated non-zero value in the + // mask + Mat tempMask = Mat.zeros(tempROI.size(), CvType.CV_8UC1); + Core.subtract( + extremeCorners, new Scalar(boundingRect.tl().x, boundingRect.tl().y), extremeCorners); + Point tempCenter = new Point(); + tempCenter.x = + Arrays.stream(extremeCorners.toArray()).mapToDouble(p -> p.x).average().getAsDouble(); + tempCenter.y = + Arrays.stream(extremeCorners.toArray()).mapToDouble(p -> p.y).average().getAsDouble(); + // dilate tag corners + Arrays.stream(extremeCorners.toArray()) + .forEach( + p -> { + double xdiff = p.x - tempCenter.x; + double ydiff = p.y - tempCenter.y; + xdiff += 1 * Math.signum(xdiff); + ydiff += 1 * Math.signum(ydiff); + new Point(tempCenter.x + xdiff, tempCenter.y + ydiff); + }); + // (make inside of tag completely white in mask) + Imgproc.fillConvexPoly(tempMask, new MatOfPoint(extremeCorners.toArray()), new Scalar(255)); + + // copy transformed tag onto result image + tempROI.copyTo(destination.submat(boundingRect), tempMask); } - // local IDE tests - else result = Imgcodecs.imread(kLocalTagImagesPath + name + ".png", Imgcodecs.IMREAD_GRAYSCALE); - return result; - } - - /** Gets the points representing the marker(black square) corners. */ - public static Point[] get16h5MarkerPts() { - return get16h5MarkerPts(1); - } - - /** - * Gets the points representing the marker(black square) corners. 
- * - * @param scale The scale of the tag image (8*scale x 8*scale image) - */ - public static Point[] get16h5MarkerPts(int scale) { - var roi16h5 = new Rect(new Point(1, 1), new Size(6, 6)); - roi16h5.x *= scale; - roi16h5.y *= scale; - roi16h5.width *= scale; - roi16h5.height *= scale; - var pts = getImageCorners(roi16h5.size()); - for (int i = 0; i < pts.length; i++) { - var pt = pts[i]; - pts[i] = new Point(roi16h5.tl().x + pt.x, roi16h5.tl().y + pt.y); + + /** + * Given a line thickness in a 640x480 image, try to scale to the given destination image + * resolution. + * + * @param thickness480p A hypothetical line thickness in a 640x480 image + * @param destinationImg The destination image to scale to + * @return Scaled thickness which cannot be less than 1 + */ + public static double getScaledThickness(double thickness480p, Mat destinationImg) { + double scaleX = destinationImg.width() / 640.0; + double scaleY = destinationImg.height() / 480.0; + double minScale = Math.min(scaleX, scaleY); + return Math.max(thickness480p * minScale, 1.0); } - return pts; - } - - /** - * Warps the image of a specific 16h5 AprilTag onto the destination image at the given points. - * - * @param tagId The id of the specific tag to warp onto the destination image - * @param dstPoints Points(4) in destination image where the tag marker(black square) corners - * should be warped onto. - * @param antialiasing If antialiasing should be performed by automatically - * supersampling/interpolating the warped image. This should be used if better stream quality - * is desired or target detection is being done on the stream, but can hurt performance. - * @param destination The destination image to place the warped tag image onto. 
- */ - public static void warp16h5TagImage( - int tagId, Point[] dstPoints, boolean antialiasing, Mat destination) { - Mat tagImage = kTag16h5Images.get(tagId); - if (tagImage == null || tagImage.empty()) return; - var tagPoints = new MatOfPoint2f(kTag16h5MarkerPts); - // points of tag image corners - var tagImageCorners = new MatOfPoint2f(getImageCorners(tagImage.size())); - var dstPointMat = new MatOfPoint2f(dstPoints); - // the rectangle describing the rectangle-of-interest(ROI) - var boundingRect = Imgproc.boundingRect(dstPointMat); - // find the perspective transform from the tag image to the warped destination points - Mat perspecTrf = Imgproc.getPerspectiveTransform(tagPoints, dstPointMat); - // check extreme image corners after transform to check if we need to expand bounding rect - var extremeCorners = new MatOfPoint2f(); - Core.perspectiveTransform(tagImageCorners, extremeCorners, perspecTrf); - // dilate ROI to fit full tag - boundingRect = Imgproc.boundingRect(extremeCorners); - - // adjust interpolation strategy based on size of warped tag compared to tag image - var warpedContourArea = Imgproc.contourArea(extremeCorners); - double warpedTagUpscale = Math.sqrt(warpedContourArea) / Math.sqrt(tagImage.size().area()); - int warpStrategy = Imgproc.INTER_NEAREST; - // automatically determine the best supersampling of warped image and scale of tag image - /* - (warpPerspective does not properly resample, so this is used to avoid aliasing in the - warped image. Supersampling is used when the warped tag is small, but is very slow - when the warped tag is large-- scaling the tag image up and using linear interpolation - instead can be performant while still effectively antialiasing. Some combination of these - two can be used in between those extremes.) - - TODO: Simplify magic numbers to one or two variables, or use a more proper approach? 
- */ - int supersampling = 6; - supersampling = (int) Math.ceil(supersampling / warpedTagUpscale); - supersampling = Math.max(Math.min(supersampling, 8), 1); - - Mat scaledTagImage = new Mat(); - if (warpedTagUpscale > 2.0) { - warpStrategy = Imgproc.INTER_LINEAR; - int scaleFactor = (int) (warpedTagUpscale / 3.0) + 2; - scaleFactor = Math.max(Math.min(scaleFactor, 40), 1); - scaleFactor *= supersampling; - Imgproc.resize( - tagImage, scaledTagImage, new Size(), scaleFactor, scaleFactor, Imgproc.INTER_NEAREST); - tagPoints.fromArray(get16h5MarkerPts(scaleFactor)); - } else tagImage.assignTo(scaledTagImage); - - // constrain the bounding rect inside of the destination image - boundingRect.x -= 1; - boundingRect.y -= 1; - boundingRect.width += 2; - boundingRect.height += 2; - if (boundingRect.x < 0) { - boundingRect.width += boundingRect.x; - boundingRect.x = 0; + + /** + * Draw a filled ellipse in the destination image. + * + * @param dstPoints The points in the destination image representing the rectangle in which the + * ellipse is inscribed. + * @param color The color of the ellipse. This is a scalar with BGR values (0-255) + * @param destination The destination image to draw onto. The image should be in the BGR color + * space. + */ + public static void drawInscribedEllipse(Point[] dstPoints, Scalar color, Mat destination) { + // create RotatedRect from points + var rect = OpenCVHelp.getMinAreaRect(dstPoints); + // inscribe ellipse inside rectangle + Imgproc.ellipse(destination, rect, color, -1, Imgproc.LINE_AA); } - if (boundingRect.y < 0) { - boundingRect.height += boundingRect.y; - boundingRect.y = 0; + + /** + * Draw a polygon outline or filled polygon to the destination image with the given points. + * + * @param dstPoints The points in the destination image representing the polygon. + * @param thickness The thickness of the outline in pixels. If this is not positive, a filled + * polygon is drawn instead. + * @param color The color drawn. 
This should match the color space of the destination image. + * @param isClosed If the last point should connect to the first point in the polygon outline. + * @param destination The destination image to draw onto. + */ + public static void drawPoly( + Point[] dstPoints, int thickness, Scalar color, boolean isClosed, Mat destination) { + var dstPointsd = new MatOfPoint(dstPoints); + if (thickness > 0) { + Imgproc.polylines( + destination, List.of(dstPointsd), isClosed, color, thickness, Imgproc.LINE_AA); + } else { + Imgproc.fillPoly(destination, List.of(dstPointsd), color, Imgproc.LINE_AA); + } } - boundingRect.width = Math.min(destination.width() - boundingRect.x, boundingRect.width); - boundingRect.height = Math.min(destination.height() - boundingRect.y, boundingRect.height); - if (boundingRect.width <= 0 || boundingRect.height <= 0) return; - - // upscale if supersampling - Mat scaledDstPts = new Mat(); - if (supersampling > 1) { - Core.multiply(dstPointMat, new Scalar(supersampling, supersampling), scaledDstPts); - boundingRect.x *= supersampling; - boundingRect.y *= supersampling; - boundingRect.width *= supersampling; - boundingRect.height *= supersampling; - } else dstPointMat.assignTo(scaledDstPts); - - // update transform relative to expanded, scaled bounding rect - Core.subtract(scaledDstPts, new Scalar(boundingRect.tl().x, boundingRect.tl().y), scaledDstPts); - perspecTrf = Imgproc.getPerspectiveTransform(tagPoints, scaledDstPts); - - // warp (scaled) tag image onto (scaled) ROI image representing the portion of - // the destination image encapsulated by boundingRect - Mat tempROI = new Mat(); - Imgproc.warpPerspective(scaledTagImage, tempROI, perspecTrf, boundingRect.size(), warpStrategy); - - // downscale ROI with interpolation if supersampling - if (supersampling > 1) { - boundingRect.x /= supersampling; - boundingRect.y /= supersampling; - boundingRect.width /= supersampling; - boundingRect.height /= supersampling; - Imgproc.resize(tempROI, tempROI, 
boundingRect.size(), 0, 0, Imgproc.INTER_AREA); + + /** + * Draws a contour around the given points and text of the id onto the destination image. + * + * @param id Fiducial ID number to draw + * @param dstPoints Points representing the four corners of the tag marker(black square) in the + * destination image. + * @param destination The destination image to draw onto. The image should be in the BGR color + * space. + */ + public static void drawTagDetection(int id, Point[] dstPoints, Mat destination) { + double thickness = getScaledThickness(1, destination); + drawPoly(dstPoints, (int) thickness, new Scalar(0, 0, 255), true, destination); + var rect = Imgproc.boundingRect(new MatOfPoint(dstPoints)); + var textPt = new Point(rect.x + rect.width, rect.y); + textPt.x += thickness; + textPt.y += thickness; + Imgproc.putText( + destination, + String.valueOf(id), + textPt, + Imgproc.FONT_HERSHEY_PLAIN, + 1.5 * thickness, + new Scalar(0, 200, 0), + (int) thickness, + Imgproc.LINE_AA); } - // we want to copy ONLY the transformed tag to the result image, not the entire bounding rect - // using a mask only copies the source pixels which have an associated non-zero value in the - // mask - Mat tempMask = Mat.zeros(tempROI.size(), CvType.CV_8UC1); - Core.subtract( - extremeCorners, new Scalar(boundingRect.tl().x, boundingRect.tl().y), extremeCorners); - Point tempCenter = new Point(); - tempCenter.x = - Arrays.stream(extremeCorners.toArray()).mapToDouble(p -> p.x).average().getAsDouble(); - tempCenter.y = - Arrays.stream(extremeCorners.toArray()).mapToDouble(p -> p.y).average().getAsDouble(); - // dilate tag corners - Arrays.stream(extremeCorners.toArray()) - .forEach( - p -> { - double xdiff = p.x - tempCenter.x; - double ydiff = p.y - tempCenter.y; - xdiff += 1 * Math.signum(xdiff); - ydiff += 1 * Math.signum(ydiff); - new Point(tempCenter.x + xdiff, tempCenter.y + ydiff); - }); - // (make inside of tag completely white in mask) - Imgproc.fillConvexPoly(tempMask, new 
MatOfPoint(extremeCorners.toArray()), new Scalar(255)); - - // copy transformed tag onto result image - tempROI.copyTo(destination.submat(boundingRect), tempMask); - } - - /** - * Given a line thickness in a 640x480 image, try to scale to the given destination image - * resolution. - * - * @param thickness480p A hypothetical line thickness in a 640x480 image - * @param destinationImg The destination image to scale to - * @return Scaled thickness which cannot be less than 1 - */ - public static double getScaledThickness(double thickness480p, Mat destinationImg) { - double scaleX = destinationImg.width() / 640.0; - double scaleY = destinationImg.height() / 480.0; - double minScale = Math.min(scaleX, scaleY); - return Math.max(thickness480p * minScale, 1.0); - } - - /** - * Draw a filled ellipse in the destination image. - * - * @param dstPoints The points in the destination image representing the rectangle in which the - * ellipse is inscribed. - * @param color The color of the ellipse. This is a scalar with BGR values (0-255) - * @param destination The destination image to draw onto. The image should be in the BGR color - * space. - */ - public static void drawInscribedEllipse(Point[] dstPoints, Scalar color, Mat destination) { - // create RotatedRect from points - var rect = OpenCVHelp.getMinAreaRect(dstPoints); - // inscribe ellipse inside rectangle - Imgproc.ellipse(destination, rect, color, -1, Imgproc.LINE_AA); - } - - /** - * Draw a polygon outline or filled polygon to the destination image with the given points. - * - * @param dstPoints The points in the destination image representing the polygon. - * @param thickness The thickness of the outline in pixels. If this is not positive, a filled - * polygon is drawn instead. - * @param color The color drawn. This should match the color space of the destination image. - * @param isClosed If the last point should connect to the first point in the polygon outline. 
- * @param destination The destination image to draw onto. - */ - public static void drawPoly( - Point[] dstPoints, int thickness, Scalar color, boolean isClosed, Mat destination) { - var dstPointsd = new MatOfPoint(dstPoints); - if (thickness > 0) { - Imgproc.polylines( - destination, List.of(dstPointsd), isClosed, color, thickness, Imgproc.LINE_AA); - } else { - Imgproc.fillPoly(destination, List.of(dstPointsd), color, Imgproc.LINE_AA); + /** + * Set the field dimensions that are used for drawing the field wireframe. + * + * @param fieldLengthMeters + * @param fieldWidthMeters + */ + public static void setFieldDimensionsMeters(double fieldLengthMeters, double fieldWidthMeters) { + fieldLength = fieldLengthMeters; + fieldWidth = fieldWidthMeters; } - } - - /** - * Draws a contour around the given points and text of the id onto the destination image. - * - * @param id Fiducial ID number to draw - * @param dstPoints Points representing the four corners of the tag marker(black square) in the - * destination image. - * @param destination The destination image to draw onto. The image should be in the BGR color - * space. - */ - public static void drawTagDetection(int id, Point[] dstPoints, Mat destination) { - double thickness = getScaledThickness(1, destination); - drawPoly(dstPoints, (int) thickness, new Scalar(0, 0, 255), true, destination); - var rect = Imgproc.boundingRect(new MatOfPoint(dstPoints)); - var textPt = new Point(rect.x + rect.width, rect.y); - textPt.x += thickness; - textPt.y += thickness; - Imgproc.putText( - destination, - String.valueOf(id), - textPt, - Imgproc.FONT_HERSHEY_PLAIN, - 1.5 * thickness, - new Scalar(0, 200, 0), - (int) thickness, - Imgproc.LINE_AA); - } - - /** - * Set the field dimensions that are used for drawing the field wireframe. 
- * - * @param fieldLengthMeters - * @param fieldWidthMeters - */ - public static void setFieldDimensionsMeters(double fieldLengthMeters, double fieldWidthMeters) { - fieldLength = fieldLengthMeters; - fieldWidth = fieldWidthMeters; - } - - /** - * The translations used to draw the field side walls and driver station walls. It is a List of - * Lists because the translations are not all connected. - */ - private static List> getFieldWallLines() { - var list = new ArrayList>(); - - final double sideHt = Units.inchesToMeters(19.5); - final double driveHt = Units.inchesToMeters(35); - final double topHt = Units.inchesToMeters(78); - - // field floor - list.add( - List.of( - new Translation3d(0, 0, 0), - new Translation3d(fieldLength, 0, 0), - new Translation3d(fieldLength, fieldWidth, 0), - new Translation3d(0, fieldWidth, 0), - new Translation3d(0, 0, 0))); - // right side wall - list.add( - List.of( - new Translation3d(0, 0, 0), - new Translation3d(0, 0, sideHt), - new Translation3d(fieldLength, 0, sideHt), - new Translation3d(fieldLength, 0, 0))); - // red driverstation - list.add( - List.of( - new Translation3d(fieldLength, 0, sideHt), - new Translation3d(fieldLength, 0, topHt), - new Translation3d(fieldLength, fieldWidth, topHt), - new Translation3d(fieldLength, fieldWidth, sideHt))); - list.add( - List.of( - new Translation3d(fieldLength, 0, driveHt), - new Translation3d(fieldLength, fieldWidth, driveHt))); - // left side wall - list.add( - List.of( - new Translation3d(0, fieldWidth, 0), - new Translation3d(0, fieldWidth, sideHt), - new Translation3d(fieldLength, fieldWidth, sideHt), - new Translation3d(fieldLength, fieldWidth, 0))); - // blue driverstation - list.add( - List.of( - new Translation3d(0, 0, sideHt), - new Translation3d(0, 0, topHt), - new Translation3d(0, fieldWidth, topHt), - new Translation3d(0, fieldWidth, sideHt))); - list.add(List.of(new Translation3d(0, 0, driveHt), new Translation3d(0, fieldWidth, driveHt))); - - return list; - } - - /** - * 
The translations used to draw the field floor subdivisions (not the floor outline). It is a - * List of Lists because the translations are not all connected. - * - * @param subdivisions How many "subdivisions" along the width/length of the floor. E.g. 3 - * subdivisions would mean 2 lines along the length and 2 lines along the width creating a 3x3 - * "grid". - */ - private static List> getFieldFloorLines(int subdivisions) { - var list = new ArrayList>(); - final double subLength = fieldLength / subdivisions; - final double subWidth = fieldWidth / subdivisions; - - // field floor subdivisions - for (int i = 0; i < subdivisions; i++) { - list.add( - List.of( - new Translation3d(0, subWidth * (i + 1), 0), - new Translation3d(fieldLength, subWidth * (i + 1), 0))); - list.add( - List.of( - new Translation3d(subLength * (i + 1), 0, 0), - new Translation3d(subLength * (i + 1), fieldWidth, 0))); + + /** + * The translations used to draw the field side walls and driver station walls. It is a List of + * Lists because the translations are not all connected. 
+ */ + private static List> getFieldWallLines() { + var list = new ArrayList>(); + + final double sideHt = Units.inchesToMeters(19.5); + final double driveHt = Units.inchesToMeters(35); + final double topHt = Units.inchesToMeters(78); + + // field floor + list.add( + List.of( + new Translation3d(0, 0, 0), + new Translation3d(fieldLength, 0, 0), + new Translation3d(fieldLength, fieldWidth, 0), + new Translation3d(0, fieldWidth, 0), + new Translation3d(0, 0, 0))); + // right side wall + list.add( + List.of( + new Translation3d(0, 0, 0), + new Translation3d(0, 0, sideHt), + new Translation3d(fieldLength, 0, sideHt), + new Translation3d(fieldLength, 0, 0))); + // red driverstation + list.add( + List.of( + new Translation3d(fieldLength, 0, sideHt), + new Translation3d(fieldLength, 0, topHt), + new Translation3d(fieldLength, fieldWidth, topHt), + new Translation3d(fieldLength, fieldWidth, sideHt))); + list.add( + List.of( + new Translation3d(fieldLength, 0, driveHt), + new Translation3d(fieldLength, fieldWidth, driveHt))); + // left side wall + list.add( + List.of( + new Translation3d(0, fieldWidth, 0), + new Translation3d(0, fieldWidth, sideHt), + new Translation3d(fieldLength, fieldWidth, sideHt), + new Translation3d(fieldLength, fieldWidth, 0))); + // blue driverstation + list.add( + List.of( + new Translation3d(0, 0, sideHt), + new Translation3d(0, 0, topHt), + new Translation3d(0, fieldWidth, topHt), + new Translation3d(0, fieldWidth, sideHt))); + list.add(List.of(new Translation3d(0, 0, driveHt), new Translation3d(0, fieldWidth, driveHt))); + + return list; } - return list; - } - - /** - * Convert 3D lines represented by the given series of translations into a polygon(s) in the - * camera's image. - * - * @param camRt The change in basis from world coordinates to camera coordinates. See {@link - * RotTrlTransform3d#makeRelativeTo(Pose3d)}. - * @param prop The simulated camera's properties. 
- * @param trls A sequential series of translations defining the polygon to be drawn. - * @param resolution Resolution as a fraction(0 - 1) of the video frame's diagonal length in - * pixels. Line segments will be subdivided if they exceed this resolution. - * @param isClosed If the final translation should also draw a line to the first translation. - * @param destination The destination image that is being drawn to. - * @return A list of polygons(which are an array of points) - */ - public static List polyFrom3dLines( - RotTrlTransform3d camRt, - SimCameraProperties prop, - List trls, - double resolution, - boolean isClosed, - Mat destination) { - resolution = Math.hypot(destination.size().height, destination.size().width) * resolution; - List pts = new ArrayList<>(trls); - if (isClosed) pts.add(pts.get(0)); - List polyPointList = new ArrayList<>(); - - for (int i = 0; i < pts.size() - 1; i++) { - var pta = pts.get(i); - var ptb = pts.get(i + 1); - - // check if line is inside camera fulcrum - var inter = prop.getVisibleLine(camRt, pta, ptb); - if (inter.getSecond() == null) continue; - - // cull line to the inside of the camera fulcrum - double inter1 = inter.getFirst().doubleValue(); - double inter2 = inter.getSecond().doubleValue(); - var baseDelta = ptb.minus(pta); - var old_pta = pta; - if (inter1 > 0) pta = old_pta.plus(baseDelta.times(inter1)); - if (inter2 < 1) ptb = old_pta.plus(baseDelta.times(inter2)); - baseDelta = ptb.minus(pta); - - // project points into 2d - var poly = new ArrayList(); - poly.addAll( - Arrays.asList( - OpenCVHelp.projectPoints( - prop.getIntrinsics(), prop.getDistCoeffs(), camRt, List.of(pta, ptb)))); - var pxa = poly.get(0); - var pxb = poly.get(1); - - // subdivide projected line based on desired resolution - double pxDist = Math.hypot(pxb.x - pxa.x, pxb.y - pxa.y); - int subdivisions = (int) (pxDist / resolution); - var subDelta = baseDelta.div(subdivisions + 1); - var subPts = new ArrayList(); - for (int j = 0; j < 
subdivisions; j++) { - subPts.add(pta.plus(subDelta.times(j + 1))); - } - if (subPts.size() > 0) { - poly.addAll( - 1, - Arrays.asList( - OpenCVHelp.projectPoints( - prop.getIntrinsics(), prop.getDistCoeffs(), camRt, subPts))); - } - - polyPointList.add(poly.toArray(Point[]::new)); + /** + * The translations used to draw the field floor subdivisions (not the floor outline). It is a + * List of Lists because the translations are not all connected. + * + * @param subdivisions How many "subdivisions" along the width/length of the floor. E.g. 3 + * subdivisions would mean 2 lines along the length and 2 lines along the width creating a 3x3 + * "grid". + */ + private static List> getFieldFloorLines(int subdivisions) { + var list = new ArrayList>(); + final double subLength = fieldLength / subdivisions; + final double subWidth = fieldWidth / subdivisions; + + // field floor subdivisions + for (int i = 0; i < subdivisions; i++) { + list.add( + List.of( + new Translation3d(0, subWidth * (i + 1), 0), + new Translation3d(fieldLength, subWidth * (i + 1), 0))); + list.add( + List.of( + new Translation3d(subLength * (i + 1), 0, 0), + new Translation3d(subLength * (i + 1), fieldWidth, 0))); + } + + return list; } - return polyPointList; - } - - /** - * Draw a wireframe of the field to the given image. - * - * @param camRt The change in basis from world coordinates to camera coordinates. See {@link - * RotTrlTransform3d#makeRelativeTo(Pose3d)}. - * @param prop The simulated camera's properties. - * @param resolution Resolution as a fraction(0 - 1) of the video frame's diagonal length in - * pixels. Line segments will be subdivided if they exceed this resolution. - * @param wallThickness Thickness of the lines used for drawing the field walls in pixels. This is - * scaled by {@link #getScaledThickness(double, Mat)}. - * @param wallColor Color of the lines used for drawing the field walls. 
- * @param floorSubdivisions A NxN "grid" is created from the floor where this parameter is N, - * which defines the floor lines. - * @param floorThickness Thickness of the lines used for drawing the field floor grid in pixels. - * This is scaled by {@link #getScaledThickness(double, Mat)}. - * @param floorColor Color of the lines used for drawing the field floor grid. - * @param destination The destination image to draw to. - */ - public static void drawFieldWireframe( - RotTrlTransform3d camRt, - SimCameraProperties prop, - double resolution, - double wallThickness, - Scalar wallColor, - int floorSubdivisions, - double floorThickness, - Scalar floorColor, - Mat destination) { - for (var trls : getFieldFloorLines(floorSubdivisions)) { - var polys = VideoSimUtil.polyFrom3dLines(camRt, prop, trls, resolution, false, destination); - for (var poly : polys) { - drawPoly( - poly, - (int) Math.round(getScaledThickness(floorThickness, destination)), - floorColor, - false, - destination); - } + /** + * Convert 3D lines represented by the given series of translations into a polygon(s) in the + * camera's image. + * + * @param camRt The change in basis from world coordinates to camera coordinates. See {@link + * RotTrlTransform3d#makeRelativeTo(Pose3d)}. + * @param prop The simulated camera's properties. + * @param trls A sequential series of translations defining the polygon to be drawn. + * @param resolution Resolution as a fraction(0 - 1) of the video frame's diagonal length in + * pixels. Line segments will be subdivided if they exceed this resolution. + * @param isClosed If the final translation should also draw a line to the first translation. + * @param destination The destination image that is being drawn to. 
+ * @return A list of polygons(which are an array of points) + */ + public static List polyFrom3dLines( + RotTrlTransform3d camRt, + SimCameraProperties prop, + List trls, + double resolution, + boolean isClosed, + Mat destination) { + resolution = Math.hypot(destination.size().height, destination.size().width) * resolution; + List pts = new ArrayList<>(trls); + if (isClosed) pts.add(pts.get(0)); + List polyPointList = new ArrayList<>(); + + for (int i = 0; i < pts.size() - 1; i++) { + var pta = pts.get(i); + var ptb = pts.get(i + 1); + + // check if line is inside camera fulcrum + var inter = prop.getVisibleLine(camRt, pta, ptb); + if (inter.getSecond() == null) continue; + + // cull line to the inside of the camera fulcrum + double inter1 = inter.getFirst().doubleValue(); + double inter2 = inter.getSecond().doubleValue(); + var baseDelta = ptb.minus(pta); + var old_pta = pta; + if (inter1 > 0) pta = old_pta.plus(baseDelta.times(inter1)); + if (inter2 < 1) ptb = old_pta.plus(baseDelta.times(inter2)); + baseDelta = ptb.minus(pta); + + // project points into 2d + var poly = new ArrayList(); + poly.addAll( + Arrays.asList( + OpenCVHelp.projectPoints( + prop.getIntrinsics(), prop.getDistCoeffs(), camRt, List.of(pta, ptb)))); + var pxa = poly.get(0); + var pxb = poly.get(1); + + // subdivide projected line based on desired resolution + double pxDist = Math.hypot(pxb.x - pxa.x, pxb.y - pxa.y); + int subdivisions = (int) (pxDist / resolution); + var subDelta = baseDelta.div(subdivisions + 1); + var subPts = new ArrayList(); + for (int j = 0; j < subdivisions; j++) { + subPts.add(pta.plus(subDelta.times(j + 1))); + } + if (subPts.size() > 0) { + poly.addAll( + 1, + Arrays.asList( + OpenCVHelp.projectPoints( + prop.getIntrinsics(), prop.getDistCoeffs(), camRt, subPts))); + } + + polyPointList.add(poly.toArray(Point[]::new)); + } + + return polyPointList; } - for (var trls : getFieldWallLines()) { - var polys = VideoSimUtil.polyFrom3dLines(camRt, prop, trls, resolution, 
false, destination); - for (var poly : polys) { - drawPoly( - poly, - (int) Math.round(getScaledThickness(wallThickness, destination)), - wallColor, - false, - destination); - } + + /** + * Draw a wireframe of the field to the given image. + * + * @param camRt The change in basis from world coordinates to camera coordinates. See {@link + * RotTrlTransform3d#makeRelativeTo(Pose3d)}. + * @param prop The simulated camera's properties. + * @param resolution Resolution as a fraction(0 - 1) of the video frame's diagonal length in + * pixels. Line segments will be subdivided if they exceed this resolution. + * @param wallThickness Thickness of the lines used for drawing the field walls in pixels. This is + * scaled by {@link #getScaledThickness(double, Mat)}. + * @param wallColor Color of the lines used for drawing the field walls. + * @param floorSubdivisions A NxN "grid" is created from the floor where this parameter is N, + * which defines the floor lines. + * @param floorThickness Thickness of the lines used for drawing the field floor grid in pixels. + * This is scaled by {@link #getScaledThickness(double, Mat)}. + * @param floorColor Color of the lines used for drawing the field floor grid. + * @param destination The destination image to draw to. 
+ */ + public static void drawFieldWireframe( + RotTrlTransform3d camRt, + SimCameraProperties prop, + double resolution, + double wallThickness, + Scalar wallColor, + int floorSubdivisions, + double floorThickness, + Scalar floorColor, + Mat destination) { + for (var trls : getFieldFloorLines(floorSubdivisions)) { + var polys = VideoSimUtil.polyFrom3dLines(camRt, prop, trls, resolution, false, destination); + for (var poly : polys) { + drawPoly( + poly, + (int) Math.round(getScaledThickness(floorThickness, destination)), + floorColor, + false, + destination); + } + } + for (var trls : getFieldWallLines()) { + var polys = VideoSimUtil.polyFrom3dLines(camRt, prop, trls, resolution, false, destination); + for (var poly : polys) { + drawPoly( + poly, + (int) Math.round(getScaledThickness(wallThickness, destination)), + wallColor, + false, + destination); + } + } } - } } diff --git a/photon-lib/src/main/java/org/photonvision/simulation/VisionSystemSim.java b/photon-lib/src/main/java/org/photonvision/simulation/VisionSystemSim.java index eb9986ad4a..ed0207096e 100644 --- a/photon-lib/src/main/java/org/photonvision/simulation/VisionSystemSim.java +++ b/photon-lib/src/main/java/org/photonvision/simulation/VisionSystemSim.java @@ -53,358 +53,358 @@ * camera target info. 
*/ public class VisionSystemSim { - private final Map camSimMap = new HashMap<>(); - private static final double kBufferLengthSeconds = 1.5; - // save robot-to-camera for each camera over time (Pose3d is easily interpolatable) - private final Map> camTrfMap = new HashMap<>(); - - // interpolate drivetrain with twists - private final TimeInterpolatableBuffer robotPoseBuffer = - TimeInterpolatableBuffer.createBuffer(kBufferLengthSeconds); - - private final Map> targetSets = new HashMap<>(); - - private final Field2d dbgField; - - private final Transform3d kEmptyTrf = new Transform3d(); - - /** - * A simulated vision system involving a camera(s) and coprocessor(s) mounted on a mobile robot - * running PhotonVision, detecting targets placed on the field. {@link VisionTargetSim}s added to - * this class will be detected by the {@link PhotonCameraSim}s added to this class. This class - * should be updated periodically with the robot's current pose in order to publish the simulated - * camera target info. - * - * @param visionSystemName The specific identifier for this vision system in NetworkTables. - */ - public VisionSystemSim(String visionSystemName) { - dbgField = new Field2d(); - String tableName = "VisionSystemSim-" + visionSystemName; - SmartDashboard.putData(tableName + "/Sim Field", dbgField); - } - - /** Get one of the simulated cameras. */ - public Optional getCameraSim(String name) { - return Optional.ofNullable(camSimMap.get(name)); - } - - /** Get all the simulated cameras. */ - public Collection getCameraSims() { - return camSimMap.values(); - } - - /** - * Adds a simulated camera to this vision system with a specified robot-to-camera transformation. - * The vision targets registered with this vision system simulation will be observed by the - * simulated {@link PhotonCamera}. 
- * - * @param cameraSim The camera simulation - * @param robotToCamera The transform from the robot pose to the camera pose - */ - public void addCamera(PhotonCameraSim cameraSim, Transform3d robotToCamera) { - var existing = camSimMap.putIfAbsent(cameraSim.getCamera().getName(), cameraSim); - if (existing == null) { - camTrfMap.put(cameraSim, TimeInterpolatableBuffer.createBuffer(kBufferLengthSeconds)); - camTrfMap - .get(cameraSim) - .addSample(Timer.getFPGATimestamp(), new Pose3d().plus(robotToCamera)); + private final Map camSimMap = new HashMap<>(); + private static final double kBufferLengthSeconds = 1.5; + // save robot-to-camera for each camera over time (Pose3d is easily interpolatable) + private final Map> camTrfMap = new HashMap<>(); + + // interpolate drivetrain with twists + private final TimeInterpolatableBuffer robotPoseBuffer = + TimeInterpolatableBuffer.createBuffer(kBufferLengthSeconds); + + private final Map> targetSets = new HashMap<>(); + + private final Field2d dbgField; + + private final Transform3d kEmptyTrf = new Transform3d(); + + /** + * A simulated vision system involving a camera(s) and coprocessor(s) mounted on a mobile robot + * running PhotonVision, detecting targets placed on the field. {@link VisionTargetSim}s added to + * this class will be detected by the {@link PhotonCameraSim}s added to this class. This class + * should be updated periodically with the robot's current pose in order to publish the simulated + * camera target info. + * + * @param visionSystemName The specific identifier for this vision system in NetworkTables. + */ + public VisionSystemSim(String visionSystemName) { + dbgField = new Field2d(); + String tableName = "VisionSystemSim-" + visionSystemName; + SmartDashboard.putData(tableName + "/Sim Field", dbgField); } - } - - /** Remove all simulated cameras from this vision system. 
*/ - public void clearCameras() { - camSimMap.clear(); - camTrfMap.clear(); - } - - /** - * Remove a simulated camera from this vision system. - * - * @return If the camera was present and removed - */ - public boolean removeCamera(PhotonCameraSim cameraSim) { - boolean success = camSimMap.remove(cameraSim.getCamera().getName()) != null; - camTrfMap.remove(cameraSim); - return success; - } - - /** - * Get a simulated camera's position relative to the robot. If the requested camera is invalid, an - * empty optional is returned. - * - * @param cameraSim The specific camera to get the robot-to-camera transform of - * @return The transform of this camera, or an empty optional if it is invalid - */ - public Optional getRobotToCamera(PhotonCameraSim cameraSim) { - return getRobotToCamera(cameraSim, Timer.getFPGATimestamp()); - } - - /** - * Get a simulated camera's position relative to the robot. If the requested camera is invalid, an - * empty optional is returned. - * - * @param cameraSim The specific camera to get the robot-to-camera transform of - * @param timeSeconds Timestamp in seconds of when the transform should be observed - * @return The transform of this camera, or an empty optional if it is invalid - */ - public Optional getRobotToCamera(PhotonCameraSim cameraSim, double timeSeconds) { - var trfBuffer = camTrfMap.get(cameraSim); - if (trfBuffer == null) return Optional.empty(); - var sample = trfBuffer.getSample(timeSeconds); - if (sample.isEmpty()) return Optional.empty(); - return Optional.of(new Transform3d(new Pose3d(), sample.orElse(new Pose3d()))); - } - - /** - * Get a simulated camera's position on the field. If the requested camera is invalid, an empty - * optional is returned. 
- * - * @param cameraSim The specific camera to get the field pose of - * @return The pose of this camera, or an empty optional if it is invalid - */ - public Optional getCameraPose(PhotonCameraSim cameraSim) { - return getCameraPose(cameraSim, Timer.getFPGATimestamp()); - } - - /** - * Get a simulated camera's position on the field. If the requested camera is invalid, an empty - * optional is returned. - * - * @param cameraSim The specific camera to get the field pose of - * @param timeSeconds Timestamp in seconds of when the pose should be observed - * @return The pose of this camera, or an empty optional if it is invalid - */ - public Optional getCameraPose(PhotonCameraSim cameraSim, double timeSeconds) { - var robotToCamera = getRobotToCamera(cameraSim, timeSeconds); - if (robotToCamera.isEmpty()) return Optional.empty(); - return Optional.of(getRobotPose(timeSeconds).plus(robotToCamera.get())); - } - - /** - * Adjust a camera's position relative to the robot. Use this if your camera is on a gimbal or - * turret or some other mobile platform. - * - * @param cameraSim The simulated camera to change the relative position of - * @param robotToCamera New transform from the robot to the camera - * @return If the cameraSim was valid and transform was adjusted - */ - public boolean adjustCamera(PhotonCameraSim cameraSim, Transform3d robotToCamera) { - var trfBuffer = camTrfMap.get(cameraSim); - if (trfBuffer == null) return false; - trfBuffer.addSample(Timer.getFPGATimestamp(), new Pose3d().plus(robotToCamera)); - return true; - } - - /** Reset the previous transforms for all cameras to their current transform. */ - public void resetCameraTransforms() { - for (var cam : camTrfMap.keySet()) resetCameraTransforms(cam); - } - - /** - * Reset the transform history for this camera to just the current transform. 
- * - * @return If the cameraSim was valid and transforms were reset - */ - public boolean resetCameraTransforms(PhotonCameraSim cameraSim) { - double now = Timer.getFPGATimestamp(); - var trfBuffer = camTrfMap.get(cameraSim); - if (trfBuffer == null) return false; - var lastTrf = new Transform3d(new Pose3d(), trfBuffer.getSample(now).orElse(new Pose3d())); - trfBuffer.clear(); - adjustCamera(cameraSim, lastTrf); - return true; - } - - public Set getVisionTargets() { - var all = new HashSet(); - for (var entry : targetSets.entrySet()) { - all.addAll(entry.getValue()); + + /** Get one of the simulated cameras. */ + public Optional getCameraSim(String name) { + return Optional.ofNullable(camSimMap.get(name)); + } + + /** Get all the simulated cameras. */ + public Collection getCameraSims() { + return camSimMap.values(); + } + + /** + * Adds a simulated camera to this vision system with a specified robot-to-camera transformation. + * The vision targets registered with this vision system simulation will be observed by the + * simulated {@link PhotonCamera}. + * + * @param cameraSim The camera simulation + * @param robotToCamera The transform from the robot pose to the camera pose + */ + public void addCamera(PhotonCameraSim cameraSim, Transform3d robotToCamera) { + var existing = camSimMap.putIfAbsent(cameraSim.getCamera().getName(), cameraSim); + if (existing == null) { + camTrfMap.put(cameraSim, TimeInterpolatableBuffer.createBuffer(kBufferLengthSeconds)); + camTrfMap + .get(cameraSim) + .addSample(Timer.getFPGATimestamp(), new Pose3d().plus(robotToCamera)); + } + } + + /** Remove all simulated cameras from this vision system. */ + public void clearCameras() { + camSimMap.clear(); + camTrfMap.clear(); + } + + /** + * Remove a simulated camera from this vision system. 
+ * + * @return If the camera was present and removed + */ + public boolean removeCamera(PhotonCameraSim cameraSim) { + boolean success = camSimMap.remove(cameraSim.getCamera().getName()) != null; + camTrfMap.remove(cameraSim); + return success; + } + + /** + * Get a simulated camera's position relative to the robot. If the requested camera is invalid, an + * empty optional is returned. + * + * @param cameraSim The specific camera to get the robot-to-camera transform of + * @return The transform of this camera, or an empty optional if it is invalid + */ + public Optional getRobotToCamera(PhotonCameraSim cameraSim) { + return getRobotToCamera(cameraSim, Timer.getFPGATimestamp()); + } + + /** + * Get a simulated camera's position relative to the robot. If the requested camera is invalid, an + * empty optional is returned. + * + * @param cameraSim The specific camera to get the robot-to-camera transform of + * @param timeSeconds Timestamp in seconds of when the transform should be observed + * @return The transform of this camera, or an empty optional if it is invalid + */ + public Optional getRobotToCamera(PhotonCameraSim cameraSim, double timeSeconds) { + var trfBuffer = camTrfMap.get(cameraSim); + if (trfBuffer == null) return Optional.empty(); + var sample = trfBuffer.getSample(timeSeconds); + if (sample.isEmpty()) return Optional.empty(); + return Optional.of(new Transform3d(new Pose3d(), sample.orElse(new Pose3d()))); } - return all; - } - - public Set getVisionTargets(String type) { - return targetSets.get(type); - } - - /** - * Adds targets on the field which your vision system is designed to detect. The {@link - * PhotonCamera}s simulated from this system will report the location of the camera relative to - * the subset of these targets which are visible from the given camera position. - * - *

By default these are added under the type "targets". - * - * @param targets Targets to add to the simulated field - */ - public void addVisionTargets(VisionTargetSim... targets) { - addVisionTargets("targets", targets); - } - - /** - * Adds targets on the field which your vision system is designed to detect. The {@link - * PhotonCamera}s simulated from this system will report the location of the camera relative to - * the subset of these targets which are visible from the given camera position. - * - *

The AprilTags from this layout will be added as vision targets under the type "apriltag". - * The poses added preserve the tag layout's current alliance origin. If the tag layout's alliance - * origin is changed, these added tags will have to be cleared and re-added. - * - * @param tagLayout The field tag layout to get Apriltag poses and IDs from - */ - public void addAprilTags(AprilTagFieldLayout tagLayout) { - for (AprilTag tag : tagLayout.getTags()) { - addVisionTargets( - "apriltag", - new VisionTargetSim( - tagLayout.getTagPose(tag.ID).get(), // preserve alliance rotation - TargetModel.kTag16h5, - tag.ID)); + + /** + * Get a simulated camera's position on the field. If the requested camera is invalid, an empty + * optional is returned. + * + * @param cameraSim The specific camera to get the field pose of + * @return The pose of this camera, or an empty optional if it is invalid + */ + public Optional getCameraPose(PhotonCameraSim cameraSim) { + return getCameraPose(cameraSim, Timer.getFPGATimestamp()); + } + + /** + * Get a simulated camera's position on the field. If the requested camera is invalid, an empty + * optional is returned. + * + * @param cameraSim The specific camera to get the field pose of + * @param timeSeconds Timestamp in seconds of when the pose should be observed + * @return The pose of this camera, or an empty optional if it is invalid + */ + public Optional getCameraPose(PhotonCameraSim cameraSim, double timeSeconds) { + var robotToCamera = getRobotToCamera(cameraSim, timeSeconds); + if (robotToCamera.isEmpty()) return Optional.empty(); + return Optional.of(getRobotPose(timeSeconds).plus(robotToCamera.get())); } - } - - /** - * Adds targets on the field which your vision system is designed to detect. The {@link - * PhotonCamera}s simulated from this system will report the location of the camera relative to - * the subset of these targets which are visible from the given camera position. - * - * @param type Type of target (e.g. "cargo"). 
- * @param targets Targets to add to the simulated field - */ - public void addVisionTargets(String type, VisionTargetSim... targets) { - targetSets.computeIfAbsent(type, k -> new HashSet<>()); - for (var tgt : targets) { - targetSets.get(type).add(tgt); + + /** + * Adjust a camera's position relative to the robot. Use this if your camera is on a gimbal or + * turret or some other mobile platform. + * + * @param cameraSim The simulated camera to change the relative position of + * @param robotToCamera New transform from the robot to the camera + * @return If the cameraSim was valid and transform was adjusted + */ + public boolean adjustCamera(PhotonCameraSim cameraSim, Transform3d robotToCamera) { + var trfBuffer = camTrfMap.get(cameraSim); + if (trfBuffer == null) return false; + trfBuffer.addSample(Timer.getFPGATimestamp(), new Pose3d().plus(robotToCamera)); + return true; } - } - - public void clearVisionTargets() { - targetSets.clear(); - } - - public void clearAprilTags() { - removeVisionTargets("apriltag"); - } - - public Set removeVisionTargets(String type) { - return targetSets.remove(type); - } - - public Set removeVisionTargets(VisionTargetSim... targets) { - var removeList = List.of(targets); - var removedSet = new HashSet(); - for (var entry : targetSets.entrySet()) { - entry - .getValue() - .removeIf( - t -> { - if (removeList.contains(t)) { - removedSet.add(t); - return true; - } else return false; - }); + + /** Reset the previous transforms for all cameras to their current transform. */ + public void resetCameraTransforms() { + for (var cam : camTrfMap.keySet()) resetCameraTransforms(cam); + } + + /** + * Reset the transform history for this camera to just the current transform. 
+ * + * @return If the cameraSim was valid and transforms were reset + */ + public boolean resetCameraTransforms(PhotonCameraSim cameraSim) { + double now = Timer.getFPGATimestamp(); + var trfBuffer = camTrfMap.get(cameraSim); + if (trfBuffer == null) return false; + var lastTrf = new Transform3d(new Pose3d(), trfBuffer.getSample(now).orElse(new Pose3d())); + trfBuffer.clear(); + adjustCamera(cameraSim, lastTrf); + return true; + } + + public Set getVisionTargets() { + var all = new HashSet(); + for (var entry : targetSets.entrySet()) { + all.addAll(entry.getValue()); + } + return all; + } + + public Set getVisionTargets(String type) { + return targetSets.get(type); + } + + /** + * Adds targets on the field which your vision system is designed to detect. The {@link + * PhotonCamera}s simulated from this system will report the location of the camera relative to + * the subset of these targets which are visible from the given camera position. + * + *

By default these are added under the type "targets". + * + * @param targets Targets to add to the simulated field + */ + public void addVisionTargets(VisionTargetSim... targets) { + addVisionTargets("targets", targets); } - return removedSet; - } - - /** Get the latest robot pose in meters saved by the vision system. */ - public Pose3d getRobotPose() { - return getRobotPose(Timer.getFPGATimestamp()); - } - - /** - * Get the robot pose in meters saved by the vision system at this timestamp. - * - * @param timestamp Timestamp of the desired robot pose - */ - public Pose3d getRobotPose(double timestamp) { - return robotPoseBuffer.getSample(timestamp).orElse(new Pose3d()); - } - - /** Clears all previous robot poses and sets robotPose at current time. */ - public void resetRobotPose(Pose2d robotPose) { - resetRobotPose(new Pose3d(robotPose)); - } - - /** Clears all previous robot poses and sets robotPose at current time. */ - public void resetRobotPose(Pose3d robotPose) { - robotPoseBuffer.clear(); - robotPoseBuffer.addSample(Timer.getFPGATimestamp(), robotPose); - } - - public Field2d getDebugField() { - return dbgField; - } - - /** - * Periodic update. Ensure this is called repeatedly-- camera performance is used to automatically - * determine if a new frame should be submitted. - * - * @param robotPoseMeters The simulated robot pose in meters - */ - public void update(Pose2d robotPoseMeters) { - update(new Pose3d(robotPoseMeters)); - } - - /** - * Periodic update. Ensure this is called repeatedly-- camera performance is used to automatically - * determine if a new frame should be submitted. 
- * - * @param robotPoseMeters The simulated robot pose in meters - */ - public void update(Pose3d robotPoseMeters) { - var targetTypes = targetSets.entrySet(); - // update vision targets on field - targetTypes.forEach( - entry -> - dbgField - .getObject(entry.getKey()) - .setPoses( - entry.getValue().stream() - .map(t -> t.getPose().toPose2d()) - .collect(Collectors.toList()))); - - if (robotPoseMeters == null) return; - - // save "real" robot poses over time - double now = Timer.getFPGATimestamp(); - robotPoseBuffer.addSample(now, robotPoseMeters); - dbgField.setRobotPose(robotPoseMeters.toPose2d()); - - var allTargets = new ArrayList(); - targetTypes.forEach((entry) -> allTargets.addAll(entry.getValue())); - var visTgtPoses2d = new ArrayList(); - var cameraPoses2d = new ArrayList(); - boolean processed = false; - // process each camera - for (var camSim : camSimMap.values()) { - // check if this camera is ready to process and get latency - var optTimestamp = camSim.consumeNextEntryTime(); - if (optTimestamp.isEmpty()) continue; - else processed = true; - // when this result "was" read by NT - long timestampNT = optTimestamp.get(); - // this result's processing latency in milliseconds - double latencyMillis = camSim.prop.estLatencyMs(); - // the image capture timestamp in seconds of this result - double timestampCapture = timestampNT / 1e6 - latencyMillis / 1e3; - - // use camera pose from the image capture timestamp - Pose3d lateRobotPose = getRobotPose(timestampCapture); - Pose3d lateCameraPose = lateRobotPose.plus(getRobotToCamera(camSim, timestampCapture).get()); - cameraPoses2d.add(lateCameraPose.toPose2d()); - - // process a PhotonPipelineResult with visible targets - var camResult = camSim.process(latencyMillis, lateCameraPose, allTargets); - // publish this info to NT at estimated timestamp of receive - camSim.submitProcessedFrame(camResult, timestampNT); - // display debug results - for (var target : camResult.getTargets()) { - var trf = 
target.getBestCameraToTarget(); - if (trf.equals(kEmptyTrf)) continue; - visTgtPoses2d.add(lateCameraPose.transformBy(trf).toPose2d()); - } + + /** + * Adds targets on the field which your vision system is designed to detect. The {@link + * PhotonCamera}s simulated from this system will report the location of the camera relative to + * the subset of these targets which are visible from the given camera position. + * + *

The AprilTags from this layout will be added as vision targets under the type "apriltag". + * The poses added preserve the tag layout's current alliance origin. If the tag layout's alliance + * origin is changed, these added tags will have to be cleared and re-added. + * + * @param tagLayout The field tag layout to get Apriltag poses and IDs from + */ + public void addAprilTags(AprilTagFieldLayout tagLayout) { + for (AprilTag tag : tagLayout.getTags()) { + addVisionTargets( + "apriltag", + new VisionTargetSim( + tagLayout.getTagPose(tag.ID).get(), // preserve alliance rotation + TargetModel.kTag16h5, + tag.ID)); + } + } + + /** + * Adds targets on the field which your vision system is designed to detect. The {@link + * PhotonCamera}s simulated from this system will report the location of the camera relative to + * the subset of these targets which are visible from the given camera position. + * + * @param type Type of target (e.g. "cargo"). + * @param targets Targets to add to the simulated field + */ + public void addVisionTargets(String type, VisionTargetSim... targets) { + targetSets.computeIfAbsent(type, k -> new HashSet<>()); + for (var tgt : targets) { + targetSets.get(type).add(tgt); + } + } + + public void clearVisionTargets() { + targetSets.clear(); + } + + public void clearAprilTags() { + removeVisionTargets("apriltag"); + } + + public Set removeVisionTargets(String type) { + return targetSets.remove(type); + } + + public Set removeVisionTargets(VisionTargetSim... targets) { + var removeList = List.of(targets); + var removedSet = new HashSet(); + for (var entry : targetSets.entrySet()) { + entry + .getValue() + .removeIf( + t -> { + if (removeList.contains(t)) { + removedSet.add(t); + return true; + } else return false; + }); + } + return removedSet; + } + + /** Get the latest robot pose in meters saved by the vision system. 
*/ + public Pose3d getRobotPose() { + return getRobotPose(Timer.getFPGATimestamp()); + } + + /** + * Get the robot pose in meters saved by the vision system at this timestamp. + * + * @param timestamp Timestamp of the desired robot pose + */ + public Pose3d getRobotPose(double timestamp) { + return robotPoseBuffer.getSample(timestamp).orElse(new Pose3d()); + } + + /** Clears all previous robot poses and sets robotPose at current time. */ + public void resetRobotPose(Pose2d robotPose) { + resetRobotPose(new Pose3d(robotPose)); + } + + /** Clears all previous robot poses and sets robotPose at current time. */ + public void resetRobotPose(Pose3d robotPose) { + robotPoseBuffer.clear(); + robotPoseBuffer.addSample(Timer.getFPGATimestamp(), robotPose); + } + + public Field2d getDebugField() { + return dbgField; + } + + /** + * Periodic update. Ensure this is called repeatedly-- camera performance is used to automatically + * determine if a new frame should be submitted. + * + * @param robotPoseMeters The simulated robot pose in meters + */ + public void update(Pose2d robotPoseMeters) { + update(new Pose3d(robotPoseMeters)); + } + + /** + * Periodic update. Ensure this is called repeatedly-- camera performance is used to automatically + * determine if a new frame should be submitted. 
+ * + * @param robotPoseMeters The simulated robot pose in meters + */ + public void update(Pose3d robotPoseMeters) { + var targetTypes = targetSets.entrySet(); + // update vision targets on field + targetTypes.forEach( + entry -> + dbgField + .getObject(entry.getKey()) + .setPoses( + entry.getValue().stream() + .map(t -> t.getPose().toPose2d()) + .collect(Collectors.toList()))); + + if (robotPoseMeters == null) return; + + // save "real" robot poses over time + double now = Timer.getFPGATimestamp(); + robotPoseBuffer.addSample(now, robotPoseMeters); + dbgField.setRobotPose(robotPoseMeters.toPose2d()); + + var allTargets = new ArrayList(); + targetTypes.forEach((entry) -> allTargets.addAll(entry.getValue())); + var visTgtPoses2d = new ArrayList(); + var cameraPoses2d = new ArrayList(); + boolean processed = false; + // process each camera + for (var camSim : camSimMap.values()) { + // check if this camera is ready to process and get latency + var optTimestamp = camSim.consumeNextEntryTime(); + if (optTimestamp.isEmpty()) continue; + else processed = true; + // when this result "was" read by NT + long timestampNT = optTimestamp.get(); + // this result's processing latency in milliseconds + double latencyMillis = camSim.prop.estLatencyMs(); + // the image capture timestamp in seconds of this result + double timestampCapture = timestampNT / 1e6 - latencyMillis / 1e3; + + // use camera pose from the image capture timestamp + Pose3d lateRobotPose = getRobotPose(timestampCapture); + Pose3d lateCameraPose = lateRobotPose.plus(getRobotToCamera(camSim, timestampCapture).get()); + cameraPoses2d.add(lateCameraPose.toPose2d()); + + // process a PhotonPipelineResult with visible targets + var camResult = camSim.process(latencyMillis, lateCameraPose, allTargets); + // publish this info to NT at estimated timestamp of receive + camSim.submitProcessedFrame(camResult, timestampNT); + // display debug results + for (var target : camResult.getTargets()) { + var trf = 
target.getBestCameraToTarget(); + if (trf.equals(kEmptyTrf)) continue; + visTgtPoses2d.add(lateCameraPose.transformBy(trf).toPose2d()); + } + } + if (processed) dbgField.getObject("visibleTargetPoses").setPoses(visTgtPoses2d); + if (cameraPoses2d.size() != 0) dbgField.getObject("cameras").setPoses(cameraPoses2d); } - if (processed) dbgField.getObject("visibleTargetPoses").setPoses(visTgtPoses2d); - if (cameraPoses2d.size() != 0) dbgField.getObject("cameras").setPoses(cameraPoses2d); - } } diff --git a/photon-lib/src/main/java/org/photonvision/simulation/VisionTargetSim.java b/photon-lib/src/main/java/org/photonvision/simulation/VisionTargetSim.java index 2ca98ccbd1..88d19756c3 100644 --- a/photon-lib/src/main/java/org/photonvision/simulation/VisionTargetSim.java +++ b/photon-lib/src/main/java/org/photonvision/simulation/VisionTargetSim.java @@ -31,64 +31,64 @@ /** Describes a vision target located somewhere on the field that your vision system can detect. */ public class VisionTargetSim { - private Pose3d pose; - private TargetModel model; + private Pose3d pose; + private TargetModel model; - public final int fiducialID; + public final int fiducialID; - /** - * Describes a vision target located somewhere on the field that your vision system can detect. - * - * @param pose Pose3d of the tag in field-relative coordinates - * @param model TargetModel which describes the shape of the target - */ - public VisionTargetSim(Pose3d pose, TargetModel model) { - this.pose = pose; - this.model = model; - this.fiducialID = -1; - } + /** + * Describes a vision target located somewhere on the field that your vision system can detect. 
+ * + * @param pose Pose3d of the tag in field-relative coordinates + * @param model TargetModel which describes the shape of the target + */ + public VisionTargetSim(Pose3d pose, TargetModel model) { + this.pose = pose; + this.model = model; + this.fiducialID = -1; + } - /** - * Describes a fiducial tag located somewhere on the field that your vision system can detect. - * - * @param pose Pose3d of the tag in field-relative coordinates - * @param model TargetModel which describes the shape of the target(tag) - * @param id The ID of this fiducial tag - */ - public VisionTargetSim(Pose3d pose, TargetModel model, int id) { - this.pose = pose; - this.model = model; - this.fiducialID = id; - } + /** + * Describes a fiducial tag located somewhere on the field that your vision system can detect. + * + * @param pose Pose3d of the tag in field-relative coordinates + * @param model TargetModel which describes the shape of the target(tag) + * @param id The ID of this fiducial tag + */ + public VisionTargetSim(Pose3d pose, TargetModel model, int id) { + this.pose = pose; + this.model = model; + this.fiducialID = id; + } - public void setPose(Pose3d pose) { - this.pose = pose; - } + public void setPose(Pose3d pose) { + this.pose = pose; + } - public void setModel(TargetModel model) { - this.model = model; - } + public void setModel(TargetModel model) { + this.model = model; + } - public Pose3d getPose() { - return pose; - } + public Pose3d getPose() { + return pose; + } - public TargetModel getModel() { - return model; - } + public TargetModel getModel() { + return model; + } - /** This target's vertices offset from its field pose. */ - public List getFieldVertices() { - return model.getFieldVertices(pose); - } + /** This target's vertices offset from its field pose. 
*/ + public List getFieldVertices() { + return model.getFieldVertices(pose); + } - @Override - public boolean equals(Object obj) { - if (this == obj) return true; - if (obj instanceof VisionTargetSim) { - var o = (VisionTargetSim) obj; - return pose.equals(o.pose) && model.equals(o.model); + @Override + public boolean equals(Object obj) { + if (this == obj) return true; + if (obj instanceof VisionTargetSim) { + var o = (VisionTargetSim) obj; + return pose.equals(o.pose) && model.equals(o.model); + } + return false; } - return false; - } } diff --git a/photon-lib/src/test/java/org/photonvision/OpenCVTest.java b/photon-lib/src/test/java/org/photonvision/OpenCVTest.java index 98be84b677..e483833111 100644 --- a/photon-lib/src/test/java/org/photonvision/OpenCVTest.java +++ b/photon-lib/src/test/java/org/photonvision/OpenCVTest.java @@ -55,200 +55,200 @@ import org.photonvision.simulation.VisionTargetSim; public class OpenCVTest { - private static final double kTrlDelta = 0.005; - private static final double kRotDeltaDeg = 0.25; - - public static void assertSame(Translation3d trl1, Translation3d trl2) { - assertEquals(0, trl1.getX() - trl2.getX(), kTrlDelta, "Trl X Diff"); - assertEquals(0, trl1.getY() - trl2.getY(), kTrlDelta, "Trl Y Diff"); - assertEquals(0, trl1.getZ() - trl2.getZ(), kTrlDelta, "Trl Z Diff"); - } - - public static void assertSame(Rotation3d rot1, Rotation3d rot2) { - assertEquals(0, MathUtil.angleModulus(rot1.getX() - rot2.getX()), kRotDeltaDeg, "Rot X Diff"); - assertEquals(0, MathUtil.angleModulus(rot1.getY() - rot2.getY()), kRotDeltaDeg, "Rot Y Diff"); - assertEquals(0, MathUtil.angleModulus(rot1.getZ() - rot2.getZ()), kRotDeltaDeg, "Rot Z Diff"); - assertEquals( - 0, MathUtil.angleModulus(rot1.getAngle() - rot2.getAngle()), kRotDeltaDeg, "Rot W Diff"); - } - - public static void assertSame(Pose3d pose1, Pose3d pose2) { - assertSame(pose1.getTranslation(), pose2.getTranslation()); - assertSame(pose1.getRotation(), pose2.getRotation()); - } - - 
public static void assertSame(Transform3d trf1, Transform3d trf2) { - assertSame(trf1.getTranslation(), trf2.getTranslation()); - assertSame(trf1.getRotation(), trf2.getRotation()); - } - - private static final SimCameraProperties prop = new SimCameraProperties(); - - @BeforeAll - public static void setUp() { - JNIWrapper.Helper.setExtractOnStaticLoad(false); - WPIUtilJNI.Helper.setExtractOnStaticLoad(false); - NetworkTablesJNI.Helper.setExtractOnStaticLoad(false); - WPINetJNI.Helper.setExtractOnStaticLoad(false); - CameraServerJNI.Helper.setExtractOnStaticLoad(false); - CameraServerCvJNI.Helper.setExtractOnStaticLoad(false); - AprilTagJNI.Helper.setExtractOnStaticLoad(false); - - try { - CombinedRuntimeLoader.loadLibraries( - VisionSystemSim.class, - "wpiutiljni", - "ntcorejni", - "wpinetjni", - "wpiHaljni", - "cscorejni", - "cscorejnicvstatic"); - - var loader = - new RuntimeLoader<>( - Core.NATIVE_LIBRARY_NAME, RuntimeLoader.getDefaultExtractionRoot(), Core.class); - loader.loadLibrary(); - } catch (Exception e) { - e.printStackTrace(); + private static final double kTrlDelta = 0.005; + private static final double kRotDeltaDeg = 0.25; + + public static void assertSame(Translation3d trl1, Translation3d trl2) { + assertEquals(0, trl1.getX() - trl2.getX(), kTrlDelta, "Trl X Diff"); + assertEquals(0, trl1.getY() - trl2.getY(), kTrlDelta, "Trl Y Diff"); + assertEquals(0, trl1.getZ() - trl2.getZ(), kTrlDelta, "Trl Z Diff"); + } + + public static void assertSame(Rotation3d rot1, Rotation3d rot2) { + assertEquals(0, MathUtil.angleModulus(rot1.getX() - rot2.getX()), kRotDeltaDeg, "Rot X Diff"); + assertEquals(0, MathUtil.angleModulus(rot1.getY() - rot2.getY()), kRotDeltaDeg, "Rot Y Diff"); + assertEquals(0, MathUtil.angleModulus(rot1.getZ() - rot2.getZ()), kRotDeltaDeg, "Rot Z Diff"); + assertEquals( + 0, MathUtil.angleModulus(rot1.getAngle() - rot2.getAngle()), kRotDeltaDeg, "Rot W Diff"); + } + + public static void assertSame(Pose3d pose1, Pose3d pose2) { + 
assertSame(pose1.getTranslation(), pose2.getTranslation()); + assertSame(pose1.getRotation(), pose2.getRotation()); + } + + public static void assertSame(Transform3d trf1, Transform3d trf2) { + assertSame(trf1.getTranslation(), trf2.getTranslation()); + assertSame(trf1.getRotation(), trf2.getRotation()); + } + + private static final SimCameraProperties prop = new SimCameraProperties(); + + @BeforeAll + public static void setUp() { + JNIWrapper.Helper.setExtractOnStaticLoad(false); + WPIUtilJNI.Helper.setExtractOnStaticLoad(false); + NetworkTablesJNI.Helper.setExtractOnStaticLoad(false); + WPINetJNI.Helper.setExtractOnStaticLoad(false); + CameraServerJNI.Helper.setExtractOnStaticLoad(false); + CameraServerCvJNI.Helper.setExtractOnStaticLoad(false); + AprilTagJNI.Helper.setExtractOnStaticLoad(false); + + try { + CombinedRuntimeLoader.loadLibraries( + VisionSystemSim.class, + "wpiutiljni", + "ntcorejni", + "wpinetjni", + "wpiHaljni", + "cscorejni", + "cscorejnicvstatic"); + + var loader = + new RuntimeLoader<>( + Core.NATIVE_LIBRARY_NAME, RuntimeLoader.getDefaultExtractionRoot(), Core.class); + loader.loadLibrary(); + } catch (Exception e) { + e.printStackTrace(); + } + + // NT live for debug purposes + NetworkTableInstance.getDefault().startServer(); + + // No version check for testing + PhotonCamera.setVersionCheckEnabled(false); + } + + @Test + public void testTrlConvert() { + var trl = new Translation3d(0.75, -0.4, 0.1); + var tvec = OpenCVHelp.translationToTvec(trl); + var result = OpenCVHelp.tvecToTranslation(tvec); + tvec.release(); + assertSame(trl, result); + } + + @Test + public void testRotConvert() { + var rot = new Rotation3d(0.5, 1, -1); + var rvec = OpenCVHelp.rotationToRvec(rot); + var result = OpenCVHelp.rvecToRotation(rvec); + rvec.release(); + var diff = result.minus(rot); + assertSame(new Rotation3d(), diff); + } + + @Test + public void testProjection() { + var target = + new VisionTargetSim( + new Pose3d(1, 0, 0, new Rotation3d(0, 0, Math.PI)), 
TargetModel.kTag16h5, 0); + var cameraPose = new Pose3d(0, 0, 0, new Rotation3d()); + var camRt = RotTrlTransform3d.makeRelativeTo(cameraPose); + var imagePoints = + OpenCVHelp.projectPoints( + prop.getIntrinsics(), prop.getDistCoeffs(), camRt, target.getFieldVertices()); + + // find circulation (counter/clockwise-ness) + double circulation = 0; + for (int i = 0; i < imagePoints.length; i++) { + double xDiff = imagePoints[(i + 1) % 4].x - imagePoints[i].x; + double ySum = imagePoints[(i + 1) % 4].y + imagePoints[i].y; + circulation += xDiff * ySum; + } + assertTrue(circulation > 0, "2d fiducial points aren't counter-clockwise"); + + // undo projection distortion + imagePoints = + OpenCVHelp.undistortPoints(prop.getIntrinsics(), prop.getDistCoeffs(), imagePoints); + + // test projection results after moving camera + var avgCenterRot1 = prop.getPixelRot(OpenCVHelp.avgPoint(imagePoints)); + cameraPose = + cameraPose.plus(new Transform3d(new Translation3d(), new Rotation3d(0, 0.25, 0.25))); + camRt = RotTrlTransform3d.makeRelativeTo(cameraPose); + imagePoints = + OpenCVHelp.projectPoints( + prop.getIntrinsics(), prop.getDistCoeffs(), camRt, target.getFieldVertices()); + var avgCenterRot2 = prop.getPixelRot(OpenCVHelp.avgPoint(imagePoints)); + + var yaw2d = new Rotation2d(avgCenterRot2.getZ()); + var pitch2d = new Rotation2d(avgCenterRot2.getY()); + var yawDiff = yaw2d.minus(new Rotation2d(avgCenterRot1.getZ())); + var pitchDiff = pitch2d.minus(new Rotation2d(avgCenterRot1.getY())); + assertTrue(yawDiff.getRadians() < 0, "2d points don't follow yaw"); + assertTrue(pitchDiff.getRadians() < 0, "2d points don't follow pitch"); + + var actualRelation = new CameraTargetRelation(cameraPose, target.getPose()); + assertEquals( + actualRelation.camToTargPitch.getDegrees(), + pitchDiff.getDegrees() + * Math.cos(yaw2d.getRadians()), // adjust for unaccounted perpsective distortion + kRotDeltaDeg, + "2d pitch doesn't match 3d"); + assertEquals( + 
actualRelation.camToTargYaw.getDegrees(), + yawDiff.getDegrees(), + kRotDeltaDeg, + "2d yaw doesn't match 3d"); + } + + @Test + public void testSolvePNP_SQUARE() { + // square AprilTag target + var target = + new VisionTargetSim( + new Pose3d(5, 0.5, 1, new Rotation3d(0, 0, Math.PI)), TargetModel.kTag16h5, 0); + var cameraPose = new Pose3d(0, 0, 0, new Rotation3d()); + var camRt = RotTrlTransform3d.makeRelativeTo(cameraPose); + // target relative to camera + var relTarget = camRt.apply(target.getPose()); + + // simulate solvePNP estimation + var targetCorners = + OpenCVHelp.projectPoints( + prop.getIntrinsics(), prop.getDistCoeffs(), camRt, target.getFieldVertices()); + var pnpSim = + OpenCVHelp.solvePNP_SQUARE( + prop.getIntrinsics(), prop.getDistCoeffs(), target.getModel().vertices, targetCorners); + + // check solvePNP estimation accuracy + assertSame(relTarget.getRotation(), pnpSim.best.getRotation()); + assertSame(relTarget.getTranslation(), pnpSim.best.getTranslation()); } - // NT live for debug purposes - NetworkTableInstance.getDefault().startServer(); - - // No version check for testing - PhotonCamera.setVersionCheckEnabled(false); - } - - @Test - public void testTrlConvert() { - var trl = new Translation3d(0.75, -0.4, 0.1); - var tvec = OpenCVHelp.translationToTvec(trl); - var result = OpenCVHelp.tvecToTranslation(tvec); - tvec.release(); - assertSame(trl, result); - } - - @Test - public void testRotConvert() { - var rot = new Rotation3d(0.5, 1, -1); - var rvec = OpenCVHelp.rotationToRvec(rot); - var result = OpenCVHelp.rvecToRotation(rvec); - rvec.release(); - var diff = result.minus(rot); - assertSame(new Rotation3d(), diff); - } - - @Test - public void testProjection() { - var target = - new VisionTargetSim( - new Pose3d(1, 0, 0, new Rotation3d(0, 0, Math.PI)), TargetModel.kTag16h5, 0); - var cameraPose = new Pose3d(0, 0, 0, new Rotation3d()); - var camRt = RotTrlTransform3d.makeRelativeTo(cameraPose); - var imagePoints = - OpenCVHelp.projectPoints( - 
prop.getIntrinsics(), prop.getDistCoeffs(), camRt, target.getFieldVertices()); - - // find circulation (counter/clockwise-ness) - double circulation = 0; - for (int i = 0; i < imagePoints.length; i++) { - double xDiff = imagePoints[(i + 1) % 4].x - imagePoints[i].x; - double ySum = imagePoints[(i + 1) % 4].y + imagePoints[i].y; - circulation += xDiff * ySum; + @Test + public void testSolvePNP_SQPNP() { + // (for targets with arbitrary number of non-colinear points > 2) + var target = + new VisionTargetSim( + new Pose3d(5, 0.5, 1, new Rotation3d(0, 0, Math.PI)), + new TargetModel( + List.of( + new Translation3d(0, 0, 0), + new Translation3d(1, 0, 0), + new Translation3d(0, 1, 0), + new Translation3d(0, 0, 1), + new Translation3d(0.125, 0.25, 0.5), + new Translation3d(0, 0, -1), + new Translation3d(0, -1, 0), + new Translation3d(-1, 0, 0))), + 0); + var cameraPose = new Pose3d(0, 0, 0, new Rotation3d()); + var camRt = RotTrlTransform3d.makeRelativeTo(cameraPose); + // target relative to camera + var relTarget = camRt.apply(target.getPose()); + + // simulate solvePNP estimation + var targetCorners = + OpenCVHelp.projectPoints( + prop.getIntrinsics(), prop.getDistCoeffs(), camRt, target.getFieldVertices()); + var pnpSim = + OpenCVHelp.solvePNP_SQPNP( + prop.getIntrinsics(), prop.getDistCoeffs(), target.getModel().vertices, targetCorners); + + // check solvePNP estimation accuracy + assertSame(relTarget.getRotation(), pnpSim.best.getRotation()); + assertSame(relTarget.getTranslation(), pnpSim.best.getTranslation()); } - assertTrue(circulation > 0, "2d fiducial points aren't counter-clockwise"); - - // undo projection distortion - imagePoints = - OpenCVHelp.undistortPoints(prop.getIntrinsics(), prop.getDistCoeffs(), imagePoints); - - // test projection results after moving camera - var avgCenterRot1 = prop.getPixelRot(OpenCVHelp.avgPoint(imagePoints)); - cameraPose = - cameraPose.plus(new Transform3d(new Translation3d(), new Rotation3d(0, 0.25, 0.25))); - camRt = 
RotTrlTransform3d.makeRelativeTo(cameraPose); - imagePoints = - OpenCVHelp.projectPoints( - prop.getIntrinsics(), prop.getDistCoeffs(), camRt, target.getFieldVertices()); - var avgCenterRot2 = prop.getPixelRot(OpenCVHelp.avgPoint(imagePoints)); - - var yaw2d = new Rotation2d(avgCenterRot2.getZ()); - var pitch2d = new Rotation2d(avgCenterRot2.getY()); - var yawDiff = yaw2d.minus(new Rotation2d(avgCenterRot1.getZ())); - var pitchDiff = pitch2d.minus(new Rotation2d(avgCenterRot1.getY())); - assertTrue(yawDiff.getRadians() < 0, "2d points don't follow yaw"); - assertTrue(pitchDiff.getRadians() < 0, "2d points don't follow pitch"); - - var actualRelation = new CameraTargetRelation(cameraPose, target.getPose()); - assertEquals( - actualRelation.camToTargPitch.getDegrees(), - pitchDiff.getDegrees() - * Math.cos(yaw2d.getRadians()), // adjust for unaccounted perpsective distortion - kRotDeltaDeg, - "2d pitch doesn't match 3d"); - assertEquals( - actualRelation.camToTargYaw.getDegrees(), - yawDiff.getDegrees(), - kRotDeltaDeg, - "2d yaw doesn't match 3d"); - } - - @Test - public void testSolvePNP_SQUARE() { - // square AprilTag target - var target = - new VisionTargetSim( - new Pose3d(5, 0.5, 1, new Rotation3d(0, 0, Math.PI)), TargetModel.kTag16h5, 0); - var cameraPose = new Pose3d(0, 0, 0, new Rotation3d()); - var camRt = RotTrlTransform3d.makeRelativeTo(cameraPose); - // target relative to camera - var relTarget = camRt.apply(target.getPose()); - - // simulate solvePNP estimation - var targetCorners = - OpenCVHelp.projectPoints( - prop.getIntrinsics(), prop.getDistCoeffs(), camRt, target.getFieldVertices()); - var pnpSim = - OpenCVHelp.solvePNP_SQUARE( - prop.getIntrinsics(), prop.getDistCoeffs(), target.getModel().vertices, targetCorners); - - // check solvePNP estimation accuracy - assertSame(relTarget.getRotation(), pnpSim.best.getRotation()); - assertSame(relTarget.getTranslation(), pnpSim.best.getTranslation()); - } - - @Test - public void testSolvePNP_SQPNP() { - // 
(for targets with arbitrary number of non-colinear points > 2) - var target = - new VisionTargetSim( - new Pose3d(5, 0.5, 1, new Rotation3d(0, 0, Math.PI)), - new TargetModel( - List.of( - new Translation3d(0, 0, 0), - new Translation3d(1, 0, 0), - new Translation3d(0, 1, 0), - new Translation3d(0, 0, 1), - new Translation3d(0.125, 0.25, 0.5), - new Translation3d(0, 0, -1), - new Translation3d(0, -1, 0), - new Translation3d(-1, 0, 0))), - 0); - var cameraPose = new Pose3d(0, 0, 0, new Rotation3d()); - var camRt = RotTrlTransform3d.makeRelativeTo(cameraPose); - // target relative to camera - var relTarget = camRt.apply(target.getPose()); - - // simulate solvePNP estimation - var targetCorners = - OpenCVHelp.projectPoints( - prop.getIntrinsics(), prop.getDistCoeffs(), camRt, target.getFieldVertices()); - var pnpSim = - OpenCVHelp.solvePNP_SQPNP( - prop.getIntrinsics(), prop.getDistCoeffs(), target.getModel().vertices, targetCorners); - - // check solvePNP estimation accuracy - assertSame(relTarget.getRotation(), pnpSim.best.getRotation()); - assertSame(relTarget.getTranslation(), pnpSim.best.getTranslation()); - } } diff --git a/photon-lib/src/test/java/org/photonvision/PacketTest.java b/photon-lib/src/test/java/org/photonvision/PacketTest.java index de2a0fb46e..c29a629b5b 100644 --- a/photon-lib/src/test/java/org/photonvision/PacketTest.java +++ b/photon-lib/src/test/java/org/photonvision/PacketTest.java @@ -37,154 +37,154 @@ import org.photonvision.targeting.TargetCorner; class PacketTest { - @Test - void testSimpleTrackedTarget() { - var target = - new PhotonTrackedTarget( - 3.0, - 4.0, - 9.0, - -5.0, - -1, - new Transform3d(new Translation3d(), new Rotation3d()), - new Transform3d(new Translation3d(), new Rotation3d()), - 0.25, - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8)), - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8))); - var 
p = new Packet(PhotonTrackedTarget.PACK_SIZE_BYTES); - target.populatePacket(p); + @Test + void testSimpleTrackedTarget() { + var target = + new PhotonTrackedTarget( + 3.0, + 4.0, + 9.0, + -5.0, + -1, + new Transform3d(new Translation3d(), new Rotation3d()), + new Transform3d(new Translation3d(), new Rotation3d()), + 0.25, + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8)), + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8))); + var p = new Packet(PhotonTrackedTarget.PACK_SIZE_BYTES); + target.populatePacket(p); - var b = new PhotonTrackedTarget(); - b.createFromPacket(p); + var b = new PhotonTrackedTarget(); + b.createFromPacket(p); - Assertions.assertEquals(target, b); - } + Assertions.assertEquals(target, b); + } - @Test - void testSimplePipelineResult() { - var result = new PhotonPipelineResult(1, new ArrayList<>()); - var p = new Packet(result.getPacketSize()); - result.populatePacket(p); + @Test + void testSimplePipelineResult() { + var result = new PhotonPipelineResult(1, new ArrayList<>()); + var p = new Packet(result.getPacketSize()); + result.populatePacket(p); - var b = new PhotonPipelineResult(); - b.createFromPacket(p); + var b = new PhotonPipelineResult(); + b.createFromPacket(p); - Assertions.assertEquals(result, b); + Assertions.assertEquals(result, b); - var result2 = - new PhotonPipelineResult( - 2, - List.of( - new PhotonTrackedTarget( - 3.0, - -4.0, - 9.0, - 4.0, - 2, - new Transform3d(new Translation3d(1, 2, 3), new Rotation3d(1, 2, 3)), - new Transform3d(new Translation3d(1, 2, 3), new Rotation3d(1, 2, 3)), - 0.25, - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8)), - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8))), - new PhotonTrackedTarget( - 3.0, - -4.0, - 9.1, - 6.7, - 3, - new 
Transform3d(new Translation3d(4, 2, 3), new Rotation3d(1, 5, 3)), - new Transform3d(new Translation3d(4, 2, 3), new Rotation3d(1, 5, 3)), - 0.25, - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8)), - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8))))); - var p2 = new Packet(result2.getPacketSize()); - result2.populatePacket(p2); + var result2 = + new PhotonPipelineResult( + 2, + List.of( + new PhotonTrackedTarget( + 3.0, + -4.0, + 9.0, + 4.0, + 2, + new Transform3d(new Translation3d(1, 2, 3), new Rotation3d(1, 2, 3)), + new Transform3d(new Translation3d(1, 2, 3), new Rotation3d(1, 2, 3)), + 0.25, + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8)), + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8))), + new PhotonTrackedTarget( + 3.0, + -4.0, + 9.1, + 6.7, + 3, + new Transform3d(new Translation3d(4, 2, 3), new Rotation3d(1, 5, 3)), + new Transform3d(new Translation3d(4, 2, 3), new Rotation3d(1, 5, 3)), + 0.25, + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8)), + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8))))); + var p2 = new Packet(result2.getPacketSize()); + result2.populatePacket(p2); - var b2 = new PhotonPipelineResult(); - b2.createFromPacket(p2); + var b2 = new PhotonPipelineResult(); + b2.createFromPacket(p2); - Assertions.assertEquals(result2, b2); - } + Assertions.assertEquals(result2, b2); + } - @Test - public void testMultiTargetSerde() { - var result = - new PhotonPipelineResult( - 2, - List.of( - new PhotonTrackedTarget( - 3.0, - -4.0, - 9.0, - 4.0, - 2, - new Transform3d(new Translation3d(1, 2, 3), new Rotation3d(1, 2, 3)), - new Transform3d(new Translation3d(1, 2, 3), new 
Rotation3d(1, 2, 3)), - 0.25, - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8)), - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8))), - new PhotonTrackedTarget( - 3.0, - -4.0, - 9.1, - 6.7, - 3, - new Transform3d(new Translation3d(4, 2, 3), new Rotation3d(1, 5, 3)), - new Transform3d(new Translation3d(4, 2, 3), new Rotation3d(1, 5, 3)), - 0.25, - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8)), - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8)))), - new MultiTargetPNPResults( - new PNPResults( - new Transform3d(new Translation3d(1, 2, 3), new Rotation3d(1, 2, 3)), 0.1), - List.of(1, 2, 3))); + @Test + public void testMultiTargetSerde() { + var result = + new PhotonPipelineResult( + 2, + List.of( + new PhotonTrackedTarget( + 3.0, + -4.0, + 9.0, + 4.0, + 2, + new Transform3d(new Translation3d(1, 2, 3), new Rotation3d(1, 2, 3)), + new Transform3d(new Translation3d(1, 2, 3), new Rotation3d(1, 2, 3)), + 0.25, + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8)), + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8))), + new PhotonTrackedTarget( + 3.0, + -4.0, + 9.1, + 6.7, + 3, + new Transform3d(new Translation3d(4, 2, 3), new Rotation3d(1, 5, 3)), + new Transform3d(new Translation3d(4, 2, 3), new Rotation3d(1, 5, 3)), + 0.25, + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8)), + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8)))), + new MultiTargetPNPResults( + new PNPResults( + new Transform3d(new Translation3d(1, 2, 3), new Rotation3d(1, 2, 3)), 0.1), + List.of(1, 2, 
3))); - Packet packet = new Packet(result.getPacketSize()); - result.populatePacket(packet); + Packet packet = new Packet(result.getPacketSize()); + result.populatePacket(packet); - var result_deserialized = new PhotonPipelineResult(); - result_deserialized.createFromPacket(packet); + var result_deserialized = new PhotonPipelineResult(); + result_deserialized.createFromPacket(packet); - Assertions.assertEquals(result, result_deserialized); - } + Assertions.assertEquals(result, result_deserialized); + } } diff --git a/photon-lib/src/test/java/org/photonvision/PhotonCameraTest.java b/photon-lib/src/test/java/org/photonvision/PhotonCameraTest.java index 52533b6c74..8a3fb5fad8 100644 --- a/photon-lib/src/test/java/org/photonvision/PhotonCameraTest.java +++ b/photon-lib/src/test/java/org/photonvision/PhotonCameraTest.java @@ -30,17 +30,17 @@ import org.photonvision.targeting.PhotonPipelineResult; class PhotonCameraTest { - @Test - public void testEmpty() { - Assertions.assertDoesNotThrow( - () -> { - var packet = new Packet(1); - var ret = new PhotonPipelineResult(); - packet.setData(new byte[0]); - if (packet.getSize() < 1) { - return; - } - ret.createFromPacket(packet); - }); - } + @Test + public void testEmpty() { + Assertions.assertDoesNotThrow( + () -> { + var packet = new Packet(1); + var ret = new PhotonPipelineResult(); + packet.setData(new byte[0]); + if (packet.getSize() < 1) { + return; + } + ret.createFromPacket(packet); + }); + } } diff --git a/photon-lib/src/test/java/org/photonvision/PhotonPoseEstimatorTest.java b/photon-lib/src/test/java/org/photonvision/PhotonPoseEstimatorTest.java index 0f3ab1fc80..1b386cc747 100644 --- a/photon-lib/src/test/java/org/photonvision/PhotonPoseEstimatorTest.java +++ b/photon-lib/src/test/java/org/photonvision/PhotonPoseEstimatorTest.java @@ -52,583 +52,583 @@ import org.photonvision.targeting.TargetCorner; class PhotonPoseEstimatorTest { - static AprilTagFieldLayout aprilTags; - - @BeforeAll - public static void init() { - 
JNIWrapper.Helper.setExtractOnStaticLoad(false); - WPIUtilJNI.Helper.setExtractOnStaticLoad(false); - NetworkTablesJNI.Helper.setExtractOnStaticLoad(false); - WPINetJNI.Helper.setExtractOnStaticLoad(false); - - try { - CombinedRuntimeLoader.loadLibraries( - PhotonPoseEstimatorTest.class, "wpiutiljni", "ntcorejni", "wpinetjni", "wpiHaljni"); - } catch (IOException e) { - // TODO Auto-generated catch block - e.printStackTrace(); + static AprilTagFieldLayout aprilTags; + + @BeforeAll + public static void init() { + JNIWrapper.Helper.setExtractOnStaticLoad(false); + WPIUtilJNI.Helper.setExtractOnStaticLoad(false); + NetworkTablesJNI.Helper.setExtractOnStaticLoad(false); + WPINetJNI.Helper.setExtractOnStaticLoad(false); + + try { + CombinedRuntimeLoader.loadLibraries( + PhotonPoseEstimatorTest.class, "wpiutiljni", "ntcorejni", "wpinetjni", "wpiHaljni"); + } catch (IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + + List tagList = new ArrayList(2); + tagList.add(new AprilTag(0, new Pose3d(3, 3, 3, new Rotation3d()))); + tagList.add(new AprilTag(1, new Pose3d(5, 5, 5, new Rotation3d()))); + double fieldLength = Units.feetToMeters(54.0); + double fieldWidth = Units.feetToMeters(27.0); + aprilTags = new AprilTagFieldLayout(tagList, fieldLength, fieldWidth); } - List tagList = new ArrayList(2); - tagList.add(new AprilTag(0, new Pose3d(3, 3, 3, new Rotation3d()))); - tagList.add(new AprilTag(1, new Pose3d(5, 5, 5, new Rotation3d()))); - double fieldLength = Units.feetToMeters(54.0); - double fieldWidth = Units.feetToMeters(27.0); - aprilTags = new AprilTagFieldLayout(tagList, fieldLength, fieldWidth); - } - - @Test - void testLowestAmbiguityStrategy() { - PhotonCameraInjector cameraOne = new PhotonCameraInjector(); - cameraOne.result = - new PhotonPipelineResult( - 2, - List.of( - new PhotonTrackedTarget( - 3.0, - -4.0, - 9.0, - 4.0, - 0, - new Transform3d(new Translation3d(1, 2, 3), new Rotation3d(1, 2, 3)), - new Transform3d(new 
Translation3d(1, 2, 3), new Rotation3d(1, 2, 3)), - 0.7, - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8)), - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8))), - new PhotonTrackedTarget( - 3.0, - -4.0, - 9.1, - 6.7, - 1, - new Transform3d(new Translation3d(4, 2, 3), new Rotation3d(0, 0, 0)), - new Transform3d(new Translation3d(4, 2, 3), new Rotation3d(1, 5, 3)), - 0.3, - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8)), - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8))), - new PhotonTrackedTarget( - 9.0, - -2.0, - 19.0, - 3.0, - 0, - new Transform3d(new Translation3d(1, 2, 3), new Rotation3d(1, 2, 3)), - new Transform3d(new Translation3d(1, 2, 3), new Rotation3d(1, 2, 3)), - 0.4, - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8)), - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8))))); - cameraOne.result.setTimestampSeconds(11); - - PhotonPoseEstimator estimator = - new PhotonPoseEstimator( - aprilTags, PoseStrategy.LOWEST_AMBIGUITY, cameraOne, new Transform3d()); - - Optional estimatedPose = estimator.update(); - Pose3d pose = estimatedPose.get().estimatedPose; - - assertEquals(11, estimatedPose.get().timestampSeconds); - assertEquals(1, pose.getX(), .01); - assertEquals(3, pose.getY(), .01); - assertEquals(2, pose.getZ(), .01); - } - - @Test - void testClosestToCameraHeightStrategy() { - PhotonCameraInjector cameraOne = new PhotonCameraInjector(); - cameraOne.result = - new PhotonPipelineResult( - 2, - List.of( - new PhotonTrackedTarget( - 3.0, - -4.0, - 9.0, - 4.0, - 1, - new Transform3d(new Translation3d(0, 0, 0), new Rotation3d()), - new Transform3d(new Translation3d(1, 1, 1), new 
Rotation3d()), - 0.7, - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8)), - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8))), - new PhotonTrackedTarget( - 3.0, - -4.0, - 9.1, - 6.7, - 1, - new Transform3d(new Translation3d(2, 2, 2), new Rotation3d()), - new Transform3d(new Translation3d(3, 3, 3), new Rotation3d()), - 0.3, - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8)), - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8))), - new PhotonTrackedTarget( - 9.0, - -2.0, - 19.0, - 3.0, - 0, - new Transform3d(new Translation3d(4, 4, 4), new Rotation3d()), - new Transform3d(new Translation3d(5, 5, 5), new Rotation3d()), - 0.4, - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8)), - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8))))); - - cameraOne.result.setTimestampSeconds(4); - - PhotonPoseEstimator estimator = - new PhotonPoseEstimator( - aprilTags, - PoseStrategy.CLOSEST_TO_CAMERA_HEIGHT, - cameraOne, - new Transform3d(new Translation3d(0, 0, 4), new Rotation3d())); - - Optional estimatedPose = estimator.update(); - Pose3d pose = estimatedPose.get().estimatedPose; - - assertEquals(4, estimatedPose.get().timestampSeconds); - assertEquals(4, pose.getX(), .01); - assertEquals(4, pose.getY(), .01); - assertEquals(0, pose.getZ(), .01); - } - - @Test - void closestToReferencePoseStrategy() { - PhotonCameraInjector cameraOne = new PhotonCameraInjector(); - cameraOne.result = - new PhotonPipelineResult( - 2, - List.of( - new PhotonTrackedTarget( - 3.0, - -4.0, - 9.0, - 4.0, - 1, - new Transform3d(new Translation3d(0, 0, 0), new Rotation3d()), - new Transform3d(new Translation3d(1, 1, 1), new 
Rotation3d()), - 0.7, - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8)), - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8))), - new PhotonTrackedTarget( - 3.0, - -4.0, - 9.1, - 6.7, - 1, - new Transform3d(new Translation3d(2, 2, 2), new Rotation3d()), - new Transform3d(new Translation3d(3, 3, 3), new Rotation3d()), - 0.3, - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8)), - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8))), - new PhotonTrackedTarget( - 9.0, - -2.0, - 19.0, - 3.0, - 0, - new Transform3d(new Translation3d(2.2, 2.2, 2.2), new Rotation3d()), - new Transform3d(new Translation3d(2, 1.9, 2.1), new Rotation3d()), - 0.4, - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8)), - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8))))); - cameraOne.result.setTimestampSeconds(17); - - PhotonPoseEstimator estimator = - new PhotonPoseEstimator( - aprilTags, - PoseStrategy.CLOSEST_TO_REFERENCE_POSE, - cameraOne, - new Transform3d(new Translation3d(0, 0, 0), new Rotation3d())); - estimator.setReferencePose(new Pose3d(1, 1, 1, new Rotation3d())); - - Optional estimatedPose = estimator.update(); - Pose3d pose = estimatedPose.get().estimatedPose; - - assertEquals(17, estimatedPose.get().timestampSeconds); - assertEquals(1, pose.getX(), .01); - assertEquals(1.1, pose.getY(), .01); - assertEquals(.9, pose.getZ(), .01); - } - - @Test - void closestToLastPose() { - PhotonCameraInjector cameraOne = new PhotonCameraInjector(); - cameraOne.result = - new PhotonPipelineResult( - 2, - List.of( - new PhotonTrackedTarget( - 3.0, - -4.0, - 9.0, - 4.0, - 1, - new Transform3d(new Translation3d(0, 0, 0), new 
Rotation3d()), - new Transform3d(new Translation3d(1, 1, 1), new Rotation3d()), - 0.7, - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8)), - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8))), - new PhotonTrackedTarget( - 3.0, - -4.0, - 9.1, - 6.7, - 1, - new Transform3d(new Translation3d(2, 2, 2), new Rotation3d()), - new Transform3d(new Translation3d(3, 3, 3), new Rotation3d()), - 0.3, - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8)), - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8))), - new PhotonTrackedTarget( - 9.0, - -2.0, - 19.0, - 3.0, - 0, - new Transform3d(new Translation3d(2.2, 2.2, 2.2), new Rotation3d()), - new Transform3d(new Translation3d(2, 1.9, 2.1), new Rotation3d()), - 0.4, - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8)), - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8))))); - cameraOne.result.setTimestampSeconds(1); - - PhotonPoseEstimator estimator = - new PhotonPoseEstimator( - aprilTags, - PoseStrategy.CLOSEST_TO_LAST_POSE, - cameraOne, - new Transform3d(new Translation3d(0, 0, 0), new Rotation3d())); - - estimator.setLastPose(new Pose3d(1, 1, 1, new Rotation3d())); - - Optional estimatedPose = estimator.update(); - Pose3d pose = estimatedPose.get().estimatedPose; - - cameraOne.result = - new PhotonPipelineResult( - 2, - List.of( - new PhotonTrackedTarget( - 3.0, - -4.0, - 9.0, - 4.0, - 1, - new Transform3d(new Translation3d(0, 0, 0), new Rotation3d()), - new Transform3d(new Translation3d(1, 1, 1), new Rotation3d()), - 0.7, - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8)), - List.of( - new 
TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8))), - new PhotonTrackedTarget( - 3.0, - -4.0, - 9.1, - 6.7, - 0, - new Transform3d(new Translation3d(2.1, 1.9, 2), new Rotation3d()), - new Transform3d(new Translation3d(3, 3, 3), new Rotation3d()), - 0.3, - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8)), - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8))), - new PhotonTrackedTarget( - 9.0, - -2.0, - 19.0, - 3.0, - 0, - new Transform3d(new Translation3d(2.4, 2.4, 2.2), new Rotation3d()), - new Transform3d(new Translation3d(2, 1, 2), new Rotation3d()), - 0.4, - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8)), - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8))))); - cameraOne.result.setTimestampSeconds(7); - - estimatedPose = estimator.update(); - pose = estimatedPose.get().estimatedPose; - - assertEquals(7, estimatedPose.get().timestampSeconds); - assertEquals(.9, pose.getX(), .01); - assertEquals(1.1, pose.getY(), .01); - assertEquals(1, pose.getZ(), .01); - } - - @Test - void cacheIsInvalidated() { - PhotonCameraInjector cameraOne = new PhotonCameraInjector(); - var result = - new PhotonPipelineResult( - 2, - List.of( - new PhotonTrackedTarget( - 3.0, - -4.0, - 9.0, - 4.0, - 0, - new Transform3d(new Translation3d(2, 2, 2), new Rotation3d()), - new Transform3d(new Translation3d(1, 1, 1), new Rotation3d()), - 0.7, - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8)), - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8))))); - result.setTimestampSeconds(20); - - PhotonPoseEstimator estimator = - new PhotonPoseEstimator( - aprilTags, - 
PoseStrategy.AVERAGE_BEST_TARGETS, - cameraOne, - new Transform3d(new Translation3d(0, 0, 0), new Rotation3d())); - - // Empty result, expect empty result - cameraOne.result = new PhotonPipelineResult(); - cameraOne.result.setTimestampSeconds(1); - Optional estimatedPose = estimator.update(); - assertFalse(estimatedPose.isPresent()); - - // Set actual result - cameraOne.result = result; - estimatedPose = estimator.update(); - assertTrue(estimatedPose.isPresent()); - assertEquals(20, estimatedPose.get().timestampSeconds, .01); - assertEquals(20, estimator.poseCacheTimestampSeconds); - - // And again -- pose cache should mean this is empty - cameraOne.result = result; - estimatedPose = estimator.update(); - assertFalse(estimatedPose.isPresent()); - // Expect the old timestamp to still be here - assertEquals(20, estimator.poseCacheTimestampSeconds); - - // Set new field layout -- right after, the pose cache timestamp should be -1 - estimator.setFieldTags(new AprilTagFieldLayout(List.of(new AprilTag(0, new Pose3d())), 0, 0)); - assertEquals(-1, estimator.poseCacheTimestampSeconds); - // Update should cache the current timestamp (20) again - cameraOne.result = result; - estimatedPose = estimator.update(); - assertEquals(20, estimatedPose.get().timestampSeconds, .01); - assertEquals(20, estimator.poseCacheTimestampSeconds); - } - - @Test - void averageBestPoses() { - PhotonCameraInjector cameraOne = new PhotonCameraInjector(); - cameraOne.result = - new PhotonPipelineResult( - 2, - List.of( - new PhotonTrackedTarget( - 3.0, - -4.0, - 9.0, - 4.0, - 0, - new Transform3d(new Translation3d(2, 2, 2), new Rotation3d()), - new Transform3d(new Translation3d(1, 1, 1), new Rotation3d()), - 0.7, - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8)), - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8))), // 1 1 1 ambig: .7 - new PhotonTrackedTarget( - 3.0, - 
-4.0, - 9.1, - 6.7, - 1, - new Transform3d(new Translation3d(3, 3, 3), new Rotation3d()), - new Transform3d(new Translation3d(3, 3, 3), new Rotation3d()), - 0.3, - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8)), - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8))), // 2 2 2 ambig .3 - new PhotonTrackedTarget( - 9.0, - -2.0, - 19.0, - 3.0, - 0, - new Transform3d(new Translation3d(0, 0, 0), new Rotation3d()), - new Transform3d(new Translation3d(2, 1.9, 2.1), new Rotation3d()), - 0.4, - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8)), - List.of( - new TargetCorner(1, 2), - new TargetCorner(3, 4), - new TargetCorner(5, 6), - new TargetCorner(7, 8))))); // 3 3 3 ambig .4 - cameraOne.result.setTimestampSeconds(20); - - PhotonPoseEstimator estimator = - new PhotonPoseEstimator( - aprilTags, - PoseStrategy.AVERAGE_BEST_TARGETS, - cameraOne, - new Transform3d(new Translation3d(0, 0, 0), new Rotation3d())); - - Optional estimatedPose = estimator.update(); - Pose3d pose = estimatedPose.get().estimatedPose; - - assertEquals(20, estimatedPose.get().timestampSeconds, .01); - assertEquals(2.15, pose.getX(), .01); - assertEquals(2.15, pose.getY(), .01); - assertEquals(2.15, pose.getZ(), .01); - } - - private class PhotonCameraInjector extends PhotonCamera { - public PhotonCameraInjector() { - super("Test"); + @Test + void testLowestAmbiguityStrategy() { + PhotonCameraInjector cameraOne = new PhotonCameraInjector(); + cameraOne.result = + new PhotonPipelineResult( + 2, + List.of( + new PhotonTrackedTarget( + 3.0, + -4.0, + 9.0, + 4.0, + 0, + new Transform3d(new Translation3d(1, 2, 3), new Rotation3d(1, 2, 3)), + new Transform3d(new Translation3d(1, 2, 3), new Rotation3d(1, 2, 3)), + 0.7, + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new 
TargetCorner(7, 8)), + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8))), + new PhotonTrackedTarget( + 3.0, + -4.0, + 9.1, + 6.7, + 1, + new Transform3d(new Translation3d(4, 2, 3), new Rotation3d(0, 0, 0)), + new Transform3d(new Translation3d(4, 2, 3), new Rotation3d(1, 5, 3)), + 0.3, + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8)), + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8))), + new PhotonTrackedTarget( + 9.0, + -2.0, + 19.0, + 3.0, + 0, + new Transform3d(new Translation3d(1, 2, 3), new Rotation3d(1, 2, 3)), + new Transform3d(new Translation3d(1, 2, 3), new Rotation3d(1, 2, 3)), + 0.4, + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8)), + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8))))); + cameraOne.result.setTimestampSeconds(11); + + PhotonPoseEstimator estimator = + new PhotonPoseEstimator( + aprilTags, PoseStrategy.LOWEST_AMBIGUITY, cameraOne, new Transform3d()); + + Optional estimatedPose = estimator.update(); + Pose3d pose = estimatedPose.get().estimatedPose; + + assertEquals(11, estimatedPose.get().timestampSeconds); + assertEquals(1, pose.getX(), .01); + assertEquals(3, pose.getY(), .01); + assertEquals(2, pose.getZ(), .01); } - PhotonPipelineResult result; + @Test + void testClosestToCameraHeightStrategy() { + PhotonCameraInjector cameraOne = new PhotonCameraInjector(); + cameraOne.result = + new PhotonPipelineResult( + 2, + List.of( + new PhotonTrackedTarget( + 3.0, + -4.0, + 9.0, + 4.0, + 1, + new Transform3d(new Translation3d(0, 0, 0), new Rotation3d()), + new Transform3d(new Translation3d(1, 1, 1), new Rotation3d()), + 0.7, + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new 
TargetCorner(7, 8)), + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8))), + new PhotonTrackedTarget( + 3.0, + -4.0, + 9.1, + 6.7, + 1, + new Transform3d(new Translation3d(2, 2, 2), new Rotation3d()), + new Transform3d(new Translation3d(3, 3, 3), new Rotation3d()), + 0.3, + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8)), + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8))), + new PhotonTrackedTarget( + 9.0, + -2.0, + 19.0, + 3.0, + 0, + new Transform3d(new Translation3d(4, 4, 4), new Rotation3d()), + new Transform3d(new Translation3d(5, 5, 5), new Rotation3d()), + 0.4, + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8)), + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8))))); + + cameraOne.result.setTimestampSeconds(4); + + PhotonPoseEstimator estimator = + new PhotonPoseEstimator( + aprilTags, + PoseStrategy.CLOSEST_TO_CAMERA_HEIGHT, + cameraOne, + new Transform3d(new Translation3d(0, 0, 4), new Rotation3d())); + + Optional estimatedPose = estimator.update(); + Pose3d pose = estimatedPose.get().estimatedPose; + + assertEquals(4, estimatedPose.get().timestampSeconds); + assertEquals(4, pose.getX(), .01); + assertEquals(4, pose.getY(), .01); + assertEquals(0, pose.getZ(), .01); + } + + @Test + void closestToReferencePoseStrategy() { + PhotonCameraInjector cameraOne = new PhotonCameraInjector(); + cameraOne.result = + new PhotonPipelineResult( + 2, + List.of( + new PhotonTrackedTarget( + 3.0, + -4.0, + 9.0, + 4.0, + 1, + new Transform3d(new Translation3d(0, 0, 0), new Rotation3d()), + new Transform3d(new Translation3d(1, 1, 1), new Rotation3d()), + 0.7, + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new 
TargetCorner(7, 8)), + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8))), + new PhotonTrackedTarget( + 3.0, + -4.0, + 9.1, + 6.7, + 1, + new Transform3d(new Translation3d(2, 2, 2), new Rotation3d()), + new Transform3d(new Translation3d(3, 3, 3), new Rotation3d()), + 0.3, + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8)), + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8))), + new PhotonTrackedTarget( + 9.0, + -2.0, + 19.0, + 3.0, + 0, + new Transform3d(new Translation3d(2.2, 2.2, 2.2), new Rotation3d()), + new Transform3d(new Translation3d(2, 1.9, 2.1), new Rotation3d()), + 0.4, + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8)), + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8))))); + cameraOne.result.setTimestampSeconds(17); + + PhotonPoseEstimator estimator = + new PhotonPoseEstimator( + aprilTags, + PoseStrategy.CLOSEST_TO_REFERENCE_POSE, + cameraOne, + new Transform3d(new Translation3d(0, 0, 0), new Rotation3d())); + estimator.setReferencePose(new Pose3d(1, 1, 1, new Rotation3d())); + + Optional estimatedPose = estimator.update(); + Pose3d pose = estimatedPose.get().estimatedPose; + + assertEquals(17, estimatedPose.get().timestampSeconds); + assertEquals(1, pose.getX(), .01); + assertEquals(1.1, pose.getY(), .01); + assertEquals(.9, pose.getZ(), .01); + } + + @Test + void closestToLastPose() { + PhotonCameraInjector cameraOne = new PhotonCameraInjector(); + cameraOne.result = + new PhotonPipelineResult( + 2, + List.of( + new PhotonTrackedTarget( + 3.0, + -4.0, + 9.0, + 4.0, + 1, + new Transform3d(new Translation3d(0, 0, 0), new Rotation3d()), + new Transform3d(new Translation3d(1, 1, 1), new Rotation3d()), + 0.7, + List.of( + new TargetCorner(1, 
2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8)), + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8))), + new PhotonTrackedTarget( + 3.0, + -4.0, + 9.1, + 6.7, + 1, + new Transform3d(new Translation3d(2, 2, 2), new Rotation3d()), + new Transform3d(new Translation3d(3, 3, 3), new Rotation3d()), + 0.3, + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8)), + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8))), + new PhotonTrackedTarget( + 9.0, + -2.0, + 19.0, + 3.0, + 0, + new Transform3d(new Translation3d(2.2, 2.2, 2.2), new Rotation3d()), + new Transform3d(new Translation3d(2, 1.9, 2.1), new Rotation3d()), + 0.4, + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8)), + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8))))); + cameraOne.result.setTimestampSeconds(1); + + PhotonPoseEstimator estimator = + new PhotonPoseEstimator( + aprilTags, + PoseStrategy.CLOSEST_TO_LAST_POSE, + cameraOne, + new Transform3d(new Translation3d(0, 0, 0), new Rotation3d())); + + estimator.setLastPose(new Pose3d(1, 1, 1, new Rotation3d())); + + Optional estimatedPose = estimator.update(); + Pose3d pose = estimatedPose.get().estimatedPose; + + cameraOne.result = + new PhotonPipelineResult( + 2, + List.of( + new PhotonTrackedTarget( + 3.0, + -4.0, + 9.0, + 4.0, + 1, + new Transform3d(new Translation3d(0, 0, 0), new Rotation3d()), + new Transform3d(new Translation3d(1, 1, 1), new Rotation3d()), + 0.7, + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8)), + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8))), + new 
PhotonTrackedTarget( + 3.0, + -4.0, + 9.1, + 6.7, + 0, + new Transform3d(new Translation3d(2.1, 1.9, 2), new Rotation3d()), + new Transform3d(new Translation3d(3, 3, 3), new Rotation3d()), + 0.3, + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8)), + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8))), + new PhotonTrackedTarget( + 9.0, + -2.0, + 19.0, + 3.0, + 0, + new Transform3d(new Translation3d(2.4, 2.4, 2.2), new Rotation3d()), + new Transform3d(new Translation3d(2, 1, 2), new Rotation3d()), + 0.4, + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8)), + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8))))); + cameraOne.result.setTimestampSeconds(7); + + estimatedPose = estimator.update(); + pose = estimatedPose.get().estimatedPose; + + assertEquals(7, estimatedPose.get().timestampSeconds); + assertEquals(.9, pose.getX(), .01); + assertEquals(1.1, pose.getY(), .01); + assertEquals(1, pose.getZ(), .01); + } + + @Test + void cacheIsInvalidated() { + PhotonCameraInjector cameraOne = new PhotonCameraInjector(); + var result = + new PhotonPipelineResult( + 2, + List.of( + new PhotonTrackedTarget( + 3.0, + -4.0, + 9.0, + 4.0, + 0, + new Transform3d(new Translation3d(2, 2, 2), new Rotation3d()), + new Transform3d(new Translation3d(1, 1, 1), new Rotation3d()), + 0.7, + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8)), + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8))))); + result.setTimestampSeconds(20); + + PhotonPoseEstimator estimator = + new PhotonPoseEstimator( + aprilTags, + PoseStrategy.AVERAGE_BEST_TARGETS, + cameraOne, + new Transform3d(new Translation3d(0, 0, 0), new Rotation3d())); + + 
// Empty result, expect empty result + cameraOne.result = new PhotonPipelineResult(); + cameraOne.result.setTimestampSeconds(1); + Optional estimatedPose = estimator.update(); + assertFalse(estimatedPose.isPresent()); + + // Set actual result + cameraOne.result = result; + estimatedPose = estimator.update(); + assertTrue(estimatedPose.isPresent()); + assertEquals(20, estimatedPose.get().timestampSeconds, .01); + assertEquals(20, estimator.poseCacheTimestampSeconds); + + // And again -- pose cache should mean this is empty + cameraOne.result = result; + estimatedPose = estimator.update(); + assertFalse(estimatedPose.isPresent()); + // Expect the old timestamp to still be here + assertEquals(20, estimator.poseCacheTimestampSeconds); + + // Set new field layout -- right after, the pose cache timestamp should be -1 + estimator.setFieldTags(new AprilTagFieldLayout(List.of(new AprilTag(0, new Pose3d())), 0, 0)); + assertEquals(-1, estimator.poseCacheTimestampSeconds); + // Update should cache the current timestamp (20) again + cameraOne.result = result; + estimatedPose = estimator.update(); + assertEquals(20, estimatedPose.get().timestampSeconds, .01); + assertEquals(20, estimator.poseCacheTimestampSeconds); + } + + @Test + void averageBestPoses() { + PhotonCameraInjector cameraOne = new PhotonCameraInjector(); + cameraOne.result = + new PhotonPipelineResult( + 2, + List.of( + new PhotonTrackedTarget( + 3.0, + -4.0, + 9.0, + 4.0, + 0, + new Transform3d(new Translation3d(2, 2, 2), new Rotation3d()), + new Transform3d(new Translation3d(1, 1, 1), new Rotation3d()), + 0.7, + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8)), + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8))), // 1 1 1 ambig: .7 + new PhotonTrackedTarget( + 3.0, + -4.0, + 9.1, + 6.7, + 1, + new Transform3d(new Translation3d(3, 3, 3), new Rotation3d()), + new Transform3d(new 
Translation3d(3, 3, 3), new Rotation3d()), + 0.3, + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8)), + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8))), // 2 2 2 ambig .3 + new PhotonTrackedTarget( + 9.0, + -2.0, + 19.0, + 3.0, + 0, + new Transform3d(new Translation3d(0, 0, 0), new Rotation3d()), + new Transform3d(new Translation3d(2, 1.9, 2.1), new Rotation3d()), + 0.4, + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8)), + List.of( + new TargetCorner(1, 2), + new TargetCorner(3, 4), + new TargetCorner(5, 6), + new TargetCorner(7, 8))))); // 3 3 3 ambig .4 + cameraOne.result.setTimestampSeconds(20); + + PhotonPoseEstimator estimator = + new PhotonPoseEstimator( + aprilTags, + PoseStrategy.AVERAGE_BEST_TARGETS, + cameraOne, + new Transform3d(new Translation3d(0, 0, 0), new Rotation3d())); + + Optional estimatedPose = estimator.update(); + Pose3d pose = estimatedPose.get().estimatedPose; + + assertEquals(20, estimatedPose.get().timestampSeconds, .01); + assertEquals(2.15, pose.getX(), .01); + assertEquals(2.15, pose.getY(), .01); + assertEquals(2.15, pose.getZ(), .01); + } + + private class PhotonCameraInjector extends PhotonCamera { + public PhotonCameraInjector() { + super("Test"); + } + + PhotonPipelineResult result; - @Override - public PhotonPipelineResult getLatestResult() { - return result; + @Override + public PhotonPipelineResult getLatestResult() { + return result; + } } - } } diff --git a/photon-lib/src/test/java/org/photonvision/PhotonUtilTest.java b/photon-lib/src/test/java/org/photonvision/PhotonUtilTest.java index fa3ea2fa31..bbc70cf856 100644 --- a/photon-lib/src/test/java/org/photonvision/PhotonUtilTest.java +++ b/photon-lib/src/test/java/org/photonvision/PhotonUtilTest.java @@ -30,73 +30,73 @@ import org.junit.jupiter.api.Test; class PhotonUtilTest { - @Test 
- public void testDistance() { - var camHeight = 1; - var targetHeight = 3; - var camPitch = Units.degreesToRadians(0); - var targetPitch = Units.degreesToRadians(30); + @Test + public void testDistance() { + var camHeight = 1; + var targetHeight = 3; + var camPitch = Units.degreesToRadians(0); + var targetPitch = Units.degreesToRadians(30); - var dist = - PhotonUtils.calculateDistanceToTargetMeters(camHeight, targetHeight, camPitch, targetPitch); + var dist = + PhotonUtils.calculateDistanceToTargetMeters(camHeight, targetHeight, camPitch, targetPitch); - Assertions.assertEquals(3.464, dist, 0.01); + Assertions.assertEquals(3.464, dist, 0.01); - camHeight = 1; - targetHeight = 2; - camPitch = Units.degreesToRadians(20); - targetPitch = Units.degreesToRadians(-10); + camHeight = 1; + targetHeight = 2; + camPitch = Units.degreesToRadians(20); + targetPitch = Units.degreesToRadians(-10); - dist = - PhotonUtils.calculateDistanceToTargetMeters(camHeight, targetHeight, camPitch, targetPitch); - Assertions.assertEquals(5.671, dist, 0.01); - } + dist = + PhotonUtils.calculateDistanceToTargetMeters(camHeight, targetHeight, camPitch, targetPitch); + Assertions.assertEquals(5.671, dist, 0.01); + } - @Test - public void testTransform() { - var camHeight = 1; - var tgtHeight = 3; - var camPitch = 0; - var tgtPitch = Units.degreesToRadians(30); - var tgtYaw = new Rotation2d(); - var gyroAngle = new Rotation2d(); - var fieldToTarget = new Pose2d(); - var cameraToRobot = new Transform2d(); + @Test + public void testTransform() { + var camHeight = 1; + var tgtHeight = 3; + var camPitch = 0; + var tgtPitch = Units.degreesToRadians(30); + var tgtYaw = new Rotation2d(); + var gyroAngle = new Rotation2d(); + var fieldToTarget = new Pose2d(); + var cameraToRobot = new Transform2d(); - var fieldToRobot = - PhotonUtils.estimateFieldToRobot( - PhotonUtils.estimateCameraToTarget( - PhotonUtils.estimateCameraToTargetTranslation( - PhotonUtils.calculateDistanceToTargetMeters( - camHeight, 
tgtHeight, camPitch, tgtPitch), - tgtYaw), - fieldToTarget, - gyroAngle), - fieldToTarget, - cameraToRobot); + var fieldToRobot = + PhotonUtils.estimateFieldToRobot( + PhotonUtils.estimateCameraToTarget( + PhotonUtils.estimateCameraToTargetTranslation( + PhotonUtils.calculateDistanceToTargetMeters( + camHeight, tgtHeight, camPitch, tgtPitch), + tgtYaw), + fieldToTarget, + gyroAngle), + fieldToTarget, + cameraToRobot); - Assertions.assertEquals(-3.464, fieldToRobot.getX(), 0.1); - Assertions.assertEquals(0, fieldToRobot.getY(), 0.1); - Assertions.assertEquals(0, fieldToRobot.getRotation().getDegrees(), 0.1); - } + Assertions.assertEquals(-3.464, fieldToRobot.getX(), 0.1); + Assertions.assertEquals(0, fieldToRobot.getY(), 0.1); + Assertions.assertEquals(0, fieldToRobot.getRotation().getDegrees(), 0.1); + } - @Test - public void testAprilTagUtils() { - var cameraToTarget = new Transform3d(new Translation3d(1, 0, 0), new Rotation3d()); - var tagPose = new Pose3d(5, 0, 0, new Rotation3d()); - var cameraToRobot = new Transform3d(); + @Test + public void testAprilTagUtils() { + var cameraToTarget = new Transform3d(new Translation3d(1, 0, 0), new Rotation3d()); + var tagPose = new Pose3d(5, 0, 0, new Rotation3d()); + var cameraToRobot = new Transform3d(); - var fieldToRobot = - PhotonUtils.estimateFieldToRobotAprilTag(cameraToTarget, tagPose, cameraToRobot); + var fieldToRobot = + PhotonUtils.estimateFieldToRobotAprilTag(cameraToTarget, tagPose, cameraToRobot); - var targetPose = - new Pose2d( - new Translation2d(Units.inchesToMeters(324), Units.inchesToMeters(162)), - new Rotation2d()); - var currentPose = new Pose2d(0, 0, Rotation2d.fromDegrees(0)); - Assertions.assertEquals(4.0, fieldToRobot.getX()); - Assertions.assertEquals( - Math.toDegrees(Math.atan2((Units.inchesToMeters(162)), (Units.inchesToMeters(324)))), - PhotonUtils.getYawToPose(currentPose, targetPose).getDegrees()); - } + var targetPose = + new Pose2d( + new Translation2d(Units.inchesToMeters(324), 
Units.inchesToMeters(162)), + new Rotation2d()); + var currentPose = new Pose2d(0, 0, Rotation2d.fromDegrees(0)); + Assertions.assertEquals(4.0, fieldToRobot.getX()); + Assertions.assertEquals( + Math.toDegrees(Math.atan2((Units.inchesToMeters(162)), (Units.inchesToMeters(324)))), + PhotonUtils.getYawToPose(currentPose, targetPose).getDegrees()); + } } diff --git a/photon-lib/src/test/java/org/photonvision/PhotonVersionTest.java b/photon-lib/src/test/java/org/photonvision/PhotonVersionTest.java index d62f528781..6dc5a5854b 100644 --- a/photon-lib/src/test/java/org/photonvision/PhotonVersionTest.java +++ b/photon-lib/src/test/java/org/photonvision/PhotonVersionTest.java @@ -30,30 +30,30 @@ import org.junit.jupiter.api.Test; public class PhotonVersionTest { - public static final boolean versionMatches(String versionString, String other) { - String c = versionString; - Pattern p = Pattern.compile("v[0-9]+.[0-9]+.[0-9]+"); - Matcher m = p.matcher(c); - if (m.find()) { - c = m.group(0); - } else { - return false; + public static final boolean versionMatches(String versionString, String other) { + String c = versionString; + Pattern p = Pattern.compile("v[0-9]+.[0-9]+.[0-9]+"); + Matcher m = p.matcher(c); + if (m.find()) { + c = m.group(0); + } else { + return false; + } + m = p.matcher(other); + if (m.find()) { + other = m.group(0); + } else { + return false; + } + return c.equals(other); } - m = p.matcher(other); - if (m.find()) { - other = m.group(0); - } else { - return false; - } - return c.equals(other); - } - @Test - public void testVersion() { - Assertions.assertTrue(versionMatches("v2021.1.6", "v2021.1.6")); - Assertions.assertTrue(versionMatches("dev-v2021.1.6", "v2021.1.6")); - Assertions.assertTrue(versionMatches("dev-v2021.1.6-5-gca49ea50", "v2021.1.6")); - Assertions.assertFalse(versionMatches("", "v2021.1.6")); - Assertions.assertFalse(versionMatches("v2021.1.6", "")); - } + @Test + public void testVersion() { + 
Assertions.assertTrue(versionMatches("v2021.1.6", "v2021.1.6")); + Assertions.assertTrue(versionMatches("dev-v2021.1.6", "v2021.1.6")); + Assertions.assertTrue(versionMatches("dev-v2021.1.6-5-gca49ea50", "v2021.1.6")); + Assertions.assertFalse(versionMatches("", "v2021.1.6")); + Assertions.assertFalse(versionMatches("v2021.1.6", "")); + } } diff --git a/photon-lib/src/test/java/org/photonvision/VisionSystemSimTest.java b/photon-lib/src/test/java/org/photonvision/VisionSystemSimTest.java index 71ed7f7670..c53e006717 100644 --- a/photon-lib/src/test/java/org/photonvision/VisionSystemSimTest.java +++ b/photon-lib/src/test/java/org/photonvision/VisionSystemSimTest.java @@ -68,480 +68,480 @@ import org.photonvision.targeting.PhotonTrackedTarget; class VisionSystemSimTest { - private static final double kTrlDelta = 0.005; - private static final double kRotDeltaDeg = 0.25; - - @Test - public void testEmpty() { - Assertions.assertDoesNotThrow( - () -> { - var sysUnderTest = new VisionSystemSim("Test"); - sysUnderTest.addVisionTargets( - new VisionTargetSim(new Pose3d(), new TargetModel(1.0, 1.0))); - for (int loopIdx = 0; loopIdx < 100; loopIdx++) { - sysUnderTest.update(new Pose2d()); - } - }); - } - - @BeforeAll - public static void setUp() { - JNIWrapper.Helper.setExtractOnStaticLoad(false); - WPIUtilJNI.Helper.setExtractOnStaticLoad(false); - NetworkTablesJNI.Helper.setExtractOnStaticLoad(false); - WPINetJNI.Helper.setExtractOnStaticLoad(false); - CameraServerJNI.Helper.setExtractOnStaticLoad(false); - CameraServerCvJNI.Helper.setExtractOnStaticLoad(false); - AprilTagJNI.Helper.setExtractOnStaticLoad(false); - - try { - CombinedRuntimeLoader.loadLibraries( - VisionSystemSim.class, - "wpiutiljni", - "ntcorejni", - "wpinetjni", - "wpiHaljni", - "cscorejni", - "cscorejnicvstatic"); - - var loader = - new RuntimeLoader<>( - Core.NATIVE_LIBRARY_NAME, RuntimeLoader.getDefaultExtractionRoot(), Core.class); - loader.loadLibrary(); - } catch (Exception e) { - 
e.printStackTrace(); + private static final double kTrlDelta = 0.005; + private static final double kRotDeltaDeg = 0.25; + + @Test + public void testEmpty() { + Assertions.assertDoesNotThrow( + () -> { + var sysUnderTest = new VisionSystemSim("Test"); + sysUnderTest.addVisionTargets( + new VisionTargetSim(new Pose3d(), new TargetModel(1.0, 1.0))); + for (int loopIdx = 0; loopIdx < 100; loopIdx++) { + sysUnderTest.update(new Pose2d()); + } + }); } - // NT live for debug purposes - NetworkTableInstance.getDefault().startServer(); - - // No version check for testing - PhotonCamera.setVersionCheckEnabled(false); - } - - @AfterAll - public static void shutDown() {} - - // @ParameterizedTest - // @ValueSource(doubles = {5, 10, 15, 20, 25, 30}) - // public void testDistanceAligned(double dist) { - // final var targetPose = new Pose2d(new Translation2d(15.98, 0), new Rotation2d()); - // var sysUnderTest = - // new SimVisionSystem("Test", 80.0, 0.0, new Transform2d(), 1, 99999, 320, 240, 0); - // sysUnderTest.addSimVisionTarget(new SimVisionTarget(targetPose, 0.0, 1.0, 1.0)); - - // final var robotPose = new Pose2d(new Translation2d(35 - dist, 0), new Rotation2d()); - // sysUnderTest.processFrame(robotPose); - - // var result = sysUnderTest.cam.getLatestResult(); - - // assertTrue(result.hasTargets()); - // assertEquals(result.getBestTarget().getCameraToTarget().getTranslation().getNorm(), dist); - // } - - @Test - public void testVisibilityCupidShuffle() { - final var targetPose = - new Pose3d(new Translation3d(15.98, 0, 2), new Rotation3d(0, 0, Math.PI)); - var visionSysSim = new VisionSystemSim("Test"); - var camera = new PhotonCamera("camera"); - var cameraSim = new PhotonCameraSim(camera); - visionSysSim.addCamera(cameraSim, new Transform3d()); - cameraSim.prop.setCalibration(640, 480, Rotation2d.fromDegrees(80)); - visionSysSim.addVisionTargets(new VisionTargetSim(targetPose, new TargetModel(1.0, 1.0), 3)); - - // To the right, to the right - var robotPose = new 
Pose2d(new Translation2d(5, 0), Rotation2d.fromDegrees(-70)); - visionSysSim.update(robotPose); - assertFalse(camera.getLatestResult().hasTargets()); - - // To the right, to the right - robotPose = new Pose2d(new Translation2d(5, 0), Rotation2d.fromDegrees(-95)); - visionSysSim.update(robotPose); - assertFalse(camera.getLatestResult().hasTargets()); - - // To the left, to the left - robotPose = new Pose2d(new Translation2d(5, 0), Rotation2d.fromDegrees(90)); - visionSysSim.update(robotPose); - assertFalse(camera.getLatestResult().hasTargets()); - - // To the left, to the left - robotPose = new Pose2d(new Translation2d(5, 0), Rotation2d.fromDegrees(65)); - visionSysSim.update(robotPose); - assertFalse(camera.getLatestResult().hasTargets()); - - // now kick, now kick - robotPose = new Pose2d(new Translation2d(2, 0), Rotation2d.fromDegrees(5)); - visionSysSim.update(robotPose); - assertTrue(camera.getLatestResult().hasTargets()); - - // now kick, now kick - robotPose = new Pose2d(new Translation2d(2, 0), Rotation2d.fromDegrees(-5)); - visionSysSim.update(robotPose); - assertTrue(camera.getLatestResult().hasTargets()); - - // now walk it by yourself - robotPose = new Pose2d(new Translation2d(2, 0), Rotation2d.fromDegrees(-179)); - visionSysSim.update(robotPose); - assertFalse(camera.getLatestResult().hasTargets()); - - // now walk it by yourself - visionSysSim.adjustCamera( - cameraSim, new Transform3d(new Translation3d(), new Rotation3d(0, 0, Math.PI))); - visionSysSim.update(robotPose); - assertTrue(camera.getLatestResult().hasTargets()); - } - - @Test - public void testNotVisibleVert1() { - final var targetPose = - new Pose3d(new Translation3d(15.98, 0, 1), new Rotation3d(0, 0, Math.PI)); - var visionSysSim = new VisionSystemSim("Test"); - var camera = new PhotonCamera("camera"); - var cameraSim = new PhotonCameraSim(camera); - visionSysSim.addCamera(cameraSim, new Transform3d()); - cameraSim.prop.setCalibration(640, 480, Rotation2d.fromDegrees(80)); - 
visionSysSim.addVisionTargets(new VisionTargetSim(targetPose, new TargetModel(1.0, 3.0), 3)); - - var robotPose = new Pose2d(new Translation2d(5, 0), Rotation2d.fromDegrees(5)); - visionSysSim.update(robotPose); - assertTrue(camera.getLatestResult().hasTargets()); - - visionSysSim.adjustCamera( // vooop selfie stick - cameraSim, new Transform3d(new Translation3d(0, 0, 5000), new Rotation3d(0, 0, Math.PI))); - visionSysSim.update(robotPose); - assertFalse(camera.getLatestResult().hasTargets()); - } - - @Test - public void testNotVisibleVert2() { - final var targetPose = - new Pose3d(new Translation3d(15.98, 0, 2), new Rotation3d(0, 0, Math.PI)); - var robotToCamera = - new Transform3d(new Translation3d(0, 0, 1), new Rotation3d(0, -Math.PI / 4, 0)); - var visionSysSim = new VisionSystemSim("Test"); - var camera = new PhotonCamera("camera"); - var cameraSim = new PhotonCameraSim(camera); - visionSysSim.addCamera(cameraSim, robotToCamera); - cameraSim.prop.setCalibration(1234, 1234, Rotation2d.fromDegrees(80)); - visionSysSim.addVisionTargets(new VisionTargetSim(targetPose, new TargetModel(1.0, 0.5), 1736)); - - var robotPose = new Pose2d(new Translation2d(13.98, 0), Rotation2d.fromDegrees(5)); - visionSysSim.update(robotPose); - assertTrue(camera.getLatestResult().hasTargets()); - - // Pitched back camera should mean target goes out of view below the robot as distance increases - robotPose = new Pose2d(new Translation2d(0, 0), Rotation2d.fromDegrees(5)); - visionSysSim.update(robotPose); - assertFalse(camera.getLatestResult().hasTargets()); - } - - @Test - public void testNotVisibleTgtSize() { - final var targetPose = - new Pose3d(new Translation3d(15.98, 0, 1), new Rotation3d(0, 0, Math.PI)); - var visionSysSim = new VisionSystemSim("Test"); - var camera = new PhotonCamera("camera"); - var cameraSim = new PhotonCameraSim(camera); - visionSysSim.addCamera(cameraSim, new Transform3d()); - cameraSim.prop.setCalibration(640, 480, Rotation2d.fromDegrees(80)); - 
cameraSim.setMinTargetAreaPixels(20.0); - visionSysSim.addVisionTargets(new VisionTargetSim(targetPose, new TargetModel(0.1, 0.025), 24)); - - var robotPose = new Pose2d(new Translation2d(12, 0), Rotation2d.fromDegrees(5)); - visionSysSim.update(robotPose); - assertTrue(camera.getLatestResult().hasTargets()); - - robotPose = new Pose2d(new Translation2d(0, 0), Rotation2d.fromDegrees(5)); - visionSysSim.update(robotPose); - assertFalse(camera.getLatestResult().hasTargets()); - } - - @Test - public void testNotVisibleTooFarForLEDs() { - final var targetPose = - new Pose3d(new Translation3d(15.98, 0, 1), new Rotation3d(0, 0, Math.PI)); - var visionSysSim = new VisionSystemSim("Test"); - var camera = new PhotonCamera("camera"); - var cameraSim = new PhotonCameraSim(camera); - visionSysSim.addCamera(cameraSim, new Transform3d()); - cameraSim.prop.setCalibration(640, 480, Rotation2d.fromDegrees(80)); - cameraSim.setMaxSightRange(10); - cameraSim.setMinTargetAreaPixels(1.0); - visionSysSim.addVisionTargets(new VisionTargetSim(targetPose, new TargetModel(1.0, 0.25), 78)); - - var robotPose = new Pose2d(new Translation2d(10, 0), Rotation2d.fromDegrees(5)); - visionSysSim.update(robotPose); - assertTrue(camera.getLatestResult().hasTargets()); - - robotPose = new Pose2d(new Translation2d(0, 0), Rotation2d.fromDegrees(5)); - visionSysSim.update(robotPose); - assertFalse(camera.getLatestResult().hasTargets()); - } - - @ParameterizedTest - @ValueSource(doubles = {-10, -5, -0, -1, -2, 5, 7, 10.23}) - public void testYawAngles(double testYaw) { - final var targetPose = - new Pose3d(new Translation3d(15.98, 0, 1), new Rotation3d(0, 0, 3 * Math.PI / 4)); - var visionSysSim = new VisionSystemSim("Test"); - var camera = new PhotonCamera("camera"); - var cameraSim = new PhotonCameraSim(camera); - visionSysSim.addCamera(cameraSim, new Transform3d()); - cameraSim.prop.setCalibration(640, 480, Rotation2d.fromDegrees(80)); - cameraSim.setMinTargetAreaPixels(0.0); - 
visionSysSim.addVisionTargets(new VisionTargetSim(targetPose, new TargetModel(0.5, 0.5), 3)); - - var robotPose = new Pose2d(new Translation2d(10, 0), Rotation2d.fromDegrees(-1.0 * testYaw)); - visionSysSim.update(robotPose); - var res = camera.getLatestResult(); - assertTrue(res.hasTargets()); - var tgt = res.getBestTarget(); - assertEquals(testYaw, tgt.getYaw(), kRotDeltaDeg); - } - - @ParameterizedTest - @ValueSource(doubles = {-10, -5, -0, -1, -2, 5, 7, 10.23, 20.21, -19.999}) - public void testPitchAngles(double testPitch) { - final var targetPose = - new Pose3d(new Translation3d(15.98, 0, 0), new Rotation3d(0, 0, 3 * Math.PI / 4)); - final var robotPose = new Pose2d(new Translation2d(10, 0), new Rotation2d(0)); - var visionSysSim = new VisionSystemSim("Test"); - var camera = new PhotonCamera("camera"); - var cameraSim = new PhotonCameraSim(camera); - visionSysSim.addCamera(cameraSim, new Transform3d()); - cameraSim.prop.setCalibration(640, 480, Rotation2d.fromDegrees(120)); - cameraSim.setMinTargetAreaPixels(0.0); - visionSysSim.addVisionTargets(new VisionTargetSim(targetPose, new TargetModel(0.5, 0.5), 23)); - - // Transform is now robot -> camera - visionSysSim.adjustCamera( - cameraSim, - new Transform3d( - new Translation3d(), new Rotation3d(0, Units.degreesToRadians(testPitch), 0))); - visionSysSim.update(robotPose); - var res = camera.getLatestResult(); - assertTrue(res.hasTargets()); - var tgt = res.getBestTarget(); - - // Since the camera is level with the target, a positive-upward point will mean the target is in - // the - // lower half of the image - // which should produce negative pitch. 
- assertEquals(testPitch, tgt.getPitch(), kRotDeltaDeg); - } - - private static Stream testDistanceCalcArgs() { - // Arbitrary and fairly random assortment of distances, camera pitches, and heights - return Stream.of( - Arguments.of(5, -15.98, 0), - Arguments.of(6, -15.98, 1), - Arguments.of(10, -15.98, 0), - Arguments.of(15, -15.98, 2), - Arguments.of(19.95, -15.98, 0), - Arguments.of(20, -15.98, 0), - Arguments.of(5, -42, 1), - Arguments.of(6, -42, 0), - Arguments.of(10, -42, 2), - Arguments.of(15, -42, 0.5), - Arguments.of(19.42, -15.98, 0), - Arguments.of(20, -42, 0), - Arguments.of(5, -35, 2), - Arguments.of(6, -35, 0), - Arguments.of(10, -34, 2.4), - Arguments.of(15, -33, 0), - Arguments.of(19.52, -15.98, 1.1)); - } - - @ParameterizedTest - @MethodSource("testDistanceCalcArgs") - public void testDistanceCalc(double testDist, double testPitch, double testHeight) { - // Assume dist along ground and tgt height the same. Iterate over other parameters. - - final var targetPose = - new Pose3d(new Translation3d(15.98, 0, 1), new Rotation3d(0, 0, Math.PI * 0.98)); - final var robotPose = - new Pose3d(new Translation3d(15.98 - Units.feetToMeters(testDist), 0, 0), new Rotation3d()); - final var robotToCamera = - new Transform3d( - new Translation3d(0, 0, Units.feetToMeters(testHeight)), - new Rotation3d(0, Units.degreesToRadians(testPitch), 0)); - - var visionSysSim = - new VisionSystemSim( - "absurdlylongnamewhichshouldneveractuallyhappenbuteehwelltestitanywaysohowsyourdaygoingihopegoodhaveagreatrestofyourlife!"); - var camera = new PhotonCamera("camera"); - var cameraSim = new PhotonCameraSim(camera); - visionSysSim.addCamera(cameraSim, new Transform3d()); - cameraSim.prop.setCalibration(640, 480, Rotation2d.fromDegrees(160)); - cameraSim.setMinTargetAreaPixels(0.0); - visionSysSim.adjustCamera(cameraSim, robotToCamera); - visionSysSim.addVisionTargets(new VisionTargetSim(targetPose, new TargetModel(0.5, 0.5), 0)); - - visionSysSim.update(robotPose); - - // Note that 
target 2d yaw/pitch accuracy is hindered by two factors in photonvision: - // 1. These are calculated with the average of the minimum area rectangle, which does not - // actually find the target center because of perspective distortion. - // 2. Yaw and pitch are calculated separately which gives incorrect pitch values. - var res = camera.getLatestResult(); - assertTrue(res.hasTargets()); - var tgt = res.getBestTarget(); - assertEquals(0.0, tgt.getYaw(), 0.5); - - // Distance calculation using this trigonometry may be wildly incorrect when - // there is not much height difference between the target and the camera. - double distMeas = - PhotonUtils.calculateDistanceToTargetMeters( - robotToCamera.getZ(), - targetPose.getZ(), - Units.degreesToRadians(-testPitch), - Units.degreesToRadians(tgt.getPitch())); - assertEquals(Units.feetToMeters(testDist), distMeas, 0.15); - } - - @Test - public void testMultipleTargets() { - final var targetPoseL = - new Pose3d(new Translation3d(15.98, 2, 0), new Rotation3d(0, 0, Math.PI)); - final var targetPoseC = - new Pose3d(new Translation3d(15.98, 0, 0), new Rotation3d(0, 0, Math.PI)); - final var targetPoseR = - new Pose3d(new Translation3d(15.98, -2, 0), new Rotation3d(0, 0, Math.PI)); - - var visionSysSim = new VisionSystemSim("Test"); - var camera = new PhotonCamera("camera"); - var cameraSim = new PhotonCameraSim(camera); - visionSysSim.addCamera(cameraSim, new Transform3d()); - cameraSim.prop.setCalibration(640, 480, Rotation2d.fromDegrees(80)); - cameraSim.setMinTargetAreaPixels(20.0); - - visionSysSim.addVisionTargets( - new VisionTargetSim( - targetPoseL.transformBy( - new Transform3d(new Translation3d(0, 0, 0.00), new Rotation3d())), - TargetModel.kTag16h5, - 1)); - visionSysSim.addVisionTargets( - new VisionTargetSim( - targetPoseC.transformBy( - new Transform3d(new Translation3d(0, 0, 0.00), new Rotation3d())), - TargetModel.kTag16h5, - 2)); - visionSysSim.addVisionTargets( - new VisionTargetSim( - targetPoseR.transformBy( 
- new Transform3d(new Translation3d(0, 0, 0.00), new Rotation3d())), - TargetModel.kTag16h5, - 3)); - visionSysSim.addVisionTargets( - new VisionTargetSim( - targetPoseL.transformBy( - new Transform3d(new Translation3d(0, 0, 1.00), new Rotation3d())), - TargetModel.kTag16h5, - 4)); - visionSysSim.addVisionTargets( - new VisionTargetSim( - targetPoseC.transformBy( - new Transform3d(new Translation3d(0, 0, 1.00), new Rotation3d())), - TargetModel.kTag16h5, - 5)); - visionSysSim.addVisionTargets( - new VisionTargetSim( - targetPoseR.transformBy( - new Transform3d(new Translation3d(0, 0, 1.00), new Rotation3d())), - TargetModel.kTag16h5, - 6)); - visionSysSim.addVisionTargets( - new VisionTargetSim( - targetPoseL.transformBy( - new Transform3d(new Translation3d(0, 0, 0.50), new Rotation3d())), - TargetModel.kTag16h5, - 7)); - visionSysSim.addVisionTargets( - new VisionTargetSim( - targetPoseC.transformBy( - new Transform3d(new Translation3d(0, 0, 0.50), new Rotation3d())), - TargetModel.kTag16h5, - 8)); - visionSysSim.addVisionTargets( - new VisionTargetSim( - targetPoseL.transformBy( - new Transform3d(new Translation3d(0, 0, 0.75), new Rotation3d())), - TargetModel.kTag16h5, - 9)); - visionSysSim.addVisionTargets( - new VisionTargetSim( - targetPoseR.transformBy( - new Transform3d(new Translation3d(0, 0, 0.75), new Rotation3d())), - TargetModel.kTag16h5, - 10)); - visionSysSim.addVisionTargets( - new VisionTargetSim( - targetPoseL.transformBy( - new Transform3d(new Translation3d(0, 0, 0.25), new Rotation3d())), - TargetModel.kTag16h5, - 11)); - - var robotPose = new Pose2d(new Translation2d(6.0, 0), Rotation2d.fromDegrees(0.25)); - visionSysSim.update(robotPose); - var res = camera.getLatestResult(); - assertTrue(res.hasTargets()); - List tgtList; - tgtList = res.getTargets(); - assertEquals(11, tgtList.size()); - } - - @Test - public void testPoseEstimation() { - var visionSysSim = new VisionSystemSim("Test"); - var camera = new PhotonCamera("camera"); - var 
cameraSim = new PhotonCameraSim(camera); - visionSysSim.addCamera(cameraSim, new Transform3d()); - cameraSim.prop.setCalibration(640, 480, Rotation2d.fromDegrees(90)); - cameraSim.setMinTargetAreaPixels(20.0); - - List tagList = new ArrayList<>(); - tagList.add(new AprilTag(0, new Pose3d(12, 3, 1, new Rotation3d(0, 0, Math.PI)))); - tagList.add(new AprilTag(1, new Pose3d(12, 1, -1, new Rotation3d(0, 0, Math.PI)))); - tagList.add(new AprilTag(2, new Pose3d(11, 0, 2, new Rotation3d(0, 0, Math.PI)))); - double fieldLength = Units.feetToMeters(54.0); - double fieldWidth = Units.feetToMeters(27.0); - AprilTagFieldLayout layout = new AprilTagFieldLayout(tagList, fieldLength, fieldWidth); - Pose2d robotPose = new Pose2d(5, 1, Rotation2d.fromDegrees(5)); - - visionSysSim.addVisionTargets( - new VisionTargetSim(tagList.get(0).pose, TargetModel.kTag16h5, 0)); - - visionSysSim.update(robotPose); - var results = - VisionEstimation.estimateCamPosePNP( - camera.getCameraMatrix().get(), - camera.getDistCoeffs().get(), - camera.getLatestResult().getTargets(), - layout); - Pose3d pose = new Pose3d().plus(results.best); - assertEquals(5, pose.getX(), .01); - assertEquals(1, pose.getY(), .01); - assertEquals(0, pose.getZ(), .01); - assertEquals(Math.toRadians(5), pose.getRotation().getZ(), 0.01); - - visionSysSim.addVisionTargets( - new VisionTargetSim(tagList.get(1).pose, TargetModel.kTag16h5, 1)); - visionSysSim.addVisionTargets( - new VisionTargetSim(tagList.get(2).pose, TargetModel.kTag16h5, 2)); - - visionSysSim.update(robotPose); - results = - VisionEstimation.estimateCamPosePNP( - camera.getCameraMatrix().get(), - camera.getDistCoeffs().get(), - camera.getLatestResult().getTargets(), - layout); - pose = new Pose3d().plus(results.best); - assertEquals(5, pose.getX(), .01); - assertEquals(1, pose.getY(), .01); - assertEquals(0, pose.getZ(), .01); - assertEquals(Math.toRadians(5), pose.getRotation().getZ(), 0.01); - } + @BeforeAll + public static void setUp() { + 
JNIWrapper.Helper.setExtractOnStaticLoad(false); + WPIUtilJNI.Helper.setExtractOnStaticLoad(false); + NetworkTablesJNI.Helper.setExtractOnStaticLoad(false); + WPINetJNI.Helper.setExtractOnStaticLoad(false); + CameraServerJNI.Helper.setExtractOnStaticLoad(false); + CameraServerCvJNI.Helper.setExtractOnStaticLoad(false); + AprilTagJNI.Helper.setExtractOnStaticLoad(false); + + try { + CombinedRuntimeLoader.loadLibraries( + VisionSystemSim.class, + "wpiutiljni", + "ntcorejni", + "wpinetjni", + "wpiHaljni", + "cscorejni", + "cscorejnicvstatic"); + + var loader = + new RuntimeLoader<>( + Core.NATIVE_LIBRARY_NAME, RuntimeLoader.getDefaultExtractionRoot(), Core.class); + loader.loadLibrary(); + } catch (Exception e) { + e.printStackTrace(); + } + + // NT live for debug purposes + NetworkTableInstance.getDefault().startServer(); + + // No version check for testing + PhotonCamera.setVersionCheckEnabled(false); + } + + @AfterAll + public static void shutDown() {} + + // @ParameterizedTest + // @ValueSource(doubles = {5, 10, 15, 20, 25, 30}) + // public void testDistanceAligned(double dist) { + // final var targetPose = new Pose2d(new Translation2d(15.98, 0), new Rotation2d()); + // var sysUnderTest = + // new SimVisionSystem("Test", 80.0, 0.0, new Transform2d(), 1, 99999, 320, 240, 0); + // sysUnderTest.addSimVisionTarget(new SimVisionTarget(targetPose, 0.0, 1.0, 1.0)); + + // final var robotPose = new Pose2d(new Translation2d(35 - dist, 0), new Rotation2d()); + // sysUnderTest.processFrame(robotPose); + + // var result = sysUnderTest.cam.getLatestResult(); + + // assertTrue(result.hasTargets()); + // assertEquals(result.getBestTarget().getCameraToTarget().getTranslation().getNorm(), dist); + // } + + @Test + public void testVisibilityCupidShuffle() { + final var targetPose = + new Pose3d(new Translation3d(15.98, 0, 2), new Rotation3d(0, 0, Math.PI)); + var visionSysSim = new VisionSystemSim("Test"); + var camera = new PhotonCamera("camera"); + var cameraSim = new 
PhotonCameraSim(camera); + visionSysSim.addCamera(cameraSim, new Transform3d()); + cameraSim.prop.setCalibration(640, 480, Rotation2d.fromDegrees(80)); + visionSysSim.addVisionTargets(new VisionTargetSim(targetPose, new TargetModel(1.0, 1.0), 3)); + + // To the right, to the right + var robotPose = new Pose2d(new Translation2d(5, 0), Rotation2d.fromDegrees(-70)); + visionSysSim.update(robotPose); + assertFalse(camera.getLatestResult().hasTargets()); + + // To the right, to the right + robotPose = new Pose2d(new Translation2d(5, 0), Rotation2d.fromDegrees(-95)); + visionSysSim.update(robotPose); + assertFalse(camera.getLatestResult().hasTargets()); + + // To the left, to the left + robotPose = new Pose2d(new Translation2d(5, 0), Rotation2d.fromDegrees(90)); + visionSysSim.update(robotPose); + assertFalse(camera.getLatestResult().hasTargets()); + + // To the left, to the left + robotPose = new Pose2d(new Translation2d(5, 0), Rotation2d.fromDegrees(65)); + visionSysSim.update(robotPose); + assertFalse(camera.getLatestResult().hasTargets()); + + // now kick, now kick + robotPose = new Pose2d(new Translation2d(2, 0), Rotation2d.fromDegrees(5)); + visionSysSim.update(robotPose); + assertTrue(camera.getLatestResult().hasTargets()); + + // now kick, now kick + robotPose = new Pose2d(new Translation2d(2, 0), Rotation2d.fromDegrees(-5)); + visionSysSim.update(robotPose); + assertTrue(camera.getLatestResult().hasTargets()); + + // now walk it by yourself + robotPose = new Pose2d(new Translation2d(2, 0), Rotation2d.fromDegrees(-179)); + visionSysSim.update(robotPose); + assertFalse(camera.getLatestResult().hasTargets()); + + // now walk it by yourself + visionSysSim.adjustCamera( + cameraSim, new Transform3d(new Translation3d(), new Rotation3d(0, 0, Math.PI))); + visionSysSim.update(robotPose); + assertTrue(camera.getLatestResult().hasTargets()); + } + + @Test + public void testNotVisibleVert1() { + final var targetPose = + new Pose3d(new Translation3d(15.98, 0, 1), new 
Rotation3d(0, 0, Math.PI)); + var visionSysSim = new VisionSystemSim("Test"); + var camera = new PhotonCamera("camera"); + var cameraSim = new PhotonCameraSim(camera); + visionSysSim.addCamera(cameraSim, new Transform3d()); + cameraSim.prop.setCalibration(640, 480, Rotation2d.fromDegrees(80)); + visionSysSim.addVisionTargets(new VisionTargetSim(targetPose, new TargetModel(1.0, 3.0), 3)); + + var robotPose = new Pose2d(new Translation2d(5, 0), Rotation2d.fromDegrees(5)); + visionSysSim.update(robotPose); + assertTrue(camera.getLatestResult().hasTargets()); + + visionSysSim.adjustCamera( // vooop selfie stick + cameraSim, new Transform3d(new Translation3d(0, 0, 5000), new Rotation3d(0, 0, Math.PI))); + visionSysSim.update(robotPose); + assertFalse(camera.getLatestResult().hasTargets()); + } + + @Test + public void testNotVisibleVert2() { + final var targetPose = + new Pose3d(new Translation3d(15.98, 0, 2), new Rotation3d(0, 0, Math.PI)); + var robotToCamera = + new Transform3d(new Translation3d(0, 0, 1), new Rotation3d(0, -Math.PI / 4, 0)); + var visionSysSim = new VisionSystemSim("Test"); + var camera = new PhotonCamera("camera"); + var cameraSim = new PhotonCameraSim(camera); + visionSysSim.addCamera(cameraSim, robotToCamera); + cameraSim.prop.setCalibration(1234, 1234, Rotation2d.fromDegrees(80)); + visionSysSim.addVisionTargets(new VisionTargetSim(targetPose, new TargetModel(1.0, 0.5), 1736)); + + var robotPose = new Pose2d(new Translation2d(13.98, 0), Rotation2d.fromDegrees(5)); + visionSysSim.update(robotPose); + assertTrue(camera.getLatestResult().hasTargets()); + + // Pitched back camera should mean target goes out of view below the robot as distance increases + robotPose = new Pose2d(new Translation2d(0, 0), Rotation2d.fromDegrees(5)); + visionSysSim.update(robotPose); + assertFalse(camera.getLatestResult().hasTargets()); + } + + @Test + public void testNotVisibleTgtSize() { + final var targetPose = + new Pose3d(new Translation3d(15.98, 0, 1), new 
Rotation3d(0, 0, Math.PI)); + var visionSysSim = new VisionSystemSim("Test"); + var camera = new PhotonCamera("camera"); + var cameraSim = new PhotonCameraSim(camera); + visionSysSim.addCamera(cameraSim, new Transform3d()); + cameraSim.prop.setCalibration(640, 480, Rotation2d.fromDegrees(80)); + cameraSim.setMinTargetAreaPixels(20.0); + visionSysSim.addVisionTargets(new VisionTargetSim(targetPose, new TargetModel(0.1, 0.025), 24)); + + var robotPose = new Pose2d(new Translation2d(12, 0), Rotation2d.fromDegrees(5)); + visionSysSim.update(robotPose); + assertTrue(camera.getLatestResult().hasTargets()); + + robotPose = new Pose2d(new Translation2d(0, 0), Rotation2d.fromDegrees(5)); + visionSysSim.update(robotPose); + assertFalse(camera.getLatestResult().hasTargets()); + } + + @Test + public void testNotVisibleTooFarForLEDs() { + final var targetPose = + new Pose3d(new Translation3d(15.98, 0, 1), new Rotation3d(0, 0, Math.PI)); + var visionSysSim = new VisionSystemSim("Test"); + var camera = new PhotonCamera("camera"); + var cameraSim = new PhotonCameraSim(camera); + visionSysSim.addCamera(cameraSim, new Transform3d()); + cameraSim.prop.setCalibration(640, 480, Rotation2d.fromDegrees(80)); + cameraSim.setMaxSightRange(10); + cameraSim.setMinTargetAreaPixels(1.0); + visionSysSim.addVisionTargets(new VisionTargetSim(targetPose, new TargetModel(1.0, 0.25), 78)); + + var robotPose = new Pose2d(new Translation2d(10, 0), Rotation2d.fromDegrees(5)); + visionSysSim.update(robotPose); + assertTrue(camera.getLatestResult().hasTargets()); + + robotPose = new Pose2d(new Translation2d(0, 0), Rotation2d.fromDegrees(5)); + visionSysSim.update(robotPose); + assertFalse(camera.getLatestResult().hasTargets()); + } + + @ParameterizedTest + @ValueSource(doubles = {-10, -5, -0, -1, -2, 5, 7, 10.23}) + public void testYawAngles(double testYaw) { + final var targetPose = + new Pose3d(new Translation3d(15.98, 0, 1), new Rotation3d(0, 0, 3 * Math.PI / 4)); + var visionSysSim = new 
VisionSystemSim("Test"); + var camera = new PhotonCamera("camera"); + var cameraSim = new PhotonCameraSim(camera); + visionSysSim.addCamera(cameraSim, new Transform3d()); + cameraSim.prop.setCalibration(640, 480, Rotation2d.fromDegrees(80)); + cameraSim.setMinTargetAreaPixels(0.0); + visionSysSim.addVisionTargets(new VisionTargetSim(targetPose, new TargetModel(0.5, 0.5), 3)); + + var robotPose = new Pose2d(new Translation2d(10, 0), Rotation2d.fromDegrees(-1.0 * testYaw)); + visionSysSim.update(robotPose); + var res = camera.getLatestResult(); + assertTrue(res.hasTargets()); + var tgt = res.getBestTarget(); + assertEquals(testYaw, tgt.getYaw(), kRotDeltaDeg); + } + + @ParameterizedTest + @ValueSource(doubles = {-10, -5, -0, -1, -2, 5, 7, 10.23, 20.21, -19.999}) + public void testPitchAngles(double testPitch) { + final var targetPose = + new Pose3d(new Translation3d(15.98, 0, 0), new Rotation3d(0, 0, 3 * Math.PI / 4)); + final var robotPose = new Pose2d(new Translation2d(10, 0), new Rotation2d(0)); + var visionSysSim = new VisionSystemSim("Test"); + var camera = new PhotonCamera("camera"); + var cameraSim = new PhotonCameraSim(camera); + visionSysSim.addCamera(cameraSim, new Transform3d()); + cameraSim.prop.setCalibration(640, 480, Rotation2d.fromDegrees(120)); + cameraSim.setMinTargetAreaPixels(0.0); + visionSysSim.addVisionTargets(new VisionTargetSim(targetPose, new TargetModel(0.5, 0.5), 23)); + + // Transform is now robot -> camera + visionSysSim.adjustCamera( + cameraSim, + new Transform3d( + new Translation3d(), new Rotation3d(0, Units.degreesToRadians(testPitch), 0))); + visionSysSim.update(robotPose); + var res = camera.getLatestResult(); + assertTrue(res.hasTargets()); + var tgt = res.getBestTarget(); + + // Since the camera is level with the target, a positive-upward point will mean the target is in + // the + // lower half of the image + // which should produce negative pitch. 
+ assertEquals(testPitch, tgt.getPitch(), kRotDeltaDeg); + } + + private static Stream testDistanceCalcArgs() { + // Arbitrary and fairly random assortment of distances, camera pitches, and heights + return Stream.of( + Arguments.of(5, -15.98, 0), + Arguments.of(6, -15.98, 1), + Arguments.of(10, -15.98, 0), + Arguments.of(15, -15.98, 2), + Arguments.of(19.95, -15.98, 0), + Arguments.of(20, -15.98, 0), + Arguments.of(5, -42, 1), + Arguments.of(6, -42, 0), + Arguments.of(10, -42, 2), + Arguments.of(15, -42, 0.5), + Arguments.of(19.42, -15.98, 0), + Arguments.of(20, -42, 0), + Arguments.of(5, -35, 2), + Arguments.of(6, -35, 0), + Arguments.of(10, -34, 2.4), + Arguments.of(15, -33, 0), + Arguments.of(19.52, -15.98, 1.1)); + } + + @ParameterizedTest + @MethodSource("testDistanceCalcArgs") + public void testDistanceCalc(double testDist, double testPitch, double testHeight) { + // Assume dist along ground and tgt height the same. Iterate over other parameters. + + final var targetPose = + new Pose3d(new Translation3d(15.98, 0, 1), new Rotation3d(0, 0, Math.PI * 0.98)); + final var robotPose = + new Pose3d(new Translation3d(15.98 - Units.feetToMeters(testDist), 0, 0), new Rotation3d()); + final var robotToCamera = + new Transform3d( + new Translation3d(0, 0, Units.feetToMeters(testHeight)), + new Rotation3d(0, Units.degreesToRadians(testPitch), 0)); + + var visionSysSim = + new VisionSystemSim( + "absurdlylongnamewhichshouldneveractuallyhappenbuteehwelltestitanywaysohowsyourdaygoingihopegoodhaveagreatrestofyourlife!"); + var camera = new PhotonCamera("camera"); + var cameraSim = new PhotonCameraSim(camera); + visionSysSim.addCamera(cameraSim, new Transform3d()); + cameraSim.prop.setCalibration(640, 480, Rotation2d.fromDegrees(160)); + cameraSim.setMinTargetAreaPixels(0.0); + visionSysSim.adjustCamera(cameraSim, robotToCamera); + visionSysSim.addVisionTargets(new VisionTargetSim(targetPose, new TargetModel(0.5, 0.5), 0)); + + visionSysSim.update(robotPose); + + // Note that 
target 2d yaw/pitch accuracy is hindered by two factors in photonvision: + // 1. These are calculated with the average of the minimum area rectangle, which does not + // actually find the target center because of perspective distortion. + // 2. Yaw and pitch are calculated separately which gives incorrect pitch values. + var res = camera.getLatestResult(); + assertTrue(res.hasTargets()); + var tgt = res.getBestTarget(); + assertEquals(0.0, tgt.getYaw(), 0.5); + + // Distance calculation using this trigonometry may be wildly incorrect when + // there is not much height difference between the target and the camera. + double distMeas = + PhotonUtils.calculateDistanceToTargetMeters( + robotToCamera.getZ(), + targetPose.getZ(), + Units.degreesToRadians(-testPitch), + Units.degreesToRadians(tgt.getPitch())); + assertEquals(Units.feetToMeters(testDist), distMeas, 0.15); + } + + @Test + public void testMultipleTargets() { + final var targetPoseL = + new Pose3d(new Translation3d(15.98, 2, 0), new Rotation3d(0, 0, Math.PI)); + final var targetPoseC = + new Pose3d(new Translation3d(15.98, 0, 0), new Rotation3d(0, 0, Math.PI)); + final var targetPoseR = + new Pose3d(new Translation3d(15.98, -2, 0), new Rotation3d(0, 0, Math.PI)); + + var visionSysSim = new VisionSystemSim("Test"); + var camera = new PhotonCamera("camera"); + var cameraSim = new PhotonCameraSim(camera); + visionSysSim.addCamera(cameraSim, new Transform3d()); + cameraSim.prop.setCalibration(640, 480, Rotation2d.fromDegrees(80)); + cameraSim.setMinTargetAreaPixels(20.0); + + visionSysSim.addVisionTargets( + new VisionTargetSim( + targetPoseL.transformBy( + new Transform3d(new Translation3d(0, 0, 0.00), new Rotation3d())), + TargetModel.kTag16h5, + 1)); + visionSysSim.addVisionTargets( + new VisionTargetSim( + targetPoseC.transformBy( + new Transform3d(new Translation3d(0, 0, 0.00), new Rotation3d())), + TargetModel.kTag16h5, + 2)); + visionSysSim.addVisionTargets( + new VisionTargetSim( + targetPoseR.transformBy( 
+ new Transform3d(new Translation3d(0, 0, 0.00), new Rotation3d())), + TargetModel.kTag16h5, + 3)); + visionSysSim.addVisionTargets( + new VisionTargetSim( + targetPoseL.transformBy( + new Transform3d(new Translation3d(0, 0, 1.00), new Rotation3d())), + TargetModel.kTag16h5, + 4)); + visionSysSim.addVisionTargets( + new VisionTargetSim( + targetPoseC.transformBy( + new Transform3d(new Translation3d(0, 0, 1.00), new Rotation3d())), + TargetModel.kTag16h5, + 5)); + visionSysSim.addVisionTargets( + new VisionTargetSim( + targetPoseR.transformBy( + new Transform3d(new Translation3d(0, 0, 1.00), new Rotation3d())), + TargetModel.kTag16h5, + 6)); + visionSysSim.addVisionTargets( + new VisionTargetSim( + targetPoseL.transformBy( + new Transform3d(new Translation3d(0, 0, 0.50), new Rotation3d())), + TargetModel.kTag16h5, + 7)); + visionSysSim.addVisionTargets( + new VisionTargetSim( + targetPoseC.transformBy( + new Transform3d(new Translation3d(0, 0, 0.50), new Rotation3d())), + TargetModel.kTag16h5, + 8)); + visionSysSim.addVisionTargets( + new VisionTargetSim( + targetPoseL.transformBy( + new Transform3d(new Translation3d(0, 0, 0.75), new Rotation3d())), + TargetModel.kTag16h5, + 9)); + visionSysSim.addVisionTargets( + new VisionTargetSim( + targetPoseR.transformBy( + new Transform3d(new Translation3d(0, 0, 0.75), new Rotation3d())), + TargetModel.kTag16h5, + 10)); + visionSysSim.addVisionTargets( + new VisionTargetSim( + targetPoseL.transformBy( + new Transform3d(new Translation3d(0, 0, 0.25), new Rotation3d())), + TargetModel.kTag16h5, + 11)); + + var robotPose = new Pose2d(new Translation2d(6.0, 0), Rotation2d.fromDegrees(0.25)); + visionSysSim.update(robotPose); + var res = camera.getLatestResult(); + assertTrue(res.hasTargets()); + List tgtList; + tgtList = res.getTargets(); + assertEquals(11, tgtList.size()); + } + + @Test + public void testPoseEstimation() { + var visionSysSim = new VisionSystemSim("Test"); + var camera = new PhotonCamera("camera"); + var 
cameraSim = new PhotonCameraSim(camera); + visionSysSim.addCamera(cameraSim, new Transform3d()); + cameraSim.prop.setCalibration(640, 480, Rotation2d.fromDegrees(90)); + cameraSim.setMinTargetAreaPixels(20.0); + + List tagList = new ArrayList<>(); + tagList.add(new AprilTag(0, new Pose3d(12, 3, 1, new Rotation3d(0, 0, Math.PI)))); + tagList.add(new AprilTag(1, new Pose3d(12, 1, -1, new Rotation3d(0, 0, Math.PI)))); + tagList.add(new AprilTag(2, new Pose3d(11, 0, 2, new Rotation3d(0, 0, Math.PI)))); + double fieldLength = Units.feetToMeters(54.0); + double fieldWidth = Units.feetToMeters(27.0); + AprilTagFieldLayout layout = new AprilTagFieldLayout(tagList, fieldLength, fieldWidth); + Pose2d robotPose = new Pose2d(5, 1, Rotation2d.fromDegrees(5)); + + visionSysSim.addVisionTargets( + new VisionTargetSim(tagList.get(0).pose, TargetModel.kTag16h5, 0)); + + visionSysSim.update(robotPose); + var results = + VisionEstimation.estimateCamPosePNP( + camera.getCameraMatrix().get(), + camera.getDistCoeffs().get(), + camera.getLatestResult().getTargets(), + layout); + Pose3d pose = new Pose3d().plus(results.best); + assertEquals(5, pose.getX(), .01); + assertEquals(1, pose.getY(), .01); + assertEquals(0, pose.getZ(), .01); + assertEquals(Math.toRadians(5), pose.getRotation().getZ(), 0.01); + + visionSysSim.addVisionTargets( + new VisionTargetSim(tagList.get(1).pose, TargetModel.kTag16h5, 1)); + visionSysSim.addVisionTargets( + new VisionTargetSim(tagList.get(2).pose, TargetModel.kTag16h5, 2)); + + visionSysSim.update(robotPose); + results = + VisionEstimation.estimateCamPosePNP( + camera.getCameraMatrix().get(), + camera.getDistCoeffs().get(), + camera.getLatestResult().getTargets(), + layout); + pose = new Pose3d().plus(results.best); + assertEquals(5, pose.getX(), .01); + assertEquals(1, pose.getY(), .01); + assertEquals(0, pose.getZ(), .01); + assertEquals(Math.toRadians(5), pose.getRotation().getZ(), 0.01); + } } diff --git 
a/photon-lib/src/test/java/org/photonvision/estimation/ApriltagWorkbenchTest.java b/photon-lib/src/test/java/org/photonvision/estimation/ApriltagWorkbenchTest.java index cbc247fc14..fa70467c5f 100644 --- a/photon-lib/src/test/java/org/photonvision/estimation/ApriltagWorkbenchTest.java +++ b/photon-lib/src/test/java/org/photonvision/estimation/ApriltagWorkbenchTest.java @@ -42,63 +42,63 @@ import org.photonvision.PhotonPoseEstimator; public class ApriltagWorkbenchTest { - @BeforeAll - public static void setUp() { - JNIWrapper.Helper.setExtractOnStaticLoad(false); - WPIUtilJNI.Helper.setExtractOnStaticLoad(false); - NetworkTablesJNI.Helper.setExtractOnStaticLoad(false); - WPINetJNI.Helper.setExtractOnStaticLoad(false); - CameraServerCvJNI.Helper.setExtractOnStaticLoad(false); + @BeforeAll + public static void setUp() { + JNIWrapper.Helper.setExtractOnStaticLoad(false); + WPIUtilJNI.Helper.setExtractOnStaticLoad(false); + NetworkTablesJNI.Helper.setExtractOnStaticLoad(false); + WPINetJNI.Helper.setExtractOnStaticLoad(false); + CameraServerCvJNI.Helper.setExtractOnStaticLoad(false); - try { - CombinedRuntimeLoader.loadLibraries( - ApriltagWorkbenchTest.class, - "wpiutiljni", - "ntcorejni", - "wpinetjni", - "wpiHaljni", - "cscorejnicvstatic"); - } catch (Exception e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } + try { + CombinedRuntimeLoader.loadLibraries( + ApriltagWorkbenchTest.class, + "wpiutiljni", + "ntcorejni", + "wpinetjni", + "wpiHaljni", + "cscorejnicvstatic"); + } catch (Exception e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } - // No version check for testing - PhotonCamera.setVersionCheckEnabled(false); - } + // No version check for testing + PhotonCamera.setVersionCheckEnabled(false); + } - // @Test - public void testMeme() throws IOException, InterruptedException { - NetworkTableInstance instance = NetworkTableInstance.getDefault(); - instance.stopServer(); - // set the NT server if simulating this code. 
- // "localhost" for photon on desktop, or "photonvision.local" / "[ip-address]" - // for coprocessor - instance.setServer("localhost"); - instance.startClient4("myRobot"); + // @Test + public void testMeme() throws IOException, InterruptedException { + NetworkTableInstance instance = NetworkTableInstance.getDefault(); + instance.stopServer(); + // set the NT server if simulating this code. + // "localhost" for photon on desktop, or "photonvision.local" / "[ip-address]" + // for coprocessor + instance.setServer("localhost"); + instance.startClient4("myRobot"); - var robotToCamera = new Transform3d(); - var cam = new PhotonCamera("WPI2023"); - var tagLayout = - AprilTagFieldLayout.loadFromResource(AprilTagFields.k2023ChargedUp.m_resourceFile); + var robotToCamera = new Transform3d(); + var cam = new PhotonCamera("WPI2023"); + var tagLayout = + AprilTagFieldLayout.loadFromResource(AprilTagFields.k2023ChargedUp.m_resourceFile); - var pe = - new PhotonPoseEstimator( - tagLayout, - PhotonPoseEstimator.PoseStrategy.MULTI_TAG_PNP_ON_COPROCESSOR, - cam, - robotToCamera); + var pe = + new PhotonPoseEstimator( + tagLayout, + PhotonPoseEstimator.PoseStrategy.MULTI_TAG_PNP_ON_COPROCESSOR, + cam, + robotToCamera); - var field = new Field2d(); - SmartDashboard.putData(field); + var field = new Field2d(); + SmartDashboard.putData(field); - while (!Thread.interrupted()) { - Thread.sleep(500); + while (!Thread.interrupted()) { + Thread.sleep(500); - var ret = pe.update(); - System.out.println(ret); + var ret = pe.update(); + System.out.println(ret); - field.setRobotPose(ret.get().estimatedPose.toPose2d()); + field.setRobotPose(ret.get().estimatedPose.toPose2d()); + } } - } } diff --git a/photon-server/src/main/java/org/photonvision/Main.java b/photon-server/src/main/java/org/photonvision/Main.java index a683311479..db8787208d 100644 --- a/photon-server/src/main/java/org/photonvision/Main.java +++ b/photon-server/src/main/java/org/photonvision/Main.java @@ -55,326 +55,326 @@ import 
org.photonvision.vision.target.TargetModel; public class Main { - public static final int DEFAULT_WEBPORT = 5800; - - private static final Logger logger = new Logger(Main.class, LogGroup.General); - private static final boolean isRelease = PhotonVersion.isRelease; - - private static boolean isTestMode = false; - private static Path testModeFolder = null; - private static boolean printDebugLogs; - - private static boolean handleArgs(String[] args) throws ParseException { - final var options = new Options(); - options.addOption("d", "debug", false, "Enable debug logging prints"); - options.addOption("h", "help", false, "Show this help text and exit"); - options.addOption( - "t", - "test-mode", - false, - "Run in test mode with 2019 and 2020 WPI field images in place of cameras"); - - options.addOption("p", "path", true, "Point test mode to a specific folder"); - options.addOption( - "i", - "ignore-cameras", - true, - "Ignore cameras that match a regex. Uses camera name as provided by cscore."); - options.addOption("n", "disable-networking", false, "Disables control device network settings"); - options.addOption( - "c", - "clear-config", - false, - "Clears PhotonVision pipeline and networking settings. 
Preserves log files"); - - CommandLineParser parser = new DefaultParser(); - CommandLine cmd = parser.parse(options, args); - - if (cmd.hasOption("help")) { - HelpFormatter formatter = new HelpFormatter(); - formatter.printHelp("java -jar photonvision.jar [options]", options); - return false; // exit program - } else { - if (cmd.hasOption("debug")) { - printDebugLogs = true; - logger.info("Enabled debug logging"); - } - - if (cmd.hasOption("test-mode")) { - isTestMode = true; - logger.info("Running in test mode - Cameras will not be used"); - - if (cmd.hasOption("path")) { - Path p = Path.of(System.getProperty("PATH_PREFIX", "") + cmd.getOptionValue("path")); - logger.info("Loading from Path " + p.toAbsolutePath().toString()); - testModeFolder = p; + public static final int DEFAULT_WEBPORT = 5800; + + private static final Logger logger = new Logger(Main.class, LogGroup.General); + private static final boolean isRelease = PhotonVersion.isRelease; + + private static boolean isTestMode = false; + private static Path testModeFolder = null; + private static boolean printDebugLogs; + + private static boolean handleArgs(String[] args) throws ParseException { + final var options = new Options(); + options.addOption("d", "debug", false, "Enable debug logging prints"); + options.addOption("h", "help", false, "Show this help text and exit"); + options.addOption( + "t", + "test-mode", + false, + "Run in test mode with 2019 and 2020 WPI field images in place of cameras"); + + options.addOption("p", "path", true, "Point test mode to a specific folder"); + options.addOption( + "i", + "ignore-cameras", + true, + "Ignore cameras that match a regex. Uses camera name as provided by cscore."); + options.addOption("n", "disable-networking", false, "Disables control device network settings"); + options.addOption( + "c", + "clear-config", + false, + "Clears PhotonVision pipeline and networking settings. 
Preserves log files"); + + CommandLineParser parser = new DefaultParser(); + CommandLine cmd = parser.parse(options, args); + + if (cmd.hasOption("help")) { + HelpFormatter formatter = new HelpFormatter(); + formatter.printHelp("java -jar photonvision.jar [options]", options); + return false; // exit program + } else { + if (cmd.hasOption("debug")) { + printDebugLogs = true; + logger.info("Enabled debug logging"); + } + + if (cmd.hasOption("test-mode")) { + isTestMode = true; + logger.info("Running in test mode - Cameras will not be used"); + + if (cmd.hasOption("path")) { + Path p = Path.of(System.getProperty("PATH_PREFIX", "") + cmd.getOptionValue("path")); + logger.info("Loading from Path " + p.toAbsolutePath().toString()); + testModeFolder = p; + } + } + + if (cmd.hasOption("ignore-cameras")) { + VisionSourceManager.getInstance() + .setIgnoredCamerasRegex(cmd.getOptionValue("ignore-cameras")); + } + + if (cmd.hasOption("disable-networking")) { + NetworkManager.getInstance().networkingIsDisabled = true; + } + + if (cmd.hasOption("clear-config")) { + ConfigManager.getInstance().clearConfig(); + } } - } + return true; + } - if (cmd.hasOption("ignore-cameras")) { - VisionSourceManager.getInstance() - .setIgnoredCamerasRegex(cmd.getOptionValue("ignore-cameras")); - } + private static void addTestModeFromFolder() { + ConfigManager.getInstance().load(); + + try { + List collectedSources = + Files.list(testModeFolder) + .filter(p -> p.toFile().isFile()) + .map( + p -> { + try { + CameraConfiguration camConf = + new CameraConfiguration( + p.getFileName().toString(), p.toAbsolutePath().toString()); + camConf.FOV = TestUtils.WPI2019Image.FOV; // Good guess? 
+ camConf.addCalibration(TestUtils.get2020LifeCamCoeffs(false)); + + var pipeSettings = new AprilTagPipelineSettings(); + pipeSettings.pipelineNickname = p.getFileName().toString(); + pipeSettings.outputShowMultipleTargets = true; + pipeSettings.inputShouldShow = true; + pipeSettings.outputShouldShow = false; + pipeSettings.solvePNPEnabled = true; + + var aprilTag = new AprilTagPipelineSettings(); + var psList = new ArrayList(); + psList.add(aprilTag); + camConf.pipelineSettings = psList; + + return new FileVisionSource(camConf); + } catch (Exception e) { + logger.error("Couldn't load image " + p.getFileName().toString(), e); + return null; + } + }) + .filter(Objects::nonNull) + .collect(Collectors.toList()); + + ConfigManager.getInstance().unloadCameraConfigs(); + VisionModuleManager.getInstance().addSources(collectedSources).forEach(VisionModule::start); + ConfigManager.getInstance().addCameraConfigurations(collectedSources); + } catch (IOException e) { + logger.error("Path does not exist!"); + System.exit(1); + } + } - if (cmd.hasOption("disable-networking")) { - NetworkManager.getInstance().networkingIsDisabled = true; - } + private static void addTestModeSources() { + ConfigManager.getInstance().load(); + + var camConf2019 = + ConfigManager.getInstance().getConfig().getCameraConfigurations().get("WPI2019"); + if (camConf2019 == null) { + camConf2019 = + new CameraConfiguration("WPI2019", TestUtils.getTestMode2019ImagePath().toString()); + camConf2019.FOV = TestUtils.WPI2019Image.FOV; + camConf2019.calibrations.add(TestUtils.get2019LifeCamCoeffs(true)); + + var pipeline2019 = new ReflectivePipelineSettings(); + pipeline2019.pipelineNickname = "CargoShip"; + pipeline2019.targetModel = TargetModel.k2019DualTarget; + pipeline2019.outputShowMultipleTargets = true; + pipeline2019.contourGroupingMode = ContourGroupingMode.Dual; + pipeline2019.inputShouldShow = true; + + var psList2019 = new ArrayList(); + psList2019.add(pipeline2019); + camConf2019.pipelineSettings = 
psList2019; + } - if (cmd.hasOption("clear-config")) { - ConfigManager.getInstance().clearConfig(); - } - } - return true; - } - - private static void addTestModeFromFolder() { - ConfigManager.getInstance().load(); - - try { - List collectedSources = - Files.list(testModeFolder) - .filter(p -> p.toFile().isFile()) - .map( - p -> { - try { - CameraConfiguration camConf = - new CameraConfiguration( - p.getFileName().toString(), p.toAbsolutePath().toString()); - camConf.FOV = TestUtils.WPI2019Image.FOV; // Good guess? - camConf.addCalibration(TestUtils.get2020LifeCamCoeffs(false)); - - var pipeSettings = new AprilTagPipelineSettings(); - pipeSettings.pipelineNickname = p.getFileName().toString(); - pipeSettings.outputShowMultipleTargets = true; - pipeSettings.inputShouldShow = true; - pipeSettings.outputShouldShow = false; - pipeSettings.solvePNPEnabled = true; - - var aprilTag = new AprilTagPipelineSettings(); - var psList = new ArrayList(); - psList.add(aprilTag); - camConf.pipelineSettings = psList; - - return new FileVisionSource(camConf); - } catch (Exception e) { - logger.error("Couldn't load image " + p.getFileName().toString(), e); - return null; - } - }) - .filter(Objects::nonNull) - .collect(Collectors.toList()); - - ConfigManager.getInstance().unloadCameraConfigs(); - VisionModuleManager.getInstance().addSources(collectedSources).forEach(VisionModule::start); - ConfigManager.getInstance().addCameraConfigurations(collectedSources); - } catch (IOException e) { - logger.error("Path does not exist!"); - System.exit(1); - } - } - - private static void addTestModeSources() { - ConfigManager.getInstance().load(); - - var camConf2019 = - ConfigManager.getInstance().getConfig().getCameraConfigurations().get("WPI2019"); - if (camConf2019 == null) { - camConf2019 = - new CameraConfiguration("WPI2019", TestUtils.getTestMode2019ImagePath().toString()); - camConf2019.FOV = TestUtils.WPI2019Image.FOV; - camConf2019.calibrations.add(TestUtils.get2019LifeCamCoeffs(true)); - 
- var pipeline2019 = new ReflectivePipelineSettings(); - pipeline2019.pipelineNickname = "CargoShip"; - pipeline2019.targetModel = TargetModel.k2019DualTarget; - pipeline2019.outputShowMultipleTargets = true; - pipeline2019.contourGroupingMode = ContourGroupingMode.Dual; - pipeline2019.inputShouldShow = true; - - var psList2019 = new ArrayList(); - psList2019.add(pipeline2019); - camConf2019.pipelineSettings = psList2019; - } + var camConf2020 = + ConfigManager.getInstance().getConfig().getCameraConfigurations().get("WPI2020"); + if (camConf2020 == null) { + camConf2020 = + new CameraConfiguration("WPI2020", TestUtils.getTestMode2020ImagePath().toString()); + camConf2020.FOV = TestUtils.WPI2020Image.FOV; + camConf2020.calibrations.add(TestUtils.get2019LifeCamCoeffs(true)); + + var pipeline2020 = new ReflectivePipelineSettings(); + pipeline2020.pipelineNickname = "OuterPort"; + pipeline2020.targetModel = TargetModel.k2020HighGoalOuter; + camConf2020.calibrations.add(TestUtils.get2020LifeCamCoeffs(true)); + pipeline2020.inputShouldShow = true; + + var psList2020 = new ArrayList(); + psList2020.add(pipeline2020); + camConf2020.pipelineSettings = psList2020; + } - var camConf2020 = - ConfigManager.getInstance().getConfig().getCameraConfigurations().get("WPI2020"); - if (camConf2020 == null) { - camConf2020 = - new CameraConfiguration("WPI2020", TestUtils.getTestMode2020ImagePath().toString()); - camConf2020.FOV = TestUtils.WPI2020Image.FOV; - camConf2020.calibrations.add(TestUtils.get2019LifeCamCoeffs(true)); - - var pipeline2020 = new ReflectivePipelineSettings(); - pipeline2020.pipelineNickname = "OuterPort"; - pipeline2020.targetModel = TargetModel.k2020HighGoalOuter; - camConf2020.calibrations.add(TestUtils.get2020LifeCamCoeffs(true)); - pipeline2020.inputShouldShow = true; - - var psList2020 = new ArrayList(); - psList2020.add(pipeline2020); - camConf2020.pipelineSettings = psList2020; - } + var camConf2022 = + 
ConfigManager.getInstance().getConfig().getCameraConfigurations().get("WPI2022"); + if (camConf2022 == null) { + camConf2022 = + new CameraConfiguration("WPI2022", TestUtils.getTestMode2022ImagePath().toString()); + camConf2022.FOV = TestUtils.WPI2022Image.FOV; + camConf2022.calibrations.add(TestUtils.get2019LifeCamCoeffs(true)); + + var pipeline2022 = new ReflectivePipelineSettings(); + pipeline2022.pipelineNickname = "OuterPort"; + pipeline2022.targetModel = TargetModel.k2020HighGoalOuter; + pipeline2022.inputShouldShow = true; + // camConf2020.calibrations.add(TestUtils.get2020LifeCamCoeffs(true)); + + var psList2022 = new ArrayList(); + psList2022.add(pipeline2022); + camConf2022.pipelineSettings = psList2022; + } - var camConf2022 = - ConfigManager.getInstance().getConfig().getCameraConfigurations().get("WPI2022"); - if (camConf2022 == null) { - camConf2022 = - new CameraConfiguration("WPI2022", TestUtils.getTestMode2022ImagePath().toString()); - camConf2022.FOV = TestUtils.WPI2022Image.FOV; - camConf2022.calibrations.add(TestUtils.get2019LifeCamCoeffs(true)); - - var pipeline2022 = new ReflectivePipelineSettings(); - pipeline2022.pipelineNickname = "OuterPort"; - pipeline2022.targetModel = TargetModel.k2020HighGoalOuter; - pipeline2022.inputShouldShow = true; - // camConf2020.calibrations.add(TestUtils.get2020LifeCamCoeffs(true)); - - var psList2022 = new ArrayList(); - psList2022.add(pipeline2022); - camConf2022.pipelineSettings = psList2022; - } + CameraConfiguration camConf2023 = + ConfigManager.getInstance().getConfig().getCameraConfigurations().get("WPI2023"); + if (camConf2023 == null) { + camConf2023 = + new CameraConfiguration( + "WPI2023", + TestUtils.getResourcesFolderPath(true) + .resolve("testimages") + .resolve(TestUtils.WPI2023Apriltags.k383_60_Angle2.path) + .toString()); + + camConf2023.FOV = TestUtils.WPI2023Apriltags.FOV; + camConf2023.calibrations.add(TestUtils.get2023LifeCamCoeffs(true)); + + var pipeline2023 = new 
AprilTagPipelineSettings(); + var path_split = Path.of(camConf2023.path).getFileName().toString(); + pipeline2023.pipelineNickname = path_split.replace(".png", ""); + pipeline2023.targetModel = TargetModel.k6in_16h5; + pipeline2023.inputShouldShow = true; + pipeline2023.solvePNPEnabled = true; + + var psList2023 = new ArrayList(); + psList2023.add(pipeline2023); + camConf2023.pipelineSettings = psList2023; + } - CameraConfiguration camConf2023 = - ConfigManager.getInstance().getConfig().getCameraConfigurations().get("WPI2023"); - if (camConf2023 == null) { - camConf2023 = - new CameraConfiguration( - "WPI2023", - TestUtils.getResourcesFolderPath(true) - .resolve("testimages") - .resolve(TestUtils.WPI2023Apriltags.k383_60_Angle2.path) - .toString()); - - camConf2023.FOV = TestUtils.WPI2023Apriltags.FOV; - camConf2023.calibrations.add(TestUtils.get2023LifeCamCoeffs(true)); - - var pipeline2023 = new AprilTagPipelineSettings(); - var path_split = Path.of(camConf2023.path).getFileName().toString(); - pipeline2023.pipelineNickname = path_split.replace(".png", ""); - pipeline2023.targetModel = TargetModel.k6in_16h5; - pipeline2023.inputShouldShow = true; - pipeline2023.solvePNPEnabled = true; - - var psList2023 = new ArrayList(); - psList2023.add(pipeline2023); - camConf2023.pipelineSettings = psList2023; - } + // Colored shape testing + var camConfShape = + ConfigManager.getInstance().getConfig().getCameraConfigurations().get("Shape"); + + // If we haven't saved shape settings, create a new one + if (camConfShape == null) { + camConfShape = + new CameraConfiguration( + "Shape", + TestUtils.getPowercellImagePath(TestUtils.PowercellTestImages.kPowercell_test_1, true) + .toString()); + var settings = new ColoredShapePipelineSettings(); + settings.hsvHue = new IntegerCouple(0, 35); + settings.hsvSaturation = new IntegerCouple(82, 255); + settings.hsvValue = new IntegerCouple(62, 255); + settings.contourShape = ContourShape.Triangle; + settings.outputShowMultipleTargets = 
true; + settings.circleAccuracy = 15; + settings.inputShouldShow = true; + camConfShape.addPipelineSetting(settings); + } - // Colored shape testing - var camConfShape = - ConfigManager.getInstance().getConfig().getCameraConfigurations().get("Shape"); - - // If we haven't saved shape settings, create a new one - if (camConfShape == null) { - camConfShape = - new CameraConfiguration( - "Shape", - TestUtils.getPowercellImagePath(TestUtils.PowercellTestImages.kPowercell_test_1, true) - .toString()); - var settings = new ColoredShapePipelineSettings(); - settings.hsvHue = new IntegerCouple(0, 35); - settings.hsvSaturation = new IntegerCouple(82, 255); - settings.hsvValue = new IntegerCouple(62, 255); - settings.contourShape = ContourShape.Triangle; - settings.outputShowMultipleTargets = true; - settings.circleAccuracy = 15; - settings.inputShouldShow = true; - camConfShape.addPipelineSetting(settings); - } + var collectedSources = new ArrayList(); - var collectedSources = new ArrayList(); - - var fvsShape = new FileVisionSource(camConfShape); - var fvs2019 = new FileVisionSource(camConf2019); - var fvs2020 = new FileVisionSource(camConf2020); - var fvs2022 = new FileVisionSource(camConf2022); - var fvs2023 = new FileVisionSource(camConf2023); - - collectedSources.add(fvs2023); - collectedSources.add(fvs2022); - collectedSources.add(fvsShape); - collectedSources.add(fvs2020); - collectedSources.add(fvs2019); - - ConfigManager.getInstance().unloadCameraConfigs(); - VisionModuleManager.getInstance().addSources(collectedSources).forEach(VisionModule::start); - ConfigManager.getInstance().addCameraConfigurations(collectedSources); - } - - public static void main(String[] args) { - try { - TestUtils.loadLibraries(); - logger.info("Native libraries loaded."); - } catch (Exception e) { - logger.error("Failed to load native libraries!", e); - } + var fvsShape = new FileVisionSource(camConfShape); + var fvs2019 = new FileVisionSource(camConf2019); + var fvs2020 = new 
FileVisionSource(camConf2020); + var fvs2022 = new FileVisionSource(camConf2022); + var fvs2023 = new FileVisionSource(camConf2023); - try { - if (Platform.isRaspberryPi()) { - LibCameraJNI.forceLoad(); - } - } catch (IOException e) { - logger.error("Failed to load libcamera-JNI!", e); - } + collectedSources.add(fvs2023); + collectedSources.add(fvs2022); + collectedSources.add(fvsShape); + collectedSources.add(fvs2020); + collectedSources.add(fvs2019); - try { - if (!handleArgs(args)) { - System.exit(0); - } - } catch (ParseException e) { - logger.error("Failed to parse command-line options!", e); + ConfigManager.getInstance().unloadCameraConfigs(); + VisionModuleManager.getInstance().addSources(collectedSources).forEach(VisionModule::start); + ConfigManager.getInstance().addCameraConfigurations(collectedSources); } - CVMat.enablePrint(false); - PipelineProfiler.enablePrint(false); - - var logLevel = printDebugLogs ? LogLevel.TRACE : LogLevel.DEBUG; - Logger.setLevel(LogGroup.Camera, logLevel); - Logger.setLevel(LogGroup.WebServer, logLevel); - Logger.setLevel(LogGroup.VisionModule, logLevel); - Logger.setLevel(LogGroup.Data, logLevel); - Logger.setLevel(LogGroup.Config, logLevel); - Logger.setLevel(LogGroup.General, logLevel); - logger.info("Logging initialized in debug mode."); - - logger.info( - "Starting PhotonVision version " - + PhotonVersion.versionString - + " on " - + Platform.getPlatformName() - + (Platform.isRaspberryPi() ? 
(" (Pi " + PiVersion.getPiVersion() + ")") : "")); - - logger.debug("Loading ConfigManager..."); - ConfigManager.getInstance().load(); // init config manager - ConfigManager.getInstance().requestSave(); - - logger.debug("Loading HardwareManager..."); - // Force load the hardware manager - HardwareManager.getInstance(); - - logger.debug("Loading NetworkManager..."); - NetworkManager.getInstance().reinitialize(); - - logger.debug("Loading NetworkTablesManager..."); - NetworkTablesManager.getInstance() - .setConfig(ConfigManager.getInstance().getConfig().getNetworkConfig()); - - if (!isTestMode) { - logger.debug("Loading VisionSourceManager..."); - VisionSourceManager.getInstance() - .registerLoadedConfigs( - ConfigManager.getInstance().getConfig().getCameraConfigurations().values()); - - VisionSourceManager.getInstance().registerTimedTask(); - } else { - if (testModeFolder == null) { - addTestModeSources(); - } else { - addTestModeFromFolder(); - } - } + public static void main(String[] args) { + try { + TestUtils.loadLibraries(); + logger.info("Native libraries loaded."); + } catch (Exception e) { + logger.error("Failed to load native libraries!", e); + } - logger.info("Starting server..."); - Server.start(DEFAULT_WEBPORT); - } + try { + if (Platform.isRaspberryPi()) { + LibCameraJNI.forceLoad(); + } + } catch (IOException e) { + logger.error("Failed to load libcamera-JNI!", e); + } + + try { + if (!handleArgs(args)) { + System.exit(0); + } + } catch (ParseException e) { + logger.error("Failed to parse command-line options!", e); + } + + CVMat.enablePrint(false); + PipelineProfiler.enablePrint(false); + + var logLevel = printDebugLogs ? 
LogLevel.TRACE : LogLevel.DEBUG; + Logger.setLevel(LogGroup.Camera, logLevel); + Logger.setLevel(LogGroup.WebServer, logLevel); + Logger.setLevel(LogGroup.VisionModule, logLevel); + Logger.setLevel(LogGroup.Data, logLevel); + Logger.setLevel(LogGroup.Config, logLevel); + Logger.setLevel(LogGroup.General, logLevel); + logger.info("Logging initialized in debug mode."); + + logger.info( + "Starting PhotonVision version " + + PhotonVersion.versionString + + " on " + + Platform.getPlatformName() + + (Platform.isRaspberryPi() ? (" (Pi " + PiVersion.getPiVersion() + ")") : "")); + + logger.debug("Loading ConfigManager..."); + ConfigManager.getInstance().load(); // init config manager + ConfigManager.getInstance().requestSave(); + + logger.debug("Loading HardwareManager..."); + // Force load the hardware manager + HardwareManager.getInstance(); + + logger.debug("Loading NetworkManager..."); + NetworkManager.getInstance().reinitialize(); + + logger.debug("Loading NetworkTablesManager..."); + NetworkTablesManager.getInstance() + .setConfig(ConfigManager.getInstance().getConfig().getNetworkConfig()); + + if (!isTestMode) { + logger.debug("Loading VisionSourceManager..."); + VisionSourceManager.getInstance() + .registerLoadedConfigs( + ConfigManager.getInstance().getConfig().getCameraConfigurations().values()); + + VisionSourceManager.getInstance().registerTimedTask(); + } else { + if (testModeFolder == null) { + addTestModeSources(); + } else { + addTestModeFromFolder(); + } + } + + logger.info("Starting server..."); + Server.start(DEFAULT_WEBPORT); + } } diff --git a/photon-server/src/main/java/org/photonvision/server/CameraSocketHandler.java b/photon-server/src/main/java/org/photonvision/server/CameraSocketHandler.java index 3a44f09f16..d93f9a9e44 100644 --- a/photon-server/src/main/java/org/photonvision/server/CameraSocketHandler.java +++ b/photon-server/src/main/java/org/photonvision/server/CameraSocketHandler.java @@ -35,109 +35,109 @@ import 
org.photonvision.vision.videoStream.SocketVideoStreamManager; public class CameraSocketHandler { - private final Logger logger = new Logger(CameraSocketHandler.class, LogGroup.WebServer); - private final List users = new CopyOnWriteArrayList<>(); - private final SocketVideoStreamManager svsManager = SocketVideoStreamManager.getInstance(); - - private Thread cameraBroadcastThread; - - public static class UIMap extends HashMap {} - - private static class ThreadSafeSingleton { - private static final CameraSocketHandler INSTANCE = new CameraSocketHandler(); - } - - public static CameraSocketHandler getInstance() { - return CameraSocketHandler.ThreadSafeSingleton.INSTANCE; - } - - private CameraSocketHandler() { - cameraBroadcastThread = new Thread(this::broadcastFramesTask); - cameraBroadcastThread.setPriority(Thread.MAX_PRIORITY - 3); // fairly high priority - cameraBroadcastThread.start(); - } - - public void onConnect(WsConnectContext context) { - context.session.setIdleTimeout( - Duration.ofMillis(Long.MAX_VALUE)); // TODO: determine better value - var remote = (InetSocketAddress) context.session.getRemoteAddress(); - var host = remote.getAddress().toString() + ":" + remote.getPort(); - logger.info("New camera websocket connection from " + host); - users.add(context); - } - - protected void onClose(WsCloseContext context) { - var remote = (InetSocketAddress) context.session.getRemoteAddress(); - var host = remote.getAddress().toString() + ":" + remote.getPort(); - var reason = context.reason() != null ? 
context.reason() : "Connection closed by client"; - logger.info("Closing camera websocket connection from " + host + " for reason: " + reason); - svsManager.removeSubscription(context); - users.remove(context); - } - - @SuppressWarnings({"unchecked"}) - public void onMessage(WsMessageContext context) { - var messageStr = context.message(); - ObjectMapper mapper = new ObjectMapper(); - try { - JsonNode actualObj = mapper.readTree(messageStr); - - try { - var entryCmd = actualObj.get("cmd").asText(); - var socketMessageType = CameraSocketMessageType.fromEntryKey(entryCmd); - - logger.trace(() -> "Got Camera WS message: [" + socketMessageType + "]"); - - if (socketMessageType == null) { - logger.warn("Got unknown socket message command: " + entryCmd); - } + private final Logger logger = new Logger(CameraSocketHandler.class, LogGroup.WebServer); + private final List users = new CopyOnWriteArrayList<>(); + private final SocketVideoStreamManager svsManager = SocketVideoStreamManager.getInstance(); - switch (socketMessageType) { - case CSMT_SUBSCRIBE: - { - int portId = actualObj.get("port").asInt(); - svsManager.addSubscription(context, portId); - break; - } - case CSMT_UNSUBSCRIBE: - { - svsManager.removeSubscription(context); - break; + private Thread cameraBroadcastThread; + + public static class UIMap extends HashMap {} + + private static class ThreadSafeSingleton { + private static final CameraSocketHandler INSTANCE = new CameraSocketHandler(); + } + + public static CameraSocketHandler getInstance() { + return CameraSocketHandler.ThreadSafeSingleton.INSTANCE; + } + + private CameraSocketHandler() { + cameraBroadcastThread = new Thread(this::broadcastFramesTask); + cameraBroadcastThread.setPriority(Thread.MAX_PRIORITY - 3); // fairly high priority + cameraBroadcastThread.start(); + } + + public void onConnect(WsConnectContext context) { + context.session.setIdleTimeout( + Duration.ofMillis(Long.MAX_VALUE)); // TODO: determine better value + var remote = 
(InetSocketAddress) context.session.getRemoteAddress(); + var host = remote.getAddress().toString() + ":" + remote.getPort(); + logger.info("New camera websocket connection from " + host); + users.add(context); + } + + protected void onClose(WsCloseContext context) { + var remote = (InetSocketAddress) context.session.getRemoteAddress(); + var host = remote.getAddress().toString() + ":" + remote.getPort(); + var reason = context.reason() != null ? context.reason() : "Connection closed by client"; + logger.info("Closing camera websocket connection from " + host + " for reason: " + reason); + svsManager.removeSubscription(context); + users.remove(context); + } + + @SuppressWarnings({"unchecked"}) + public void onMessage(WsMessageContext context) { + var messageStr = context.message(); + ObjectMapper mapper = new ObjectMapper(); + try { + JsonNode actualObj = mapper.readTree(messageStr); + + try { + var entryCmd = actualObj.get("cmd").asText(); + var socketMessageType = CameraSocketMessageType.fromEntryKey(entryCmd); + + logger.trace(() -> "Got Camera WS message: [" + socketMessageType + "]"); + + if (socketMessageType == null) { + logger.warn("Got unknown socket message command: " + entryCmd); + } + + switch (socketMessageType) { + case CSMT_SUBSCRIBE: + { + int portId = actualObj.get("port").asInt(); + svsManager.addSubscription(context, portId); + break; + } + case CSMT_UNSUBSCRIBE: + { + svsManager.removeSubscription(context); + break; + } + } + } catch (Exception e) { + logger.error("Failed to parse message!", e); } + + } catch (JsonProcessingException e) { + logger.warn("Could not parse message \"" + messageStr + "\""); + e.printStackTrace(); + return; } - } catch (Exception e) { - logger.error("Failed to parse message!", e); - } - - } catch (JsonProcessingException e) { - logger.warn("Could not parse message \"" + messageStr + "\""); - e.printStackTrace(); - return; } - } - - @SuppressWarnings({"unchecked"}) - public void onBinaryMessage(WsBinaryMessageContext 
context) { - return; // ignoring binary messages for now - } - - private void broadcastFramesTask() { - // Background camera image broadcasting thread - while (!Thread.currentThread().isInterrupted()) { - svsManager.allStreamConvertNextFrame(); - - try { - Thread.sleep(1); - } catch (InterruptedException e) { - logger.error("Exception waiting for camera stream broadcast semaphore", e); - } - - for (var user : users) { - var sendBytes = svsManager.getSendFrame(user); - if (sendBytes != null) { - user.send(sendBytes); + + @SuppressWarnings({"unchecked"}) + public void onBinaryMessage(WsBinaryMessageContext context) { + return; // ignoring binary messages for now + } + + private void broadcastFramesTask() { + // Background camera image broadcasting thread + while (!Thread.currentThread().isInterrupted()) { + svsManager.allStreamConvertNextFrame(); + + try { + Thread.sleep(1); + } catch (InterruptedException e) { + logger.error("Exception waiting for camera stream broadcast semaphore", e); + } + + for (var user : users) { + var sendBytes = svsManager.getSendFrame(user); + if (sendBytes != null) { + user.send(sendBytes); + } + } } - } } - } } diff --git a/photon-server/src/main/java/org/photonvision/server/CameraSocketMessageType.java b/photon-server/src/main/java/org/photonvision/server/CameraSocketMessageType.java index fda054d924..74841e2022 100644 --- a/photon-server/src/main/java/org/photonvision/server/CameraSocketMessageType.java +++ b/photon-server/src/main/java/org/photonvision/server/CameraSocketMessageType.java @@ -23,24 +23,24 @@ @SuppressWarnings("unused") public enum CameraSocketMessageType { - CSMT_SUBSCRIBE("subscribe"), - CSMT_UNSUBSCRIBE("unsubscribe"); + CSMT_SUBSCRIBE("subscribe"), + CSMT_UNSUBSCRIBE("unsubscribe"); - public final String entryKey; + public final String entryKey; - CameraSocketMessageType(String entryKey) { - this.entryKey = entryKey; - } + CameraSocketMessageType(String entryKey) { + this.entryKey = entryKey; + } - private static 
final Map entryKeyToValueMap = new HashMap<>(); + private static final Map entryKeyToValueMap = new HashMap<>(); - static { - for (var value : EnumSet.allOf(CameraSocketMessageType.class)) { - entryKeyToValueMap.put(value.entryKey, value); + static { + for (var value : EnumSet.allOf(CameraSocketMessageType.class)) { + entryKeyToValueMap.put(value.entryKey, value); + } } - } - public static CameraSocketMessageType fromEntryKey(String entryKey) { - return entryKeyToValueMap.get(entryKey); - } + public static CameraSocketMessageType fromEntryKey(String entryKey) { + return entryKeyToValueMap.get(entryKey); + } } diff --git a/photon-server/src/main/java/org/photonvision/server/DataSocketHandler.java b/photon-server/src/main/java/org/photonvision/server/DataSocketHandler.java index 92e1f40064..f7f0345723 100644 --- a/photon-server/src/main/java/org/photonvision/server/DataSocketHandler.java +++ b/photon-server/src/main/java/org/photonvision/server/DataSocketHandler.java @@ -45,327 +45,327 @@ @SuppressWarnings("rawtypes") public class DataSocketHandler { - private final Logger logger = new Logger(DataSocketHandler.class, LogGroup.WebServer); - private final List users = new CopyOnWriteArrayList<>(); - private final ObjectMapper objectMapper = new ObjectMapper(new MessagePackFactory()); - private final DataChangeService dcService = DataChangeService.getInstance(); + private final Logger logger = new Logger(DataSocketHandler.class, LogGroup.WebServer); + private final List users = new CopyOnWriteArrayList<>(); + private final ObjectMapper objectMapper = new ObjectMapper(new MessagePackFactory()); + private final DataChangeService dcService = DataChangeService.getInstance(); - @SuppressWarnings("FieldCanBeLocal") - private final UIOutboundSubscriber uiOutboundSubscriber = new UIOutboundSubscriber(this); + @SuppressWarnings("FieldCanBeLocal") + private final UIOutboundSubscriber uiOutboundSubscriber = new UIOutboundSubscriber(this); - public static class UIMap extends 
HashMap {} + public static class UIMap extends HashMap {} - private static class ThreadSafeSingleton { - private static final DataSocketHandler INSTANCE = new DataSocketHandler(); - } + private static class ThreadSafeSingleton { + private static final DataSocketHandler INSTANCE = new DataSocketHandler(); + } - public static DataSocketHandler getInstance() { - return DataSocketHandler.ThreadSafeSingleton.INSTANCE; - } + public static DataSocketHandler getInstance() { + return DataSocketHandler.ThreadSafeSingleton.INSTANCE; + } - private DataSocketHandler() { - dcService.addSubscribers( - uiOutboundSubscriber, - new UIInboundSubscriber()); // Subscribe outgoing messages to the data change service - } + private DataSocketHandler() { + dcService.addSubscribers( + uiOutboundSubscriber, + new UIInboundSubscriber()); // Subscribe outgoing messages to the data change service + } - public void onConnect(WsConnectContext context) { - context.session.setIdleTimeout( - Duration.ofMillis(Long.MAX_VALUE)); // TODO: determine better value - var remote = (InetSocketAddress) context.session.getRemoteAddress(); - var host = remote.getAddress().toString() + ":" + remote.getPort(); - logger.info("New websocket connection from " + host); - users.add(context); - dcService.publishEvent( - new IncomingWebSocketEvent<>( - DataChangeDestination.DCD_GENSETTINGS, "userConnected", context)); - } + public void onConnect(WsConnectContext context) { + context.session.setIdleTimeout( + Duration.ofMillis(Long.MAX_VALUE)); // TODO: determine better value + var remote = (InetSocketAddress) context.session.getRemoteAddress(); + var host = remote.getAddress().toString() + ":" + remote.getPort(); + logger.info("New websocket connection from " + host); + users.add(context); + dcService.publishEvent( + new IncomingWebSocketEvent<>( + DataChangeDestination.DCD_GENSETTINGS, "userConnected", context)); + } - protected void onClose(WsCloseContext context) { - var remote = (InetSocketAddress) 
context.session.getRemoteAddress(); - var host = remote.getAddress().toString() + ":" + remote.getPort(); - var reason = context.reason() != null ? context.reason() : "Connection closed by client"; - logger.info("Closing websocket connection from " + host + " for reason: " + reason); - users.remove(context); - } + protected void onClose(WsCloseContext context) { + var remote = (InetSocketAddress) context.session.getRemoteAddress(); + var host = remote.getAddress().toString() + ":" + remote.getPort(); + var reason = context.reason() != null ? context.reason() : "Connection closed by client"; + logger.info("Closing websocket connection from " + host + " for reason: " + reason); + users.remove(context); + } - @SuppressWarnings({"unchecked"}) - public void onBinaryMessage(WsBinaryMessageContext context) { - try { - Map deserializedData = - objectMapper.readValue(context.data(), new TypeReference<>() {}); + @SuppressWarnings({"unchecked"}) + public void onBinaryMessage(WsBinaryMessageContext context) { + try { + Map deserializedData = + objectMapper.readValue(context.data(), new TypeReference<>() {}); - // Special case the current camera index - var camIndexValue = deserializedData.get("cameraIndex"); - Integer cameraIndex = null; - if (camIndexValue instanceof Integer) { - cameraIndex = (Integer) camIndexValue; - deserializedData.remove("cameraIndex"); - } + // Special case the current camera index + var camIndexValue = deserializedData.get("cameraIndex"); + Integer cameraIndex = null; + if (camIndexValue instanceof Integer) { + cameraIndex = (Integer) camIndexValue; + deserializedData.remove("cameraIndex"); + } - for (Map.Entry entry : deserializedData.entrySet()) { - try { - var entryKey = entry.getKey(); - var entryValue = entry.getValue(); - var socketMessageType = DataSocketMessageType.fromEntryKey(entryKey); + for (Map.Entry entry : deserializedData.entrySet()) { + try { + var entryKey = entry.getKey(); + var entryValue = entry.getValue(); + var socketMessageType 
= DataSocketMessageType.fromEntryKey(entryKey); - logger.trace( - () -> - "Got WS message: [" - + socketMessageType - + "] ==> [" - + entryKey - + "], [" - + entryValue - + "]"); + logger.trace( + () -> + "Got WS message: [" + + socketMessageType + + "] ==> [" + + entryKey + + "], [" + + entryValue + + "]"); - if (socketMessageType == null) { - logger.warn("Got unknown socket message type: " + entryKey); - continue; - } + if (socketMessageType == null) { + logger.warn("Got unknown socket message type: " + entryKey); + continue; + } - switch (socketMessageType) { - case SMT_DRIVERMODE: - { - // TODO: what is this event? - var data = (HashMap) entryValue; - var dmExpEvent = - new IncomingWebSocketEvent( - DataChangeDestination.DCD_ACTIVEMODULE, "driverExposure", data); - var dmBrightEvent = - new IncomingWebSocketEvent( - DataChangeDestination.DCD_ACTIVEMODULE, "driverBrightness", data); - var dmIsDriverEvent = - new IncomingWebSocketEvent( - DataChangeDestination.DCD_ACTIVEMODULE, "isDriver", data); + switch (socketMessageType) { + case SMT_DRIVERMODE: + { + // TODO: what is this event? 
+ var data = (HashMap) entryValue; + var dmExpEvent = + new IncomingWebSocketEvent( + DataChangeDestination.DCD_ACTIVEMODULE, "driverExposure", data); + var dmBrightEvent = + new IncomingWebSocketEvent( + DataChangeDestination.DCD_ACTIVEMODULE, "driverBrightness", data); + var dmIsDriverEvent = + new IncomingWebSocketEvent( + DataChangeDestination.DCD_ACTIVEMODULE, "isDriver", data); - dcService.publishEvents(dmExpEvent, dmBrightEvent, dmIsDriverEvent); - break; - } - case SMT_CHANGECAMERANAME: - { - var ccnEvent = - new IncomingWebSocketEvent<>( - DataChangeDestination.DCD_ACTIVEMODULE, - "cameraNickname", - (String) entryValue, - cameraIndex, - context); - dcService.publishEvent(ccnEvent); - break; - } - case SMT_CHANGEPIPELINENAME: - { - var cpnEvent = - new IncomingWebSocketEvent<>( - DataChangeDestination.DCD_ACTIVEMODULE, - "pipelineName", - (String) entryValue, - cameraIndex, - context); - dcService.publishEvent(cpnEvent); - break; - } - case SMT_ADDNEWPIPELINE: - { - // HashMap data = (HashMap) entryValue; - // var type = (PipelineType) - // data.get("pipelineType"); - // var name = (String) data.get("pipelineName"); - var arr = (ArrayList) entryValue; - var name = (String) arr.get(0); - var type = PipelineType.values()[(Integer) arr.get(1) + 2]; + dcService.publishEvents(dmExpEvent, dmBrightEvent, dmIsDriverEvent); + break; + } + case SMT_CHANGECAMERANAME: + { + var ccnEvent = + new IncomingWebSocketEvent<>( + DataChangeDestination.DCD_ACTIVEMODULE, + "cameraNickname", + (String) entryValue, + cameraIndex, + context); + dcService.publishEvent(ccnEvent); + break; + } + case SMT_CHANGEPIPELINENAME: + { + var cpnEvent = + new IncomingWebSocketEvent<>( + DataChangeDestination.DCD_ACTIVEMODULE, + "pipelineName", + (String) entryValue, + cameraIndex, + context); + dcService.publishEvent(cpnEvent); + break; + } + case SMT_ADDNEWPIPELINE: + { + // HashMap data = (HashMap) entryValue; + // var type = (PipelineType) + // data.get("pipelineType"); + // var name = 
(String) data.get("pipelineName"); + var arr = (ArrayList) entryValue; + var name = (String) arr.get(0); + var type = PipelineType.values()[(Integer) arr.get(1) + 2]; - var newPipelineEvent = - new IncomingWebSocketEvent<>( - DataChangeDestination.DCD_ACTIVEMODULE, - "newPipelineInfo", - Pair.of(name, type), - cameraIndex, - context); - dcService.publishEvent(newPipelineEvent); - break; - } - case SMT_CHANGEBRIGHTNESS: - { - HardwareManager.getInstance() - .setBrightnessPercent(Integer.parseInt(entryValue.toString())); - break; - } - case SMT_DUPLICATEPIPELINE: - { - var pipeIndex = (Integer) entryValue; + var newPipelineEvent = + new IncomingWebSocketEvent<>( + DataChangeDestination.DCD_ACTIVEMODULE, + "newPipelineInfo", + Pair.of(name, type), + cameraIndex, + context); + dcService.publishEvent(newPipelineEvent); + break; + } + case SMT_CHANGEBRIGHTNESS: + { + HardwareManager.getInstance() + .setBrightnessPercent(Integer.parseInt(entryValue.toString())); + break; + } + case SMT_DUPLICATEPIPELINE: + { + var pipeIndex = (Integer) entryValue; - logger.info("Duplicating pipe@index" + pipeIndex + " for camera " + cameraIndex); + logger.info("Duplicating pipe@index" + pipeIndex + " for camera " + cameraIndex); - var newPipelineEvent = - new IncomingWebSocketEvent<>( - DataChangeDestination.DCD_ACTIVEMODULE, - "duplicatePipeline", - pipeIndex, - cameraIndex, - context); - dcService.publishEvent(newPipelineEvent); - break; - } - case SMT_DELETECURRENTPIPELINE: - { - var deleteCurrentPipelineEvent = - new IncomingWebSocketEvent<>( - DataChangeDestination.DCD_ACTIVEMODULE, - "deleteCurrPipeline", - 0, - cameraIndex, - context); - dcService.publishEvent(deleteCurrentPipelineEvent); - break; - } - case SMT_ROBOTOFFSETPOINT: - { - var robotOffsetPointEvent = - new IncomingWebSocketEvent<>( - DataChangeDestination.DCD_ACTIVEMODULE, - "robotOffsetPoint", - (Integer) entryValue, - cameraIndex, - null); - dcService.publishEvent(robotOffsetPointEvent); - break; - } - case 
SMT_CURRENTCAMERA: - { - var changeCurrentCameraEvent = - new IncomingWebSocketEvent<>( - DataChangeDestination.DCD_OTHER, "changeUICamera", (Integer) entryValue); - dcService.publishEvent(changeCurrentCameraEvent); - break; - } - case SMT_CURRENTPIPELINE: - { - var changePipelineEvent = - new IncomingWebSocketEvent<>( - DataChangeDestination.DCD_ACTIVEMODULE, - "changePipeline", - (Integer) entryValue, - cameraIndex, - context); - dcService.publishEvent(changePipelineEvent); - break; - } - case SMT_STARTPNPCALIBRATION: - { - var changePipelineEvent = - new IncomingWebSocketEvent<>( - DataChangeDestination.DCD_ACTIVEMODULE, - "startCalibration", - (Map) entryValue, - cameraIndex, - context); - dcService.publishEvent(changePipelineEvent); - break; - } - case SMT_SAVEINPUTSNAPSHOT: - { - var takeInputSnapshotEvent = - new IncomingWebSocketEvent<>( - DataChangeDestination.DCD_ACTIVEMODULE, - "saveInputSnapshot", - 0, - cameraIndex, - context); - dcService.publishEvent(takeInputSnapshotEvent); - break; - } - case SMT_SAVEOUTPUTSNAPSHOT: - { - var takeOutputSnapshotEvent = - new IncomingWebSocketEvent<>( - DataChangeDestination.DCD_ACTIVEMODULE, - "saveOutputSnapshot", - 0, - cameraIndex, - context); - dcService.publishEvent(takeOutputSnapshotEvent); - break; - } - case SMT_TAKECALIBRATIONSNAPSHOT: - { - var takeCalSnapshotEvent = - new IncomingWebSocketEvent<>( - DataChangeDestination.DCD_ACTIVEMODULE, - "takeCalSnapshot", - 0, - cameraIndex, - context); - dcService.publishEvent(takeCalSnapshotEvent); - break; - } - case SMT_PIPELINESETTINGCHANGE: - { - HashMap data = (HashMap) entryValue; + var newPipelineEvent = + new IncomingWebSocketEvent<>( + DataChangeDestination.DCD_ACTIVEMODULE, + "duplicatePipeline", + pipeIndex, + cameraIndex, + context); + dcService.publishEvent(newPipelineEvent); + break; + } + case SMT_DELETECURRENTPIPELINE: + { + var deleteCurrentPipelineEvent = + new IncomingWebSocketEvent<>( + DataChangeDestination.DCD_ACTIVEMODULE, + 
"deleteCurrPipeline", + 0, + cameraIndex, + context); + dcService.publishEvent(deleteCurrentPipelineEvent); + break; + } + case SMT_ROBOTOFFSETPOINT: + { + var robotOffsetPointEvent = + new IncomingWebSocketEvent<>( + DataChangeDestination.DCD_ACTIVEMODULE, + "robotOffsetPoint", + (Integer) entryValue, + cameraIndex, + null); + dcService.publishEvent(robotOffsetPointEvent); + break; + } + case SMT_CURRENTCAMERA: + { + var changeCurrentCameraEvent = + new IncomingWebSocketEvent<>( + DataChangeDestination.DCD_OTHER, "changeUICamera", (Integer) entryValue); + dcService.publishEvent(changeCurrentCameraEvent); + break; + } + case SMT_CURRENTPIPELINE: + { + var changePipelineEvent = + new IncomingWebSocketEvent<>( + DataChangeDestination.DCD_ACTIVEMODULE, + "changePipeline", + (Integer) entryValue, + cameraIndex, + context); + dcService.publishEvent(changePipelineEvent); + break; + } + case SMT_STARTPNPCALIBRATION: + { + var changePipelineEvent = + new IncomingWebSocketEvent<>( + DataChangeDestination.DCD_ACTIVEMODULE, + "startCalibration", + (Map) entryValue, + cameraIndex, + context); + dcService.publishEvent(changePipelineEvent); + break; + } + case SMT_SAVEINPUTSNAPSHOT: + { + var takeInputSnapshotEvent = + new IncomingWebSocketEvent<>( + DataChangeDestination.DCD_ACTIVEMODULE, + "saveInputSnapshot", + 0, + cameraIndex, + context); + dcService.publishEvent(takeInputSnapshotEvent); + break; + } + case SMT_SAVEOUTPUTSNAPSHOT: + { + var takeOutputSnapshotEvent = + new IncomingWebSocketEvent<>( + DataChangeDestination.DCD_ACTIVEMODULE, + "saveOutputSnapshot", + 0, + cameraIndex, + context); + dcService.publishEvent(takeOutputSnapshotEvent); + break; + } + case SMT_TAKECALIBRATIONSNAPSHOT: + { + var takeCalSnapshotEvent = + new IncomingWebSocketEvent<>( + DataChangeDestination.DCD_ACTIVEMODULE, + "takeCalSnapshot", + 0, + cameraIndex, + context); + dcService.publishEvent(takeCalSnapshotEvent); + break; + } + case SMT_PIPELINESETTINGCHANGE: + { + HashMap data = (HashMap) 
entryValue; - if (data.size() >= 2) { - var cameraIndex2 = (int) data.get("cameraIndex"); - for (var dataEntry : data.entrySet()) { - if (dataEntry.getKey().equals("cameraIndex")) { - continue; + if (data.size() >= 2) { + var cameraIndex2 = (int) data.get("cameraIndex"); + for (var dataEntry : data.entrySet()) { + if (dataEntry.getKey().equals("cameraIndex")) { + continue; + } + var pipelineSettingChangeEvent = + new IncomingWebSocketEvent( + DataChangeDestination.DCD_ACTIVEPIPELINESETTINGS, + dataEntry.getKey(), + dataEntry.getValue(), + cameraIndex2, + context); + dcService.publishEvent(pipelineSettingChangeEvent); + } + } else { + logger.warn("Unknown message for PSC: " + data.keySet().iterator().next()); + } + break; + } + case SMT_CHANGEPIPELINETYPE: + { + var changePipelineEvent = + new IncomingWebSocketEvent<>( + DataChangeDestination.DCD_ACTIVEMODULE, + "changePipelineType", + (Integer) entryValue, + cameraIndex, + context); + dcService.publishEvent(changePipelineEvent); + break; + } } - var pipelineSettingChangeEvent = - new IncomingWebSocketEvent( - DataChangeDestination.DCD_ACTIVEPIPELINESETTINGS, - dataEntry.getKey(), - dataEntry.getValue(), - cameraIndex2, - context); - dcService.publishEvent(pipelineSettingChangeEvent); - } - } else { - logger.warn("Unknown message for PSC: " + data.keySet().iterator().next()); + } catch (Exception e) { + logger.error("Failed to parse message!", e); } - break; - } - case SMT_CHANGEPIPELINETYPE: - { - var changePipelineEvent = - new IncomingWebSocketEvent<>( - DataChangeDestination.DCD_ACTIVEMODULE, - "changePipelineType", - (Integer) entryValue, - cameraIndex, - context); - dcService.publishEvent(changePipelineEvent); - break; - } - } - } catch (Exception e) { - logger.error("Failed to parse message!", e); + } + } catch (IOException e) { + logger.error("Failed to deserialize message!", e); } - } - } catch (IOException e) { - logger.error("Failed to deserialize message!", e); } - } - private void sendMessage(Object 
message, WsContext user) throws JsonProcessingException { - ByteBuffer b = ByteBuffer.wrap(objectMapper.writeValueAsBytes(message)); - user.send(b); - } + private void sendMessage(Object message, WsContext user) throws JsonProcessingException { + ByteBuffer b = ByteBuffer.wrap(objectMapper.writeValueAsBytes(message)); + user.send(b); + } - public void broadcastMessage(Object message, WsContext userToSkip) - throws JsonProcessingException { - if (userToSkip == null) { - for (WsContext user : users) { - sendMessage(message, user); - } - } else { - var skipUserPort = ((InetSocketAddress) userToSkip.session.getRemoteAddress()).getPort(); - for (WsContext user : users) { - var userPort = ((InetSocketAddress) user.session.getRemoteAddress()).getPort(); - if (userPort != skipUserPort) { - sendMessage(message, user); + public void broadcastMessage(Object message, WsContext userToSkip) + throws JsonProcessingException { + if (userToSkip == null) { + for (WsContext user : users) { + sendMessage(message, user); + } + } else { + var skipUserPort = ((InetSocketAddress) userToSkip.session.getRemoteAddress()).getPort(); + for (WsContext user : users) { + var userPort = ((InetSocketAddress) user.session.getRemoteAddress()).getPort(); + if (userPort != skipUserPort) { + sendMessage(message, user); + } + } } - } } - } } diff --git a/photon-server/src/main/java/org/photonvision/server/DataSocketMessageType.java b/photon-server/src/main/java/org/photonvision/server/DataSocketMessageType.java index 0e702458f8..f5222cf371 100644 --- a/photon-server/src/main/java/org/photonvision/server/DataSocketMessageType.java +++ b/photon-server/src/main/java/org/photonvision/server/DataSocketMessageType.java @@ -23,38 +23,38 @@ @SuppressWarnings("unused") public enum DataSocketMessageType { - SMT_DRIVERMODE("driverMode"), - SMT_CHANGECAMERANAME("changeCameraName"), - SMT_CHANGEPIPELINENAME("changePipelineName"), - SMT_ADDNEWPIPELINE("addNewPipeline"), - 
SMT_DELETECURRENTPIPELINE("deleteCurrentPipeline"), - SMT_CURRENTCAMERA("currentCamera"), - SMT_PIPELINESETTINGCHANGE("changePipelineSetting"), - SMT_CURRENTPIPELINE("currentPipeline"), - SMT_STARTPNPCALIBRATION("startPnpCalibration"), - SMT_SAVEINPUTSNAPSHOT("saveInputSnapshot"), - SMT_SAVEOUTPUTSNAPSHOT("saveOutputSnapshot"), - SMT_TAKECALIBRATIONSNAPSHOT("takeCalibrationSnapshot"), - SMT_DUPLICATEPIPELINE("duplicatePipeline"), - SMT_CHANGEBRIGHTNESS("enabledLEDPercentage"), - SMT_ROBOTOFFSETPOINT("robotOffsetPoint"), - SMT_CHANGEPIPELINETYPE("pipelineType"); - - public final String entryKey; - - DataSocketMessageType(String entryKey) { - this.entryKey = entryKey; - } - - private static final Map entryKeyToValueMap = new HashMap<>(); - - static { - for (var value : EnumSet.allOf(DataSocketMessageType.class)) { - entryKeyToValueMap.put(value.entryKey, value); + SMT_DRIVERMODE("driverMode"), + SMT_CHANGECAMERANAME("changeCameraName"), + SMT_CHANGEPIPELINENAME("changePipelineName"), + SMT_ADDNEWPIPELINE("addNewPipeline"), + SMT_DELETECURRENTPIPELINE("deleteCurrentPipeline"), + SMT_CURRENTCAMERA("currentCamera"), + SMT_PIPELINESETTINGCHANGE("changePipelineSetting"), + SMT_CURRENTPIPELINE("currentPipeline"), + SMT_STARTPNPCALIBRATION("startPnpCalibration"), + SMT_SAVEINPUTSNAPSHOT("saveInputSnapshot"), + SMT_SAVEOUTPUTSNAPSHOT("saveOutputSnapshot"), + SMT_TAKECALIBRATIONSNAPSHOT("takeCalibrationSnapshot"), + SMT_DUPLICATEPIPELINE("duplicatePipeline"), + SMT_CHANGEBRIGHTNESS("enabledLEDPercentage"), + SMT_ROBOTOFFSETPOINT("robotOffsetPoint"), + SMT_CHANGEPIPELINETYPE("pipelineType"); + + public final String entryKey; + + DataSocketMessageType(String entryKey) { + this.entryKey = entryKey; } - } - public static DataSocketMessageType fromEntryKey(String entryKey) { - return entryKeyToValueMap.get(entryKey); - } + private static final Map entryKeyToValueMap = new HashMap<>(); + + static { + for (var value : EnumSet.allOf(DataSocketMessageType.class)) { + 
entryKeyToValueMap.put(value.entryKey, value); + } + } + + public static DataSocketMessageType fromEntryKey(String entryKey) { + return entryKeyToValueMap.get(entryKey); + } } diff --git a/photon-server/src/main/java/org/photonvision/server/RequestHandler.java b/photon-server/src/main/java/org/photonvision/server/RequestHandler.java index 88df5d84e7..9547c2cdf0 100644 --- a/photon-server/src/main/java/org/photonvision/server/RequestHandler.java +++ b/photon-server/src/main/java/org/photonvision/server/RequestHandler.java @@ -49,536 +49,536 @@ import org.photonvision.vision.processes.VisionModuleManager; public class RequestHandler { - // Treat all 2XX calls as "INFO" - // Treat all 4XX calls as "ERROR" - // Treat all 5XX calls as "ERROR" - - private static final Logger logger = new Logger(RequestHandler.class, LogGroup.WebServer); - - private static final ObjectMapper kObjectMapper = new ObjectMapper(); - - public static void onSettingsImportRequest(Context ctx) { - var file = ctx.uploadedFile("data"); - - if (file == null) { - ctx.status(400); - ctx.result( - "No File was sent with the request. Make sure that the settings zip is sent at the key 'data'"); - logger.error( - "No File was sent with the request. Make sure that the settings zip is sent at the key 'data'"); - return; - } - - if (!file.extension().contains("zip")) { - ctx.status(400); - ctx.result( - "The uploaded file was not of type 'zip'. The uploaded file should be a .zip file."); - logger.error( - "The uploaded file was not of type 'zip'. 
The uploaded file should be a .zip file."); - return; - } - - // Create a temp file - var tempFilePath = handleTempFileCreation(file); - - if (tempFilePath.isEmpty()) { - ctx.status(500); - ctx.result("There was an error while creating a temporary copy of the file"); - logger.error("There was an error while creating a temporary copy of the file"); - return; - } - - if (ConfigManager.saveUploadedSettingsZip(tempFilePath.get())) { - ctx.status(200); - ctx.result("Successfully saved the uploaded settings zip"); - logger.info("Successfully saved the uploaded settings zip"); - } else { - ctx.status(500); - ctx.result("There was an error while saving the uploaded zip file"); - logger.error("There was an error while saving the uploaded zip file"); - } - } - - public static void onSettingsExportRequest(Context ctx) { - logger.info("Exporting Settings to ZIP Archive"); - - try { - var zip = ConfigManager.getInstance().getSettingsFolderAsZip(); - var stream = new FileInputStream(zip); - logger.info("Uploading settings with size " + stream.available()); - - ctx.contentType("application/zip"); - ctx.header( - "Content-Disposition", "attachment; filename=\"photonvision-settings-export.zip\""); - - ctx.result(stream); - ctx.status(200); - } catch (IOException e) { - logger.error("Unable to export settings archive, bad recode from zip to byte"); - ctx.status(500); - ctx.result("There was an error while exporting the settings archive"); - } - } - - public static void onHardwareConfigRequest(Context ctx) { - var file = ctx.uploadedFile("data"); - - if (file == null) { - ctx.status(400); - ctx.result( - "No File was sent with the request. Make sure that the hardware config json is sent at the key 'data'"); - logger.error( - "No File was sent with the request. Make sure that the hardware config json is sent at the key 'data'"); - return; - } - - if (!file.extension().contains("json")) { - ctx.status(400); - ctx.result( - "The uploaded file was not of type 'json'. 
The uploaded file should be a .json file."); - logger.error( - "The uploaded file was not of type 'json'. The uploaded file should be a .json file."); - return; - } - - // Create a temp file - var tempFilePath = handleTempFileCreation(file); - - if (tempFilePath.isEmpty()) { - ctx.status(500); - ctx.result("There was an error while creating a temporary copy of the file"); - logger.error("There was an error while creating a temporary copy of the file"); - return; + // Treat all 2XX calls as "INFO" + // Treat all 4XX calls as "ERROR" + // Treat all 5XX calls as "ERROR" + + private static final Logger logger = new Logger(RequestHandler.class, LogGroup.WebServer); + + private static final ObjectMapper kObjectMapper = new ObjectMapper(); + + public static void onSettingsImportRequest(Context ctx) { + var file = ctx.uploadedFile("data"); + + if (file == null) { + ctx.status(400); + ctx.result( + "No File was sent with the request. Make sure that the settings zip is sent at the key 'data'"); + logger.error( + "No File was sent with the request. Make sure that the settings zip is sent at the key 'data'"); + return; + } + + if (!file.extension().contains("zip")) { + ctx.status(400); + ctx.result( + "The uploaded file was not of type 'zip'. The uploaded file should be a .zip file."); + logger.error( + "The uploaded file was not of type 'zip'. 
The uploaded file should be a .zip file."); + return; + } + + // Create a temp file + var tempFilePath = handleTempFileCreation(file); + + if (tempFilePath.isEmpty()) { + ctx.status(500); + ctx.result("There was an error while creating a temporary copy of the file"); + logger.error("There was an error while creating a temporary copy of the file"); + return; + } + + if (ConfigManager.saveUploadedSettingsZip(tempFilePath.get())) { + ctx.status(200); + ctx.result("Successfully saved the uploaded settings zip"); + logger.info("Successfully saved the uploaded settings zip"); + } else { + ctx.status(500); + ctx.result("There was an error while saving the uploaded zip file"); + logger.error("There was an error while saving the uploaded zip file"); + } + } + + public static void onSettingsExportRequest(Context ctx) { + logger.info("Exporting Settings to ZIP Archive"); + + try { + var zip = ConfigManager.getInstance().getSettingsFolderAsZip(); + var stream = new FileInputStream(zip); + logger.info("Uploading settings with size " + stream.available()); + + ctx.contentType("application/zip"); + ctx.header( + "Content-Disposition", "attachment; filename=\"photonvision-settings-export.zip\""); + + ctx.result(stream); + ctx.status(200); + } catch (IOException e) { + logger.error("Unable to export settings archive, bad recode from zip to byte"); + ctx.status(500); + ctx.result("There was an error while exporting the settings archive"); + } + } + + public static void onHardwareConfigRequest(Context ctx) { + var file = ctx.uploadedFile("data"); + + if (file == null) { + ctx.status(400); + ctx.result( + "No File was sent with the request. Make sure that the hardware config json is sent at the key 'data'"); + logger.error( + "No File was sent with the request. Make sure that the hardware config json is sent at the key 'data'"); + return; + } + + if (!file.extension().contains("json")) { + ctx.status(400); + ctx.result( + "The uploaded file was not of type 'json'. 
The uploaded file should be a .json file."); + logger.error( + "The uploaded file was not of type 'json'. The uploaded file should be a .json file."); + return; + } + + // Create a temp file + var tempFilePath = handleTempFileCreation(file); + + if (tempFilePath.isEmpty()) { + ctx.status(500); + ctx.result("There was an error while creating a temporary copy of the file"); + logger.error("There was an error while creating a temporary copy of the file"); + return; + } + + if (ConfigManager.getInstance().saveUploadedHardwareConfig(tempFilePath.get().toPath())) { + ctx.status(200); + ctx.result("Successfully saved the uploaded hardware config"); + logger.info("Successfully saved the uploaded hardware config"); + } else { + ctx.status(500); + ctx.result("There was an error while saving the uploaded hardware config"); + logger.error("There was an error while saving the uploaded hardware config"); + } + } + + public static void onHardwareSettingsRequest(Context ctx) { + var file = ctx.uploadedFile("data"); + + if (file == null) { + ctx.status(400); + ctx.result( + "No File was sent with the request. Make sure that the hardware settings json is sent at the key 'data'"); + logger.error( + "No File was sent with the request. Make sure that the hardware settings json is sent at the key 'data'"); + return; + } + + if (!file.extension().contains("json")) { + ctx.status(400); + ctx.result( + "The uploaded file was not of type 'json'. The uploaded file should be a .json file."); + logger.error( + "The uploaded file was not of type 'json'. 
The uploaded file should be a .json file."); + return; + } + + // Create a temp file + var tempFilePath = handleTempFileCreation(file); + + if (tempFilePath.isEmpty()) { + ctx.status(500); + ctx.result("There was an error while creating a temporary copy of the file"); + logger.error("There was an error while creating a temporary copy of the file"); + return; + } + + if (ConfigManager.getInstance().saveUploadedHardwareSettings(tempFilePath.get().toPath())) { + ctx.status(200); + ctx.result("Successfully saved the uploaded hardware settings"); + logger.info("Successfully saved the uploaded hardware settings"); + } else { + ctx.status(500); + ctx.result("There was an error while saving the uploaded hardware settings"); + logger.error("There was an error while saving the uploaded hardware settings"); + } + } + + public static void onNetworkConfigRequest(Context ctx) { + var file = ctx.uploadedFile("data"); + + if (file == null) { + ctx.status(400); + ctx.result( + "No File was sent with the request. Make sure that the network config json is sent at the key 'data'"); + logger.error( + "No File was sent with the request. Make sure that the network config json is sent at the key 'data'"); + return; + } + + if (!file.extension().contains("json")) { + ctx.status(400); + ctx.result( + "The uploaded file was not of type 'json'. The uploaded file should be a .json file."); + logger.error( + "The uploaded file was not of type 'json'. 
The uploaded file should be a .json file."); + return; + } + + // Create a temp file + var tempFilePath = handleTempFileCreation(file); + + if (tempFilePath.isEmpty()) { + ctx.status(500); + ctx.result("There was an error while creating a temporary copy of the file"); + logger.error("There was an error while creating a temporary copy of the file"); + return; + } + + if (ConfigManager.getInstance().saveUploadedNetworkConfig(tempFilePath.get().toPath())) { + ctx.status(200); + ctx.result("Successfully saved the uploaded network config"); + logger.info("Successfully saved the uploaded network config"); + } else { + ctx.status(500); + ctx.result("There was an error while saving the uploaded network config"); + logger.error("There was an error while saving the uploaded network config"); + } + } + + public static void onAprilTagFieldLayoutRequest(Context ctx) { + var file = ctx.uploadedFile("data"); + + if (file == null) { + ctx.status(400); + ctx.result( + "No File was sent with the request. Make sure that the field layout json is sent at the key 'data'"); + logger.error( + "No File was sent with the request. Make sure that the field layout json is sent at the key 'data'"); + return; + } + + if (!file.extension().contains("json")) { + ctx.status(400); + ctx.result( + "The uploaded file was not of type 'json'. The uploaded file should be a .json file."); + logger.error( + "The uploaded file was not of type 'json'. 
The uploaded file should be a .json file."); + return; + } + + // Create a temp file + var tempFilePath = handleTempFileCreation(file); + + if (tempFilePath.isEmpty()) { + ctx.status(500); + ctx.result("There was an error while creating a temporary copy of the file"); + logger.error("There was an error while creating a temporary copy of the file"); + return; + } + + if (ConfigManager.getInstance().saveUploadedAprilTagFieldLayout(tempFilePath.get().toPath())) { + ctx.status(200); + ctx.result("Successfully saved the uploaded AprilTagFieldLayout"); + logger.info("Successfully saved the uploaded AprilTagFieldLayout"); + } else { + ctx.status(500); + ctx.result("There was an error while saving the uploaded AprilTagFieldLayout"); + logger.error("There was an error while saving the uploaded AprilTagFieldLayout"); + } + } + + public static void onOfflineUpdateRequest(Context ctx) { + var file = ctx.uploadedFile("jarData"); + + if (file == null) { + ctx.status(400); + ctx.result( + "No File was sent with the request. Make sure that the new jar is sent at the key 'jarData'"); + logger.error( + "No File was sent with the request. Make sure that the new jar is sent at the key 'jarData'"); + return; + } + + if (!file.extension().contains("jar")) { + ctx.status(400); + ctx.result( + "The uploaded file was not of type 'jar'. The uploaded file should be a .jar file."); + logger.error( + "The uploaded file was not of type 'jar'. The uploaded file should be a .jar file."); + return; + } + + try { + Path filePath = + Paths.get(ProgramDirectoryUtilities.getProgramDirectory(), "photonvision.jar"); + File targetFile = new File(filePath.toString()); + var stream = new FileOutputStream(targetFile); + + file.content().transferTo(stream); + stream.close(); + + ctx.status(200); + ctx.result( + "Offline update successfully complete. PhotonVision will restart in the background."); + logger.info( + "Offline update successfully complete. 
PhotonVision will restart in the background."); + restartProgram(); + } catch (FileNotFoundException e) { + ctx.result("The current program jar file couldn't be found."); + ctx.status(500); + logger.error("The current program jar file couldn't be found.", e); + } catch (IOException e) { + ctx.result("Unable to overwrite the existing program with the new program."); + ctx.status(500); + logger.error("Unable to overwrite the existing program with the new program.", e); + } + } + + public static void onGeneralSettingsRequest(Context ctx) { + NetworkConfig config; + try { + config = kObjectMapper.readValue(ctx.body(), NetworkConfig.class); + + ctx.status(200); + ctx.result("Successfully saved general settings"); + logger.info("Successfully saved general settings"); + } catch (JsonProcessingException e) { + // If the settings can't be parsed, use the default network settings + config = new NetworkConfig(); + + ctx.status(400); + ctx.result("The provided general settings were malformed"); + logger.error("The provided general settings were malformed", e); + } + + ConfigManager.getInstance().setNetworkSettings(config); + ConfigManager.getInstance().requestSave(); + + NetworkManager.getInstance().reinitialize(); + + NetworkTablesManager.getInstance().setConfig(config); + } + + public static void onCameraSettingsRequest(Context ctx) { + try { + var data = kObjectMapper.readTree(ctx.body()); + + int index = data.get("index").asInt(); + double fov = data.get("settings").get("fov").asDouble(); + + var module = VisionModuleManager.getInstance().getModule(index); + module.setFov(fov); + + module.saveModule(); + + ctx.status(200); + ctx.result("Successfully saved camera settings"); + logger.info("Successfully saved camera settings"); + } catch (JsonProcessingException | NullPointerException e) { + ctx.status(400); + ctx.result("The provided camera settings were malformed"); + logger.error("The provided camera settings were malformed", e); + } + } + + public static void 
onLogExportRequest(Context ctx) { + if (!Platform.isLinux()) { + ctx.status(405); + ctx.result("Logs can only be exported on a Linux platform"); + // INFO only log because this isn't ERROR worthy + logger.info("Logs can only be exported on a Linux platform"); + return; + } + + try { + ShellExec shell = new ShellExec(); + var tempPath = Files.createTempFile("photonvision-journalctl", ".txt"); + shell.executeBashCommand("journalctl -u photonvision.service > " + tempPath.toAbsolutePath()); + + while (!shell.isOutputCompleted()) { + // TODO: add timeout + } + + if (shell.getExitCode() == 0) { + // Wrote to the temp file! Add it to the ctx + var stream = new FileInputStream(tempPath.toFile()); + ctx.contentType("text/plain"); + ctx.header("Content-Disposition", "attachment; filename=\"photonvision-journalctl.txt\""); + ctx.status(200); + ctx.result(stream); + logger.info("Uploading settings with size " + stream.available()); + } else { + ctx.status(500); + ctx.result("The journalctl service was unable to export logs"); + logger.error("The journalctl service was unable to export logs"); + } + } catch (IOException e) { + ctx.status(500); + ctx.result("There was an error while exporting journactl logs"); + logger.error("There was an error while exporting journactl logs", e); + } + } + + public static void onCalibrationEndRequest(Context ctx) { + logger.info("Calibrating camera! This will take a long time..."); + + int index; + + try { + index = kObjectMapper.readTree(ctx.body()).get("index").asInt(); + + var calData = VisionModuleManager.getInstance().getModule(index).endCalibration(); + if (calData == null) { + ctx.result("The calibration process failed"); + ctx.status(500); + logger.error( + "The calibration process failed. 
Calibration data for module at index (" + + index + + ") was null"); + return; + } + + ctx.result("Camera calibration successfully completed!"); + ctx.status(200); + logger.info("Camera calibration successfully completed!"); + } catch (JsonProcessingException e) { + ctx.status(400); + ctx.result( + "The 'index' field was not found in the request. Please make sure the index of the vision module is specified with the 'index' key."); + logger.error( + "The 'index' field was not found in the request. Please make sure the index of the vision module is specified with the 'index' key.", + e); + } catch (Exception e) { + ctx.status(500); + ctx.result("There was an error while ending calibration"); + logger.error("There was an error while ending calibration", e); + } + } + + public static void onCalibrationImportRequest(Context ctx) { + var data = ctx.body(); + + try { + var actualObj = kObjectMapper.readTree(data); + + int cameraIndex = actualObj.get("cameraIndex").asInt(); + var payload = kObjectMapper.readTree(actualObj.get("payload").asText()); + var coeffs = CameraCalibrationCoefficients.parseFromCalibdbJson(payload); + + var uploadCalibrationEvent = + new IncomingWebSocketEvent<>( + DataChangeDestination.DCD_ACTIVEMODULE, + "calibrationUploaded", + coeffs, + cameraIndex, + null); + DataChangeService.getInstance().publishEvent(uploadCalibrationEvent); + + ctx.status(200); + ctx.result("Calibration imported successfully from CalibDB data!"); + logger.info("Calibration imported successfully from CalibDB data!"); + } catch (JsonProcessingException e) { + ctx.status(400); + ctx.result( + "The Provided CalibDB data is malformed and cannot be parsed for the required fields."); + logger.error( + "The Provided CalibDB data is malformed and cannot be parsed for the required fields.", + e); + } + } + + public static void onProgramRestartRequest(Context ctx) { + // TODO, check if this was successful or not + ctx.status(204); + restartProgram(); + } + + public static void 
onDeviceRestartRequest(Context ctx) { + ctx.status(HardwareManager.getInstance().restartDevice() ? 204 : 500); + } + + public static void onCameraNicknameChangeRequest(Context ctx) { + try { + var data = kObjectMapper.readTree(ctx.body()); + + String name = data.get("name").asText(); + int idx = data.get("cameraIndex").asInt(); + + VisionModuleManager.getInstance().getModule(idx).setCameraNickname(name); + ctx.status(200); + ctx.result("Successfully changed the camera name to: " + name); + logger.info("Successfully changed the camera name to: " + name); + } catch (JsonProcessingException e) { + ctx.status(400); + ctx.result("The provided nickname data was malformed"); + logger.error("The provided nickname data was malformed", e); + + } catch (Exception e) { + ctx.status(500); + ctx.result("An error occurred while changing the camera's nickname"); + logger.error("An error occurred while changing the camera's nickname", e); + } + } + + public static void onMetricsPublishRequest(Context ctx) { + HardwareManager.getInstance().publishMetrics(); + ctx.status(204); + } + + /** + * Create a temporary file using the UploadedFile from Javalin. + * + * @param file the uploaded file. + * @return Temporary file. Empty if the temporary file was unable to be created. + */ + private static Optional handleTempFileCreation(UploadedFile file) { + var tempFilePath = + new File(Path.of(System.getProperty("java.io.tmpdir"), file.filename()).toString()); + boolean makeDirsRes = tempFilePath.getParentFile().mkdirs(); + + if (!makeDirsRes && !(tempFilePath.getParentFile().exists())) { + logger.error( + "There was an error while creating " + + tempFilePath.getAbsolutePath() + + "! 
Exists: " + + tempFilePath.getParentFile().exists()); + return Optional.empty(); + } + + try { + FileUtils.copyInputStreamToFile(file.content(), tempFilePath); + } catch (IOException e) { + logger.error( + "There was an error while copying " + + file.filename() + + " to the temp file " + + tempFilePath.getAbsolutePath()); + return Optional.empty(); + } + + return Optional.of(tempFilePath); + } + + /** + * Restart the running program. Note that this doesn't actually restart the program itself, + * instead, it relies on systemd or an equivalent. + */ + private static void restartProgram() { + TimedTaskManager.getInstance() + .addOneShotTask( + () -> { + if (Platform.isLinux()) { + try { + new ShellExec().executeBashCommand("systemctl restart photonvision.service"); + } catch (IOException e) { + logger.error("Could not restart device!", e); + System.exit(0); + } + } else { + System.exit(0); + } + }, + 0); } - - if (ConfigManager.getInstance().saveUploadedHardwareConfig(tempFilePath.get().toPath())) { - ctx.status(200); - ctx.result("Successfully saved the uploaded hardware config"); - logger.info("Successfully saved the uploaded hardware config"); - } else { - ctx.status(500); - ctx.result("There was an error while saving the uploaded hardware config"); - logger.error("There was an error while saving the uploaded hardware config"); - } - } - - public static void onHardwareSettingsRequest(Context ctx) { - var file = ctx.uploadedFile("data"); - - if (file == null) { - ctx.status(400); - ctx.result( - "No File was sent with the request. Make sure that the hardware settings json is sent at the key 'data'"); - logger.error( - "No File was sent with the request. Make sure that the hardware settings json is sent at the key 'data'"); - return; - } - - if (!file.extension().contains("json")) { - ctx.status(400); - ctx.result( - "The uploaded file was not of type 'json'. The uploaded file should be a .json file."); - logger.error( - "The uploaded file was not of type 'json'. 
The uploaded file should be a .json file."); - return; - } - - // Create a temp file - var tempFilePath = handleTempFileCreation(file); - - if (tempFilePath.isEmpty()) { - ctx.status(500); - ctx.result("There was an error while creating a temporary copy of the file"); - logger.error("There was an error while creating a temporary copy of the file"); - return; - } - - if (ConfigManager.getInstance().saveUploadedHardwareSettings(tempFilePath.get().toPath())) { - ctx.status(200); - ctx.result("Successfully saved the uploaded hardware settings"); - logger.info("Successfully saved the uploaded hardware settings"); - } else { - ctx.status(500); - ctx.result("There was an error while saving the uploaded hardware settings"); - logger.error("There was an error while saving the uploaded hardware settings"); - } - } - - public static void onNetworkConfigRequest(Context ctx) { - var file = ctx.uploadedFile("data"); - - if (file == null) { - ctx.status(400); - ctx.result( - "No File was sent with the request. Make sure that the network config json is sent at the key 'data'"); - logger.error( - "No File was sent with the request. Make sure that the network config json is sent at the key 'data'"); - return; - } - - if (!file.extension().contains("json")) { - ctx.status(400); - ctx.result( - "The uploaded file was not of type 'json'. The uploaded file should be a .json file."); - logger.error( - "The uploaded file was not of type 'json'. 
The uploaded file should be a .json file."); - return; - } - - // Create a temp file - var tempFilePath = handleTempFileCreation(file); - - if (tempFilePath.isEmpty()) { - ctx.status(500); - ctx.result("There was an error while creating a temporary copy of the file"); - logger.error("There was an error while creating a temporary copy of the file"); - return; - } - - if (ConfigManager.getInstance().saveUploadedNetworkConfig(tempFilePath.get().toPath())) { - ctx.status(200); - ctx.result("Successfully saved the uploaded network config"); - logger.info("Successfully saved the uploaded network config"); - } else { - ctx.status(500); - ctx.result("There was an error while saving the uploaded network config"); - logger.error("There was an error while saving the uploaded network config"); - } - } - - public static void onAprilTagFieldLayoutRequest(Context ctx) { - var file = ctx.uploadedFile("data"); - - if (file == null) { - ctx.status(400); - ctx.result( - "No File was sent with the request. Make sure that the field layout json is sent at the key 'data'"); - logger.error( - "No File was sent with the request. Make sure that the field layout json is sent at the key 'data'"); - return; - } - - if (!file.extension().contains("json")) { - ctx.status(400); - ctx.result( - "The uploaded file was not of type 'json'. The uploaded file should be a .json file."); - logger.error( - "The uploaded file was not of type 'json'. 
The uploaded file should be a .json file."); - return; - } - - // Create a temp file - var tempFilePath = handleTempFileCreation(file); - - if (tempFilePath.isEmpty()) { - ctx.status(500); - ctx.result("There was an error while creating a temporary copy of the file"); - logger.error("There was an error while creating a temporary copy of the file"); - return; - } - - if (ConfigManager.getInstance().saveUploadedAprilTagFieldLayout(tempFilePath.get().toPath())) { - ctx.status(200); - ctx.result("Successfully saved the uploaded AprilTagFieldLayout"); - logger.info("Successfully saved the uploaded AprilTagFieldLayout"); - } else { - ctx.status(500); - ctx.result("There was an error while saving the uploaded AprilTagFieldLayout"); - logger.error("There was an error while saving the uploaded AprilTagFieldLayout"); - } - } - - public static void onOfflineUpdateRequest(Context ctx) { - var file = ctx.uploadedFile("jarData"); - - if (file == null) { - ctx.status(400); - ctx.result( - "No File was sent with the request. Make sure that the new jar is sent at the key 'jarData'"); - logger.error( - "No File was sent with the request. Make sure that the new jar is sent at the key 'jarData'"); - return; - } - - if (!file.extension().contains("jar")) { - ctx.status(400); - ctx.result( - "The uploaded file was not of type 'jar'. The uploaded file should be a .jar file."); - logger.error( - "The uploaded file was not of type 'jar'. The uploaded file should be a .jar file."); - return; - } - - try { - Path filePath = - Paths.get(ProgramDirectoryUtilities.getProgramDirectory(), "photonvision.jar"); - File targetFile = new File(filePath.toString()); - var stream = new FileOutputStream(targetFile); - - file.content().transferTo(stream); - stream.close(); - - ctx.status(200); - ctx.result( - "Offline update successfully complete. PhotonVision will restart in the background."); - logger.info( - "Offline update successfully complete. 
PhotonVision will restart in the background."); - restartProgram(); - } catch (FileNotFoundException e) { - ctx.result("The current program jar file couldn't be found."); - ctx.status(500); - logger.error("The current program jar file couldn't be found.", e); - } catch (IOException e) { - ctx.result("Unable to overwrite the existing program with the new program."); - ctx.status(500); - logger.error("Unable to overwrite the existing program with the new program.", e); - } - } - - public static void onGeneralSettingsRequest(Context ctx) { - NetworkConfig config; - try { - config = kObjectMapper.readValue(ctx.body(), NetworkConfig.class); - - ctx.status(200); - ctx.result("Successfully saved general settings"); - logger.info("Successfully saved general settings"); - } catch (JsonProcessingException e) { - // If the settings can't be parsed, use the default network settings - config = new NetworkConfig(); - - ctx.status(400); - ctx.result("The provided general settings were malformed"); - logger.error("The provided general settings were malformed", e); - } - - ConfigManager.getInstance().setNetworkSettings(config); - ConfigManager.getInstance().requestSave(); - - NetworkManager.getInstance().reinitialize(); - - NetworkTablesManager.getInstance().setConfig(config); - } - - public static void onCameraSettingsRequest(Context ctx) { - try { - var data = kObjectMapper.readTree(ctx.body()); - - int index = data.get("index").asInt(); - double fov = data.get("settings").get("fov").asDouble(); - - var module = VisionModuleManager.getInstance().getModule(index); - module.setFov(fov); - - module.saveModule(); - - ctx.status(200); - ctx.result("Successfully saved camera settings"); - logger.info("Successfully saved camera settings"); - } catch (JsonProcessingException | NullPointerException e) { - ctx.status(400); - ctx.result("The provided camera settings were malformed"); - logger.error("The provided camera settings were malformed", e); - } - } - - public static void 
onLogExportRequest(Context ctx) { - if (!Platform.isLinux()) { - ctx.status(405); - ctx.result("Logs can only be exported on a Linux platform"); - // INFO only log because this isn't ERROR worthy - logger.info("Logs can only be exported on a Linux platform"); - return; - } - - try { - ShellExec shell = new ShellExec(); - var tempPath = Files.createTempFile("photonvision-journalctl", ".txt"); - shell.executeBashCommand("journalctl -u photonvision.service > " + tempPath.toAbsolutePath()); - - while (!shell.isOutputCompleted()) { - // TODO: add timeout - } - - if (shell.getExitCode() == 0) { - // Wrote to the temp file! Add it to the ctx - var stream = new FileInputStream(tempPath.toFile()); - ctx.contentType("text/plain"); - ctx.header("Content-Disposition", "attachment; filename=\"photonvision-journalctl.txt\""); - ctx.status(200); - ctx.result(stream); - logger.info("Uploading settings with size " + stream.available()); - } else { - ctx.status(500); - ctx.result("The journalctl service was unable to export logs"); - logger.error("The journalctl service was unable to export logs"); - } - } catch (IOException e) { - ctx.status(500); - ctx.result("There was an error while exporting journactl logs"); - logger.error("There was an error while exporting journactl logs", e); - } - } - - public static void onCalibrationEndRequest(Context ctx) { - logger.info("Calibrating camera! This will take a long time..."); - - int index; - - try { - index = kObjectMapper.readTree(ctx.body()).get("index").asInt(); - - var calData = VisionModuleManager.getInstance().getModule(index).endCalibration(); - if (calData == null) { - ctx.result("The calibration process failed"); - ctx.status(500); - logger.error( - "The calibration process failed. 
Calibration data for module at index (" - + index - + ") was null"); - return; - } - - ctx.result("Camera calibration successfully completed!"); - ctx.status(200); - logger.info("Camera calibration successfully completed!"); - } catch (JsonProcessingException e) { - ctx.status(400); - ctx.result( - "The 'index' field was not found in the request. Please make sure the index of the vision module is specified with the 'index' key."); - logger.error( - "The 'index' field was not found in the request. Please make sure the index of the vision module is specified with the 'index' key.", - e); - } catch (Exception e) { - ctx.status(500); - ctx.result("There was an error while ending calibration"); - logger.error("There was an error while ending calibration", e); - } - } - - public static void onCalibrationImportRequest(Context ctx) { - var data = ctx.body(); - - try { - var actualObj = kObjectMapper.readTree(data); - - int cameraIndex = actualObj.get("cameraIndex").asInt(); - var payload = kObjectMapper.readTree(actualObj.get("payload").asText()); - var coeffs = CameraCalibrationCoefficients.parseFromCalibdbJson(payload); - - var uploadCalibrationEvent = - new IncomingWebSocketEvent<>( - DataChangeDestination.DCD_ACTIVEMODULE, - "calibrationUploaded", - coeffs, - cameraIndex, - null); - DataChangeService.getInstance().publishEvent(uploadCalibrationEvent); - - ctx.status(200); - ctx.result("Calibration imported successfully from CalibDB data!"); - logger.info("Calibration imported successfully from CalibDB data!"); - } catch (JsonProcessingException e) { - ctx.status(400); - ctx.result( - "The Provided CalibDB data is malformed and cannot be parsed for the required fields."); - logger.error( - "The Provided CalibDB data is malformed and cannot be parsed for the required fields.", - e); - } - } - - public static void onProgramRestartRequest(Context ctx) { - // TODO, check if this was successful or not - ctx.status(204); - restartProgram(); - } - - public static void 
onDeviceRestartRequest(Context ctx) { - ctx.status(HardwareManager.getInstance().restartDevice() ? 204 : 500); - } - - public static void onCameraNicknameChangeRequest(Context ctx) { - try { - var data = kObjectMapper.readTree(ctx.body()); - - String name = data.get("name").asText(); - int idx = data.get("cameraIndex").asInt(); - - VisionModuleManager.getInstance().getModule(idx).setCameraNickname(name); - ctx.status(200); - ctx.result("Successfully changed the camera name to: " + name); - logger.info("Successfully changed the camera name to: " + name); - } catch (JsonProcessingException e) { - ctx.status(400); - ctx.result("The provided nickname data was malformed"); - logger.error("The provided nickname data was malformed", e); - - } catch (Exception e) { - ctx.status(500); - ctx.result("An error occurred while changing the camera's nickname"); - logger.error("An error occurred while changing the camera's nickname", e); - } - } - - public static void onMetricsPublishRequest(Context ctx) { - HardwareManager.getInstance().publishMetrics(); - ctx.status(204); - } - - /** - * Create a temporary file using the UploadedFile from Javalin. - * - * @param file the uploaded file. - * @return Temporary file. Empty if the temporary file was unable to be created. - */ - private static Optional handleTempFileCreation(UploadedFile file) { - var tempFilePath = - new File(Path.of(System.getProperty("java.io.tmpdir"), file.filename()).toString()); - boolean makeDirsRes = tempFilePath.getParentFile().mkdirs(); - - if (!makeDirsRes && !(tempFilePath.getParentFile().exists())) { - logger.error( - "There was an error while creating " - + tempFilePath.getAbsolutePath() - + "! 
Exists: " - + tempFilePath.getParentFile().exists()); - return Optional.empty(); - } - - try { - FileUtils.copyInputStreamToFile(file.content(), tempFilePath); - } catch (IOException e) { - logger.error( - "There was an error while copying " - + file.filename() - + " to the temp file " - + tempFilePath.getAbsolutePath()); - return Optional.empty(); - } - - return Optional.of(tempFilePath); - } - - /** - * Restart the running program. Note that this doesn't actually restart the program itself, - * instead, it relies on systemd or an equivalent. - */ - private static void restartProgram() { - TimedTaskManager.getInstance() - .addOneShotTask( - () -> { - if (Platform.isLinux()) { - try { - new ShellExec().executeBashCommand("systemctl restart photonvision.service"); - } catch (IOException e) { - logger.error("Could not restart device!", e); - System.exit(0); - } - } else { - System.exit(0); - } - }, - 0); - } } diff --git a/photon-server/src/main/java/org/photonvision/server/Server.java b/photon-server/src/main/java/org/photonvision/server/Server.java index e83c1e2b46..4026ab58fa 100644 --- a/photon-server/src/main/java/org/photonvision/server/Server.java +++ b/photon-server/src/main/java/org/photonvision/server/Server.java @@ -25,94 +25,94 @@ import org.photonvision.common.logging.Logger; public class Server { - private static final Logger logger = new Logger(Server.class, LogGroup.WebServer); + private static final Logger logger = new Logger(Server.class, LogGroup.WebServer); - public static void start(int port) { - var app = - Javalin.create( - javalinConfig -> { - javalinConfig.showJavalinBanner = false; - javalinConfig.staticFiles.add("web"); - javalinConfig.plugins.enableCors( - corsContainer -> { - corsContainer.add(CorsPluginConfig::anyHost); - }); + public static void start(int port) { + var app = + Javalin.create( + javalinConfig -> { + javalinConfig.showJavalinBanner = false; + javalinConfig.staticFiles.add("web"); + javalinConfig.plugins.enableCors( + 
corsContainer -> { + corsContainer.add(CorsPluginConfig::anyHost); + }); - javalinConfig.requestLogger.http( - (ctx, ms) -> { - StringJoiner joiner = - new StringJoiner(" ") - .add("Handled HTTP request of type") - .add(ctx.req().getMethod()) - .add("from endpoint") - .add(ctx.path()) - .add("for host") - .add(ctx.req().getRemoteHost()) - .add("in") - .add(ms.toString()) - .add("ms"); + javalinConfig.requestLogger.http( + (ctx, ms) -> { + StringJoiner joiner = + new StringJoiner(" ") + .add("Handled HTTP request of type") + .add(ctx.req().getMethod()) + .add("from endpoint") + .add(ctx.path()) + .add("for host") + .add(ctx.req().getRemoteHost()) + .add("in") + .add(ms.toString()) + .add("ms"); - logger.debug(joiner.toString()); - }); - javalinConfig.requestLogger.ws( - ws -> { - ws.onMessage(ctx -> logger.debug("Got WebSockets message: " + ctx.message())); - ws.onBinaryMessage( - ctx -> - logger.trace( - () -> { - var remote = (InetSocketAddress) ctx.session.getRemoteAddress(); - var host = - remote.getAddress().toString() + ":" + remote.getPort(); - return "Got WebSockets binary message from host: " + host; - })); - }); - }); + logger.debug(joiner.toString()); + }); + javalinConfig.requestLogger.ws( + ws -> { + ws.onMessage(ctx -> logger.debug("Got WebSockets message: " + ctx.message())); + ws.onBinaryMessage( + ctx -> + logger.trace( + () -> { + var remote = (InetSocketAddress) ctx.session.getRemoteAddress(); + var host = + remote.getAddress().toString() + ":" + remote.getPort(); + return "Got WebSockets binary message from host: " + host; + })); + }); + }); - /*Web Socket Events for Data Exchange */ - var dsHandler = DataSocketHandler.getInstance(); - app.ws( - "/websocket_data", - ws -> { - ws.onConnect(dsHandler::onConnect); - ws.onClose(dsHandler::onClose); - ws.onBinaryMessage(dsHandler::onBinaryMessage); - }); + /*Web Socket Events for Data Exchange */ + var dsHandler = DataSocketHandler.getInstance(); + app.ws( + "/websocket_data", + ws -> { + 
ws.onConnect(dsHandler::onConnect); + ws.onClose(dsHandler::onClose); + ws.onBinaryMessage(dsHandler::onBinaryMessage); + }); - /*Web Socket Events for Camera Streaming */ - var camDsHandler = CameraSocketHandler.getInstance(); - app.ws( - "/websocket_cameras", - ws -> { - ws.onConnect(camDsHandler::onConnect); - ws.onClose(camDsHandler::onClose); - ws.onBinaryMessage(camDsHandler::onBinaryMessage); - ws.onMessage(camDsHandler::onMessage); - }); + /*Web Socket Events for Camera Streaming */ + var camDsHandler = CameraSocketHandler.getInstance(); + app.ws( + "/websocket_cameras", + ws -> { + ws.onConnect(camDsHandler::onConnect); + ws.onClose(camDsHandler::onClose); + ws.onBinaryMessage(camDsHandler::onBinaryMessage); + ws.onMessage(camDsHandler::onMessage); + }); - /*API Events*/ - // Settings - app.post("/api/settings", RequestHandler::onSettingsImportRequest); - app.get("/api/settings/photonvision_config.zip", RequestHandler::onSettingsExportRequest); - app.post("/api/settings/hardwareConfig", RequestHandler::onHardwareConfigRequest); - app.post("/api/settings/hardwareSettings", RequestHandler::onHardwareSettingsRequest); - app.post("/api/settings/networkConfig", RequestHandler::onNetworkConfigRequest); - app.post("/api/settings/aprilTagFieldLayout", RequestHandler::onAprilTagFieldLayoutRequest); - app.post("/api/settings/general", RequestHandler::onGeneralSettingsRequest); - app.post("/api/settings/camera", RequestHandler::onCameraSettingsRequest); - app.post("/api/settings/camera/setNickname", RequestHandler::onCameraNicknameChangeRequest); + /*API Events*/ + // Settings + app.post("/api/settings", RequestHandler::onSettingsImportRequest); + app.get("/api/settings/photonvision_config.zip", RequestHandler::onSettingsExportRequest); + app.post("/api/settings/hardwareConfig", RequestHandler::onHardwareConfigRequest); + app.post("/api/settings/hardwareSettings", RequestHandler::onHardwareSettingsRequest); + app.post("/api/settings/networkConfig", 
RequestHandler::onNetworkConfigRequest); + app.post("/api/settings/aprilTagFieldLayout", RequestHandler::onAprilTagFieldLayoutRequest); + app.post("/api/settings/general", RequestHandler::onGeneralSettingsRequest); + app.post("/api/settings/camera", RequestHandler::onCameraSettingsRequest); + app.post("/api/settings/camera/setNickname", RequestHandler::onCameraNicknameChangeRequest); - // Utilities - app.post("/api/utils/offlineUpdate", RequestHandler::onOfflineUpdateRequest); - app.get("/api/utils/photonvision-journalctl.txt", RequestHandler::onLogExportRequest); - app.post("/api/utils/restartProgram", RequestHandler::onProgramRestartRequest); - app.post("/api/utils/restartDevice", RequestHandler::onDeviceRestartRequest); - app.post("/api/utils/publishMetrics", RequestHandler::onMetricsPublishRequest); + // Utilities + app.post("/api/utils/offlineUpdate", RequestHandler::onOfflineUpdateRequest); + app.get("/api/utils/photonvision-journalctl.txt", RequestHandler::onLogExportRequest); + app.post("/api/utils/restartProgram", RequestHandler::onProgramRestartRequest); + app.post("/api/utils/restartDevice", RequestHandler::onDeviceRestartRequest); + app.post("/api/utils/publishMetrics", RequestHandler::onMetricsPublishRequest); - // Calibration - app.post("/api/calibration/end", RequestHandler::onCalibrationEndRequest); - app.post("/api/calibration/importFromCalibDB", RequestHandler::onCalibrationImportRequest); + // Calibration + app.post("/api/calibration/end", RequestHandler::onCalibrationEndRequest); + app.post("/api/calibration/importFromCalibDB", RequestHandler::onCalibrationImportRequest); - app.start(port); - } + app.start(port); + } } diff --git a/photon-server/src/main/java/org/photonvision/server/UIInboundSubscriber.java b/photon-server/src/main/java/org/photonvision/server/UIInboundSubscriber.java index cca65b54ed..5f1e125a4b 100644 --- a/photon-server/src/main/java/org/photonvision/server/UIInboundSubscriber.java +++ 
b/photon-server/src/main/java/org/photonvision/server/UIInboundSubscriber.java @@ -30,26 +30,26 @@ import org.photonvision.common.logging.Logger; public class UIInboundSubscriber extends DataChangeSubscriber { - public UIInboundSubscriber() { - super( - Collections.singletonList(DataChangeSource.DCS_WEBSOCKET), - Collections.singletonList(DataChangeDestination.DCD_GENSETTINGS)); - } + public UIInboundSubscriber() { + super( + Collections.singletonList(DataChangeSource.DCS_WEBSOCKET), + Collections.singletonList(DataChangeDestination.DCD_GENSETTINGS)); + } - @Override - public void onDataChangeEvent(DataChangeEvent event) { - if (event instanceof IncomingWebSocketEvent) { - var incomingWSEvent = (IncomingWebSocketEvent) event; - if (incomingWSEvent.propertyName.equals("userConnected") - || incomingWSEvent.propertyName.equals("sendFullSettings")) { - // Send full settings - var settings = ConfigManager.getInstance().getConfig().toHashMap(); - var message = - new OutgoingUIEvent<>("fullsettings", settings, incomingWSEvent.originContext); - DataChangeService.getInstance().publishEvent(message); - Logger.sendConnectedBacklog(); - NetworkTablesManager.getInstance().broadcastConnectedStatus(); - } + @Override + public void onDataChangeEvent(DataChangeEvent event) { + if (event instanceof IncomingWebSocketEvent) { + var incomingWSEvent = (IncomingWebSocketEvent) event; + if (incomingWSEvent.propertyName.equals("userConnected") + || incomingWSEvent.propertyName.equals("sendFullSettings")) { + // Send full settings + var settings = ConfigManager.getInstance().getConfig().toHashMap(); + var message = + new OutgoingUIEvent<>("fullsettings", settings, incomingWSEvent.originContext); + DataChangeService.getInstance().publishEvent(message); + Logger.sendConnectedBacklog(); + NetworkTablesManager.getInstance().broadcastConnectedStatus(); + } + } } - } } diff --git a/photon-server/src/main/java/org/photonvision/server/UIOutboundSubscriber.java 
b/photon-server/src/main/java/org/photonvision/server/UIOutboundSubscriber.java index 91af46b54b..e502b5eec9 100644 --- a/photon-server/src/main/java/org/photonvision/server/UIOutboundSubscriber.java +++ b/photon-server/src/main/java/org/photonvision/server/UIOutboundSubscriber.java @@ -33,29 +33,29 @@ * DO NOT use logging in this class. If you do, the logs will recurse forever! */ class UIOutboundSubscriber extends DataChangeSubscriber { - Logger logger = new Logger(UIOutboundSubscriber.class, LogGroup.WebServer); + Logger logger = new Logger(UIOutboundSubscriber.class, LogGroup.WebServer); - private final DataSocketHandler socketHandler; + private final DataSocketHandler socketHandler; - public UIOutboundSubscriber(DataSocketHandler socketHandler) { - super(DataChangeSource.AllSources, Collections.singletonList(DataChangeDestination.DCD_UI)); - this.socketHandler = socketHandler; - } + public UIOutboundSubscriber(DataSocketHandler socketHandler) { + super(DataChangeSource.AllSources, Collections.singletonList(DataChangeDestination.DCD_UI)); + this.socketHandler = socketHandler; + } - @Override - public void onDataChangeEvent(DataChangeEvent event) { - if (event instanceof OutgoingUIEvent) { - var thisEvent = (OutgoingUIEvent) event; - try { - if (event.data instanceof HashMap) { - var data = (HashMap) event.data; - socketHandler.broadcastMessage(data, thisEvent.originContext); - } else { - socketHandler.broadcastMessage(event.data, thisEvent.originContext); + @Override + public void onDataChangeEvent(DataChangeEvent event) { + if (event instanceof OutgoingUIEvent) { + var thisEvent = (OutgoingUIEvent) event; + try { + if (event.data instanceof HashMap) { + var data = (HashMap) event.data; + socketHandler.broadcastMessage(data, thisEvent.originContext); + } else { + socketHandler.broadcastMessage(event.data, thisEvent.originContext); + } + } catch (JsonProcessingException e) { + logger.error("Failed to process outgoing message!", e); + } } - } catch 
(JsonProcessingException e) { - logger.error("Failed to process outgoing message!", e); - } } - } } diff --git a/photon-server/src/main/java/org/photonvision/server/UISettings.java b/photon-server/src/main/java/org/photonvision/server/UISettings.java index 0aa3731134..f4361c2670 100644 --- a/photon-server/src/main/java/org/photonvision/server/UISettings.java +++ b/photon-server/src/main/java/org/photonvision/server/UISettings.java @@ -21,6 +21,6 @@ import org.photonvision.common.configuration.NetworkConfig; public class UISettings { - public NetworkConfig networkConfig; - public CameraConfiguration currentCameraConfiguration; + public NetworkConfig networkConfig; + public CameraConfiguration currentCameraConfiguration; } diff --git a/photon-targeting/src/main/java/org/photonvision/common/dataflow/structures/Packet.java b/photon-targeting/src/main/java/org/photonvision/common/dataflow/structures/Packet.java index a7b46cf64b..e97e3f1658 100644 --- a/photon-targeting/src/main/java/org/photonvision/common/dataflow/structures/Packet.java +++ b/photon-targeting/src/main/java/org/photonvision/common/dataflow/structures/Packet.java @@ -19,198 +19,198 @@ /** A packet that holds byte-packed data to be sent over NetworkTables. */ public class Packet { - // Size of the packet. - int size; - // Data stored in the packet. - byte[] packetData; - // Read and write positions. - int readPos, writePos; - - /** - * Constructs an empty packet. - * - * @param size The size of the packet buffer. - */ - public Packet(int size) { - this.size = size; - packetData = new byte[size]; - } - - /** - * Constructs a packet with the given data. - * - * @param data The packet data. - */ - public Packet(byte[] data) { - packetData = data; - size = packetData.length; - } - - /** Clears the packet and resets the read and write positions. 
*/ - public void clear() { - packetData = new byte[size]; - readPos = 0; - writePos = 0; - } - - public int getSize() { - return size; - } - - /** - * Returns the packet data. - * - * @return The packet data. - */ - public byte[] getData() { - return packetData; - } - - /** - * Sets the packet data. - * - * @param data The packet data. - */ - public void setData(byte[] data) { - packetData = data; - size = data.length; - } - - /** - * Encodes the byte into the packet. - * - * @param src The byte to encode. - */ - public void encode(byte src) { - packetData[writePos++] = src; - } - - /** - * Encodes the short into the packet. - * - * @param src The short to encode. - */ - public void encode(short src) { - packetData[writePos++] = (byte) (src >>> 8); - packetData[writePos++] = (byte) src; - } - - /** - * Encodes the integer into the packet. - * - * @param src The integer to encode. - */ - public void encode(int src) { - packetData[writePos++] = (byte) (src >>> 24); - packetData[writePos++] = (byte) (src >>> 16); - packetData[writePos++] = (byte) (src >>> 8); - packetData[writePos++] = (byte) src; - } - - /** - * Encodes the double into the packet. - * - * @param src The double to encode. - */ - public void encode(double src) { - long data = Double.doubleToRawLongBits(src); - packetData[writePos++] = (byte) ((data >> 56) & 0xff); - packetData[writePos++] = (byte) ((data >> 48) & 0xff); - packetData[writePos++] = (byte) ((data >> 40) & 0xff); - packetData[writePos++] = (byte) ((data >> 32) & 0xff); - packetData[writePos++] = (byte) ((data >> 24) & 0xff); - packetData[writePos++] = (byte) ((data >> 16) & 0xff); - packetData[writePos++] = (byte) ((data >> 8) & 0xff); - packetData[writePos++] = (byte) (data & 0xff); - } - - /** - * Encodes the boolean into the packet. - * - * @param src The boolean to encode. - */ - public void encode(boolean src) { - packetData[writePos++] = src ? (byte) 1 : (byte) 0; - } - - /** - * Returns a decoded byte from the packet. 
- * - * @return A decoded byte from the packet. - */ - public byte decodeByte() { - if (packetData.length < readPos) { - return '\0'; - } - return packetData[readPos++]; - } - - /** - * Returns a decoded int from the packet. - * - * @return A decoded int from the packet. - */ - public int decodeInt() { - if (packetData.length < readPos + 3) { - return 0; - } - return (0xff & packetData[readPos++]) << 24 - | (0xff & packetData[readPos++]) << 16 - | (0xff & packetData[readPos++]) << 8 - | (0xff & packetData[readPos++]); - } - - /** - * Returns a decoded double from the packet. - * - * @return A decoded double from the packet. - */ - public double decodeDouble() { - if (packetData.length < (readPos + 7)) { - return 0; - } - long data = - (long) (0xff & packetData[readPos++]) << 56 - | (long) (0xff & packetData[readPos++]) << 48 - | (long) (0xff & packetData[readPos++]) << 40 - | (long) (0xff & packetData[readPos++]) << 32 - | (long) (0xff & packetData[readPos++]) << 24 - | (long) (0xff & packetData[readPos++]) << 16 - | (long) (0xff & packetData[readPos++]) << 8 - | (long) (0xff & packetData[readPos++]); - return Double.longBitsToDouble(data); - } - - /** - * Returns a decoded boolean from the packet. - * - * @return A decoded boolean from the packet. - */ - public boolean decodeBoolean() { - if (packetData.length < readPos) { - return false; - } - return packetData[readPos++] == 1; - } - - public void encode(double[] data) { - for (double d : data) { - encode(d); - } - } - - public double[] decodeDoubleArray(int len) { - double[] ret = new double[len]; - for (int i = 0; i < len; i++) { - ret[i] = decodeDouble(); - } - return ret; - } - - public short decodeShort() { - if (packetData.length < readPos + 1) { - return 0; - } - return (short) ((0xff & packetData[readPos++]) << 8 | (0xff & packetData[readPos++])); - } + // Size of the packet. + int size; + // Data stored in the packet. + byte[] packetData; + // Read and write positions. 
+ int readPos, writePos; + + /** + * Constructs an empty packet. + * + * @param size The size of the packet buffer. + */ + public Packet(int size) { + this.size = size; + packetData = new byte[size]; + } + + /** + * Constructs a packet with the given data. + * + * @param data The packet data. + */ + public Packet(byte[] data) { + packetData = data; + size = packetData.length; + } + + /** Clears the packet and resets the read and write positions. */ + public void clear() { + packetData = new byte[size]; + readPos = 0; + writePos = 0; + } + + public int getSize() { + return size; + } + + /** + * Returns the packet data. + * + * @return The packet data. + */ + public byte[] getData() { + return packetData; + } + + /** + * Sets the packet data. + * + * @param data The packet data. + */ + public void setData(byte[] data) { + packetData = data; + size = data.length; + } + + /** + * Encodes the byte into the packet. + * + * @param src The byte to encode. + */ + public void encode(byte src) { + packetData[writePos++] = src; + } + + /** + * Encodes the short into the packet. + * + * @param src The short to encode. + */ + public void encode(short src) { + packetData[writePos++] = (byte) (src >>> 8); + packetData[writePos++] = (byte) src; + } + + /** + * Encodes the integer into the packet. + * + * @param src The integer to encode. + */ + public void encode(int src) { + packetData[writePos++] = (byte) (src >>> 24); + packetData[writePos++] = (byte) (src >>> 16); + packetData[writePos++] = (byte) (src >>> 8); + packetData[writePos++] = (byte) src; + } + + /** + * Encodes the double into the packet. + * + * @param src The double to encode. 
+ */ + public void encode(double src) { + long data = Double.doubleToRawLongBits(src); + packetData[writePos++] = (byte) ((data >> 56) & 0xff); + packetData[writePos++] = (byte) ((data >> 48) & 0xff); + packetData[writePos++] = (byte) ((data >> 40) & 0xff); + packetData[writePos++] = (byte) ((data >> 32) & 0xff); + packetData[writePos++] = (byte) ((data >> 24) & 0xff); + packetData[writePos++] = (byte) ((data >> 16) & 0xff); + packetData[writePos++] = (byte) ((data >> 8) & 0xff); + packetData[writePos++] = (byte) (data & 0xff); + } + + /** + * Encodes the boolean into the packet. + * + * @param src The boolean to encode. + */ + public void encode(boolean src) { + packetData[writePos++] = src ? (byte) 1 : (byte) 0; + } + + /** + * Returns a decoded byte from the packet. + * + * @return A decoded byte from the packet. + */ + public byte decodeByte() { + if (packetData.length < readPos) { + return '\0'; + } + return packetData[readPos++]; + } + + /** + * Returns a decoded int from the packet. + * + * @return A decoded int from the packet. + */ + public int decodeInt() { + if (packetData.length < readPos + 3) { + return 0; + } + return (0xff & packetData[readPos++]) << 24 + | (0xff & packetData[readPos++]) << 16 + | (0xff & packetData[readPos++]) << 8 + | (0xff & packetData[readPos++]); + } + + /** + * Returns a decoded double from the packet. + * + * @return A decoded double from the packet. + */ + public double decodeDouble() { + if (packetData.length < (readPos + 7)) { + return 0; + } + long data = + (long) (0xff & packetData[readPos++]) << 56 + | (long) (0xff & packetData[readPos++]) << 48 + | (long) (0xff & packetData[readPos++]) << 40 + | (long) (0xff & packetData[readPos++]) << 32 + | (long) (0xff & packetData[readPos++]) << 24 + | (long) (0xff & packetData[readPos++]) << 16 + | (long) (0xff & packetData[readPos++]) << 8 + | (long) (0xff & packetData[readPos++]); + return Double.longBitsToDouble(data); + } + + /** + * Returns a decoded boolean from the packet. 
+ * + * @return A decoded boolean from the packet. + */ + public boolean decodeBoolean() { + if (packetData.length < readPos) { + return false; + } + return packetData[readPos++] == 1; + } + + public void encode(double[] data) { + for (double d : data) { + encode(d); + } + } + + public double[] decodeDoubleArray(int len) { + double[] ret = new double[len]; + for (int i = 0; i < len; i++) { + ret[i] = decodeDouble(); + } + return ret; + } + + public short decodeShort() { + if (packetData.length < readPos + 1) { + return 0; + } + return (short) ((0xff & packetData[readPos++]) << 8 | (0xff & packetData[readPos++])); + } } diff --git a/photon-targeting/src/main/java/org/photonvision/common/hardware/VisionLEDMode.java b/photon-targeting/src/main/java/org/photonvision/common/hardware/VisionLEDMode.java index b3ed52b970..af95fc2cbb 100644 --- a/photon-targeting/src/main/java/org/photonvision/common/hardware/VisionLEDMode.java +++ b/photon-targeting/src/main/java/org/photonvision/common/hardware/VisionLEDMode.java @@ -18,29 +18,29 @@ package org.photonvision.common.hardware; public enum VisionLEDMode { - kDefault(-1), - kOff(0), - kOn(1), - kBlink(2); + kDefault(-1), + kOff(0), + kOn(1), + kBlink(2); - public final int value; + public final int value; - VisionLEDMode(int value) { - this.value = value; - } + VisionLEDMode(int value) { + this.value = value; + } - @Override - public String toString() { - switch (this) { - case kDefault: - return "Default"; - case kOff: - return "Off"; - case kOn: - return "On"; - case kBlink: - return "Blink"; + @Override + public String toString() { + switch (this) { + case kDefault: + return "Default"; + case kOff: + return "Off"; + case kOn: + return "On"; + case kBlink: + return "Blink"; + } + return ""; } - return ""; - } } diff --git a/photon-targeting/src/main/java/org/photonvision/common/networktables/NTTopicSet.java b/photon-targeting/src/main/java/org/photonvision/common/networktables/NTTopicSet.java index a986d5946b..5a0d31eaed 
100644 --- a/photon-targeting/src/main/java/org/photonvision/common/networktables/NTTopicSet.java +++ b/photon-targeting/src/main/java/org/photonvision/common/networktables/NTTopicSet.java @@ -38,92 +38,92 @@ * different for sim vs. real camera */ public class NTTopicSet { - public NetworkTable subTable; - public RawPublisher rawBytesEntry; - - public IntegerPublisher pipelineIndexPublisher; - public IntegerSubscriber pipelineIndexRequestSub; - - public BooleanTopic driverModeEntry; - public BooleanPublisher driverModePublisher; - public BooleanSubscriber driverModeSubscriber; - - public DoublePublisher latencyMillisEntry; - public BooleanPublisher hasTargetEntry; - public DoublePublisher targetPitchEntry; - public DoublePublisher targetYawEntry; - public DoublePublisher targetAreaEntry; - public DoubleArrayPublisher targetPoseEntry; - public DoublePublisher targetSkewEntry; - - // The raw position of the best target, in pixels. - public DoublePublisher bestTargetPosX; - public DoublePublisher bestTargetPosY; - - // Heartbeat - public IntegerTopic heartbeatTopic; - public IntegerPublisher heartbeatPublisher; - - // Camera Calibration - public DoubleArrayPublisher cameraIntrinsicsPublisher; - public DoubleArrayPublisher cameraDistortionPublisher; - - public void updateEntries() { - rawBytesEntry = - subTable - .getRawTopic("rawBytes") - .publish("rawBytes", PubSubOption.periodic(0.01), PubSubOption.sendAll(true)); - - pipelineIndexPublisher = subTable.getIntegerTopic("pipelineIndexState").publish(); - pipelineIndexRequestSub = subTable.getIntegerTopic("pipelineIndexRequest").subscribe(0); - - driverModePublisher = subTable.getBooleanTopic("driverMode").publish(); - driverModeSubscriber = subTable.getBooleanTopic("driverModeRequest").subscribe(false); - - // Fun little hack to make the request show up - driverModeSubscriber.getTopic().publish().setDefault(false); - - latencyMillisEntry = subTable.getDoubleTopic("latencyMillis").publish(); - hasTargetEntry = 
subTable.getBooleanTopic("hasTarget").publish(); - - targetPitchEntry = subTable.getDoubleTopic("targetPitch").publish(); - targetAreaEntry = subTable.getDoubleTopic("targetArea").publish(); - targetYawEntry = subTable.getDoubleTopic("targetYaw").publish(); - targetPoseEntry = subTable.getDoubleArrayTopic("targetPose").publish(); - targetSkewEntry = subTable.getDoubleTopic("targetSkew").publish(); - - bestTargetPosX = subTable.getDoubleTopic("targetPixelsX").publish(); - bestTargetPosY = subTable.getDoubleTopic("targetPixelsY").publish(); - - heartbeatTopic = subTable.getIntegerTopic("heartbeat"); - heartbeatPublisher = heartbeatTopic.publish(); - - cameraIntrinsicsPublisher = subTable.getDoubleArrayTopic("cameraIntrinsics").publish(); - cameraDistortionPublisher = subTable.getDoubleArrayTopic("cameraDistortion").publish(); - } - - @SuppressWarnings("DuplicatedCode") - public void removeEntries() { - if (rawBytesEntry != null) rawBytesEntry.close(); - if (pipelineIndexPublisher != null) pipelineIndexPublisher.close(); - if (pipelineIndexRequestSub != null) pipelineIndexRequestSub.close(); - - if (driverModePublisher != null) driverModePublisher.close(); - if (driverModeSubscriber != null) driverModeSubscriber.close(); - - if (latencyMillisEntry != null) latencyMillisEntry.close(); - if (hasTargetEntry != null) hasTargetEntry.close(); - if (targetPitchEntry != null) targetPitchEntry.close(); - if (targetAreaEntry != null) targetAreaEntry.close(); - if (targetYawEntry != null) targetYawEntry.close(); - if (targetPoseEntry != null) targetPoseEntry.close(); - if (targetSkewEntry != null) targetSkewEntry.close(); - if (bestTargetPosX != null) bestTargetPosX.close(); - if (bestTargetPosY != null) bestTargetPosY.close(); - - if (heartbeatPublisher != null) heartbeatPublisher.close(); - - if (cameraIntrinsicsPublisher != null) cameraIntrinsicsPublisher.close(); - if (cameraDistortionPublisher != null) cameraDistortionPublisher.close(); - } + public NetworkTable subTable; + 
public RawPublisher rawBytesEntry; + + public IntegerPublisher pipelineIndexPublisher; + public IntegerSubscriber pipelineIndexRequestSub; + + public BooleanTopic driverModeEntry; + public BooleanPublisher driverModePublisher; + public BooleanSubscriber driverModeSubscriber; + + public DoublePublisher latencyMillisEntry; + public BooleanPublisher hasTargetEntry; + public DoublePublisher targetPitchEntry; + public DoublePublisher targetYawEntry; + public DoublePublisher targetAreaEntry; + public DoubleArrayPublisher targetPoseEntry; + public DoublePublisher targetSkewEntry; + + // The raw position of the best target, in pixels. + public DoublePublisher bestTargetPosX; + public DoublePublisher bestTargetPosY; + + // Heartbeat + public IntegerTopic heartbeatTopic; + public IntegerPublisher heartbeatPublisher; + + // Camera Calibration + public DoubleArrayPublisher cameraIntrinsicsPublisher; + public DoubleArrayPublisher cameraDistortionPublisher; + + public void updateEntries() { + rawBytesEntry = + subTable + .getRawTopic("rawBytes") + .publish("rawBytes", PubSubOption.periodic(0.01), PubSubOption.sendAll(true)); + + pipelineIndexPublisher = subTable.getIntegerTopic("pipelineIndexState").publish(); + pipelineIndexRequestSub = subTable.getIntegerTopic("pipelineIndexRequest").subscribe(0); + + driverModePublisher = subTable.getBooleanTopic("driverMode").publish(); + driverModeSubscriber = subTable.getBooleanTopic("driverModeRequest").subscribe(false); + + // Fun little hack to make the request show up + driverModeSubscriber.getTopic().publish().setDefault(false); + + latencyMillisEntry = subTable.getDoubleTopic("latencyMillis").publish(); + hasTargetEntry = subTable.getBooleanTopic("hasTarget").publish(); + + targetPitchEntry = subTable.getDoubleTopic("targetPitch").publish(); + targetAreaEntry = subTable.getDoubleTopic("targetArea").publish(); + targetYawEntry = subTable.getDoubleTopic("targetYaw").publish(); + targetPoseEntry = 
subTable.getDoubleArrayTopic("targetPose").publish(); + targetSkewEntry = subTable.getDoubleTopic("targetSkew").publish(); + + bestTargetPosX = subTable.getDoubleTopic("targetPixelsX").publish(); + bestTargetPosY = subTable.getDoubleTopic("targetPixelsY").publish(); + + heartbeatTopic = subTable.getIntegerTopic("heartbeat"); + heartbeatPublisher = heartbeatTopic.publish(); + + cameraIntrinsicsPublisher = subTable.getDoubleArrayTopic("cameraIntrinsics").publish(); + cameraDistortionPublisher = subTable.getDoubleArrayTopic("cameraDistortion").publish(); + } + + @SuppressWarnings("DuplicatedCode") + public void removeEntries() { + if (rawBytesEntry != null) rawBytesEntry.close(); + if (pipelineIndexPublisher != null) pipelineIndexPublisher.close(); + if (pipelineIndexRequestSub != null) pipelineIndexRequestSub.close(); + + if (driverModePublisher != null) driverModePublisher.close(); + if (driverModeSubscriber != null) driverModeSubscriber.close(); + + if (latencyMillisEntry != null) latencyMillisEntry.close(); + if (hasTargetEntry != null) hasTargetEntry.close(); + if (targetPitchEntry != null) targetPitchEntry.close(); + if (targetAreaEntry != null) targetAreaEntry.close(); + if (targetYawEntry != null) targetYawEntry.close(); + if (targetPoseEntry != null) targetPoseEntry.close(); + if (targetSkewEntry != null) targetSkewEntry.close(); + if (bestTargetPosX != null) bestTargetPosX.close(); + if (bestTargetPosY != null) bestTargetPosY.close(); + + if (heartbeatPublisher != null) heartbeatPublisher.close(); + + if (cameraIntrinsicsPublisher != null) cameraIntrinsicsPublisher.close(); + if (cameraDistortionPublisher != null) cameraDistortionPublisher.close(); + } } diff --git a/photon-targeting/src/main/java/org/photonvision/estimation/CameraTargetRelation.java b/photon-targeting/src/main/java/org/photonvision/estimation/CameraTargetRelation.java index 5ff59cdb47..b0b915c93e 100644 --- 
a/photon-targeting/src/main/java/org/photonvision/estimation/CameraTargetRelation.java +++ b/photon-targeting/src/main/java/org/photonvision/estimation/CameraTargetRelation.java @@ -23,37 +23,37 @@ /** Holds various helper geometries describing the relation between camera and target. */ public class CameraTargetRelation { - public final Pose3d camPose; - public final Transform3d camToTarg; - public final double camToTargDist; - public final double camToTargDistXY; - public final Rotation2d camToTargYaw; - public final Rotation2d camToTargPitch; + public final Pose3d camPose; + public final Transform3d camToTarg; + public final double camToTargDist; + public final double camToTargDistXY; + public final Rotation2d camToTargYaw; + public final Rotation2d camToTargPitch; - /** Angle from the camera's relative x-axis */ - public final Rotation2d camToTargAngle; + /** Angle from the camera's relative x-axis */ + public final Rotation2d camToTargAngle; - public final Transform3d targToCam; - public final Rotation2d targToCamYaw; - public final Rotation2d targToCamPitch; + public final Transform3d targToCam; + public final Rotation2d targToCamYaw; + public final Rotation2d targToCamPitch; - /** Angle from the target's relative x-axis */ - public final Rotation2d targToCamAngle; + /** Angle from the target's relative x-axis */ + public final Rotation2d targToCamAngle; - public CameraTargetRelation(Pose3d cameraPose, Pose3d targetPose) { - this.camPose = cameraPose; - camToTarg = new Transform3d(cameraPose, targetPose); - camToTargDist = camToTarg.getTranslation().getNorm(); - camToTargDistXY = - Math.hypot(camToTarg.getTranslation().getX(), camToTarg.getTranslation().getY()); - camToTargYaw = new Rotation2d(camToTarg.getX(), camToTarg.getY()); - camToTargPitch = new Rotation2d(camToTargDistXY, -camToTarg.getZ()); - camToTargAngle = - new Rotation2d(Math.hypot(camToTargYaw.getRadians(), camToTargPitch.getRadians())); - targToCam = new Transform3d(targetPose, cameraPose); - 
targToCamYaw = new Rotation2d(targToCam.getX(), targToCam.getY()); - targToCamPitch = new Rotation2d(camToTargDistXY, -targToCam.getZ()); - targToCamAngle = - new Rotation2d(Math.hypot(targToCamYaw.getRadians(), targToCamPitch.getRadians())); - } + public CameraTargetRelation(Pose3d cameraPose, Pose3d targetPose) { + this.camPose = cameraPose; + camToTarg = new Transform3d(cameraPose, targetPose); + camToTargDist = camToTarg.getTranslation().getNorm(); + camToTargDistXY = + Math.hypot(camToTarg.getTranslation().getX(), camToTarg.getTranslation().getY()); + camToTargYaw = new Rotation2d(camToTarg.getX(), camToTarg.getY()); + camToTargPitch = new Rotation2d(camToTargDistXY, -camToTarg.getZ()); + camToTargAngle = + new Rotation2d(Math.hypot(camToTargYaw.getRadians(), camToTargPitch.getRadians())); + targToCam = new Transform3d(targetPose, cameraPose); + targToCamYaw = new Rotation2d(targToCam.getX(), targToCam.getY()); + targToCamPitch = new Rotation2d(camToTargDistXY, -targToCam.getZ()); + targToCamAngle = + new Rotation2d(Math.hypot(targToCamYaw.getRadians(), targToCamPitch.getRadians())); + } } diff --git a/photon-targeting/src/main/java/org/photonvision/estimation/OpenCVHelp.java b/photon-targeting/src/main/java/org/photonvision/estimation/OpenCVHelp.java index 53c6179e29..3f89918d62 100644 --- a/photon-targeting/src/main/java/org/photonvision/estimation/OpenCVHelp.java +++ b/photon-targeting/src/main/java/org/photonvision/estimation/OpenCVHelp.java @@ -49,528 +49,528 @@ import org.photonvision.targeting.TargetCorner; public final class OpenCVHelp { - private static RotTrlTransform3d NWU_TO_EDN; - private static RotTrlTransform3d EDN_TO_NWU; - - static { - try { - var loader = - new RuntimeLoader<>( - Core.NATIVE_LIBRARY_NAME, RuntimeLoader.getDefaultExtractionRoot(), Core.class); - loader.loadLibrary(); - } catch (Exception e) { - throw new RuntimeException("Failed to load native libraries!", e); + private static RotTrlTransform3d NWU_TO_EDN; + private static 
RotTrlTransform3d EDN_TO_NWU; + + static { + try { + var loader = + new RuntimeLoader<>( + Core.NATIVE_LIBRARY_NAME, RuntimeLoader.getDefaultExtractionRoot(), Core.class); + loader.loadLibrary(); + } catch (Exception e) { + throw new RuntimeException("Failed to load native libraries!", e); + } + + NWU_TO_EDN = + new RotTrlTransform3d( + new Rotation3d(Matrix.mat(Nat.N3(), Nat.N3()).fill(0, -1, 0, 0, 0, -1, 1, 0, 0)), + new Translation3d()); + EDN_TO_NWU = + new RotTrlTransform3d( + new Rotation3d(Matrix.mat(Nat.N3(), Nat.N3()).fill(0, 0, 1, -1, 0, 0, 0, -1, 0)), + new Translation3d()); + } + + public static Mat matrixToMat(SimpleMatrix matrix) { + var mat = new Mat(matrix.numRows(), matrix.numCols(), CvType.CV_64F); + mat.put(0, 0, matrix.getDDRM().getData()); + return mat; + } + + public static Matrix matToMatrix(Mat mat) { + double[] data = new double[(int) mat.total() * mat.channels()]; + var doubleMat = new Mat(mat.rows(), mat.cols(), CvType.CV_64F); + mat.convertTo(doubleMat, CvType.CV_64F); + doubleMat.get(0, 0, data); + return new Matrix<>(new SimpleMatrix(mat.rows(), mat.cols(), true, data)); } - NWU_TO_EDN = - new RotTrlTransform3d( - new Rotation3d(Matrix.mat(Nat.N3(), Nat.N3()).fill(0, -1, 0, 0, 0, -1, 1, 0, 0)), - new Translation3d()); - EDN_TO_NWU = - new RotTrlTransform3d( - new Rotation3d(Matrix.mat(Nat.N3(), Nat.N3()).fill(0, 0, 1, -1, 0, 0, 0, -1, 0)), - new Translation3d()); - } - - public static Mat matrixToMat(SimpleMatrix matrix) { - var mat = new Mat(matrix.numRows(), matrix.numCols(), CvType.CV_64F); - mat.put(0, 0, matrix.getDDRM().getData()); - return mat; - } - - public static Matrix matToMatrix(Mat mat) { - double[] data = new double[(int) mat.total() * mat.channels()]; - var doubleMat = new Mat(mat.rows(), mat.cols(), CvType.CV_64F); - mat.convertTo(doubleMat, CvType.CV_64F); - doubleMat.get(0, 0, data); - return new Matrix<>(new SimpleMatrix(mat.rows(), mat.cols(), true, data)); - } - - /** - * Creates a new {@link MatOfPoint3f} with 
these 3d translations. The opencv tvec is a vector with - * three elements representing {x, y, z} in the EDN coordinate system. - * - * @param translations The translations to convert into a MatOfPoint3f - */ - public static MatOfPoint3f translationToTvec(Translation3d... translations) { - Point3[] points = new Point3[translations.length]; - for (int i = 0; i < translations.length; i++) { - var trl = translationNWUtoEDN(translations[i]); - points[i] = new Point3(trl.getX(), trl.getY(), trl.getZ()); + /** + * Creates a new {@link MatOfPoint3f} with these 3d translations. The opencv tvec is a vector with + * three elements representing {x, y, z} in the EDN coordinate system. + * + * @param translations The translations to convert into a MatOfPoint3f + */ + public static MatOfPoint3f translationToTvec(Translation3d... translations) { + Point3[] points = new Point3[translations.length]; + for (int i = 0; i < translations.length; i++) { + var trl = translationNWUtoEDN(translations[i]); + points[i] = new Point3(trl.getX(), trl.getY(), trl.getZ()); + } + return new MatOfPoint3f(points); } - return new MatOfPoint3f(points); - } - - /** - * Returns a new 3d translation from this {@link Mat}. The opencv tvec is a vector with three - * elements representing {x, y, z} in the EDN coordinate system. - * - * @param tvecInput The tvec to create a Translation3d from - */ - public static Translation3d tvecToTranslation(Mat tvecInput) { - float[] data = new float[3]; - var wrapped = new Mat(tvecInput.rows(), tvecInput.cols(), CvType.CV_32F); - tvecInput.convertTo(wrapped, CvType.CV_32F); - wrapped.get(0, 0, data); - wrapped.release(); - return translationEDNtoNWU(new Translation3d(data[0], data[1], data[2])); - } - - /** - * Creates a new {@link MatOfPoint3f} with this 3d rotation. The opencv rvec Mat is a vector with - * three elements representing the axis scaled by the angle in the EDN coordinate system. 
(angle = - * norm, and axis = rvec / norm) - * - * @param rotation The rotation to convert into a MatOfPoint3f - */ - public static MatOfPoint3f rotationToRvec(Rotation3d rotation) { - rotation = rotationNWUtoEDN(rotation); - return new MatOfPoint3f(new Point3(rotation.getQuaternion().toRotationVector().getData())); - } - - /** - * Returns a 3d rotation from this {@link Mat}. The opencv rvec Mat is a vector with three - * elements representing the axis scaled by the angle in the EDN coordinate system. (angle = norm, - * and axis = rvec / norm) - * - * @param rvecInput The rvec to create a Rotation3d from - */ - public static Rotation3d rvecToRotation(Mat rvecInput) { - // Get the 'rodriguez' (axis-angle, where the norm is the angle about the normalized direction - // of the vector) - float[] data = new float[3]; - var wrapped = new Mat(rvecInput.rows(), rvecInput.cols(), CvType.CV_32F); - rvecInput.convertTo(wrapped, CvType.CV_32F); - wrapped.get(0, 0, data); - wrapped.release(); - - return rotationEDNtoNWU(new Rotation3d(VecBuilder.fill(data[0], data[1], data[2]))); - } - - public static Point avgPoint(Point[] points) { - if (points == null || points.length == 0) return null; - var pointMat = new MatOfPoint2f(points); - Core.reduce(pointMat, pointMat, 0, Core.REDUCE_AVG); - var avgPt = pointMat.toArray()[0]; - pointMat.release(); - return avgPt; - } - - public static Point[] cornersToPoints(List corners) { - var points = new Point[corners.size()]; - for (int i = 0; i < corners.size(); i++) { - var corn = corners.get(i); - points[i] = new Point(corn.x, corn.y); + + /** + * Returns a new 3d translation from this {@link Mat}. The opencv tvec is a vector with three + * elements representing {x, y, z} in the EDN coordinate system. 
+ * + * @param tvecInput The tvec to create a Translation3d from + */ + public static Translation3d tvecToTranslation(Mat tvecInput) { + float[] data = new float[3]; + var wrapped = new Mat(tvecInput.rows(), tvecInput.cols(), CvType.CV_32F); + tvecInput.convertTo(wrapped, CvType.CV_32F); + wrapped.get(0, 0, data); + wrapped.release(); + return translationEDNtoNWU(new Translation3d(data[0], data[1], data[2])); + } + + /** + * Creates a new {@link MatOfPoint3f} with this 3d rotation. The opencv rvec Mat is a vector with + * three elements representing the axis scaled by the angle in the EDN coordinate system. (angle = + * norm, and axis = rvec / norm) + * + * @param rotation The rotation to convert into a MatOfPoint3f + */ + public static MatOfPoint3f rotationToRvec(Rotation3d rotation) { + rotation = rotationNWUtoEDN(rotation); + return new MatOfPoint3f(new Point3(rotation.getQuaternion().toRotationVector().getData())); } - return points; - } - public static Point[] cornersToPoints(TargetCorner... corners) { - var points = new Point[corners.length]; - for (int i = 0; i < corners.length; i++) { - points[i] = new Point(corners[i].x, corners[i].y); + /** + * Returns a 3d rotation from this {@link Mat}. The opencv rvec Mat is a vector with three + * elements representing the axis scaled by the angle in the EDN coordinate system. (angle = norm, + * and axis = rvec / norm) + * + * @param rvecInput The rvec to create a Rotation3d from + */ + public static Rotation3d rvecToRotation(Mat rvecInput) { + // Get the 'rodriguez' (axis-angle, where the norm is the angle about the normalized direction + // of the vector) + float[] data = new float[3]; + var wrapped = new Mat(rvecInput.rows(), rvecInput.cols(), CvType.CV_32F); + rvecInput.convertTo(wrapped, CvType.CV_32F); + wrapped.get(0, 0, data); + wrapped.release(); + + return rotationEDNtoNWU(new Rotation3d(VecBuilder.fill(data[0], data[1], data[2]))); } - return points; - } - public static List pointsToCorners(Point... 
points) { - var corners = new ArrayList(points.length); - for (int i = 0; i < points.length; i++) { - corners.add(new TargetCorner(points[i].x, points[i].y)); + public static Point avgPoint(Point[] points) { + if (points == null || points.length == 0) return null; + var pointMat = new MatOfPoint2f(points); + Core.reduce(pointMat, pointMat, 0, Core.REDUCE_AVG); + var avgPt = pointMat.toArray()[0]; + pointMat.release(); + return avgPt; } - return corners; - } - - public static List pointsToCorners(MatOfPoint2f matInput) { - var corners = new ArrayList(); - float[] data = new float[(int) matInput.total() * matInput.channels()]; - matInput.get(0, 0, data); - for (int i = 0; i < (int) matInput.total(); i++) { - corners.add(new TargetCorner(data[0 + 2 * i], data[1 + 2 * i])); + + public static Point[] cornersToPoints(List corners) { + var points = new Point[corners.size()]; + for (int i = 0; i < corners.size(); i++) { + var corn = corners.get(i); + points[i] = new Point(corn.x, corn.y); + } + return points; } - return corners; - } - - /** - * Reorders the list, optionally indexing backwards and wrapping around to the last element after - * the first, and shifting all indices in the direction of indexing. - * - *

e.g. - * - *

({1,2,3}, false, 1) == {2,3,1} - * - *

({1,2,3}, true, 0) == {1,3,2} - * - *

({1,2,3}, true, 1) == {3,2,1} - * - * @param Element type - * @param elements - * @param backwards If indexing should happen in reverse (0, size-1, size-2, ...) - * @param shiftStart How much the inital index should be shifted (instead of starting at index 0, - * start at shiftStart, negated if backwards) - * @return Reordered list - */ - public static List reorderCircular(List elements, boolean backwards, int shiftStart) { - int size = elements.size(); - int dir = backwards ? -1 : 1; - var reordered = new ArrayList<>(elements); - for (int i = 0; i < size; i++) { - int index = (i * dir + shiftStart * dir) % size; - if (index < 0) index = size + index; - reordered.set(i, elements.get(index)); + + public static Point[] cornersToPoints(TargetCorner... corners) { + var points = new Point[corners.length]; + for (int i = 0; i < corners.length; i++) { + points[i] = new Point(corners[i].x, corners[i].y); + } + return points; } - return reordered; - } - - // TODO: RotTrlTransform3d removal awaiting Rotation3d performance improvements - /** - * Convert a rotation delta from EDN to NWU. For example, if you have a rotation X,Y,Z {1, 0, 0} - * in EDN, this would be {0, -1, 0} in NWU. - */ - private static Rotation3d rotationEDNtoNWU(Rotation3d rot) { - return new RotTrlTransform3d(EDN_TO_NWU.apply(rot), new Translation3d()) - .apply(EDN_TO_NWU.inverse().getRotation()); - } - - /** - * Convert a rotation delta from NWU to EDN. For example, if you have a rotation X,Y,Z {1, 0, 0} - * in NWU, this would be {0, 0, 1} in EDN. - */ - private static Rotation3d rotationNWUtoEDN(Rotation3d rot) { - return new RotTrlTransform3d(NWU_TO_EDN.apply(rot), new Translation3d()) - .apply(NWU_TO_EDN.inverse().getRotation()); - } - - /** - * Convert a translation from EDN to NWU. For example, if you have a translation X,Y,Z {1, 0, 0} - * in EDN, this would be {0, -1, 0} in NWU. 
- */ - private static Translation3d translationEDNtoNWU(Translation3d trl) { - return EDN_TO_NWU.apply(trl); - } - - /** - * Convert a translation from NWU to EDN. For example, if you have a translation X,Y,Z {1, 0, 0} - * in NWU, this would be {0, 0, 1} in EDN. - */ - private static Translation3d translationNWUtoEDN(Translation3d trl) { - return NWU_TO_EDN.apply(trl); - } - - /** - * Project object points from the 3d world into the 2d camera image. The camera - * properties(intrinsics, distortion) determine the results of this projection. - * - * @param cameraMatrix the camera intrinsics matrix in standard opencv form - * @param distCoeffs the camera distortion matrix in standard opencv form - * @param camRt The change in basis from world coordinates to camera coordinates. See {@link - * RotTrlTransform3d#makeRelativeTo(Pose3d)}. - * @param objectTranslations The 3d points to be projected - * @return The 2d points in pixels which correspond to the camera's image of the 3d points - */ - public static Point[] projectPoints( - Matrix cameraMatrix, - Matrix distCoeffs, - RotTrlTransform3d camRt, - List objectTranslations) { - // translate to opencv classes - var objectPoints = translationToTvec(objectTranslations.toArray(new Translation3d[0])); - // opencv rvec/tvec describe a change in basis from world to camera - var rvec = rotationToRvec(camRt.getRotation()); - var tvec = translationToTvec(camRt.getTranslation()); - var cameraMatrixMat = matrixToMat(cameraMatrix.getStorage()); - var distCoeffsMat = new MatOfDouble(matrixToMat(distCoeffs.getStorage())); - var imagePoints = new MatOfPoint2f(); - // project to 2d - Calib3d.projectPoints(objectPoints, rvec, tvec, cameraMatrixMat, distCoeffsMat, imagePoints); - var points = imagePoints.toArray(); - - // release our Mats from native memory - objectPoints.release(); - rvec.release(); - tvec.release(); - cameraMatrixMat.release(); - distCoeffsMat.release(); - imagePoints.release(); - - return points; - } - - /** - * 
Undistort 2d image points using a given camera's intrinsics and distortion. - * - *

2d image points from {@link #projectPoints(Matrix, Matrix, RotTrlTransform3d, List) - * projectPoints()} will naturally be distorted, so this operation is important if the image - * points need to be directly used (e.g. 2d yaw/pitch). - * - * @param cameraMatrix The camera intrinsics matrix in standard opencv form - * @param distCoeffs The camera distortion matrix in standard opencv form - * @param points The distorted image points - * @return The undistorted image points - */ - public static Point[] undistortPoints( - Matrix cameraMatrix, Matrix distCoeffs, Point[] points) { - var distMat = new MatOfPoint2f(points); - var undistMat = new MatOfPoint2f(); - var cameraMatrixMat = matrixToMat(cameraMatrix.getStorage()); - var distCoeffsMat = matrixToMat(distCoeffs.getStorage()); - - Calib3d.undistortImagePoints(distMat, undistMat, cameraMatrixMat, distCoeffsMat); - var undistPoints = undistMat.toArray(); - - distMat.release(); - undistMat.release(); - cameraMatrixMat.release(); - distCoeffsMat.release(); - - return undistPoints; - } - - /** - * Gets the (upright) rectangle which bounds this contour. - * - *

Note that rectangle size and position are stored with ints and do not have sub-pixel - * accuracy. - * - * @param points The points to be bounded - * @return Rectangle bounding the given points - */ - public static Rect getBoundingRect(Point[] points) { - var pointMat = new MatOfPoint2f(points); - var rect = Imgproc.boundingRect(pointMat); - pointMat.release(); - return rect; - } - - /** - * Gets the rotated rectangle with minimum area which bounds this contour. - * - *

Note that rectangle size and position are stored with floats and have sub-pixel accuracy. - * - * @param points The points to be bounded - * @return Rotated rectangle bounding the given points - */ - public static RotatedRect getMinAreaRect(Point[] points) { - var pointMat = new MatOfPoint2f(points); - var rect = Imgproc.minAreaRect(pointMat); - pointMat.release(); - return rect; - } - - /** - * Gets the convex hull contour (the outline) of a list of points. - * - * @param points The input contour - * @return The subset of points defining the convex hull. Note that these use ints and not floats. - */ - public static Point[] getConvexHull(Point[] points) { - var pointMat = new MatOfPoint(points); - // outputHull gives us indices (of corn) that make a convex hull contour - var outputHull = new MatOfInt(); - - Imgproc.convexHull(pointMat, outputHull); - - int[] indices = outputHull.toArray(); - outputHull.release(); - pointMat.release(); - var convexPoints = new Point[indices.length]; - for (int i = 0; i < indices.length; i++) { - convexPoints[i] = points[indices[i]]; + + public static List pointsToCorners(Point... points) { + var corners = new ArrayList(points.length); + for (int i = 0; i < points.length; i++) { + corners.add(new TargetCorner(points[i].x, points[i].y)); + } + return corners; } - return convexPoints; - } - - /** - * Finds the transformation(s) that map the camera's pose to the target's pose. The camera's pose - * relative to the target is determined by the supplied 3d points of the target's model and their - * associated 2d points imaged by the camera. The supplied model translations must be relative to - * the target's pose. - * - *

For planar targets, there may be an alternate solution which is plausible given the 2d image - * points. This has an associated "ambiguity" which describes the ratio of reprojection error - * between the "best" and "alternate" solution. - * - *

This method is intended for use with individual AprilTags, and will not work unless 4 points - * are provided. - * - * @param cameraMatrix The camera intrinsics matrix in standard opencv form - * @param distCoeffs The camera distortion matrix in standard opencv form - * @param modelTrls The translations of the object corners. These should have the object pose as - * their origin. These must come in a specific, pose-relative order (in NWU): - *

    - *
  • Point 0: [0, -squareLength / 2, squareLength / 2] - *
  • Point 1: [0, squareLength / 2, squareLength / 2] - *
  • Point 2: [0, squareLength / 2, -squareLength / 2] - *
  • Point 3: [0, -squareLength / 2, -squareLength / 2] - *
- * - * @param imagePoints The projection of these 3d object points into the 2d camera image. The order - * should match the given object point translations. - * @return The resulting transformation that maps the camera pose to the target pose and the - * ambiguity if an alternate solution is available. - */ - public static PNPResults solvePNP_SQUARE( - Matrix cameraMatrix, - Matrix distCoeffs, - List modelTrls, - Point[] imagePoints) { - // solvepnp inputs - MatOfPoint3f objectMat = new MatOfPoint3f(); - MatOfPoint2f imageMat = new MatOfPoint2f(); - MatOfDouble cameraMatrixMat = new MatOfDouble(); - MatOfDouble distCoeffsMat = new MatOfDouble(); - var rvecs = new ArrayList(); - var tvecs = new ArrayList(); - Mat rvec = Mat.zeros(3, 1, CvType.CV_32F); - Mat tvec = Mat.zeros(3, 1, CvType.CV_32F); - Mat reprojectionError = Mat.zeros(2, 1, CvType.CV_32F); - try { - // IPPE_SQUARE expects our corners in a specific order - modelTrls = reorderCircular(modelTrls, true, -1); - imagePoints = reorderCircular(Arrays.asList(imagePoints), true, -1).toArray(Point[]::new); - // translate to opencv classes - translationToTvec(modelTrls.toArray(new Translation3d[0])).assignTo(objectMat); - imageMat.fromArray(imagePoints); - matrixToMat(cameraMatrix.getStorage()).assignTo(cameraMatrixMat); - matrixToMat(distCoeffs.getStorage()).assignTo(distCoeffsMat); - - float[] errors = new float[2]; - Transform3d best = null; - Transform3d alt = null; - - for (int tries = 0; tries < 2; tries++) { - // calc rvecs/tvecs and associated reprojection error from image points - Calib3d.solvePnPGeneric( - objectMat, - imageMat, - cameraMatrixMat, - distCoeffsMat, - rvecs, - tvecs, - false, - Calib3d.SOLVEPNP_IPPE_SQUARE, - rvec, - tvec, - reprojectionError); - - reprojectionError.get(0, 0, errors); - // convert to wpilib coordinates - best = new Transform3d(tvecToTranslation(tvecs.get(0)), rvecToRotation(rvecs.get(0))); - - if (tvecs.size() > 1) { - alt = new Transform3d(tvecToTranslation(tvecs.get(1)), 
rvecToRotation(rvecs.get(1))); + + public static List pointsToCorners(MatOfPoint2f matInput) { + var corners = new ArrayList(); + float[] data = new float[(int) matInput.total() * matInput.channels()]; + matInput.get(0, 0, data); + for (int i = 0; i < (int) matInput.total(); i++) { + corners.add(new TargetCorner(data[0 + 2 * i], data[1 + 2 * i])); } + return corners; + } - // check if we got a NaN result - if (!Double.isNaN(errors[0])) break; - else { // add noise and retry - double[] br = imageMat.get(0, 0); - br[0] -= 0.001; - br[1] -= 0.001; - imageMat.put(0, 0, br); + /** + * Reorders the list, optionally indexing backwards and wrapping around to the last element after + * the first, and shifting all indices in the direction of indexing. + * + *

e.g. + * + *

({1,2,3}, false, 1) == {2,3,1} + * + *

({1,2,3}, true, 0) == {1,3,2} + * + *

({1,2,3}, true, 1) == {3,2,1} + * + * @param Element type + * @param elements + * @param backwards If indexing should happen in reverse (0, size-1, size-2, ...) + * @param shiftStart How much the inital index should be shifted (instead of starting at index 0, + * start at shiftStart, negated if backwards) + * @return Reordered list + */ + public static List reorderCircular(List elements, boolean backwards, int shiftStart) { + int size = elements.size(); + int dir = backwards ? -1 : 1; + var reordered = new ArrayList<>(elements); + for (int i = 0; i < size; i++) { + int index = (i * dir + shiftStart * dir) % size; + if (index < 0) index = size + index; + reordered.set(i, elements.get(index)); } - } + return reordered; + } + + // TODO: RotTrlTransform3d removal awaiting Rotation3d performance improvements + /** + * Convert a rotation delta from EDN to NWU. For example, if you have a rotation X,Y,Z {1, 0, 0} + * in EDN, this would be {0, -1, 0} in NWU. + */ + private static Rotation3d rotationEDNtoNWU(Rotation3d rot) { + return new RotTrlTransform3d(EDN_TO_NWU.apply(rot), new Translation3d()) + .apply(EDN_TO_NWU.inverse().getRotation()); + } + + /** + * Convert a rotation delta from NWU to EDN. For example, if you have a rotation X,Y,Z {1, 0, 0} + * in NWU, this would be {0, 0, 1} in EDN. + */ + private static Rotation3d rotationNWUtoEDN(Rotation3d rot) { + return new RotTrlTransform3d(NWU_TO_EDN.apply(rot), new Translation3d()) + .apply(NWU_TO_EDN.inverse().getRotation()); + } + + /** + * Convert a translation from EDN to NWU. For example, if you have a translation X,Y,Z {1, 0, 0} + * in EDN, this would be {0, -1, 0} in NWU. + */ + private static Translation3d translationEDNtoNWU(Translation3d trl) { + return EDN_TO_NWU.apply(trl); + } + + /** + * Convert a translation from NWU to EDN. For example, if you have a translation X,Y,Z {1, 0, 0} + * in NWU, this would be {0, 0, 1} in EDN. 
+ */ + private static Translation3d translationNWUtoEDN(Translation3d trl) { + return NWU_TO_EDN.apply(trl); + } + + /** + * Project object points from the 3d world into the 2d camera image. The camera + * properties(intrinsics, distortion) determine the results of this projection. + * + * @param cameraMatrix the camera intrinsics matrix in standard opencv form + * @param distCoeffs the camera distortion matrix in standard opencv form + * @param camRt The change in basis from world coordinates to camera coordinates. See {@link + * RotTrlTransform3d#makeRelativeTo(Pose3d)}. + * @param objectTranslations The 3d points to be projected + * @return The 2d points in pixels which correspond to the camera's image of the 3d points + */ + public static Point[] projectPoints( + Matrix cameraMatrix, + Matrix distCoeffs, + RotTrlTransform3d camRt, + List objectTranslations) { + // translate to opencv classes + var objectPoints = translationToTvec(objectTranslations.toArray(new Translation3d[0])); + // opencv rvec/tvec describe a change in basis from world to camera + var rvec = rotationToRvec(camRt.getRotation()); + var tvec = translationToTvec(camRt.getTranslation()); + var cameraMatrixMat = matrixToMat(cameraMatrix.getStorage()); + var distCoeffsMat = new MatOfDouble(matrixToMat(distCoeffs.getStorage())); + var imagePoints = new MatOfPoint2f(); + // project to 2d + Calib3d.projectPoints(objectPoints, rvec, tvec, cameraMatrixMat, distCoeffsMat, imagePoints); + var points = imagePoints.toArray(); + + // release our Mats from native memory + objectPoints.release(); + rvec.release(); + tvec.release(); + cameraMatrixMat.release(); + distCoeffsMat.release(); + imagePoints.release(); + + return points; + } + + /** + * Undistort 2d image points using a given camera's intrinsics and distortion. + * + *

2d image points from {@link #projectPoints(Matrix, Matrix, RotTrlTransform3d, List) + * projectPoints()} will naturally be distorted, so this operation is important if the image + * points need to be directly used (e.g. 2d yaw/pitch). + * + * @param cameraMatrix The camera intrinsics matrix in standard opencv form + * @param distCoeffs The camera distortion matrix in standard opencv form + * @param points The distorted image points + * @return The undistorted image points + */ + public static Point[] undistortPoints( + Matrix cameraMatrix, Matrix distCoeffs, Point[] points) { + var distMat = new MatOfPoint2f(points); + var undistMat = new MatOfPoint2f(); + var cameraMatrixMat = matrixToMat(cameraMatrix.getStorage()); + var distCoeffsMat = matrixToMat(distCoeffs.getStorage()); + + Calib3d.undistortImagePoints(distMat, undistMat, cameraMatrixMat, distCoeffsMat); + var undistPoints = undistMat.toArray(); + + distMat.release(); + undistMat.release(); + cameraMatrixMat.release(); + distCoeffsMat.release(); + + return undistPoints; + } - // check if solvePnP failed with NaN results and retrying failed - if (Double.isNaN(errors[0])) throw new Exception("SolvePNP_SQUARE NaN result"); + /** + * Gets the (upright) rectangle which bounds this contour. + * + *

Note that rectangle size and position are stored with ints and do not have sub-pixel + * accuracy. + * + * @param points The points to be bounded + * @return Rectangle bounding the given points + */ + public static Rect getBoundingRect(Point[] points) { + var pointMat = new MatOfPoint2f(points); + var rect = Imgproc.boundingRect(pointMat); + pointMat.release(); + return rect; + } - if (alt != null) - return new PNPResults(best, alt, errors[0] / errors[1], errors[0], errors[1]); - else return new PNPResults(best, errors[0]); + /** + * Gets the rotated rectangle with minimum area which bounds this contour. + * + *

Note that rectangle size and position are stored with floats and have sub-pixel accuracy. + * + * @param points The points to be bounded + * @return Rotated rectangle bounding the given points + */ + public static RotatedRect getMinAreaRect(Point[] points) { + var pointMat = new MatOfPoint2f(points); + var rect = Imgproc.minAreaRect(pointMat); + pointMat.release(); + return rect; + } + + /** + * Gets the convex hull contour (the outline) of a list of points. + * + * @param points The input contour + * @return The subset of points defining the convex hull. Note that these use ints and not floats. + */ + public static Point[] getConvexHull(Point[] points) { + var pointMat = new MatOfPoint(points); + // outputHull gives us indices (of corn) that make a convex hull contour + var outputHull = new MatOfInt(); + + Imgproc.convexHull(pointMat, outputHull); + + int[] indices = outputHull.toArray(); + outputHull.release(); + pointMat.release(); + var convexPoints = new Point[indices.length]; + for (int i = 0; i < indices.length; i++) { + convexPoints[i] = points[indices[i]]; + } + return convexPoints; } - // solvePnP failed - catch (Exception e) { - System.err.println("SolvePNP_SQUARE failed!"); - e.printStackTrace(); - return new PNPResults(); - } finally { - // release our Mats from native memory - objectMat.release(); - imageMat.release(); - cameraMatrixMat.release(); - distCoeffsMat.release(); - for (var v : rvecs) v.release(); - for (var v : tvecs) v.release(); - rvec.release(); - tvec.release(); - reprojectionError.release(); + + /** + * Finds the transformation(s) that map the camera's pose to the target's pose. The camera's pose + * relative to the target is determined by the supplied 3d points of the target's model and their + * associated 2d points imaged by the camera. The supplied model translations must be relative to + * the target's pose. + * + *

For planar targets, there may be an alternate solution which is plausible given the 2d image + * points. This has an associated "ambiguity" which describes the ratio of reprojection error + * between the "best" and "alternate" solution. + * + *

This method is intended for use with individual AprilTags, and will not work unless 4 points + * are provided. + * + * @param cameraMatrix The camera intrinsics matrix in standard opencv form + * @param distCoeffs The camera distortion matrix in standard opencv form + * @param modelTrls The translations of the object corners. These should have the object pose as + * their origin. These must come in a specific, pose-relative order (in NWU): + *

    + *
  • Point 0: [0, -squareLength / 2, squareLength / 2] + *
  • Point 1: [0, squareLength / 2, squareLength / 2] + *
  • Point 2: [0, squareLength / 2, -squareLength / 2] + *
  • Point 3: [0, -squareLength / 2, -squareLength / 2] + *
+ * + * @param imagePoints The projection of these 3d object points into the 2d camera image. The order + * should match the given object point translations. + * @return The resulting transformation that maps the camera pose to the target pose and the + * ambiguity if an alternate solution is available. + */ + public static PNPResults solvePNP_SQUARE( + Matrix cameraMatrix, + Matrix distCoeffs, + List modelTrls, + Point[] imagePoints) { + // solvepnp inputs + MatOfPoint3f objectMat = new MatOfPoint3f(); + MatOfPoint2f imageMat = new MatOfPoint2f(); + MatOfDouble cameraMatrixMat = new MatOfDouble(); + MatOfDouble distCoeffsMat = new MatOfDouble(); + var rvecs = new ArrayList(); + var tvecs = new ArrayList(); + Mat rvec = Mat.zeros(3, 1, CvType.CV_32F); + Mat tvec = Mat.zeros(3, 1, CvType.CV_32F); + Mat reprojectionError = Mat.zeros(2, 1, CvType.CV_32F); + try { + // IPPE_SQUARE expects our corners in a specific order + modelTrls = reorderCircular(modelTrls, true, -1); + imagePoints = reorderCircular(Arrays.asList(imagePoints), true, -1).toArray(Point[]::new); + // translate to opencv classes + translationToTvec(modelTrls.toArray(new Translation3d[0])).assignTo(objectMat); + imageMat.fromArray(imagePoints); + matrixToMat(cameraMatrix.getStorage()).assignTo(cameraMatrixMat); + matrixToMat(distCoeffs.getStorage()).assignTo(distCoeffsMat); + + float[] errors = new float[2]; + Transform3d best = null; + Transform3d alt = null; + + for (int tries = 0; tries < 2; tries++) { + // calc rvecs/tvecs and associated reprojection error from image points + Calib3d.solvePnPGeneric( + objectMat, + imageMat, + cameraMatrixMat, + distCoeffsMat, + rvecs, + tvecs, + false, + Calib3d.SOLVEPNP_IPPE_SQUARE, + rvec, + tvec, + reprojectionError); + + reprojectionError.get(0, 0, errors); + // convert to wpilib coordinates + best = new Transform3d(tvecToTranslation(tvecs.get(0)), rvecToRotation(rvecs.get(0))); + + if (tvecs.size() > 1) { + alt = new Transform3d(tvecToTranslation(tvecs.get(1)), 
rvecToRotation(rvecs.get(1))); + } + + // check if we got a NaN result + if (!Double.isNaN(errors[0])) break; + else { // add noise and retry + double[] br = imageMat.get(0, 0); + br[0] -= 0.001; + br[1] -= 0.001; + imageMat.put(0, 0, br); + } + } + + // check if solvePnP failed with NaN results and retrying failed + if (Double.isNaN(errors[0])) throw new Exception("SolvePNP_SQUARE NaN result"); + + if (alt != null) + return new PNPResults(best, alt, errors[0] / errors[1], errors[0], errors[1]); + else return new PNPResults(best, errors[0]); + } + // solvePnP failed + catch (Exception e) { + System.err.println("SolvePNP_SQUARE failed!"); + e.printStackTrace(); + return new PNPResults(); + } finally { + // release our Mats from native memory + objectMat.release(); + imageMat.release(); + cameraMatrixMat.release(); + distCoeffsMat.release(); + for (var v : rvecs) v.release(); + for (var v : tvecs) v.release(); + rvec.release(); + tvec.release(); + reprojectionError.release(); + } } - } - - /** - * Finds the transformation that maps the camera's pose to the origin of the supplied object. An - * "object" is simply a set of known 3d translations that correspond to the given 2d points. If, - * for example, the object translations are given relative to close-right corner of the blue - * alliance(the default origin), a camera-to-origin transformation is returned. If the - * translations are relative to a target's pose, a camera-to-target transformation is returned. - * - *

There must be at least 3 points to use this method. This does not return an alternate - * solution-- if you are intending to use solvePNP on a single AprilTag, see {@link - * #solvePNP_SQUARE} instead. - * - * @param cameraMatrix The camera intrinsics matrix in standard opencv form - * @param distCoeffs The camera distortion matrix in standard opencv form - * @param objectTrls The translations of the object corners, relative to the field. - * @param imagePoints The projection of these 3d object points into the 2d camera image. The order - * should match the given object point translations. - * @return The resulting transformation that maps the camera pose to the target pose. If the 3d - * model points are supplied relative to the origin, this transformation brings the camera to - * the origin. - */ - public static PNPResults solvePNP_SQPNP( - Matrix cameraMatrix, - Matrix distCoeffs, - List objectTrls, - Point[] imagePoints) { - try { - // translate to opencv classes - MatOfPoint3f objectMat = translationToTvec(objectTrls.toArray(new Translation3d[0])); - MatOfPoint2f imageMat = new MatOfPoint2f(imagePoints); - Mat cameraMatrixMat = matrixToMat(cameraMatrix.getStorage()); - Mat distCoeffsMat = matrixToMat(distCoeffs.getStorage()); - var rvecs = new ArrayList(); - var tvecs = new ArrayList(); - Mat rvec = Mat.zeros(3, 1, CvType.CV_32F); - Mat tvec = Mat.zeros(3, 1, CvType.CV_32F); - Mat reprojectionError = new Mat(); - // calc rvec/tvec from image points - Calib3d.solvePnPGeneric( - objectMat, - imageMat, - cameraMatrixMat, - distCoeffsMat, - rvecs, - tvecs, - false, - Calib3d.SOLVEPNP_SQPNP, - rvec, - tvec, - reprojectionError); - - float[] error = new float[1]; - reprojectionError.get(0, 0, error); - // convert to wpilib coordinates - var best = new Transform3d(tvecToTranslation(tvecs.get(0)), rvecToRotation(rvecs.get(0))); - - // release our Mats from native memory - objectMat.release(); - imageMat.release(); - cameraMatrixMat.release(); - 
distCoeffsMat.release(); - for (var v : rvecs) v.release(); - for (var v : tvecs) v.release(); - rvec.release(); - tvec.release(); - reprojectionError.release(); - - // check if solvePnP failed with NaN results - if (Double.isNaN(error[0])) throw new Exception("SolvePNP_SQPNP NaN result"); - - return new PNPResults(best, error[0]); - } catch (Exception e) { - System.err.println("SolvePNP_SQPNP failed!"); - e.printStackTrace(); - return new PNPResults(); + + /** + * Finds the transformation that maps the camera's pose to the origin of the supplied object. An + * "object" is simply a set of known 3d translations that correspond to the given 2d points. If, + * for example, the object translations are given relative to close-right corner of the blue + * alliance(the default origin), a camera-to-origin transformation is returned. If the + * translations are relative to a target's pose, a camera-to-target transformation is returned. + * + *

There must be at least 3 points to use this method. This does not return an alternate + * solution-- if you are intending to use solvePNP on a single AprilTag, see {@link + * #solvePNP_SQUARE} instead. + * + * @param cameraMatrix The camera intrinsics matrix in standard opencv form + * @param distCoeffs The camera distortion matrix in standard opencv form + * @param objectTrls The translations of the object corners, relative to the field. + * @param imagePoints The projection of these 3d object points into the 2d camera image. The order + * should match the given object point translations. + * @return The resulting transformation that maps the camera pose to the target pose. If the 3d + * model points are supplied relative to the origin, this transformation brings the camera to + * the origin. + */ + public static PNPResults solvePNP_SQPNP( + Matrix cameraMatrix, + Matrix distCoeffs, + List objectTrls, + Point[] imagePoints) { + try { + // translate to opencv classes + MatOfPoint3f objectMat = translationToTvec(objectTrls.toArray(new Translation3d[0])); + MatOfPoint2f imageMat = new MatOfPoint2f(imagePoints); + Mat cameraMatrixMat = matrixToMat(cameraMatrix.getStorage()); + Mat distCoeffsMat = matrixToMat(distCoeffs.getStorage()); + var rvecs = new ArrayList(); + var tvecs = new ArrayList(); + Mat rvec = Mat.zeros(3, 1, CvType.CV_32F); + Mat tvec = Mat.zeros(3, 1, CvType.CV_32F); + Mat reprojectionError = new Mat(); + // calc rvec/tvec from image points + Calib3d.solvePnPGeneric( + objectMat, + imageMat, + cameraMatrixMat, + distCoeffsMat, + rvecs, + tvecs, + false, + Calib3d.SOLVEPNP_SQPNP, + rvec, + tvec, + reprojectionError); + + float[] error = new float[1]; + reprojectionError.get(0, 0, error); + // convert to wpilib coordinates + var best = new Transform3d(tvecToTranslation(tvecs.get(0)), rvecToRotation(rvecs.get(0))); + + // release our Mats from native memory + objectMat.release(); + imageMat.release(); + cameraMatrixMat.release(); + 
distCoeffsMat.release(); + for (var v : rvecs) v.release(); + for (var v : tvecs) v.release(); + rvec.release(); + tvec.release(); + reprojectionError.release(); + + // check if solvePnP failed with NaN results + if (Double.isNaN(error[0])) throw new Exception("SolvePNP_SQPNP NaN result"); + + return new PNPResults(best, error[0]); + } catch (Exception e) { + System.err.println("SolvePNP_SQPNP failed!"); + e.printStackTrace(); + return new PNPResults(); + } } - } } diff --git a/photon-targeting/src/main/java/org/photonvision/estimation/RotTrlTransform3d.java b/photon-targeting/src/main/java/org/photonvision/estimation/RotTrlTransform3d.java index bbd26c08ca..2b79e920d6 100644 --- a/photon-targeting/src/main/java/org/photonvision/estimation/RotTrlTransform3d.java +++ b/photon-targeting/src/main/java/org/photonvision/estimation/RotTrlTransform3d.java @@ -29,198 +29,198 @@ * Represents a transformation that first rotates a pose around the origin, and then translates it. */ public class RotTrlTransform3d { - private final Translation3d trl; - private final Rotation3d rot; - // TODO: removal awaiting wpilib Rotation3d performance improvements - private double m_w; - private double m_x; - private double m_y; - private double m_z; - - /** - * A rotation-translation transformation. - * - *

Applying this RotTrlTransform3d to poses will preserve their current origin-to-pose - * transform as if the origin was transformed by these components instead. - * - * @param rot The rotation component - * @param trl The translation component - */ - public RotTrlTransform3d(Rotation3d rot, Translation3d trl) { - this.rot = rot; - var quat = rot.getQuaternion(); - m_w = quat.getW(); - m_x = quat.getX(); - m_y = quat.getY(); - m_z = quat.getZ(); - this.trl = trl; - } - - public RotTrlTransform3d(Pose3d initial, Pose3d last) { - // this.rot = last.getRotation().minus(initial.getRotation()); - // this.trl = last.getTranslation().minus(initial.getTranslation().rotateBy(rot)); - - var quat = initial.getRotation().getQuaternion(); - m_w = quat.getW(); - m_x = quat.getX(); - m_y = quat.getY(); - m_z = quat.getZ(); - this.rot = invrotate(last.getRotation()); - this.trl = last.getTranslation().minus(rotate(initial.getTranslation())); - } - - /** - * Creates a rotation-translation transformation from a Transform3d. - * - *

Applying this RotTrlTransform3d to poses will preserve their current origin-to-pose - * transform as if the origin was transformed by trf instead. - * - * @param trf The origin transformation - */ - public RotTrlTransform3d(Transform3d trf) { - this(trf.getRotation(), trf.getTranslation()); - } - - public RotTrlTransform3d() { - this(new Rotation3d(), new Translation3d()); - } - - private Translation3d rotate(Translation3d otrl) { - final var p = new Quaternion(0.0, otrl.getX(), otrl.getY(), otrl.getZ()); - final var qprime = times(times(p), new Quaternion(m_w, -m_x, -m_y, -m_z)); - return new Translation3d(qprime.getX(), qprime.getY(), qprime.getZ()); - } - - private Translation3d invrotate(Translation3d otrl) { - m_x = -m_x; - m_y = -m_y; - m_z = -m_z; - var result = rotate(otrl); - m_x = -m_x; - m_y = -m_y; - m_z = -m_z; - return result; - } - - private Rotation3d rotate(Rotation3d orot) { - return new Rotation3d(times(orot.getQuaternion())); - } - - private Rotation3d invrotate(Rotation3d orot) { - m_x = -m_x; - m_y = -m_y; - m_z = -m_z; - var result = rotate(orot); - m_x = -m_x; - m_y = -m_y; - m_z = -m_z; - return result; - } - - /** - * The rotation-translation transformation that makes poses in the world consider this pose as the - * new origin, or change the basis to this pose. - * - * @param pose The new origin - */ - public static RotTrlTransform3d makeRelativeTo(Pose3d pose) { - return new RotTrlTransform3d(pose.getRotation(), pose.getTranslation()).inverse(); - } - - /** The inverse of this transformation. Applying the inverse will "undo" this transformation. 
*/ - public RotTrlTransform3d inverse() { - // var inverseRot = rot.unaryMinus(); - // var inverseTrl = trl.rotateBy(inverseRot).unaryMinus(); - // return new RotTrlTransform3d(inverseRot, inverseTrl); - - var inverseTrl = invrotate(trl).unaryMinus(); - return new RotTrlTransform3d(new Rotation3d(new Quaternion(m_w, -m_x, -m_y, -m_z)), inverseTrl); - } - - /** This transformation as a Transform3d (as if of the origin) */ - public Transform3d getTransform() { - return new Transform3d(trl, rot); - } - - /** The translation component of this transformation */ - public Translation3d getTranslation() { - return trl; - } - - /** The rotation component of this transformation */ - public Rotation3d getRotation() { - return rot; - } - - public Translation3d apply(Translation3d trl) { - // return trl.rotateBy(rot).plus(this.trl); - return rotate(trl).plus(this.trl); - } - - public List applyTrls(List trls) { - return trls.stream().map(this::apply).collect(Collectors.toList()); - } - - public Rotation3d apply(Rotation3d rot) { - return rotate(rot); - } - - public List applyRots(List rots) { - return rots.stream().map(this::apply).collect(Collectors.toList()); - } - - public Pose3d apply(Pose3d pose) { - // return new Pose3d(pose.getTranslation().rotateBy(rot).plus(trl), - // pose.getRotation().plus(rot)); - return new Pose3d(apply(pose.getTranslation()), apply(pose.getRotation())); - } - - public List applyPoses(List poses) { - return poses.stream().map(this::apply).collect(Collectors.toList()); - } - - // TODO: removal awaiting wpilib Rotation3d performance improvements - private Quaternion times(Quaternion other) { - final double o_w = other.getW(); - final double o_x = other.getX(); - final double o_y = other.getY(); - final double o_z = other.getZ(); - return times(m_w, m_x, m_y, m_z, o_w, o_x, o_y, o_z); - } - - private static Quaternion times(Quaternion a, Quaternion b) { - final double m_w = a.getW(); - final double m_x = a.getX(); - final double m_y = a.getY(); - 
final double m_z = a.getZ(); - final double o_w = b.getW(); - final double o_x = b.getX(); - final double o_y = b.getY(); - final double o_z = b.getZ(); - return times(m_w, m_x, m_y, m_z, o_w, o_x, o_y, o_z); - } - - private static Quaternion times( - double m_w, - double m_x, - double m_y, - double m_z, - double o_w, - double o_x, - double o_y, - double o_z) { - // https://en.wikipedia.org/wiki/Quaternion#Scalar_and_vector_parts - - // v₁ x v₂ - final double cross_x = m_y * o_z - o_y * m_z; - final double cross_y = o_x * m_z - m_x * o_z; - final double cross_z = m_x * o_y - o_x * m_y; - - // v = w₁v₂ + w₂v₁ + v₁ x v₂ - final double new_x = o_x * m_w + (m_x * o_w) + cross_x; - final double new_y = o_y * m_w + (m_y * o_w) + cross_y; - final double new_z = o_z * m_w + (m_z * o_w) + cross_z; - - return new Quaternion(m_w * o_w - (m_x * o_x + m_y * o_y + m_z * o_z), new_x, new_y, new_z); - } + private final Translation3d trl; + private final Rotation3d rot; + // TODO: removal awaiting wpilib Rotation3d performance improvements + private double m_w; + private double m_x; + private double m_y; + private double m_z; + + /** + * A rotation-translation transformation. + * + *

Applying this RotTrlTransform3d to poses will preserve their current origin-to-pose + * transform as if the origin was transformed by these components instead. + * + * @param rot The rotation component + * @param trl The translation component + */ + public RotTrlTransform3d(Rotation3d rot, Translation3d trl) { + this.rot = rot; + var quat = rot.getQuaternion(); + m_w = quat.getW(); + m_x = quat.getX(); + m_y = quat.getY(); + m_z = quat.getZ(); + this.trl = trl; + } + + public RotTrlTransform3d(Pose3d initial, Pose3d last) { + // this.rot = last.getRotation().minus(initial.getRotation()); + // this.trl = last.getTranslation().minus(initial.getTranslation().rotateBy(rot)); + + var quat = initial.getRotation().getQuaternion(); + m_w = quat.getW(); + m_x = quat.getX(); + m_y = quat.getY(); + m_z = quat.getZ(); + this.rot = invrotate(last.getRotation()); + this.trl = last.getTranslation().minus(rotate(initial.getTranslation())); + } + + /** + * Creates a rotation-translation transformation from a Transform3d. + * + *

Applying this RotTrlTransform3d to poses will preserve their current origin-to-pose + * transform as if the origin was transformed by trf instead. + * + * @param trf The origin transformation + */ + public RotTrlTransform3d(Transform3d trf) { + this(trf.getRotation(), trf.getTranslation()); + } + + public RotTrlTransform3d() { + this(new Rotation3d(), new Translation3d()); + } + + private Translation3d rotate(Translation3d otrl) { + final var p = new Quaternion(0.0, otrl.getX(), otrl.getY(), otrl.getZ()); + final var qprime = times(times(p), new Quaternion(m_w, -m_x, -m_y, -m_z)); + return new Translation3d(qprime.getX(), qprime.getY(), qprime.getZ()); + } + + private Translation3d invrotate(Translation3d otrl) { + m_x = -m_x; + m_y = -m_y; + m_z = -m_z; + var result = rotate(otrl); + m_x = -m_x; + m_y = -m_y; + m_z = -m_z; + return result; + } + + private Rotation3d rotate(Rotation3d orot) { + return new Rotation3d(times(orot.getQuaternion())); + } + + private Rotation3d invrotate(Rotation3d orot) { + m_x = -m_x; + m_y = -m_y; + m_z = -m_z; + var result = rotate(orot); + m_x = -m_x; + m_y = -m_y; + m_z = -m_z; + return result; + } + + /** + * The rotation-translation transformation that makes poses in the world consider this pose as the + * new origin, or change the basis to this pose. + * + * @param pose The new origin + */ + public static RotTrlTransform3d makeRelativeTo(Pose3d pose) { + return new RotTrlTransform3d(pose.getRotation(), pose.getTranslation()).inverse(); + } + + /** The inverse of this transformation. Applying the inverse will "undo" this transformation. 
*/ + public RotTrlTransform3d inverse() { + // var inverseRot = rot.unaryMinus(); + // var inverseTrl = trl.rotateBy(inverseRot).unaryMinus(); + // return new RotTrlTransform3d(inverseRot, inverseTrl); + + var inverseTrl = invrotate(trl).unaryMinus(); + return new RotTrlTransform3d(new Rotation3d(new Quaternion(m_w, -m_x, -m_y, -m_z)), inverseTrl); + } + + /** This transformation as a Transform3d (as if of the origin) */ + public Transform3d getTransform() { + return new Transform3d(trl, rot); + } + + /** The translation component of this transformation */ + public Translation3d getTranslation() { + return trl; + } + + /** The rotation component of this transformation */ + public Rotation3d getRotation() { + return rot; + } + + public Translation3d apply(Translation3d trl) { + // return trl.rotateBy(rot).plus(this.trl); + return rotate(trl).plus(this.trl); + } + + public List applyTrls(List trls) { + return trls.stream().map(this::apply).collect(Collectors.toList()); + } + + public Rotation3d apply(Rotation3d rot) { + return rotate(rot); + } + + public List applyRots(List rots) { + return rots.stream().map(this::apply).collect(Collectors.toList()); + } + + public Pose3d apply(Pose3d pose) { + // return new Pose3d(pose.getTranslation().rotateBy(rot).plus(trl), + // pose.getRotation().plus(rot)); + return new Pose3d(apply(pose.getTranslation()), apply(pose.getRotation())); + } + + public List applyPoses(List poses) { + return poses.stream().map(this::apply).collect(Collectors.toList()); + } + + // TODO: removal awaiting wpilib Rotation3d performance improvements + private Quaternion times(Quaternion other) { + final double o_w = other.getW(); + final double o_x = other.getX(); + final double o_y = other.getY(); + final double o_z = other.getZ(); + return times(m_w, m_x, m_y, m_z, o_w, o_x, o_y, o_z); + } + + private static Quaternion times(Quaternion a, Quaternion b) { + final double m_w = a.getW(); + final double m_x = a.getX(); + final double m_y = a.getY(); + 
final double m_z = a.getZ(); + final double o_w = b.getW(); + final double o_x = b.getX(); + final double o_y = b.getY(); + final double o_z = b.getZ(); + return times(m_w, m_x, m_y, m_z, o_w, o_x, o_y, o_z); + } + + private static Quaternion times( + double m_w, + double m_x, + double m_y, + double m_z, + double o_w, + double o_x, + double o_y, + double o_z) { + // https://en.wikipedia.org/wiki/Quaternion#Scalar_and_vector_parts + + // v₁ x v₂ + final double cross_x = m_y * o_z - o_y * m_z; + final double cross_y = o_x * m_z - m_x * o_z; + final double cross_z = m_x * o_y - o_x * m_y; + + // v = w₁v₂ + w₂v₁ + v₁ x v₂ + final double new_x = o_x * m_w + (m_x * o_w) + cross_x; + final double new_y = o_y * m_w + (m_y * o_w) + cross_y; + final double new_z = o_z * m_w + (m_z * o_w) + cross_z; + + return new Quaternion(m_w * o_w - (m_x * o_x + m_y * o_y + m_z * o_z), new_x, new_y, new_z); + } } diff --git a/photon-targeting/src/main/java/org/photonvision/estimation/TargetModel.java b/photon-targeting/src/main/java/org/photonvision/estimation/TargetModel.java index 96f473a3b6..4a4c22449d 100644 --- a/photon-targeting/src/main/java/org/photonvision/estimation/TargetModel.java +++ b/photon-targeting/src/main/java/org/photonvision/estimation/TargetModel.java @@ -28,156 +28,156 @@ /** Describes the 3d model of a target. */ public class TargetModel { - /** - * Translations of this target's vertices relative to its pose. Rectangular and spherical targets - * will have four vertices. See their respective constructors for more info. - */ - public final List vertices; + /** + * Translations of this target's vertices relative to its pose. Rectangular and spherical targets + * will have four vertices. See their respective constructors for more info. 
+ */ + public final List vertices; - public final boolean isPlanar; - public final boolean isSpherical; + public final boolean isPlanar; + public final boolean isSpherical; - public static final TargetModel kTag16h5 = - new TargetModel(Units.inchesToMeters(6), Units.inchesToMeters(6)); + public static final TargetModel kTag16h5 = + new TargetModel(Units.inchesToMeters(6), Units.inchesToMeters(6)); - /** - * Creates a rectangular, planar target model given the width and height. The model has four - * vertices: - * - *

    - *
  • Point 0: [0, -width/2, -height/2] - *
  • Point 1: [0, width/2, -height/2] - *
  • Point 2: [0, width/2, height/2] - *
  • Point 3: [0, -width/2, height/2] - *
- */ - public TargetModel(double widthMeters, double heightMeters) { - this.vertices = - List.of( - // this order is relevant for AprilTag compatibility - new Translation3d(0, -widthMeters / 2.0, -heightMeters / 2.0), - new Translation3d(0, widthMeters / 2.0, -heightMeters / 2.0), - new Translation3d(0, widthMeters / 2.0, heightMeters / 2.0), - new Translation3d(0, -widthMeters / 2.0, heightMeters / 2.0)); - this.isPlanar = true; - this.isSpherical = false; - } + /** + * Creates a rectangular, planar target model given the width and height. The model has four + * vertices: + * + *
    + *
  • Point 0: [0, -width/2, -height/2] + *
  • Point 1: [0, width/2, -height/2] + *
  • Point 2: [0, width/2, height/2] + *
  • Point 3: [0, -width/2, height/2] + *
+ */ + public TargetModel(double widthMeters, double heightMeters) { + this.vertices = + List.of( + // this order is relevant for AprilTag compatibility + new Translation3d(0, -widthMeters / 2.0, -heightMeters / 2.0), + new Translation3d(0, widthMeters / 2.0, -heightMeters / 2.0), + new Translation3d(0, widthMeters / 2.0, heightMeters / 2.0), + new Translation3d(0, -widthMeters / 2.0, heightMeters / 2.0)); + this.isPlanar = true; + this.isSpherical = false; + } - /** - * Creates a cuboid target model given the length, width, height. The model has eight vertices: - * - *
    - *
  • Point 0: [length/2, -width/2, -height/2] - *
  • Point 1: [length/2, width/2, -height/2] - *
  • Point 2: [length/2, width/2, height/2] - *
  • Point 3: [length/2, -width/2, height/2] - *
  • Point 4: [-length/2, -width/2, height/2] - *
  • Point 5: [-length/2, width/2, height/2] - *
  • Point 6: [-length/2, width/2, -height/2] - *
  • Point 7: [-length/2, -width/2, -height/2] - *
- */ - public TargetModel(double lengthMeters, double widthMeters, double heightMeters) { - this( - List.of( - new Translation3d(lengthMeters / 2.0, -widthMeters / 2.0, -heightMeters / 2.0), - new Translation3d(lengthMeters / 2.0, widthMeters / 2.0, -heightMeters / 2.0), - new Translation3d(lengthMeters / 2.0, widthMeters / 2.0, heightMeters / 2.0), - new Translation3d(lengthMeters / 2.0, -widthMeters / 2.0, heightMeters / 2.0), - new Translation3d(-lengthMeters / 2.0, -widthMeters / 2.0, heightMeters / 2.0), - new Translation3d(-lengthMeters / 2.0, widthMeters / 2.0, heightMeters / 2.0), - new Translation3d(-lengthMeters / 2.0, widthMeters / 2.0, -heightMeters / 2.0), - new Translation3d(-lengthMeters / 2.0, -widthMeters / 2.0, -heightMeters / 2.0))); - } + /** + * Creates a cuboid target model given the length, width, height. The model has eight vertices: + * + *
    + *
  • Point 0: [length/2, -width/2, -height/2] + *
  • Point 1: [length/2, width/2, -height/2] + *
  • Point 2: [length/2, width/2, height/2] + *
  • Point 3: [length/2, -width/2, height/2] + *
  • Point 4: [-length/2, -width/2, height/2] + *
  • Point 5: [-length/2, width/2, height/2] + *
  • Point 6: [-length/2, width/2, -height/2] + *
  • Point 7: [-length/2, -width/2, -height/2] + *
+ */ + public TargetModel(double lengthMeters, double widthMeters, double heightMeters) { + this( + List.of( + new Translation3d(lengthMeters / 2.0, -widthMeters / 2.0, -heightMeters / 2.0), + new Translation3d(lengthMeters / 2.0, widthMeters / 2.0, -heightMeters / 2.0), + new Translation3d(lengthMeters / 2.0, widthMeters / 2.0, heightMeters / 2.0), + new Translation3d(lengthMeters / 2.0, -widthMeters / 2.0, heightMeters / 2.0), + new Translation3d(-lengthMeters / 2.0, -widthMeters / 2.0, heightMeters / 2.0), + new Translation3d(-lengthMeters / 2.0, widthMeters / 2.0, heightMeters / 2.0), + new Translation3d(-lengthMeters / 2.0, widthMeters / 2.0, -heightMeters / 2.0), + new Translation3d(-lengthMeters / 2.0, -widthMeters / 2.0, -heightMeters / 2.0))); + } - /** - * Creates a spherical target model which has similar dimensions regardless of its rotation. This - * model has four vertices: - * - *
    - *
  • Point 0: [0, -radius, 0] - *
  • Point 1: [0, 0, -radius] - *
  • Point 2: [0, radius, 0] - *
  • Point 3: [0, 0, radius] - *
- * - * Q: Why these vertices? A: This target should be oriented to the camera every frame, much - * like a sprite/decal, and these vertices represent the ellipse vertices (maxima). These vertices - * are used for drawing the image of this sphere, but do not match the corners that will be - * published by photonvision. - */ - public TargetModel(double diameterMeters) { - double radius = diameterMeters / 2.0; - this.vertices = - List.of( - new Translation3d(0, -radius, 0), - new Translation3d(0, 0, -radius), - new Translation3d(0, radius, 0), - new Translation3d(0, 0, radius)); - this.isPlanar = false; - this.isSpherical = true; - } + /** + * Creates a spherical target model which has similar dimensions regardless of its rotation. This + * model has four vertices: + * + *
    + *
  • Point 0: [0, -radius, 0] + *
  • Point 1: [0, 0, -radius] + *
  • Point 2: [0, radius, 0] + *
  • Point 3: [0, 0, radius] + *
+ * + * Q: Why these vertices? A: This target should be oriented to the camera every frame, much + * like a sprite/decal, and these vertices represent the ellipse vertices (maxima). These vertices + * are used for drawing the image of this sphere, but do not match the corners that will be + * published by photonvision. + */ + public TargetModel(double diameterMeters) { + double radius = diameterMeters / 2.0; + this.vertices = + List.of( + new Translation3d(0, -radius, 0), + new Translation3d(0, 0, -radius), + new Translation3d(0, radius, 0), + new Translation3d(0, 0, radius)); + this.isPlanar = false; + this.isSpherical = true; + } - /** - * Creates a target model from arbitrary 3d vertices. Automatically determines if the given - * vertices are planar(x == 0). More than 2 vertices must be given. If this is a planar model, the - * vertices should define a non-intersecting contour. - * - * @param vertices Translations representing the vertices of this target model relative to its - * pose. - */ - public TargetModel(List vertices) { - this.isSpherical = false; - if (vertices == null || vertices.size() <= 2) { - vertices = new ArrayList<>(); - this.isPlanar = false; - } else { - boolean cornersPlanar = true; - for (Translation3d corner : vertices) { - if (corner.getX() != 0) cornersPlanar = false; - } - this.isPlanar = cornersPlanar; + /** + * Creates a target model from arbitrary 3d vertices. Automatically determines if the given + * vertices are planar(x == 0). More than 2 vertices must be given. If this is a planar model, the + * vertices should define a non-intersecting contour. + * + * @param vertices Translations representing the vertices of this target model relative to its + * pose. 
+ */ + public TargetModel(List vertices) { + this.isSpherical = false; + if (vertices == null || vertices.size() <= 2) { + vertices = new ArrayList<>(); + this.isPlanar = false; + } else { + boolean cornersPlanar = true; + for (Translation3d corner : vertices) { + if (corner.getX() != 0) cornersPlanar = false; + } + this.isPlanar = cornersPlanar; + } + this.vertices = vertices; } - this.vertices = vertices; - } - /** - * This target's vertices offset from its field pose. - * - *

Note: If this target is spherical, use {@link #getOrientedPose(Translation3d, - * Translation3d)} with this method. - */ - public List getFieldVertices(Pose3d targetPose) { - var basisChange = new RotTrlTransform3d(targetPose.getRotation(), targetPose.getTranslation()); - return vertices.stream().map(t -> basisChange.apply(t)).collect(Collectors.toList()); - } + /** + * This target's vertices offset from its field pose. + * + *

Note: If this target is spherical, use {@link #getOrientedPose(Translation3d, + * Translation3d)} with this method. + */ + public List getFieldVertices(Pose3d targetPose) { + var basisChange = new RotTrlTransform3d(targetPose.getRotation(), targetPose.getTranslation()); + return vertices.stream().map(t -> basisChange.apply(t)).collect(Collectors.toList()); + } - /** - * Returns a Pose3d with the given target translation oriented (with its relative x-axis aligned) - * to the camera translation. This is used for spherical targets which should not have their - * projection change regardless of their own rotation. - * - * @param tgtTrl This target's translation - * @param cameraTrl Camera's translation - * @return This target's pose oriented to the camera - */ - public static Pose3d getOrientedPose(Translation3d tgtTrl, Translation3d cameraTrl) { - var relCam = cameraTrl.minus(tgtTrl); - var orientToCam = - new Rotation3d( - 0, - new Rotation2d(Math.hypot(relCam.getX(), relCam.getY()), -relCam.getZ()).getRadians(), - new Rotation2d(relCam.getX(), relCam.getY()).getRadians()); - return new Pose3d(tgtTrl, orientToCam); - } + /** + * Returns a Pose3d with the given target translation oriented (with its relative x-axis aligned) + * to the camera translation. This is used for spherical targets which should not have their + * projection change regardless of their own rotation. 
+ * + * @param tgtTrl This target's translation + * @param cameraTrl Camera's translation + * @return This target's pose oriented to the camera + */ + public static Pose3d getOrientedPose(Translation3d tgtTrl, Translation3d cameraTrl) { + var relCam = cameraTrl.minus(tgtTrl); + var orientToCam = + new Rotation3d( + 0, + new Rotation2d(Math.hypot(relCam.getX(), relCam.getY()), -relCam.getZ()).getRadians(), + new Rotation2d(relCam.getX(), relCam.getY()).getRadians()); + return new Pose3d(tgtTrl, orientToCam); + } - @Override - public boolean equals(Object obj) { - if (this == obj) return true; - if (obj instanceof TargetModel) { - var o = (TargetModel) obj; - return vertices.equals(o.vertices) && isPlanar == o.isPlanar && isSpherical == o.isSpherical; + @Override + public boolean equals(Object obj) { + if (this == obj) return true; + if (obj instanceof TargetModel) { + var o = (TargetModel) obj; + return vertices.equals(o.vertices) && isPlanar == o.isPlanar && isSpherical == o.isSpherical; + } + return false; } - return false; - } } diff --git a/photon-targeting/src/main/java/org/photonvision/estimation/VisionEstimation.java b/photon-targeting/src/main/java/org/photonvision/estimation/VisionEstimation.java index 69493fc0c3..387176d57b 100644 --- a/photon-targeting/src/main/java/org/photonvision/estimation/VisionEstimation.java +++ b/photon-targeting/src/main/java/org/photonvision/estimation/VisionEstimation.java @@ -34,99 +34,99 @@ import org.photonvision.targeting.TargetCorner; public class VisionEstimation { - /** Get the visible {@link AprilTag}s which are in the tag layout using the visible tag IDs. 
*/ - public static List getVisibleLayoutTags( - List visTags, AprilTagFieldLayout tagLayout) { - return visTags.stream() - .map( - t -> { - int id = t.getFiducialId(); - var maybePose = tagLayout.getTagPose(id); - return maybePose.map(pose3d -> new AprilTag(id, pose3d)).orElse(null); - }) - .filter(Objects::nonNull) - .collect(Collectors.toList()); - } - - /** - * Performs solvePNP using 3d-2d point correspondences of visible AprilTags to estimate the - * field-to-camera transformation. If only one tag is visible, the result may have an alternate - * solution. - * - *

Note: The returned transformation is from the field origin to the camera pose! - * - *

With only one tag: {@link OpenCVHelp#solvePNP_SQUARE} - * - *

With multiple tags: {@link OpenCVHelp#solvePNP_SQPNP} - * - * @param cameraMatrix The camera intrinsics matrix in standard opencv form - * @param distCoeffs The camera distortion matrix in standard opencv form - * @param visTags The visible tags reported by PV. Non-tag targets are automatically excluded. - * @param tagLayout The known tag layout on the field - * @return The transformation that maps the field origin to the camera pose. Ensure the {@link - * PNPResults} are present before utilizing them. - */ - public static PNPResults estimateCamPosePNP( - Matrix cameraMatrix, - Matrix distCoeffs, - List visTags, - AprilTagFieldLayout tagLayout) { - if (tagLayout == null - || visTags == null - || tagLayout.getTags().size() == 0 - || visTags.size() == 0) { - return new PNPResults(); + /** Get the visible {@link AprilTag}s which are in the tag layout using the visible tag IDs. */ + public static List getVisibleLayoutTags( + List visTags, AprilTagFieldLayout tagLayout) { + return visTags.stream() + .map( + t -> { + int id = t.getFiducialId(); + var maybePose = tagLayout.getTagPose(id); + return maybePose.map(pose3d -> new AprilTag(id, pose3d)).orElse(null); + }) + .filter(Objects::nonNull) + .collect(Collectors.toList()); } - var corners = new ArrayList(); - var knownTags = new ArrayList(); - // ensure these are AprilTags in our layout - for (var tgt : visTags) { - int id = tgt.getFiducialId(); - tagLayout - .getTagPose(id) - .ifPresent( - pose -> { - knownTags.add(new AprilTag(id, pose)); - corners.addAll(tgt.getDetectedCorners()); - }); - } - if (knownTags.size() == 0 || corners.size() == 0 || corners.size() % 4 != 0) { - return new PNPResults(); - } - Point[] points = OpenCVHelp.cornersToPoints(corners); + /** + * Performs solvePNP using 3d-2d point correspondences of visible AprilTags to estimate the + * field-to-camera transformation. If only one tag is visible, the result may have an alternate + * solution. + * + *

Note: The returned transformation is from the field origin to the camera pose! + * + *

With only one tag: {@link OpenCVHelp#solvePNP_SQUARE} + * + *

With multiple tags: {@link OpenCVHelp#solvePNP_SQPNP} + * + * @param cameraMatrix The camera intrinsics matrix in standard opencv form + * @param distCoeffs The camera distortion matrix in standard opencv form + * @param visTags The visible tags reported by PV. Non-tag targets are automatically excluded. + * @param tagLayout The known tag layout on the field + * @return The transformation that maps the field origin to the camera pose. Ensure the {@link + * PNPResults} are present before utilizing them. + */ + public static PNPResults estimateCamPosePNP( + Matrix cameraMatrix, + Matrix distCoeffs, + List visTags, + AprilTagFieldLayout tagLayout) { + if (tagLayout == null + || visTags == null + || tagLayout.getTags().size() == 0 + || visTags.size() == 0) { + return new PNPResults(); + } - // single-tag pnp - if (knownTags.size() == 1) { - var camToTag = - OpenCVHelp.solvePNP_SQUARE( - cameraMatrix, distCoeffs, TargetModel.kTag16h5.vertices, points); - if (!camToTag.isPresent) return new PNPResults(); - var bestPose = knownTags.get(0).pose.transformBy(camToTag.best.inverse()); - var altPose = new Pose3d(); - if (camToTag.ambiguity != 0) - altPose = knownTags.get(0).pose.transformBy(camToTag.alt.inverse()); + var corners = new ArrayList(); + var knownTags = new ArrayList(); + // ensure these are AprilTags in our layout + for (var tgt : visTags) { + int id = tgt.getFiducialId(); + tagLayout + .getTagPose(id) + .ifPresent( + pose -> { + knownTags.add(new AprilTag(id, pose)); + corners.addAll(tgt.getDetectedCorners()); + }); + } + if (knownTags.size() == 0 || corners.size() == 0 || corners.size() % 4 != 0) { + return new PNPResults(); + } + Point[] points = OpenCVHelp.cornersToPoints(corners); - var o = new Pose3d(); - return new PNPResults( - new Transform3d(o, bestPose), - new Transform3d(o, altPose), - camToTag.ambiguity, - camToTag.bestReprojErr, - camToTag.altReprojErr); - } - // multi-tag pnp - else { - var objectTrls = new ArrayList(); - for (var tag : knownTags) 
objectTrls.addAll(TargetModel.kTag16h5.getFieldVertices(tag.pose)); - var camToOrigin = OpenCVHelp.solvePNP_SQPNP(cameraMatrix, distCoeffs, objectTrls, points); - if (!camToOrigin.isPresent) return new PNPResults(); - return new PNPResults( - camToOrigin.best.inverse(), - camToOrigin.alt.inverse(), - camToOrigin.ambiguity, - camToOrigin.bestReprojErr, - camToOrigin.altReprojErr); + // single-tag pnp + if (knownTags.size() == 1) { + var camToTag = + OpenCVHelp.solvePNP_SQUARE( + cameraMatrix, distCoeffs, TargetModel.kTag16h5.vertices, points); + if (!camToTag.isPresent) return new PNPResults(); + var bestPose = knownTags.get(0).pose.transformBy(camToTag.best.inverse()); + var altPose = new Pose3d(); + if (camToTag.ambiguity != 0) + altPose = knownTags.get(0).pose.transformBy(camToTag.alt.inverse()); + + var o = new Pose3d(); + return new PNPResults( + new Transform3d(o, bestPose), + new Transform3d(o, altPose), + camToTag.ambiguity, + camToTag.bestReprojErr, + camToTag.altReprojErr); + } + // multi-tag pnp + else { + var objectTrls = new ArrayList(); + for (var tag : knownTags) objectTrls.addAll(TargetModel.kTag16h5.getFieldVertices(tag.pose)); + var camToOrigin = OpenCVHelp.solvePNP_SQPNP(cameraMatrix, distCoeffs, objectTrls, points); + if (!camToOrigin.isPresent) return new PNPResults(); + return new PNPResults( + camToOrigin.best.inverse(), + camToOrigin.alt.inverse(), + camToOrigin.ambiguity, + camToOrigin.bestReprojErr, + camToOrigin.altReprojErr); + } } - } } diff --git a/photon-targeting/src/main/java/org/photonvision/targeting/MultiTargetPNPResults.java b/photon-targeting/src/main/java/org/photonvision/targeting/MultiTargetPNPResults.java index 410064a9b3..453c0a57e5 100644 --- a/photon-targeting/src/main/java/org/photonvision/targeting/MultiTargetPNPResults.java +++ b/photon-targeting/src/main/java/org/photonvision/targeting/MultiTargetPNPResults.java @@ -22,72 +22,72 @@ import org.photonvision.common.dataflow.structures.Packet; public class 
MultiTargetPNPResults { - // Seeing 32 apriltags at once seems like a sane limit - private static final int MAX_IDS = 32; - // pnpresult + MAX_IDS possible targets (arbitrary upper limit that should never be hit, ideally) - public static final int PACK_SIZE_BYTES = PNPResults.PACK_SIZE_BYTES + (Short.BYTES * MAX_IDS); + // Seeing 32 apriltags at once seems like a sane limit + private static final int MAX_IDS = 32; + // pnpresult + MAX_IDS possible targets (arbitrary upper limit that should never be hit, ideally) + public static final int PACK_SIZE_BYTES = PNPResults.PACK_SIZE_BYTES + (Short.BYTES * MAX_IDS); - public PNPResults estimatedPose = new PNPResults(); - public List fiducialIDsUsed = List.of(); + public PNPResults estimatedPose = new PNPResults(); + public List fiducialIDsUsed = List.of(); - public MultiTargetPNPResults() {} + public MultiTargetPNPResults() {} - public MultiTargetPNPResults(PNPResults results, List ids) { - estimatedPose = results; - fiducialIDsUsed = ids; - } + public MultiTargetPNPResults(PNPResults results, List ids) { + estimatedPose = results; + fiducialIDsUsed = ids; + } - public static MultiTargetPNPResults createFromPacket(Packet packet) { - var results = PNPResults.createFromPacket(packet); - var ids = new ArrayList(MAX_IDS); - for (int i = 0; i < MAX_IDS; i++) { - int targetId = (int) packet.decodeShort(); - if (targetId > -1) ids.add(targetId); + public static MultiTargetPNPResults createFromPacket(Packet packet) { + var results = PNPResults.createFromPacket(packet); + var ids = new ArrayList(MAX_IDS); + for (int i = 0; i < MAX_IDS; i++) { + int targetId = (int) packet.decodeShort(); + if (targetId > -1) ids.add(targetId); + } + return new MultiTargetPNPResults(results, ids); } - return new MultiTargetPNPResults(results, ids); - } - public void populatePacket(Packet packet) { - estimatedPose.populatePacket(packet); - for (int i = 0; i < MAX_IDS; i++) { - if (i < fiducialIDsUsed.size()) { - packet.encode((short) 
fiducialIDsUsed.get(i).byteValue()); - } else { - packet.encode((short) -1); - } + public void populatePacket(Packet packet) { + estimatedPose.populatePacket(packet); + for (int i = 0; i < MAX_IDS; i++) { + if (i < fiducialIDsUsed.size()) { + packet.encode((short) fiducialIDsUsed.get(i).byteValue()); + } else { + packet.encode((short) -1); + } + } } - } - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + ((estimatedPose == null) ? 0 : estimatedPose.hashCode()); - result = prime * result + ((fiducialIDsUsed == null) ? 0 : fiducialIDsUsed.hashCode()); - return result; - } + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + ((estimatedPose == null) ? 0 : estimatedPose.hashCode()); + result = prime * result + ((fiducialIDsUsed == null) ? 0 : fiducialIDsUsed.hashCode()); + return result; + } - @Override - public boolean equals(Object obj) { - if (this == obj) return true; - if (obj == null) return false; - if (getClass() != obj.getClass()) return false; - MultiTargetPNPResults other = (MultiTargetPNPResults) obj; - if (estimatedPose == null) { - if (other.estimatedPose != null) return false; - } else if (!estimatedPose.equals(other.estimatedPose)) return false; - if (fiducialIDsUsed == null) { - if (other.fiducialIDsUsed != null) return false; - } else if (!fiducialIDsUsed.equals(other.fiducialIDsUsed)) return false; - return true; - } + @Override + public boolean equals(Object obj) { + if (this == obj) return true; + if (obj == null) return false; + if (getClass() != obj.getClass()) return false; + MultiTargetPNPResults other = (MultiTargetPNPResults) obj; + if (estimatedPose == null) { + if (other.estimatedPose != null) return false; + } else if (!estimatedPose.equals(other.estimatedPose)) return false; + if (fiducialIDsUsed == null) { + if (other.fiducialIDsUsed != null) return false; + } else if (!fiducialIDsUsed.equals(other.fiducialIDsUsed)) return 
false; + return true; + } - @Override - public String toString() { - return "MultiTargetPNPResults [estimatedPose=" - + estimatedPose - + ", fiducialIDsUsed=" - + fiducialIDsUsed - + "]"; - } + @Override + public String toString() { + return "MultiTargetPNPResults [estimatedPose=" + + estimatedPose + + ", fiducialIDsUsed=" + + fiducialIDsUsed + + "]"; + } } diff --git a/photon-targeting/src/main/java/org/photonvision/targeting/PNPResults.java b/photon-targeting/src/main/java/org/photonvision/targeting/PNPResults.java index 93b04f2386..11a77547fc 100644 --- a/photon-targeting/src/main/java/org/photonvision/targeting/PNPResults.java +++ b/photon-targeting/src/main/java/org/photonvision/targeting/PNPResults.java @@ -31,140 +31,140 @@ * method. */ public class PNPResults { - /** - * If this result is valid. A false value indicates there was an error in estimation, and this - * result should not be used. - */ - public final boolean isPresent; - - /** - * The best-fit transform. The coordinate frame of this transform depends on the method which gave - * this result. - */ - public final Transform3d best; - - /** Reprojection error of the best solution, in pixels */ - public final double bestReprojErr; - - /** - * Alternate, ambiguous solution from solvepnp. If no alternate solution is found, this is equal - * to the best solution. - */ - public final Transform3d alt; - - /** If no alternate solution is found, this is bestReprojErr */ - public final double altReprojErr; - - /** If no alternate solution is found, this is 0 */ - public final double ambiguity; - - /** An empty (invalid) result. 
*/ - public PNPResults() { - this.isPresent = false; - this.best = new Transform3d(); - this.alt = new Transform3d(); - this.ambiguity = 0; - this.bestReprojErr = 0; - this.altReprojErr = 0; - } - - public PNPResults(Transform3d best, double bestReprojErr) { - this(best, best, 0, bestReprojErr, bestReprojErr); - } - - public PNPResults( - Transform3d best, - Transform3d alt, - double ambiguity, - double bestReprojErr, - double altReprojErr) { - this.isPresent = true; - this.best = best; - this.alt = alt; - this.ambiguity = ambiguity; - this.bestReprojErr = bestReprojErr; - this.altReprojErr = altReprojErr; - } - - public static final int PACK_SIZE_BYTES = 1 + (Double.BYTES * 7 * 2) + (Double.BYTES * 3); - - public static PNPResults createFromPacket(Packet packet) { - var present = packet.decodeBoolean(); - var best = PacketUtils.decodeTransform(packet); - var alt = PacketUtils.decodeTransform(packet); - var bestEr = packet.decodeDouble(); - var altEr = packet.decodeDouble(); - var ambiguity = packet.decodeDouble(); - if (present) { - return new PNPResults(best, alt, ambiguity, bestEr, altEr); - } else { - return new PNPResults(); + /** + * If this result is valid. A false value indicates there was an error in estimation, and this + * result should not be used. + */ + public final boolean isPresent; + + /** + * The best-fit transform. The coordinate frame of this transform depends on the method which gave + * this result. + */ + public final Transform3d best; + + /** Reprojection error of the best solution, in pixels */ + public final double bestReprojErr; + + /** + * Alternate, ambiguous solution from solvepnp. If no alternate solution is found, this is equal + * to the best solution. + */ + public final Transform3d alt; + + /** If no alternate solution is found, this is bestReprojErr */ + public final double altReprojErr; + + /** If no alternate solution is found, this is 0 */ + public final double ambiguity; + + /** An empty (invalid) result. 
*/ + public PNPResults() { + this.isPresent = false; + this.best = new Transform3d(); + this.alt = new Transform3d(); + this.ambiguity = 0; + this.bestReprojErr = 0; + this.altReprojErr = 0; + } + + public PNPResults(Transform3d best, double bestReprojErr) { + this(best, best, 0, bestReprojErr, bestReprojErr); + } + + public PNPResults( + Transform3d best, + Transform3d alt, + double ambiguity, + double bestReprojErr, + double altReprojErr) { + this.isPresent = true; + this.best = best; + this.alt = alt; + this.ambiguity = ambiguity; + this.bestReprojErr = bestReprojErr; + this.altReprojErr = altReprojErr; + } + + public static final int PACK_SIZE_BYTES = 1 + (Double.BYTES * 7 * 2) + (Double.BYTES * 3); + + public static PNPResults createFromPacket(Packet packet) { + var present = packet.decodeBoolean(); + var best = PacketUtils.decodeTransform(packet); + var alt = PacketUtils.decodeTransform(packet); + var bestEr = packet.decodeDouble(); + var altEr = packet.decodeDouble(); + var ambiguity = packet.decodeDouble(); + if (present) { + return new PNPResults(best, alt, ambiguity, bestEr, altEr); + } else { + return new PNPResults(); + } + } + + public Packet populatePacket(Packet packet) { + packet.encode(isPresent); + PacketUtils.encodeTransform(packet, best); + PacketUtils.encodeTransform(packet, alt); + packet.encode(bestReprojErr); + packet.encode(altReprojErr); + packet.encode(ambiguity); + return packet; + } + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + (isPresent ? 1231 : 1237); + result = prime * result + ((best == null) ? 0 : best.hashCode()); + long temp; + temp = Double.doubleToLongBits(bestReprojErr); + result = prime * result + (int) (temp ^ (temp >>> 32)); + result = prime * result + ((alt == null) ? 
0 : alt.hashCode()); + temp = Double.doubleToLongBits(altReprojErr); + result = prime * result + (int) (temp ^ (temp >>> 32)); + temp = Double.doubleToLongBits(ambiguity); + result = prime * result + (int) (temp ^ (temp >>> 32)); + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) return true; + if (obj == null) return false; + if (getClass() != obj.getClass()) return false; + PNPResults other = (PNPResults) obj; + if (isPresent != other.isPresent) return false; + if (best == null) { + if (other.best != null) return false; + } else if (!best.equals(other.best)) return false; + if (Double.doubleToLongBits(bestReprojErr) != Double.doubleToLongBits(other.bestReprojErr)) + return false; + if (alt == null) { + if (other.alt != null) return false; + } else if (!alt.equals(other.alt)) return false; + if (Double.doubleToLongBits(altReprojErr) != Double.doubleToLongBits(other.altReprojErr)) + return false; + if (Double.doubleToLongBits(ambiguity) != Double.doubleToLongBits(other.ambiguity)) + return false; + return true; + } + + @Override + public String toString() { + return "PNPResults [isPresent=" + + isPresent + + ", best=" + + best + + ", bestReprojErr=" + + bestReprojErr + + ", alt=" + + alt + + ", altReprojErr=" + + altReprojErr + + ", ambiguity=" + + ambiguity + + "]"; } - } - - public Packet populatePacket(Packet packet) { - packet.encode(isPresent); - PacketUtils.encodeTransform(packet, best); - PacketUtils.encodeTransform(packet, alt); - packet.encode(bestReprojErr); - packet.encode(altReprojErr); - packet.encode(ambiguity); - return packet; - } - - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + (isPresent ? 1231 : 1237); - result = prime * result + ((best == null) ? 0 : best.hashCode()); - long temp; - temp = Double.doubleToLongBits(bestReprojErr); - result = prime * result + (int) (temp ^ (temp >>> 32)); - result = prime * result + ((alt == null) ? 
0 : alt.hashCode()); - temp = Double.doubleToLongBits(altReprojErr); - result = prime * result + (int) (temp ^ (temp >>> 32)); - temp = Double.doubleToLongBits(ambiguity); - result = prime * result + (int) (temp ^ (temp >>> 32)); - return result; - } - - @Override - public boolean equals(Object obj) { - if (this == obj) return true; - if (obj == null) return false; - if (getClass() != obj.getClass()) return false; - PNPResults other = (PNPResults) obj; - if (isPresent != other.isPresent) return false; - if (best == null) { - if (other.best != null) return false; - } else if (!best.equals(other.best)) return false; - if (Double.doubleToLongBits(bestReprojErr) != Double.doubleToLongBits(other.bestReprojErr)) - return false; - if (alt == null) { - if (other.alt != null) return false; - } else if (!alt.equals(other.alt)) return false; - if (Double.doubleToLongBits(altReprojErr) != Double.doubleToLongBits(other.altReprojErr)) - return false; - if (Double.doubleToLongBits(ambiguity) != Double.doubleToLongBits(other.ambiguity)) - return false; - return true; - } - - @Override - public String toString() { - return "PNPResults [isPresent=" - + isPresent - + ", best=" - + best - + ", bestReprojErr=" - + bestReprojErr - + ", alt=" - + alt - + ", altReprojErr=" - + altReprojErr - + ", ambiguity=" - + ambiguity - + "]"; - } } diff --git a/photon-targeting/src/main/java/org/photonvision/targeting/PhotonPipelineResult.java b/photon-targeting/src/main/java/org/photonvision/targeting/PhotonPipelineResult.java index b09841db74..4472078f8a 100644 --- a/photon-targeting/src/main/java/org/photonvision/targeting/PhotonPipelineResult.java +++ b/photon-targeting/src/main/java/org/photonvision/targeting/PhotonPipelineResult.java @@ -23,219 +23,219 @@ /** Represents a pipeline result from a PhotonCamera. */ public class PhotonPipelineResult { - private static boolean HAS_WARNED = false; - - // Targets to store. - public final List targets = new ArrayList<>(); - - // Latency in milliseconds. 
- private double latencyMillis; - - // Timestamp in milliseconds. - private double timestampSeconds = -1; - - // Multi-tag result - private MultiTargetPNPResults multiTagResult = new MultiTargetPNPResults(); - - /** Constructs an empty pipeline result. */ - public PhotonPipelineResult() {} - - /** - * Constructs a pipeline result. - * - * @param latencyMillis The latency in the pipeline. - * @param targets The list of targets identified by the pipeline. - */ - public PhotonPipelineResult(double latencyMillis, List targets) { - this.latencyMillis = latencyMillis; - this.targets.addAll(targets); - } - - /** - * Constructs a pipeline result. - * - * @param latencyMillis The latency in the pipeline. - * @param targets The list of targets identified by the pipeline. - * @param result Result from multi-target PNP. - */ - public PhotonPipelineResult( - double latencyMillis, List targets, MultiTargetPNPResults result) { - this.latencyMillis = latencyMillis; - this.targets.addAll(targets); - this.multiTagResult = result; - } - - /** - * Returns the size of the packet needed to store this pipeline result. - * - * @return The size of the packet needed to store this pipeline result. - */ - public int getPacketSize() { - return targets.size() * PhotonTrackedTarget.PACK_SIZE_BYTES - + 8 // latency - + MultiTargetPNPResults.PACK_SIZE_BYTES - + 1; // target count - } - - /** - * Returns the best target in this pipeline result. If there are no targets, this method will - * return null. The best target is determined by the target sort mode in the PhotonVision UI. - * - * @return The best target of the pipeline result. - */ - public PhotonTrackedTarget getBestTarget() { - if (!hasTargets() && !HAS_WARNED) { - String errStr = - "This PhotonPipelineResult object has no targets associated with it! Please check hasTargets() " - + "before calling this method. 
For more information, please review the PhotonLib " - + "documentation at http://docs.photonvision.org"; - System.err.println(errStr); - new Exception().printStackTrace(); - HAS_WARNED = true; + private static boolean HAS_WARNED = false; + + // Targets to store. + public final List targets = new ArrayList<>(); + + // Latency in milliseconds. + private double latencyMillis; + + // Timestamp in milliseconds. + private double timestampSeconds = -1; + + // Multi-tag result + private MultiTargetPNPResults multiTagResult = new MultiTargetPNPResults(); + + /** Constructs an empty pipeline result. */ + public PhotonPipelineResult() {} + + /** + * Constructs a pipeline result. + * + * @param latencyMillis The latency in the pipeline. + * @param targets The list of targets identified by the pipeline. + */ + public PhotonPipelineResult(double latencyMillis, List targets) { + this.latencyMillis = latencyMillis; + this.targets.addAll(targets); + } + + /** + * Constructs a pipeline result. + * + * @param latencyMillis The latency in the pipeline. + * @param targets The list of targets identified by the pipeline. + * @param result Result from multi-target PNP. + */ + public PhotonPipelineResult( + double latencyMillis, List targets, MultiTargetPNPResults result) { + this.latencyMillis = latencyMillis; + this.targets.addAll(targets); + this.multiTagResult = result; + } + + /** + * Returns the size of the packet needed to store this pipeline result. + * + * @return The size of the packet needed to store this pipeline result. + */ + public int getPacketSize() { + return targets.size() * PhotonTrackedTarget.PACK_SIZE_BYTES + + 8 // latency + + MultiTargetPNPResults.PACK_SIZE_BYTES + + 1; // target count + } + + /** + * Returns the best target in this pipeline result. If there are no targets, this method will + * return null. The best target is determined by the target sort mode in the PhotonVision UI. + * + * @return The best target of the pipeline result. 
+ */ + public PhotonTrackedTarget getBestTarget() { + if (!hasTargets() && !HAS_WARNED) { + String errStr = + "This PhotonPipelineResult object has no targets associated with it! Please check hasTargets() " + + "before calling this method. For more information, please review the PhotonLib " + + "documentation at http://docs.photonvision.org"; + System.err.println(errStr); + new Exception().printStackTrace(); + HAS_WARNED = true; + } + return hasTargets() ? targets.get(0) : null; + } + + /** + * Returns the latency in the pipeline. + * + * @return The latency in the pipeline. + */ + public double getLatencyMillis() { + return latencyMillis; + } + + /** + * Returns the estimated time the frame was taken, This is more accurate than using + * getLatencyMillis() + * + * @return The timestamp in seconds, or -1 if this result has no timestamp set. + */ + public double getTimestampSeconds() { + return timestampSeconds; } - return hasTargets() ? targets.get(0) : null; - } - - /** - * Returns the latency in the pipeline. - * - * @return The latency in the pipeline. - */ - public double getLatencyMillis() { - return latencyMillis; - } - - /** - * Returns the estimated time the frame was taken, This is more accurate than using - * getLatencyMillis() - * - * @return The timestamp in seconds, or -1 if this result has no timestamp set. - */ - public double getTimestampSeconds() { - return timestampSeconds; - } - - /** - * Sets the FPGA timestamp of this result in seconds. - * - * @param timestampSeconds The timestamp in seconds. - */ - public void setTimestampSeconds(double timestampSeconds) { - this.timestampSeconds = timestampSeconds; - } - - /** - * Returns whether the pipeline has targets. - * - * @return Whether the pipeline has targets. - */ - public boolean hasTargets() { - return targets.size() > 0; - } - - /** - * Returns a copy of the vector of targets. - * - * @return A copy of the vector of targets. 
- */ - public List getTargets() { - return new ArrayList<>(targets); - } - - /** - * Return the latest mulit-target result. Be sure to check - * getMultiTagResult().estimatedPose.isPresent before using the pose estimate! - */ - public MultiTargetPNPResults getMultiTagResult() { - return multiTagResult; - } - - /** - * Populates the fields of the pipeline result from the packet. - * - * @param packet The incoming packet. - * @return The incoming packet. - */ - public Packet createFromPacket(Packet packet) { - // Decode latency, existence of targets, and number of targets. - latencyMillis = packet.decodeDouble(); - this.multiTagResult = MultiTargetPNPResults.createFromPacket(packet); - byte targetCount = packet.decodeByte(); - - targets.clear(); - - // Decode the information of each target. - for (int i = 0; i < (int) targetCount; ++i) { - var target = new PhotonTrackedTarget(); - target.createFromPacket(packet); - targets.add(target); + + /** + * Sets the FPGA timestamp of this result in seconds. + * + * @param timestampSeconds The timestamp in seconds. + */ + public void setTimestampSeconds(double timestampSeconds) { + this.timestampSeconds = timestampSeconds; + } + + /** + * Returns whether the pipeline has targets. + * + * @return Whether the pipeline has targets. + */ + public boolean hasTargets() { + return targets.size() > 0; + } + + /** + * Returns a copy of the vector of targets. + * + * @return A copy of the vector of targets. + */ + public List getTargets() { + return new ArrayList<>(targets); + } + + /** + * Return the latest mulit-target result. Be sure to check + * getMultiTagResult().estimatedPose.isPresent before using the pose estimate! + */ + public MultiTargetPNPResults getMultiTagResult() { + return multiTagResult; + } + + /** + * Populates the fields of the pipeline result from the packet. + * + * @param packet The incoming packet. + * @return The incoming packet. 
+ */ + public Packet createFromPacket(Packet packet) { + // Decode latency, existence of targets, and number of targets. + latencyMillis = packet.decodeDouble(); + this.multiTagResult = MultiTargetPNPResults.createFromPacket(packet); + byte targetCount = packet.decodeByte(); + + targets.clear(); + + // Decode the information of each target. + for (int i = 0; i < (int) targetCount; ++i) { + var target = new PhotonTrackedTarget(); + target.createFromPacket(packet); + targets.add(target); + } + + return packet; + } + + /** + * Populates the outgoing packet with information from this pipeline result. + * + * @param packet The outgoing packet. + * @return The outgoing packet. + */ + public Packet populatePacket(Packet packet) { + // Encode latency, existence of targets, and number of targets. + packet.encode(latencyMillis); + multiTagResult.populatePacket(packet); + packet.encode((byte) targets.size()); + + // Encode the information of each target. + for (var target : targets) target.populatePacket(packet); + + // Return the packet. + return packet; + } + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + ((targets == null) ? 0 : targets.hashCode()); + long temp; + temp = Double.doubleToLongBits(latencyMillis); + result = prime * result + (int) (temp ^ (temp >>> 32)); + temp = Double.doubleToLongBits(timestampSeconds); + result = prime * result + (int) (temp ^ (temp >>> 32)); + result = prime * result + ((multiTagResult == null) ? 
0 : multiTagResult.hashCode()); + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) return true; + if (obj == null) return false; + if (getClass() != obj.getClass()) return false; + PhotonPipelineResult other = (PhotonPipelineResult) obj; + if (targets == null) { + if (other.targets != null) return false; + } else if (!targets.equals(other.targets)) return false; + if (Double.doubleToLongBits(latencyMillis) != Double.doubleToLongBits(other.latencyMillis)) + return false; + if (Double.doubleToLongBits(timestampSeconds) + != Double.doubleToLongBits(other.timestampSeconds)) return false; + if (multiTagResult == null) { + if (other.multiTagResult != null) return false; + } else if (!multiTagResult.equals(other.multiTagResult)) return false; + return true; } - return packet; - } - - /** - * Populates the outgoing packet with information from this pipeline result. - * - * @param packet The outgoing packet. - * @return The outgoing packet. - */ - public Packet populatePacket(Packet packet) { - // Encode latency, existence of targets, and number of targets. - packet.encode(latencyMillis); - multiTagResult.populatePacket(packet); - packet.encode((byte) targets.size()); - - // Encode the information of each target. - for (var target : targets) target.populatePacket(packet); - - // Return the packet. - return packet; - } - - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + ((targets == null) ? 0 : targets.hashCode()); - long temp; - temp = Double.doubleToLongBits(latencyMillis); - result = prime * result + (int) (temp ^ (temp >>> 32)); - temp = Double.doubleToLongBits(timestampSeconds); - result = prime * result + (int) (temp ^ (temp >>> 32)); - result = prime * result + ((multiTagResult == null) ? 
0 : multiTagResult.hashCode()); - return result; - } - - @Override - public boolean equals(Object obj) { - if (this == obj) return true; - if (obj == null) return false; - if (getClass() != obj.getClass()) return false; - PhotonPipelineResult other = (PhotonPipelineResult) obj; - if (targets == null) { - if (other.targets != null) return false; - } else if (!targets.equals(other.targets)) return false; - if (Double.doubleToLongBits(latencyMillis) != Double.doubleToLongBits(other.latencyMillis)) - return false; - if (Double.doubleToLongBits(timestampSeconds) - != Double.doubleToLongBits(other.timestampSeconds)) return false; - if (multiTagResult == null) { - if (other.multiTagResult != null) return false; - } else if (!multiTagResult.equals(other.multiTagResult)) return false; - return true; - } - - @Override - public String toString() { - return "PhotonPipelineResult [targets=" - + targets - + ", latencyMillis=" - + latencyMillis - + ", timestampSeconds=" - + timestampSeconds - + ", multiTagResult=" - + multiTagResult - + "]"; - } + @Override + public String toString() { + return "PhotonPipelineResult [targets=" + + targets + + ", latencyMillis=" + + latencyMillis + + ", timestampSeconds=" + + timestampSeconds + + ", multiTagResult=" + + multiTagResult + + "]"; + } } diff --git a/photon-targeting/src/main/java/org/photonvision/targeting/PhotonTrackedTarget.java b/photon-targeting/src/main/java/org/photonvision/targeting/PhotonTrackedTarget.java index b4a50722f3..40263f5cc5 100644 --- a/photon-targeting/src/main/java/org/photonvision/targeting/PhotonTrackedTarget.java +++ b/photon-targeting/src/main/java/org/photonvision/targeting/PhotonTrackedTarget.java @@ -24,270 +24,270 @@ import org.photonvision.utils.PacketUtils; public class PhotonTrackedTarget { - private static final int MAX_CORNERS = 8; - public static final int PACK_SIZE_BYTES = - Double.BYTES * (5 + 7 + 2 * 4 + 1 + 7 + 2 * MAX_CORNERS); - - private double yaw; - private double pitch; - private double 
area; - private double skew; - private int fiducialId; - private Transform3d bestCameraToTarget = new Transform3d(); - private Transform3d altCameraToTarget = new Transform3d(); - private double poseAmbiguity; - - // Corners from the min-area rectangle bounding the target - private List minAreaRectCorners; - - // Corners from whatever corner detection method was used - private List detectedCorners; - - public PhotonTrackedTarget() {} - - /** Construct a tracked target, given exactly 4 corners */ - public PhotonTrackedTarget( - double yaw, - double pitch, - double area, - double skew, - int id, - Transform3d pose, - Transform3d altPose, - double ambiguity, - List minAreaRectCorners, - List detectedCorners) { - assert minAreaRectCorners.size() == 4; - - if (detectedCorners.size() > MAX_CORNERS) { - detectedCorners = detectedCorners.subList(0, MAX_CORNERS); + private static final int MAX_CORNERS = 8; + public static final int PACK_SIZE_BYTES = + Double.BYTES * (5 + 7 + 2 * 4 + 1 + 7 + 2 * MAX_CORNERS); + + private double yaw; + private double pitch; + private double area; + private double skew; + private int fiducialId; + private Transform3d bestCameraToTarget = new Transform3d(); + private Transform3d altCameraToTarget = new Transform3d(); + private double poseAmbiguity; + + // Corners from the min-area rectangle bounding the target + private List minAreaRectCorners; + + // Corners from whatever corner detection method was used + private List detectedCorners; + + public PhotonTrackedTarget() {} + + /** Construct a tracked target, given exactly 4 corners */ + public PhotonTrackedTarget( + double yaw, + double pitch, + double area, + double skew, + int id, + Transform3d pose, + Transform3d altPose, + double ambiguity, + List minAreaRectCorners, + List detectedCorners) { + assert minAreaRectCorners.size() == 4; + + if (detectedCorners.size() > MAX_CORNERS) { + detectedCorners = detectedCorners.subList(0, MAX_CORNERS); + } + + this.yaw = yaw; + this.pitch = pitch; + 
this.area = area; + this.skew = skew; + this.fiducialId = id; + this.bestCameraToTarget = pose; + this.altCameraToTarget = altPose; + this.minAreaRectCorners = minAreaRectCorners; + this.detectedCorners = detectedCorners; + this.poseAmbiguity = ambiguity; } - this.yaw = yaw; - this.pitch = pitch; - this.area = area; - this.skew = skew; - this.fiducialId = id; - this.bestCameraToTarget = pose; - this.altCameraToTarget = altPose; - this.minAreaRectCorners = minAreaRectCorners; - this.detectedCorners = detectedCorners; - this.poseAmbiguity = ambiguity; - } - - public double getYaw() { - return yaw; - } - - public double getPitch() { - return pitch; - } - - public double getArea() { - return area; - } - - public double getSkew() { - return skew; - } - - /** Get the Fiducial ID, or -1 if not set. */ - public int getFiducialId() { - return fiducialId; - } - - /** - * Get the ratio of pose reprojection errors, called ambiguity. Numbers above 0.2 are likely to be - * ambiguous. -1 if invalid. - */ - public double getPoseAmbiguity() { - return poseAmbiguity; - } - - /** - * Return a list of the 4 corners in image space (origin top left, x right, y down), in no - * particular order, of the minimum area bounding rectangle of this target - */ - public List getMinAreaRectCorners() { - return minAreaRectCorners; - } - - /** - * Return a list of the n corners in image space (origin top left, x right, y down), in no - * particular order, detected for this target. - * - *

For fiducials, the order is known and is always counter-clock wise around the tag, like so: - * - *

​
-   * ⟶ +X  3 ----- 2
-   * |      |       |
-   * V      |       |
-   * +Y     0 ----- 1
-   * 
- */ - public List getDetectedCorners() { - return detectedCorners; - } - - /** - * Get the transform that maps camera space (X = forward, Y = left, Z = up) to object/fiducial tag - * space (X forward, Y left, Z up) with the lowest reprojection error - */ - public Transform3d getBestCameraToTarget() { - return bestCameraToTarget; - } - - /** - * Get the transform that maps camera space (X = forward, Y = left, Z = up) to object/fiducial tag - * space (X forward, Y left, Z up) with the highest reprojection error - */ - public Transform3d getAlternateCameraToTarget() { - return altCameraToTarget; - } - - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - long temp; - temp = Double.doubleToLongBits(yaw); - result = prime * result + (int) (temp ^ (temp >>> 32)); - temp = Double.doubleToLongBits(pitch); - result = prime * result + (int) (temp ^ (temp >>> 32)); - temp = Double.doubleToLongBits(area); - result = prime * result + (int) (temp ^ (temp >>> 32)); - temp = Double.doubleToLongBits(skew); - result = prime * result + (int) (temp ^ (temp >>> 32)); - result = prime * result + fiducialId; - result = prime * result + ((bestCameraToTarget == null) ? 0 : bestCameraToTarget.hashCode()); - result = prime * result + ((altCameraToTarget == null) ? 0 : altCameraToTarget.hashCode()); - temp = Double.doubleToLongBits(poseAmbiguity); - result = prime * result + (int) (temp ^ (temp >>> 32)); - result = prime * result + ((minAreaRectCorners == null) ? 0 : minAreaRectCorners.hashCode()); - result = prime * result + ((detectedCorners == null) ? 
0 : detectedCorners.hashCode()); - return result; - } - - @Override - public boolean equals(Object obj) { - if (this == obj) return true; - if (obj == null) return false; - if (getClass() != obj.getClass()) return false; - PhotonTrackedTarget other = (PhotonTrackedTarget) obj; - if (Double.doubleToLongBits(yaw) != Double.doubleToLongBits(other.yaw)) return false; - if (Double.doubleToLongBits(pitch) != Double.doubleToLongBits(other.pitch)) return false; - if (Double.doubleToLongBits(area) != Double.doubleToLongBits(other.area)) return false; - if (Double.doubleToLongBits(skew) != Double.doubleToLongBits(other.skew)) return false; - if (fiducialId != other.fiducialId) return false; - if (bestCameraToTarget == null) { - if (other.bestCameraToTarget != null) return false; - } else if (!bestCameraToTarget.equals(other.bestCameraToTarget)) return false; - if (altCameraToTarget == null) { - if (other.altCameraToTarget != null) return false; - } else if (!altCameraToTarget.equals(other.altCameraToTarget)) return false; - if (Double.doubleToLongBits(poseAmbiguity) != Double.doubleToLongBits(other.poseAmbiguity)) - return false; - if (minAreaRectCorners == null) { - if (other.minAreaRectCorners != null) return false; - } else if (!minAreaRectCorners.equals(other.minAreaRectCorners)) return false; - if (detectedCorners == null) { - if (other.detectedCorners != null) return false; - } else if (!detectedCorners.equals(other.detectedCorners)) return false; - return true; - } - - private static void encodeList(Packet packet, List list) { - packet.encode((byte) Math.min(list.size(), Byte.MAX_VALUE)); - for (int i = 0; i < list.size(); i++) { - packet.encode(list.get(i).x); - packet.encode(list.get(i).y); + public double getYaw() { + return yaw; } - } - - private static List decodeList(Packet p) { - byte len = p.decodeByte(); - var ret = new ArrayList(); - for (int i = 0; i < len; i++) { - double cx = p.decodeDouble(); - double cy = p.decodeDouble(); - ret.add(new TargetCorner(cx, 
cy)); + + public double getPitch() { + return pitch; + } + + public double getArea() { + return area; + } + + public double getSkew() { + return skew; + } + + /** Get the Fiducial ID, or -1 if not set. */ + public int getFiducialId() { + return fiducialId; } - return ret; - } - - /** - * Populates the fields of this class with information from the incoming packet. - * - * @param packet The incoming packet. - * @return The incoming packet. - */ - public Packet createFromPacket(Packet packet) { - this.yaw = packet.decodeDouble(); - this.pitch = packet.decodeDouble(); - this.area = packet.decodeDouble(); - this.skew = packet.decodeDouble(); - this.fiducialId = packet.decodeInt(); - - this.bestCameraToTarget = PacketUtils.decodeTransform(packet); - this.altCameraToTarget = PacketUtils.decodeTransform(packet); - - this.poseAmbiguity = packet.decodeDouble(); - - this.minAreaRectCorners = new ArrayList<>(4); - for (int i = 0; i < 4; i++) { - double cx = packet.decodeDouble(); - double cy = packet.decodeDouble(); - minAreaRectCorners.add(new TargetCorner(cx, cy)); + + /** + * Get the ratio of pose reprojection errors, called ambiguity. Numbers above 0.2 are likely to be + * ambiguous. -1 if invalid. + */ + public double getPoseAmbiguity() { + return poseAmbiguity; + } + + /** + * Return a list of the 4 corners in image space (origin top left, x right, y down), in no + * particular order, of the minimum area bounding rectangle of this target + */ + public List getMinAreaRectCorners() { + return minAreaRectCorners; + } + + /** + * Return a list of the n corners in image space (origin top left, x right, y down), in no + * particular order, detected for this target. + * + *

For fiducials, the order is known and is always counter-clock wise around the tag, like so: + * + *

​
+     * ⟶ +X  3 ----- 2
+     * |      |       |
+     * V      |       |
+     * +Y     0 ----- 1
+     * 
+ */ + public List getDetectedCorners() { + return detectedCorners; + } + + /** + * Get the transform that maps camera space (X = forward, Y = left, Z = up) to object/fiducial tag + * space (X forward, Y left, Z up) with the lowest reprojection error + */ + public Transform3d getBestCameraToTarget() { + return bestCameraToTarget; } - detectedCorners = decodeList(packet); - - return packet; - } - - /** - * Populates the outgoing packet with information from the current target. - * - * @param packet The outgoing packet. - * @return The outgoing packet. - */ - public Packet populatePacket(Packet packet) { - packet.encode(yaw); - packet.encode(pitch); - packet.encode(area); - packet.encode(skew); - packet.encode(fiducialId); - PacketUtils.encodeTransform(packet, bestCameraToTarget); - PacketUtils.encodeTransform(packet, altCameraToTarget); - packet.encode(poseAmbiguity); - - for (int i = 0; i < 4; i++) { - packet.encode(minAreaRectCorners.get(i).x); - packet.encode(minAreaRectCorners.get(i).y); + /** + * Get the transform that maps camera space (X = forward, Y = left, Z = up) to object/fiducial tag + * space (X forward, Y left, Z up) with the highest reprojection error + */ + public Transform3d getAlternateCameraToTarget() { + return altCameraToTarget; } - encodeList(packet, detectedCorners); - - return packet; - } - - @Override - public String toString() { - return "PhotonTrackedTarget{" - + "yaw=" - + yaw - + ", pitch=" - + pitch - + ", area=" - + area - + ", skew=" - + skew - + ", fiducialId=" - + fiducialId - + ", cameraToTarget=" - + bestCameraToTarget - + ", targetCorners=" - + minAreaRectCorners - + '}'; - } + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + long temp; + temp = Double.doubleToLongBits(yaw); + result = prime * result + (int) (temp ^ (temp >>> 32)); + temp = Double.doubleToLongBits(pitch); + result = prime * result + (int) (temp ^ (temp >>> 32)); + temp = Double.doubleToLongBits(area); + result = prime * result + 
(int) (temp ^ (temp >>> 32)); + temp = Double.doubleToLongBits(skew); + result = prime * result + (int) (temp ^ (temp >>> 32)); + result = prime * result + fiducialId; + result = prime * result + ((bestCameraToTarget == null) ? 0 : bestCameraToTarget.hashCode()); + result = prime * result + ((altCameraToTarget == null) ? 0 : altCameraToTarget.hashCode()); + temp = Double.doubleToLongBits(poseAmbiguity); + result = prime * result + (int) (temp ^ (temp >>> 32)); + result = prime * result + ((minAreaRectCorners == null) ? 0 : minAreaRectCorners.hashCode()); + result = prime * result + ((detectedCorners == null) ? 0 : detectedCorners.hashCode()); + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) return true; + if (obj == null) return false; + if (getClass() != obj.getClass()) return false; + PhotonTrackedTarget other = (PhotonTrackedTarget) obj; + if (Double.doubleToLongBits(yaw) != Double.doubleToLongBits(other.yaw)) return false; + if (Double.doubleToLongBits(pitch) != Double.doubleToLongBits(other.pitch)) return false; + if (Double.doubleToLongBits(area) != Double.doubleToLongBits(other.area)) return false; + if (Double.doubleToLongBits(skew) != Double.doubleToLongBits(other.skew)) return false; + if (fiducialId != other.fiducialId) return false; + if (bestCameraToTarget == null) { + if (other.bestCameraToTarget != null) return false; + } else if (!bestCameraToTarget.equals(other.bestCameraToTarget)) return false; + if (altCameraToTarget == null) { + if (other.altCameraToTarget != null) return false; + } else if (!altCameraToTarget.equals(other.altCameraToTarget)) return false; + if (Double.doubleToLongBits(poseAmbiguity) != Double.doubleToLongBits(other.poseAmbiguity)) + return false; + if (minAreaRectCorners == null) { + if (other.minAreaRectCorners != null) return false; + } else if (!minAreaRectCorners.equals(other.minAreaRectCorners)) return false; + if (detectedCorners == null) { + if (other.detectedCorners != null) 
return false; + } else if (!detectedCorners.equals(other.detectedCorners)) return false; + return true; + } + + private static void encodeList(Packet packet, List list) { + packet.encode((byte) Math.min(list.size(), Byte.MAX_VALUE)); + for (int i = 0; i < list.size(); i++) { + packet.encode(list.get(i).x); + packet.encode(list.get(i).y); + } + } + + private static List decodeList(Packet p) { + byte len = p.decodeByte(); + var ret = new ArrayList(); + for (int i = 0; i < len; i++) { + double cx = p.decodeDouble(); + double cy = p.decodeDouble(); + ret.add(new TargetCorner(cx, cy)); + } + return ret; + } + + /** + * Populates the fields of this class with information from the incoming packet. + * + * @param packet The incoming packet. + * @return The incoming packet. + */ + public Packet createFromPacket(Packet packet) { + this.yaw = packet.decodeDouble(); + this.pitch = packet.decodeDouble(); + this.area = packet.decodeDouble(); + this.skew = packet.decodeDouble(); + this.fiducialId = packet.decodeInt(); + + this.bestCameraToTarget = PacketUtils.decodeTransform(packet); + this.altCameraToTarget = PacketUtils.decodeTransform(packet); + + this.poseAmbiguity = packet.decodeDouble(); + + this.minAreaRectCorners = new ArrayList<>(4); + for (int i = 0; i < 4; i++) { + double cx = packet.decodeDouble(); + double cy = packet.decodeDouble(); + minAreaRectCorners.add(new TargetCorner(cx, cy)); + } + + detectedCorners = decodeList(packet); + + return packet; + } + + /** + * Populates the outgoing packet with information from the current target. + * + * @param packet The outgoing packet. + * @return The outgoing packet. 
+ */ + public Packet populatePacket(Packet packet) { + packet.encode(yaw); + packet.encode(pitch); + packet.encode(area); + packet.encode(skew); + packet.encode(fiducialId); + PacketUtils.encodeTransform(packet, bestCameraToTarget); + PacketUtils.encodeTransform(packet, altCameraToTarget); + packet.encode(poseAmbiguity); + + for (int i = 0; i < 4; i++) { + packet.encode(minAreaRectCorners.get(i).x); + packet.encode(minAreaRectCorners.get(i).y); + } + + encodeList(packet, detectedCorners); + + return packet; + } + + @Override + public String toString() { + return "PhotonTrackedTarget{" + + "yaw=" + + yaw + + ", pitch=" + + pitch + + ", area=" + + area + + ", skew=" + + skew + + ", fiducialId=" + + fiducialId + + ", cameraToTarget=" + + bestCameraToTarget + + ", targetCorners=" + + minAreaRectCorners + + '}'; + } } diff --git a/photon-targeting/src/main/java/org/photonvision/targeting/TargetCorner.java b/photon-targeting/src/main/java/org/photonvision/targeting/TargetCorner.java index 83ff3a7631..9808079a5d 100644 --- a/photon-targeting/src/main/java/org/photonvision/targeting/TargetCorner.java +++ b/photon-targeting/src/main/java/org/photonvision/targeting/TargetCorner.java @@ -24,29 +24,29 @@ * Origin at the top left, plus-x to the right, plus-y down. 
*/ public class TargetCorner { - public final double x; - public final double y; + public final double x; + public final double y; - public TargetCorner(double cx, double cy) { - this.x = cx; - this.y = cy; - } + public TargetCorner(double cx, double cy) { + this.x = cx; + this.y = cy; + } - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - TargetCorner that = (TargetCorner) o; - return Double.compare(that.x, x) == 0 && Double.compare(that.y, y) == 0; - } + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TargetCorner that = (TargetCorner) o; + return Double.compare(that.x, x) == 0 && Double.compare(that.y, y) == 0; + } - @Override - public int hashCode() { - return Objects.hash(x, y); - } + @Override + public int hashCode() { + return Objects.hash(x, y); + } - @Override - public String toString() { - return "(" + x + "," + y + ')'; - } + @Override + public String toString() { + return "(" + x + "," + y + ')'; + } } diff --git a/photon-targeting/src/main/java/org/photonvision/utils/PacketUtils.java b/photon-targeting/src/main/java/org/photonvision/utils/PacketUtils.java index 34d0aeb562..43a0dd4cf6 100644 --- a/photon-targeting/src/main/java/org/photonvision/utils/PacketUtils.java +++ b/photon-targeting/src/main/java/org/photonvision/utils/PacketUtils.java @@ -24,26 +24,26 @@ import org.photonvision.common.dataflow.structures.Packet; public class PacketUtils { - public static Transform3d decodeTransform(Packet packet) { - double x = packet.decodeDouble(); - double y = packet.decodeDouble(); - double z = packet.decodeDouble(); - var translation = new Translation3d(x, y, z); - double w = packet.decodeDouble(); - x = packet.decodeDouble(); - y = packet.decodeDouble(); - z = packet.decodeDouble(); - var rotation = new Rotation3d(new Quaternion(w, x, y, z)); - return new 
Transform3d(translation, rotation); - } + public static Transform3d decodeTransform(Packet packet) { + double x = packet.decodeDouble(); + double y = packet.decodeDouble(); + double z = packet.decodeDouble(); + var translation = new Translation3d(x, y, z); + double w = packet.decodeDouble(); + x = packet.decodeDouble(); + y = packet.decodeDouble(); + z = packet.decodeDouble(); + var rotation = new Rotation3d(new Quaternion(w, x, y, z)); + return new Transform3d(translation, rotation); + } - public static void encodeTransform(Packet packet, Transform3d transform) { - packet.encode(transform.getTranslation().getX()); - packet.encode(transform.getTranslation().getY()); - packet.encode(transform.getTranslation().getZ()); - packet.encode(transform.getRotation().getQuaternion().getW()); - packet.encode(transform.getRotation().getQuaternion().getX()); - packet.encode(transform.getRotation().getQuaternion().getY()); - packet.encode(transform.getRotation().getQuaternion().getZ()); - } + public static void encodeTransform(Packet packet, Transform3d transform) { + packet.encode(transform.getTranslation().getX()); + packet.encode(transform.getTranslation().getY()); + packet.encode(transform.getTranslation().getZ()); + packet.encode(transform.getRotation().getQuaternion().getW()); + packet.encode(transform.getRotation().getQuaternion().getX()); + packet.encode(transform.getRotation().getQuaternion().getY()); + packet.encode(transform.getRotation().getQuaternion().getZ()); + } } diff --git a/photonlib-java-examples/aimandrange/src/main/java/frc/robot/Main.java b/photonlib-java-examples/aimandrange/src/main/java/frc/robot/Main.java index a92703e411..077f979f97 100644 --- a/photonlib-java-examples/aimandrange/src/main/java/frc/robot/Main.java +++ b/photonlib-java-examples/aimandrange/src/main/java/frc/robot/Main.java @@ -32,14 +32,14 @@ * call. */ public final class Main { - private Main() {} + private Main() {} - /** - * Main initialization function. 
Do not perform any initialization here. - * - *

If you change your main robot class, change the parameter type. - */ - public static void main(String... args) { - RobotBase.startRobot(Robot::new); - } + /** + * Main initialization function. Do not perform any initialization here. + * + *

If you change your main robot class, change the parameter type. + */ + public static void main(String... args) { + RobotBase.startRobot(Robot::new); + } } diff --git a/photonlib-java-examples/aimandrange/src/main/java/frc/robot/Robot.java b/photonlib-java-examples/aimandrange/src/main/java/frc/robot/Robot.java index e8fb3ddf02..d7a0c2a23c 100644 --- a/photonlib-java-examples/aimandrange/src/main/java/frc/robot/Robot.java +++ b/photonlib-java-examples/aimandrange/src/main/java/frc/robot/Robot.java @@ -40,72 +40,72 @@ * project. */ public class Robot extends TimedRobot { - // Constants such as camera and target height stored. Change per robot and goal! - final double CAMERA_HEIGHT_METERS = Units.inchesToMeters(24); - final double TARGET_HEIGHT_METERS = Units.feetToMeters(5); - // Angle between horizontal and the camera. - final double CAMERA_PITCH_RADIANS = Units.degreesToRadians(0); + // Constants such as camera and target height stored. Change per robot and goal! + final double CAMERA_HEIGHT_METERS = Units.inchesToMeters(24); + final double TARGET_HEIGHT_METERS = Units.feetToMeters(5); + // Angle between horizontal and the camera. 
+ final double CAMERA_PITCH_RADIANS = Units.degreesToRadians(0); - // How far from the target we want to be - final double GOAL_RANGE_METERS = Units.feetToMeters(3); + // How far from the target we want to be + final double GOAL_RANGE_METERS = Units.feetToMeters(3); - // Change this to match the name of your camera - PhotonCamera camera = new PhotonCamera("photonvision"); + // Change this to match the name of your camera + PhotonCamera camera = new PhotonCamera("photonvision"); - // PID constants should be tuned per robot - final double LINEAR_P = 0.1; - final double LINEAR_D = 0.0; - PIDController forwardController = new PIDController(LINEAR_P, 0, LINEAR_D); + // PID constants should be tuned per robot + final double LINEAR_P = 0.1; + final double LINEAR_D = 0.0; + PIDController forwardController = new PIDController(LINEAR_P, 0, LINEAR_D); - final double ANGULAR_P = 0.1; - final double ANGULAR_D = 0.0; - PIDController turnController = new PIDController(ANGULAR_P, 0, ANGULAR_D); + final double ANGULAR_P = 0.1; + final double ANGULAR_D = 0.0; + PIDController turnController = new PIDController(ANGULAR_P, 0, ANGULAR_D); - XboxController xboxController = new XboxController(0); + XboxController xboxController = new XboxController(0); - // Drive motors - PWMVictorSPX leftMotor = new PWMVictorSPX(0); - PWMVictorSPX rightMotor = new PWMVictorSPX(1); - DifferentialDrive drive = new DifferentialDrive(leftMotor, rightMotor); + // Drive motors + PWMVictorSPX leftMotor = new PWMVictorSPX(0); + PWMVictorSPX rightMotor = new PWMVictorSPX(1); + DifferentialDrive drive = new DifferentialDrive(leftMotor, rightMotor); - @Override - public void teleopPeriodic() { - double forwardSpeed; - double rotationSpeed; + @Override + public void teleopPeriodic() { + double forwardSpeed; + double rotationSpeed; - if (xboxController.getAButton()) { - // Vision-alignment mode - // Query the latest result from PhotonVision - var result = camera.getLatestResult(); + if (xboxController.getAButton()) { 
+ // Vision-alignment mode + // Query the latest result from PhotonVision + var result = camera.getLatestResult(); - if (result.hasTargets()) { - // First calculate range - double range = - PhotonUtils.calculateDistanceToTargetMeters( - CAMERA_HEIGHT_METERS, - TARGET_HEIGHT_METERS, - CAMERA_PITCH_RADIANS, - Units.degreesToRadians(result.getBestTarget().getPitch())); + if (result.hasTargets()) { + // First calculate range + double range = + PhotonUtils.calculateDistanceToTargetMeters( + CAMERA_HEIGHT_METERS, + TARGET_HEIGHT_METERS, + CAMERA_PITCH_RADIANS, + Units.degreesToRadians(result.getBestTarget().getPitch())); - // Use this range as the measurement we give to the PID controller. - // -1.0 required to ensure positive PID controller effort _increases_ range - forwardSpeed = -forwardController.calculate(range, GOAL_RANGE_METERS); + // Use this range as the measurement we give to the PID controller. + // -1.0 required to ensure positive PID controller effort _increases_ range + forwardSpeed = -forwardController.calculate(range, GOAL_RANGE_METERS); - // Also calculate angular power - // -1.0 required to ensure positive PID controller effort _increases_ yaw - rotationSpeed = -turnController.calculate(result.getBestTarget().getYaw(), 0); - } else { - // If we have no targets, stay still. - forwardSpeed = 0; - rotationSpeed = 0; - } - } else { - // Manual Driver Mode - forwardSpeed = -xboxController.getRightY(); - rotationSpeed = xboxController.getLeftX(); - } + // Also calculate angular power + // -1.0 required to ensure positive PID controller effort _increases_ yaw + rotationSpeed = -turnController.calculate(result.getBestTarget().getYaw(), 0); + } else { + // If we have no targets, stay still. 
+ forwardSpeed = 0; + rotationSpeed = 0; + } + } else { + // Manual Driver Mode + forwardSpeed = -xboxController.getRightY(); + rotationSpeed = xboxController.getLeftX(); + } - // Use our forward/turn speeds to control the drivetrain - drive.arcadeDrive(forwardSpeed, rotationSpeed); - } + // Use our forward/turn speeds to control the drivetrain + drive.arcadeDrive(forwardSpeed, rotationSpeed); + } } diff --git a/photonlib-java-examples/aimattarget/src/main/java/frc/robot/Main.java b/photonlib-java-examples/aimattarget/src/main/java/frc/robot/Main.java index a92703e411..077f979f97 100644 --- a/photonlib-java-examples/aimattarget/src/main/java/frc/robot/Main.java +++ b/photonlib-java-examples/aimattarget/src/main/java/frc/robot/Main.java @@ -32,14 +32,14 @@ * call. */ public final class Main { - private Main() {} + private Main() {} - /** - * Main initialization function. Do not perform any initialization here. - * - *

If you change your main robot class, change the parameter type. - */ - public static void main(String... args) { - RobotBase.startRobot(Robot::new); - } + /** + * Main initialization function. Do not perform any initialization here. + * + *

If you change your main robot class, change the parameter type. + */ + public static void main(String... args) { + RobotBase.startRobot(Robot::new); + } } diff --git a/photonlib-java-examples/aimattarget/src/main/java/frc/robot/Robot.java b/photonlib-java-examples/aimattarget/src/main/java/frc/robot/Robot.java index 0868e18c95..5d41404fdc 100644 --- a/photonlib-java-examples/aimattarget/src/main/java/frc/robot/Robot.java +++ b/photonlib-java-examples/aimattarget/src/main/java/frc/robot/Robot.java @@ -39,60 +39,60 @@ * project. */ public class Robot extends TimedRobot { - // Constants such as camera and target height stored. Change per robot and goal! - final double CAMERA_HEIGHT_METERS = Units.inchesToMeters(24); - final double TARGET_HEIGHT_METERS = Units.feetToMeters(5); - // Angle between horizontal and the camera. - final double CAMERA_PITCH_RADIANS = Units.degreesToRadians(0); + // Constants such as camera and target height stored. Change per robot and goal! + final double CAMERA_HEIGHT_METERS = Units.inchesToMeters(24); + final double TARGET_HEIGHT_METERS = Units.feetToMeters(5); + // Angle between horizontal and the camera. 
+ final double CAMERA_PITCH_RADIANS = Units.degreesToRadians(0); - // How far from the target we want to be - final double GOAL_RANGE_METERS = Units.feetToMeters(3); + // How far from the target we want to be + final double GOAL_RANGE_METERS = Units.feetToMeters(3); - // Change this to match the name of your camera - PhotonCamera camera = new PhotonCamera("photonvision"); + // Change this to match the name of your camera + PhotonCamera camera = new PhotonCamera("photonvision"); - // PID constants should be tuned per robot - final double LINEAR_P = 0.1; - final double LINEAR_D = 0.0; - PIDController forwardController = new PIDController(LINEAR_P, 0, LINEAR_D); + // PID constants should be tuned per robot + final double LINEAR_P = 0.1; + final double LINEAR_D = 0.0; + PIDController forwardController = new PIDController(LINEAR_P, 0, LINEAR_D); - final double ANGULAR_P = 0.1; - final double ANGULAR_D = 0.0; - PIDController turnController = new PIDController(ANGULAR_P, 0, ANGULAR_D); + final double ANGULAR_P = 0.1; + final double ANGULAR_D = 0.0; + PIDController turnController = new PIDController(ANGULAR_P, 0, ANGULAR_D); - XboxController xboxController = new XboxController(0); + XboxController xboxController = new XboxController(0); - // Drive motors - PWMVictorSPX leftMotor = new PWMVictorSPX(0); - PWMVictorSPX rightMotor = new PWMVictorSPX(1); - DifferentialDrive drive = new DifferentialDrive(leftMotor, rightMotor); + // Drive motors + PWMVictorSPX leftMotor = new PWMVictorSPX(0); + PWMVictorSPX rightMotor = new PWMVictorSPX(1); + DifferentialDrive drive = new DifferentialDrive(leftMotor, rightMotor); - @Override - public void teleopPeriodic() { - double forwardSpeed; - double rotationSpeed; + @Override + public void teleopPeriodic() { + double forwardSpeed; + double rotationSpeed; - forwardSpeed = -xboxController.getRightY(); + forwardSpeed = -xboxController.getRightY(); - if (xboxController.getAButton()) { - // Vision-alignment mode - // Query the latest result 
from PhotonVision - var result = camera.getLatestResult(); + if (xboxController.getAButton()) { + // Vision-alignment mode + // Query the latest result from PhotonVision + var result = camera.getLatestResult(); - if (result.hasTargets()) { - // Calculate angular turn power - // -1.0 required to ensure positive PID controller effort _increases_ yaw - rotationSpeed = -turnController.calculate(result.getBestTarget().getYaw(), 0); - } else { - // If we have no targets, stay still. - rotationSpeed = 0; - } - } else { - // Manual Driver Mode - rotationSpeed = xboxController.getLeftX(); - } + if (result.hasTargets()) { + // Calculate angular turn power + // -1.0 required to ensure positive PID controller effort _increases_ yaw + rotationSpeed = -turnController.calculate(result.getBestTarget().getYaw(), 0); + } else { + // If we have no targets, stay still. + rotationSpeed = 0; + } + } else { + // Manual Driver Mode + rotationSpeed = xboxController.getLeftX(); + } - // Use our forward/turn speeds to control the drivetrain - drive.arcadeDrive(forwardSpeed, rotationSpeed); - } + // Use our forward/turn speeds to control the drivetrain + drive.arcadeDrive(forwardSpeed, rotationSpeed); + } } diff --git a/photonlib-java-examples/getinrange/src/main/java/frc/robot/Main.java b/photonlib-java-examples/getinrange/src/main/java/frc/robot/Main.java index a92703e411..077f979f97 100644 --- a/photonlib-java-examples/getinrange/src/main/java/frc/robot/Main.java +++ b/photonlib-java-examples/getinrange/src/main/java/frc/robot/Main.java @@ -32,14 +32,14 @@ * call. */ public final class Main { - private Main() {} + private Main() {} - /** - * Main initialization function. Do not perform any initialization here. - * - *

If you change your main robot class, change the parameter type. - */ - public static void main(String... args) { - RobotBase.startRobot(Robot::new); - } + /** + * Main initialization function. Do not perform any initialization here. + * + *

If you change your main robot class, change the parameter type. + */ + public static void main(String... args) { + RobotBase.startRobot(Robot::new); + } } diff --git a/photonlib-java-examples/getinrange/src/main/java/frc/robot/Robot.java b/photonlib-java-examples/getinrange/src/main/java/frc/robot/Robot.java index e59c49fcc4..c776021844 100644 --- a/photonlib-java-examples/getinrange/src/main/java/frc/robot/Robot.java +++ b/photonlib-java-examples/getinrange/src/main/java/frc/robot/Robot.java @@ -40,68 +40,68 @@ * project. */ public class Robot extends TimedRobot { - // Constants such as camera and target height stored. Change per robot and goal! - final double CAMERA_HEIGHT_METERS = Units.inchesToMeters(24); - final double TARGET_HEIGHT_METERS = Units.feetToMeters(5); + // Constants such as camera and target height stored. Change per robot and goal! + final double CAMERA_HEIGHT_METERS = Units.inchesToMeters(24); + final double TARGET_HEIGHT_METERS = Units.feetToMeters(5); - // Angle between horizontal and the camera. - final double CAMERA_PITCH_RADIANS = Units.degreesToRadians(0); + // Angle between horizontal and the camera. 
+ final double CAMERA_PITCH_RADIANS = Units.degreesToRadians(0); - // How far from the target we want to be - final double GOAL_RANGE_METERS = Units.feetToMeters(3); + // How far from the target we want to be + final double GOAL_RANGE_METERS = Units.feetToMeters(3); - // Change this to match the name of your camera - PhotonCamera camera = new PhotonCamera("photonvision"); + // Change this to match the name of your camera + PhotonCamera camera = new PhotonCamera("photonvision"); - // PID constants should be tuned per robot - final double P_GAIN = 0.1; - final double D_GAIN = 0.0; - PIDController controller = new PIDController(P_GAIN, 0, D_GAIN); + // PID constants should be tuned per robot + final double P_GAIN = 0.1; + final double D_GAIN = 0.0; + PIDController controller = new PIDController(P_GAIN, 0, D_GAIN); - XboxController xboxController; + XboxController xboxController; - // Drive motors - PWMVictorSPX leftMotor = new PWMVictorSPX(0); - PWMVictorSPX rightMotor = new PWMVictorSPX(1); - DifferentialDrive drive = new DifferentialDrive(leftMotor, rightMotor); + // Drive motors + PWMVictorSPX leftMotor = new PWMVictorSPX(0); + PWMVictorSPX rightMotor = new PWMVictorSPX(1); + DifferentialDrive drive = new DifferentialDrive(leftMotor, rightMotor); - @Override - public void robotInit() { - xboxController = new XboxController(0); - } + @Override + public void robotInit() { + xboxController = new XboxController(0); + } - @Override - public void teleopPeriodic() { - double forwardSpeed; - double rotationSpeed = xboxController.getLeftX(); + @Override + public void teleopPeriodic() { + double forwardSpeed; + double rotationSpeed = xboxController.getLeftX(); - if (xboxController.getAButton()) { - // Vision-alignment mode - // Query the latest result from PhotonVision - var result = camera.getLatestResult(); + if (xboxController.getAButton()) { + // Vision-alignment mode + // Query the latest result from PhotonVision + var result = camera.getLatestResult(); - if 
(result.hasTargets()) { - // First calculate range - double range = - PhotonUtils.calculateDistanceToTargetMeters( - CAMERA_HEIGHT_METERS, - TARGET_HEIGHT_METERS, - CAMERA_PITCH_RADIANS, - Units.degreesToRadians(result.getBestTarget().getPitch())); + if (result.hasTargets()) { + // First calculate range + double range = + PhotonUtils.calculateDistanceToTargetMeters( + CAMERA_HEIGHT_METERS, + TARGET_HEIGHT_METERS, + CAMERA_PITCH_RADIANS, + Units.degreesToRadians(result.getBestTarget().getPitch())); - // Use this range as the measurement we give to the PID controller. - // -1.0 required to ensure positive PID controller effort _increases_ range - forwardSpeed = -controller.calculate(range, GOAL_RANGE_METERS); - } else { - // If we have no targets, stay still. - forwardSpeed = 0; - } - } else { - // Manual Driver Mode - forwardSpeed = -xboxController.getRightY(); - } + // Use this range as the measurement we give to the PID controller. + // -1.0 required to ensure positive PID controller effort _increases_ range + forwardSpeed = -controller.calculate(range, GOAL_RANGE_METERS); + } else { + // If we have no targets, stay still. 
+ forwardSpeed = 0; + } + } else { + // Manual Driver Mode + forwardSpeed = -xboxController.getRightY(); + } - // Use our forward/turn speeds to control the drivetrain - drive.arcadeDrive(forwardSpeed, rotationSpeed); - } + // Use our forward/turn speeds to control the drivetrain + drive.arcadeDrive(forwardSpeed, rotationSpeed); + } } diff --git a/photonlib-java-examples/simaimandrange/src/main/java/frc/robot/Constants.java b/photonlib-java-examples/simaimandrange/src/main/java/frc/robot/Constants.java index d26a898b57..8e0f41cd57 100644 --- a/photonlib-java-examples/simaimandrange/src/main/java/frc/robot/Constants.java +++ b/photonlib-java-examples/simaimandrange/src/main/java/frc/robot/Constants.java @@ -31,61 +31,61 @@ import edu.wpi.first.math.util.Units; public class Constants { - // ---------- Vision - // Constants about how your camera is mounted to the robot - public static final double CAMERA_PITCH_RADIANS = - Units.degreesToRadians(15); // Angle "up" from horizontal - public static final double CAMERA_HEIGHT_METERS = Units.inchesToMeters(24); // Height above floor + // ---------- Vision + // Constants about how your camera is mounted to the robot + public static final double CAMERA_PITCH_RADIANS = + Units.degreesToRadians(15); // Angle "up" from horizontal + public static final double CAMERA_HEIGHT_METERS = Units.inchesToMeters(24); // Height above floor - // How far from the target we want to be - public static final double GOAL_RANGE_METERS = Units.feetToMeters(10); + // How far from the target we want to be + public static final double GOAL_RANGE_METERS = Units.feetToMeters(10); - // Where the 2020 High goal target is located on the field - // See - // https://docs.wpilib.org/en/stable/docs/software/advanced-controls/geometry/coordinate-systems.html#field-coordinate-system - // and https://firstfrc.blob.core.windows.net/frc2020/PlayingField/LayoutandMarkingDiagram.pdf - // (pages 4 and 5) - public static final Pose3d TARGET_POSE = - new Pose3d( - new 
Translation3d( - Units.feetToMeters(52.46), - Units.inchesToMeters(94.66), - Units.inchesToMeters(89.69)), // (center of vision target) - new Rotation3d(0.0, 0.0, Math.PI)); - // ---------- + // Where the 2020 High goal target is located on the field + // See + // https://docs.wpilib.org/en/stable/docs/software/advanced-controls/geometry/coordinate-systems.html#field-coordinate-system + // and https://firstfrc.blob.core.windows.net/frc2020/PlayingField/LayoutandMarkingDiagram.pdf + // (pages 4 and 5) + public static final Pose3d TARGET_POSE = + new Pose3d( + new Translation3d( + Units.feetToMeters(52.46), + Units.inchesToMeters(94.66), + Units.inchesToMeters(89.69)), // (center of vision target) + new Rotation3d(0.0, 0.0, Math.PI)); + // ---------- - // ---------- Drivetrain - public static final int LEFT_MOTOR_CHANNEL = 0; - public static final int RIGHT_MOTOR_CHANNEL = 1; + // ---------- Drivetrain + public static final int LEFT_MOTOR_CHANNEL = 0; + public static final int RIGHT_MOTOR_CHANNEL = 1; - // PID constants should be tuned per robot - public static final double LINEAR_P = 0.5; - public static final double LINEAR_I = 0; - public static final double LINEAR_D = 0.1; + // PID constants should be tuned per robot + public static final double LINEAR_P = 0.5; + public static final double LINEAR_I = 0; + public static final double LINEAR_D = 0.1; - public static final double ANGULAR_P = 0.03; - public static final double ANGULAR_I = 0; - public static final double ANGULAR_D = 0.003; + public static final double ANGULAR_P = 0.03; + public static final double ANGULAR_I = 0; + public static final double ANGULAR_D = 0.003; - // Ratio to multiply joystick inputs by - public static final double DRIVESPEED = 0.75; + // Ratio to multiply joystick inputs by + public static final double DRIVESPEED = 0.75; - // The following properties are necessary for simulation: + // The following properties are necessary for simulation: - // Distance from drivetrain left wheels to right 
wheels - public static final double TRACKWIDTH_METERS = Units.feetToMeters(2.0); - public static final double WHEEL_DIAMETER_METERS = Units.inchesToMeters(6.0); + // Distance from drivetrain left wheels to right wheels + public static final double TRACKWIDTH_METERS = Units.feetToMeters(2.0); + public static final double WHEEL_DIAMETER_METERS = Units.inchesToMeters(6.0); - // The motors used in the gearbox for one drivetrain side - public static final DCMotor DRIVE_MOTORS = DCMotor.getCIM(2); - // The gearbox reduction, or how many motor rotations per wheel rotation - public static final double GEARING = 8.0; + // The motors used in the gearbox for one drivetrain side + public static final DCMotor DRIVE_MOTORS = DCMotor.getCIM(2); + // The gearbox reduction, or how many motor rotations per wheel rotation + public static final double GEARING = 8.0; - // The drivetrain feedforward values - public static final double LINEAR_KV = 2.0; - public static final double LINEAR_KA = 0.5; + // The drivetrain feedforward values + public static final double LINEAR_KV = 2.0; + public static final double LINEAR_KA = 0.5; - public static final double ANGULAR_KV = 2.25; - public static final double ANGULAR_KA = 0.3; - // ---------- + public static final double ANGULAR_KV = 2.25; + public static final double ANGULAR_KA = 0.3; + // ---------- } diff --git a/photonlib-java-examples/simaimandrange/src/main/java/frc/robot/Main.java b/photonlib-java-examples/simaimandrange/src/main/java/frc/robot/Main.java index a92703e411..077f979f97 100644 --- a/photonlib-java-examples/simaimandrange/src/main/java/frc/robot/Main.java +++ b/photonlib-java-examples/simaimandrange/src/main/java/frc/robot/Main.java @@ -32,14 +32,14 @@ * call. */ public final class Main { - private Main() {} + private Main() {} - /** - * Main initialization function. Do not perform any initialization here. - * - *

If you change your main robot class, change the parameter type. - */ - public static void main(String... args) { - RobotBase.startRobot(Robot::new); - } + /** + * Main initialization function. Do not perform any initialization here. + * + *

If you change your main robot class, change the parameter type. + */ + public static void main(String... args) { + RobotBase.startRobot(Robot::new); + } } diff --git a/photonlib-java-examples/simaimandrange/src/main/java/frc/robot/Robot.java b/photonlib-java-examples/simaimandrange/src/main/java/frc/robot/Robot.java index 702f2b91c3..4f22aa287c 100644 --- a/photonlib-java-examples/simaimandrange/src/main/java/frc/robot/Robot.java +++ b/photonlib-java-examples/simaimandrange/src/main/java/frc/robot/Robot.java @@ -44,82 +44,82 @@ * project. */ public class Robot extends TimedRobot { - // Change this to match the name of your camera - PhotonCamera camera = new PhotonCamera("photonvision"); - - // PID constants should be tuned per robot - PIDController forwardController = new PIDController(LINEAR_P, 0, LINEAR_D); - PIDController turnController = new PIDController(ANGULAR_P, 0, ANGULAR_D); - - XboxController xboxController = new XboxController(0); - - // Drive motors - PWMVictorSPX leftMotor = new PWMVictorSPX(LEFT_MOTOR_CHANNEL); - PWMVictorSPX rightMotor = new PWMVictorSPX(RIGHT_MOTOR_CHANNEL); - DifferentialDrive drive = new DifferentialDrive(leftMotor, rightMotor); - - @Override - public void robotInit() { - leftMotor.setInverted(false); - rightMotor.setInverted(true); - } - - @Override - public void teleopPeriodic() { - double forwardSpeed; - double rotationSpeed; - - if (xboxController.getAButton()) { - // Vision-alignment mode - // Query the latest result from PhotonVision - var result = camera.getLatestResult(); - - if (result.hasTargets()) { - // First calculate range - double range = - PhotonUtils.calculateDistanceToTargetMeters( - CAMERA_HEIGHT_METERS, - TARGET_POSE.getZ(), - CAMERA_PITCH_RADIANS, - Units.degreesToRadians(result.getBestTarget().getPitch())); - - // Use this range as the measurement we give to the PID controller. - // (This forwardSpeed must be positive to go forward.) 
- forwardSpeed = -forwardController.calculate(range, GOAL_RANGE_METERS); - - // Also calculate angular power - // (This rotationSpeed must be positive to turn counter-clockwise.) - rotationSpeed = -turnController.calculate(result.getBestTarget().getYaw(), 0); - } else { - // If we have no targets, stay still. - forwardSpeed = 0; - rotationSpeed = 0; - } - } else { - // Manual Driver Mode - forwardSpeed = -xboxController.getLeftY() * DRIVESPEED; - rotationSpeed = -xboxController.getRightX() * DRIVESPEED; + // Change this to match the name of your camera + PhotonCamera camera = new PhotonCamera("photonvision"); + + // PID constants should be tuned per robot + PIDController forwardController = new PIDController(LINEAR_P, 0, LINEAR_D); + PIDController turnController = new PIDController(ANGULAR_P, 0, ANGULAR_D); + + XboxController xboxController = new XboxController(0); + + // Drive motors + PWMVictorSPX leftMotor = new PWMVictorSPX(LEFT_MOTOR_CHANNEL); + PWMVictorSPX rightMotor = new PWMVictorSPX(RIGHT_MOTOR_CHANNEL); + DifferentialDrive drive = new DifferentialDrive(leftMotor, rightMotor); + + @Override + public void robotInit() { + leftMotor.setInverted(false); + rightMotor.setInverted(true); } - // Use our forward/turn speeds to control the drivetrain - drive.arcadeDrive(forwardSpeed, rotationSpeed); - } + @Override + public void teleopPeriodic() { + double forwardSpeed; + double rotationSpeed; + + if (xboxController.getAButton()) { + // Vision-alignment mode + // Query the latest result from PhotonVision + var result = camera.getLatestResult(); + + if (result.hasTargets()) { + // First calculate range + double range = + PhotonUtils.calculateDistanceToTargetMeters( + CAMERA_HEIGHT_METERS, + TARGET_POSE.getZ(), + CAMERA_PITCH_RADIANS, + Units.degreesToRadians(result.getBestTarget().getPitch())); + + // Use this range as the measurement we give to the PID controller. + // (This forwardSpeed must be positive to go forward.) 
+ forwardSpeed = -forwardController.calculate(range, GOAL_RANGE_METERS); + + // Also calculate angular power + // (This rotationSpeed must be positive to turn counter-clockwise.) + rotationSpeed = -turnController.calculate(result.getBestTarget().getYaw(), 0); + } else { + // If we have no targets, stay still. + forwardSpeed = 0; + rotationSpeed = 0; + } + } else { + // Manual Driver Mode + forwardSpeed = -xboxController.getLeftY() * DRIVESPEED; + rotationSpeed = -xboxController.getRightX() * DRIVESPEED; + } + + // Use our forward/turn speeds to control the drivetrain + drive.arcadeDrive(forwardSpeed, rotationSpeed); + } - //////////////////////////////////////////////////// - // Simulation support + //////////////////////////////////////////////////// + // Simulation support - DrivetrainSim dtSim; - VisionSim visionSim; + DrivetrainSim dtSim; + VisionSim visionSim; - @Override - public void simulationInit() { - dtSim = new DrivetrainSim(leftMotor, rightMotor); - visionSim = new VisionSim("main", camera); - } + @Override + public void simulationInit() { + dtSim = new DrivetrainSim(leftMotor, rightMotor); + visionSim = new VisionSim("main", camera); + } - @Override - public void simulationPeriodic() { - dtSim.update(); - visionSim.update(dtSim.getPose()); - } + @Override + public void simulationPeriodic() { + dtSim.update(); + visionSim.update(dtSim.getPose()); + } } diff --git a/photonlib-java-examples/simaimandrange/src/main/java/frc/robot/sim/DrivetrainSim.java b/photonlib-java-examples/simaimandrange/src/main/java/frc/robot/sim/DrivetrainSim.java index 57d1382d6d..2a9c532e84 100644 --- a/photonlib-java-examples/simaimandrange/src/main/java/frc/robot/sim/DrivetrainSim.java +++ b/photonlib-java-examples/simaimandrange/src/main/java/frc/robot/sim/DrivetrainSim.java @@ -44,62 +44,62 @@ * real motors/sensors/physics are used instead. */ public class DrivetrainSim { - // ----- Simulation specific constants - // Drivetrain plant and simulation. 
This will calculate how the robot pose changes over time as - // motor voltages are applied. - private static final LinearSystem drivetrainSystem = - LinearSystemId.identifyDrivetrainSystem( - LINEAR_KV, LINEAR_KA, ANGULAR_KV, ANGULAR_KA, TRACKWIDTH_METERS); - private static final DifferentialDrivetrainSim drivetrainSimulator = - new DifferentialDrivetrainSim( - drivetrainSystem, - DRIVE_MOTORS, - GEARING, - TRACKWIDTH_METERS, - WHEEL_DIAMETER_METERS / 2.0, - null); - // ----- + // ----- Simulation specific constants + // Drivetrain plant and simulation. This will calculate how the robot pose changes over time as + // motor voltages are applied. + private static final LinearSystem drivetrainSystem = + LinearSystemId.identifyDrivetrainSystem( + LINEAR_KV, LINEAR_KA, ANGULAR_KV, ANGULAR_KA, TRACKWIDTH_METERS); + private static final DifferentialDrivetrainSim drivetrainSimulator = + new DifferentialDrivetrainSim( + drivetrainSystem, + DRIVE_MOTORS, + GEARING, + TRACKWIDTH_METERS, + WHEEL_DIAMETER_METERS / 2.0, + null); + // ----- - // PWM handles for getting commanded motor controller speeds - private final PWMSim leftLeader; - private final PWMSim rightLeader; + // PWM handles for getting commanded motor controller speeds + private final PWMSim leftLeader; + private final PWMSim rightLeader; - public DrivetrainSim(PWMMotorController leftController, PWMMotorController rightController) { - leftLeader = new PWMSim(leftController); - rightLeader = new PWMSim(rightController); - } + public DrivetrainSim(PWMMotorController leftController, PWMMotorController rightController) { + leftLeader = new PWMSim(leftController); + rightLeader = new PWMSim(rightController); + } - /** - * Perform all periodic drivetrain simulation related tasks to advance our simulation of robot - * physics forward by a single 20ms step. 
- */ - public void update() { - double leftMotorCmd = 0; - double rightMotorCmd = 0; + /** + * Perform all periodic drivetrain simulation related tasks to advance our simulation of robot + * physics forward by a single 20ms step. + */ + public void update() { + double leftMotorCmd = 0; + double rightMotorCmd = 0; - if (DriverStation.isEnabled() && !RobotController.isBrownedOut()) { - leftMotorCmd = leftLeader.getSpeed(); - rightMotorCmd = rightLeader.getSpeed(); - } + if (DriverStation.isEnabled() && !RobotController.isBrownedOut()) { + leftMotorCmd = leftLeader.getSpeed(); + rightMotorCmd = rightLeader.getSpeed(); + } - drivetrainSimulator.setInputs( - leftMotorCmd * RobotController.getBatteryVoltage(), - -rightMotorCmd * RobotController.getBatteryVoltage()); + drivetrainSimulator.setInputs( + leftMotorCmd * RobotController.getBatteryVoltage(), + -rightMotorCmd * RobotController.getBatteryVoltage()); - drivetrainSimulator.update(Robot.kDefaultPeriod); - } + drivetrainSimulator.update(Robot.kDefaultPeriod); + } - public Pose2d getPose() { - return drivetrainSimulator.getPose(); - } + public Pose2d getPose() { + return drivetrainSimulator.getPose(); + } - /** - * Resets the simulation back to a pre-defined pose. Useful to simulate the action of placing the - * robot onto a specific spot in the field (e.g. at the start of each match). - * - * @param pose - */ - public void resetPose(Pose2d pose) { - drivetrainSimulator.setPose(pose); - } + /** + * Resets the simulation back to a pre-defined pose. Useful to simulate the action of placing the + * robot onto a specific spot in the field (e.g. at the start of each match). 
+ * + * @param pose + */ + public void resetPose(Pose2d pose) { + drivetrainSimulator.setPose(pose); + } } diff --git a/photonlib-java-examples/simaimandrange/src/main/java/frc/robot/sim/VisionSim.java b/photonlib-java-examples/simaimandrange/src/main/java/frc/robot/sim/VisionSim.java index 0be75f7fee..0be5357c69 100644 --- a/photonlib-java-examples/simaimandrange/src/main/java/frc/robot/sim/VisionSim.java +++ b/photonlib-java-examples/simaimandrange/src/main/java/frc/robot/sim/VisionSim.java @@ -48,80 +48,80 @@ * real camera data is used instead. */ public class VisionSim { - // ----- Simulation specific constants - // 2020 High goal target shape - // See - // https://firstfrc.blob.core.windows.net/frc2020/PlayingField/2020FieldDrawing-SeasonSpecific.pdf - // page 208 - private static final TargetModel kTargetModel = - new TargetModel( - List.of( - new Translation3d(0, Units.inchesToMeters(-9.819867), Units.inchesToMeters(-8.5)), - new Translation3d(0, Units.inchesToMeters(9.819867), Units.inchesToMeters(-8.5)), - new Translation3d(0, Units.inchesToMeters(19.625), Units.inchesToMeters(8.5)), - new Translation3d(0, Units.inchesToMeters(-19.625), Units.inchesToMeters(8.5)))); + // ----- Simulation specific constants + // 2020 High goal target shape + // See + // https://firstfrc.blob.core.windows.net/frc2020/PlayingField/2020FieldDrawing-SeasonSpecific.pdf + // page 208 + private static final TargetModel kTargetModel = + new TargetModel( + List.of( + new Translation3d(0, Units.inchesToMeters(-9.819867), Units.inchesToMeters(-8.5)), + new Translation3d(0, Units.inchesToMeters(9.819867), Units.inchesToMeters(-8.5)), + new Translation3d(0, Units.inchesToMeters(19.625), Units.inchesToMeters(8.5)), + new Translation3d(0, Units.inchesToMeters(-19.625), Units.inchesToMeters(8.5)))); - // Simulated camera properties. These can be set to mimic your actual camera. 
- private static final int kResolutionWidth = 640; // pixels - private static final int kResolutionHeight = 480; // pixels - private static final Rotation2d kFOVDiag = Rotation2d.fromDegrees(100.0); // degrees - private static final double kAvgErrorPx = 0.2; - private static final double kErrorStdDevPx = 0.05; - private static final double kFPS = 25; - private static final double kAvgLatencyMs = 30; - private static final double kLatencyStdDevMs = 4; - private static final double kMinTargetArea = 0.1; // percentage (0 - 100) - private static final double kMaxLEDRange = 15; // meters - // ----- + // Simulated camera properties. These can be set to mimic your actual camera. + private static final int kResolutionWidth = 640; // pixels + private static final int kResolutionHeight = 480; // pixels + private static final Rotation2d kFOVDiag = Rotation2d.fromDegrees(100.0); // degrees + private static final double kAvgErrorPx = 0.2; + private static final double kErrorStdDevPx = 0.05; + private static final double kFPS = 25; + private static final double kAvgLatencyMs = 30; + private static final double kLatencyStdDevMs = 4; + private static final double kMinTargetArea = 0.1; // percentage (0 - 100) + private static final double kMaxLEDRange = 15; // meters + // ----- - // A simulated vision system which handles simulated cameras and targets. - private VisionSystemSim visionSim; - // The simulation of our PhotonCamera, which will simulate camera frames and target info. - private PhotonCameraSim cameraSim; + // A simulated vision system which handles simulated cameras and targets. + private VisionSystemSim visionSim; + // The simulation of our PhotonCamera, which will simulate camera frames and target info. + private PhotonCameraSim cameraSim; - public VisionSim(String name, PhotonCamera camera) { - visionSim = new VisionSystemSim(name); - // Make the vision target visible to this simulated field. 
- var visionTarget = new VisionTargetSim(TARGET_POSE, kTargetModel); - visionSim.addVisionTargets(visionTarget); + public VisionSim(String name, PhotonCamera camera) { + visionSim = new VisionSystemSim(name); + // Make the vision target visible to this simulated field. + var visionTarget = new VisionTargetSim(TARGET_POSE, kTargetModel); + visionSim.addVisionTargets(visionTarget); - // Create simulated camera properties from our defined constants. - var cameraProp = new SimCameraProperties(); - cameraProp.setCalibration(kResolutionWidth, kResolutionHeight, kFOVDiag); - cameraProp.setCalibError(kAvgErrorPx, kErrorStdDevPx); - cameraProp.setFPS(kFPS); - cameraProp.setAvgLatencyMs(kAvgLatencyMs); - cameraProp.setLatencyStdDevMs(kLatencyStdDevMs); - // Create a PhotonCameraSim which will update the linked PhotonCamera's values with visible - // targets. - cameraSim = new PhotonCameraSim(camera, cameraProp, kMinTargetArea, kMaxLEDRange); + // Create simulated camera properties from our defined constants. + var cameraProp = new SimCameraProperties(); + cameraProp.setCalibration(kResolutionWidth, kResolutionHeight, kFOVDiag); + cameraProp.setCalibError(kAvgErrorPx, kErrorStdDevPx); + cameraProp.setFPS(kFPS); + cameraProp.setAvgLatencyMs(kAvgLatencyMs); + cameraProp.setLatencyStdDevMs(kLatencyStdDevMs); + // Create a PhotonCameraSim which will update the linked PhotonCamera's values with visible + // targets. + cameraSim = new PhotonCameraSim(camera, cameraProp, kMinTargetArea, kMaxLEDRange); - // Add the simulated camera to view the targets on this simulated field. - visionSim.addCamera( - cameraSim, - new Transform3d( - new Translation3d(0, 0, CAMERA_HEIGHT_METERS), - new Rotation3d(0, -CAMERA_PITCH_RADIANS, 0))); + // Add the simulated camera to view the targets on this simulated field. 
+ visionSim.addCamera( + cameraSim, + new Transform3d( + new Translation3d(0, 0, CAMERA_HEIGHT_METERS), + new Rotation3d(0, -CAMERA_PITCH_RADIANS, 0))); - cameraSim.enableDrawWireframe(true); - } + cameraSim.enableDrawWireframe(true); + } - /** - * Update the simulated camera data based on its new field pose. - * - * @param simRobotPose The pose of the simulated robot - */ - public void update(Pose2d simRobotPose) { - visionSim.update(simRobotPose); - } + /** + * Update the simulated camera data based on its new field pose. + * + * @param simRobotPose The pose of the simulated robot + */ + public void update(Pose2d simRobotPose) { + visionSim.update(simRobotPose); + } - /** - * Resets the simulation back to a pre-defined pose. Useful to simulate the action of placing the - * robot onto a specific spot in the field (e.g. at the start of each match). - * - * @param pose - */ - public void resetPose(Pose2d pose) { - visionSim.resetRobotPose(pose); - } + /** + * Resets the simulation back to a pre-defined pose. Useful to simulate the action of placing the + * robot onto a specific spot in the field (e.g. at the start of each match). + * + * @param pose + */ + public void resetPose(Pose2d pose) { + visionSim.resetRobotPose(pose); + } } diff --git a/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Constants.java b/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Constants.java index 75fba6c5fc..1cea631e93 100644 --- a/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Constants.java +++ b/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Constants.java @@ -38,105 +38,105 @@ import edu.wpi.first.math.util.Units; public class Constants { - public static class Vision { - public static final String kCameraName = "YOUR CAMERA NAME"; - // Cam mounted facing forward, half a meter forward of center, half a meter up from center. 
- public static final Transform3d kRobotToCam = - new Transform3d(new Translation3d(0.5, 0.0, 0.5), new Rotation3d(0, 0, 0)); + public static class Vision { + public static final String kCameraName = "YOUR CAMERA NAME"; + // Cam mounted facing forward, half a meter forward of center, half a meter up from center. + public static final Transform3d kRobotToCam = + new Transform3d(new Translation3d(0.5, 0.0, 0.5), new Rotation3d(0, 0, 0)); - // The layout of the AprilTags on the field - public static final AprilTagFieldLayout kTagLayout = - AprilTagFields.kDefaultField.loadAprilTagLayoutField(); + // The layout of the AprilTags on the field + public static final AprilTagFieldLayout kTagLayout = + AprilTagFields.kDefaultField.loadAprilTagLayoutField(); - // The standard deviations of our vision estimated poses, which affect correction rate - // (Fake values. Experiment and determine estimation noise on an actual robot.) - public static final Matrix kSingleTagStdDevs = VecBuilder.fill(4, 4, 8); - public static final Matrix kMultiTagStdDevs = VecBuilder.fill(0.5, 0.5, 1); - } + // The standard deviations of our vision estimated poses, which affect correction rate + // (Fake values. Experiment and determine estimation noise on an actual robot.) 
+ public static final Matrix kSingleTagStdDevs = VecBuilder.fill(4, 4, 8); + public static final Matrix kMultiTagStdDevs = VecBuilder.fill(0.5, 0.5, 1); + } - public static class Swerve { - // Physical properties - public static final double kTrackWidth = Units.inchesToMeters(18.5); - public static final double kTrackLength = Units.inchesToMeters(18.5); - public static final double kRobotWidth = Units.inchesToMeters(25 + 3.25 * 2); - public static final double kRobotLength = Units.inchesToMeters(25 + 3.25 * 2); - public static final double kMaxLinearSpeed = Units.feetToMeters(15.5); - public static final double kMaxAngularSpeed = Units.rotationsToRadians(2); - public static final double kWheelDiameter = Units.inchesToMeters(4); - public static final double kWheelCircumference = kWheelDiameter * Math.PI; + public static class Swerve { + // Physical properties + public static final double kTrackWidth = Units.inchesToMeters(18.5); + public static final double kTrackLength = Units.inchesToMeters(18.5); + public static final double kRobotWidth = Units.inchesToMeters(25 + 3.25 * 2); + public static final double kRobotLength = Units.inchesToMeters(25 + 3.25 * 2); + public static final double kMaxLinearSpeed = Units.feetToMeters(15.5); + public static final double kMaxAngularSpeed = Units.rotationsToRadians(2); + public static final double kWheelDiameter = Units.inchesToMeters(4); + public static final double kWheelCircumference = kWheelDiameter * Math.PI; - public static final double kDriveGearRatio = 6.75; // 6.75:1 SDS MK4 L2 ratio - public static final double kSteerGearRatio = 12.8; // 12.8:1 + public static final double kDriveGearRatio = 6.75; // 6.75:1 SDS MK4 L2 ratio + public static final double kSteerGearRatio = 12.8; // 12.8:1 - public static final double kDriveDistPerPulse = kWheelCircumference / 1024 / kDriveGearRatio; - public static final double kSteerRadPerPulse = 2 * Math.PI / 1024; + public static final double kDriveDistPerPulse = kWheelCircumference / 
1024 / kDriveGearRatio; + public static final double kSteerRadPerPulse = 2 * Math.PI / 1024; - public enum ModuleConstants { - FL( // Front left - 1, 0, 0, 1, 1, 2, 3, 0, kTrackLength / 2, kTrackWidth / 2), - FR( // Front Right - 2, 2, 4, 5, 3, 6, 7, 0, kTrackLength / 2, -kTrackWidth / 2), - BL( // Back Left - 3, 4, 8, 9, 5, 10, 11, 0, -kTrackLength / 2, kTrackWidth / 2), - BR( // Back Right - 4, 6, 12, 13, 7, 14, 15, 0, -kTrackLength / 2, -kTrackWidth / 2); + public enum ModuleConstants { + FL( // Front left + 1, 0, 0, 1, 1, 2, 3, 0, kTrackLength / 2, kTrackWidth / 2), + FR( // Front Right + 2, 2, 4, 5, 3, 6, 7, 0, kTrackLength / 2, -kTrackWidth / 2), + BL( // Back Left + 3, 4, 8, 9, 5, 10, 11, 0, -kTrackLength / 2, kTrackWidth / 2), + BR( // Back Right + 4, 6, 12, 13, 7, 14, 15, 0, -kTrackLength / 2, -kTrackWidth / 2); - public final int moduleNum; - public final int driveMotorID; - public final int driveEncoderA; - public final int driveEncoderB; - public final int steerMotorID; - public final int steerEncoderA; - public final int steerEncoderB; - public final double angleOffset; - public final Translation2d centerOffset; + public final int moduleNum; + public final int driveMotorID; + public final int driveEncoderA; + public final int driveEncoderB; + public final int steerMotorID; + public final int steerEncoderA; + public final int steerEncoderB; + public final double angleOffset; + public final Translation2d centerOffset; - private ModuleConstants( - int moduleNum, - int driveMotorID, - int driveEncoderA, - int driveEncoderB, - int steerMotorID, - int steerEncoderA, - int steerEncoderB, - double angleOffset, - double xOffset, - double yOffset) { - this.moduleNum = moduleNum; - this.driveMotorID = driveMotorID; - this.driveEncoderA = driveEncoderA; - this.driveEncoderB = driveEncoderB; - this.steerMotorID = steerMotorID; - this.steerEncoderA = steerEncoderA; - this.steerEncoderB = steerEncoderB; - this.angleOffset = angleOffset; - centerOffset = new 
Translation2d(xOffset, yOffset); - } - } + private ModuleConstants( + int moduleNum, + int driveMotorID, + int driveEncoderA, + int driveEncoderB, + int steerMotorID, + int steerEncoderA, + int steerEncoderB, + double angleOffset, + double xOffset, + double yOffset) { + this.moduleNum = moduleNum; + this.driveMotorID = driveMotorID; + this.driveEncoderA = driveEncoderA; + this.driveEncoderB = driveEncoderB; + this.steerMotorID = steerMotorID; + this.steerEncoderA = steerEncoderA; + this.steerEncoderB = steerEncoderB; + this.angleOffset = angleOffset; + centerOffset = new Translation2d(xOffset, yOffset); + } + } - // Feedforward - // Linear drive feed forward - public static final SimpleMotorFeedforward kDriveFF = - new SimpleMotorFeedforward( // real - 0.25, // Voltage to break static friction - 2.5, // Volts per meter per second - 0.3 // Volts per meter per second squared - ); - // Steer feed forward - public static final SimpleMotorFeedforward kSteerFF = - new SimpleMotorFeedforward( // real - 0.5, // Voltage to break static friction - 0.25, // Volts per radian per second - 0.01 // Volts per radian per second squared - ); + // Feedforward + // Linear drive feed forward + public static final SimpleMotorFeedforward kDriveFF = + new SimpleMotorFeedforward( // real + 0.25, // Voltage to break static friction + 2.5, // Volts per meter per second + 0.3 // Volts per meter per second squared + ); + // Steer feed forward + public static final SimpleMotorFeedforward kSteerFF = + new SimpleMotorFeedforward( // real + 0.5, // Voltage to break static friction + 0.25, // Volts per radian per second + 0.01 // Volts per radian per second squared + ); - // PID - public static final double kDriveKP = 1; - public static final double kDriveKI = 0; - public static final double kDriveKD = 0; + // PID + public static final double kDriveKP = 1; + public static final double kDriveKI = 0; + public static final double kDriveKD = 0; - public static final double kSteerKP = 20; - public 
static final double kSteerKI = 0; - public static final double kSteerKD = 0.25; - } + public static final double kSteerKP = 20; + public static final double kSteerKI = 0; + public static final double kSteerKD = 0.25; + } } diff --git a/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Main.java b/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Main.java index c420a5735a..f227c28da0 100644 --- a/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Main.java +++ b/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Main.java @@ -27,9 +27,9 @@ import edu.wpi.first.wpilibj.RobotBase; public final class Main { - private Main() {} + private Main() {} - public static void main(String... args) { - RobotBase.startRobot(Robot::new); - } + public static void main(String... args) { + RobotBase.startRobot(Robot::new); + } } diff --git a/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Robot.java b/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Robot.java index 712539e159..d32d4d477c 100644 --- a/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Robot.java +++ b/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Robot.java @@ -38,118 +38,118 @@ import java.util.Random; public class Robot extends TimedRobot { - private SwerveDrive drivetrain; - private Vision vision; - - private XboxController controller; - // Limit max speed - private final double kDriveSpeed = 0.6; - // Rudimentary limiting of drivetrain acceleration - private SlewRateLimiter forwardLimiter = new SlewRateLimiter(1.0 / 0.6); // 1 / x seconds to 100% - private SlewRateLimiter strafeLimiter = new SlewRateLimiter(1.0 / 0.6); - private SlewRateLimiter turnLimiter = new SlewRateLimiter(1.0 / 0.33); - - private Timer autoTimer = new Timer(); - private Random rand = new Random(4512); - - @Override - public void robotInit() { - drivetrain = new SwerveDrive(); - 
vision = new Vision(); - - controller = new XboxController(0); - } - - @Override - public void robotPeriodic() { - drivetrain.periodic(); - - // Correct pose estimate with vision measurements - var visionEst = vision.getEstimatedGlobalPose(); - visionEst.ifPresent( - est -> { - var estPose = est.estimatedPose.toPose2d(); - // Change our trust in the measurement based on the tags we can see - var estStdDevs = vision.getEstimationStdDevs(estPose); - - drivetrain.addVisionMeasurement( - est.estimatedPose.toPose2d(), est.timestampSeconds, estStdDevs); - }); - - // Apply a random offset to pose estimator to test vision correction - if (controller.getBButtonPressed()) { - var trf = - new Transform2d( - new Translation2d(rand.nextDouble() * 4 - 2, rand.nextDouble() * 4 - 2), - new Rotation2d(rand.nextDouble() * 2 * Math.PI)); - drivetrain.resetPose(drivetrain.getPose().plus(trf), false); + private SwerveDrive drivetrain; + private Vision vision; + + private XboxController controller; + // Limit max speed + private final double kDriveSpeed = 0.6; + // Rudimentary limiting of drivetrain acceleration + private SlewRateLimiter forwardLimiter = new SlewRateLimiter(1.0 / 0.6); // 1 / x seconds to 100% + private SlewRateLimiter strafeLimiter = new SlewRateLimiter(1.0 / 0.6); + private SlewRateLimiter turnLimiter = new SlewRateLimiter(1.0 / 0.33); + + private Timer autoTimer = new Timer(); + private Random rand = new Random(4512); + + @Override + public void robotInit() { + drivetrain = new SwerveDrive(); + vision = new Vision(); + + controller = new XboxController(0); } - // Log values to the dashboard - drivetrain.log(); - } - - @Override - public void disabledPeriodic() { - drivetrain.stop(); - } - - @Override - public void autonomousInit() { - autoTimer.restart(); - var pose = new Pose2d(1, 1, new Rotation2d()); - drivetrain.resetPose(pose, true); - vision.resetSimPose(pose); - } - - @Override - public void autonomousPeriodic() { - // translate diagonally while spinning - if 
(autoTimer.get() < 10) { - drivetrain.drive(0.5, 0.5, 0.5, false); - } else { - autoTimer.stop(); - drivetrain.stop(); + @Override + public void robotPeriodic() { + drivetrain.periodic(); + + // Correct pose estimate with vision measurements + var visionEst = vision.getEstimatedGlobalPose(); + visionEst.ifPresent( + est -> { + var estPose = est.estimatedPose.toPose2d(); + // Change our trust in the measurement based on the tags we can see + var estStdDevs = vision.getEstimationStdDevs(estPose); + + drivetrain.addVisionMeasurement( + est.estimatedPose.toPose2d(), est.timestampSeconds, estStdDevs); + }); + + // Apply a random offset to pose estimator to test vision correction + if (controller.getBButtonPressed()) { + var trf = + new Transform2d( + new Translation2d(rand.nextDouble() * 4 - 2, rand.nextDouble() * 4 - 2), + new Rotation2d(rand.nextDouble() * 2 * Math.PI)); + drivetrain.resetPose(drivetrain.getPose().plus(trf), false); + } + + // Log values to the dashboard + drivetrain.log(); + } + + @Override + public void disabledPeriodic() { + drivetrain.stop(); + } + + @Override + public void autonomousInit() { + autoTimer.restart(); + var pose = new Pose2d(1, 1, new Rotation2d()); + drivetrain.resetPose(pose, true); + vision.resetSimPose(pose); + } + + @Override + public void autonomousPeriodic() { + // translate diagonally while spinning + if (autoTimer.get() < 10) { + drivetrain.drive(0.5, 0.5, 0.5, false); + } else { + autoTimer.stop(); + drivetrain.stop(); + } + } + + @Override + public void teleopPeriodic() { + // We will use an "arcade drive" scheme to turn joystick values into target robot speeds + // We want to get joystick values where pushing forward/left is positive + double forward = -controller.getLeftY() * kDriveSpeed; + if (Math.abs(forward) < 0.1) forward = 0; // deadband small values + forward = forwardLimiter.calculate(forward); // limit acceleration + double strafe = -controller.getLeftX() * kDriveSpeed; + if (Math.abs(strafe) < 0.1) strafe = 0; 
+ strafe = strafeLimiter.calculate(strafe); + double turn = -controller.getRightX() * kDriveSpeed; + if (Math.abs(turn) < 0.1) turn = 0; + turn = turnLimiter.calculate(turn); + + // Convert from joystick values to real target speeds + forward *= Constants.Swerve.kMaxLinearSpeed; + strafe *= Constants.Swerve.kMaxLinearSpeed; + turn *= Constants.Swerve.kMaxLinearSpeed; + + // Command drivetrain motors based on target speeds + drivetrain.drive(forward, strafe, turn, true); + } + + @Override + public void simulationPeriodic() { + // Update drivetrain simulation + drivetrain.simulationPeriodic(); + + // Update camera simulation + vision.simulationPeriodic(drivetrain.getSimPose()); + + var debugField = vision.getSimDebugField(); + debugField.getObject("EstimatedRobot").setPose(drivetrain.getPose()); + debugField.getObject("EstimatedRobotModules").setPoses(drivetrain.getModulePoses()); + + // Calculate battery voltage sag due to current draw + RoboRioSim.setVInVoltage( + BatterySim.calculateDefaultBatteryLoadedVoltage(drivetrain.getCurrentDraw())); } - } - - @Override - public void teleopPeriodic() { - // We will use an "arcade drive" scheme to turn joystick values into target robot speeds - // We want to get joystick values where pushing forward/left is positive - double forward = -controller.getLeftY() * kDriveSpeed; - if (Math.abs(forward) < 0.1) forward = 0; // deadband small values - forward = forwardLimiter.calculate(forward); // limit acceleration - double strafe = -controller.getLeftX() * kDriveSpeed; - if (Math.abs(strafe) < 0.1) strafe = 0; - strafe = strafeLimiter.calculate(strafe); - double turn = -controller.getRightX() * kDriveSpeed; - if (Math.abs(turn) < 0.1) turn = 0; - turn = turnLimiter.calculate(turn); - - // Convert from joystick values to real target speeds - forward *= Constants.Swerve.kMaxLinearSpeed; - strafe *= Constants.Swerve.kMaxLinearSpeed; - turn *= Constants.Swerve.kMaxLinearSpeed; - - // Command drivetrain motors based on target speeds - 
drivetrain.drive(forward, strafe, turn, true); - } - - @Override - public void simulationPeriodic() { - // Update drivetrain simulation - drivetrain.simulationPeriodic(); - - // Update camera simulation - vision.simulationPeriodic(drivetrain.getSimPose()); - - var debugField = vision.getSimDebugField(); - debugField.getObject("EstimatedRobot").setPose(drivetrain.getPose()); - debugField.getObject("EstimatedRobotModules").setPoses(drivetrain.getModulePoses()); - - // Calculate battery voltage sag due to current draw - RoboRioSim.setVInVoltage( - BatterySim.calculateDefaultBatteryLoadedVoltage(drivetrain.getCurrentDraw())); - } } diff --git a/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Vision.java b/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Vision.java index bc1f9f7fb7..d1e88c9258 100644 --- a/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Vision.java +++ b/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Vision.java @@ -44,119 +44,119 @@ import org.photonvision.targeting.PhotonPipelineResult; public class Vision { - private final PhotonCamera camera; - private final PhotonPoseEstimator photonEstimator; - private double lastEstTimestamp = 0; + private final PhotonCamera camera; + private final PhotonPoseEstimator photonEstimator; + private double lastEstTimestamp = 0; + + // Simulation + private PhotonCameraSim cameraSim; + private VisionSystemSim visionSim; + + public Vision() { + camera = new PhotonCamera(kCameraName); + + photonEstimator = + new PhotonPoseEstimator( + kTagLayout, PoseStrategy.MULTI_TAG_PNP_ON_COPROCESSOR, camera, kRobotToCam); + photonEstimator.setMultiTagFallbackStrategy(PoseStrategy.LOWEST_AMBIGUITY); + + // ----- Simulation + if (Robot.isSimulation()) { + // Create the vision system simulation which handles cameras and targets on the field. 
+ visionSim = new VisionSystemSim("main"); + // Add all the AprilTags inside the tag layout as visible targets to this simulated field. + visionSim.addAprilTags(kTagLayout); + // Create simulated camera properties. These can be set to mimic your actual camera. + var cameraProp = new SimCameraProperties(); + cameraProp.setCalibration(960, 720, Rotation2d.fromDegrees(90)); + cameraProp.setCalibError(0.35, 0.10); + cameraProp.setFPS(15); + cameraProp.setAvgLatencyMs(50); + cameraProp.setLatencyStdDevMs(15); + // Create a PhotonCameraSim which will update the linked PhotonCamera's values with visible + // targets. + cameraSim = new PhotonCameraSim(camera, cameraProp); + // Add the simulated camera to view the targets on this simulated field. + visionSim.addCamera(cameraSim, kRobotToCam); + + cameraSim.enableDrawWireframe(true); + } + } - // Simulation - private PhotonCameraSim cameraSim; - private VisionSystemSim visionSim; + public PhotonPipelineResult getLatestResult() { + return camera.getLatestResult(); + } - public Vision() { - camera = new PhotonCamera(kCameraName); + /** + * The latest estimated robot pose on the field from vision data. This may be empty. This should + * only be called once per loop. + * + * @return An {@link EstimatedRobotPose} with an estimated pose, estimate timestamp, and targets + * used for estimation. 
+ */ + public Optional getEstimatedGlobalPose() { + var visionEst = photonEstimator.update(); + double latestTimestamp = camera.getLatestResult().getTimestampSeconds(); + boolean newResult = Math.abs(latestTimestamp - lastEstTimestamp) > 1e-5; + if (Robot.isSimulation()) { + visionEst.ifPresentOrElse( + est -> + getSimDebugField() + .getObject("VisionEstimation") + .setPose(est.estimatedPose.toPose2d()), + () -> { + if (newResult) getSimDebugField().getObject("VisionEstimation").setPoses(); + }); + } + if (newResult) lastEstTimestamp = latestTimestamp; + return visionEst; + } - photonEstimator = - new PhotonPoseEstimator( - kTagLayout, PoseStrategy.MULTI_TAG_PNP_ON_COPROCESSOR, camera, kRobotToCam); - photonEstimator.setMultiTagFallbackStrategy(PoseStrategy.LOWEST_AMBIGUITY); + /** + * The standard deviations of the estimated pose from {@link #getEstimatedGlobalPose()}, for use + * with {@link edu.wpi.first.math.estimator.SwerveDrivePoseEstimator SwerveDrivePoseEstimator}. + * This should only be used when there are targets visible. + * + * @param estimatedPose The estimated pose to guess standard deviations for. 
+ */ + public Matrix getEstimationStdDevs(Pose2d estimatedPose) { + var estStdDevs = kSingleTagStdDevs; + var targets = getLatestResult().getTargets(); + int numTags = 0; + double avgDist = 0; + for (var tgt : targets) { + var tagPose = photonEstimator.getFieldTags().getTagPose(tgt.getFiducialId()); + if (tagPose.isEmpty()) continue; + numTags++; + avgDist += + tagPose.get().toPose2d().getTranslation().getDistance(estimatedPose.getTranslation()); + } + if (numTags == 0) return estStdDevs; + avgDist /= numTags; + // Decrease std devs if multiple targets are visible + if (numTags > 1) estStdDevs = kMultiTagStdDevs; + // Increase std devs based on (average) distance + if (numTags == 1 && avgDist > 4) + estStdDevs = VecBuilder.fill(Double.MAX_VALUE, Double.MAX_VALUE, Double.MAX_VALUE); + else estStdDevs = estStdDevs.times(1 + (avgDist * avgDist / 30)); + + return estStdDevs; + } // ----- Simulation - if (Robot.isSimulation()) { - // Create the vision system simulation which handles cameras and targets on the field. - visionSim = new VisionSystemSim("main"); - // Add all the AprilTags inside the tag layout as visible targets to this simulated field. - visionSim.addAprilTags(kTagLayout); - // Create simulated camera properties. These can be set to mimic your actual camera. - var cameraProp = new SimCameraProperties(); - cameraProp.setCalibration(960, 720, Rotation2d.fromDegrees(90)); - cameraProp.setCalibError(0.35, 0.10); - cameraProp.setFPS(15); - cameraProp.setAvgLatencyMs(50); - cameraProp.setLatencyStdDevMs(15); - // Create a PhotonCameraSim which will update the linked PhotonCamera's values with visible - // targets. - cameraSim = new PhotonCameraSim(camera, cameraProp); - // Add the simulated camera to view the targets on this simulated field. 
- visionSim.addCamera(cameraSim, kRobotToCam); - - cameraSim.enableDrawWireframe(true); + + public void simulationPeriodic(Pose2d robotSimPose) { + visionSim.update(robotSimPose); } - } - - public PhotonPipelineResult getLatestResult() { - return camera.getLatestResult(); - } - - /** - * The latest estimated robot pose on the field from vision data. This may be empty. This should - * only be called once per loop. - * - * @return An {@link EstimatedRobotPose} with an estimated pose, estimate timestamp, and targets - * used for estimation. - */ - public Optional getEstimatedGlobalPose() { - var visionEst = photonEstimator.update(); - double latestTimestamp = camera.getLatestResult().getTimestampSeconds(); - boolean newResult = Math.abs(latestTimestamp - lastEstTimestamp) > 1e-5; - if (Robot.isSimulation()) { - visionEst.ifPresentOrElse( - est -> - getSimDebugField() - .getObject("VisionEstimation") - .setPose(est.estimatedPose.toPose2d()), - () -> { - if (newResult) getSimDebugField().getObject("VisionEstimation").setPoses(); - }); + + /** Reset pose history of the robot in the vision system simulation. */ + public void resetSimPose(Pose2d pose) { + if (Robot.isSimulation()) visionSim.resetRobotPose(pose); } - if (newResult) lastEstTimestamp = latestTimestamp; - return visionEst; - } - - /** - * The standard deviations of the estimated pose from {@link #getEstimatedGlobalPose()}, for use - * with {@link edu.wpi.first.math.estimator.SwerveDrivePoseEstimator SwerveDrivePoseEstimator}. - * This should only be used when there are targets visible. - * - * @param estimatedPose The estimated pose to guess standard deviations for. 
- */ - public Matrix getEstimationStdDevs(Pose2d estimatedPose) { - var estStdDevs = kSingleTagStdDevs; - var targets = getLatestResult().getTargets(); - int numTags = 0; - double avgDist = 0; - for (var tgt : targets) { - var tagPose = photonEstimator.getFieldTags().getTagPose(tgt.getFiducialId()); - if (tagPose.isEmpty()) continue; - numTags++; - avgDist += - tagPose.get().toPose2d().getTranslation().getDistance(estimatedPose.getTranslation()); + + /** A Field2d for visualizing our robot and objects on the field. */ + public Field2d getSimDebugField() { + if (!Robot.isSimulation()) return null; + return visionSim.getDebugField(); } - if (numTags == 0) return estStdDevs; - avgDist /= numTags; - // Decrease std devs if multiple targets are visible - if (numTags > 1) estStdDevs = kMultiTagStdDevs; - // Increase std devs based on (average) distance - if (numTags == 1 && avgDist > 4) - estStdDevs = VecBuilder.fill(Double.MAX_VALUE, Double.MAX_VALUE, Double.MAX_VALUE); - else estStdDevs = estStdDevs.times(1 + (avgDist * avgDist / 30)); - - return estStdDevs; - } - - // ----- Simulation - - public void simulationPeriodic(Pose2d robotSimPose) { - visionSim.update(robotSimPose); - } - - /** Reset pose history of the robot in the vision system simulation. */ - public void resetSimPose(Pose2d pose) { - if (Robot.isSimulation()) visionSim.resetRobotPose(pose); - } - - /** A Field2d for visualizing our robot and objects on the field. 
*/ - public Field2d getSimDebugField() { - if (!Robot.isSimulation()) return null; - return visionSim.getDebugField(); - } } diff --git a/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/subsystems/drivetrain/SwerveDrive.java b/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/subsystems/drivetrain/SwerveDrive.java index 86a2058732..21911c997d 100644 --- a/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/subsystems/drivetrain/SwerveDrive.java +++ b/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/subsystems/drivetrain/SwerveDrive.java @@ -45,290 +45,290 @@ import frc.robot.Robot; public class SwerveDrive { - // Construct the swerve modules with their respective constants. - // The SwerveModule class will handle all the details of controlling the modules. - private final SwerveModule[] swerveMods = { - new SwerveModule(ModuleConstants.FL), - new SwerveModule(ModuleConstants.FR), - new SwerveModule(ModuleConstants.BL), - new SwerveModule(ModuleConstants.BR) - }; - - // The kinematics for translating between robot chassis speeds and module states. - private final SwerveDriveKinematics kinematics = - new SwerveDriveKinematics( - swerveMods[0].getModuleConstants().centerOffset, - swerveMods[1].getModuleConstants().centerOffset, - swerveMods[2].getModuleConstants().centerOffset, - swerveMods[3].getModuleConstants().centerOffset); - - private final ADXRS450_Gyro gyro = new ADXRS450_Gyro(Port.kOnboardCS0); - - // The robot pose estimator for tracking swerve odometry and applying vision corrections. 
- private final SwerveDrivePoseEstimator poseEstimator; - - private ChassisSpeeds targetChassisSpeeds = new ChassisSpeeds(); - - // ----- Simulation - private final ADXRS450_GyroSim gyroSim; - private final SwerveDriveSim swerveDriveSim; - private double totalCurrentDraw = 0; - - public SwerveDrive() { - // Define the standard deviations for the pose estimator, which determine how fast the pose - // estimate converges to the vision measurement. This should depend on the vision measurement - // noise - // and how many or how frequently vision measurements are applied to the pose estimator. - var stateStdDevs = VecBuilder.fill(0.1, 0.1, 0.1); - var visionStdDevs = VecBuilder.fill(1, 1, 1); - poseEstimator = - new SwerveDrivePoseEstimator( - kinematics, - getGyroYaw(), - getModulePositions(), - new Pose2d(), - stateStdDevs, - visionStdDevs); + // Construct the swerve modules with their respective constants. + // The SwerveModule class will handle all the details of controlling the modules. + private final SwerveModule[] swerveMods = { + new SwerveModule(ModuleConstants.FL), + new SwerveModule(ModuleConstants.FR), + new SwerveModule(ModuleConstants.BL), + new SwerveModule(ModuleConstants.BR) + }; + + // The kinematics for translating between robot chassis speeds and module states. + private final SwerveDriveKinematics kinematics = + new SwerveDriveKinematics( + swerveMods[0].getModuleConstants().centerOffset, + swerveMods[1].getModuleConstants().centerOffset, + swerveMods[2].getModuleConstants().centerOffset, + swerveMods[3].getModuleConstants().centerOffset); + + private final ADXRS450_Gyro gyro = new ADXRS450_Gyro(Port.kOnboardCS0); + + // The robot pose estimator for tracking swerve odometry and applying vision corrections. 
+ private final SwerveDrivePoseEstimator poseEstimator; + + private ChassisSpeeds targetChassisSpeeds = new ChassisSpeeds(); // ----- Simulation - gyroSim = new ADXRS450_GyroSim(gyro); - swerveDriveSim = - new SwerveDriveSim( - kDriveFF, - DCMotor.getFalcon500(1), - kDriveGearRatio, - kWheelDiameter / 2.0, - kSteerFF, - DCMotor.getFalcon500(1), - kSteerGearRatio, - kinematics); - } - - public void periodic() { - for (SwerveModule module : swerveMods) { - module.periodic(); + private final ADXRS450_GyroSim gyroSim; + private final SwerveDriveSim swerveDriveSim; + private double totalCurrentDraw = 0; + + public SwerveDrive() { + // Define the standard deviations for the pose estimator, which determine how fast the pose + // estimate converges to the vision measurement. This should depend on the vision measurement + // noise + // and how many or how frequently vision measurements are applied to the pose estimator. + var stateStdDevs = VecBuilder.fill(0.1, 0.1, 0.1); + var visionStdDevs = VecBuilder.fill(1, 1, 1); + poseEstimator = + new SwerveDrivePoseEstimator( + kinematics, + getGyroYaw(), + getModulePositions(), + new Pose2d(), + stateStdDevs, + visionStdDevs); + + // ----- Simulation + gyroSim = new ADXRS450_GyroSim(gyro); + swerveDriveSim = + new SwerveDriveSim( + kDriveFF, + DCMotor.getFalcon500(1), + kDriveGearRatio, + kWheelDiameter / 2.0, + kSteerFF, + DCMotor.getFalcon500(1), + kSteerGearRatio, + kinematics); } - // Update the odometry of the swerve drive using the wheel encoders and gyro. - poseEstimator.update(getGyroYaw(), getModulePositions()); - } - - /** - * Basic drive control. A target field-relative ChassisSpeeds (vx, vy, omega) is converted to - * specific swerve module states. - * - * @param vxMeters X velocity (forwards/backwards) - * @param vyMeters Y velocity (strafe left/right) - * @param omegaRadians Angular velocity (rotation CCW+) - * @param openLoop If swerve modules should use feedforward only and ignore velocity feedback - * control. 
- */ - public void drive(double vxMeters, double vyMeters, double omegaRadians, boolean openLoop) { - var targetChassisSpeeds = - ChassisSpeeds.fromFieldRelativeSpeeds(vxMeters, vyMeters, omegaRadians, getHeading()); - setChassisSpeeds(targetChassisSpeeds, openLoop, false); - } - - /** - * Command the swerve drive to the desired chassis speeds by converting them to swerve module - * states and using {@link #setModuleStates(SwerveModuleState[], boolean)}. - * - * @param targetChassisSpeeds Target robot-relative chassis speeds (vx, vy, omega). - * @param openLoop If swerve modules should use feedforward only and ignore velocity feedback - * control. - * @param steerInPlace If modules should steer to the target angle when target velocity is 0. - */ - public void setChassisSpeeds( - ChassisSpeeds targetChassisSpeeds, boolean openLoop, boolean steerInPlace) { - setModuleStates(kinematics.toSwerveModuleStates(targetChassisSpeeds), openLoop, steerInPlace); - this.targetChassisSpeeds = targetChassisSpeeds; - } - - /** - * Command the swerve modules to the desired states. Velocites exceeding the maximum speed will be - * desaturated (while preserving the ratios between modules). - * - * @param openLoop If swerve modules should use feedforward only and ignore velocity feedback - * control. - * @param steerInPlace If modules should steer to the target angle when target velocity is 0. - */ - public void setModuleStates( - SwerveModuleState[] desiredStates, boolean openLoop, boolean steerInPlace) { - SwerveDriveKinematics.desaturateWheelSpeeds(desiredStates, kMaxLinearSpeed); - for (int i = 0; i < swerveMods.length; i++) { - swerveMods[i].setDesiredState(desiredStates[i], openLoop, steerInPlace); + public void periodic() { + for (SwerveModule module : swerveMods) { + module.periodic(); + } + + // Update the odometry of the swerve drive using the wheel encoders and gyro. + poseEstimator.update(getGyroYaw(), getModulePositions()); } - } - - /** Stop the swerve drive. 
*/ - public void stop() { - drive(0, 0, 0, true); - } - - /** See {@link SwerveDrivePoseEstimator#addVisionMeasurement(Pose2d, double)}. */ - public void addVisionMeasurement(Pose2d visionMeasurement, double timestampSeconds) { - poseEstimator.addVisionMeasurement(visionMeasurement, timestampSeconds); - } - - /** See {@link SwerveDrivePoseEstimator#addVisionMeasurement(Pose2d, double, Matrix)}. */ - public void addVisionMeasurement( - Pose2d visionMeasurement, double timestampSeconds, Matrix stdDevs) { - poseEstimator.addVisionMeasurement(visionMeasurement, timestampSeconds, stdDevs); - } - - /** - * Reset the estimated pose of the swerve drive on the field. - * - * @param pose New robot pose. - * @param resetSimPose If the simulated robot pose should also be reset. This effectively - * teleports the robot and should only be used during the setup of the simulation world. - */ - public void resetPose(Pose2d pose, boolean resetSimPose) { - if (resetSimPose) { - swerveDriveSim.reset(pose, false); - // we shouldnt realistically be resetting pose after startup, but we will handle it anyway for - // testing - for (int i = 0; i < swerveMods.length; i++) { - swerveMods[i].simulationUpdate(0, 0, 0, 0, 0, 0); - } - gyroSim.setAngle(-pose.getRotation().getDegrees()); - gyroSim.setRate(0); + + /** + * Basic drive control. A target field-relative ChassisSpeeds (vx, vy, omega) is converted to + * specific swerve module states. + * + * @param vxMeters X velocity (forwards/backwards) + * @param vyMeters Y velocity (strafe left/right) + * @param omegaRadians Angular velocity (rotation CCW+) + * @param openLoop If swerve modules should use feedforward only and ignore velocity feedback + * control. 
+ */ + public void drive(double vxMeters, double vyMeters, double omegaRadians, boolean openLoop) { + var targetChassisSpeeds = + ChassisSpeeds.fromFieldRelativeSpeeds(vxMeters, vyMeters, omegaRadians, getHeading()); + setChassisSpeeds(targetChassisSpeeds, openLoop, false); } - poseEstimator.resetPosition(getGyroYaw(), getModulePositions(), pose); - } - - /** Get the estimated pose of the swerve drive on the field. */ - public Pose2d getPose() { - return poseEstimator.getEstimatedPosition(); - } - - /** The heading of the swerve drive's estimated pose on the field. */ - public Rotation2d getHeading() { - return getPose().getRotation(); - } - - /** Raw gyro yaw (this may not match the field heading!). */ - public Rotation2d getGyroYaw() { - return gyro.getRotation2d(); - } - - /** Get the chassis speeds of the robot (vx, vy, omega) from the swerve module states. */ - public ChassisSpeeds getChassisSpeeds() { - return kinematics.toChassisSpeeds(getModuleStates()); - } - - /** - * Get the SwerveModuleState of each swerve module (speed, angle). The returned array order - * matches the kinematics module order. - */ - public SwerveModuleState[] getModuleStates() { - return new SwerveModuleState[] { - swerveMods[0].getState(), - swerveMods[1].getState(), - swerveMods[2].getState(), - swerveMods[3].getState() - }; - } - - /** - * Get the SwerveModulePosition of each swerve module (position, angle). The returned array order - * matches the kinematics module order. - */ - public SwerveModulePosition[] getModulePositions() { - return new SwerveModulePosition[] { - swerveMods[0].getPosition(), - swerveMods[1].getPosition(), - swerveMods[2].getPosition(), - swerveMods[3].getPosition() - }; - } - - /** - * Get the Pose2d of each swerve module based on kinematics and current robot pose. The returned - * array order matches the kinematics module order. 
- */ - public Pose2d[] getModulePoses() { - Pose2d[] modulePoses = new Pose2d[swerveMods.length]; - for (int i = 0; i < swerveMods.length; i++) { - var module = swerveMods[i]; - modulePoses[i] = - getPose() - .transformBy( - new Transform2d( - module.getModuleConstants().centerOffset, module.getAbsoluteHeading())); + /** + * Command the swerve drive to the desired chassis speeds by converting them to swerve module + * states and using {@link #setModuleStates(SwerveModuleState[], boolean)}. + * + * @param targetChassisSpeeds Target robot-relative chassis speeds (vx, vy, omega). + * @param openLoop If swerve modules should use feedforward only and ignore velocity feedback + * control. + * @param steerInPlace If modules should steer to the target angle when target velocity is 0. + */ + public void setChassisSpeeds( + ChassisSpeeds targetChassisSpeeds, boolean openLoop, boolean steerInPlace) { + setModuleStates(kinematics.toSwerveModuleStates(targetChassisSpeeds), openLoop, steerInPlace); + this.targetChassisSpeeds = targetChassisSpeeds; } - return modulePoses; - } - - /** Log various drivetrain values to the dashboard. 
*/ - public void log() { - String table = "Drive/"; - Pose2d pose = getPose(); - SmartDashboard.putNumber(table + "X", pose.getX()); - SmartDashboard.putNumber(table + "Y", pose.getY()); - SmartDashboard.putNumber(table + "Heading", pose.getRotation().getDegrees()); - ChassisSpeeds chassisSpeeds = getChassisSpeeds(); - SmartDashboard.putNumber(table + "VX", chassisSpeeds.vxMetersPerSecond); - SmartDashboard.putNumber(table + "VY", chassisSpeeds.vyMetersPerSecond); - SmartDashboard.putNumber( - table + "Omega Degrees", Math.toDegrees(chassisSpeeds.omegaRadiansPerSecond)); - SmartDashboard.putNumber(table + "Target VX", targetChassisSpeeds.vxMetersPerSecond); - SmartDashboard.putNumber(table + "Target VY", targetChassisSpeeds.vyMetersPerSecond); - SmartDashboard.putNumber( - table + "Target Omega Degrees", Math.toDegrees(targetChassisSpeeds.omegaRadiansPerSecond)); - - for (SwerveModule module : swerveMods) { - module.log(); + + /** + * Command the swerve modules to the desired states. Velocites exceeding the maximum speed will be + * desaturated (while preserving the ratios between modules). + * + * @param openLoop If swerve modules should use feedforward only and ignore velocity feedback + * control. + * @param steerInPlace If modules should steer to the target angle when target velocity is 0. + */ + public void setModuleStates( + SwerveModuleState[] desiredStates, boolean openLoop, boolean steerInPlace) { + SwerveDriveKinematics.desaturateWheelSpeeds(desiredStates, kMaxLinearSpeed); + for (int i = 0; i < swerveMods.length; i++) { + swerveMods[i].setDesiredState(desiredStates[i], openLoop, steerInPlace); + } } - } - // ----- Simulation + /** Stop the swerve drive. 
*/ + public void stop() { + drive(0, 0, 0, true); + } - public void simulationPeriodic() { - // Pass commanded motor voltages into swerve drive simulation - double[] driveInputs = new double[swerveMods.length]; - double[] steerInputs = new double[swerveMods.length]; - for (int i = 0; i < swerveMods.length; i++) { - driveInputs[i] = swerveMods[i].getDriveVoltage(); - steerInputs[i] = swerveMods[i].getSteerVoltage(); + /** See {@link SwerveDrivePoseEstimator#addVisionMeasurement(Pose2d, double)}. */ + public void addVisionMeasurement(Pose2d visionMeasurement, double timestampSeconds) { + poseEstimator.addVisionMeasurement(visionMeasurement, timestampSeconds); } - swerveDriveSim.setDriveInputs(driveInputs); - swerveDriveSim.setSteerInputs(steerInputs); - - // Simulate one timestep - swerveDriveSim.update(Robot.kDefaultPeriod); - - // Update module and gyro values with simulated values - var driveStates = swerveDriveSim.getDriveStates(); - var steerStates = swerveDriveSim.getSteerStates(); - totalCurrentDraw = 0; - double[] driveCurrents = swerveDriveSim.getDriveCurrentDraw(); - for (double current : driveCurrents) totalCurrentDraw += current; - double[] steerCurrents = swerveDriveSim.getSteerCurrentDraw(); - for (double current : steerCurrents) totalCurrentDraw += current; - for (int i = 0; i < swerveMods.length; i++) { - double drivePos = driveStates.get(i).get(0, 0); - double driveRate = driveStates.get(i).get(1, 0); - double steerPos = steerStates.get(i).get(0, 0); - double steerRate = steerStates.get(i).get(1, 0); - swerveMods[i].simulationUpdate( - drivePos, driveRate, driveCurrents[i], steerPos, steerRate, steerCurrents[i]); + + /** See {@link SwerveDrivePoseEstimator#addVisionMeasurement(Pose2d, double, Matrix)}. 
*/ + public void addVisionMeasurement( + Pose2d visionMeasurement, double timestampSeconds, Matrix stdDevs) { + poseEstimator.addVisionMeasurement(visionMeasurement, timestampSeconds, stdDevs); } - gyroSim.setRate(-swerveDriveSim.getOmegaRadsPerSec()); - gyroSim.setAngle(-swerveDriveSim.getPose().getRotation().getDegrees()); - } - - /** - * The "actual" pose of the robot on the field used in simulation. This is different from the - * swerve drive's estimated pose. - */ - public Pose2d getSimPose() { - return swerveDriveSim.getPose(); - } - - public double getCurrentDraw() { - return totalCurrentDraw; - } + /** + * Reset the estimated pose of the swerve drive on the field. + * + * @param pose New robot pose. + * @param resetSimPose If the simulated robot pose should also be reset. This effectively + * teleports the robot and should only be used during the setup of the simulation world. + */ + public void resetPose(Pose2d pose, boolean resetSimPose) { + if (resetSimPose) { + swerveDriveSim.reset(pose, false); + // we shouldnt realistically be resetting pose after startup, but we will handle it anyway for + // testing + for (int i = 0; i < swerveMods.length; i++) { + swerveMods[i].simulationUpdate(0, 0, 0, 0, 0, 0); + } + gyroSim.setAngle(-pose.getRotation().getDegrees()); + gyroSim.setRate(0); + } + + poseEstimator.resetPosition(getGyroYaw(), getModulePositions(), pose); + } + + /** Get the estimated pose of the swerve drive on the field. */ + public Pose2d getPose() { + return poseEstimator.getEstimatedPosition(); + } + + /** The heading of the swerve drive's estimated pose on the field. */ + public Rotation2d getHeading() { + return getPose().getRotation(); + } + + /** Raw gyro yaw (this may not match the field heading!). */ + public Rotation2d getGyroYaw() { + return gyro.getRotation2d(); + } + + /** Get the chassis speeds of the robot (vx, vy, omega) from the swerve module states. 
*/ + public ChassisSpeeds getChassisSpeeds() { + return kinematics.toChassisSpeeds(getModuleStates()); + } + + /** + * Get the SwerveModuleState of each swerve module (speed, angle). The returned array order + * matches the kinematics module order. + */ + public SwerveModuleState[] getModuleStates() { + return new SwerveModuleState[] { + swerveMods[0].getState(), + swerveMods[1].getState(), + swerveMods[2].getState(), + swerveMods[3].getState() + }; + } + + /** + * Get the SwerveModulePosition of each swerve module (position, angle). The returned array order + * matches the kinematics module order. + */ + public SwerveModulePosition[] getModulePositions() { + return new SwerveModulePosition[] { + swerveMods[0].getPosition(), + swerveMods[1].getPosition(), + swerveMods[2].getPosition(), + swerveMods[3].getPosition() + }; + } + + /** + * Get the Pose2d of each swerve module based on kinematics and current robot pose. The returned + * array order matches the kinematics module order. + */ + public Pose2d[] getModulePoses() { + Pose2d[] modulePoses = new Pose2d[swerveMods.length]; + for (int i = 0; i < swerveMods.length; i++) { + var module = swerveMods[i]; + modulePoses[i] = + getPose() + .transformBy( + new Transform2d( + module.getModuleConstants().centerOffset, module.getAbsoluteHeading())); + } + return modulePoses; + } + + /** Log various drivetrain values to the dashboard. 
*/ + public void log() { + String table = "Drive/"; + Pose2d pose = getPose(); + SmartDashboard.putNumber(table + "X", pose.getX()); + SmartDashboard.putNumber(table + "Y", pose.getY()); + SmartDashboard.putNumber(table + "Heading", pose.getRotation().getDegrees()); + ChassisSpeeds chassisSpeeds = getChassisSpeeds(); + SmartDashboard.putNumber(table + "VX", chassisSpeeds.vxMetersPerSecond); + SmartDashboard.putNumber(table + "VY", chassisSpeeds.vyMetersPerSecond); + SmartDashboard.putNumber( + table + "Omega Degrees", Math.toDegrees(chassisSpeeds.omegaRadiansPerSecond)); + SmartDashboard.putNumber(table + "Target VX", targetChassisSpeeds.vxMetersPerSecond); + SmartDashboard.putNumber(table + "Target VY", targetChassisSpeeds.vyMetersPerSecond); + SmartDashboard.putNumber( + table + "Target Omega Degrees", Math.toDegrees(targetChassisSpeeds.omegaRadiansPerSecond)); + + for (SwerveModule module : swerveMods) { + module.log(); + } + } + + // ----- Simulation + + public void simulationPeriodic() { + // Pass commanded motor voltages into swerve drive simulation + double[] driveInputs = new double[swerveMods.length]; + double[] steerInputs = new double[swerveMods.length]; + for (int i = 0; i < swerveMods.length; i++) { + driveInputs[i] = swerveMods[i].getDriveVoltage(); + steerInputs[i] = swerveMods[i].getSteerVoltage(); + } + swerveDriveSim.setDriveInputs(driveInputs); + swerveDriveSim.setSteerInputs(steerInputs); + + // Simulate one timestep + swerveDriveSim.update(Robot.kDefaultPeriod); + + // Update module and gyro values with simulated values + var driveStates = swerveDriveSim.getDriveStates(); + var steerStates = swerveDriveSim.getSteerStates(); + totalCurrentDraw = 0; + double[] driveCurrents = swerveDriveSim.getDriveCurrentDraw(); + for (double current : driveCurrents) totalCurrentDraw += current; + double[] steerCurrents = swerveDriveSim.getSteerCurrentDraw(); + for (double current : steerCurrents) totalCurrentDraw += current; + for (int i = 0; i < 
swerveMods.length; i++) { + double drivePos = driveStates.get(i).get(0, 0); + double driveRate = driveStates.get(i).get(1, 0); + double steerPos = steerStates.get(i).get(0, 0); + double steerRate = steerStates.get(i).get(1, 0); + swerveMods[i].simulationUpdate( + drivePos, driveRate, driveCurrents[i], steerPos, steerRate, steerCurrents[i]); + } + + gyroSim.setRate(-swerveDriveSim.getOmegaRadsPerSec()); + gyroSim.setAngle(-swerveDriveSim.getPose().getRotation().getDegrees()); + } + + /** + * The "actual" pose of the robot on the field used in simulation. This is different from the + * swerve drive's estimated pose. + */ + public Pose2d getSimPose() { + return swerveDriveSim.getPose(); + } + + public double getCurrentDraw() { + return totalCurrentDraw; + } } diff --git a/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/subsystems/drivetrain/SwerveDriveSim.java b/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/subsystems/drivetrain/SwerveDriveSim.java index 841774f0d3..aff69fc703 100644 --- a/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/subsystems/drivetrain/SwerveDriveSim.java +++ b/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/subsystems/drivetrain/SwerveDriveSim.java @@ -58,437 +58,437 @@ * the Sim GUI's field. 
*/ public class SwerveDriveSim { - private final LinearSystem drivePlant; - private final double driveKs; - private final DCMotor driveMotor; - private final double driveGearing; - private final double driveWheelRadius; - private final LinearSystem steerPlant; - private final double steerKs; - private final DCMotor steerMotor; - private final double steerGearing; - - private final SwerveDriveKinematics kinematics; - private final int numModules; - - private final double[] driveInputs; - private final List> driveStates; - private final double[] steerInputs; - private final List> steerStates; - - private final Random rand = new Random(); - - // noiseless "actual" pose of the robot on the field - private Pose2d pose = new Pose2d(); - private double omegaRadsPerSec = 0; - - /** - * Creates a swerve drive simulation. - * - * @param driveFF The feedforward for the drive motors of this swerve drive. This should be in - * units of meters. - * @param driveMotor The DCMotor model for the drive motor(s) of this swerve drive's modules. This - * should not have any gearing applied. - * @param driveGearing The gear ratio of the drive system. Positive values indicate a reduction - * where one rotation of the drive wheel equals driveGearing rotations of the drive motor. - * @param driveWheelRadius The radius of the module's driving wheel. - * @param steerFF The feedforward for the steer motors of this swerve drive. This should be in - * units of radians. - * @param steerMotor The DCMotor model for the steer motor(s) of this swerve drive's modules. This - * should not have any gearing applied. - * @param steerGearing The gear ratio of the steer system. Positive values indicate a reduction - * where one rotation of the module heading/azimuth equals steerGearing rotations of the steer - * motor. - * @param kinematics The kinematics for this swerve drive. 
All swerve module information used in - * this class should match the order of the modules this kinematics object was constructed - * with. - */ - public SwerveDriveSim( - SimpleMotorFeedforward driveFF, - DCMotor driveMotor, - double driveGearing, - double driveWheelRadius, - SimpleMotorFeedforward steerFF, - DCMotor steerMotor, - double steerGearing, - SwerveDriveKinematics kinematics) { - this( - new LinearSystem( - Matrix.mat(Nat.N2(), Nat.N2()).fill(0.0, 1.0, 0.0, -driveFF.kv / driveFF.ka), - VecBuilder.fill(0.0, 1.0 / driveFF.ka), - Matrix.mat(Nat.N2(), Nat.N2()).fill(1.0, 0.0, 0.0, 1.0), - VecBuilder.fill(0.0, 0.0)), - driveFF.ks, - driveMotor, - driveGearing, - driveWheelRadius, - new LinearSystem( - Matrix.mat(Nat.N2(), Nat.N2()).fill(0.0, 1.0, 0.0, -steerFF.kv / steerFF.ka), - VecBuilder.fill(0.0, 1.0 / steerFF.ka), - Matrix.mat(Nat.N2(), Nat.N2()).fill(1.0, 0.0, 0.0, 1.0), - VecBuilder.fill(0.0, 0.0)), - steerFF.ks, - steerMotor, - steerGearing, - kinematics); - } - - /** - * Creates a swerve drive simulation. - * - * @param drivePlant The {@link LinearSystem} representing a swerve module's drive system. The - * state should be in units of meters and input in volts. - * @param driveKs The static gain in volts of the drive system's feedforward, or the minimum - * voltage to cause motion. Set this to 0 to ignore static friction. - * @param driveMotor The DCMotor model for the drive motor(s) of this swerve drive's modules. This - * should not have any gearing applied. - * @param driveGearing The gear ratio of the drive system. Positive values indicate a reduction - * where one rotation of the drive wheel equals driveGearing rotations of the drive motor. - * @param driveWheelRadius The radius of the module's driving wheel. - * @param steerPlant The {@link LinearSystem} representing a swerve module's steer system. The - * state should be in units of radians and input in volts. 
- * @param steerKs The static gain in volts of the steer system's feedforward, or the minimum - * voltage to cause motion. Set this to 0 to ignore static friction. - * @param steerMotor The DCMotor model for the steer motor(s) of this swerve drive's modules. This - * should not have any gearing applied. - * @param steerGearing The gear ratio of the steer system. Positive values indicate a reduction - * where one rotation of the module heading/azimuth equals steerGearing rotations of the steer - * motor. - * @param kinematics The kinematics for this swerve drive. All swerve module information used in - * this class should match the order of the modules this kinematics object was constructed - * with. - */ - public SwerveDriveSim( - LinearSystem drivePlant, - double driveKs, - DCMotor driveMotor, - double driveGearing, - double driveWheelRadius, - LinearSystem steerPlant, - double steerKs, - DCMotor steerMotor, - double steerGearing, - SwerveDriveKinematics kinematics) { - this.drivePlant = drivePlant; - this.driveKs = driveKs; - this.driveMotor = driveMotor; - this.driveGearing = driveGearing; - this.driveWheelRadius = driveWheelRadius; - this.steerPlant = steerPlant; - this.steerKs = steerKs; - this.steerMotor = steerMotor; - this.steerGearing = steerGearing; - - this.kinematics = kinematics; - numModules = kinematics.toSwerveModuleStates(new ChassisSpeeds()).length; - driveInputs = new double[numModules]; - driveStates = new ArrayList<>(numModules); - steerInputs = new double[numModules]; - steerStates = new ArrayList<>(numModules); - for (int i = 0; i < numModules; i++) { - driveStates.add(VecBuilder.fill(0, 0)); - steerStates.add(VecBuilder.fill(0, 0)); + private final LinearSystem drivePlant; + private final double driveKs; + private final DCMotor driveMotor; + private final double driveGearing; + private final double driveWheelRadius; + private final LinearSystem steerPlant; + private final double steerKs; + private final DCMotor steerMotor; + private final 
double steerGearing; + + private final SwerveDriveKinematics kinematics; + private final int numModules; + + private final double[] driveInputs; + private final List> driveStates; + private final double[] steerInputs; + private final List> steerStates; + + private final Random rand = new Random(); + + // noiseless "actual" pose of the robot on the field + private Pose2d pose = new Pose2d(); + private double omegaRadsPerSec = 0; + + /** + * Creates a swerve drive simulation. + * + * @param driveFF The feedforward for the drive motors of this swerve drive. This should be in + * units of meters. + * @param driveMotor The DCMotor model for the drive motor(s) of this swerve drive's modules. This + * should not have any gearing applied. + * @param driveGearing The gear ratio of the drive system. Positive values indicate a reduction + * where one rotation of the drive wheel equals driveGearing rotations of the drive motor. + * @param driveWheelRadius The radius of the module's driving wheel. + * @param steerFF The feedforward for the steer motors of this swerve drive. This should be in + * units of radians. + * @param steerMotor The DCMotor model for the steer motor(s) of this swerve drive's modules. This + * should not have any gearing applied. + * @param steerGearing The gear ratio of the steer system. Positive values indicate a reduction + * where one rotation of the module heading/azimuth equals steerGearing rotations of the steer + * motor. + * @param kinematics The kinematics for this swerve drive. All swerve module information used in + * this class should match the order of the modules this kinematics object was constructed + * with. 
+ */ + public SwerveDriveSim( + SimpleMotorFeedforward driveFF, + DCMotor driveMotor, + double driveGearing, + double driveWheelRadius, + SimpleMotorFeedforward steerFF, + DCMotor steerMotor, + double steerGearing, + SwerveDriveKinematics kinematics) { + this( + new LinearSystem( + Matrix.mat(Nat.N2(), Nat.N2()).fill(0.0, 1.0, 0.0, -driveFF.kv / driveFF.ka), + VecBuilder.fill(0.0, 1.0 / driveFF.ka), + Matrix.mat(Nat.N2(), Nat.N2()).fill(1.0, 0.0, 0.0, 1.0), + VecBuilder.fill(0.0, 0.0)), + driveFF.ks, + driveMotor, + driveGearing, + driveWheelRadius, + new LinearSystem( + Matrix.mat(Nat.N2(), Nat.N2()).fill(0.0, 1.0, 0.0, -steerFF.kv / steerFF.ka), + VecBuilder.fill(0.0, 1.0 / steerFF.ka), + Matrix.mat(Nat.N2(), Nat.N2()).fill(1.0, 0.0, 0.0, 1.0), + VecBuilder.fill(0.0, 0.0)), + steerFF.ks, + steerMotor, + steerGearing, + kinematics); } - } - - /** - * Sets the input voltages of the drive motors. - * - * @param inputs Input voltages. These should match the module order used in the kinematics. - */ - public void setDriveInputs(double... inputs) { - final double battVoltage = RobotController.getBatteryVoltage(); - for (int i = 0; i < driveInputs.length; i++) { - double input = inputs[i]; - driveInputs[i] = MathUtil.clamp(input, -battVoltage, battVoltage); + + /** + * Creates a swerve drive simulation. + * + * @param drivePlant The {@link LinearSystem} representing a swerve module's drive system. The + * state should be in units of meters and input in volts. + * @param driveKs The static gain in volts of the drive system's feedforward, or the minimum + * voltage to cause motion. Set this to 0 to ignore static friction. + * @param driveMotor The DCMotor model for the drive motor(s) of this swerve drive's modules. This + * should not have any gearing applied. + * @param driveGearing The gear ratio of the drive system. Positive values indicate a reduction + * where one rotation of the drive wheel equals driveGearing rotations of the drive motor. 
+ * @param driveWheelRadius The radius of the module's driving wheel. + * @param steerPlant The {@link LinearSystem} representing a swerve module's steer system. The + * state should be in units of radians and input in volts. + * @param steerKs The static gain in volts of the steer system's feedforward, or the minimum + * voltage to cause motion. Set this to 0 to ignore static friction. + * @param steerMotor The DCMotor model for the steer motor(s) of this swerve drive's modules. This + * should not have any gearing applied. + * @param steerGearing The gear ratio of the steer system. Positive values indicate a reduction + * where one rotation of the module heading/azimuth equals steerGearing rotations of the steer + * motor. + * @param kinematics The kinematics for this swerve drive. All swerve module information used in + * this class should match the order of the modules this kinematics object was constructed + * with. + */ + public SwerveDriveSim( + LinearSystem drivePlant, + double driveKs, + DCMotor driveMotor, + double driveGearing, + double driveWheelRadius, + LinearSystem steerPlant, + double steerKs, + DCMotor steerMotor, + double steerGearing, + SwerveDriveKinematics kinematics) { + this.drivePlant = drivePlant; + this.driveKs = driveKs; + this.driveMotor = driveMotor; + this.driveGearing = driveGearing; + this.driveWheelRadius = driveWheelRadius; + this.steerPlant = steerPlant; + this.steerKs = steerKs; + this.steerMotor = steerMotor; + this.steerGearing = steerGearing; + + this.kinematics = kinematics; + numModules = kinematics.toSwerveModuleStates(new ChassisSpeeds()).length; + driveInputs = new double[numModules]; + driveStates = new ArrayList<>(numModules); + steerInputs = new double[numModules]; + steerStates = new ArrayList<>(numModules); + for (int i = 0; i < numModules; i++) { + driveStates.add(VecBuilder.fill(0, 0)); + steerStates.add(VecBuilder.fill(0, 0)); + } + } + + /** + * Sets the input voltages of the drive motors. 
+ * + * @param inputs Input voltages. These should match the module order used in the kinematics. + */ + public void setDriveInputs(double... inputs) { + final double battVoltage = RobotController.getBatteryVoltage(); + for (int i = 0; i < driveInputs.length; i++) { + double input = inputs[i]; + driveInputs[i] = MathUtil.clamp(input, -battVoltage, battVoltage); + } + } + + /** + * Sets the input voltages of the steer motors. + * + * @param inputs Input voltages. These should match the module order used in the kinematics. + */ + public void setSteerInputs(double... inputs) { + final double battVoltage = RobotController.getBatteryVoltage(); + for (int i = 0; i < steerInputs.length; i++) { + double input = inputs[i]; + steerInputs[i] = MathUtil.clamp(input, -battVoltage, battVoltage); + } } - } - - /** - * Sets the input voltages of the steer motors. - * - * @param inputs Input voltages. These should match the module order used in the kinematics. - */ - public void setSteerInputs(double... inputs) { - final double battVoltage = RobotController.getBatteryVoltage(); - for (int i = 0; i < steerInputs.length; i++) { - double input = inputs[i]; - steerInputs[i] = MathUtil.clamp(input, -battVoltage, battVoltage); + + /** + * Computes the new x given the old x and the control input. Includes the effect of static + * friction. + * + * @param discA The discretized system matrix. + * @param discB The discretized input matrix. + * @param x The position/velocity state of the drive/steer system. + * @param input The input voltage. + * @param ks The kS value of the feedforward model. + * @return The updated x, including the effect of static friction. 
+ */ + protected static Matrix calculateX( + Matrix discA, Matrix discB, Matrix x, double input, double ks) { + var Ax = discA.times(x); + double nextStateVel = Ax.get(1, 0); + // input required to make next state vel == 0 + double inputToStop = nextStateVel / -discB.get(1, 0); + // ks effect on system velocity + double ksSystemEffect = MathUtil.clamp(inputToStop, -ks, ks); + + // after ks system effect: + nextStateVel += discB.get(1, 0) * ksSystemEffect; + inputToStop = nextStateVel / -discB.get(1, 0); + double signToStop = Math.signum(inputToStop); + double inputSign = Math.signum(input); + double ksInputEffect = 0; + + // system velocity was reduced to 0, resist any input + if (Math.abs(ksSystemEffect) < ks) { + double absInput = Math.abs(input); + ksInputEffect = -MathUtil.clamp(ks * inputSign, -absInput, absInput); + } + // non-zero system velocity, but input causes velocity sign change. Resist input after sign + // change + else if ((input * signToStop) > (inputToStop * signToStop)) { + double absInput = Math.abs(input - inputToStop); + ksInputEffect = -MathUtil.clamp(ks * inputSign, -absInput, absInput); + } + + // calculate next x including static friction + var Bu = discB.times(VecBuilder.fill(input + ksSystemEffect + ksInputEffect)); + return Ax.plus(Bu); + } + + /** + * Update the drivetrain states with the given timestep. + * + * @param dtSeconds The timestep in seconds. This should be the robot loop period. 
+ */ + public void update(double dtSeconds) { + var driveDiscAB = Discretization.discretizeAB(drivePlant.getA(), drivePlant.getB(), dtSeconds); + var steerDiscAB = Discretization.discretizeAB(steerPlant.getA(), steerPlant.getB(), dtSeconds); + + var moduleDeltas = new SwerveModulePosition[numModules]; + for (int i = 0; i < numModules; i++) { + double prevDriveStatePos = driveStates.get(i).get(0, 0); + driveStates.set( + i, + calculateX( + driveDiscAB.getFirst(), + driveDiscAB.getSecond(), + driveStates.get(i), + driveInputs[i], + driveKs)); + double currDriveStatePos = driveStates.get(i).get(0, 0); + steerStates.set( + i, + calculateX( + steerDiscAB.getFirst(), + steerDiscAB.getSecond(), + steerStates.get(i), + steerInputs[i], + steerKs)); + double currSteerStatePos = steerStates.get(i).get(0, 0); + moduleDeltas[i] = + new SwerveModulePosition( + currDriveStatePos - prevDriveStatePos, new Rotation2d(currSteerStatePos)); + } + + var twist = kinematics.toTwist2d(moduleDeltas); + pose = pose.exp(twist); + omegaRadsPerSec = twist.dtheta / dtSeconds; + } + + /** + * Reset the simulated swerve drive state. This effectively teleports the robot and should only be + * used during the setup of the simulation world. + * + * @param pose The new pose of the simulated swerve drive. + * @param preserveMotion If true, the current module states will be preserved. Otherwise, they + * will be reset to zeros. + */ + public void reset(Pose2d pose, boolean preserveMotion) { + this.pose = pose; + if (!preserveMotion) { + for (int i = 0; i < numModules; i++) { + driveStates.set(i, VecBuilder.fill(0, 0)); + steerStates.set(i, VecBuilder.fill(0, 0)); + } + omegaRadsPerSec = 0; + } } - } - - /** - * Computes the new x given the old x and the control input. Includes the effect of static - * friction. - * - * @param discA The discretized system matrix. - * @param discB The discretized input matrix. - * @param x The position/velocity state of the drive/steer system. 
- * @param input The input voltage. - * @param ks The kS value of the feedforward model. - * @return The updated x, including the effect of static friction. - */ - protected static Matrix calculateX( - Matrix discA, Matrix discB, Matrix x, double input, double ks) { - var Ax = discA.times(x); - double nextStateVel = Ax.get(1, 0); - // input required to make next state vel == 0 - double inputToStop = nextStateVel / -discB.get(1, 0); - // ks effect on system velocity - double ksSystemEffect = MathUtil.clamp(inputToStop, -ks, ks); - - // after ks system effect: - nextStateVel += discB.get(1, 0) * ksSystemEffect; - inputToStop = nextStateVel / -discB.get(1, 0); - double signToStop = Math.signum(inputToStop); - double inputSign = Math.signum(input); - double ksInputEffect = 0; - - // system velocity was reduced to 0, resist any input - if (Math.abs(ksSystemEffect) < ks) { - double absInput = Math.abs(input); - ksInputEffect = -MathUtil.clamp(ks * inputSign, -absInput, absInput); + + /** + * Reset the simulated swerve drive state. This effectively teleports the robot and should only be + * used during the setup of the simulation world. + * + * @param pose The new pose of the simulated swerve drive. + * @param moduleDriveStates The new states of the modules' drive systems in [meters, meters/sec]. + * These should match the module order used in the kinematics. + * @param moduleSteerStates The new states of the modules' steer systems in [radians, + * radians/sec]. These should match the module order used in the kinematics. 
+ */ + public void reset( + Pose2d pose, List> moduleDriveStates, List> moduleSteerStates) { + if (moduleDriveStates.size() != driveStates.size() + || moduleSteerStates.size() != steerStates.size()) + throw new IllegalArgumentException("Provided module states do not match number of modules!"); + this.pose = pose; + for (int i = 0; i < numModules; i++) { + driveStates.set(i, moduleDriveStates.get(i).copy()); + steerStates.set(i, moduleSteerStates.get(i).copy()); + } + omegaRadsPerSec = kinematics.toChassisSpeeds(getModuleStates()).omegaRadiansPerSecond; } - // non-zero system velocity, but input causes velocity sign change. Resist input after sign - // change - else if ((input * signToStop) > (inputToStop * signToStop)) { - double absInput = Math.abs(input - inputToStop); - ksInputEffect = -MathUtil.clamp(ks * inputSign, -absInput, absInput); + + /** + * Get the pose of the simulated swerve drive. Note that this is the "actual" pose of the robot in + * the simulation world, without any noise. If you are simulating a pose estimator, this pose + * should only be used for visualization or camera simulation. This should be called after {@link + * #update(double)}. + */ + public Pose2d getPose() { + return pose; } - // calculate next x including static friction - var Bu = discB.times(VecBuilder.fill(input + ksSystemEffect + ksInputEffect)); - return Ax.plus(Bu); - } - - /** - * Update the drivetrain states with the given timestep. - * - * @param dtSeconds The timestep in seconds. This should be the robot loop period. 
- */ - public void update(double dtSeconds) { - var driveDiscAB = Discretization.discretizeAB(drivePlant.getA(), drivePlant.getB(), dtSeconds); - var steerDiscAB = Discretization.discretizeAB(steerPlant.getA(), steerPlant.getB(), dtSeconds); - - var moduleDeltas = new SwerveModulePosition[numModules]; - for (int i = 0; i < numModules; i++) { - double prevDriveStatePos = driveStates.get(i).get(0, 0); - driveStates.set( - i, - calculateX( - driveDiscAB.getFirst(), - driveDiscAB.getSecond(), - driveStates.get(i), - driveInputs[i], - driveKs)); - double currDriveStatePos = driveStates.get(i).get(0, 0); - steerStates.set( - i, - calculateX( - steerDiscAB.getFirst(), - steerDiscAB.getSecond(), - steerStates.get(i), - steerInputs[i], - steerKs)); - double currSteerStatePos = steerStates.get(i).get(0, 0); - moduleDeltas[i] = - new SwerveModulePosition( - currDriveStatePos - prevDriveStatePos, new Rotation2d(currSteerStatePos)); + /** + * Get the {@link SwerveModulePosition} of each module. The returned array order matches the + * kinematics module order. This should be called after {@link #update(double)}. + */ + public SwerveModulePosition[] getModulePositions() { + var positions = new SwerveModulePosition[numModules]; + for (int i = 0; i < numModules; i++) { + positions[i] = + new SwerveModulePosition( + driveStates.get(i).get(0, 0), new Rotation2d(steerStates.get(i).get(0, 0))); + } + return positions; } - var twist = kinematics.toTwist2d(moduleDeltas); - pose = pose.exp(twist); - omegaRadsPerSec = twist.dtheta / dtSeconds; - } - - /** - * Reset the simulated swerve drive state. This effectively teleports the robot and should only be - * used during the setup of the simulation world. - * - * @param pose The new pose of the simulated swerve drive. - * @param preserveMotion If true, the current module states will be preserved. Otherwise, they - * will be reset to zeros. 
- */ - public void reset(Pose2d pose, boolean preserveMotion) { - this.pose = pose; - if (!preserveMotion) { - for (int i = 0; i < numModules; i++) { - driveStates.set(i, VecBuilder.fill(0, 0)); - steerStates.set(i, VecBuilder.fill(0, 0)); - } - omegaRadsPerSec = 0; + /** + * Get the {@link SwerveModulePosition} of each module with rudimentary noise simulation. The + * returned array order matches the kinematics module order. This should be called after {@link + * #update(double)}. + * + * @param driveStdDev The standard deviation in meters of the drive wheel position. + * @param steerStdDev The standard deviation in radians of the steer angle. + */ + public SwerveModulePosition[] getNoisyModulePositions(double driveStdDev, double steerStdDev) { + var positions = new SwerveModulePosition[numModules]; + for (int i = 0; i < numModules; i++) { + positions[i] = + new SwerveModulePosition( + driveStates.get(i).get(0, 0) + rand.nextGaussian() * driveStdDev, + new Rotation2d(steerStates.get(i).get(0, 0) + rand.nextGaussian() * steerStdDev)); + } + return positions; } - } - - /** - * Reset the simulated swerve drive state. This effectively teleports the robot and should only be - * used during the setup of the simulation world. - * - * @param pose The new pose of the simulated swerve drive. - * @param moduleDriveStates The new states of the modules' drive systems in [meters, meters/sec]. - * These should match the module order used in the kinematics. - * @param moduleSteerStates The new states of the modules' steer systems in [radians, - * radians/sec]. These should match the module order used in the kinematics. 
- */ - public void reset( - Pose2d pose, List> moduleDriveStates, List> moduleSteerStates) { - if (moduleDriveStates.size() != driveStates.size() - || moduleSteerStates.size() != steerStates.size()) - throw new IllegalArgumentException("Provided module states do not match number of modules!"); - this.pose = pose; - for (int i = 0; i < numModules; i++) { - driveStates.set(i, moduleDriveStates.get(i).copy()); - steerStates.set(i, moduleSteerStates.get(i).copy()); + + /** + * Get the {@link SwerveModuleState} of each module. The returned array order matches the + * kinematics module order. This should be called after {@link #update(double)}. + */ + public SwerveModuleState[] getModuleStates() { + var positions = new SwerveModuleState[numModules]; + for (int i = 0; i < numModules; i++) { + positions[i] = + new SwerveModuleState( + driveStates.get(i).get(1, 0), new Rotation2d(steerStates.get(i).get(0, 0))); + } + return positions; } - omegaRadsPerSec = kinematics.toChassisSpeeds(getModuleStates()).omegaRadiansPerSecond; - } - - /** - * Get the pose of the simulated swerve drive. Note that this is the "actual" pose of the robot in - * the simulation world, without any noise. If you are simulating a pose estimator, this pose - * should only be used for visualization or camera simulation. This should be called after {@link - * #update(double)}. - */ - public Pose2d getPose() { - return pose; - } - - /** - * Get the {@link SwerveModulePosition} of each module. The returned array order matches the - * kinematics module order. This should be called after {@link #update(double)}. - */ - public SwerveModulePosition[] getModulePositions() { - var positions = new SwerveModulePosition[numModules]; - for (int i = 0; i < numModules; i++) { - positions[i] = - new SwerveModulePosition( - driveStates.get(i).get(0, 0), new Rotation2d(steerStates.get(i).get(0, 0))); + + /** + * Get the state of each module's drive system in [meters, meters/sec]. 
The returned list order + * matches the kinematics module order. This should be called after {@link #update(double)}. + */ + public List> getDriveStates() { + List> states = new ArrayList<>(); + for (int i = 0; i < driveStates.size(); i++) { + states.add(driveStates.get(i).copy()); + } + return states; } - return positions; - } - - /** - * Get the {@link SwerveModulePosition} of each module with rudimentary noise simulation. The - * returned array order matches the kinematics module order. This should be called after {@link - * #update(double)}. - * - * @param driveStdDev The standard deviation in meters of the drive wheel position. - * @param steerStdDev The standard deviation in radians of the steer angle. - */ - public SwerveModulePosition[] getNoisyModulePositions(double driveStdDev, double steerStdDev) { - var positions = new SwerveModulePosition[numModules]; - for (int i = 0; i < numModules; i++) { - positions[i] = - new SwerveModulePosition( - driveStates.get(i).get(0, 0) + rand.nextGaussian() * driveStdDev, - new Rotation2d(steerStates.get(i).get(0, 0) + rand.nextGaussian() * steerStdDev)); + + /** + * Get the state of each module's steer system in [radians, radians/sec]. The returned list order + * matches the kinematics module order. This should be called after {@link #update(double)}. + */ + public List> getSteerStates() { + List> states = new ArrayList<>(); + for (int i = 0; i < steerStates.size(); i++) { + states.add(steerStates.get(i).copy()); + } + return states; } - return positions; - } - - /** - * Get the {@link SwerveModuleState} of each module. The returned array order matches the - * kinematics module order. This should be called after {@link #update(double)}. 
- */ - public SwerveModuleState[] getModuleStates() { - var positions = new SwerveModuleState[numModules]; - for (int i = 0; i < numModules; i++) { - positions[i] = - new SwerveModuleState( - driveStates.get(i).get(1, 0), new Rotation2d(steerStates.get(i).get(0, 0))); + + /** + * Get the angular velocity of the robot, which can be useful for gyro simulation. CCW positive. + * This should be called after {@link #update(double)}. + */ + public double getOmegaRadsPerSec() { + return omegaRadsPerSec; } - return positions; - } - - /** - * Get the state of each module's drive system in [meters, meters/sec]. The returned list order - * matches the kinematics module order. This should be called after {@link #update(double)}. - */ - public List> getDriveStates() { - List> states = new ArrayList<>(); - for (int i = 0; i < driveStates.size(); i++) { - states.add(driveStates.get(i).copy()); + + /** + * Calculates the current drawn from the battery by the motor(s). Ignores regenerative current + * from back-emf. + * + * @param motor The motor(s) used. + * @param radiansPerSec The velocity of the motor in radians per second. + * @param inputVolts The voltage commanded by the motor controller (battery voltage * duty cycle). + * @param battVolts The voltage of the battery. + */ + protected static double getCurrentDraw( + DCMotor motor, double radiansPerSec, double inputVolts, double battVolts) { + double effVolts = inputVolts - radiansPerSec / motor.KvRadPerSecPerVolt; + // ignore regeneration + if (inputVolts >= 0) effVolts = MathUtil.clamp(effVolts, 0, inputVolts); + else effVolts = MathUtil.clamp(effVolts, inputVolts, 0); + // calculate battery current + return (inputVolts / battVolts) * (effVolts / motor.rOhms); } - return states; - } - - /** - * Get the state of each module's steer system in [radians, radians/sec]. The returned list order - * matches the kinematics module order. This should be called after {@link #update(double)}. 
- */ - public List> getSteerStates() { - List> states = new ArrayList<>(); - for (int i = 0; i < steerStates.size(); i++) { - states.add(steerStates.get(i).copy()); + + /** + * Get the current draw in amps for each module's drive motor(s). This should be called after + * {@link #update(double)}. The returned array order matches the kinematics module order. + */ + public double[] getDriveCurrentDraw() { + double[] currents = new double[numModules]; + for (int i = 0; i < numModules; i++) { + double radiansPerSec = driveStates.get(i).get(1, 0) * driveGearing / driveWheelRadius; + currents[i] = + getCurrentDraw( + driveMotor, radiansPerSec, driveInputs[i], RobotController.getBatteryVoltage()); + } + return currents; } - return states; - } - - /** - * Get the angular velocity of the robot, which can be useful for gyro simulation. CCW positive. - * This should be called after {@link #update(double)}. - */ - public double getOmegaRadsPerSec() { - return omegaRadsPerSec; - } - - /** - * Calculates the current drawn from the battery by the motor(s). Ignores regenerative current - * from back-emf. - * - * @param motor The motor(s) used. - * @param radiansPerSec The velocity of the motor in radians per second. - * @param inputVolts The voltage commanded by the motor controller (battery voltage * duty cycle). - * @param battVolts The voltage of the battery. - */ - protected static double getCurrentDraw( - DCMotor motor, double radiansPerSec, double inputVolts, double battVolts) { - double effVolts = inputVolts - radiansPerSec / motor.KvRadPerSecPerVolt; - // ignore regeneration - if (inputVolts >= 0) effVolts = MathUtil.clamp(effVolts, 0, inputVolts); - else effVolts = MathUtil.clamp(effVolts, inputVolts, 0); - // calculate battery current - return (inputVolts / battVolts) * (effVolts / motor.rOhms); - } - - /** - * Get the current draw in amps for each module's drive motor(s). This should be called after - * {@link #update(double)}. 
The returned array order matches the kinematics module order. - */ - public double[] getDriveCurrentDraw() { - double[] currents = new double[numModules]; - for (int i = 0; i < numModules; i++) { - double radiansPerSec = driveStates.get(i).get(1, 0) * driveGearing / driveWheelRadius; - currents[i] = - getCurrentDraw( - driveMotor, radiansPerSec, driveInputs[i], RobotController.getBatteryVoltage()); + + /** + * Get the current draw in amps for each module's steer motor(s). This should be called after + * {@link #update(double)}. The returned array order matches the kinematics module order. + */ + public double[] getSteerCurrentDraw() { + double[] currents = new double[numModules]; + for (int i = 0; i < numModules; i++) { + double radiansPerSec = steerStates.get(i).get(1, 0) * steerGearing; + currents[i] = + getCurrentDraw( + steerMotor, radiansPerSec, steerInputs[i], RobotController.getBatteryVoltage()); + } + return currents; } - return currents; - } - - /** - * Get the current draw in amps for each module's steer motor(s). This should be called after - * {@link #update(double)}. The returned array order matches the kinematics module order. - */ - public double[] getSteerCurrentDraw() { - double[] currents = new double[numModules]; - for (int i = 0; i < numModules; i++) { - double radiansPerSec = steerStates.get(i).get(1, 0) * steerGearing; - currents[i] = - getCurrentDraw( - steerMotor, radiansPerSec, steerInputs[i], RobotController.getBatteryVoltage()); + + /** + * Get the total current draw in amps of all swerve motors. This should be called after {@link + * #update(double)}. + */ + public double getTotalCurrentDraw() { + double sum = 0; + for (double val : getDriveCurrentDraw()) sum += val; + for (double val : getSteerCurrentDraw()) sum += val; + return sum; } - return currents; - } - - /** - * Get the total current draw in amps of all swerve motors. This should be called after {@link - * #update(double)}. 
- */ - public double getTotalCurrentDraw() { - double sum = 0; - for (double val : getDriveCurrentDraw()) sum += val; - for (double val : getSteerCurrentDraw()) sum += val; - return sum; - } } diff --git a/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/subsystems/drivetrain/SwerveModule.java b/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/subsystems/drivetrain/SwerveModule.java index bafbd24068..4615d29217 100644 --- a/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/subsystems/drivetrain/SwerveModule.java +++ b/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/subsystems/drivetrain/SwerveModule.java @@ -39,154 +39,154 @@ import edu.wpi.first.wpilibj.smartdashboard.SmartDashboard; public class SwerveModule { - // --- Module Constants - private final ModuleConstants moduleConstants; - - // --- Hardware - private final PWMSparkMax driveMotor; - private final Encoder driveEncoder; - private final PWMSparkMax steerMotor; - private final Encoder steerEncoder; - - // --- Control - private SwerveModuleState desiredState = new SwerveModuleState(); - private boolean openLoop = false; - - // Simple PID feedback controllers run on the roborio - private PIDController drivePidController = new PIDController(kDriveKP, kDriveKI, kDriveKD); - // (A profiled steering PID controller may give better results by utilizing feedforward.) 
- private PIDController steerPidController = new PIDController(kSteerKP, kSteerKI, kSteerKD); - - // --- Simulation - private final EncoderSim driveEncoderSim; - private double driveCurrentSim = 0; - private final EncoderSim steerEncoderSim; - private double steerCurrentSim = 0; - - public SwerveModule(ModuleConstants moduleConstants) { - this.moduleConstants = moduleConstants; - - driveMotor = new PWMSparkMax(moduleConstants.driveMotorID); - driveEncoder = new Encoder(moduleConstants.driveEncoderA, moduleConstants.driveEncoderB); - driveEncoder.setDistancePerPulse(kDriveDistPerPulse); - steerMotor = new PWMSparkMax(moduleConstants.steerMotorID); - steerEncoder = new Encoder(moduleConstants.steerEncoderA, moduleConstants.steerEncoderB); - steerEncoder.setDistancePerPulse(kSteerRadPerPulse); - - steerPidController.enableContinuousInput(-Math.PI, Math.PI); + // --- Module Constants + private final ModuleConstants moduleConstants; + + // --- Hardware + private final PWMSparkMax driveMotor; + private final Encoder driveEncoder; + private final PWMSparkMax steerMotor; + private final Encoder steerEncoder; + + // --- Control + private SwerveModuleState desiredState = new SwerveModuleState(); + private boolean openLoop = false; + + // Simple PID feedback controllers run on the roborio + private PIDController drivePidController = new PIDController(kDriveKP, kDriveKI, kDriveKD); + // (A profiled steering PID controller may give better results by utilizing feedforward.) 
+ private PIDController steerPidController = new PIDController(kSteerKP, kSteerKI, kSteerKD); // --- Simulation - driveEncoderSim = new EncoderSim(driveEncoder); - steerEncoderSim = new EncoderSim(steerEncoder); - } - - public void periodic() { - // Perform PID feedback control to steer the module to the target angle - double steerPid = - steerPidController.calculate( - getAbsoluteHeading().getRadians(), desiredState.angle.getRadians()); - steerMotor.setVoltage(steerPid); - - // Use feedforward model to translate target drive velocities into voltages - double driveFF = kDriveFF.calculate(desiredState.speedMetersPerSecond); - double drivePid = 0; - if (!openLoop) { - // Perform PID feedback control to compensate for disturbances - drivePid = - drivePidController.calculate(driveEncoder.getRate(), desiredState.speedMetersPerSecond); + private final EncoderSim driveEncoderSim; + private double driveCurrentSim = 0; + private final EncoderSim steerEncoderSim; + private double steerCurrentSim = 0; + + public SwerveModule(ModuleConstants moduleConstants) { + this.moduleConstants = moduleConstants; + + driveMotor = new PWMSparkMax(moduleConstants.driveMotorID); + driveEncoder = new Encoder(moduleConstants.driveEncoderA, moduleConstants.driveEncoderB); + driveEncoder.setDistancePerPulse(kDriveDistPerPulse); + steerMotor = new PWMSparkMax(moduleConstants.steerMotorID); + steerEncoder = new Encoder(moduleConstants.steerEncoderA, moduleConstants.steerEncoderB); + steerEncoder.setDistancePerPulse(kSteerRadPerPulse); + + steerPidController.enableContinuousInput(-Math.PI, Math.PI); + + // --- Simulation + driveEncoderSim = new EncoderSim(driveEncoder); + steerEncoderSim = new EncoderSim(steerEncoder); + } + + public void periodic() { + // Perform PID feedback control to steer the module to the target angle + double steerPid = + steerPidController.calculate( + getAbsoluteHeading().getRadians(), desiredState.angle.getRadians()); + steerMotor.setVoltage(steerPid); + + // Use 
feedforward model to translate target drive velocities into voltages + double driveFF = kDriveFF.calculate(desiredState.speedMetersPerSecond); + double drivePid = 0; + if (!openLoop) { + // Perform PID feedback control to compensate for disturbances + drivePid = + drivePidController.calculate(driveEncoder.getRate(), desiredState.speedMetersPerSecond); + } + + driveMotor.setVoltage(driveFF + drivePid); + } + + /** + * Command this swerve module to the desired angle and velocity. + * + * @param steerInPlace If modules should steer to target angle when target velocity is 0 + */ + public void setDesiredState( + SwerveModuleState desiredState, boolean openLoop, boolean steerInPlace) { + Rotation2d currentRotation = getAbsoluteHeading(); + // Avoid turning more than 90 degrees by inverting speed on large angle changes + desiredState = SwerveModuleState.optimize(desiredState, currentRotation); + + this.desiredState = desiredState; + } + + /** Module heading reported by steering encoder. */ + public Rotation2d getAbsoluteHeading() { + return Rotation2d.fromRadians(steerEncoder.getDistance()); + } + + /** + * {@link SwerveModuleState} describing absolute module rotation and velocity in meters per + * second. + */ + public SwerveModuleState getState() { + return new SwerveModuleState(driveEncoder.getRate(), getAbsoluteHeading()); + } + + /** {@link SwerveModulePosition} describing absolute module rotation and position in meters. */ + public SwerveModulePosition getPosition() { + return new SwerveModulePosition(driveEncoder.getDistance(), getAbsoluteHeading()); } - driveMotor.setVoltage(driveFF + drivePid); - } - - /** - * Command this swerve module to the desired angle and velocity. 
- * - * @param steerInPlace If modules should steer to target angle when target velocity is 0 - */ - public void setDesiredState( - SwerveModuleState desiredState, boolean openLoop, boolean steerInPlace) { - Rotation2d currentRotation = getAbsoluteHeading(); - // Avoid turning more than 90 degrees by inverting speed on large angle changes - desiredState = SwerveModuleState.optimize(desiredState, currentRotation); - - this.desiredState = desiredState; - } - - /** Module heading reported by steering encoder. */ - public Rotation2d getAbsoluteHeading() { - return Rotation2d.fromRadians(steerEncoder.getDistance()); - } - - /** - * {@link SwerveModuleState} describing absolute module rotation and velocity in meters per - * second. - */ - public SwerveModuleState getState() { - return new SwerveModuleState(driveEncoder.getRate(), getAbsoluteHeading()); - } - - /** {@link SwerveModulePosition} describing absolute module rotation and position in meters. */ - public SwerveModulePosition getPosition() { - return new SwerveModulePosition(driveEncoder.getDistance(), getAbsoluteHeading()); - } - - /** Voltage of the drive motor */ - public double getDriveVoltage() { - return driveMotor.get() * RobotController.getBatteryVoltage(); - } - - /** Voltage of the steer motor */ - public double getSteerVoltage() { - return steerMotor.get() * RobotController.getBatteryVoltage(); - } - - /** - * Constants about this module, like motor IDs, encoder angle offset, and translation from center. 
- */ - public ModuleConstants getModuleConstants() { - return moduleConstants; - } - - public void log() { - var state = getState(); - - String table = "Module " + moduleConstants.moduleNum + "/"; - SmartDashboard.putNumber( - table + "Steer Degrees", Math.toDegrees(MathUtil.angleModulus(state.angle.getRadians()))); - SmartDashboard.putNumber( - table + "Steer Target Degrees", Math.toDegrees(steerPidController.getSetpoint())); - SmartDashboard.putNumber( - table + "Drive Velocity Feet", Units.metersToFeet(state.speedMetersPerSecond)); - SmartDashboard.putNumber( - table + "Drive Velocity Target Feet", - Units.metersToFeet(desiredState.speedMetersPerSecond)); - SmartDashboard.putNumber(table + "Drive Current", driveCurrentSim); - SmartDashboard.putNumber(table + "Steer Current", steerCurrentSim); - } - - // ----- Simulation - - public void simulationUpdate( - double driveEncoderDist, - double driveEncoderRate, - double driveCurrent, - double steerEncoderDist, - double steerEncoderRate, - double steerCurrent) { - driveEncoderSim.setDistance(driveEncoderDist); - driveEncoderSim.setRate(driveEncoderRate); - this.driveCurrentSim = driveCurrent; - steerEncoderSim.setDistance(steerEncoderDist); - steerEncoderSim.setRate(steerEncoderRate); - this.steerCurrentSim = steerCurrent; - } - - public double getDriveCurrentSim() { - return driveCurrentSim; - } - - public double getSteerCurrentSim() { - return steerCurrentSim; - } + /** Voltage of the drive motor */ + public double getDriveVoltage() { + return driveMotor.get() * RobotController.getBatteryVoltage(); + } + + /** Voltage of the steer motor */ + public double getSteerVoltage() { + return steerMotor.get() * RobotController.getBatteryVoltage(); + } + + /** + * Constants about this module, like motor IDs, encoder angle offset, and translation from center. 
+ */ + public ModuleConstants getModuleConstants() { + return moduleConstants; + } + + public void log() { + var state = getState(); + + String table = "Module " + moduleConstants.moduleNum + "/"; + SmartDashboard.putNumber( + table + "Steer Degrees", Math.toDegrees(MathUtil.angleModulus(state.angle.getRadians()))); + SmartDashboard.putNumber( + table + "Steer Target Degrees", Math.toDegrees(steerPidController.getSetpoint())); + SmartDashboard.putNumber( + table + "Drive Velocity Feet", Units.metersToFeet(state.speedMetersPerSecond)); + SmartDashboard.putNumber( + table + "Drive Velocity Target Feet", + Units.metersToFeet(desiredState.speedMetersPerSecond)); + SmartDashboard.putNumber(table + "Drive Current", driveCurrentSim); + SmartDashboard.putNumber(table + "Steer Current", steerCurrentSim); + } + + // ----- Simulation + + public void simulationUpdate( + double driveEncoderDist, + double driveEncoderRate, + double driveCurrent, + double steerEncoderDist, + double steerEncoderRate, + double steerCurrent) { + driveEncoderSim.setDistance(driveEncoderDist); + driveEncoderSim.setRate(driveEncoderRate); + this.driveCurrentSim = driveCurrent; + steerEncoderSim.setDistance(steerEncoderDist); + steerEncoderSim.setRate(steerEncoderRate); + this.steerCurrentSim = steerCurrent; + } + + public double getDriveCurrentSim() { + return driveCurrentSim; + } + + public double getSteerCurrentSim() { + return steerCurrentSim; + } }