From 4c2aa16c1c3512d0f99b0df9516d2d1f10fe5f9a Mon Sep 17 00:00:00 2001 From: amquake Date: Mon, 19 Jun 2023 21:00:27 -0700 Subject: [PATCH 01/46] optional pnp results, pnp method simplification --- .../org/photonvision/PhotonPoseEstimator.java | 41 +--- .../photonvision/estimation/OpenCVHelp.java | 209 ++++++++++-------- .../estimation/VisionEstimation.java | 101 ++------- .../simulation/PhotonCameraSim.java | 3 +- .../simulation/VisionSystemSim.java | 2 +- .../java/org/photonvision/OpenCVTest.java | 4 +- 6 files changed, 146 insertions(+), 214 deletions(-) diff --git a/photon-lib/src/main/java/org/photonvision/PhotonPoseEstimator.java b/photon-lib/src/main/java/org/photonvision/PhotonPoseEstimator.java index 3eecf72118..56537e5f0b 100644 --- a/photon-lib/src/main/java/org/photonvision/PhotonPoseEstimator.java +++ b/photon-lib/src/main/java/org/photonvision/PhotonPoseEstimator.java @@ -24,7 +24,6 @@ package org.photonvision; -import edu.wpi.first.apriltag.AprilTag; import edu.wpi.first.apriltag.AprilTagFieldLayout; import edu.wpi.first.math.Matrix; import edu.wpi.first.math.Pair; @@ -45,7 +44,6 @@ import org.photonvision.estimation.VisionEstimation; import org.photonvision.targeting.PhotonPipelineResult; import org.photonvision.targeting.PhotonTrackedTarget; -import org.photonvision.targeting.TargetCorner; /** * The PhotonPoseEstimator class filters or combines readings from all the AprilTags visible at a @@ -364,44 +362,17 @@ private Optional multiTagPNPStrategy( PhotonPipelineResult result, Optional> cameraMatrixOpt, Optional> distCoeffsOpt) { - // Arrays we need declared up front - var visCorners = new ArrayList(); - var knownVisTags = new ArrayList(); - - if (result.getTargets().size() < 2) { - return update(result, cameraMatrixOpt, distCoeffsOpt, this.multiTagFallbackStrategy); - } - boolean hasCalibData = cameraMatrixOpt.isPresent() && distCoeffsOpt.isPresent(); - - if (!hasCalibData) { - return update(result, cameraMatrixOpt, distCoeffsOpt, this.multiTagFallbackStrategy); - } - - for (var target : result.getTargets()) { - // Check if tag actually exists in the field layout - var tagPoseOpt = fieldTags.getTagPose(target.getFiducialId()); - if (tagPoseOpt.isEmpty()) { - reportFiducialPoseError(target.getFiducialId()); - continue; - } - - // Now that we know tag is valid, add detected corners - visCorners.addAll(target.getDetectedCorners()); - - // actual layout poses of visible tags -- not exposed, so have to recreate - var tagPose = tagPoseOpt.get(); - knownVisTags.add(new AprilTag(target.getFiducialId(), tagPose)); - } - - if (result.getTargets().size() < 2 || knownVisTags.size() < 2) { - // Run fallback strategy instead + // cannot run multitagPNP, use fallback strategy + if (!hasCalibData || result.getTargets().size() < 2) { return update(result, cameraMatrixOpt, distCoeffsOpt, this.multiTagFallbackStrategy); } var pnpResults = - VisionEstimation.estimateCamPoseSqpnp( - cameraMatrixOpt.get(), distCoeffsOpt.get(), visCorners, knownVisTags); + VisionEstimation.estimateCamPosePNP( + cameraMatrixOpt.get(), distCoeffsOpt.get(), result.getTargets(), fieldTags).orElse(null); + // try fallback strategy if solvePNP fails for some reason + if(pnpResults == null) return update(result, cameraMatrixOpt, distCoeffsOpt, this.multiTagFallbackStrategy); var best = new Pose3d() .plus(pnpResults.best) // field-to-camera diff --git a/photon-lib/src/main/java/org/photonvision/estimation/OpenCVHelp.java b/photon-lib/src/main/java/org/photonvision/estimation/OpenCVHelp.java index b834ad4459..5ec848f424 
100644 --- a/photon-lib/src/main/java/org/photonvision/estimation/OpenCVHelp.java +++ b/photon-lib/src/main/java/org/photonvision/estimation/OpenCVHelp.java @@ -38,6 +38,8 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.List; +import java.util.Optional; + import org.ejml.simple.SimpleMatrix; import org.opencv.calib3d.Calib3d; import org.opencv.core.Core; @@ -398,63 +400,74 @@ public static double getContourAreaPx(List corners) { * @param imageCorners The projection of these 3d object points into the 2d camera image. The * order should match the given object point translations. * @return The resulting transformation that maps the camera pose to the target pose and the - * ambiguity if an alternate solution is available. + * ambiguity if an alternate solution is available, or an empty Optional if solvePNP fails. */ - public static PNPResults solvePNP_SQUARE( + public static Optional solvePNP_SQUARE( Matrix cameraMatrix, Matrix distCoeffs, List modelTrls, List imageCorners) { - // IPPE_SQUARE expects our corners in a specific order - modelTrls = reorderCircular(modelTrls, true, -1); - imageCorners = reorderCircular(imageCorners, true, -1); - // translate to opencv classes - var objectPoints = translationToTvec(modelTrls.toArray(new Translation3d[0])); - var imagePoints = targetCornersToMat(imageCorners); - var cameraMatrixMat = matrixToMat(cameraMatrix.getStorage()); - var distCoeffsMat = matrixToMat(distCoeffs.getStorage()); - var rvecs = new ArrayList(); - var tvecs = new ArrayList(); - var rvec = Mat.zeros(3, 1, CvType.CV_32F); - var tvec = Mat.zeros(3, 1, CvType.CV_32F); - var reprojectionError = new Mat(); - // calc rvecs/tvecs and associated reprojection error from image points - Calib3d.solvePnPGeneric( - objectPoints, - imagePoints, - cameraMatrixMat, - distCoeffsMat, - rvecs, - tvecs, - false, - Calib3d.SOLVEPNP_IPPE_SQUARE, - rvec, - tvec, - reprojectionError); - - float[] errors = new float[2]; - reprojectionError.get(0, 0, errors); - // convert to wpilib coordinates - var best = new Transform3d(tvecToTranslation(tvecs.get(0)), rvecToRotation(rvecs.get(0))); - - Transform3d alt = null; - if (tvecs.size() > 1) { - alt = new Transform3d(tvecToTranslation(tvecs.get(1)), rvecToRotation(rvecs.get(1))); + try { + // IPPE_SQUARE expects our corners in a specific order + modelTrls = reorderCircular(modelTrls, true, -1); + imageCorners = reorderCircular(imageCorners, true, -1); + // translate to opencv classes + var objectPoints = translationToTvec(modelTrls.toArray(new Translation3d[0])); + var imagePoints = targetCornersToMat(imageCorners); + var cameraMatrixMat = matrixToMat(cameraMatrix.getStorage()); + var distCoeffsMat = matrixToMat(distCoeffs.getStorage()); + var rvecs = new ArrayList(); + var tvecs = new ArrayList(); + var rvec = Mat.zeros(3, 1, CvType.CV_32F); + var tvec = Mat.zeros(3, 1, CvType.CV_32F); + var reprojectionError = new Mat(); + // calc rvecs/tvecs and associated reprojection error from image points + Calib3d.solvePnPGeneric( + objectPoints, + imagePoints, + cameraMatrixMat, + distCoeffsMat, + rvecs, + tvecs, + false, + Calib3d.SOLVEPNP_IPPE_SQUARE, + rvec, + tvec, + reprojectionError); + + float[] errors = new float[2]; + reprojectionError.get(0, 0, errors); + // convert to wpilib coordinates + var best = new Transform3d(tvecToTranslation(tvecs.get(0)), rvecToRotation(rvecs.get(0))); + + Transform3d alt = null; + if (tvecs.size() > 1) { + alt = new Transform3d(tvecToTranslation(tvecs.get(1)), rvecToRotation(rvecs.get(1))); + } + + // release 
our Mats from native memory + objectPoints.release(); + imagePoints.release(); + cameraMatrixMat.release(); + distCoeffsMat.release(); + for (var v : rvecs) v.release(); + for (var v : tvecs) v.release(); + rvec.release(); + tvec.release(); + reprojectionError.release(); + + // check if solvePnP failed with NaN results + if(Double.isNaN(errors[0])) throw new Exception("SolvePNP_SQUARE NaN result"); + + if (alt != null) return Optional.of(new PNPResults(best, alt, errors[0] / errors[1], errors[0], errors[1])); + else return Optional.of(new PNPResults(best, errors[0])); + } + // solvePnP failed + catch(Exception e) { + System.err.println("SolvePNP_SQUARE failed!"); + e.printStackTrace(); + return Optional.empty(); } - - // release our Mats from native memory - objectPoints.release(); - imagePoints.release(); - cameraMatrixMat.release(); - distCoeffsMat.release(); - for (var v : rvecs) v.release(); - for (var v : tvecs) v.release(); - rvec.release(); - tvec.release(); - reprojectionError.release(); - - if (alt != null) return new PNPResults(best, alt, errors[0] / errors[1], errors[0], errors[1]); - else return new PNPResults(best, errors[0]); } /** @@ -474,51 +487,61 @@ public static PNPResults solvePNP_SQUARE( * model points are supplied relative to the origin, this transformation brings the camera to * the origin. */ - public static PNPResults solvePNP_SQPNP( + public static Optional solvePNP_SQPNP( Matrix cameraMatrix, Matrix distCoeffs, List objectTrls, List imageCorners) { - // translate to opencv classes - var objectPoints = translationToTvec(objectTrls.toArray(new Translation3d[0])); - var imagePoints = targetCornersToMat(imageCorners); - var cameraMatrixMat = matrixToMat(cameraMatrix.getStorage()); - var distCoeffsMat = matrixToMat(distCoeffs.getStorage()); - var rvecs = new ArrayList(); - var tvecs = new ArrayList(); - var rvec = Mat.zeros(3, 1, CvType.CV_32F); - var tvec = Mat.zeros(3, 1, CvType.CV_32F); - var reprojectionError = new Mat(); - // calc rvec/tvec from image points - Calib3d.solvePnPGeneric( - objectPoints, - imagePoints, - cameraMatrixMat, - distCoeffsMat, - rvecs, - tvecs, - false, - Calib3d.SOLVEPNP_SQPNP, - rvec, - tvec, - reprojectionError); - - float[] error = new float[1]; - reprojectionError.get(0, 0, error); - // convert to wpilib coordinates - var best = new Transform3d(tvecToTranslation(tvecs.get(0)), rvecToRotation(rvecs.get(0))); - - // release our Mats from native memory - objectPoints.release(); - imagePoints.release(); - cameraMatrixMat.release(); - distCoeffsMat.release(); - for (var v : rvecs) v.release(); - for (var v : tvecs) v.release(); - rvec.release(); - tvec.release(); - reprojectionError.release(); - - return new PNPResults(best, error[0]); + try { + // translate to opencv classes + var objectPoints = translationToTvec(objectTrls.toArray(new Translation3d[0])); + var imagePoints = targetCornersToMat(imageCorners); + var cameraMatrixMat = matrixToMat(cameraMatrix.getStorage()); + var distCoeffsMat = matrixToMat(distCoeffs.getStorage()); + var rvecs = new ArrayList(); + var tvecs = new ArrayList(); + var rvec = Mat.zeros(3, 1, CvType.CV_32F); + var tvec = Mat.zeros(3, 1, CvType.CV_32F); + var reprojectionError = new Mat(); + // calc rvec/tvec from image points + Calib3d.solvePnPGeneric( + objectPoints, + imagePoints, + cameraMatrixMat, + distCoeffsMat, + rvecs, + tvecs, + false, + Calib3d.SOLVEPNP_SQPNP, + rvec, + tvec, + reprojectionError); + + float[] error = new float[1]; + reprojectionError.get(0, 0, error); + // convert to wpilib coordinates 
+ var best = new Transform3d(tvecToTranslation(tvecs.get(0)), rvecToRotation(rvecs.get(0))); + + // release our Mats from native memory + objectPoints.release(); + imagePoints.release(); + cameraMatrixMat.release(); + distCoeffsMat.release(); + for (var v : rvecs) v.release(); + for (var v : tvecs) v.release(); + rvec.release(); + tvec.release(); + reprojectionError.release(); + + // check if solvePnP failed with NaN results + if(Double.isNaN(error[0])) throw new Exception("SolvePNP_SQPNP NaN result"); + + return Optional.of(new PNPResults(best, error[0])); + } + catch (Exception e) { + System.err.println("SolvePNP_SQPNP failed!"); + e.printStackTrace(); + return Optional.empty(); + } } } diff --git a/photon-lib/src/main/java/org/photonvision/estimation/VisionEstimation.java b/photon-lib/src/main/java/org/photonvision/estimation/VisionEstimation.java index 9cdc1811fb..30756eaa44 100644 --- a/photon-lib/src/main/java/org/photonvision/estimation/VisionEstimation.java +++ b/photon-lib/src/main/java/org/photonvision/estimation/VisionEstimation.java @@ -35,12 +35,13 @@ import java.util.ArrayList; import java.util.List; import java.util.Objects; +import java.util.Optional; import java.util.stream.Collectors; import org.photonvision.targeting.PhotonTrackedTarget; import org.photonvision.targeting.TargetCorner; public class VisionEstimation { - /** Get the visible {@link AprilTag}s according the tag layout using the visible tag IDs. */ + /** Get the visible {@link AprilTag}s which are in the tag layout using the visible tag IDs. */ public static List getVisibleLayoutTags( List visTags, AprilTagFieldLayout tagLayout) { return visTags.stream() @@ -55,20 +56,22 @@ public static List getVisibleLayoutTags( } /** - * Performs solvePNP using 3d-2d point correspondences to estimate the field-to-camera + * Performs solvePNP using 3d-2d point correspondences of visible AprilTags to estimate the field-to-camera * transformation. If only one tag is visible, the result may have an alternate solution. * *

<p>Note: The returned transformation is from the field origin to the camera pose! - * (Unless you only feed this one tag??) + * + * <p>With only one tag: {@link OpenCVHelp#solvePNP_SQUARE} + * <p>With multiple tags: {@link OpenCVHelp#solvePNP_SQPNP}
* * @param cameraMatrix The camera intrinsics matrix in standard opencv form * @param distCoeffs The camera distortion matrix in standard opencv form * @param visTags The visible tags reported by PV * @param tagLayout The known tag layout on the field - * @return The transformation that maps the field origin to the camera pose + * @return The transformation that maps the field origin to the camera pose, or an empty + * Optional if estimation fails. */ - @Deprecated - public static PNPResults estimateCamPosePNP( + public static Optional estimateCamPosePNP( Matrix cameraMatrix, Matrix distCoeffs, List visTags, @@ -77,113 +80,47 @@ public static PNPResults estimateCamPosePNP( || visTags == null || tagLayout.getTags().size() == 0 || visTags.size() == 0) { - return new PNPResults(); + return Optional.empty(); } var corners = new ArrayList(); for (var tag : visTags) corners.addAll(tag.getDetectedCorners()); var knownTags = getVisibleLayoutTags(visTags, tagLayout); if (knownTags.size() == 0 || corners.size() == 0 || corners.size() % 4 != 0) { - return new PNPResults(); + return Optional.empty(); } // single-tag pnp if (visTags.size() == 1) { var camToTag = OpenCVHelp.solvePNP_SQUARE( - cameraMatrix, distCoeffs, TargetModel.kTag16h5.vertices, corners); + cameraMatrix, distCoeffs, TargetModel.kTag16h5.vertices, corners).orElse(null); + if(camToTag == null) return Optional.empty(); var bestPose = knownTags.get(0).pose.transformBy(camToTag.best.inverse()); var altPose = new Pose3d(); if (camToTag.ambiguity != 0) altPose = knownTags.get(0).pose.transformBy(camToTag.alt.inverse()); - var bestTagToCam = camToTag.best.inverse(); - SmartDashboard.putNumberArray( - "multiTagBest_internal", - new double[] { - bestTagToCam.getX(), - bestTagToCam.getY(), - bestTagToCam.getZ(), - bestTagToCam.getRotation().getQuaternion().getW(), - bestTagToCam.getRotation().getQuaternion().getX(), - bestTagToCam.getRotation().getQuaternion().getY(), - bestTagToCam.getRotation().getQuaternion().getZ() - }); - var o = new Pose3d(); - return new PNPResults( + return Optional.of(new PNPResults( new Transform3d(o, bestPose), new Transform3d(o, altPose), camToTag.ambiguity, camToTag.bestReprojErr, - camToTag.altReprojErr); + camToTag.altReprojErr)); } // multi-tag pnp else { var objectTrls = new ArrayList(); for (var tag : knownTags) objectTrls.addAll(TargetModel.kTag16h5.getFieldVertices(tag.pose)); - var camToOrigin = OpenCVHelp.solvePNP_SQPNP(cameraMatrix, distCoeffs, objectTrls, corners); - return new PNPResults( + var camToOrigin = OpenCVHelp.solvePNP_SQPNP(cameraMatrix, distCoeffs, objectTrls, corners).orElse(null); + if(camToOrigin == null) return Optional.empty(); + return Optional.of(new PNPResults( camToOrigin.best.inverse(), camToOrigin.alt.inverse(), camToOrigin.ambiguity, camToOrigin.bestReprojErr, - camToOrigin.altReprojErr); - } - } - - /** - * Performs solvePNP using 3d-2d point correspondences to estimate the field-to-camera - * transformation. If only one tag is visible, the result may have an alternate solution. - * - *
<p>
Note: The returned transformation is from the field origin to the camera pose! - * - * @param cameraMatrix the camera intrinsics matrix in standard opencv form - * @param distCoeffs the camera distortion matrix in standard opencv form - * @param corners The visible tag corners in the 2d image - * @param knownTags The known tag field poses corresponding to the visible tag IDs - * @return The transformation that maps the field origin to the camera pose - */ - public static PNPResults estimateCamPoseSqpnp( - Matrix cameraMatrix, - Matrix distCoeffs, - List corners, - List knownTags) { - if (knownTags == null - || corners == null - || corners.size() != knownTags.size() * 4 - || knownTags.size() == 0) { - return new PNPResults(); - } - var objectTrls = new ArrayList(); - for (var tag : knownTags) objectTrls.addAll(TargetModel.kTag16h5.vertices); - var camToOrigin = OpenCVHelp.solvePNP_SQPNP(cameraMatrix, distCoeffs, objectTrls, corners); - // var camToOrigin = OpenCVHelp.solveTagsPNPRansac(prop, objectTrls, corners); - return new PNPResults( - camToOrigin.best.inverse(), - camToOrigin.alt.inverse(), - camToOrigin.ambiguity, - camToOrigin.bestReprojErr, - camToOrigin.altReprojErr); - } - - /** - * The best estimated transformation (Rotation-translation composition) that maps a set of - * translations onto another with point correspondences, and its RMSE. - */ - public static class SVDResults { - public final RotTrlTransform3d trf; - - /** If the result is invalid, this value is -1 */ - public final double rmse; - - public SVDResults() { - this(new RotTrlTransform3d(), -1); - } - - public SVDResults(RotTrlTransform3d trf, double rmse) { - this.trf = trf; - this.rmse = rmse; + camToOrigin.altReprojErr)); } } } diff --git a/photon-lib/src/main/java/org/photonvision/simulation/PhotonCameraSim.java b/photon-lib/src/main/java/org/photonvision/simulation/PhotonCameraSim.java index 710d7f49bf..115b6c1015 100644 --- a/photon-lib/src/main/java/org/photonvision/simulation/PhotonCameraSim.java +++ b/photon-lib/src/main/java/org/photonvision/simulation/PhotonCameraSim.java @@ -355,7 +355,8 @@ public PhotonPipelineResult process( prop.getIntrinsics(), prop.getDistCoeffs(), tgt.getModel().vertices, - noisyTargetCorners); + noisyTargetCorners).orElse(null); + if(pnpSim == null) continue; centerRot = prop.getPixelRot( OpenCVHelp.projectPoints( diff --git a/photon-lib/src/main/java/org/photonvision/simulation/VisionSystemSim.java b/photon-lib/src/main/java/org/photonvision/simulation/VisionSystemSim.java index 6a1c36cf3d..81736d2dde 100644 --- a/photon-lib/src/main/java/org/photonvision/simulation/VisionSystemSim.java +++ b/photon-lib/src/main/java/org/photonvision/simulation/VisionSystemSim.java @@ -355,7 +355,7 @@ public void update(Pose3d robotPoseMeters) { // use camera pose from the image capture timestamp Pose3d lateRobotPose = getRobotPose(timestampCapture); - Pose3d lateCameraPose = lateRobotPose.plus(getRobotToCamera(camSim).get()); + Pose3d lateCameraPose = lateRobotPose.plus(getRobotToCamera(camSim, timestampCapture).get()); cameraPose2ds.add(lateCameraPose.toPose2d()); // process a PhotonPipelineResult with visible targets diff --git a/photon-lib/src/test/java/org/photonvision/OpenCVTest.java b/photon-lib/src/test/java/org/photonvision/OpenCVTest.java index 594439e847..baa57bdc6d 100644 --- a/photon-lib/src/test/java/org/photonvision/OpenCVTest.java +++ b/photon-lib/src/test/java/org/photonvision/OpenCVTest.java @@ -194,7 +194,7 @@ public void testSolvePNP_SQUARE() { prop.getIntrinsics(), 
prop.getDistCoeffs(), cameraPose, target.getFieldVertices()); var pnpSim = OpenCVHelp.solvePNP_SQUARE( - prop.getIntrinsics(), prop.getDistCoeffs(), target.getModel().vertices, targetCorners); + prop.getIntrinsics(), prop.getDistCoeffs(), target.getModel().vertices, targetCorners).orElseThrow(); var estRelation = new CameraTargetRelation(cameraPose, cameraPose.plus(pnpSim.best)); assertSame(actualRelation.camToTarg, estRelation.camToTarg); } @@ -222,7 +222,7 @@ public void testSolvePNP_SQPNP() { prop.getIntrinsics(), prop.getDistCoeffs(), cameraPose, target.getFieldVertices()); var pnpSim = OpenCVHelp.solvePNP_SQPNP( - prop.getIntrinsics(), prop.getDistCoeffs(), target.getModel().vertices, targetCorners); + prop.getIntrinsics(), prop.getDistCoeffs(), target.getModel().vertices, targetCorners).orElseThrow(); var estRelation = new CameraTargetRelation(cameraPose, cameraPose.plus(pnpSim.best)); assertSame(actualRelation.camToTarg, estRelation.camToTarg); } From 209e184098d931d5ffbbadf879a32f4ef10a0fd6 Mon Sep 17 00:00:00 2001 From: amquake Date: Tue, 20 Jun 2023 00:57:16 -0700 Subject: [PATCH 02/46] solvepnp docs clarification --- .../photonvision/estimation/OpenCVHelp.java | 25 +++++++++++-------- 1 file changed, 14 insertions(+), 11 deletions(-) diff --git a/photon-lib/src/main/java/org/photonvision/estimation/OpenCVHelp.java b/photon-lib/src/main/java/org/photonvision/estimation/OpenCVHelp.java index 5ec848f424..28968f3630 100644 --- a/photon-lib/src/main/java/org/photonvision/estimation/OpenCVHelp.java +++ b/photon-lib/src/main/java/org/photonvision/estimation/OpenCVHelp.java @@ -375,9 +375,10 @@ public static double getContourAreaPx(List corners) { } /** - * Finds the transformation(s) that map the camera's pose to the target pose. The camera's pose + * Finds the transformation(s) that map the camera's pose to the target's pose. The camera's pose * relative to the target is determined by the supplied 3d points of the target's model and their - * associated 2d points imaged by the camera. + * associated 2d points imaged by the camera. The supplied model translations must be relative to + * the target's pose. * *

<p>For planar targets, there may be an alternate solution which is plausible given the 2d image * points. This has an associated "ambiguity" which describes the ratio of reprojection error * between the "best" and "alternate" solution. * * <p>This method is intended for use with individual AprilTags, and will not work unless 4 points * are provided. * - * @param cameraMatrix the camera intrinsics matrix in standard opencv form - * @param distCoeffs the camera distortion matrix in standard opencv form + * @param cameraMatrix The camera intrinsics matrix in standard opencv form + * @param distCoeffs The camera distortion matrix in standard opencv form * @param modelTrls The translations of the object corners. These should have the object pose as * their origin. These must come in a specific, pose-relative order (in NWU): *
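A usage sketch, not part of the patch: one way a caller might consume the Optional-returning solvePNP_SQUARE documented above. The wrapper class, method, and parameter names are hypothetical, and the generic type parameters are assumed.

import edu.wpi.first.math.Matrix;
import edu.wpi.first.math.geometry.Transform3d;
import edu.wpi.first.math.geometry.Translation3d;
import edu.wpi.first.math.numbers.N1;
import edu.wpi.first.math.numbers.N3;
import edu.wpi.first.math.numbers.N5;
import java.util.List;
import org.photonvision.estimation.OpenCVHelp;
import org.photonvision.targeting.TargetCorner;

class SolvePnpSquareSketch {
  // Estimates the camera-to-tag transform from one tag's four detected corners, or returns
  // null if OpenCV's solvePnP produced no usable (non-NaN) solution.
  static Transform3d camToTag(
      Matrix<N3, N3> cameraMatrix,
      Matrix<N5, N1> distCoeffs,
      List<Translation3d> tagModelVertices, // e.g. TargetModel.kTag16h5.vertices
      List<TargetCorner> detectedCorners) {
    return OpenCVHelp.solvePNP_SQUARE(cameraMatrix, distCoeffs, tagModelVertices, detectedCorners)
        .map(pnp -> pnp.best) // an empty Optional means solvePNP_SQUARE failed
        .orElse(null);
  }
}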
    @@ -471,15 +472,17 @@ public static Optional solvePNP_SQUARE( } /** - * Finds the transformation that maps the camera's pose to the target pose. The camera's pose - * relative to the target is determined by the supplied 3d points of the target's model and their - * associated 2d points imaged by the camera. + * Finds the transformation that maps the camera's pose to the origin of the supplied object. + * An "object" is simply a set of known 3d translations that correspond to the given 2d points. + * If, for example, the object translations are given relative to close-right corner of the blue + * alliance(the default origin), a camera-to-origin transformation is returned. If the translations + * are relative to a target's pose, a camera-to-target transformation is returned. * - *

<p>This method is intended for use with multiple targets and has no alternate solutions. There - * must be at least 3 points. + * <p>
    There must be at least 3 points to use this method. This does not return an alternate solution-- + * if you are intending to use solvePNP on a single AprilTag, see {@link #solvePNP_SQUARE} instead. * - * @param cameraMatrix the camera intrinsics matrix in standard opencv form - * @param distCoeffs the camera distortion matrix in standard opencv form + * @param cameraMatrix The camera intrinsics matrix in standard opencv form + * @param distCoeffs The camera distortion matrix in standard opencv form * @param objectTrls The translations of the object corners, relative to the field. * @param imageCorners The projection of these 3d object points into the 2d camera image. The * order should match the given object point translations. From b4a0c902f7a69cfb90398c164b19e6604319124f Mon Sep 17 00:00:00 2001 From: amquake Date: Tue, 20 Jun 2023 01:34:59 -0700 Subject: [PATCH 03/46] spotless --- .../org/photonvision/PhotonPoseEstimator.java | 6 ++- .../photonvision/estimation/OpenCVHelp.java | 28 +++++----- .../estimation/VisionEstimation.java | 53 ++++++++++--------- .../simulation/PhotonCameraSim.java | 11 ++-- .../java/org/photonvision/OpenCVTest.java | 12 ++++- 5 files changed, 63 insertions(+), 47 deletions(-) diff --git a/photon-lib/src/main/java/org/photonvision/PhotonPoseEstimator.java b/photon-lib/src/main/java/org/photonvision/PhotonPoseEstimator.java index 56537e5f0b..a669ab0b00 100644 --- a/photon-lib/src/main/java/org/photonvision/PhotonPoseEstimator.java +++ b/photon-lib/src/main/java/org/photonvision/PhotonPoseEstimator.java @@ -370,9 +370,11 @@ private Optional multiTagPNPStrategy( var pnpResults = VisionEstimation.estimateCamPosePNP( - cameraMatrixOpt.get(), distCoeffsOpt.get(), result.getTargets(), fieldTags).orElse(null); + cameraMatrixOpt.get(), distCoeffsOpt.get(), result.getTargets(), fieldTags) + .orElse(null); // try fallback strategy if solvePNP fails for some reason - if(pnpResults == null) return update(result, cameraMatrixOpt, distCoeffsOpt, this.multiTagFallbackStrategy); + if (pnpResults == null) + return update(result, cameraMatrixOpt, distCoeffsOpt, this.multiTagFallbackStrategy); var best = new Pose3d() .plus(pnpResults.best) // field-to-camera diff --git a/photon-lib/src/main/java/org/photonvision/estimation/OpenCVHelp.java b/photon-lib/src/main/java/org/photonvision/estimation/OpenCVHelp.java index 28968f3630..87e2919b68 100644 --- a/photon-lib/src/main/java/org/photonvision/estimation/OpenCVHelp.java +++ b/photon-lib/src/main/java/org/photonvision/estimation/OpenCVHelp.java @@ -39,7 +39,6 @@ import java.util.Arrays; import java.util.List; import java.util.Optional; - import org.ejml.simple.SimpleMatrix; import org.opencv.calib3d.Calib3d; import org.opencv.core.Core; @@ -458,13 +457,14 @@ public static Optional solvePNP_SQUARE( reprojectionError.release(); // check if solvePnP failed with NaN results - if(Double.isNaN(errors[0])) throw new Exception("SolvePNP_SQUARE NaN result"); + if (Double.isNaN(errors[0])) throw new Exception("SolvePNP_SQUARE NaN result"); - if (alt != null) return Optional.of(new PNPResults(best, alt, errors[0] / errors[1], errors[0], errors[1])); + if (alt != null) + return Optional.of(new PNPResults(best, alt, errors[0] / errors[1], errors[0], errors[1])); else return Optional.of(new PNPResults(best, errors[0])); } // solvePnP failed - catch(Exception e) { + catch (Exception e) { System.err.println("SolvePNP_SQUARE failed!"); e.printStackTrace(); return Optional.empty(); @@ -472,14 +472,15 @@ public static Optional solvePNP_SQUARE( } /** - 
* Finds the transformation that maps the camera's pose to the origin of the supplied object. - * An "object" is simply a set of known 3d translations that correspond to the given 2d points. - * If, for example, the object translations are given relative to close-right corner of the blue - * alliance(the default origin), a camera-to-origin transformation is returned. If the translations - * are relative to a target's pose, a camera-to-target transformation is returned. + * Finds the transformation that maps the camera's pose to the origin of the supplied object. An + * "object" is simply a set of known 3d translations that correspond to the given 2d points. If, + * for example, the object translations are given relative to close-right corner of the blue + * alliance(the default origin), a camera-to-origin transformation is returned. If the + * translations are relative to a target's pose, a camera-to-target transformation is returned. * - *

<p>There must be at least 3 points to use this method. This does not return an alternate solution-- - * if you are intending to use solvePNP on a single AprilTag, see {@link #solvePNP_SQUARE} instead. + * <p>
    There must be at least 3 points to use this method. This does not return an alternate + * solution-- if you are intending to use solvePNP on a single AprilTag, see {@link + * #solvePNP_SQUARE} instead. * * @param cameraMatrix The camera intrinsics matrix in standard opencv form * @param distCoeffs The camera distortion matrix in standard opencv form @@ -537,11 +538,10 @@ public static Optional solvePNP_SQPNP( reprojectionError.release(); // check if solvePnP failed with NaN results - if(Double.isNaN(error[0])) throw new Exception("SolvePNP_SQPNP NaN result"); + if (Double.isNaN(error[0])) throw new Exception("SolvePNP_SQPNP NaN result"); return Optional.of(new PNPResults(best, error[0])); - } - catch (Exception e) { + } catch (Exception e) { System.err.println("SolvePNP_SQPNP failed!"); e.printStackTrace(); return Optional.empty(); diff --git a/photon-lib/src/main/java/org/photonvision/estimation/VisionEstimation.java b/photon-lib/src/main/java/org/photonvision/estimation/VisionEstimation.java index 30756eaa44..3df96f0642 100644 --- a/photon-lib/src/main/java/org/photonvision/estimation/VisionEstimation.java +++ b/photon-lib/src/main/java/org/photonvision/estimation/VisionEstimation.java @@ -31,7 +31,6 @@ import edu.wpi.first.math.geometry.Transform3d; import edu.wpi.first.math.geometry.Translation3d; import edu.wpi.first.math.numbers.*; -import edu.wpi.first.wpilibj.smartdashboard.SmartDashboard; import java.util.ArrayList; import java.util.List; import java.util.Objects; @@ -56,20 +55,22 @@ public static List getVisibleLayoutTags( } /** - * Performs solvePNP using 3d-2d point correspondences of visible AprilTags to estimate the field-to-camera - * transformation. If only one tag is visible, the result may have an alternate solution. + * Performs solvePNP using 3d-2d point correspondences of visible AprilTags to estimate the + * field-to-camera transformation. If only one tag is visible, the result may have an alternate + * solution. * *

<p>Note: The returned transformation is from the field origin to the camera pose! - * - * <p>With only one tag: {@link OpenCVHelp#solvePNP_SQUARE} - * <p>With multiple tags: {@link OpenCVHelp#solvePNP_SQPNP} + * + * <p>With only one tag: {@link OpenCVHelp#solvePNP_SQUARE} + * + * <p>
    With multiple tags: {@link OpenCVHelp#solvePNP_SQPNP} * * @param cameraMatrix The camera intrinsics matrix in standard opencv form * @param distCoeffs The camera distortion matrix in standard opencv form * @param visTags The visible tags reported by PV * @param tagLayout The known tag layout on the field - * @return The transformation that maps the field origin to the camera pose, or an empty - * Optional if estimation fails. + * @return The transformation that maps the field origin to the camera pose, or an empty Optional + * if estimation fails. */ public static Optional estimateCamPosePNP( Matrix cameraMatrix, @@ -94,33 +95,37 @@ public static Optional estimateCamPosePNP( if (visTags.size() == 1) { var camToTag = OpenCVHelp.solvePNP_SQUARE( - cameraMatrix, distCoeffs, TargetModel.kTag16h5.vertices, corners).orElse(null); - if(camToTag == null) return Optional.empty(); + cameraMatrix, distCoeffs, TargetModel.kTag16h5.vertices, corners) + .orElse(null); + if (camToTag == null) return Optional.empty(); var bestPose = knownTags.get(0).pose.transformBy(camToTag.best.inverse()); var altPose = new Pose3d(); if (camToTag.ambiguity != 0) altPose = knownTags.get(0).pose.transformBy(camToTag.alt.inverse()); var o = new Pose3d(); - return Optional.of(new PNPResults( - new Transform3d(o, bestPose), - new Transform3d(o, altPose), - camToTag.ambiguity, - camToTag.bestReprojErr, - camToTag.altReprojErr)); + return Optional.of( + new PNPResults( + new Transform3d(o, bestPose), + new Transform3d(o, altPose), + camToTag.ambiguity, + camToTag.bestReprojErr, + camToTag.altReprojErr)); } // multi-tag pnp else { var objectTrls = new ArrayList(); for (var tag : knownTags) objectTrls.addAll(TargetModel.kTag16h5.getFieldVertices(tag.pose)); - var camToOrigin = OpenCVHelp.solvePNP_SQPNP(cameraMatrix, distCoeffs, objectTrls, corners).orElse(null); - if(camToOrigin == null) return Optional.empty(); - return Optional.of(new PNPResults( - camToOrigin.best.inverse(), - camToOrigin.alt.inverse(), - camToOrigin.ambiguity, - camToOrigin.bestReprojErr, - camToOrigin.altReprojErr)); + var camToOrigin = + OpenCVHelp.solvePNP_SQPNP(cameraMatrix, distCoeffs, objectTrls, corners).orElse(null); + if (camToOrigin == null) return Optional.empty(); + return Optional.of( + new PNPResults( + camToOrigin.best.inverse(), + camToOrigin.alt.inverse(), + camToOrigin.ambiguity, + camToOrigin.bestReprojErr, + camToOrigin.altReprojErr)); } } } diff --git a/photon-lib/src/main/java/org/photonvision/simulation/PhotonCameraSim.java b/photon-lib/src/main/java/org/photonvision/simulation/PhotonCameraSim.java index 115b6c1015..ad23dd8127 100644 --- a/photon-lib/src/main/java/org/photonvision/simulation/PhotonCameraSim.java +++ b/photon-lib/src/main/java/org/photonvision/simulation/PhotonCameraSim.java @@ -352,11 +352,12 @@ public PhotonPipelineResult process( if (tgt.fiducialID >= 0 && tgt.getFieldVertices().size() == 4) { // single AprilTag solvePNP pnpSim = OpenCVHelp.solvePNP_SQUARE( - prop.getIntrinsics(), - prop.getDistCoeffs(), - tgt.getModel().vertices, - noisyTargetCorners).orElse(null); - if(pnpSim == null) continue; + prop.getIntrinsics(), + prop.getDistCoeffs(), + tgt.getModel().vertices, + noisyTargetCorners) + .orElse(null); + if (pnpSim == null) continue; centerRot = prop.getPixelRot( OpenCVHelp.projectPoints( diff --git a/photon-lib/src/test/java/org/photonvision/OpenCVTest.java b/photon-lib/src/test/java/org/photonvision/OpenCVTest.java index baa57bdc6d..ae52dde639 100644 --- 
a/photon-lib/src/test/java/org/photonvision/OpenCVTest.java +++ b/photon-lib/src/test/java/org/photonvision/OpenCVTest.java @@ -194,7 +194,11 @@ public void testSolvePNP_SQUARE() { prop.getIntrinsics(), prop.getDistCoeffs(), cameraPose, target.getFieldVertices()); var pnpSim = OpenCVHelp.solvePNP_SQUARE( - prop.getIntrinsics(), prop.getDistCoeffs(), target.getModel().vertices, targetCorners).orElseThrow(); + prop.getIntrinsics(), + prop.getDistCoeffs(), + target.getModel().vertices, + targetCorners) + .orElseThrow(); var estRelation = new CameraTargetRelation(cameraPose, cameraPose.plus(pnpSim.best)); assertSame(actualRelation.camToTarg, estRelation.camToTarg); } @@ -222,7 +226,11 @@ public void testSolvePNP_SQPNP() { prop.getIntrinsics(), prop.getDistCoeffs(), cameraPose, target.getFieldVertices()); var pnpSim = OpenCVHelp.solvePNP_SQPNP( - prop.getIntrinsics(), prop.getDistCoeffs(), target.getModel().vertices, targetCorners).orElseThrow(); + prop.getIntrinsics(), + prop.getDistCoeffs(), + target.getModel().vertices, + targetCorners) + .orElseThrow(); var estRelation = new CameraTargetRelation(cameraPose, cameraPose.plus(pnpSim.best)); assertSame(actualRelation.camToTarg, estRelation.camToTarg); } From 1511b2aef545e6a6d30e56c6ffc77cc27d83bf02 Mon Sep 17 00:00:00 2001 From: amquake Date: Tue, 27 Jun 2023 00:58:20 -0700 Subject: [PATCH 04/46] fix local tag image loading --- .../photonvision/simulation/VideoSimUtil.java | 17 +++++------------ 1 file changed, 5 insertions(+), 12 deletions(-) diff --git a/photon-lib/src/main/java/org/photonvision/simulation/VideoSimUtil.java b/photon-lib/src/main/java/org/photonvision/simulation/VideoSimUtil.java index cec341b4d8..63170a0260 100644 --- a/photon-lib/src/main/java/org/photonvision/simulation/VideoSimUtil.java +++ b/photon-lib/src/main/java/org/photonvision/simulation/VideoSimUtil.java @@ -109,19 +109,10 @@ public static Mat get16h5TagImage(int id) { name = name.substring(0, name.length() - idString.length()) + idString; var resource = VideoSimUtil.class.getResource(kResourceTagImagesPath + name + ".png"); - // local IDE tests - String path = kLocalTagImagesPath + name + ".png"; - // gradle tests - if (resource != null) { - path = resource.getPath(); - - // TODO why did we have this previously? 
- // if (path.startsWith("/")) path = path.substring(1); - } - Mat result = new Mat(); - if (!path.startsWith("file")) result = Imgcodecs.imread(path, Imgcodecs.IMREAD_GRAYSCALE); + + Mat result = new Mat(); // reading jar file - if (result.empty()) { + if (resource != null && resource.getPath().startsWith("file")) { BufferedImage buf; try { buf = ImageIO.read(resource); @@ -140,6 +131,8 @@ public static Mat get16h5TagImage(int id) { } } } + // local IDE tests + else result = Imgcodecs.imread(kLocalTagImagesPath + name + ".png", Imgcodecs.IMREAD_GRAYSCALE); return result; } From 5be455712d71541d8d9244eabe48f9c847f91e86 Mon Sep 17 00:00:00 2001 From: amquake Date: Sat, 1 Jul 2023 00:11:03 -0700 Subject: [PATCH 05/46] solvepnp square NaN retry --- .../photonvision/estimation/OpenCVHelp.java | 114 ++++++++++-------- .../org/photonvision/VisionSystemSimTest.java | 4 +- 2 files changed, 69 insertions(+), 49 deletions(-) diff --git a/photon-lib/src/main/java/org/photonvision/estimation/OpenCVHelp.java b/photon-lib/src/main/java/org/photonvision/estimation/OpenCVHelp.java index 87e2919b68..78dda17e12 100644 --- a/photon-lib/src/main/java/org/photonvision/estimation/OpenCVHelp.java +++ b/photon-lib/src/main/java/org/photonvision/estimation/OpenCVHelp.java @@ -407,56 +407,64 @@ public static Optional solvePNP_SQUARE( Matrix distCoeffs, List modelTrls, List imageCorners) { + // solvepnp inputs + MatOfPoint3f objectPoints = new MatOfPoint3f(); + MatOfPoint2f imagePoints = new MatOfPoint2f(); + MatOfDouble cameraMatrixMat = new MatOfDouble(); + MatOfDouble distCoeffsMat = new MatOfDouble(); + var rvecs = new ArrayList(); + var tvecs = new ArrayList(); + Mat rvec = Mat.zeros(3, 1, CvType.CV_32F); + Mat tvec = Mat.zeros(3, 1, CvType.CV_32F); + Mat reprojectionError = Mat.zeros(2, 1, CvType.CV_32F); try { // IPPE_SQUARE expects our corners in a specific order modelTrls = reorderCircular(modelTrls, true, -1); imageCorners = reorderCircular(imageCorners, true, -1); // translate to opencv classes - var objectPoints = translationToTvec(modelTrls.toArray(new Translation3d[0])); - var imagePoints = targetCornersToMat(imageCorners); - var cameraMatrixMat = matrixToMat(cameraMatrix.getStorage()); - var distCoeffsMat = matrixToMat(distCoeffs.getStorage()); - var rvecs = new ArrayList(); - var tvecs = new ArrayList(); - var rvec = Mat.zeros(3, 1, CvType.CV_32F); - var tvec = Mat.zeros(3, 1, CvType.CV_32F); - var reprojectionError = new Mat(); - // calc rvecs/tvecs and associated reprojection error from image points - Calib3d.solvePnPGeneric( - objectPoints, - imagePoints, - cameraMatrixMat, - distCoeffsMat, - rvecs, - tvecs, - false, - Calib3d.SOLVEPNP_IPPE_SQUARE, - rvec, - tvec, - reprojectionError); + translationToTvec(modelTrls.toArray(new Translation3d[0])).assignTo(objectPoints); + targetCornersToMat(imageCorners).assignTo(imagePoints); + matrixToMat(cameraMatrix.getStorage()).assignTo(cameraMatrixMat); + matrixToMat(distCoeffs.getStorage()).assignTo(distCoeffsMat); float[] errors = new float[2]; - reprojectionError.get(0, 0, errors); - // convert to wpilib coordinates - var best = new Transform3d(tvecToTranslation(tvecs.get(0)), rvecToRotation(rvecs.get(0))); - + Transform3d best = null; Transform3d alt = null; - if (tvecs.size() > 1) { - alt = new Transform3d(tvecToTranslation(tvecs.get(1)), rvecToRotation(rvecs.get(1))); - } - // release our Mats from native memory - objectPoints.release(); - imagePoints.release(); - cameraMatrixMat.release(); - distCoeffsMat.release(); - for (var v : rvecs) v.release(); 
- for (var v : tvecs) v.release(); - rvec.release(); - tvec.release(); - reprojectionError.release(); + for(int tries = 0; tries < 2; tries++) { + // calc rvecs/tvecs and associated reprojection error from image points + Calib3d.solvePnPGeneric( + objectPoints, + imagePoints, + cameraMatrixMat, + distCoeffsMat, + rvecs, + tvecs, + false, + Calib3d.SOLVEPNP_IPPE_SQUARE, + rvec, + tvec, + reprojectionError); + + reprojectionError.get(0, 0, errors); + // convert to wpilib coordinates + best = new Transform3d(tvecToTranslation(tvecs.get(0)), rvecToRotation(rvecs.get(0))); + + if (tvecs.size() > 1) { + alt = new Transform3d(tvecToTranslation(tvecs.get(1)), rvecToRotation(rvecs.get(1))); + } + + // check if we got a NaN result + if(!Double.isNaN(errors[0])) break; + else { // add noise and retry + double[] br = imagePoints.get(0, 0); + br[0]-=0.001; + br[1]-=0.001; + imagePoints.put(0, 0, br); + } + } - // check if solvePnP failed with NaN results + // check if solvePnP failed with NaN results and retrying failed if (Double.isNaN(errors[0])) throw new Exception("SolvePNP_SQUARE NaN result"); if (alt != null) @@ -469,6 +477,18 @@ public static Optional solvePNP_SQUARE( e.printStackTrace(); return Optional.empty(); } + finally { + // release our Mats from native memory + objectPoints.release(); + imagePoints.release(); + cameraMatrixMat.release(); + distCoeffsMat.release(); + for (var v : rvecs) v.release(); + for (var v : tvecs) v.release(); + rvec.release(); + tvec.release(); + reprojectionError.release(); + } } /** @@ -498,15 +518,15 @@ public static Optional solvePNP_SQPNP( List imageCorners) { try { // translate to opencv classes - var objectPoints = translationToTvec(objectTrls.toArray(new Translation3d[0])); - var imagePoints = targetCornersToMat(imageCorners); - var cameraMatrixMat = matrixToMat(cameraMatrix.getStorage()); - var distCoeffsMat = matrixToMat(distCoeffs.getStorage()); + MatOfPoint3f objectPoints = translationToTvec(objectTrls.toArray(new Translation3d[0])); + MatOfPoint2f imagePoints = targetCornersToMat(imageCorners); + MatOfDouble cameraMatrixMat = matrixToMat(cameraMatrix.getStorage()); + MatOfDouble distCoeffsMat = matrixToMat(distCoeffs.getStorage()); var rvecs = new ArrayList(); var tvecs = new ArrayList(); - var rvec = Mat.zeros(3, 1, CvType.CV_32F); - var tvec = Mat.zeros(3, 1, CvType.CV_32F); - var reprojectionError = new Mat(); + Mat rvec = Mat.zeros(3, 1, CvType.CV_32F); + Mat tvec = Mat.zeros(3, 1, CvType.CV_32F); + Mat reprojectionError = new Mat(); // calc rvec/tvec from image points Calib3d.solvePnPGeneric( objectPoints, diff --git a/photon-lib/src/test/java/org/photonvision/VisionSystemSimTest.java b/photon-lib/src/test/java/org/photonvision/VisionSystemSimTest.java index f4d5806e43..77c6859b8b 100644 --- a/photon-lib/src/test/java/org/photonvision/VisionSystemSimTest.java +++ b/photon-lib/src/test/java/org/photonvision/VisionSystemSimTest.java @@ -144,7 +144,7 @@ public void testVisibilityCupidShuffle() { var cameraSim = new PhotonCameraSim(camera); visionSysSim.addCamera(cameraSim, new Transform3d()); cameraSim.prop.setCalibration(640, 480, Rotation2d.fromDegrees(80)); - visionSysSim.addVisionTargets(new VisionTargetSim(targetPose, new TargetModel(1.0, 3.0), 3)); + visionSysSim.addVisionTargets(new VisionTargetSim(targetPose, new TargetModel(1.0, 1.0), 3)); // To the right, to the right var robotPose = new Pose2d(new Translation2d(5, 0), Rotation2d.fromDegrees(-70)); @@ -439,7 +439,7 @@ public void testMultipleTargets() { 5)); visionSysSim.addVisionTargets( 
new VisionTargetSim( - targetPoseL.transformBy( + targetPoseR.transformBy( new Transform3d(new Translation3d(0, 0, 1.00), new Rotation3d())), TargetModel.kTag16h5, 6)); From c9d2f1e0b2495bdd03d8f9ba5bff11fbce4cbddf Mon Sep 17 00:00:00 2001 From: amquake Date: Sat, 1 Jul 2023 00:12:37 -0700 Subject: [PATCH 06/46] spotless --- .../java/org/photonvision/estimation/OpenCVHelp.java | 11 +++++------ .../org/photonvision/simulation/VideoSimUtil.java | 4 ++-- 2 files changed, 7 insertions(+), 8 deletions(-) diff --git a/photon-lib/src/main/java/org/photonvision/estimation/OpenCVHelp.java b/photon-lib/src/main/java/org/photonvision/estimation/OpenCVHelp.java index 78dda17e12..ec34afb1e9 100644 --- a/photon-lib/src/main/java/org/photonvision/estimation/OpenCVHelp.java +++ b/photon-lib/src/main/java/org/photonvision/estimation/OpenCVHelp.java @@ -431,7 +431,7 @@ public static Optional solvePNP_SQUARE( Transform3d best = null; Transform3d alt = null; - for(int tries = 0; tries < 2; tries++) { + for (int tries = 0; tries < 2; tries++) { // calc rvecs/tvecs and associated reprojection error from image points Calib3d.solvePnPGeneric( objectPoints, @@ -455,11 +455,11 @@ public static Optional solvePNP_SQUARE( } // check if we got a NaN result - if(!Double.isNaN(errors[0])) break; + if (!Double.isNaN(errors[0])) break; else { // add noise and retry double[] br = imagePoints.get(0, 0); - br[0]-=0.001; - br[1]-=0.001; + br[0] -= 0.001; + br[1] -= 0.001; imagePoints.put(0, 0, br); } } @@ -476,8 +476,7 @@ public static Optional solvePNP_SQUARE( System.err.println("SolvePNP_SQUARE failed!"); e.printStackTrace(); return Optional.empty(); - } - finally { + } finally { // release our Mats from native memory objectPoints.release(); imagePoints.release(); diff --git a/photon-lib/src/main/java/org/photonvision/simulation/VideoSimUtil.java b/photon-lib/src/main/java/org/photonvision/simulation/VideoSimUtil.java index 63170a0260..d142347a9a 100644 --- a/photon-lib/src/main/java/org/photonvision/simulation/VideoSimUtil.java +++ b/photon-lib/src/main/java/org/photonvision/simulation/VideoSimUtil.java @@ -109,8 +109,8 @@ public static Mat get16h5TagImage(int id) { name = name.substring(0, name.length() - idString.length()) + idString; var resource = VideoSimUtil.class.getResource(kResourceTagImagesPath + name + ".png"); - - Mat result = new Mat(); + + Mat result = new Mat(); // reading jar file if (resource != null && resource.getPath().startsWith("file")) { BufferedImage buf; From 6b22823e28da1ba066b4092c59dc2863ab1358db Mon Sep 17 00:00:00 2001 From: amquake Date: Sun, 23 Jul 2023 00:48:54 -0700 Subject: [PATCH 07/46] use condition in result instead of Optional --- .../org/photonvision/PhotonPoseEstimator.java | 5 +- .../photonvision/estimation/OpenCVHelp.java | 17 +++---- .../photonvision/estimation/PNPResults.java | 13 ++++- .../estimation/VisionEstimation.java | 50 ++++++++----------- .../simulation/PhotonCameraSim.java | 5 +- .../java/org/photonvision/OpenCVTest.java | 6 +-- 6 files changed, 48 insertions(+), 48 deletions(-) diff --git a/photon-lib/src/main/java/org/photonvision/PhotonPoseEstimator.java b/photon-lib/src/main/java/org/photonvision/PhotonPoseEstimator.java index a669ab0b00..1e2db3d24a 100644 --- a/photon-lib/src/main/java/org/photonvision/PhotonPoseEstimator.java +++ b/photon-lib/src/main/java/org/photonvision/PhotonPoseEstimator.java @@ -370,10 +370,9 @@ private Optional multiTagPNPStrategy( var pnpResults = VisionEstimation.estimateCamPosePNP( - cameraMatrixOpt.get(), distCoeffsOpt.get(), 
result.getTargets(), fieldTags) - .orElse(null); + cameraMatrixOpt.get(), distCoeffsOpt.get(), result.getTargets(), fieldTags); // try fallback strategy if solvePNP fails for some reason - if (pnpResults == null) + if (!pnpResults.isPresent) return update(result, cameraMatrixOpt, distCoeffsOpt, this.multiTagFallbackStrategy); var best = new Pose3d() diff --git a/photon-lib/src/main/java/org/photonvision/estimation/OpenCVHelp.java b/photon-lib/src/main/java/org/photonvision/estimation/OpenCVHelp.java index ec34afb1e9..944bc0708a 100644 --- a/photon-lib/src/main/java/org/photonvision/estimation/OpenCVHelp.java +++ b/photon-lib/src/main/java/org/photonvision/estimation/OpenCVHelp.java @@ -38,7 +38,6 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.List; -import java.util.Optional; import org.ejml.simple.SimpleMatrix; import org.opencv.calib3d.Calib3d; import org.opencv.core.Core; @@ -400,9 +399,9 @@ public static double getContourAreaPx(List corners) { * @param imageCorners The projection of these 3d object points into the 2d camera image. The * order should match the given object point translations. * @return The resulting transformation that maps the camera pose to the target pose and the - * ambiguity if an alternate solution is available, or an empty Optional if solvePNP fails. + * ambiguity if an alternate solution is available. */ - public static Optional solvePNP_SQUARE( + public static PNPResults solvePNP_SQUARE( Matrix cameraMatrix, Matrix distCoeffs, List modelTrls, @@ -468,14 +467,14 @@ public static Optional solvePNP_SQUARE( if (Double.isNaN(errors[0])) throw new Exception("SolvePNP_SQUARE NaN result"); if (alt != null) - return Optional.of(new PNPResults(best, alt, errors[0] / errors[1], errors[0], errors[1])); - else return Optional.of(new PNPResults(best, errors[0])); + return new PNPResults(best, alt, errors[0] / errors[1], errors[0], errors[1]); + else return new PNPResults(best, errors[0]); } // solvePnP failed catch (Exception e) { System.err.println("SolvePNP_SQUARE failed!"); e.printStackTrace(); - return Optional.empty(); + return new PNPResults(); } finally { // release our Mats from native memory objectPoints.release(); @@ -510,7 +509,7 @@ public static Optional solvePNP_SQUARE( * model points are supplied relative to the origin, this transformation brings the camera to * the origin. */ - public static Optional solvePNP_SQPNP( + public static PNPResults solvePNP_SQPNP( Matrix cameraMatrix, Matrix distCoeffs, List objectTrls, @@ -559,11 +558,11 @@ public static Optional solvePNP_SQPNP( // check if solvePnP failed with NaN results if (Double.isNaN(error[0])) throw new Exception("SolvePNP_SQPNP NaN result"); - return Optional.of(new PNPResults(best, error[0])); + return new PNPResults(best, error[0]); } catch (Exception e) { System.err.println("SolvePNP_SQPNP failed!"); e.printStackTrace(); - return Optional.empty(); + return new PNPResults(); } } } diff --git a/photon-lib/src/main/java/org/photonvision/estimation/PNPResults.java b/photon-lib/src/main/java/org/photonvision/estimation/PNPResults.java index 6a88a4d5f5..0f851c1de4 100644 --- a/photon-lib/src/main/java/org/photonvision/estimation/PNPResults.java +++ b/photon-lib/src/main/java/org/photonvision/estimation/PNPResults.java @@ -36,7 +36,11 @@ * method. */ public class PNPResults { + /** If this result is valid. A false value indicates there was an error in estimation, and this result should not be used. */ + public final boolean isPresent; + /** The best-fit transform. 
The coordinate frame of this transform depends on the method which gave this result. */ public final Transform3d best; + /** Reprojection error of the best solution, in pixels */ public final double bestReprojErr; /** @@ -51,8 +55,14 @@ public class PNPResults { /** If no alternate solution is found, this is 0 */ public final double ambiguity; + /** An empty (invalid) result. */ public PNPResults() { - this(new Transform3d(), new Transform3d(), 0, 0, 0); + this.isPresent = false; + this.best = new Transform3d(); + this.alt = new Transform3d(); + this.ambiguity = 0; + this.bestReprojErr = 0; + this.altReprojErr = 0; } public PNPResults(Transform3d best, double bestReprojErr) { @@ -65,6 +75,7 @@ public PNPResults( double ambiguity, double bestReprojErr, double altReprojErr) { + this.isPresent = true; this.best = best; this.alt = alt; this.ambiguity = ambiguity; diff --git a/photon-lib/src/main/java/org/photonvision/estimation/VisionEstimation.java b/photon-lib/src/main/java/org/photonvision/estimation/VisionEstimation.java index 3df96f0642..8bc6f93436 100644 --- a/photon-lib/src/main/java/org/photonvision/estimation/VisionEstimation.java +++ b/photon-lib/src/main/java/org/photonvision/estimation/VisionEstimation.java @@ -34,7 +34,6 @@ import java.util.ArrayList; import java.util.List; import java.util.Objects; -import java.util.Optional; import java.util.stream.Collectors; import org.photonvision.targeting.PhotonTrackedTarget; import org.photonvision.targeting.TargetCorner; @@ -69,10 +68,9 @@ public static List getVisibleLayoutTags( * @param distCoeffs The camera distortion matrix in standard opencv form * @param visTags The visible tags reported by PV * @param tagLayout The known tag layout on the field - * @return The transformation that maps the field origin to the camera pose, or an empty Optional - * if estimation fails. + * @return The transformation that maps the field origin to the camera pose. Ensure the {@link PNPResults} are present before utilizing them. 
*/ - public static Optional estimateCamPosePNP( + public static PNPResults estimateCamPosePNP( Matrix cameraMatrix, Matrix distCoeffs, List visTags, @@ -81,51 +79,47 @@ public static Optional estimateCamPosePNP( || visTags == null || tagLayout.getTags().size() == 0 || visTags.size() == 0) { - return Optional.empty(); + return new PNPResults(); } var corners = new ArrayList(); for (var tag : visTags) corners.addAll(tag.getDetectedCorners()); var knownTags = getVisibleLayoutTags(visTags, tagLayout); if (knownTags.size() == 0 || corners.size() == 0 || corners.size() % 4 != 0) { - return Optional.empty(); + return new PNPResults(); } // single-tag pnp if (visTags.size() == 1) { - var camToTag = - OpenCVHelp.solvePNP_SQUARE( - cameraMatrix, distCoeffs, TargetModel.kTag16h5.vertices, corners) - .orElse(null); - if (camToTag == null) return Optional.empty(); + var camToTag = OpenCVHelp.solvePNP_SQUARE(cameraMatrix, distCoeffs, TargetModel.kTag16h5.vertices, corners); + if (!camToTag.isPresent) return new PNPResults(); var bestPose = knownTags.get(0).pose.transformBy(camToTag.best.inverse()); var altPose = new Pose3d(); if (camToTag.ambiguity != 0) altPose = knownTags.get(0).pose.transformBy(camToTag.alt.inverse()); var o = new Pose3d(); - return Optional.of( - new PNPResults( - new Transform3d(o, bestPose), - new Transform3d(o, altPose), - camToTag.ambiguity, - camToTag.bestReprojErr, - camToTag.altReprojErr)); + return new PNPResults( + new Transform3d(o, bestPose), + new Transform3d(o, altPose), + camToTag.ambiguity, + camToTag.bestReprojErr, + camToTag.altReprojErr + ); } // multi-tag pnp else { var objectTrls = new ArrayList(); for (var tag : knownTags) objectTrls.addAll(TargetModel.kTag16h5.getFieldVertices(tag.pose)); - var camToOrigin = - OpenCVHelp.solvePNP_SQPNP(cameraMatrix, distCoeffs, objectTrls, corners).orElse(null); - if (camToOrigin == null) return Optional.empty(); - return Optional.of( - new PNPResults( - camToOrigin.best.inverse(), - camToOrigin.alt.inverse(), - camToOrigin.ambiguity, - camToOrigin.bestReprojErr, - camToOrigin.altReprojErr)); + var camToOrigin = OpenCVHelp.solvePNP_SQPNP(cameraMatrix, distCoeffs, objectTrls, corners); + if (!camToOrigin.isPresent) return new PNPResults(); + return new PNPResults( + camToOrigin.best.inverse(), + camToOrigin.alt.inverse(), + camToOrigin.ambiguity, + camToOrigin.bestReprojErr, + camToOrigin.altReprojErr + ); } } } diff --git a/photon-lib/src/main/java/org/photonvision/simulation/PhotonCameraSim.java b/photon-lib/src/main/java/org/photonvision/simulation/PhotonCameraSim.java index ad23dd8127..264f70275a 100644 --- a/photon-lib/src/main/java/org/photonvision/simulation/PhotonCameraSim.java +++ b/photon-lib/src/main/java/org/photonvision/simulation/PhotonCameraSim.java @@ -355,9 +355,8 @@ public PhotonPipelineResult process( prop.getIntrinsics(), prop.getDistCoeffs(), tgt.getModel().vertices, - noisyTargetCorners) - .orElse(null); - if (pnpSim == null) continue; + noisyTargetCorners); + if (!pnpSim.isPresent) continue; centerRot = prop.getPixelRot( OpenCVHelp.projectPoints( diff --git a/photon-lib/src/test/java/org/photonvision/OpenCVTest.java b/photon-lib/src/test/java/org/photonvision/OpenCVTest.java index ae52dde639..999149d42a 100644 --- a/photon-lib/src/test/java/org/photonvision/OpenCVTest.java +++ b/photon-lib/src/test/java/org/photonvision/OpenCVTest.java @@ -197,8 +197,7 @@ public void testSolvePNP_SQUARE() { prop.getIntrinsics(), prop.getDistCoeffs(), target.getModel().vertices, - targetCorners) - .orElseThrow(); + 
targetCorners); var estRelation = new CameraTargetRelation(cameraPose, cameraPose.plus(pnpSim.best)); assertSame(actualRelation.camToTarg, estRelation.camToTarg); } @@ -229,8 +228,7 @@ public void testSolvePNP_SQPNP() { prop.getIntrinsics(), prop.getDistCoeffs(), target.getModel().vertices, - targetCorners) - .orElseThrow(); + targetCorners); var estRelation = new CameraTargetRelation(cameraPose, cameraPose.plus(pnpSim.best)); assertSame(actualRelation.camToTarg, estRelation.camToTarg); } From 4364ac267df338da0b45287cfef81a8fab44eb46 Mon Sep 17 00:00:00 2001 From: amquake Date: Sun, 23 Jul 2023 16:20:38 -0700 Subject: [PATCH 08/46] publish sim intrinsics/distortion to camera --- .../java/org/photonvision/simulation/PhotonCameraSim.java | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/photon-lib/src/main/java/org/photonvision/simulation/PhotonCameraSim.java b/photon-lib/src/main/java/org/photonvision/simulation/PhotonCameraSim.java index 264f70275a..d68ffd93f7 100644 --- a/photon-lib/src/main/java/org/photonvision/simulation/PhotonCameraSim.java +++ b/photon-lib/src/main/java/org/photonvision/simulation/PhotonCameraSim.java @@ -70,7 +70,7 @@ public class PhotonCameraSim implements AutoCloseable { private long nextNTEntryTime = WPIUtilJNI.now(); private double maxSightRangeMeters = Double.MAX_VALUE; - private static final double kDefaultMinAreaPx = 90; + private static final double kDefaultMinAreaPx = 100; private double minTargetAreaPercent; private PhotonTargetSortMode sortMode = PhotonTargetSortMode.Largest; @@ -460,6 +460,8 @@ public void submitProcessedFrame(PhotonPipelineResult result, long receiveTimest ts.targetPoseEntry.set(poseData, receiveTimestamp); } + ts.cameraIntrinsicsPublisher.set(prop.getIntrinsics().getData(), receiveTimestamp); + ts.cameraDistortionPublisher.set(prop.getDistCoeffs().getData(), receiveTimestamp); ts.heartbeatPublisher.set(heartbeatCounter++, receiveTimestamp); } } From 1f03b961975324b3ce8aa2fb6a9334b103e08e97 Mon Sep 17 00:00:00 2001 From: amquake Date: Sun, 23 Jul 2023 16:24:46 -0700 Subject: [PATCH 09/46] spotless --- .../org/photonvision/PhotonPoseEstimator.java | 2 +- .../photonvision/estimation/PNPResults.java | 12 ++++++-- .../estimation/VisionEstimation.java | 29 ++++++++++--------- .../simulation/PhotonCameraSim.java | 8 ++--- .../java/org/photonvision/OpenCVTest.java | 10 ++----- 5 files changed, 32 insertions(+), 29 deletions(-) diff --git a/photon-lib/src/main/java/org/photonvision/PhotonPoseEstimator.java b/photon-lib/src/main/java/org/photonvision/PhotonPoseEstimator.java index 1e2db3d24a..8bbdaf5744 100644 --- a/photon-lib/src/main/java/org/photonvision/PhotonPoseEstimator.java +++ b/photon-lib/src/main/java/org/photonvision/PhotonPoseEstimator.java @@ -370,7 +370,7 @@ private Optional multiTagPNPStrategy( var pnpResults = VisionEstimation.estimateCamPosePNP( - cameraMatrixOpt.get(), distCoeffsOpt.get(), result.getTargets(), fieldTags); + cameraMatrixOpt.get(), distCoeffsOpt.get(), result.getTargets(), fieldTags); // try fallback strategy if solvePNP fails for some reason if (!pnpResults.isPresent) return update(result, cameraMatrixOpt, distCoeffsOpt, this.multiTagFallbackStrategy); diff --git a/photon-lib/src/main/java/org/photonvision/estimation/PNPResults.java b/photon-lib/src/main/java/org/photonvision/estimation/PNPResults.java index 0f851c1de4..89adf94bcf 100644 --- a/photon-lib/src/main/java/org/photonvision/estimation/PNPResults.java +++ b/photon-lib/src/main/java/org/photonvision/estimation/PNPResults.java @@ 
-36,10 +36,18 @@ * method. */ public class PNPResults { - /** If this result is valid. A false value indicates there was an error in estimation, and this result should not be used. */ + /** + * If this result is valid. A false value indicates there was an error in estimation, and this + * result should not be used. + */ public final boolean isPresent; - /** The best-fit transform. The coordinate frame of this transform depends on the method which gave this result. */ + + /** + * The best-fit transform. The coordinate frame of this transform depends on the method which gave + * this result. + */ public final Transform3d best; + /** Reprojection error of the best solution, in pixels */ public final double bestReprojErr; diff --git a/photon-lib/src/main/java/org/photonvision/estimation/VisionEstimation.java b/photon-lib/src/main/java/org/photonvision/estimation/VisionEstimation.java index 8bc6f93436..dad9eed5b0 100644 --- a/photon-lib/src/main/java/org/photonvision/estimation/VisionEstimation.java +++ b/photon-lib/src/main/java/org/photonvision/estimation/VisionEstimation.java @@ -68,7 +68,8 @@ public static List getVisibleLayoutTags( * @param distCoeffs The camera distortion matrix in standard opencv form * @param visTags The visible tags reported by PV * @param tagLayout The known tag layout on the field - * @return The transformation that maps the field origin to the camera pose. Ensure the {@link PNPResults} are present before utilizing them. + * @return The transformation that maps the field origin to the camera pose. Ensure the {@link + * PNPResults} are present before utilizing them. */ public static PNPResults estimateCamPosePNP( Matrix cameraMatrix, @@ -91,7 +92,9 @@ public static PNPResults estimateCamPosePNP( // single-tag pnp if (visTags.size() == 1) { - var camToTag = OpenCVHelp.solvePNP_SQUARE(cameraMatrix, distCoeffs, TargetModel.kTag16h5.vertices, corners); + var camToTag = + OpenCVHelp.solvePNP_SQUARE( + cameraMatrix, distCoeffs, TargetModel.kTag16h5.vertices, corners); if (!camToTag.isPresent) return new PNPResults(); var bestPose = knownTags.get(0).pose.transformBy(camToTag.best.inverse()); var altPose = new Pose3d(); @@ -100,12 +103,11 @@ public static PNPResults estimateCamPosePNP( var o = new Pose3d(); return new PNPResults( - new Transform3d(o, bestPose), - new Transform3d(o, altPose), - camToTag.ambiguity, - camToTag.bestReprojErr, - camToTag.altReprojErr - ); + new Transform3d(o, bestPose), + new Transform3d(o, altPose), + camToTag.ambiguity, + camToTag.bestReprojErr, + camToTag.altReprojErr); } // multi-tag pnp else { @@ -114,12 +116,11 @@ public static PNPResults estimateCamPosePNP( var camToOrigin = OpenCVHelp.solvePNP_SQPNP(cameraMatrix, distCoeffs, objectTrls, corners); if (!camToOrigin.isPresent) return new PNPResults(); return new PNPResults( - camToOrigin.best.inverse(), - camToOrigin.alt.inverse(), - camToOrigin.ambiguity, - camToOrigin.bestReprojErr, - camToOrigin.altReprojErr - ); + camToOrigin.best.inverse(), + camToOrigin.alt.inverse(), + camToOrigin.ambiguity, + camToOrigin.bestReprojErr, + camToOrigin.altReprojErr); } } } diff --git a/photon-lib/src/main/java/org/photonvision/simulation/PhotonCameraSim.java b/photon-lib/src/main/java/org/photonvision/simulation/PhotonCameraSim.java index d68ffd93f7..81a20d0d36 100644 --- a/photon-lib/src/main/java/org/photonvision/simulation/PhotonCameraSim.java +++ b/photon-lib/src/main/java/org/photonvision/simulation/PhotonCameraSim.java @@ -352,10 +352,10 @@ public PhotonPipelineResult process( if (tgt.fiducialID >= 
0 && tgt.getFieldVertices().size() == 4) { // single AprilTag solvePNP pnpSim = OpenCVHelp.solvePNP_SQUARE( - prop.getIntrinsics(), - prop.getDistCoeffs(), - tgt.getModel().vertices, - noisyTargetCorners); + prop.getIntrinsics(), + prop.getDistCoeffs(), + tgt.getModel().vertices, + noisyTargetCorners); if (!pnpSim.isPresent) continue; centerRot = prop.getPixelRot( diff --git a/photon-lib/src/test/java/org/photonvision/OpenCVTest.java b/photon-lib/src/test/java/org/photonvision/OpenCVTest.java index 999149d42a..594439e847 100644 --- a/photon-lib/src/test/java/org/photonvision/OpenCVTest.java +++ b/photon-lib/src/test/java/org/photonvision/OpenCVTest.java @@ -194,10 +194,7 @@ public void testSolvePNP_SQUARE() { prop.getIntrinsics(), prop.getDistCoeffs(), cameraPose, target.getFieldVertices()); var pnpSim = OpenCVHelp.solvePNP_SQUARE( - prop.getIntrinsics(), - prop.getDistCoeffs(), - target.getModel().vertices, - targetCorners); + prop.getIntrinsics(), prop.getDistCoeffs(), target.getModel().vertices, targetCorners); var estRelation = new CameraTargetRelation(cameraPose, cameraPose.plus(pnpSim.best)); assertSame(actualRelation.camToTarg, estRelation.camToTarg); } @@ -225,10 +222,7 @@ public void testSolvePNP_SQPNP() { prop.getIntrinsics(), prop.getDistCoeffs(), cameraPose, target.getFieldVertices()); var pnpSim = OpenCVHelp.solvePNP_SQPNP( - prop.getIntrinsics(), - prop.getDistCoeffs(), - target.getModel().vertices, - targetCorners); + prop.getIntrinsics(), prop.getDistCoeffs(), target.getModel().vertices, targetCorners); var estRelation = new CameraTargetRelation(cameraPose, cameraPose.plus(pnpSim.best)); assertSame(actualRelation.camToTarg, estRelation.camToTarg); } From 2e851a037112f84adef274aa7b48355c9b43699f Mon Sep 17 00:00:00 2001 From: amquake Date: Sun, 13 Aug 2023 00:42:02 -0700 Subject: [PATCH 10/46] remember to commit, me --- .../photonvision/estimation/OpenCVHelp.java | 83 +++-- .../estimation/RotTrlTransform3d.java | 150 +++++++-- .../photonvision/estimation/TargetModel.java | 68 +++- .../simulation/PhotonCameraSim.java | 136 ++++++-- .../simulation/SimCameraProperties.java | 183 ++++++++++- .../photonvision/simulation/VideoSimUtil.java | 297 ++++++++++++++++-- .../simulation/VisionSystemSim.java | 45 ++- .../java/org/photonvision/OpenCVTest.java | 44 ++- 8 files changed, 863 insertions(+), 143 deletions(-) diff --git a/photon-lib/src/main/java/org/photonvision/estimation/OpenCVHelp.java b/photon-lib/src/main/java/org/photonvision/estimation/OpenCVHelp.java index 944bc0708a..51b3e5df97 100644 --- a/photon-lib/src/main/java/org/photonvision/estimation/OpenCVHelp.java +++ b/photon-lib/src/main/java/org/photonvision/estimation/OpenCVHelp.java @@ -28,7 +28,6 @@ import edu.wpi.first.math.Nat; import edu.wpi.first.math.Num; import edu.wpi.first.math.Vector; -import edu.wpi.first.math.geometry.CoordinateSystem; import edu.wpi.first.math.geometry.Pose3d; import edu.wpi.first.math.geometry.Rotation3d; import edu.wpi.first.math.geometry.Transform3d; @@ -56,6 +55,10 @@ import org.photonvision.targeting.TargetCorner; public final class OpenCVHelp { + + private static RotTrlTransform3d NWU_TO_EDN; + private static RotTrlTransform3d EDN_TO_NWU; + static { try { var loader = @@ -65,6 +68,17 @@ public final class OpenCVHelp { } catch (Exception e) { throw new RuntimeException("Failed to load native libraries!", e); } + + NWU_TO_EDN = new RotTrlTransform3d(new Rotation3d(Matrix.mat(Nat.N3(), Nat.N3()).fill( + 0, -1, 0, + 0, 0, -1, + 1, 0, 0 + )), new Translation3d()); + EDN_TO_NWU = new 
RotTrlTransform3d(new Rotation3d(Matrix.mat(Nat.N3(), Nat.N3()).fill( + 0, 0, 1, + -1, 0, 0, + 0, -1, 0 + )), new Translation3d()); } public static MatOfDouble matrixToMat(SimpleMatrix matrix) { @@ -94,7 +108,7 @@ public static MatOfPoint3f translationToTvec(Translation3d... translations) { Point3[] points = new Point3[translations.length]; for (int i = 0; i < translations.length; i++) { var trl = - CoordinateSystem.convert(translations[i], CoordinateSystem.NWU(), CoordinateSystem.EDN()); + translationNWUtoEDN(translations[i]); points[i] = new Point3(trl.getX(), trl.getY(), trl.getZ()); } return new MatOfPoint3f(points); @@ -112,10 +126,9 @@ public static Translation3d tvecToTranslation(Mat tvecInput) { tvecInput.convertTo(wrapped, CvType.CV_32F); wrapped.get(0, 0, data); wrapped.release(); - return CoordinateSystem.convert( - new Translation3d(data[0], data[1], data[2]), - CoordinateSystem.EDN(), - CoordinateSystem.NWU()); + return translationEDNtoNWU( + new Translation3d(data[0], data[1], data[2]) + ); } /** @@ -221,24 +234,37 @@ public static List reorderCircular(List elements, boolean backwards, i return reordered; } + //TODO: RotTrlTransform3d removal awaiting Rotation3d performance improvements /** - * Convert a rotation from EDN to NWU. For example, if you have a rotation X,Y,Z {1, 0, 0} in EDN, - * this would be XYZ {0, -1, 0} in NWU. + * Convert a rotation delta from EDN to NWU. For example, if you have a rotation X,Y,Z {1, 0, 0} in EDN, + * this would be {0, -1, 0} in NWU. */ private static Rotation3d rotationEDNtoNWU(Rotation3d rot) { - return CoordinateSystem.convert( - new Rotation3d(), CoordinateSystem.NWU(), CoordinateSystem.EDN()) - .plus(CoordinateSystem.convert(rot, CoordinateSystem.EDN(), CoordinateSystem.NWU())); + return new RotTrlTransform3d(EDN_TO_NWU.apply(rot), new Translation3d()).apply(EDN_TO_NWU.inverse().getRotation()); } /** - * Convert a rotation from EDN to NWU. For example, if you have a rotation X,Y,Z {1, 0, 0} in EDN, - * this would be XYZ {0, -1, 0} in NWU. + * Convert a rotation delta from NWU to EDN. For example, if you have a rotation X,Y,Z {1, 0, 0} in NWU, + * this would be {0, 0, 1} in EDN. */ private static Rotation3d rotationNWUtoEDN(Rotation3d rot) { - return CoordinateSystem.convert( - new Rotation3d(), CoordinateSystem.EDN(), CoordinateSystem.NWU()) - .plus(CoordinateSystem.convert(rot, CoordinateSystem.NWU(), CoordinateSystem.EDN())); + return new RotTrlTransform3d(NWU_TO_EDN.apply(rot), new Translation3d()).apply(NWU_TO_EDN.inverse().getRotation()); + } + + /** + * Convert a translation from EDN to NWU. For example, if you have a translation X,Y,Z {1, 0, 0} in EDN, + * this would be {0, -1, 0} in NWU. + */ + private static Translation3d translationEDNtoNWU(Translation3d trl) { + return EDN_TO_NWU.apply(trl); + } + + /** + * Convert a translation from NWU to EDN. For example, if you have a translation X,Y,Z {1, 0, 0} in NWU, + * this would be {0, 0, 1} in EDN. + */ + private static Translation3d translationNWUtoEDN(Translation3d trl) { + return NWU_TO_EDN.apply(trl); } /** @@ -247,21 +273,21 @@ private static Rotation3d rotationNWUtoEDN(Rotation3d rot) { * * @param cameraMatrix the camera intrinsics matrix in standard opencv form * @param distCoeffs the camera distortion matrix in standard opencv form - * @param camPose The current camera pose in the 3d world + * @param camRt The change in basis from world coordinates to camera coordinates. See + * {@link RotTrlTransform3d#makeRelativeTo(Pose3d)}. 
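Since several methods in this patch now take this world-to-camera basis change instead of a camera pose, here is a minimal usage sketch; the camera pose, the sample points, and the default SimCameraProperties calibration are illustrative assumptions, and the OpenCV natives must be loadable for OpenCVHelp to initialize.

import edu.wpi.first.math.geometry.Pose3d;
import edu.wpi.first.math.geometry.Rotation3d;
import edu.wpi.first.math.geometry.Translation3d;
import java.util.List;
import org.photonvision.estimation.OpenCVHelp;
import org.photonvision.estimation.RotTrlTransform3d;
import org.photonvision.simulation.SimCameraProperties;

class ProjectPointsSketch {
    public static void main(String[] args) {
        var prop = new SimCameraProperties(); // default (perfect, distortion-free) calibration
        var camPose = new Pose3d(0, 0, 1, new Rotation3d()); // example pose: 1m up, looking down +X
        var camRt = RotTrlTransform3d.makeRelativeTo(camPose); // world -> camera coordinates
        var worldPts = List.of(new Translation3d(3, 0.5, 1), new Translation3d(3, -0.5, 1));
        var pixels =
                OpenCVHelp.projectPoints(prop.getIntrinsics(), prop.getDistCoeffs(), camRt, worldPts);
        pixels.forEach(c -> System.out.println(c.x + ", " + c.y));
    }
}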
* @param objectTranslations The 3d points to be projected * @return The 2d points in pixels which correspond to the image of the 3d points on the camera */ public static List projectPoints( Matrix cameraMatrix, Matrix distCoeffs, - Pose3d camPose, + RotTrlTransform3d camRt, List objectTranslations) { // translate to opencv classes var objectPoints = translationToTvec(objectTranslations.toArray(new Translation3d[0])); // opencv rvec/tvec describe a change in basis from world to camera - var basisChange = RotTrlTransform3d.makeRelativeTo(camPose); - var rvec = rotationToRvec(basisChange.getRotation()); - var tvec = translationToTvec(basisChange.getTranslation()); + var rvec = rotationToRvec(camRt.getRotation()); + var tvec = translationToTvec(camRt.getTranslation()); var cameraMatrixMat = matrixToMat(cameraMatrix.getStorage()); var distCoeffsMat = matrixToMat(distCoeffs.getStorage()); var imagePoints = new MatOfPoint2f(); @@ -344,13 +370,11 @@ public static RotatedRect getMinAreaRect(List corners) { } /** - * Get the area in pixels of this target's contour. It's important to note that this may be - * different from the area of the bounding rectangle around the contour. - * - * @param corners The corners defining this contour - * @return Area in pixels (units of corner x/y) + * Gets the convex hull contour (the outline) of a list of points. + * @param corners + * @return The subset of points defining the contour */ - public static double getContourAreaPx(List corners) { + public static MatOfPoint getConvexHull(List corners) { var temp = targetCornersToMat(corners); var corn = new MatOfPoint(temp.toArray()); temp.release(); @@ -366,10 +390,7 @@ public static double getContourAreaPx(List corners) { points[i] = tempPoints[indices[i]]; } corn.fromArray(points); - // calculate area of the (convex hull) contour - double area = Imgproc.contourArea(corn); - corn.release(); - return area; + return corn; } /** diff --git a/photon-lib/src/main/java/org/photonvision/estimation/RotTrlTransform3d.java b/photon-lib/src/main/java/org/photonvision/estimation/RotTrlTransform3d.java index 41c3e36d97..8cd0c1372f 100644 --- a/photon-lib/src/main/java/org/photonvision/estimation/RotTrlTransform3d.java +++ b/photon-lib/src/main/java/org/photonvision/estimation/RotTrlTransform3d.java @@ -25,6 +25,7 @@ package org.photonvision.estimation; import edu.wpi.first.math.geometry.Pose3d; +import edu.wpi.first.math.geometry.Quaternion; import edu.wpi.first.math.geometry.Rotation3d; import edu.wpi.first.math.geometry.Transform3d; import edu.wpi.first.math.geometry.Translation3d; @@ -37,16 +38,49 @@ public class RotTrlTransform3d { private final Translation3d trl; private final Rotation3d rot; + //TODO: removal awaiting wpilib Rotation3d performance improvements + private double m_w; + private double m_x; + private double m_y; + private double m_z; - public RotTrlTransform3d() { - this(new Rotation3d(), new Translation3d()); + /** + * A rotation-translation transformation. + * + *

    Applying this RotTrlTransform3d to poses will preserve their current origin-to-pose transform + * as if the origin was transformed by these components instead. + * + * @param rot The rotation component + * @param trl The translation component + */ + public RotTrlTransform3d(Rotation3d rot, Translation3d trl) { + this.rot = rot; + var quat = rot.getQuaternion(); + m_w = quat.getW(); + m_x = quat.getX(); + m_y = quat.getY(); + m_z = quat.getZ(); + this.trl = trl; + } + + public RotTrlTransform3d(Pose3d initial, Pose3d last) { + // this.rot = last.getRotation().minus(initial.getRotation()); + // this.trl = last.getTranslation().minus(initial.getTranslation().rotateBy(rot)); + + var quat = initial.getRotation().getQuaternion(); + m_w = quat.getW(); + m_x = quat.getX(); + m_y = quat.getY(); + m_z = quat.getZ(); + this.rot = invrotate(last.getRotation()); + this.trl = last.getTranslation().minus(rotate(initial.getTranslation())); } /** * Creates a rotation-translation transformation from a Transform3d. * - *

    Applying this transformation to poses will preserve their current origin-to-pose transform - * as if the origin was transformed by these components. + *

    Applying this RotTrlTransform3d to poses will preserve their current origin-to-pose transform + * as if the origin was transformed by trf instead. * * @param trf The origin transformation */ @@ -54,18 +88,40 @@ public RotTrlTransform3d(Transform3d trf) { this(trf.getRotation(), trf.getTranslation()); } - /** - * A rotation-translation transformation. - * - *

    Applying this transformation to poses will preserve their current origin-to-pose transform - * as if the origin was transformed by these components. - * - * @param rot The rotation component - * @param trl The translation component - */ - public RotTrlTransform3d(Rotation3d rot, Translation3d trl) { - this.rot = rot; - this.trl = trl; + public RotTrlTransform3d() { + this(new Rotation3d(), new Translation3d()); + } + + private Translation3d rotate(Translation3d otrl) { + final var p = new Quaternion(0.0, otrl.getX(), otrl.getY(), otrl.getZ()); + final var qprime = times(times(p), new Quaternion(m_w, -m_x, -m_y, -m_z)); + return new Translation3d(qprime.getX(), qprime.getY(), qprime.getZ()); + } + + private Translation3d invrotate(Translation3d otrl) { + m_x = -m_x; + m_y = -m_y; + m_z = -m_z; + var result = rotate(otrl); + m_x = -m_x; + m_y = -m_y; + m_z = -m_z; + return result; + } + + private Rotation3d rotate(Rotation3d orot) { + return new Rotation3d(times(orot.getQuaternion())); + } + + private Rotation3d invrotate(Rotation3d orot) { + m_x = -m_x; + m_y = -m_y; + m_z = -m_z; + var result = rotate(orot); + m_x = -m_x; + m_y = -m_y; + m_z = -m_z; + return result; } /** @@ -80,9 +136,12 @@ public static RotTrlTransform3d makeRelativeTo(Pose3d pose) { /** The inverse of this transformation. Applying the inverse will "undo" this transformation. */ public RotTrlTransform3d inverse() { - var inverseRot = rot.unaryMinus(); - var inverseTrl = trl.rotateBy(inverseRot).unaryMinus(); - return new RotTrlTransform3d(inverseRot, inverseTrl); + // var inverseRot = rot.unaryMinus(); + // var inverseTrl = trl.rotateBy(inverseRot).unaryMinus(); + // return new RotTrlTransform3d(inverseRot, inverseTrl); + + var inverseTrl = invrotate(trl).unaryMinus(); + return new RotTrlTransform3d(new Rotation3d(new Quaternion(m_w, -m_x, -m_y, -m_z)), inverseTrl); } /** This transformation as a Transform3d (as if of the origin) */ @@ -101,19 +160,66 @@ public Rotation3d getRotation() { } public Translation3d apply(Translation3d trl) { - return apply(new Pose3d(trl, new Rotation3d())).getTranslation(); + // return trl.rotateBy(rot).plus(this.trl); + return rotate(trl).plus(this.trl); } - ; public List applyTrls(List trls) { return trls.stream().map(this::apply).collect(Collectors.toList()); } + public Rotation3d apply(Rotation3d rot) { + return rotate(rot); + } + + public List applyRots(List rots) { + return rots.stream().map(this::apply).collect(Collectors.toList()); + } + public Pose3d apply(Pose3d pose) { - return new Pose3d(pose.getTranslation().rotateBy(rot).plus(trl), pose.getRotation().plus(rot)); + // return new Pose3d(pose.getTranslation().rotateBy(rot).plus(trl), pose.getRotation().plus(rot)); + return new Pose3d(apply(pose.getTranslation()), apply(pose.getRotation())); } public List applyPoses(List poses) { return poses.stream().map(this::apply).collect(Collectors.toList()); } + + //TODO: removal awaiting wpilib Rotation3d performance improvements + private Quaternion times(Quaternion other) { + final double o_w = other.getW(); + final double o_x = other.getX(); + final double o_y = other.getY(); + final double o_z = other.getZ(); + return times(m_w, m_x, m_y, m_z, o_w, o_x, o_y, o_z); + } + + private static Quaternion times(Quaternion a, Quaternion b) { + final double m_w = a.getW(); + final double m_x = a.getX(); + final double m_y = a.getY(); + final double m_z = a.getZ(); + final double o_w = b.getW(); + final double o_x = b.getX(); + final double o_y = b.getY(); + final double o_z = b.getZ(); + 
return times(m_w, m_x, m_y, m_z, o_w, o_x, o_y, o_z); + } + + private static Quaternion times(double m_w, double m_x, double m_y, double m_z, + double o_w, double o_x, double o_y, double o_z) { + // https://en.wikipedia.org/wiki/Quaternion#Scalar_and_vector_parts + + // v₁ x v₂ + final double cross_x = m_y * o_z - o_y * m_z; + final double cross_y = o_x * m_z - m_x * o_z; + final double cross_z = m_x * o_y - o_x * m_y; + + // v = w₁v₂ + w₂v₁ + v₁ x v₂ + final double new_x = o_x * m_w + (m_x * o_w) + cross_x; + final double new_y = o_y * m_w + (m_y * o_w) + cross_y; + final double new_z = o_z * m_w + (m_z * o_w) + cross_z; + + return new Quaternion(m_w * o_w - (m_x*o_x + m_y*o_y + m_z*o_z), new_x, new_y, new_z); + } } diff --git a/photon-lib/src/main/java/org/photonvision/estimation/TargetModel.java b/photon-lib/src/main/java/org/photonvision/estimation/TargetModel.java index 52693afb55..b5dfa4ec73 100644 --- a/photon-lib/src/main/java/org/photonvision/estimation/TargetModel.java +++ b/photon-lib/src/main/java/org/photonvision/estimation/TargetModel.java @@ -25,8 +25,8 @@ package org.photonvision.estimation; import edu.wpi.first.math.geometry.Pose3d; +import edu.wpi.first.math.geometry.Rotation2d; import edu.wpi.first.math.geometry.Rotation3d; -import edu.wpi.first.math.geometry.Transform3d; import edu.wpi.first.math.geometry.Translation3d; import edu.wpi.first.math.util.Units; import java.util.ArrayList; @@ -36,8 +36,8 @@ /** Describes the 3d model of a target. */ public class TargetModel { /** - * Translations of this target's vertices relative to its pose. If this target is spherical, this - * list has one translation with x == radius. + * Translations of this target's vertices relative to its pose. Rectangular and spherical targets + * will have four vertices. See their respective constructors for more info. */ public final List vertices; @@ -47,7 +47,16 @@ public class TargetModel { public static final TargetModel kTag16h5 = new TargetModel(Units.inchesToMeters(6), Units.inchesToMeters(6)); - /** Creates a rectangular, planar target model given the width and height. */ + /** + * Creates a rectangular, planar target model given the width and height. + * The model has four vertices: + *

+ *   • Point 0: [0, -width/2, -height/2]
+ *   • Point 1: [0, width/2, -height/2]
+ *   • Point 2: [0, width/2, height/2]
+ *   • Point 3: [0, -width/2, height/2]
    + */ public TargetModel(double widthMeters, double heightMeters) { this.vertices = List.of( @@ -61,18 +70,35 @@ public TargetModel(double widthMeters, double heightMeters) { } /** - * Creates a spherical target model which has similar dimensions when viewed from any angle. This - * model will only have one vertex which has x == radius. + * Creates a spherical target model which has similar dimensions regardless of its rotation. + * This model has four vertices: + *
+ *   • Point 0: [0, -radius, -radius]
+ *   • Point 1: [0, radius, -radius]
+ *   • Point 2: [0, radius, radius]
+ *   • Point 3: [0, -radius, radius]
    + * + * Q: Why these vertices? A: This target should be oriented to the camera every frame, + * much like a sprite/decal, and these vertices represent the corners of the rectangle in which + * the ellipse is inscribed. */ public TargetModel(double diameterMeters) { - this.vertices = List.of(new Translation3d(diameterMeters / 2.0, 0, 0)); + double radius = diameterMeters / 2.0; + this.vertices = + List.of( + new Translation3d(0, -radius, -radius), + new Translation3d(0, radius, -radius), + new Translation3d(0, radius, radius), + new Translation3d(0, -radius, radius)); this.isPlanar = false; this.isSpherical = true; } /** * Creates a target model from arbitrary 3d vertices. Automatically determines if the given - * vertices are planar(x == 0). More than 2 vertices must be given. + * vertices are planar(x == 0). More than 2 vertices must be given. If this is a planar + * model, the vertices should define a non-intersecting contour. * * @param vertices Translations representing the vertices of this target model relative to its * pose. @@ -95,15 +121,35 @@ public TargetModel(List vertices) { /** * This target's vertices offset from its field pose. * - *

    Note: If this target is spherical, only one vertex radius meters in front of the pose is - * returned. + *

    Note: If this target is spherical, use {@link #getOrientedPose(Translation3d, Translation3d)} + * with this method. */ public List getFieldVertices(Pose3d targetPose) { + var basisChange = new RotTrlTransform3d(targetPose.getRotation(), targetPose.getTranslation()); return vertices.stream() - .map(t -> targetPose.plus(new Transform3d(t, new Rotation3d())).getTranslation()) + .map(t -> basisChange.apply(t)) .collect(Collectors.toList()); } + /** + * Returns a Pose3d with the given target translation oriented (with its relative x-axis aligned) + * to the camera translation. This is used for spherical targets which should not have their projection + * change regardless of their own rotation. + * + * @param tgtTrl This target's translation + * @param cameraTrl Camera's translation + * @return This target's pose oriented to the camera + */ + public Pose3d getOrientedPose(Translation3d tgtTrl, Translation3d cameraTrl) { + var relCam = cameraTrl.minus(tgtTrl); + var orientToCam = new Rotation3d( + 0, + new Rotation2d(Math.hypot(relCam.getX(), relCam.getY()), -relCam.getZ()).getRadians(), + new Rotation2d(relCam.getX(), relCam.getY()).getRadians() + ); + return new Pose3d(tgtTrl, orientToCam); + } + @Override public boolean equals(Object obj) { if (this == obj) return true; diff --git a/photon-lib/src/main/java/org/photonvision/simulation/PhotonCameraSim.java b/photon-lib/src/main/java/org/photonvision/simulation/PhotonCameraSim.java index 81a20d0d36..7af5288d44 100644 --- a/photon-lib/src/main/java/org/photonvision/simulation/PhotonCameraSim.java +++ b/photon-lib/src/main/java/org/photonvision/simulation/PhotonCameraSim.java @@ -31,15 +31,23 @@ import edu.wpi.first.math.MathUtil; import edu.wpi.first.math.Pair; import edu.wpi.first.math.geometry.Pose3d; +import edu.wpi.first.math.geometry.Rotation2d; +import edu.wpi.first.math.geometry.Rotation3d; import edu.wpi.first.util.RuntimeLoader; import edu.wpi.first.util.WPIUtilJNI; + import java.util.ArrayList; import java.util.List; import java.util.Optional; import org.opencv.core.Core; import org.opencv.core.CvType; import org.opencv.core.Mat; +import org.opencv.core.MatOfPoint; +import org.opencv.core.MatOfPoint2f; import org.opencv.core.Point; +import org.opencv.core.Rect; +import org.opencv.core.RotatedRect; +import org.opencv.core.Scalar; import org.opencv.core.Size; import org.opencv.imgproc.Imgproc; import org.photonvision.PhotonCamera; @@ -49,6 +57,8 @@ import org.photonvision.estimation.CameraTargetRelation; import org.photonvision.estimation.OpenCVHelp; import org.photonvision.estimation.PNPResults; +import org.photonvision.estimation.RotTrlTransform3d; +import org.photonvision.estimation.TargetModel; import org.photonvision.targeting.PhotonPipelineResult; import org.photonvision.targeting.PhotonTrackedTarget; import org.photonvision.targeting.TargetCorner; @@ -78,6 +88,8 @@ public class PhotonCameraSim implements AutoCloseable { private final CvSource videoSimRaw; private final Mat videoSimFrameRaw = new Mat(); private boolean videoSimRawEnabled = true; + private boolean videoSimWireframeEnabled = false; + private double videoSimWireframeResolution = 0.1; private final CvSource videoSimProcessed; private final Mat videoSimFrameProcessed = new Mat(); private boolean videoSimProcEnabled = true; @@ -200,15 +212,22 @@ public Mat getVideoSimFrameRaw() { * @return If this vision target can be seen before image projection. 
*/ public boolean canSeeTargetPose(Pose3d camPose, VisionTargetSim target) { - var rel = new CameraTargetRelation(camPose, target.getPose()); + // var rel = new CameraTargetRelation(camPose, target.getPose()); + //TODO: removal awaiting wpilib Rotation3d performance improvements + var relTarget = RotTrlTransform3d.makeRelativeTo(camPose).apply(target.getPose()); + var camToTargYaw = new Rotation2d(relTarget.getX(), relTarget.getY()); + var camToTargPitch = new Rotation2d(Math.hypot(relTarget.getX(), relTarget.getY()), -relTarget.getZ()); + var relCam = RotTrlTransform3d.makeRelativeTo(target.getPose()).apply(camPose); + var targToCamAngle = new Rotation2d(relCam.getX(), Math.hypot(relCam.getY(), relCam.getZ())); + return ( // target translation is outside of camera's FOV - (Math.abs(rel.camToTargYaw.getDegrees()) < prop.getHorizFOV().getDegrees() / 2) - && (Math.abs(rel.camToTargPitch.getDegrees()) < prop.getVertFOV().getDegrees() / 2) + (Math.abs(camToTargYaw.getDegrees()) < prop.getHorizFOV().getDegrees() / 2) + && (Math.abs(camToTargPitch.getDegrees()) < prop.getVertFOV().getDegrees() / 2) && (!target.getModel().isPlanar - || Math.abs(rel.targToCamAngle.getDegrees()) + || Math.abs(targToCamAngle.getDegrees()) < 90) // camera is behind planar target and it should be occluded - && (rel.camToTarg.getTranslation().getNorm() <= maxSightRangeMeters)); // target is too far + && (relTarget.getTranslation().getNorm() <= maxSightRangeMeters)); // target is too far } /** @@ -289,11 +308,32 @@ public void setTargetSortMode(PhotonTargetSortMode sortMode) { if (sortMode != null) this.sortMode = sortMode; } - /** Sets whether the raw video stream simulation is enabled. */ + /** + * Sets whether the raw video stream simulation is enabled. + *

    Note: This may increase loop times. + */ public void enableRawStream(boolean enabled) { videoSimRawEnabled = enabled; } + /** + * Sets whether a wireframe of the field is drawn to the raw video stream. + *

    Note: This will dramatically increase loop times. + */ + public void enableDrawWireframe(boolean enabled) { + videoSimWireframeEnabled = enabled; + } + + /** + * Sets the resolution of the drawn wireframe if enabled. Drawn line segments will be subdivided + * into smaller segments based on a threshold set by the resolution. + * + * @param resolution Resolution as a fraction(0 - 1) of the video frame's diagonal length in pixels + */ + public void setWireframeResolution(double resolution) { + videoSimWireframeResolution = resolution; + } + /** Sets whether the processed video stream simulation is enabled. */ public void enableProcessedStream(boolean enabled) { videoSimProcEnabled = enabled; @@ -310,10 +350,12 @@ public PhotonPipelineResult process( if (dist1 == dist2) return 0; return dist1 < dist2 ? 1 : -1; }); - // all targets visible (in FOV) - var visibleTags = new ArrayList>>(); - // all targets actually detectable to the camera + // all targets visible before noise + var visibleTgts = new ArrayList>>(); + // all targets actually detected by camera (after noise) var detectableTgts = new ArrayList(); + // basis change from world coordinates to camera coordinates + var camRt = RotTrlTransform3d.makeRelativeTo(cameraPose); // reset our frame VideoSimUtil.updateVideoProp(videoSimRaw, prop); @@ -326,17 +368,18 @@ public PhotonPipelineResult process( if (!canSeeTargetPose(cameraPose, tgt)) continue; // find target's 3d corner points - // TODO: Handle spherical targets var fieldCorners = tgt.getFieldVertices(); - + if(tgt.getModel().isSpherical) { // target is spherical + var model = tgt.getModel(); + fieldCorners = model.getFieldVertices( + model.getOrientedPose(tgt.getPose().getTranslation(), cameraPose.getTranslation())); + } // project 3d target points into 2d image points var targetCorners = OpenCVHelp.projectPoints( - prop.getIntrinsics(), prop.getDistCoeffs(), cameraPose, fieldCorners); - // save visible tags for stream simulation - if (tgt.fiducialID >= 0) { - visibleTags.add(new Pair<>(tgt.fiducialID, targetCorners)); - } + prop.getIntrinsics(), prop.getDistCoeffs(), camRt, fieldCorners); + // save visible targets for raw video stream simulation + visibleTgts.add(new Pair<>(tgt, targetCorners)); // estimate pixel noise var noisyTargetCorners = prop.estPixelNoise(targetCorners); // find the (naive) 2d yaw/pitch @@ -356,15 +399,6 @@ public PhotonPipelineResult process( prop.getDistCoeffs(), tgt.getModel().vertices, noisyTargetCorners); - if (!pnpSim.isPresent) continue; - centerRot = - prop.getPixelRot( - OpenCVHelp.projectPoints( - prop.getIntrinsics(), - prop.getDistCoeffs(), - new Pose3d(), - List.of(pnpSim.best.getTranslation())) - .get(0)); } Point[] minAreaRectPts = new Point[noisyTargetCorners.size()]; @@ -385,22 +419,64 @@ public PhotonPipelineResult process( } // render visible tags to raw video frame if (videoSimRawEnabled) { - for (var tag : visibleTags) { - VideoSimUtil.warp16h5TagImage( - tag.getFirst(), OpenCVHelp.targetCornersToMat(tag.getSecond()), videoSimFrameRaw, true); + // draw field wireframe + if(videoSimWireframeEnabled) { + VideoSimUtil.drawFieldWireframe(camRt, prop, videoSimWireframeResolution, 1.5, new Scalar(80), 4, 1, new Scalar(40), videoSimFrameRaw); + } + + // draw targets + for (var pair : visibleTgts) { + var tgt = pair.getFirst(); + var corn = pair.getSecond(); + + if(tgt.fiducialID >= 0) { // apriltags + VideoSimUtil.warp16h5TagImage( + tgt.fiducialID, OpenCVHelp.targetCornersToMat(corn), true, videoSimFrameRaw); + } + else 
if(!tgt.getModel().isSpherical) { // non-spherical targets + var contour = OpenCVHelp.targetCornersToMat(corn); + if(!tgt.getModel().isPlanar) { // visualization cant handle non-convex projections of 3d models + contour.fromArray(OpenCVHelp.getConvexHull(corn).toArray()); + } + VideoSimUtil.drawPoly( + contour, -1, new Scalar(255), true, videoSimFrameRaw); + } + else { // spherical targets + VideoSimUtil.drawEllipse(corn, new Scalar(255), videoSimFrameRaw); + } } videoSimRaw.putFrame(videoSimFrameRaw); } else videoSimRaw.setConnectionStrategy(ConnectionStrategy.kForceClose); - // draw/annotate tag detection outline on processed view + // draw/annotate target detection outline on processed view if (videoSimProcEnabled) { Imgproc.cvtColor(videoSimFrameRaw, videoSimFrameProcessed, Imgproc.COLOR_GRAY2BGR); + Imgproc.drawMarker( // crosshair + videoSimFrameProcessed, + new Point(prop.getResWidth()/2.0, prop.getResHeight()/2.0), + new Scalar(0, 255, 0), + Imgproc.MARKER_CROSS, + (int)VideoSimUtil.getScaledThickness(15, videoSimFrameProcessed), + (int)VideoSimUtil.getScaledThickness(1, videoSimFrameProcessed), + Imgproc.LINE_AA + ); for (var tgt : detectableTgts) { - if (tgt.getFiducialId() >= 0) { + if (tgt.getFiducialId() >= 0) { // apriltags VideoSimUtil.drawTagDetection( tgt.getFiducialId(), OpenCVHelp.targetCornersToMat(tgt.getDetectedCorners()), videoSimFrameProcessed); } + else { // other targets + Imgproc.rectangle( + videoSimFrameProcessed, OpenCVHelp.getBoundingRect(tgt.getDetectedCorners()), + new Scalar(0, 0, 255), + (int)VideoSimUtil.getScaledThickness(1, videoSimFrameProcessed), + Imgproc.LINE_AA); + + for(var pt : tgt.getDetectedCorners()) { + Imgproc.circle(videoSimFrameProcessed, new Point(pt.x, pt.y), 3, new Scalar(255, 50, 0), 1, Imgproc.LINE_AA); + } + } } videoSimProcessed.putFrame(videoSimFrameProcessed); } else videoSimProcessed.setConnectionStrategy(ConnectionStrategy.kForceClose); diff --git a/photon-lib/src/main/java/org/photonvision/simulation/SimCameraProperties.java b/photon-lib/src/main/java/org/photonvision/simulation/SimCameraProperties.java index 336c7d5029..94b790f13f 100644 --- a/photon-lib/src/main/java/org/photonvision/simulation/SimCameraProperties.java +++ b/photon-lib/src/main/java/org/photonvision/simulation/SimCameraProperties.java @@ -25,20 +25,31 @@ package org.photonvision.simulation; import com.fasterxml.jackson.databind.ObjectMapper; + import edu.wpi.first.math.Matrix; import edu.wpi.first.math.Nat; +import edu.wpi.first.math.Pair; import edu.wpi.first.math.VecBuilder; import edu.wpi.first.math.Vector; +import edu.wpi.first.math.geometry.Pose3d; import edu.wpi.first.math.geometry.Rotation2d; import edu.wpi.first.math.geometry.Rotation3d; +import edu.wpi.first.math.geometry.Translation3d; import edu.wpi.first.math.numbers.*; import edu.wpi.first.wpilibj.DriverStation; + import java.io.IOException; import java.nio.file.Path; +import java.util.ArrayList; import java.util.List; import java.util.Random; import java.util.stream.Collectors; + +import org.ejml.data.DMatrix3; +import org.ejml.dense.fixed.CommonOps_DDF3; +import org.opencv.imgproc.Imgproc; import org.photonvision.estimation.OpenCVHelp; +import org.photonvision.estimation.RotTrlTransform3d; import org.photonvision.targeting.TargetCorner; /** @@ -69,6 +80,8 @@ public class SimCameraProperties { private double exposureTimeMs = 0; private double avgLatencyMs = 0; private double latencyStdDevMs = 0; + // util + private List viewplanes = new ArrayList<>(); /** Default constructor which is the same 
as {@link #PERFECT_90DEG} */ public SimCameraProperties() { @@ -186,14 +199,18 @@ public void setCalibration( this.resHeight = resHeight; this.camIntrinsics = camIntrinsics; this.distCoeffs = distCoeffs; - } - public void setCameraIntrinsics(Matrix camIntrinsics) { - this.camIntrinsics = camIntrinsics; - } - - public void setDistortionCoeffs(Matrix distCoeffs) { - this.distCoeffs = distCoeffs; + // left, right, up, and down view planes + var p = new Translation3d[]{ + new Translation3d(1, new Rotation3d(0, 0, getPixelYaw(0).plus(new Rotation2d(-Math.PI/2)).getRadians())), + new Translation3d(1, new Rotation3d(0, 0, getPixelYaw(resWidth).plus(new Rotation2d(Math.PI/2)).getRadians())), + new Translation3d(1, new Rotation3d(0, getPixelPitch(0).plus(new Rotation2d(Math.PI/2)).getRadians(), 0)), + new Translation3d(1, new Rotation3d(0, getPixelPitch(resHeight).plus(new Rotation2d(-Math.PI/2)).getRadians(), 0)) + }; + viewplanes.clear(); + for(int i = 0; i < p.length; i++) { + viewplanes.add(new DMatrix3(p[i].getX(), p[i].getY(), p[i].getZ())); + } } public void setCalibError(double avgErrorPx, double errorStdDevPx) { @@ -245,6 +262,11 @@ public int getResArea() { return resWidth * resHeight; } + /** Width:height */ + public double getAspectRatio() { + return (double) resWidth / resHeight; + } + public Matrix getIntrinsics() { return camIntrinsics.copy(); } @@ -295,7 +317,7 @@ public List undistort(List points) { * @param corners Corners of the contour */ public double getContourAreaPercent(List corners) { - return OpenCVHelp.getContourAreaPx(corners) / getResArea() * 100; + return Imgproc.contourArea(OpenCVHelp.getConvexHull(corners)) / getResArea() * 100; } /** The yaw from the principal point of this camera to the pixel x value. Positive values left. */ @@ -389,9 +411,148 @@ public Rotation2d getDiagFOV() { return new Rotation2d(Math.hypot(getHorizFOV().getRadians(), getVertFOV().getRadians())); } - /** Width:height */ - public double getAspectRatio() { - return (double) resWidth / resHeight; + /** + * Determines where the line segment defined by the two given translations intersects the camera's + * frustum/field-of-vision, if at all. + * + *

    The line is parametrized so any of its points p = t * (b - a) + a. + * This method returns these values of t, minimum first, defining the region of the line segment + * which is visible in the frustum. If both ends of the line segment are visible, this simply + * returns {0, 1}. If, for example, point b is visible while a is not, and half of the line segment + * is inside the camera frustum, {0.5, 1} would be returned. + * + * @param camRt The change in basis from world coordinates to camera coordinates. See + * {@link RotTrlTransform3d#makeRelativeTo(Pose3d)}. + * @param a The initial translation of the line + * @param b The final translation of the line + * @return A Pair of Doubles. The values may be null: + *

+ *   • {Double, Double} : Two parametrized values (t), minimum first, defining the segment
+ *     of the line that is visible in the camera frustum.
+ *   • {Double, null} : One value (t) representing a single intersection point. For example, the
+ *     line only touches the edge where two adjacent viewplanes meet.
+ *   • {null, null} : No values. The line segment is not visible in the camera frustum.
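To make the parametrization concrete, here is a rough caller-side sketch of culling a segment to its visible region with the returned pair, in the same way polyFrom3dLines uses it later in this patch; the camera properties, pose, and endpoints below are assumed placeholder values.

// Illustrative only: assumes the default SimCameraProperties calibration is in effect.
var prop = new SimCameraProperties();
var camRt = RotTrlTransform3d.makeRelativeTo(new Pose3d(0, 0, 1, new Rotation3d()));
var a = new Translation3d(-1, 0, 1); // example endpoint behind the camera
var b = new Translation3d(4, 0, 1);  // example endpoint in front of the camera
var inter = prop.getVisibleLine(camRt, a, b);
if (inter.getSecond() != null) {
    double tMin = inter.getFirst();    // 0 if endpoint a is already visible
    double tMax = inter.getSecond();   // 1 if endpoint b is already visible
    var ab = b.minus(a);
    var visA = a.plus(ab.times(tMin)); // p = t * (b - a) + a
    var visB = a.plus(ab.times(tMax)); // visA -> visB is the visible portion of the segment
}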
    + */ + public Pair getVisibleLine(RotTrlTransform3d camRt, Translation3d a, Translation3d b) { + // translations relative to the camera + var rela = camRt.apply(a); + var relb = camRt.apply(b); + + // check if both ends are behind camera + if(rela.getX() <= 0 && relb.getX() <= 0) return new Pair<>(null, null); + + var av = new DMatrix3(rela.getX(), rela.getY(), rela.getZ()); + var bv = new DMatrix3(relb.getX(), relb.getY(), relb.getZ()); + // a to b + var abv = new DMatrix3(); + CommonOps_DDF3.subtract(bv, av, abv); + + // check if the ends of the line segment are visible + boolean aVisible = true; + boolean bVisible = true; + for(int i = 0; i < viewplanes.size(); i++) { + var normal = viewplanes.get(i); + double aVisibility = CommonOps_DDF3.dot(av, normal); + if(aVisibility < 0) aVisible = false; + double bVisibility = CommonOps_DDF3.dot(bv, normal); + if(bVisibility < 0) bVisible = false; + // both ends are outside at least one of the same viewplane + if(aVisibility <= 0 && bVisibility <= 0) return new Pair<>(null, null); + } + // both ends are inside frustum + if(aVisible && bVisible) return new Pair<>(Double.valueOf(0), Double.valueOf(1)); + + // parametrized (t=0 at a, t=1 at b) intersections with viewplanes + double[] intersections = { + Double.NaN, + Double.NaN, + Double.NaN, + Double.NaN + }; + // intersection points + List ipts = new ArrayList<>(); + for(double val : intersections) ipts.add(null); + + // find intersections + for(int i = 0; i < viewplanes.size(); i++) { + var normal = viewplanes.get(i); + + // we want to know the value of t when the line intercepts this plane + // parametrized: v = t * ab + a, where v lies on the plane + // we can find the projection of a onto the plane normal + // a_projn = normal.times(av.dot(normal) / normal.dot(normal)); + var a_projn = new DMatrix3(); + CommonOps_DDF3.scale(CommonOps_DDF3.dot(av, normal) / CommonOps_DDF3.dot(normal, normal), normal, a_projn); + // this projection lets us determine the scalar multiple t of ab where + // (t * ab + a) is a vector which lies on the plane + if(Math.abs(CommonOps_DDF3.dot(abv, a_projn)) < 1e-5) continue; // line is parallel to plane + intersections[i] = CommonOps_DDF3.dot(a_projn, a_projn) / -CommonOps_DDF3.dot(abv, a_projn); + + // vector a to the viewplane + var apv = new DMatrix3(); + CommonOps_DDF3.scale(intersections[i], abv, apv); + // av + apv = intersection point + var intersectpt = new DMatrix3(); + CommonOps_DDF3.add(av, apv, intersectpt); + ipts.set(i, intersectpt); + + // discard intersections outside the camera frustum + for(int j = 1; j < viewplanes.size(); j++) { + int oi = (i+j) % viewplanes.size(); + var onormal = viewplanes.get(oi); + // if the dot of the intersection point with any plane normal is negative, it is outside + if(CommonOps_DDF3.dot(intersectpt, onormal) < 0) { + intersections[i] = Double.NaN; + ipts.set(i, null); + break; + } + } + + // discard duplicate intersections + if(ipts.get(i) == null) continue; + for(int j = i-1; j >= 0; j--) { + var oipt = ipts.get(j); + if(oipt == null) continue; + var diff = new DMatrix3(); + CommonOps_DDF3.subtract(oipt, intersectpt, diff); + if(CommonOps_DDF3.elementMaxAbs(diff) < 1e-4) { + intersections[i] = Double.NaN; + ipts.set(i, null); + break; + } + } + } + + // determine visible segment (minimum and maximum t) + double inter1 = Double.NaN; + double inter2 = Double.NaN; + for(double inter : intersections) { + if(!Double.isNaN(inter)) { + if(Double.isNaN(inter1)) inter1 = inter; + else inter2 = inter; + } + } + + // two 
viewplane intersections + if(!Double.isNaN(inter2)) { + double max = Math.max(inter1, inter2); + double min = Math.min(inter1, inter2); + if(aVisible) min = 0; + if(bVisible) max = 1; + return new Pair<>( + min, + max + ); + } + // one viewplane intersection + else if(!Double.isNaN(inter1)) { + if(aVisible) return new Pair<>(Double.valueOf(0), inter1); + if(bVisible) return new Pair<>(inter1, Double.valueOf(1)); + return new Pair<>(inter1, null); + } + // no intersections + else return new Pair<>(null, null); } /** diff --git a/photon-lib/src/main/java/org/photonvision/simulation/VideoSimUtil.java b/photon-lib/src/main/java/org/photonvision/simulation/VideoSimUtil.java index d142347a9a..8d91e149e9 100644 --- a/photon-lib/src/main/java/org/photonvision/simulation/VideoSimUtil.java +++ b/photon-lib/src/main/java/org/photonvision/simulation/VideoSimUtil.java @@ -25,13 +25,18 @@ package org.photonvision.simulation; import edu.wpi.first.cscore.CvSource; +import edu.wpi.first.math.geometry.Pose3d; +import edu.wpi.first.math.geometry.Translation3d; import edu.wpi.first.util.RuntimeLoader; + import java.awt.image.BufferedImage; import java.io.IOException; +import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; + import javax.imageio.ImageIO; import org.opencv.core.Core; import org.opencv.core.CvType; @@ -45,6 +50,8 @@ import org.opencv.imgcodecs.Imgcodecs; import org.opencv.imgproc.Imgproc; import org.photonvision.estimation.OpenCVHelp; +import org.photonvision.estimation.RotTrlTransform3d; +import org.photonvision.targeting.TargetCorner; public class VideoSimUtil { public static final String kLocalTagImagesPath = "./src/main/resources/images/apriltags/"; @@ -57,6 +64,10 @@ public class VideoSimUtil { // Points corresponding to marker(black square) corners of 8x8 16h5 tag images public static final Point[] kTag16h5MarkerPts; + // field dimensions for wireframe + private static double fieldLength = 16.54175; + private static double fieldWidth = 8.0137; + static { try { var loader = @@ -166,14 +177,14 @@ public static Point[] get16h5MarkerPts(int scale) { * @param tagId The id of the specific tag to warp onto the destination image * @param dstPoints Points(4) in destination image where the tag marker(black square) corners * should be warped onto. - * @param destination The destination image to place the warped tag image onto. * @param antialiasing If antialiasing should be performed by automatically * supersampling/interpolating the warped image. This should be used if better stream quality * is desired or target detection is being done on the stream, but can hurt performance. + * @param destination The destination image to place the warped tag image onto. * @see OpenCVHelp#targetCornersToMat(org.photonvision.targeting.TargetCorner...) */ public static void warp16h5TagImage( - int tagId, MatOfPoint2f dstPoints, Mat destination, boolean antialiasing) { + int tagId, MatOfPoint2f dstPoints, boolean antialiasing, Mat destination) { Mat tagImage = kTag16h5Images.get(tagId); if (tagImage == null || tagImage.empty()) return; var tagPoints = new MatOfPoint2f(kTag16h5MarkerPts); @@ -291,6 +302,57 @@ public static void warp16h5TagImage( tempROI.copyTo(destination.submat(boundingRect), tempMask); } + /** + * Given a line thickness in a 640x480 image, try to scale to the given destination image resolution. 
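As a quick arithmetic check of that scaling, under the assumption of a 1280x720 destination frame:

// scaleX = 1280 / 640 = 2.0, scaleY = 720 / 480 = 1.5; the smaller scale wins,
// so a 2 px line specified at 480p is drawn 3 px thick (and never thinner than 1 px).
Mat frame = Mat.zeros(720, 1280, CvType.CV_8UC1); // rows = 720, cols = 1280
double thickness = VideoSimUtil.getScaledThickness(2, frame); // 3.0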
+ * + * @param thickness480p A hypothetical line thickness in a 640x480 image + * @param destinationImg The destination image to scale to + * @return Scaled thickness which cannot be less than 1 + */ + public static double getScaledThickness(double thickness480p, Mat destinationImg) { + double scaleX = destinationImg.width() / 640.0; + double scaleY = destinationImg.height() / 480.0; + double minScale = Math.min(scaleX, scaleY); + return Math.max(thickness480p * minScale, 1.0); + } + + /** + * Draw a filled ellipse in the destination image. + * + * @param corners The corners of a rectangle in which the ellipse is inscribed + * @param color The color of the ellipse. This is a scalar with BGR values (0-255) + * @param destination The destination image to draw onto. The image should be in the BGR color + * space. + */ + public static void drawEllipse(List corners, Scalar color, Mat destination) { + // create RotatedRect from corners + var rect = OpenCVHelp.getMinAreaRect(corners); + // inscribe ellipse inside rectangle + Imgproc.ellipse( + destination, + rect, color, -1, Imgproc.LINE_AA); + } + + /** + * Draw a polygon outline or filled polygon to the destination image with the given points. + * + * @param dstPoints The points in the destination image representing the polygon. + * @param thickness The thickness of the outline in pixels. If this is not positive, a filled polygon is drawn instead. + * @param color The color drawn. This should match the color space of the destination image. + * @param isClosed If the last point should connect to the first point in the polygon outline. + * @param destination The destination image to draw onto. + */ + public static void drawPoly(MatOfPoint2f dstPoints, int thickness, Scalar color, boolean isClosed, Mat destination) { + var dstPointsd = new MatOfPoint(dstPoints.toArray()); + if(thickness > 0){ + Imgproc.polylines( + destination, List.of(dstPointsd), isClosed, color, thickness, Imgproc.LINE_AA); + } + else { + Imgproc.fillPoly(destination, List.of(dstPointsd), color, Imgproc.LINE_AA); + } + } + /** * Draws a contour around the given points and text of the id onto the destination image. * @@ -301,30 +363,225 @@ public static void warp16h5TagImage( * space. 
*/ public static void drawTagDetection(int id, MatOfPoint2f dstPoints, Mat destination) { - var dstPointsd = new MatOfPoint(dstPoints.toArray()); - double scaleX = destination.width() / 640.0; - double scaleY = destination.height() / 480.0; - double minScale = Math.min(scaleX, scaleY); - int thickness = (int) (1 * minScale); - // for(var pt : dstPoints.toArray()) { - // Imgproc.circle(destination, pt, 4, new Scalar(255), 1, Imgproc.LINE_AA); - // } - // Imgproc.rectangle(destination, extremeRect, new Scalar(255), 1, Imgproc.LINE_AA); - // Imgproc.rectangle(destination, Imgproc.boundingRect(dstPoints), new Scalar(255), 1, - // Imgproc.LINE_AA); - Imgproc.polylines( - destination, List.of(dstPointsd), true, new Scalar(0, 0, 255), thickness, Imgproc.LINE_AA); - var textPt = Imgproc.boundingRect(dstPoints).tl(); - textPt.x -= 10.0 * scaleX; - textPt.y -= 12.0 * scaleY; + double thickness = getScaledThickness(1, destination); + drawPoly(dstPoints, (int)thickness, new Scalar(0, 0, 255), true, destination); + var rect = Imgproc.boundingRect(dstPoints); + var textPt = new Point(rect.x+rect.width, rect.y); + textPt.x += thickness; + textPt.y += thickness; Imgproc.putText( destination, String.valueOf(id), textPt, Imgproc.FONT_HERSHEY_PLAIN, - 1.5 * minScale, - new Scalar(0, 0, 255), thickness, + new Scalar(0, 255, 0), + (int)thickness, Imgproc.LINE_AA); } + + /** + * Set the field dimensions that are used for drawing the field wireframe. + * + * @param fieldLengthMeters + * @param fieldWidthMeters + */ + public static void setFieldDimensionsMeters(double fieldLengthMeters, double fieldWidthMeters) { + fieldLength = fieldLengthMeters; + fieldWidth = fieldWidthMeters; + } + + /** + * The translations used to draw the field side walls and driver station walls. It is a List of Lists because + * the translations are not all connected. + */ + private static List> getFieldWallLines() { + var list = new ArrayList>(); + + final double sideHt = 0.5; + final double driveHt = 1; + final double topHt = 2; + + // field floor + list.add( + List.of( + new Translation3d(0, 0, 0), + new Translation3d(fieldLength, 0, 0), + new Translation3d(fieldLength, fieldWidth, 0), + new Translation3d(0, fieldWidth, 0), + new Translation3d(0, 0, 0) + ) + ); + // right side wall + list.add( + List.of( + new Translation3d(0, 0, 0), + new Translation3d(0, 0, sideHt), + new Translation3d(fieldLength, 0, sideHt), + new Translation3d(fieldLength, 0, 0) + ) + ); + // red driverstation + list.add( + List.of( + new Translation3d(fieldLength, 0, sideHt), + new Translation3d(fieldLength, 0, topHt), + new Translation3d(fieldLength, fieldWidth, topHt), + new Translation3d(fieldLength, fieldWidth, sideHt) + ) + ); + list.add( + List.of( + new Translation3d(fieldLength, 0, driveHt), + new Translation3d(fieldLength, fieldWidth, driveHt) + ) + ); + // left side wall + list.add( + List.of( + new Translation3d(0, fieldWidth, 0), + new Translation3d(0, fieldWidth, sideHt), + new Translation3d(fieldLength, fieldWidth, sideHt), + new Translation3d(fieldLength, fieldWidth, 0) + ) + ); + // blue driverstation + list.add( + List.of( + new Translation3d(0, 0, sideHt), + new Translation3d(0, 0, topHt), + new Translation3d(0, fieldWidth, topHt), + new Translation3d(0, fieldWidth, sideHt) + ) + ); + list.add( + List.of( + new Translation3d(0, 0, driveHt), + new Translation3d(0, fieldWidth, driveHt) + ) + ); + + return list; + } + + /** + * The translations used to draw the field floor subdivisions (not the floor outline). 
It is a List of Lists because + * the translations are not all connected. + * + * @param subdivisions How many "subdivisions" along the width/length of the floor. E.g. 3 subdivisions + * would mean 2 lines along the length and 2 lines along the width creating a 3x3 "grid". + */ + private static List> getFieldFloorLines(int subdivisions) { + var list = new ArrayList>(); + final double subLength = fieldLength / subdivisions; + final double subWidth = fieldWidth / subdivisions; + + // field floor subdivisions + for(int i = 0; i < subdivisions; i++) { + list.add(List.of( + new Translation3d(0, subWidth*(i+1), 0), + new Translation3d(fieldLength, subWidth*(i+1), 0) + )); + list.add(List.of( + new Translation3d(subLength*(i+1), 0, 0), + new Translation3d(subLength*(i+1), fieldWidth, 0) + )); + } + + return list; + } + + /** + * Convert 3D lines represented by the given series of translations into a polygon in the camera's image. + * + * @param camRt The change in basis from world coordinates to camera coordinates. See + * {@link RotTrlTransform3d#makeRelativeTo(Pose3d)}. + * @param prop The simulated camera's properties. + * @param trls A sequential series of translations defining the polygon to be drawn. + * @param resolution Resolution as a fraction(0 - 1) of the video frame's diagonal length in pixels. + * Line segments will be subdivided if they exceed this resolution. + * @param isClosed If the final translation should also draw a line to the first translation. + * @param destination The destination image that is being drawn to. + */ + public static MatOfPoint2f polyFrom3dLines(RotTrlTransform3d camRt, SimCameraProperties prop, + List trls, double resolution, + boolean isClosed, Mat destination) { + resolution = Math.hypot(destination.size().height, destination.size().width)*resolution; + List pts = new ArrayList<>(trls); + if(isClosed) pts.add(pts.get(0)); + var corners = new ArrayList(); + + for(int i = 0; i < pts.size()-1; i++) { + var pta = pts.get(i); + var ptb = pts.get(i+1); + + // check if line is inside camera fulcrum + var inter = prop.getVisibleLine(camRt, pta, ptb); + if(inter.getSecond() == null) continue; + + // cull line to the inside of the camera fulcrum + double inter1 = inter.getFirst().doubleValue(); + double inter2 = inter.getSecond().doubleValue(); + var baseDelta = ptb.minus(pta); + var old_pta = pta; + if(inter1 > 0) pta = old_pta.plus(baseDelta.times(inter1)); + if(inter2 < 1) ptb = old_pta.plus(baseDelta.times(inter2)); + baseDelta = ptb.minus(pta); + + // project points into 2d + var corn = new ArrayList(); + corn.addAll(OpenCVHelp.projectPoints( + prop.getIntrinsics(), prop.getDistCoeffs(), camRt, List.of(pta, ptb))); + var pxa = corn.get(0); + var pxb = corn.get(1); + + // subdivide projected line based on desired resolution + double pxDist = Math.hypot(pxb.x-pxa.x, pxb.y-pxa.y); + int subdivisions = (int) (pxDist / resolution); + var subDelta = baseDelta.div(subdivisions+1); + var subPts = new ArrayList(); + for(int j = 0; j < subdivisions; j++) { + subPts.add(pta.plus(subDelta.times(j+1))); + } + if(subPts.size() > 0){ + corn.addAll(1, OpenCVHelp.projectPoints( + prop.getIntrinsics(), prop.getDistCoeffs(), camRt, subPts)); + } + + corners.addAll(corn); + } + + return OpenCVHelp.targetCornersToMat(corners); + } + + /** + * Draw a wireframe of the field to the given image. + * + * @param camRt The change in basis from world coordinates to camera coordinates. See + * {@link RotTrlTransform3d#makeRelativeTo(Pose3d)}. 
+ * @param prop The simulated camera's properties. + * @param resolution Resolution as a fraction(0 - 1) of the video frame's diagonal length in pixels. + * Line segments will be subdivided if they exceed this resolution. + * @param wallThickness Thickness of the lines used for drawing the field walls in pixels. + * This is scaled by {@link #getScaledThickness(double, Mat)}. + * @param wallColor Color of the lines used for drawing the field walls. + * @param floorSubdivisions A NxN "grid" is created from the floor where this parameter is N, + * which defines the floor lines. + * @param floorThickness Thickness of the lines used for drawing the field floor grid in pixels. + * This is scaled by {@link #getScaledThickness(double, Mat)}. + * @param floorColor Color of the lines used for drawing the field floor grid. + * @param destination The destination image to draw to. + */ + public static void drawFieldWireframe(RotTrlTransform3d camRt, SimCameraProperties prop, + double resolution, double wallThickness, Scalar wallColor, int floorSubdivisions, + double floorThickness, Scalar floorColor, Mat destination) { + for(var trls : getFieldWallLines()) { + var poly = VideoSimUtil.polyFrom3dLines(camRt, prop, trls, resolution, false, destination); + drawPoly(poly, (int)getScaledThickness(wallThickness, destination), wallColor, false, destination); + } + for(var trls : getFieldFloorLines(floorSubdivisions)) { + var poly = VideoSimUtil.polyFrom3dLines(camRt, prop, trls, resolution, false, destination); + drawPoly(poly, (int)getScaledThickness(floorThickness, destination), floorColor, false, destination); + } + } } diff --git a/photon-lib/src/main/java/org/photonvision/simulation/VisionSystemSim.java b/photon-lib/src/main/java/org/photonvision/simulation/VisionSystemSim.java index 81736d2dde..c48e32e081 100644 --- a/photon-lib/src/main/java/org/photonvision/simulation/VisionSystemSim.java +++ b/photon-lib/src/main/java/org/photonvision/simulation/VisionSystemSim.java @@ -130,7 +130,8 @@ public boolean removeCamera(PhotonCameraSim cameraSim) { * Get a simulated camera's position relative to the robot. If the requested camera is invalid, an * empty optional is returned. * - * @return The transform of this cameraSim, or an empty optional if it is invalid + * @param cameraSim The specific camera to get the robot-to-camera transform of + * @return The transform of this camera, or an empty optional if it is invalid */ public Optional getRobotToCamera(PhotonCameraSim cameraSim) { return getRobotToCamera(cameraSim, Timer.getFPGATimestamp()); @@ -140,9 +141,9 @@ public Optional getRobotToCamera(PhotonCameraSim cameraSim) { * Get a simulated camera's position relative to the robot. If the requested camera is invalid, an * empty optional is returned. * - * @param cameraSim Specific camera to get the robot-to-camera transform of + * @param cameraSim The specific camera to get the robot-to-camera transform of * @param timeSeconds Timestamp in seconds of when the transform should be observed - * @return The transform of this cameraSim, or an empty optional if it is invalid + * @return The transform of this camera, or an empty optional if it is invalid */ public Optional getRobotToCamera(PhotonCameraSim cameraSim, double timeSeconds) { var trfBuffer = camTrfMap.get(cameraSim); @@ -152,6 +153,31 @@ public Optional getRobotToCamera(PhotonCameraSim cameraSim, double return Optional.of(new Transform3d(new Pose3d(), sample.orElse(new Pose3d()))); } + /** + * Get a simulated camera's position on the field. 
If the requested camera is invalid, an + * empty optional is returned. + * + * @param cameraSim The specific camera to get the field pose of + * @return The pose of this camera, or an empty optional if it is invalid + */ + public Optional getCameraPose(PhotonCameraSim cameraSim) { + return getCameraPose(cameraSim, Timer.getFPGATimestamp()); + } + + /** + * Get a simulated camera's position on the field. If the requested camera is invalid, an + * empty optional is returned. + * + * @param cameraSim The specific camera to get the field pose of + * @param timeSeconds Timestamp in seconds of when the pose should be observed + * @return The pose of this camera, or an empty optional if it is invalid + */ + public Optional getCameraPose(PhotonCameraSim cameraSim, double timeSeconds) { + var robotToCamera = getRobotToCamera(cameraSim, timeSeconds); + if(robotToCamera.isEmpty()) return Optional.empty(); + return Optional.of(getRobotPose(timeSeconds).plus(robotToCamera.get())); + } + /** * Adjust a camera's position relative to the robot. Use this if your camera is on a gimbal or * turret or some other mobile platform. @@ -217,15 +243,16 @@ public void addVisionTargets(VisionTargetSim... targets) { * PhotonCamera}s simulated from this system will report the location of the camera relative to * the subset of these targets which are visible from the given camera position. * - *

    The AprilTags from this layout will be added as vision targets under the type "apriltags". - * The poses added preserve the tag layout's current alliance origin. + *

    The AprilTags from this layout will be added as vision targets under the type "apriltag". + * The poses added preserve the tag layout's current alliance origin. If the tag layout's + * alliance origin is changed, these added tags will have to be cleared and re-added. * * @param tagLayout The field tag layout to get Apriltag poses and IDs from */ - public void addVisionTargets(AprilTagFieldLayout tagLayout) { + public void addAprilTags(AprilTagFieldLayout tagLayout) { for (AprilTag tag : tagLayout.getTags()) { addVisionTargets( - "apriltags", + "apriltag", new VisionTargetSim( tagLayout.getTagPose(tag.ID).get(), // preserve alliance rotation TargetModel.kTag16h5, @@ -252,6 +279,10 @@ public void clearVisionTargets() { targetSets.clear(); } + public void clearAprilTags() { + removeVisionTargets("apriltag"); + } + public Set removeVisionTargets(String type) { return targetSets.remove(type); } diff --git a/photon-lib/src/test/java/org/photonvision/OpenCVTest.java b/photon-lib/src/test/java/org/photonvision/OpenCVTest.java index 594439e847..f0eaeef5cb 100644 --- a/photon-lib/src/test/java/org/photonvision/OpenCVTest.java +++ b/photon-lib/src/test/java/org/photonvision/OpenCVTest.java @@ -48,6 +48,7 @@ import org.opencv.core.Core; import org.photonvision.estimation.CameraTargetRelation; import org.photonvision.estimation.OpenCVHelp; +import org.photonvision.estimation.RotTrlTransform3d; import org.photonvision.estimation.TargetModel; import org.photonvision.simulation.SimCameraProperties; import org.photonvision.simulation.VisionSystemSim; @@ -143,9 +144,11 @@ public void testProjection() { new VisionTargetSim( new Pose3d(1, 0, 0, new Rotation3d(0, 0, Math.PI)), TargetModel.kTag16h5, 0); var cameraPose = new Pose3d(0, 0, 0, new Rotation3d()); + var camRt = RotTrlTransform3d.makeRelativeTo(cameraPose); var targetCorners = OpenCVHelp.projectPoints( - prop.getIntrinsics(), prop.getDistCoeffs(), cameraPose, target.getFieldVertices()); + prop.getIntrinsics(), prop.getDistCoeffs(), camRt, target.getFieldVertices()); + // find circulation (counter/clockwise-ness) double circulation = 0; for (int i = 0; i < targetCorners.size(); i++) { @@ -154,25 +157,30 @@ public void testProjection() { circulation += xDiff * ySum; } assertTrue(circulation > 0, "2d fiducial points aren't counter-clockwise"); + // undo projection distortion targetCorners = prop.undistort(targetCorners); + // test projection results after moving camera var avgCenterRot1 = prop.getPixelRot(OpenCVHelp.averageCorner(targetCorners)); cameraPose = cameraPose.plus(new Transform3d(new Translation3d(), new Rotation3d(0, 0.25, 0.25))); + camRt = RotTrlTransform3d.makeRelativeTo(cameraPose); targetCorners = OpenCVHelp.projectPoints( - prop.getIntrinsics(), prop.getDistCoeffs(), cameraPose, target.getFieldVertices()); + prop.getIntrinsics(), prop.getDistCoeffs(), camRt, target.getFieldVertices()); var avgCenterRot2 = prop.getPixelRot(OpenCVHelp.averageCorner(targetCorners)); + var yaw2d = new Rotation2d(avgCenterRot2.getZ()); var pitch2d = new Rotation2d(avgCenterRot2.getY()); var yawDiff = yaw2d.minus(new Rotation2d(avgCenterRot1.getZ())); var pitchDiff = pitch2d.minus(new Rotation2d(avgCenterRot1.getY())); assertTrue(yawDiff.getRadians() < 0, "2d points don't follow yaw"); assertTrue(pitchDiff.getRadians() < 0, "2d points don't follow pitch"); + var actualRelation = new CameraTargetRelation(cameraPose, target.getPose()); assertEquals( actualRelation.camToTargPitch.getDegrees(), - pitchDiff.getDegrees() * Math.cos(yaw2d.getRadians()), // 
adjust for perpsective distortion + pitchDiff.getDegrees() * Math.cos(yaw2d.getRadians()), // adjust for unaccounted perpsective distortion kRotDeltaDeg, "2d pitch doesn't match 3d"); assertEquals( @@ -184,23 +192,31 @@ public void testProjection() { @Test public void testSolvePNP_SQUARE() { + // square AprilTag target var target = new VisionTargetSim( new Pose3d(5, 0.5, 1, new Rotation3d(0, 0, Math.PI)), TargetModel.kTag16h5, 0); var cameraPose = new Pose3d(0, 0, 0, new Rotation3d()); - var actualRelation = new CameraTargetRelation(cameraPose, target.getPose()); + var camRt = RotTrlTransform3d.makeRelativeTo(cameraPose); + // target relative to camera + var relTarget = camRt.apply(target.getPose()); + + // simulate solvePNP estimation var targetCorners = OpenCVHelp.projectPoints( - prop.getIntrinsics(), prop.getDistCoeffs(), cameraPose, target.getFieldVertices()); + prop.getIntrinsics(), prop.getDistCoeffs(), camRt, target.getFieldVertices()); var pnpSim = OpenCVHelp.solvePNP_SQUARE( prop.getIntrinsics(), prop.getDistCoeffs(), target.getModel().vertices, targetCorners); - var estRelation = new CameraTargetRelation(cameraPose, cameraPose.plus(pnpSim.best)); - assertSame(actualRelation.camToTarg, estRelation.camToTarg); + + // check solvePNP estimation accuracy + assertSame(relTarget.getRotation(), pnpSim.best.getRotation()); + assertSame(relTarget.getTranslation(), pnpSim.best.getTranslation()); } @Test public void testSolvePNP_SQPNP() { + // (for targets with arbitrary number of non-colinear points > 2) var target = new VisionTargetSim( new Pose3d(5, 0.5, 1, new Rotation3d(0, 0, Math.PI)), @@ -216,14 +232,20 @@ public void testSolvePNP_SQPNP() { new Translation3d(-1, 0, 0))), 0); var cameraPose = new Pose3d(0, 0, 0, new Rotation3d()); - var actualRelation = new CameraTargetRelation(cameraPose, target.getPose()); + var camRt = RotTrlTransform3d.makeRelativeTo(cameraPose); + // target relative to camera + var relTarget = camRt.apply(target.getPose()); + + // simulate solvePNP estimation var targetCorners = OpenCVHelp.projectPoints( - prop.getIntrinsics(), prop.getDistCoeffs(), cameraPose, target.getFieldVertices()); + prop.getIntrinsics(), prop.getDistCoeffs(), camRt, target.getFieldVertices()); var pnpSim = OpenCVHelp.solvePNP_SQPNP( prop.getIntrinsics(), prop.getDistCoeffs(), target.getModel().vertices, targetCorners); - var estRelation = new CameraTargetRelation(cameraPose, cameraPose.plus(pnpSim.best)); - assertSame(actualRelation.camToTarg, estRelation.camToTarg); + + // check solvePNP estimation accuracy + assertSame(relTarget.getRotation(), pnpSim.best.getRotation()); + assertSame(relTarget.getTranslation(), pnpSim.best.getTranslation()); } } From a6eec79dc9c56c68f8f9521fd2807738be9e1948 Mon Sep 17 00:00:00 2001 From: amquake Date: Sun, 13 Aug 2023 00:43:54 -0700 Subject: [PATCH 11/46] spotless --- .../photonvision/estimation/OpenCVHelp.java | 54 ++-- .../estimation/RotTrlTransform3d.java | 30 ++- .../photonvision/estimation/TargetModel.java | 46 ++-- .../simulation/PhotonCameraSim.java | 100 +++---- .../simulation/SimCameraProperties.java | 127 ++++----- .../photonvision/simulation/VideoSimUtil.java | 249 +++++++++--------- .../simulation/VisionSystemSim.java | 18 +- .../java/org/photonvision/OpenCVTest.java | 5 +- 8 files changed, 326 insertions(+), 303 deletions(-) diff --git a/photon-lib/src/main/java/org/photonvision/estimation/OpenCVHelp.java b/photon-lib/src/main/java/org/photonvision/estimation/OpenCVHelp.java index 51b3e5df97..1406e49180 100644 --- 
a/photon-lib/src/main/java/org/photonvision/estimation/OpenCVHelp.java +++ b/photon-lib/src/main/java/org/photonvision/estimation/OpenCVHelp.java @@ -69,16 +69,14 @@ public final class OpenCVHelp { throw new RuntimeException("Failed to load native libraries!", e); } - NWU_TO_EDN = new RotTrlTransform3d(new Rotation3d(Matrix.mat(Nat.N3(), Nat.N3()).fill( - 0, -1, 0, - 0, 0, -1, - 1, 0, 0 - )), new Translation3d()); - EDN_TO_NWU = new RotTrlTransform3d(new Rotation3d(Matrix.mat(Nat.N3(), Nat.N3()).fill( - 0, 0, 1, - -1, 0, 0, - 0, -1, 0 - )), new Translation3d()); + NWU_TO_EDN = + new RotTrlTransform3d( + new Rotation3d(Matrix.mat(Nat.N3(), Nat.N3()).fill(0, -1, 0, 0, 0, -1, 1, 0, 0)), + new Translation3d()); + EDN_TO_NWU = + new RotTrlTransform3d( + new Rotation3d(Matrix.mat(Nat.N3(), Nat.N3()).fill(0, 0, 1, -1, 0, 0, 0, -1, 0)), + new Translation3d()); } public static MatOfDouble matrixToMat(SimpleMatrix matrix) { @@ -107,8 +105,7 @@ public static Matrix matToMatrix(Mat mat) { public static MatOfPoint3f translationToTvec(Translation3d... translations) { Point3[] points = new Point3[translations.length]; for (int i = 0; i < translations.length; i++) { - var trl = - translationNWUtoEDN(translations[i]); + var trl = translationNWUtoEDN(translations[i]); points[i] = new Point3(trl.getX(), trl.getY(), trl.getZ()); } return new MatOfPoint3f(points); @@ -126,9 +123,7 @@ public static Translation3d tvecToTranslation(Mat tvecInput) { tvecInput.convertTo(wrapped, CvType.CV_32F); wrapped.get(0, 0, data); wrapped.release(); - return translationEDNtoNWU( - new Translation3d(data[0], data[1], data[2]) - ); + return translationEDNtoNWU(new Translation3d(data[0], data[1], data[2])); } /** @@ -234,34 +229,36 @@ public static List reorderCircular(List elements, boolean backwards, i return reordered; } - //TODO: RotTrlTransform3d removal awaiting Rotation3d performance improvements + // TODO: RotTrlTransform3d removal awaiting Rotation3d performance improvements /** - * Convert a rotation delta from EDN to NWU. For example, if you have a rotation X,Y,Z {1, 0, 0} in EDN, - * this would be {0, -1, 0} in NWU. + * Convert a rotation delta from EDN to NWU. For example, if you have a rotation X,Y,Z {1, 0, 0} + * in EDN, this would be {0, -1, 0} in NWU. */ private static Rotation3d rotationEDNtoNWU(Rotation3d rot) { - return new RotTrlTransform3d(EDN_TO_NWU.apply(rot), new Translation3d()).apply(EDN_TO_NWU.inverse().getRotation()); + return new RotTrlTransform3d(EDN_TO_NWU.apply(rot), new Translation3d()) + .apply(EDN_TO_NWU.inverse().getRotation()); } /** - * Convert a rotation delta from NWU to EDN. For example, if you have a rotation X,Y,Z {1, 0, 0} in NWU, - * this would be {0, 0, 1} in EDN. + * Convert a rotation delta from NWU to EDN. For example, if you have a rotation X,Y,Z {1, 0, 0} + * in NWU, this would be {0, 0, 1} in EDN. */ private static Rotation3d rotationNWUtoEDN(Rotation3d rot) { - return new RotTrlTransform3d(NWU_TO_EDN.apply(rot), new Translation3d()).apply(NWU_TO_EDN.inverse().getRotation()); + return new RotTrlTransform3d(NWU_TO_EDN.apply(rot), new Translation3d()) + .apply(NWU_TO_EDN.inverse().getRotation()); } /** - * Convert a translation from EDN to NWU. For example, if you have a translation X,Y,Z {1, 0, 0} in EDN, - * this would be {0, -1, 0} in NWU. + * Convert a translation from EDN to NWU. For example, if you have a translation X,Y,Z {1, 0, 0} + * in EDN, this would be {0, -1, 0} in NWU. 
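
For illustration (not part of this hunk), the public translationToTvec/tvecToTranslation helpers shown above wrap these private axis-convention conversions. Assuming they behave as written in this diff, a one-meter-forward translation round-trips like so:

    // Illustrative sketch only: NWU is (x fwd, y left, z up), OpenCV's EDN is (x right, y down, z fwd).
    var tvec = OpenCVHelp.translationToTvec(new Translation3d(1, 0, 0)); // packed as {0, 0, 1} in EDN
    var nwu = OpenCVHelp.tvecToTranslation(tvec);                        // back to {1, 0, 0} in NWU
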
*/ private static Translation3d translationEDNtoNWU(Translation3d trl) { return EDN_TO_NWU.apply(trl); } /** - * Convert a translation from NWU to EDN. For example, if you have a translation X,Y,Z {1, 0, 0} in NWU, - * this would be {0, 0, 1} in EDN. + * Convert a translation from NWU to EDN. For example, if you have a translation X,Y,Z {1, 0, 0} + * in NWU, this would be {0, 0, 1} in EDN. */ private static Translation3d translationNWUtoEDN(Translation3d trl) { return NWU_TO_EDN.apply(trl); @@ -273,8 +270,8 @@ private static Translation3d translationNWUtoEDN(Translation3d trl) { * * @param cameraMatrix the camera intrinsics matrix in standard opencv form * @param distCoeffs the camera distortion matrix in standard opencv form - * @param camRt The change in basis from world coordinates to camera coordinates. See - * {@link RotTrlTransform3d#makeRelativeTo(Pose3d)}. + * @param camRt The change in basis from world coordinates to camera coordinates. See {@link + * RotTrlTransform3d#makeRelativeTo(Pose3d)}. * @param objectTranslations The 3d points to be projected * @return The 2d points in pixels which correspond to the image of the 3d points on the camera */ @@ -371,6 +368,7 @@ public static RotatedRect getMinAreaRect(List corners) { /** * Gets the convex hull contour (the outline) of a list of points. + * * @param corners * @return The subset of points defining the contour */ diff --git a/photon-lib/src/main/java/org/photonvision/estimation/RotTrlTransform3d.java b/photon-lib/src/main/java/org/photonvision/estimation/RotTrlTransform3d.java index 8cd0c1372f..0fac0334eb 100644 --- a/photon-lib/src/main/java/org/photonvision/estimation/RotTrlTransform3d.java +++ b/photon-lib/src/main/java/org/photonvision/estimation/RotTrlTransform3d.java @@ -38,7 +38,7 @@ public class RotTrlTransform3d { private final Translation3d trl; private final Rotation3d rot; - //TODO: removal awaiting wpilib Rotation3d performance improvements + // TODO: removal awaiting wpilib Rotation3d performance improvements private double m_w; private double m_x; private double m_y; @@ -47,8 +47,8 @@ public class RotTrlTransform3d { /** * A rotation-translation transformation. * - *

    Applying this RotTrlTransform3d to poses will preserve their current origin-to-pose transform - * as if the origin was transformed by these components instead. + *

    Applying this RotTrlTransform3d to poses will preserve their current origin-to-pose + * transform as if the origin was transformed by these components instead. * * @param rot The rotation component * @param trl The translation component @@ -79,8 +79,8 @@ public RotTrlTransform3d(Pose3d initial, Pose3d last) { /** * Creates a rotation-translation transformation from a Transform3d. * - *

    Applying this RotTrlTransform3d to poses will preserve their current origin-to-pose transform - * as if the origin was transformed by trf instead. + *

    Applying this RotTrlTransform3d to poses will preserve their current origin-to-pose + * transform as if the origin was transformed by trf instead. * * @param trf The origin transformation */ @@ -177,7 +177,8 @@ public List applyRots(List rots) { } public Pose3d apply(Pose3d pose) { - // return new Pose3d(pose.getTranslation().rotateBy(rot).plus(trl), pose.getRotation().plus(rot)); + // return new Pose3d(pose.getTranslation().rotateBy(rot).plus(trl), + // pose.getRotation().plus(rot)); return new Pose3d(apply(pose.getTranslation()), apply(pose.getRotation())); } @@ -185,7 +186,7 @@ public List applyPoses(List poses) { return poses.stream().map(this::apply).collect(Collectors.toList()); } - //TODO: removal awaiting wpilib Rotation3d performance improvements + // TODO: removal awaiting wpilib Rotation3d performance improvements private Quaternion times(Quaternion other) { final double o_w = other.getW(); final double o_x = other.getX(); @@ -206,10 +207,17 @@ private static Quaternion times(Quaternion a, Quaternion b) { return times(m_w, m_x, m_y, m_z, o_w, o_x, o_y, o_z); } - private static Quaternion times(double m_w, double m_x, double m_y, double m_z, - double o_w, double o_x, double o_y, double o_z) { + private static Quaternion times( + double m_w, + double m_x, + double m_y, + double m_z, + double o_w, + double o_x, + double o_y, + double o_z) { // https://en.wikipedia.org/wiki/Quaternion#Scalar_and_vector_parts - + // v₁ x v₂ final double cross_x = m_y * o_z - o_y * m_z; final double cross_y = o_x * m_z - m_x * o_z; @@ -220,6 +228,6 @@ private static Quaternion times(double m_w, double m_x, double m_y, double m_z, final double new_y = o_y * m_w + (m_y * o_w) + cross_y; final double new_z = o_z * m_w + (m_z * o_w) + cross_z; - return new Quaternion(m_w * o_w - (m_x*o_x + m_y*o_y + m_z*o_z), new_x, new_y, new_z); + return new Quaternion(m_w * o_w - (m_x * o_x + m_y * o_y + m_z * o_z), new_x, new_y, new_z); } } diff --git a/photon-lib/src/main/java/org/photonvision/estimation/TargetModel.java b/photon-lib/src/main/java/org/photonvision/estimation/TargetModel.java index b5dfa4ec73..47405c44fd 100644 --- a/photon-lib/src/main/java/org/photonvision/estimation/TargetModel.java +++ b/photon-lib/src/main/java/org/photonvision/estimation/TargetModel.java @@ -48,8 +48,9 @@ public class TargetModel { new TargetModel(Units.inchesToMeters(6), Units.inchesToMeters(6)); /** - * Creates a rectangular, planar target model given the width and height. - * The model has four vertices: + * Creates a rectangular, planar target model given the width and height. The model has four + * vertices: + * *

      *
    • Point 0: [0, -width/2, -height/2] *
    • Point 1: [0, width/2, -height/2] @@ -70,18 +71,19 @@ public TargetModel(double widthMeters, double heightMeters) { } /** - * Creates a spherical target model which has similar dimensions regardless of its rotation. - * This model has four vertices: + * Creates a spherical target model which has similar dimensions regardless of its rotation. This + * model has four vertices: + * *
        *
      • Point 0: [0, -radius, -radius] *
      • Point 1: [0, radius, -radius] *
      • Point 2: [0, radius, radius] *
      • Point 3: [0, -radius, radius] *
      - * - * Q: Why these vertices? A: This target should be oriented to the camera every frame, - * much like a sprite/decal, and these vertices represent the corners of the rectangle in which - * the ellipse is inscribed. + * + * Q: Why these vertices? A: This target should be oriented to the camera every frame, much + * like a sprite/decal, and these vertices represent the corners of the rectangle in which the + * ellipse is inscribed. */ public TargetModel(double diameterMeters) { double radius = diameterMeters / 2.0; @@ -97,8 +99,8 @@ public TargetModel(double diameterMeters) { /** * Creates a target model from arbitrary 3d vertices. Automatically determines if the given - * vertices are planar(x == 0). More than 2 vertices must be given. If this is a planar - * model, the vertices should define a non-intersecting contour. + * vertices are planar(x == 0). More than 2 vertices must be given. If this is a planar model, the + * vertices should define a non-intersecting contour. * * @param vertices Translations representing the vertices of this target model relative to its * pose. @@ -121,32 +123,30 @@ public TargetModel(List vertices) { /** * This target's vertices offset from its field pose. * - *

      Note: If this target is spherical, use {@link #getOrientedPose(Translation3d, Translation3d)} - * with this method. + *

      Note: If this target is spherical, use {@link #getOrientedPose(Translation3d, + * Translation3d)} with this method. */ public List getFieldVertices(Pose3d targetPose) { var basisChange = new RotTrlTransform3d(targetPose.getRotation(), targetPose.getTranslation()); - return vertices.stream() - .map(t -> basisChange.apply(t)) - .collect(Collectors.toList()); + return vertices.stream().map(t -> basisChange.apply(t)).collect(Collectors.toList()); } /** * Returns a Pose3d with the given target translation oriented (with its relative x-axis aligned) - * to the camera translation. This is used for spherical targets which should not have their projection - * change regardless of their own rotation. - * + * to the camera translation. This is used for spherical targets which should not have their + * projection change regardless of their own rotation. + * * @param tgtTrl This target's translation * @param cameraTrl Camera's translation * @return This target's pose oriented to the camera */ public Pose3d getOrientedPose(Translation3d tgtTrl, Translation3d cameraTrl) { var relCam = cameraTrl.minus(tgtTrl); - var orientToCam = new Rotation3d( - 0, - new Rotation2d(Math.hypot(relCam.getX(), relCam.getY()), -relCam.getZ()).getRadians(), - new Rotation2d(relCam.getX(), relCam.getY()).getRadians() - ); + var orientToCam = + new Rotation3d( + 0, + new Rotation2d(Math.hypot(relCam.getX(), relCam.getY()), -relCam.getZ()).getRadians(), + new Rotation2d(relCam.getX(), relCam.getY()).getRadians()); return new Pose3d(tgtTrl, orientToCam); } diff --git a/photon-lib/src/main/java/org/photonvision/simulation/PhotonCameraSim.java b/photon-lib/src/main/java/org/photonvision/simulation/PhotonCameraSim.java index 7af5288d44..5995a39cc3 100644 --- a/photon-lib/src/main/java/org/photonvision/simulation/PhotonCameraSim.java +++ b/photon-lib/src/main/java/org/photonvision/simulation/PhotonCameraSim.java @@ -32,21 +32,15 @@ import edu.wpi.first.math.Pair; import edu.wpi.first.math.geometry.Pose3d; import edu.wpi.first.math.geometry.Rotation2d; -import edu.wpi.first.math.geometry.Rotation3d; import edu.wpi.first.util.RuntimeLoader; import edu.wpi.first.util.WPIUtilJNI; - import java.util.ArrayList; import java.util.List; import java.util.Optional; import org.opencv.core.Core; import org.opencv.core.CvType; import org.opencv.core.Mat; -import org.opencv.core.MatOfPoint; -import org.opencv.core.MatOfPoint2f; import org.opencv.core.Point; -import org.opencv.core.Rect; -import org.opencv.core.RotatedRect; import org.opencv.core.Scalar; import org.opencv.core.Size; import org.opencv.imgproc.Imgproc; @@ -54,11 +48,9 @@ import org.photonvision.PhotonTargetSortMode; import org.photonvision.common.dataflow.structures.Packet; import org.photonvision.common.networktables.NTTopicSet; -import org.photonvision.estimation.CameraTargetRelation; import org.photonvision.estimation.OpenCVHelp; import org.photonvision.estimation.PNPResults; import org.photonvision.estimation.RotTrlTransform3d; -import org.photonvision.estimation.TargetModel; import org.photonvision.targeting.PhotonPipelineResult; import org.photonvision.targeting.PhotonTrackedTarget; import org.photonvision.targeting.TargetCorner; @@ -213,10 +205,11 @@ public Mat getVideoSimFrameRaw() { */ public boolean canSeeTargetPose(Pose3d camPose, VisionTargetSim target) { // var rel = new CameraTargetRelation(camPose, target.getPose()); - //TODO: removal awaiting wpilib Rotation3d performance improvements + // TODO: removal awaiting wpilib Rotation3d performance improvements var 
relTarget = RotTrlTransform3d.makeRelativeTo(camPose).apply(target.getPose()); var camToTargYaw = new Rotation2d(relTarget.getX(), relTarget.getY()); - var camToTargPitch = new Rotation2d(Math.hypot(relTarget.getX(), relTarget.getY()), -relTarget.getZ()); + var camToTargPitch = + new Rotation2d(Math.hypot(relTarget.getX(), relTarget.getY()), -relTarget.getZ()); var relCam = RotTrlTransform3d.makeRelativeTo(target.getPose()).apply(camPose); var targToCamAngle = new Rotation2d(relCam.getX(), Math.hypot(relCam.getY(), relCam.getZ())); @@ -310,6 +303,7 @@ public void setTargetSortMode(PhotonTargetSortMode sortMode) { /** * Sets whether the raw video stream simulation is enabled. + * *

      Note: This may increase loop times. */ public void enableRawStream(boolean enabled) { @@ -318,6 +312,7 @@ public void enableRawStream(boolean enabled) { /** * Sets whether a wireframe of the field is drawn to the raw video stream. + * *

      Note: This will dramatically increase loop times. */ public void enableDrawWireframe(boolean enabled) { @@ -327,8 +322,9 @@ public void enableDrawWireframe(boolean enabled) { /** * Sets the resolution of the drawn wireframe if enabled. Drawn line segments will be subdivided * into smaller segments based on a threshold set by the resolution. - * - * @param resolution Resolution as a fraction(0 - 1) of the video frame's diagonal length in pixels + * + * @param resolution Resolution as a fraction(0 - 1) of the video frame's diagonal length in + * pixels */ public void setWireframeResolution(double resolution) { videoSimWireframeResolution = resolution; @@ -369,15 +365,15 @@ public PhotonPipelineResult process( // find target's 3d corner points var fieldCorners = tgt.getFieldVertices(); - if(tgt.getModel().isSpherical) { // target is spherical + if (tgt.getModel().isSpherical) { // target is spherical var model = tgt.getModel(); - fieldCorners = model.getFieldVertices( - model.getOrientedPose(tgt.getPose().getTranslation(), cameraPose.getTranslation())); + fieldCorners = + model.getFieldVertices( + model.getOrientedPose(tgt.getPose().getTranslation(), cameraPose.getTranslation())); } // project 3d target points into 2d image points var targetCorners = - OpenCVHelp.projectPoints( - prop.getIntrinsics(), prop.getDistCoeffs(), camRt, fieldCorners); + OpenCVHelp.projectPoints(prop.getIntrinsics(), prop.getDistCoeffs(), camRt, fieldCorners); // save visible targets for raw video stream simulation visibleTgts.add(new Pair<>(tgt, targetCorners)); // estimate pixel noise @@ -420,28 +416,35 @@ public PhotonPipelineResult process( // render visible tags to raw video frame if (videoSimRawEnabled) { // draw field wireframe - if(videoSimWireframeEnabled) { - VideoSimUtil.drawFieldWireframe(camRt, prop, videoSimWireframeResolution, 1.5, new Scalar(80), 4, 1, new Scalar(40), videoSimFrameRaw); + if (videoSimWireframeEnabled) { + VideoSimUtil.drawFieldWireframe( + camRt, + prop, + videoSimWireframeResolution, + 1.5, + new Scalar(80), + 4, + 1, + new Scalar(40), + videoSimFrameRaw); } - + // draw targets for (var pair : visibleTgts) { var tgt = pair.getFirst(); var corn = pair.getSecond(); - if(tgt.fiducialID >= 0) { // apriltags + if (tgt.fiducialID >= 0) { // apriltags VideoSimUtil.warp16h5TagImage( - tgt.fiducialID, OpenCVHelp.targetCornersToMat(corn), true, videoSimFrameRaw); - } - else if(!tgt.getModel().isSpherical) { // non-spherical targets + tgt.fiducialID, OpenCVHelp.targetCornersToMat(corn), true, videoSimFrameRaw); + } else if (!tgt.getModel().isSpherical) { // non-spherical targets var contour = OpenCVHelp.targetCornersToMat(corn); - if(!tgt.getModel().isPlanar) { // visualization cant handle non-convex projections of 3d models + if (!tgt.getModel() + .isPlanar) { // visualization cant handle non-convex projections of 3d models contour.fromArray(OpenCVHelp.getConvexHull(corn).toArray()); } - VideoSimUtil.drawPoly( - contour, -1, new Scalar(255), true, videoSimFrameRaw); - } - else { // spherical targets + VideoSimUtil.drawPoly(contour, -1, new Scalar(255), true, videoSimFrameRaw); + } else { // spherical targets VideoSimUtil.drawEllipse(corn, new Scalar(255), videoSimFrameRaw); } } @@ -451,30 +454,35 @@ else if(!tgt.getModel().isSpherical) { // non-spherical targets if (videoSimProcEnabled) { Imgproc.cvtColor(videoSimFrameRaw, videoSimFrameProcessed, Imgproc.COLOR_GRAY2BGR); Imgproc.drawMarker( // crosshair - videoSimFrameProcessed, - new Point(prop.getResWidth()/2.0, 
prop.getResHeight()/2.0), - new Scalar(0, 255, 0), - Imgproc.MARKER_CROSS, - (int)VideoSimUtil.getScaledThickness(15, videoSimFrameProcessed), - (int)VideoSimUtil.getScaledThickness(1, videoSimFrameProcessed), - Imgproc.LINE_AA - ); + videoSimFrameProcessed, + new Point(prop.getResWidth() / 2.0, prop.getResHeight() / 2.0), + new Scalar(0, 255, 0), + Imgproc.MARKER_CROSS, + (int) VideoSimUtil.getScaledThickness(15, videoSimFrameProcessed), + (int) VideoSimUtil.getScaledThickness(1, videoSimFrameProcessed), + Imgproc.LINE_AA); for (var tgt : detectableTgts) { if (tgt.getFiducialId() >= 0) { // apriltags VideoSimUtil.drawTagDetection( tgt.getFiducialId(), OpenCVHelp.targetCornersToMat(tgt.getDetectedCorners()), videoSimFrameProcessed); - } - else { // other targets + } else { // other targets Imgproc.rectangle( - videoSimFrameProcessed, OpenCVHelp.getBoundingRect(tgt.getDetectedCorners()), - new Scalar(0, 0, 255), - (int)VideoSimUtil.getScaledThickness(1, videoSimFrameProcessed), - Imgproc.LINE_AA); - - for(var pt : tgt.getDetectedCorners()) { - Imgproc.circle(videoSimFrameProcessed, new Point(pt.x, pt.y), 3, new Scalar(255, 50, 0), 1, Imgproc.LINE_AA); + videoSimFrameProcessed, + OpenCVHelp.getBoundingRect(tgt.getDetectedCorners()), + new Scalar(0, 0, 255), + (int) VideoSimUtil.getScaledThickness(1, videoSimFrameProcessed), + Imgproc.LINE_AA); + + for (var pt : tgt.getDetectedCorners()) { + Imgproc.circle( + videoSimFrameProcessed, + new Point(pt.x, pt.y), + 3, + new Scalar(255, 50, 0), + 1, + Imgproc.LINE_AA); } } } diff --git a/photon-lib/src/main/java/org/photonvision/simulation/SimCameraProperties.java b/photon-lib/src/main/java/org/photonvision/simulation/SimCameraProperties.java index 94b790f13f..f4aea9148c 100644 --- a/photon-lib/src/main/java/org/photonvision/simulation/SimCameraProperties.java +++ b/photon-lib/src/main/java/org/photonvision/simulation/SimCameraProperties.java @@ -25,7 +25,6 @@ package org.photonvision.simulation; import com.fasterxml.jackson.databind.ObjectMapper; - import edu.wpi.first.math.Matrix; import edu.wpi.first.math.Nat; import edu.wpi.first.math.Pair; @@ -37,14 +36,12 @@ import edu.wpi.first.math.geometry.Translation3d; import edu.wpi.first.math.numbers.*; import edu.wpi.first.wpilibj.DriverStation; - import java.io.IOException; import java.nio.file.Path; import java.util.ArrayList; import java.util.List; import java.util.Random; import java.util.stream.Collectors; - import org.ejml.data.DMatrix3; import org.ejml.dense.fixed.CommonOps_DDF3; import org.opencv.imgproc.Imgproc; @@ -201,14 +198,26 @@ public void setCalibration( this.distCoeffs = distCoeffs; // left, right, up, and down view planes - var p = new Translation3d[]{ - new Translation3d(1, new Rotation3d(0, 0, getPixelYaw(0).plus(new Rotation2d(-Math.PI/2)).getRadians())), - new Translation3d(1, new Rotation3d(0, 0, getPixelYaw(resWidth).plus(new Rotation2d(Math.PI/2)).getRadians())), - new Translation3d(1, new Rotation3d(0, getPixelPitch(0).plus(new Rotation2d(Math.PI/2)).getRadians(), 0)), - new Translation3d(1, new Rotation3d(0, getPixelPitch(resHeight).plus(new Rotation2d(-Math.PI/2)).getRadians(), 0)) - }; + var p = + new Translation3d[] { + new Translation3d( + 1, + new Rotation3d(0, 0, getPixelYaw(0).plus(new Rotation2d(-Math.PI / 2)).getRadians())), + new Translation3d( + 1, + new Rotation3d( + 0, 0, getPixelYaw(resWidth).plus(new Rotation2d(Math.PI / 2)).getRadians())), + new Translation3d( + 1, + new Rotation3d( + 0, getPixelPitch(0).plus(new Rotation2d(Math.PI / 2)).getRadians(), 0)), + 
new Translation3d( + 1, + new Rotation3d( + 0, getPixelPitch(resHeight).plus(new Rotation2d(-Math.PI / 2)).getRadians(), 0)) + }; viewplanes.clear(); - for(int i = 0; i < p.length; i++) { + for (int i = 0; i < p.length; i++) { viewplanes.add(new DMatrix3(p[i].getX(), p[i].getY(), p[i].getZ())); } } @@ -414,33 +423,34 @@ public Rotation2d getDiagFOV() { /** * Determines where the line segment defined by the two given translations intersects the camera's * frustum/field-of-vision, if at all. - * - *

      The line is parametrized so any of its points p = t * (b - a) + a. - * This method returns these values of t, minimum first, defining the region of the line segment - * which is visible in the frustum. If both ends of the line segment are visible, this simply - * returns {0, 1}. If, for example, point b is visible while a is not, and half of the line segment - * is inside the camera frustum, {0.5, 1} would be returned. - * - * @param camRt The change in basis from world coordinates to camera coordinates. See - * {@link RotTrlTransform3d#makeRelativeTo(Pose3d)}. + * + *

      The line is parametrized so any of its points p = t * (b - a) + a. This method + * returns these values of t, minimum first, defining the region of the line segment which is + * visible in the frustum. If both ends of the line segment are visible, this simply returns {0, + * 1}. If, for example, point b is visible while a is not, and half of the line segment is inside + * the camera frustum, {0.5, 1} would be returned. + * + * @param camRt The change in basis from world coordinates to camera coordinates. See {@link + * RotTrlTransform3d#makeRelativeTo(Pose3d)}. * @param a The initial translation of the line * @param b The final translation of the line * @return A Pair of Doubles. The values may be null: *

        - *
      • {Double, Double} : Two parametrized values(t), minimum first, representing which segment - * of the line is visible in the camera frustum. - *
      • {Double, null} : One value(t) representing a single intersection point. For example, the - * line only intersects the intersection of two adjacent viewplanes. - *
      • {null, null} : No values. The line segment is not visible in the camera frustum. + *
      • {Double, Double} : Two parametrized values(t), minimum first, representing which + * segment of the line is visible in the camera frustum. + *
      • {Double, null} : One value(t) representing a single intersection point. For example, + * the line only intersects the intersection of two adjacent viewplanes. + *
      • {null, null} : No values. The line segment is not visible in the camera frustum. *
      */ - public Pair getVisibleLine(RotTrlTransform3d camRt, Translation3d a, Translation3d b) { + public Pair getVisibleLine( + RotTrlTransform3d camRt, Translation3d a, Translation3d b) { // translations relative to the camera var rela = camRt.apply(a); var relb = camRt.apply(b); // check if both ends are behind camera - if(rela.getX() <= 0 && relb.getX() <= 0) return new Pair<>(null, null); + if (rela.getX() <= 0 && relb.getX() <= 0) return new Pair<>(null, null); var av = new DMatrix3(rela.getX(), rela.getY(), rela.getZ()); var bv = new DMatrix3(relb.getX(), relb.getY(), relb.getZ()); @@ -451,31 +461,26 @@ public Pair getVisibleLine(RotTrlTransform3d camRt, Translation3 // check if the ends of the line segment are visible boolean aVisible = true; boolean bVisible = true; - for(int i = 0; i < viewplanes.size(); i++) { + for (int i = 0; i < viewplanes.size(); i++) { var normal = viewplanes.get(i); double aVisibility = CommonOps_DDF3.dot(av, normal); - if(aVisibility < 0) aVisible = false; + if (aVisibility < 0) aVisible = false; double bVisibility = CommonOps_DDF3.dot(bv, normal); - if(bVisibility < 0) bVisible = false; + if (bVisibility < 0) bVisible = false; // both ends are outside at least one of the same viewplane - if(aVisibility <= 0 && bVisibility <= 0) return new Pair<>(null, null); + if (aVisibility <= 0 && bVisibility <= 0) return new Pair<>(null, null); } // both ends are inside frustum - if(aVisible && bVisible) return new Pair<>(Double.valueOf(0), Double.valueOf(1)); - + if (aVisible && bVisible) return new Pair<>(Double.valueOf(0), Double.valueOf(1)); + // parametrized (t=0 at a, t=1 at b) intersections with viewplanes - double[] intersections = { - Double.NaN, - Double.NaN, - Double.NaN, - Double.NaN - }; + double[] intersections = {Double.NaN, Double.NaN, Double.NaN, Double.NaN}; // intersection points List ipts = new ArrayList<>(); - for(double val : intersections) ipts.add(null); + for (double val : intersections) ipts.add(null); // find intersections - for(int i = 0; i < viewplanes.size(); i++) { + for (int i = 0; i < viewplanes.size(); i++) { var normal = viewplanes.get(i); // we want to know the value of t when the line intercepts this plane @@ -483,10 +488,11 @@ public Pair getVisibleLine(RotTrlTransform3d camRt, Translation3 // we can find the projection of a onto the plane normal // a_projn = normal.times(av.dot(normal) / normal.dot(normal)); var a_projn = new DMatrix3(); - CommonOps_DDF3.scale(CommonOps_DDF3.dot(av, normal) / CommonOps_DDF3.dot(normal, normal), normal, a_projn); + CommonOps_DDF3.scale( + CommonOps_DDF3.dot(av, normal) / CommonOps_DDF3.dot(normal, normal), normal, a_projn); // this projection lets us determine the scalar multiple t of ab where // (t * ab + a) is a vector which lies on the plane - if(Math.abs(CommonOps_DDF3.dot(abv, a_projn)) < 1e-5) continue; // line is parallel to plane + if (Math.abs(CommonOps_DDF3.dot(abv, a_projn)) < 1e-5) continue; // line is parallel to plane intersections[i] = CommonOps_DDF3.dot(a_projn, a_projn) / -CommonOps_DDF3.dot(abv, a_projn); // vector a to the viewplane @@ -498,11 +504,11 @@ public Pair getVisibleLine(RotTrlTransform3d camRt, Translation3 ipts.set(i, intersectpt); // discard intersections outside the camera frustum - for(int j = 1; j < viewplanes.size(); j++) { - int oi = (i+j) % viewplanes.size(); + for (int j = 1; j < viewplanes.size(); j++) { + int oi = (i + j) % viewplanes.size(); var onormal = viewplanes.get(oi); // if the dot of the intersection point with any plane normal is negative, it 
is outside - if(CommonOps_DDF3.dot(intersectpt, onormal) < 0) { + if (CommonOps_DDF3.dot(intersectpt, onormal) < 0) { intersections[i] = Double.NaN; ipts.set(i, null); break; @@ -510,13 +516,13 @@ public Pair getVisibleLine(RotTrlTransform3d camRt, Translation3 } // discard duplicate intersections - if(ipts.get(i) == null) continue; - for(int j = i-1; j >= 0; j--) { + if (ipts.get(i) == null) continue; + for (int j = i - 1; j >= 0; j--) { var oipt = ipts.get(j); - if(oipt == null) continue; + if (oipt == null) continue; var diff = new DMatrix3(); CommonOps_DDF3.subtract(oipt, intersectpt, diff); - if(CommonOps_DDF3.elementMaxAbs(diff) < 1e-4) { + if (CommonOps_DDF3.elementMaxAbs(diff) < 1e-4) { intersections[i] = Double.NaN; ipts.set(i, null); break; @@ -527,28 +533,25 @@ public Pair getVisibleLine(RotTrlTransform3d camRt, Translation3 // determine visible segment (minimum and maximum t) double inter1 = Double.NaN; double inter2 = Double.NaN; - for(double inter : intersections) { - if(!Double.isNaN(inter)) { - if(Double.isNaN(inter1)) inter1 = inter; + for (double inter : intersections) { + if (!Double.isNaN(inter)) { + if (Double.isNaN(inter1)) inter1 = inter; else inter2 = inter; } } // two viewplane intersections - if(!Double.isNaN(inter2)) { + if (!Double.isNaN(inter2)) { double max = Math.max(inter1, inter2); double min = Math.min(inter1, inter2); - if(aVisible) min = 0; - if(bVisible) max = 1; - return new Pair<>( - min, - max - ); + if (aVisible) min = 0; + if (bVisible) max = 1; + return new Pair<>(min, max); } // one viewplane intersection - else if(!Double.isNaN(inter1)) { - if(aVisible) return new Pair<>(Double.valueOf(0), inter1); - if(bVisible) return new Pair<>(inter1, Double.valueOf(1)); + else if (!Double.isNaN(inter1)) { + if (aVisible) return new Pair<>(Double.valueOf(0), inter1); + if (bVisible) return new Pair<>(inter1, Double.valueOf(1)); return new Pair<>(inter1, null); } // no intersections diff --git a/photon-lib/src/main/java/org/photonvision/simulation/VideoSimUtil.java b/photon-lib/src/main/java/org/photonvision/simulation/VideoSimUtil.java index 8d91e149e9..a9996999e1 100644 --- a/photon-lib/src/main/java/org/photonvision/simulation/VideoSimUtil.java +++ b/photon-lib/src/main/java/org/photonvision/simulation/VideoSimUtil.java @@ -28,7 +28,6 @@ import edu.wpi.first.math.geometry.Pose3d; import edu.wpi.first.math.geometry.Translation3d; import edu.wpi.first.util.RuntimeLoader; - import java.awt.image.BufferedImage; import java.io.IOException; import java.util.ArrayList; @@ -36,7 +35,6 @@ import java.util.HashMap; import java.util.List; import java.util.Map; - import javax.imageio.ImageIO; import org.opencv.core.Core; import org.opencv.core.CvType; @@ -303,8 +301,9 @@ public static void warp16h5TagImage( } /** - * Given a line thickness in a 640x480 image, try to scale to the given destination image resolution. - * + * Given a line thickness in a 640x480 image, try to scale to the given destination image + * resolution. + * * @param thickness480p A hypothetical line thickness in a 640x480 image * @param destinationImg The destination image to scale to * @return Scaled thickness which cannot be less than 1 @@ -318,7 +317,7 @@ public static double getScaledThickness(double thickness480p, Mat destinationImg /** * Draw a filled ellipse in the destination image. - * + * * @param corners The corners of a rectangle in which the ellipse is inscribed * @param color The color of the ellipse. 
This is a scalar with BGR values (0-255) * @param destination The destination image to draw onto. The image should be in the BGR color @@ -328,27 +327,26 @@ public static void drawEllipse(List corners, Scalar color, Mat des // create RotatedRect from corners var rect = OpenCVHelp.getMinAreaRect(corners); // inscribe ellipse inside rectangle - Imgproc.ellipse( - destination, - rect, color, -1, Imgproc.LINE_AA); + Imgproc.ellipse(destination, rect, color, -1, Imgproc.LINE_AA); } /** * Draw a polygon outline or filled polygon to the destination image with the given points. - * + * * @param dstPoints The points in the destination image representing the polygon. - * @param thickness The thickness of the outline in pixels. If this is not positive, a filled polygon is drawn instead. + * @param thickness The thickness of the outline in pixels. If this is not positive, a filled + * polygon is drawn instead. * @param color The color drawn. This should match the color space of the destination image. * @param isClosed If the last point should connect to the first point in the polygon outline. * @param destination The destination image to draw onto. */ - public static void drawPoly(MatOfPoint2f dstPoints, int thickness, Scalar color, boolean isClosed, Mat destination) { + public static void drawPoly( + MatOfPoint2f dstPoints, int thickness, Scalar color, boolean isClosed, Mat destination) { var dstPointsd = new MatOfPoint(dstPoints.toArray()); - if(thickness > 0){ + if (thickness > 0) { Imgproc.polylines( - destination, List.of(dstPointsd), isClosed, color, thickness, Imgproc.LINE_AA); - } - else { + destination, List.of(dstPointsd), isClosed, color, thickness, Imgproc.LINE_AA); + } else { Imgproc.fillPoly(destination, List.of(dstPointsd), color, Imgproc.LINE_AA); } } @@ -364,9 +362,9 @@ public static void drawPoly(MatOfPoint2f dstPoints, int thickness, Scalar color, */ public static void drawTagDetection(int id, MatOfPoint2f dstPoints, Mat destination) { double thickness = getScaledThickness(1, destination); - drawPoly(dstPoints, (int)thickness, new Scalar(0, 0, 255), true, destination); + drawPoly(dstPoints, (int) thickness, new Scalar(0, 0, 255), true, destination); var rect = Imgproc.boundingRect(dstPoints); - var textPt = new Point(rect.x+rect.width, rect.y); + var textPt = new Point(rect.x + rect.width, rect.y); textPt.x += thickness; textPt.y += thickness; Imgproc.putText( @@ -376,13 +374,13 @@ public static void drawTagDetection(int id, MatOfPoint2f dstPoints, Mat destinat Imgproc.FONT_HERSHEY_PLAIN, thickness, new Scalar(0, 255, 0), - (int)thickness, + (int) thickness, Imgproc.LINE_AA); } /** * Set the field dimensions that are used for drawing the field wireframe. - * + * * @param fieldLengthMeters * @param fieldWidthMeters */ @@ -392,8 +390,8 @@ public static void setFieldDimensionsMeters(double fieldLengthMeters, double fie } /** - * The translations used to draw the field side walls and driver station walls. It is a List of Lists because - * the translations are not all connected. + * The translations used to draw the field side walls and driver station walls. It is a List of + * Lists because the translations are not all connected. 
*/ private static List> getFieldWallLines() { var list = new ArrayList>(); @@ -404,72 +402,56 @@ private static List> getFieldWallLines() { // field floor list.add( - List.of( - new Translation3d(0, 0, 0), - new Translation3d(fieldLength, 0, 0), - new Translation3d(fieldLength, fieldWidth, 0), - new Translation3d(0, fieldWidth, 0), - new Translation3d(0, 0, 0) - ) - ); + List.of( + new Translation3d(0, 0, 0), + new Translation3d(fieldLength, 0, 0), + new Translation3d(fieldLength, fieldWidth, 0), + new Translation3d(0, fieldWidth, 0), + new Translation3d(0, 0, 0))); // right side wall list.add( - List.of( - new Translation3d(0, 0, 0), - new Translation3d(0, 0, sideHt), - new Translation3d(fieldLength, 0, sideHt), - new Translation3d(fieldLength, 0, 0) - ) - ); + List.of( + new Translation3d(0, 0, 0), + new Translation3d(0, 0, sideHt), + new Translation3d(fieldLength, 0, sideHt), + new Translation3d(fieldLength, 0, 0))); // red driverstation list.add( - List.of( - new Translation3d(fieldLength, 0, sideHt), - new Translation3d(fieldLength, 0, topHt), - new Translation3d(fieldLength, fieldWidth, topHt), - new Translation3d(fieldLength, fieldWidth, sideHt) - ) - ); + List.of( + new Translation3d(fieldLength, 0, sideHt), + new Translation3d(fieldLength, 0, topHt), + new Translation3d(fieldLength, fieldWidth, topHt), + new Translation3d(fieldLength, fieldWidth, sideHt))); list.add( - List.of( - new Translation3d(fieldLength, 0, driveHt), - new Translation3d(fieldLength, fieldWidth, driveHt) - ) - ); + List.of( + new Translation3d(fieldLength, 0, driveHt), + new Translation3d(fieldLength, fieldWidth, driveHt))); // left side wall list.add( - List.of( - new Translation3d(0, fieldWidth, 0), - new Translation3d(0, fieldWidth, sideHt), - new Translation3d(fieldLength, fieldWidth, sideHt), - new Translation3d(fieldLength, fieldWidth, 0) - ) - ); + List.of( + new Translation3d(0, fieldWidth, 0), + new Translation3d(0, fieldWidth, sideHt), + new Translation3d(fieldLength, fieldWidth, sideHt), + new Translation3d(fieldLength, fieldWidth, 0))); // blue driverstation list.add( - List.of( - new Translation3d(0, 0, sideHt), - new Translation3d(0, 0, topHt), - new Translation3d(0, fieldWidth, topHt), - new Translation3d(0, fieldWidth, sideHt) - ) - ); - list.add( - List.of( - new Translation3d(0, 0, driveHt), - new Translation3d(0, fieldWidth, driveHt) - ) - ); + List.of( + new Translation3d(0, 0, sideHt), + new Translation3d(0, 0, topHt), + new Translation3d(0, fieldWidth, topHt), + new Translation3d(0, fieldWidth, sideHt))); + list.add(List.of(new Translation3d(0, 0, driveHt), new Translation3d(0, fieldWidth, driveHt))); return list; } /** - * The translations used to draw the field floor subdivisions (not the floor outline). It is a List of Lists because - * the translations are not all connected. - * - * @param subdivisions How many "subdivisions" along the width/length of the floor. E.g. 3 subdivisions - * would mean 2 lines along the length and 2 lines along the width creating a 3x3 "grid". + * The translations used to draw the field floor subdivisions (not the floor outline). It is a + * List of Lists because the translations are not all connected. + * + * @param subdivisions How many "subdivisions" along the width/length of the floor. E.g. 3 + * subdivisions would mean 2 lines along the length and 2 lines along the width creating a 3x3 + * "grid". 
*/ private static List> getFieldFloorLines(int subdivisions) { var list = new ArrayList>(); @@ -477,75 +459,81 @@ private static List> getFieldFloorLines(int subdivisions) { final double subWidth = fieldWidth / subdivisions; // field floor subdivisions - for(int i = 0; i < subdivisions; i++) { - list.add(List.of( - new Translation3d(0, subWidth*(i+1), 0), - new Translation3d(fieldLength, subWidth*(i+1), 0) - )); - list.add(List.of( - new Translation3d(subLength*(i+1), 0, 0), - new Translation3d(subLength*(i+1), fieldWidth, 0) - )); + for (int i = 0; i < subdivisions; i++) { + list.add( + List.of( + new Translation3d(0, subWidth * (i + 1), 0), + new Translation3d(fieldLength, subWidth * (i + 1), 0))); + list.add( + List.of( + new Translation3d(subLength * (i + 1), 0, 0), + new Translation3d(subLength * (i + 1), fieldWidth, 0))); } return list; } /** - * Convert 3D lines represented by the given series of translations into a polygon in the camera's image. - * - * @param camRt The change in basis from world coordinates to camera coordinates. See - * {@link RotTrlTransform3d#makeRelativeTo(Pose3d)}. + * Convert 3D lines represented by the given series of translations into a polygon in the camera's + * image. + * + * @param camRt The change in basis from world coordinates to camera coordinates. See {@link + * RotTrlTransform3d#makeRelativeTo(Pose3d)}. * @param prop The simulated camera's properties. * @param trls A sequential series of translations defining the polygon to be drawn. - * @param resolution Resolution as a fraction(0 - 1) of the video frame's diagonal length in pixels. - * Line segments will be subdivided if they exceed this resolution. + * @param resolution Resolution as a fraction(0 - 1) of the video frame's diagonal length in + * pixels. Line segments will be subdivided if they exceed this resolution. * @param isClosed If the final translation should also draw a line to the first translation. * @param destination The destination image that is being drawn to. 
*/ - public static MatOfPoint2f polyFrom3dLines(RotTrlTransform3d camRt, SimCameraProperties prop, - List trls, double resolution, - boolean isClosed, Mat destination) { - resolution = Math.hypot(destination.size().height, destination.size().width)*resolution; + public static MatOfPoint2f polyFrom3dLines( + RotTrlTransform3d camRt, + SimCameraProperties prop, + List trls, + double resolution, + boolean isClosed, + Mat destination) { + resolution = Math.hypot(destination.size().height, destination.size().width) * resolution; List pts = new ArrayList<>(trls); - if(isClosed) pts.add(pts.get(0)); + if (isClosed) pts.add(pts.get(0)); var corners = new ArrayList(); - for(int i = 0; i < pts.size()-1; i++) { + for (int i = 0; i < pts.size() - 1; i++) { var pta = pts.get(i); - var ptb = pts.get(i+1); + var ptb = pts.get(i + 1); // check if line is inside camera fulcrum var inter = prop.getVisibleLine(camRt, pta, ptb); - if(inter.getSecond() == null) continue; - + if (inter.getSecond() == null) continue; + // cull line to the inside of the camera fulcrum double inter1 = inter.getFirst().doubleValue(); double inter2 = inter.getSecond().doubleValue(); var baseDelta = ptb.minus(pta); var old_pta = pta; - if(inter1 > 0) pta = old_pta.plus(baseDelta.times(inter1)); - if(inter2 < 1) ptb = old_pta.plus(baseDelta.times(inter2)); + if (inter1 > 0) pta = old_pta.plus(baseDelta.times(inter1)); + if (inter2 < 1) ptb = old_pta.plus(baseDelta.times(inter2)); baseDelta = ptb.minus(pta); // project points into 2d var corn = new ArrayList(); - corn.addAll(OpenCVHelp.projectPoints( - prop.getIntrinsics(), prop.getDistCoeffs(), camRt, List.of(pta, ptb))); + corn.addAll( + OpenCVHelp.projectPoints( + prop.getIntrinsics(), prop.getDistCoeffs(), camRt, List.of(pta, ptb))); var pxa = corn.get(0); var pxb = corn.get(1); // subdivide projected line based on desired resolution - double pxDist = Math.hypot(pxb.x-pxa.x, pxb.y-pxa.y); + double pxDist = Math.hypot(pxb.x - pxa.x, pxb.y - pxa.y); int subdivisions = (int) (pxDist / resolution); - var subDelta = baseDelta.div(subdivisions+1); + var subDelta = baseDelta.div(subdivisions + 1); var subPts = new ArrayList(); - for(int j = 0; j < subdivisions; j++) { - subPts.add(pta.plus(subDelta.times(j+1))); + for (int j = 0; j < subdivisions; j++) { + subPts.add(pta.plus(subDelta.times(j + 1))); } - if(subPts.size() > 0){ - corn.addAll(1, OpenCVHelp.projectPoints( - prop.getIntrinsics(), prop.getDistCoeffs(), camRt, subPts)); + if (subPts.size() > 0) { + corn.addAll( + 1, OpenCVHelp.projectPoints(prop.getIntrinsics(), prop.getDistCoeffs(), camRt, subPts)); } corners.addAll(corn); @@ -556,14 +544,14 @@ public static MatOfPoint2f polyFrom3dLines(RotTrlTransform3d camRt, SimCameraPro /** * Draw a wireframe of the field to the given image. - * - * @param camRt The change in basis from world coordinates to camera coordinates. See - * {@link RotTrlTransform3d#makeRelativeTo(Pose3d)}. + * + * @param camRt The change in basis from world coordinates to camera coordinates. See {@link + * RotTrlTransform3d#makeRelativeTo(Pose3d)}. * @param prop The simulated camera's properties. - * @param resolution Resolution as a fraction(0 - 1) of the video frame's diagonal length in pixels. - * Line segments will be subdivided if they exceed this resolution. - * @param wallThickness Thickness of the lines used for drawing the field walls in pixels. - * This is scaled by {@link #getScaledThickness(double, Mat)}. 
+ * @param resolution Resolution as a fraction(0 - 1) of the video frame's diagonal length in + * pixels. Line segments will be subdivided if they exceed this resolution. + * @param wallThickness Thickness of the lines used for drawing the field walls in pixels. This is + * scaled by {@link #getScaledThickness(double, Mat)}. * @param wallColor Color of the lines used for drawing the field walls. * @param floorSubdivisions A NxN "grid" is created from the floor where this parameter is N, * which defines the floor lines. @@ -572,16 +560,33 @@ public static MatOfPoint2f polyFrom3dLines(RotTrlTransform3d camRt, SimCameraPro * @param floorColor Color of the lines used for drawing the field floor grid. * @param destination The destination image to draw to. */ - public static void drawFieldWireframe(RotTrlTransform3d camRt, SimCameraProperties prop, - double resolution, double wallThickness, Scalar wallColor, int floorSubdivisions, - double floorThickness, Scalar floorColor, Mat destination) { - for(var trls : getFieldWallLines()) { + public static void drawFieldWireframe( + RotTrlTransform3d camRt, + SimCameraProperties prop, + double resolution, + double wallThickness, + Scalar wallColor, + int floorSubdivisions, + double floorThickness, + Scalar floorColor, + Mat destination) { + for (var trls : getFieldWallLines()) { var poly = VideoSimUtil.polyFrom3dLines(camRt, prop, trls, resolution, false, destination); - drawPoly(poly, (int)getScaledThickness(wallThickness, destination), wallColor, false, destination); + drawPoly( + poly, + (int) getScaledThickness(wallThickness, destination), + wallColor, + false, + destination); } - for(var trls : getFieldFloorLines(floorSubdivisions)) { + for (var trls : getFieldFloorLines(floorSubdivisions)) { var poly = VideoSimUtil.polyFrom3dLines(camRt, prop, trls, resolution, false, destination); - drawPoly(poly, (int)getScaledThickness(floorThickness, destination), floorColor, false, destination); + drawPoly( + poly, + (int) getScaledThickness(floorThickness, destination), + floorColor, + false, + destination); } } } diff --git a/photon-lib/src/main/java/org/photonvision/simulation/VisionSystemSim.java b/photon-lib/src/main/java/org/photonvision/simulation/VisionSystemSim.java index c48e32e081..4e65bdcbd4 100644 --- a/photon-lib/src/main/java/org/photonvision/simulation/VisionSystemSim.java +++ b/photon-lib/src/main/java/org/photonvision/simulation/VisionSystemSim.java @@ -154,9 +154,9 @@ public Optional getRobotToCamera(PhotonCameraSim cameraSim, double } /** - * Get a simulated camera's position on the field. If the requested camera is invalid, an - * empty optional is returned. - * + * Get a simulated camera's position on the field. If the requested camera is invalid, an empty + * optional is returned. + * * @param cameraSim The specific camera to get the field pose of * @return The pose of this camera, or an empty optional if it is invalid */ @@ -165,16 +165,16 @@ public Optional getCameraPose(PhotonCameraSim cameraSim) { } /** - * Get a simulated camera's position on the field. If the requested camera is invalid, an - * empty optional is returned. - * + * Get a simulated camera's position on the field. If the requested camera is invalid, an empty + * optional is returned. 
+ * * @param cameraSim The specific camera to get the field pose of * @param timeSeconds Timestamp in seconds of when the pose should be observed * @return The pose of this camera, or an empty optional if it is invalid */ public Optional getCameraPose(PhotonCameraSim cameraSim, double timeSeconds) { var robotToCamera = getRobotToCamera(cameraSim, timeSeconds); - if(robotToCamera.isEmpty()) return Optional.empty(); + if (robotToCamera.isEmpty()) return Optional.empty(); return Optional.of(getRobotPose(timeSeconds).plus(robotToCamera.get())); } @@ -244,8 +244,8 @@ public void addVisionTargets(VisionTargetSim... targets) { * the subset of these targets which are visible from the given camera position. * *

      The AprilTags from this layout will be added as vision targets under the type "apriltag". - * The poses added preserve the tag layout's current alliance origin. If the tag layout's - * alliance origin is changed, these added tags will have to be cleared and re-added. + * The poses added preserve the tag layout's current alliance origin. If the tag layout's alliance + * origin is changed, these added tags will have to be cleared and re-added. * * @param tagLayout The field tag layout to get Apriltag poses and IDs from */ diff --git a/photon-lib/src/test/java/org/photonvision/OpenCVTest.java b/photon-lib/src/test/java/org/photonvision/OpenCVTest.java index f0eaeef5cb..257114a6ef 100644 --- a/photon-lib/src/test/java/org/photonvision/OpenCVTest.java +++ b/photon-lib/src/test/java/org/photonvision/OpenCVTest.java @@ -180,7 +180,8 @@ public void testProjection() { var actualRelation = new CameraTargetRelation(cameraPose, target.getPose()); assertEquals( actualRelation.camToTargPitch.getDegrees(), - pitchDiff.getDegrees() * Math.cos(yaw2d.getRadians()), // adjust for unaccounted perpsective distortion + pitchDiff.getDegrees() + * Math.cos(yaw2d.getRadians()), // adjust for unaccounted perpsective distortion kRotDeltaDeg, "2d pitch doesn't match 3d"); assertEquals( @@ -208,7 +209,7 @@ public void testSolvePNP_SQUARE() { var pnpSim = OpenCVHelp.solvePNP_SQUARE( prop.getIntrinsics(), prop.getDistCoeffs(), target.getModel().vertices, targetCorners); - + // check solvePNP estimation accuracy assertSame(relTarget.getRotation(), pnpSim.best.getRotation()); assertSame(relTarget.getTranslation(), pnpSim.best.getTranslation()); From c270828f3c7ef84d46f3995c8f760726af9768e1 Mon Sep 17 00:00:00 2001 From: amquake Date: Sun, 13 Aug 2023 01:24:55 -0700 Subject: [PATCH 12/46] fix tests using target yaw/pitch --- .../simulation/SimCameraProperties.java | 4 ++++ .../org/photonvision/VisionSystemSimTest.java | 17 ++++++++++++----- 2 files changed, 16 insertions(+), 5 deletions(-) diff --git a/photon-lib/src/main/java/org/photonvision/simulation/SimCameraProperties.java b/photon-lib/src/main/java/org/photonvision/simulation/SimCameraProperties.java index f4aea9148c..7d901f3023 100644 --- a/photon-lib/src/main/java/org/photonvision/simulation/SimCameraProperties.java +++ b/photon-lib/src/main/java/org/photonvision/simulation/SimCameraProperties.java @@ -381,6 +381,10 @@ public Rotation3d getPixelRot(TargetCorner point) { * length should be: * *

      focal length y ⟶ (focal length y / cos(arctan(pixel x offset / focal length x)))
+ * + * More simply, the pitch angle must be corrected as: + * + *
      pitch * cos(yaw)
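
As a worked example of the correction above (not code from this patch): the intrinsics values, pixel coordinates, and variable names below are hypothetical, only the two formulas come from the comment, and the fragment uses nothing beyond java.lang.Math.

    // Minimal sketch, assuming pinhole intrinsics (fx, fy, cx, cy) and a pixel of interest.
    double fx = 600, fy = 600, cx = 320, cy = 240; // hypothetical intrinsics
    double px = 500, py = 140;                     // hypothetical pixel
    double xOffset = cx - px;
    double yOffset = cy - py;
    double yaw = Math.atan2(xOffset, fx);
    double naivePitch = Math.atan2(yOffset, fy);
    // exact form: stretch the y focal length by 1 / cos(yaw) before taking the angle
    double exactPitch = Math.atan2(yOffset, fy / Math.cos(yaw));
    // simplified form stated above: scale the naive pitch by cos(yaw)
    double approxPitch = naivePitch * Math.cos(yaw);
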
      * * @return Rotation3d with yaw and pitch of the line projected out of the camera from the given * pixel (roll is zero). diff --git a/photon-lib/src/test/java/org/photonvision/VisionSystemSimTest.java b/photon-lib/src/test/java/org/photonvision/VisionSystemSimTest.java index 6f7720a666..41e9f0eb00 100644 --- a/photon-lib/src/test/java/org/photonvision/VisionSystemSimTest.java +++ b/photon-lib/src/test/java/org/photonvision/VisionSystemSimTest.java @@ -60,6 +60,7 @@ import org.junit.jupiter.params.provider.MethodSource; import org.junit.jupiter.params.provider.ValueSource; import org.opencv.core.Core; +import org.opencv.highgui.HighGui; import org.photonvision.estimation.TargetModel; import org.photonvision.estimation.VisionEstimation; import org.photonvision.simulation.PhotonCameraSim; @@ -348,7 +349,7 @@ private static Stream distCalCParamProvider() { Arguments.of(20, -42, 0), Arguments.of(5, -35, 2), Arguments.of(6, -35, 0), - Arguments.of(10, -34, 3.2), + Arguments.of(10, -34, 2.4), Arguments.of(15, -33, 0), Arguments.of(19.52, -15.98, 1.1)); } @@ -376,23 +377,29 @@ public void testDistanceCalc(double testDist, double testPitch, double testHeigh cameraSim.prop.setCalibration(640, 480, Rotation2d.fromDegrees(160)); cameraSim.setMinTargetAreaPixels(0.0); visionSysSim.adjustCamera(cameraSim, robotToCamera); - // note that non-fiducial targets have different center point calculation and will - // return slightly inaccurate yaw/pitch values visionSysSim.addVisionTargets(new VisionTargetSim(targetPose, new TargetModel(0.5, 0.5), 0)); visionSysSim.update(robotPose); + + // Note that target 2d yaw/pitch accuracy is hindered by two factors in photonvision: + // 1. These are calculated with the average of the minimum area rectangle, which does not + // actually find the target center because of perspective distortion. + // 2. Yaw and pitch are calculated separately which gives incorrect pitch values. + // Specifically, the corrected pitch would be pitch * cos(yaw). var res = camera.getLatestResult(); assertTrue(res.hasTargets()); var tgt = res.getBestTarget(); - assertEquals(0.0, tgt.getYaw(), kRotDeltaDeg); + assertEquals(0.0, tgt.getYaw(), 0.5); + // Distance calculation using this trigonometry may be wildly incorrect when + // there is not much height difference between the target and the camera. double distMeas = PhotonUtils.calculateDistanceToTargetMeters( robotToCamera.getZ(), targetPose.getZ(), Units.degreesToRadians(-testPitch), Units.degreesToRadians(tgt.getPitch())); - assertEquals(Units.feetToMeters(testDist), distMeas, kTrlDelta); + assertEquals(Units.feetToMeters(testDist), distMeas, 0.15); } @Test From 423712be43ded277fbede9b873dfccb9f8a75cd6 Mon Sep 17 00:00:00 2001 From: amquake Date: Sun, 13 Aug 2023 01:25:21 -0700 Subject: [PATCH 13/46] spotless --- .../java/org/photonvision/simulation/SimCameraProperties.java | 4 ++-- .../src/test/java/org/photonvision/VisionSystemSimTest.java | 1 - 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/photon-lib/src/main/java/org/photonvision/simulation/SimCameraProperties.java b/photon-lib/src/main/java/org/photonvision/simulation/SimCameraProperties.java index 7d901f3023..9bb27ecba0 100644 --- a/photon-lib/src/main/java/org/photonvision/simulation/SimCameraProperties.java +++ b/photon-lib/src/main/java/org/photonvision/simulation/SimCameraProperties.java @@ -381,9 +381,9 @@ public Rotation3d getPixelRot(TargetCorner point) { * length should be: * *
      focal length y ⟶ (focal length y / cos(arctan(pixel x offset / focal length x)))
- * + * More simply, the pitch angle must be corrected as: - * + *
      pitch * cos(yaw)
      * * @return Rotation3d with yaw and pitch of the line projected out of the camera from the given diff --git a/photon-lib/src/test/java/org/photonvision/VisionSystemSimTest.java b/photon-lib/src/test/java/org/photonvision/VisionSystemSimTest.java index 41e9f0eb00..3d60a5c8a8 100644 --- a/photon-lib/src/test/java/org/photonvision/VisionSystemSimTest.java +++ b/photon-lib/src/test/java/org/photonvision/VisionSystemSimTest.java @@ -60,7 +60,6 @@ import org.junit.jupiter.params.provider.MethodSource; import org.junit.jupiter.params.provider.ValueSource; import org.opencv.core.Core; -import org.opencv.highgui.HighGui; import org.photonvision.estimation.TargetModel; import org.photonvision.estimation.VisionEstimation; import org.photonvision.simulation.PhotonCameraSim; From c15f784d0de0d6b8e67ab03ddaaf64f9face6228 Mon Sep 17 00:00:00 2001 From: amquake Date: Mon, 14 Aug 2023 02:06:20 -0700 Subject: [PATCH 14/46] fix culled polygons joining, video sim tweaks --- .../photonvision/estimation/OpenCVHelp.java | 23 +++--- .../photonvision/estimation/TargetModel.java | 23 +++--- .../simulation/PhotonCameraSim.java | 71 ++++++++++++++----- .../photonvision/simulation/VideoSimUtil.java | 51 +++++++------ 4 files changed, 106 insertions(+), 62 deletions(-) diff --git a/photon-lib/src/main/java/org/photonvision/estimation/OpenCVHelp.java b/photon-lib/src/main/java/org/photonvision/estimation/OpenCVHelp.java index 1406e49180..29097a4bbd 100644 --- a/photon-lib/src/main/java/org/photonvision/estimation/OpenCVHelp.java +++ b/photon-lib/src/main/java/org/photonvision/estimation/OpenCVHelp.java @@ -35,7 +35,6 @@ import edu.wpi.first.math.numbers.*; import edu.wpi.first.util.RuntimeLoader; import java.util.ArrayList; -import java.util.Arrays; import java.util.List; import org.ejml.simple.SimpleMatrix; import org.opencv.calib3d.Calib3d; @@ -163,9 +162,9 @@ public static TargetCorner averageCorner(List corners) { var pointMat = targetCornersToMat(corners); Core.reduce(pointMat, pointMat, 0, Core.REDUCE_AVG); - var avgPt = matToTargetCorners(pointMat)[0]; + var avgPt = pointMat.toArray()[0]; pointMat.release(); - return avgPt; + return new TargetCorner(avgPt.x, avgPt.y); } public static MatOfPoint2f targetCornersToMat(List corners) { @@ -180,20 +179,20 @@ public static MatOfPoint2f targetCornersToMat(TargetCorner... corners) { return new MatOfPoint2f(points); } - public static TargetCorner[] pointsToTargetCorners(Point... points) { - var corners = new TargetCorner[points.length]; + public static List pointsToTargetCorners(Point... 
points) { + var corners = new ArrayList(points.length); for (int i = 0; i < points.length; i++) { - corners[i] = new TargetCorner(points[i].x, points[i].y); + corners.add(new TargetCorner(points[i].x, points[i].y)); } return corners; } - public static TargetCorner[] matToTargetCorners(MatOfPoint2f matInput) { - var corners = new TargetCorner[(int) matInput.total()]; + public static List matToTargetCorners(MatOfPoint2f matInput) { + var corners = new ArrayList(); float[] data = new float[(int) matInput.total() * matInput.channels()]; matInput.get(0, 0, data); - for (int i = 0; i < corners.length; i++) { - corners[i] = new TargetCorner(data[0 + 2 * i], data[1 + 2 * i]); + for (int i = 0; i < (int)matInput.total(); i++) { + corners.add(new TargetCorner(data[0 + 2 * i], data[1 + 2 * i])); } return corners; } @@ -302,7 +301,7 @@ public static List projectPoints( distCoeffsMat.release(); imagePoints.release(); - return Arrays.asList(corners); + return corners; } /** @@ -332,7 +331,7 @@ public static List undistortPoints( cameraMatrixMat.release(); distCoeffsMat.release(); - return Arrays.asList(corners_out); + return corners_out; } /** diff --git a/photon-lib/src/main/java/org/photonvision/estimation/TargetModel.java b/photon-lib/src/main/java/org/photonvision/estimation/TargetModel.java index 47405c44fd..db30ea24fb 100644 --- a/photon-lib/src/main/java/org/photonvision/estimation/TargetModel.java +++ b/photon-lib/src/main/java/org/photonvision/estimation/TargetModel.java @@ -75,24 +75,25 @@ public TargetModel(double widthMeters, double heightMeters) { * model has four vertices: * *
        - *
      • Point 0: [0, -radius, -radius] - *
      • Point 1: [0, radius, -radius] - *
      • Point 2: [0, radius, radius] - *
      • Point 3: [0, -radius, radius] + *
      • Point 0: [0, -radius, 0] + *
      • Point 1: [0, 0, -radius] + *
      • Point 2: [0, radius, 0] + *
      • Point 3: [0, 0, radius] *
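
For context, a rough usage sketch of this spherical model; the diameter, translations, and variable names are hypothetical, TargetModel, getOrientedPose, and getFieldVertices are the members touched by this patch, and the geometry types come from WPILib.

    // Sketch only: a 0.25 m diameter sphere, oriented toward an assumed camera position.
    TargetModel ballModel = new TargetModel(0.25);
    Translation3d ballTrl = new Translation3d(2.0, 0.0, 0.5);
    Translation3d cameraTrl = new Translation3d(0.0, 0.0, 0.6);
    // point the model at the camera, like a sprite/decal
    Pose3d orientedPose = TargetModel.getOrientedPose(ballTrl, cameraTrl);
    // the four ellipse-vertex translations above, now in field coordinates
    List<Translation3d> fieldVertices = ballModel.getFieldVertices(orientedPose);
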
      * * Q: Why these vertices? A: This target should be oriented to the camera every frame, much - * like a sprite/decal, and these vertices represent the corners of the rectangle in which the - * ellipse is inscribed. + * like a sprite/decal, and these vertices represent the ellipse vertices (maxima). These vertices + * are used for drawing the image of this sphere, but do not match the corners that will be published + * by photonvision. */ public TargetModel(double diameterMeters) { double radius = diameterMeters / 2.0; this.vertices = List.of( - new Translation3d(0, -radius, -radius), - new Translation3d(0, radius, -radius), - new Translation3d(0, radius, radius), - new Translation3d(0, -radius, radius)); + new Translation3d(0, -radius, 0), + new Translation3d(0, 0, -radius), + new Translation3d(0, radius, 0), + new Translation3d(0, 0, radius)); this.isPlanar = false; this.isSpherical = true; } @@ -140,7 +141,7 @@ public List getFieldVertices(Pose3d targetPose) { * @param cameraTrl Camera's translation * @return This target's pose oriented to the camera */ - public Pose3d getOrientedPose(Translation3d tgtTrl, Translation3d cameraTrl) { + public static Pose3d getOrientedPose(Translation3d tgtTrl, Translation3d cameraTrl) { var relCam = cameraTrl.minus(tgtTrl); var orientToCam = new Rotation3d( diff --git a/photon-lib/src/main/java/org/photonvision/simulation/PhotonCameraSim.java b/photon-lib/src/main/java/org/photonvision/simulation/PhotonCameraSim.java index 5995a39cc3..229f494849 100644 --- a/photon-lib/src/main/java/org/photonvision/simulation/PhotonCameraSim.java +++ b/photon-lib/src/main/java/org/photonvision/simulation/PhotonCameraSim.java @@ -40,7 +40,9 @@ import org.opencv.core.Core; import org.opencv.core.CvType; import org.opencv.core.Mat; +import org.opencv.core.MatOfPoint2f; import org.opencv.core.Point; +import org.opencv.core.RotatedRect; import org.opencv.core.Scalar; import org.opencv.core.Size; import org.opencv.imgproc.Imgproc; @@ -51,6 +53,7 @@ import org.photonvision.estimation.OpenCVHelp; import org.photonvision.estimation.PNPResults; import org.photonvision.estimation.RotTrlTransform3d; +import org.photonvision.estimation.TargetModel; import org.photonvision.targeting.PhotonPipelineResult; import org.photonvision.targeting.PhotonTrackedTarget; import org.photonvision.targeting.TargetCorner; @@ -367,19 +370,58 @@ public PhotonPipelineResult process( var fieldCorners = tgt.getFieldVertices(); if (tgt.getModel().isSpherical) { // target is spherical var model = tgt.getModel(); + // orient the model to the camera (like a sprite/decal) so it appears similar regardless of view fieldCorners = model.getFieldVertices( - model.getOrientedPose(tgt.getPose().getTranslation(), cameraPose.getTranslation())); + TargetModel.getOrientedPose(tgt.getPose().getTranslation(), cameraPose.getTranslation())); } // project 3d target points into 2d image points var targetCorners = OpenCVHelp.projectPoints(prop.getIntrinsics(), prop.getDistCoeffs(), camRt, fieldCorners); + // spherical targets need some additional processing to match what PV publishes + if(tgt.getModel().isSpherical) { + var center = OpenCVHelp.averageCorner(targetCorners); + int l = 0, t, b, r = 0; + // reference point (left side midpoint) + for(int i = 1; i < 4; i++) { + if(targetCorners.get(i).x < targetCorners.get(l).x) l = i; + } + var lc = targetCorners.get(l); + // determine top, right, bottom midpoints + double[] angles = new double[4]; + t = (l+1) % 4; + b = (l+1) % 4; + for(int i = 0; i < 4; i++) { + if(i == l) 
continue; + var ic = targetCorners.get(i); + angles[i] = Math.atan2(lc.y-ic.y, ic.x-lc.x); + if(angles[i] >= angles[t]) t = i; + if(angles[i] <= angles[b]) b = i; + } + for(int i = 0; i < 4; i++) { + if(i != t && i != l && i != b) r = i; + } + // create RotatedRect from midpoints + var rect = new RotatedRect( + new Point(center.x, center.y), + new Size(targetCorners.get(r).x - lc.x, targetCorners.get(b).y - targetCorners.get(t).y), + Math.toDegrees(-angles[r]) + ); + // set target corners to rect corners + Point[] points = new Point[4]; + rect.points(points); + targetCorners = OpenCVHelp.pointsToTargetCorners(points); + } // save visible targets for raw video stream simulation visibleTgts.add(new Pair<>(tgt, targetCorners)); // estimate pixel noise var noisyTargetCorners = prop.estPixelNoise(targetCorners); + // find the minimum area rectangle of target corners + var minAreaRect = OpenCVHelp.getMinAreaRect(noisyTargetCorners); + Point[] minAreaRectPts = new Point[noisyTargetCorners.size()]; + minAreaRect.points(minAreaRectPts); // find the (naive) 2d yaw/pitch - var centerPt = OpenCVHelp.getMinAreaRect(noisyTargetCorners).center; + var centerPt = minAreaRect.center; var centerRot = prop.getPixelRot(new TargetCorner(centerPt.x, centerPt.y)); // find contour area double areaPercent = prop.getContourAreaPercent(noisyTargetCorners); @@ -397,9 +439,6 @@ public PhotonPipelineResult process( noisyTargetCorners); } - Point[] minAreaRectPts = new Point[noisyTargetCorners.size()]; - OpenCVHelp.getMinAreaRect(noisyTargetCorners).points(minAreaRectPts); - detectableTgts.add( new PhotonTrackedTarget( Math.toDegrees(centerRot.getZ()), @@ -410,7 +449,7 @@ public PhotonPipelineResult process( pnpSim.best, pnpSim.alt, pnpSim.ambiguity, - List.of(OpenCVHelp.pointsToTargetCorners(minAreaRectPts)), + OpenCVHelp.pointsToTargetCorners(minAreaRectPts), noisyTargetCorners)); } // render visible tags to raw video frame @@ -423,9 +462,9 @@ public PhotonPipelineResult process( videoSimWireframeResolution, 1.5, new Scalar(80), - 4, + 6, 1, - new Scalar(40), + new Scalar(30), videoSimFrameRaw); } @@ -475,15 +514,13 @@ public PhotonPipelineResult process( (int) VideoSimUtil.getScaledThickness(1, videoSimFrameProcessed), Imgproc.LINE_AA); - for (var pt : tgt.getDetectedCorners()) { - Imgproc.circle( - videoSimFrameProcessed, - new Point(pt.x, pt.y), - 3, - new Scalar(255, 50, 0), - 1, - Imgproc.LINE_AA); - } + VideoSimUtil.drawPoly( + OpenCVHelp.targetCornersToMat(tgt.getDetectedCorners()), + (int) VideoSimUtil.getScaledThickness(1, videoSimFrameProcessed), + new Scalar(255, 20, 20), + true, + videoSimFrameProcessed + ); } } videoSimProcessed.putFrame(videoSimFrameProcessed); diff --git a/photon-lib/src/main/java/org/photonvision/simulation/VideoSimUtil.java b/photon-lib/src/main/java/org/photonvision/simulation/VideoSimUtil.java index a9996999e1..504bcb5cf0 100644 --- a/photon-lib/src/main/java/org/photonvision/simulation/VideoSimUtil.java +++ b/photon-lib/src/main/java/org/photonvision/simulation/VideoSimUtil.java @@ -372,8 +372,8 @@ public static void drawTagDetection(int id, MatOfPoint2f dstPoints, Mat destinat String.valueOf(id), textPt, Imgproc.FONT_HERSHEY_PLAIN, - thickness, - new Scalar(0, 255, 0), + 1.5 * thickness, + new Scalar(0, 200, 0), (int) thickness, Imgproc.LINE_AA); } @@ -474,7 +474,7 @@ private static List> getFieldFloorLines(int subdivisions) { } /** - * Convert 3D lines represented by the given series of translations into a polygon in the camera's + * Convert 3D lines represented by the given 
series of translations into a polygon(s) in the camera's * image. * * @param camRt The change in basis from world coordinates to camera coordinates. See {@link @@ -486,7 +486,7 @@ private static List> getFieldFloorLines(int subdivisions) { * @param isClosed If the final translation should also draw a line to the first translation. * @param destination The destination image that is being drawn to. */ - public static MatOfPoint2f polyFrom3dLines( + public static List polyFrom3dLines( RotTrlTransform3d camRt, SimCameraProperties prop, List trls, @@ -496,7 +496,7 @@ public static MatOfPoint2f polyFrom3dLines( resolution = Math.hypot(destination.size().height, destination.size().width) * resolution; List pts = new ArrayList<>(trls); if (isClosed) pts.add(pts.get(0)); - var corners = new ArrayList(); + List> polygonList = new ArrayList<>(); for (int i = 0; i < pts.size() - 1; i++) { var pta = pts.get(i); @@ -516,12 +516,12 @@ public static MatOfPoint2f polyFrom3dLines( baseDelta = ptb.minus(pta); // project points into 2d - var corn = new ArrayList(); - corn.addAll( + var poly = new ArrayList(); + poly.addAll( OpenCVHelp.projectPoints( prop.getIntrinsics(), prop.getDistCoeffs(), camRt, List.of(pta, ptb))); - var pxa = corn.get(0); - var pxb = corn.get(1); + var pxa = poly.get(0); + var pxb = poly.get(1); // subdivide projected line based on desired resolution double pxDist = Math.hypot(pxb.x - pxa.x, pxb.y - pxa.y); @@ -532,14 +532,17 @@ public static MatOfPoint2f polyFrom3dLines( subPts.add(pta.plus(subDelta.times(j + 1))); } if (subPts.size() > 0) { - corn.addAll( + poly.addAll( 1, OpenCVHelp.projectPoints(prop.getIntrinsics(), prop.getDistCoeffs(), camRt, subPts)); } - corners.addAll(corn); + polygonList.add(poly); } - return OpenCVHelp.targetCornersToMat(corners); + List matList = new ArrayList<>(); + for(var polygon : polygonList) matList.add(OpenCVHelp.targetCornersToMat(polygon)); + + return matList; } /** @@ -570,23 +573,27 @@ public static void drawFieldWireframe( double floorThickness, Scalar floorColor, Mat destination) { - for (var trls : getFieldWallLines()) { - var poly = VideoSimUtil.polyFrom3dLines(camRt, prop, trls, resolution, false, destination); - drawPoly( + for (var trls : getFieldFloorLines(floorSubdivisions)) { + var polys = VideoSimUtil.polyFrom3dLines(camRt, prop, trls, resolution, false, destination); + for(var poly : polys) { + drawPoly( poly, - (int) getScaledThickness(wallThickness, destination), - wallColor, + (int) Math.round(getScaledThickness(floorThickness, destination)), + floorColor, false, destination); + } } - for (var trls : getFieldFloorLines(floorSubdivisions)) { - var poly = VideoSimUtil.polyFrom3dLines(camRt, prop, trls, resolution, false, destination); - drawPoly( + for (var trls : getFieldWallLines()) { + var polys = VideoSimUtil.polyFrom3dLines(camRt, prop, trls, resolution, false, destination); + for(var poly : polys) { + drawPoly( poly, - (int) getScaledThickness(floorThickness, destination), - floorColor, + (int) Math.round(getScaledThickness(wallThickness, destination)), + wallColor, false, destination); + } } } } From 61b037d87ed4532fd759becf3eddb43a6b19856b Mon Sep 17 00:00:00 2001 From: amquake Date: Mon, 14 Aug 2023 02:14:55 -0700 Subject: [PATCH 15/46] dont do SQPNP on non-tag targets --- .../photonvision/estimation/VisionEstimation.java | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/photon-lib/src/main/java/org/photonvision/estimation/VisionEstimation.java 
b/photon-lib/src/main/java/org/photonvision/estimation/VisionEstimation.java index dad9eed5b0..e3c6bfcf1b 100644 --- a/photon-lib/src/main/java/org/photonvision/estimation/VisionEstimation.java +++ b/photon-lib/src/main/java/org/photonvision/estimation/VisionEstimation.java @@ -66,7 +66,7 @@ public static List getVisibleLayoutTags( * * @param cameraMatrix The camera intrinsics matrix in standard opencv form * @param distCoeffs The camera distortion matrix in standard opencv form - * @param visTags The visible tags reported by PV + * @param visTags The visible tags reported by PV. Non-tag targets are automatically excluded. * @param tagLayout The known tag layout on the field * @return The transformation that maps the field origin to the camera pose. Ensure the {@link * PNPResults} are present before utilizing them. @@ -84,8 +84,15 @@ public static PNPResults estimateCamPosePNP( } var corners = new ArrayList(); - for (var tag : visTags) corners.addAll(tag.getDetectedCorners()); - var knownTags = getVisibleLayoutTags(visTags, tagLayout); + var knownTags = new ArrayList(); + // ensure these are AprilTags in our layout + for (var tgt : visTags) { + int id = tgt.getFiducialId(); + tagLayout.getTagPose(id).ifPresent(pose -> { + knownTags.add(new AprilTag(id, pose)); + corners.addAll(tgt.getDetectedCorners()); + }); + } if (knownTags.size() == 0 || corners.size() == 0 || corners.size() % 4 != 0) { return new PNPResults(); } From e8a08202b67228051062145fc7ab9ed15db67891 Mon Sep 17 00:00:00 2001 From: amquake Date: Mon, 14 Aug 2023 16:22:24 -0700 Subject: [PATCH 16/46] non-square target points bugfix --- .../main/java/org/photonvision/simulation/PhotonCameraSim.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/photon-lib/src/main/java/org/photonvision/simulation/PhotonCameraSim.java b/photon-lib/src/main/java/org/photonvision/simulation/PhotonCameraSim.java index 229f494849..39861bd017 100644 --- a/photon-lib/src/main/java/org/photonvision/simulation/PhotonCameraSim.java +++ b/photon-lib/src/main/java/org/photonvision/simulation/PhotonCameraSim.java @@ -418,7 +418,7 @@ public PhotonPipelineResult process( var noisyTargetCorners = prop.estPixelNoise(targetCorners); // find the minimum area rectangle of target corners var minAreaRect = OpenCVHelp.getMinAreaRect(noisyTargetCorners); - Point[] minAreaRectPts = new Point[noisyTargetCorners.size()]; + Point[] minAreaRectPts = new Point[4]; minAreaRect.points(minAreaRectPts); // find the (naive) 2d yaw/pitch var centerPt = minAreaRect.center; From 08bc306ab96dd7d28e73463770a63cffc2e5d6d3 Mon Sep 17 00:00:00 2001 From: amquake Date: Mon, 14 Aug 2023 18:55:24 -0700 Subject: [PATCH 17/46] cuboid target model --- .../photonvision/estimation/TargetModel.java | 28 +++++++++++++++++++ 1 file changed, 28 insertions(+) diff --git a/photon-lib/src/main/java/org/photonvision/estimation/TargetModel.java b/photon-lib/src/main/java/org/photonvision/estimation/TargetModel.java index db30ea24fb..b0244a0e23 100644 --- a/photon-lib/src/main/java/org/photonvision/estimation/TargetModel.java +++ b/photon-lib/src/main/java/org/photonvision/estimation/TargetModel.java @@ -70,6 +70,34 @@ public TargetModel(double widthMeters, double heightMeters) { this.isSpherical = false; } + /** + * Creates a cuboid target model given the length, width, height. The model has eight + * vertices: + * + *
        + *
      • Point 0: [length/2, -width/2, -height/2] + *
      • Point 1: [length/2, width/2, -height/2] + *
      • Point 2: [length/2, width/2, height/2] + *
      • Point 3: [length/2, -width/2, height/2] + *
      • Point 4: [-length/2, -width/2, height/2] + *
      • Point 5: [-length/2, width/2, height/2] + *
      • Point 6: [-length/2, width/2, -height/2] + *
      • Point 7: [-length/2, -width/2, -height/2] + *
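
As a usage sketch (dimensions, pose, and variable names are hypothetical), the cuboid constructor models a box-shaped target whose eight corners are then placed on the field:

    // Sketch only: a 0.30 x 0.20 x 0.15 m box (length, width, height in meters).
    TargetModel boxModel = new TargetModel(0.30, 0.20, 0.15);
    Pose3d boxPose = new Pose3d(4.0, 2.0, 0.5, new Rotation3d());
    // the eight corner translations listed above, transformed into field coordinates
    List<Translation3d> fieldCorners = boxModel.getFieldVertices(boxPose);
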
      + */ + public TargetModel(double lengthMeters, double widthMeters, double heightMeters) { + this(List.of( + new Translation3d(lengthMeters / 2.0, -widthMeters / 2.0, -heightMeters / 2.0), + new Translation3d(lengthMeters / 2.0, widthMeters / 2.0, -heightMeters / 2.0), + new Translation3d(lengthMeters / 2.0, widthMeters / 2.0, heightMeters / 2.0), + new Translation3d(lengthMeters / 2.0, -widthMeters / 2.0, heightMeters / 2.0), + new Translation3d(-lengthMeters / 2.0, -widthMeters / 2.0, heightMeters / 2.0), + new Translation3d(-lengthMeters / 2.0, widthMeters / 2.0, heightMeters / 2.0), + new Translation3d(-lengthMeters / 2.0, widthMeters / 2.0, -heightMeters / 2.0), + new Translation3d(-lengthMeters / 2.0, -widthMeters / 2.0, -heightMeters / 2.0) + )); + } + /** * Creates a spherical target model which has similar dimensions regardless of its rotation. This * model has four vertices: From bb1b8a480c0a82ec09d35dd1f3cf4d844381f61f Mon Sep 17 00:00:00 2001 From: amquake Date: Mon, 14 Aug 2023 18:56:58 -0700 Subject: [PATCH 18/46] standardize image operations on Point class --- .../photonvision/estimation/OpenCVHelp.java | 168 +++++++++--------- .../estimation/VisionEstimation.java | 7 +- .../simulation/PhotonCameraSim.java | 55 +++--- .../simulation/SimCameraProperties.java | 66 +++---- .../photonvision/simulation/VideoSimUtil.java | 51 +++--- .../java/org/photonvision/OpenCVTest.java | 17 +- 6 files changed, 168 insertions(+), 196 deletions(-) diff --git a/photon-lib/src/main/java/org/photonvision/estimation/OpenCVHelp.java b/photon-lib/src/main/java/org/photonvision/estimation/OpenCVHelp.java index 29097a4bbd..daf3fff685 100644 --- a/photon-lib/src/main/java/org/photonvision/estimation/OpenCVHelp.java +++ b/photon-lib/src/main/java/org/photonvision/estimation/OpenCVHelp.java @@ -35,6 +35,7 @@ import edu.wpi.first.math.numbers.*; import edu.wpi.first.util.RuntimeLoader; import java.util.ArrayList; +import java.util.Arrays; import java.util.List; import org.ejml.simple.SimpleMatrix; import org.opencv.calib3d.Calib3d; @@ -78,13 +79,10 @@ public final class OpenCVHelp { new Translation3d()); } - public static MatOfDouble matrixToMat(SimpleMatrix matrix) { + public static Mat matrixToMat(SimpleMatrix matrix) { var mat = new Mat(matrix.numRows(), matrix.numCols(), CvType.CV_64F); mat.put(0, 0, matrix.getDDRM().getData()); - var wrappedMat = new MatOfDouble(); - mat.convertTo(wrappedMat, CvType.CV_64F); - mat.release(); - return wrappedMat; + return mat; } public static Matrix matToMatrix(Mat mat) { @@ -157,29 +155,33 @@ public static Rotation3d rvecToRotation(Mat rvecInput) { return rotationEDNtoNWU(new Rotation3d(axis.div(axis.norm()), axis.norm())); } - public static TargetCorner averageCorner(List corners) { - if (corners == null || corners.size() == 0) return null; - - var pointMat = targetCornersToMat(corners); + public static Point avgPoint(Point[] points) { + if (points == null || points.length == 0) return null; + var pointMat = new MatOfPoint2f(points); Core.reduce(pointMat, pointMat, 0, Core.REDUCE_AVG); var avgPt = pointMat.toArray()[0]; pointMat.release(); - return new TargetCorner(avgPt.x, avgPt.y); + return avgPt; } - public static MatOfPoint2f targetCornersToMat(List corners) { - return targetCornersToMat(corners.toArray(TargetCorner[]::new)); + public static Point[] cornersToPoints(List corners) { + var points = new Point[corners.size()]; + for (int i = 0; i < corners.size(); i++) { + var corn = corners.get(i); + points[i] = new Point(corn.x, corn.y); + } + return points; } 
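
To show how these Point-based helpers chain together, a rough sketch follows; 'prop' (a SimCameraProperties) and 'target' (a PhotonTrackedTarget) are assumed to already exist, and the calls are the helpers introduced or renamed in this patch.

    // Sketch only: convert published corners to OpenCV points, undistort, and inspect them.
    Point[] points = OpenCVHelp.cornersToPoints(target.getDetectedCorners());
    points = OpenCVHelp.undistortPoints(prop.getIntrinsics(), prop.getDistCoeffs(), points);
    Rotation3d centerRot = prop.getPixelRot(OpenCVHelp.avgPoint(points)); // naive yaw/pitch of the center
    List<TargetCorner> corners = OpenCVHelp.pointsToCorners(points);      // back to the published type
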
- public static MatOfPoint2f targetCornersToMat(TargetCorner... corners) { + public static Point[] cornersToPoints(TargetCorner... corners) { var points = new Point[corners.length]; for (int i = 0; i < corners.length; i++) { points[i] = new Point(corners[i].x, corners[i].y); } - return new MatOfPoint2f(points); + return points; } - public static List pointsToTargetCorners(Point... points) { + public static List pointsToCorners(Point... points) { var corners = new ArrayList(points.length); for (int i = 0; i < points.length; i++) { corners.add(new TargetCorner(points[i].x, points[i].y)); @@ -187,7 +189,7 @@ public static List pointsToTargetCorners(Point... points) { return corners; } - public static List matToTargetCorners(MatOfPoint2f matInput) { + public static List pointsToCorners(MatOfPoint2f matInput) { var corners = new ArrayList(); float[] data = new float[(int) matInput.total() * matInput.channels()]; matInput.get(0, 0, data); @@ -272,9 +274,9 @@ private static Translation3d translationNWUtoEDN(Translation3d trl) { * @param camRt The change in basis from world coordinates to camera coordinates. See {@link * RotTrlTransform3d#makeRelativeTo(Pose3d)}. * @param objectTranslations The 3d points to be projected - * @return The 2d points in pixels which correspond to the image of the 3d points on the camera + * @return The 2d points in pixels which correspond to the camera's image of the 3d points */ - public static List projectPoints( + public static Point[] projectPoints( Matrix cameraMatrix, Matrix distCoeffs, RotTrlTransform3d camRt, @@ -285,13 +287,11 @@ public static List projectPoints( var rvec = rotationToRvec(camRt.getRotation()); var tvec = translationToTvec(camRt.getTranslation()); var cameraMatrixMat = matrixToMat(cameraMatrix.getStorage()); - var distCoeffsMat = matrixToMat(distCoeffs.getStorage()); + var distCoeffsMat = new MatOfDouble(matrixToMat(distCoeffs.getStorage())); var imagePoints = new MatOfPoint2f(); // project to 2d Calib3d.projectPoints(objectPoints, rvec, tvec, cameraMatrixMat, distCoeffsMat, imagePoints); - - // turn 2d point Mat into TargetCorners - var corners = matToTargetCorners(imagePoints); + var points = imagePoints.toArray(); // release our Mats from native memory objectPoints.release(); @@ -301,37 +301,37 @@ public static List projectPoints( distCoeffsMat.release(); imagePoints.release(); - return corners; + return points; } /** * Undistort 2d image points using a given camera's intrinsics and distortion. * - *

      2d image points from projectPoints(CameraProperties, Pose3d, List) projectPoints will + *

      2d image points from {@link #projectPoints(Matrix, Matrix, RotTrlTransform3d, List) projectPoints()} will * naturally be distorted, so this operation is important if the image points need to be directly * used (e.g. 2d yaw/pitch). * - * @param cameraMatrix the camera intrinsics matrix in standard opencv form - * @param distCoeffs the camera distortion matrix in standard opencv form - * @param corners The distorted image points + * @param cameraMatrix The camera intrinsics matrix in standard opencv form + * @param distCoeffs The camera distortion matrix in standard opencv form + * @param points The distorted image points * @return The undistorted image points */ - public static List undistortPoints( - Matrix cameraMatrix, Matrix distCoeffs, List corners) { - var points_in = targetCornersToMat(corners); - var points_out = new MatOfPoint2f(); + public static Point[] undistortPoints( + Matrix cameraMatrix, Matrix distCoeffs, Point[] points) { + var distMat = new MatOfPoint2f(points); + var undistMat = new MatOfPoint2f(); var cameraMatrixMat = matrixToMat(cameraMatrix.getStorage()); var distCoeffsMat = matrixToMat(distCoeffs.getStorage()); - Calib3d.undistortImagePoints(points_in, points_out, cameraMatrixMat, distCoeffsMat); - var corners_out = matToTargetCorners(points_out); + Calib3d.undistortImagePoints(distMat, undistMat, cameraMatrixMat, distCoeffsMat); + var undistPoints = undistMat.toArray(); - points_in.release(); - points_out.release(); + distMat.release(); + undistMat.release(); cameraMatrixMat.release(); distCoeffsMat.release(); - return corners_out; + return undistPoints; } /** @@ -340,54 +340,52 @@ public static List undistortPoints( *

      Note that rectangle size and position are stored with ints and do not have sub-pixel * accuracy. * - * @param corners The corners/points to be bounded - * @return Rectangle bounding the given corners + * @param points The points to be bounded + * @return Rectangle bounding the given points */ - public static Rect getBoundingRect(List corners) { - var corn = targetCornersToMat(corners); - var rect = Imgproc.boundingRect(corn); - corn.release(); + public static Rect getBoundingRect(Point[] points) { + var pointMat = new MatOfPoint2f(points); + var rect = Imgproc.boundingRect(pointMat); + pointMat.release(); return rect; } /** * Gets the rotated rectangle with minimum area which bounds this contour. * - *

      Note that rectangle size and position are stored with doubles and have sub-pixel accuracy. + *

      Note that rectangle size and position are stored with floats and have sub-pixel accuracy. * - * @param corners The corners/points to be bounded - * @return Rotated rectangle bounding the given corners + * @param points The points to be bounded + * @return Rotated rectangle bounding the given points */ - public static RotatedRect getMinAreaRect(List corners) { - var corn = targetCornersToMat(corners); - var rect = Imgproc.minAreaRect(corn); - corn.release(); + public static RotatedRect getMinAreaRect(Point[] points) { + var pointMat = new MatOfPoint2f(points); + var rect = Imgproc.minAreaRect(pointMat); + pointMat.release(); return rect; } /** * Gets the convex hull contour (the outline) of a list of points. * - * @param corners - * @return The subset of points defining the contour + * @param points The input contour + * @return The subset of points defining the convex hull. Note that these use ints and not floats. */ - public static MatOfPoint getConvexHull(List corners) { - var temp = targetCornersToMat(corners); - var corn = new MatOfPoint(temp.toArray()); - temp.release(); - + public static Point[] getConvexHull(Point[] points) { + var pointMat = new MatOfPoint(points); // outputHull gives us indices (of corn) that make a convex hull contour var outputHull = new MatOfInt(); - Imgproc.convexHull(corn, outputHull); + + Imgproc.convexHull(pointMat, outputHull); + int[] indices = outputHull.toArray(); outputHull.release(); - var tempPoints = corn.toArray(); - var points = tempPoints.clone(); + pointMat.release(); + var convexPoints = new Point[indices.length]; for (int i = 0; i < indices.length; i++) { - points[i] = tempPoints[indices[i]]; + convexPoints[i] = points[indices[i]]; } - corn.fromArray(points); - return corn; + return convexPoints; } /** @@ -414,7 +412,7 @@ public static MatOfPoint getConvexHull(List corners) { *

    • Point 3: [0, -squareLength / 2, -squareLength / 2] *
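
For illustration, a minimal sketch of calling the square solver with this model; 'prop' and 'target' are the same assumed variables as in the earlier sketch, and kTag16h5 is the library's tag model.

    // Sketch only: single-tag PNP from a detected tag's four corners.
    Point[] imagePoints = OpenCVHelp.cornersToPoints(target.getDetectedCorners());
    PNPResults camToTag = OpenCVHelp.solvePNP_SQUARE(
        prop.getIntrinsics(), prop.getDistCoeffs(), TargetModel.kTag16h5.vertices, imagePoints);
    if (camToTag.isPresent) {
        Transform3d bestCamToTag = camToTag.best; // camera-to-target estimate
    }
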
    * - * @param imageCorners The projection of these 3d object points into the 2d camera image. The + * @param imagePoints The projection of these 3d object points into the 2d camera image. The * order should match the given object point translations. * @return The resulting transformation that maps the camera pose to the target pose and the * ambiguity if an alternate solution is available. @@ -423,10 +421,10 @@ public static PNPResults solvePNP_SQUARE( Matrix cameraMatrix, Matrix distCoeffs, List modelTrls, - List imageCorners) { + Point[] imagePoints) { // solvepnp inputs - MatOfPoint3f objectPoints = new MatOfPoint3f(); - MatOfPoint2f imagePoints = new MatOfPoint2f(); + MatOfPoint3f objectMat = new MatOfPoint3f(); + MatOfPoint2f imageMat = new MatOfPoint2f(); MatOfDouble cameraMatrixMat = new MatOfDouble(); MatOfDouble distCoeffsMat = new MatOfDouble(); var rvecs = new ArrayList(); @@ -437,10 +435,10 @@ public static PNPResults solvePNP_SQUARE( try { // IPPE_SQUARE expects our corners in a specific order modelTrls = reorderCircular(modelTrls, true, -1); - imageCorners = reorderCircular(imageCorners, true, -1); + imagePoints = reorderCircular(Arrays.asList(imagePoints), true, -1).toArray(Point[]::new); // translate to opencv classes - translationToTvec(modelTrls.toArray(new Translation3d[0])).assignTo(objectPoints); - targetCornersToMat(imageCorners).assignTo(imagePoints); + translationToTvec(modelTrls.toArray(new Translation3d[0])).assignTo(objectMat); + imageMat.fromArray(imagePoints); matrixToMat(cameraMatrix.getStorage()).assignTo(cameraMatrixMat); matrixToMat(distCoeffs.getStorage()).assignTo(distCoeffsMat); @@ -451,8 +449,8 @@ public static PNPResults solvePNP_SQUARE( for (int tries = 0; tries < 2; tries++) { // calc rvecs/tvecs and associated reprojection error from image points Calib3d.solvePnPGeneric( - objectPoints, - imagePoints, + objectMat, + imageMat, cameraMatrixMat, distCoeffsMat, rvecs, @@ -474,10 +472,10 @@ public static PNPResults solvePNP_SQUARE( // check if we got a NaN result if (!Double.isNaN(errors[0])) break; else { // add noise and retry - double[] br = imagePoints.get(0, 0); + double[] br = imageMat.get(0, 0); br[0] -= 0.001; br[1] -= 0.001; - imagePoints.put(0, 0, br); + imageMat.put(0, 0, br); } } @@ -495,8 +493,8 @@ public static PNPResults solvePNP_SQUARE( return new PNPResults(); } finally { // release our Mats from native memory - objectPoints.release(); - imagePoints.release(); + objectMat.release(); + imageMat.release(); cameraMatrixMat.release(); distCoeffsMat.release(); for (var v : rvecs) v.release(); @@ -521,7 +519,7 @@ public static PNPResults solvePNP_SQUARE( * @param cameraMatrix The camera intrinsics matrix in standard opencv form * @param distCoeffs The camera distortion matrix in standard opencv form * @param objectTrls The translations of the object corners, relative to the field. - * @param imageCorners The projection of these 3d object points into the 2d camera image. The + * @param imagePoints The projection of these 3d object points into the 2d camera image. The * order should match the given object point translations. * @return The resulting transformation that maps the camera pose to the target pose. 
If the 3d * model points are supplied relative to the origin, this transformation brings the camera to @@ -531,13 +529,13 @@ public static PNPResults solvePNP_SQPNP( Matrix cameraMatrix, Matrix distCoeffs, List objectTrls, - List imageCorners) { + Point[] imagePoints) { try { // translate to opencv classes - MatOfPoint3f objectPoints = translationToTvec(objectTrls.toArray(new Translation3d[0])); - MatOfPoint2f imagePoints = targetCornersToMat(imageCorners); - MatOfDouble cameraMatrixMat = matrixToMat(cameraMatrix.getStorage()); - MatOfDouble distCoeffsMat = matrixToMat(distCoeffs.getStorage()); + MatOfPoint3f objectMat = translationToTvec(objectTrls.toArray(new Translation3d[0])); + MatOfPoint2f imageMat = new MatOfPoint2f(imagePoints); + Mat cameraMatrixMat = matrixToMat(cameraMatrix.getStorage()); + Mat distCoeffsMat = matrixToMat(distCoeffs.getStorage()); var rvecs = new ArrayList(); var tvecs = new ArrayList(); Mat rvec = Mat.zeros(3, 1, CvType.CV_32F); @@ -545,8 +543,8 @@ public static PNPResults solvePNP_SQPNP( Mat reprojectionError = new Mat(); // calc rvec/tvec from image points Calib3d.solvePnPGeneric( - objectPoints, - imagePoints, + objectMat, + imageMat, cameraMatrixMat, distCoeffsMat, rvecs, @@ -563,8 +561,8 @@ public static PNPResults solvePNP_SQPNP( var best = new Transform3d(tvecToTranslation(tvecs.get(0)), rvecToRotation(rvecs.get(0))); // release our Mats from native memory - objectPoints.release(); - imagePoints.release(); + objectMat.release(); + imageMat.release(); cameraMatrixMat.release(); distCoeffsMat.release(); for (var v : rvecs) v.release(); diff --git a/photon-lib/src/main/java/org/photonvision/estimation/VisionEstimation.java b/photon-lib/src/main/java/org/photonvision/estimation/VisionEstimation.java index e3c6bfcf1b..392ffd3c2f 100644 --- a/photon-lib/src/main/java/org/photonvision/estimation/VisionEstimation.java +++ b/photon-lib/src/main/java/org/photonvision/estimation/VisionEstimation.java @@ -35,6 +35,8 @@ import java.util.List; import java.util.Objects; import java.util.stream.Collectors; + +import org.opencv.core.Point; import org.photonvision.targeting.PhotonTrackedTarget; import org.photonvision.targeting.TargetCorner; @@ -96,12 +98,13 @@ public static PNPResults estimateCamPosePNP( if (knownTags.size() == 0 || corners.size() == 0 || corners.size() % 4 != 0) { return new PNPResults(); } + Point[] points = OpenCVHelp.cornersToPoints(corners); // single-tag pnp if (visTags.size() == 1) { var camToTag = OpenCVHelp.solvePNP_SQUARE( - cameraMatrix, distCoeffs, TargetModel.kTag16h5.vertices, corners); + cameraMatrix, distCoeffs, TargetModel.kTag16h5.vertices, points); if (!camToTag.isPresent) return new PNPResults(); var bestPose = knownTags.get(0).pose.transformBy(camToTag.best.inverse()); var altPose = new Pose3d(); @@ -120,7 +123,7 @@ public static PNPResults estimateCamPosePNP( else { var objectTrls = new ArrayList(); for (var tag : knownTags) objectTrls.addAll(TargetModel.kTag16h5.getFieldVertices(tag.pose)); - var camToOrigin = OpenCVHelp.solvePNP_SQPNP(cameraMatrix, distCoeffs, objectTrls, corners); + var camToOrigin = OpenCVHelp.solvePNP_SQPNP(cameraMatrix, distCoeffs, objectTrls, points); if (!camToOrigin.isPresent) return new PNPResults(); return new PNPResults( camToOrigin.best.inverse(), diff --git a/photon-lib/src/main/java/org/photonvision/simulation/PhotonCameraSim.java b/photon-lib/src/main/java/org/photonvision/simulation/PhotonCameraSim.java index 39861bd017..beb349e8a9 100644 --- 
a/photon-lib/src/main/java/org/photonvision/simulation/PhotonCameraSim.java +++ b/photon-lib/src/main/java/org/photonvision/simulation/PhotonCameraSim.java @@ -227,16 +227,15 @@ public boolean canSeeTargetPose(Pose3d camPose, VisionTargetSim target) { } /** - * Determines if all target corners are inside the camera's image. + * Determines if all target points are inside the camera's image. * - * @param corners The corners of the target as image points(x,y) + * @param points The target's 2d image points */ - public boolean canSeeCorners(List corners) { - // corner is outside of resolution - for (var corner : corners) { - if (MathUtil.clamp(corner.x, 0, prop.getResWidth()) != corner.x - || MathUtil.clamp(corner.y, 0, prop.getResHeight()) != corner.y) { - return false; + public boolean canSeeCorners(Point[] points) { + for (var point : points) { + if (MathUtil.clamp(point.x, 0, prop.getResWidth()) != point.x + || MathUtil.clamp(point.y, 0, prop.getResHeight()) != point.y) { + return false; // point is outside of resolution } } return true; @@ -350,7 +349,7 @@ public PhotonPipelineResult process( return dist1 < dist2 ? 1 : -1; }); // all targets visible before noise - var visibleTgts = new ArrayList>>(); + var visibleTgts = new ArrayList>(); // all targets actually detected by camera (after noise) var detectableTgts = new ArrayList(); // basis change from world coordinates to camera coordinates @@ -376,24 +375,24 @@ public PhotonPipelineResult process( TargetModel.getOrientedPose(tgt.getPose().getTranslation(), cameraPose.getTranslation())); } // project 3d target points into 2d image points - var targetCorners = + var imagePoints = OpenCVHelp.projectPoints(prop.getIntrinsics(), prop.getDistCoeffs(), camRt, fieldCorners); - // spherical targets need some additional processing to match what PV publishes + // spherical targets need a rotated rectangle of their midpoints for visualization if(tgt.getModel().isSpherical) { - var center = OpenCVHelp.averageCorner(targetCorners); + var center = OpenCVHelp.avgPoint(imagePoints); int l = 0, t, b, r = 0; // reference point (left side midpoint) for(int i = 1; i < 4; i++) { - if(targetCorners.get(i).x < targetCorners.get(l).x) l = i; + if(imagePoints[i].x < imagePoints[l].x) l = i; } - var lc = targetCorners.get(l); + var lc = imagePoints[l]; // determine top, right, bottom midpoints double[] angles = new double[4]; t = (l+1) % 4; b = (l+1) % 4; for(int i = 0; i < 4; i++) { if(i == l) continue; - var ic = targetCorners.get(i); + var ic = imagePoints[i]; angles[i] = Math.atan2(lc.y-ic.y, ic.x-lc.x); if(angles[i] >= angles[t]) t = i; if(angles[i] <= angles[b]) b = i; @@ -404,25 +403,25 @@ public PhotonPipelineResult process( // create RotatedRect from midpoints var rect = new RotatedRect( new Point(center.x, center.y), - new Size(targetCorners.get(r).x - lc.x, targetCorners.get(b).y - targetCorners.get(t).y), + new Size(imagePoints[r].x - lc.x, imagePoints[b].y - imagePoints[t].y), Math.toDegrees(-angles[r]) ); // set target corners to rect corners Point[] points = new Point[4]; rect.points(points); - targetCorners = OpenCVHelp.pointsToTargetCorners(points); + imagePoints = points; } // save visible targets for raw video stream simulation - visibleTgts.add(new Pair<>(tgt, targetCorners)); + visibleTgts.add(new Pair<>(tgt, imagePoints)); // estimate pixel noise - var noisyTargetCorners = prop.estPixelNoise(targetCorners); + var noisyTargetCorners = prop.estPixelNoise(imagePoints); // find the minimum area rectangle of target corners var minAreaRect = 
OpenCVHelp.getMinAreaRect(noisyTargetCorners); Point[] minAreaRectPts = new Point[4]; minAreaRect.points(minAreaRectPts); // find the (naive) 2d yaw/pitch var centerPt = minAreaRect.center; - var centerRot = prop.getPixelRot(new TargetCorner(centerPt.x, centerPt.y)); + var centerRot = prop.getPixelRot(centerPt); // find contour area double areaPercent = prop.getContourAreaPercent(noisyTargetCorners); @@ -449,8 +448,8 @@ public PhotonPipelineResult process( pnpSim.best, pnpSim.alt, pnpSim.ambiguity, - OpenCVHelp.pointsToTargetCorners(minAreaRectPts), - noisyTargetCorners)); + OpenCVHelp.pointsToCorners(minAreaRectPts), + OpenCVHelp.pointsToCorners(noisyTargetCorners))); } // render visible tags to raw video frame if (videoSimRawEnabled) { @@ -475,12 +474,12 @@ public PhotonPipelineResult process( if (tgt.fiducialID >= 0) { // apriltags VideoSimUtil.warp16h5TagImage( - tgt.fiducialID, OpenCVHelp.targetCornersToMat(corn), true, videoSimFrameRaw); + tgt.fiducialID, corn, true, videoSimFrameRaw); } else if (!tgt.getModel().isSpherical) { // non-spherical targets - var contour = OpenCVHelp.targetCornersToMat(corn); + var contour = corn; if (!tgt.getModel() .isPlanar) { // visualization cant handle non-convex projections of 3d models - contour.fromArray(OpenCVHelp.getConvexHull(corn).toArray()); + contour = OpenCVHelp.getConvexHull(contour); } VideoSimUtil.drawPoly(contour, -1, new Scalar(255), true, videoSimFrameRaw); } else { // spherical targets @@ -504,18 +503,18 @@ public PhotonPipelineResult process( if (tgt.getFiducialId() >= 0) { // apriltags VideoSimUtil.drawTagDetection( tgt.getFiducialId(), - OpenCVHelp.targetCornersToMat(tgt.getDetectedCorners()), + OpenCVHelp.cornersToPoints(tgt.getDetectedCorners()), videoSimFrameProcessed); } else { // other targets Imgproc.rectangle( videoSimFrameProcessed, - OpenCVHelp.getBoundingRect(tgt.getDetectedCorners()), + OpenCVHelp.getBoundingRect(OpenCVHelp.cornersToPoints(tgt.getDetectedCorners())), new Scalar(0, 0, 255), (int) VideoSimUtil.getScaledThickness(1, videoSimFrameProcessed), Imgproc.LINE_AA); VideoSimUtil.drawPoly( - OpenCVHelp.targetCornersToMat(tgt.getDetectedCorners()), + OpenCVHelp.cornersToPoints(tgt.getDetectedCorners()), (int) VideoSimUtil.getScaledThickness(1, videoSimFrameProcessed), new Scalar(255, 20, 20), true, diff --git a/photon-lib/src/main/java/org/photonvision/simulation/SimCameraProperties.java b/photon-lib/src/main/java/org/photonvision/simulation/SimCameraProperties.java index 9bb27ecba0..4d6f5837ee 100644 --- a/photon-lib/src/main/java/org/photonvision/simulation/SimCameraProperties.java +++ b/photon-lib/src/main/java/org/photonvision/simulation/SimCameraProperties.java @@ -41,13 +41,13 @@ import java.util.ArrayList; import java.util.List; import java.util.Random; -import java.util.stream.Collectors; import org.ejml.data.DMatrix3; import org.ejml.dense.fixed.CommonOps_DDF3; +import org.opencv.core.MatOfPoint2f; +import org.opencv.core.Point; import org.opencv.imgproc.Imgproc; import org.photonvision.estimation.OpenCVHelp; import org.photonvision.estimation.RotTrlTransform3d; -import org.photonvision.targeting.TargetCorner; /** * Calibration and performance values for this camera. @@ -315,18 +315,14 @@ public SimCameraProperties copy() { return newProp; } - public List undistort(List points) { - return OpenCVHelp.undistortPoints(camIntrinsics, distCoeffs, points); - } - /** * The percentage(0 - 100) of this camera's resolution the contour takes up in pixels of the * image. 
* - * @param corners Corners of the contour + * @param points Points of the contour */ - public double getContourAreaPercent(List corners) { - return Imgproc.contourArea(OpenCVHelp.getConvexHull(corners)) / getResArea() * 100; + public double getContourAreaPercent(Point[] points) { + return Imgproc.contourArea(new MatOfPoint2f(OpenCVHelp.getConvexHull(points))) / getResArea() * 100; } /** The yaw from the principal point of this camera to the pixel x value. Positive values left. */ @@ -342,7 +338,7 @@ public Rotation2d getPixelYaw(double pixelX) { * The pitch from the principal point of this camera to the pixel y value. Pitch is positive down. * *

    Note that this angle is naively computed and may be incorrect. See {@link - * #getCorrectedPixelRot(TargetCorner)}. + * #getCorrectedPixelRot(Point)}. */ public Rotation2d getPixelPitch(double pixelY) { double fy = camIntrinsics.get(1, 1); @@ -357,9 +353,9 @@ public Rotation2d getPixelPitch(double pixelY) { * down. * *

    Note that pitch is naively computed and may be incorrect. See {@link - * #getCorrectedPixelRot(TargetCorner)}. + * #getCorrectedPixelRot(Point)}. */ - public Rotation3d getPixelRot(TargetCorner point) { + public Rotation3d getPixelRot(Point point) { return new Rotation3d( 0, getPixelPitch(point.y).getRadians(), getPixelYaw(point.x).getRadians()); } @@ -389,7 +385,7 @@ public Rotation3d getPixelRot(TargetCorner point) { * @return Rotation3d with yaw and pitch of the line projected out of the camera from the given * pixel (roll is zero). */ - public Rotation3d getCorrectedPixelRot(TargetCorner point) { + public Rotation3d getCorrectedPixelRot(Point point) { double fx = camIntrinsics.get(0, 0); double cx = camIntrinsics.get(0, 2); double xOffset = cx - point.x; @@ -562,42 +558,20 @@ else if (!Double.isNaN(inter1)) { else return new Pair<>(null, null); } - /** - * Returns these pixel points as fractions of a 1x1 square image. This means the camera's aspect - * ratio and resolution will be used, and the points' x and y may not reach all portions(e.g. a - * wide aspect ratio means some of the top and bottom of the square image is unreachable). - * - * @param points Pixel points on this camera's image - * @return Points mapped to an image of 1x1 resolution - */ - public List getPixelFraction(List points) { - double resLarge = getAspectRatio() > 1 ? resWidth : resHeight; - - return points.stream() - .map( - p -> { - // offset to account for aspect ratio - return new TargetCorner( - (p.x + (resLarge - resWidth) / 2.0) / resLarge, - (p.y + (resLarge - resHeight) / 2.0) / resLarge); - }) - .collect(Collectors.toList()); - } - /** Returns these points after applying this camera's estimated noise. */ - public List estPixelNoise(List points) { + public Point[] estPixelNoise(Point[] points) { if (avgErrorPx == 0 && errorStdDevPx == 0) return points; - return points.stream() - .map( - p -> { - // error pixels in random direction - double error = avgErrorPx + rand.nextGaussian() * errorStdDevPx; - double errorAngle = rand.nextDouble() * 2 * Math.PI - Math.PI; - return new TargetCorner( - p.x + error * Math.cos(errorAngle), p.y + error * Math.sin(errorAngle)); - }) - .collect(Collectors.toList()); + Point[] noisyPts = new Point[points.length]; + for(int i = 0; i < points.length; i++) { + var p = points[i]; + // error pixels in random direction + double error = avgErrorPx + rand.nextGaussian() * errorStdDevPx; + double errorAngle = rand.nextDouble() * 2 * Math.PI - Math.PI; + noisyPts[i] = new Point( + p.x + error * Math.cos(errorAngle), p.y + error * Math.sin(errorAngle)); + } + return noisyPts; } /** diff --git a/photon-lib/src/main/java/org/photonvision/simulation/VideoSimUtil.java b/photon-lib/src/main/java/org/photonvision/simulation/VideoSimUtil.java index 504bcb5cf0..b6912e0e34 100644 --- a/photon-lib/src/main/java/org/photonvision/simulation/VideoSimUtil.java +++ b/photon-lib/src/main/java/org/photonvision/simulation/VideoSimUtil.java @@ -49,7 +49,6 @@ import org.opencv.imgproc.Imgproc; import org.photonvision.estimation.OpenCVHelp; import org.photonvision.estimation.RotTrlTransform3d; -import org.photonvision.targeting.TargetCorner; public class VideoSimUtil { public static final String kLocalTagImagesPath = "./src/main/resources/images/apriltags/"; @@ -179,19 +178,19 @@ public static Point[] get16h5MarkerPts(int scale) { * supersampling/interpolating the warped image. 
This should be used if better stream quality * is desired or target detection is being done on the stream, but can hurt performance. * @param destination The destination image to place the warped tag image onto. - * @see OpenCVHelp#targetCornersToMat(org.photonvision.targeting.TargetCorner...) */ public static void warp16h5TagImage( - int tagId, MatOfPoint2f dstPoints, boolean antialiasing, Mat destination) { + int tagId, Point[] dstPoints, boolean antialiasing, Mat destination) { Mat tagImage = kTag16h5Images.get(tagId); if (tagImage == null || tagImage.empty()) return; var tagPoints = new MatOfPoint2f(kTag16h5MarkerPts); // points of tag image corners var tagImageCorners = new MatOfPoint2f(getImageCorners(tagImage.size())); + var dstPointMat = new MatOfPoint2f(dstPoints); // the rectangle describing the rectangle-of-interest(ROI) - var boundingRect = Imgproc.boundingRect(dstPoints); + var boundingRect = Imgproc.boundingRect(dstPointMat); // find the perspective transform from the tag image to the warped destination points - Mat perspecTrf = Imgproc.getPerspectiveTransform(tagPoints, dstPoints); + Mat perspecTrf = Imgproc.getPerspectiveTransform(tagPoints, dstPointMat); // check extreme image corners after transform to check if we need to expand bounding rect var extremeCorners = new MatOfPoint2f(); Core.perspectiveTransform(tagImageCorners, extremeCorners, perspecTrf); @@ -247,12 +246,12 @@ public static void warp16h5TagImage( // upscale if supersampling Mat scaledDstPts = new Mat(); if (supersampling > 1) { - Core.multiply(dstPoints, new Scalar(supersampling, supersampling), scaledDstPts); + Core.multiply(dstPointMat, new Scalar(supersampling, supersampling), scaledDstPts); boundingRect.x *= supersampling; boundingRect.y *= supersampling; boundingRect.width *= supersampling; boundingRect.height *= supersampling; - } else dstPoints.assignTo(scaledDstPts); + } else dstPointMat.assignTo(scaledDstPts); // update transform relative to expanded, scaled bounding rect Core.subtract(scaledDstPts, new Scalar(boundingRect.tl().x, boundingRect.tl().y), scaledDstPts); @@ -318,14 +317,15 @@ public static double getScaledThickness(double thickness480p, Mat destinationImg /** * Draw a filled ellipse in the destination image. * - * @param corners The corners of a rectangle in which the ellipse is inscribed + * @param dstPoints The points in the destination image representing the rectangle in which + * the ellipse is inscribed. * @param color The color of the ellipse. This is a scalar with BGR values (0-255) * @param destination The destination image to draw onto. The image should be in the BGR color * space. */ - public static void drawEllipse(List corners, Scalar color, Mat destination) { - // create RotatedRect from corners - var rect = OpenCVHelp.getMinAreaRect(corners); + public static void drawEllipse(Point[] dstPoints, Scalar color, Mat destination) { + // create RotatedRect from points + var rect = OpenCVHelp.getMinAreaRect(dstPoints); // inscribe ellipse inside rectangle Imgproc.ellipse(destination, rect, color, -1, Imgproc.LINE_AA); } @@ -341,8 +341,8 @@ public static void drawEllipse(List corners, Scalar color, Mat des * @param destination The destination image to draw onto. 
*/ public static void drawPoly( - MatOfPoint2f dstPoints, int thickness, Scalar color, boolean isClosed, Mat destination) { - var dstPointsd = new MatOfPoint(dstPoints.toArray()); + Point[] dstPoints, int thickness, Scalar color, boolean isClosed, Mat destination) { + var dstPointsd = new MatOfPoint(dstPoints); if (thickness > 0) { Imgproc.polylines( destination, List.of(dstPointsd), isClosed, color, thickness, Imgproc.LINE_AA); @@ -360,10 +360,10 @@ public static void drawPoly( * @param destination The destination image to draw onto. The image should be in the BGR color * space. */ - public static void drawTagDetection(int id, MatOfPoint2f dstPoints, Mat destination) { + public static void drawTagDetection(int id, Point[] dstPoints, Mat destination) { double thickness = getScaledThickness(1, destination); drawPoly(dstPoints, (int) thickness, new Scalar(0, 0, 255), true, destination); - var rect = Imgproc.boundingRect(dstPoints); + var rect = Imgproc.boundingRect(new MatOfPoint(dstPoints)); var textPt = new Point(rect.x + rect.width, rect.y); textPt.x += thickness; textPt.y += thickness; @@ -485,8 +485,9 @@ private static List> getFieldFloorLines(int subdivisions) { * pixels. Line segments will be subdivided if they exceed this resolution. * @param isClosed If the final translation should also draw a line to the first translation. * @param destination The destination image that is being drawn to. + * @return A list of polygons(which are an array of points) */ - public static List polyFrom3dLines( + public static List polyFrom3dLines( RotTrlTransform3d camRt, SimCameraProperties prop, List trls, @@ -496,7 +497,7 @@ public static List polyFrom3dLines( resolution = Math.hypot(destination.size().height, destination.size().width) * resolution; List pts = new ArrayList<>(trls); if (isClosed) pts.add(pts.get(0)); - List> polygonList = new ArrayList<>(); + List polyPointList = new ArrayList<>(); for (int i = 0; i < pts.size() - 1; i++) { var pta = pts.get(i); @@ -516,10 +517,9 @@ public static List polyFrom3dLines( baseDelta = ptb.minus(pta); // project points into 2d - var poly = new ArrayList(); - poly.addAll( - OpenCVHelp.projectPoints( - prop.getIntrinsics(), prop.getDistCoeffs(), camRt, List.of(pta, ptb))); + var poly = new ArrayList(); + poly.addAll(Arrays.asList(OpenCVHelp.projectPoints( + prop.getIntrinsics(), prop.getDistCoeffs(), camRt, List.of(pta, ptb)))); var pxa = poly.get(0); var pxb = poly.get(1); @@ -533,16 +533,13 @@ public static List polyFrom3dLines( } if (subPts.size() > 0) { poly.addAll( - 1, OpenCVHelp.projectPoints(prop.getIntrinsics(), prop.getDistCoeffs(), camRt, subPts)); + 1, Arrays.asList(OpenCVHelp.projectPoints(prop.getIntrinsics(), prop.getDistCoeffs(), camRt, subPts))); } - polygonList.add(poly); + polyPointList.add(poly.toArray(Point[]::new)); } - List matList = new ArrayList<>(); - for(var polygon : polygonList) matList.add(OpenCVHelp.targetCornersToMat(polygon)); - - return matList; + return polyPointList; } /** diff --git a/photon-lib/src/test/java/org/photonvision/OpenCVTest.java b/photon-lib/src/test/java/org/photonvision/OpenCVTest.java index 257114a6ef..7e6e62ef48 100644 --- a/photon-lib/src/test/java/org/photonvision/OpenCVTest.java +++ b/photon-lib/src/test/java/org/photonvision/OpenCVTest.java @@ -145,30 +145,31 @@ public void testProjection() { new Pose3d(1, 0, 0, new Rotation3d(0, 0, Math.PI)), TargetModel.kTag16h5, 0); var cameraPose = new Pose3d(0, 0, 0, new Rotation3d()); var camRt = RotTrlTransform3d.makeRelativeTo(cameraPose); - var targetCorners 
= + var imagePoints = OpenCVHelp.projectPoints( prop.getIntrinsics(), prop.getDistCoeffs(), camRt, target.getFieldVertices()); // find circulation (counter/clockwise-ness) double circulation = 0; - for (int i = 0; i < targetCorners.size(); i++) { - double xDiff = targetCorners.get((i + 1) % 4).x - targetCorners.get(i).x; - double ySum = targetCorners.get((i + 1) % 4).y + targetCorners.get(i).y; + for (int i = 0; i < imagePoints.length; i++) { + double xDiff = imagePoints[(i + 1) % 4].x - imagePoints[i].x; + double ySum = imagePoints[(i + 1) % 4].y + imagePoints[i].y; circulation += xDiff * ySum; } assertTrue(circulation > 0, "2d fiducial points aren't counter-clockwise"); // undo projection distortion - targetCorners = prop.undistort(targetCorners); + imagePoints = OpenCVHelp.undistortPoints(prop.getIntrinsics(), prop.getDistCoeffs(), imagePoints); + // test projection results after moving camera - var avgCenterRot1 = prop.getPixelRot(OpenCVHelp.averageCorner(targetCorners)); + var avgCenterRot1 = prop.getPixelRot(OpenCVHelp.avgPoint(imagePoints)); cameraPose = cameraPose.plus(new Transform3d(new Translation3d(), new Rotation3d(0, 0.25, 0.25))); camRt = RotTrlTransform3d.makeRelativeTo(cameraPose); - targetCorners = + imagePoints = OpenCVHelp.projectPoints( prop.getIntrinsics(), prop.getDistCoeffs(), camRt, target.getFieldVertices()); - var avgCenterRot2 = prop.getPixelRot(OpenCVHelp.averageCorner(targetCorners)); + var avgCenterRot2 = prop.getPixelRot(OpenCVHelp.avgPoint(imagePoints)); var yaw2d = new Rotation2d(avgCenterRot2.getZ()); var pitch2d = new Rotation2d(avgCenterRot2.getY()); From a084abc480f3c79be4ef8f93eb93bd0e15cdc0c6 Mon Sep 17 00:00:00 2001 From: amquake Date: Mon, 14 Aug 2023 18:58:14 -0700 Subject: [PATCH 19/46] spotless --- .../photonvision/estimation/OpenCVHelp.java | 16 +++--- .../photonvision/estimation/TargetModel.java | 27 +++++---- .../estimation/VisionEstimation.java | 14 +++-- .../simulation/PhotonCameraSim.java | 56 +++++++++---------- .../simulation/SimCameraProperties.java | 10 ++-- .../photonvision/simulation/VideoSimUtil.java | 43 +++++++------- .../java/org/photonvision/OpenCVTest.java | 5 +- 7 files changed, 89 insertions(+), 82 deletions(-) diff --git a/photon-lib/src/main/java/org/photonvision/estimation/OpenCVHelp.java b/photon-lib/src/main/java/org/photonvision/estimation/OpenCVHelp.java index daf3fff685..d6f1c91cce 100644 --- a/photon-lib/src/main/java/org/photonvision/estimation/OpenCVHelp.java +++ b/photon-lib/src/main/java/org/photonvision/estimation/OpenCVHelp.java @@ -193,7 +193,7 @@ public static List pointsToCorners(MatOfPoint2f matInput) { var corners = new ArrayList(); float[] data = new float[(int) matInput.total() * matInput.channels()]; matInput.get(0, 0, data); - for (int i = 0; i < (int)matInput.total(); i++) { + for (int i = 0; i < (int) matInput.total(); i++) { corners.add(new TargetCorner(data[0 + 2 * i], data[1 + 2 * i])); } return corners; @@ -307,9 +307,9 @@ public static Point[] projectPoints( /** * Undistort 2d image points using a given camera's intrinsics and distortion. * - *

    2d image points from {@link #projectPoints(Matrix, Matrix, RotTrlTransform3d, List) projectPoints()} will - * naturally be distorted, so this operation is important if the image points need to be directly - * used (e.g. 2d yaw/pitch). + *

    2d image points from {@link #projectPoints(Matrix, Matrix, RotTrlTransform3d, List) + * projectPoints()} will naturally be distorted, so this operation is important if the image + * points need to be directly used (e.g. 2d yaw/pitch). * * @param cameraMatrix The camera intrinsics matrix in standard opencv form * @param distCoeffs The camera distortion matrix in standard opencv form @@ -412,8 +412,8 @@ public static Point[] getConvexHull(Point[] points) { *

  • Point 3: [0, -squareLength / 2, -squareLength / 2] *
* - * @param imagePoints The projection of these 3d object points into the 2d camera image. The - * order should match the given object point translations. + * @param imagePoints The projection of these 3d object points into the 2d camera image. The order + * should match the given object point translations. * @return The resulting transformation that maps the camera pose to the target pose and the * ambiguity if an alternate solution is available. */ @@ -519,8 +519,8 @@ public static PNPResults solvePNP_SQUARE( * @param cameraMatrix The camera intrinsics matrix in standard opencv form * @param distCoeffs The camera distortion matrix in standard opencv form * @param objectTrls The translations of the object corners, relative to the field. - * @param imagePoints The projection of these 3d object points into the 2d camera image. The - * order should match the given object point translations. + * @param imagePoints The projection of these 3d object points into the 2d camera image. The order + * should match the given object point translations. * @return The resulting transformation that maps the camera pose to the target pose. If the 3d * model points are supplied relative to the origin, this transformation brings the camera to * the origin. diff --git a/photon-lib/src/main/java/org/photonvision/estimation/TargetModel.java b/photon-lib/src/main/java/org/photonvision/estimation/TargetModel.java index b0244a0e23..90044abb38 100644 --- a/photon-lib/src/main/java/org/photonvision/estimation/TargetModel.java +++ b/photon-lib/src/main/java/org/photonvision/estimation/TargetModel.java @@ -71,8 +71,7 @@ public TargetModel(double widthMeters, double heightMeters) { } /** - * Creates a cuboid target model given the length, width, height. The model has eight - * vertices: + * Creates a cuboid target model given the length, width, height. The model has eight vertices: * *
    *
  • Point 0: [length/2, -width/2, -height/2] @@ -86,16 +85,16 @@ public TargetModel(double widthMeters, double heightMeters) { *
*/ public TargetModel(double lengthMeters, double widthMeters, double heightMeters) { - this(List.of( - new Translation3d(lengthMeters / 2.0, -widthMeters / 2.0, -heightMeters / 2.0), - new Translation3d(lengthMeters / 2.0, widthMeters / 2.0, -heightMeters / 2.0), - new Translation3d(lengthMeters / 2.0, widthMeters / 2.0, heightMeters / 2.0), - new Translation3d(lengthMeters / 2.0, -widthMeters / 2.0, heightMeters / 2.0), - new Translation3d(-lengthMeters / 2.0, -widthMeters / 2.0, heightMeters / 2.0), - new Translation3d(-lengthMeters / 2.0, widthMeters / 2.0, heightMeters / 2.0), - new Translation3d(-lengthMeters / 2.0, widthMeters / 2.0, -heightMeters / 2.0), - new Translation3d(-lengthMeters / 2.0, -widthMeters / 2.0, -heightMeters / 2.0) - )); + this( + List.of( + new Translation3d(lengthMeters / 2.0, -widthMeters / 2.0, -heightMeters / 2.0), + new Translation3d(lengthMeters / 2.0, widthMeters / 2.0, -heightMeters / 2.0), + new Translation3d(lengthMeters / 2.0, widthMeters / 2.0, heightMeters / 2.0), + new Translation3d(lengthMeters / 2.0, -widthMeters / 2.0, heightMeters / 2.0), + new Translation3d(-lengthMeters / 2.0, -widthMeters / 2.0, heightMeters / 2.0), + new Translation3d(-lengthMeters / 2.0, widthMeters / 2.0, heightMeters / 2.0), + new Translation3d(-lengthMeters / 2.0, widthMeters / 2.0, -heightMeters / 2.0), + new Translation3d(-lengthMeters / 2.0, -widthMeters / 2.0, -heightMeters / 2.0))); } /** @@ -111,8 +110,8 @@ public TargetModel(double lengthMeters, double widthMeters, double heightMeters) * * Q: Why these vertices? A: This target should be oriented to the camera every frame, much * like a sprite/decal, and these vertices represent the ellipse vertices (maxima). These vertices - * are used for drawing the image of this sphere, but do not match the corners that will be published - * by photonvision. + * are used for drawing the image of this sphere, but do not match the corners that will be + * published by photonvision. */ public TargetModel(double diameterMeters) { double radius = diameterMeters / 2.0; diff --git a/photon-lib/src/main/java/org/photonvision/estimation/VisionEstimation.java b/photon-lib/src/main/java/org/photonvision/estimation/VisionEstimation.java index 392ffd3c2f..f5fdb2953d 100644 --- a/photon-lib/src/main/java/org/photonvision/estimation/VisionEstimation.java +++ b/photon-lib/src/main/java/org/photonvision/estimation/VisionEstimation.java @@ -35,7 +35,6 @@ import java.util.List; import java.util.Objects; import java.util.stream.Collectors; - import org.opencv.core.Point; import org.photonvision.targeting.PhotonTrackedTarget; import org.photonvision.targeting.TargetCorner; @@ -68,7 +67,7 @@ public static List getVisibleLayoutTags( * * @param cameraMatrix The camera intrinsics matrix in standard opencv form * @param distCoeffs The camera distortion matrix in standard opencv form - * @param visTags The visible tags reported by PV. Non-tag targets are automatically excluded. + * @param visTags The visible tags reported by PV. Non-tag targets are automatically excluded. * @param tagLayout The known tag layout on the field * @return The transformation that maps the field origin to the camera pose. Ensure the {@link * PNPResults} are present before utilizing them. 
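For readers following this series, here is a caller-side sketch of the estimateCamPosePNP API documented above. It is not part of the diff itself: the field names isPresent and best on PNPResults are assumptions inferred from the "optional pnp results" behavior in this series (an empty PNPResults is returned when solving fails, as in the hunk below), and the 3x3 intrinsics / 5-element distortion matrix dimensions mirror the simulated calibration used elsewhere in this patch set.

import edu.wpi.first.apriltag.AprilTagFieldLayout;
import edu.wpi.first.math.Matrix;
import edu.wpi.first.math.geometry.Pose3d;
import edu.wpi.first.math.numbers.N1;
import edu.wpi.first.math.numbers.N3;
import edu.wpi.first.math.numbers.N5;
import java.util.List;
import java.util.Optional;
import org.photonvision.estimation.VisionEstimation;
import org.photonvision.targeting.PhotonTrackedTarget;

class EstimateCamPosePNPSketch {
    // Hypothetical helper, not part of this patch.
    static Optional<Pose3d> estimateFieldToCamera(
            Matrix<N3, N3> cameraMatrix,
            Matrix<N5, N1> distCoeffs,
            List<PhotonTrackedTarget> visTags,
            AprilTagFieldLayout tagLayout) {
        var pnpResults =
                VisionEstimation.estimateCamPosePNP(cameraMatrix, distCoeffs, visTags, tagLayout);
        // Per the javadoc above: ensure the results are present before utilizing them.
        if (!pnpResults.isPresent) return Optional.empty();
        // best is assumed to be the field-origin-to-camera transform.
        return Optional.of(new Pose3d().plus(pnpResults.best));
    }
}

A presence check like this replaces the earlier pattern of wrapping the result in an Optional; an all-default PNPResults simply reports itself as not present.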
@@ -90,10 +89,13 @@ public static PNPResults estimateCamPosePNP( // ensure these are AprilTags in our layout for (var tgt : visTags) { int id = tgt.getFiducialId(); - tagLayout.getTagPose(id).ifPresent(pose -> { - knownTags.add(new AprilTag(id, pose)); - corners.addAll(tgt.getDetectedCorners()); - }); + tagLayout + .getTagPose(id) + .ifPresent( + pose -> { + knownTags.add(new AprilTag(id, pose)); + corners.addAll(tgt.getDetectedCorners()); + }); } if (knownTags.size() == 0 || corners.size() == 0 || corners.size() % 4 != 0) { return new PNPResults(); diff --git a/photon-lib/src/main/java/org/photonvision/simulation/PhotonCameraSim.java b/photon-lib/src/main/java/org/photonvision/simulation/PhotonCameraSim.java index beb349e8a9..55a1ce3060 100644 --- a/photon-lib/src/main/java/org/photonvision/simulation/PhotonCameraSim.java +++ b/photon-lib/src/main/java/org/photonvision/simulation/PhotonCameraSim.java @@ -40,7 +40,6 @@ import org.opencv.core.Core; import org.opencv.core.CvType; import org.opencv.core.Mat; -import org.opencv.core.MatOfPoint2f; import org.opencv.core.Point; import org.opencv.core.RotatedRect; import org.opencv.core.Scalar; @@ -56,7 +55,6 @@ import org.photonvision.estimation.TargetModel; import org.photonvision.targeting.PhotonPipelineResult; import org.photonvision.targeting.PhotonTrackedTarget; -import org.photonvision.targeting.TargetCorner; /** * A handle for simulating {@link PhotonCamera} values. Processing simulated targets through this @@ -369,43 +367,45 @@ public PhotonPipelineResult process( var fieldCorners = tgt.getFieldVertices(); if (tgt.getModel().isSpherical) { // target is spherical var model = tgt.getModel(); - // orient the model to the camera (like a sprite/decal) so it appears similar regardless of view + // orient the model to the camera (like a sprite/decal) so it appears similar regardless of + // view fieldCorners = model.getFieldVertices( - TargetModel.getOrientedPose(tgt.getPose().getTranslation(), cameraPose.getTranslation())); + TargetModel.getOrientedPose( + tgt.getPose().getTranslation(), cameraPose.getTranslation())); } // project 3d target points into 2d image points var imagePoints = OpenCVHelp.projectPoints(prop.getIntrinsics(), prop.getDistCoeffs(), camRt, fieldCorners); // spherical targets need a rotated rectangle of their midpoints for visualization - if(tgt.getModel().isSpherical) { + if (tgt.getModel().isSpherical) { var center = OpenCVHelp.avgPoint(imagePoints); int l = 0, t, b, r = 0; // reference point (left side midpoint) - for(int i = 1; i < 4; i++) { - if(imagePoints[i].x < imagePoints[l].x) l = i; + for (int i = 1; i < 4; i++) { + if (imagePoints[i].x < imagePoints[l].x) l = i; } var lc = imagePoints[l]; // determine top, right, bottom midpoints double[] angles = new double[4]; - t = (l+1) % 4; - b = (l+1) % 4; - for(int i = 0; i < 4; i++) { - if(i == l) continue; + t = (l + 1) % 4; + b = (l + 1) % 4; + for (int i = 0; i < 4; i++) { + if (i == l) continue; var ic = imagePoints[i]; - angles[i] = Math.atan2(lc.y-ic.y, ic.x-lc.x); - if(angles[i] >= angles[t]) t = i; - if(angles[i] <= angles[b]) b = i; + angles[i] = Math.atan2(lc.y - ic.y, ic.x - lc.x); + if (angles[i] >= angles[t]) t = i; + if (angles[i] <= angles[b]) b = i; } - for(int i = 0; i < 4; i++) { - if(i != t && i != l && i != b) r = i; + for (int i = 0; i < 4; i++) { + if (i != t && i != l && i != b) r = i; } // create RotatedRect from midpoints - var rect = new RotatedRect( - new Point(center.x, center.y), - new Size(imagePoints[r].x - lc.x, imagePoints[b].y - 
imagePoints[t].y), - Math.toDegrees(-angles[r]) - ); + var rect = + new RotatedRect( + new Point(center.x, center.y), + new Size(imagePoints[r].x - lc.x, imagePoints[b].y - imagePoints[t].y), + Math.toDegrees(-angles[r])); // set target corners to rect corners Point[] points = new Point[4]; rect.points(points); @@ -473,8 +473,7 @@ public PhotonPipelineResult process( var corn = pair.getSecond(); if (tgt.fiducialID >= 0) { // apriltags - VideoSimUtil.warp16h5TagImage( - tgt.fiducialID, corn, true, videoSimFrameRaw); + VideoSimUtil.warp16h5TagImage(tgt.fiducialID, corn, true, videoSimFrameRaw); } else if (!tgt.getModel().isSpherical) { // non-spherical targets var contour = corn; if (!tgt.getModel() @@ -514,12 +513,11 @@ public PhotonPipelineResult process( Imgproc.LINE_AA); VideoSimUtil.drawPoly( - OpenCVHelp.cornersToPoints(tgt.getDetectedCorners()), - (int) VideoSimUtil.getScaledThickness(1, videoSimFrameProcessed), - new Scalar(255, 20, 20), - true, - videoSimFrameProcessed - ); + OpenCVHelp.cornersToPoints(tgt.getDetectedCorners()), + (int) VideoSimUtil.getScaledThickness(1, videoSimFrameProcessed), + new Scalar(255, 20, 20), + true, + videoSimFrameProcessed); } } videoSimProcessed.putFrame(videoSimFrameProcessed); diff --git a/photon-lib/src/main/java/org/photonvision/simulation/SimCameraProperties.java b/photon-lib/src/main/java/org/photonvision/simulation/SimCameraProperties.java index 4d6f5837ee..7a3c16aacf 100644 --- a/photon-lib/src/main/java/org/photonvision/simulation/SimCameraProperties.java +++ b/photon-lib/src/main/java/org/photonvision/simulation/SimCameraProperties.java @@ -322,7 +322,9 @@ public SimCameraProperties copy() { * @param points Points of the contour */ public double getContourAreaPercent(Point[] points) { - return Imgproc.contourArea(new MatOfPoint2f(OpenCVHelp.getConvexHull(points))) / getResArea() * 100; + return Imgproc.contourArea(new MatOfPoint2f(OpenCVHelp.getConvexHull(points))) + / getResArea() + * 100; } /** The yaw from the principal point of this camera to the pixel x value. Positive values left. */ @@ -563,13 +565,13 @@ public Point[] estPixelNoise(Point[] points) { if (avgErrorPx == 0 && errorStdDevPx == 0) return points; Point[] noisyPts = new Point[points.length]; - for(int i = 0; i < points.length; i++) { + for (int i = 0; i < points.length; i++) { var p = points[i]; // error pixels in random direction double error = avgErrorPx + rand.nextGaussian() * errorStdDevPx; double errorAngle = rand.nextDouble() * 2 * Math.PI - Math.PI; - noisyPts[i] = new Point( - p.x + error * Math.cos(errorAngle), p.y + error * Math.sin(errorAngle)); + noisyPts[i] = + new Point(p.x + error * Math.cos(errorAngle), p.y + error * Math.sin(errorAngle)); } return noisyPts; } diff --git a/photon-lib/src/main/java/org/photonvision/simulation/VideoSimUtil.java b/photon-lib/src/main/java/org/photonvision/simulation/VideoSimUtil.java index b6912e0e34..f52c1ca7ad 100644 --- a/photon-lib/src/main/java/org/photonvision/simulation/VideoSimUtil.java +++ b/photon-lib/src/main/java/org/photonvision/simulation/VideoSimUtil.java @@ -317,8 +317,8 @@ public static double getScaledThickness(double thickness480p, Mat destinationImg /** * Draw a filled ellipse in the destination image. * - * @param dstPoints The points in the destination image representing the rectangle in which - * the ellipse is inscribed. + * @param dstPoints The points in the destination image representing the rectangle in which the + * ellipse is inscribed. * @param color The color of the ellipse. 
This is a scalar with BGR values (0-255) * @param destination The destination image to draw onto. The image should be in the BGR color * space. @@ -474,8 +474,8 @@ private static List> getFieldFloorLines(int subdivisions) { } /** - * Convert 3D lines represented by the given series of translations into a polygon(s) in the camera's - * image. + * Convert 3D lines represented by the given series of translations into a polygon(s) in the + * camera's image. * * @param camRt The change in basis from world coordinates to camera coordinates. See {@link * RotTrlTransform3d#makeRelativeTo(Pose3d)}. @@ -518,8 +518,10 @@ public static List polyFrom3dLines( // project points into 2d var poly = new ArrayList(); - poly.addAll(Arrays.asList(OpenCVHelp.projectPoints( - prop.getIntrinsics(), prop.getDistCoeffs(), camRt, List.of(pta, ptb)))); + poly.addAll( + Arrays.asList( + OpenCVHelp.projectPoints( + prop.getIntrinsics(), prop.getDistCoeffs(), camRt, List.of(pta, ptb)))); var pxa = poly.get(0); var pxb = poly.get(1); @@ -533,7 +535,10 @@ public static List polyFrom3dLines( } if (subPts.size() > 0) { poly.addAll( - 1, Arrays.asList(OpenCVHelp.projectPoints(prop.getIntrinsics(), prop.getDistCoeffs(), camRt, subPts))); + 1, + Arrays.asList( + OpenCVHelp.projectPoints( + prop.getIntrinsics(), prop.getDistCoeffs(), camRt, subPts))); } polyPointList.add(poly.toArray(Point[]::new)); @@ -572,24 +577,24 @@ public static void drawFieldWireframe( Mat destination) { for (var trls : getFieldFloorLines(floorSubdivisions)) { var polys = VideoSimUtil.polyFrom3dLines(camRt, prop, trls, resolution, false, destination); - for(var poly : polys) { + for (var poly : polys) { drawPoly( - poly, - (int) Math.round(getScaledThickness(floorThickness, destination)), - floorColor, - false, - destination); + poly, + (int) Math.round(getScaledThickness(floorThickness, destination)), + floorColor, + false, + destination); } } for (var trls : getFieldWallLines()) { var polys = VideoSimUtil.polyFrom3dLines(camRt, prop, trls, resolution, false, destination); - for(var poly : polys) { + for (var poly : polys) { drawPoly( - poly, - (int) Math.round(getScaledThickness(wallThickness, destination)), - wallColor, - false, - destination); + poly, + (int) Math.round(getScaledThickness(wallThickness, destination)), + wallColor, + false, + destination); } } } diff --git a/photon-lib/src/test/java/org/photonvision/OpenCVTest.java b/photon-lib/src/test/java/org/photonvision/OpenCVTest.java index 7e6e62ef48..e483833111 100644 --- a/photon-lib/src/test/java/org/photonvision/OpenCVTest.java +++ b/photon-lib/src/test/java/org/photonvision/OpenCVTest.java @@ -159,8 +159,9 @@ public void testProjection() { assertTrue(circulation > 0, "2d fiducial points aren't counter-clockwise"); // undo projection distortion - imagePoints = OpenCVHelp.undistortPoints(prop.getIntrinsics(), prop.getDistCoeffs(), imagePoints); - + imagePoints = + OpenCVHelp.undistortPoints(prop.getIntrinsics(), prop.getDistCoeffs(), imagePoints); + // test projection results after moving camera var avgCenterRot1 = prop.getPixelRot(OpenCVHelp.avgPoint(imagePoints)); cameraPose = From 8885514d2fa06bd0ec2b9b65429c0db66a4b2747 Mon Sep 17 00:00:00 2001 From: amquake Date: Mon, 14 Aug 2023 21:14:24 -0700 Subject: [PATCH 20/46] draw minarearect --- .../java/org/photonvision/simulation/PhotonCameraSim.java | 7 ++++--- .../java/org/photonvision/simulation/VideoSimUtil.java | 2 +- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git 
a/photon-lib/src/main/java/org/photonvision/simulation/PhotonCameraSim.java b/photon-lib/src/main/java/org/photonvision/simulation/PhotonCameraSim.java index 55a1ce3060..e50588c640 100644 --- a/photon-lib/src/main/java/org/photonvision/simulation/PhotonCameraSim.java +++ b/photon-lib/src/main/java/org/photonvision/simulation/PhotonCameraSim.java @@ -482,7 +482,7 @@ public PhotonPipelineResult process( } VideoSimUtil.drawPoly(contour, -1, new Scalar(255), true, videoSimFrameRaw); } else { // spherical targets - VideoSimUtil.drawEllipse(corn, new Scalar(255), videoSimFrameRaw); + VideoSimUtil.drawInscribedEllipse(corn, new Scalar(255), videoSimFrameRaw); } } videoSimRaw.putFrame(videoSimFrameRaw); @@ -505,6 +505,7 @@ public PhotonPipelineResult process( OpenCVHelp.cornersToPoints(tgt.getDetectedCorners()), videoSimFrameProcessed); } else { // other targets + // bounding rectangle Imgproc.rectangle( videoSimFrameProcessed, OpenCVHelp.getBoundingRect(OpenCVHelp.cornersToPoints(tgt.getDetectedCorners())), @@ -513,9 +514,9 @@ public PhotonPipelineResult process( Imgproc.LINE_AA); VideoSimUtil.drawPoly( - OpenCVHelp.cornersToPoints(tgt.getDetectedCorners()), + OpenCVHelp.cornersToPoints(tgt.getMinAreaRectCorners()), (int) VideoSimUtil.getScaledThickness(1, videoSimFrameProcessed), - new Scalar(255, 20, 20), + new Scalar(255, 30, 30), true, videoSimFrameProcessed); } diff --git a/photon-lib/src/main/java/org/photonvision/simulation/VideoSimUtil.java b/photon-lib/src/main/java/org/photonvision/simulation/VideoSimUtil.java index f52c1ca7ad..bfd2e98efc 100644 --- a/photon-lib/src/main/java/org/photonvision/simulation/VideoSimUtil.java +++ b/photon-lib/src/main/java/org/photonvision/simulation/VideoSimUtil.java @@ -323,7 +323,7 @@ public static double getScaledThickness(double thickness480p, Mat destinationImg * @param destination The destination image to draw onto. The image should be in the BGR color * space. 
*/ - public static void drawEllipse(Point[] dstPoints, Scalar color, Mat destination) { + public static void drawInscribedEllipse(Point[] dstPoints, Scalar color, Mat destination) { // create RotatedRect from points var rect = OpenCVHelp.getMinAreaRect(dstPoints); // inscribe ellipse inside rectangle From 24bf9ff4c7557cf9ce78c524da9bc24c5282b31c Mon Sep 17 00:00:00 2001 From: amquake Date: Mon, 14 Aug 2023 21:30:44 -0700 Subject: [PATCH 21/46] update apriltag example --- .../src/main/java/org/photonvision/estimation/OpenCVHelp.java | 1 - .../apriltagExample/src/main/java/frc/robot/Drivetrain.java | 2 +- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/photon-lib/src/main/java/org/photonvision/estimation/OpenCVHelp.java b/photon-lib/src/main/java/org/photonvision/estimation/OpenCVHelp.java index d6f1c91cce..3c24949915 100644 --- a/photon-lib/src/main/java/org/photonvision/estimation/OpenCVHelp.java +++ b/photon-lib/src/main/java/org/photonvision/estimation/OpenCVHelp.java @@ -55,7 +55,6 @@ import org.photonvision.targeting.TargetCorner; public final class OpenCVHelp { - private static RotTrlTransform3d NWU_TO_EDN; private static RotTrlTransform3d EDN_TO_NWU; diff --git a/photonlib-java-examples/apriltagExample/src/main/java/frc/robot/Drivetrain.java b/photonlib-java-examples/apriltagExample/src/main/java/frc/robot/Drivetrain.java index f33d9f0510..dba9ee3bca 100644 --- a/photonlib-java-examples/apriltagExample/src/main/java/frc/robot/Drivetrain.java +++ b/photonlib-java-examples/apriltagExample/src/main/java/frc/robot/Drivetrain.java @@ -142,7 +142,7 @@ public Drivetrain() { var cameraSim = new PhotonCameraSim(pcw.photonCamera, SimCameraProperties.PI4_LIFECAM_640_480()); m_visionSystemSim.addCamera(cameraSim, Constants.VisionConstants.robotToCam); - m_visionSystemSim.addVisionTargets(pcw.photonPoseEstimator.getFieldTags()); + m_visionSystemSim.addAprilTags(pcw.photonPoseEstimator.getFieldTags()); } } From 76eb0f3bae893bb333666d142f3582895e773fae Mon Sep 17 00:00:00 2001 From: amquake Date: Mon, 14 Aug 2023 23:23:35 -0700 Subject: [PATCH 22/46] oops --- .../simulation/SimCameraProperties.java | 18 ++++++++---------- 1 file changed, 8 insertions(+), 10 deletions(-) diff --git a/photon-lib/src/main/java/org/photonvision/simulation/SimCameraProperties.java b/photon-lib/src/main/java/org/photonvision/simulation/SimCameraProperties.java index 7a3c16aacf..592e206c61 100644 --- a/photon-lib/src/main/java/org/photonvision/simulation/SimCameraProperties.java +++ b/photon-lib/src/main/java/org/photonvision/simulation/SimCameraProperties.java @@ -25,6 +25,7 @@ package org.photonvision.simulation; import com.fasterxml.jackson.databind.ObjectMapper; +import edu.wpi.first.math.MathUtil; import edu.wpi.first.math.Matrix; import edu.wpi.first.math.Nat; import edu.wpi.first.math.Pair; @@ -161,18 +162,15 @@ public void setRandomSeed(long seed) { } public void setCalibration(int resWidth, int resHeight, Rotation2d fovDiag) { - double s = Math.sqrt(resWidth * resWidth + resHeight * resHeight); - var fovWidth = new Rotation2d(fovDiag.getRadians() * (resWidth / s)); - var fovHeight = new Rotation2d(fovDiag.getRadians() * (resHeight / s)); - - double maxFovDeg = Math.max(fovWidth.getDegrees(), fovHeight.getDegrees()); - if (maxFovDeg > 179) { - double scale = 179.0 / maxFovDeg; - fovWidth = new Rotation2d(fovWidth.getRadians() * scale); - fovHeight = new Rotation2d(fovHeight.getRadians() * scale); + if (fovDiag.getDegrees() < 1 || fovDiag.getDegrees() > 179) { + fovDiag = 
Rotation2d.fromDegrees(MathUtil.clamp(fovDiag.getDegrees(), 1, 179)); DriverStation.reportError( - "Requested FOV width/height too large! Scaling below 180 degrees...", false); + "Requested invalid FOV! Clamping between (1, 179) degrees...", false); } + double resDiag = Math.sqrt(resWidth * resWidth + resHeight * resHeight); + double diagRatio = Math.tan(fovDiag.getRadians() / 2); + var fovWidth = new Rotation2d(Math.atan(diagRatio * (resWidth / resDiag)) * 2); + var fovHeight = new Rotation2d(Math.atan(diagRatio * (resHeight / resDiag)) * 2); // assume no distortion var distCoeff = VecBuilder.fill(0, 0, 0, 0, 0); From 74f158244cb9e41b01ab8596bc8d34cf037ccb2f Mon Sep 17 00:00:00 2001 From: amquake Date: Fri, 18 Aug 2023 23:15:04 -0700 Subject: [PATCH 23/46] fix calculated center pt --- .../java/org/photonvision/simulation/SimCameraProperties.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/photon-lib/src/main/java/org/photonvision/simulation/SimCameraProperties.java b/photon-lib/src/main/java/org/photonvision/simulation/SimCameraProperties.java index 592e206c61..f34935e1a2 100644 --- a/photon-lib/src/main/java/org/photonvision/simulation/SimCameraProperties.java +++ b/photon-lib/src/main/java/org/photonvision/simulation/SimCameraProperties.java @@ -176,8 +176,8 @@ public void setCalibration(int resWidth, int resHeight, Rotation2d fovDiag) { var distCoeff = VecBuilder.fill(0, 0, 0, 0, 0); // assume centered principal point (pixels) - double cx = resWidth / 2.0; - double cy = resHeight / 2.0; + double cx = resWidth / 2.0 - 0.5; + double cy = resHeight / 2.0 - 0.5; // use given fov to determine focal point (pixels) double fx = cx / Math.tan(fovWidth.getRadians() / 2.0); From 4c99b78309829e47f96690badeae5ae686353879 Mon Sep 17 00:00:00 2001 From: amquake Date: Sat, 19 Aug 2023 00:25:20 -0700 Subject: [PATCH 24/46] revert docs note --- .../java/org/photonvision/simulation/SimCameraProperties.java | 4 ---- .../src/test/java/org/photonvision/VisionSystemSimTest.java | 1 - 2 files changed, 5 deletions(-) diff --git a/photon-lib/src/main/java/org/photonvision/simulation/SimCameraProperties.java b/photon-lib/src/main/java/org/photonvision/simulation/SimCameraProperties.java index f34935e1a2..35b0ad190f 100644 --- a/photon-lib/src/main/java/org/photonvision/simulation/SimCameraProperties.java +++ b/photon-lib/src/main/java/org/photonvision/simulation/SimCameraProperties.java @@ -378,10 +378,6 @@ public Rotation3d getPixelRot(Point point) { * *
focal length y ⟶ (focal length y / cos(arctan(pixel x offset / focal length x)))
* - * More simply, the pitch angle has to be corrected by doing: - * - *
pitch * cos(yaw)
- * * @return Rotation3d with yaw and pitch of the line projected out of the camera from the given * pixel (roll is zero). */ diff --git a/photon-lib/src/test/java/org/photonvision/VisionSystemSimTest.java b/photon-lib/src/test/java/org/photonvision/VisionSystemSimTest.java index 3d60a5c8a8..5ab7d00ce2 100644 --- a/photon-lib/src/test/java/org/photonvision/VisionSystemSimTest.java +++ b/photon-lib/src/test/java/org/photonvision/VisionSystemSimTest.java @@ -384,7 +384,6 @@ public void testDistanceCalc(double testDist, double testPitch, double testHeigh // 1. These are calculated with the average of the minimum area rectangle, which does not // actually find the target center because of perspective distortion. // 2. Yaw and pitch are calculated separately which gives incorrect pitch values. - // Specifically, the corrected pitch would be pitch * cos(yaw). var res = camera.getLatestResult(); assertTrue(res.hasTargets()); var tgt = res.getBestTarget(); From 8f1eb327329880f1d2c7e0fa92b19aae0d46e50d Mon Sep 17 00:00:00 2001 From: amquake Date: Tue, 22 Aug 2023 15:36:07 -0700 Subject: [PATCH 25/46] update wireframe dimensions --- .../java/org/photonvision/simulation/VideoSimUtil.java | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/photon-lib/src/main/java/org/photonvision/simulation/VideoSimUtil.java b/photon-lib/src/main/java/org/photonvision/simulation/VideoSimUtil.java index bfd2e98efc..7a32e413a3 100644 --- a/photon-lib/src/main/java/org/photonvision/simulation/VideoSimUtil.java +++ b/photon-lib/src/main/java/org/photonvision/simulation/VideoSimUtil.java @@ -27,6 +27,7 @@ import edu.wpi.first.cscore.CvSource; import edu.wpi.first.math.geometry.Pose3d; import edu.wpi.first.math.geometry.Translation3d; +import edu.wpi.first.math.util.Units; import edu.wpi.first.util.RuntimeLoader; import java.awt.image.BufferedImage; import java.io.IOException; @@ -396,9 +397,9 @@ public static void setFieldDimensionsMeters(double fieldLengthMeters, double fie private static List> getFieldWallLines() { var list = new ArrayList>(); - final double sideHt = 0.5; - final double driveHt = 1; - final double topHt = 2; + final double sideHt = Units.inchesToMeters(19.5); + final double driveHt = Units.inchesToMeters(35); + final double topHt = Units.inchesToMeters(78); // field floor list.add( From c9a3a67660332fa6667f2750d8293100961a8717 Mon Sep 17 00:00:00 2001 From: amquake Date: Sat, 9 Sep 2023 02:33:54 -0700 Subject: [PATCH 26/46] init --- photonlib-java-examples/examples.txt | 1 + .../swervedriveposeestsim/.gitignore | 172 +++++++++ .../.wpilib/wpilib_preferences.json | 6 + .../swervedriveposeestsim/WPILib-License.md | 24 ++ .../swervedriveposeestsim/build.gradle | 103 ++++++ .../gradle/wrapper/gradle-wrapper.jar | Bin 0 -> 60756 bytes .../gradle/wrapper/gradle-wrapper.properties | 5 + .../swervedriveposeestsim/gradlew | 240 +++++++++++++ .../swervedriveposeestsim/gradlew.bat | 91 +++++ .../swervedriveposeestsim/settings.gradle | 27 ++ .../src/main/deploy/example.txt | 3 + .../src/main/java/frc/robot/Main.java | 15 + .../src/main/java/frc/robot/Robot.java | 55 +++ .../subsystems/drivetrain/SwerveDriveSim.java | 337 ++++++++++++++++++ 14 files changed, 1079 insertions(+) create mode 100644 photonlib-java-examples/swervedriveposeestsim/.gitignore create mode 100644 photonlib-java-examples/swervedriveposeestsim/.wpilib/wpilib_preferences.json create mode 100644 photonlib-java-examples/swervedriveposeestsim/WPILib-License.md create mode 100644 
photonlib-java-examples/swervedriveposeestsim/build.gradle create mode 100644 photonlib-java-examples/swervedriveposeestsim/gradle/wrapper/gradle-wrapper.jar create mode 100644 photonlib-java-examples/swervedriveposeestsim/gradle/wrapper/gradle-wrapper.properties create mode 100644 photonlib-java-examples/swervedriveposeestsim/gradlew create mode 100644 photonlib-java-examples/swervedriveposeestsim/gradlew.bat create mode 100644 photonlib-java-examples/swervedriveposeestsim/settings.gradle create mode 100644 photonlib-java-examples/swervedriveposeestsim/src/main/deploy/example.txt create mode 100644 photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Main.java create mode 100644 photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Robot.java create mode 100644 photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/subsystems/drivetrain/SwerveDriveSim.java diff --git a/photonlib-java-examples/examples.txt b/photonlib-java-examples/examples.txt index 94803d29fb..12c37ab582 100644 --- a/photonlib-java-examples/examples.txt +++ b/photonlib-java-examples/examples.txt @@ -4,3 +4,4 @@ getinrange simaimandrange simposeest apriltagExample +swervedriveposeestsim diff --git a/photonlib-java-examples/swervedriveposeestsim/.gitignore b/photonlib-java-examples/swervedriveposeestsim/.gitignore new file mode 100644 index 0000000000..a8d1911a9e --- /dev/null +++ b/photonlib-java-examples/swervedriveposeestsim/.gitignore @@ -0,0 +1,172 @@ +# This gitignore has been specially created by the WPILib team. +# If you remove items from this file, intellisense might break. + +### C++ ### +# Prerequisites +*.d + +# Compiled Object files +*.slo +*.lo +*.o +*.obj + +# Precompiled Headers +*.gch +*.pch + +# Compiled Dynamic libraries +*.so +*.dylib +*.dll + +# Fortran module files +*.mod +*.smod + +# Compiled Static libraries +*.lai +*.la +*.a +*.lib + +# Executables +*.exe +*.out +*.app + +### Java ### +# Compiled class file +*.class + +# Log file +*.log + +# BlueJ files +*.ctxt + +# Mobile Tools for Java (J2ME) +.mtj.tmp/ + +# Package Files # +*.jar +*.war +*.nar +*.ear +*.zip +*.tar.gz +*.rar + +# virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml +hs_err_pid* + +### Linux ### +*~ + +# temporary files which can be created if a process still has a handle open of a deleted file +.fuse_hidden* + +# KDE directory preferences +.directory + +# Linux trash folder which might appear on any partition or disk +.Trash-* + +# .nfs files are created when an open file is removed but is still being accessed +.nfs* + +### macOS ### +# General +.DS_Store +.AppleDouble +.LSOverride + +# Icon must end with two \r +Icon + +# Thumbnails +._* + +# Files that might appear in the root of a volume +.DocumentRevisions-V100 +.fseventsd +.Spotlight-V100 +.TemporaryItems +.Trashes +.VolumeIcon.icns +.com.apple.timemachine.donotpresent + +# Directories potentially created on remote AFP share +.AppleDB +.AppleDesktop +Network Trash Folder +Temporary Items +.apdisk + +### VisualStudioCode ### +.vscode/* +!.vscode/settings.json +!.vscode/tasks.json +!.vscode/launch.json +!.vscode/extensions.json + +### Windows ### +# Windows thumbnail cache files +Thumbs.db +ehthumbs.db +ehthumbs_vista.db + +# Dump file +*.stackdump + +# Folder config file +[Dd]esktop.ini + +# Recycle Bin used on file shares +$RECYCLE.BIN/ + +# Windows Installer files +*.cab +*.msi +*.msix +*.msm +*.msp + +# Windows shortcuts +*.lnk + +### Gradle ### +.gradle +/build/ + +# Ignore Gradle GUI 
config +gradle-app.setting + +# Avoid ignoring Gradle wrapper jar file (.jar files are usually ignored) +!gradle-wrapper.jar + +# Cache of project +.gradletasknamecache + +# # Work around https://youtrack.jetbrains.com/issue/IDEA-116898 +# gradle/wrapper/gradle-wrapper.properties + +# # VS Code Specific Java Settings +# DO NOT REMOVE .classpath and .project +.classpath +.project +.settings/ +bin/ + +# IntelliJ +*.iml +*.ipr +*.iws +.idea/ +out/ + +# Fleet +.fleet + +# Simulation GUI and other tools window save file +*-window.json diff --git a/photonlib-java-examples/swervedriveposeestsim/.wpilib/wpilib_preferences.json b/photonlib-java-examples/swervedriveposeestsim/.wpilib/wpilib_preferences.json new file mode 100644 index 0000000000..efc45eba76 --- /dev/null +++ b/photonlib-java-examples/swervedriveposeestsim/.wpilib/wpilib_preferences.json @@ -0,0 +1,6 @@ +{ + "enableCppIntellisense": false, + "currentLanguage": "java", + "projectYear": "2023", + "teamNumber": 4512 +} \ No newline at end of file diff --git a/photonlib-java-examples/swervedriveposeestsim/WPILib-License.md b/photonlib-java-examples/swervedriveposeestsim/WPILib-License.md new file mode 100644 index 0000000000..3d5a824cad --- /dev/null +++ b/photonlib-java-examples/swervedriveposeestsim/WPILib-License.md @@ -0,0 +1,24 @@ +Copyright (c) 2009-2021 FIRST and other WPILib contributors +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of FIRST, WPILib, nor the names of other WPILib + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY FIRST AND OTHER WPILIB CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY NONINFRINGEMENT AND FITNESS FOR A PARTICULAR +PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL FIRST OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/photonlib-java-examples/swervedriveposeestsim/build.gradle b/photonlib-java-examples/swervedriveposeestsim/build.gradle new file mode 100644 index 0000000000..5ad363b78a --- /dev/null +++ b/photonlib-java-examples/swervedriveposeestsim/build.gradle @@ -0,0 +1,103 @@ +plugins { + id "java" + id "edu.wpi.first.GradleRIO" version "2023.4.2" +} + +sourceCompatibility = JavaVersion.VERSION_11 +targetCompatibility = JavaVersion.VERSION_11 + +def ROBOT_MAIN_CLASS = "frc.robot.Main" + +repositories { + mavenLocal() +} + +// Define my targets (RoboRIO) and artifacts (deployable files) +// This is added by GradleRIO's backing project DeployUtils. 
+deploy { + targets { + roborio(getTargetTypeClass('RoboRIO')) { + // Team number is loaded either from the .wpilib/wpilib_preferences.json + // or from command line. If not found an exception will be thrown. + // You can use getTeamOrDefault(team) instead of getTeamNumber if you + // want to store a team number in this file. + team = project.frc.getTeamNumber() + debug = project.frc.getDebugOrDefault(false) + + artifacts { + // First part is artifact name, 2nd is artifact type + // getTargetTypeClass is a shortcut to get the class type using a string + + frcJava(getArtifactTypeClass('FRCJavaArtifact')) { + } + + // Static files artifact + frcStaticFileDeploy(getArtifactTypeClass('FileTreeArtifact')) { + files = project.fileTree('src/main/deploy') + directory = '/home/lvuser/deploy' + } + } + } + } +} + +def deployArtifact = deploy.targets.roborio.artifacts.frcJava + +// Set to true to use debug for JNI. +wpi.java.debugJni = false + +// Set this to true to enable desktop support. +def includeDesktopSupport = true + +// Defining my dependencies. In this case, WPILib (+ friends), and vendor libraries. +// Also defines JUnit 5. +dependencies { + implementation wpi.java.deps.wpilib() + implementation wpi.java.vendor.java() + + roborioDebug wpi.java.deps.wpilibJniDebug(wpi.platforms.roborio) + roborioDebug wpi.java.vendor.jniDebug(wpi.platforms.roborio) + + roborioRelease wpi.java.deps.wpilibJniRelease(wpi.platforms.roborio) + roborioRelease wpi.java.vendor.jniRelease(wpi.platforms.roborio) + + nativeDebug wpi.java.deps.wpilibJniDebug(wpi.platforms.desktop) + nativeDebug wpi.java.vendor.jniDebug(wpi.platforms.desktop) + simulationDebug wpi.sim.enableDebug() + + nativeRelease wpi.java.deps.wpilibJniRelease(wpi.platforms.desktop) + nativeRelease wpi.java.vendor.jniRelease(wpi.platforms.desktop) + simulationRelease wpi.sim.enableRelease() + + testImplementation 'org.junit.jupiter:junit-jupiter-api:5.8.2' + testImplementation 'org.junit.jupiter:junit-jupiter-params:5.8.2' + testRuntimeOnly 'org.junit.jupiter:junit-jupiter-engine:5.8.2' +} + +test { + useJUnitPlatform() + systemProperty 'junit.jupiter.extensions.autodetection.enabled', 'true' +} + +// Simulation configuration (e.g. environment variables). +wpi.sim.addGui().defaultEnabled = true +wpi.sim.addDriverstation() + +// Setting up my Jar File. In this case, adding all libraries into the main jar ('fat jar') +// in order to make them all available at runtime. Also adding the manifest so WPILib +// knows where to look for our Robot Class. +jar { + from { configurations.runtimeClasspath.collect { it.isDirectory() ? 
it : zipTree(it) } } + manifest edu.wpi.first.gradlerio.GradleRIOPlugin.javaManifest(ROBOT_MAIN_CLASS) + duplicatesStrategy = DuplicatesStrategy.INCLUDE +} + +// Configure jar and deploy tasks +deployArtifact.jarTask = jar +wpi.java.configureExecutableTasks(jar) +wpi.java.configureTestTasks(test) + +// Configure string concat to always inline compile +tasks.withType(JavaCompile) { + options.compilerArgs.add '-XDstringConcat=inline' +} diff --git a/photonlib-java-examples/swervedriveposeestsim/gradle/wrapper/gradle-wrapper.jar b/photonlib-java-examples/swervedriveposeestsim/gradle/wrapper/gradle-wrapper.jar new file mode 100644 index 0000000000000000000000000000000000000000..249e5832f090a2944b7473328c07c9755baa3196 GIT binary patch literal 60756 zcmb5WV{~QRw(p$^Dz@00IL3?^hro$gg*4VI_WAaTyVM5Foj~O|-84 z$;06hMwt*rV;^8iB z1~&0XWpYJmG?Ts^K9PC62H*`G}xom%S%yq|xvG~FIfP=9*f zZoDRJBm*Y0aId=qJ?7dyb)6)JGWGwe)MHeNSzhi)Ko6J<-m@v=a%NsP537lHe0R* z`If4$aaBA#S=w!2z&m>{lpTy^Lm^mg*3?M&7HFv}7K6x*cukLIGX;bQG|QWdn{%_6 zHnwBKr84#B7Z+AnBXa16a?or^R?+>$4`}{*a_>IhbjvyTtWkHw)|ay)ahWUd-qq$~ zMbh6roVsj;_qnC-R{G+Cy6bApVOinSU-;(DxUEl!i2)1EeQ9`hrfqj(nKI7?Z>Xur zoJz-a`PxkYit1HEbv|jy%~DO^13J-ut986EEG=66S}D3!L}Efp;Bez~7tNq{QsUMm zh9~(HYg1pA*=37C0}n4g&bFbQ+?-h-W}onYeE{q;cIy%eZK9wZjSwGvT+&Cgv z?~{9p(;bY_1+k|wkt_|N!@J~aoY@|U_RGoWX<;p{Nu*D*&_phw`8jYkMNpRTWx1H* z>J-Mi_!`M468#5Aix$$u1M@rJEIOc?k^QBc?T(#=n&*5eS#u*Y)?L8Ha$9wRWdH^3D4|Ps)Y?m0q~SiKiSfEkJ!=^`lJ(%W3o|CZ zSrZL-Xxc{OrmsQD&s~zPfNJOpSZUl%V8tdG%ei}lQkM+z@-4etFPR>GOH9+Y_F<3=~SXln9Kb-o~f>2a6Xz@AS3cn^;c_>lUwlK(n>z?A>NbC z`Ud8^aQy>wy=$)w;JZzA)_*Y$Z5hU=KAG&htLw1Uh00yE!|Nu{EZkch zY9O6x7Y??>!7pUNME*d!=R#s)ghr|R#41l!c?~=3CS8&zr6*aA7n9*)*PWBV2w+&I zpW1-9fr3j{VTcls1>ua}F*bbju_Xq%^v;-W~paSqlf zolj*dt`BBjHI)H9{zrkBo=B%>8}4jeBO~kWqO!~Thi!I1H(in=n^fS%nuL=X2+s!p}HfTU#NBGiwEBF^^tKU zbhhv+0dE-sbK$>J#t-J!B$TMgN@Wh5wTtK2BG}4BGfsZOoRUS#G8Cxv|6EI*n&Xxq zt{&OxCC+BNqz$9b0WM7_PyBJEVObHFh%%`~!@MNZlo*oXDCwDcFwT~Rls!aApL<)^ zbBftGKKBRhB!{?fX@l2_y~%ygNFfF(XJzHh#?`WlSL{1lKT*gJM zs>bd^H9NCxqxn(IOky5k-wALFowQr(gw%|`0991u#9jXQh?4l|l>pd6a&rx|v=fPJ z1mutj{YzpJ_gsClbWFk(G}bSlFi-6@mwoQh-XeD*j@~huW4(8ub%^I|azA)h2t#yG z7e_V_<4jlM3D(I+qX}yEtqj)cpzN*oCdYHa!nm%0t^wHm)EmFP*|FMw!tb@&`G-u~ zK)=Sf6z+BiTAI}}i{*_Ac$ffr*Wrv$F7_0gJkjx;@)XjYSh`RjAgrCck`x!zP>Ifu z&%he4P|S)H*(9oB4uvH67^0}I-_ye_!w)u3v2+EY>eD3#8QR24<;7?*hj8k~rS)~7 zSXs5ww)T(0eHSp$hEIBnW|Iun<_i`}VE0Nc$|-R}wlSIs5pV{g_Dar(Zz<4X3`W?K z6&CAIl4U(Qk-tTcK{|zYF6QG5ArrEB!;5s?tW7 zrE3hcFY&k)+)e{+YOJ0X2uDE_hd2{|m_dC}kgEKqiE9Q^A-+>2UonB+L@v3$9?AYw zVQv?X*pK;X4Ovc6Ev5Gbg{{Eu*7{N3#0@9oMI~}KnObQE#Y{&3mM4`w%wN+xrKYgD zB-ay0Q}m{QI;iY`s1Z^NqIkjrTlf`B)B#MajZ#9u41oRBC1oM1vq0i|F59> z#StM@bHt|#`2)cpl_rWB($DNJ3Lap}QM-+A$3pe}NyP(@+i1>o^fe-oxX#Bt`mcQc zb?pD4W%#ep|3%CHAYnr*^M6Czg>~L4?l16H1OozM{P*en298b+`i4$|w$|4AHbzqB zHpYUsHZET$Z0ztC;U+0*+amF!@PI%^oUIZy{`L{%O^i{Xk}X0&nl)n~tVEpcAJSJ} zverw15zP1P-O8h9nd!&hj$zuwjg?DoxYIw{jWM zW5_pj+wFy8Tsa9g<7Qa21WaV&;ejoYflRKcz?#fSH_)@*QVlN2l4(QNk| z4aPnv&mrS&0|6NHq05XQw$J^RR9T{3SOcMKCXIR1iSf+xJ0E_Wv?jEc*I#ZPzyJN2 zUG0UOXHl+PikM*&g$U@g+KbG-RY>uaIl&DEtw_Q=FYq?etc!;hEC_}UX{eyh%dw2V zTTSlap&5>PY{6I#(6`j-9`D&I#|YPP8a;(sOzgeKDWsLa!i-$frD>zr-oid!Hf&yS z!i^cr&7tN}OOGmX2)`8k?Tn!!4=tz~3hCTq_9CdiV!NIblUDxHh(FJ$zs)B2(t5@u z-`^RA1ShrLCkg0)OhfoM;4Z{&oZmAec$qV@ zGQ(7(!CBk<5;Ar%DLJ0p0!ResC#U<+3i<|vib1?{5gCebG7$F7URKZXuX-2WgF>YJ^i zMhHDBsh9PDU8dlZ$yJKtc6JA#y!y$57%sE>4Nt+wF1lfNIWyA`=hF=9Gj%sRwi@vd z%2eVV3y&dvAgyuJ=eNJR+*080dbO_t@BFJO<@&#yqTK&+xc|FRR;p;KVk@J3$S{p` zGaMj6isho#%m)?pOG^G0mzOAw0z?!AEMsv=0T>WWcE>??WS=fII$t$(^PDPMU(P>o 
z_*0s^W#|x)%tx8jIgZY~A2yG;US0m2ZOQt6yJqW@XNY_>_R7(Nxb8Ged6BdYW6{prd!|zuX$@Q2o6Ona8zzYC1u!+2!Y$Jc9a;wy+pXt}o6~Bu1oF1c zp7Y|SBTNi@=I(K%A60PMjM#sfH$y*c{xUgeSpi#HB`?|`!Tb&-qJ3;vxS!TIzuTZs-&%#bAkAyw9m4PJgvey zM5?up*b}eDEY+#@tKec)-c(#QF0P?MRlD1+7%Yk*jW;)`f;0a-ZJ6CQA?E%>i2Dt7T9?s|9ZF|KP4;CNWvaVKZ+Qeut;Jith_y{v*Ny6Co6!8MZx;Wgo z=qAi%&S;8J{iyD&>3CLCQdTX*$+Rx1AwA*D_J^0>suTgBMBb=*hefV+Ars#mmr+YsI3#!F@Xc1t4F-gB@6aoyT+5O(qMz*zG<9Qq*f0w^V!03rpr*-WLH}; zfM{xSPJeu6D(%8HU%0GEa%waFHE$G?FH^kMS-&I3)ycx|iv{T6Wx}9$$D&6{%1N_8 z_CLw)_9+O4&u94##vI9b-HHm_95m)fa??q07`DniVjAy`t7;)4NpeyAY(aAk(+T_O z1om+b5K2g_B&b2DCTK<>SE$Ode1DopAi)xaJjU>**AJK3hZrnhEQ9E`2=|HHe<^tv z63e(bn#fMWuz>4erc47}!J>U58%<&N<6AOAewyzNTqi7hJc|X{782&cM zHZYclNbBwU6673=!ClmxMfkC$(CykGR@10F!zN1Se83LR&a~$Ht&>~43OX22mt7tcZUpa;9@q}KDX3O&Ugp6< zLZLfIMO5;pTee1vNyVC$FGxzK2f>0Z-6hM82zKg44nWo|n}$Zk6&;5ry3`(JFEX$q zK&KivAe${e^5ZGc3a9hOt|!UOE&OocpVryE$Y4sPcs4rJ>>Kbi2_subQ9($2VN(3o zb~tEzMsHaBmBtaHAyES+d3A(qURgiskSSwUc9CfJ@99&MKp2sooSYZu+-0t0+L*!I zYagjOlPgx|lep9tiU%ts&McF6b0VE57%E0Ho%2oi?=Ks+5%aj#au^OBwNwhec zta6QAeQI^V!dF1C)>RHAmB`HnxyqWx?td@4sd15zPd*Fc9hpDXP23kbBenBxGeD$k z;%0VBQEJ-C)&dTAw_yW@k0u?IUk*NrkJ)(XEeI z9Y>6Vel>#s_v@=@0<{4A{pl=9cQ&Iah0iD0H`q)7NeCIRz8zx;! z^OO;1+IqoQNak&pV`qKW+K0^Hqp!~gSohcyS)?^P`JNZXw@gc6{A3OLZ?@1Uc^I2v z+X!^R*HCm3{7JPq{8*Tn>5;B|X7n4QQ0Bs79uTU%nbqOJh`nX(BVj!#f;#J+WZxx4 z_yM&1Y`2XzhfqkIMO7tB3raJKQS+H5F%o83bM+hxbQ zeeJm=Dvix$2j|b4?mDacb67v-1^lTp${z=jc1=j~QD>7c*@+1?py>%Kj%Ejp7Y-!? z8iYRUlGVrQPandAaxFfks53@2EC#0)%mrnmGRn&>=$H$S8q|kE_iWko4`^vCS2aWg z#!`RHUGyOt*k?bBYu3*j3u0gB#v(3tsije zgIuNNWNtrOkx@Pzs;A9un+2LX!zw+p3_NX^Sh09HZAf>m8l@O*rXy_82aWT$Q>iyy zqO7Of)D=wcSn!0+467&!Hl))eff=$aneB?R!YykdKW@k^_uR!+Q1tR)+IJb`-6=jj zymzA>Sv4>Z&g&WWu#|~GcP7qP&m*w-S$)7Xr;(duqCTe7p8H3k5>Y-n8438+%^9~K z3r^LIT_K{i7DgEJjIocw_6d0!<;wKT`X;&vv+&msmhAAnIe!OTdybPctzcEzBy88_ zWO{6i4YT%e4^WQZB)KHCvA(0tS zHu_Bg+6Ko%a9~$EjRB90`P(2~6uI@SFibxct{H#o&y40MdiXblu@VFXbhz>Nko;7R z70Ntmm-FePqhb%9gL+7U8@(ch|JfH5Fm)5${8|`Lef>LttM_iww6LW2X61ldBmG0z zax3y)njFe>j*T{i0s8D4=L>X^j0)({R5lMGVS#7(2C9@AxL&C-lZQx~czI7Iv+{%1 z2hEG>RzX4S8x3v#9sgGAnPzptM)g&LB}@%E>fy0vGSa(&q0ch|=ncKjNrK z`jA~jObJhrJ^ri|-)J^HUyeZXz~XkBp$VhcTEcTdc#a2EUOGVX?@mYx#Vy*!qO$Jv zQ4rgOJ~M*o-_Wptam=~krnmG*p^j!JAqoQ%+YsDFW7Cc9M%YPiBOrVcD^RY>m9Pd< zu}#9M?K{+;UIO!D9qOpq9yxUquQRmQNMo0pT`@$pVt=rMvyX)ph(-CCJLvUJy71DI zBk7oc7)-%ngdj~s@76Yse3L^gV0 z2==qfp&Q~L(+%RHP0n}+xH#k(hPRx(!AdBM$JCfJ5*C=K3ts>P?@@SZ_+{U2qFZb>4kZ{Go37{# zSQc+-dq*a-Vy4?taS&{Ht|MLRiS)Sn14JOONyXqPNnpq&2y~)6wEG0oNy>qvod$FF z`9o&?&6uZjhZ4_*5qWVrEfu(>_n2Xi2{@Gz9MZ8!YmjYvIMasE9yVQL10NBrTCczq zcTY1q^PF2l!Eraguf{+PtHV3=2A?Cu&NN&a8V(y;q(^_mFc6)%Yfn&X&~Pq zU1?qCj^LF(EQB1F`8NxNjyV%fde}dEa(Hx=r7$~ts2dzDwyi6ByBAIx$NllB4%K=O z$AHz1<2bTUb>(MCVPpK(E9wlLElo(aSd(Os)^Raum`d(g9Vd_+Bf&V;l=@mM=cC>) z)9b0enb)u_7V!!E_bl>u5nf&Rl|2r=2F3rHMdb7y9E}}F82^$Rf+P8%dKnOeKh1vs zhH^P*4Ydr^$)$h@4KVzxrHyy#cKmWEa9P5DJ|- zG;!Qi35Tp7XNj60=$!S6U#!(${6hyh7d4q=pF{`0t|N^|L^d8pD{O9@tF~W;#Je*P z&ah%W!KOIN;SyAEhAeTafJ4uEL`(RtnovM+cb(O#>xQnk?dzAjG^~4$dFn^<@-Na3 z395;wBnS{t*H;Jef2eE!2}u5Ns{AHj>WYZDgQJt8v%x?9{MXqJsGP|l%OiZqQ1aB! 
z%E=*Ig`(!tHh>}4_z5IMpg{49UvD*Pp9!pxt_gdAW%sIf3k6CTycOT1McPl=_#0?8 zVjz8Hj*Vy9c5-krd-{BQ{6Xy|P$6LJvMuX$* zA+@I_66_ET5l2&gk9n4$1M3LN8(yEViRx&mtd#LD}AqEs?RW=xKC(OCWH;~>(X6h!uDxXIPH06xh z*`F4cVlbDP`A)-fzf>MuScYsmq&1LUMGaQ3bRm6i7OsJ|%uhTDT zlvZA1M}nz*SalJWNT|`dBm1$xlaA>CCiQ zK`xD-RuEn>-`Z?M{1%@wewf#8?F|(@1e0+T4>nmlSRrNK5f)BJ2H*$q(H>zGD0>eL zQ!tl_Wk)k*e6v^m*{~A;@6+JGeWU-q9>?+L_#UNT%G?4&BnOgvm9@o7l?ov~XL+et zbGT)|G7)KAeqb=wHSPk+J1bdg7N3$vp(ekjI1D9V$G5Cj!=R2w=3*4!z*J-r-cyeb zd(i2KmX!|Lhey!snRw z?#$Gu%S^SQEKt&kep)up#j&9}e+3=JJBS(s>MH+|=R(`8xK{mmndWo_r`-w1#SeRD&YtAJ#GiVI*TkQZ}&aq<+bU2+coU3!jCI6E+Ad_xFW*ghnZ$q zAoF*i&3n1j#?B8x;kjSJD${1jdRB;)R*)Ao!9bd|C7{;iqDo|T&>KSh6*hCD!rwv= zyK#F@2+cv3=|S1Kef(E6Niv8kyLVLX&e=U;{0x{$tDfShqkjUME>f8d(5nzSkY6@! z^-0>DM)wa&%m#UF1F?zR`8Y3X#tA!*7Q$P3lZJ%*KNlrk_uaPkxw~ zxZ1qlE;Zo;nb@!SMazSjM>;34ROOoygo%SF);LL>rRonWwR>bmSd1XD^~sGSu$Gg# zFZ`|yKU0%!v07dz^v(tY%;So(e`o{ZYTX`hm;@b0%8|H>VW`*cr8R%3n|ehw2`(9B+V72`>SY}9^8oh$En80mZK9T4abVG*to;E z1_S6bgDOW?!Oy1LwYy=w3q~KKdbNtyH#d24PFjX)KYMY93{3-mPP-H>@M-_>N~DDu zENh~reh?JBAK=TFN-SfDfT^=+{w4ea2KNWXq2Y<;?(gf(FgVp8Zp-oEjKzB%2Iqj;48GmY3h=bcdYJ}~&4tS`Q1sb=^emaW$IC$|R+r-8V- zf0$gGE(CS_n4s>oicVk)MfvVg#I>iDvf~Ov8bk}sSxluG!6#^Z_zhB&U^`eIi1@j( z^CK$z^stBHtaDDHxn+R;3u+>Lil^}fj?7eaGB z&5nl^STqcaBxI@v>%zG|j))G(rVa4aY=B@^2{TFkW~YP!8!9TG#(-nOf^^X-%m9{Z zCC?iC`G-^RcBSCuk=Z`(FaUUe?hf3{0C>>$?Vs z`2Uud9M+T&KB6o4o9kvdi^Q=Bw!asPdxbe#W-Oaa#_NP(qpyF@bVxv5D5))srkU#m zj_KA+#7sqDn*Ipf!F5Byco4HOSd!Ui$l94|IbW%Ny(s1>f4|Mv^#NfB31N~kya9!k zWCGL-$0ZQztBate^fd>R!hXY_N9ZjYp3V~4_V z#eB)Kjr8yW=+oG)BuNdZG?jaZlw+l_ma8aET(s+-x+=F-t#Qoiuu1i`^x8Sj>b^U} zs^z<()YMFP7CmjUC@M=&lA5W7t&cxTlzJAts*%PBDAPuqcV5o7HEnqjif_7xGt)F% zGx2b4w{@!tE)$p=l3&?Bf#`+!-RLOleeRk3 z7#pF|w@6_sBmn1nECqdunmG^}pr5(ZJQVvAt$6p3H(16~;vO>?sTE`Y+mq5YP&PBo zvq!7#W$Gewy`;%6o^!Dtjz~x)T}Bdk*BS#=EY=ODD&B=V6TD2z^hj1m5^d6s)D*wk zu$z~D7QuZ2b?5`p)E8e2_L38v3WE{V`bVk;6fl#o2`) z99JsWhh?$oVRn@$S#)uK&8DL8>An0&S<%V8hnGD7Z^;Y(%6;^9!7kDQ5bjR_V+~wp zfx4m3z6CWmmZ<8gDGUyg3>t8wgJ5NkkiEm^(sedCicP^&3D%}6LtIUq>mXCAt{9eF zNXL$kGcoUTf_Lhm`t;hD-SE)m=iBnxRU(NyL}f6~1uH)`K!hmYZjLI%H}AmEF5RZt z06$wn63GHnApHXZZJ}s^s)j9(BM6e*7IBK6Bq(!)d~zR#rbxK9NVIlgquoMq z=eGZ9NR!SEqP6=9UQg#@!rtbbSBUM#ynF);zKX+|!Zm}*{H z+j=d?aZ2!?@EL7C~%B?6ouCKLnO$uWn;Y6Xz zX8dSwj732u(o*U3F$F=7xwxm>E-B+SVZH;O-4XPuPkLSt_?S0)lb7EEg)Mglk0#eS z9@jl(OnH4juMxY+*r03VDfPx_IM!Lmc(5hOI;`?d37f>jPP$?9jQQIQU@i4vuG6MagEoJrQ=RD7xt@8E;c zeGV*+Pt+t$@pt!|McETOE$9k=_C!70uhwRS9X#b%ZK z%q(TIUXSS^F0`4Cx?Rk07C6wI4!UVPeI~-fxY6`YH$kABdOuiRtl73MqG|~AzZ@iL&^s?24iS;RK_pdlWkhcF z@Wv-Om(Aealfg)D^adlXh9Nvf~Uf@y;g3Y)i(YP zEXDnb1V}1pJT5ZWyw=1i+0fni9yINurD=EqH^ciOwLUGi)C%Da)tyt=zq2P7pV5-G zR7!oq28-Fgn5pW|nlu^b!S1Z#r7!Wtr{5J5PQ>pd+2P7RSD?>(U7-|Y z7ZQ5lhYIl_IF<9?T9^IPK<(Hp;l5bl5tF9>X-zG14_7PfsA>6<$~A338iYRT{a@r_ zuXBaT=`T5x3=s&3=RYx6NgG>No4?5KFBVjE(swfcivcIpPQFx5l+O;fiGsOrl5teR z_Cm+;PW}O0Dwe_(4Z@XZ)O0W-v2X><&L*<~*q3dg;bQW3g7)a#3KiQP>+qj|qo*Hk z?57>f2?f@`=Fj^nkDKeRkN2d$Z@2eNKpHo}ksj-$`QKb6n?*$^*%Fb3_Kbf1(*W9K>{L$mud2WHJ=j0^=g30Xhg8$#g^?36`p1fm;;1@0Lrx+8t`?vN0ZorM zSW?rhjCE8$C|@p^sXdx z|NOHHg+fL;HIlqyLp~SSdIF`TnSHehNCU9t89yr@)FY<~hu+X`tjg(aSVae$wDG*C zq$nY(Y494R)hD!i1|IIyP*&PD_c2FPgeY)&mX1qujB1VHPG9`yFQpLFVQ0>EKS@Bp zAfP5`C(sWGLI?AC{XEjLKR4FVNw(4+9b?kba95ukgR1H?w<8F7)G+6&(zUhIE5Ef% z=fFkL3QKA~M@h{nzjRq!Y_t!%U66#L8!(2-GgFxkD1=JRRqk=n%G(yHKn%^&$dW>; zSjAcjETMz1%205se$iH_)ZCpfg_LwvnsZQAUCS#^FExp8O4CrJb6>JquNV@qPq~3A zZ<6dOU#6|8+fcgiA#~MDmcpIEaUO02L5#T$HV0$EMD94HT_eXLZ2Zi&(! 
z&5E>%&|FZ`)CN10tM%tLSPD*~r#--K(H-CZqIOb99_;m|D5wdgJ<1iOJz@h2Zkq?} z%8_KXb&hf=2Wza(Wgc;3v3TN*;HTU*q2?#z&tLn_U0Nt!y>Oo>+2T)He6%XuP;fgn z-G!#h$Y2`9>Jtf}hbVrm6D70|ERzLAU>3zoWhJmjWfgM^))T+2u$~5>HF9jQDkrXR z=IzX36)V75PrFjkQ%TO+iqKGCQ-DDXbaE;C#}!-CoWQx&v*vHfyI>$HNRbpvm<`O( zlx9NBWD6_e&J%Ous4yp~s6)Ghni!I6)0W;9(9$y1wWu`$gs<$9Mcf$L*piP zPR0Av*2%ul`W;?-1_-5Zy0~}?`e@Y5A&0H!^ApyVTT}BiOm4GeFo$_oPlDEyeGBbh z1h3q&Dx~GmUS|3@4V36&$2uO8!Yp&^pD7J5&TN{?xphf*-js1fP?B|`>p_K>lh{ij zP(?H%e}AIP?_i^f&Li=FDSQ`2_NWxL+BB=nQr=$ zHojMlXNGauvvwPU>ZLq!`bX-5F4jBJ&So{kE5+ms9UEYD{66!|k~3vsP+mE}x!>%P za98bAU0!h0&ka4EoiDvBM#CP#dRNdXJcb*(%=<(g+M@<)DZ!@v1V>;54En?igcHR2 zhubQMq}VSOK)onqHfczM7YA@s=9*ow;k;8)&?J3@0JiGcP! zP#00KZ1t)GyZeRJ=f0^gc+58lc4Qh*S7RqPIC6GugG1gXe$LIQMRCo8cHf^qXgAa2 z`}t>u2Cq1CbSEpLr~E=c7~=Qkc9-vLE%(v9N*&HF`(d~(0`iukl5aQ9u4rUvc8%m) zr2GwZN4!s;{SB87lJB;veebPmqE}tSpT>+`t?<457Q9iV$th%i__Z1kOMAswFldD6 ztbOvO337S5o#ZZgN2G99_AVqPv!?Gmt3pzgD+Hp3QPQ`9qJ(g=kjvD+fUSS3upJn! zqoG7acIKEFRX~S}3|{EWT$kdz#zrDlJU(rPkxjws_iyLKU8+v|*oS_W*-guAb&Pj1 z35Z`3z<&Jb@2Mwz=KXucNYdY#SNO$tcVFr9KdKm|%^e-TXzs6M`PBper%ajkrIyUe zp$vVxVs9*>Vp4_1NC~Zg)WOCPmOxI1V34QlG4!aSFOH{QqSVq1^1)- z0P!Z?tT&E-ll(pwf0?=F=yOzik=@nh1Clxr9}Vij89z)ePDSCYAqw?lVI?v?+&*zH z)p$CScFI8rrwId~`}9YWPFu0cW1Sf@vRELs&cbntRU6QfPK-SO*mqu|u~}8AJ!Q$z znzu}50O=YbjwKCuSVBs6&CZR#0FTu)3{}qJJYX(>QPr4$RqWiwX3NT~;>cLn*_&1H zaKpIW)JVJ>b{uo2oq>oQt3y=zJjb%fU@wLqM{SyaC6x2snMx-}ivfU<1- znu1Lh;i$3Tf$Kh5Uk))G!D1UhE8pvx&nO~w^fG)BC&L!_hQk%^p`Kp@F{cz>80W&T ziOK=Sq3fdRu*V0=S53rcIfWFazI}Twj63CG(jOB;$*b`*#B9uEnBM`hDk*EwSRdwP8?5T?xGUKs=5N83XsR*)a4|ijz|c{4tIU+4j^A5C<#5 z*$c_d=5ml~%pGxw#?*q9N7aRwPux5EyqHVkdJO=5J>84!X6P>DS8PTTz>7C#FO?k#edkntG+fJk8ZMn?pmJSO@`x-QHq;7^h6GEXLXo1TCNhH z8ZDH{*NLAjo3WM`xeb=X{((uv3H(8&r8fJJg_uSs_%hOH%JDD?hu*2NvWGYD+j)&` zz#_1%O1wF^o5ryt?O0n;`lHbzp0wQ?rcbW(F1+h7_EZZ9{>rePvLAPVZ_R|n@;b$;UchU=0j<6k8G9QuQf@76oiE*4 zXOLQ&n3$NR#p4<5NJMVC*S);5x2)eRbaAM%VxWu9ohlT;pGEk7;002enCbQ>2r-us z3#bpXP9g|mE`65VrN`+3mC)M(eMj~~eOf)do<@l+fMiTR)XO}422*1SL{wyY(%oMpBgJagtiDf zz>O6(m;};>Hi=t8o{DVC@YigqS(Qh+ix3Rwa9aliH}a}IlOCW1@?%h_bRbq-W{KHF z%Vo?-j@{Xi@=~Lz5uZP27==UGE15|g^0gzD|3x)SCEXrx`*MP^FDLl%pOi~~Il;dc z^hrwp9sYeT7iZ)-ajKy@{a`kr0-5*_!XfBpXwEcFGJ;%kV$0Nx;apKrur zJN2J~CAv{Zjj%FolyurtW8RaFmpn&zKJWL>(0;;+q(%(Hx!GMW4AcfP0YJ*Vz!F4g z!ZhMyj$BdXL@MlF%KeInmPCt~9&A!;cRw)W!Hi@0DY(GD_f?jeV{=s=cJ6e}JktJw zQORnxxj3mBxfrH=x{`_^Z1ddDh}L#V7i}$njUFRVwOX?qOTKjfPMBO4y(WiU<)epb zvB9L=%jW#*SL|Nd_G?E*_h1^M-$PG6Pc_&QqF0O-FIOpa4)PAEPsyvB)GKasmBoEt z?_Q2~QCYGH+hW31x-B=@5_AN870vY#KB~3a*&{I=f);3Kv7q4Q7s)0)gVYx2#Iz9g(F2;=+Iy4 z6KI^8GJ6D@%tpS^8boU}zpi=+(5GfIR)35PzrbuXeL1Y1N%JK7PG|^2k3qIqHfX;G zQ}~JZ-UWx|60P5?d1e;AHx!_;#PG%d=^X(AR%i`l0jSpYOpXoKFW~7ip7|xvN;2^? zsYC9fanpO7rO=V7+KXqVc;Q5z%Bj})xHVrgoR04sA2 zl~DAwv=!(()DvH*=lyhIlU^hBkA0$e*7&fJpB0|oB7)rqGK#5##2T`@_I^|O2x4GO z;xh6ROcV<9>?e0)MI(y++$-ksV;G;Xe`lh76T#Htuia+(UrIXrf9?

[GIT binary patch data (base85-encoded, not human-readable) omitted.]
z(2!VjP&{Z1w+%eUq^ '} + case $link in #( + /*) app_path=$link ;; #( + *) app_path=$APP_HOME$link ;; + esac +done + +APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit + +APP_NAME="Gradle" +APP_BASE_NAME=${0##*/} + +# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. +DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' + +# Use the maximum available, or set MAX_FD != -1 to use that value. +MAX_FD=maximum + +warn () { + echo "$*" +} >&2 + +die () { + echo + echo "$*" + echo + exit 1 +} >&2 + +# OS specific support (must be 'true' or 'false'). +cygwin=false +msys=false +darwin=false +nonstop=false +case "$( uname )" in #( + CYGWIN* ) cygwin=true ;; #( + Darwin* ) darwin=true ;; #( + MSYS* | MINGW* ) msys=true ;; #( + NONSTOP* ) nonstop=true ;; +esac + +CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar + + +# Determine the Java command to use to start the JVM. +if [ -n "$JAVA_HOME" ] ; then + if [ -x "$JAVA_HOME/jre/sh/java" ] ; then + # IBM's JDK on AIX uses strange locations for the executables + JAVACMD=$JAVA_HOME/jre/sh/java + else + JAVACMD=$JAVA_HOME/bin/java + fi + if [ ! -x "$JAVACMD" ] ; then + die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME + +Please set the JAVA_HOME variable in your environment to match the +location of your Java installation." + fi +else + JAVACMD=java + which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. + +Please set the JAVA_HOME variable in your environment to match the +location of your Java installation." +fi + +# Increase the maximum file descriptors if we can. +if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then + case $MAX_FD in #( + max*) + MAX_FD=$( ulimit -H -n ) || + warn "Could not query maximum file descriptor limit" + esac + case $MAX_FD in #( + '' | soft) :;; #( + *) + ulimit -n "$MAX_FD" || + warn "Could not set maximum file descriptor limit to $MAX_FD" + esac +fi + +# Collect all arguments for the java command, stacking in reverse order: +# * args from the command line +# * the main class name +# * -classpath +# * -D...appname settings +# * --module-path (only if needed) +# * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables. + +# For Cygwin or MSYS, switch paths to Windows format before running java +if "$cygwin" || "$msys" ; then + APP_HOME=$( cygpath --path --mixed "$APP_HOME" ) + CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" ) + + JAVACMD=$( cygpath --unix "$JAVACMD" ) + + # Now convert the arguments - kludge to limit ourselves to /bin/sh + for arg do + if + case $arg in #( + -*) false ;; # don't mess with options #( + /?*) t=${arg#/} t=/${t%%/*} # looks like a POSIX filepath + [ -e "$t" ] ;; #( + *) false ;; + esac + then + arg=$( cygpath --path --ignore --mixed "$arg" ) + fi + # Roll the args list around exactly as many times as the number of + # args, so each arg winds up back in the position where it started, but + # possibly modified. + # + # NB: a `for` loop captures its iteration list before it begins, so + # changing the positional parameters here affects neither the number of + # iterations, nor the values presented in `arg`. 
+ shift # remove old arg + set -- "$@" "$arg" # push replacement arg + done +fi + +# Collect all arguments for the java command; +# * $DEFAULT_JVM_OPTS, $JAVA_OPTS, and $GRADLE_OPTS can contain fragments of +# shell script including quotes and variable substitutions, so put them in +# double quotes to make sure that they get re-expanded; and +# * put everything else in single quotes, so that it's not re-expanded. + +set -- \ + "-Dorg.gradle.appname=$APP_BASE_NAME" \ + -classpath "$CLASSPATH" \ + org.gradle.wrapper.GradleWrapperMain \ + "$@" + +# Stop when "xargs" is not available. +if ! command -v xargs >/dev/null 2>&1 +then + die "xargs is not available" +fi + +# Use "xargs" to parse quoted args. +# +# With -n1 it outputs one arg per line, with the quotes and backslashes removed. +# +# In Bash we could simply go: +# +# readarray ARGS < <( xargs -n1 <<<"$var" ) && +# set -- "${ARGS[@]}" "$@" +# +# but POSIX shell has neither arrays nor command substitution, so instead we +# post-process each arg (as a line of input to sed) to backslash-escape any +# character that might be a shell metacharacter, then use eval to reverse +# that process (while maintaining the separation between arguments), and wrap +# the whole thing up as a single "set" statement. +# +# This will of course break if any of these variables contains a newline or +# an unmatched quote. +# + +eval "set -- $( + printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" | + xargs -n1 | + sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' | + tr '\n' ' ' + )" '"$@"' + +exec "$JAVACMD" "$@" diff --git a/photonlib-java-examples/swervedriveposeestsim/gradlew.bat b/photonlib-java-examples/swervedriveposeestsim/gradlew.bat new file mode 100644 index 0000000000..f127cfd49d --- /dev/null +++ b/photonlib-java-examples/swervedriveposeestsim/gradlew.bat @@ -0,0 +1,91 @@ +@rem +@rem Copyright 2015 the original author or authors. +@rem +@rem Licensed under the Apache License, Version 2.0 (the "License"); +@rem you may not use this file except in compliance with the License. +@rem You may obtain a copy of the License at +@rem +@rem https://www.apache.org/licenses/LICENSE-2.0 +@rem +@rem Unless required by applicable law or agreed to in writing, software +@rem distributed under the License is distributed on an "AS IS" BASIS, +@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +@rem See the License for the specific language governing permissions and +@rem limitations under the License. +@rem + +@if "%DEBUG%"=="" @echo off +@rem ########################################################################## +@rem +@rem Gradle startup script for Windows +@rem +@rem ########################################################################## + +@rem Set local scope for the variables with windows NT shell +if "%OS%"=="Windows_NT" setlocal + +set DIRNAME=%~dp0 +if "%DIRNAME%"=="" set DIRNAME=. +set APP_BASE_NAME=%~n0 +set APP_HOME=%DIRNAME% + +@rem Resolve any "." and ".." in APP_HOME to make it shorter. +for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi + +@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. +set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m" + +@rem Find java.exe +if defined JAVA_HOME goto findJavaFromJavaHome + +set JAVA_EXE=java.exe +%JAVA_EXE% -version >NUL 2>&1 +if %ERRORLEVEL% equ 0 goto execute + +echo. +echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. +echo. 
+echo Please set the JAVA_HOME variable in your environment to match the +echo location of your Java installation. + +goto fail + +:findJavaFromJavaHome +set JAVA_HOME=%JAVA_HOME:"=% +set JAVA_EXE=%JAVA_HOME%/bin/java.exe + +if exist "%JAVA_EXE%" goto execute + +echo. +echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% +echo. +echo Please set the JAVA_HOME variable in your environment to match the +echo location of your Java installation. + +goto fail + +:execute +@rem Setup the command line + +set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar + + +@rem Execute Gradle +"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %* + +:end +@rem End local scope for the variables with windows NT shell +if %ERRORLEVEL% equ 0 goto mainEnd + +:fail +rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of +rem the _cmd.exe /c_ return code! +set EXIT_CODE=%ERRORLEVEL% +if %EXIT_CODE% equ 0 set EXIT_CODE=1 +if not ""=="%GRADLE_EXIT_CONSOLE%" exit %EXIT_CODE% +exit /b %EXIT_CODE% + +:mainEnd +if "%OS%"=="Windows_NT" endlocal + +:omega diff --git a/photonlib-java-examples/swervedriveposeestsim/settings.gradle b/photonlib-java-examples/swervedriveposeestsim/settings.gradle new file mode 100644 index 0000000000..48c039ed86 --- /dev/null +++ b/photonlib-java-examples/swervedriveposeestsim/settings.gradle @@ -0,0 +1,27 @@ +import org.gradle.internal.os.OperatingSystem + +pluginManagement { + repositories { + mavenLocal() + gradlePluginPortal() + String frcYear = '2023' + File frcHome + if (OperatingSystem.current().isWindows()) { + String publicFolder = System.getenv('PUBLIC') + if (publicFolder == null) { + publicFolder = "C:\\Users\\Public" + } + def homeRoot = new File(publicFolder, "wpilib") + frcHome = new File(homeRoot, frcYear) + } else { + def userFolder = System.getProperty("user.home") + def homeRoot = new File(userFolder, "wpilib") + frcHome = new File(homeRoot, frcYear) + } + def frcHomeMaven = new File(frcHome, 'maven') + maven { + name 'frcHome' + url frcHomeMaven + } + } +} diff --git a/photonlib-java-examples/swervedriveposeestsim/src/main/deploy/example.txt b/photonlib-java-examples/swervedriveposeestsim/src/main/deploy/example.txt new file mode 100644 index 0000000000..bb82515dad --- /dev/null +++ b/photonlib-java-examples/swervedriveposeestsim/src/main/deploy/example.txt @@ -0,0 +1,3 @@ +Files placed in this directory will be deployed to the RoboRIO into the +'deploy' directory in the home folder. Use the 'Filesystem.getDeployDirectory' wpilib function +to get a proper path relative to the deploy directory. \ No newline at end of file diff --git a/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Main.java b/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Main.java new file mode 100644 index 0000000000..fe215d7362 --- /dev/null +++ b/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Main.java @@ -0,0 +1,15 @@ +// Copyright (c) FIRST and other WPILib contributors. +// Open Source Software; you can modify and/or share it under the terms of +// the WPILib BSD license file in the root directory of this project. + +package frc.robot; + +import edu.wpi.first.wpilibj.RobotBase; + +public final class Main { + private Main() {} + + public static void main(String... 
args) {
+        RobotBase.startRobot(Robot::new);
+    }
+}
diff --git a/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Robot.java b/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Robot.java
new file mode 100644
index 0000000000..d84dc95fc9
--- /dev/null
+++ b/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Robot.java
@@ -0,0 +1,55 @@
+// Copyright (c) FIRST and other WPILib contributors.
+// Open Source Software; you can modify and/or share it under the terms of
+// the WPILib BSD license file in the root directory of this project.
+
+package frc.robot;
+
+import edu.wpi.first.wpilibj.TimedRobot;
+import edu.wpi.first.wpilibj2.command.CommandScheduler;
+
+public class Robot extends TimedRobot {
+
+    @Override
+    public void robotInit() {}
+
+    @Override
+    public void robotPeriodic() {
+        CommandScheduler.getInstance().run();
+    }
+
+    @Override
+    public void disabledInit() {}
+
+    @Override
+    public void disabledPeriodic() {}
+
+    @Override
+    public void disabledExit() {}
+
+    @Override
+    public void autonomousInit() {}
+
+    @Override
+    public void autonomousPeriodic() {}
+
+    @Override
+    public void autonomousExit() {}
+
+    @Override
+    public void teleopInit() {}
+
+    @Override
+    public void teleopPeriodic() {}
+
+    @Override
+    public void teleopExit() {}
+
+    @Override
+    public void testInit() {}
+
+    @Override
+    public void testPeriodic() {}
+
+    @Override
+    public void testExit() {}
+}
diff --git a/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/subsystems/drivetrain/SwerveDriveSim.java b/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/subsystems/drivetrain/SwerveDriveSim.java
new file mode 100644
index 0000000000..d45dd47493
--- /dev/null
+++ b/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/subsystems/drivetrain/SwerveDriveSim.java
@@ -0,0 +1,337 @@
+package frc.robot.subsystems.drivetrain;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Random;
+
+import edu.wpi.first.math.MathUtil;
+import edu.wpi.first.math.Matrix;
+import edu.wpi.first.math.Nat;
+import edu.wpi.first.math.VecBuilder;
+import edu.wpi.first.math.controller.SimpleMotorFeedforward;
+import edu.wpi.first.math.geometry.Pose2d;
+import edu.wpi.first.math.geometry.Rotation2d;
+import edu.wpi.first.math.kinematics.ChassisSpeeds;
+import edu.wpi.first.math.kinematics.SwerveDriveKinematics;
+import edu.wpi.first.math.kinematics.SwerveModulePosition;
+import edu.wpi.first.math.kinematics.SwerveModuleState;
+import edu.wpi.first.math.numbers.N1;
+import edu.wpi.first.math.numbers.N2;
+import edu.wpi.first.math.system.Discretization;
+import edu.wpi.first.math.system.LinearSystem;
+import edu.wpi.first.math.system.plant.DCMotor;
+import edu.wpi.first.wpilibj.RobotController;
+
+public class SwerveDriveSim {
+
+    private final SimpleMotorFeedforward driveFF;
+    private final LinearSystem<N2, N1, N2> drivePlant;
+    private final DCMotor driveGearbox;
+    private final double driveGearing;
+    private final double driveWheelRadius;
+    private final SimpleMotorFeedforward steerFF;
+    private final LinearSystem<N2, N1, N2> steerPlant;
+    private final DCMotor steerGearbox;
+    private final double steerGearing;
+
+    private final SwerveDriveKinematics kinematics;
+    private final int numModules;
+
+    private final double[] driveInputs;
+    private final List<Matrix<N2, N1>> driveStates;
+    private final double[] steerInputs;
+    private final List<Matrix<N2, N1>> steerStates;
+
+    private final Random rand = new Random();
+
+    // noiseless "actual" pose of the robot on the field
+    private Pose2d pose = new Pose2d();
+    private double omegaRadsPerSec = 0;
+
+    /**
+     * Creates a swerve drive simulation from the drive/steer feedforwards, motor models, and kinematics.
+     * @param driveFF The feedforward for the drive motors of this swerve drive. This should be in units of meters.
+     * @param driveGearbox The DCMotor model used by each module's drive motor(s).
+     * @param driveGearing The gear ratio between the drive motor and the wheel.
+     * @param driveWheelRadius The radius of the drive wheel, in meters.
+     * @param steerFF The feedforward for the steer motors of this swerve drive. This should be in units of radians.
+     * @param steerGearbox The DCMotor model used by each module's steer motor(s).
+     * @param steerGearing The gear ratio between the steer motor and the module azimuth.
+     * @param kinematics The kinematics of this swerve drive; its module order is used by all other methods.
+     */
+    public SwerveDriveSim(
+            SimpleMotorFeedforward driveFF, DCMotor driveGearbox, double driveGearing, double driveWheelRadius,
+            SimpleMotorFeedforward steerFF, DCMotor steerGearbox, double steerGearing,
+            SwerveDriveKinematics kinematics) {
+        this.driveFF = driveFF;
+        drivePlant = new LinearSystem<N2, N1, N2>(
+                Matrix.mat(Nat.N2(), Nat.N2()).fill(0.0, 1.0, 0.0, -driveFF.kv / driveFF.ka),
+                VecBuilder.fill(0.0, 1.0 / driveFF.ka),
+                Matrix.mat(Nat.N2(), Nat.N2()).fill(1.0, 0.0, 0.0, 1.0),
+                VecBuilder.fill(0.0, 0.0));
+        this.driveGearbox = driveGearbox;
+        this.driveGearing = driveGearing;
+        this.driveWheelRadius = driveWheelRadius;
+        this.steerFF = steerFF;
+        steerPlant = new LinearSystem<N2, N1, N2>(
+                Matrix.mat(Nat.N2(), Nat.N2()).fill(0.0, 1.0, 0.0, -steerFF.kv / steerFF.ka),
+                VecBuilder.fill(0.0, 1.0 / steerFF.ka),
+                Matrix.mat(Nat.N2(), Nat.N2()).fill(1.0, 0.0, 0.0, 1.0),
+                VecBuilder.fill(0.0, 0.0));
+        this.steerGearbox = steerGearbox;
+        this.steerGearing = steerGearing;
+
+        this.kinematics = kinematics;
+        numModules = kinematics.toSwerveModuleStates(new ChassisSpeeds()).length;
+        driveInputs = new double[numModules];
+        driveStates = new ArrayList<>(numModules);
+        steerInputs = new double[numModules];
+        steerStates = new ArrayList<>(numModules);
+        for(int i = 0; i < numModules; i++) {
+            driveStates.add(VecBuilder.fill(0, 0));
+            steerStates.add(VecBuilder.fill(0, 0));
+        }
+    }
+
+    /**
+     * Sets the input voltages of the drive motors.
+     * @param inputs Input voltages. These should match the module order used in the kinematics.
+     */
+    public void setDriveInputs(double... inputs) {
+        final double battVoltage = RobotController.getBatteryVoltage();
+        for(int i = 0; i < driveInputs.length; i++) {
+            double input = inputs[i];
+            driveInputs[i] = MathUtil.clamp(input, -battVoltage, battVoltage);
+        }
+    }
+
+    /**
+     * Sets the input voltages of the steer motors.
+     * @param inputs Input voltages. These should match the module order used in the kinematics.
+     */
+    public void setSteerInputs(double... inputs) {
+        final double battVoltage = RobotController.getBatteryVoltage();
+        for(int i = 0; i < steerInputs.length; i++) {
+            double input = inputs[i];
+            steerInputs[i] = MathUtil.clamp(input, -battVoltage, battVoltage);
+        }
+    }
+
+    /**
+     * Computes the new x given the old x and the control input. Includes the effect of static friction.
+     *
+     * @param discA The discretized system matrix.
+     * @param discB The discretized input matrix.
+     * @param x The position/velocity state of the drive/steer system.
+     * @param input The input voltage.
+     * @param ks The kS value of the feedforward model.
+     * @return The updated x, including the effect of static friction.
+     */
+    protected static Matrix<N2, N1> calculateX(Matrix<N2, N2> discA, Matrix<N2, N1> discB, Matrix<N2, N1> x, double input, double ks) {
+        var Ax = discA.times(x);
+        double nextStateVel = Ax.get(1, 0);
+        // input required to make next state vel == 0
+        double inputToStop = nextStateVel / -discB.get(1, 0);
+        // ks effect on system velocity
+        double ksSystemEffect = MathUtil.clamp(inputToStop, -ks, ks);
+
+        // after ks system effect:
+        nextStateVel += discB.get(1, 0) * ksSystemEffect;
+        inputToStop = nextStateVel / -discB.get(1, 0);
+        double signToStop = Math.signum(inputToStop);
+        double inputSign = Math.signum(input);
+        double ksInputEffect = 0;
+
+        // system velocity was reduced to 0, resist any input
+        if(Math.abs(ksSystemEffect) < ks) {
+            double absInput = Math.abs(input);
+            ksInputEffect = -MathUtil.clamp(ks * inputSign, -absInput, absInput);
+        }
+        // non-zero system velocity, but input causes velocity sign change. Resist input after sign change
+        else if((input * signToStop) > (inputToStop * signToStop)) {
+            double absInput = Math.abs(input - inputToStop);
+            ksInputEffect = -MathUtil.clamp(ks * inputSign, -absInput, absInput);
+        }
+
+        // calculate next x including static friction
+        var Bu = discB.times(VecBuilder.fill(input + ksSystemEffect + ksInputEffect));
+        return Ax.plus(Bu);
+    }
+
+    /**
+     * Update the drivetrain states with the given timestep.
+     *
+     * @param dtSeconds The timestep in seconds. This should be the robot loop period.
+     */
+    public void update(double dtSeconds) {
+        var driveDiscAB = Discretization.discretizeAB(drivePlant.getA(), drivePlant.getB(), dtSeconds);
+        var steerDiscAB = Discretization.discretizeAB(steerPlant.getA(), steerPlant.getB(), dtSeconds);
+
+        var moduleDeltas = new SwerveModulePosition[numModules];
+        for(int i = 0; i < numModules; i++) {
+            double prevDriveStatePos = driveStates.get(i).get(0, 0);
+            driveStates.set(i, calculateX(driveDiscAB.getFirst(), driveDiscAB.getSecond(), driveStates.get(i), driveInputs[i], driveFF.ks));
+            double currDriveStatePos = driveStates.get(i).get(0, 0);
+            steerStates.set(i, calculateX(steerDiscAB.getFirst(), steerDiscAB.getSecond(), steerStates.get(i), steerInputs[i], steerFF.ks));
+            double currSteerStatePos = steerStates.get(i).get(0, 0);
+            moduleDeltas[i] = new SwerveModulePosition(currDriveStatePos - prevDriveStatePos, new Rotation2d(currSteerStatePos));
+        }
+
+        var twist = kinematics.toTwist2d(moduleDeltas);
+        pose = pose.exp(twist);
+        omegaRadsPerSec = twist.dtheta / dtSeconds;
+    }
+
+    /**
+     * Reset the simulated swerve drive state. This effectively teleports the robot and should only
+     * be used during the setup of the simulation world.
+     *
+     * @param pose The new pose of the simulated swerve drive.
+     * @param preserveMotion If true, the current module states will be preserved. Otherwise,
+     *     they will be reset to zeros.
+     */
+    public void reset(Pose2d pose, boolean preserveMotion) {
+        this.pose = pose;
+        if(!preserveMotion) {
+            for(int i = 0; i < numModules; i++) {
+                driveStates.set(i, VecBuilder.fill(0, 0));
+                steerStates.set(i, VecBuilder.fill(0, 0));
+            }
+            omegaRadsPerSec = 0;
+        }
+    }
+
+    /**
+     * Reset the simulated swerve drive state. This effectively teleports the robot and should only
+     * be used during the setup of the simulation world.
+     *
+     * @param pose The new pose of the simulated swerve drive.
+     * @param moduleDriveStates The new states of the modules' drive systems in [meters, meters/sec].
+     *     These should match the module order used in the kinematics.
+     * @param moduleSteerStates The new states of the modules' steer systems in [radians, radians/sec].
+     *     These should match the module order used in the kinematics.
+     */
+    public void reset(Pose2d pose, List<Matrix<N2, N1>> moduleDriveStates, List<Matrix<N2, N1>> moduleSteerStates) {
+        if(moduleDriveStates.size() != driveStates.size() || moduleSteerStates.size() != steerStates.size())
+            throw new IllegalArgumentException("Provided module states do not match number of modules!");
+        this.pose = pose;
+        for(int i = 0; i < numModules; i++) {
+            driveStates.set(i, moduleDriveStates.get(i).copy());
+            steerStates.set(i, moduleSteerStates.get(i).copy());
+        }
+        omegaRadsPerSec = kinematics.toChassisSpeeds(getModuleStates()).omegaRadiansPerSecond;
+    }
+
+    /**
+     * Get the pose of the simulated swerve drive. Note that this is the "actual" pose of the robot in
+     * the simulation world, without any noise. If you are simulating a pose estimator, this pose should
+     * only be used for visualization or camera simulation. This should be called after {@link #update(double)}.
+     */
+    public Pose2d getPose() {
+        return pose;
+    }
+
+    /**
+     * Get the {@link SwerveModulePosition} of each module. The returned array order matches the kinematics module order.
+     * This should be called after {@link #update(double)}.
+     */
+    public SwerveModulePosition[] getModulePositions() {
+        var positions = new SwerveModulePosition[numModules];
+        for(int i = 0; i < numModules; i++) {
+            positions[i] = new SwerveModulePosition(driveStates.get(i).get(0, 0), new Rotation2d(steerStates.get(i).get(0, 0)));
+        }
+        return positions;
+    }
+
+    /**
+     * Get the {@link SwerveModulePosition} of each module with rudimentary noise simulation.
+     * The returned array order matches the kinematics module order. This should be called after
+     * {@link #update(double)}.
+     *
+     * @param driveStdDev The standard deviation in meters of the drive wheel position.
+     * @param steerStdDev The standard deviation in radians of the steer angle.
+     */
+    public SwerveModulePosition[] getNoisyModulePositions(double driveStdDev, double steerStdDev) {
+        var positions = new SwerveModulePosition[numModules];
+        for(int i = 0; i < numModules; i++) {
+            positions[i] = new SwerveModulePosition(
+                    rand.nextGaussian(driveStates.get(i).get(0, 0), driveStdDev),
+                    new Rotation2d(rand.nextGaussian(steerStates.get(i).get(0, 0), steerStdDev))
+            );
+        }
+        return positions;
+    }
+
+    /**
+     * Get the {@link SwerveModuleState} of each module. The returned array order matches the kinematics module order.
+     * This should be called after {@link #update(double)}.
+     */
+    public SwerveModuleState[] getModuleStates() {
+        var positions = new SwerveModuleState[numModules];
+        for(int i = 0; i < numModules; i++) {
+            positions[i] = new SwerveModuleState(driveStates.get(i).get(1, 0), new Rotation2d(steerStates.get(i).get(0, 0)));
+        }
+        return positions;
+    }
+
+    /**
+     * Get the angular velocity of the robot, which can be useful for gyro simulation. This should be called
+     * after {@link #update(double)}.
+     */
+    public double getOmegaRadsPerSec() {
+        return omegaRadsPerSec;
+    }
+
+    /**
+     * Calculates the current drawn from the battery by the motor(s). Ignores regenerative current from back-emf.
+     *
+     * @param motor The motor(s) used.
+     * @param radiansPerSec The velocity of the motor in radians per second.
+     * @param inputVolts The voltage commanded by the motor controller (battery voltage * duty cycle).
+     * @param battVolts The voltage of the battery.
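+     * @return The current drawn from the battery by this motor, in amps.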
+ */ + private static double getCurrentDraw(DCMotor motor, double radiansPerSec, double inputVolts, double battVolts) { + double effVolts = inputVolts - radiansPerSec / motor.KvRadPerSecPerVolt; + // ignore regeneration + if(inputVolts >= 0) effVolts = MathUtil.clamp(effVolts, 0, inputVolts); + else effVolts = MathUtil.clamp(effVolts, inputVolts, 0); + // calculate battery current + return (inputVolts / battVolts) * (effVolts / motor.rOhms); + } + + /** + * Get the current draw in amps for each module's drive motor(s). This should be called after {@link #update(double)}. + * The returned array order matches the kinematics module order. + */ + public double[] getDriveCurrentDraw() { + double[] currents = new double[numModules]; + for(int i = 0; i < numModules; i++) { + double radiansPerSec = driveStates.get(i).get(1, 0) * driveGearing / driveWheelRadius; + currents[i] = getCurrentDraw(driveGearbox, radiansPerSec, driveInputs[i], RobotController.getBatteryVoltage()); + } + return currents; + } + + /** + * Get the current draw in amps for each module's steer motor(s). This should be called after {@link #update(double)}. + * The returned array order matches the kinematics module order. + */ + public double[] getSteerCurrentDraw() { + double[] currents = new double[numModules]; + for(int i = 0; i < numModules; i++) { + double radiansPerSec = steerStates.get(i).get(1, 0) * steerGearing; + currents[i] = getCurrentDraw(steerGearbox, radiansPerSec, steerInputs[i], RobotController.getBatteryVoltage()); + } + return currents; + } + + /** + * Get the total current draw in amps of all swerve motors. This should be called after {@link #update(double)}. + */ + public double getTotalCurrentDraw() { + double sum = 0; + for(double val : getDriveCurrentDraw()) sum += val; + for(double val : getSteerCurrentDraw()) sum += val; + return sum; + } +} From b2669468fe4a240d78579e4d8e671cd9fabb4f55 Mon Sep 17 00:00:00 2001 From: amquake Date: Mon, 18 Sep 2023 19:47:34 -0700 Subject: [PATCH 27/46] swervedriveposeestsim --- .../swervedriveposeestsim/simgui.json | 103 ++++++ .../src/main/java/frc/robot/Constants.java | 139 ++++++++ .../src/main/java/frc/robot/Robot.java | 129 ++++++-- .../src/main/java/frc/robot/Vision.java | 127 ++++++++ .../subsystems/drivetrain/SwerveDrive.java | 300 ++++++++++++++++++ .../subsystems/drivetrain/SwerveDriveSim.java | 149 +++++++-- .../subsystems/drivetrain/SwerveModule.java | 163 ++++++++++ .../swervedriveposeestsim/swerve_module.png | Bin 0 -> 2954 bytes .../swervedriveposeestsim/tag-blue.png | Bin 0 -> 4711 bytes .../swervedriveposeestsim/tag-green.png | Bin 0 -> 4698 bytes 10 files changed, 1053 insertions(+), 57 deletions(-) create mode 100644 photonlib-java-examples/swervedriveposeestsim/simgui.json create mode 100644 photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Constants.java create mode 100644 photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Vision.java create mode 100644 photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/subsystems/drivetrain/SwerveDrive.java create mode 100644 photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/subsystems/drivetrain/SwerveModule.java create mode 100644 photonlib-java-examples/swervedriveposeestsim/swerve_module.png create mode 100644 photonlib-java-examples/swervedriveposeestsim/tag-blue.png create mode 100644 photonlib-java-examples/swervedriveposeestsim/tag-green.png diff --git a/photonlib-java-examples/swervedriveposeestsim/simgui.json 
b/photonlib-java-examples/swervedriveposeestsim/simgui.json new file mode 100644 index 0000000000..23b949fb97 --- /dev/null +++ b/photonlib-java-examples/swervedriveposeestsim/simgui.json @@ -0,0 +1,103 @@ +{ + "NTProvider": { + "types": { + "/FMSInfo": "FMSInfo", + "/SmartDashboard/VisionSystemSim-main/Sim Field": "Field2d" + }, + "windows": { + "/SmartDashboard/VisionSystemSim-main/Sim Field": { + "EstimatedRobot": { + "arrowWeight": 3.0, + "length": 0.800000011920929, + "selectable": false, + "weight": 3.0, + "width": 0.800000011920929 + }, + "EstimatedRobotModules": { + "arrows": false, + "image": "swerve_module.png", + "length": 0.30000001192092896, + "selectable": false, + "width": 0.30000001192092896 + }, + "Robot": { + "arrowColor": [ + 1.0, + 1.0, + 1.0, + 255.0 + ], + "arrowWeight": 2.0, + "color": [ + 1.0, + 1.0, + 1.0, + 255.0 + ], + "length": 0.800000011920929, + "selectable": false, + "weight": 2.0, + "width": 0.800000011920929 + }, + "VisionEstimation": { + "arrowColor": [ + 0.0, + 0.6075949668884277, + 1.0, + 255.0 + ], + "arrowWeight": 2.0, + "color": [ + 0.0, + 0.6075949668884277, + 1.0, + 255.0 + ], + "selectable": false, + "weight": 2.0 + }, + "apriltag": { + "image": "tag-green.png", + "length": 0.5, + "width": 0.4000000059604645 + }, + "cameras": { + "arrowColor": [ + 0.29535865783691406, + 1.0, + 0.9910804033279419, + 255.0 + ], + "arrowSize": 19, + "arrowWeight": 3.0, + "length": 1.0, + "style": "Hidden", + "weight": 1.0, + "width": 1.0 + }, + "height": 8.013699531555176, + "visibleTargetPoses": { + "arrows": false, + "image": "tag-blue.png", + "length": 0.5, + "selectable": false, + "width": 0.4000000059604645 + }, + "width": 16.541749954223633, + "window": { + "visible": true + } + } + } + }, + "NetworkTables": { + "transitory": { + "SmartDashboard": { + "VisionSystemSim-main": { + "open": true + }, + "open": true + } + } + } +} diff --git a/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Constants.java b/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Constants.java new file mode 100644 index 0000000000..1d6c42d65f --- /dev/null +++ b/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Constants.java @@ -0,0 +1,139 @@ +package frc.robot; + +import java.io.IOException; + +import edu.wpi.first.apriltag.AprilTagFieldLayout; +import edu.wpi.first.apriltag.AprilTagFields; +import edu.wpi.first.math.Matrix; +import edu.wpi.first.math.VecBuilder; +import edu.wpi.first.math.controller.SimpleMotorFeedforward; +import edu.wpi.first.math.geometry.Rotation3d; +import edu.wpi.first.math.geometry.Transform3d; +import edu.wpi.first.math.geometry.Translation2d; +import edu.wpi.first.math.geometry.Translation3d; +import edu.wpi.first.math.numbers.N1; +import edu.wpi.first.math.numbers.N3; +import edu.wpi.first.math.util.Units; + +public class Constants { + + public static class Vision { + public static final String kCameraName = "YOUR CAMERA NAME"; + // Cam mounted facing forward, half a meter forward of center, half a meter up from center. 
+ public static final Transform3d kRobotToCam = + new Transform3d( + new Translation3d(0.5, 0.0, 0.5), + new Rotation3d(0, 0, 0)); + + // The layout of the AprilTags on the field + public static final AprilTagFieldLayout kTagLayout; + static { + try { + kTagLayout = AprilTagFields.kDefaultField.loadAprilTagLayoutField(); + } catch (IOException e) { + e.printStackTrace(); + throw new RuntimeException(e); + } + } + + // The standard deviations of our vision estimated poses, which affect correction rate + // (Fake values. Experiment and determine estimation noise on an actual robot.) + public static final Matrix kSingleTagStdDevs = VecBuilder.fill(4, 4, 8); + public static final Matrix kMultiTagStdDevs = VecBuilder.fill(0.5, 0.5, 1); + } + + public static class Swerve { + // Physical properties + public static final double kTrackWidth = Units.inchesToMeters(18.5); + public static final double kTrackLength = Units.inchesToMeters(18.5); + public static final double kRobotWidth = Units.inchesToMeters(25 + 3.25*2); + public static final double kRobotLength = Units.inchesToMeters(25 + 3.25*2); + public static final double kMaxLinearSpeed = Units.feetToMeters(15.5); + public static final double kMaxAngularSpeed = Units.rotationsToRadians(2); + public static final double kWheelDiameter = Units.inchesToMeters(4); + public static final double kWheelCircumference = kWheelDiameter*Math.PI; + + public static final double kDriveGearRatio = 6.75; // 6.75:1 SDS MK4 L2 ratio + public static final double kSteerGearRatio = 12.8; // 12.8:1 + + public static final double kDriveDistPerPulse = kWheelCircumference / 1024 / kDriveGearRatio; + public static final double kSteerRadPerPulse = 2 * Math.PI / 1024; + + public enum ModuleConstants { + FL( // Front left + 1, + 0, 0, 1, + 1, 2, 3, 0, + kTrackLength/2, kTrackWidth/2 + ), + FR( // Front Right + 2, + 2, 4, 5, + 3, 6, 7, 0, + kTrackLength/2, -kTrackWidth/2 + ), + BL( // Back Left + 3, + 4, 8, 9, + 5, 10, 11, 0, + -kTrackLength/2, kTrackWidth/2 + ), + BR( // Back Right + 4, + 6, 12, 13, + 7, 14, 15, 0, + -kTrackLength/2, -kTrackWidth/2 + ); + + public final int moduleNum; + public final int driveMotorID; + public final int driveEncoderA; + public final int driveEncoderB; + public final int steerMotorID; + public final int steerEncoderA; + public final int steerEncoderB; + public final double angleOffset; + public final Translation2d centerOffset; + + private ModuleConstants( + int moduleNum, + int driveMotorID, int driveEncoderA, int driveEncoderB, + int steerMotorID, int steerEncoderA, int steerEncoderB, double angleOffset, + double xOffset, double yOffset + ){ + this.moduleNum = moduleNum; + this.driveMotorID = driveMotorID; + this.driveEncoderA = driveEncoderA; + this.driveEncoderB = driveEncoderB; + this.steerMotorID = steerMotorID; + this.steerEncoderA = steerEncoderA; + this.steerEncoderB = steerEncoderB; + this.angleOffset = angleOffset; + centerOffset = new Translation2d(xOffset, yOffset); + } + } + + // Feedforward + // Linear drive feed forward + public static final SimpleMotorFeedforward kDriveFF = new SimpleMotorFeedforward( // real + 0.25, // Voltage to break static friction + 2.5, // Volts per meter per second + 0.3 // Volts per meter per second squared + ); + // Steer feed forward + public static final SimpleMotorFeedforward kSteerFF = new SimpleMotorFeedforward( // real + 0.5, // Voltage to break static friction + 0.25, // Volts per radian per second + 0.01 // Volts per radian per second squared + ); + + // PID + public static final double kDriveKP 
= 1; + public static final double kDriveKI = 0; + public static final double kDriveKD = 0; + + public static final double kSteerKP = 20; + public static final double kSteerKI = 0; + public static final double kSteerKD = 0.25; + } +} diff --git a/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Robot.java b/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Robot.java index d84dc95fc9..b98e0fa7b6 100644 --- a/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Robot.java +++ b/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Robot.java @@ -4,52 +4,137 @@ package frc.robot; +import java.util.Random; + +import edu.wpi.first.math.filter.SlewRateLimiter; +import edu.wpi.first.math.geometry.Pose2d; +import edu.wpi.first.math.geometry.Rotation2d; +import edu.wpi.first.math.geometry.Transform2d; +import edu.wpi.first.math.geometry.Translation2d; import edu.wpi.first.wpilibj.TimedRobot; -import edu.wpi.first.wpilibj2.command.CommandScheduler; +import edu.wpi.first.wpilibj.Timer; +import edu.wpi.first.wpilibj.XboxController; +import edu.wpi.first.wpilibj.simulation.BatterySim; +import edu.wpi.first.wpilibj.simulation.RoboRioSim; +import frc.robot.subsystems.drivetrain.SwerveDrive; public class Robot extends TimedRobot { + private SwerveDrive drivetrain; + private Vision vision; + + private XboxController controller; + // Limit max speed + private final double kDriveSpeed = 0.6; + // Rudimentary limiting of drivetrain acceleration + private SlewRateLimiter forwardLimiter = new SlewRateLimiter(1.0 / 0.6); // 1 / x seconds to 100% + private SlewRateLimiter strafeLimiter = new SlewRateLimiter(1.0 / 0.6); + private SlewRateLimiter turnLimiter = new SlewRateLimiter(1.0 / 0.33); + + private Timer autoTimer = new Timer(); + private Random rand = new Random(4512); @Override - public void robotInit() {} + public void robotInit() { + drivetrain = new SwerveDrive(); + vision = new Vision(); + + controller = new XboxController(0); + } @Override public void robotPeriodic() { - CommandScheduler.getInstance().run(); + drivetrain.periodic(); + + // Correct pose estimate with vision measurements + var visionEst = vision.getEstimatedGlobalPose(); + visionEst.ifPresent(est -> { + var estPose = est.estimatedPose.toPose2d(); + // Change our trust in the measurement based on the tags we can see + var estStdDevs = vision.getEstimationStdDevs(estPose); + + drivetrain.addVisionMeasurement(est.estimatedPose.toPose2d(), est.timestampSeconds, estStdDevs); + }); + + // Apply a random offset to pose estimator to test vision correction + if(controller.getBButtonPressed()) { + var trf = new Transform2d(new Translation2d(rand.nextDouble(4)-2, rand.nextDouble(4)-2), new Rotation2d(rand.nextDouble(2*Math.PI))); + drivetrain.resetPose(drivetrain.getPose().plus(trf), false); + } + + // Log values to the dashboard + drivetrain.log(); } @Override public void disabledInit() {} @Override - public void disabledPeriodic() {} + public void disabledPeriodic() { + drivetrain.stop(); + } @Override public void disabledExit() {} @Override - public void autonomousInit() {} - - @Override - public void autonomousPeriodic() {} + public void autonomousInit() { + autoTimer.restart(); + var pose = new Pose2d(1, 1, new Rotation2d()); + drivetrain.resetPose(pose, true); + vision.resetSimPose(pose); + } @Override - public void autonomousExit() {} - + public void autonomousPeriodic() { + // translate diagonally while spinning + if(autoTimer.get() < 10) { + drivetrain.drive(0.5, 
0.5, 0.5, false); + } + else { + autoTimer.stop(); + drivetrain.stop(); + } + } + @Override public void teleopInit() {} @Override - public void teleopPeriodic() {} - - @Override - public void teleopExit() {} - - @Override - public void testInit() {} - - @Override - public void testPeriodic() {} - + public void teleopPeriodic() { + // We will use an "arcade drive" scheme to turn joystick values into target robot speeds + // We want to get joystick values where pushing forward/left is positive + double forward = -controller.getLeftY() * kDriveSpeed; + if(Math.abs(forward) < 0.1) forward = 0; // deadband small values + forward = forwardLimiter.calculate(forward); // limit acceleration + double strafe = -controller.getLeftX() * kDriveSpeed; + if(Math.abs(strafe) < 0.1) strafe = 0; + strafe = strafeLimiter.calculate(strafe); + double turn = -controller.getRightX() * kDriveSpeed; + if(Math.abs(turn) < 0.1) turn = 0; + turn = turnLimiter.calculate(turn); + + // Convert from joystick values to real target speeds + forward *= Constants.Swerve.kMaxLinearSpeed; + strafe *= Constants.Swerve.kMaxLinearSpeed; + turn *= Constants.Swerve.kMaxLinearSpeed; + + // Command drivetrain motors based on target speeds + drivetrain.drive(forward, strafe, turn, true); + } + @Override - public void testExit() {} + public void simulationPeriodic() { + // Update drivetrain simulation + drivetrain.simulationPeriodic(); + + // Update camera simulation + vision.simulationPeriodic(drivetrain.getSimPose()); + + var debugField = vision.getSimDebugField(); + debugField.getObject("EstimatedRobot").setPose(drivetrain.getPose()); + debugField.getObject("EstimatedRobotModules").setPoses(drivetrain.getModulePoses()); + + // Calculate battery voltage sag due to current draw + RoboRioSim.setVInVoltage(BatterySim.calculateDefaultBatteryLoadedVoltage(drivetrain.getCurrentDraw())); + } } diff --git a/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Vision.java b/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Vision.java new file mode 100644 index 0000000000..3bff6fb6d4 --- /dev/null +++ b/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Vision.java @@ -0,0 +1,127 @@ +package frc.robot; + +import org.photonvision.EstimatedRobotPose; +import org.photonvision.PhotonCamera; +import org.photonvision.PhotonPoseEstimator; +import org.photonvision.PhotonPoseEstimator.PoseStrategy; +import org.photonvision.simulation.PhotonCameraSim; +import org.photonvision.simulation.SimCameraProperties; +import org.photonvision.simulation.VisionSystemSim; +import org.photonvision.targeting.PhotonPipelineResult; + +import edu.wpi.first.math.Matrix; +import edu.wpi.first.math.VecBuilder; +import edu.wpi.first.math.geometry.Pose2d; +import edu.wpi.first.math.geometry.Rotation2d; +import edu.wpi.first.math.numbers.N1; +import edu.wpi.first.math.numbers.N3; +import edu.wpi.first.wpilibj.smartdashboard.Field2d; + +import static frc.robot.Constants.Vision.*; + +import java.util.Optional; + +public class Vision { + private final PhotonCamera camera; + private final PhotonPoseEstimator photonEstimator; + + // Simulation + private PhotonCameraSim cameraSim; + private VisionSystemSim visionSim; + + public Vision() { + camera = new PhotonCamera(kCameraName); + + photonEstimator = new PhotonPoseEstimator(kTagLayout, PoseStrategy.MULTI_TAG_PNP, camera, kRobotToCam); + photonEstimator.setMultiTagFallbackStrategy(PoseStrategy.LOWEST_AMBIGUITY); + + //----- Simulation + if(Robot.isSimulation()) { + // Create 
the vision system simulation which handles cameras and targets on the field. + visionSim = new VisionSystemSim("main"); + // Add all the AprilTags inside the tag layout as visible targets to this simulated field. + visionSim.addAprilTags(kTagLayout); + // Create simulated camera properties. These can be set to mimic your actual camera. + var cameraProp = new SimCameraProperties(); + cameraProp.setCalibration(960, 720, Rotation2d.fromDegrees(90)); + cameraProp.setCalibError(0.35, 0.10); + cameraProp.setFPS(15); + cameraProp.setAvgLatencyMs(50); + cameraProp.setLatencyStdDevMs(15); + // Create a PhotonCameraSim which will update the linked PhotonCamera's values with visible targets. + cameraSim = new PhotonCameraSim(camera, cameraProp); + // Add the simulated camera to view the targets on this simulated field. + visionSim.addCamera(cameraSim, kRobotToCam); + + cameraSim.enableDrawWireframe(true); + } + } + + public PhotonPipelineResult getLatestResult() { + return camera.getLatestResult(); + } + + /** + * The latest estimated robot pose on the field from vision data. This may be empty. This should + * only be called once per loop. + * + * @return An {@link EstimatedRobotPose} with an estimated pose, estimate timestamp, and targets used for estimation. + */ + public Optional getEstimatedGlobalPose() { + var visionEst = photonEstimator.update(); + if(Robot.isSimulation()) { + visionEst.ifPresentOrElse( + est -> getSimDebugField().getObject("VisionEstimation").setPose(est.estimatedPose.toPose2d()), + () -> getSimDebugField().getObject("VisionEstimation").setPoses() + ); + } + return visionEst; + } + + /** + * The standard deviations of the estimated pose from {@link #getEstimatedGlobalPose()}, for use with + * {@link edu.wpi.first.math.estimator.SwerveDrivePoseEstimator SwerveDrivePoseEstimator}. This should + * only be used when there are targets visible. + * + * @param estimatedPose The estimated pose to guess standard deviations for. + */ + public Matrix getEstimationStdDevs(Pose2d estimatedPose) { + var estStdDevs = kSingleTagStdDevs; + var targets = getLatestResult().getTargets(); + int numTags = 0; + double avgDist = 0; + for(var tgt : targets) { + var tagPose = photonEstimator.getFieldTags().getTagPose(tgt.getFiducialId()); + if(tagPose.isEmpty()) continue; + numTags++; + avgDist += tagPose.get().toPose2d().getTranslation().getDistance(estimatedPose.getTranslation()); + } + if(numTags == 0) return estStdDevs; + avgDist /= numTags; + // Decrease std devs if multiple targets are visible + if(numTags > 1) estStdDevs = kMultiTagStdDevs; + // Increase std devs based on (average) distance + if(numTags == 1 && avgDist > 4) estStdDevs = VecBuilder.fill(Double.MAX_VALUE, Double.MAX_VALUE, Double.MAX_VALUE); + else estStdDevs = estStdDevs.times(1 + (avgDist * avgDist / 30)); + + return estStdDevs; + } + + + //----- Simulation + + public void simulationPeriodic(Pose2d robotSimPose) { + visionSim.update(robotSimPose); + } + + /** Reset pose history of the robot in the vision system simulation. */ + public void resetSimPose(Pose2d pose) { + if(Robot.isSimulation()) visionSim.resetRobotPose(pose); + } + + /** A Field2d for visualizing our robot and objects on the field. 
*/ + public Field2d getSimDebugField() { + if(!Robot.isSimulation()) return null; + return visionSim.getDebugField(); + } +} diff --git a/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/subsystems/drivetrain/SwerveDrive.java b/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/subsystems/drivetrain/SwerveDrive.java new file mode 100644 index 0000000000..b0c13d6bbe --- /dev/null +++ b/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/subsystems/drivetrain/SwerveDrive.java @@ -0,0 +1,300 @@ +package frc.robot.subsystems.drivetrain; + +import static frc.robot.Constants.Swerve.*; + +import edu.wpi.first.math.Matrix; +import edu.wpi.first.math.VecBuilder; +import edu.wpi.first.math.estimator.SwerveDrivePoseEstimator; +import edu.wpi.first.math.geometry.Pose2d; +import edu.wpi.first.math.geometry.Rotation2d; +import edu.wpi.first.math.geometry.Transform2d; +import edu.wpi.first.math.kinematics.ChassisSpeeds; +import edu.wpi.first.math.kinematics.SwerveDriveKinematics; +import edu.wpi.first.math.kinematics.SwerveModulePosition; +import edu.wpi.first.math.kinematics.SwerveModuleState; +import edu.wpi.first.math.numbers.*; +import edu.wpi.first.math.system.plant.DCMotor; +import edu.wpi.first.wpilibj.ADXRS450_Gyro; +import edu.wpi.first.wpilibj.SPI.Port; +import edu.wpi.first.wpilibj.simulation.ADXRS450_GyroSim; +import edu.wpi.first.wpilibj.smartdashboard.SmartDashboard; +import frc.robot.Robot; + +public class SwerveDrive { + // Construct the swerve modules with their respective constants. + // The SwerveModule class will handle all the details of controlling the modules. + private final SwerveModule[] swerveMods = { + new SwerveModule(ModuleConstants.FL), + new SwerveModule(ModuleConstants.FR), + new SwerveModule(ModuleConstants.BL), + new SwerveModule(ModuleConstants.BR) + }; + + // The kinematics for translating between robot chassis speeds and module states. + private final SwerveDriveKinematics kinematics = new SwerveDriveKinematics( + swerveMods[0].getModuleConstants().centerOffset, + swerveMods[1].getModuleConstants().centerOffset, + swerveMods[2].getModuleConstants().centerOffset, + swerveMods[3].getModuleConstants().centerOffset + ); + + private final ADXRS450_Gyro gyro = new ADXRS450_Gyro(Port.kOnboardCS0); + + // The robot pose estimator for tracking swerve odometry and applying vision corrections. + private final SwerveDrivePoseEstimator poseEstimator; + + private ChassisSpeeds targetChassisSpeeds = new ChassisSpeeds(); + + //----- Simulation + private final ADXRS450_GyroSim gyroSim; + private final SwerveDriveSim swerveDriveSim; + private double totalCurrentDraw = 0; + + public SwerveDrive() { + // Define the standard deviations for the pose estimator, which determine how fast the pose + // estimate converges to the vision measurement. This should depend on the vision measurement noise + // and how many or how frequently vision measurements are applied to the pose estimator. 
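+        // (Editor's illustration, not part of this patch) Larger standard deviations mean that source is
+        // trusted less. For example, a team that finds vision corrections too aggressive might instead try:
+        //   var visionStdDevs = VecBuilder.fill(2, 2, 2);
+        // The per-measurement values from Vision.getEstimationStdDevs() can also replace these defaults.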
+ var stateStdDevs = VecBuilder.fill(0.1, 0.1, 0.1); + var visionStdDevs = VecBuilder.fill(1, 1, 1); + poseEstimator = new SwerveDrivePoseEstimator( + kinematics, + getGyroYaw(), + getModulePositions(), + new Pose2d(), + stateStdDevs, + visionStdDevs + ); + + //----- Simulation + gyroSim = new ADXRS450_GyroSim(gyro); + swerveDriveSim = new SwerveDriveSim( + kDriveFF, DCMotor.getFalcon500(1), kDriveGearRatio, kWheelDiameter/2.0, + kSteerFF, DCMotor.getFalcon500(1), kSteerGearRatio, + kinematics); + } + + public void periodic() { + for(SwerveModule module : swerveMods) { + module.periodic(); + } + + // Update the odometry of the swerve drive using the wheel encoders and gyro. + poseEstimator.update(getGyroYaw(), getModulePositions()); + } + + /** + * Basic drive control. A target field-relative ChassisSpeeds (vx, vy, omega) is converted to + * specific swerve module states. + * + * @param vxMeters X velocity (forwards/backwards) + * @param vyMeters Y velocity (strafe left/right) + * @param omegaRadians Angular velocity (rotation CCW+) + * @param openLoop If swerve modules should use feedforward only and ignore velocity feedback control. + */ + public void drive(double vxMeters, double vyMeters, double omegaRadians, boolean openLoop){ + var targetChassisSpeeds = ChassisSpeeds.fromFieldRelativeSpeeds(vxMeters, vyMeters, omegaRadians, getHeading()); + setChassisSpeeds(targetChassisSpeeds, openLoop, false); + } + + /** + * Command the swerve drive to the desired chassis speeds by converting them to swerve module states and + * using {@link #setModuleStates(SwerveModuleState[], boolean)}. + * + * @param targetChassisSpeeds Target robot-relative chassis speeds (vx, vy, omega). + * @param openLoop If swerve modules should use feedforward only and ignore velocity feedback control. + * @param steerInPlace If modules should steer to the target angle when target velocity is 0. + */ + public void setChassisSpeeds(ChassisSpeeds targetChassisSpeeds, boolean openLoop, boolean steerInPlace){ + setModuleStates(kinematics.toSwerveModuleStates(targetChassisSpeeds), openLoop, steerInPlace); + this.targetChassisSpeeds = targetChassisSpeeds; + } + + /** + * Command the swerve modules to the desired states. Velocites exceeding the maximum speed will be + * desaturated (while preserving the ratios between modules). + * + * @param openLoop If swerve modules should use feedforward only and ignore velocity feedback control. + * @param steerInPlace If modules should steer to the target angle when target velocity is 0. + */ + public void setModuleStates(SwerveModuleState[] desiredStates, boolean openLoop, boolean steerInPlace){ + SwerveDriveKinematics.desaturateWheelSpeeds(desiredStates, kMaxLinearSpeed); + for(int i = 0; i < swerveMods.length; i++) { + swerveMods[i].setDesiredState(desiredStates[i], openLoop, steerInPlace); + } + } + + /** Stop the swerve drive. */ + public void stop(){ + drive(0, 0, 0, true); + } + + /** See {@link SwerveDrivePoseEstimator#addVisionMeasurement(Pose2d, double)}. */ + public void addVisionMeasurement(Pose2d visionMeasurement, double timestampSeconds){ + poseEstimator.addVisionMeasurement( + visionMeasurement, + timestampSeconds + ); + } + + /** See {@link SwerveDrivePoseEstimator#addVisionMeasurement(Pose2d, double, Matrix)}. 
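+     * The supplied standard deviations (e.g. from Vision.getEstimationStdDevs) are used for this and
+     * subsequent vision measurements in place of the defaults given to the estimator's constructor.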
*/ + public void addVisionMeasurement(Pose2d visionMeasurement, double timestampSeconds, Matrix stdDevs){ + poseEstimator.addVisionMeasurement( + visionMeasurement, + timestampSeconds, + stdDevs + ); + } + + /** + * Reset the estimated pose of the swerve drive on the field. + * + * @param pose New robot pose. + * @param resetSimPose If the simulated robot pose should also be reset. This effectively teleports the + * robot and should only be used during the setup of the simulation world. + */ + public void resetPose(Pose2d pose, boolean resetSimPose){ + if(resetSimPose) { + swerveDriveSim.reset(pose, false); + // we shouldnt realistically be resetting pose after startup, but we will handle it anyway for testing + for(int i = 0; i < swerveMods.length; i++) { + swerveMods[i].simulationUpdate(0, 0, 0, 0, 0, 0); + } + gyroSim.setAngle(-pose.getRotation().getDegrees()); + gyroSim.setRate(0); + } + + poseEstimator.resetPosition(getGyroYaw(), getModulePositions(), pose); + } + + /** Get the estimated pose of the swerve drive on the field. */ + public Pose2d getPose() { + return poseEstimator.getEstimatedPosition(); + } + + /** The heading of the swerve drive's estimated pose on the field. */ + public Rotation2d getHeading(){ + return getPose().getRotation(); + } + + /** Raw gyro yaw (this may not match the field heading!). */ + public Rotation2d getGyroYaw(){ + return gyro.getRotation2d(); + } + + /** Get the chassis speeds of the robot (vx, vy, omega) from the swerve module states. */ + public ChassisSpeeds getChassisSpeeds(){ + return kinematics.toChassisSpeeds(getModuleStates()); + } + + /** + * Get the SwerveModuleState of each swerve module (speed, angle). + * The returned array order matches the kinematics module order. + */ + public SwerveModuleState[] getModuleStates(){ + return new SwerveModuleState[]{ + swerveMods[0].getState(), + swerveMods[1].getState(), + swerveMods[2].getState(), + swerveMods[3].getState() + }; + } + + /** + * Get the SwerveModulePosition of each swerve module (position, angle). + * The returned array order matches the kinematics module order. + */ + public SwerveModulePosition[] getModulePositions(){ + return new SwerveModulePosition[]{ + swerveMods[0].getPosition(), + swerveMods[1].getPosition(), + swerveMods[2].getPosition(), + swerveMods[3].getPosition() + }; + } + + /** + * Get the Pose2d of each swerve module based on kinematics and current robot pose. + * The returned array order matches the kinematics module order. + */ + public Pose2d[] getModulePoses(){ + Pose2d[] modulePoses = new Pose2d[swerveMods.length]; + for(int i = 0; i < swerveMods.length; i++){ + var module = swerveMods[i]; + modulePoses[i] = getPose().transformBy( + new Transform2d(module.getModuleConstants().centerOffset, module.getAbsoluteHeading()) + ); + } + return modulePoses; + } + + /** Log various drivetrain values to the dashboard. 
*/ + public void log(){ + String table = "Drive/"; + Pose2d pose = getPose(); + SmartDashboard.putNumber(table+"X", pose.getX()); + SmartDashboard.putNumber(table+"Y", pose.getY()); + SmartDashboard.putNumber(table+"Heading", pose.getRotation().getDegrees()); + ChassisSpeeds chassisSpeeds = getChassisSpeeds(); + SmartDashboard.putNumber(table+"VX", chassisSpeeds.vxMetersPerSecond); + SmartDashboard.putNumber(table+"VY", chassisSpeeds.vyMetersPerSecond); + SmartDashboard.putNumber(table+"Omega Degrees", Math.toDegrees(chassisSpeeds.omegaRadiansPerSecond)); + SmartDashboard.putNumber(table+"Target VX", targetChassisSpeeds.vxMetersPerSecond); + SmartDashboard.putNumber(table+"Target VY", targetChassisSpeeds.vyMetersPerSecond); + SmartDashboard.putNumber(table+"Target Omega Degrees", Math.toDegrees(targetChassisSpeeds.omegaRadiansPerSecond)); + + for(SwerveModule module : swerveMods) { + module.log(); + } + } + + + //----- Simulation + + public void simulationPeriodic(){ + // Pass commanded motor voltages into swerve drive simulation + double[] driveInputs = new double[swerveMods.length]; + double[] steerInputs = new double[swerveMods.length]; + for(int i = 0; i < swerveMods.length; i++) { + driveInputs[i] = swerveMods[i].getDriveVoltage(); + steerInputs[i] = swerveMods[i].getSteerVoltage(); + } + swerveDriveSim.setDriveInputs(driveInputs); + swerveDriveSim.setSteerInputs(steerInputs); + + // Simulate one timestep + swerveDriveSim.update(Robot.kDefaultPeriod); + + // Update module and gyro values with simulated values + var driveStates = swerveDriveSim.getDriveStates(); + var steerStates = swerveDriveSim.getSteerStates(); + totalCurrentDraw = 0; + double[] driveCurrents = swerveDriveSim.getDriveCurrentDraw(); + for(double current : driveCurrents) totalCurrentDraw += current; + double[] steerCurrents = swerveDriveSim.getSteerCurrentDraw(); + for(double current : steerCurrents) totalCurrentDraw += current; + for(int i = 0; i < swerveMods.length; i++) { + double drivePos = driveStates.get(i).get(0, 0); + double driveRate = driveStates.get(i).get(1, 0); + double steerPos = steerStates.get(i).get(0, 0); + double steerRate = steerStates.get(i).get(1, 0); + swerveMods[i].simulationUpdate( + drivePos, driveRate, driveCurrents[i], + steerPos, steerRate, steerCurrents[i]); + } + + gyroSim.setRate(-swerveDriveSim.getOmegaRadsPerSec()); + gyroSim.setAngle(-swerveDriveSim.getPose().getRotation().getDegrees()); + } + + /** + * The "actual" pose of the robot on the field used in simulation. + * This is different from the swerve drive's estimated pose. + */ + public Pose2d getSimPose() { + return swerveDriveSim.getPose(); + } + + public double getCurrentDraw() { + return totalCurrentDraw; + } +} diff --git a/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/subsystems/drivetrain/SwerveDriveSim.java b/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/subsystems/drivetrain/SwerveDriveSim.java index d45dd47493..efc30e683c 100644 --- a/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/subsystems/drivetrain/SwerveDriveSim.java +++ b/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/subsystems/drivetrain/SwerveDriveSim.java @@ -22,16 +22,27 @@ import edu.wpi.first.math.system.plant.DCMotor; import edu.wpi.first.wpilibj.RobotController; +/** + * This class attempts to simulate the dynamics of a swerve drive. 
In simulationPeriodic, users should first set + * inputs from motors with {@link #setDriveInputs(double...)} and {@link #setSteerInputs(double...)}, + * call {@link #update(double)} to update the simulation, and then set swerve module's encoder values and + * the drivetrain's gyro values with the results from this class. + * + *
<p>
In this class, distances are expressed with meters, angles with radians, and inputs with voltages. + * + *
<p>
Teams can use {@link edu.wpi.first.wpilibj.smartdashboard.Field2d} to visualize + * their robot on the Sim GUI's field. + */ public class SwerveDriveSim { - private final SimpleMotorFeedforward driveFF; private final LinearSystem drivePlant; - private final DCMotor driveGearbox; + private final double driveKs; + private final DCMotor driveMotor; private final double driveGearing; private final double driveWheelRadius; - private final SimpleMotorFeedforward steerFF; private final LinearSystem steerPlant; - private final DCMotor steerGearbox; + private final double steerKs; + private final DCMotor steerMotor; private final double steerGearing; private final SwerveDriveKinematics kinematics; @@ -49,36 +60,80 @@ public class SwerveDriveSim { private double omegaRadsPerSec = 0; /** + * Creates a swerve drive simulation. * * @param driveFF The feedforward for the drive motors of this swerve drive. This should be in units of meters. - * @param driveGearbox - * @param driveGearing - * @param driveWheelRadius + * @param driveMotor The DCMotor model for the drive motor(s) of this swerve drive's modules. + * This should not have any gearing applied. + * @param driveGearing The gear ratio of the drive system. Positive values indicate a reduction where one + * rotation of the drive wheel equals driveGearing rotations of the drive motor. + * @param driveWheelRadius The radius of the module's driving wheel. * @param steerFF The feedforward for the steer motors of this swerve drive. This should be in units of radians. - * @param steerGearbox - * @param steerGearing - * @param kinematics + * @param steerMotor The DCMotor model for the steer motor(s) of this swerve drive's modules. + * This should not have any gearing applied. + * @param steerGearing The gear ratio of the steer system. Positive values indicate a reduction where one + * rotation of the module heading/azimuth equals steerGearing rotations of the steer motor. + * @param kinematics The kinematics for this swerve drive. All swerve module information used in + * this class should match the order of the modules this kinematics object was constructed with. */ public SwerveDriveSim( - SimpleMotorFeedforward driveFF, DCMotor driveGearbox, double driveGearing, double driveWheelRadius, - SimpleMotorFeedforward steerFF, DCMotor steerGearbox, double steerGearing, + SimpleMotorFeedforward driveFF, DCMotor driveMotor, double driveGearing, double driveWheelRadius, + SimpleMotorFeedforward steerFF, DCMotor steerMotor, double steerGearing, SwerveDriveKinematics kinematics) { - this.driveFF = driveFF; - drivePlant = new LinearSystem( - Matrix.mat(Nat.N2(), Nat.N2()).fill(0.0, 1.0, 0.0, -driveFF.kv / driveFF.ka), - VecBuilder.fill(0.0, 1.0 / driveFF.ka), - Matrix.mat(Nat.N2(), Nat.N2()).fill(1.0, 0.0, 0.0, 1.0), - VecBuilder.fill(0.0, 0.0)); - this.driveGearbox = driveGearbox; + this( + new LinearSystem( + Matrix.mat(Nat.N2(), Nat.N2()).fill(0.0, 1.0, 0.0, -driveFF.kv / driveFF.ka), + VecBuilder.fill(0.0, 1.0 / driveFF.ka), + Matrix.mat(Nat.N2(), Nat.N2()).fill(1.0, 0.0, 0.0, 1.0), + VecBuilder.fill(0.0, 0.0) + ), + driveFF.ks, driveMotor, driveGearing, driveWheelRadius, + new LinearSystem( + Matrix.mat(Nat.N2(), Nat.N2()).fill(0.0, 1.0, 0.0, -steerFF.kv / steerFF.ka), + VecBuilder.fill(0.0, 1.0 / steerFF.ka), + Matrix.mat(Nat.N2(), Nat.N2()).fill(1.0, 0.0, 0.0, 1.0), + VecBuilder.fill(0.0, 0.0) + ), + steerFF.ks, steerMotor, steerGearing, + kinematics + ); + } + + /** + * Creates a swerve drive simulation. 
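+     * This overload takes the drive and steer plants directly; the feedforward-based constructor above
+     * derives them from kV and kA (A = [[0, 1], [0, -kV/kA]], B = [0, 1/kA]) and delegates here.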
+ * + * @param drivePlant The {@link LinearSystem} representing a swerve module's drive system. + * The state should be in units of meters and input in volts. + * @param driveKs The static gain in volts of the drive system's feedforward, or the minimum voltage + * to cause motion. Set this to 0 to ignore static friction. + * @param driveMotor The DCMotor model for the drive motor(s) of this swerve drive's modules. + * This should not have any gearing applied. + * @param driveGearing The gear ratio of the drive system. Positive values indicate a reduction where one + * rotation of the drive wheel equals driveGearing rotations of the drive motor. + * @param driveWheelRadius The radius of the module's driving wheel. + * @param steerPlant The {@link LinearSystem} representing a swerve module's steer system. + * The state should be in units of radians and input in volts. + * @param steerKs The static gain in volts of the steer system's feedforward, or the minimum voltage + * to cause motion. Set this to 0 to ignore static friction. + * @param steerMotor The DCMotor model for the steer motor(s) of this swerve drive's modules. + * This should not have any gearing applied. + * @param steerGearing The gear ratio of the steer system. Positive values indicate a reduction where one + * rotation of the module heading/azimuth equals steerGearing rotations of the steer motor. + * @param kinematics The kinematics for this swerve drive. All swerve module information used in + * this class should match the order of the modules this kinematics object was constructed with. + */ + public SwerveDriveSim( + LinearSystem drivePlant, double driveKs, DCMotor driveMotor, double driveGearing, double driveWheelRadius, + LinearSystem steerPlant, double steerKs, DCMotor steerMotor, double steerGearing, + SwerveDriveKinematics kinematics) { + this.drivePlant = drivePlant; + this.driveKs = driveKs; + this.driveMotor = driveMotor; this.driveGearing = driveGearing; this.driveWheelRadius = driveWheelRadius; - this.steerFF = steerFF; - steerPlant = new LinearSystem( - Matrix.mat(Nat.N2(), Nat.N2()).fill(0.0, 1.0, 0.0, -driveFF.kv / driveFF.ka), - VecBuilder.fill(0.0, 1.0 / driveFF.ka), - Matrix.mat(Nat.N2(), Nat.N2()).fill(1.0, 0.0, 0.0, 1.0), - VecBuilder.fill(0.0, 0.0)); - this.steerGearbox = steerGearbox; + this.steerPlant = steerPlant; + this.steerKs = steerKs; + this.steerMotor = steerMotor; this.steerGearing = steerGearing; this.kinematics = kinematics; @@ -88,8 +143,8 @@ public SwerveDriveSim( steerInputs = new double[numModules]; steerStates = new ArrayList<>(numModules); for(int i = 0; i < numModules; i++) { - driveStates.set(i, VecBuilder.fill(0, 0)); - steerStates.set(i, VecBuilder.fill(0, 0)); + driveStates.add(VecBuilder.fill(0, 0)); + steerStates.add(VecBuilder.fill(0, 0)); } } @@ -170,9 +225,9 @@ public void update(double dtSeconds) { var moduleDeltas = new SwerveModulePosition[numModules]; for(int i = 0; i < numModules; i++) { double prevDriveStatePos = driveStates.get(i).get(0, 0); - driveStates.set(i, calculateX(driveDiscAB.getFirst(), driveDiscAB.getSecond(), driveStates.get(i), driveInputs[i], driveFF.ks)); + driveStates.set(i, calculateX(driveDiscAB.getFirst(), driveDiscAB.getSecond(), driveStates.get(i), driveInputs[i], driveKs)); double currDriveStatePos = driveStates.get(i).get(0, 0); - steerStates.set(i, calculateX(steerDiscAB.getFirst(), steerDiscAB.getSecond(), steerStates.get(i), steerInputs[i], steerFF.ks)); + steerStates.set(i, calculateX(steerDiscAB.getFirst(), steerDiscAB.getSecond(), 
steerStates.get(i), steerInputs[i], steerKs)); double currSteerStatePos = steerStates.get(i).get(0, 0); moduleDeltas[i] = new SwerveModulePosition(currDriveStatePos - prevDriveStatePos, new Rotation2d(currSteerStatePos)); } @@ -275,8 +330,32 @@ public SwerveModuleState[] getModuleStates() { } /** - * Get the angular velocity of the robot, which can be useful for gyro simulation. This should be called - * after {@link #update(double)}. + * Get the state of each module's drive system in [meters, meters/sec]. The returned list order matches the kinematics module order. + * This should be called after {@link #update(double)}. + */ + public List> getDriveStates() { + List> states = new ArrayList<>(); + for(int i = 0; i < driveStates.size(); i++) { + states.add(driveStates.get(i).copy()); + } + return states; + } + + /** + * Get the state of each module's steer system in [radians, radians/sec]. The returned list order matches the kinematics module order. + * This should be called after {@link #update(double)}. + */ + public List> getSteerStates() { + List> states = new ArrayList<>(); + for(int i = 0; i < steerStates.size(); i++) { + states.add(steerStates.get(i).copy()); + } + return states; + } + + /** + * Get the angular velocity of the robot, which can be useful for gyro simulation. CCW positive. + * This should be called after {@link #update(double)}. */ public double getOmegaRadsPerSec() { return omegaRadsPerSec; @@ -290,7 +369,7 @@ public double getOmegaRadsPerSec() { * @param inputVolts The voltage commanded by the motor controller (battery voltage * duty cycle). * @param battVolts The voltage of the battery. */ - private static double getCurrentDraw(DCMotor motor, double radiansPerSec, double inputVolts, double battVolts) { + protected static double getCurrentDraw(DCMotor motor, double radiansPerSec, double inputVolts, double battVolts) { double effVolts = inputVolts - radiansPerSec / motor.KvRadPerSecPerVolt; // ignore regeneration if(inputVolts >= 0) effVolts = MathUtil.clamp(effVolts, 0, inputVolts); @@ -307,7 +386,7 @@ public double[] getDriveCurrentDraw() { double[] currents = new double[numModules]; for(int i = 0; i < numModules; i++) { double radiansPerSec = driveStates.get(i).get(1, 0) * driveGearing / driveWheelRadius; - currents[i] = getCurrentDraw(driveGearbox, radiansPerSec, driveInputs[i], RobotController.getBatteryVoltage()); + currents[i] = getCurrentDraw(driveMotor, radiansPerSec, driveInputs[i], RobotController.getBatteryVoltage()); } return currents; } @@ -320,7 +399,7 @@ public double[] getSteerCurrentDraw() { double[] currents = new double[numModules]; for(int i = 0; i < numModules; i++) { double radiansPerSec = steerStates.get(i).get(1, 0) * steerGearing; - currents[i] = getCurrentDraw(steerGearbox, radiansPerSec, steerInputs[i], RobotController.getBatteryVoltage()); + currents[i] = getCurrentDraw(steerMotor, radiansPerSec, steerInputs[i], RobotController.getBatteryVoltage()); } return currents; } diff --git a/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/subsystems/drivetrain/SwerveModule.java b/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/subsystems/drivetrain/SwerveModule.java new file mode 100644 index 0000000000..31345af184 --- /dev/null +++ b/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/subsystems/drivetrain/SwerveModule.java @@ -0,0 +1,163 @@ +package frc.robot.subsystems.drivetrain; + +import static frc.robot.Constants.Swerve.*; + +import edu.wpi.first.math.MathUtil; +import 
edu.wpi.first.math.controller.PIDController; +import edu.wpi.first.math.geometry.Rotation2d; +import edu.wpi.first.math.kinematics.SwerveModulePosition; +import edu.wpi.first.math.kinematics.SwerveModuleState; +import edu.wpi.first.math.util.Units; +import edu.wpi.first.wpilibj.Encoder; +import edu.wpi.first.wpilibj.RobotController; +import edu.wpi.first.wpilibj.motorcontrol.PWMSparkMax; +import edu.wpi.first.wpilibj.simulation.EncoderSim; +import edu.wpi.first.wpilibj.smartdashboard.SmartDashboard; + +public class SwerveModule { + + //--- Module Constants + private final ModuleConstants moduleConstants; + + //--- Hardware + private final PWMSparkMax driveMotor; + private final Encoder driveEncoder; + private final PWMSparkMax steerMotor; + private final Encoder steerEncoder; + + //--- Control + private SwerveModuleState desiredState = new SwerveModuleState(); + private boolean openLoop = false; + + // Simple PID feedback controllers run on the roborio + private PIDController drivePidController = new PIDController(kDriveKP, kDriveKI, kDriveKD); + // (A profiled steering PID controller may give better results by utilizing feedforward.) + private PIDController steerPidController = new PIDController(kSteerKP, kSteerKI, kSteerKD); + + //--- Simulation + private final EncoderSim driveEncoderSim; + private double driveCurrentSim = 0; + private final EncoderSim steerEncoderSim; + private double steerCurrentSim = 0; + + public SwerveModule(ModuleConstants moduleConstants){ + this.moduleConstants = moduleConstants; + + driveMotor = new PWMSparkMax(moduleConstants.driveMotorID); + driveEncoder = new Encoder(moduleConstants.driveEncoderA, moduleConstants.driveEncoderB); + driveEncoder.setDistancePerPulse(kDriveDistPerPulse); + steerMotor = new PWMSparkMax(moduleConstants.steerMotorID); + steerEncoder = new Encoder(moduleConstants.steerEncoderA, moduleConstants.steerEncoderB); + steerEncoder.setDistancePerPulse(kSteerRadPerPulse); + + steerPidController.enableContinuousInput(-Math.PI, Math.PI); + + //--- Simulation + driveEncoderSim = new EncoderSim(driveEncoder); + steerEncoderSim = new EncoderSim(steerEncoder); + } + + public void periodic(){ + // Perform PID feedback control to steer the module to the target angle + double steerPid = steerPidController.calculate(getAbsoluteHeading().getRadians(), desiredState.angle.getRadians()); + steerMotor.setVoltage(steerPid); + + // Use feedforward model to translate target drive velocities into voltages + double driveFF = kDriveFF.calculate(desiredState.speedMetersPerSecond); + double drivePid = 0; + if(!openLoop){ + // Perform PID feedback control to compensate for disturbances + drivePid = drivePidController.calculate(driveEncoder.getRate(), desiredState.speedMetersPerSecond); + } + + driveMotor.setVoltage(driveFF + drivePid); + } + + /** + * Command this swerve module to the desired angle and velocity. + * @param steerInPlace If modules should steer to target angle when target velocity is 0 + */ + public void setDesiredState(SwerveModuleState desiredState, boolean openLoop, boolean steerInPlace){ + Rotation2d currentRotation = getAbsoluteHeading(); + // Avoid turning more than 90 degrees by inverting speed on large angle changes + desiredState = SwerveModuleState.optimize(desiredState, currentRotation); + + this.desiredState = desiredState; + } + + /** Module heading reported by steering encoder. 
*/ + public Rotation2d getAbsoluteHeading(){ + return Rotation2d.fromRadians(steerEncoder.getDistance()); + } + + /** + * {@link SwerveModuleState} describing absolute module rotation and velocity in meters per second. + */ + public SwerveModuleState getState(){ + return new SwerveModuleState( + driveEncoder.getRate(), + getAbsoluteHeading() + ); + } + + /** + * {@link SwerveModulePosition} describing absolute module rotation and position in meters. + */ + public SwerveModulePosition getPosition() { + return new SwerveModulePosition( + driveEncoder.getDistance(), + getAbsoluteHeading() + ); + } + + /** Voltage of the drive motor */ + public double getDriveVoltage() { + return driveMotor.get() * RobotController.getBatteryVoltage(); + } + + /** Voltage of the steer motor */ + public double getSteerVoltage() { + return steerMotor.get() * RobotController.getBatteryVoltage(); + } + + /** + * Constants about this module, like motor IDs, encoder angle offset, and translation from center. + */ + public ModuleConstants getModuleConstants(){ + return moduleConstants; + } + + public void log(){ + var state = getState(); + + String table = "Module "+moduleConstants.moduleNum+"/"; + SmartDashboard.putNumber(table+"Steer Degrees", Math.toDegrees(MathUtil.angleModulus(state.angle.getRadians()))); + SmartDashboard.putNumber(table+"Steer Target Degrees", Math.toDegrees(steerPidController.getSetpoint())); + SmartDashboard.putNumber(table+"Drive Velocity Feet", Units.metersToFeet(state.speedMetersPerSecond)); + SmartDashboard.putNumber(table+"Drive Velocity Target Feet", Units.metersToFeet(desiredState.speedMetersPerSecond)); + SmartDashboard.putNumber(table+"Drive Current", driveCurrentSim); + SmartDashboard.putNumber(table+"Steer Current", steerCurrentSim); + } + + + //----- Simulation + + public void simulationUpdate( + double driveEncoderDist, double driveEncoderRate, double driveCurrent, + double steerEncoderDist, double steerEncoderRate, double steerCurrent) { + driveEncoderSim.setDistance(driveEncoderDist); + driveEncoderSim.setRate(driveEncoderRate); + this.driveCurrentSim = driveCurrent; + steerEncoderSim.setDistance(steerEncoderDist); + steerEncoderSim.setRate(steerEncoderRate); + this.steerCurrentSim = steerCurrent; + } + + public double getDriveCurrentSim() { + return driveCurrentSim; + } + + public double getSteerCurrentSim() { + return steerCurrentSim; + } +} \ No newline at end of file diff --git a/photonlib-java-examples/swervedriveposeestsim/swerve_module.png b/photonlib-java-examples/swervedriveposeestsim/swerve_module.png new file mode 100644 index 0000000000000000000000000000000000000000..25990c8399b37db4290551cf5b746d1b63108f8a GIT binary patch literal 2954 zcmd6p{Xf&|AIGoHHn!aB)S}Y1bA&p%>4uTmMoM);s1TuY8W_zn6QXs42#)=`~;1rCWD8q*!PxsLKU0@H?Y5@EYiwelObj0E2NW4!F4B+0)|)KJ|06ZmNpI-z#n;xRvR zqWzDSmskxwJ)p-%`D}!74j)?#?IN6*=WGAo(oW;-g>hgGD|^P3 zs`SBIPOW2J6=xTWgHR%1XqvVQoWI+xq`d!<_vG95z4yK0&!Obq&hwj`1wXdjKP3B z4_0z8tohEnou*{?3ncZ~n5U@%sY^CBw$U_0!A_CtQvC4xkm|Y(z#*~RDH!kM3Jj9U zUC*G_X#$U=QRhDU>_lL`;BK>RkoXZ|EV?_d!Dah_z@1xz1e09A#ig*m~F$k)s4&IKFaLNj?W7@?27S zY_L~=lc-1cwub#zG!~tdR_yUS-b(GM`C>yJx+WGpKkgl}!U`)<(u6VbUdpGU5SI}d zEN6Dt!OROMI@H;slBFp}!h@#q(>DgsKt5c00NY_;3pH# ztRoEcetiBj9$uqJ{Yn?-qLM$$BD7wNq&J6Hf*5hQXV1&R-jtdsaB-Q%P3e9Ux7#ER z*~yfoVH|dkbu^dhQ0p;9_;zcb_iD`p>UjlcF(&Rv*@;s}8`bm6;3mRW^YS5N#t@Xh z;+xZ+hEwMQeVl2=nGOd&{cEJM0zSd{2 z!fr)@Kkd^nqndSQCpLsfBRTW*rOXcso*y7LQOmB<(Ka{K>Gc3jsV>}!rftXq9u&8s zx?9aJbf_;dJB)+pZWwV&)tV>NC!Ulmo3r56iz&7H4No^{JUxeqJ;y@^O$7hws+o0& 
zIRT<7q~q*A5uV1u`~f4*fPB-HhB9UIU%`bfdoa;iMB3Z?UDv{(5D6}EYzS3206h`X z=j}3!`1RCldqD-^`?o?PPMLh)l6i%)c^_mh#t|yW&_PT2R+I_oLq9`OuxHzYV`oteF zLT(ZguEArdUS&XAw!kSj(KIm((J8Tw6J$cHy_){XF>*ocG*TdMwteSjIClUBqU%)wpU`vr!O~w#n91&0s1$A@ zwe?%JqUr$GkWdMIuieZ8CSj9G4=$LpFqvLp?7dhQR}r6?Ci3Htaj$sX7sE7q&d(@< z$q_Wzt_K)rVEe?(HPfNN>7^xKjdn1&cX4~j5Z?+$`^P}y4jk!}gjf^`=@ zN$6vchGDX|3hj|yrf`O$MFz}bRqmE#SZMF!AgD%COJ#5q1b&7SOCFC&!}WnD(xyeR zm(n<+)~p3m`;PYuVGKVN>a*!!X=@`eQ6wglYo)#?kv?*FZ{jA@xl56V6Vyq41ArEs zfsOBzR!=q!@vA>SeXrM!pDGxw<8H<7zPQXZ6}2^iPk7ikjDbKpNUQOH=7;@0stLsP0al&%!f2eK#O@>I~TnpB|zuxrH2~b-Ua-w5waCdfgm6>|dTtNRT zOX`s9p7dWCnF<(&*2LWll}fvhrE1tk zPq1TlAYK}pM+Vj#z&3OK5T*YGr18W(`93AA2gp!s_>&wp85?d9R|uks;O?irjR;SJH1_@5>TTBIR$WMA8qT@o@oWa)RY$qJlj*Cgy7jqC?^xAOEJh_I?y@k6 zU>e^$K-hC0AaFovcueFg04N)fRO8B{+rh9XC_m;K`*p~X=1?B;YEL?bC|v70;{QQ3 z_jDbs)Zhyc-oo3lpYw8{` z!8E)g>rUNR<|@MdEvKB@Cq%eAa$0-aO@ymob!8qRU*0LI*H`3vLUjSMgYdPgHeUPD z+S$Qa^$HIW?v$Ku=rk=2<%LLa7{A^D$^h78oyFtO*(t7NkzQwB8}O9tHCm3e)e`Cy9P- zbPV1maqh&HEO>oO z(7|)lxPIu%us(P^PwKp?h|yO4fzRg=g4o9wPa&EiGGgg&VtveZC>%!0R!^mDfx;C@ z*+!`pZ73Xyr|rp~eV`Aj%6`o+?oQe#d*e0Gf-Gh#a6k{M(pEBFp%3)TR8j=0V)BZA zk{5OwtuQPpYzn1Vdv9BgLs9>pP~%4YYkUOks8mmht=g*bSjpO8Sz!(J_YkB_h6=Cn o+ow(@pSLe?B8BF7pj(6~s>?;ov@y`RA$JBR&MHa@if05Kai!2kdN literal 0 HcmV?d00001 diff --git a/photonlib-java-examples/swervedriveposeestsim/tag-blue.png b/photonlib-java-examples/swervedriveposeestsim/tag-blue.png new file mode 100644 index 0000000000000000000000000000000000000000..04b9e4f7b7241c94a87249c24fb09c2577b2cf44 GIT binary patch literal 4711 zcmeHL`BPI@6uy8KL`Weqf;O_r;;<=N1zgHf6rvI=C?--RfQSo_M3w-8rcOr_vEWE? z83-NQ0%HxENFvITK^Bq4b{w#p7?~hg1VIc1WRaemK>H80{V6|W&U^2E_nhzC`_6aX zjbIPW-`H@aAwtNQ8sHaxE-pV>SB!(F;Vy4iC{BwiC+C(?cnn+J4 z=~|}*Iqum`u{c(=U9f^de>ycFnNIy>ugiMoaC09SnGruQoMENu zA`=@WLTCLb$omps4;c`Fr;iN*Sp_uephW@?#IrKhM<%vfkah{QZ-KTDv|8~0%c0wZ zmm)UYQXJGh7@ww_-{02TJC;?Qdgiqx<3`cZ>9Kcz$}G=V#>(qYO{c0RC*Ms?d+CeL zQ7I^KhR6<+J+L0(_1!!-dyXoaC<%~x%bK5$jZ*Cs1|x|2$k?*z^;f5I zst3L;gceZl9V%*bP$K4y@uHKF@!vpuaL&ILW)HbLRhT6_SrQE`BD8sL{0bNmT_T_O zfv?9SlM)WXV|KVJ?j2s&A0_cUP>)%t$QYh_JiL&k05JReoARyDR(d2N`fGo{J+|-f z{}QZ&EE9sCAaRg%T}ZXe6l^BFLU9h7Qm1eG!K>IFU)x_qa`geUS%f2(0dDV_J9W2V zY?Gi`h39q+HOUhj)&SA3g*WAGIFh}H9=-$SxJNf_#Xb9I0-u{j*^pxO?YB-)<$| zOld<$22kL+*%0W0>gIgDUNhf=8Qq^8-YJCc9dk{V;hE#Xyhx>#M<&pL0rMhX@9r^R zi!UYzmQVD_1$rpuMwtBS27zAlwMHFEVhQ<{+7It1*V@P`+G2Ejb^2n>|zmaC#*!d)S zjuT4gB_TTK$ndWhx@-yNQAB+<>Hb2*yxYL$KmAI%HqB*uy$l@Dp{e>tIcfUR+}PeY zO<6gqHv#Rhv}A3uA(*dLqS8nbn>#%!?xj(X)F`f)6|`7>g-Ti1Y)j~|A(YTQg*{WJ z|M)w}D$VSegxI*LWq7ILUvVcCS#Fv|H`(c%YPS-dzBl$}%tRB@T`(VA`sm<^d4Du^$0=5e z2F%N??ppv$cecAMM0Z+uzG@&u&7`9{f9|)4_StOrGK;#?D&hSft7$LC)o)Q=*#!5r z%Ps<|y>M6Zpj2@DzXlg8oV=sG?shBf2ud!APp=-`5jBUh;o zi$u*tWgM=&%>Cg)3-~1EH63`Q8FKEt+^ZS7J(i-@1)HJ3mqj_zQ10H(S3<(L9*>o1 zeCKa$k!3-}t{*RAJ!wtDVaL1cHgEI8Rc)szum{%+BC7=tgksDRJe|2*L<_>y@<^;= zEF&FsVU}7>7{WRzDO9?nMwpGQZ;cR3k?V2R)FvUDfF;P_){{2SOesSk88iX=j4v5f z%hy|=NpcN@$PYv`7S7Ir#FBul@^bi+R1B@enULbCFyzk=cIJTR0$WA@F1VkINZNb` z+)uW?xr6ml=>n=MeI$^mMYzg6BagNQqLi->EpD>{rM7F7-G)r)I-}yPFl~7(CRn}< zUp{=3_yN@G^BK==*+kYL$9>!UyWJwMsaMJB_CSA0Pn(>2n?2CGRy-k58 zhlpkmzZW0+cmV4{$5pCa`C=}mG9&%>nvcuk2m3M!ph+q_C6g?Ig4G@(v`e5pN@@?v k+Jo}Hd{91lsCH^;si4Yp)^TGd{5gcE+i8BKzRa_K0jU-?_5c6? 
literal 0 HcmV?d00001 diff --git a/photonlib-java-examples/swervedriveposeestsim/tag-green.png b/photonlib-java-examples/swervedriveposeestsim/tag-green.png new file mode 100644 index 0000000000000000000000000000000000000000..63029fcf205d2a1b52eb450aa1e26fbdfd310586 GIT binary patch literal 4698 zcmeHLYg3a~5I*5jKx+ge)e0&gii(5a6%bGmxikcdU<|@&07sk_2r+;_t4$FlTolv+ zLJ^(TkqjkV6#{Ze#3)q>+DXCEM4%CpMhuogNfFWR2{8QwZ9nBhX5Vx6d3T@Pb9T>p zlk<77|Dpve79fNc1qS%;N63JQkm1LsAHj@=-}q$^1lE3kA5_z0t%iY7oHxZAq56V_ z@1l)iY?c%d!9s}GiGKvk^`Eg3vUnBf>m8nUSR?biL^`;P9jzprnN`{yT`j)3bji&n zq+iLl$5s<7?d+31ioFEuqqoH*`|qAf-5pZ&g1Y~Lf+`R3G$R_q-!ONX&=hZlhZum`}dj;CRSR;k<)Syudg#^ zMg|S3ckPMD{j_OwwMHcwjV7fD%et%czNsd}1}T(}>=Q3GecN=tQT_aSkg^X$R}6$a zzCO8*B{d_jRUrDed|F9}m>}}eP)5XZ12`6Ft}cQgI47tvo0S!$hj29T^DE_BIH0k0 z?DK*%!*QMRHcSt7MBy`|V{6f%C;HG8Z{OLLcVn3zc7j zv3Vqx$^m3me_o3^D-?(}yN9)?NZ^S=tM|g55+CNKp&jj7fBlSOuC*eV2GAzy$5qjT+iMYgD&@99f?WkOReAidxIABHq-y7ttAA-E!fn zbglYLiJC;NJAXbQEoimdb%}Q5xpMsbzsfC`qU9DQn*v;vzShouV#31k?~b;TOpx=2 zXC+&64DWMcYNyt+ieq@6<4RwvT*0yab6tK4qIGzJ?ZZp&Ur>Sb4YexS98@^ao|;zMBbpWans#s)jyrR{7tah0eJ zE|%^>HAk%&WnI}JwZb-YU93slr^lNh$vBZ4cMwhrRx@^u!R@L0N$T54nkbQ0vS=A4lJO@&t4C_q4k_yyE z`*B4!o$FQG;z~XArOviZoCL28V&}K3WyuYPLB(xv)FlE6JV(%ltF%}wV`4=S#&qJ> zxyDseDNq|yie+ltSGH1l&#_FQ@*eB@iUmW3%~qK+RL~&VdB2r)P#mtDff$@RU%b}! zBC0~;j^Ej#&}4x8Gal2-BIDNLTqV^-_dqPu;4WydNLYm-_Bu!;Zit~&-UQsb+5S0# zDLfA02dkh3qrr&sRA9EJ%@|&!xsCS$h<*cS&}=O*r`~^&{3)2?XQf4c0)5BPW}q0h zjwn+@8LS```@o|?M7h&7jv@QpZk{Ssk>CkvqxduNR>!4X*EoVVko70yzYA?02Kq)5 zGYqaMC#tg5D_Ij`WrLGvTR*%#Hk~F+pV=Vqd$epN^cE#qd}kw#XQ2OB(0?z%_SAne l>QkWK1^;Rx47a6?FD6iOngjQS4#WRJC~#k}Z;cN<^KVFxHsJsO literal 0 HcmV?d00001 From d0578f5068fd626ca223d368022deca6f1a4f59c Mon Sep 17 00:00:00 2001 From: amquake Date: Mon, 18 Sep 2023 20:02:57 -0700 Subject: [PATCH 28/46] Update build.gradle --- .../swervedriveposeestsim/build.gradle | 17 ++++------------- 1 file changed, 4 insertions(+), 13 deletions(-) diff --git a/photonlib-java-examples/swervedriveposeestsim/build.gradle b/photonlib-java-examples/swervedriveposeestsim/build.gradle index 5ad363b78a..7610d2a2ac 100644 --- a/photonlib-java-examples/swervedriveposeestsim/build.gradle +++ b/photonlib-java-examples/swervedriveposeestsim/build.gradle @@ -6,11 +6,9 @@ plugins { sourceCompatibility = JavaVersion.VERSION_11 targetCompatibility = JavaVersion.VERSION_11 -def ROBOT_MAIN_CLASS = "frc.robot.Main" +apply from: "${rootDir}/../shared/examples_common.gradle" -repositories { - mavenLocal() -} +def ROBOT_MAIN_CLASS = "frc.robot.Main" // Define my targets (RoboRIO) and artifacts (deployable files) // This is added by GradleRIO's backing project DeployUtils. @@ -21,7 +19,7 @@ deploy { // or from command line. If not found an exception will be thrown. // You can use getTeamOrDefault(team) instead of getTeamNumber if you // want to store a team number in this file. - team = project.frc.getTeamNumber() + team = project.frc.getTeamOrDefault(4512) debug = project.frc.getDebugOrDefault(false) artifacts { @@ -69,14 +67,7 @@ dependencies { nativeRelease wpi.java.vendor.jniRelease(wpi.platforms.desktop) simulationRelease wpi.sim.enableRelease() - testImplementation 'org.junit.jupiter:junit-jupiter-api:5.8.2' - testImplementation 'org.junit.jupiter:junit-jupiter-params:5.8.2' - testRuntimeOnly 'org.junit.jupiter:junit-jupiter-engine:5.8.2' -} - -test { - useJUnitPlatform() - systemProperty 'junit.jupiter.extensions.autodetection.enabled', 'true' + testImplementation 'junit:junit:4.13.1' } // Simulation configuration (e.g. environment variables). 
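Editor's note: the current-draw model in SwerveDriveSim.getCurrentDraw(...) from the patches above can be
sanity-checked in isolation. The sketch below is an illustration only and is not part of any patch in this
series; the class name and the numeric inputs (a single Falcon 500, a 6 V command, a 12.5 V battery, and a
motor speed of 300 rad/s) are made-up assumptions, while the DCMotor and MathUtil APIs are the same WPILib
classes already used in the diffs above.

    import edu.wpi.first.math.MathUtil;
    import edu.wpi.first.math.system.plant.DCMotor;

    class CurrentDrawSketch {
        public static void main(String[] args) {
            DCMotor motor = DCMotor.getFalcon500(1);
            double inputVolts = 6.0;      // commanded voltage (battery voltage * duty cycle)
            double battVolts = 12.5;      // assumed battery voltage
            double radiansPerSec = 300.0; // assumed motor speed
            // Effective voltage across the windings after subtracting back-EMF, clamped to
            // [0, inputVolts] so regeneration is ignored (the non-negative-input branch above)
            double effVolts = MathUtil.clamp(inputVolts - radiansPerSec / motor.KvRadPerSecPerVolt, 0, inputVolts);
            // Winding current scaled by duty cycle to approximate the battery-side current
            double battCurrentAmps = (inputVolts / battVolts) * (effVolts / motor.rOhms);
            System.out.println(battCurrentAmps + " A"); // roughly 6 A for these inputs
        }
    }

This is the same arithmetic that ends up in BatterySim.calculateDefaultBatteryLoadedVoltage through the
drivetrain's total current draw in the example's Robot.simulationPeriodic.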
From 675bacaac944ab673c18b3c38a55f13e7ef5893f Mon Sep 17 00:00:00 2001 From: amquake Date: Mon, 18 Sep 2023 20:03:09 -0700 Subject: [PATCH 29/46] dont use java17 random --- .../swervedriveposeestsim/src/main/java/frc/robot/Robot.java | 2 +- .../java/frc/robot/subsystems/drivetrain/SwerveDriveSim.java | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Robot.java b/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Robot.java index b98e0fa7b6..93be201a02 100644 --- a/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Robot.java +++ b/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Robot.java @@ -57,7 +57,7 @@ public void robotPeriodic() { // Apply a random offset to pose estimator to test vision correction if(controller.getBButtonPressed()) { - var trf = new Transform2d(new Translation2d(rand.nextDouble(4)-2, rand.nextDouble(4)-2), new Rotation2d(rand.nextDouble(2*Math.PI))); + var trf = new Transform2d(new Translation2d(rand.nextDouble() * 4 - 2, rand.nextDouble() * 4 - 2), new Rotation2d(rand.nextDouble() * 2 * Math.PI)); drivetrain.resetPose(drivetrain.getPose().plus(trf), false); } diff --git a/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/subsystems/drivetrain/SwerveDriveSim.java b/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/subsystems/drivetrain/SwerveDriveSim.java index efc30e683c..607b4a04fb 100644 --- a/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/subsystems/drivetrain/SwerveDriveSim.java +++ b/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/subsystems/drivetrain/SwerveDriveSim.java @@ -310,8 +310,8 @@ public SwerveModulePosition[] getNoisyModulePositions(double driveStdDev, double var positions = new SwerveModulePosition[numModules]; for(int i = 0; i < numModules; i++) { positions[i] = new SwerveModulePosition( - rand.nextGaussian(driveStates.get(i).get(0, 0), driveStdDev), - new Rotation2d(rand.nextGaussian(steerStates.get(i).get(0, 0), steerStdDev)) + driveStates.get(i).get(0, 0) + rand.nextGaussian() * driveStdDev, + new Rotation2d(steerStates.get(i).get(0, 0) + rand.nextGaussian() * steerStdDev) ); } return positions; From a3a3cbf70a304401dfbf5c246b33d9c89566dadc Mon Sep 17 00:00:00 2001 From: amquake Date: Mon, 18 Sep 2023 20:13:52 -0700 Subject: [PATCH 30/46] remove apriltagExample and simposeest --- .../apriltagExample/.gitignore | 162 ------------ .../.wpilib/wpilib_preferences.json | 6 - .../apriltagExample/WPILib-License.md | 24 -- .../apriltagExample/build.gradle | 94 ------- .../gradle/wrapper/gradle-wrapper.jar | Bin 60756 -> 0 bytes .../gradle/wrapper/gradle-wrapper.properties | 5 - .../apriltagExample/gradlew | 240 ------------------ .../apriltagExample/gradlew.bat | 91 ------- .../apriltagExample/settings.gradle | 27 -- .../apriltagExample/simgui-ds.json | 104 -------- .../apriltagExample/simgui.json | 63 ----- .../src/main/deploy/example.txt | 3 - .../src/main/java/frc/robot/Constants.java | 57 ----- .../src/main/java/frc/robot/Drivetrain.java | 219 ---------------- .../src/main/java/frc/robot/Main.java | 45 ---- .../java/frc/robot/PhotonCameraWrapper.java | 76 ------ .../src/main/java/frc/robot/Robot.java | 101 -------- photonlib-java-examples/simposeest/.gitignore | 162 ------------ .../.wpilib/wpilib_preferences.json | 6 - .../simposeest/WPILib-License.md | 24 -- .../simposeest/build.gradle | 94 ------- 
.../simposeest/settings.gradle | 29 --- .../simposeest/simgui-ds.json | 102 -------- .../simposeest/simgui.json | 57 ----- .../simposeest/src/main/deploy/example.txt | 3 - .../main/java/frc/robot/AutoController.java | 110 -------- .../src/main/java/frc/robot/Constants.java | 110 -------- .../src/main/java/frc/robot/Drivetrain.java | 139 ---------- .../frc/robot/DrivetrainPoseEstimator.java | 109 -------- .../main/java/frc/robot/DrivetrainSim.java | 172 ------------- .../src/main/java/frc/robot/Main.java | 45 ---- .../java/frc/robot/OperatorInterface.java | 57 ----- .../main/java/frc/robot/PoseTelemetry.java | 61 ----- .../src/main/java/frc/robot/Robot.java | 90 ------- 34 files changed, 2687 deletions(-) delete mode 100644 photonlib-java-examples/apriltagExample/.gitignore delete mode 100644 photonlib-java-examples/apriltagExample/.wpilib/wpilib_preferences.json delete mode 100644 photonlib-java-examples/apriltagExample/WPILib-License.md delete mode 100644 photonlib-java-examples/apriltagExample/build.gradle delete mode 100644 photonlib-java-examples/apriltagExample/gradle/wrapper/gradle-wrapper.jar delete mode 100644 photonlib-java-examples/apriltagExample/gradle/wrapper/gradle-wrapper.properties delete mode 100644 photonlib-java-examples/apriltagExample/gradlew delete mode 100644 photonlib-java-examples/apriltagExample/gradlew.bat delete mode 100644 photonlib-java-examples/apriltagExample/settings.gradle delete mode 100644 photonlib-java-examples/apriltagExample/simgui-ds.json delete mode 100644 photonlib-java-examples/apriltagExample/simgui.json delete mode 100644 photonlib-java-examples/apriltagExample/src/main/deploy/example.txt delete mode 100644 photonlib-java-examples/apriltagExample/src/main/java/frc/robot/Constants.java delete mode 100644 photonlib-java-examples/apriltagExample/src/main/java/frc/robot/Drivetrain.java delete mode 100644 photonlib-java-examples/apriltagExample/src/main/java/frc/robot/Main.java delete mode 100644 photonlib-java-examples/apriltagExample/src/main/java/frc/robot/PhotonCameraWrapper.java delete mode 100644 photonlib-java-examples/apriltagExample/src/main/java/frc/robot/Robot.java delete mode 100644 photonlib-java-examples/simposeest/.gitignore delete mode 100644 photonlib-java-examples/simposeest/.wpilib/wpilib_preferences.json delete mode 100644 photonlib-java-examples/simposeest/WPILib-License.md delete mode 100644 photonlib-java-examples/simposeest/build.gradle delete mode 100644 photonlib-java-examples/simposeest/settings.gradle delete mode 100644 photonlib-java-examples/simposeest/simgui-ds.json delete mode 100644 photonlib-java-examples/simposeest/simgui.json delete mode 100644 photonlib-java-examples/simposeest/src/main/deploy/example.txt delete mode 100644 photonlib-java-examples/simposeest/src/main/java/frc/robot/AutoController.java delete mode 100644 photonlib-java-examples/simposeest/src/main/java/frc/robot/Constants.java delete mode 100644 photonlib-java-examples/simposeest/src/main/java/frc/robot/Drivetrain.java delete mode 100644 photonlib-java-examples/simposeest/src/main/java/frc/robot/DrivetrainPoseEstimator.java delete mode 100644 photonlib-java-examples/simposeest/src/main/java/frc/robot/DrivetrainSim.java delete mode 100644 photonlib-java-examples/simposeest/src/main/java/frc/robot/Main.java delete mode 100644 photonlib-java-examples/simposeest/src/main/java/frc/robot/OperatorInterface.java delete mode 100644 photonlib-java-examples/simposeest/src/main/java/frc/robot/PoseTelemetry.java delete mode 100644 
photonlib-java-examples/simposeest/src/main/java/frc/robot/Robot.java diff --git a/photonlib-java-examples/apriltagExample/.gitignore b/photonlib-java-examples/apriltagExample/.gitignore deleted file mode 100644 index 9535c83037..0000000000 --- a/photonlib-java-examples/apriltagExample/.gitignore +++ /dev/null @@ -1,162 +0,0 @@ -# This gitignore has been specially created by the WPILib team. -# If you remove items from this file, intellisense might break. - -### C++ ### -# Prerequisites -*.d - -# Compiled Object files -*.slo -*.lo -*.o -*.obj - -# Precompiled Headers -*.gch -*.pch - -# Compiled Dynamic libraries -*.so -*.dylib -*.dll - -# Fortran module files -*.mod -*.smod - -# Compiled Static libraries -*.lai -*.la -*.a -*.lib - -# Executables -*.exe -*.out -*.app - -### Java ### -# Compiled class file -*.class - -# Log file -*.log - -# BlueJ files -*.ctxt - -# Mobile Tools for Java (J2ME) -.mtj.tmp/ - -# Package Files # -*.jar -*.war -*.nar -*.ear -*.zip -*.tar.gz -*.rar - -# virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml -hs_err_pid* - -### Linux ### -*~ - -# temporary files which can be created if a process still has a handle open of a deleted file -.fuse_hidden* - -# KDE directory preferences -.directory - -# Linux trash folder which might appear on any partition or disk -.Trash-* - -# .nfs files are created when an open file is removed but is still being accessed -.nfs* - -### macOS ### -# General -.DS_Store -.AppleDouble -.LSOverride - -# Icon must end with two \r -Icon - -# Thumbnails -._* - -# Files that might appear in the root of a volume -.DocumentRevisions-V100 -.fseventsd -.Spotlight-V100 -.TemporaryItems -.Trashes -.VolumeIcon.icns -.com.apple.timemachine.donotpresent - -# Directories potentially created on remote AFP share -.AppleDB -.AppleDesktop -Network Trash Folder -Temporary Items -.apdisk - -### VisualStudioCode ### -.vscode/* -!.vscode/settings.json -!.vscode/tasks.json -!.vscode/launch.json -!.vscode/extensions.json - -### Windows ### -# Windows thumbnail cache files -Thumbs.db -ehthumbs.db -ehthumbs_vista.db - -# Dump file -*.stackdump - -# Folder config file -[Dd]esktop.ini - -# Recycle Bin used on file shares -$RECYCLE.BIN/ - -# Windows Installer files -*.cab -*.msi -*.msix -*.msm -*.msp - -# Windows shortcuts -*.lnk - -### Gradle ### -.gradle -/build/ - -# Ignore Gradle GUI config -gradle-app.setting - -# Avoid ignoring Gradle wrapper jar file (.jar files are usually ignored) -!gradle-wrapper.jar - -# Cache of project -.gradletasknamecache - -# # Work around https://youtrack.jetbrains.com/issue/IDEA-116898 -# gradle/wrapper/gradle-wrapper.properties - -# # VS Code Specific Java Settings -# DO NOT REMOVE .classpath and .project -.classpath -.project -.settings/ -bin/ - -# Simulation GUI and other tools window save file -*-window.json diff --git a/photonlib-java-examples/apriltagExample/.wpilib/wpilib_preferences.json b/photonlib-java-examples/apriltagExample/.wpilib/wpilib_preferences.json deleted file mode 100644 index 66deed1a92..0000000000 --- a/photonlib-java-examples/apriltagExample/.wpilib/wpilib_preferences.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "enableCppIntellisense": false, - "currentLanguage": "java", - "projectYear": "2023Beta", - "teamNumber": 1736 -} diff --git a/photonlib-java-examples/apriltagExample/WPILib-License.md b/photonlib-java-examples/apriltagExample/WPILib-License.md deleted file mode 100644 index 3d5a824cad..0000000000 --- a/photonlib-java-examples/apriltagExample/WPILib-License.md +++ /dev/null 
@@ -1,24 +0,0 @@ -Copyright (c) 2009-2021 FIRST and other WPILib contributors -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - * Neither the name of FIRST, WPILib, nor the names of other WPILib - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY FIRST AND OTHER WPILIB CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY NONINFRINGEMENT AND FITNESS FOR A PARTICULAR -PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL FIRST OR CONTRIBUTORS BE LIABLE FOR -ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/photonlib-java-examples/apriltagExample/build.gradle b/photonlib-java-examples/apriltagExample/build.gradle deleted file mode 100644 index fa1a30798c..0000000000 --- a/photonlib-java-examples/apriltagExample/build.gradle +++ /dev/null @@ -1,94 +0,0 @@ -plugins { - id "java" - id "edu.wpi.first.GradleRIO" version "2023.4.2" -} - -sourceCompatibility = JavaVersion.VERSION_11 -targetCompatibility = JavaVersion.VERSION_11 - -apply from: "${rootDir}/../shared/examples_common.gradle" - -def ROBOT_MAIN_CLASS = "frc.robot.Main" - -// Define my targets (RoboRIO) and artifacts (deployable files) -// This is added by GradleRIO's backing project DeployUtils. -deploy { - targets { - roborio(getTargetTypeClass('RoboRIO')) { - // Team number is loaded either from the .wpilib/wpilib_preferences.json - // or from command line. If not found an exception will be thrown. - // You can use getTeamOrDefault(team) instead of getTeamNumber if you - // want to store a team number in this file. - team = project.frc.getTeamOrDefault(5940) - debug = project.frc.getDebugOrDefault(false) - - artifacts { - // First part is artifact name, 2nd is artifact type - // getTargetTypeClass is a shortcut to get the class type using a string - - frcJava(getArtifactTypeClass('FRCJavaArtifact')) { - } - - // Static files artifact - frcStaticFileDeploy(getArtifactTypeClass('FileTreeArtifact')) { - files = project.fileTree('src/main/deploy') - directory = '/home/lvuser/deploy' - } - } - } - } -} - -def deployArtifact = deploy.targets.roborio.artifacts.frcJava - -// Set to true to use debug for JNI. -wpi.java.debugJni = false - -// Set this to true to enable desktop support. -def includeDesktopSupport = true - -// Defining my dependencies. In this case, WPILib (+ friends), and vendor libraries. -// Also defines JUnit 4. 
-dependencies { - implementation wpi.java.deps.wpilib() - implementation wpi.java.vendor.java() - - roborioDebug wpi.java.deps.wpilibJniDebug(wpi.platforms.roborio) - roborioDebug wpi.java.vendor.jniDebug(wpi.platforms.roborio) - - roborioRelease wpi.java.deps.wpilibJniRelease(wpi.platforms.roborio) - roborioRelease wpi.java.vendor.jniRelease(wpi.platforms.roborio) - - nativeDebug wpi.java.deps.wpilibJniDebug(wpi.platforms.desktop) - nativeDebug wpi.java.vendor.jniDebug(wpi.platforms.desktop) - simulationDebug wpi.sim.enableDebug() - - nativeRelease wpi.java.deps.wpilibJniRelease(wpi.platforms.desktop) - nativeRelease wpi.java.vendor.jniRelease(wpi.platforms.desktop) - simulationRelease wpi.sim.enableRelease() - - testImplementation 'junit:junit:4.13.1' -} - -// Simulation configuration (e.g. environment variables). -wpi.sim.addGui().defaultEnabled = true -wpi.sim.addDriverstation() - -// Setting up my Jar File. In this case, adding all libraries into the main jar ('fat jar') -// in order to make them all available at runtime. Also adding the manifest so WPILib -// knows where to look for our Robot Class. -jar { - from { configurations.runtimeClasspath.collect { it.isDirectory() ? it : zipTree(it) } } - manifest edu.wpi.first.gradlerio.GradleRIOPlugin.javaManifest(ROBOT_MAIN_CLASS) - duplicatesStrategy = DuplicatesStrategy.INCLUDE -} - -// Configure jar and deploy tasks -deployArtifact.jarTask = jar -wpi.java.configureExecutableTasks(jar) -wpi.java.configureTestTasks(test) - -// Configure string concat to always inline compile -tasks.withType(JavaCompile) { - options.compilerArgs.add '-XDstringConcat=inline' -} diff --git a/photonlib-java-examples/apriltagExample/gradle/wrapper/gradle-wrapper.jar b/photonlib-java-examples/apriltagExample/gradle/wrapper/gradle-wrapper.jar deleted file mode 100644 index 249e5832f090a2944b7473328c07c9755baa3196..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 60756 zcmb5WV{~QRw(p$^Dz@00IL3?^hro$gg*4VI_WAaTyVM5Foj~O|-84 z$;06hMwt*rV;^8iB z1~&0XWpYJmG?Ts^K9PC62H*`G}xom%S%yq|xvG~FIfP=9*f zZoDRJBm*Y0aId=qJ?7dyb)6)JGWGwe)MHeNSzhi)Ko6J<-m@v=a%NsP537lHe0R* z`If4$aaBA#S=w!2z&m>{lpTy^Lm^mg*3?M&7HFv}7K6x*cukLIGX;bQG|QWdn{%_6 zHnwBKr84#B7Z+AnBXa16a?or^R?+>$4`}{*a_>IhbjvyTtWkHw)|ay)ahWUd-qq$~ zMbh6roVsj;_qnC-R{G+Cy6bApVOinSU-;(DxUEl!i2)1EeQ9`hrfqj(nKI7?Z>Xur zoJz-a`PxkYit1HEbv|jy%~DO^13J-ut986EEG=66S}D3!L}Efp;Bez~7tNq{QsUMm zh9~(HYg1pA*=37C0}n4g&bFbQ+?-h-W}onYeE{q;cIy%eZK9wZjSwGvT+&Cgv z?~{9p(;bY_1+k|wkt_|N!@J~aoY@|U_RGoWX<;p{Nu*D*&_phw`8jYkMNpRTWx1H* z>J-Mi_!`M468#5Aix$$u1M@rJEIOc?k^QBc?T(#=n&*5eS#u*Y)?L8Ha$9wRWdH^3D4|Ps)Y?m0q~SiKiSfEkJ!=^`lJ(%W3o|CZ zSrZL-Xxc{OrmsQD&s~zPfNJOpSZUl%V8tdG%ei}lQkM+z@-4etFPR>GOH9+Y_F<3=~SXln9Kb-o~f>2a6Xz@AS3cn^;c_>lUwlK(n>z?A>NbC z`Ud8^aQy>wy=$)w;JZzA)_*Y$Z5hU=KAG&htLw1Uh00yE!|Nu{EZkch zY9O6x7Y??>!7pUNME*d!=R#s)ghr|R#41l!c?~=3CS8&zr6*aA7n9*)*PWBV2w+&I zpW1-9fr3j{VTcls1>ua}F*bbju_Xq%^v;-W~paSqlf zolj*dt`BBjHI)H9{zrkBo=B%>8}4jeBO~kWqO!~Thi!I1H(in=n^fS%nuL=X2+s!p}HfTU#NBGiwEBF^^tKU zbhhv+0dE-sbK$>J#t-J!B$TMgN@Wh5wTtK2BG}4BGfsZOoRUS#G8Cxv|6EI*n&Xxq zt{&OxCC+BNqz$9b0WM7_PyBJEVObHFh%%`~!@MNZlo*oXDCwDcFwT~Rls!aApL<)^ zbBftGKKBRhB!{?fX@l2_y~%ygNFfF(XJzHh#?`WlSL{1lKT*gJM zs>bd^H9NCxqxn(IOky5k-wALFowQr(gw%|`0991u#9jXQh?4l|l>pd6a&rx|v=fPJ z1mutj{YzpJ_gsClbWFk(G}bSlFi-6@mwoQh-XeD*j@~huW4(8ub%^I|azA)h2t#yG z7e_V_<4jlM3D(I+qX}yEtqj)cpzN*oCdYHa!nm%0t^wHm)EmFP*|FMw!tb@&`G-u~ zK)=Sf6z+BiTAI}}i{*_Ac$ffr*Wrv$F7_0gJkjx;@)XjYSh`RjAgrCck`x!zP>Ifu z&%he4P|S)H*(9oB4uvH67^0}I-_ye_!w)u3v2+EY>eD3#8QR24<;7?*hj8k~rS)~7 
zSXs5ww)T(0eHSp$hEIBnW|Iun<_i`}VE0Nc$|-R}wlSIs5pV{g_Dar(Zz<4X3`W?K z6&CAIl4U(Qk-tTcK{|zYF6QG5ArrEB!;5s?tW7 zrE3hcFY&k)+)e{+YOJ0X2uDE_hd2{|m_dC}kgEKqiE9Q^A-+>2UonB+L@v3$9?AYw zVQv?X*pK;X4Ovc6Ev5Gbg{{Eu*7{N3#0@9oMI~}KnObQE#Y{&3mM4`w%wN+xrKYgD zB-ay0Q}m{QI;iY`s1Z^NqIkjrTlf`B)B#MajZ#9u41oRBC1oM1vq0i|F59> z#StM@bHt|#`2)cpl_rWB($DNJ3Lap}QM-+A$3pe}NyP(@+i1>o^fe-oxX#Bt`mcQc zb?pD4W%#ep|3%CHAYnr*^M6Czg>~L4?l16H1OozM{P*en298b+`i4$|w$|4AHbzqB zHpYUsHZET$Z0ztC;U+0*+amF!@PI%^oUIZy{`L{%O^i{Xk}X0&nl)n~tVEpcAJSJ} zverw15zP1P-O8h9nd!&hj$zuwjg?DoxYIw{jWM zW5_pj+wFy8Tsa9g<7Qa21WaV&;ejoYflRKcz?#fSH_)@*QVlN2l4(QNk| z4aPnv&mrS&0|6NHq05XQw$J^RR9T{3SOcMKCXIR1iSf+xJ0E_Wv?jEc*I#ZPzyJN2 zUG0UOXHl+PikM*&g$U@g+KbG-RY>uaIl&DEtw_Q=FYq?etc!;hEC_}UX{eyh%dw2V zTTSlap&5>PY{6I#(6`j-9`D&I#|YPP8a;(sOzgeKDWsLa!i-$frD>zr-oid!Hf&yS z!i^cr&7tN}OOGmX2)`8k?Tn!!4=tz~3hCTq_9CdiV!NIblUDxHh(FJ$zs)B2(t5@u z-`^RA1ShrLCkg0)OhfoM;4Z{&oZmAec$qV@ zGQ(7(!CBk<5;Ar%DLJ0p0!ResC#U<+3i<|vib1?{5gCebG7$F7URKZXuX-2WgF>YJ^i zMhHDBsh9PDU8dlZ$yJKtc6JA#y!y$57%sE>4Nt+wF1lfNIWyA`=hF=9Gj%sRwi@vd z%2eVV3y&dvAgyuJ=eNJR+*080dbO_t@BFJO<@&#yqTK&+xc|FRR;p;KVk@J3$S{p` zGaMj6isho#%m)?pOG^G0mzOAw0z?!AEMsv=0T>WWcE>??WS=fII$t$(^PDPMU(P>o z_*0s^W#|x)%tx8jIgZY~A2yG;US0m2ZOQt6yJqW@XNY_>_R7(Nxb8Ged6BdYW6{prd!|zuX$@Q2o6Ona8zzYC1u!+2!Y$Jc9a;wy+pXt}o6~Bu1oF1c zp7Y|SBTNi@=I(K%A60PMjM#sfH$y*c{xUgeSpi#HB`?|`!Tb&-qJ3;vxS!TIzuTZs-&%#bAkAyw9m4PJgvey zM5?up*b}eDEY+#@tKec)-c(#QF0P?MRlD1+7%Yk*jW;)`f;0a-ZJ6CQA?E%>i2Dt7T9?s|9ZF|KP4;CNWvaVKZ+Qeut;Jith_y{v*Ny6Co6!8MZx;Wgo z=qAi%&S;8J{iyD&>3CLCQdTX*$+Rx1AwA*D_J^0>suTgBMBb=*hefV+Ars#mmr+YsI3#!F@Xc1t4F-gB@6aoyT+5O(qMz*zG<9Qq*f0w^V!03rpr*-WLH}; zfM{xSPJeu6D(%8HU%0GEa%waFHE$G?FH^kMS-&I3)ycx|iv{T6Wx}9$$D&6{%1N_8 z_CLw)_9+O4&u94##vI9b-HHm_95m)fa??q07`DniVjAy`t7;)4NpeyAY(aAk(+T_O z1om+b5K2g_B&b2DCTK<>SE$Ode1DopAi)xaJjU>**AJK3hZrnhEQ9E`2=|HHe<^tv z63e(bn#fMWuz>4erc47}!J>U58%<&N<6AOAewyzNTqi7hJc|X{782&cM zHZYclNbBwU6673=!ClmxMfkC$(CykGR@10F!zN1Se83LR&a~$Ht&>~43OX22mt7tcZUpa;9@q}KDX3O&Ugp6< zLZLfIMO5;pTee1vNyVC$FGxzK2f>0Z-6hM82zKg44nWo|n}$Zk6&;5ry3`(JFEX$q zK&KivAe${e^5ZGc3a9hOt|!UOE&OocpVryE$Y4sPcs4rJ>>Kbi2_subQ9($2VN(3o zb~tEzMsHaBmBtaHAyES+d3A(qURgiskSSwUc9CfJ@99&MKp2sooSYZu+-0t0+L*!I zYagjOlPgx|lep9tiU%ts&McF6b0VE57%E0Ho%2oi?=Ks+5%aj#au^OBwNwhec zta6QAeQI^V!dF1C)>RHAmB`HnxyqWx?td@4sd15zPd*Fc9hpDXP23kbBenBxGeD$k z;%0VBQEJ-C)&dTAw_yW@k0u?IUk*NrkJ)(XEeI z9Y>6Vel>#s_v@=@0<{4A{pl=9cQ&Iah0iD0H`q)7NeCIRz8zx;! z^OO;1+IqoQNak&pV`qKW+K0^Hqp!~gSohcyS)?^P`JNZXw@gc6{A3OLZ?@1Uc^I2v z+X!^R*HCm3{7JPq{8*Tn>5;B|X7n4QQ0Bs79uTU%nbqOJh`nX(BVj!#f;#J+WZxx4 z_yM&1Y`2XzhfqkIMO7tB3raJKQS+H5F%o83bM+hxbQ zeeJm=Dvix$2j|b4?mDacb67v-1^lTp${z=jc1=j~QD>7c*@+1?py>%Kj%Ejp7Y-!? 
z8iYRUlGVrQPandAaxFfks53@2EC#0)%mrnmGRn&>=$H$S8q|kE_iWko4`^vCS2aWg z#!`RHUGyOt*k?bBYu3*j3u0gB#v(3tsije zgIuNNWNtrOkx@Pzs;A9un+2LX!zw+p3_NX^Sh09HZAf>m8l@O*rXy_82aWT$Q>iyy zqO7Of)D=wcSn!0+467&!Hl))eff=$aneB?R!YykdKW@k^_uR!+Q1tR)+IJb`-6=jj zymzA>Sv4>Z&g&WWu#|~GcP7qP&m*w-S$)7Xr;(duqCTe7p8H3k5>Y-n8438+%^9~K z3r^LIT_K{i7DgEJjIocw_6d0!<;wKT`X;&vv+&msmhAAnIe!OTdybPctzcEzBy88_ zWO{6i4YT%e4^WQZB)KHCvA(0tS zHu_Bg+6Ko%a9~$EjRB90`P(2~6uI@SFibxct{H#o&y40MdiXblu@VFXbhz>Nko;7R z70Ntmm-FePqhb%9gL+7U8@(ch|JfH5Fm)5${8|`Lef>LttM_iww6LW2X61ldBmG0z zax3y)njFe>j*T{i0s8D4=L>X^j0)({R5lMGVS#7(2C9@AxL&C-lZQx~czI7Iv+{%1 z2hEG>RzX4S8x3v#9sgGAnPzptM)g&LB}@%E>fy0vGSa(&q0ch|=ncKjNrK z`jA~jObJhrJ^ri|-)J^HUyeZXz~XkBp$VhcTEcTdc#a2EUOGVX?@mYx#Vy*!qO$Jv zQ4rgOJ~M*o-_Wptam=~krnmG*p^j!JAqoQ%+YsDFW7Cc9M%YPiBOrVcD^RY>m9Pd< zu}#9M?K{+;UIO!D9qOpq9yxUquQRmQNMo0pT`@$pVt=rMvyX)ph(-CCJLvUJy71DI zBk7oc7)-%ngdj~s@76Yse3L^gV0 z2==qfp&Q~L(+%RHP0n}+xH#k(hPRx(!AdBM$JCfJ5*C=K3ts>P?@@SZ_+{U2qFZb>4kZ{Go37{# zSQc+-dq*a-Vy4?taS&{Ht|MLRiS)Sn14JOONyXqPNnpq&2y~)6wEG0oNy>qvod$FF z`9o&?&6uZjhZ4_*5qWVrEfu(>_n2Xi2{@Gz9MZ8!YmjYvIMasE9yVQL10NBrTCczq zcTY1q^PF2l!Eraguf{+PtHV3=2A?Cu&NN&a8V(y;q(^_mFc6)%Yfn&X&~Pq zU1?qCj^LF(EQB1F`8NxNjyV%fde}dEa(Hx=r7$~ts2dzDwyi6ByBAIx$NllB4%K=O z$AHz1<2bTUb>(MCVPpK(E9wlLElo(aSd(Os)^Raum`d(g9Vd_+Bf&V;l=@mM=cC>) z)9b0enb)u_7V!!E_bl>u5nf&Rl|2r=2F3rHMdb7y9E}}F82^$Rf+P8%dKnOeKh1vs zhH^P*4Ydr^$)$h@4KVzxrHyy#cKmWEa9P5DJ|- zG;!Qi35Tp7XNj60=$!S6U#!(${6hyh7d4q=pF{`0t|N^|L^d8pD{O9@tF~W;#Je*P z&ah%W!KOIN;SyAEhAeTafJ4uEL`(RtnovM+cb(O#>xQnk?dzAjG^~4$dFn^<@-Na3 z395;wBnS{t*H;Jef2eE!2}u5Ns{AHj>WYZDgQJt8v%x?9{MXqJsGP|l%OiZqQ1aB! z%E=*Ig`(!tHh>}4_z5IMpg{49UvD*Pp9!pxt_gdAW%sIf3k6CTycOT1McPl=_#0?8 zVjz8Hj*Vy9c5-krd-{BQ{6Xy|P$6LJvMuX$* zA+@I_66_ET5l2&gk9n4$1M3LN8(yEViRx&mtd#LD}AqEs?RW=xKC(OCWH;~>(X6h!uDxXIPH06xh z*`F4cVlbDP`A)-fzf>MuScYsmq&1LUMGaQ3bRm6i7OsJ|%uhTDT zlvZA1M}nz*SalJWNT|`dBm1$xlaA>CCiQ zK`xD-RuEn>-`Z?M{1%@wewf#8?F|(@1e0+T4>nmlSRrNK5f)BJ2H*$q(H>zGD0>eL zQ!tl_Wk)k*e6v^m*{~A;@6+JGeWU-q9>?+L_#UNT%G?4&BnOgvm9@o7l?ov~XL+et zbGT)|G7)KAeqb=wHSPk+J1bdg7N3$vp(ekjI1D9V$G5Cj!=R2w=3*4!z*J-r-cyeb zd(i2KmX!|Lhey!snRw z?#$Gu%S^SQEKt&kep)up#j&9}e+3=JJBS(s>MH+|=R(`8xK{mmndWo_r`-w1#SeRD&YtAJ#GiVI*TkQZ}&aq<+bU2+coU3!jCI6E+Ad_xFW*ghnZ$q zAoF*i&3n1j#?B8x;kjSJD${1jdRB;)R*)Ao!9bd|C7{;iqDo|T&>KSh6*hCD!rwv= zyK#F@2+cv3=|S1Kef(E6Niv8kyLVLX&e=U;{0x{$tDfShqkjUME>f8d(5nzSkY6@! 
z^-0>DM)wa&%m#UF1F?zR`8Y3X#tA!*7Q$P3lZJ%*KNlrk_uaPkxw~ zxZ1qlE;Zo;nb@!SMazSjM>;34ROOoygo%SF);LL>rRonWwR>bmSd1XD^~sGSu$Gg# zFZ`|yKU0%!v07dz^v(tY%;So(e`o{ZYTX`hm;@b0%8|H>VW`*cr8R%3n|ehw2`(9B+V72`>SY}9^8oh$En80mZK9T4abVG*to;E z1_S6bgDOW?!Oy1LwYy=w3q~KKdbNtyH#d24PFjX)KYMY93{3-mPP-H>@M-_>N~DDu zENh~reh?JBAK=TFN-SfDfT^=+{w4ea2KNWXq2Y<;?(gf(FgVp8Zp-oEjKzB%2Iqj;48GmY3h=bcdYJ}~&4tS`Q1sb=^emaW$IC$|R+r-8V- zf0$gGE(CS_n4s>oicVk)MfvVg#I>iDvf~Ov8bk}sSxluG!6#^Z_zhB&U^`eIi1@j( z^CK$z^stBHtaDDHxn+R;3u+>Lil^}fj?7eaGB z&5nl^STqcaBxI@v>%zG|j))G(rVa4aY=B@^2{TFkW~YP!8!9TG#(-nOf^^X-%m9{Z zCC?iC`G-^RcBSCuk=Z`(FaUUe?hf3{0C>>$?Vs z`2Uud9M+T&KB6o4o9kvdi^Q=Bw!asPdxbe#W-Oaa#_NP(qpyF@bVxv5D5))srkU#m zj_KA+#7sqDn*Ipf!F5Byco4HOSd!Ui$l94|IbW%Ny(s1>f4|Mv^#NfB31N~kya9!k zWCGL-$0ZQztBate^fd>R!hXY_N9ZjYp3V~4_V z#eB)Kjr8yW=+oG)BuNdZG?jaZlw+l_ma8aET(s+-x+=F-t#Qoiuu1i`^x8Sj>b^U} zs^z<()YMFP7CmjUC@M=&lA5W7t&cxTlzJAts*%PBDAPuqcV5o7HEnqjif_7xGt)F% zGx2b4w{@!tE)$p=l3&?Bf#`+!-RLOleeRk3 z7#pF|w@6_sBmn1nECqdunmG^}pr5(ZJQVvAt$6p3H(16~;vO>?sTE`Y+mq5YP&PBo zvq!7#W$Gewy`;%6o^!Dtjz~x)T}Bdk*BS#=EY=ODD&B=V6TD2z^hj1m5^d6s)D*wk zu$z~D7QuZ2b?5`p)E8e2_L38v3WE{V`bVk;6fl#o2`) z99JsWhh?$oVRn@$S#)uK&8DL8>An0&S<%V8hnGD7Z^;Y(%6;^9!7kDQ5bjR_V+~wp zfx4m3z6CWmmZ<8gDGUyg3>t8wgJ5NkkiEm^(sedCicP^&3D%}6LtIUq>mXCAt{9eF zNXL$kGcoUTf_Lhm`t;hD-SE)m=iBnxRU(NyL}f6~1uH)`K!hmYZjLI%H}AmEF5RZt z06$wn63GHnApHXZZJ}s^s)j9(BM6e*7IBK6Bq(!)d~zR#rbxK9NVIlgquoMq z=eGZ9NR!SEqP6=9UQg#@!rtbbSBUM#ynF);zKX+|!Zm}*{H z+j=d?aZ2!?@EL7C~%B?6ouCKLnO$uWn;Y6Xz zX8dSwj732u(o*U3F$F=7xwxm>E-B+SVZH;O-4XPuPkLSt_?S0)lb7EEg)Mglk0#eS z9@jl(OnH4juMxY+*r03VDfPx_IM!Lmc(5hOI;`?d37f>jPP$?9jQQIQU@i4vuG6MagEoJrQ=RD7xt@8E;c zeGV*+Pt+t$@pt!|McETOE$9k=_C!70uhwRS9X#b%ZK z%q(TIUXSS^F0`4Cx?Rk07C6wI4!UVPeI~-fxY6`YH$kABdOuiRtl73MqG|~AzZ@iL&^s?24iS;RK_pdlWkhcF z@Wv-Om(Aealfg)D^adlXh9Nvf~Uf@y;g3Y)i(YP zEXDnb1V}1pJT5ZWyw=1i+0fni9yINurD=EqH^ciOwLUGi)C%Da)tyt=zq2P7pV5-G zR7!oq28-Fgn5pW|nlu^b!S1Z#r7!Wtr{5J5PQ>pd+2P7RSD?>(U7-|Y z7ZQ5lhYIl_IF<9?T9^IPK<(Hp;l5bl5tF9>X-zG14_7PfsA>6<$~A338iYRT{a@r_ zuXBaT=`T5x3=s&3=RYx6NgG>No4?5KFBVjE(swfcivcIpPQFx5l+O;fiGsOrl5teR z_Cm+;PW}O0Dwe_(4Z@XZ)O0W-v2X><&L*<~*q3dg;bQW3g7)a#3KiQP>+qj|qo*Hk z?57>f2?f@`=Fj^nkDKeRkN2d$Z@2eNKpHo}ksj-$`QKb6n?*$^*%Fb3_Kbf1(*W9K>{L$mud2WHJ=j0^=g30Xhg8$#g^?36`p1fm;;1@0Lrx+8t`?vN0ZorM zSW?rhjCE8$C|@p^sXdx z|NOHHg+fL;HIlqyLp~SSdIF`TnSHehNCU9t89yr@)FY<~hu+X`tjg(aSVae$wDG*C zq$nY(Y494R)hD!i1|IIyP*&PD_c2FPgeY)&mX1qujB1VHPG9`yFQpLFVQ0>EKS@Bp zAfP5`C(sWGLI?AC{XEjLKR4FVNw(4+9b?kba95ukgR1H?w<8F7)G+6&(zUhIE5Ef% z=fFkL3QKA~M@h{nzjRq!Y_t!%U66#L8!(2-GgFxkD1=JRRqk=n%G(yHKn%^&$dW>; zSjAcjETMz1%205se$iH_)ZCpfg_LwvnsZQAUCS#^FExp8O4CrJb6>JquNV@qPq~3A zZ<6dOU#6|8+fcgiA#~MDmcpIEaUO02L5#T$HV0$EMD94HT_eXLZ2Zi&(! z&5E>%&|FZ`)CN10tM%tLSPD*~r#--K(H-CZqIOb99_;m|D5wdgJ<1iOJz@h2Zkq?} z%8_KXb&hf=2Wza(Wgc;3v3TN*;HTU*q2?#z&tLn_U0Nt!y>Oo>+2T)He6%XuP;fgn z-G!#h$Y2`9>Jtf}hbVrm6D70|ERzLAU>3zoWhJmjWfgM^))T+2u$~5>HF9jQDkrXR z=IzX36)V75PrFjkQ%TO+iqKGCQ-DDXbaE;C#}!-CoWQx&v*vHfyI>$HNRbpvm<`O( zlx9NBWD6_e&J%Ous4yp~s6)Ghni!I6)0W;9(9$y1wWu`$gs<$9Mcf$L*piP zPR0Av*2%ul`W;?-1_-5Zy0~}?`e@Y5A&0H!^ApyVTT}BiOm4GeFo$_oPlDEyeGBbh z1h3q&Dx~GmUS|3@4V36&$2uO8!Yp&^pD7J5&TN{?xphf*-js1fP?B|`>p_K>lh{ij zP(?H%e}AIP?_i^f&Li=FDSQ`2_NWxL+BB=nQr=$ zHojMlXNGauvvwPU>ZLq!`bX-5F4jBJ&So{kE5+ms9UEYD{66!|k~3vsP+mE}x!>%P za98bAU0!h0&ka4EoiDvBM#CP#dRNdXJcb*(%=<(g+M@<)DZ!@v1V>;54En?igcHR2 zhubQMq}VSOK)onqHfczM7YA@s=9*ow;k;8)&?J3@0JiGcP! 
zP#00KZ1t)GyZeRJ=f0^gc+58lc4Qh*S7RqPIC6GugG1gXe$LIQMRCo8cHf^qXgAa2 z`}t>u2Cq1CbSEpLr~E=c7~=Qkc9-vLE%(v9N*&HF`(d~(0`iukl5aQ9u4rUvc8%m) zr2GwZN4!s;{SB87lJB;veebPmqE}tSpT>+`t?<457Q9iV$th%i__Z1kOMAswFldD6 ztbOvO337S5o#ZZgN2G99_AVqPv!?Gmt3pzgD+Hp3QPQ`9qJ(g=kjvD+fUSS3upJn! zqoG7acIKEFRX~S}3|{EWT$kdz#zrDlJU(rPkxjws_iyLKU8+v|*oS_W*-guAb&Pj1 z35Z`3z<&Jb@2Mwz=KXucNYdY#SNO$tcVFr9KdKm|%^e-TXzs6M`PBper%ajkrIyUe zp$vVxVs9*>Vp4_1NC~Zg)WOCPmOxI1V34QlG4!aSFOH{QqSVq1^1)- z0P!Z?tT&E-ll(pwf0?=F=yOzik=@nh1Clxr9}Vij89z)ePDSCYAqw?lVI?v?+&*zH z)p$CScFI8rrwId~`}9YWPFu0cW1Sf@vRELs&cbntRU6QfPK-SO*mqu|u~}8AJ!Q$z znzu}50O=YbjwKCuSVBs6&CZR#0FTu)3{}qJJYX(>QPr4$RqWiwX3NT~;>cLn*_&1H zaKpIW)JVJ>b{uo2oq>oQt3y=zJjb%fU@wLqM{SyaC6x2snMx-}ivfU<1- znu1Lh;i$3Tf$Kh5Uk))G!D1UhE8pvx&nO~w^fG)BC&L!_hQk%^p`Kp@F{cz>80W&T ziOK=Sq3fdRu*V0=S53rcIfWFazI}Twj63CG(jOB;$*b`*#B9uEnBM`hDk*EwSRdwP8?5T?xGUKs=5N83XsR*)a4|ijz|c{4tIU+4j^A5C<#5 z*$c_d=5ml~%pGxw#?*q9N7aRwPux5EyqHVkdJO=5J>84!X6P>DS8PTTz>7C#FO?k#edkntG+fJk8ZMn?pmJSO@`x-QHq;7^h6GEXLXo1TCNhH z8ZDH{*NLAjo3WM`xeb=X{((uv3H(8&r8fJJg_uSs_%hOH%JDD?hu*2NvWGYD+j)&` zz#_1%O1wF^o5ryt?O0n;`lHbzp0wQ?rcbW(F1+h7_EZZ9{>rePvLAPVZ_R|n@;b$;UchU=0j<6k8G9QuQf@76oiE*4 zXOLQ&n3$NR#p4<5NJMVC*S);5x2)eRbaAM%VxWu9ohlT;pGEk7;002enCbQ>2r-us z3#bpXP9g|mE`65VrN`+3mC)M(eMj~~eOf)do<@l+fMiTR)XO}422*1SL{wyY(%oMpBgJagtiDf zz>O6(m;};>Hi=t8o{DVC@YigqS(Qh+ix3Rwa9aliH}a}IlOCW1@?%h_bRbq-W{KHF z%Vo?-j@{Xi@=~Lz5uZP27==UGE15|g^0gzD|3x)SCEXrx`*MP^FDLl%pOi~~Il;dc z^hrwp9sYeT7iZ)-ajKy@{a`kr0-5*_!XfBpXwEcFGJ;%kV$0Nx;apKrur zJN2J~CAv{Zjj%FolyurtW8RaFmpn&zKJWL>(0;;+q(%(Hx!GMW4AcfP0YJ*Vz!F4g z!ZhMyj$BdXL@MlF%KeInmPCt~9&A!;cRw)W!Hi@0DY(GD_f?jeV{=s=cJ6e}JktJw zQORnxxj3mBxfrH=x{`_^Z1ddDh}L#V7i}$njUFRVwOX?qOTKjfPMBO4y(WiU<)epb zvB9L=%jW#*SL|Nd_G?E*_h1^M-$PG6Pc_&QqF0O-FIOpa4)PAEPsyvB)GKasmBoEt z?_Q2~QCYGH+hW31x-B=@5_AN870vY#KB~3a*&{I=f);3Kv7q4Q7s)0)gVYx2#Iz9g(F2;=+Iy4 z6KI^8GJ6D@%tpS^8boU}zpi=+(5GfIR)35PzrbuXeL1Y1N%JK7PG|^2k3qIqHfX;G zQ}~JZ-UWx|60P5?d1e;AHx!_;#PG%d=^X(AR%i`l0jSpYOpXoKFW~7ip7|xvN;2^? zsYC9fanpO7rO=V7+KXqVc;Q5z%Bj})xHVrgoR04sA2 zl~DAwv=!(()DvH*=lyhIlU^hBkA0$e*7&fJpB0|oB7)rqGK#5##2T`@_I^|O2x4GO z;xh6ROcV<9>?e0)MI(y++$-ksV;G;Xe`lh76T#Htuia+(UrIXrf9?

L(tZ$0BqX1>24?V$S+&kLZ`AodQ4_)P#Q3*4xg8}lMV-FLwC*cN$< zt65Rf%7z41u^i=P*qO8>JqXPrinQFapR7qHAtp~&RZ85$>ob|Js;GS^y;S{XnGiBc zGa4IGvDl?x%gY`vNhv8wgZnP#UYI-w*^4YCZnxkF85@ldepk$&$#3EAhrJY0U)lR{F6sM3SONV^+$;Zx8BD&Eku3K zKNLZyBni3)pGzU0;n(X@1fX8wYGKYMpLmCu{N5-}epPDxClPFK#A@02WM3!myN%bkF z|GJ4GZ}3sL{3{qXemy+#Uk{4>Kf8v11;f8I&c76+B&AQ8udd<8gU7+BeWC`akUU~U zgXoxie>MS@rBoyY8O8Tc&8id!w+_ooxcr!1?#rc$-|SBBtH6S?)1e#P#S?jFZ8u-Bs&k`yLqW|{j+%c#A4AQ>+tj$Y z^CZajspu$F%73E68Lw5q7IVREED9r1Ijsg#@DzH>wKseye>hjsk^{n0g?3+gs@7`i zHx+-!sjLx^fS;fY!ERBU+Q zVJ!e0hJH%P)z!y%1^ZyG0>PN@5W~SV%f>}c?$H8r;Sy-ui>aruVTY=bHe}$e zi&Q4&XK!qT7-XjCrDaufT@>ieQ&4G(SShUob0Q>Gznep9fR783jGuUynAqc6$pYX; z7*O@@JW>O6lKIk0G00xsm|=*UVTQBB`u1f=6wGAj%nHK_;Aqmfa!eAykDmi-@u%6~ z;*c!pS1@V8r@IX9j&rW&d*}wpNs96O2Ute>%yt{yv>k!6zfT6pru{F1M3P z2WN1JDYqoTB#(`kE{H676QOoX`cnqHl1Yaru)>8Ky~VU{)r#{&s86Vz5X)v15ULHA zAZDb{99+s~qI6;-dQ5DBjHJP@GYTwn;Dv&9kE<0R!d z8tf1oq$kO`_sV(NHOSbMwr=To4r^X$`sBW4$gWUov|WY?xccQJN}1DOL|GEaD_!@& z15p?Pj+>7d`@LvNIu9*^hPN)pwcv|akvYYq)ks%`G>!+!pW{-iXPZsRp8 z35LR;DhseQKWYSD`%gO&k$Dj6_6q#vjWA}rZcWtQr=Xn*)kJ9kacA=esi*I<)1>w^ zO_+E>QvjP)qiSZg9M|GNeLtO2D7xT6vsj`88sd!94j^AqxFLi}@w9!Y*?nwWARE0P znuI_7A-saQ+%?MFA$gttMV-NAR^#tjl_e{R$N8t2NbOlX373>e7Ox=l=;y#;M7asp zRCz*CLnrm$esvSb5{T<$6CjY zmZ(i{Rs_<#pWW>(HPaaYj`%YqBra=Ey3R21O7vUbzOkJJO?V`4-D*u4$Me0Bx$K(lYo`JO}gnC zx`V}a7m-hLU9Xvb@K2ymioF)vj12<*^oAqRuG_4u%(ah?+go%$kOpfb`T96P+L$4> zQ#S+sA%VbH&mD1k5Ak7^^dZoC>`1L%i>ZXmooA!%GI)b+$D&ziKrb)a=-ds9xk#~& z7)3iem6I|r5+ZrTRe_W861x8JpD`DDIYZNm{$baw+$)X^Jtjnl0xlBgdnNY}x%5za zkQ8E6T<^$sKBPtL4(1zi_Rd(tVth*3Xs!ulflX+70?gb&jRTnI8l+*Aj9{|d%qLZ+ z>~V9Z;)`8-lds*Zgs~z1?Fg?Po7|FDl(Ce<*c^2=lFQ~ahwh6rqSjtM5+$GT>3WZW zj;u~w9xwAhOc<kF}~`CJ68 z?(S5vNJa;kriPlim33{N5`C{9?NWhzsna_~^|K2k4xz1`xcui*LXL-1#Y}Hi9`Oo!zQ>x-kgAX4LrPz63uZ+?uG*84@PKq-KgQlMNRwz=6Yes) zY}>YN+qP}nwr$(CZQFjUOI=-6J$2^XGvC~EZ+vrqWaOXB$k?%Suf5k=4>AveC1aJ! 
ziaW4IS%F$_Babi)kA8Y&u4F7E%99OPtm=vzw$$ zEz#9rvn`Iot_z-r3MtV>k)YvErZ<^Oa${`2>MYYODSr6?QZu+be-~MBjwPGdMvGd!b!elsdi4% z`37W*8+OGulab8YM?`KjJ8e+jM(tqLKSS@=jimq3)Ea2EB%88L8CaM+aG7;27b?5` z4zuUWBr)f)k2o&xg{iZ$IQkJ+SK>lpq4GEacu~eOW4yNFLU!Kgc{w4&D$4ecm0f}~ zTTzquRW@`f0}|IILl`!1P+;69g^upiPA6F{)U8)muWHzexRenBU$E^9X-uIY2%&1w z_=#5*(nmxJ9zF%styBwivi)?#KMG96-H@hD-H_&EZiRNsfk7mjBq{L%!E;Sqn!mVX*}kXhwH6eh;b42eD!*~upVG@ z#smUqz$ICm!Y8wY53gJeS|Iuard0=;k5i5Z_hSIs6tr)R4n*r*rE`>38Pw&lkv{_r!jNN=;#?WbMj|l>cU(9trCq; z%nN~r^y7!kH^GPOf3R}?dDhO=v^3BeP5hF|%4GNQYBSwz;x({21i4OQY->1G=KFyu z&6d`f2tT9Yl_Z8YACZaJ#v#-(gcyeqXMhYGXb=t>)M@fFa8tHp2x;ODX=Ap@a5I=U z0G80^$N0G4=U(>W%mrrThl0DjyQ-_I>+1Tdd_AuB3qpYAqY54upwa3}owa|x5iQ^1 zEf|iTZxKNGRpI>34EwkIQ2zHDEZ=(J@lRaOH>F|2Z%V_t56Km$PUYu^xA5#5Uj4I4RGqHD56xT%H{+P8Ag>e_3pN$4m8n>i%OyJFPNWaEnJ4McUZPa1QmOh?t8~n& z&RulPCors8wUaqMHECG=IhB(-tU2XvHP6#NrLVyKG%Ee*mQ5Ps%wW?mcnriTVRc4J`2YVM>$ixSF2Xi+Wn(RUZnV?mJ?GRdw%lhZ+t&3s7g!~g{%m&i<6 z5{ib-<==DYG93I(yhyv4jp*y3#*WNuDUf6`vTM%c&hiayf(%=x@4$kJ!W4MtYcE#1 zHM?3xw63;L%x3drtd?jot!8u3qeqctceX3m;tWetK+>~q7Be$h>n6riK(5@ujLgRS zvOym)k+VAtyV^mF)$29Y`nw&ijdg~jYpkx%*^ z8dz`C*g=I?;clyi5|!27e2AuSa$&%UyR(J3W!A=ZgHF9OuKA34I-1U~pyD!KuRkjA zbkN!?MfQOeN>DUPBxoy5IX}@vw`EEB->q!)8fRl_mqUVuRu|C@KD-;yl=yKc=ZT0% zB$fMwcC|HE*0f8+PVlWHi>M`zfsA(NQFET?LrM^pPcw`cK+Mo0%8*x8@65=CS_^$cG{GZQ#xv($7J z??R$P)nPLodI;P!IC3eEYEHh7TV@opr#*)6A-;EU2XuogHvC;;k1aI8asq7ovoP!* z?x%UoPrZjj<&&aWpsbr>J$Er-7!E(BmOyEv!-mbGQGeJm-U2J>74>o5x`1l;)+P&~ z>}f^=Rx(ZQ2bm+YE0u=ZYrAV@apyt=v1wb?R@`i_g64YyAwcOUl=C!i>=Lzb$`tjv zOO-P#A+)t-JbbotGMT}arNhJmmGl-lyUpMn=2UacVZxmiG!s!6H39@~&uVokS zG=5qWhfW-WOI9g4!R$n7!|ViL!|v3G?GN6HR0Pt_L5*>D#FEj5wM1DScz4Jv@Sxnl zB@MPPmdI{(2D?;*wd>3#tjAirmUnQoZrVv`xM3hARuJksF(Q)wd4P$88fGYOT1p6U z`AHSN!`St}}UMBT9o7i|G`r$ zrB=s$qV3d6$W9@?L!pl0lf%)xs%1ko^=QY$ty-57=55PvP(^6E7cc zGJ*>m2=;fOj?F~yBf@K@9qwX0hA803Xw+b0m}+#a(>RyR8}*Y<4b+kpp|OS+!whP( zH`v{%s>jsQI9rd$*vm)EkwOm#W_-rLTHcZRek)>AtF+~<(did)*oR1|&~1|e36d-d zgtm5cv1O0oqgWC%Et@P4Vhm}Ndl(Y#C^MD03g#PH-TFy+7!Osv1z^UWS9@%JhswEq~6kSr2DITo59+; ze=ZC}i2Q?CJ~Iyu?vn|=9iKV>4j8KbxhE4&!@SQ^dVa-gK@YfS9xT(0kpW*EDjYUkoj! 
zE49{7H&E}k%5(>sM4uGY)Q*&3>{aitqdNnRJkbOmD5Mp5rv-hxzOn80QsG=HJ_atI-EaP69cacR)Uvh{G5dTpYG7d zbtmRMq@Sexey)||UpnZ?;g_KMZq4IDCy5}@u!5&B^-=6yyY{}e4Hh3ee!ZWtL*s?G zxG(A!<9o!CL+q?u_utltPMk+hn?N2@?}xU0KlYg?Jco{Yf@|mSGC<(Zj^yHCvhmyx z?OxOYoxbptDK()tsJ42VzXdINAMWL$0Gcw?G(g8TMB)Khw_|v9`_ql#pRd2i*?CZl z7k1b!jQB=9-V@h%;Cnl7EKi;Y^&NhU0mWEcj8B|3L30Ku#-9389Q+(Yet0r$F=+3p z6AKOMAIi|OHyzlHZtOm73}|ntKtFaXF2Fy|M!gOh^L4^62kGUoWS1i{9gsds_GWBc zLw|TaLP64z3z9?=R2|T6Xh2W4_F*$cq>MtXMOy&=IPIJ`;!Tw?PqvI2b*U1)25^<2 zU_ZPoxg_V0tngA0J+mm?3;OYw{i2Zb4x}NedZug!>EoN3DC{1i)Z{Z4m*(y{ov2%- zk(w>+scOO}MN!exSc`TN)!B=NUX`zThWO~M*ohqq;J2hx9h9}|s#?@eR!=F{QTrq~ zTcY|>azkCe$|Q0XFUdpFT=lTcyW##i;-e{}ORB4D?t@SfqGo_cS z->?^rh$<&n9DL!CF+h?LMZRi)qju!meugvxX*&jfD!^1XB3?E?HnwHP8$;uX{Rvp# zh|)hM>XDv$ZGg=$1{+_bA~u-vXqlw6NH=nkpyWE0u}LQjF-3NhATL@9rRxMnpO%f7 z)EhZf{PF|mKIMFxnC?*78(}{Y)}iztV12}_OXffJ;ta!fcFIVjdchyHxH=t%ci`Xd zX2AUB?%?poD6Zv*&BA!6c5S#|xn~DK01#XvjT!w!;&`lDXSJT4_j$}!qSPrb37vc{ z9^NfC%QvPu@vlxaZ;mIbn-VHA6miwi8qJ~V;pTZkKqqOii<1Cs}0i?uUIss;hM4dKq^1O35y?Yp=l4i zf{M!@QHH~rJ&X~8uATV><23zZUbs-J^3}$IvV_ANLS08>k`Td7aU_S1sLsfi*C-m1 z-e#S%UGs4E!;CeBT@9}aaI)qR-6NU@kvS#0r`g&UWg?fC7|b^_HyCE!8}nyh^~o@< zpm7PDFs9yxp+byMS(JWm$NeL?DNrMCNE!I^ko-*csB+dsf4GAq{=6sfyf4wb>?v1v zmb`F*bN1KUx-`ra1+TJ37bXNP%`-Fd`vVQFTwWpX@;s(%nDQa#oWhgk#mYlY*!d>( zE&!|ySF!mIyfING+#%RDY3IBH_fW$}6~1%!G`suHub1kP@&DoAd5~7J55;5_noPI6eLf{t;@9Kf<{aO0`1WNKd?<)C-|?C?)3s z>wEq@8=I$Wc~Mt$o;g++5qR+(6wt9GI~pyrDJ%c?gPZe)owvy^J2S=+M^ z&WhIE`g;;J^xQLVeCtf7b%Dg#Z2gq9hp_%g)-%_`y*zb; zn9`f`mUPN-Ts&fFo(aNTsXPA|J!TJ{0hZp0^;MYHLOcD=r_~~^ymS8KLCSeU3;^QzJNqS z5{5rEAv#l(X?bvwxpU;2%pQftF`YFgrD1jt2^~Mt^~G>T*}A$yZc@(k9orlCGv&|1 zWWvVgiJsCAtamuAYT~nzs?TQFt<1LSEx!@e0~@yd6$b5!Zm(FpBl;(Cn>2vF?k zOm#TTjFwd2D-CyA!mqR^?#Uwm{NBemP>(pHmM}9;;8`c&+_o3#E5m)JzfwN?(f-a4 zyd%xZc^oQx3XT?vcCqCX&Qrk~nu;fxs@JUoyVoi5fqpi&bUhQ2y!Ok2pzsFR(M(|U zw3E+kH_zmTRQ9dUMZWRE%Zakiwc+lgv7Z%|YO9YxAy`y28`Aw;WU6HXBgU7fl@dnt z-fFBV)}H-gqP!1;V@Je$WcbYre|dRdp{xt!7sL3Eoa%IA`5CAA%;Wq8PktwPdULo! 
z8!sB}Qt8#jH9Sh}QiUtEPZ6H0b*7qEKGJ%ITZ|vH)5Q^2m<7o3#Z>AKc%z7_u`rXA zqrCy{-{8;9>dfllLu$^M5L z-hXs))h*qz%~ActwkIA(qOVBZl2v4lwbM>9l70Y`+T*elINFqt#>OaVWoja8RMsep z6Or3f=oBnA3vDbn*+HNZP?8LsH2MY)x%c13@(XfuGR}R?Nu<|07{$+Lc3$Uv^I!MQ z>6qWgd-=aG2Y^24g4{Bw9ueOR)(9h`scImD=86dD+MnSN4$6 z^U*o_mE-6Rk~Dp!ANp#5RE9n*LG(Vg`1)g6!(XtDzsov$Dvz|Gv1WU68J$CkshQhS zCrc|cdkW~UK}5NeaWj^F4MSgFM+@fJd{|LLM)}_O<{rj z+?*Lm?owq?IzC%U%9EBga~h-cJbIu=#C}XuWN>OLrc%M@Gu~kFEYUi4EC6l#PR2JS zQUkGKrrS#6H7}2l0F@S11DP`@pih0WRkRJl#F;u{c&ZC{^$Z+_*lB)r)-bPgRFE;* zl)@hK4`tEP=P=il02x7-C7p%l=B`vkYjw?YhdJU9!P!jcmY$OtC^12w?vy3<<=tlY zUwHJ_0lgWN9vf>1%WACBD{UT)1qHQSE2%z|JHvP{#INr13jM}oYv_5#xsnv9`)UAO zuwgyV4YZ;O)eSc3(mka6=aRohi!HH@I#xq7kng?Acdg7S4vDJb6cI5fw?2z%3yR+| zU5v@Hm}vy;${cBp&@D=HQ9j7NcFaOYL zj-wV=eYF{|XTkFNM2uz&T8uH~;)^Zo!=KP)EVyH6s9l1~4m}N%XzPpduPg|h-&lL` zAXspR0YMOKd2yO)eMFFJ4?sQ&!`dF&!|niH*!^*Ml##o0M(0*uK9&yzekFi$+mP9s z>W9d%Jb)PtVi&-Ha!o~Iyh@KRuKpQ@)I~L*d`{O8!kRObjO7=n+Gp36fe!66neh+7 zW*l^0tTKjLLzr`x4`_8&on?mjW-PzheTNox8Hg7Nt@*SbE-%kP2hWYmHu#Fn@Q^J(SsPUz*|EgOoZ6byg3ew88UGdZ>9B2Tq=jF72ZaR=4u%1A6Vm{O#?@dD!(#tmR;eP(Fu z{$0O%=Vmua7=Gjr8nY%>ul?w=FJ76O2js&17W_iq2*tb!i{pt#`qZB#im9Rl>?t?0c zicIC}et_4d+CpVPx)i4~$u6N-QX3H77ez z?ZdvXifFk|*F8~L(W$OWM~r`pSk5}#F?j_5u$Obu9lDWIknO^AGu+Blk7!9Sb;NjS zncZA?qtASdNtzQ>z7N871IsPAk^CC?iIL}+{K|F@BuG2>qQ;_RUYV#>hHO(HUPpk@ z(bn~4|F_jiZi}Sad;_7`#4}EmD<1EiIxa48QjUuR?rC}^HRocq`OQPM@aHVKP9E#q zy%6bmHygCpIddPjE}q_DPC`VH_2m;Eey&ZH)E6xGeStOK7H)#+9y!%-Hm|QF6w#A( zIC0Yw%9j$s-#odxG~C*^MZ?M<+&WJ+@?B_QPUyTg9DJGtQN#NIC&-XddRsf3n^AL6 zT@P|H;PvN;ZpL0iv$bRb7|J{0o!Hq+S>_NrH4@coZtBJu#g8#CbR7|#?6uxi8d+$g z87apN>EciJZ`%Zv2**_uiET9Vk{pny&My;+WfGDw4EVL#B!Wiw&M|A8f1A@ z(yFQS6jfbH{b8Z-S7D2?Ixl`j0{+ZnpT=;KzVMLW{B$`N?Gw^Fl0H6lT61%T2AU**!sX0u?|I(yoy&Xveg7XBL&+>n6jd1##6d>TxE*Vj=8lWiG$4=u{1UbAa5QD>5_ z;Te^42v7K6Mmu4IWT6Rnm>oxrl~b<~^e3vbj-GCdHLIB_>59}Ya+~OF68NiH=?}2o zP(X7EN=quQn&)fK>M&kqF|<_*H`}c zk=+x)GU>{Af#vx&s?`UKUsz})g^Pc&?Ka@t5$n$bqf6{r1>#mWx6Ep>9|A}VmWRnowVo`OyCr^fHsf# zQjQ3Ttp7y#iQY8l`zEUW)(@gGQdt(~rkxlkefskT(t%@i8=|p1Y9Dc5bc+z#n$s13 zGJk|V0+&Ekh(F};PJzQKKo+FG@KV8a<$gmNSD;7rd_nRdc%?9)p!|B-@P~kxQG}~B zi|{0}@}zKC(rlFUYp*dO1RuvPC^DQOkX4<+EwvBAC{IZQdYxoq1Za!MW7%p7gGr=j zzWnAq%)^O2$eItftC#TTSArUyL$U54-O7e|)4_7%Q^2tZ^0-d&3J1}qCzR4dWX!)4 zzIEKjgnYgMus^>6uw4Jm8ga6>GBtMjpNRJ6CP~W=37~||gMo_p@GA@#-3)+cVYnU> zE5=Y4kzl+EbEh%dhQokB{gqNDqx%5*qBusWV%!iprn$S!;oN_6E3?0+umADVs4ako z?P+t?m?};gev9JXQ#Q&KBpzkHPde_CGu-y z<{}RRAx=xlv#mVi+Ibrgx~ujW$h{?zPfhz)Kp7kmYS&_|97b&H&1;J-mzrBWAvY} zh8-I8hl_RK2+nnf&}!W0P+>5?#?7>npshe<1~&l_xqKd0_>dl_^RMRq@-Myz&|TKZBj1=Q()) zF{dBjv5)h=&Z)Aevx}+i|7=R9rG^Di!sa)sZCl&ctX4&LScQ-kMncgO(9o6W6)yd< z@Rk!vkja*X_N3H=BavGoR0@u0<}m-7|2v!0+2h~S2Q&a=lTH91OJsvms2MT~ zY=c@LO5i`mLpBd(vh|)I&^A3TQLtr>w=zoyzTd=^f@TPu&+*2MtqE$Avf>l>}V|3-8Fp2hzo3y<)hr_|NO(&oSD z!vEjTWBxbKTiShVl-U{n*B3#)3a8$`{~Pk}J@elZ=>Pqp|MQ}jrGv7KrNcjW%TN_< zZz8kG{#}XoeWf7qY?D)L)8?Q-b@Na&>i=)(@uNo zr;cH98T3$Iau8Hn*@vXi{A@YehxDE2zX~o+RY`)6-X{8~hMpc#C`|8y> zU8Mnv5A0dNCf{Ims*|l-^ z(MRp{qoGohB34|ggDI*p!Aw|MFyJ|v+<+E3brfrI)|+l3W~CQLPbnF@G0)P~Ly!1TJLp}xh8uW`Q+RB-v`MRYZ9Gam3cM%{ zb4Cb*f)0deR~wtNb*8w-LlIF>kc7DAv>T0D(a3@l`k4TFnrO+g9XH7;nYOHxjc4lq zMmaW6qpgAgy)MckYMhl?>sq;-1E)-1llUneeA!ya9KM$)DaNGu57Z5aE>=VST$#vb zFo=uRHr$0M{-ha>h(D_boS4zId;3B|Tpqo|?B?Z@I?G(?&Iei+-{9L_A9=h=Qfn-U z1wIUnQe9!z%_j$F_{rf&`ZFSott09gY~qrf@g3O=Y>vzAnXCyL!@(BqWa)Zqt!#_k zfZHuwS52|&&)aK;CHq9V-t9qt0au{$#6c*R#e5n3rje0hic7c7m{kW$p(_`wB=Gw7 z4k`1Hi;Mc@yA7dp@r~?@rfw)TkjAW++|pkfOG}0N|2guek}j8Zen(!+@7?qt_7ndX zB=BG6WJ31#F3#Vk3=aQr8T)3`{=p9nBHlKzE0I@v`{vJ}h8pd6vby&VgFhzH|q;=aonunAXL6G2y(X^CtAhWr*jI 
zGjpY@raZDQkg*aMq}Ni6cRF z{oWv}5`nhSAv>usX}m^GHt`f(t8@zHc?K|y5Zi=4G*UG1Sza{$Dpj%X8 zzEXaKT5N6F5j4J|w#qlZP!zS7BT)9b+!ZSJdToqJts1c!)fwih4d31vfb{}W)EgcA zH2pZ^8_k$9+WD2n`6q5XbOy8>3pcYH9 z07eUB+p}YD@AH!}p!iKv><2QF-Y^&xx^PAc1F13A{nUeCDg&{hnix#FiO!fe(^&%Qcux!h znu*S!s$&nnkeotYsDthh1dq(iQrE|#f_=xVgfiiL&-5eAcC-> z5L0l|DVEM$#ulf{bj+Y~7iD)j<~O8CYM8GW)dQGq)!mck)FqoL^X zwNdZb3->hFrbHFm?hLvut-*uK?zXn3q1z|UX{RZ;-WiLoOjnle!xs+W0-8D)kjU#R z+S|A^HkRg$Ij%N4v~k`jyHffKaC~=wg=9)V5h=|kLQ@;^W!o2^K+xG&2n`XCd>OY5Ydi= zgHH=lgy++erK8&+YeTl7VNyVm9-GfONlSlVb3)V9NW5tT!cJ8d7X)!b-$fb!s76{t z@d=Vg-5K_sqHA@Zx-L_}wVnc@L@GL9_K~Zl(h5@AR#FAiKad8~KeWCo@mgXIQ#~u{ zgYFwNz}2b6Vu@CP0XoqJ+dm8px(5W5-Jpis97F`+KM)TuP*X8H@zwiVKDKGVp59pI zifNHZr|B+PG|7|Y<*tqap0CvG7tbR1R>jn70t1X`XJixiMVcHf%Ez*=xm1(CrTSDt z0cle!+{8*Ja&EOZ4@$qhBuKQ$U95Q%rc7tg$VRhk?3=pE&n+T3upZg^ZJc9~c2es% zh7>+|mrmA-p&v}|OtxqmHIBgUxL~^0+cpfkSK2mhh+4b=^F1Xgd2)}U*Yp+H?ls#z zrLxWg_hm}AfK2XYWr!rzW4g;+^^&bW%LmbtRai9f3PjU${r@n`JThy-cphbcwn)rq9{A$Ht`lmYKxOacy z6v2R(?gHhD5@&kB-Eg?4!hAoD7~(h>(R!s1c1Hx#s9vGPePUR|of32bS`J5U5w{F) z>0<^ktO2UHg<0{oxkdOQ;}coZDQph8p6ruj*_?uqURCMTac;>T#v+l1Tc~%^k-Vd@ zkc5y35jVNc49vZpZx;gG$h{%yslDI%Lqga1&&;mN{Ush1c7p>7e-(zp}6E7f-XmJb4nhk zb8zS+{IVbL$QVF8pf8}~kQ|dHJAEATmmnrb_wLG}-yHe>W|A&Y|;muy-d^t^<&)g5SJfaTH@P1%euONny=mxo+C z4N&w#biWY41r8k~468tvuYVh&XN&d#%QtIf9;iVXfWY)#j=l`&B~lqDT@28+Y!0E+MkfC}}H*#(WKKdJJq=O$vNYCb(ZG@p{fJgu;h z21oHQ(14?LeT>n5)s;uD@5&ohU!@wX8w*lB6i@GEH0pM>YTG+RAIWZD;4#F1&F%Jp zXZUml2sH0!lYJT?&sA!qwez6cXzJEd(1ZC~kT5kZSp7(@=H2$Azb_*W&6aA|9iwCL zdX7Q=42;@dspHDwYE?miGX#L^3xD&%BI&fN9^;`v4OjQXPBaBmOF1;#C)8XA(WFlH zycro;DS2?(G&6wkr6rqC>rqDv3nfGw3hmN_9Al>TgvmGsL8_hXx09};l9Ow@)F5@y z#VH5WigLDwZE4nh^7&@g{1FV^UZ%_LJ-s<{HN*2R$OPg@R~Z`c-ET*2}XB@9xvAjrK&hS=f|R8Gr9 zr|0TGOsI7RD+4+2{ZiwdVD@2zmg~g@^D--YL;6UYGSM8i$NbQr4!c7T9rg!8;TM0E zT#@?&S=t>GQm)*ua|?TLT2ktj#`|R<_*FAkOu2Pz$wEc%-=Y9V*$&dg+wIei3b*O8 z2|m$!jJG!J!ZGbbIa!(Af~oSyZV+~M1qGvelMzPNE_%5?c2>;MeeG2^N?JDKjFYCy z7SbPWH-$cWF9~fX%9~v99L!G(wi!PFp>rB!9xj7=Cv|F+7CsGNwY0Q_J%FID%C^CBZQfJ9K(HK%k31j~e#&?hQ zNuD6gRkVckU)v+53-fc} z7ZCzYN-5RG4H7;>>Hg?LU9&5_aua?A0)0dpew1#MMlu)LHe(M;OHjHIUl7|%%)YPo z0cBk;AOY00%Fe6heoN*$(b<)Cd#^8Iu;-2v@>cE-OB$icUF9EEoaC&q8z9}jMTT2I z8`9;jT%z0;dy4!8U;GW{i`)3!c6&oWY`J3669C!tM<5nQFFrFRglU8f)5Op$GtR-3 zn!+SPCw|04sv?%YZ(a7#L?vsdr7ss@WKAw&A*}-1S|9~cL%uA+E~>N6QklFE>8W|% zyX-qAUGTY1hQ-+um`2|&ji0cY*(qN!zp{YpDO-r>jPk*yuVSay<)cUt`t@&FPF_&$ zcHwu1(SQ`I-l8~vYyUxm@D1UEdFJ$f5Sw^HPH7b!9 zzYT3gKMF((N(v0#4f_jPfVZ=ApN^jQJe-X$`A?X+vWjLn_%31KXE*}5_}d8 zw_B1+a#6T1?>M{ronLbHIlEsMf93muJ7AH5h%;i99<~JX^;EAgEB1uHralD*!aJ@F zV2ruuFe9i2Q1C?^^kmVy921eb=tLDD43@-AgL^rQ3IO9%+vi_&R2^dpr}x{bCVPej z7G0-0o64uyWNtr*loIvslyo0%)KSDDKjfThe0hcqs)(C-MH1>bNGBDRTW~scy_{w} zp^aq8Qb!h9Lwielq%C1b8=?Z=&U)ST&PHbS)8Xzjh2DF?d{iAv)Eh)wsUnf>UtXN( zL7=$%YrZ#|^c{MYmhn!zV#t*(jdmYdCpwqpZ{v&L8KIuKn`@IIZfp!uo}c;7J57N` zAxyZ-uA4=Gzl~Ovycz%MW9ZL7N+nRo&1cfNn9(1H5eM;V_4Z_qVann7F>5f>%{rf= zPBZFaV@_Sobl?Fy&KXyzFDV*FIdhS5`Uc~S^Gjo)aiTHgn#<0C=9o-a-}@}xDor;D zZyZ|fvf;+=3MZd>SR1F^F`RJEZo+|MdyJYQAEauKu%WDol~ayrGU3zzbHKsnHKZ*z zFiwUkL@DZ>!*x05ql&EBq@_Vqv83&?@~q5?lVmffQZ+V-=qL+!u4Xs2Z2zdCQ3U7B&QR9_Iggy} z(om{Y9eU;IPe`+p1ifLx-XWh?wI)xU9ik+m#g&pGdB5Bi<`PR*?92lE0+TkRuXI)z z5LP!N2+tTc%cB6B1F-!fj#}>S!vnpgVU~3!*U1ej^)vjUH4s-bd^%B=ItQqDCGbrEzNQi(dJ`J}-U=2{7-d zK8k^Rlq2N#0G?9&1?HSle2vlkj^KWSBYTwx`2?9TU_DX#J+f+qLiZCqY1TXHFxXZqYMuD@RU$TgcnCC{_(vwZ-*uX)~go#%PK z@}2Km_5aQ~(<3cXeJN6|F8X_1@L%@xTzs}$_*E|a^_URF_qcF;Pfhoe?FTFwvjm1o z8onf@OY@jC2tVcMaZS;|T!Ks(wOgPpRzRnFS-^RZ4E!9dsnj9sFt609a|jJbb1Dt@ z<=Gal2jDEupxUSwWu6zp<<&RnAA;d&4gKVG0iu6g(DsST(4)z6R)zDpfaQ}v{5ARt 
zyhwvMtF%b-YazR5XLz+oh=mn;y-Mf2a8>7?2v8qX;19y?b>Z5laGHvzH;Nu9S`B8} zI)qN$GbXIQ1VL3lnof^6TS~rvPVg4V?Dl2Bb*K2z4E{5vy<(@@K_cN@U>R!>aUIRnb zL*)=787*cs#zb31zBC49x$`=fkQbMAef)L2$dR{)6BAz!t5U_B#1zZG`^neKSS22oJ#5B=gl%U=WeqL9REF2g zZnfCb0?quf?Ztj$VXvDSWoK`0L=Zxem2q}!XWLoT-kYMOx)!7fcgT35uC~0pySEme z`{wGWTkGr7>+Kb^n;W?BZH6ZP(9tQX%-7zF>vc2}LuWDI(9kh1G#7B99r4x6;_-V+k&c{nPUrR zAXJGRiMe~aup{0qzmLNjS_BC4cB#sXjckx{%_c&^xy{M61xEb>KW_AG5VFXUOjAG4 z^>Qlm9A#1N{4snY=(AmWzatb!ngqiqPbBZ7>Uhb3)dTkSGcL#&SH>iMO-IJBPua`u zo)LWZ>=NZLr758j{%(|uQuZ)pXq_4c!!>s|aDM9#`~1bzK3J1^^D#<2bNCccH7~-X}Ggi!pIIF>uFx%aPARGQsnC8ZQc8lrQ5o~smqOg>Ti^GNme94*w z)JZy{_{#$jxGQ&`M z!OMvZMHR>8*^>eS%o*6hJwn!l8VOOjZQJvh)@tnHVW&*GYPuxqXw}%M!(f-SQf`=L z5;=5w2;%82VMH6Xi&-K3W)o&K^+vJCepWZ-rW%+Dc6X3(){z$@4zjYxQ|}8UIojeC zYZpQ1dU{fy=oTr<4VX?$q)LP}IUmpiez^O&N3E_qPpchGTi5ZM6-2ScWlQq%V&R2Euz zO|Q0Hx>lY1Q1cW5xHv5!0OGU~PVEqSuy#fD72d#O`N!C;o=m+YioGu-wH2k6!t<~K zSr`E=W9)!g==~x9VV~-8{4ZN9{~-A9zJpRe%NGg$+MDuI-dH|b@BD)~>pPCGUNNzY zMDg||0@XGQgw`YCt5C&A{_+J}mvV9Wg{6V%2n#YSRN{AP#PY?1FF1#|vO_%e+#`|2*~wGAJaeRX6=IzFNeWhz6gJc8+(03Ph4y6ELAm=AkN7TOgMUEw*N{= z_)EIDQx5q22oUR+_b*tazu9+pX|n1c*IB-}{DqIj z-?E|ks{o3AGRNb;+iKcHkZvYJvFsW&83RAPs1Oh@IWy%l#5x2oUP6ZCtv+b|q>jsf zZ_9XO;V!>n`UxH1LvH8)L4?8raIvasEhkpQoJ`%!5rBs!0Tu(s_D{`4opB;57)pkX z4$A^8CsD3U5*!|bHIEqsn~{q+Ddj$ME@Gq4JXtgVz&7l{Ok!@?EA{B3P~NAqb9)4? zkQo30A^EbHfQ@87G5&EQTd`frrwL)&Yw?%-W@uy^Gn23%j?Y!Iea2xw<-f;esq zf%w5WN@E1}zyXtYv}}`U^B>W`>XPmdLj%4{P298|SisrE;7HvXX;A}Ffi8B#3Lr;1 zHt6zVb`8{#+e$*k?w8|O{Uh|&AG}|DG1PFo1i?Y*cQm$ZwtGcVgMwtBUDa{~L1KT-{jET4w60>{KZ27vXrHJ;fW{6| z=|Y4!&UX020wU1>1iRgB@Q#m~1^Z^9CG1LqDhYBrnx%IEdIty z!46iOoKlKs)c}newDG)rWUikD%j`)p z_w9Ph&e40=(2eBy;T!}*1p1f1SAUDP9iWy^u^Ubdj21Kn{46;GR+hwLO=4D11@c~V zI8x&(D({K~Df2E)Nx_yQvYfh4;MbMJ@Z}=Dt3_>iim~QZ*hZIlEs0mEb z_54+&*?wMD`2#vsQRN3KvoT>hWofI_Vf(^C1ff-Ike@h@saEf7g}<9T`W;HAne-Nd z>RR+&SP35w)xKn8^U$7))PsM!jKwYZ*RzEcG-OlTrX3}9a{q%#Un5E5W{{hp>w~;` zGky+3(vJvQyGwBo`tCpmo0mo((?nM8vf9aXrrY1Ve}~TuVkB(zeds^jEfI}xGBCM2 zL1|#tycSaWCurP+0MiActG3LCas@_@tao@(R1ANlwB$4K53egNE_;!&(%@Qo$>h`^1S_!hN6 z)vZtG$8fN!|BXBJ=SI>e(LAU(y(i*PHvgQ2llulxS8>qsimv7yL}0q_E5WiAz7)(f zC(ahFvG8&HN9+6^jGyLHM~$)7auppeWh_^zKk&C_MQ~8;N??OlyH~azgz5fe^>~7F zl3HnPN3z-kN)I$4@`CLCMQx3sG~V8hPS^}XDXZrQA>}mQPw%7&!sd(Pp^P=tgp-s^ zjl}1-KRPNWXgV_K^HkP__SR`S-|OF0bR-N5>I%ODj&1JUeAQ3$9i;B~$S6}*^tK?= z**%aCiH7y?xdY?{LgVP}S0HOh%0%LI$wRx;$T|~Y8R)Vdwa}kGWv8?SJVm^>r6+%I z#lj1aR94{@MP;t-scEYQWc#xFA30^}?|BeX*W#9OL;Q9#WqaaM546j5j29((^_8Nu z4uq}ESLr~r*O7E7$D{!k9W>`!SLoyA53i9QwRB{!pHe8um|aDE`Cg0O*{jmor)^t)3`>V>SWN-2VJcFmj^1?~tT=JrP`fVh*t zXHarp=8HEcR#vFe+1a%XXuK+)oFs`GDD}#Z+TJ}Ri`FvKO@ek2ayn}yaOi%(8p%2$ zpEu)v0Jym@f}U|-;}CbR=9{#<^z28PzkkTNvyKvJDZe+^VS2bES3N@Jq!-*}{oQlz z@8bgC_KnDnT4}d#&Cpr!%Yb?E!brx0!eVOw~;lLwUoz#Np%d$o%9scc3&zPm`%G((Le|6o1 zM(VhOw)!f84zG^)tZ1?Egv)d8cdNi+T${=5kV+j;Wf%2{3g@FHp^Gf*qO0q!u$=m9 zCaY`4mRqJ;FTH5`a$affE5dJrk~k`HTP_7nGTY@B9o9vvnbytaID;^b=Tzp7Q#DmD zC(XEN)Ktn39z5|G!wsVNnHi) z%^q94!lL|hF`IijA^9NR0F$@h7k5R^ljOW(;Td9grRN0Mb)l_l7##{2nPQ@?;VjXv zaLZG}yuf$r$<79rVPpXg?6iiieX|r#&`p#Con2i%S8*8F}(E) zI5E6c3tG*<;m~6>!&H!GJ6zEuhH7mkAzovdhLy;)q z{H2*8I^Pb}xC4s^6Y}6bJvMu=8>g&I)7!N!5QG$xseeU#CC?ZM-TbjsHwHgDGrsD= z{%f;@Sod+Ch66Ko2WF~;Ty)v>&x^aovCbCbD7>qF*!?BXmOV3(s|nxsb*Lx_2lpB7 zokUnzrk;P=T-&kUHO}td+Zdj!3n&NR?K~cRU zAXU!DCp?51{J4w^`cV#ye}(`SQhGQkkMu}O3M*BWt4UsC^jCFUy;wTINYmhD$AT;4 z?Xd{HaJjP`raZ39qAm;%beDbrLpbRf(mkKbANan7XsL>_pE2oo^$TgdidjRP!5-`% zv0d!|iKN$c0(T|L0C~XD0aS8t{*&#LnhE;1Kb<9&=c2B+9JeLvJr*AyyRh%@jHej=AetOMSlz^=!kxX>>B{2B1uIrQyfd8KjJ+DBy!h)~*(!|&L4^Q_07SQ~E 
zcemVP`{9CwFvPFu7pyVGCLhH?LhEVb2{7U+Z_>o25#+3<|8%1T^5dh}*4(kfJGry} zm%r#hU+__Z;;*4fMrX=Bkc@7|v^*B;HAl0((IBPPii%X9+u3DDF6%bI&6?Eu$8&aWVqHIM7mK6?Uvq$1|(-T|)IV<>e?!(rY zqkmO1MRaLeTR=)io(0GVtQT@s6rN%C6;nS3@eu;P#ry4q;^O@1ZKCJyp_Jo)Ty^QW z+vweTx_DLm{P-XSBj~Sl<%_b^$=}odJ!S2wAcxenmzFGX1t&Qp8Vxz2VT`uQsQYtdn&_0xVivIcxZ_hnrRtwq4cZSj1c-SG9 z7vHBCA=fd0O1<4*=lu$6pn~_pVKyL@ztw1swbZi0B?spLo56ZKu5;7ZeUml1Ws1?u zqMf1p{5myAzeX$lAi{jIUqo1g4!zWLMm9cfWcnw`k6*BR^?$2(&yW?>w;G$EmTA@a z6?y#K$C~ZT8+v{87n5Dm&H6Pb_EQ@V0IWmG9cG=O;(;5aMWWrIPzz4Q`mhK;qQp~a z+BbQrEQ+w{SeiuG-~Po5f=^EvlouB@_|4xQXH@A~KgpFHrwu%dwuCR)=B&C(y6J4J zvoGk9;lLs9%iA-IJGU#RgnZZR+@{5lYl8(e1h6&>Vc_mvg0d@);X zji4T|n#lB!>pfL|8tQYkw?U2bD`W{na&;*|znjmalA&f;*U++_aBYerq;&C8Kw7mI z7tsG*?7*5j&dU)Lje;^{D_h`%(dK|pB*A*1(Jj)w^mZ9HB|vGLkF1GEFhu&rH=r=8 zMxO42e{Si6$m+Zj`_mXb&w5Q(i|Yxyg?juUrY}78uo@~3v84|8dfgbPd0iQJRdMj< zncCNGdMEcsxu#o#B5+XD{tsg*;j-eF8`mp~K8O1J!Z0+>0=7O=4M}E?)H)ENE;P*F z$Ox?ril_^p0g7xhDUf(q652l|562VFlC8^r8?lQv;TMvn+*8I}&+hIQYh2 z1}uQQaag&!-+DZ@|C+C$bN6W;S-Z@)d1|en+XGvjbOxCa-qAF*LA=6s(Jg+g;82f$ z(Vb)8I)AH@cdjGFAR5Rqd0wiNCu!xtqWbcTx&5kslzTb^7A78~Xzw1($UV6S^VWiP zFd{Rimd-0CZC_Bu(WxBFW7+k{cOW7DxBBkJdJ;VsJ4Z@lERQr%3eVv&$%)b%<~ zCl^Y4NgO}js@u{|o~KTgH}>!* z_iDNqX2(As7T0xivMH|3SC1ivm8Q}6Ffcd7owUKN5lHAtzMM4<0v+ykUT!QiowO;`@%JGv+K$bBx@*S7C8GJVqQ_K>12}M`f_Ys=S zKFh}HM9#6Izb$Y{wYzItTy+l5U2oL%boCJn?R3?jP@n$zSIwlmyGq30Cw4QBO|14` zW5c);AN*J3&eMFAk$SR~2k|&+&Bc$e>s%c{`?d~85S-UWjA>DS5+;UKZ}5oVa5O(N zqqc@>)nee)+4MUjH?FGv%hm2{IlIF-QX}ym-7ok4Z9{V+ZHVZQl$A*x!(q%<2~iVv znUa+BX35&lCb#9VE-~Y^W_f;Xhl%vgjwdjzMy$FsSIj&ok}L+X`4>J=9BkN&nu^E*gbhj3(+D>C4E z@Fwq_=N)^bKFSHTzZk?-gNU$@l}r}dwGyh_fNi=9b|n}J>&;G!lzilbWF4B}BBq4f zYIOl?b)PSh#XTPp4IS5ZR_2C!E)Z`zH0OW%4;&~z7UAyA-X|sh9@~>cQW^COA9hV4 zXcA6qUo9P{bW1_2`eo6%hgbN%(G-F1xTvq!sc?4wN6Q4`e9Hku zFwvlAcRY?6h^Fj$R8zCNEDq8`=uZB8D-xn)tA<^bFFy}4$vA}Xq0jAsv1&5!h!yRA zU()KLJya5MQ`q&LKdH#fwq&(bNFS{sKlEh_{N%{XCGO+po#(+WCLmKW6&5iOHny>g z3*VFN?mx!16V5{zyuMWDVP8U*|BGT$(%IO|)?EF|OI*sq&RovH!N%=>i_c?K*A>>k zyg1+~++zY4Q)J;VWN0axhoIKx;l&G$gvj(#go^pZskEVj8^}is3Jw26LzYYVos0HX zRPvmK$dVxM8(Tc?pHFe0Z3uq){{#OK3i-ra#@+;*=ui8)y6hsRv z4Fxx1c1+fr!VI{L3DFMwXKrfl#Q8hfP@ajgEau&QMCxd{g#!T^;ATXW)nUg&$-n25 zruy3V!!;{?OTobo|0GAxe`Acn3GV@W=&n;~&9 zQM>NWW~R@OYORkJAo+eq1!4vzmf9K%plR4(tB@TR&FSbDoRgJ8qVcH#;7lQub*nq&?Z>7WM=oeEVjkaG zT#f)=o!M2DO5hLR+op>t0CixJCIeXH*+z{-XS|%jx)y(j&}Wo|3!l7{o)HU3m7LYyhv*xF&tq z%IN7N;D4raue&&hm0xM=`qv`+TK@;_xAcGKuK(2|75~ar2Yw)geNLSmVxV@x89bQu zpViVKKnlkwjS&&c|-X6`~xdnh}Ps)Hs z4VbUL^{XNLf7_|Oi>tA%?SG5zax}esF*FH3d(JH^Gvr7Rp*n=t7frH!U;!y1gJB^i zY_M$KL_}mW&XKaDEi9K-wZR|q*L32&m+2n_8lq$xRznJ7p8}V>w+d@?uB!eS3#u<} zIaqi!b!w}a2;_BfUUhGMy#4dPx>)_>yZ`ai?Rk`}d0>~ce-PfY-b?Csd(28yX22L% zI7XI>OjIHYTk_@Xk;Gu^F52^Gn6E1&+?4MxDS2G_#PQ&yXPXP^<-p|2nLTb@AAQEY zI*UQ9Pmm{Kat}wuazpjSyXCdnrD&|C1c5DIb1TnzF}f4KIV6D)CJ!?&l&{T)e4U%3HTSYqsQ zo@zWB1o}ceQSV)<4G<)jM|@@YpL+XHuWsr5AYh^Q{K=wSV99D~4RRU52FufmMBMmd z_H}L#qe(}|I9ZyPRD6kT>Ivj&2Y?qVZq<4bG_co_DP`sE*_Xw8D;+7QR$Uq(rr+u> z8bHUWbV19i#)@@G4bCco@Xb<8u~wVDz9S`#k@ciJtlu@uP1U0X?yov8v9U3VOig2t zL9?n$P3=1U_Emi$#slR>N5wH-=J&T=EdUHA}_Z zZIl3nvMP*AZS9{cDqFanrA~S5BqxtNm9tlu;^`)3X&V4tMAkJ4gEIPl= zoV!Gyx0N{3DpD@)pv^iS*dl2FwANu;1;%EDl}JQ7MbxLMAp>)UwNwe{=V}O-5C*>F zu?Ny+F64jZn<+fKjF01}8h5H_3pey|;%bI;SFg$w8;IC<8l|3#Lz2;mNNik6sVTG3 z+Su^rIE#40C4a-587$U~%KedEEw1%r6wdvoMwpmlXH$xPnNQN#f%Z7|p)nC>WsuO= z4zyqapLS<8(UJ~Qi9d|dQijb_xhA2)v>la)<1md5s^R1N&PiuA$^k|A<+2C?OiHbj z>Bn$~t)>Y(Zb`8hW7q9xQ=s>Rv81V+UiuZJc<23HplI88isqRCId89fb`Kt|CxVIg znWcwprwXnotO>3s&Oypkte^9yJjlUVVxSe%_xlzmje|mYOVPH^vjA=?6xd0vaj0Oz 
zwJ4OJNiFdnHJX3rw&inskjryukl`*fRQ#SMod5J|KroJRsVXa5_$q7whSQ{gOi*s0 z1LeCy|JBWRsDPn7jCb4s(p|JZiZ8+*ExC@Vj)MF|*Vp{B(ziccSn`G1Br9bV(v!C2 z6#?eqpJBc9o@lJ#^p-`-=`4i&wFe>2)nlPK1p9yPFzJCzBQbpkcR>={YtamIw)3nt z(QEF;+)4`>8^_LU)_Q3 zC5_7lgi_6y>U%m)m@}Ku4C}=l^J=<<7c;99ec3p{aR+v=diuJR7uZi%aQv$oP?dn?@6Yu_+*^>T0ptf(oobdL;6)N-I!TO`zg^Xbv3#L0I~sn@WGk-^SmPh5>W+LB<+1PU}AKa?FCWF|qMNELOgdxR{ zbqE7@jVe+FklzdcD$!(A$&}}H*HQFTJ+AOrJYnhh}Yvta(B zQ_bW4Rr;R~&6PAKwgLWXS{Bnln(vUI+~g#kl{r+_zbngT`Y3`^Qf=!PxN4IYX#iW4 zucW7@LLJA9Zh3(rj~&SyN_pjO8H&)|(v%!BnMWySBJV=eSkB3YSTCyIeJ{i;(oc%_hk{$_l;v>nWSB)oVeg+blh=HB5JSlG_r7@P z3q;aFoZjD_qS@zygYqCn=;Zxjo!?NK!%J$ z52lOP`8G3feEj+HTp@Tnn9X~nG=;tS+z}u{mQX_J0kxtr)O30YD%oo)L@wy`jpQYM z@M>Me=95k1p*FW~rHiV1CIfVc{K8r|#Kt(ApkXKsDG$_>76UGNhHExFCw#Ky9*B-z zNq2ga*xax!HMf_|Vp-86r{;~YgQKqu7%szk8$hpvi_2I`OVbG1doP(`gn}=W<8%Gn z%81#&WjkH4GV;4u43EtSW>K_Ta3Zj!XF?;SO3V#q=<=>Tc^@?A`i;&`-cYj|;^ zEo#Jl5zSr~_V-4}y8pnufXLa80vZY4z2ko7fj>DR)#z=wWuS1$$W!L?(y}YC+yQ|G z@L&`2upy3f>~*IquAjkVNU>}c10(fq#HdbK$~Q3l6|=@-eBbo>B9(6xV`*)sae58*f zym~RRVx;xoCG3`JV`xo z!lFw)=t2Hy)e!IFs?0~7osWk(d%^wxq&>_XD4+U#y&-VF%4z?XH^i4w`TxpF{`XhZ z%G}iEzf!T(l>g;W9<~K+)$g!{UvhW{E0Lis(S^%I8OF&%kr!gJ&fMOpM=&=Aj@wuL zBX?*6i51Qb$uhkwkFYkaD_UDE+)rh1c;(&Y=B$3)J&iJfQSx!1NGgPtK!$c9OtJuu zX(pV$bfuJpRR|K(dp@^j}i&HeJOh@|7lWo8^$*o~Xqo z5Sb+!EtJ&e@6F+h&+_1ETbg7LfP5GZjvIUIN3ibCOldAv z)>YdO|NH$x7AC8dr=<2ekiY1%fN*r~e5h6Yaw<{XIErujKV~tiyrvV_DV0AzEknC- zR^xKM3i<1UkvqBj3C{wDvytOd+YtDSGu!gEMg+!&|8BQrT*|p)(dwQLEy+ zMtMzij3zo40)CA!BKZF~yWg?#lWhqD3@qR)gh~D{uZaJO;{OWV8XZ_)J@r3=)T|kt zUS1pXr6-`!Z}w2QR7nP%d?ecf90;K_7C3d!UZ`N(TZoWNN^Q~RjVhQG{Y<%E1PpV^4 z-m-K+$A~-+VDABs^Q@U*)YvhY4Znn2^w>732H?NRK(5QSS$V@D7yz2BVX4)f5A04~$WbxGOam22>t&uD)JB8-~yiQW6ik;FGblY_I>SvB_z2?PS z*Qm&qbKI{H1V@YGWzpx`!v)WeLT02};JJo*#f$a*FH?IIad-^(;9XC#YTWN6;Z6+S zm4O1KH=#V@FJw7Pha0!9Vb%ZIM$)a`VRMoiN&C|$YA3~ZC*8ayZRY^fyuP6$n%2IU z$#XceYZeqLTXw(m$_z|33I$B4k~NZO>pP6)H_}R{E$i%USGy{l{-jOE;%CloYPEU+ zRFxOn4;7lIOh!7abb23YKD+_-?O z0FP9otcAh+oSj;=f#$&*ExUHpd&e#bSF%#8*&ItcL2H$Sa)?pt0Xtf+t)z$_u^wZi z44oE}r4kIZGy3!Mc8q$B&6JqtnHZ>Znn!Zh@6rgIu|yU+zG8q`q9%B18|T|oN3zMq z`l&D;U!OL~%>vo&q0>Y==~zLiCZk4v%s_7!9DxQ~id1LLE93gf*gg&2$|hB#j8;?3 z5v4S;oM6rT{Y;I+#FdmNw z){d%tNM<<#GN%n9ox7B=3#;u7unZ~tLB_vRZ52a&2=IM)2VkXm=L+Iqq~uk#Dug|x z>S84e+A7EiOY5lj*!q?6HDkNh~0g;0Jy(al!ZHHDtur9T$y-~)94HelX1NHjXWIM7UAe}$?jiz z9?P4`I0JM=G5K{3_%2jPLC^_Mlw?-kYYgb7`qGa3@dn|^1fRMwiyM@Ch z;CB&o7&&?c5e>h`IM;Wnha0QKnEp=$hA8TJgR-07N~U5(>9vJzeoFsSRBkDq=x(YgEMpb=l4TDD`2 zwVJpWGTA_u7}?ecW7s6%rUs&NXD3+n;jB86`X?8(l3MBo6)PdakI6V6a}22{)8ilT zM~T*mU}__xSy|6XSrJ^%lDAR3Lft%+yxC|ZUvSO_nqMX!_ul3;R#*{~4DA=h$bP)%8Yv9X zyp><|e8=_ttI}ZAwOd#dlnSjck#6%273{E$kJuCGu=I@O)&6ID{nWF5@gLb16sj|&Sb~+du4e4O_%_o`Ix4NRrAsyr1_}MuP94s>de8cH-OUkVPk3+K z&jW)It9QiU-ti~AuJkL`XMca8Oh4$SyJ=`-5WU<{cIh+XVH#e4d&zive_UHC!pN>W z3TB;Mn5i)9Qn)#6@lo4QpI3jFYc0~+jS)4AFz8fVC;lD^+idw^S~Qhq>Tg(!3$yLD zzktzoFrU@6s4wwCMz}edpF5i5Q1IMmEJQHzp(LAt)pgN3&O!&d?3W@6U4)I^2V{;- z6A(?zd93hS*uQmnh4T)nHnE{wVhh(=MMD(h(P4+^p83Om6t<*cUW>l(qJzr%5vp@K zN27ka(L{JX=1~e2^)F^i=TYj&;<7jyUUR2Bek^A8+3Up*&Xwc{)1nRR5CT8vG>ExV zHnF3UqXJOAno_?bnhCX-&kwI~Ti8t4`n0%Up>!U`ZvK^w2+0Cs-b9%w%4`$+To|k= zKtgc&l}P`*8IS>8DOe?EB84^kx4BQp3<7P{Pq}&p%xF_81pg!l2|u=&I{AuUgmF5n zJQCTLv}%}xbFGYtKfbba{CBo)lWW%Z>i(_NvLhoQZ*5-@2l&x>e+I~0Nld3UI9tdL zRzu8}i;X!h8LHVvN?C+|M81e>Jr38%&*9LYQec9Ax>?NN+9(_>XSRv&6hlCYB`>Qm z1&ygi{Y()OU4@D_jd_-7vDILR{>o|7-k)Sjdxkjgvi{@S>6GqiF|o`*Otr;P)kLHN zZkpts;0zw_6;?f(@4S1FN=m!4^mv~W+lJA`&7RH%2$)49z0A+8@0BCHtj|yH--AEL z0tW6G%X-+J+5a{5*WKaM0QDznf;V?L5&uQw+yegDNDP`hA;0XPYc6e0;Xv6|i|^F2WB)Z$LR|HR4 
zTQsRAby9(^Z@yATyOgcfQw7cKyr^3Tz7lc7+JEwwzA7)|2x+PtEb>nD(tpxJQm)Kn zW9K_*r!L%~N*vS8<5T=iv|o!zTe9k_2jC_j*7ik^M_ zaf%k{WX{-;0*`t`G!&`eW;gChVXnJ-Rn)To8vW-?>>a%QU1v`ZC=U)f8iA@%JG0mZ zDqH;~mgBnrCP~1II<=V9;EBL)J+xzCoiRBaeH&J6rL!{4zIY8tZka?_FBeQeNO3q6 zyG_alW54Ba&wQf{&F1v-r1R6ID)PTsqjIBc+5MHkcW5Fnvi~{-FjKe)t1bl}Y;z@< z=!%zvpRua>>t_x}^}z0<7MI!H2v6|XAyR9!t50q-A)xk0nflgF4*OQlCGK==4S|wc zRMsSscNhRzHMBU8TdcHN!q^I}x0iXJ%uehac|Zs_B$p@CnF)HeXPpB_Za}F{<@6-4 zl%kml@}kHQ(ypD8FsPJ2=14xXJE|b20RUIgs!2|R3>LUMGF6X*B_I|$`Qg=;zm7C z{mEDy9dTmPbued7mlO@phdmAmJ7p@GR1bjCkMw6*G7#4+`k>fk1czdJUB!e@Q(~6# zwo%@p@V5RL0ABU2LH7Asq^quDUho@H>eTZH9f*no9fY0T zD_-9px3e}A!>>kv5wk91%C9R1J_Nh!*&Kk$J3KNxC}c_@zlgpJZ+5L)Nw|^p=2ue}CJtm;uj*Iqr)K})kA$xtNUEvX;4!Px*^&9T_`IN{D z{6~QY=Nau6EzpvufB^hflc#XIsSq0Y9(nf$d~6ZwK}fal92)fr%T3=q{0mP-EyP_G z)UR5h@IX}3Qll2b0oCAcBF>b*@Etu*aTLPU<%C>KoOrk=x?pN!#f_Og-w+;xbFgjQ zXp`et%lDBBh~OcFnMKMUoox0YwBNy`N0q~bSPh@+enQ=4RUw1) zpovN`QoV>vZ#5LvC;cl|6jPr}O5tu!Ipoyib8iXqy}TeJ;4+_7r<1kV0v5?Kv>fYp zg>9L`;XwXa&W7-jf|9~uP2iyF5`5AJ`Q~p4eBU$MCC00`rcSF>`&0fbd^_eqR+}mK z4n*PMMa&FOcc)vTUR zlDUAn-mh`ahi_`f`=39JYTNVjsTa_Y3b1GOIi)6dY)D}xeshB0T8Eov5%UhWd1)u}kjEQ|LDo{tqKKrYIfVz~@dp!! zMOnah@vp)%_-jDTUG09l+;{CkDCH|Q{NqX*uHa1YxFShy*1+;J`gywKaz|2Q{lG8x zP?KBur`}r`!WLKXY_K;C8$EWG>jY3UIh{+BLv0=2)KH%P}6xE2kg)%(-uA6lC?u8}{K(#P*c zE9C8t*u%j2r_{;Rpe1A{9nNXU;b_N0vNgyK!EZVut~}+R2rcbsHilqsOviYh-pYX= zHw@53nlmwYI5W5KP>&`dBZe0Jn?nAdC^HY1wlR6$u^PbpB#AS&5L6zqrXN&7*N2Q` z+Rae1EwS)H=aVSIkr8Ek^1jy2iS2o7mqm~Mr&g5=jjt7VxwglQ^`h#Mx+x2v|9ZAwE$i_9918MjJxTMr?n!bZ6n$}y11u8I9COTU`Z$Fi z!AeAQLMw^gp_{+0QTEJrhL424pVDp%wpku~XRlD3iv{vQ!lAf!_jyqd_h}+Tr1XG| z`*FT*NbPqvHCUsYAkFnM`@l4u_QH&bszpUK#M~XLJt{%?00GXY?u_{gj3Hvs!=N(I z(=AuWPijyoU!r?aFTsa8pLB&cx}$*%;K$e*XqF{~*rA-qn)h^!(-;e}O#B$|S~c+U zN4vyOK0vmtx$5K!?g*+J@G1NmlEI=pyZXZ69tAv=@`t%ag_Hk{LP~OH9iE)I= zaJ69b4kuCkV0V zo(M0#>phpQ_)@j;h%m{-a*LGi(72TP)ws2w*@4|C-3+;=5DmC4s7Lp95%n%@Ko zfdr3-a7m*dys9iIci$A=4NPJ`HfJ;hujLgU)ZRuJI`n;Pw|yksu!#LQnJ#dJysgNb z@@qwR^wrk(jbq4H?d!lNyy72~Dnn87KxsgQ!)|*m(DRM+eC$wh7KnS-mho3|KE)7h zK3k;qZ;K1Lj6uEXLYUYi)1FN}F@-xJ z@@3Hb84sl|j{4$3J}aTY@cbX@pzB_qM~APljrjju6P0tY{C@ zpUCOz_NFmALMv1*blCcwUD3?U6tYs+N%cmJ98D%3)%)Xu^uvzF zS5O!sc#X6?EwsYkvPo6A%O8&y8sCCQH<%f2togVwW&{M;PR!a(ZT_A+jVAbf{@5kL zB@Z(hb$3U{T_}SKA_CoQVU-;j>2J=L#lZ~aQCFg-d<9rzs$_gO&d5N6eFSc z1ml8)P*FSi+k@!^M9nDWR5e@ATD8oxtDu=36Iv2!;dZzidIS(PCtEuXAtlBb1;H%Z zwnC^Ek*D)EX4#Q>R$$WA2sxC_t(!!6Tr?C#@{3}n{<^o;9id1RA&-Pig1e-2B1XpG zliNjgmd3c&%A}s>qf{_j#!Z`fu0xIwm4L0)OF=u(OEmp;bLCIaZX$&J_^Z%4Sq4GZ zPn6sV_#+6pJmDN_lx@1;Zw6Md_p0w9h6mHtzpuIEwNn>OnuRSC2=>fP^Hqgc)xu^4 z<3!s`cORHJh#?!nKI`Et7{3C27+EuH)Gw1f)aoP|B3y?fuVfvpYYmmukx0ya-)TQX zR{ggy5cNf4X|g)nl#jC9p>7|09_S7>1D2GTRBUTW zAkQ=JMRogZqG#v;^=11O6@rPPwvJkr{bW-Qg8`q8GoD#K`&Y+S#%&B>SGRL>;ZunM@49!}Uy zN|bBCJ%sO;@3wl0>0gbl3L@1^O60ONObz8ZI7nder>(udj-jt`;yj^nTQ$L9`OU9W zX4alF#$|GiR47%x@s&LV>2Sz2R6?;2R~5k6V>)nz!o_*1Y!$p>BC5&?hJg_MiE6UBy>RkVZj`9UWbRkN-Hk!S`=BS3t3uyX6)7SF#)71*}`~Ogz z1rap5H6~dhBJ83;q-Y<5V35C2&F^JI-it(=5D#v!fAi9p#UwV~2tZQI+W(Dv?1t9? 
zfh*xpxxO{-(VGB>!Q&0%^YW_F!@aZS#ucP|YaD#>wd1Fv&Z*SR&mc;asi}1G) z_H>`!akh-Zxq9#io(7%;a$)w+{QH)Y$?UK1Dt^4)up!Szcxnu}kn$0afcfJL#IL+S z5gF_Y30j;{lNrG6m~$Ay?)*V9fZuU@3=kd40=LhazjFrau>(Y>SJNtOz>8x_X-BlA zIpl{i>OarVGj1v(4?^1`R}aQB&WCRQzS~;7R{tDZG=HhgrW@B`W|#cdyj%YBky)P= zpxuOZkW>S6%q7U{VsB#G(^FMsH5QuGXhb(sY+!-R8Bmv6Sx3WzSW<1MPPN1!&PurYky(@`bP9tz z52}LH9Q?+FF5jR6-;|+GVdRA!qtd;}*-h&iIw3Tq3qF9sDIb1FFxGbo&fbG5n8$3F zyY&PWL{ys^dTO}oZ#@sIX^BKW*bon=;te9j5k+T%wJ zNJtoN1~YVj4~YRrlZl)b&kJqp+Z`DqT!la$x&&IxgOQw#yZd-nBP3!7FijBXD|IsU8Zl^ zc6?MKpJQ+7ka|tZQLfchD$PD|;K(9FiLE|eUZX#EZxhG!S-63C$jWX1Yd!6-Yxi-u zjULIr|0-Q%D9jz}IF~S%>0(jOqZ(Ln<$9PxiySr&2Oic7vb<8q=46)Ln%Z|<*z5&> z3f~Zw@m;vR(bESB<=Jqkxn(=#hQw42l(7)h`vMQQTttz9XW6^|^8EK7qhju4r_c*b zJIi`)MB$w@9epwdIfnEBR+?~);yd6C(LeMC& zn&&N*?-g&BBJcV;8&UoZi4Lmxcj16ojlxR~zMrf=O_^i1wGb9X-0@6_rpjPYemIin zmJb+;lHe;Yp=8G)Q(L1bzH*}I>}uAqhj4;g)PlvD9_e_ScR{Ipq|$8NvAvLD8MYr}xl=bU~)f%B3E>r3Bu9_t|ThF3C5~BdOve zEbk^r&r#PT&?^V1cb{72yEWH}TXEE}w>t!cY~rA+hNOTK8FAtIEoszp!qqptS&;r$ zaYV-NX96-h$6aR@1xz6_E0^N49mU)-v#bwtGJm)ibygzJ8!7|WIrcb`$XH~^!a#s& z{Db-0IOTFq#9!^j!n_F}#Z_nX{YzBK8XLPVmc&X`fT7!@$U-@2KM9soGbmOSAmqV z{nr$L^MBo_u^Joyf0E^=eo{Rt0{{e$IFA(#*kP@SQd6lWT2-#>` zP1)7_@IO!9lk>Zt?#CU?cuhiLF&)+XEM9B)cS(gvQT!X3`wL*{fArTS;Ak`J<84du zALKPz4}3nlG8Fo^MH0L|oK2-4xIY!~Oux~1sw!+It)&D3p;+N8AgqKI`ld6v71wy8I!eP0o~=RVcFQR2Gr(eP_JbSytoQ$Yt}l*4r@A8Me94y z8cTDWhqlq^qoAhbOzGBXv^Wa4vUz$(7B!mX`T=x_ueKRRDfg&Uc-e1+z4x$jyW_Pm zp?U;-R#xt^Z8Ev~`m`iL4*c#65Nn)q#=Y0l1AuD&+{|8-Gsij3LUZXpM0Bx0u7WWm zH|%yE@-#XEph2}-$-thl+S;__ciBxSSzHveP%~v}5I%u!z_l_KoW{KRx2=eB33umE zIYFtu^5=wGU`Jab8#}cnYry@9p5UE#U|VVvx_4l49JQ;jQdp(uw=$^A$EA$LM%vmE zvdEOaIcp5qX8wX{mYf0;#51~imYYPn4=k&#DsKTxo{_Mg*;S495?OBY?#gv=edYC* z^O@-sd-qa+U24xvcbL0@C7_6o!$`)sVr-jSJE4XQUQ$?L7}2(}Eixqv;L8AdJAVqc zq}RPgpnDb@E_;?6K58r3h4-!4rT4Ab#rLHLX?eMOfluJk=3i1@Gt1i#iA=O`M0@x! z(HtJP9BMHXEzuD93m|B&woj0g6T?f#^)>J>|I4C5?Gam>n9!8CT%~aT;=oco5d6U8 zMXl(=W;$ND_8+DD*?|5bJ!;8ebESXMUKBAf7YBwNVJibGaJ*(2G`F%wx)grqVPjudiaq^Kl&g$8A2 zWMxMr@_$c}d+;_B`#kUX-t|4VKH&_f^^EP0&=DPLW)H)UzBG%%Tra*5 z%$kyZe3I&S#gfie^z5)!twG={3Cuh)FdeA!Kj<-9** zvT*5%Tb`|QbE!iW-XcOuy39>D3oe6x{>&<#E$o8Ac|j)wq#kQzz|ATd=Z0K!p2$QE zPu?jL8Lb^y3_CQE{*}sTDe!2!dtlFjq&YLY@2#4>XS`}v#PLrpvc4*@q^O{mmnr5D zmyJq~t?8>FWU5vZdE(%4cuZuao0GNjp3~Dt*SLaxI#g_u>hu@k&9Ho*#CZP~lFJHj z(e!SYlLigyc?&5-YxlE{uuk$9b&l6d`uIlpg_z15dPo*iU&|Khx2*A5Fp;8iK_bdP z?T6|^7@lcx2j0T@x>X7|kuuBSB7<^zeY~R~4McconTxA2flHC0_jFxmSTv-~?zVT| zG_|yDqa9lkF*B6_{j=T>=M8r<0s;@z#h)3BQ4NLl@`Xr__o7;~M&dL3J8fP&zLfDfy z);ckcTev{@OUlZ`bCo(-3? 
z1u1xD`PKgSg?RqeVVsF<1SLF;XYA@Bsa&cY!I48ZJn1V<3d!?s=St?TLo zC0cNr`qD*M#s6f~X>SCNVkva^9A2ZP>CoJ9bvgXe_c}WdX-)pHM5m7O zrHt#g$F0AO+nGA;7dSJ?)|Mo~cf{z2L)Rz!`fpi73Zv)H=a5K)*$5sf_IZypi($P5 zsPwUc4~P-J1@^3C6-r9{V-u0Z&Sl7vNfmuMY4yy*cL>_)BmQF!8Om9Dej%cHxbIzA zhtV0d{=%cr?;bpBPjt@4w=#<>k5ee=TiWAXM2~tUGfm z$s&!Dm0R^V$}fOR*B^kGaipi~rx~A2cS0;t&khV1a4u38*XRUP~f za!rZMtay8bsLt6yFYl@>-y^31(*P!L^^s@mslZy(SMsv9bVoX`O#yBgEcjCmGpyc* zeH$Dw6vB5P*;jor+JOX@;6K#+xc)Z9B8M=x2a@Wx-{snPGpRmOC$zpsqW*JCh@M2Y z#K+M(>=#d^>Of9C`))h<=Bsy)6zaMJ&x-t%&+UcpLjV`jo4R2025 zXaG8EA!0lQa)|dx-@{O)qP6`$rhCkoQqZ`^SW8g-kOwrwsK8 z3ms*AIcyj}-1x&A&vSq{r=QMyp3CHdWH35!sad#!Sm>^|-|afB+Q;|Iq@LFgqIp#Z zD1%H+3I?6RGnk&IFo|u+E0dCxXz4yI^1i!QTu7uvIEH>i3rR{srcST`LIRwdV1P;W z+%AN1NIf@xxvVLiSX`8ILA8MzNqE&7>%jMzGt9wm78bo9<;h*W84i29^w!>V>{N+S zd`5Zmz^G;f=icvoOZfK5#1ctx*~UwD=ab4DGQXehQ!XYnak*dee%YN$_ZPL%KZuz$ zD;$PpT;HM^$KwtQm@7uvT`i6>Hae1CoRVM2)NL<2-k2PiX=eAx+-6j#JI?M}(tuBW zkF%jjLR)O`gI2fcPBxF^HeI|DWwQWHVR!;;{BXXHskxh8F@BMDn`oEi-NHt;CLymW z=KSv5)3dyzec0T5B*`g-MQ<;gz=nIWKUi9ko<|4I(-E0k$QncH>E4l z**1w&#={&zv4Tvhgz#c29`m|;lU-jmaXFMC11 z*dlXDMEOG>VoLMc>!rApwOu2prKSi*!w%`yzGmS+k(zm*CsLK*wv{S_0WX^8A-rKy zbk^Gf_92^7iB_uUF)EE+ET4d|X|>d&mdN?x@vxKAQk`O+r4Qdu>XGy(a(19g;=jU} zFX{O*_NG>!$@jh!U369Lnc+D~qch3uT+_Amyi}*k#LAAwh}k8IPK5a-WZ81ufD>l> z$4cF}GSz>ce`3FAic}6W4Z7m9KGO?(eWqi@L|5Hq0@L|&2flN1PVl}XgQ2q*_n2s3 zt5KtowNkTYB5b;SVuoXA@i5irXO)A&%7?V`1@HGCB&)Wgk+l|^XXChq;u(nyPB}b3 zY>m5jkxpZgi)zfbgv&ec4Zqdvm+D<?Im*mXweS9H+V>)zF#Zp3)bhl$PbISY{5=_z!8&*Jv~NYtI-g!>fDs zmvL5O^U%!^VaKA9gvKw|5?-jk>~%CVGvctKmP$kpnpfN{D8@X*Aazi$txfa%vd-|E z>kYmV66W!lNekJPom29LdZ%(I+ZLZYTXzTg*to~m?7vp%{V<~>H+2}PQ?PPAq`36R z<%wR8v6UkS>Wt#hzGk#44W<%9S=nBfB);6clKwnxY}T*w21Qc3_?IJ@4gYzC7s;WP zVQNI(M=S=JT#xsZy7G`cR(BP9*je0bfeN8JN5~zY(DDs0t{LpHOIbN);?T-69Pf3R zSNe*&p2%AwXHL>__g+xd4Hlc_vu<25H?(`nafS%)3UPP7_4;gk-9ckt8SJRTv5v0M z_Hww`qPudL?ajIR&X*;$y-`<)6dxx1U~5eGS13CB!lX;3w7n&lDDiArbAhSycd}+b zya_3p@A`$kQy;|NJZ~s44Hqo7Hwt}X86NK=(ey>lgWTtGL6k@Gy;PbO!M%1~Wcn2k zUFP|*5d>t-X*RU8g%>|(wwj*~#l4z^Aatf^DWd1Wj#Q*AY0D^V@sC`M zjJc6qXu0I7Y*2;;gGu!plAFzG=J;1%eIOdn zQA>J&e05UN*7I5@yRhK|lbBSfJ+5Uq;!&HV@xfPZrgD}kE*1DSq^=%{o%|LChhl#0 zlMb<^a6ixzpd{kNZr|3jTGeEzuo}-eLT-)Q$#b{!vKx8Tg}swCni>{#%vDY$Ww$84 zew3c9BBovqb}_&BRo#^!G(1Eg((BScRZ}C)Oz?y`T5wOrv);)b^4XR8 zhJo7+<^7)qB>I;46!GySzdneZ>n_E1oWZY;kf94#)s)kWjuJN1c+wbVoNQcmnv}{> zN0pF+Sl3E}UQ$}slSZeLJrwT>Sr}#V(dVaezCQl2|4LN`7L7v&siYR|r7M(*JYfR$ zst3=YaDw$FSc{g}KHO&QiKxuhEzF{f%RJLKe3p*7=oo`WNP)M(9X1zIQPP0XHhY3c znrP{$4#Ol$A0s|4S7Gx2L23dv*Gv2o;h((XVn+9+$qvm}s%zi6nI-_s6?mG! zj{DV;qesJb&owKeEK?=J>UcAlYckA7Sl+I&IN=yasrZOkejir*kE@SN`fk<8Fgx*$ zy&fE6?}G)d_N`){P~U@1jRVA|2*69)KSe_}!~?+`Yb{Y=O~_+@!j<&oVQQMnhoIRU zA0CyF1OFfkK44n*JD~!2!SCPM;PRSk%1XL=0&rz00wxPs&-_eapJy#$h!eqY%nS0{ z!aGg58JIJPF3_ci%n)QSVpa2H`vIe$RD43;#IRfDV&Ibit z+?>HW4{2wOfC6Fw)}4x}i1maDxcE1qi@BS*qcxD2gE@h3#4cgU*D-&3z7D|tVZWt= z-Cy2+*Cm@P4GN_TPUtaVyVesbVDazF@)j8VJ4>XZv!f%}&eO1SvIgr}4`A*3#vat< z_MoByL(qW6L7SFZ#|Gc1fFN)L2PxY+{B8tJp+pxRyz*87)vXR}*=&ahXjBlQKguuf zX6x<<6fQulE^C*KH8~W%ptpaC0l?b=_{~*U4?5Vt;dgM4t_{&UZ1C2j?b>b+5}{IF_CUyvz-@QZPMlJ)r_tS$9kH%RPv#2_nMb zRLj5;chJ72*U`Z@Dqt4$@_+k$%|8m(HqLG!qT4P^DdfvGf&){gKnGCX#H0!;W=AGP zbA&Z`-__a)VTS}kKFjWGk z%|>yE?t*EJ!qeQ%dPk$;xIQ+P0;()PCBDgjJm6Buj{f^awNoVx+9<|lg3%-$G(*f) zll6oOkN|yamn1uyl2*N-lnqRI1cvs_JxLTeahEK=THV$Sz*gQhKNb*p0fNoda#-&F zB-qJgW^g}!TtM|0bS2QZekW7_tKu%GcJ!4?lObt0z_$mZ4rbQ0o=^curCs3bJK6sq z9fu-aW-l#>z~ca(B;4yv;2RZ?tGYAU)^)Kz{L|4oPj zdOf_?de|#yS)p2v8-N||+XL=O*%3+y)oI(HbM)Ds?q8~HPzIP(vs*G`iddbWq}! 
z(2!VjP&{Z1w+%eUq^ '} - case $link in #( - /*) app_path=$link ;; #( - *) app_path=$APP_HOME$link ;; - esac -done - -APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit - -APP_NAME="Gradle" -APP_BASE_NAME=${0##*/} - -# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. -DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' - -# Use the maximum available, or set MAX_FD != -1 to use that value. -MAX_FD=maximum - -warn () { - echo "$*" -} >&2 - -die () { - echo - echo "$*" - echo - exit 1 -} >&2 - -# OS specific support (must be 'true' or 'false'). -cygwin=false -msys=false -darwin=false -nonstop=false -case "$( uname )" in #( - CYGWIN* ) cygwin=true ;; #( - Darwin* ) darwin=true ;; #( - MSYS* | MINGW* ) msys=true ;; #( - NONSTOP* ) nonstop=true ;; -esac - -CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar - - -# Determine the Java command to use to start the JVM. -if [ -n "$JAVA_HOME" ] ; then - if [ -x "$JAVA_HOME/jre/sh/java" ] ; then - # IBM's JDK on AIX uses strange locations for the executables - JAVACMD=$JAVA_HOME/jre/sh/java - else - JAVACMD=$JAVA_HOME/bin/java - fi - if [ ! -x "$JAVACMD" ] ; then - die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME - -Please set the JAVA_HOME variable in your environment to match the -location of your Java installation." - fi -else - JAVACMD=java - which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. - -Please set the JAVA_HOME variable in your environment to match the -location of your Java installation." -fi - -# Increase the maximum file descriptors if we can. -if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then - case $MAX_FD in #( - max*) - MAX_FD=$( ulimit -H -n ) || - warn "Could not query maximum file descriptor limit" - esac - case $MAX_FD in #( - '' | soft) :;; #( - *) - ulimit -n "$MAX_FD" || - warn "Could not set maximum file descriptor limit to $MAX_FD" - esac -fi - -# Collect all arguments for the java command, stacking in reverse order: -# * args from the command line -# * the main class name -# * -classpath -# * -D...appname settings -# * --module-path (only if needed) -# * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables. - -# For Cygwin or MSYS, switch paths to Windows format before running java -if "$cygwin" || "$msys" ; then - APP_HOME=$( cygpath --path --mixed "$APP_HOME" ) - CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" ) - - JAVACMD=$( cygpath --unix "$JAVACMD" ) - - # Now convert the arguments - kludge to limit ourselves to /bin/sh - for arg do - if - case $arg in #( - -*) false ;; # don't mess with options #( - /?*) t=${arg#/} t=/${t%%/*} # looks like a POSIX filepath - [ -e "$t" ] ;; #( - *) false ;; - esac - then - arg=$( cygpath --path --ignore --mixed "$arg" ) - fi - # Roll the args list around exactly as many times as the number of - # args, so each arg winds up back in the position where it started, but - # possibly modified. - # - # NB: a `for` loop captures its iteration list before it begins, so - # changing the positional parameters here affects neither the number of - # iterations, nor the values presented in `arg`. 
- shift # remove old arg - set -- "$@" "$arg" # push replacement arg - done -fi - -# Collect all arguments for the java command; -# * $DEFAULT_JVM_OPTS, $JAVA_OPTS, and $GRADLE_OPTS can contain fragments of -# shell script including quotes and variable substitutions, so put them in -# double quotes to make sure that they get re-expanded; and -# * put everything else in single quotes, so that it's not re-expanded. - -set -- \ - "-Dorg.gradle.appname=$APP_BASE_NAME" \ - -classpath "$CLASSPATH" \ - org.gradle.wrapper.GradleWrapperMain \ - "$@" - -# Stop when "xargs" is not available. -if ! command -v xargs >/dev/null 2>&1 -then - die "xargs is not available" -fi - -# Use "xargs" to parse quoted args. -# -# With -n1 it outputs one arg per line, with the quotes and backslashes removed. -# -# In Bash we could simply go: -# -# readarray ARGS < <( xargs -n1 <<<"$var" ) && -# set -- "${ARGS[@]}" "$@" -# -# but POSIX shell has neither arrays nor command substitution, so instead we -# post-process each arg (as a line of input to sed) to backslash-escape any -# character that might be a shell metacharacter, then use eval to reverse -# that process (while maintaining the separation between arguments), and wrap -# the whole thing up as a single "set" statement. -# -# This will of course break if any of these variables contains a newline or -# an unmatched quote. -# - -eval "set -- $( - printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" | - xargs -n1 | - sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' | - tr '\n' ' ' - )" '"$@"' - -exec "$JAVACMD" "$@" diff --git a/photonlib-java-examples/apriltagExample/gradlew.bat b/photonlib-java-examples/apriltagExample/gradlew.bat deleted file mode 100644 index f127cfd49d..0000000000 --- a/photonlib-java-examples/apriltagExample/gradlew.bat +++ /dev/null @@ -1,91 +0,0 @@ -@rem -@rem Copyright 2015 the original author or authors. -@rem -@rem Licensed under the Apache License, Version 2.0 (the "License"); -@rem you may not use this file except in compliance with the License. -@rem You may obtain a copy of the License at -@rem -@rem https://www.apache.org/licenses/LICENSE-2.0 -@rem -@rem Unless required by applicable law or agreed to in writing, software -@rem distributed under the License is distributed on an "AS IS" BASIS, -@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -@rem See the License for the specific language governing permissions and -@rem limitations under the License. -@rem - -@if "%DEBUG%"=="" @echo off -@rem ########################################################################## -@rem -@rem Gradle startup script for Windows -@rem -@rem ########################################################################## - -@rem Set local scope for the variables with windows NT shell -if "%OS%"=="Windows_NT" setlocal - -set DIRNAME=%~dp0 -if "%DIRNAME%"=="" set DIRNAME=. -set APP_BASE_NAME=%~n0 -set APP_HOME=%DIRNAME% - -@rem Resolve any "." and ".." in APP_HOME to make it shorter. -for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi - -@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. -set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m" - -@rem Find java.exe -if defined JAVA_HOME goto findJavaFromJavaHome - -set JAVA_EXE=java.exe -%JAVA_EXE% -version >NUL 2>&1 -if %ERRORLEVEL% equ 0 goto execute - -echo. -echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. -echo. 
-echo Please set the JAVA_HOME variable in your environment to match the -echo location of your Java installation. - -goto fail - -:findJavaFromJavaHome -set JAVA_HOME=%JAVA_HOME:"=% -set JAVA_EXE=%JAVA_HOME%/bin/java.exe - -if exist "%JAVA_EXE%" goto execute - -echo. -echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% -echo. -echo Please set the JAVA_HOME variable in your environment to match the -echo location of your Java installation. - -goto fail - -:execute -@rem Setup the command line - -set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar - - -@rem Execute Gradle -"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %* - -:end -@rem End local scope for the variables with windows NT shell -if %ERRORLEVEL% equ 0 goto mainEnd - -:fail -rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of -rem the _cmd.exe /c_ return code! -set EXIT_CODE=%ERRORLEVEL% -if %EXIT_CODE% equ 0 set EXIT_CODE=1 -if not ""=="%GRADLE_EXIT_CONSOLE%" exit %EXIT_CODE% -exit /b %EXIT_CODE% - -:mainEnd -if "%OS%"=="Windows_NT" endlocal - -:omega diff --git a/photonlib-java-examples/apriltagExample/settings.gradle b/photonlib-java-examples/apriltagExample/settings.gradle deleted file mode 100644 index 48c039ed86..0000000000 --- a/photonlib-java-examples/apriltagExample/settings.gradle +++ /dev/null @@ -1,27 +0,0 @@ -import org.gradle.internal.os.OperatingSystem - -pluginManagement { - repositories { - mavenLocal() - gradlePluginPortal() - String frcYear = '2023' - File frcHome - if (OperatingSystem.current().isWindows()) { - String publicFolder = System.getenv('PUBLIC') - if (publicFolder == null) { - publicFolder = "C:\\Users\\Public" - } - def homeRoot = new File(publicFolder, "wpilib") - frcHome = new File(homeRoot, frcYear) - } else { - def userFolder = System.getProperty("user.home") - def homeRoot = new File(userFolder, "wpilib") - frcHome = new File(homeRoot, frcYear) - } - def frcHomeMaven = new File(frcHome, 'maven') - maven { - name 'frcHome' - url frcHomeMaven - } - } -} diff --git a/photonlib-java-examples/apriltagExample/simgui-ds.json b/photonlib-java-examples/apriltagExample/simgui-ds.json deleted file mode 100644 index 25e298be7f..0000000000 --- a/photonlib-java-examples/apriltagExample/simgui-ds.json +++ /dev/null @@ -1,104 +0,0 @@ -{ - "Keyboard 0 Settings": { - "window": { - "visible": true - } - }, - "keyboardJoysticks": [ - { - "axisConfig": [ - {}, - { - "decKey": 87, - "incKey": 83 - }, - { - "decKey": 69, - "decayRate": 0.0, - "incKey": 82, - "keyRate": 0.009999999776482582 - }, - {}, - { - "decKey": 65, - "incKey": 68 - } - ], - "axisCount": 5, - "buttonCount": 4, - "buttonKeys": [ - 90, - 88, - 67, - 86 - ], - "povConfig": [ - { - "key0": 328, - "key135": 323, - "key180": 322, - "key225": 321, - "key270": 324, - "key315": 327, - "key45": 329, - "key90": 326 - } - ], - "povCount": 1 - }, - { - "axisConfig": [ - { - "decKey": 74, - "incKey": 76 - }, - { - "decKey": 73, - "incKey": 75 - } - ], - "axisCount": 2, - "buttonCount": 4, - "buttonKeys": [ - 77, - 44, - 46, - 47 - ], - "povCount": 0 - }, - { - "axisConfig": [ - { - "decKey": 263, - "incKey": 262 - }, - { - "decKey": 265, - "incKey": 264 - } - ], - "axisCount": 2, - "buttonCount": 6, - "buttonKeys": [ - 260, - 268, - 266, - 261, - 269, - 267 - ], - "povCount": 0 - }, - { - "axisCount": 0, - "buttonCount": 0, - "povCount": 0 - } - ], - "robotJoysticks": [ - { - "guid": "Keyboard0" 
- } - ] -} diff --git a/photonlib-java-examples/apriltagExample/simgui.json b/photonlib-java-examples/apriltagExample/simgui.json deleted file mode 100644 index 5c06de417f..0000000000 --- a/photonlib-java-examples/apriltagExample/simgui.json +++ /dev/null @@ -1,63 +0,0 @@ -{ - "HALProvider": { - "Other Devices": { - "AnalogGyro[0]": { - "header": { - "open": true - } - } - } - }, - "NTProvider": { - "types": { - "/FMSInfo": "FMSInfo", - "/LiveWindow/Ungrouped/AnalogGyro[0]": "Gyro", - "/LiveWindow/Ungrouped/MotorControllerGroup[1]": "Motor Controller", - "/LiveWindow/Ungrouped/MotorControllerGroup[2]": "Motor Controller", - "/LiveWindow/Ungrouped/PIDController[1]": "PIDController", - "/LiveWindow/Ungrouped/PIDController[2]": "PIDController", - "/SmartDashboard/Field": "Field2d", - "/SmartDashboard/VisionSystemSim-Test/Sim Field": "Field2d", - "/SmartDashboard/VisionSystemSim-YOUR CAMERA NAME/Sim Field": "Field2d" - }, - "windows": { - "/SmartDashboard/Field": { - "window": { - "visible": true - } - }, - "/SmartDashboard/VisionSystemSim-Test/Sim Field": { - "window": { - "visible": true - } - }, - "/SmartDashboard/VisionSystemSim-YOUR CAMERA NAME/Sim Field": { - "window": { - "visible": true - } - } - } - }, - "NetworkTables": { - "transitory": { - "LiveWindow": { - "open": true - }, - "photonvision": { - "OV9281": { - "open": true - }, - "USB_Camera": { - "open": true - }, - "YOUR CAMERA NAME": { - "open": true - }, - "open": true, - "testCamera": { - "open": true - } - } - } - } -} diff --git a/photonlib-java-examples/apriltagExample/src/main/deploy/example.txt b/photonlib-java-examples/apriltagExample/src/main/deploy/example.txt deleted file mode 100644 index d6ec5cf830..0000000000 --- a/photonlib-java-examples/apriltagExample/src/main/deploy/example.txt +++ /dev/null @@ -1,3 +0,0 @@ -Files placed in this directory will be deployed to the RoboRIO into the -'deploy' directory in the home folder. Use the 'Filesystem.getDeployDirectory' wpilib function -to get a proper path relative to the deploy directory. diff --git a/photonlib-java-examples/apriltagExample/src/main/java/frc/robot/Constants.java b/photonlib-java-examples/apriltagExample/src/main/java/frc/robot/Constants.java deleted file mode 100644 index b1a19249b4..0000000000 --- a/photonlib-java-examples/apriltagExample/src/main/java/frc/robot/Constants.java +++ /dev/null @@ -1,57 +0,0 @@ -/* - * MIT License - * - * Copyright (c) PhotonVision - * - * Permission is hereby granted, free of charge, to any person obtaining a copy - * of this software and associated documentation files (the "Software"), to deal - * in the Software without restriction, including without limitation the rights - * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - * copies of the Software, and to permit persons to whom the Software is - * furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in all - * copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE - * SOFTWARE. 
- */ - -package frc.robot; - -import edu.wpi.first.math.geometry.Rotation3d; -import edu.wpi.first.math.geometry.Transform3d; -import edu.wpi.first.math.geometry.Translation3d; -import edu.wpi.first.math.util.Units; - -public class Constants { - static class DriveTrainConstants { - static final double kMaxSpeed = 3.0; // meters per second - static final double kMaxAngularSpeed = 2 * Math.PI; // one rotation per second - static final double kTrackWidth = 0.381 * 2; // meters - static final double kWheelRadius = 0.0508; // meters - static final int kEncoderResolution = 4096; - static final double distancePerPulse = 2 * Math.PI * kWheelRadius / (double) kEncoderResolution; - } - - static class FieldConstants { - static final double length = Units.feetToMeters(54); - static final double width = Units.feetToMeters(27); - } - - static class VisionConstants { - static final Transform3d robotToCam = - new Transform3d( - new Translation3d(0.5, 0.0, 0.5), - new Rotation3d( - 0, 0, - 0)); // Cam mounted facing forward, half a meter forward of center, half a meter up - // from center. - static final String cameraName = "YOUR CAMERA NAME"; - } -} diff --git a/photonlib-java-examples/apriltagExample/src/main/java/frc/robot/Drivetrain.java b/photonlib-java-examples/apriltagExample/src/main/java/frc/robot/Drivetrain.java deleted file mode 100644 index dba9ee3bca..0000000000 --- a/photonlib-java-examples/apriltagExample/src/main/java/frc/robot/Drivetrain.java +++ /dev/null @@ -1,219 +0,0 @@ -/* - * MIT License - * - * Copyright (c) PhotonVision - * - * Permission is hereby granted, free of charge, to any person obtaining a copy - * of this software and associated documentation files (the "Software"), to deal - * in the Software without restriction, including without limitation the rights - * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - * copies of the Software, and to permit persons to whom the Software is - * furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in all - * copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE - * SOFTWARE. 
- */ - -package frc.robot; - -import edu.wpi.first.math.controller.PIDController; -import edu.wpi.first.math.controller.SimpleMotorFeedforward; -import edu.wpi.first.math.estimator.DifferentialDrivePoseEstimator; -import edu.wpi.first.math.geometry.Pose2d; -import edu.wpi.first.math.geometry.Rotation2d; -import edu.wpi.first.math.kinematics.ChassisSpeeds; -import edu.wpi.first.math.kinematics.DifferentialDriveKinematics; -import edu.wpi.first.math.kinematics.DifferentialDriveWheelSpeeds; -import edu.wpi.first.math.numbers.N2; -import edu.wpi.first.math.system.LinearSystem; -import edu.wpi.first.math.system.plant.DCMotor; -import edu.wpi.first.math.system.plant.LinearSystemId; -import edu.wpi.first.wpilibj.AnalogGyro; -import edu.wpi.first.wpilibj.Encoder; -import edu.wpi.first.wpilibj.RobotBase; -import edu.wpi.first.wpilibj.RobotController; -import edu.wpi.first.wpilibj.motorcontrol.MotorController; -import edu.wpi.first.wpilibj.motorcontrol.MotorControllerGroup; -import edu.wpi.first.wpilibj.motorcontrol.PWMSparkMax; -import edu.wpi.first.wpilibj.simulation.AnalogGyroSim; -import edu.wpi.first.wpilibj.simulation.DifferentialDrivetrainSim; -import edu.wpi.first.wpilibj.simulation.EncoderSim; -import edu.wpi.first.wpilibj.smartdashboard.Field2d; -import edu.wpi.first.wpilibj.smartdashboard.SmartDashboard; -import frc.robot.Constants.DriveTrainConstants; -import java.util.Optional; -import org.photonvision.EstimatedRobotPose; -import org.photonvision.simulation.PhotonCameraSim; -import org.photonvision.simulation.SimCameraProperties; -import org.photonvision.simulation.VisionSystemSim; - -/** Represents a differential drive style drivetrain. */ -public class Drivetrain { - private final MotorController m_leftLeader = new PWMSparkMax(1); - private final MotorController m_leftFollower = new PWMSparkMax(2); - private final MotorController m_rightLeader = new PWMSparkMax(3); - private final MotorController m_rightFollower = new PWMSparkMax(4); - - private final Encoder m_leftEncoder = new Encoder(0, 1); - private final Encoder m_rightEncoder = new Encoder(2, 3); - - private final MotorControllerGroup m_leftGroup = - new MotorControllerGroup(m_leftLeader, m_leftFollower); - private final MotorControllerGroup m_rightGroup = - new MotorControllerGroup(m_rightLeader, m_rightFollower); - - private final AnalogGyro m_gyro = new AnalogGyro(0); - - private final PIDController m_leftPIDController = new PIDController(1, 0, 0); - private final PIDController m_rightPIDController = new PIDController(1, 0, 0); - - private final DifferentialDriveKinematics m_kinematics = - new DifferentialDriveKinematics(Constants.DriveTrainConstants.kTrackWidth); - - public PhotonCameraWrapper pcw; - - /* - * Here we use DifferentialDrivePoseEstimator so that we can fuse odometry - * readings. The - * numbers used below are robot specific, and should be tuned. - */ - private final DifferentialDrivePoseEstimator m_poseEstimator = - new DifferentialDrivePoseEstimator( - m_kinematics, m_gyro.getRotation2d(), 0.0, 0.0, new Pose2d()); - - // Gains are for example purposes only - must be determined for your own robot! 
- private final SimpleMotorFeedforward m_feedforward = new SimpleMotorFeedforward(1, 3); - - // Simulation classes help us simulate our robot - private final AnalogGyroSim m_gyroSim = new AnalogGyroSim(m_gyro); - private final EncoderSim m_leftEncoderSim = new EncoderSim(m_leftEncoder); - private final EncoderSim m_rightEncoderSim = new EncoderSim(m_rightEncoder); - private final Field2d m_fieldSim = new Field2d(); - private final LinearSystem m_drivetrainSystem = - LinearSystemId.identifyDrivetrainSystem(1.98, 0.2, 1.5, 0.8); - private final DifferentialDrivetrainSim m_drivetrainSimulator = - new DifferentialDrivetrainSim( - m_drivetrainSystem, - DCMotor.getCIM(2), - 8, - DriveTrainConstants.kTrackWidth, - DriveTrainConstants.kWheelRadius, - null); - - // Simulated PhotonCamera -- only used in sim! - VisionSystemSim m_visionSystemSim; - - /** - * Constructs a differential drive object. Sets the encoder distance per pulse and resets the - * gyro. - */ - public Drivetrain() { - pcw = new PhotonCameraWrapper(); - - m_gyro.reset(); - - // We need to invert one side of the drivetrain so that positive voltages - // result in both sides moving forward. Depending on how your robot's - // gearbox is constructed, you might have to invert the left side instead. - m_rightGroup.setInverted(true); - - // Set the distance per pulse for the drive encoders. We can simply use the - // distance traveled for one rotation of the wheel divided by the encoder - // resolution. - m_leftEncoder.setDistancePerPulse(DriveTrainConstants.distancePerPulse); - m_rightEncoder.setDistancePerPulse(DriveTrainConstants.distancePerPulse); - - m_leftEncoder.reset(); - m_rightEncoder.reset(); - - SmartDashboard.putData("Field", m_fieldSim); - - // Only simulate our PhotonCamera in simulation - if (RobotBase.isSimulation()) { - m_visionSystemSim = new VisionSystemSim(Constants.VisionConstants.cameraName); - var cameraSim = - new PhotonCameraSim(pcw.photonCamera, SimCameraProperties.PI4_LIFECAM_640_480()); - m_visionSystemSim.addCamera(cameraSim, Constants.VisionConstants.robotToCam); - m_visionSystemSim.addAprilTags(pcw.photonPoseEstimator.getFieldTags()); - } - } - - /** - * Sets the desired wheel speeds. - * - * @param speeds The desired wheel speeds. - */ - public void setSpeeds(DifferentialDriveWheelSpeeds speeds) { - final double leftFeedforward = m_feedforward.calculate(speeds.leftMetersPerSecond); - final double rightFeedforward = m_feedforward.calculate(speeds.rightMetersPerSecond); - - final double leftOutput = - m_leftPIDController.calculate(m_leftEncoder.getRate(), speeds.leftMetersPerSecond); - final double rightOutput = - m_rightPIDController.calculate(m_rightEncoder.getRate(), speeds.rightMetersPerSecond); - m_leftGroup.setVoltage(leftOutput + leftFeedforward); - m_rightGroup.setVoltage(rightOutput + rightFeedforward); - } - - /** - * Drives the robot with the given linear velocity and angular velocity. - * - * @param xSpeed Linear velocity in m/s. - * @param rot Angular velocity in rad/s. - */ - public void drive(double xSpeed, double rot) { - var wheelSpeeds = m_kinematics.toWheelSpeeds(new ChassisSpeeds(xSpeed, 0.0, rot)); - setSpeeds(wheelSpeeds); - } - - /** Update our simulation. This should be run every robot loop in simulation. */ - public void simulationPeriodic() { - // To update our simulation, we set motor voltage inputs, update the - // simulation, and write the simulated positions and velocities to our - // simulated encoder and gyro. 
We negate the right side so that positive - // voltages make the right side move forward. - m_drivetrainSimulator.setInputs( - m_leftGroup.get() * RobotController.getInputVoltage(), - m_rightGroup.get() * RobotController.getInputVoltage()); - m_drivetrainSimulator.update(0.02); - - m_leftEncoderSim.setDistance(m_drivetrainSimulator.getLeftPositionMeters()); - m_leftEncoderSim.setRate(m_drivetrainSimulator.getLeftVelocityMetersPerSecond()); - m_rightEncoderSim.setDistance(m_drivetrainSimulator.getRightPositionMeters()); - m_rightEncoderSim.setRate(m_drivetrainSimulator.getRightVelocityMetersPerSecond()); - m_gyroSim.setAngle(-m_drivetrainSimulator.getHeading().getDegrees()); - - // Update results from vision as well - m_visionSystemSim.update(m_drivetrainSimulator.getPose()); - } - - /** Updates the field-relative position. */ - public void updateOdometry() { - m_poseEstimator.update( - m_gyro.getRotation2d(), m_leftEncoder.getDistance(), m_rightEncoder.getDistance()); - - Optional result = - pcw.getEstimatedGlobalPose(m_poseEstimator.getEstimatedPosition()); - - if (result.isPresent()) { - EstimatedRobotPose camPose = result.get(); - m_poseEstimator.addVisionMeasurement( - camPose.estimatedPose.toPose2d(), camPose.timestampSeconds); - m_fieldSim.getObject("Cam Est Pos").setPose(camPose.estimatedPose.toPose2d()); - } else { - // move it way off the screen to make it disappear - m_fieldSim.getObject("Cam Est Pos").setPose(new Pose2d(-100, -100, new Rotation2d())); - } - - m_fieldSim.getObject("Actual Pos").setPose(m_drivetrainSimulator.getPose()); - m_fieldSim.setRobotPose(m_poseEstimator.getEstimatedPosition()); - } -} diff --git a/photonlib-java-examples/apriltagExample/src/main/java/frc/robot/Main.java b/photonlib-java-examples/apriltagExample/src/main/java/frc/robot/Main.java deleted file mode 100644 index 077f979f97..0000000000 --- a/photonlib-java-examples/apriltagExample/src/main/java/frc/robot/Main.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * MIT License - * - * Copyright (c) PhotonVision - * - * Permission is hereby granted, free of charge, to any person obtaining a copy - * of this software and associated documentation files (the "Software"), to deal - * in the Software without restriction, including without limitation the rights - * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - * copies of the Software, and to permit persons to whom the Software is - * furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in all - * copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE - * SOFTWARE. - */ - -package frc.robot; - -import edu.wpi.first.wpilibj.RobotBase; - -/** - * Do NOT add any static variables to this class, or any initialization at all. Unless you know what - * you are doing, do not modify this file except to change the parameter class to the startRobot - * call. - */ -public final class Main { - private Main() {} - - /** - * Main initialization function. Do not perform any initialization here. - * - *
If you change your main robot class, change the parameter type. - */ - public static void main(String... args) { - RobotBase.startRobot(Robot::new); - } -} diff --git a/photonlib-java-examples/apriltagExample/src/main/java/frc/robot/PhotonCameraWrapper.java b/photonlib-java-examples/apriltagExample/src/main/java/frc/robot/PhotonCameraWrapper.java deleted file mode 100644 index d46957886c..0000000000 --- a/photonlib-java-examples/apriltagExample/src/main/java/frc/robot/PhotonCameraWrapper.java +++ /dev/null @@ -1,76 +0,0 @@ -/* - * MIT License - * - * Copyright (c) PhotonVision - * - * Permission is hereby granted, free of charge, to any person obtaining a copy - * of this software and associated documentation files (the "Software"), to deal - * in the Software without restriction, including without limitation the rights - * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - * copies of the Software, and to permit persons to whom the Software is - * furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in all - * copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE - * SOFTWARE. - */ - -package frc.robot; - -import edu.wpi.first.apriltag.AprilTagFieldLayout; -import edu.wpi.first.apriltag.AprilTagFields; -import edu.wpi.first.math.geometry.Pose2d; -import edu.wpi.first.wpilibj.DriverStation; -import frc.robot.Constants.VisionConstants; -import java.io.IOException; -import java.util.Optional; -import org.photonvision.EstimatedRobotPose; -import org.photonvision.PhotonCamera; -import org.photonvision.PhotonPoseEstimator; -import org.photonvision.PhotonPoseEstimator.PoseStrategy; - -public class PhotonCameraWrapper { - PhotonCamera photonCamera; - PhotonPoseEstimator photonPoseEstimator; - - public PhotonCameraWrapper() { - // Change the name of your camera here to whatever it is in the PhotonVision UI. - photonCamera = new PhotonCamera(VisionConstants.cameraName); - - try { - // Attempt to load the AprilTagFieldLayout that will tell us where the tags are on the field. - AprilTagFieldLayout fieldLayout = AprilTagFields.k2023ChargedUp.loadAprilTagLayoutField(); - // Create pose estimator - photonPoseEstimator = - new PhotonPoseEstimator( - fieldLayout, PoseStrategy.MULTI_TAG_PNP, photonCamera, VisionConstants.robotToCam); - photonPoseEstimator.setMultiTagFallbackStrategy(PoseStrategy.LOWEST_AMBIGUITY); - } catch (IOException e) { - // The AprilTagFieldLayout failed to load. We won't be able to estimate poses if we don't know - // where the tags are. 
- DriverStation.reportError("Failed to load AprilTagFieldLayout", e.getStackTrace()); - photonPoseEstimator = null; - } - } - - /** - * @param estimatedRobotPose The current best guess at robot pose - * @return an EstimatedRobotPose with an estimated pose, the timestamp, and targets used to create - * the estimate - */ - public Optional getEstimatedGlobalPose(Pose2d prevEstimatedRobotPose) { - if (photonPoseEstimator == null) { - // The field layout failed to load, so we cannot estimate poses. - return Optional.empty(); - } - photonPoseEstimator.setReferencePose(prevEstimatedRobotPose); - return photonPoseEstimator.update(); - } -} diff --git a/photonlib-java-examples/apriltagExample/src/main/java/frc/robot/Robot.java b/photonlib-java-examples/apriltagExample/src/main/java/frc/robot/Robot.java deleted file mode 100644 index 78f0f50ab9..0000000000 --- a/photonlib-java-examples/apriltagExample/src/main/java/frc/robot/Robot.java +++ /dev/null @@ -1,101 +0,0 @@ -/* - * MIT License - * - * Copyright (c) PhotonVision - * - * Permission is hereby granted, free of charge, to any person obtaining a copy - * of this software and associated documentation files (the "Software"), to deal - * in the Software without restriction, including without limitation the rights - * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - * copies of the Software, and to permit persons to whom the Software is - * furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in all - * copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE - * SOFTWARE. - */ - -package frc.robot; - -import edu.wpi.first.math.filter.SlewRateLimiter; -import edu.wpi.first.networktables.NetworkTableInstance; -import edu.wpi.first.wpilibj.TimedRobot; -import edu.wpi.first.wpilibj.XboxController; -import frc.robot.Constants.DriveTrainConstants; - -public class Robot extends TimedRobot { - private XboxController m_controller; - private Drivetrain m_drive; - - // Slew rate limiters to make joystick inputs more gentle; 1/3 sec from 0 to 1. - private final SlewRateLimiter m_speedLimiter = new SlewRateLimiter(3); - private final SlewRateLimiter m_rotLimiter = new SlewRateLimiter(3); - - @Override - public void robotInit() { - if (Robot.isSimulation()) { - NetworkTableInstance instance = NetworkTableInstance.getDefault(); - - // We have to have Photon running and set to NT server mode for it to connect - // to our computer instead of to a roboRIO. - instance.stopServer(); - // set the NT server if simulating this code. - // "localhost" for photon on desktop, or "photonvision.local" / "[ip-address]" for coprocessor - instance.setServer("localhost"); - instance.startClient4("Robot Simulation"); - } - - m_controller = new XboxController(0); - m_drive = new Drivetrain(); - - // Flush NetworkTables every loop. This ensures that robot pose and other values - // are sent during every iteration. This only applies to local NT connections! 
- setNetworkTablesFlushEnabled(true); - } - - @Override - public void robotPeriodic() { - m_drive.updateOdometry(); - } - - @Override - public void autonomousInit() {} - - @Override - public void autonomousPeriodic() {} - - @Override - public void teleopPeriodic() { - // Get the x speed. We are inverting this because Xbox controllers return - // negative values when we push forward. - var joyY = m_controller.getLeftY(); - if (Math.abs(joyY) < 0.075) { - joyY = 0; - } - final var xSpeed = -m_speedLimiter.calculate(joyY) * DriveTrainConstants.kMaxSpeed; - - // Get the rate of angular rotation. We are inverting this because we want a - // positive value when we pull to the left (remember, CCW is positive in - // mathematics). Xbox controllers return positive values when you pull to - // the right by default. - var joyX = m_controller.getRightX(); - if (Math.abs(joyX) < 0.075) { - joyX = 0; - } - final var rot = -m_rotLimiter.calculate(joyX) * DriveTrainConstants.kMaxAngularSpeed; - - m_drive.drive(xSpeed, rot); - } - - @Override - public void simulationPeriodic() { - m_drive.simulationPeriodic(); - } -} diff --git a/photonlib-java-examples/simposeest/.gitignore b/photonlib-java-examples/simposeest/.gitignore deleted file mode 100644 index 9535c83037..0000000000 --- a/photonlib-java-examples/simposeest/.gitignore +++ /dev/null @@ -1,162 +0,0 @@ -# This gitignore has been specially created by the WPILib team. -# If you remove items from this file, intellisense might break. - -### C++ ### -# Prerequisites -*.d - -# Compiled Object files -*.slo -*.lo -*.o -*.obj - -# Precompiled Headers -*.gch -*.pch - -# Compiled Dynamic libraries -*.so -*.dylib -*.dll - -# Fortran module files -*.mod -*.smod - -# Compiled Static libraries -*.lai -*.la -*.a -*.lib - -# Executables -*.exe -*.out -*.app - -### Java ### -# Compiled class file -*.class - -# Log file -*.log - -# BlueJ files -*.ctxt - -# Mobile Tools for Java (J2ME) -.mtj.tmp/ - -# Package Files # -*.jar -*.war -*.nar -*.ear -*.zip -*.tar.gz -*.rar - -# virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml -hs_err_pid* - -### Linux ### -*~ - -# temporary files which can be created if a process still has a handle open of a deleted file -.fuse_hidden* - -# KDE directory preferences -.directory - -# Linux trash folder which might appear on any partition or disk -.Trash-* - -# .nfs files are created when an open file is removed but is still being accessed -.nfs* - -### macOS ### -# General -.DS_Store -.AppleDouble -.LSOverride - -# Icon must end with two \r -Icon - -# Thumbnails -._* - -# Files that might appear in the root of a volume -.DocumentRevisions-V100 -.fseventsd -.Spotlight-V100 -.TemporaryItems -.Trashes -.VolumeIcon.icns -.com.apple.timemachine.donotpresent - -# Directories potentially created on remote AFP share -.AppleDB -.AppleDesktop -Network Trash Folder -Temporary Items -.apdisk - -### VisualStudioCode ### -.vscode/* -!.vscode/settings.json -!.vscode/tasks.json -!.vscode/launch.json -!.vscode/extensions.json - -### Windows ### -# Windows thumbnail cache files -Thumbs.db -ehthumbs.db -ehthumbs_vista.db - -# Dump file -*.stackdump - -# Folder config file -[Dd]esktop.ini - -# Recycle Bin used on file shares -$RECYCLE.BIN/ - -# Windows Installer files -*.cab -*.msi -*.msix -*.msm -*.msp - -# Windows shortcuts -*.lnk - -### Gradle ### -.gradle -/build/ - -# Ignore Gradle GUI config -gradle-app.setting - -# Avoid ignoring Gradle wrapper jar file (.jar files are usually ignored) -!gradle-wrapper.jar - -# Cache 
of project -.gradletasknamecache - -# # Work around https://youtrack.jetbrains.com/issue/IDEA-116898 -# gradle/wrapper/gradle-wrapper.properties - -# # VS Code Specific Java Settings -# DO NOT REMOVE .classpath and .project -.classpath -.project -.settings/ -bin/ - -# Simulation GUI and other tools window save file -*-window.json diff --git a/photonlib-java-examples/simposeest/.wpilib/wpilib_preferences.json b/photonlib-java-examples/simposeest/.wpilib/wpilib_preferences.json deleted file mode 100644 index c63569f160..0000000000 --- a/photonlib-java-examples/simposeest/.wpilib/wpilib_preferences.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "enableCppIntellisense": false, - "currentLanguage": "java", - "projectYear": "2023", - "teamNumber": 6995 -} diff --git a/photonlib-java-examples/simposeest/WPILib-License.md b/photonlib-java-examples/simposeest/WPILib-License.md deleted file mode 100644 index 3d5a824cad..0000000000 --- a/photonlib-java-examples/simposeest/WPILib-License.md +++ /dev/null @@ -1,24 +0,0 @@ -Copyright (c) 2009-2021 FIRST and other WPILib contributors -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - * Neither the name of FIRST, WPILib, nor the names of other WPILib - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY FIRST AND OTHER WPILIB CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY NONINFRINGEMENT AND FITNESS FOR A PARTICULAR -PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL FIRST OR CONTRIBUTORS BE LIABLE FOR -ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/photonlib-java-examples/simposeest/build.gradle b/photonlib-java-examples/simposeest/build.gradle deleted file mode 100644 index fa1a30798c..0000000000 --- a/photonlib-java-examples/simposeest/build.gradle +++ /dev/null @@ -1,94 +0,0 @@ -plugins { - id "java" - id "edu.wpi.first.GradleRIO" version "2023.4.2" -} - -sourceCompatibility = JavaVersion.VERSION_11 -targetCompatibility = JavaVersion.VERSION_11 - -apply from: "${rootDir}/../shared/examples_common.gradle" - -def ROBOT_MAIN_CLASS = "frc.robot.Main" - -// Define my targets (RoboRIO) and artifacts (deployable files) -// This is added by GradleRIO's backing project DeployUtils. -deploy { - targets { - roborio(getTargetTypeClass('RoboRIO')) { - // Team number is loaded either from the .wpilib/wpilib_preferences.json - // or from command line. If not found an exception will be thrown. - // You can use getTeamOrDefault(team) instead of getTeamNumber if you - // want to store a team number in this file. 
- team = project.frc.getTeamOrDefault(5940) - debug = project.frc.getDebugOrDefault(false) - - artifacts { - // First part is artifact name, 2nd is artifact type - // getTargetTypeClass is a shortcut to get the class type using a string - - frcJava(getArtifactTypeClass('FRCJavaArtifact')) { - } - - // Static files artifact - frcStaticFileDeploy(getArtifactTypeClass('FileTreeArtifact')) { - files = project.fileTree('src/main/deploy') - directory = '/home/lvuser/deploy' - } - } - } - } -} - -def deployArtifact = deploy.targets.roborio.artifacts.frcJava - -// Set to true to use debug for JNI. -wpi.java.debugJni = false - -// Set this to true to enable desktop support. -def includeDesktopSupport = true - -// Defining my dependencies. In this case, WPILib (+ friends), and vendor libraries. -// Also defines JUnit 4. -dependencies { - implementation wpi.java.deps.wpilib() - implementation wpi.java.vendor.java() - - roborioDebug wpi.java.deps.wpilibJniDebug(wpi.platforms.roborio) - roborioDebug wpi.java.vendor.jniDebug(wpi.platforms.roborio) - - roborioRelease wpi.java.deps.wpilibJniRelease(wpi.platforms.roborio) - roborioRelease wpi.java.vendor.jniRelease(wpi.platforms.roborio) - - nativeDebug wpi.java.deps.wpilibJniDebug(wpi.platforms.desktop) - nativeDebug wpi.java.vendor.jniDebug(wpi.platforms.desktop) - simulationDebug wpi.sim.enableDebug() - - nativeRelease wpi.java.deps.wpilibJniRelease(wpi.platforms.desktop) - nativeRelease wpi.java.vendor.jniRelease(wpi.platforms.desktop) - simulationRelease wpi.sim.enableRelease() - - testImplementation 'junit:junit:4.13.1' -} - -// Simulation configuration (e.g. environment variables). -wpi.sim.addGui().defaultEnabled = true -wpi.sim.addDriverstation() - -// Setting up my Jar File. In this case, adding all libraries into the main jar ('fat jar') -// in order to make them all available at runtime. Also adding the manifest so WPILib -// knows where to look for our Robot Class. -jar { - from { configurations.runtimeClasspath.collect { it.isDirectory() ? 
it : zipTree(it) } } - manifest edu.wpi.first.gradlerio.GradleRIOPlugin.javaManifest(ROBOT_MAIN_CLASS) - duplicatesStrategy = DuplicatesStrategy.INCLUDE -} - -// Configure jar and deploy tasks -deployArtifact.jarTask = jar -wpi.java.configureExecutableTasks(jar) -wpi.java.configureTestTasks(test) - -// Configure string concat to always inline compile -tasks.withType(JavaCompile) { - options.compilerArgs.add '-XDstringConcat=inline' -} diff --git a/photonlib-java-examples/simposeest/settings.gradle b/photonlib-java-examples/simposeest/settings.gradle deleted file mode 100644 index bf7fd8a7d3..0000000000 --- a/photonlib-java-examples/simposeest/settings.gradle +++ /dev/null @@ -1,29 +0,0 @@ -import org.gradle.internal.os.OperatingSystem - -rootProject.name = 'simposeest' - -pluginManagement { - repositories { - mavenLocal() - gradlePluginPortal() - String frcYear = '2023' - File frcHome - if (OperatingSystem.current().isWindows()) { - String publicFolder = System.getenv('PUBLIC') - if (publicFolder == null) { - publicFolder = "C:\\Users\\Public" - } - def homeRoot = new File(publicFolder, "wpilib") - frcHome = new File(homeRoot, frcYear) - } else { - def userFolder = System.getProperty("user.home") - def homeRoot = new File(userFolder, "wpilib") - frcHome = new File(homeRoot, frcYear) - } - def frcHomeMaven = new File(frcHome, 'maven') - maven { - name 'frcHome' - url frcHomeMaven - } - } -} diff --git a/photonlib-java-examples/simposeest/simgui-ds.json b/photonlib-java-examples/simposeest/simgui-ds.json deleted file mode 100644 index 2bf389ee40..0000000000 --- a/photonlib-java-examples/simposeest/simgui-ds.json +++ /dev/null @@ -1,102 +0,0 @@ -{ - "Keyboard 0 Settings": { - "window": { - "visible": true - } - }, - "keyboardJoysticks": [ - { - "axisConfig": [ - { - "decKey": 65, - "incKey": 68 - }, - { - "decKey": 87, - "incKey": 83 - }, - { - "decKey": 69, - "decayRate": 0.0, - "incKey": 82, - "keyRate": 0.009999999776482582 - } - ], - "axisCount": 3, - "buttonCount": 4, - "buttonKeys": [ - 90, - 88, - 67, - 86 - ], - "povConfig": [ - { - "key0": 328, - "key135": 323, - "key180": 322, - "key225": 321, - "key270": 324, - "key315": 327, - "key45": 329, - "key90": 326 - } - ], - "povCount": 1 - }, - { - "axisConfig": [ - { - "decKey": 74, - "incKey": 76 - }, - { - "decKey": 73, - "incKey": 75 - } - ], - "axisCount": 2, - "buttonCount": 4, - "buttonKeys": [ - 77, - 44, - 46, - 47 - ], - "povCount": 0 - }, - { - "axisConfig": [ - { - "decKey": 263, - "incKey": 262 - }, - { - "decKey": 265, - "incKey": 264 - } - ], - "axisCount": 2, - "buttonCount": 6, - "buttonKeys": [ - 260, - 268, - 266, - 261, - 269, - 267 - ], - "povCount": 0 - }, - { - "axisCount": 0, - "buttonCount": 0, - "povCount": 0 - } - ], - "robotJoysticks": [ - { - "guid": "Keyboard0" - } - ] -} diff --git a/photonlib-java-examples/simposeest/simgui.json b/photonlib-java-examples/simposeest/simgui.json deleted file mode 100644 index a31aae4fae..0000000000 --- a/photonlib-java-examples/simposeest/simgui.json +++ /dev/null @@ -1,57 +0,0 @@ -{ - "HALProvider": { - "Other Devices": { - "AnalogGyro[0]": { - "header": { - "open": true - } - } - } - }, - "NTProvider": { - "types": { - "/FMSInfo": "FMSInfo", - "/SmartDashboard/Field": "Field2d", - "/SmartDashboard/mainCam Sim Field": "Field2d" - }, - "windows": { - "/SmartDashboard/Field": { - "window": { - "visible": true - } - }, - "/SmartDashboard/mainCam Sim Field": { - "window": { - "visible": true - } - } - } - }, - "NetworkTables": { - "transitory": { - "SmartDashboard": { - "Field": 
{ - "open": true - }, - "open": true - } - } - }, - "NetworkTables Info": { - "Clients": { - "open": true - }, - "Connections": { - "open": true - }, - "Server": { - "Publishers": { - "open": true - }, - "Subscribers": { - "open": true - }, - "open": true - } - } -} diff --git a/photonlib-java-examples/simposeest/src/main/deploy/example.txt b/photonlib-java-examples/simposeest/src/main/deploy/example.txt deleted file mode 100644 index d6ec5cf830..0000000000 --- a/photonlib-java-examples/simposeest/src/main/deploy/example.txt +++ /dev/null @@ -1,3 +0,0 @@ -Files placed in this directory will be deployed to the RoboRIO into the -'deploy' directory in the home folder. Use the 'Filesystem.getDeployDirectory' wpilib function -to get a proper path relative to the deploy directory. diff --git a/photonlib-java-examples/simposeest/src/main/java/frc/robot/AutoController.java b/photonlib-java-examples/simposeest/src/main/java/frc/robot/AutoController.java deleted file mode 100644 index bb0ee5a9b4..0000000000 --- a/photonlib-java-examples/simposeest/src/main/java/frc/robot/AutoController.java +++ /dev/null @@ -1,110 +0,0 @@ -/* - * MIT License - * - * Copyright (c) PhotonVision - * - * Permission is hereby granted, free of charge, to any person obtaining a copy - * of this software and associated documentation files (the "Software"), to deal - * in the Software without restriction, including without limitation the rights - * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - * copies of the Software, and to permit persons to whom the Software is - * furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in all - * copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE - * SOFTWARE. - */ - -package frc.robot; - -import edu.wpi.first.math.controller.RamseteController; -import edu.wpi.first.math.geometry.Pose2d; -import edu.wpi.first.math.geometry.Rotation2d; -import edu.wpi.first.math.kinematics.ChassisSpeeds; -import edu.wpi.first.math.trajectory.Trajectory; -import edu.wpi.first.math.trajectory.TrajectoryConfig; -import edu.wpi.first.math.trajectory.TrajectoryGenerator; -import edu.wpi.first.wpilibj.Timer; -import java.util.List; - -/** - * Implements logic to convert a set of desired waypoints (ie, a trajectory) and the current - * estimate of where the robot is at (ie, the estimated Pose) into motion commands for a drivetrain. - * The Ramaste controller is used to smoothly move the robot from where it thinks it is to where it - * thinks it ought to be. - */ -public class AutoController { - private Trajectory trajectory; - - private RamseteController ramsete = new RamseteController(); - - private Timer timer = new Timer(); - - boolean isRunning = false; - - Trajectory.State desiredDtState; - - public AutoController() { - // Change this trajectory if you need the robot to follow different paths. 
- trajectory = - TrajectoryGenerator.generateTrajectory( - new Pose2d(2, 2, new Rotation2d()), - List.of(), - new Pose2d(6, 4, new Rotation2d()), - new TrajectoryConfig(2, 2)); - } - - /** - * @return The starting (initial) pose of the currently-active trajectory - */ - public Pose2d getInitialPose() { - return trajectory.getInitialPose(); - } - - /** Starts the controller running. Call this at the start of autonomous */ - public void startPath() { - timer.reset(); - timer.start(); - isRunning = true; - } - - /** Stops the controller from generating commands */ - public void stopPath() { - isRunning = false; - timer.reset(); - } - - /** - * Given the current estimate of the robot's position, calculate drivetrain speed commands which - * will best-execute the active trajectory. Be sure to call `startPath()` prior to calling this - * method. - * - * @param curEstPose Current estimate of drivetrain pose on the field - * @return The commanded drivetrain motion - */ - public ChassisSpeeds getCurMotorCmds(Pose2d curEstPose) { - if (isRunning) { - double elapsed = timer.get(); - desiredDtState = trajectory.sample(elapsed); - } else { - desiredDtState = new Trajectory.State(); - } - - return ramsete.calculate(curEstPose, desiredDtState); - } - - /** - * @return The position which the auto controller is attempting to move the drivetrain to right - * now. - */ - public Pose2d getCurPose2d() { - return desiredDtState.poseMeters; - } -} diff --git a/photonlib-java-examples/simposeest/src/main/java/frc/robot/Constants.java b/photonlib-java-examples/simposeest/src/main/java/frc/robot/Constants.java deleted file mode 100644 index e5e8ca9caa..0000000000 --- a/photonlib-java-examples/simposeest/src/main/java/frc/robot/Constants.java +++ /dev/null @@ -1,110 +0,0 @@ -/* - * MIT License - * - * Copyright (c) PhotonVision - * - * Permission is hereby granted, free of charge, to any person obtaining a copy - * of this software and associated documentation files (the "Software"), to deal - * in the Software without restriction, including without limitation the rights - * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - * copies of the Software, and to permit persons to whom the Software is - * furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in all - * copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE - * SOFTWARE. - */ - -package frc.robot; - -import edu.wpi.first.math.geometry.Pose3d; -import edu.wpi.first.math.geometry.Rotation3d; -import edu.wpi.first.math.geometry.Transform3d; -import edu.wpi.first.math.geometry.Translation3d; -import edu.wpi.first.math.kinematics.DifferentialDriveKinematics; -import edu.wpi.first.math.util.Units; -import org.photonvision.simulation.SimVisionTarget; - -/** - * Holding class for all physical constants that must be used throughout the codebase. 
These values - * should be set by one of a few methods: 1) Talk to your mechanical and electrical teams and - * determine how the physical robot is being built and configured. 2) Read the game manual and look - * at the field drawings 3) Match with how your vision coprocessor is configured. - */ -public class Constants { - ////////////////////////////////////////////////////////////////// - // Drivetrain Physical - ////////////////////////////////////////////////////////////////// - public static final double kMaxSpeed = 3.0; // 3 meters per second. - public static final double kMaxAngularSpeed = Math.PI; // 1/2 rotation per second. - - public static final double kTrackWidth = 0.381 * 2; - public static final double kWheelRadius = 0.0508; - public static final int kEncoderResolution = 4096; - - public static final DifferentialDriveKinematics kDtKinematics = - new DifferentialDriveKinematics(kTrackWidth); - - ////////////////////////////////////////////////////////////////// - // Electrical IO - ////////////////////////////////////////////////////////////////// - public static final int kGyroPin = 0; - - public static final int kDtLeftEncoderPinA = 0; - public static final int kDtLeftEncoderPinB = 1; - public static final int kDtRightEncoderPinA = 2; - public static final int kDtRightEncoderPinB = 3; - - public static final int kDtLeftLeaderPin = 1; - public static final int kDtLeftFollowerPin = 2; - public static final int kDtRightLeaderPin = 3; - public static final int kDtRightFollowerPin = 4; - - ////////////////////////////////////////////////////////////////// - // Vision Processing - ////////////////////////////////////////////////////////////////// - // Name configured in the PhotonVision GUI for the camera - public static final String kCamName = "mainCam"; - - // Physical location of the camera on the robot, relative to the center of the - // robot. 
- public static final Transform3d kCameraToRobot = - new Transform3d( - new Translation3d(-0.25, 0, -.25), // in meters - new Rotation3d()); - - // See - // https://firstfrc.blob.core.windows.net/frc2020/PlayingField/2020FieldDrawing-SeasonSpecific.pdf - // page 208 - public static final double targetWidth = - Units.inchesToMeters(41.30) - Units.inchesToMeters(6.70); // meters - - // See - // https://firstfrc.blob.core.windows.net/frc2020/PlayingField/2020FieldDrawing-SeasonSpecific.pdf - // page 197 - public static final double targetHeight = - Units.inchesToMeters(98.19) - Units.inchesToMeters(81.19); // meters - - // See https://firstfrc.blob.core.windows.net/frc2020/PlayingField/LayoutandMarkingDiagram.pdf - // pages 4 and 5 - public static final double kFarTgtXPos = Units.feetToMeters(54); - public static final double kFarTgtYPos = - Units.feetToMeters(27 / 2) - Units.inchesToMeters(43.75) - Units.inchesToMeters(48.0 / 2.0); - public static final double kFarTgtZPos = - (Units.inchesToMeters(98.19) - targetHeight) / 2 + targetHeight; - - public static final Pose3d kFarTargetPose = - new Pose3d( - new Translation3d(kFarTgtXPos, kFarTgtYPos, kFarTgtZPos), - new Rotation3d(0.0, 0.0, Units.degreesToRadians(180))); - - public static final SimVisionTarget kFarTarget = - new SimVisionTarget(kFarTargetPose, targetWidth, targetHeight, 42); -} diff --git a/photonlib-java-examples/simposeest/src/main/java/frc/robot/Drivetrain.java b/photonlib-java-examples/simposeest/src/main/java/frc/robot/Drivetrain.java deleted file mode 100644 index 996739c95f..0000000000 --- a/photonlib-java-examples/simposeest/src/main/java/frc/robot/Drivetrain.java +++ /dev/null @@ -1,139 +0,0 @@ -/* - * MIT License - * - * Copyright (c) PhotonVision - * - * Permission is hereby granted, free of charge, to any person obtaining a copy - * of this software and associated documentation files (the "Software"), to deal - * in the Software without restriction, including without limitation the rights - * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - * copies of the Software, and to permit persons to whom the Software is - * furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in all - * copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE - * SOFTWARE. - */ - -package frc.robot; - -import edu.wpi.first.math.controller.PIDController; -import edu.wpi.first.math.controller.SimpleMotorFeedforward; -import edu.wpi.first.math.geometry.Pose2d; -import edu.wpi.first.math.kinematics.ChassisSpeeds; -import edu.wpi.first.math.kinematics.DifferentialDriveKinematics; -import edu.wpi.first.math.kinematics.DifferentialDriveWheelSpeeds; -import edu.wpi.first.wpilibj.Encoder; -import edu.wpi.first.wpilibj.motorcontrol.MotorControllerGroup; -import edu.wpi.first.wpilibj.motorcontrol.PWMVictorSPX; - -/** - * Implements a controller for the drivetrain. 
Converts a set of chassis motion commands into motor - * controller PWM values which attempt to speed up or slow down the wheels to match the desired - * speed. - */ -public class Drivetrain { - // PWM motor controller output definitions - PWMVictorSPX leftLeader = new PWMVictorSPX(Constants.kDtLeftLeaderPin); - PWMVictorSPX leftFollower = new PWMVictorSPX(Constants.kDtLeftFollowerPin); - PWMVictorSPX rightLeader = new PWMVictorSPX(Constants.kDtRightLeaderPin); - PWMVictorSPX rightFollower = new PWMVictorSPX(Constants.kDtRightFollowerPin); - - MotorControllerGroup leftGroup = new MotorControllerGroup(leftLeader, leftFollower); - MotorControllerGroup rightGroup = new MotorControllerGroup(rightLeader, rightFollower); - - // Drivetrain wheel speed sensors - // Used both for speed control and pose estimation. - Encoder leftEncoder = new Encoder(Constants.kDtLeftEncoderPinA, Constants.kDtLeftEncoderPinB); - Encoder rightEncoder = new Encoder(Constants.kDtRightEncoderPinA, Constants.kDtRightEncoderPinB); - - // Drivetrain Pose Estimation - final DrivetrainPoseEstimator poseEst; - - // Kinematics - defines the physical size and shape of the drivetrain, which is - // required to convert from - // chassis speed commands to wheel speed commands. - DifferentialDriveKinematics kinematics = new DifferentialDriveKinematics(Constants.kTrackWidth); - - // Closed-loop PIDF controllers for servoing each side of the drivetrain to a - // specific speed. - // Gains are for example purposes only - must be determined for your own robot! - SimpleMotorFeedforward feedforward = new SimpleMotorFeedforward(1, 3); - PIDController leftPIDController = new PIDController(8.5, 0, 0); - PIDController rightPIDController = new PIDController(8.5, 0, 0); - - public Drivetrain() { - // Set the distance per pulse for the drive encoders. We can simply use the - // distance traveled for one rotation of the wheel divided by the encoder - // resolution. - leftEncoder.setDistancePerPulse( - 2 * Math.PI * Constants.kWheelRadius / Constants.kEncoderResolution); - rightEncoder.setDistancePerPulse( - 2 * Math.PI * Constants.kWheelRadius / Constants.kEncoderResolution); - - leftEncoder.reset(); - rightEncoder.reset(); - - rightGroup.setInverted(true); - - poseEst = new DrivetrainPoseEstimator(leftEncoder.getDistance(), rightEncoder.getDistance()); - } - - /** - * Given a set of chassis (fwd/rev + rotate) speed commands, perform all periodic tasks to assign - * new outputs to the motor controllers. - * - * @param xSpeed Desired chassis Forward or Reverse speed (in meters/sec). Positive is forward. - * @param rot Desired chassis rotation speed in radians/sec. Positive is counter-clockwise. 
- */ - public void drive(double xSpeed, double rot) { - // Convert our fwd/rev and rotate commands to wheel speed commands - DifferentialDriveWheelSpeeds speeds = - kinematics.toWheelSpeeds(new ChassisSpeeds(xSpeed, 0, rot)); - - // Calculate the feedback (PID) portion of our motor command, based on desired - // wheel speed - var leftOutput = leftPIDController.calculate(leftEncoder.getRate(), speeds.leftMetersPerSecond); - var rightOutput = - rightPIDController.calculate(rightEncoder.getRate(), speeds.rightMetersPerSecond); - - // Calculate the feedforward (F) portion of our motor command, based on desired - // wheel speed - var leftFeedforward = feedforward.calculate(speeds.leftMetersPerSecond); - var rightFeedforward = feedforward.calculate(speeds.rightMetersPerSecond); - - // Update the motor controllers with our new motor commands - leftGroup.setVoltage(leftOutput + leftFeedforward); - rightGroup.setVoltage(rightOutput + rightFeedforward); - - // Update the pose estimator with the most recent sensor readings. - poseEst.update(leftEncoder.getDistance(), rightEncoder.getDistance()); - } - - /** - * Force the pose estimator and all sensors to a particular pose. This is useful for indicating to - * the software when you have manually moved your robot in a particular position on the field (EX: - * when you place it on the field at the start of the match). - * - * @param pose - */ - public void resetOdometry(Pose2d pose) { - leftEncoder.reset(); - rightEncoder.reset(); - poseEst.resetToPose(pose, leftEncoder.getDistance(), rightEncoder.getDistance()); - } - - /** - * @return The current best-guess at drivetrain Pose on the field. - */ - public Pose2d getCtrlsPoseEstimate() { - return poseEst.getPoseEst(); - } -} diff --git a/photonlib-java-examples/simposeest/src/main/java/frc/robot/DrivetrainPoseEstimator.java b/photonlib-java-examples/simposeest/src/main/java/frc/robot/DrivetrainPoseEstimator.java deleted file mode 100644 index 1b28def5f6..0000000000 --- a/photonlib-java-examples/simposeest/src/main/java/frc/robot/DrivetrainPoseEstimator.java +++ /dev/null @@ -1,109 +0,0 @@ -/* - * MIT License - * - * Copyright (c) PhotonVision - * - * Permission is hereby granted, free of charge, to any person obtaining a copy - * of this software and associated documentation files (the "Software"), to deal - * in the Software without restriction, including without limitation the rights - * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - * copies of the Software, and to permit persons to whom the Software is - * furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in all - * copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE - * SOFTWARE. 
- */ - -package frc.robot; - -import edu.wpi.first.math.Matrix; -import edu.wpi.first.math.VecBuilder; -import edu.wpi.first.math.estimator.DifferentialDrivePoseEstimator; -import edu.wpi.first.math.geometry.Pose2d; -import edu.wpi.first.math.numbers.N1; -import edu.wpi.first.math.numbers.N3; -import edu.wpi.first.math.numbers.N5; -import edu.wpi.first.math.util.Units; -import edu.wpi.first.wpilibj.AnalogGyro; -import org.photonvision.PhotonCamera; - -/** - * Performs estimation of the drivetrain's current position on the field, using a vision system, - * drivetrain encoders, and a gyroscope. These sensor readings are fused together using a Kalman - * filter. This in turn creates a best-guess at a Pose2d of where our drivetrain is currently at. - */ -public class DrivetrainPoseEstimator { - // Sensors used as part of the Pose Estimation - private final AnalogGyro gyro = new AnalogGyro(Constants.kGyroPin); - private PhotonCamera cam = new PhotonCamera(Constants.kCamName); - // Note - drivetrain encoders are also used. The Drivetrain class must pass us - // the relevant readings. - - // Kalman Filter Configuration. These can be "tuned-to-taste" based on how much - // you trust your - // various sensors. Smaller numbers will cause the filter to "trust" the - // estimate from that particular - // component more than the others. This in turn means the particualr component - // will have a stronger - // influence on the final pose estimate. - Matrix stateStdDevs = VecBuilder.fill(0.05, 0.05, Units.degreesToRadians(5), 0.05, 0.05); - Matrix localMeasurementStdDevs = VecBuilder.fill(0.01, 0.01, Units.degreesToRadians(0.1)); - Matrix visionMeasurementStdDevs = - VecBuilder.fill(0.01, 0.01, Units.degreesToRadians(0.1)); - - private final DifferentialDrivePoseEstimator m_poseEstimator; - - public DrivetrainPoseEstimator(double leftDist, double rightDist) { - m_poseEstimator = - new DifferentialDrivePoseEstimator( - Constants.kDtKinematics, - gyro.getRotation2d(), - 0, // Assume zero encoder counts at start - 0, - new Pose2d()); // Default - start at origin. This will get reset by the autonomous init - } - - /** - * Perform all periodic pose estimation tasks. - * - * @param actWheelSpeeds Current Speeds (in m/s) of the drivetrain wheels - * @param leftDist Distance (in m) the left wheel has traveled - * @param rightDist Distance (in m) the right wheel has traveled - */ - public void update(double leftDist, double rightDist) { - m_poseEstimator.update(gyro.getRotation2d(), leftDist, rightDist); - - var res = cam.getLatestResult(); - if (res.hasTargets()) { - var imageCaptureTime = res.getTimestampSeconds(); - var camToTargetTrans = res.getBestTarget().getBestCameraToTarget(); - var camPose = Constants.kFarTargetPose.transformBy(camToTargetTrans.inverse()); - m_poseEstimator.addVisionMeasurement( - camPose.transformBy(Constants.kCameraToRobot).toPose2d(), imageCaptureTime); - } - } - - /** - * Force the pose estimator to a particular pose. This is useful for indicating to the software - * when you have manually moved your robot in a particular position on the field (EX: when you - * place it on the field at the start of the match). - */ - public void resetToPose(Pose2d pose, double leftDist, double rightDist) { - m_poseEstimator.resetPosition(gyro.getRotation2d(), leftDist, rightDist, pose); - } - - /** - * @return The current best-guess at drivetrain position on the field. 
- */ - public Pose2d getPoseEst() { - return m_poseEstimator.getEstimatedPosition(); - } -} diff --git a/photonlib-java-examples/simposeest/src/main/java/frc/robot/DrivetrainSim.java b/photonlib-java-examples/simposeest/src/main/java/frc/robot/DrivetrainSim.java deleted file mode 100644 index 7d1eee2d47..0000000000 --- a/photonlib-java-examples/simposeest/src/main/java/frc/robot/DrivetrainSim.java +++ /dev/null @@ -1,172 +0,0 @@ -/* - * MIT License - * - * Copyright (c) PhotonVision - * - * Permission is hereby granted, free of charge, to any person obtaining a copy - * of this software and associated documentation files (the "Software"), to deal - * in the Software without restriction, including without limitation the rights - * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - * copies of the Software, and to permit persons to whom the Software is - * furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in all - * copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE - * SOFTWARE. - */ - -package frc.robot; - -import edu.wpi.first.math.geometry.Pose2d; -import edu.wpi.first.math.geometry.Rotation2d; -import edu.wpi.first.math.geometry.Transform2d; -import edu.wpi.first.math.geometry.Translation2d; -import edu.wpi.first.math.numbers.N2; -import edu.wpi.first.math.system.LinearSystem; -import edu.wpi.first.math.system.plant.DCMotor; -import edu.wpi.first.math.system.plant.LinearSystemId; -import edu.wpi.first.wpilibj.DriverStation; -import edu.wpi.first.wpilibj.RobotController; -import edu.wpi.first.wpilibj.simulation.AnalogGyroSim; -import edu.wpi.first.wpilibj.simulation.DifferentialDrivetrainSim; -import edu.wpi.first.wpilibj.simulation.EncoderSim; -import edu.wpi.first.wpilibj.simulation.PWMSim; -import org.photonvision.simulation.SimVisionSystem; - -/** - * Implementation of a simulation of robot physics, sensors, motor controllers Includes a Simulated - * PhotonVision system and one vision target. - * - *

This class and its methods are only relevant during simulation. While on the real robot, the - * real motors/sensors/physics are used instead. - */ -public class DrivetrainSim { - // Simulated Sensors - AnalogGyroSim gyroSim = new AnalogGyroSim(Constants.kGyroPin); - EncoderSim leftEncoderSim = EncoderSim.createForChannel(Constants.kDtLeftEncoderPinA); - EncoderSim rightEncoderSim = EncoderSim.createForChannel(Constants.kDtRightEncoderPinA); - - // Simulated Motor Controllers - PWMSim leftLeader = new PWMSim(Constants.kDtLeftLeaderPin); - PWMSim leftFollower = new PWMSim(Constants.kDtLeftFollowerPin); - PWMSim rightLeader = new PWMSim(Constants.kDtRightLeaderPin); - PWMSim rightFollower = new PWMSim(Constants.kDtRightFollowerPin); - - // Simulation Physics - // Configure these to match your drivetrain's physical dimensions - // and characterization results. - LinearSystem drivetrainSystem = - LinearSystemId.identifyDrivetrainSystem(1.98, 0.2, 1.5, 0.3); - DifferentialDrivetrainSim drivetrainSimulator = - new DifferentialDrivetrainSim( - drivetrainSystem, - DCMotor.getCIM(2), - 8, - Constants.kTrackWidth, - Constants.kWheelRadius, - null); - - // Simulated Vision System. - // Configure these to match your PhotonVision Camera, - // pipeline, and LED setup. - double camDiagFOV = 75.0; // degrees - double camPitch = 15.0; // degrees - double camHeightOffGround = 0.85; // meters - double maxLEDRange = 20; // meters - int camResolutionWidth = 640; // pixels - int camResolutionHeight = 480; // pixels - double minTargetArea = 10; // square pixels - - SimVisionSystem simVision = - new SimVisionSystem( - Constants.kCamName, - camDiagFOV, - Constants.kCameraToRobot, - maxLEDRange, - camResolutionWidth, - camResolutionHeight, - minTargetArea); - - public DrivetrainSim() { - simVision.addSimVisionTarget(Constants.kFarTarget); - } - - /** - * Perform all periodic drivetrain simulation related tasks to advance our simulation of robot - * physics forward by a single 20ms step. - */ - public void update() { - double leftMotorCmd = 0; - double rightMotorCmd = 0; - - if (DriverStation.isEnabled() && !RobotController.isBrownedOut()) { - // If the motor controllers are enabled... - // Roughly model the effect of leader and follower motor pushing on the same - // gearbox. - // Note if the software is incorrect and drives them against each other, speed - // will be - // roughly matching the physical situation, but current draw will _not_ be - // accurate. - leftMotorCmd = (leftLeader.getSpeed() + leftFollower.getSpeed()) / 2.0; - rightMotorCmd = (rightLeader.getSpeed() + rightFollower.getSpeed()) / 2.0; - } - - // Update the physics simulation - drivetrainSimulator.setInputs( - leftMotorCmd * RobotController.getInputVoltage(), - -rightMotorCmd * RobotController.getInputVoltage()); - drivetrainSimulator.update(0.02); - - // Update our sensors based on the simulated motion of the robot - leftEncoderSim.setDistance((drivetrainSimulator.getLeftPositionMeters())); - leftEncoderSim.setRate((drivetrainSimulator.getLeftVelocityMetersPerSecond())); - rightEncoderSim.setDistance((drivetrainSimulator.getRightPositionMeters())); - rightEncoderSim.setRate((drivetrainSimulator.getRightVelocityMetersPerSecond())); - gyroSim.setAngle( - -drivetrainSimulator - .getHeading() - .getDegrees()); // Gyros have an inverted reference frame for - // angles, so multiply by -1.0; - - // Update PhotonVision based on our new robot position. 
- simVision.processFrame(drivetrainSimulator.getPose()); - } - - /** - * Resets the simulation back to a pre-defined pose Useful to simulate the action of placing the - * robot onto a specific spot in the field (IE, at the start of each match). - * - * @param pose - */ - public void resetPose(Pose2d pose) { - drivetrainSimulator.setPose(pose); - } - - /** - * @return The simulated robot's position, in meters. - */ - public Pose2d getCurPose() { - return drivetrainSimulator.getPose(); - } - - /** - * For testing purposes only! Applies an unmodeled, undetected offset to the pose Similar to if - * you magically kicked your robot to the side in a way the encoders and gyro didn't measure. - * - *

This distrubance should be corrected for once a vision target is in view. - */ - public void applyKick() { - Pose2d newPose = - drivetrainSimulator - .getPose() - .transformBy(new Transform2d(new Translation2d(0, 0.5), new Rotation2d())); - drivetrainSimulator.setPose(newPose); - } -} diff --git a/photonlib-java-examples/simposeest/src/main/java/frc/robot/Main.java b/photonlib-java-examples/simposeest/src/main/java/frc/robot/Main.java deleted file mode 100644 index 077f979f97..0000000000 --- a/photonlib-java-examples/simposeest/src/main/java/frc/robot/Main.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * MIT License - * - * Copyright (c) PhotonVision - * - * Permission is hereby granted, free of charge, to any person obtaining a copy - * of this software and associated documentation files (the "Software"), to deal - * in the Software without restriction, including without limitation the rights - * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - * copies of the Software, and to permit persons to whom the Software is - * furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in all - * copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE - * SOFTWARE. - */ - -package frc.robot; - -import edu.wpi.first.wpilibj.RobotBase; - -/** - * Do NOT add any static variables to this class, or any initialization at all. Unless you know what - * you are doing, do not modify this file except to change the parameter class to the startRobot - * call. - */ -public final class Main { - private Main() {} - - /** - * Main initialization function. Do not perform any initialization here. - * - *

If you change your main robot class, change the parameter type. - */ - public static void main(String... args) { - RobotBase.startRobot(Robot::new); - } -} diff --git a/photonlib-java-examples/simposeest/src/main/java/frc/robot/OperatorInterface.java b/photonlib-java-examples/simposeest/src/main/java/frc/robot/OperatorInterface.java deleted file mode 100644 index 95429a7e30..0000000000 --- a/photonlib-java-examples/simposeest/src/main/java/frc/robot/OperatorInterface.java +++ /dev/null @@ -1,57 +0,0 @@ -/* - * MIT License - * - * Copyright (c) PhotonVision - * - * Permission is hereby granted, free of charge, to any person obtaining a copy - * of this software and associated documentation files (the "Software"), to deal - * in the Software without restriction, including without limitation the rights - * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - * copies of the Software, and to permit persons to whom the Software is - * furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in all - * copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE - * SOFTWARE. - */ - -package frc.robot; - -import edu.wpi.first.math.filter.SlewRateLimiter; -import edu.wpi.first.wpilibj.XboxController; - -public class OperatorInterface { - private XboxController opCtrl = new XboxController(0); - - // Slew rate limiters to make joystick inputs more gentle; 1/3 sec from 0 - // to 1. - private SlewRateLimiter speedLimiter = new SlewRateLimiter(3); - private SlewRateLimiter rotLimiter = new SlewRateLimiter(3); - - public OperatorInterface() {} - - public double getFwdRevSpdCmd() { - // Get the x speed. We are inverting this because Xbox controllers return - // negative values when we push forward. - return -speedLimiter.calculate(opCtrl.getLeftY()) * Constants.kMaxSpeed; - } - - public double getRotateSpdCmd() { - // Get the rate of angular rotation. We are inverting this because we want a - // positive value when we pull to the left (remember, CCW is positive in - // mathematics). Xbox controllers return positive values when you pull to - // the right by default. 
- return -rotLimiter.calculate(opCtrl.getRightX()) * Constants.kMaxAngularSpeed; - } - - public boolean getSimKickCmd() { - return opCtrl.getXButtonPressed(); - } -} diff --git a/photonlib-java-examples/simposeest/src/main/java/frc/robot/PoseTelemetry.java b/photonlib-java-examples/simposeest/src/main/java/frc/robot/PoseTelemetry.java deleted file mode 100644 index 4897669eb2..0000000000 --- a/photonlib-java-examples/simposeest/src/main/java/frc/robot/PoseTelemetry.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * MIT License - * - * Copyright (c) PhotonVision - * - * Permission is hereby granted, free of charge, to any person obtaining a copy - * of this software and associated documentation files (the "Software"), to deal - * in the Software without restriction, including without limitation the rights - * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - * copies of the Software, and to permit persons to whom the Software is - * furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in all - * copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE - * SOFTWARE. - */ - -package frc.robot; - -import edu.wpi.first.math.geometry.Pose2d; -import edu.wpi.first.wpilibj.smartdashboard.Field2d; -import edu.wpi.first.wpilibj.smartdashboard.SmartDashboard; - -/** Reports our expected, desired, and actual poses to dashboards */ -public class PoseTelemetry { - Field2d field = new Field2d(); - - Pose2d actPose = new Pose2d(); - Pose2d desPose = new Pose2d(); - Pose2d estPose = new Pose2d(); - - public PoseTelemetry() { - SmartDashboard.putData("Field", field); - update(); - } - - public void update() { - field.getObject("DesPose").setPose(desPose); - field.getObject("ActPose").setPose(actPose); - field.getObject("Robot").setPose(estPose); - } - - public void setActualPose(Pose2d in) { - actPose = in; - } - - public void setDesiredPose(Pose2d in) { - desPose = in; - } - - public void setEstimatedPose(Pose2d in) { - estPose = in; - } -} diff --git a/photonlib-java-examples/simposeest/src/main/java/frc/robot/Robot.java b/photonlib-java-examples/simposeest/src/main/java/frc/robot/Robot.java deleted file mode 100644 index 4acbb81f2c..0000000000 --- a/photonlib-java-examples/simposeest/src/main/java/frc/robot/Robot.java +++ /dev/null @@ -1,90 +0,0 @@ -/* - * MIT License - * - * Copyright (c) PhotonVision - * - * Permission is hereby granted, free of charge, to any person obtaining a copy - * of this software and associated documentation files (the "Software"), to deal - * in the Software without restriction, including without limitation the rights - * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - * copies of the Software, and to permit persons to whom the Software is - * furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in all - * copies or substantial portions of the Software. 
- * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE - * SOFTWARE. - */ - -package frc.robot; - -import edu.wpi.first.math.geometry.Pose2d; -import edu.wpi.first.math.kinematics.ChassisSpeeds; -import edu.wpi.first.wpilibj.TimedRobot; - -public class Robot extends TimedRobot { - AutoController autoCtrl = new AutoController(); - Drivetrain dt = new Drivetrain(); - OperatorInterface opInf = new OperatorInterface(); - - DrivetrainSim dtSim = new DrivetrainSim(); - - PoseTelemetry pt = new PoseTelemetry(); - - @Override - public void robotInit() { - // Flush NetworkTables every loop. This ensures that robot pose and other values - // are sent during every iteration. - setNetworkTablesFlushEnabled(true); - } - - @Override - public void autonomousInit() { - resetOdometery(); - autoCtrl.startPath(); - } - - @Override - public void autonomousPeriodic() { - ChassisSpeeds speeds = autoCtrl.getCurMotorCmds(dt.getCtrlsPoseEstimate()); - dt.drive(speeds.vxMetersPerSecond, speeds.omegaRadiansPerSecond); - pt.setDesiredPose(autoCtrl.getCurPose2d()); - } - - @Override - public void teleopPeriodic() { - dt.drive(opInf.getFwdRevSpdCmd(), opInf.getRotateSpdCmd()); - } - - @Override - public void robotPeriodic() { - pt.setEstimatedPose(dt.getCtrlsPoseEstimate()); - pt.update(); - } - - @Override - public void disabledPeriodic() { - dt.drive(0, 0); - } - - @Override - public void simulationPeriodic() { - if (opInf.getSimKickCmd()) { - dtSim.applyKick(); - } - dtSim.update(); - pt.setActualPose(dtSim.getCurPose()); - } - - private void resetOdometery() { - Pose2d startPose = autoCtrl.getInitialPose(); - dtSim.resetPose(startPose); - dt.resetOdometry(startPose); - } -} From 5c9657742d9f127f137cfb9b3bd003325c351f97 Mon Sep 17 00:00:00 2001 From: amquake Date: Mon, 18 Sep 2023 20:18:22 -0700 Subject: [PATCH 31/46] remove unused stubs --- .../src/main/java/frc/robot/Robot.java | 13 ++----------- 1 file changed, 2 insertions(+), 11 deletions(-) diff --git a/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Robot.java b/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Robot.java index 93be201a02..aa55e1cb50 100644 --- a/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Robot.java +++ b/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Robot.java @@ -64,18 +64,12 @@ public void robotPeriodic() { // Log values to the dashboard drivetrain.log(); } - - @Override - public void disabledInit() {} - + @Override public void disabledPeriodic() { drivetrain.stop(); } - - @Override - public void disabledExit() {} - + @Override public void autonomousInit() { autoTimer.restart(); @@ -96,9 +90,6 @@ public void autonomousPeriodic() { } } - @Override - public void teleopInit() {} - @Override public void teleopPeriodic() { // We will use an "arcade drive" scheme to turn joystick values into target robot speeds From 599a6d91398fea25fca6f64995cfd17118fd7311 Mon Sep 17 00:00:00 2001 From: amquake Date: Mon, 18 Sep 2023 22:37:44 -0700 Subject: [PATCH 32/46] Update simaimandrange --- .../simaimandrange/.gitignore | 1 + 
.../simaimandrange/simgui-ds.json | 22 ++--- .../simaimandrange/simgui.json | 56 ++++++++---- .../src/main/java/frc/robot/Robot.java | 20 +++-- .../java/frc/robot/sim/DrivetrainSim.java | 88 +++++++++++-------- 5 files changed, 118 insertions(+), 69 deletions(-) diff --git a/photonlib-java-examples/simaimandrange/.gitignore b/photonlib-java-examples/simaimandrange/.gitignore index 9535c83037..9912213c2b 100644 --- a/photonlib-java-examples/simaimandrange/.gitignore +++ b/photonlib-java-examples/simaimandrange/.gitignore @@ -160,3 +160,4 @@ bin/ # Simulation GUI and other tools window save file *-window.json +networktables.json diff --git a/photonlib-java-examples/simaimandrange/simgui-ds.json b/photonlib-java-examples/simaimandrange/simgui-ds.json index 77d5b5b89e..2b161f3ef4 100644 --- a/photonlib-java-examples/simaimandrange/simgui-ds.json +++ b/photonlib-java-examples/simaimandrange/simgui-ds.json @@ -1,28 +1,30 @@ { "Keyboard 0 Settings": { "window": { - "visible": true + "visible": false } }, "keyboardJoysticks": [ { "axisConfig": [ + {}, { "decKey": 87, "incKey": 83 }, - {}, { - "decKey": 69, - "decayRate": 0.0, - "incKey": 82, - "keyRate": 0.009999999776482582 + "decayRate": 0.10000000149011612, + "incKey": 81, + "keyRate": 0.10000000149011612 + }, + { + "decayRate": 0.10000000149011612, + "incKey": 69, + "keyRate": 0.10000000149011612 }, - {}, - {}, { - "decKey": 68, - "incKey": 65 + "decKey": 65, + "incKey": 68 } ], "axisCount": 6, diff --git a/photonlib-java-examples/simaimandrange/simgui.json b/photonlib-java-examples/simaimandrange/simgui.json index 53fcc87a54..2ebe473318 100644 --- a/photonlib-java-examples/simaimandrange/simgui.json +++ b/photonlib-java-examples/simaimandrange/simgui.json @@ -2,16 +2,47 @@ "NTProvider": { "types": { "/FMSInfo": "FMSInfo", - "/SmartDashboard/Field": "Field2d", - "/SmartDashboard/photonvision Sim Field": "Field2d" + "/SmartDashboard/VisionSystemSim-main/Sim Field": "Field2d" }, "windows": { - "/SmartDashboard/Field": { - "window": { - "visible": true - } - }, - "/SmartDashboard/photonvision Sim Field": { + "/SmartDashboard/VisionSystemSim-main/Sim Field": { + "Robot": { + "arrowColor": [ + 1.0, + 1.0, + 1.0, + 255.0 + ], + "arrowWeight": 3.0, + "color": [ + 1.0, + 1.0, + 1.0, + 255.0 + ], + "weight": 3.0 + }, + "cameras": { + "arrowColor": [ + 0.0, + 0.683544397354126, + 1.0, + 255.0 + ], + "arrowSize": 29, + "selectable": false, + "style": "Hidden" + }, + "targets": { + "color": [ + 0.05063295364379883, + 1.0, + 0.0, + 255.0 + ], + "style": "Hidden", + "weight": 1.0 + }, "window": { "visible": true } @@ -21,7 +52,7 @@ "NetworkTables": { "transitory": { "SmartDashboard": { - "Field": { + "VisionSystemSim-main/Sim Field": { "open": true }, "open": true @@ -33,12 +64,5 @@ } } } - }, - "Plot": { - "Plot <0>": { - "window": { - "visible": false - } - } } } diff --git a/photonlib-java-examples/simaimandrange/src/main/java/frc/robot/Robot.java b/photonlib-java-examples/simaimandrange/src/main/java/frc/robot/Robot.java index 6dc8b6fd9e..53197dd216 100644 --- a/photonlib-java-examples/simaimandrange/src/main/java/frc/robot/Robot.java +++ b/photonlib-java-examples/simaimandrange/src/main/java/frc/robot/Robot.java @@ -56,8 +56,8 @@ public class Robot extends TimedRobot { PhotonCamera camera = new PhotonCamera("photonvision"); // PID constants should be tuned per robot - final double LINEAR_P = 2.0; - final double LINEAR_D = 0.0; + final double LINEAR_P = 0.5; + final double LINEAR_D = 0.1; PIDController forwardController = new PIDController(LINEAR_P, 0, 
LINEAR_D); final double ANGULAR_P = 0.03; @@ -71,6 +71,12 @@ public class Robot extends TimedRobot { PWMVictorSPX rightMotor = new PWMVictorSPX(1); DifferentialDrive drive = new DifferentialDrive(leftMotor, rightMotor); + @Override + public void robotInit() { + leftMotor.setInverted(false); + rightMotor.setInverted(true); + } + @Override public void teleopPeriodic() { double forwardSpeed; @@ -91,11 +97,11 @@ public void teleopPeriodic() { Units.degreesToRadians(result.getBestTarget().getPitch())); // Use this range as the measurement we give to the PID controller. - // -1.0 required to ensure positive PID controller effort _increases_ range + // (This forwardSpeed must be positive to go forward.) forwardSpeed = -forwardController.calculate(range, GOAL_RANGE_METERS); // Also calculate angular power - // -1.0 required to ensure positive PID controller effort _increases_ yaw + // (This rotationSpeed must be positive to turn counter-clockwise.) rotationSpeed = -turnController.calculate(result.getBestTarget().getYaw(), 0); } else { // If we have no targets, stay still. @@ -104,8 +110,8 @@ public void teleopPeriodic() { } } else { // Manual Driver Mode - forwardSpeed = -xboxController.getRightY(); - rotationSpeed = xboxController.getLeftX(); + forwardSpeed = -xboxController.getLeftY() * 0.75; + rotationSpeed = -xboxController.getRightX() * 0.75; } // Use our forward/turn speeds to control the drivetrain @@ -119,7 +125,7 @@ public void teleopPeriodic() { @Override public void simulationInit() { - dtSim = new DrivetrainSim(); + dtSim = new DrivetrainSim(leftMotor.getChannel(), rightMotor.getChannel(), camera); } @Override diff --git a/photonlib-java-examples/simaimandrange/src/main/java/frc/robot/sim/DrivetrainSim.java b/photonlib-java-examples/simaimandrange/src/main/java/frc/robot/sim/DrivetrainSim.java index 2686a202ba..531cf8df13 100644 --- a/photonlib-java-examples/simaimandrange/src/main/java/frc/robot/sim/DrivetrainSim.java +++ b/photonlib-java-examples/simaimandrange/src/main/java/frc/robot/sim/DrivetrainSim.java @@ -26,6 +26,7 @@ import edu.wpi.first.math.geometry.Pose2d; import edu.wpi.first.math.geometry.Pose3d; +import edu.wpi.first.math.geometry.Rotation2d; import edu.wpi.first.math.geometry.Rotation3d; import edu.wpi.first.math.geometry.Transform3d; import edu.wpi.first.math.geometry.Translation3d; @@ -38,11 +39,16 @@ import edu.wpi.first.wpilibj.RobotController; import edu.wpi.first.wpilibj.simulation.DifferentialDrivetrainSim; import edu.wpi.first.wpilibj.simulation.PWMSim; -import edu.wpi.first.wpilibj.smartdashboard.Field2d; -import edu.wpi.first.wpilibj.smartdashboard.SmartDashboard; import frc.robot.Robot; -import org.photonvision.simulation.SimVisionSystem; -import org.photonvision.simulation.SimVisionTarget; + +import java.util.List; + +import org.photonvision.PhotonCamera; +import org.photonvision.estimation.TargetModel; +import org.photonvision.simulation.PhotonCameraSim; +import org.photonvision.simulation.SimCameraProperties; +import org.photonvision.simulation.VisionSystemSim; +import org.photonvision.simulation.VisionTargetSim; /** * Implementation of a simulation of robot physics, sensors, motor controllers Includes a Simulated @@ -53,53 +59,47 @@ */ public class DrivetrainSim { // Simulated Motor Controllers - PWMSim leftLeader = new PWMSim(0); - PWMSim rightLeader = new PWMSim(1); + PWMSim leftLeader; + PWMSim rightLeader; // Simulation Physics // Configure these to match your drivetrain's physical dimensions // and characterization results. 
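    // Note on the call below: WPILib's LinearSystemId.identifyDrivetrainSystem takes
    // (kVLinear, kALinear, kVAngular, kAAngular, trackwidth) characterization gains.
    // The numbers used in this example are placeholder values, not measurements from a real robot.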
+ double trackwidthMeters = Units.feetToMeters(2.0); LinearSystem drivetrainSystem = - LinearSystemId.identifyDrivetrainSystem(1.98, 0.2, 1.5, 0.3, 1.0); + LinearSystemId.identifyDrivetrainSystem(2.0, 0.5, 2.25, 0.3, trackwidthMeters); DifferentialDrivetrainSim drivetrainSimulator = new DifferentialDrivetrainSim( drivetrainSystem, DCMotor.getCIM(2), 8, - Units.feetToMeters(2.0), + trackwidthMeters, Units.inchesToMeters(6.0 / 2.0), null); // Simulated Vision System. // Configure these to match your PhotonVision Camera, // pipeline, and LED setup. - double camDiagFOV = 170.0; // degrees - assume wide-angle camera + double camDiagFOV = 100.0; // degrees double camPitch = Robot.CAMERA_PITCH_RADIANS; // degrees double camHeightOffGround = Robot.CAMERA_HEIGHT_METERS; // meters + double minTargetArea = 0.1; // percentage (0 - 100) double maxLEDRange = 20; // meters int camResolutionWidth = 640; // pixels int camResolutionHeight = 480; // pixels - double minTargetArea = 10; // square pixels - - SimVisionSystem simVision = - new SimVisionSystem( - "photonvision", - camDiagFOV, - new Transform3d( - new Translation3d(0, 0, camHeightOffGround), new Rotation3d(0, camPitch, 0)), - maxLEDRange, - camResolutionWidth, - camResolutionHeight, - minTargetArea); + PhotonCameraSim cameraSim; + + VisionSystemSim visionSim = new VisionSystemSim("main"); // See // https://firstfrc.blob.core.windows.net/frc2020/PlayingField/2020FieldDrawing-SeasonSpecific.pdf // page 208 - double targetWidth = Units.inchesToMeters(41.30) - Units.inchesToMeters(6.70); // meters - // See - // https://firstfrc.blob.core.windows.net/frc2020/PlayingField/2020FieldDrawing-SeasonSpecific.pdf - // page 197 - double targetHeight = Units.inchesToMeters(98.19) - Units.inchesToMeters(81.19); // meters + TargetModel targetModel = new TargetModel(List.of( + new Translation3d(0, Units.inchesToMeters(-9.819867), Units.inchesToMeters(-8.5)), + new Translation3d(0, Units.inchesToMeters(9.819867), Units.inchesToMeters(-8.5)), + new Translation3d(0, Units.inchesToMeters(19.625), Units.inchesToMeters(8.5)), + new Translation3d(0, Units.inchesToMeters(-19.625), Units.inchesToMeters(8.5)) + )); // See https://firstfrc.blob.core.windows.net/frc2020/PlayingField/LayoutandMarkingDiagram.pdf // pages 4 and 5 double tgtXPos = Units.feetToMeters(54); @@ -108,13 +108,30 @@ public class DrivetrainSim { Pose3d farTargetPose = new Pose3d( new Translation3d(tgtXPos, tgtYPos, Robot.TARGET_HEIGHT_METERS), - new Rotation3d(0.0, 0.0, 0.0)); + new Rotation3d(0.0, 0.0, Math.PI)); + + public DrivetrainSim(int leftMotorPort, int rightMotorPort, PhotonCamera camera) { + leftLeader = new PWMSim(leftMotorPort); + rightLeader = new PWMSim(rightMotorPort); - Field2d field = new Field2d(); + // Make the vision target visible to this simulated field. + var visionTarget = new VisionTargetSim(farTargetPose, targetModel); + visionSim.addVisionTargets(visionTarget); - public DrivetrainSim() { - simVision.addSimVisionTarget(new SimVisionTarget(farTargetPose, targetWidth, targetHeight, -1)); - SmartDashboard.putData("Field", field); + // Create simulated camera properties. These can be set to mimic your actual camera. 
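        // (How these simulated-camera knobs are used, judging from the calls that follow:
        // setCalibration derives approximate intrinsics from the resolution and diagonal FOV,
        // setCalibError adds average/standard-deviation pixel noise to detected corners, and the
        // FPS and latency setters control when, and how late, simulated frames are published.)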
+ var cameraProp = new SimCameraProperties(); + cameraProp.setCalibration(camResolutionWidth, camResolutionHeight, Rotation2d.fromDegrees(camDiagFOV)); + cameraProp.setCalibError(0.2, 0.05); + cameraProp.setFPS(25); + cameraProp.setAvgLatencyMs(30); + cameraProp.setLatencyStdDevMs(4); + // Create a PhotonCameraSim which will update the linked PhotonCamera's values with visible targets. + cameraSim = new PhotonCameraSim(camera, cameraProp, minTargetArea, maxLEDRange); + + // Add the simulated camera to view the targets on this simulated field. + visionSim.addCamera(cameraSim, new Transform3d(new Translation3d(0.25, 0, Robot.CAMERA_HEIGHT_METERS), new Rotation3d(0, -Robot.CAMERA_PITCH_RADIANS, 0))); + + cameraSim.enableDrawWireframe(true); } /** @@ -131,14 +148,12 @@ public void update() { } drivetrainSimulator.setInputs( - leftMotorCmd * RobotController.getInputVoltage(), - -rightMotorCmd * RobotController.getInputVoltage()); + leftMotorCmd * RobotController.getBatteryVoltage(), + -rightMotorCmd * RobotController.getBatteryVoltage()); drivetrainSimulator.update(0.02); // Update PhotonVision based on our new robot position. - simVision.processFrame(drivetrainSimulator.getPose()); - - field.setRobotPose(drivetrainSimulator.getPose()); + visionSim.update(drivetrainSimulator.getPose()); } /** @@ -149,5 +164,6 @@ public void update() { */ public void resetPose(Pose2d pose) { drivetrainSimulator.setPose(pose); + visionSim.resetRobotPose(pose); } } From c33e8064ff4bb653f75dcfae8738ed0eaa7b259c Mon Sep 17 00:00:00 2001 From: amquake Date: Mon, 18 Sep 2023 22:47:29 -0700 Subject: [PATCH 33/46] swervedriveposeestsim keyboard control --- .../swervedriveposeestsim/.gitignore | 1 + .../swervedriveposeestsim/simgui-ds.json | 105 ++++++++++++++++++ 2 files changed, 106 insertions(+) create mode 100644 photonlib-java-examples/swervedriveposeestsim/simgui-ds.json diff --git a/photonlib-java-examples/swervedriveposeestsim/.gitignore b/photonlib-java-examples/swervedriveposeestsim/.gitignore index a8d1911a9e..9f76c3a543 100644 --- a/photonlib-java-examples/swervedriveposeestsim/.gitignore +++ b/photonlib-java-examples/swervedriveposeestsim/.gitignore @@ -170,3 +170,4 @@ out/ # Simulation GUI and other tools window save file *-window.json +networktables.json diff --git a/photonlib-java-examples/swervedriveposeestsim/simgui-ds.json b/photonlib-java-examples/swervedriveposeestsim/simgui-ds.json new file mode 100644 index 0000000000..d9b634834b --- /dev/null +++ b/photonlib-java-examples/swervedriveposeestsim/simgui-ds.json @@ -0,0 +1,105 @@ +{ + "Keyboard 0 Settings": { + "window": { + "visible": false + } + }, + "keyboardJoysticks": [ + { + "axisConfig": [ + { + "decKey": 65, + "incKey": 68 + }, + { + "decKey": 87, + "incKey": 83 + }, + { + "decayRate": 0.0, + "keyRate": 0.009999999776482582 + }, + {}, + { + "decKey": 81, + "incKey": 69 + } + ], + "axisCount": 6, + "buttonCount": 4, + "buttonKeys": [ + 90, + 88, + 67, + 86 + ], + "povConfig": [ + { + "key0": 328, + "key135": 323, + "key180": 322, + "key225": 321, + "key270": 324, + "key315": 327, + "key45": 329, + "key90": 326 + } + ], + "povCount": 1 + }, + { + "axisConfig": [ + { + "decKey": 74, + "incKey": 76 + }, + { + "decKey": 73, + "incKey": 75 + } + ], + "axisCount": 2, + "buttonCount": 4, + "buttonKeys": [ + 77, + 44, + 46, + 47 + ], + "povCount": 0 + }, + { + "axisConfig": [ + { + "decKey": 263, + "incKey": 262 + }, + { + "decKey": 265, + "incKey": 264 + } + ], + "axisCount": 2, + "buttonCount": 6, + "buttonKeys": [ + 260, + 268, + 266, + 261, + 269, 
+ 267 + ], + "povCount": 0 + }, + { + "axisCount": 0, + "buttonCount": 0, + "povCount": 0 + } + ], + "robotJoysticks": [ + { + "guid": "Keyboard0" + } + ] +} From d0f02ecf34656b9d9741178222890084abfa1bc7 Mon Sep 17 00:00:00 2001 From: amquake Date: Mon, 18 Sep 2023 23:43:09 -0700 Subject: [PATCH 34/46] Create java-examples README.md --- photonlib-java-examples/README.md | 47 +++++++++++++++++++++++++++++++ 1 file changed, 47 insertions(+) create mode 100644 photonlib-java-examples/README.md diff --git a/photonlib-java-examples/README.md b/photonlib-java-examples/README.md new file mode 100644 index 0000000000..ff4abbd9d7 --- /dev/null +++ b/photonlib-java-examples/README.md @@ -0,0 +1,47 @@ +## PhotonLib Java Examples + +### [**`aimattarget`**](aimattarget) + +A simple demonstration of using PhotonVision's 2d target yaw to align a differential drivetrain with a target. + +--- + +### [**`getinrange`**](getinrange) + +A simple demonstration of using PhotonVision's 2d target pitch to bring a differential drivetrain to a specific distance from a target. + +--- + +### [**`aimandrange`**](aimandrange) + +A combination of the previous `aimattarget` and `getinrange` examples to simultaneously aim and get in range of a target. + +--- + +### [**`simaimandrange`**](simaimandrange) + +The above `aimandrange` example with simulation support. + + + +**Keyboard controls:** +- Drive forward/backward: W/S +- Turn left/right: A/D +- Perform vision alignment: Z + +--- + +### [**`swervedriveposeestsim`**](swervedriveposeestsim) + +A minimal swerve drive example demonstrating the usage of PhotonVision for AprilTag vision estimation with a swerve drive pose estimator. + +The example also has simulation support with an approximation of swerve drive dynamics. + + + + + +**Keyboard controls:** +- Translate field-relative: WASD +- Rotate counter/clockwise: Q/E +- Offset pose estimate: X From e243cfee54609f98c3c8a78d460a0110591bcbd3 Mon Sep 17 00:00:00 2001 From: amquake Date: Mon, 18 Sep 2023 23:44:16 -0700 Subject: [PATCH 35/46] hide keyboard settings --- photonlib-java-examples/swervedriveposeestsim/simgui-ds.json | 5 ----- 1 file changed, 5 deletions(-) diff --git a/photonlib-java-examples/swervedriveposeestsim/simgui-ds.json b/photonlib-java-examples/swervedriveposeestsim/simgui-ds.json index d9b634834b..addd5860ce 100644 --- a/photonlib-java-examples/swervedriveposeestsim/simgui-ds.json +++ b/photonlib-java-examples/swervedriveposeestsim/simgui-ds.json @@ -1,9 +1,4 @@ { - "Keyboard 0 Settings": { - "window": { - "visible": false - } - }, "keyboardJoysticks": [ { "axisConfig": [ From c57370ec6f29c8acc9741ee9c39f1e827c603299 Mon Sep 17 00:00:00 2001 From: amquake Date: Mon, 18 Sep 2023 23:44:54 -0700 Subject: [PATCH 36/46] Revert "hide keyboard settings" This reverts commit e243cfee54609f98c3c8a78d460a0110591bcbd3. 
--- photonlib-java-examples/swervedriveposeestsim/simgui-ds.json | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/photonlib-java-examples/swervedriveposeestsim/simgui-ds.json b/photonlib-java-examples/swervedriveposeestsim/simgui-ds.json index addd5860ce..d9b634834b 100644 --- a/photonlib-java-examples/swervedriveposeestsim/simgui-ds.json +++ b/photonlib-java-examples/swervedriveposeestsim/simgui-ds.json @@ -1,4 +1,9 @@ { + "Keyboard 0 Settings": { + "window": { + "visible": false + } + }, "keyboardJoysticks": [ { "axisConfig": [ From 5b416574e34e76d979a8ca3598523db9e1ff00f1 Mon Sep 17 00:00:00 2001 From: amquake Date: Tue, 19 Sep 2023 00:50:09 -0700 Subject: [PATCH 37/46] avoid sticky field objects --- .../simulation/VisionSystemSim.java | 22 +++++++++---------- .../swervedriveposeestsim/simgui-ds.json | 5 ----- .../src/main/java/frc/robot/Vision.java | 6 ++++- 3 files changed, 15 insertions(+), 18 deletions(-) diff --git a/photon-lib/src/main/java/org/photonvision/simulation/VisionSystemSim.java b/photon-lib/src/main/java/org/photonvision/simulation/VisionSystemSim.java index 4e65bdcbd4..ac56256be9 100644 --- a/photon-lib/src/main/java/org/photonvision/simulation/VisionSystemSim.java +++ b/photon-lib/src/main/java/org/photonvision/simulation/VisionSystemSim.java @@ -337,7 +337,7 @@ public Field2d getDebugField() { * Periodic update. Ensure this is called repeatedly-- camera performance is used to automatically * determine if a new frame should be submitted. * - * @param robotPoseMeters The current robot pose in meters + * @param robotPoseMeters The simulated robot pose in meters */ public void update(Pose2d robotPoseMeters) { update(new Pose3d(robotPoseMeters)); @@ -347,7 +347,7 @@ public void update(Pose2d robotPoseMeters) { * Periodic update. Ensure this is called repeatedly-- camera performance is used to automatically * determine if a new frame should be submitted. 
* - * @param robotPoseMeters The current robot pose in meters + * @param robotPoseMeters The simulated robot pose in meters */ public void update(Pose3d robotPoseMeters) { var targetTypes = targetSets.entrySet(); @@ -370,13 +370,15 @@ public void update(Pose3d robotPoseMeters) { var allTargets = new ArrayList(); targetTypes.forEach((entry) -> allTargets.addAll(entry.getValue())); - var visibleTargets = new ArrayList(); - var cameraPose2ds = new ArrayList(); + var visTgtPoses2d = new ArrayList(); + var cameraPoses2d = new ArrayList(); + boolean processed = false; // process each camera for (var camSim : camSimMap.values()) { // check if this camera is ready to process and get latency var optTimestamp = camSim.consumeNextEntryTime(); if (optTimestamp.isEmpty()) continue; + else processed = true; // when this result "was" read by NT long timestampNT = optTimestamp.get(); // this result's processing latency in milliseconds @@ -387,7 +389,7 @@ public void update(Pose3d robotPoseMeters) { // use camera pose from the image capture timestamp Pose3d lateRobotPose = getRobotPose(timestampCapture); Pose3d lateCameraPose = lateRobotPose.plus(getRobotToCamera(camSim, timestampCapture).get()); - cameraPose2ds.add(lateCameraPose.toPose2d()); + cameraPoses2d.add(lateCameraPose.toPose2d()); // process a PhotonPipelineResult with visible targets var camResult = camSim.process(latencyMillis, lateCameraPose, allTargets); @@ -395,14 +397,10 @@ public void update(Pose3d robotPoseMeters) { camSim.submitProcessedFrame(camResult, timestampNT); // display debug results for (var target : camResult.getTargets()) { - visibleTargets.add(lateCameraPose.transformBy(target.getBestCameraToTarget())); + visTgtPoses2d.add(lateCameraPose.transformBy(target.getBestCameraToTarget()).toPose2d()); } } - if (visibleTargets.size() != 0) { - dbgField - .getObject("visibleTargetPoses") - .setPoses(visibleTargets.stream().map(Pose3d::toPose2d).collect(Collectors.toList())); - } - if (cameraPose2ds.size() != 0) dbgField.getObject("cameras").setPoses(cameraPose2ds); + if (processed) dbgField.getObject("visibleTargetPoses").setPoses(visTgtPoses2d); + if (cameraPoses2d.size() != 0) dbgField.getObject("cameras").setPoses(cameraPoses2d); } } diff --git a/photonlib-java-examples/swervedriveposeestsim/simgui-ds.json b/photonlib-java-examples/swervedriveposeestsim/simgui-ds.json index d9b634834b..addd5860ce 100644 --- a/photonlib-java-examples/swervedriveposeestsim/simgui-ds.json +++ b/photonlib-java-examples/swervedriveposeestsim/simgui-ds.json @@ -1,9 +1,4 @@ { - "Keyboard 0 Settings": { - "window": { - "visible": false - } - }, "keyboardJoysticks": [ { "axisConfig": [ diff --git a/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Vision.java b/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Vision.java index 3bff6fb6d4..e868b5cf82 100644 --- a/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Vision.java +++ b/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Vision.java @@ -24,6 +24,7 @@ public class Vision { private final PhotonCamera camera; private final PhotonPoseEstimator photonEstimator; + private double lastEstTimestamp = 0; // Simulation private PhotonCameraSim cameraSim; @@ -69,12 +70,15 @@ public PhotonPipelineResult getLatestResult() { */ public Optional getEstimatedGlobalPose() { var visionEst = photonEstimator.update(); + double latestTimestamp = camera.getLatestResult().getTimestampSeconds(); + boolean newResult = Math.abs(latestTimestamp - 
lastEstTimestamp) > 1e-5; if(Robot.isSimulation()) { visionEst.ifPresentOrElse( est -> getSimDebugField().getObject("VisionEstimation").setPose(est.estimatedPose.toPose2d()), - () -> getSimDebugField().getObject("VisionEstimation").setPoses() + () -> {if(newResult) getSimDebugField().getObject("VisionEstimation").setPoses();} ); } + if(newResult) lastEstTimestamp = latestTimestamp; return visionEst; } From fcdf6f26f5616c1c08931b18aada9cb415f611f2 Mon Sep 17 00:00:00 2001 From: amquake Date: Tue, 19 Sep 2023 01:03:26 -0700 Subject: [PATCH 38/46] dont display target estimations with empty transforms --- .../java/org/photonvision/simulation/VisionSystemSim.java | 6 +++++- photonlib-java-examples/simaimandrange/simgui-ds.json | 5 ----- 2 files changed, 5 insertions(+), 6 deletions(-) diff --git a/photon-lib/src/main/java/org/photonvision/simulation/VisionSystemSim.java b/photon-lib/src/main/java/org/photonvision/simulation/VisionSystemSim.java index ac56256be9..29e4f48b30 100644 --- a/photon-lib/src/main/java/org/photonvision/simulation/VisionSystemSim.java +++ b/photon-lib/src/main/java/org/photonvision/simulation/VisionSystemSim.java @@ -66,6 +66,8 @@ public class VisionSystemSim { private final Field2d dbgField; + private final Transform3d kEmptyTrf = new Transform3d(); + /** * A simulated vision system involving a camera(s) and coprocessor(s) mounted on a mobile robot * running PhotonVision, detecting targets placed on the field. {@link VisionTargetSim}s added to @@ -397,7 +399,9 @@ public void update(Pose3d robotPoseMeters) { camSim.submitProcessedFrame(camResult, timestampNT); // display debug results for (var target : camResult.getTargets()) { - visTgtPoses2d.add(lateCameraPose.transformBy(target.getBestCameraToTarget()).toPose2d()); + var trf = target.getBestCameraToTarget(); + if(trf.equals(kEmptyTrf)) continue; + visTgtPoses2d.add(lateCameraPose.transformBy(trf).toPose2d()); } } if (processed) dbgField.getObject("visibleTargetPoses").setPoses(visTgtPoses2d); diff --git a/photonlib-java-examples/simaimandrange/simgui-ds.json b/photonlib-java-examples/simaimandrange/simgui-ds.json index 2b161f3ef4..8dc94bc6a6 100644 --- a/photonlib-java-examples/simaimandrange/simgui-ds.json +++ b/photonlib-java-examples/simaimandrange/simgui-ds.json @@ -1,9 +1,4 @@ { - "Keyboard 0 Settings": { - "window": { - "visible": false - } - }, "keyboardJoysticks": [ { "axisConfig": [ From dd37fe71fba3ef64e7de0a208cfc348b85415d5b Mon Sep 17 00:00:00 2001 From: amquake Date: Tue, 19 Sep 2023 01:07:27 -0700 Subject: [PATCH 39/46] Update examples.txt --- photonlib-java-examples/examples.txt | 2 -- 1 file changed, 2 deletions(-) diff --git a/photonlib-java-examples/examples.txt b/photonlib-java-examples/examples.txt index 12c37ab582..c7627d90a9 100644 --- a/photonlib-java-examples/examples.txt +++ b/photonlib-java-examples/examples.txt @@ -2,6 +2,4 @@ aimandrange aimattarget getinrange simaimandrange -simposeest -apriltagExample swervedriveposeestsim From aa521d37049fb98b4fba7ade75a9cd69c3db4c6f Mon Sep 17 00:00:00 2001 From: amquake Date: Tue, 19 Sep 2023 01:08:40 -0700 Subject: [PATCH 40/46] spotless --- .../simulation/VisionSystemSim.java | 2 +- .../java/frc/robot/sim/DrivetrainSim.java | 29 +- .../src/main/java/frc/robot/Constants.java | 89 ++--- .../src/main/java/frc/robot/Main.java | 8 +- .../src/main/java/frc/robot/Robot.java | 54 +-- .../src/main/java/frc/robot/Vision.java | 80 +++-- .../subsystems/drivetrain/SwerveDrive.java | 210 +++++------ .../subsystems/drivetrain/SwerveDriveSim.java | 333 
++++++++++-------- .../subsystems/drivetrain/SwerveModule.java | 84 +++-- 9 files changed, 482 insertions(+), 407 deletions(-) diff --git a/photon-lib/src/main/java/org/photonvision/simulation/VisionSystemSim.java b/photon-lib/src/main/java/org/photonvision/simulation/VisionSystemSim.java index 29e4f48b30..ed0207096e 100644 --- a/photon-lib/src/main/java/org/photonvision/simulation/VisionSystemSim.java +++ b/photon-lib/src/main/java/org/photonvision/simulation/VisionSystemSim.java @@ -400,7 +400,7 @@ public void update(Pose3d robotPoseMeters) { // display debug results for (var target : camResult.getTargets()) { var trf = target.getBestCameraToTarget(); - if(trf.equals(kEmptyTrf)) continue; + if (trf.equals(kEmptyTrf)) continue; visTgtPoses2d.add(lateCameraPose.transformBy(trf).toPose2d()); } } diff --git a/photonlib-java-examples/simaimandrange/src/main/java/frc/robot/sim/DrivetrainSim.java b/photonlib-java-examples/simaimandrange/src/main/java/frc/robot/sim/DrivetrainSim.java index 531cf8df13..0034ac8772 100644 --- a/photonlib-java-examples/simaimandrange/src/main/java/frc/robot/sim/DrivetrainSim.java +++ b/photonlib-java-examples/simaimandrange/src/main/java/frc/robot/sim/DrivetrainSim.java @@ -40,9 +40,7 @@ import edu.wpi.first.wpilibj.simulation.DifferentialDrivetrainSim; import edu.wpi.first.wpilibj.simulation.PWMSim; import frc.robot.Robot; - import java.util.List; - import org.photonvision.PhotonCamera; import org.photonvision.estimation.TargetModel; import org.photonvision.simulation.PhotonCameraSim; @@ -94,12 +92,13 @@ public class DrivetrainSim { // See // https://firstfrc.blob.core.windows.net/frc2020/PlayingField/2020FieldDrawing-SeasonSpecific.pdf // page 208 - TargetModel targetModel = new TargetModel(List.of( - new Translation3d(0, Units.inchesToMeters(-9.819867), Units.inchesToMeters(-8.5)), - new Translation3d(0, Units.inchesToMeters(9.819867), Units.inchesToMeters(-8.5)), - new Translation3d(0, Units.inchesToMeters(19.625), Units.inchesToMeters(8.5)), - new Translation3d(0, Units.inchesToMeters(-19.625), Units.inchesToMeters(8.5)) - )); + TargetModel targetModel = + new TargetModel( + List.of( + new Translation3d(0, Units.inchesToMeters(-9.819867), Units.inchesToMeters(-8.5)), + new Translation3d(0, Units.inchesToMeters(9.819867), Units.inchesToMeters(-8.5)), + new Translation3d(0, Units.inchesToMeters(19.625), Units.inchesToMeters(8.5)), + new Translation3d(0, Units.inchesToMeters(-19.625), Units.inchesToMeters(8.5)))); // See https://firstfrc.blob.core.windows.net/frc2020/PlayingField/LayoutandMarkingDiagram.pdf // pages 4 and 5 double tgtXPos = Units.feetToMeters(54); @@ -120,17 +119,23 @@ public DrivetrainSim(int leftMotorPort, int rightMotorPort, PhotonCamera camera) // Create simulated camera properties. These can be set to mimic your actual camera. var cameraProp = new SimCameraProperties(); - cameraProp.setCalibration(camResolutionWidth, camResolutionHeight, Rotation2d.fromDegrees(camDiagFOV)); + cameraProp.setCalibration( + camResolutionWidth, camResolutionHeight, Rotation2d.fromDegrees(camDiagFOV)); cameraProp.setCalibError(0.2, 0.05); cameraProp.setFPS(25); cameraProp.setAvgLatencyMs(30); cameraProp.setLatencyStdDevMs(4); - // Create a PhotonCameraSim which will update the linked PhotonCamera's values with visible targets. + // Create a PhotonCameraSim which will update the linked PhotonCamera's values with visible + // targets. 
cameraSim = new PhotonCameraSim(camera, cameraProp, minTargetArea, maxLEDRange); // Add the simulated camera to view the targets on this simulated field. - visionSim.addCamera(cameraSim, new Transform3d(new Translation3d(0.25, 0, Robot.CAMERA_HEIGHT_METERS), new Rotation3d(0, -Robot.CAMERA_PITCH_RADIANS, 0))); - + visionSim.addCamera( + cameraSim, + new Transform3d( + new Translation3d(0.25, 0, Robot.CAMERA_HEIGHT_METERS), + new Rotation3d(0, -Robot.CAMERA_PITCH_RADIANS, 0))); + cameraSim.enableDrawWireframe(true); } diff --git a/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Constants.java b/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Constants.java index 1d6c42d65f..0de741f4da 100644 --- a/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Constants.java +++ b/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Constants.java @@ -1,7 +1,5 @@ package frc.robot; -import java.io.IOException; - import edu.wpi.first.apriltag.AprilTagFieldLayout; import edu.wpi.first.apriltag.AprilTagFields; import edu.wpi.first.math.Matrix; @@ -14,6 +12,7 @@ import edu.wpi.first.math.numbers.N1; import edu.wpi.first.math.numbers.N3; import edu.wpi.first.math.util.Units; +import java.io.IOException; public class Constants { @@ -21,12 +20,11 @@ public static class Vision { public static final String kCameraName = "YOUR CAMERA NAME"; // Cam mounted facing forward, half a meter forward of center, half a meter up from center. public static final Transform3d kRobotToCam = - new Transform3d( - new Translation3d(0.5, 0.0, 0.5), - new Rotation3d(0, 0, 0)); + new Transform3d(new Translation3d(0.5, 0.0, 0.5), new Rotation3d(0, 0, 0)); // The layout of the AprilTags on the field public static final AprilTagFieldLayout kTagLayout; + static { try { kTagLayout = AprilTagFields.kDefaultField.loadAprilTagLayoutField(); @@ -42,49 +40,33 @@ public static class Vision { public static final Matrix kMultiTagStdDevs = VecBuilder.fill(0.5, 0.5, 1); } - public static class Swerve { + public static class Swerve { // Physical properties public static final double kTrackWidth = Units.inchesToMeters(18.5); public static final double kTrackLength = Units.inchesToMeters(18.5); - public static final double kRobotWidth = Units.inchesToMeters(25 + 3.25*2); - public static final double kRobotLength = Units.inchesToMeters(25 + 3.25*2); + public static final double kRobotWidth = Units.inchesToMeters(25 + 3.25 * 2); + public static final double kRobotLength = Units.inchesToMeters(25 + 3.25 * 2); public static final double kMaxLinearSpeed = Units.feetToMeters(15.5); public static final double kMaxAngularSpeed = Units.rotationsToRadians(2); public static final double kWheelDiameter = Units.inchesToMeters(4); - public static final double kWheelCircumference = kWheelDiameter*Math.PI; + public static final double kWheelCircumference = kWheelDiameter * Math.PI; public static final double kDriveGearRatio = 6.75; // 6.75:1 SDS MK4 L2 ratio public static final double kSteerGearRatio = 12.8; // 12.8:1 public static final double kDriveDistPerPulse = kWheelCircumference / 1024 / kDriveGearRatio; public static final double kSteerRadPerPulse = 2 * Math.PI / 1024; - + public enum ModuleConstants { FL( // Front left - 1, - 0, 0, 1, - 1, 2, 3, 0, - kTrackLength/2, kTrackWidth/2 - ), + 1, 0, 0, 1, 1, 2, 3, 0, kTrackLength / 2, kTrackWidth / 2), FR( // Front Right - 2, - 2, 4, 5, - 3, 6, 7, 0, - kTrackLength/2, -kTrackWidth/2 - ), + 2, 2, 4, 5, 3, 6, 7, 0, kTrackLength / 2, 
-kTrackWidth / 2), BL( // Back Left - 3, - 4, 8, 9, - 5, 10, 11, 0, - -kTrackLength/2, kTrackWidth/2 - ), + 3, 4, 8, 9, 5, 10, 11, 0, -kTrackLength / 2, kTrackWidth / 2), BR( // Back Right - 4, - 6, 12, 13, - 7, 14, 15, 0, - -kTrackLength/2, -kTrackWidth/2 - ); - + 4, 6, 12, 13, 7, 14, 15, 0, -kTrackLength / 2, -kTrackWidth / 2); + public final int moduleNum; public final int driveMotorID; public final int driveEncoderA; @@ -94,13 +76,18 @@ public enum ModuleConstants { public final int steerEncoderB; public final double angleOffset; public final Translation2d centerOffset; - + private ModuleConstants( - int moduleNum, - int driveMotorID, int driveEncoderA, int driveEncoderB, - int steerMotorID, int steerEncoderA, int steerEncoderB, double angleOffset, - double xOffset, double yOffset - ){ + int moduleNum, + int driveMotorID, + int driveEncoderA, + int driveEncoderB, + int steerMotorID, + int steerEncoderA, + int steerEncoderB, + double angleOffset, + double xOffset, + double yOffset) { this.moduleNum = moduleNum; this.driveMotorID = driveMotorID; this.driveEncoderA = driveEncoderA; @@ -112,26 +99,28 @@ private ModuleConstants( centerOffset = new Translation2d(xOffset, yOffset); } } - + // Feedforward // Linear drive feed forward - public static final SimpleMotorFeedforward kDriveFF = new SimpleMotorFeedforward( // real - 0.25, // Voltage to break static friction - 2.5, // Volts per meter per second - 0.3 // Volts per meter per second squared - ); + public static final SimpleMotorFeedforward kDriveFF = + new SimpleMotorFeedforward( // real + 0.25, // Voltage to break static friction + 2.5, // Volts per meter per second + 0.3 // Volts per meter per second squared + ); // Steer feed forward - public static final SimpleMotorFeedforward kSteerFF = new SimpleMotorFeedforward( // real - 0.5, // Voltage to break static friction - 0.25, // Volts per radian per second - 0.01 // Volts per radian per second squared - ); - + public static final SimpleMotorFeedforward kSteerFF = + new SimpleMotorFeedforward( // real + 0.5, // Voltage to break static friction + 0.25, // Volts per radian per second + 0.01 // Volts per radian per second squared + ); + // PID public static final double kDriveKP = 1; public static final double kDriveKI = 0; public static final double kDriveKD = 0; - + public static final double kSteerKP = 20; public static final double kSteerKI = 0; public static final double kSteerKD = 0.25; diff --git a/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Main.java b/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Main.java index fe215d7362..5333e77e96 100644 --- a/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Main.java +++ b/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Main.java @@ -7,9 +7,9 @@ import edu.wpi.first.wpilibj.RobotBase; public final class Main { - private Main() {} + private Main() {} - public static void main(String... args) { - RobotBase.startRobot(Robot::new); - } + public static void main(String... 
args) { + RobotBase.startRobot(Robot::new); + } } diff --git a/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Robot.java b/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Robot.java index aa55e1cb50..3ce01f8e20 100644 --- a/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Robot.java +++ b/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Robot.java @@ -4,8 +4,6 @@ package frc.robot; -import java.util.Random; - import edu.wpi.first.math.filter.SlewRateLimiter; import edu.wpi.first.math.geometry.Pose2d; import edu.wpi.first.math.geometry.Rotation2d; @@ -17,6 +15,7 @@ import edu.wpi.first.wpilibj.simulation.BatterySim; import edu.wpi.first.wpilibj.simulation.RoboRioSim; import frc.robot.subsystems.drivetrain.SwerveDrive; +import java.util.Random; public class Robot extends TimedRobot { private SwerveDrive drivetrain; @@ -32,7 +31,7 @@ public class Robot extends TimedRobot { private Timer autoTimer = new Timer(); private Random rand = new Random(4512); - + @Override public void robotInit() { drivetrain = new SwerveDrive(); @@ -40,36 +39,41 @@ public void robotInit() { controller = new XboxController(0); } - + @Override public void robotPeriodic() { drivetrain.periodic(); // Correct pose estimate with vision measurements var visionEst = vision.getEstimatedGlobalPose(); - visionEst.ifPresent(est -> { - var estPose = est.estimatedPose.toPose2d(); - // Change our trust in the measurement based on the tags we can see - var estStdDevs = vision.getEstimationStdDevs(estPose); - - drivetrain.addVisionMeasurement(est.estimatedPose.toPose2d(), est.timestampSeconds, estStdDevs); - }); + visionEst.ifPresent( + est -> { + var estPose = est.estimatedPose.toPose2d(); + // Change our trust in the measurement based on the tags we can see + var estStdDevs = vision.getEstimationStdDevs(estPose); + + drivetrain.addVisionMeasurement( + est.estimatedPose.toPose2d(), est.timestampSeconds, estStdDevs); + }); // Apply a random offset to pose estimator to test vision correction - if(controller.getBButtonPressed()) { - var trf = new Transform2d(new Translation2d(rand.nextDouble() * 4 - 2, rand.nextDouble() * 4 - 2), new Rotation2d(rand.nextDouble() * 2 * Math.PI)); + if (controller.getBButtonPressed()) { + var trf = + new Transform2d( + new Translation2d(rand.nextDouble() * 4 - 2, rand.nextDouble() * 4 - 2), + new Rotation2d(rand.nextDouble() * 2 * Math.PI)); drivetrain.resetPose(drivetrain.getPose().plus(trf), false); } // Log values to the dashboard drivetrain.log(); } - + @Override public void disabledPeriodic() { drivetrain.stop(); } - + @Override public void autonomousInit() { autoTimer.restart(); @@ -77,31 +81,30 @@ public void autonomousInit() { drivetrain.resetPose(pose, true); vision.resetSimPose(pose); } - + @Override public void autonomousPeriodic() { // translate diagonally while spinning - if(autoTimer.get() < 10) { + if (autoTimer.get() < 10) { drivetrain.drive(0.5, 0.5, 0.5, false); - } - else { + } else { autoTimer.stop(); drivetrain.stop(); } } - + @Override public void teleopPeriodic() { // We will use an "arcade drive" scheme to turn joystick values into target robot speeds // We want to get joystick values where pushing forward/left is positive double forward = -controller.getLeftY() * kDriveSpeed; - if(Math.abs(forward) < 0.1) forward = 0; // deadband small values + if (Math.abs(forward) < 0.1) forward = 0; // deadband small values forward = forwardLimiter.calculate(forward); // limit acceleration double strafe = 
-controller.getLeftX() * kDriveSpeed; - if(Math.abs(strafe) < 0.1) strafe = 0; + if (Math.abs(strafe) < 0.1) strafe = 0; strafe = strafeLimiter.calculate(strafe); double turn = -controller.getRightX() * kDriveSpeed; - if(Math.abs(turn) < 0.1) turn = 0; + if (Math.abs(turn) < 0.1) turn = 0; turn = turnLimiter.calculate(turn); // Convert from joystick values to real target speeds @@ -120,12 +123,13 @@ public void simulationPeriodic() { // Update camera simulation vision.simulationPeriodic(drivetrain.getSimPose()); - + var debugField = vision.getSimDebugField(); debugField.getObject("EstimatedRobot").setPose(drivetrain.getPose()); debugField.getObject("EstimatedRobotModules").setPoses(drivetrain.getModulePoses()); // Calculate battery voltage sag due to current draw - RoboRioSim.setVInVoltage(BatterySim.calculateDefaultBatteryLoadedVoltage(drivetrain.getCurrentDraw())); + RoboRioSim.setVInVoltage( + BatterySim.calculateDefaultBatteryLoadedVoltage(drivetrain.getCurrentDraw())); } } diff --git a/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Vision.java b/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Vision.java index e868b5cf82..e88bdf2db4 100644 --- a/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Vision.java +++ b/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Vision.java @@ -1,13 +1,6 @@ package frc.robot; -import org.photonvision.EstimatedRobotPose; -import org.photonvision.PhotonCamera; -import org.photonvision.PhotonPoseEstimator; -import org.photonvision.PhotonPoseEstimator.PoseStrategy; -import org.photonvision.simulation.PhotonCameraSim; -import org.photonvision.simulation.SimCameraProperties; -import org.photonvision.simulation.VisionSystemSim; -import org.photonvision.targeting.PhotonPipelineResult; +import static frc.robot.Constants.Vision.*; import edu.wpi.first.math.Matrix; import edu.wpi.first.math.VecBuilder; @@ -16,10 +9,15 @@ import edu.wpi.first.math.numbers.N1; import edu.wpi.first.math.numbers.N3; import edu.wpi.first.wpilibj.smartdashboard.Field2d; - -import static frc.robot.Constants.Vision.*; - import java.util.Optional; +import org.photonvision.EstimatedRobotPose; +import org.photonvision.PhotonCamera; +import org.photonvision.PhotonPoseEstimator; +import org.photonvision.PhotonPoseEstimator.PoseStrategy; +import org.photonvision.simulation.PhotonCameraSim; +import org.photonvision.simulation.SimCameraProperties; +import org.photonvision.simulation.VisionSystemSim; +import org.photonvision.targeting.PhotonPipelineResult; public class Vision { private final PhotonCamera camera; @@ -33,11 +31,12 @@ public class Vision { public Vision() { camera = new PhotonCamera(kCameraName); - photonEstimator = new PhotonPoseEstimator(kTagLayout, PoseStrategy.MULTI_TAG_PNP, camera, kRobotToCam); + photonEstimator = + new PhotonPoseEstimator(kTagLayout, PoseStrategy.MULTI_TAG_PNP, camera, kRobotToCam); photonEstimator.setMultiTagFallbackStrategy(PoseStrategy.LOWEST_AMBIGUITY); - //----- Simulation - if(Robot.isSimulation()) { + // ----- Simulation + if (Robot.isSimulation()) { // Create the vision system simulation which handles cameras and targets on the field. visionSim = new VisionSystemSim("main"); // Add all the AprilTags inside the tag layout as visible targets to this simulated field. 
@@ -49,11 +48,12 @@ public Vision() { cameraProp.setFPS(15); cameraProp.setAvgLatencyMs(50); cameraProp.setLatencyStdDevMs(15); - // Create a PhotonCameraSim which will update the linked PhotonCamera's values with visible targets. + // Create a PhotonCameraSim which will update the linked PhotonCamera's values with visible + // targets. cameraSim = new PhotonCameraSim(camera, cameraProp); // Add the simulated camera to view the targets on this simulated field. visionSim.addCamera(cameraSim, kRobotToCam); - + cameraSim.enableDrawWireframe(true); } } @@ -65,28 +65,33 @@ public PhotonPipelineResult getLatestResult() { /** * The latest estimated robot pose on the field from vision data. This may be empty. This should * only be called once per loop. - * - * @return An {@link EstimatedRobotPose} with an estimated pose, estimate timestamp, and targets used for estimation. + * + * @return An {@link EstimatedRobotPose} with an estimated pose, estimate timestamp, and targets + * used for estimation. */ public Optional getEstimatedGlobalPose() { var visionEst = photonEstimator.update(); double latestTimestamp = camera.getLatestResult().getTimestampSeconds(); boolean newResult = Math.abs(latestTimestamp - lastEstTimestamp) > 1e-5; - if(Robot.isSimulation()) { + if (Robot.isSimulation()) { visionEst.ifPresentOrElse( - est -> getSimDebugField().getObject("VisionEstimation").setPose(est.estimatedPose.toPose2d()), - () -> {if(newResult) getSimDebugField().getObject("VisionEstimation").setPoses();} - ); + est -> + getSimDebugField() + .getObject("VisionEstimation") + .setPose(est.estimatedPose.toPose2d()), + () -> { + if (newResult) getSimDebugField().getObject("VisionEstimation").setPoses(); + }); } - if(newResult) lastEstTimestamp = latestTimestamp; + if (newResult) lastEstTimestamp = latestTimestamp; return visionEst; } /** - * The standard deviations of the estimated pose from {@link #getEstimatedGlobalPose()}, for use with - * {@link edu.wpi.first.math.estimator.SwerveDrivePoseEstimator SwerveDrivePoseEstimator}. This should - * only be used when there are targets visible. - * + * The standard deviations of the estimated pose from {@link #getEstimatedGlobalPose()}, for use + * with {@link edu.wpi.first.math.estimator.SwerveDrivePoseEstimator SwerveDrivePoseEstimator}. + * This should only be used when there are targets visible. + * * @param estimatedPose The estimated pose to guess standard deviations for. 
*/ public Matrix getEstimationStdDevs(Pose2d estimatedPose) { @@ -94,25 +99,26 @@ public Matrix getEstimationStdDevs(Pose2d estimatedPose) { var targets = getLatestResult().getTargets(); int numTags = 0; double avgDist = 0; - for(var tgt : targets) { + for (var tgt : targets) { var tagPose = photonEstimator.getFieldTags().getTagPose(tgt.getFiducialId()); - if(tagPose.isEmpty()) continue; + if (tagPose.isEmpty()) continue; numTags++; - avgDist += tagPose.get().toPose2d().getTranslation().getDistance(estimatedPose.getTranslation()); + avgDist += + tagPose.get().toPose2d().getTranslation().getDistance(estimatedPose.getTranslation()); } - if(numTags == 0) return estStdDevs; + if (numTags == 0) return estStdDevs; avgDist /= numTags; // Decrease std devs if multiple targets are visible - if(numTags > 1) estStdDevs = kMultiTagStdDevs; + if (numTags > 1) estStdDevs = kMultiTagStdDevs; // Increase std devs based on (average) distance - if(numTags == 1 && avgDist > 4) estStdDevs = VecBuilder.fill(Double.MAX_VALUE, Double.MAX_VALUE, Double.MAX_VALUE); + if (numTags == 1 && avgDist > 4) + estStdDevs = VecBuilder.fill(Double.MAX_VALUE, Double.MAX_VALUE, Double.MAX_VALUE); else estStdDevs = estStdDevs.times(1 + (avgDist * avgDist / 30)); return estStdDevs; } - - //----- Simulation + // ----- Simulation public void simulationPeriodic(Pose2d robotSimPose) { visionSim.update(robotSimPose); @@ -120,12 +126,12 @@ public void simulationPeriodic(Pose2d robotSimPose) { /** Reset pose history of the robot in the vision system simulation. */ public void resetSimPose(Pose2d pose) { - if(Robot.isSimulation()) visionSim.resetRobotPose(pose); + if (Robot.isSimulation()) visionSim.resetRobotPose(pose); } /** A Field2d for visualizing our robot and objects on the field. */ public Field2d getSimDebugField() { - if(!Robot.isSimulation()) return null; + if (!Robot.isSimulation()) return null; return visionSim.getDebugField(); } } diff --git a/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/subsystems/drivetrain/SwerveDrive.java b/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/subsystems/drivetrain/SwerveDrive.java index b0c13d6bbe..b37dfb855c 100644 --- a/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/subsystems/drivetrain/SwerveDrive.java +++ b/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/subsystems/drivetrain/SwerveDrive.java @@ -31,50 +31,57 @@ public class SwerveDrive { }; // The kinematics for translating between robot chassis speeds and module states. - private final SwerveDriveKinematics kinematics = new SwerveDriveKinematics( - swerveMods[0].getModuleConstants().centerOffset, - swerveMods[1].getModuleConstants().centerOffset, - swerveMods[2].getModuleConstants().centerOffset, - swerveMods[3].getModuleConstants().centerOffset - ); + private final SwerveDriveKinematics kinematics = + new SwerveDriveKinematics( + swerveMods[0].getModuleConstants().centerOffset, + swerveMods[1].getModuleConstants().centerOffset, + swerveMods[2].getModuleConstants().centerOffset, + swerveMods[3].getModuleConstants().centerOffset); private final ADXRS450_Gyro gyro = new ADXRS450_Gyro(Port.kOnboardCS0); - + // The robot pose estimator for tracking swerve odometry and applying vision corrections. 
private final SwerveDrivePoseEstimator poseEstimator; private ChassisSpeeds targetChassisSpeeds = new ChassisSpeeds(); - //----- Simulation + // ----- Simulation private final ADXRS450_GyroSim gyroSim; private final SwerveDriveSim swerveDriveSim; private double totalCurrentDraw = 0; - + public SwerveDrive() { // Define the standard deviations for the pose estimator, which determine how fast the pose - // estimate converges to the vision measurement. This should depend on the vision measurement noise + // estimate converges to the vision measurement. This should depend on the vision measurement + // noise // and how many or how frequently vision measurements are applied to the pose estimator. var stateStdDevs = VecBuilder.fill(0.1, 0.1, 0.1); var visionStdDevs = VecBuilder.fill(1, 1, 1); - poseEstimator = new SwerveDrivePoseEstimator( - kinematics, - getGyroYaw(), - getModulePositions(), - new Pose2d(), - stateStdDevs, - visionStdDevs - ); - - //----- Simulation + poseEstimator = + new SwerveDrivePoseEstimator( + kinematics, + getGyroYaw(), + getModulePositions(), + new Pose2d(), + stateStdDevs, + visionStdDevs); + + // ----- Simulation gyroSim = new ADXRS450_GyroSim(gyro); - swerveDriveSim = new SwerveDriveSim( - kDriveFF, DCMotor.getFalcon500(1), kDriveGearRatio, kWheelDiameter/2.0, - kSteerFF, DCMotor.getFalcon500(1), kSteerGearRatio, - kinematics); + swerveDriveSim = + new SwerveDriveSim( + kDriveFF, + DCMotor.getFalcon500(1), + kDriveGearRatio, + kWheelDiameter / 2.0, + kSteerFF, + DCMotor.getFalcon500(1), + kSteerGearRatio, + kinematics); } public void periodic() { - for(SwerveModule module : swerveMods) { + for (SwerveModule module : swerveMods) { module.periodic(); } @@ -85,26 +92,30 @@ public void periodic() { /** * Basic drive control. A target field-relative ChassisSpeeds (vx, vy, omega) is converted to * specific swerve module states. - * + * * @param vxMeters X velocity (forwards/backwards) * @param vyMeters Y velocity (strafe left/right) * @param omegaRadians Angular velocity (rotation CCW+) - * @param openLoop If swerve modules should use feedforward only and ignore velocity feedback control. + * @param openLoop If swerve modules should use feedforward only and ignore velocity feedback + * control. */ - public void drive(double vxMeters, double vyMeters, double omegaRadians, boolean openLoop){ - var targetChassisSpeeds = ChassisSpeeds.fromFieldRelativeSpeeds(vxMeters, vyMeters, omegaRadians, getHeading()); + public void drive(double vxMeters, double vyMeters, double omegaRadians, boolean openLoop) { + var targetChassisSpeeds = + ChassisSpeeds.fromFieldRelativeSpeeds(vxMeters, vyMeters, omegaRadians, getHeading()); setChassisSpeeds(targetChassisSpeeds, openLoop, false); } /** - * Command the swerve drive to the desired chassis speeds by converting them to swerve module states and - * using {@link #setModuleStates(SwerveModuleState[], boolean)}. - * + * Command the swerve drive to the desired chassis speeds by converting them to swerve module + * states and using {@link #setModuleStates(SwerveModuleState[], boolean)}. + * * @param targetChassisSpeeds Target robot-relative chassis speeds (vx, vy, omega). - * @param openLoop If swerve modules should use feedforward only and ignore velocity feedback control. + * @param openLoop If swerve modules should use feedforward only and ignore velocity feedback + * control. * @param steerInPlace If modules should steer to the target angle when target velocity is 0. 
*/ - public void setChassisSpeeds(ChassisSpeeds targetChassisSpeeds, boolean openLoop, boolean steerInPlace){ + public void setChassisSpeeds( + ChassisSpeeds targetChassisSpeeds, boolean openLoop, boolean steerInPlace) { setModuleStates(kinematics.toSwerveModuleStates(targetChassisSpeeds), openLoop, steerInPlace); this.targetChassisSpeeds = targetChassisSpeeds; } @@ -112,51 +123,48 @@ public void setChassisSpeeds(ChassisSpeeds targetChassisSpeeds, boolean openLoop /** * Command the swerve modules to the desired states. Velocites exceeding the maximum speed will be * desaturated (while preserving the ratios between modules). - * - * @param openLoop If swerve modules should use feedforward only and ignore velocity feedback control. + * + * @param openLoop If swerve modules should use feedforward only and ignore velocity feedback + * control. * @param steerInPlace If modules should steer to the target angle when target velocity is 0. */ - public void setModuleStates(SwerveModuleState[] desiredStates, boolean openLoop, boolean steerInPlace){ + public void setModuleStates( + SwerveModuleState[] desiredStates, boolean openLoop, boolean steerInPlace) { SwerveDriveKinematics.desaturateWheelSpeeds(desiredStates, kMaxLinearSpeed); - for(int i = 0; i < swerveMods.length; i++) { + for (int i = 0; i < swerveMods.length; i++) { swerveMods[i].setDesiredState(desiredStates[i], openLoop, steerInPlace); } } /** Stop the swerve drive. */ - public void stop(){ + public void stop() { drive(0, 0, 0, true); } /** See {@link SwerveDrivePoseEstimator#addVisionMeasurement(Pose2d, double)}. */ - public void addVisionMeasurement(Pose2d visionMeasurement, double timestampSeconds){ - poseEstimator.addVisionMeasurement( - visionMeasurement, - timestampSeconds - ); + public void addVisionMeasurement(Pose2d visionMeasurement, double timestampSeconds) { + poseEstimator.addVisionMeasurement(visionMeasurement, timestampSeconds); } /** See {@link SwerveDrivePoseEstimator#addVisionMeasurement(Pose2d, double, Matrix)}. */ - public void addVisionMeasurement(Pose2d visionMeasurement, double timestampSeconds, Matrix stdDevs){ - poseEstimator.addVisionMeasurement( - visionMeasurement, - timestampSeconds, - stdDevs - ); + public void addVisionMeasurement( + Pose2d visionMeasurement, double timestampSeconds, Matrix stdDevs) { + poseEstimator.addVisionMeasurement(visionMeasurement, timestampSeconds, stdDevs); } /** * Reset the estimated pose of the swerve drive on the field. - * + * * @param pose New robot pose. - * @param resetSimPose If the simulated robot pose should also be reset. This effectively teleports the - * robot and should only be used during the setup of the simulation world. + * @param resetSimPose If the simulated robot pose should also be reset. This effectively + * teleports the robot and should only be used during the setup of the simulation world. 
*/ - public void resetPose(Pose2d pose, boolean resetSimPose){ - if(resetSimPose) { + public void resetPose(Pose2d pose, boolean resetSimPose) { + if (resetSimPose) { swerveDriveSim.reset(pose, false); - // we shouldnt realistically be resetting pose after startup, but we will handle it anyway for testing - for(int i = 0; i < swerveMods.length; i++) { + // we shouldnt realistically be resetting pose after startup, but we will handle it anyway for + // testing + for (int i = 0; i < swerveMods.length; i++) { swerveMods[i].simulationUpdate(0, 0, 0, 0, 0, 0); } gyroSim.setAngle(-pose.getRotation().getDegrees()); @@ -169,29 +177,29 @@ public void resetPose(Pose2d pose, boolean resetSimPose){ /** Get the estimated pose of the swerve drive on the field. */ public Pose2d getPose() { return poseEstimator.getEstimatedPosition(); - } + } /** The heading of the swerve drive's estimated pose on the field. */ - public Rotation2d getHeading(){ + public Rotation2d getHeading() { return getPose().getRotation(); } /** Raw gyro yaw (this may not match the field heading!). */ - public Rotation2d getGyroYaw(){ + public Rotation2d getGyroYaw() { return gyro.getRotation2d(); } /** Get the chassis speeds of the robot (vx, vy, omega) from the swerve module states. */ - public ChassisSpeeds getChassisSpeeds(){ + public ChassisSpeeds getChassisSpeeds() { return kinematics.toChassisSpeeds(getModuleStates()); } - + /** - * Get the SwerveModuleState of each swerve module (speed, angle). - * The returned array order matches the kinematics module order. + * Get the SwerveModuleState of each swerve module (speed, angle). The returned array order + * matches the kinematics module order. */ - public SwerveModuleState[] getModuleStates(){ - return new SwerveModuleState[]{ + public SwerveModuleState[] getModuleStates() { + return new SwerveModuleState[] { swerveMods[0].getState(), swerveMods[1].getState(), swerveMods[2].getState(), @@ -200,11 +208,11 @@ public SwerveModuleState[] getModuleStates(){ } /** - * Get the SwerveModulePosition of each swerve module (position, angle). - * The returned array order matches the kinematics module order. + * Get the SwerveModulePosition of each swerve module (position, angle). The returned array order + * matches the kinematics module order. */ - public SwerveModulePosition[] getModulePositions(){ - return new SwerveModulePosition[]{ + public SwerveModulePosition[] getModulePositions() { + return new SwerveModulePosition[] { swerveMods[0].getPosition(), swerveMods[1].getPosition(), swerveMods[2].getPosition(), @@ -213,48 +221,51 @@ public SwerveModulePosition[] getModulePositions(){ } /** - * Get the Pose2d of each swerve module based on kinematics and current robot pose. - * The returned array order matches the kinematics module order. + * Get the Pose2d of each swerve module based on kinematics and current robot pose. The returned + * array order matches the kinematics module order. */ - public Pose2d[] getModulePoses(){ + public Pose2d[] getModulePoses() { Pose2d[] modulePoses = new Pose2d[swerveMods.length]; - for(int i = 0; i < swerveMods.length; i++){ + for (int i = 0; i < swerveMods.length; i++) { var module = swerveMods[i]; - modulePoses[i] = getPose().transformBy( - new Transform2d(module.getModuleConstants().centerOffset, module.getAbsoluteHeading()) - ); + modulePoses[i] = + getPose() + .transformBy( + new Transform2d( + module.getModuleConstants().centerOffset, module.getAbsoluteHeading())); } return modulePoses; } /** Log various drivetrain values to the dashboard. 
*/ - public void log(){ + public void log() { String table = "Drive/"; Pose2d pose = getPose(); - SmartDashboard.putNumber(table+"X", pose.getX()); - SmartDashboard.putNumber(table+"Y", pose.getY()); - SmartDashboard.putNumber(table+"Heading", pose.getRotation().getDegrees()); + SmartDashboard.putNumber(table + "X", pose.getX()); + SmartDashboard.putNumber(table + "Y", pose.getY()); + SmartDashboard.putNumber(table + "Heading", pose.getRotation().getDegrees()); ChassisSpeeds chassisSpeeds = getChassisSpeeds(); - SmartDashboard.putNumber(table+"VX", chassisSpeeds.vxMetersPerSecond); - SmartDashboard.putNumber(table+"VY", chassisSpeeds.vyMetersPerSecond); - SmartDashboard.putNumber(table+"Omega Degrees", Math.toDegrees(chassisSpeeds.omegaRadiansPerSecond)); - SmartDashboard.putNumber(table+"Target VX", targetChassisSpeeds.vxMetersPerSecond); - SmartDashboard.putNumber(table+"Target VY", targetChassisSpeeds.vyMetersPerSecond); - SmartDashboard.putNumber(table+"Target Omega Degrees", Math.toDegrees(targetChassisSpeeds.omegaRadiansPerSecond)); - - for(SwerveModule module : swerveMods) { + SmartDashboard.putNumber(table + "VX", chassisSpeeds.vxMetersPerSecond); + SmartDashboard.putNumber(table + "VY", chassisSpeeds.vyMetersPerSecond); + SmartDashboard.putNumber( + table + "Omega Degrees", Math.toDegrees(chassisSpeeds.omegaRadiansPerSecond)); + SmartDashboard.putNumber(table + "Target VX", targetChassisSpeeds.vxMetersPerSecond); + SmartDashboard.putNumber(table + "Target VY", targetChassisSpeeds.vyMetersPerSecond); + SmartDashboard.putNumber( + table + "Target Omega Degrees", Math.toDegrees(targetChassisSpeeds.omegaRadiansPerSecond)); + + for (SwerveModule module : swerveMods) { module.log(); } } + // ----- Simulation - //----- Simulation - - public void simulationPeriodic(){ + public void simulationPeriodic() { // Pass commanded motor voltages into swerve drive simulation double[] driveInputs = new double[swerveMods.length]; double[] steerInputs = new double[swerveMods.length]; - for(int i = 0; i < swerveMods.length; i++) { + for (int i = 0; i < swerveMods.length; i++) { driveInputs[i] = swerveMods[i].getDriveVoltage(); steerInputs[i] = swerveMods[i].getSteerVoltage(); } @@ -269,17 +280,16 @@ public void simulationPeriodic(){ var steerStates = swerveDriveSim.getSteerStates(); totalCurrentDraw = 0; double[] driveCurrents = swerveDriveSim.getDriveCurrentDraw(); - for(double current : driveCurrents) totalCurrentDraw += current; + for (double current : driveCurrents) totalCurrentDraw += current; double[] steerCurrents = swerveDriveSim.getSteerCurrentDraw(); - for(double current : steerCurrents) totalCurrentDraw += current; - for(int i = 0; i < swerveMods.length; i++) { + for (double current : steerCurrents) totalCurrentDraw += current; + for (int i = 0; i < swerveMods.length; i++) { double drivePos = driveStates.get(i).get(0, 0); double driveRate = driveStates.get(i).get(1, 0); double steerPos = steerStates.get(i).get(0, 0); double steerRate = steerStates.get(i).get(1, 0); swerveMods[i].simulationUpdate( - drivePos, driveRate, driveCurrents[i], - steerPos, steerRate, steerCurrents[i]); + drivePos, driveRate, driveCurrents[i], steerPos, steerRate, steerCurrents[i]); } gyroSim.setRate(-swerveDriveSim.getOmegaRadsPerSec()); @@ -287,8 +297,8 @@ public void simulationPeriodic(){ } /** - * The "actual" pose of the robot on the field used in simulation. - * This is different from the swerve drive's estimated pose. + * The "actual" pose of the robot on the field used in simulation. 
This is different from the + * swerve drive's estimated pose. */ public Pose2d getSimPose() { return swerveDriveSim.getPose(); diff --git a/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/subsystems/drivetrain/SwerveDriveSim.java b/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/subsystems/drivetrain/SwerveDriveSim.java index 607b4a04fb..c5f8c87c47 100644 --- a/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/subsystems/drivetrain/SwerveDriveSim.java +++ b/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/subsystems/drivetrain/SwerveDriveSim.java @@ -1,9 +1,5 @@ package frc.robot.subsystems.drivetrain; -import java.util.ArrayList; -import java.util.List; -import java.util.Random; - import edu.wpi.first.math.MathUtil; import edu.wpi.first.math.Matrix; import edu.wpi.first.math.Nat; @@ -21,20 +17,24 @@ import edu.wpi.first.math.system.LinearSystem; import edu.wpi.first.math.system.plant.DCMotor; import edu.wpi.first.wpilibj.RobotController; +import java.util.ArrayList; +import java.util.List; +import java.util.Random; /** - * This class attempts to simulate the dynamics of a swerve drive. In simulationPeriodic, users should first set - * inputs from motors with {@link #setDriveInputs(double...)} and {@link #setSteerInputs(double...)}, - * call {@link #update(double)} to update the simulation, and then set swerve module's encoder values and - * the drivetrain's gyro values with the results from this class. - * - *
<p>
In this class, distances are expressed with meters, angles with radians, and inputs with voltages. - * - *
<p>
Teams can use {@link edu.wpi.first.wpilibj.smartdashboard.Field2d} to visualize - * their robot on the Sim GUI's field. + * This class attempts to simulate the dynamics of a swerve drive. In simulationPeriodic, users + * should first set inputs from motors with {@link #setDriveInputs(double...)} and {@link + * #setSteerInputs(double...)}, call {@link #update(double)} to update the simulation, and then set + * swerve module's encoder values and the drivetrain's gyro values with the results from this class. + * + *
<p>
In this class, distances are expressed with meters, angles with radians, and inputs with + * voltages. + * + *
<p>
Teams can use {@link edu.wpi.first.wpilibj.smartdashboard.Field2d} to visualize their robot on + * the Sim GUI's field. */ public class SwerveDriveSim { - + private final LinearSystem drivePlant; private final double driveKs; private final DCMotor driveMotor; @@ -44,7 +44,7 @@ public class SwerveDriveSim { private final double steerKs; private final DCMotor steerMotor; private final double steerGearing; - + private final SwerveDriveKinematics kinematics; private final int numModules; @@ -58,73 +58,93 @@ public class SwerveDriveSim { // noiseless "actual" pose of the robot on the field private Pose2d pose = new Pose2d(); private double omegaRadsPerSec = 0; - + /** * Creates a swerve drive simulation. - * - * @param driveFF The feedforward for the drive motors of this swerve drive. This should be in units of meters. - * @param driveMotor The DCMotor model for the drive motor(s) of this swerve drive's modules. - * This should not have any gearing applied. - * @param driveGearing The gear ratio of the drive system. Positive values indicate a reduction where one - * rotation of the drive wheel equals driveGearing rotations of the drive motor. - * @param driveWheelRadius The radius of the module's driving wheel. - * @param steerFF The feedforward for the steer motors of this swerve drive. This should be in units of radians. - * @param steerMotor The DCMotor model for the steer motor(s) of this swerve drive's modules. - * This should not have any gearing applied. - * @param steerGearing The gear ratio of the steer system. Positive values indicate a reduction where one - * rotation of the module heading/azimuth equals steerGearing rotations of the steer motor. + * + * @param driveFF The feedforward for the drive motors of this swerve drive. This should be in + * units of meters. + * @param driveMotor The DCMotor model for the drive motor(s) of this swerve drive's modules. This + * should not have any gearing applied. + * @param driveGearing The gear ratio of the drive system. Positive values indicate a reduction + * where one rotation of the drive wheel equals driveGearing rotations of the drive motor. + * @param driveWheelRadius The radius of the module's driving wheel. + * @param steerFF The feedforward for the steer motors of this swerve drive. This should be in + * units of radians. + * @param steerMotor The DCMotor model for the steer motor(s) of this swerve drive's modules. This + * should not have any gearing applied. + * @param steerGearing The gear ratio of the steer system. Positive values indicate a reduction + * where one rotation of the module heading/azimuth equals steerGearing rotations of the steer + * motor. * @param kinematics The kinematics for this swerve drive. All swerve module information used in - * this class should match the order of the modules this kinematics object was constructed with. + * this class should match the order of the modules this kinematics object was constructed + * with. 
*/ public SwerveDriveSim( - SimpleMotorFeedforward driveFF, DCMotor driveMotor, double driveGearing, double driveWheelRadius, - SimpleMotorFeedforward steerFF, DCMotor steerMotor, double steerGearing, + SimpleMotorFeedforward driveFF, + DCMotor driveMotor, + double driveGearing, + double driveWheelRadius, + SimpleMotorFeedforward steerFF, + DCMotor steerMotor, + double steerGearing, SwerveDriveKinematics kinematics) { this( - new LinearSystem( - Matrix.mat(Nat.N2(), Nat.N2()).fill(0.0, 1.0, 0.0, -driveFF.kv / driveFF.ka), - VecBuilder.fill(0.0, 1.0 / driveFF.ka), - Matrix.mat(Nat.N2(), Nat.N2()).fill(1.0, 0.0, 0.0, 1.0), - VecBuilder.fill(0.0, 0.0) - ), - driveFF.ks, driveMotor, driveGearing, driveWheelRadius, - new LinearSystem( - Matrix.mat(Nat.N2(), Nat.N2()).fill(0.0, 1.0, 0.0, -steerFF.kv / steerFF.ka), - VecBuilder.fill(0.0, 1.0 / steerFF.ka), - Matrix.mat(Nat.N2(), Nat.N2()).fill(1.0, 0.0, 0.0, 1.0), - VecBuilder.fill(0.0, 0.0) - ), - steerFF.ks, steerMotor, steerGearing, - kinematics - ); + new LinearSystem( + Matrix.mat(Nat.N2(), Nat.N2()).fill(0.0, 1.0, 0.0, -driveFF.kv / driveFF.ka), + VecBuilder.fill(0.0, 1.0 / driveFF.ka), + Matrix.mat(Nat.N2(), Nat.N2()).fill(1.0, 0.0, 0.0, 1.0), + VecBuilder.fill(0.0, 0.0)), + driveFF.ks, + driveMotor, + driveGearing, + driveWheelRadius, + new LinearSystem( + Matrix.mat(Nat.N2(), Nat.N2()).fill(0.0, 1.0, 0.0, -steerFF.kv / steerFF.ka), + VecBuilder.fill(0.0, 1.0 / steerFF.ka), + Matrix.mat(Nat.N2(), Nat.N2()).fill(1.0, 0.0, 0.0, 1.0), + VecBuilder.fill(0.0, 0.0)), + steerFF.ks, + steerMotor, + steerGearing, + kinematics); } /** * Creates a swerve drive simulation. - * - * @param drivePlant The {@link LinearSystem} representing a swerve module's drive system. - * The state should be in units of meters and input in volts. - * @param driveKs The static gain in volts of the drive system's feedforward, or the minimum voltage - * to cause motion. Set this to 0 to ignore static friction. - * @param driveMotor The DCMotor model for the drive motor(s) of this swerve drive's modules. - * This should not have any gearing applied. - * @param driveGearing The gear ratio of the drive system. Positive values indicate a reduction where one - * rotation of the drive wheel equals driveGearing rotations of the drive motor. - * @param driveWheelRadius The radius of the module's driving wheel. - * @param steerPlant The {@link LinearSystem} representing a swerve module's steer system. - * The state should be in units of radians and input in volts. - * @param steerKs The static gain in volts of the steer system's feedforward, or the minimum voltage - * to cause motion. Set this to 0 to ignore static friction. - * @param steerMotor The DCMotor model for the steer motor(s) of this swerve drive's modules. - * This should not have any gearing applied. - * @param steerGearing The gear ratio of the steer system. Positive values indicate a reduction where one - * rotation of the module heading/azimuth equals steerGearing rotations of the steer motor. + * + * @param drivePlant The {@link LinearSystem} representing a swerve module's drive system. The + * state should be in units of meters and input in volts. + * @param driveKs The static gain in volts of the drive system's feedforward, or the minimum + * voltage to cause motion. Set this to 0 to ignore static friction. + * @param driveMotor The DCMotor model for the drive motor(s) of this swerve drive's modules. This + * should not have any gearing applied. + * @param driveGearing The gear ratio of the drive system. 
Positive values indicate a reduction + * where one rotation of the drive wheel equals driveGearing rotations of the drive motor. + * @param driveWheelRadius The radius of the module's driving wheel. + * @param steerPlant The {@link LinearSystem} representing a swerve module's steer system. The + * state should be in units of radians and input in volts. + * @param steerKs The static gain in volts of the steer system's feedforward, or the minimum + * voltage to cause motion. Set this to 0 to ignore static friction. + * @param steerMotor The DCMotor model for the steer motor(s) of this swerve drive's modules. This + * should not have any gearing applied. + * @param steerGearing The gear ratio of the steer system. Positive values indicate a reduction + * where one rotation of the module heading/azimuth equals steerGearing rotations of the steer + * motor. * @param kinematics The kinematics for this swerve drive. All swerve module information used in - * this class should match the order of the modules this kinematics object was constructed with. + * this class should match the order of the modules this kinematics object was constructed + * with. */ public SwerveDriveSim( - LinearSystem drivePlant, double driveKs, DCMotor driveMotor, double driveGearing, double driveWheelRadius, - LinearSystem steerPlant, double steerKs, DCMotor steerMotor, double steerGearing, + LinearSystem drivePlant, + double driveKs, + DCMotor driveMotor, + double driveGearing, + double driveWheelRadius, + LinearSystem steerPlant, + double steerKs, + DCMotor steerMotor, + double steerGearing, SwerveDriveKinematics kinematics) { this.drivePlant = drivePlant; this.driveKs = driveKs; @@ -142,7 +162,7 @@ public SwerveDriveSim( driveStates = new ArrayList<>(numModules); steerInputs = new double[numModules]; steerStates = new ArrayList<>(numModules); - for(int i = 0; i < numModules; i++) { + for (int i = 0; i < numModules; i++) { driveStates.add(VecBuilder.fill(0, 0)); steerStates.add(VecBuilder.fill(0, 0)); } @@ -150,11 +170,12 @@ public SwerveDriveSim( /** * Sets the input voltages of the drive motors. + * * @param inputs Input voltages. These should match the module order used in the kinematics. */ public void setDriveInputs(double... inputs) { final double battVoltage = RobotController.getBatteryVoltage(); - for(int i = 0; i < driveInputs.length; i++) { + for (int i = 0; i < driveInputs.length; i++) { double input = inputs[i]; driveInputs[i] = MathUtil.clamp(input, -battVoltage, battVoltage); } @@ -162,19 +183,21 @@ public void setDriveInputs(double... inputs) { /** * Sets the input voltages of the steer motors. + * * @param inputs Input voltages. These should match the module order used in the kinematics. */ public void setSteerInputs(double... inputs) { final double battVoltage = RobotController.getBatteryVoltage(); - for(int i = 0; i < steerInputs.length; i++) { + for (int i = 0; i < steerInputs.length; i++) { double input = inputs[i]; steerInputs[i] = MathUtil.clamp(input, -battVoltage, battVoltage); } } /** - * Computes the new x given the old x and the control input. Includes the effect of static friction. - * + * Computes the new x given the old x and the control input. Includes the effect of static + * friction. + * * @param discA The discretized system matrix. * @param discB The discretized input matrix. * @param x The position/velocity state of the drive/steer system. @@ -182,7 +205,8 @@ public void setSteerInputs(double... inputs) { * @param ks The kS value of the feedforward model. 
* @return The updated x, including the effect of static friction. */ - protected static Matrix calculateX(Matrix discA, Matrix discB, Matrix x, double input, double ks) { + protected static Matrix calculateX( + Matrix discA, Matrix discB, Matrix x, double input, double ks) { var Ax = discA.times(x); double nextStateVel = Ax.get(1, 0); // input required to make next state vel == 0 @@ -198,16 +222,17 @@ protected static Matrix calculateX(Matrix discA, Matrix double ksInputEffect = 0; // system velocity was reduced to 0, resist any input - if(Math.abs(ksSystemEffect) < ks) { + if (Math.abs(ksSystemEffect) < ks) { double absInput = Math.abs(input); ksInputEffect = -MathUtil.clamp(ks * inputSign, -absInput, absInput); } - // non-zero system velocity, but input causes velocity sign change. Resist input after sign change - else if((input * signToStop) > (inputToStop * signToStop)) { + // non-zero system velocity, but input causes velocity sign change. Resist input after sign + // change + else if ((input * signToStop) > (inputToStop * signToStop)) { double absInput = Math.abs(input - inputToStop); ksInputEffect = -MathUtil.clamp(ks * inputSign, -absInput, absInput); } - + // calculate next x including static friction var Bu = discB.times(VecBuilder.fill(input + ksSystemEffect + ksInputEffect)); return Ax.plus(Bu); @@ -215,7 +240,7 @@ else if((input * signToStop) > (inputToStop * signToStop)) { /** * Update the drivetrain states with the given timestep. - * + * * @param dtSeconds The timestep in seconds. This should be the robot loop period. */ public void update(double dtSeconds) { @@ -223,13 +248,29 @@ public void update(double dtSeconds) { var steerDiscAB = Discretization.discretizeAB(steerPlant.getA(), steerPlant.getB(), dtSeconds); var moduleDeltas = new SwerveModulePosition[numModules]; - for(int i = 0; i < numModules; i++) { + for (int i = 0; i < numModules; i++) { double prevDriveStatePos = driveStates.get(i).get(0, 0); - driveStates.set(i, calculateX(driveDiscAB.getFirst(), driveDiscAB.getSecond(), driveStates.get(i), driveInputs[i], driveKs)); + driveStates.set( + i, + calculateX( + driveDiscAB.getFirst(), + driveDiscAB.getSecond(), + driveStates.get(i), + driveInputs[i], + driveKs)); double currDriveStatePos = driveStates.get(i).get(0, 0); - steerStates.set(i, calculateX(steerDiscAB.getFirst(), steerDiscAB.getSecond(), steerStates.get(i), steerInputs[i], steerKs)); + steerStates.set( + i, + calculateX( + steerDiscAB.getFirst(), + steerDiscAB.getSecond(), + steerStates.get(i), + steerInputs[i], + steerKs)); double currSteerStatePos = steerStates.get(i).get(0, 0); - moduleDeltas[i] = new SwerveModulePosition(currDriveStatePos - prevDriveStatePos, new Rotation2d(currSteerStatePos)); + moduleDeltas[i] = + new SwerveModulePosition( + currDriveStatePos - prevDriveStatePos, new Rotation2d(currSteerStatePos)); } var twist = kinematics.toTwist2d(moduleDeltas); @@ -238,17 +279,17 @@ public void update(double dtSeconds) { } /** - * Reset the simulated swerve drive state. This effectively teleports the robot and should only - * be used during the setup of the simulation world. - * + * Reset the simulated swerve drive state. This effectively teleports the robot and should only be + * used during the setup of the simulation world. + * * @param pose The new pose of the simulated swerve drive. - * @param preserveMotion If true, the current module states will be preserved. Otherwise, - * they will be reset to zeros. + * @param preserveMotion If true, the current module states will be preserved. 
Otherwise, they + * will be reset to zeros. */ public void reset(Pose2d pose, boolean preserveMotion) { this.pose = pose; - if(!preserveMotion) { - for(int i = 0; i < numModules; i++) { + if (!preserveMotion) { + for (int i = 0; i < numModules; i++) { driveStates.set(i, VecBuilder.fill(0, 0)); steerStates.set(i, VecBuilder.fill(0, 0)); } @@ -257,20 +298,22 @@ public void reset(Pose2d pose, boolean preserveMotion) { } /** - * Reset the simulated swerve drive state. This effectively teleports the robot and should only - * be used during the setup of the simulation world. - * + * Reset the simulated swerve drive state. This effectively teleports the robot and should only be + * used during the setup of the simulation world. + * * @param pose The new pose of the simulated swerve drive. * @param moduleDriveStates The new states of the modules' drive systems in [meters, meters/sec]. * These should match the module order used in the kinematics. - * @param moduleSteerStates The new states of the modules' steer systems in [radians, radians/sec]. - * These should match the module order used in the kinematics. + * @param moduleSteerStates The new states of the modules' steer systems in [radians, + * radians/sec]. These should match the module order used in the kinematics. */ - public void reset(Pose2d pose, List> moduleDriveStates, List> moduleSteerStates) { - if(moduleDriveStates.size() != driveStates.size() || moduleSteerStates.size() != steerStates.size()) + public void reset( + Pose2d pose, List> moduleDriveStates, List> moduleSteerStates) { + if (moduleDriveStates.size() != driveStates.size() + || moduleSteerStates.size() != steerStates.size()) throw new IllegalArgumentException("Provided module states do not match number of modules!"); this.pose = pose; - for(int i = 0; i < numModules; i++) { + for (int i = 0; i < numModules; i++) { driveStates.set(i, moduleDriveStates.get(i).copy()); steerStates.set(i, moduleSteerStates.get(i).copy()); } @@ -279,75 +322,80 @@ public void reset(Pose2d pose, List> moduleDriveStates, List> getDriveStates() { List> states = new ArrayList<>(); - for(int i = 0; i < driveStates.size(); i++) { + for (int i = 0; i < driveStates.size(); i++) { states.add(driveStates.get(i).copy()); } return states; } /** - * Get the state of each module's steer system in [radians, radians/sec]. The returned list order matches the kinematics module order. - * This should be called after {@link #update(double)}. + * Get the state of each module's steer system in [radians, radians/sec]. The returned list order + * matches the kinematics module order. This should be called after {@link #update(double)}. */ public List> getSteerStates() { List> states = new ArrayList<>(); - for(int i = 0; i < steerStates.size(); i++) { + for (int i = 0; i < steerStates.size(); i++) { states.add(steerStates.get(i).copy()); } return states; @@ -355,62 +403,69 @@ public List> getSteerStates() { /** * Get the angular velocity of the robot, which can be useful for gyro simulation. CCW positive. - * This should be called after {@link #update(double)}. + * This should be called after {@link #update(double)}. */ public double getOmegaRadsPerSec() { return omegaRadsPerSec; } /** - * Calculates the current drawn from the battery by the motor(s). Ignores regenerative current from back-emf. - * + * Calculates the current drawn from the battery by the motor(s). Ignores regenerative current + * from back-emf. + * * @param motor The motor(s) used. * @param radiansPerSec The velocity of the motor in radians per second. 
* @param inputVolts The voltage commanded by the motor controller (battery voltage * duty cycle). * @param battVolts The voltage of the battery. */ - protected static double getCurrentDraw(DCMotor motor, double radiansPerSec, double inputVolts, double battVolts) { + protected static double getCurrentDraw( + DCMotor motor, double radiansPerSec, double inputVolts, double battVolts) { double effVolts = inputVolts - radiansPerSec / motor.KvRadPerSecPerVolt; // ignore regeneration - if(inputVolts >= 0) effVolts = MathUtil.clamp(effVolts, 0, inputVolts); + if (inputVolts >= 0) effVolts = MathUtil.clamp(effVolts, 0, inputVolts); else effVolts = MathUtil.clamp(effVolts, inputVolts, 0); // calculate battery current return (inputVolts / battVolts) * (effVolts / motor.rOhms); } /** - * Get the current draw in amps for each module's drive motor(s). This should be called after {@link #update(double)}. - * The returned array order matches the kinematics module order. + * Get the current draw in amps for each module's drive motor(s). This should be called after + * {@link #update(double)}. The returned array order matches the kinematics module order. */ public double[] getDriveCurrentDraw() { double[] currents = new double[numModules]; - for(int i = 0; i < numModules; i++) { + for (int i = 0; i < numModules; i++) { double radiansPerSec = driveStates.get(i).get(1, 0) * driveGearing / driveWheelRadius; - currents[i] = getCurrentDraw(driveMotor, radiansPerSec, driveInputs[i], RobotController.getBatteryVoltage()); + currents[i] = + getCurrentDraw( + driveMotor, radiansPerSec, driveInputs[i], RobotController.getBatteryVoltage()); } return currents; } /** - * Get the current draw in amps for each module's steer motor(s). This should be called after {@link #update(double)}. - * The returned array order matches the kinematics module order. + * Get the current draw in amps for each module's steer motor(s). This should be called after + * {@link #update(double)}. The returned array order matches the kinematics module order. */ public double[] getSteerCurrentDraw() { double[] currents = new double[numModules]; - for(int i = 0; i < numModules; i++) { + for (int i = 0; i < numModules; i++) { double radiansPerSec = steerStates.get(i).get(1, 0) * steerGearing; - currents[i] = getCurrentDraw(steerMotor, radiansPerSec, steerInputs[i], RobotController.getBatteryVoltage()); + currents[i] = + getCurrentDraw( + steerMotor, radiansPerSec, steerInputs[i], RobotController.getBatteryVoltage()); } return currents; } /** - * Get the total current draw in amps of all swerve motors. This should be called after {@link #update(double)}. + * Get the total current draw in amps of all swerve motors. This should be called after {@link + * #update(double)}. 
*/ public double getTotalCurrentDraw() { double sum = 0; - for(double val : getDriveCurrentDraw()) sum += val; - for(double val : getSteerCurrentDraw()) sum += val; + for (double val : getDriveCurrentDraw()) sum += val; + for (double val : getSteerCurrentDraw()) sum += val; return sum; } } diff --git a/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/subsystems/drivetrain/SwerveModule.java b/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/subsystems/drivetrain/SwerveModule.java index 31345af184..ff67afdc31 100644 --- a/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/subsystems/drivetrain/SwerveModule.java +++ b/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/subsystems/drivetrain/SwerveModule.java @@ -16,16 +16,16 @@ public class SwerveModule { - //--- Module Constants + // --- Module Constants private final ModuleConstants moduleConstants; - //--- Hardware + // --- Hardware private final PWMSparkMax driveMotor; private final Encoder driveEncoder; private final PWMSparkMax steerMotor; private final Encoder steerEncoder; - //--- Control + // --- Control private SwerveModuleState desiredState = new SwerveModuleState(); private boolean openLoop = false; @@ -34,13 +34,13 @@ public class SwerveModule { // (A profiled steering PID controller may give better results by utilizing feedforward.) private PIDController steerPidController = new PIDController(kSteerKP, kSteerKI, kSteerKD); - //--- Simulation + // --- Simulation private final EncoderSim driveEncoderSim; private double driveCurrentSim = 0; private final EncoderSim steerEncoderSim; private double steerCurrentSim = 0; - public SwerveModule(ModuleConstants moduleConstants){ + public SwerveModule(ModuleConstants moduleConstants) { this.moduleConstants = moduleConstants; driveMotor = new PWMSparkMax(moduleConstants.driveMotorID); @@ -52,22 +52,25 @@ public SwerveModule(ModuleConstants moduleConstants){ steerPidController.enableContinuousInput(-Math.PI, Math.PI); - //--- Simulation + // --- Simulation driveEncoderSim = new EncoderSim(driveEncoder); steerEncoderSim = new EncoderSim(steerEncoder); } - public void periodic(){ + public void periodic() { // Perform PID feedback control to steer the module to the target angle - double steerPid = steerPidController.calculate(getAbsoluteHeading().getRadians(), desiredState.angle.getRadians()); + double steerPid = + steerPidController.calculate( + getAbsoluteHeading().getRadians(), desiredState.angle.getRadians()); steerMotor.setVoltage(steerPid); // Use feedforward model to translate target drive velocities into voltages double driveFF = kDriveFF.calculate(desiredState.speedMetersPerSecond); double drivePid = 0; - if(!openLoop){ + if (!openLoop) { // Perform PID feedback control to compensate for disturbances - drivePid = drivePidController.calculate(driveEncoder.getRate(), desiredState.speedMetersPerSecond); + drivePid = + drivePidController.calculate(driveEncoder.getRate(), desiredState.speedMetersPerSecond); } driveMotor.setVoltage(driveFF + drivePid); @@ -75,9 +78,11 @@ public void periodic(){ /** * Command this swerve module to the desired angle and velocity. 
+ * * @param steerInPlace If modules should steer to target angle when target velocity is 0 */ - public void setDesiredState(SwerveModuleState desiredState, boolean openLoop, boolean steerInPlace){ + public void setDesiredState( + SwerveModuleState desiredState, boolean openLoop, boolean steerInPlace) { Rotation2d currentRotation = getAbsoluteHeading(); // Avoid turning more than 90 degrees by inverting speed on large angle changes desiredState = SwerveModuleState.optimize(desiredState, currentRotation); @@ -86,28 +91,21 @@ public void setDesiredState(SwerveModuleState desiredState, boolean openLoop, bo } /** Module heading reported by steering encoder. */ - public Rotation2d getAbsoluteHeading(){ + public Rotation2d getAbsoluteHeading() { return Rotation2d.fromRadians(steerEncoder.getDistance()); } /** - * {@link SwerveModuleState} describing absolute module rotation and velocity in meters per second. + * {@link SwerveModuleState} describing absolute module rotation and velocity in meters per + * second. */ - public SwerveModuleState getState(){ - return new SwerveModuleState( - driveEncoder.getRate(), - getAbsoluteHeading() - ); + public SwerveModuleState getState() { + return new SwerveModuleState(driveEncoder.getRate(), getAbsoluteHeading()); } - /** - * {@link SwerveModulePosition} describing absolute module rotation and position in meters. - */ + /** {@link SwerveModulePosition} describing absolute module rotation and position in meters. */ public SwerveModulePosition getPosition() { - return new SwerveModulePosition( - driveEncoder.getDistance(), - getAbsoluteHeading() - ); + return new SwerveModulePosition(driveEncoder.getDistance(), getAbsoluteHeading()); } /** Voltage of the drive motor */ @@ -123,28 +121,36 @@ public double getSteerVoltage() { /** * Constants about this module, like motor IDs, encoder angle offset, and translation from center. 
*/ - public ModuleConstants getModuleConstants(){ + public ModuleConstants getModuleConstants() { return moduleConstants; } - public void log(){ + public void log() { var state = getState(); - String table = "Module "+moduleConstants.moduleNum+"/"; - SmartDashboard.putNumber(table+"Steer Degrees", Math.toDegrees(MathUtil.angleModulus(state.angle.getRadians()))); - SmartDashboard.putNumber(table+"Steer Target Degrees", Math.toDegrees(steerPidController.getSetpoint())); - SmartDashboard.putNumber(table+"Drive Velocity Feet", Units.metersToFeet(state.speedMetersPerSecond)); - SmartDashboard.putNumber(table+"Drive Velocity Target Feet", Units.metersToFeet(desiredState.speedMetersPerSecond)); - SmartDashboard.putNumber(table+"Drive Current", driveCurrentSim); - SmartDashboard.putNumber(table+"Steer Current", steerCurrentSim); + String table = "Module " + moduleConstants.moduleNum + "/"; + SmartDashboard.putNumber( + table + "Steer Degrees", Math.toDegrees(MathUtil.angleModulus(state.angle.getRadians()))); + SmartDashboard.putNumber( + table + "Steer Target Degrees", Math.toDegrees(steerPidController.getSetpoint())); + SmartDashboard.putNumber( + table + "Drive Velocity Feet", Units.metersToFeet(state.speedMetersPerSecond)); + SmartDashboard.putNumber( + table + "Drive Velocity Target Feet", + Units.metersToFeet(desiredState.speedMetersPerSecond)); + SmartDashboard.putNumber(table + "Drive Current", driveCurrentSim); + SmartDashboard.putNumber(table + "Steer Current", steerCurrentSim); } - - //----- Simulation + // ----- Simulation public void simulationUpdate( - double driveEncoderDist, double driveEncoderRate, double driveCurrent, - double steerEncoderDist, double steerEncoderRate, double steerCurrent) { + double driveEncoderDist, + double driveEncoderRate, + double driveCurrent, + double steerEncoderDist, + double steerEncoderRate, + double steerCurrent) { driveEncoderSim.setDistance(driveEncoderDist); driveEncoderSim.setRate(driveEncoderRate); this.driveCurrentSim = driveCurrent; @@ -160,4 +166,4 @@ public double getDriveCurrentSim() { public double getSteerCurrentSim() { return steerCurrentSim; } -} \ No newline at end of file +} From 3a3624a9feb5f3aa7b52db1f2043f8e48b0a154f Mon Sep 17 00:00:00 2001 From: amquake Date: Tue, 19 Sep 2023 01:09:56 -0700 Subject: [PATCH 41/46] wpiformat --- .../.wpilib/wpilib_preferences.json | 2 +- .../src/main/deploy/example.txt | 2 +- .../src/main/java/frc/robot/Constants.java | 25 +++++++++++++++++- .../src/main/java/frc/robot/Main.java | 26 ++++++++++++++++--- .../src/main/java/frc/robot/Robot.java | 26 ++++++++++++++++--- .../src/main/java/frc/robot/Vision.java | 24 +++++++++++++++++ .../subsystems/drivetrain/SwerveDrive.java | 24 +++++++++++++++++ .../subsystems/drivetrain/SwerveDriveSim.java | 25 +++++++++++++++++- .../subsystems/drivetrain/SwerveModule.java | 25 +++++++++++++++++- 9 files changed, 168 insertions(+), 11 deletions(-) diff --git a/photonlib-java-examples/swervedriveposeestsim/.wpilib/wpilib_preferences.json b/photonlib-java-examples/swervedriveposeestsim/.wpilib/wpilib_preferences.json index efc45eba76..832704069b 100644 --- a/photonlib-java-examples/swervedriveposeestsim/.wpilib/wpilib_preferences.json +++ b/photonlib-java-examples/swervedriveposeestsim/.wpilib/wpilib_preferences.json @@ -3,4 +3,4 @@ "currentLanguage": "java", "projectYear": "2023", "teamNumber": 4512 -} \ No newline at end of file +} diff --git a/photonlib-java-examples/swervedriveposeestsim/src/main/deploy/example.txt 
b/photonlib-java-examples/swervedriveposeestsim/src/main/deploy/example.txt index bb82515dad..d6ec5cf830 100644 --- a/photonlib-java-examples/swervedriveposeestsim/src/main/deploy/example.txt +++ b/photonlib-java-examples/swervedriveposeestsim/src/main/deploy/example.txt @@ -1,3 +1,3 @@ Files placed in this directory will be deployed to the RoboRIO into the 'deploy' directory in the home folder. Use the 'Filesystem.getDeployDirectory' wpilib function -to get a proper path relative to the deploy directory. \ No newline at end of file +to get a proper path relative to the deploy directory. diff --git a/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Constants.java b/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Constants.java index 0de741f4da..708d87a5b0 100644 --- a/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Constants.java +++ b/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Constants.java @@ -1,3 +1,27 @@ +/* + * MIT License + * + * Copyright (c) PhotonVision + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + package frc.robot; import edu.wpi.first.apriltag.AprilTagFieldLayout; @@ -15,7 +39,6 @@ import java.io.IOException; public class Constants { - public static class Vision { public static final String kCameraName = "YOUR CAMERA NAME"; // Cam mounted facing forward, half a meter forward of center, half a meter up from center. diff --git a/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Main.java b/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Main.java index 5333e77e96..f227c28da0 100644 --- a/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Main.java +++ b/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Main.java @@ -1,6 +1,26 @@ -// Copyright (c) FIRST and other WPILib contributors. -// Open Source Software; you can modify and/or share it under the terms of -// the WPILib BSD license file in the root directory of this project. 
+/* + * MIT License + * + * Copyright (c) PhotonVision + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ package frc.robot; diff --git a/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Robot.java b/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Robot.java index 3ce01f8e20..d32d4d477c 100644 --- a/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Robot.java +++ b/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Robot.java @@ -1,6 +1,26 @@ -// Copyright (c) FIRST and other WPILib contributors. -// Open Source Software; you can modify and/or share it under the terms of -// the WPILib BSD license file in the root directory of this project. +/* + * MIT License + * + * Copyright (c) PhotonVision + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. 
+ */ package frc.robot; diff --git a/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Vision.java b/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Vision.java index e88bdf2db4..1d9b2ab9ed 100644 --- a/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Vision.java +++ b/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/Vision.java @@ -1,3 +1,27 @@ +/* + * MIT License + * + * Copyright (c) PhotonVision + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + package frc.robot; import static frc.robot.Constants.Vision.*; diff --git a/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/subsystems/drivetrain/SwerveDrive.java b/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/subsystems/drivetrain/SwerveDrive.java index b37dfb855c..21911c997d 100644 --- a/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/subsystems/drivetrain/SwerveDrive.java +++ b/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/subsystems/drivetrain/SwerveDrive.java @@ -1,3 +1,27 @@ +/* + * MIT License + * + * Copyright (c) PhotonVision + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. 
+ */ + package frc.robot.subsystems.drivetrain; import static frc.robot.Constants.Swerve.*; diff --git a/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/subsystems/drivetrain/SwerveDriveSim.java b/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/subsystems/drivetrain/SwerveDriveSim.java index c5f8c87c47..aff69fc703 100644 --- a/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/subsystems/drivetrain/SwerveDriveSim.java +++ b/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/subsystems/drivetrain/SwerveDriveSim.java @@ -1,3 +1,27 @@ +/* + * MIT License + * + * Copyright (c) PhotonVision + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + package frc.robot.subsystems.drivetrain; import edu.wpi.first.math.MathUtil; @@ -34,7 +58,6 @@ * the Sim GUI's field. */ public class SwerveDriveSim { - private final LinearSystem drivePlant; private final double driveKs; private final DCMotor driveMotor; diff --git a/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/subsystems/drivetrain/SwerveModule.java b/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/subsystems/drivetrain/SwerveModule.java index ff67afdc31..4615d29217 100644 --- a/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/subsystems/drivetrain/SwerveModule.java +++ b/photonlib-java-examples/swervedriveposeestsim/src/main/java/frc/robot/subsystems/drivetrain/SwerveModule.java @@ -1,3 +1,27 @@ +/* + * MIT License + * + * Copyright (c) PhotonVision + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + package frc.robot.subsystems.drivetrain; import static frc.robot.Constants.Swerve.*; @@ -15,7 +39,6 @@ import edu.wpi.first.wpilibj.smartdashboard.SmartDashboard; public class SwerveModule { - // --- Module Constants private final ModuleConstants moduleConstants; From ed6aa15e014d23c581fdec173814f556c4eb232b Mon Sep 17 00:00:00 2001 From: Matt M Date: Sat, 23 Sep 2023 11:17:39 -0400 Subject: [PATCH 42/46] Switch example build to search for folder names lol --- photonlib-cpp-examples/README.md | 16 ++++++++++++++++ photonlib-cpp-examples/build.gradle | 4 +++- photonlib-cpp-examples/examples.gradle | 9 +++++++++ photonlib-cpp-examples/examples.txt | 4 ---- photonlib-cpp-examples/settings.gradle | 4 +++- photonlib-java-examples/README.md | 15 +++++++++++++++ photonlib-java-examples/build.gradle | 4 +++- photonlib-java-examples/examples.gradle | 9 +++++++++ photonlib-java-examples/examples.txt | 5 ----- photonlib-java-examples/settings.gradle | 7 ++----- 10 files changed, 60 insertions(+), 17 deletions(-) create mode 100644 photonlib-cpp-examples/README.md create mode 100644 photonlib-cpp-examples/examples.gradle delete mode 100644 photonlib-cpp-examples/examples.txt create mode 100644 photonlib-java-examples/examples.gradle delete mode 100644 photonlib-java-examples/examples.txt diff --git a/photonlib-cpp-examples/README.md b/photonlib-cpp-examples/README.md new file mode 100644 index 0000000000..c089f1366a --- /dev/null +++ b/photonlib-cpp-examples/README.md @@ -0,0 +1,16 @@ +## PhotonLib C++ Examples + +### Building + +Build photonvision and publish it locally with: + +``` +photonvision$ ./gradlew photon-lib:publishtomavenlocal +``` + +Now, cd into here, pull in the latest vendor json, and simulate the project of choice + +``` +photonvision/photonlib-cpp-exaples: ./gradlew copyPhotonlib +photonvision/photonlib-cpp-exaples: ./gradlew aimandrange:simulateNativeRelease +``` diff --git a/photonlib-cpp-examples/build.gradle b/photonlib-cpp-examples/build.gradle index 3605779a96..cb4661a3a9 100644 --- a/photonlib-cpp-examples/build.gradle +++ b/photonlib-cpp-examples/build.gradle @@ -27,9 +27,11 @@ spotless { } } +apply from: "examples.gradle" + // Task that depends on the build task for every example task buildAllExamples { task -> - new File('examples.txt').eachLine { line -> + exampleFolderNames.each { line -> task.dependsOn(line + ":build") } } diff --git a/photonlib-cpp-examples/examples.gradle b/photonlib-cpp-examples/examples.gradle new file mode 100644 index 0000000000..5a48c873ef --- /dev/null +++ b/photonlib-cpp-examples/examples.gradle @@ -0,0 +1,9 @@ +// These should be the only 2 non-project subdirectories in the examples folder +// I could check for (it)/build.gradle to exist, but w/e +def EXCLUDED_DIRS = [".gradle", "bin"] + +// List all non-hidden directories not in EXCUDED_DIRS +ext.exampleFolderNames = file("${rootDir}") + .listFiles() + .findAll { it.isDirectory() && !it.isHidden() && !(it.name in EXCLUDED_DIRS) } + .collect { it.name } diff --git a/photonlib-cpp-examples/examples.txt b/photonlib-cpp-examples/examples.txt deleted file mode 100644 index 25f7648520..0000000000 --- a/photonlib-cpp-examples/examples.txt +++ /dev/null @@ -1,4 +0,0 @@ -aimandrange -getinrange -aimattarget -apriltagExample 
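
The deletion above retires the hard-coded `examples.txt` list: from this commit on, any non-hidden subfolder that is not in `EXCLUDED_DIRS` is discovered at configuration time and included as a Gradle subproject. As a rough, hedged illustration of that same filter outside of Gradle, the hypothetical Java snippet below reproduces the discovery logic; the class and method names are invented for this sketch, and the real logic lives in `examples.gradle` as Groovy.

```
import java.io.File;
import java.util.Arrays;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;

/** Hypothetical stand-alone rendering of the examples.gradle folder discovery. */
public class ExampleFolderScan {
    // Mirrors EXCLUDED_DIRS in examples.gradle as introduced in this commit.
    private static final Set<String> EXCLUDED_DIRS = Set.of(".gradle", "bin");

    /** Lists the names of non-hidden, non-excluded subdirectories of rootDir. */
    public static List<String> exampleFolderNames(File rootDir) {
        File[] children = rootDir.listFiles();
        if (children == null) {
            return List.of();
        }
        return Arrays.stream(children)
                .filter(File::isDirectory)
                .filter(dir -> !dir.isHidden())
                .filter(dir -> !EXCLUDED_DIRS.contains(dir.getName()))
                .map(File::getName)
                .sorted()
                .collect(Collectors.toList());
    }

    public static void main(String[] args) {
        // Prints the example project names that settings.gradle would include.
        exampleFolderNames(new File(".")).forEach(System.out::println);
    }
}
```

`settings.gradle` then simply calls `include` on each discovered name, so adding a new example becomes a matter of creating the folder rather than editing a list; a later commit in this series ("Probably fix examples CI") tightens the filter further to skip dot-directories and require a `build.gradle` inside the folder.
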
diff --git a/photonlib-cpp-examples/settings.gradle b/photonlib-cpp-examples/settings.gradle index 51a82a8d56..4c125fe51c 100644 --- a/photonlib-cpp-examples/settings.gradle +++ b/photonlib-cpp-examples/settings.gradle @@ -1 +1,3 @@ -new File('examples.txt').eachLine { line -> include line } +apply from: "examples.gradle" + +exampleFolderNames.each { line -> include line } diff --git a/photonlib-java-examples/README.md b/photonlib-java-examples/README.md index ff4abbd9d7..ffd15b1f5f 100644 --- a/photonlib-java-examples/README.md +++ b/photonlib-java-examples/README.md @@ -1,5 +1,20 @@ ## PhotonLib Java Examples +### Building + +Build photonvision and publish it locally with: + +``` +photonvision$ ./gradlew photon-lib:publishtomavenlocal +``` + +Now, cd into here, pull in the latest vendor json, and simulate the project of choice + +``` +photonvision/photonlib-java-exaples: ./gradlew copyPhotonlib +photonvision/photonlib-java-exaples: ./gradlew aimandrange:simulateJava +``` + ### [**`aimattarget`**](aimattarget) A simple demonstration of using PhotonVision's 2d target yaw to align a differential drivetrain with a target. diff --git a/photonlib-java-examples/build.gradle b/photonlib-java-examples/build.gradle index 3605779a96..29a90b45f9 100644 --- a/photonlib-java-examples/build.gradle +++ b/photonlib-java-examples/build.gradle @@ -2,6 +2,8 @@ plugins { id "com.diffplug.spotless" version "6.1.2" } +apply from: "examples.gradle" + allprojects { repositories { mavenCentral() @@ -29,7 +31,7 @@ spotless { // Task that depends on the build task for every example task buildAllExamples { task -> - new File('examples.txt').eachLine { line -> + exampleFolderNames.each { line -> task.dependsOn(line + ":build") } } diff --git a/photonlib-java-examples/examples.gradle b/photonlib-java-examples/examples.gradle new file mode 100644 index 0000000000..5a48c873ef --- /dev/null +++ b/photonlib-java-examples/examples.gradle @@ -0,0 +1,9 @@ +// These should be the only 2 non-project subdirectories in the examples folder +// I could check for (it)/build.gradle to exist, but w/e +def EXCLUDED_DIRS = [".gradle", "bin"] + +// List all non-hidden directories not in EXCUDED_DIRS +ext.exampleFolderNames = file("${rootDir}") + .listFiles() + .findAll { it.isDirectory() && !it.isHidden() && !(it.name in EXCLUDED_DIRS) } + .collect { it.name } diff --git a/photonlib-java-examples/examples.txt b/photonlib-java-examples/examples.txt deleted file mode 100644 index c7627d90a9..0000000000 --- a/photonlib-java-examples/examples.txt +++ /dev/null @@ -1,5 +0,0 @@ -aimandrange -aimattarget -getinrange -simaimandrange -swervedriveposeestsim diff --git a/photonlib-java-examples/settings.gradle b/photonlib-java-examples/settings.gradle index 63bcd788b7..4c125fe51c 100644 --- a/photonlib-java-examples/settings.gradle +++ b/photonlib-java-examples/settings.gradle @@ -1,6 +1,3 @@ -include 'photon-targeting' -include 'photon-core' -include 'photon-server' -include 'photon-lib' +apply from: "examples.gradle" -new File('examples.txt').eachLine { line -> include line } +exampleFolderNames.each { line -> include line } From fb081ef2a6ed6050b58e7a3ca678bacdf178ecd9 Mon Sep 17 00:00:00 2001 From: Matt Date: Sat, 23 Sep 2023 22:43:06 -0400 Subject: [PATCH 43/46] Probably fix examples CI --- photonlib-cpp-examples/examples.gradle | 9 +++++++-- photonlib-java-examples/examples.gradle | 9 +++++++-- 2 files changed, 14 insertions(+), 4 deletions(-) diff --git a/photonlib-cpp-examples/examples.gradle b/photonlib-cpp-examples/examples.gradle index 
5a48c873ef..e5520b4206 100644 --- a/photonlib-cpp-examples/examples.gradle +++ b/photonlib-cpp-examples/examples.gradle @@ -1,9 +1,14 @@ // These should be the only 2 non-project subdirectories in the examples folder // I could check for (it)/build.gradle to exist, but w/e -def EXCLUDED_DIRS = [".gradle", "bin"] +def EXCLUDED_DIRS = ["bin", "build"] // List all non-hidden directories not in EXCUDED_DIRS ext.exampleFolderNames = file("${rootDir}") .listFiles() - .findAll { it.isDirectory() && !it.isHidden() && !(it.name in EXCLUDED_DIRS) } + .findAll { + return (it.isDirectory() + && !it.isHidden() + && !(it.name in EXCLUDED_DIRS) && !it.name.startsWith(".") + && it.toPath().resolve("build.gradle").toFile().exists()) + } .collect { it.name } diff --git a/photonlib-java-examples/examples.gradle b/photonlib-java-examples/examples.gradle index 5a48c873ef..e5520b4206 100644 --- a/photonlib-java-examples/examples.gradle +++ b/photonlib-java-examples/examples.gradle @@ -1,9 +1,14 @@ // These should be the only 2 non-project subdirectories in the examples folder // I could check for (it)/build.gradle to exist, but w/e -def EXCLUDED_DIRS = [".gradle", "bin"] +def EXCLUDED_DIRS = ["bin", "build"] // List all non-hidden directories not in EXCUDED_DIRS ext.exampleFolderNames = file("${rootDir}") .listFiles() - .findAll { it.isDirectory() && !it.isHidden() && !(it.name in EXCLUDED_DIRS) } + .findAll { + return (it.isDirectory() + && !it.isHidden() + && !(it.name in EXCLUDED_DIRS) && !it.name.startsWith(".") + && it.toPath().resolve("build.gradle").toFile().exists()) + } .collect { it.name } From c8f6f5bd4e3a7ff486c3a5f9981bea30e9ec65a6 Mon Sep 17 00:00:00 2001 From: Matt Date: Sat, 23 Sep 2023 22:44:29 -0400 Subject: [PATCH 44/46] Run wpiformat --- photonlib-cpp-examples/examples.gradle | 8 ++++---- photonlib-java-examples/examples.gradle | 8 ++++---- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/photonlib-cpp-examples/examples.gradle b/photonlib-cpp-examples/examples.gradle index e5520b4206..976440f33f 100644 --- a/photonlib-cpp-examples/examples.gradle +++ b/photonlib-cpp-examples/examples.gradle @@ -5,10 +5,10 @@ def EXCLUDED_DIRS = ["bin", "build"] // List all non-hidden directories not in EXCUDED_DIRS ext.exampleFolderNames = file("${rootDir}") .listFiles() - .findAll { - return (it.isDirectory() - && !it.isHidden() - && !(it.name in EXCLUDED_DIRS) && !it.name.startsWith(".") + .findAll { + return (it.isDirectory() + && !it.isHidden() + && !(it.name in EXCLUDED_DIRS) && !it.name.startsWith(".") && it.toPath().resolve("build.gradle").toFile().exists()) } .collect { it.name } diff --git a/photonlib-java-examples/examples.gradle b/photonlib-java-examples/examples.gradle index e5520b4206..976440f33f 100644 --- a/photonlib-java-examples/examples.gradle +++ b/photonlib-java-examples/examples.gradle @@ -5,10 +5,10 @@ def EXCLUDED_DIRS = ["bin", "build"] // List all non-hidden directories not in EXCUDED_DIRS ext.exampleFolderNames = file("${rootDir}") .listFiles() - .findAll { - return (it.isDirectory() - && !it.isHidden() - && !(it.name in EXCLUDED_DIRS) && !it.name.startsWith(".") + .findAll { + return (it.isDirectory() + && !it.isHidden() + && !(it.name in EXCLUDED_DIRS) && !it.name.startsWith(".") && it.toPath().resolve("build.gradle").toFile().exists()) } .collect { it.name } From 1eb5b37978a50f56e7dd08b96ddbb76fc6cea5cc Mon Sep 17 00:00:00 2001 From: amquake Date: Sun, 24 Sep 2023 00:01:47 -0700 Subject: [PATCH 45/46] link example build instructions --- 
photonlib-cpp-examples/README.md | 15 +++------------ photonlib-java-examples/README.md | 15 +++------------ 2 files changed, 6 insertions(+), 24 deletions(-) diff --git a/photonlib-cpp-examples/README.md b/photonlib-cpp-examples/README.md index c089f1366a..0a600f1a03 100644 --- a/photonlib-cpp-examples/README.md +++ b/photonlib-cpp-examples/README.md @@ -1,16 +1,7 @@ ## PhotonLib C++ Examples -### Building +### Running examples -Build photonvision and publish it locally with: +For instructions on how to run these examples locally, see [Running Examples](https://docs.photonvision.org/en/latest/docs/contributing/photonvision/build-instructions.html#running-examples). -``` -photonvision$ ./gradlew photon-lib:publishtomavenlocal -``` - -Now, cd into here, pull in the latest vendor json, and simulate the project of choice - -``` -photonvision/photonlib-cpp-exaples: ./gradlew copyPhotonlib -photonvision/photonlib-cpp-exaples: ./gradlew aimandrange:simulateNativeRelease -``` +--- diff --git a/photonlib-java-examples/README.md b/photonlib-java-examples/README.md index ffd15b1f5f..d02d6d3a7e 100644 --- a/photonlib-java-examples/README.md +++ b/photonlib-java-examples/README.md @@ -1,19 +1,10 @@ ## PhotonLib Java Examples -### Building +### Running examples -Build photonvision and publish it locally with: +For instructions on how to run these examples locally, see [Running Examples](https://docs.photonvision.org/en/latest/docs/contributing/photonvision/build-instructions.html#running-examples). -``` -photonvision$ ./gradlew photon-lib:publishtomavenlocal -``` - -Now, cd into here, pull in the latest vendor json, and simulate the project of choice - -``` -photonvision/photonlib-java-exaples: ./gradlew copyPhotonlib -photonvision/photonlib-java-exaples: ./gradlew aimandrange:simulateJava -``` +--- ### [**`aimattarget`**](aimattarget) From 3bc624c98dc1104926aa8d4b22ba09e24259b334 Mon Sep 17 00:00:00 2001 From: amquake Date: Sun, 24 Sep 2023 00:02:13 -0700 Subject: [PATCH 46/46] example backlinks to index readme --- photonlib-java-examples/aimandrange/README.md | 3 +++ photonlib-java-examples/aimattarget/README.md | 3 +++ photonlib-java-examples/getinrange/README.md | 3 +++ photonlib-java-examples/simaimandrange/README.md | 3 +++ photonlib-java-examples/swervedriveposeestsim/README.md | 3 +++ 5 files changed, 15 insertions(+) create mode 100644 photonlib-java-examples/aimandrange/README.md create mode 100644 photonlib-java-examples/aimattarget/README.md create mode 100644 photonlib-java-examples/getinrange/README.md create mode 100644 photonlib-java-examples/simaimandrange/README.md create mode 100644 photonlib-java-examples/swervedriveposeestsim/README.md diff --git a/photonlib-java-examples/aimandrange/README.md b/photonlib-java-examples/aimandrange/README.md new file mode 100644 index 0000000000..718aed1b26 --- /dev/null +++ b/photonlib-java-examples/aimandrange/README.md @@ -0,0 +1,3 @@ +## **`aimandrange`** + +### See [PhotonLib Java Examples](./README.md#aimandrange) diff --git a/photonlib-java-examples/aimattarget/README.md b/photonlib-java-examples/aimattarget/README.md new file mode 100644 index 0000000000..f9cd1927a4 --- /dev/null +++ b/photonlib-java-examples/aimattarget/README.md @@ -0,0 +1,3 @@ +## **`aimattarget`** + +### See [PhotonLib Java Examples](./README.md#aimattarget) diff --git a/photonlib-java-examples/getinrange/README.md b/photonlib-java-examples/getinrange/README.md new file mode 100644 index 0000000000..2072891fca --- /dev/null +++ 
b/photonlib-java-examples/getinrange/README.md @@ -0,0 +1,3 @@ +## **`getinrange`** + +### See [PhotonLib Java Examples](./README.md#getinrange) diff --git a/photonlib-java-examples/simaimandrange/README.md b/photonlib-java-examples/simaimandrange/README.md new file mode 100644 index 0000000000..f06248a2e0 --- /dev/null +++ b/photonlib-java-examples/simaimandrange/README.md @@ -0,0 +1,3 @@ +## **`simaimandrange`** + +### See [PhotonLib Java Examples](./README.md#simaimandrange) diff --git a/photonlib-java-examples/swervedriveposeestsim/README.md b/photonlib-java-examples/swervedriveposeestsim/README.md new file mode 100644 index 0000000000..85e8d497db --- /dev/null +++ b/photonlib-java-examples/swervedriveposeestsim/README.md @@ -0,0 +1,3 @@ +## **`swervedriveposeestsim`** + +### See [PhotonLib Java Examples](./README.md#swervedriveposeestsim)
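
As a closing illustration of the `SwerveModule` simulation hooks reformatted earlier in this series, the sketch below shows one way a periodic simulation loop might feed `simulationUpdate` and then publish telemetry with `log`. This is a minimal sketch, not the example's actual wiring: everything except the `simulationUpdate(...)` and `log()` signatures is an assumption (the wrapper class, the constant stand-in physics values, and the loop structure are invented), and in the `swervedriveposeestsim` project the real inputs come from `SwerveDriveSim`.

```
package frc.robot.subsystems.drivetrain;

/**
 * Hypothetical glue code for illustration only; only the
 * SwerveModule.simulationUpdate(...) and log() calls come from the example.
 */
public class ModuleSimFeeder {
    private double driveDistMeters = 0.0; // accumulated distance fed to the simulated drive encoder

    /** Feeds constant stand-in physics into each module, then publishes its telemetry. */
    public void updateAndLog(SwerveModule[] modules, double dtSeconds) {
        double driveRateMetersPerSec = 1.0; // stand-in: every module drives forward at 1 m/s
        driveDistMeters += driveRateMetersPerSec * dtSeconds;
        for (SwerveModule module : modules) {
            // 10.0 A is a stand-in drive current; steering is held still (zero dist, rate, current).
            module.simulationUpdate(driveDistMeters, driveRateMetersPerSec, 10.0, 0.0, 0.0, 0.0);
            module.log(); // pushes the module's state to SmartDashboard
        }
    }
}
```
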