From e1110fd8a740046b172eb499431365c6ae15b089 Mon Sep 17 00:00:00 2001 From: Pranav Maringanti <141516835+nootsquared@users.noreply.github.com> Date: Fri, 6 Dec 2024 17:34:15 -0500 Subject: [PATCH] added AK vision template code WITHOUT CHANGES FOR DIFFERENTIAL --- src/main/java/frc/robot/BuildConstants.java | 12 +- src/main/java/frc/robot/RobotContainer.java | 23 +- .../java/frc/robot/physicalConstants.java | 5 +- .../frc/robot/subsystems/drive/Drive.java | 12 +- .../frc/robot/subsystems/vision/Vision.java | 303 ++++++++++-------- .../subsystems/vision/VisionConstants.java | 58 ++++ .../frc/robot/subsystems/vision/VisionIO.java | 70 ++-- .../subsystems/vision/VisionIOLimelight.java | 180 ++++++++--- 8 files changed, 441 insertions(+), 222 deletions(-) create mode 100644 src/main/java/frc/robot/subsystems/vision/VisionConstants.java diff --git a/src/main/java/frc/robot/BuildConstants.java b/src/main/java/frc/robot/BuildConstants.java index 1d72851..d788149 100644 --- a/src/main/java/frc/robot/BuildConstants.java +++ b/src/main/java/frc/robot/BuildConstants.java @@ -5,12 +5,12 @@ public final class BuildConstants { public static final String MAVEN_GROUP = ""; public static final String MAVEN_NAME = "2025TemplateRobotCode"; public static final String VERSION = "unspecified"; - public static final int GIT_REVISION = 14; - public static final String GIT_SHA = "71903e992f09d70d8dc29903114fa0ed607e07e7"; - public static final String GIT_DATE = "2024-11-10 16:32:20 EST"; - public static final String GIT_BRANCH = "subsystems-import"; - public static final String BUILD_DATE = "2024-11-10 17:28:21 EST"; - public static final long BUILD_UNIX_TIME = 1731277701353L; + public static final int GIT_REVISION = 19; + public static final String GIT_SHA = "908516b9048eb7665587b6e948fd70985430f41e"; + public static final String GIT_DATE = "2024-12-06 09:22:43 EST"; + public static final String GIT_BRANCH = "AK-files"; + public static final String BUILD_DATE = "2024-12-06 17:15:57 EST"; 
+ public static final long BUILD_UNIX_TIME = 1733523357007L; public static final int DIRTY = 1; private BuildConstants() {} diff --git a/src/main/java/frc/robot/RobotContainer.java b/src/main/java/frc/robot/RobotContainer.java index 971afff..e91c530 100644 --- a/src/main/java/frc/robot/RobotContainer.java +++ b/src/main/java/frc/robot/RobotContainer.java @@ -13,8 +13,12 @@ package frc.robot; +import org.littletonrobotics.junction.networktables.LoggedDashboardChooser; +import org.littletonrobotics.junction.networktables.LoggedDashboardNumber; + import com.pathplanner.lib.auto.AutoBuilder; import com.pathplanner.lib.auto.NamedCommands; + import edu.wpi.first.math.geometry.Pose2d; import edu.wpi.first.math.geometry.Rotation2d; import edu.wpi.first.wpilibj.GenericHID; @@ -34,8 +38,11 @@ import frc.robot.subsystems.flywheel.FlywheelIO; import frc.robot.subsystems.flywheel.FlywheelIOSim; import frc.robot.subsystems.flywheel.FlywheelIOSparkMax; -import org.littletonrobotics.junction.networktables.LoggedDashboardChooser; -import org.littletonrobotics.junction.networktables.LoggedDashboardNumber; +import frc.robot.subsystems.vision.Vision; +import static frc.robot.subsystems.vision.VisionConstants.camera0Name; +import static frc.robot.subsystems.vision.VisionConstants.camera1Name; +import frc.robot.subsystems.vision.VisionIO; +import frc.robot.subsystems.vision.VisionIOLimelight; /** * This class is where the bulk of the robot should be declared. 
Since Command-based is a @@ -47,6 +54,7 @@ public class RobotContainer { // Subsystems public static Drive drive; private final Flywheel flywheel; + private final Vision vision; // Controller private final CommandXboxController controller = new CommandXboxController(0); @@ -68,7 +76,14 @@ public RobotContainer() { new ModuleIOTalonFX(1), new ModuleIOTalonFX(2), new ModuleIOTalonFX(3)); + flywheel = new Flywheel(new FlywheelIOSparkMax()); + vision = + new Vision( + drive::addVisionMeasurement, + new VisionIOLimelight(camera0Name, drive::getRotation), + new VisionIOLimelight(camera1Name, drive::getRotation)); + // drive = new Drive( // new GyroIOPigeon2(true), // new ModuleIOTalonFX(0), @@ -87,7 +102,10 @@ public RobotContainer() { new ModuleIOSim(), new ModuleIOSim(), new ModuleIOSim()); + vision = new Vision(drive::addVisionMeasurement, new VisionIO() {}, new VisionIO() {}); + flywheel = new Flywheel(new FlywheelIOSim()); + break; default: @@ -100,6 +118,7 @@ public RobotContainer() { new ModuleIO() {}, new ModuleIO() {}); flywheel = new Flywheel(new FlywheelIO() {}); + vision = new Vision(drive::addVisionMeasurement, new VisionIO() {}, new VisionIO() {}); break; } diff --git a/src/main/java/frc/robot/physicalConstants.java b/src/main/java/frc/robot/physicalConstants.java index 072bcf7..dbaeddf 100644 --- a/src/main/java/frc/robot/physicalConstants.java +++ b/src/main/java/frc/robot/physicalConstants.java @@ -45,8 +45,7 @@ public static class IntakeConstants { public static final boolean CURRENT_LIMIT_ENABLED = true; } - public static final class shooterConstants { - } + public static final class shooterConstants {} public static class ElevatorConstants { public static final double CURRENT_LIMIT = 40.0; @@ -70,7 +69,7 @@ public static final class PivotConstants { public static final double REDUCTION = (15.0 / 1.0) * (34.0 / 24.0) * (24.0 / 18.0) * (50.0 / 14.0); public static final double STOW_SETPOINT_DEG = 50.7; - + public static final double PIVOT_ZERO_ANGLE = 
59; } diff --git a/src/main/java/frc/robot/subsystems/drive/Drive.java b/src/main/java/frc/robot/subsystems/drive/Drive.java index 3f153eb..06699f2 100644 --- a/src/main/java/frc/robot/subsystems/drive/Drive.java +++ b/src/main/java/frc/robot/subsystems/drive/Drive.java @@ -15,6 +15,7 @@ import static edu.wpi.first.units.Units.*; +import edu.wpi.first.math.Matrix; import com.pathplanner.lib.auto.AutoBuilder; import com.pathplanner.lib.pathfinding.Pathfinding; import com.pathplanner.lib.util.HolonomicPathFollowerConfig; @@ -31,6 +32,8 @@ import edu.wpi.first.math.kinematics.SwerveDriveKinematics; import edu.wpi.first.math.kinematics.SwerveModulePosition; import edu.wpi.first.math.kinematics.SwerveModuleState; +import edu.wpi.first.math.numbers.N1; +import edu.wpi.first.math.numbers.N3; import edu.wpi.first.util.CircularBuffer; import edu.wpi.first.wpilibj.DriverStation; import edu.wpi.first.wpilibj.DriverStation.Alliance; @@ -38,6 +41,7 @@ import edu.wpi.first.wpilibj2.command.SubsystemBase; import edu.wpi.first.wpilibj2.command.sysid.SysIdRoutine; import frc.robot.physicalConstants; +import frc.robot.subsystems.vision.VisionIO; import frc.robot.util.LocalADStarAK; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; @@ -311,8 +315,12 @@ public void setPose(Pose2d pose) { * @param visionPose The pose of the robot as measured by the vision camera. * @param timestamp The timestamp of the vision measurement in seconds. */ - public void addVisionMeasurement(Pose2d visionPose, double timestamp) { - poseEstimator.addVisionMeasurement(visionPose, timestamp); + public void addVisionMeasurement( + Pose2d visionRobotPoseMeters, + double timestampSeconds, + Matrix visionMeasurementStdDevs) { + poseEstimator.addVisionMeasurement( + visionRobotPoseMeters, timestampSeconds, visionMeasurementStdDevs); } /** Returns the maximum linear speed in meters per sec. 
*/ diff --git a/src/main/java/frc/robot/subsystems/vision/Vision.java b/src/main/java/frc/robot/subsystems/vision/Vision.java index a0e532c..087dadf 100644 --- a/src/main/java/frc/robot/subsystems/vision/Vision.java +++ b/src/main/java/frc/robot/subsystems/vision/Vision.java @@ -1,165 +1,188 @@ +// Copyright 2021-2024 FRC 6328 +// http://github.com/Mechanical-Advantage +// +// This program is free software; you can redistribute it and/or +// modify it under the terms of the GNU General Public License +// version 3 as published by the Free Software Foundation or +// available in the root directory of this project. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + package frc.robot.subsystems.vision; +import static frc.robot.subsystems.vision.VisionConstants.*; + +import edu.wpi.first.math.Matrix; import edu.wpi.first.math.VecBuilder; import edu.wpi.first.math.geometry.Pose2d; +import edu.wpi.first.math.geometry.Pose3d; import edu.wpi.first.math.geometry.Rotation2d; -import edu.wpi.first.math.geometry.Translation2d; -import edu.wpi.first.math.util.Units; -import edu.wpi.first.wpilibj.DriverStation; -import edu.wpi.first.wpilibj.DriverStation.Alliance; +import edu.wpi.first.math.numbers.N1; +import edu.wpi.first.math.numbers.N3; +import edu.wpi.first.wpilibj.Alert; +import edu.wpi.first.wpilibj.Alert.AlertType; import edu.wpi.first.wpilibj2.command.SubsystemBase; -import frc.robot.RobotContainer; -import frc.robot.physicalConstants; -import frc.robot.util.LimelightHelpers; -import frc.robot.util.LimelightHelpers.PoseEstimate; -import frc.robot.util.LimelightHelpers.RawFiducial; +import frc.robot.subsystems.vision.VisionIO.PoseObservationType; +import java.util.LinkedList; +import java.util.List; import org.littletonrobotics.junction.Logger; public class Vision 
extends SubsystemBase { - private static double multiplier = 1.0; - private static boolean toggle = false; - - private boolean overridePathplanner = false; - - // private NetworkTable limelightintake = - // NetworkTableInstance.getDefault().getTable(physicalConstants.LL_INTAKE); + private final VisionConsumer consumer; + private final VisionIO[] io; + private final VisionIOInputsAutoLogged[] inputs; + private final Alert[] disconnectedAlerts; + + public Vision(VisionConsumer consumer, VisionIO... io) { + this.consumer = consumer; + this.io = io; + + // Initialize inputs + this.inputs = new VisionIOInputsAutoLogged[io.length]; + for (int i = 0; i < inputs.length; i++) { + inputs[i] = new VisionIOInputsAutoLogged(); + } - private final VisionIO visionIO; - private final VisionIOInputsAutoLogged visionInputs = new VisionIOInputsAutoLogged(); + // Initialize disconnected alerts + this.disconnectedAlerts = new Alert[io.length]; + for (int i = 0; i < inputs.length; i++) { + disconnectedAlerts[i] = + new Alert( + "Vision camera " + Integer.toString(i) + " is disconnected.", AlertType.kWarning); + } + } - public Vision(VisionIO visionIO) { - this.visionIO = visionIO; + /** + * Returns the X angle to the best target, which can be used for simple servoing with vision. + * + * @param cameraIndex The index of the camera to use. 
+ */ + public Rotation2d getTargetX(int cameraIndex) { + return inputs[cameraIndex].latestTargetObservation.tx(); } @Override public void periodic() { - visionIO.updateInputs(visionInputs); - LimelightHelpers.SetRobotOrientation( - physicalConstants.LL_ALIGN, - RobotContainer.drive.poseEstimator.getEstimatedPosition().getRotation().getDegrees(), - 0, - 0, - 0, - 0, - 0); - - if (DriverStation.getAlliance().isPresent() && visionInputs.aTV) { - Logger.recordOutput( - "tags > 1 or disabled ", visionInputs.tagCount > 1 || DriverStation.isDisabled()); - - if (visionInputs.tagCount > 1 || DriverStation.isDisabled()) { - visionLogic(); - } else { - // mt2TagFiltering(); - visionLogic(); - } - } - } - - public void mt2TagFiltering() { - boolean doRejectUpdate = false; - - LimelightHelpers.PoseEstimate mt2 = - new PoseEstimate( - visionInputs.mt2VisionPose, - visionInputs.timestampSeconds, - visionInputs.latency, - visionInputs.tagCount, - visionInputs.tagSpan, - visionInputs.avgTagDist, - visionInputs.avgTagArea, - new RawFiducial[] {}); - if (Math.abs(RobotContainer.drive.yawVelocityRadPerSec) > Math.toRadians(360)) { - doRejectUpdate = true; - } - - if (mt2.tagCount == 0) { - doRejectUpdate = true; + for (int i = 0; i < io.length; i++) { + io[i].updateInputs(inputs[i]); + Logger.processInputs("Vision/Camera" + Integer.toString(i), inputs[i]); } - if (mt2.pose.getTranslation().getDistance(new Translation2d(7.9, 4.1)) < 0.4) { - doRejectUpdate = true; - } - - if (!doRejectUpdate) { - RobotContainer.drive.poseEstimator.setVisionMeasurementStdDevs( - VecBuilder.fill(0.7, 0.7, Units.degreesToRadians(9999999))); - RobotContainer.drive.poseEstimator.addVisionMeasurement( - mt2.pose, mt2.timestampSeconds - (mt2.latency / 1000.)); - } + // Initialize logging values + List allTagPoses = new LinkedList<>(); + List allRobotPoses = new LinkedList<>(); + List allRobotPosesAccepted = new LinkedList<>(); + List allRobotPosesRejected = new LinkedList<>(); + + // Loop over cameras + for 
(int cameraIndex = 0; cameraIndex < io.length; cameraIndex++) { + // Update disconnected alert + disconnectedAlerts[cameraIndex].set(!inputs[cameraIndex].connected); + + // Initialize logging values + List tagPoses = new LinkedList<>(); + List robotPoses = new LinkedList<>(); + List robotPosesAccepted = new LinkedList<>(); + List robotPosesRejected = new LinkedList<>(); + + // Add tag poses + for (int tagId : inputs[cameraIndex].tagIds) { + var tagPose = aprilTagLayout.getTagPose(tagId); + if (tagPose.isPresent()) { + tagPoses.add(tagPose.get()); + } + } - Logger.recordOutput("Vision Measurement", mt2.pose); - Logger.recordOutput("Rejecting Tags", doRejectUpdate); - } + // Loop over pose observations + for (var observation : inputs[cameraIndex].poseObservations) { + // Check whether to reject pose + boolean rejectPose = + observation.tagCount() == 0 // Must have at least one tag + || (observation.tagCount() == 1 + && observation.ambiguity() > maxAmbiguity) // Cannot be high ambiguity + || Math.abs(observation.pose().getZ()) + > maxZError // Must have realistic Z coordinate + + // Must be within the field boundaries + || observation.pose().getX() < 0.0 + || observation.pose().getX() > aprilTagLayout.getFieldLength() + || observation.pose().getY() < 0.0 + || observation.pose().getY() > aprilTagLayout.getFieldWidth(); + + // Add pose to log + robotPoses.add(observation.pose()); + if (rejectPose) { + robotPosesRejected.add(observation.pose()); + } else { + robotPosesAccepted.add(observation.pose()); + } + + // Skip if rejected + if (rejectPose) { + continue; + } + + // Calculate standard deviations + double stdDevFactor = + Math.pow(observation.averageTagDistance(), 2.0) / observation.tagCount(); + double linearStdDev = linearStdDevBaseline * stdDevFactor; + double angularStdDev = angularStdDevBaseline * stdDevFactor; + if (observation.type() == PoseObservationType.MEGATAG_2) { + linearStdDev *= linearStdDevMegatag2Factor; + angularStdDev *= 
angularStdDevMegatag2Factor; + } + if (cameraIndex < cameraStdDevFactors.length) { + linearStdDev *= cameraStdDevFactors[cameraIndex]; + angularStdDev *= cameraStdDevFactors[cameraIndex]; + } + + // Send vision observation + consumer.accept( + observation.pose().toPose2d(), + observation.timestamp(), + VecBuilder.fill(linearStdDev, linearStdDev, angularStdDev)); + } - public void visionLogic() { - LimelightHelpers.PoseEstimate limelightMeasurement = - new PoseEstimate( - visionInputs.mt1VisionPose, - visionInputs.timestampSeconds, - visionInputs.latency, - visionInputs.tagCount, - visionInputs.tagSpan, - visionInputs.avgTagDist, - visionInputs.avgTagArea, - new RawFiducial[] {}); - - double xMeterStds; - double yMeterStds; - double headingDegStds; - - double poseDifference = getVisionPoseDifference(limelightMeasurement.pose); - - boolean isFlipped = - DriverStation.getAlliance().isPresent() - && DriverStation.getAlliance().get() == Alliance.Red; - - Logger.recordOutput("avg area", limelightMeasurement.avgTagArea); - - if (limelightMeasurement.tagCount >= 2 && limelightMeasurement.avgTagArea > 0.04) { - xMeterStds = 0.7; - yMeterStds = 0.7; - headingDegStds = 8; - } else if (limelightMeasurement.tagCount == 1 - && poseDifference < 0.5) { // && poseDifference < 0.5 - xMeterStds = 5; - yMeterStds = 5; - headingDegStds = 30; - } else if (limelightMeasurement.tagCount == 1 && poseDifference < 3) { // && poseDifference < 3 - xMeterStds = 11.43; - yMeterStds = 11.43; - headingDegStds = 9999; - } else return; - - Logger.recordOutput("number of tags", limelightMeasurement.tagCount); - - RobotContainer.drive.poseEstimator.setVisionMeasurementStdDevs( - VecBuilder.fill(xMeterStds, yMeterStds, Units.degreesToRadians(headingDegStds))); - - Pose2d pose = limelightMeasurement.pose; - - if (isFlipped) { - pose.getRotation().plus(new Rotation2d(Math.PI)); + // Log camera data + Logger.recordOutput( + "Vision/Camera" + Integer.toString(cameraIndex) + "/TagPoses", + 
tagPoses.toArray(new Pose3d[tagPoses.size()])); + Logger.recordOutput( + "Vision/Camera" + Integer.toString(cameraIndex) + "/RobotPoses", + robotPoses.toArray(new Pose3d[robotPoses.size()])); + Logger.recordOutput( + "Vision/Camera" + Integer.toString(cameraIndex) + "/RobotPosesAccepted", + robotPosesAccepted.toArray(new Pose3d[robotPosesAccepted.size()])); + Logger.recordOutput( + "Vision/Camera" + Integer.toString(cameraIndex) + "/RobotPosesRejected", + robotPosesRejected.toArray(new Pose3d[robotPosesRejected.size()])); + allTagPoses.addAll(tagPoses); + allRobotPoses.addAll(robotPoses); + allRobotPosesAccepted.addAll(robotPosesAccepted); + allRobotPosesRejected.addAll(robotPosesRejected); } - Logger.recordOutput("Vision Measurement", limelightMeasurement.pose); - } - - public double getVisionPoseDifference(Pose2d visionPose) { - return RobotContainer.drive.getPose().getTranslation().getDistance(visionPose.getTranslation()); + // Log summary data + Logger.recordOutput( + "Vision/Summary/TagPoses", allTagPoses.toArray(new Pose3d[allTagPoses.size()])); + Logger.recordOutput( + "Vision/Summary/RobotPoses", allRobotPoses.toArray(new Pose3d[allRobotPoses.size()])); + Logger.recordOutput( + "Vision/Summary/RobotPosesAccepted", + allRobotPosesAccepted.toArray(new Pose3d[allRobotPosesAccepted.size()])); + Logger.recordOutput( + "Vision/Summary/RobotPosesRejected", + allRobotPosesRejected.toArray(new Pose3d[allRobotPosesRejected.size()])); } - public boolean acceptableMeasurements(Pose2d visionMeasurement) { - return Math.abs(visionMeasurement.getX() - RobotContainer.drive.getPose().getX()) < 1 - && Math.abs(visionMeasurement.getY() - RobotContainer.drive.getPose().getY()) < 1; - } - - public boolean canCorrect(Pose2d visionMeasurement, double timeSinceLastCorrection) { - if (timeSinceLastCorrection < 5) { - if (acceptableMeasurements(visionMeasurement)) return true; - } else { - return true; - } - return false; + @FunctionalInterface + public static interface 
VisionConsumer { + public void accept( + Pose2d visionRobotPoseMeters, + double timestampSeconds, + Matrix visionMeasurementStdDevs); } } diff --git a/src/main/java/frc/robot/subsystems/vision/VisionConstants.java b/src/main/java/frc/robot/subsystems/vision/VisionConstants.java new file mode 100644 index 0000000..7e7a34f --- /dev/null +++ b/src/main/java/frc/robot/subsystems/vision/VisionConstants.java @@ -0,0 +1,58 @@ +// Copyright 2021-2024 FRC 6328 +// http://github.com/Mechanical-Advantage +// +// This program is free software; you can redistribute it and/or +// modify it under the terms of the GNU General Public License +// version 3 as published by the Free Software Foundation or +// available in the root directory of this project. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +package frc.robot.subsystems.vision; + +import edu.wpi.first.apriltag.AprilTagFieldLayout; +import edu.wpi.first.apriltag.AprilTagFields; +import edu.wpi.first.math.geometry.Rotation3d; +import edu.wpi.first.math.geometry.Transform3d; + +public class VisionConstants { + // AprilTag layout + public static AprilTagFieldLayout aprilTagLayout = + AprilTagFieldLayout.loadField(AprilTagFields.kDefaultField); + + // Camera names, must match names configured on coprocessor + public static String camera0Name = "front_camera"; + public static String camera1Name = "rear_camera"; + + // Robot to camera transforms + // (Not used by Limelight, configure in web UI instead) + public static Transform3d robotToCamera0 = + new Transform3d(0.2, 0.0, 0.2, new Rotation3d(0.0, -0.4, 0.0)); + public static Transform3d robotToCamera1 = + new Transform3d(-0.2, 0.0, 0.2, new Rotation3d(0.0, -0.4, Math.PI)); + + // Basic filtering thresholds + public static double maxAmbiguity = 0.3; + public static double 
maxZError = 0.75; + + // Standard deviation baselines, for 1 meter distance and 1 tag + // (Adjusted automatically based on distance and # of tags) + public static double linearStdDevBaseline = 0.02; // Meters + public static double angularStdDevBaseline = 0.06; // Radians + + // Standard deviation multipliers for each camera + // (Adjust to trust some cameras more than others) + public static double[] cameraStdDevFactors = + new double[] { + 1.0, // Camera 0 + 1.0 // Camera 1 + }; + + // Multipliers to apply for MegaTag 2 observations + public static double linearStdDevMegatag2Factor = 0.5; // More stable than full 3D solve + public static double angularStdDevMegatag2Factor = + Double.POSITIVE_INFINITY; // No rotation data available +} diff --git a/src/main/java/frc/robot/subsystems/vision/VisionIO.java b/src/main/java/frc/robot/subsystems/vision/VisionIO.java index 714dfd1..90648a3 100644 --- a/src/main/java/frc/robot/subsystems/vision/VisionIO.java +++ b/src/main/java/frc/robot/subsystems/vision/VisionIO.java @@ -1,44 +1,48 @@ +// Copyright 2021-2024 FRC 6328 +// http://github.com/Mechanical-Advantage +// +// This program is free software; you can redistribute it and/or +// modify it under the terms of the GNU General Public License +// version 3 as published by the Free Software Foundation or +// available in the root directory of this project. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. 
+ package frc.robot.subsystems.vision; -import edu.wpi.first.math.geometry.Pose2d; +import edu.wpi.first.math.geometry.Pose3d; +import edu.wpi.first.math.geometry.Rotation2d; import org.littletonrobotics.junction.AutoLog; public interface VisionIO { - @AutoLog public static class VisionIOInputs { - public Pose2d mt2VisionPose = new Pose2d(); - public Pose2d mt1VisionPose = new Pose2d(); - public double timestampSeconds; - public int tagCount; - public double latency; - public double tagSpan; - public double avgTagDist; - public double avgTagArea; - - public double iTX; - public double iTY; - public double iTA; - public double iHB; - public boolean iTV; - - public double iTHOR; - public double iTVERT; - - public double iPIPELINELATENCY; - public double iCAPTURELATENCY; - - public double aTX; - public double aTY; - public double aTA; - public double aHB; - public boolean aTV; - - public double aTHOR; - public double aTVERT; + public boolean connected = false; + public TargetObservation latestTargetObservation = + new TargetObservation(new Rotation2d(), new Rotation2d()); + public PoseObservation[] poseObservations = new PoseObservation[0]; + public int[] tagIds = new int[0]; + } - public double aPIPELINELATENCY; - public double aCAPTURELATENCY; + /** Represents the angle to a simple target, not used for pose estimation. */ + public static record TargetObservation(Rotation2d tx, Rotation2d ty) {} + + /** Represents a robot pose sample used for pose estimation. 
*/ + public static record PoseObservation( + double timestamp, + Pose3d pose, + double ambiguity, + int tagCount, + double averageTagDistance, + PoseObservationType type) {} + + public static enum PoseObservationType { + MEGATAG_1, + MEGATAG_2, + PHOTONVISION } public default void updateInputs(VisionIOInputs inputs) {} diff --git a/src/main/java/frc/robot/subsystems/vision/VisionIOLimelight.java b/src/main/java/frc/robot/subsystems/vision/VisionIOLimelight.java index 07fe886..9f694ab 100644 --- a/src/main/java/frc/robot/subsystems/vision/VisionIOLimelight.java +++ b/src/main/java/frc/robot/subsystems/vision/VisionIOLimelight.java @@ -1,47 +1,155 @@ +// Copyright 2021-2024 FRC 6328 +// http://github.com/Mechanical-Advantage +// +// This program is free software; you can redistribute it and/or +// modify it under the terms of the GNU General Public License +// version 3 as published by the Free Software Foundation or +// available in the root directory of this project. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + package frc.robot.subsystems.vision; -import frc.robot.util.LimelightHelpers; +import edu.wpi.first.math.geometry.Pose3d; +import edu.wpi.first.math.geometry.Rotation2d; +import edu.wpi.first.math.geometry.Rotation3d; +import edu.wpi.first.math.util.Units; +import edu.wpi.first.networktables.DoubleArrayPublisher; +import edu.wpi.first.networktables.DoubleArraySubscriber; +import edu.wpi.first.networktables.DoubleSubscriber; +import edu.wpi.first.networktables.NetworkTableInstance; +import edu.wpi.first.wpilibj.RobotController; +import java.util.HashSet; +import java.util.LinkedList; +import java.util.List; +import java.util.Set; +import java.util.function.Supplier; +/** IO implementation for real Limelight hardware. 
*/ public class VisionIOLimelight implements VisionIO { + private final Supplier rotationSupplier; + private final DoubleArrayPublisher orientationPublisher; - String name; + private final DoubleSubscriber latencySubscriber; + private final DoubleSubscriber txSubscriber; + private final DoubleSubscriber tySubscriber; + private final DoubleArraySubscriber megatag1Subscriber; + private final DoubleArraySubscriber megatag2Subscriber; - public VisionIOLimelight(String name, String name2) {} + /** + * Creates a new VisionIOLimelight. + * + * @param name The configured name of the Limelight. + * @param rotationSupplier Supplier for the current estimated rotation, used for MegaTag 2. + */ + public VisionIOLimelight(String name, Supplier rotationSupplier) { + var table = NetworkTableInstance.getDefault().getTable(name); + this.rotationSupplier = rotationSupplier; + orientationPublisher = table.getDoubleArrayTopic("robot_orientation_set").publish(); + latencySubscriber = table.getDoubleTopic("tl").subscribe(0.0); + txSubscriber = table.getDoubleTopic("tx").subscribe(0.0); + tySubscriber = table.getDoubleTopic("ty").subscribe(0.0); + megatag1Subscriber = table.getDoubleArrayTopic("botpose_wpiblue").subscribe(new double[] {}); + megatag2Subscriber = + table.getDoubleArrayTopic("botpose_orb_wpiblue").subscribe(new double[] {}); + } @Override public void updateInputs(VisionIOInputs inputs) { - inputs.mt2VisionPose = LimelightHelpers.getBotPoseEstimate_wpiBlue_MegaTag2(name).pose; - inputs.mt1VisionPose = LimelightHelpers.getBotPose2d_wpiBlue(name); - inputs.timestampSeconds = - LimelightHelpers.getBotPoseEstimate_wpiBlue_MegaTag2(name).timestampSeconds; - inputs.tagCount = LimelightHelpers.getBotPoseEstimate_wpiBlue_MegaTag2(name).tagCount; - inputs.tagSpan = LimelightHelpers.getBotPoseEstimate_wpiBlue_MegaTag2(name).tagSpan; - inputs.latency = LimelightHelpers.getBotPoseEstimate_wpiBlue_MegaTag2(name).latency; - inputs.avgTagDist = 
LimelightHelpers.getBotPoseEstimate_wpiBlue_MegaTag2(name).avgTagDist; - inputs.avgTagArea = LimelightHelpers.getBotPoseEstimate_wpiBlue_MegaTag2(name).avgTagArea; - - // -------------------------- - - inputs.iTX = LimelightHelpers.getTX(name); - inputs.iTY = LimelightHelpers.getTY(name); - inputs.iTA = LimelightHelpers.getTA(name); - inputs.iHB = LimelightHelpers.getLimelightNTTableEntry(name, "hb").getDouble(0.0); - inputs.iTV = LimelightHelpers.getTV(name); - inputs.iPIPELINELATENCY = LimelightHelpers.getLatency_Pipeline(name); - inputs.iCAPTURELATENCY = LimelightHelpers.getLatency_Capture(name); - inputs.iTHOR = LimelightHelpers.getLimelightNTTableEntry(name, "thor").getDouble(0.0); - inputs.iTVERT = LimelightHelpers.getLimelightNTTableEntry(name, "tvert").getDouble(0.0); - - // -------------------------- - - inputs.aTX = LimelightHelpers.getTX(name); - inputs.aTY = LimelightHelpers.getTY(name); - inputs.aTA = LimelightHelpers.getTA(name); - inputs.aHB = LimelightHelpers.getLimelightNTTableEntry(name, "hb").getDouble(0.0); - inputs.aTV = LimelightHelpers.getTV(name); - inputs.aPIPELINELATENCY = LimelightHelpers.getLatency_Pipeline(name); - inputs.aCAPTURELATENCY = LimelightHelpers.getLatency_Capture(name); - inputs.aTHOR = LimelightHelpers.getLimelightNTTableEntry(name, "thor").getDouble(0.0); - inputs.aTVERT = LimelightHelpers.getLimelightNTTableEntry(name, "tvert").getDouble(0.0); + // Update connection status based on whether an update has been seen in the last 250ms + inputs.connected = (RobotController.getFPGATime() - latencySubscriber.getLastChange()) < 250; + + // Update target observation + inputs.latestTargetObservation = + new TargetObservation( + Rotation2d.fromDegrees(txSubscriber.get()), Rotation2d.fromDegrees(tySubscriber.get())); + + // Update orientation for MegaTag 2 + orientationPublisher.accept( + new double[] {rotationSupplier.get().getDegrees(), 0.0, 0.0, 0.0, 0.0, 0.0}); + NetworkTableInstance.getDefault() + .flush(); // Increases network 
traffic but recommended by Limelight + + // Read new pose observations from NetworkTables + Set tagIds = new HashSet<>(); + List poseObservations = new LinkedList<>(); + for (var rawSample : megatag1Subscriber.readQueue()) { + if (rawSample.value.length == 0) continue; + for (int i = 10; i < rawSample.value.length; i += 7) { + tagIds.add((int) rawSample.value[i]); + } + poseObservations.add( + new PoseObservation( + // Timestamp, based on server timestamp of publish and latency + rawSample.timestamp * 1.0e-9 - rawSample.value[7] * 1.0e-3, + + // 3D pose estimate + parsePose(rawSample.value), + + // Ambiguity, using only the first tag because ambiguity isn't applicable for multitag + rawSample.value.length >= 17 ? rawSample.value[16] : 0.0, + + // Tag count + (int) rawSample.value[8], + + // Average tag distance + rawSample.value[10], + + // Observation type + PoseObservationType.MEGATAG_1)); + } + for (var rawSample : megatag2Subscriber.readQueue()) { + if (rawSample.value.length == 0) continue; + for (int i = 10; i < rawSample.value.length; i += 7) { + tagIds.add((int) rawSample.value[i]); + } + poseObservations.add( + new PoseObservation( + // Timestamp, based on server timestamp of publish and latency + rawSample.timestamp * 1.0e-9 - rawSample.value[7] * 1.0e-3, + + // 3D pose estimate + parsePose(rawSample.value), + + // Ambiguity, zeroed because the pose is already disambiguated + 0.0, + + // Tag count + (int) rawSample.value[8], + + // Average tag distance + rawSample.value[10], + + // Observation type + PoseObservationType.MEGATAG_2)); + } + + // Save pose observations to inputs object + inputs.poseObservations = new PoseObservation[poseObservations.size()]; + for (int i = 0; i < poseObservations.size(); i++) { + inputs.poseObservations[i] = poseObservations.get(i); + } + + // Save tag IDs to inputs objects + inputs.tagIds = new int[tagIds.size()]; + int i = 0; + for (int id : tagIds) { + inputs.tagIds[i++] = id; + } + } + + /** Parses the 3D pose from a 
Limelight botpose array. */ + private static Pose3d parsePose(double[] rawLLArray) { + return new Pose3d( + rawLLArray[0], + rawLLArray[1], + rawLLArray[2], + new Rotation3d( + Units.degreesToRadians(rawLLArray[3]), + Units.degreesToRadians(rawLLArray[4]), + Units.degreesToRadians(rawLLArray[5]))); } }