diff --git a/src/main/java/frc/robot/RobotContainer.java b/src/main/java/frc/robot/RobotContainer.java
index 4394198..474b3b3 100644
--- a/src/main/java/frc/robot/RobotContainer.java
+++ b/src/main/java/frc/robot/RobotContainer.java
@@ -6,11 +6,12 @@ package frc.robot;
import edu.wpi.first.wpilibj.DriverStation.Alliance;
+import org.littletonrobotics.junction.Logger;
+
import com.pathplanner.lib.auto.AutoBuilder;
import com.pathplanner.lib.auto.NamedCommands;
import edu.wpi.first.math.geometry.Pose2d;
-import edu.wpi.first.math.geometry.Transform2d;
import edu.wpi.first.wpilibj.Timer;
import edu.wpi.first.wpilibj.smartdashboard.SendableChooser;
import edu.wpi.first.wpilibj.smartdashboard.SmartDashboard;
@@ -52,6 +53,12 @@ public class RobotContainer {
shooter = new Shooter();
vision.addPoseEstimateConsumer(drivetrain::consumeVisualPose);
+ vision.addPoseEstimateConsumer((vp) -> {
+ Logger.recordOutput(
+ "Vision/" + vp.cameraName() + "/Pose",
+ vp.visualPose()
+ );
+ });
driver = new CommandXboxController(OIConstants.kDriverControllerPort);
@@ -94,7 +101,7 @@ public class RobotContainer {
Pose2d hubPose = Utilities.getHubPose();
double distance = drivetrainPose.getTranslation()
- .plus(CompetitionConstants.KRobotToShooter.getTranslation().toTranslation2d())
+ .plus(CompetitionConstants.kRobotToShooter.getTranslation().toTranslation2d())
.getDistance(hubPose.getTranslation());
if(HoodConstants.kUseInterpolatorForAngle) {
@@ -136,6 +143,31 @@ public class RobotContainer {
}
}
+ /**
+ * The "shift display" relies on Elastic's ability to show 1 or more colors
+ * in a box on the dashboard.
+ *
+ * Using the RobotModeTriggers and a Timer based on the FPGA, a reasonably
+ * accurate "shift display" can be created to indicate whose hub is active
+ * and when.
+ *
+ * During autonomous, and the first 10 seconds of teleop, the shift display
+ * will display a gradient of both red and blue, indicating the fact that
+ * both hubs are active.
+ *
+ * For the rest of teleop, with the exception of the endgame, the display
+ * will present either the color red, or the color blue, based on the returned
+ * value of Utilities.whoHasFirstShift(). Because shifts change on a known cycle,
+ * we can use the known state of who has first shift to determine the remaining three shifts
+ * that come after.
+ *
+ * For the endgame portion of teleop, the shift display returns to the gradient
+ * of both red and blue.
+ *
+ * Because this relies on the RobotModeTriggers and an FPGA timer, it should be
+ * reasonably accurate; it's unlikely to be perfect relative to field time,
+ * but it will be very (likely unnoticeably) close.
+ */
private void configureShiftDisplay() {
SmartDashboard.putStringArray(OIConstants.kCurrentActiveHub, OIConstants.kRedBlueDisplay);
diff --git a/src/main/java/frc/robot/constants/CompetitionConstants.java b/src/main/java/frc/robot/constants/CompetitionConstants.java
index 290fe84..6851863 100644
--- a/src/main/java/frc/robot/constants/CompetitionConstants.java
+++ b/src/main/java/frc/robot/constants/CompetitionConstants.java
@@ -18,7 +18,7 @@ public class CompetitionConstants {
public static final double kHubGoalHeightMeters = Units.inchesToMeters(72);
// TODO Real Values
- public static final Transform3d KRobotToShooter = new Transform3d();
+ public static final Transform3d kRobotToShooter = new Transform3d();
public static final Pose2d kBlueHubLocation = new Pose2d(
Units.inchesToMeters(182.11),
@@ -26,6 +26,13 @@ public class CompetitionConstants {
Rotation2d.fromDegrees(0)
);
+ // TODO The origination value produced by the April Tag Field Layout object may
+ // influence what this value should actually be. See AprilTagFieldLayout.getOrigin
+ // For now, the X axis position (forward/backward) is calculated as though the blue
+ // alliance wall right-hand side is the origination point, so, the distance from
+ // the blue alliance wall, to the blue alliance hub center point, plus
+ // the distance between the center of the blue alliance hub and the center of
+ // the red alliance hub
public static final Pose2d kRedHubLocation = new Pose2d(
Units.inchesToMeters(182.11 + 143.5 * 2),
Units.inchesToMeters(158.84),
diff --git a/src/main/java/frc/robot/interfaces/IAprilTagProvider.java b/src/main/java/frc/robot/interfaces/IAprilTagProvider.java
deleted file mode 100644
index 86ea60b..0000000
--- a/src/main/java/frc/robot/interfaces/IAprilTagProvider.java
+++ /dev/null
@@ -1,39 +0,0 @@
-package frc.robot.interfaces;
-
-import java.util.OptionalDouble;
-
-/**
- * An interface which ensures a class can provide common AprilTag oriented
- * information from various sources in a consistent way.
- */
-public interface IAprilTagProvider {
- /**
- * A method to get the tags currently in the camera's field of view
- * @return
- */
- public int[] getVisibleTagIDs();
-
- /**
- * A method to get the distance from the camera to the AprilTag specified
- *
- * @param id The ID of the AprilTag to give a distance to
- * @return The distance, in meters, to the target, or OptionalDouble.empty() if the tag is not present in the camera's view
- */
- public OptionalDouble getTagDistanceFromCameraByID(int id);
-
- /**
- * A method to get the pitch from the center of the image of a particular AprilTag
- *
- * @param id The ID of the AprilTag to get the pitch of
- * @return The pitch, in degrees, of the target, or OptionalDouble.empty() if the tag is not present in the camera's view
- */
- public OptionalDouble getTagPitchByID(int id);
-
- /**
- * A method to get the yaw from the center of the image of a particular AprilTag
- *
- * @param id The ID of the AprilTag to get the yaw of
- * @return The yaw, in degrees, of the target, or OptionalDouble.empty() if the tag is not present in the camera's view
- */
- public OptionalDouble getTagYawByID(int id);
-}
diff --git a/src/main/java/frc/robot/subsystems/Drivetrain.java b/src/main/java/frc/robot/subsystems/Drivetrain.java
index f126311..7f47368 100644
--- a/src/main/java/frc/robot/subsystems/Drivetrain.java
+++ b/src/main/java/frc/robot/subsystems/Drivetrain.java
@@ -21,15 +21,14 @@ import edu.wpi.first.math.kinematics.SwerveDriveKinematics;
import edu.wpi.first.math.kinematics.SwerveModulePosition;
import edu.wpi.first.math.kinematics.SwerveModuleState;
import edu.wpi.first.wpilibj.DriverStation;
-import edu.wpi.first.wpilibj.DriverStation.Alliance;
import edu.wpi.first.wpilibj2.command.Command;
import edu.wpi.first.wpilibj2.command.PrintCommand;
import edu.wpi.first.wpilibj2.command.SubsystemBase;
import frc.robot.constants.AutoConstants;
-import frc.robot.constants.CompetitionConstants;
import frc.robot.constants.DrivetrainConstants;
import frc.robot.constants.OIConstants;
import frc.robot.utilities.SwerveModule;
+import frc.robot.utilities.Utilities;
import frc.robot.utilities.VisualPose;
public class Drivetrain extends SubsystemBase {
@@ -180,6 +179,19 @@ public class Drivetrain extends SubsystemBase {
});
}
+ /**
+ * Rotates the robot to face a given Pose2d position on the field
+ *
+ * Note that this Command does not provide a means of timeout. If you are
+ * using this in an auto context, this Command should be decorated with
+ * withTimeout(). Otherwise, you will be waiting for the PID
+ * Controller doing the work to report that it is at the desired setpoint.
+ *
+ * @param targetPose The Pose2d object to rotate the robot towards
+ * @param rotate180 When false, the front of the robot faces the specified pose, when true
+ * the back of the robot faces the specified pose
+ * @return A complete Command structure that performs the specified action
+ */
public Command rotateToPose(Pose2d targetPose, boolean rotate180) {
return runOnce(yawRotationController::reset).andThen(
drive(
@@ -205,21 +217,30 @@ public class Drivetrain extends SubsystemBase {
.until(yawRotationController::atSetpoint);
}
+ /**
+ * Locks the robot's rotation to face the Alliance Hub on the field.
+ *
+ * This method is innately aware of which hub to face based on the assigned alliance color.
+ *
+ * This method is NOT for autonomous, see rotateToPose
+ *
+ * This method provides a field oriented mechanism of driving the robot, such that the robot
+ * is always facing the point on the field that is the center of the alliance hub. This
+ * method assumes that the robot's estimated pose is reasonably accurate.
+ *
+ * @param xSpeed The X (forward/backward) translational speed of the robot
+ * @param ySpeed The Y (left/right) translational speed of the robot
+ * @param rotate180 When false, the front of the robot faces the hub, when true, the back
+ * of the robot faces the hub
+ * @return A complete Command structure that performs the specified action
+ */
public Command lockRotationToHub(DoubleSupplier xSpeed, DoubleSupplier ySpeed, boolean rotate180) {
return runOnce(yawRotationController::reset).andThen(
drive(
xSpeed,
ySpeed,
() -> {
- Optional alliance = DriverStation.getAlliance();
-
- if(alliance.isEmpty()) {
- return 0;
- }
-
- Pose2d faceTowards = alliance.get() == Alliance.Blue ?
- CompetitionConstants.kBlueHubLocation :
- CompetitionConstants.kRedHubLocation;
+ Pose2d faceTowards = Utilities.getHubPose();
Rotation2d targetRotation = new Rotation2d(
faceTowards.getX() - getPose().getX(),
diff --git a/src/main/java/frc/robot/subsystems/PhotonVision.java b/src/main/java/frc/robot/subsystems/PhotonVision.java
index 77586f8..d7161c6 100644
--- a/src/main/java/frc/robot/subsystems/PhotonVision.java
+++ b/src/main/java/frc/robot/subsystems/PhotonVision.java
@@ -1,32 +1,47 @@
package frc.robot.subsystems;
-import java.io.IOException;
import java.util.ArrayList;
-import java.util.Comparator;
import java.util.List;
import java.util.Optional;
-import java.util.OptionalDouble;
import java.util.function.Consumer;
-import java.util.stream.Stream;
import org.photonvision.EstimatedRobotPose;
import org.photonvision.PhotonCamera;
import org.photonvision.PhotonPoseEstimator;
import org.photonvision.PhotonPoseEstimator.PoseStrategy;
-import org.photonvision.PhotonUtils;
import org.photonvision.targeting.PhotonPipelineResult;
import org.photonvision.targeting.PhotonTrackedTarget;
+import edu.wpi.first.math.geometry.Pose3d;
import edu.wpi.first.math.geometry.Transform3d;
-import edu.wpi.first.math.util.Units;
import edu.wpi.first.wpilibj2.command.SubsystemBase;
-import edu.wpi.first.wpilibj2.command.button.Trigger;
import frc.robot.constants.CompetitionConstants;
import frc.robot.constants.PhotonConstants;
-import frc.robot.interfaces.IAprilTagProvider;
import frc.robot.utilities.PhotonVisionConfig;
import frc.robot.utilities.VisualPose;
+/**
+ * This "Subsystem" is not actually a Subsystem. The intent is for this to be treated as
+ * a "resource", that is, something that is not inherently a physical mechanism to be controlled.
+ *
+ * A "resource" in this instance should be thought of as something that can be safely shared
+ * by other Subsystems generally without collision if more than one Subsystem requires the
+ * "resource" at any given time.
+ *
+ * Resources should NOT produce Commands, they should not have a default Command.
+ * Resources do not have behaviors, and because Commands are in and of themselves behaviors,
+ * this class should not have Commands.
+ *
+ * Part of the thinking behind creating the PhotonVision components this way is to rely
+ * on the CommandScheduler to call periodic. If this weren't the case, some other subsystem
+ * would have to manage calling for periodic updates, while still sharing the resource with
+ * other subsystems somehow.
+ *
+ * This class is dynamic, by adding or removing PhotonVisionConfig objects to the "configs"
+ * List in the PhotonConstants file, you change what is set up internally in this class.
+ * 1 config means 1 camera, 1 estimator, 1 stored pipeline result, 2 configs means 2 cameras,
+ * 2 estimators, etc. etc.
+ */
public class PhotonVision extends SubsystemBase {
private PhotonCamera[] cameras;
private PhotonPoseEstimator[] estimators;
@@ -65,6 +80,7 @@ public class PhotonVision extends SubsystemBase {
if(!pose.isEmpty()) {
VisualPose visualPose = new VisualPose(
+ cameras[i].getName(),
pose.get().estimatedPose.toPose2d(),
pose.get().timestampSeconds
);
@@ -76,22 +92,68 @@ public class PhotonVision extends SubsystemBase {
}
}
- public void testMethod(int targetID) {
- Optional target = latestResults.stream()
- .filter((p) -> p != null)
- .map(PhotonPipelineResult::getTargets)
- .map(List::stream)
- .reduce(Stream::concat)
- .get()
- .filter((p) -> p.getFiducialId() == targetID)
- .max(
- Comparator.comparingDouble((ptt) -> {
- return (double)ptt.getDetectedObjectConfidence();
- })
- );
+ /**
+ * Returns the best 3D pose for a given AprilTag ID as seen by the cameras on the robot.
+ *
+ * All cameras' fields of view are observed; if no camera can see the given tag ID, this
+ * method will return Optional.empty().
+ *
+ * Note that this method has no minimum confidence threshold for a tag. This means that
+ * if one camera thinks it sees the tag, even with very low confidence, it'll still return
+ * some sort of pose.
+ *
+ * @param tagID The ID of the tag to look for in the latest results from all cameras
+ * @return An Optional object containing a Pose3d object, or Optional.empty() if
+ * the tag is not present anywhere in the robot's field of view.
+ */
+ public Optional getBestPoseForTag(int tagID) {
+ PhotonVisionConfig config = null;
+ Transform3d bestCameraToTarget = null;
+ float bestConfidence = -1;
+ for(int cameraIndex = 0; cameraIndex < latestResults.size(); cameraIndex++) {
+ if(latestResults.get(cameraIndex) == null) {
+ continue;
+ }
+
+ for(PhotonTrackedTarget target: latestResults.get(cameraIndex).getTargets()) {
+ if(target.getFiducialId() != tagID || bestConfidence > target.getDetectedObjectConfidence()) {
+ continue;
+ }
+
+ config = PhotonConstants.configs.get(cameraIndex);
+ bestCameraToTarget = target.bestCameraToTarget;
+ bestConfidence = target.getDetectedObjectConfidence();
+ }
+ }
+
+ if(bestCameraToTarget == null) {
+ return Optional.empty();
+ }
+
+ // This is based on what PhotonVision does for multitag Pose estimation
+ // See PhotonPoseEstimator.multiTagOnCoprocStrategy
+ // TODO This doesn't currently account for the offset of the tag relative to say the hub
+ return Optional.of(Pose3d.kZero
+ .plus(bestCameraToTarget.inverse())
+ .relativeTo(CompetitionConstants.kTagLayout.getOrigin())
+ .plus(config.robotToCamera().inverse())
+ .plus(CompetitionConstants.kRobotToShooter));
}
+ /**
+ * Add a Consumer of VisualPose records to the PhotonVision resource.
+ *
+ * Each consumer will receive a VisualPose object when any camera produces a new
+ * VisualPose.
+ *
+ * The number of poses produced in a given 20 ms cycle equals the number of
+ * cameras on the robot, as currently every camera's configuration will generate
+ * a Pose2d
+ *
+ * @param consumer The lambda or functional reference that will consume Poses produced
+ * by the PhotonVision resource.
+ */
public void addPoseEstimateConsumer(Consumer consumer) {
poseEstimateConsumers.add(consumer);
}
diff --git a/src/main/java/frc/robot/utilities/SparkMAXTester.java b/src/main/java/frc/robot/utilities/SparkMAXTester.java
index 2967757..ccd0c81 100644
--- a/src/main/java/frc/robot/utilities/SparkMAXTester.java
+++ b/src/main/java/frc/robot/utilities/SparkMAXTester.java
@@ -8,13 +8,27 @@ import com.revrobotics.spark.SparkLowLevel.MotorType;
import edu.wpi.first.wpilibj2.command.Command;
import edu.wpi.first.wpilibj2.command.SubsystemBase;
+/**
+ * A simple subsystem that can be used to test a single SparkMax and associated NEO motor
+ */
public class SparkMAXTester extends SubsystemBase {
private SparkMax spark;
+ /**
+ * Constructor
+ *
+ * @param deviceID The CAN ID of the SparkMAX that needs testing
+ */
public SparkMAXTester(int deviceID) {
spark = new SparkMax(deviceID, MotorType.kBrushless);
}
+ /**
+ * Sets the speed of the motor
+ *
+ * @param speed A method or lambda which returns a double between -1 and 1
+ * @return A Command object that runs indefinitely to control motor speed
+ */
public Command setSpeed(DoubleSupplier speed) {
return run(() -> {
spark.set(speed.getAsDouble());
diff --git a/src/main/java/frc/robot/utilities/SwerveModule.java b/src/main/java/frc/robot/utilities/SwerveModule.java
index 9aaa1ba..03ef74b 100644
--- a/src/main/java/frc/robot/utilities/SwerveModule.java
+++ b/src/main/java/frc/robot/utilities/SwerveModule.java
@@ -145,6 +145,8 @@ public class SwerveModule {
Logger.recordOutput(moduleName + "/SwerveModulePosition", getPosition());
Logger.recordOutput(moduleName + "/RelativeEncoderPosition", getTurningEncoderPosition());
+ // TODO Re-enable this? Was turned off when there was drivetrain issues
+ // Now that there aren't, do we try this again?
/*
if(!isAbsoluteEncoderDisabled && turningEncoderAutoRezeroEnabled) {
if(Math.abs(getState().angle.getRadians() - lastTargetState.angle.getRadians()) <= ModuleConstants.kAutoResetPositionDeadband) {
diff --git a/src/main/java/frc/robot/utilities/Utilities.java b/src/main/java/frc/robot/utilities/Utilities.java
index 9d6b54a..77a8651 100644
--- a/src/main/java/frc/robot/utilities/Utilities.java
+++ b/src/main/java/frc/robot/utilities/Utilities.java
@@ -10,6 +10,13 @@ import frc.robot.constants.CompetitionConstants;
public class Utilities {
public static final double kG = -9.81;
+ /**
+ * Returns the Alliance enumeration that indicates who will have the first
+ * shift. Returns null if the data is not available.
+ *
+ * @return The Alliance that will have the first shift, or null if game specific data
+ * is not present
+ */
public static Alliance whoHasFirstShift() {
String gameData = DriverStation.getGameSpecificMessage();
@@ -27,6 +34,13 @@ public class Utilities {
return null;
}
+ /**
+ * Returns the pose of the hub for the alliance assigned to our robot.
+ * If no alliance is assigned (which is unlikely) this method returns
+ * the Blue hub pose, which is the closest to the field origin
+ *
+ * @return The Pose2d object which represents the appropriate pose of the Hub
+ */
public static Pose2d getHubPose() {
Optional alliance = DriverStation.getAlliance();
@@ -44,8 +58,6 @@ public class Utilities {
* Note that X in this scenario is the physical distance from the shooter exit to
* target. Y is the change in height from the shooter exit to the target height
*
- * TODO Review ChatGPT's math more thoroughly, preferably with someone with fresher math skills
- *
* @param targetVMPS The target velocity of the shooter in Meters per Second
* @param deltaXM The "as the crow flies" distance between the shooter exit and the target
* @param deltaYM The height difference between the shooter exit and the target
@@ -66,8 +78,6 @@ public class Utilities {
* Setting softerShot to true changes the angle of attack to a soft, long range shot. False
* makes the shot more of a lob
*
- * TODO Review ChatGPT's math more thoroughly, preferably with someone with fresher math skills
- *
* @param targetVMPS The target velocity of the shooter in Meters per Second
* @param deltaXM The "as the crow flies" distance between the shooter exit and the target
* @param deltaYM The height difference between the shooter exit and the target
diff --git a/src/main/java/frc/robot/utilities/VisualPose.java b/src/main/java/frc/robot/utilities/VisualPose.java
index 2bcc0f1..b4a75d2 100644
--- a/src/main/java/frc/robot/utilities/VisualPose.java
+++ b/src/main/java/frc/robot/utilities/VisualPose.java
@@ -2,4 +2,8 @@ package frc.robot.utilities;
import edu.wpi.first.math.geometry.Pose2d;
-public record VisualPose(Pose2d visualPose, double timestamp) {}
+/**
+ * A record class which represents the source of a visual pose, the pose itself
+ * and the timestamp the pose was generated.
+ */
+public record VisualPose(String cameraName, Pose2d visualPose, double timestamp) {}