diff --git a/.github/dependabot.yml b/.github/dependabot.yml index f35c15b9..082bc7f4 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -27,3 +27,4 @@ updates: interval: "weekly" ignore: - dependency-name: "gradle" + - dependency-name: "gradle-wrapper" diff --git a/AdvantageScope RBSI Standard.json b/AdvantageScope RBSI Standard.json new file mode 100644 index 00000000..67dc05a9 --- /dev/null +++ b/AdvantageScope RBSI Standard.json @@ -0,0 +1,541 @@ +{ + "hubs": [ + { + "x": 71, + "y": 33, + "width": 1315, + "height": 776, + "state": { + "sidebar": { + "width": 300, + "expanded": [ + "/RealOutputs", + "/RealOutputs/LoopSpike/Drive", + "/RealOutputs/Drive/OdomThread", + "/RealOutputs/CAN/DriveTrain", + "/RealOutputs/CAN/Module0", + "/RealOutputs/LoopSpike", + "/AdvantageKit", + "/RealOutputs/Loop/Drive", + "/RealOutputs/Loop/Mech", + "/RealOutputs/Odometry/Yaw", + "/RealOutputs/Vision/Camera0", + "/RealOutputs/Vision/Camera0/TagPoses", + "/RealOutputs/Vision/Summary/TagPoses", + "/RealOutputs/Vision/Camera1/RobotPosesAccepted", + "/RealOutputs/Vision/Camera0/RobotPosesAccepted", + "/RealOutputs/Power", + "/RealOutputs/Power/Subsystems", + "/NetworkInputs/SmartDashboard", + "/RealOutputs/Loop", + "/RealOutputs/Loop/Virtual" + ] + }, + "tabs": { + "selected": 7, + "tabs": [ + { + "type": 0, + "title": "", + "controller": null, + "controllerUUID": "27w43phb8fm0oyp20p0wxms1a35yrw0i", + "renderer": "", + "controlsHeight": 0 + }, + { + "type": 2, + "title": "2D Field - Odometry", + "controller": { + "sources": [ + { + "type": "robot", + "logKey": "/RealOutputs/Odometry/Robot", + "logType": "Pose2d", + "visible": true, + "options": { + "bumpers": "" + } + } + ], + "field": "FRC:2026 Field", + "orientation": 0, + "size": "large" + }, + "controllerUUID": "ibthfkqod1n0vovjoxhcg2d9954wcpe4", + "renderer": null, + "controlsHeight": 200 + }, + { + "type": 2, + "title": "2D Field - Vision", + "controller": { + "sources": [ + { + "type": "robot", + "logKey": 
"/RealOutputs/Odometry/Robot", + "logType": "Pose2d", + "visible": false, + "options": { + "bumpers": "" + } + }, + { + "type": "arrow", + "logKey": "/RealOutputs/Vision/Summary/TagPoses", + "logType": "Pose3d[]", + "visible": true, + "options": { + "position": "center" + } + }, + { + "type": "ghost", + "logKey": "/RealOutputs/Vision/Camera0/RobotPosesAccepted", + "logType": "Pose3d[]", + "visible": true, + "options": { + "color": "#00ff00" + } + }, + { + "type": "ghost", + "logKey": "/RealOutputs/Vision/Camera1/RobotPosesAccepted", + "logType": "Pose3d[]", + "visible": true, + "options": { + "color": "#ffff00" + } + } + ], + "field": "FRC:2026 Field", + "orientation": 0, + "size": "large" + }, + "controllerUUID": "u9myuirzag5qydjp9nozn8qxpfqofuuq", + "renderer": null, + "controlsHeight": 200 + }, + { + "type": 9, + "title": "Swerve", + "controller": { + "sources": [ + { + "type": "states", + "logKey": "/RealOutputs/SwerveStates/Measured", + "logType": "SwerveModuleState[]", + "visible": true, + "options": { + "color": "#ff0000", + "arrangement": "0,1,2,3" + } + }, + { + "type": "states", + "logKey": "/RealOutputs/SwerveStates/SetpointsOptimized", + "logType": "SwerveModuleState[]", + "visible": true, + "options": { + "color": "#00ffff", + "arrangement": "0,1,2,3" + } + }, + { + "type": "rotation", + "logKey": "/RealOutputs/Odometry/Robot/rotation", + "logType": "Rotation2d", + "visible": true, + "options": {} + }, + { + "type": "chassisSpeeds", + "logKey": "/RealOutputs/SwerveChassisSpeeds/Measured", + "logType": "ChassisSpeeds", + "visible": true, + "options": { + "color": "#ff0000" + } + } + ], + "maxSpeed": 5, + "sizeX": 0.65, + "sizeY": 0.65, + "orientation": 1 + }, + "controllerUUID": "zo4ukgbiguacvfe9e2iui2ytosl7gutn", + "renderer": null, + "controlsHeight": 200 + }, + { + "type": 1, + "title": "Power", + "controller": { + "leftSources": [ + { + "type": "stepped", + "logKey": "/PowerDistribution/Voltage", + "logType": "Number", + "visible": true, + 
"options": { + "color": "#2b66a2", + "size": "normal" + } + } + ], + "rightSources": [ + { + "type": "stepped", + "logKey": "/PowerDistribution/TotalCurrent", + "logType": "Number", + "visible": true, + "options": { + "color": "#e5b31b", + "size": "normal" + } + }, + { + "type": "stepped", + "logKey": "/RealOutputs/Power/Subsystems/DriveCurrent", + "logType": "Number", + "visible": true, + "options": { + "color": "#80588e", + "size": "normal" + } + }, + { + "type": "stepped", + "logKey": "/RealOutputs/Power/Subsystems/SteerCurrent", + "logType": "Number", + "visible": true, + "options": { + "color": "#e48b32", + "size": "normal" + } + }, + { + "type": "stepped", + "logKey": "/RealOutputs/Power/Subsystems/FlywheelCurrent", + "logType": "Number", + "visible": true, + "options": { + "color": "#c0b487", + "size": "normal" + } + } + ], + "discreteSources": [ + { + "type": "stripes", + "logKey": "/RealOutputs/Power/BrownoutImminent", + "logType": "Boolean", + "visible": true, + "options": { + "color": "#af2437" + } + } + ], + "leftLockedRange": null, + "rightLockedRange": null, + "leftUnitConversion": { + "autoTarget": null, + "preset": null + }, + "rightUnitConversion": { + "autoTarget": null, + "preset": null + }, + "leftFilter": 0, + "rightFilter": 0 + }, + "controllerUUID": "9vyygyem9we0y5917klbu25cz9hruszx", + "renderer": null, + "controlsHeight": 200 + }, + { + "type": 1, + "title": "CAN Health", + "controller": { + "leftSources": [ + { + "type": "stepped", + "logKey": "/RealOutputs/CAN//Utilization", + "logType": "Number", + "visible": true, + "options": { + "color": "#2b66a2", + "size": "normal" + } + }, + { + "type": "stepped", + "logKey": "/RealOutputs/CAN/DriveTrain/Utilization", + "logType": "Number", + "visible": true, + "options": { + "color": "#e5b31b", + "size": "normal" + } + } + ], + "rightSources": [ + { + "type": "stepped", + "logKey": "/RealOutputs/CAN/DriveTrain/RxErrorCount", + "logType": "Number", + "visible": true, + "options": { + "color": 
"#af2437", + "size": "normal" + } + }, + { + "type": "stepped", + "logKey": "/RealOutputs/CAN/DriveTrain/TxErrorCount", + "logType": "Number", + "visible": true, + "options": { + "color": "#80588e", + "size": "normal" + } + }, + { + "type": "stepped", + "logKey": "/RealOutputs/CAN//RxErrorCount", + "logType": "Number", + "visible": true, + "options": { + "color": "#e48b32", + "size": "normal" + } + }, + { + "type": "stepped", + "logKey": "/RealOutputs/CAN/DriveTrain/TxFullCount", + "logType": "Number", + "visible": true, + "options": { + "color": "#c0b487", + "size": "normal" + } + } + ], + "discreteSources": [], + "leftLockedRange": null, + "rightLockedRange": null, + "leftUnitConversion": { + "autoTarget": null, + "preset": null + }, + "rightUnitConversion": { + "autoTarget": null, + "preset": null + }, + "leftFilter": 0, + "rightFilter": 0 + }, + "controllerUUID": "f15s8kcvuqce63dlc7cf4yunt6c0372w", + "renderer": null, + "controlsHeight": 200 + }, + { + "type": 1, + "title": "Loop Time - Robot", + "controller": { + "leftSources": [], + "rightSources": [ + { + "type": "stepped", + "logKey": "/RealOutputs/LoggedRobot/FullCycleMS", + "logType": "Number", + "visible": true, + "options": { + "color": "#2b66a2", + "size": "normal" + } + }, + { + "type": "stepped", + "logKey": "/RealOutputs/LoggedRobot/GCTimeMS", + "logType": "Number", + "visible": true, + "options": { + "color": "#e5b31b", + "size": "normal" + } + }, + { + "type": "stepped", + "logKey": "/RealOutputs/LoggedRobot/LogPeriodicMS", + "logType": "Number", + "visible": true, + "options": { + "color": "#80588e", + "size": "normal" + } + }, + { + "type": "stepped", + "logKey": "/RealOutputs/LoggedRobot/UserCodeMS", + "logType": "Number", + "visible": true, + "options": { + "color": "#af2437", + "size": "normal" + } + } + ], + "discreteSources": [], + "leftLockedRange": null, + "rightLockedRange": null, + "leftUnitConversion": { + "autoTarget": null, + "preset": null + }, + "rightUnitConversion": { + 
"autoTarget": null, + "preset": null + }, + "leftFilter": 0, + "rightFilter": 0 + }, + "controllerUUID": "yktsi99006yw1sq2h8mihr8kiy9h5xk9", + "renderer": null, + "controlsHeight": 200 + }, + { + "type": 1, + "title": "Loop Time - Subsystems", + "controller": { + "leftSources": [ + { + "type": "stepped", + "logKey": "/RealOutputs/Loop/Mech/Drive_ms", + "logType": "Number", + "visible": true, + "options": { + "color": "#2b66a2", + "size": "normal" + } + }, + { + "type": "stepped", + "logKey": "/RealOutputs/Loop/Mech/Flywheel_ms", + "logType": "Number", + "visible": true, + "options": { + "color": "#e5b31b", + "size": "normal" + } + }, + { + "type": "stepped", + "logKey": "/RealOutputs/Loop/Virtual/Accelerometer_ms", + "logType": "Number", + "visible": true, + "options": { + "color": "#af2437", + "size": "normal" + } + }, + { + "type": "stepped", + "logKey": "/RealOutputs/Loop/Virtual/RBSICANHealth_ms", + "logType": "Number", + "visible": true, + "options": { + "color": "#80588e", + "size": "normal" + } + }, + { + "type": "stepped", + "logKey": "/RealOutputs/Loop/Virtual/RBSIPowerMonitor_ms", + "logType": "Number", + "visible": true, + "options": { + "color": "#e48b32", + "size": "normal" + } + }, + { + "type": "stepped", + "logKey": "/RealOutputs/Loop/Virtual/Vision_ms", + "logType": "Number", + "visible": true, + "options": { + "color": "#c0b487", + "size": "normal" + } + } + ], + "rightSources": [], + "discreteSources": [], + "leftLockedRange": null, + "rightLockedRange": null, + "leftUnitConversion": { + "autoTarget": null, + "preset": null + }, + "rightUnitConversion": { + "autoTarget": null, + "preset": null + }, + "leftFilter": 0, + "rightFilter": 0 + }, + "controllerUUID": "no6ewy1a2lc06qvs7einn5i430wwub9n", + "renderer": null, + "controlsHeight": 195 + }, + { + "type": 3, + "title": "3D Field", + "controller": { + "sources": [ + { + "type": "robot", + "logKey": "/RealOutputs/Vision/Summary/TagPoses", + "logType": "Pose3d[]", + "visible": true, + "options": { 
+ "model": "Crab Bot" + } + }, + { + "type": "robot", + "logKey": "/RealOutputs/Odometry/Robot", + "logType": "Pose2d", + "visible": true, + "options": { + "model": "2026 KitBot" + } + } + ], + "game": "FRC:2026 Field" + }, + "controllerUUID": "x1j1ypjwdchzmk115shjrh4ml6w4osq9", + "renderer": { + "cameraIndex": -1, + "orbitFov": 50, + "cameraPosition": [ + 1.4695761589768238e-15, + 5.999999999999999, + -12 + ], + "cameraTarget": [ + 0, + 0.5, + 0 + ] + }, + "controlsHeight": 200 + } + ] + } + } + } + ], + "satellites": [], + "version": "26.0.0" +} diff --git a/doc/INSTALL.md b/doc/INSTALL.md index c3c23727..ac19b1c4 100644 --- a/doc/INSTALL.md +++ b/doc/INSTALL.md @@ -13,7 +13,7 @@ following components on your laptop and devices. This includes all motors, CANivore, Pigeon 2.0, and all CANcoders! * Rev Hardware Client `2.0`, with the PDH and all SparkMax's, and other devices running firmware `26.1` or newer. -* Vivid Hosting Radio firmware `2.0` or newer is required for competition this +* Vivid Hosting Radio firmware `2.0.1` or newer is required for competition this year. * Photon Vision ([Orange Pi or other device](https://docs.photonvision.org/en/latest/docs/quick-start/quick-install.html)) **running `26.1` or newer** (make sure you are **not** acidentially running @@ -22,6 +22,8 @@ following components on your laptop and devices. It is highly recommmended to update all you devices, and label what can id's or ip adresses and firmware versions they are running. This helps your team, and the FRC field staff quickly identify issues. 
+If you are running a RoboRIO 1.0 (no sd card) you also need to disable the web server ([Instructions Here](https://docs.wpilib.org/en/stable/docs/software/wpilib-tools/roborio-team-number-setter/index.html)) + -------- ### Getting Az-RBSI @@ -118,7 +120,10 @@ steps you need to complete: https://github.com/CrossTheRoadElec/Phoenix6-Examples/blob/1db713d75b08a4315c9273cebf5b5e6a130ed3f7/java/SwerveWithPathPlanner/src/main/java/frc/robot/generated/TunerConstants.java#L171-L175). Before removing them, both lines will be marked as errors in VSCode. -5. In `TunerConstants.java`, change `kSteerInertia` to `0.004` and +5. In `TunerConstants.java`, change `kSlipCurrent` to `60` amps. This will + keep your robot from tearing holes in the carpet at competition! + +6. In `TunerConstants.java`, change `kSteerInertia` to `0.004` and `kDriveInertia` to `0.025` to allow the AdvantageKit simulation code to operate as expected. diff --git a/doc/PV_Cameras.png b/doc/PV_Cameras.png new file mode 100644 index 00000000..b5293864 Binary files /dev/null and b/doc/PV_Cameras.png differ diff --git a/doc/PV_Network.png b/doc/PV_Network.png new file mode 100644 index 00000000..5db753c3 Binary files /dev/null and b/doc/PV_Network.png differ diff --git a/doc/RBSI-GSG.md b/doc/RBSI-GSG.md index da7774e3..b7f541d8 100644 --- a/doc/RBSI-GSG.md +++ b/doc/RBSI-GSG.md @@ -41,6 +41,10 @@ modifications to extant RBSI code will be done to files within the ### Tuning constants for optimal performance +**The importance of tuning your drivetrain for smooth and consistent +performance, battery longevity, and not tearing up the field cannot be +overemphasized.** + 4. Over the course of your robot project, you will need to tune PID parameters for both your drivebase and any mechanisms you build to play the game. AdvantageKit includes detailed instructions for how to tune the various @@ -128,7 +132,22 @@ section of [each release](https://github.com/AZ-First/Az-RBSI/releases). 
See the [PhotonVision Wiring documentation ](https://docs.photonvision.org/en/latest/docs/quick-start/wiring.html) for - more details. + more details. DO NOT put the orange pi's (or any devices that cannot lose power) on port 23 of the PDH. It is a mechanical switch, and if the robot is hit, it briefly will lose power. Mounting the case to the robot requires 4x #10-32 nylock nuts (placed in the hex-shaped mounts inside the case) and 4x #10-32 bolts. + + Order of assembly of the Orange Pi Double Case matters given tight clearances: + 1. Super-glue the nylock nuts into the hex mounting holes. + 2. Install the fans and grates into the case side. + 3. Assemble the Pi's into the standoffs outside the box. + 4. Solder / mount the Voltage Regulator solution of your choice. + 5. Connect the USB-C power cables to the Pi's. + 6. Connect the fan power to the 5V (red) and GND (black) pins in the Pi's. + 7. Install the Pi/standoff assembly into the case using screws at the bottom, + be careful of the tight clearance between the USB sockets and the case opening. + 8. Tie a knot in the incoming power line _to be placed inside the box + for strain relief_, and pass the incoming power line through the notch + in the lower case. + 9. Install the cover on the box using screws. + 10. Mount the case to your robot using the #10-32 screws. diff --git a/doc/RBSI-Vision.md b/doc/RBSI-Vision.md new file mode 100644 index 00000000..43270986 --- /dev/null +++ b/doc/RBSI-Vision.md @@ -0,0 +1,80 @@ +# Az-RBSI Vision Integration + +This page includes detailed steps for integrating robot vision for your +2026 REBUILT robot. + +-------- + +### PhotonVision + +The preferred method for adding vision to your robot is with [PhotonVision]( +https://photonvision.org/). This community-developed open-source package +combines coprocessor-based camera control and analysis with a Java library +for consuming the processed targeting information in the robot code. 
+ +#### Recommended Setup with Az-RBSI + +We recommend using Arducam [OV9281](https://www.amazon.com/dp/B096M5DKY6) +(black & white) and/or [OV9782](https://www.amazon.com/dp/B0CLXZ29F9) (color) +cameras for robot vision due to their Global Shutter, Low Distortion, and USB +connection. In addition to the lens delivered with the camera, supplementary +lenses may be purchased to vary the FOV available to the detector for various +robot applications, such as [Low-Distortion]( +https://www.amazon.com/dp/B07NW8VR71) or [General Purpose]( +https://www.amazon.com/dp/B096V2NP2T). + +For the coprocessor that controls the cameras and analyzes the images for +AprilTag and gamepiece detection, we recommend using one or two Orange Pi 5 +single-board computers -- although PhotonVision does support a number of +[different coprocessor options]( +https://docs.photonvision.org/en/latest/docs/quick-start/quick-install.html). +As described in the [Getting Started Guide](RBSI-GSG.md), we include a 3D print +for a case that can hold one or two of these computers. + +#### Setting up PhotonVision on the Coprocessor + +Download the appropriate [disk image]( +https://github.com/PhotonVision/photonvision/releases/tag/v2026.2.1) for your +coprocessor and burn it to an SD card using the [Raspberry Pi Imager]( +https://www.raspberrypi.com/software). Connect the powered-on coprocessor +to the Vivid radio (port 2 or 3) via ethernet, or connect to a network switch connected to the radio via ethernet, and connect to the PhotonVision software +at the address ``http://photonvision.local:5800``. + +Before you connect the coprocessor to your robot, be sure to set your team +number, set the IP address to "Static" and give it the number ``10.TE.AM.11``, +where "TE.AM" is the appropriate parsing of your team number into IP address, +as used by your robot radio and RoboRIO. If desired, you can also give your +coprocessor a hostname. 
+ +![PhotonVision Network Settings](PV_Network.png) + +We suggest you give your first coprocessor the static IP address +``10.TE.AM.11``, and your second coprocessor (if desired) ``10.TE.AM.12``. +The static address allows for more stable operation, and these particular +addresses do not conflict with other devices on your robot network. + +Plug in cameras (two or three per coprocessor) and navigate to the Camera +Configs page (see below). Activate the cameras. + +![PhotonVision Camera Configs](PV_Cameras.png) + +#### Configuring and Calibrating your Cameras + +This is the most important part! + +Instructions are in the [PhotonVision Documentation]( +https://docs.photonvision.org/en/latest/docs/calibration/calibration.html). + +You should consider calibrating your cameras early and often, including daily +during a competition to ensure that the cameras are reporting as accurate a +pose as possible for your odometry. Also, double-check your calibration by +using a measuring tape to compare the reported vision-derived distance from +each camera to one or more AprilTags with reality. + + +#### Using PhotonVision for vision simulation + +This is an advanced topic, and is therefore in the Restricted Section. (More +information about vision simulation to come in a future release.) 
+ +![Restricted Section](restricted_section.jpg) diff --git a/doc/restricted_section.jpg b/doc/restricted_section.jpg new file mode 100644 index 00000000..4d5bc210 Binary files /dev/null and b/doc/restricted_section.jpg differ diff --git a/src/main/java/frc/robot/Constants.java b/src/main/java/frc/robot/Constants.java index cfb7214b..3b31b772 100644 --- a/src/main/java/frc/robot/Constants.java +++ b/src/main/java/frc/robot/Constants.java @@ -164,8 +164,10 @@ public static final class PowerConstants { /************************************************************************* */ /** List of Robot CAN Busses ********************************************* */ public static final class CANBuses { - public static final String DRIVE = "DriveTrain"; public static final String RIO = ""; + public static final String DRIVE = "DriveTrain"; + + public static final String[] ALL = {RIO, DRIVE}; } /************************************************************************* */ @@ -317,7 +319,7 @@ public static final class DrivebaseConstants { public static final double kDriveD = 0.03; public static final double kDriveV = 0.83; public static final double kDriveA = 0.0; - public static final double kDriveS = 0.21; + public static final double kDriveS = 2.00; public static final double kDriveT = SwerveConstants.kDriveGearRatio / DCMotor.getKrakenX60Foc(1).KtNMPerAmp; public static final double kSteerP = 400.0; @@ -439,61 +441,55 @@ public static class VisionConstants { /************************************************************************* */ /** Vision Camera Posses ************************************************* */ - public static class Cameras { - // Camera names, must match names configured on coprocessor - public static String camera0Name = "camera_0"; - public static String camera1Name = "camera_1"; - // ... 
And more, if needed - - // Robot to camera transforms + public static final class Cameras { + public record CameraConfig( + String name, + Transform3d robotToCamera, + double stdDevFactor, + SimCameraProperties simProps) {} + + // Camera Configuration Records // (ONLY USED FOR PHOTONVISION -- Limelight: configure in web UI instead) // Example Cameras are mounted in the back corners, 18" up from the floor, facing sideways - public static Transform3d robotToCamera0 = - new Transform3d( - Inches.of(-13.0), - Inches.of(13.0), - Inches.of(12.0), - new Rotation3d(0.0, 0.0, Math.PI / 2)); - public static Transform3d robotToCamera1 = - new Transform3d( - Inches.of(-13.0), - Inches.of(-13.0), - Inches.of(12.0), - new Rotation3d(0.0, 0.0, -Math.PI / 2)); - - // Standard deviation multipliers for each camera - // (Adjust to trust some cameras more than others) - public static double[] cameraStdDevFactors = - new double[] { - 1.0, // Camera 0 - 1.0 // Camera 1 - }; - } - - /************************************************************************* */ - /** Simulation Camera Properties ***************************************** */ - public static class SimCameras { - public static final SimCameraProperties kSimCamera1Props = - new SimCameraProperties() { - { - setCalibration(1280, 800, Rotation2d.fromDegrees(120)); - setCalibError(0.25, 0.08); - setFPS(30); - setAvgLatencyMs(20); - setLatencyStdDevMs(5); - } - }; - - public static final SimCameraProperties kSimCamera2Props = - new SimCameraProperties() { - { - setCalibration(1280, 800, Rotation2d.fromDegrees(120)); - setCalibError(0.25, 0.08); - setFPS(30); - setAvgLatencyMs(20); - setLatencyStdDevMs(5); - } - }; + public static final CameraConfig[] ALL = { + new CameraConfig( + "camera_0", + new Transform3d( + Inches.of(-13.0), + Inches.of(13.0), + Inches.of(12.0), + new Rotation3d(0.0, 0.0, Math.PI / 2)), + 1.0, + new SimCameraProperties() { + { + setCalibration(1280, 800, Rotation2d.fromDegrees(120)); + setCalibError(0.25, 
0.08); + setFPS(30); + setAvgLatencyMs(20); + setLatencyStdDevMs(5); + } + }), + // + new CameraConfig( + "camera_1", + new Transform3d( + Inches.of(-13.0), + Inches.of(-13.0), + Inches.of(12.0), + new Rotation3d(0.0, 0.0, -Math.PI / 2)), + 1.0, + new SimCameraProperties() { + { + setCalibration(1280, 800, Rotation2d.fromDegrees(120)); + setCalibError(0.25, 0.08); + setFPS(30); + setAvgLatencyMs(20); + setLatencyStdDevMs(5); + } + }), + + // ... And more, if needed + }; } /************************************************************************* */ diff --git a/src/main/java/frc/robot/Robot.java b/src/main/java/frc/robot/Robot.java index e0ef5c62..6d58c6d0 100644 --- a/src/main/java/frc/robot/Robot.java +++ b/src/main/java/frc/robot/Robot.java @@ -18,9 +18,6 @@ package frc.robot; import com.revrobotics.util.StatusLogger; -import edu.wpi.first.apriltag.AprilTagFieldLayout; -import edu.wpi.first.apriltag.AprilTagFields; -import edu.wpi.first.math.geometry.Rotation2d; import edu.wpi.first.wpilibj.DriverStation; import edu.wpi.first.wpilibj.Threads; import edu.wpi.first.wpilibj.Timer; @@ -38,7 +35,6 @@ import org.littletonrobotics.urcl.URCL; import org.photonvision.PhotonCamera; import org.photonvision.simulation.PhotonCameraSim; -import org.photonvision.simulation.SimCameraProperties; import org.photonvision.simulation.VisionSystemSim; /** @@ -285,31 +281,32 @@ public void testPeriodic() {} /** This function is called once when the robot is first started up. */ @Override public void simulationInit() { - visionSim = new VisionSystemSim("main"); - - // Load AprilTag field layout - AprilTagFieldLayout fieldLayout = AprilTagFieldLayout.loadField(AprilTagFields.kDefaultField); - - // Register AprilTags with vision simulation - visionSim.addAprilTags(fieldLayout); - - // Simulated Camera Properties - SimCameraProperties cameraProp = new SimCameraProperties(); - // A 1280 x 800 camera with a 100 degree diagonal FOV. 
- cameraProp.setCalibration(1280, 800, Rotation2d.fromDegrees(100)); - // Approximate detection noise with average and standard deviation error in pixels. - cameraProp.setCalibError(0.25, 0.08); - // Set the camera image capture framerate (Note: this is limited by robot loop rate). - cameraProp.setFPS(20); - // The average and standard deviation in milliseconds of image data latency. - cameraProp.setAvgLatencyMs(35); - cameraProp.setLatencyStdDevMs(5); - - // Define Cameras and add to simulation - PhotonCamera camera0 = new PhotonCamera("frontCam"); - PhotonCamera camera1 = new PhotonCamera("backCam"); - visionSim.addCamera(new PhotonCameraSim(camera0, cameraProp), Constants.Cameras.robotToCamera0); - visionSim.addCamera(new PhotonCameraSim(camera1, cameraProp), Constants.Cameras.robotToCamera1); + // ---------------- SIM-ONLY: vision simulation world + camera sims ---------------- + // 1) Create the vision simulation world + visionSim = new VisionSystemSim("CameraSweepWorld"); + + // 2) Add AprilTags (field layout) + visionSim.addAprilTags(FieldConstants.aprilTagLayout); + + // 3) Build PhotonCameraSim objects from Constants camera configs + final Constants.Cameras.CameraConfig[] camConfigs = Constants.Cameras.ALL; + + PhotonCameraSim[] simCams = new PhotonCameraSim[camConfigs.length]; + + for (int i = 0; i < camConfigs.length; i++) { + final var cfg = camConfigs[i]; + + // Name must match the VisionIOPhotonVisionSim name + PhotonCamera photonCam = new PhotonCamera(cfg.name()); + + // 2026 API: wrap camera with sim properties from Constants + PhotonCameraSim camSim = new PhotonCameraSim(photonCam, cfg.simProps()); + + // Register camera with the sim using the robot-to-camera transform + visionSim.addCamera(camSim, cfg.robotToCamera()); + + simCams[i] = camSim; + } } /** This function is called periodically whilst in simulation. 
*/ diff --git a/src/main/java/frc/robot/RobotContainer.java b/src/main/java/frc/robot/RobotContainer.java index 10bcd058..c38f5781 100644 --- a/src/main/java/frc/robot/RobotContainer.java +++ b/src/main/java/frc/robot/RobotContainer.java @@ -13,8 +13,6 @@ package frc.robot; -import static frc.robot.Constants.Cameras.*; - import choreo.auto.AutoChooser; import choreo.auto.AutoFactory; import choreo.auto.AutoRoutine; @@ -23,7 +21,6 @@ import edu.wpi.first.math.geometry.Pose2d; import edu.wpi.first.math.geometry.Rotation2d; import edu.wpi.first.math.geometry.Transform2d; -import edu.wpi.first.math.geometry.Transform3d; import edu.wpi.first.math.kinematics.ChassisSpeeds; import edu.wpi.first.math.util.Units; import edu.wpi.first.wpilibj.Filesystem; @@ -37,8 +34,8 @@ import edu.wpi.first.wpilibj2.command.button.RobotModeTriggers; import edu.wpi.first.wpilibj2.command.sysid.SysIdRoutine; import frc.robot.Constants.CANBuses; +import frc.robot.Constants.Cameras; import frc.robot.Constants.OperatorConstants; -import frc.robot.Constants.SimCameras; import frc.robot.FieldConstants.AprilTagLayoutType; import frc.robot.commands.AutopilotCommands; import frc.robot.commands.DriveCommands; @@ -49,9 +46,6 @@ import frc.robot.subsystems.flywheel_example.FlywheelIO; import frc.robot.subsystems.flywheel_example.FlywheelIOSim; import frc.robot.subsystems.imu.Imu; -import frc.robot.subsystems.imu.ImuIO; -import frc.robot.subsystems.imu.ImuIONavX; -import frc.robot.subsystems.imu.ImuIOPigeon2; import frc.robot.subsystems.imu.ImuIOSim; import frc.robot.subsystems.vision.CameraSweepEvaluator; import frc.robot.subsystems.vision.Vision; @@ -70,6 +64,8 @@ import frc.robot.util.RBSIEnum.DriveStyle; import frc.robot.util.RBSIEnum.Mode; import frc.robot.util.RBSIPowerMonitor; +import java.util.Arrays; +import java.util.List; import java.util.Set; import org.littletonrobotics.junction.networktables.LoggedDashboardChooser; import org.photonvision.PhotonCamera; @@ -111,7 +107,7 @@ public class 
RobotContainer { private final Vision m_vision; @SuppressWarnings("unused") - private RBSICANHealth m_can0, m_can1; + private List canHealth; /** Dashboard inputs ***************************************************** */ // AutoChoosers for both supported path planning types @@ -132,6 +128,37 @@ public class RobotContainer { // Alerts private final Alert aprilTagLayoutAlert = new Alert("", AlertType.INFO); + // Vision Factories + private VisionIO[] buildVisionIOsReal(Drive drive) { + return switch (Constants.getVisionType()) { + case PHOTON -> + Arrays.stream(Cameras.ALL) + .map(c -> (VisionIO) new VisionIOPhotonVision(c.name(), c.robotToCamera())) + .toArray(VisionIO[]::new); + + case LIMELIGHT -> + Arrays.stream(Cameras.ALL) + .map(c -> (VisionIO) new VisionIOLimelight(c.name(), drive::getHeading)) + .toArray(VisionIO[]::new); + + case NONE -> new VisionIO[] {new VisionIO() {}}; + }; + } + + private static VisionIO[] buildVisionIOsSim(Drive drive) { + var cams = Constants.Cameras.ALL; + VisionIO[] ios = new VisionIO[cams.length]; + for (int i = 0; i < cams.length; i++) { + var cfg = cams[i]; + ios[i] = new VisionIOPhotonVisionSim(cfg.name(), cfg.robotToCamera(), drive::getPose); + } + return ios; + } + + private VisionIO[] buildVisionIOsReplay() { + return new VisionIO[] {new VisionIO() {}}; + } + /** * Constructor for the Robot Container. This container holds subsystems, opertator interface * devices, and commands. 
@@ -148,74 +175,54 @@ public RobotContainer() { // YAGSL drivebase, get config from deploy directory // Get the IMU instance - ImuIO imuIO = - switch (SwerveConstants.kImuType) { - case "pigeon2" -> new ImuIOPigeon2(); - case "navx", "navx_spi" -> new ImuIONavX(); - default -> throw new RuntimeException("Invalid IMU type"); - }; - m_imu = new Imu(imuIO); + m_imu = new Imu(SwerveConstants.kImu.factory.get()); m_drivebase = new Drive(m_imu); m_flywheel = new Flywheel(new FlywheelIOSim()); // new Flywheel(new FlywheelIOTalonFX()); - m_vision = - switch (Constants.getVisionType()) { - case PHOTON -> - new Vision( - m_drivebase::addVisionMeasurement, - new VisionIOPhotonVision(camera0Name, robotToCamera0), - new VisionIOPhotonVision(camera1Name, robotToCamera1)); - case LIMELIGHT -> - new Vision( - m_drivebase::addVisionMeasurement, - new VisionIOLimelight(camera0Name, m_drivebase::getHeading), - new VisionIOLimelight(camera1Name, m_drivebase::getHeading)); - case NONE -> - new Vision( - m_drivebase::addVisionMeasurement, new VisionIO() {}, new VisionIO() {}); - default -> null; - }; + m_vision = new Vision(m_drivebase::addVisionMeasurement, buildVisionIOsReal(m_drivebase)); m_accel = new Accelerometer(m_imu); sweep = null; - m_can0 = new RBSICANHealth(CANBuses.RIO); - m_can1 = new RBSICANHealth(CANBuses.DRIVE); - break; case SIM: - // Sim robot, instantiate physics sim IO implementations RBSICANBusRegistry.initSim(CANBuses.RIO, CANBuses.DRIVE); - m_imu = new Imu(new ImuIOSim() {}); + + m_imu = new Imu(new ImuIOSim()); m_drivebase = new Drive(m_imu); - m_flywheel = new Flywheel(new FlywheelIOSim() {}); - m_vision = - new Vision( - m_drivebase::addVisionMeasurement, - new VisionIOPhotonVisionSim(camera0Name, robotToCamera0, m_drivebase::getPose), - new VisionIOPhotonVisionSim(camera1Name, robotToCamera1, m_drivebase::getPose)); + m_flywheel = new Flywheel(new FlywheelIOSim()); + + // ---------------- Vision IOs (robot code) ---------------- + var cams = 
frc.robot.Constants.Cameras.ALL; + + // If you keep Vision expecting exactly two cameras: + VisionIO[] visionIOs = buildVisionIOsSim(m_drivebase); + m_vision = new Vision(m_drivebase::addVisionMeasurement, visionIOs); + m_accel = new Accelerometer(m_imu); - // CameraSweepEvaluator Construct - // 1) Create the vision simulation world + // ---------------- CameraSweepEvaluator (sim-only analysis) ---------------- VisionSystemSim visionSim = new VisionSystemSim("CameraSweepWorld"); - // 2) Add AprilTags (field layout) visionSim.addAprilTags(FieldConstants.aprilTagLayout); - // 3) Create two simulated cameras - // Create PhotonCamera objects (names must match VisionIOPhotonVisionSim) - PhotonCamera camera1 = new PhotonCamera(camera0Name); - PhotonCamera camera2 = new PhotonCamera(camera1Name); - - // Wrap them with simulation + properties (2026 API) - PhotonCameraSim cam1 = new PhotonCameraSim(camera1, SimCameras.kSimCamera1Props); - PhotonCameraSim cam2 = new PhotonCameraSim(camera2, SimCameras.kSimCamera2Props); - - // 4) Register cameras with the sim - visionSim.addCamera(cam1, Transform3d.kZero); - visionSim.addCamera(cam2, Transform3d.kZero); - // 5) Create the sweep evaluator - sweep = new CameraSweepEvaluator(visionSim, cam1, cam2); - m_can0 = new RBSICANHealth(CANBuses.RIO); - m_can1 = new RBSICANHealth(CANBuses.DRIVE); + + PhotonCameraSim[] simCams = new PhotonCameraSim[cams.length]; + + for (int i = 0; i < cams.length; i++) { + var cfg = cams[i]; + + PhotonCamera photonCam = new PhotonCamera(cfg.name()); + PhotonCameraSim camSim = new PhotonCameraSim(photonCam, cfg.simProps()); + + visionSim.addCamera(camSim, cfg.robotToCamera()); + simCams[i] = camSim; + } + + // Create the sweep evaluator (expects two cameras; adapt if you add more later) + if (simCams.length >= 2) { + sweep = new CameraSweepEvaluator(visionSim, simCams[0], simCams[1]); + } else { + sweep = null; // or throw if you require exactly 2 cameras + } + break; default: @@ -224,15 +231,16 @@ public 
RobotContainer() { m_imu = new Imu(new ImuIOSim() {}); m_drivebase = new Drive(m_imu); m_flywheel = new Flywheel(new FlywheelIO() {}); - m_vision = - new Vision(m_drivebase::addVisionMeasurement, new VisionIO() {}, new VisionIO() {}); + m_vision = new Vision(m_drivebase::addVisionMeasurement, buildVisionIOsReplay()); m_accel = new Accelerometer(m_imu); sweep = null; - m_can0 = new RBSICANHealth(CANBuses.RIO); - m_can1 = new RBSICANHealth(CANBuses.DRIVE); break; } + // Init all CAN busses specified in the `Constants.CANBuses` class + RBSICANBusRegistry.initReal(Constants.CANBuses.ALL); + canHealth = Arrays.stream(Constants.CANBuses.ALL).map(RBSICANHealth::new).toList(); + // In addition to the initial battery capacity from the Dashbaord, ``RBSIPowerMonitor`` takes // all the non-drivebase subsystems for which you wish to have power monitoring; DO NOT // include ``m_drivebase``, as that is automatically monitored. diff --git a/src/main/java/frc/robot/subsystems/drive/SwerveConstants.java b/src/main/java/frc/robot/subsystems/drive/SwerveConstants.java index 98f775fa..a9332cec 100644 --- a/src/main/java/frc/robot/subsystems/drive/SwerveConstants.java +++ b/src/main/java/frc/robot/subsystems/drive/SwerveConstants.java @@ -18,6 +18,9 @@ import frc.robot.Constants; import frc.robot.Constants.CANBuses; import frc.robot.generated.TunerConstants; +import frc.robot.subsystems.imu.ImuIO; +import frc.robot.subsystems.imu.ImuIONavX; +import frc.robot.subsystems.imu.ImuIOPigeon2; import frc.robot.util.YagslConstants; /** @@ -270,6 +273,9 @@ public class SwerveConstants { } } + // Derived constant AFTER the static block + public static final ImuType kImu = ImuType.fromString(kImuType); + // Computed quantities public static final double kDriveBaseRadiusMeters = Math.max( @@ -297,4 +303,30 @@ public class SwerveConstants { // Turn PID configuration0 public static final double turnPIDMinInput = 0; // Radians public static final double turnPIDMaxInput = 2 * Math.PI; // Radians + + 
/** IMU Type enum */ + public enum ImuType { + PIGEON(new String[] {"pigeon", "pigeon2"}, ImuIOPigeon2::new), + NAVX(new String[] {"navx", "navx_spi"}, ImuIONavX::new); + + private final String[] keys; + public final java.util.function.Supplier factory; + + ImuType(String[] keys, java.util.function.Supplier factory) { + this.keys = keys; + this.factory = factory; + } + + public static ImuType fromString(String s) { + if (s == null) throw new IllegalArgumentException("IMU type string is null"); + String norm = s.trim().toLowerCase(); + + for (ImuType t : values()) { + for (String k : t.keys) { + if (norm.equals(k)) return t; + } + } + throw new IllegalArgumentException("Unknown IMU type: '" + s + "'"); + } + } } diff --git a/src/main/java/frc/robot/subsystems/vision/CameraSweepEvaluator.java b/src/main/java/frc/robot/subsystems/vision/CameraSweepEvaluator.java index 05a0eb19..3cdcbb9d 100644 --- a/src/main/java/frc/robot/subsystems/vision/CameraSweepEvaluator.java +++ b/src/main/java/frc/robot/subsystems/vision/CameraSweepEvaluator.java @@ -17,15 +17,13 @@ package frc.robot.subsystems.vision; -import static frc.robot.Constants.Cameras.robotToCamera0; -import static frc.robot.Constants.Cameras.robotToCamera1; - import edu.wpi.first.math.geometry.Pose3d; import edu.wpi.first.math.geometry.Quaternion; import edu.wpi.first.math.geometry.Rotation3d; import edu.wpi.first.math.geometry.Transform3d; import edu.wpi.first.math.geometry.Translation3d; import edu.wpi.first.math.util.Units; +import frc.robot.Constants; import java.io.FileWriter; import java.io.IOException; import java.util.ArrayList; @@ -44,11 +42,27 @@ public class CameraSweepEvaluator { private final PhotonCameraSim camSim1; private final PhotonCameraSim camSim2; + /** Indexes into Constants.Cameras.ALL for each camera being swept. 
*/ + private final int cam1Index; + + private final int cam2Index; + public CameraSweepEvaluator( VisionSystemSim visionSim, PhotonCameraSim camSim1, PhotonCameraSim camSim2) { + this(visionSim, camSim1, camSim2, 0, 1); + } + + public CameraSweepEvaluator( + VisionSystemSim visionSim, + PhotonCameraSim camSim1, + PhotonCameraSim camSim2, + int cam1Index, + int cam2Index) { this.visionSim = visionSim; this.camSim1 = camSim1; this.camSim2 = camSim2; + this.cam1Index = cam1Index; + this.cam2Index = cam2Index; } private static Quaternion quatFromAxisAngle(double ax, double ay, double az, double angleRad) { @@ -87,7 +101,7 @@ private static Quaternion quatMul(Quaternion a, Quaternion b) { /** * Compose base rotation with an "extra" camera yaw/pitch. * - *

Conventions: yawDeg = rotation about +Z (up) pitchDeg = rotation about +Y + *

Conventions: yawDeg = rotation about +Z (up), pitchDeg = rotation about +Y. * *

Order: extra = yaw ⊗ pitch (yaw first, then pitch, both in the camera/base frame) combined = * base ⊗ extra @@ -107,98 +121,101 @@ private static Rotation3d composeCameraExtra(Rotation3d base, double yawDeg, dou return new Rotation3d(qCombined); } - /** - * Run a full sweep of candidate camera placements. - * - * @param outputCsvPath Path to write the CSV results - */ + private Transform3d getMountTransformOrThrow(int idx) { + var all = Constants.Cameras.ALL; + if (idx < 0 || idx >= all.length) { + throw new IllegalArgumentException( + "CameraSweepEvaluator camera index out of range: " + + idx + + " (ALL.length=" + + all.length + + ")"); + } + return all[idx].robotToCamera(); + } + + /** Run a full sweep of candidate camera placements and write a CSV of results. */ public void runFullSweep(String outputCsvPath) throws IOException { // Example field bounds (meters) -- tune these for your field size double[] fieldX = { 0.5, 1.0, 1.5, 2.0, 2.5, 3.0, 3.5, 4.0, 4.5, 5.0, 5.5, 6.0, 6.5, 7.0, 7.5, 8.0 }; double[] fieldY = {0.5, 1.0, 1.5, 2.0, 2.5, 3.0, 3.5, 4.0, 4.5, 5.0, 5.5, 6.0, 6.5, 7.0, 7.5}; - double[] robotZ = {0.0}; // Usually floor height + double[] robotZ = {0.0}; - // NEW: Sweep robot yaw at each location (degrees) + // Sweep robot yaw at each location (degrees) double[] robotYawDeg = {-180, -135, -90, -45, 0, 45, 90, 135}; // Camera “extra” rotation sweep (applied on top of the mount rotation) double[] camYawDeg = {-15, 0, 15}; double[] camPitchDeg = {-10, 0, 10}; - // Pull fixed mount transforms from Constants once (and log/write them) - final Transform3d rToC0 = robotToCamera0; - final Transform3d rToC1 = robotToCamera1; + // Pull fixed mount transforms from Constants (new regime) + final Transform3d rToC0 = getMountTransformOrThrow(cam1Index); + final Transform3d rToC1 = getMountTransformOrThrow(cam2Index); + // Extract mount transform translation + quaternion once for CSV (self-contained rows) final Translation3d c0T = rToC0.getTranslation(); final 
Translation3d c1T = rToC1.getTranslation(); final Quaternion c0Q = rToC0.getRotation().getQuaternion(); final Quaternion c1Q = rToC1.getRotation().getQuaternion(); + // Build the target list ONCE (huge savings vs doing it inside the deepest loop) + final Set simTargets = visionSim.getVisionTargets(); + final List targetList = new ArrayList<>(simTargets); + try (FileWriter writer = new FileWriter(outputCsvPath)) { - // Header includes robot yaw and BOTH robot->camera transforms (translation + quaternion) writer.write( "robotX,robotY,robotZ,robotYawDeg," + "cam1YawDeg,cam1PitchDeg,cam2YawDeg,cam2PitchDeg,score," - + "rToC0_tx,rToC0_ty,rToC0_tz,rToC0_qw,rToC0_qx,rToC0_qy,rToC0_qz," - + "rToC1_tx,rToC1_ty,rToC1_tz,rToC1_qw,rToC1_qx,rToC1_qy,rToC1_qz\n"); + + "mount0_idx,mount0_tx,mount0_ty,mount0_tz,mount0_qw,mount0_qx,mount0_qy,mount0_qz," + + "mount1_idx,mount1_tx,mount1_ty,mount1_tz,mount1_qw,mount1_qx,mount1_qy,mount1_qz\n"); - // Sweep robot over the field for (double rx : fieldX) { for (double ry : fieldY) { for (double rz : robotZ) { - - // NEW: Sweep robot yaw at each location for (double rYaw : robotYawDeg) { Pose3d robotPose = new Pose3d( new Translation3d(rx, ry, rz), new Rotation3d(0.0, 0.0, Units.degreesToRadians(rYaw))); - // Base camera poses from mount transforms (these rotate with robot yaw) + // Base camera poses from mount transforms (rotate with robot yaw) Pose3d cam1BasePose = robotPose.transformBy(rToC0); Pose3d cam2BasePose = robotPose.transformBy(rToC1); - // Sweep camera "extra" rotations for (double c1Yaw : camYawDeg) { for (double c1Pitch : camPitchDeg) { - Rotation3d cam1Rot = composeCameraExtra(cam1BasePose.getRotation(), c1Yaw, c1Pitch); Pose3d cam1Pose = new Pose3d(cam1BasePose.getTranslation(), cam1Rot); for (double c2Yaw : camYawDeg) { for (double c2Pitch : camPitchDeg) { - Rotation3d cam2Rot = composeCameraExtra(cam2BasePose.getRotation(), c2Yaw, c2Pitch); Pose3d cam2Pose = new Pose3d(cam2BasePose.getTranslation(), cam2Rot); - // Get 
all vision targets - Set simTargets = visionSim.getVisionTargets(); - List targetList = new ArrayList<>(simTargets); - - // Simulate camera processing + // Simulate camera processing (timestampSeconds can be 0 for this offline + // sweep) PhotonPipelineResult res1 = camSim1.process(0, cam1Pose, targetList); PhotonPipelineResult res2 = camSim2.process(0, cam2Pose, targetList); - // Score - double score = res1.getTargets().size() + res2.getTargets().size(); - if (res1.getTargets().size() >= 2) score += 2.0; - if (res2.getTargets().size() >= 2) score += 2.0; + // Score: count visible targets + bonus for multi-tag + int n1 = res1.getTargets().size(); + int n2 = res2.getTargets().size(); + double score = n1 + n2; + if (n1 >= 2) score += 2.0; + if (n2 >= 2) score += 2.0; - // Penalize ambiguity safely with loops for (var t : res1.getTargets()) score -= t.getPoseAmbiguity() * 2.0; for (var t : res2.getTargets()) score -= t.getPoseAmbiguity() * 2.0; - // Write CSV row (note: we repeat mount transforms each row for - // “self-contained” CSV) writer.write( String.format( "%.2f,%.2f,%.2f,%.1f,%.1f,%.1f,%.1f,%.1f,%.3f," - + "%.6f,%.6f,%.6f,%.16g,%.16g,%.16g,%.16g," - + "%.6f,%.6f,%.6f,%.16g,%.16g,%.16g,%.16g\n", + + "%d,%.6f,%.6f,%.6f,%.16g,%.16g,%.16g,%.16g," + + "%d,%.6f,%.6f,%.6f,%.16g,%.16g,%.16g,%.16g\n", rx, ry, rz, @@ -208,6 +225,7 @@ public void runFullSweep(String outputCsvPath) throws IOException { c2Yaw, c2Pitch, score, + cam1Index, c0T.getX(), c0T.getY(), c0T.getZ(), @@ -215,6 +233,7 @@ public void runFullSweep(String outputCsvPath) throws IOException { c0Q.getX(), c0Q.getY(), c0Q.getZ(), + cam2Index, c1T.getX(), c1T.getY(), c1T.getZ(), @@ -223,10 +242,9 @@ public void runFullSweep(String outputCsvPath) throws IOException { c1Q.getY(), c1Q.getZ())); + // Optional logging: keep it light (or decimate) so it doesn’t dominate + // runtime Logger.recordOutput("CameraSweep/Score", score); - Logger.recordOutput("CameraSweep/RobotPose", robotPose); - 
Logger.recordOutput("CameraSweep/Cam1Pose", cam1Pose); - Logger.recordOutput("CameraSweep/Cam2Pose", cam2Pose); } } } diff --git a/src/main/java/frc/robot/subsystems/vision/Vision.java b/src/main/java/frc/robot/subsystems/vision/Vision.java index 263ed34b..04e20417 100644 --- a/src/main/java/frc/robot/subsystems/vision/Vision.java +++ b/src/main/java/frc/robot/subsystems/vision/Vision.java @@ -11,7 +11,6 @@ package frc.robot.subsystems.vision; -import static frc.robot.Constants.Cameras.*; import static frc.robot.Constants.VisionConstants.*; import edu.wpi.first.math.Matrix; @@ -23,7 +22,7 @@ import edu.wpi.first.math.numbers.N3; import edu.wpi.first.wpilibj.Alert; import edu.wpi.first.wpilibj.Alert.AlertType; -import frc.robot.Constants.Cameras; +import frc.robot.Constants; import frc.robot.FieldConstants; import frc.robot.subsystems.vision.VisionIO.PoseObservationType; import frc.robot.util.VirtualSubsystem; @@ -36,10 +35,28 @@ public class Vision extends VirtualSubsystem { private final VisionIOInputsAutoLogged[] inputs; private final Alert[] disconnectedAlerts; + // Camera configs (names, transforms, stddev multipliers, sim props) + private final Constants.Cameras.CameraConfig[] camConfigs; + + // ---------------- Reusable scratch buffers (avoid per-loop allocations) ---------------- + // Summary buffers + private final ArrayList allTagPoses = new ArrayList<>(32); + private final ArrayList allRobotPoses = new ArrayList<>(64); + private final ArrayList allRobotPosesAccepted = new ArrayList<>(64); + private final ArrayList allRobotPosesRejected = new ArrayList<>(64); + + // Per-camera buffers (reused each camera) + private final ArrayList tagPoses = new ArrayList<>(16); + private final ArrayList robotPoses = new ArrayList<>(32); + private final ArrayList robotPosesAccepted = new ArrayList<>(32); + private final ArrayList robotPosesRejected = new ArrayList<>(32); + public Vision(VisionConsumer consumer, VisionIO... 
io) { this.consumer = consumer; this.io = io; + this.camConfigs = Constants.Cameras.ALL; + // Initialize inputs this.inputs = new VisionIOInputsAutoLogged[io.length]; for (int i = 0; i < inputs.length; i++) { @@ -48,53 +65,46 @@ public Vision(VisionConsumer consumer, VisionIO... io) { // Initialize disconnected alerts this.disconnectedAlerts = new Alert[io.length]; - for (int i = 0; i < inputs.length; i++) { + for (int i = 0; i < io.length; i++) { disconnectedAlerts[i] = - new Alert( - "Vision camera " + Integer.toString(i) + " is disconnected.", AlertType.kWarning); + new Alert("Vision camera " + i + " is disconnected.", AlertType.kWarning); } - // Log the robot-to-camera transformations - Logger.recordOutput("Vision/RobotToCamera0", Cameras.robotToCamera0); - Logger.recordOutput("Vision/RobotToCamera1", Cameras.robotToCamera1); + // Log robot-to-camera transforms from the new camera config array + // (Only log as many as exist in BOTH configs and IOs) + int n = Math.min(camConfigs.length, io.length); + for (int i = 0; i < n; i++) { + Logger.recordOutput("Vision/RobotToCamera" + i, camConfigs[i].robotToCamera()); + } } - /** - * Returns the X angle to the best target, which can be used for simple servoing with vision. - * - * @param cameraIndex The index of the camera to use. - */ + /** Returns the X angle to the best target, useful for simple servoing. 
*/ public Rotation2d getTargetX(int cameraIndex) { return inputs[cameraIndex].latestTargetObservation.tx(); } @Override - public void rbsiPeriodic() {} - - public void somethingElse() { - - // Update inputs + process inputs first (cheap, and keeps AK logs consistent) + public void rbsiPeriodic() { + // 1) Update inputs + process inputs first (keeps AK logs consistent) for (int i = 0; i < io.length; i++) { io[i].updateInputs(inputs[i]); Logger.processInputs("Vision/Camera" + i, inputs[i]); disconnectedAlerts[i].set(!inputs[i].connected); } - // Reusable scratch buffers (ArrayList avoids LinkedList churn) - // Tune these capacities if you know typical sizes - final ArrayList allTagPoses = new ArrayList<>(32); - final ArrayList allRobotPoses = new ArrayList<>(64); - final ArrayList allRobotPosesAccepted = new ArrayList<>(64); - final ArrayList allRobotPosesRejected = new ArrayList<>(64); + // 2) Clear summary buffers (reused) + allTagPoses.clear(); + allRobotPoses.clear(); + allRobotPosesAccepted.clear(); + allRobotPosesRejected.clear(); - // Loop over cameras + // 3) Process each camera for (int cameraIndex = 0; cameraIndex < io.length; cameraIndex++) { - - // Per-camera scratch buffers - final ArrayList tagPoses = new ArrayList<>(16); - final ArrayList robotPoses = new ArrayList<>(32); - final ArrayList robotPosesAccepted = new ArrayList<>(32); - final ArrayList robotPosesRejected = new ArrayList<>(32); + // Clear per-camera buffers + tagPoses.clear(); + robotPoses.clear(); + robotPosesAccepted.clear(); + robotPosesRejected.clear(); // Add tag poses from ids for (int tagId : inputs[cameraIndex].tagIds) { @@ -106,21 +116,17 @@ public void somethingElse() { // Loop over pose observations for (var observation : inputs[cameraIndex].poseObservations) { - // Check whether to reject pose + // Reject rules boolean rejectPose = - observation.tagCount() == 0 // Must have at least one tag - || (observation.tagCount() == 1 - && observation.ambiguity() > maxAmbiguity) // Cannot 
be high ambiguity - || Math.abs(observation.pose().getZ()) - > maxZError // Must have realistic Z coordinate - - // Must be within the field boundaries + observation.tagCount() == 0 + || (observation.tagCount() == 1 && observation.ambiguity() > maxAmbiguity) + || Math.abs(observation.pose().getZ()) > maxZError || observation.pose().getX() < 0.0 || observation.pose().getX() > FieldConstants.aprilTagLayout.getFieldLength() || observation.pose().getY() < 0.0 || observation.pose().getY() > FieldConstants.aprilTagLayout.getFieldWidth(); - // Add pose to log + // Log pose buckets robotPoses.add(observation.pose()); if (rejectPose) { robotPosesRejected.add(observation.pose()); @@ -128,33 +134,35 @@ public void somethingElse() { robotPosesAccepted.add(observation.pose()); } - // Skip if rejected if (rejectPose) { continue; } - // Calculate standard deviations + // Standard deviations double stdDevFactor = Math.pow(observation.averageTagDistance(), 2.0) / observation.tagCount(); double linearStdDev = linearStdDevBaseline * stdDevFactor; double angularStdDev = angularStdDevBaseline * stdDevFactor; + if (observation.type() == PoseObservationType.MEGATAG_2) { linearStdDev *= linearStdDevMegatag2Factor; angularStdDev *= angularStdDevMegatag2Factor; } - if (cameraIndex < cameraStdDevFactors.length) { - linearStdDev *= cameraStdDevFactors[cameraIndex]; - angularStdDev *= cameraStdDevFactors[cameraIndex]; + + // Apply per-camera multiplier from CameraConfig + if (cameraIndex < camConfigs.length) { + double k = camConfigs[cameraIndex].stdDevFactor(); + linearStdDev *= k; + angularStdDev *= k; } - // Send vision observation consumer.accept( observation.pose().toPose2d(), observation.timestamp(), VecBuilder.fill(linearStdDev, linearStdDev, angularStdDev)); } - // Log camera data + // Per-camera logs (arrays allocate; acceptable if you’re OK with this in the log loop) Logger.recordOutput( "Vision/Camera" + cameraIndex + "/TagPoses", tagPoses.toArray(new Pose3d[0])); 
Logger.recordOutput( @@ -166,14 +174,14 @@ public void somethingElse() { "Vision/Camera" + cameraIndex + "/RobotPosesRejected", robotPosesRejected.toArray(new Pose3d[0])); - // Summary aggregation + // Aggregate summary allTagPoses.addAll(tagPoses); allRobotPoses.addAll(robotPoses); allRobotPosesAccepted.addAll(robotPosesAccepted); allRobotPosesRejected.addAll(robotPosesRejected); } - // Log summary data + // 4) Summary logs Logger.recordOutput("Vision/Summary/TagPoses", allTagPoses.toArray(new Pose3d[0])); Logger.recordOutput("Vision/Summary/RobotPoses", allRobotPoses.toArray(new Pose3d[0])); Logger.recordOutput( @@ -184,7 +192,7 @@ public void somethingElse() { @FunctionalInterface public static interface VisionConsumer { - public void accept( + void accept( Pose2d visionRobotPoseMeters, double timestampSeconds, Matrix visionMeasurementStdDevs); diff --git a/vendordeps/Phoenix5-replay-5.36.0.json b/vendordeps/Phoenix5-5.36.0.json similarity index 68% rename from vendordeps/Phoenix5-replay-5.36.0.json rename to vendordeps/Phoenix5-5.36.0.json index 7fbfcf5e..c60dd4cc 100644 --- a/vendordeps/Phoenix5-replay-5.36.0.json +++ b/vendordeps/Phoenix5-5.36.0.json @@ -1,31 +1,31 @@ { - "fileName": "Phoenix5-replay-5.36.0.json", + "fileName": "Phoenix5-5.36.0.json", "name": "CTRE-Phoenix (v5)", "version": "5.36.0", "frcYear": "2026", - "uuid": "fbc886a4-2cec-40c0-9835-71086a8cc3df", + "uuid": "ab676553-b602-441f-a38d-f1296eff6537", "mavenUrls": [ "https://maven.ctr-electronics.com/release/" ], - "jsonUrl": "https://maven.ctr-electronics.com/release/com/ctre/phoenix/Phoenix5-replay-frc2026-latest.json", + "jsonUrl": "https://maven.ctr-electronics.com/release/com/ctre/phoenix/Phoenix5-frc2026-latest.json", "requires": [ { - "uuid": "e7900d8d-826f-4dca-a1ff-182f658e98af", + "uuid": "e995de00-2c64-4df5-8831-c1441420ff19", "errorMessage": "Phoenix 5 requires low-level libraries from Phoenix 6. 
Please add the Phoenix 6 vendordep before adding Phoenix 5.", - "offlineFileName": "Phoenix6-replay-frc2026-latest.json", - "onlineUrl": "https://maven.ctr-electronics.com/release/com/ctre/phoenix6/latest/Phoenix6-replay-frc2026-latest.json" + "offlineFileName": "Phoenix6-frc2026-latest.json", + "onlineUrl": "https://maven.ctr-electronics.com/release/com/ctre/phoenix6/latest/Phoenix6-frc2026-latest.json" } ], "conflictsWith": [ { - "uuid": "e995de00-2c64-4df5-8831-c1441420ff19", - "errorMessage": "Users must use the regular Phoenix 5 vendordep when using the regular Phoenix 6 vendordep.", - "offlineFileName": "Phoenix6-frc2026-latest.json" + "uuid": "e7900d8d-826f-4dca-a1ff-182f658e98af", + "errorMessage": "Users must use the Phoenix 5 replay vendordep when using the Phoenix 6 replay vendordep.", + "offlineFileName": "Phoenix6-replay-frc2026-latest.json" }, { - "uuid": "ab676553-b602-441f-a38d-f1296eff6537", + "uuid": "fbc886a4-2cec-40c0-9835-71086a8cc3df", "errorMessage": "Users cannot have both the replay and regular Phoenix 5 vendordeps in their robot program.", - "offlineFileName": "Phoenix5-frc2026-latest.json" + "offlineFileName": "Phoenix5-replay-frc2026-latest.json" } ], "javaDependencies": [ @@ -47,22 +47,11 @@ "version": "5.36.0", "isJar": false, "skipInvalidPlatforms": true, - "validPlatforms": [ - "linuxathena" - ], - "simMode": "hwsim" - }, - { - "groupId": "com.ctre.phoenix.replay", - "artifactId": "cci-replay", - "version": "5.36.0", - "isJar": false, - "skipInvalidPlatforms": true, "validPlatforms": [ "windowsx86-64", "linuxx86-64", "linuxarm64", - "osxuniversal" + "linuxathena" ], "simMode": "hwsim" }, @@ -91,6 +80,9 @@ "sharedLibrary": true, "skipInvalidPlatforms": true, "binaryPlatforms": [ + "windowsx86-64", + "linuxx86-64", + "linuxarm64", "linuxathena" ], "simMode": "hwsim" @@ -104,6 +96,9 @@ "sharedLibrary": true, "skipInvalidPlatforms": true, "binaryPlatforms": [ + "windowsx86-64", + "linuxx86-64", + "linuxarm64", "linuxathena" ], "simMode": 
"hwsim" @@ -116,56 +111,11 @@ "headerClassifier": "headers", "sharedLibrary": true, "skipInvalidPlatforms": true, - "binaryPlatforms": [ - "linuxathena" - ], - "simMode": "hwsim" - }, - { - "groupId": "com.ctre.phoenix.replay", - "artifactId": "wpiapi-cpp-replay", - "version": "5.36.0", - "libName": "CTRE_Phoenix_WPIReplay", - "headerClassifier": "headers", - "sharedLibrary": true, - "skipInvalidPlatforms": true, "binaryPlatforms": [ "windowsx86-64", "linuxx86-64", "linuxarm64", - "osxuniversal" - ], - "simMode": "hwsim" - }, - { - "groupId": "com.ctre.phoenix.replay", - "artifactId": "api-cpp-replay", - "version": "5.36.0", - "libName": "CTRE_PhoenixReplay", - "headerClassifier": "headers", - "sharedLibrary": true, - "skipInvalidPlatforms": true, - "binaryPlatforms": [ - "windowsx86-64", - "linuxx86-64", - "linuxarm64", - "osxuniversal" - ], - "simMode": "hwsim" - }, - { - "groupId": "com.ctre.phoenix.replay", - "artifactId": "cci-replay", - "version": "5.36.0", - "libName": "CTRE_PhoenixCCIReplay", - "headerClassifier": "headers", - "sharedLibrary": true, - "skipInvalidPlatforms": true, - "binaryPlatforms": [ - "windowsx86-64", - "linuxx86-64", - "linuxarm64", - "osxuniversal" + "linuxathena" ], "simMode": "hwsim" }, diff --git a/vendordeps/Phoenix6-replay-frc2026-latest.json b/vendordeps/Phoenix6-26.1.1.json similarity index 87% rename from vendordeps/Phoenix6-replay-frc2026-latest.json rename to vendordeps/Phoenix6-26.1.1.json index 75532e9d..c0a1c19f 100644 --- a/vendordeps/Phoenix6-replay-frc2026-latest.json +++ b/vendordeps/Phoenix6-26.1.1.json @@ -1,18 +1,18 @@ { - "fileName": "Phoenix6-replay-frc2026-latest.json", - "name": "CTRE-Phoenix (v6) Replay", + "fileName": "Phoenix6-26.1.1.json", + "name": "CTRE-Phoenix (v6)", "version": "26.1.1", "frcYear": "2026", - "uuid": "e7900d8d-826f-4dca-a1ff-182f658e98af", + "uuid": "e995de00-2c64-4df5-8831-c1441420ff19", "mavenUrls": [ "https://maven.ctr-electronics.com/release/" ], - "jsonUrl": 
"https://maven.ctr-electronics.com/release/com/ctre/phoenix6/latest/Phoenix6-replay-frc2026-latest.json", + "jsonUrl": "https://maven.ctr-electronics.com/release/com/ctre/phoenix6/latest/Phoenix6-frc2026-latest.json", "conflictsWith": [ { - "uuid": "e995de00-2c64-4df5-8831-c1441420ff19", + "uuid": "e7900d8d-826f-4dca-a1ff-182f658e98af", "errorMessage": "Users can not have both the replay and regular Phoenix 6 vendordeps in their robot program.", - "offlineFileName": "Phoenix6-frc2026-latest.json" + "offlineFileName": "Phoenix6-replay-frc2026-latest.json" } ], "javaDependencies": [ @@ -29,39 +29,17 @@ "version": "26.1.1", "isJar": false, "skipInvalidPlatforms": true, - "validPlatforms": [ - "linuxathena" - ], - "simMode": "hwsim" - }, - { - "groupId": "com.ctre.phoenix6", - "artifactId": "tools", - "version": "26.1.1", - "isJar": false, - "skipInvalidPlatforms": true, - "validPlatforms": [ - "linuxathena" - ], - "simMode": "hwsim" - }, - { - "groupId": "com.ctre.phoenix6.replay", - "artifactId": "api-cpp-replay", - "version": "26.1.1", - "isJar": false, - "skipInvalidPlatforms": true, "validPlatforms": [ "windowsx86-64", "linuxx86-64", "linuxarm64", - "osxuniversal" + "linuxathena" ], "simMode": "hwsim" }, { - "groupId": "com.ctre.phoenix6.replay", - "artifactId": "tools-replay", + "groupId": "com.ctre.phoenix6", + "artifactId": "tools", "version": "26.1.1", "isJar": false, "skipInvalidPlatforms": true, @@ -69,7 +47,7 @@ "windowsx86-64", "linuxx86-64", "linuxarm64", - "osxuniversal" + "linuxathena" ], "simMode": "hwsim" }, @@ -252,6 +230,9 @@ "sharedLibrary": true, "skipInvalidPlatforms": true, "binaryPlatforms": [ + "windowsx86-64", + "linuxx86-64", + "linuxarm64", "linuxathena" ], "simMode": "hwsim" @@ -264,40 +245,11 @@ "headerClassifier": "headers", "sharedLibrary": true, "skipInvalidPlatforms": true, - "binaryPlatforms": [ - "linuxathena" - ], - "simMode": "hwsim" - }, - { - "groupId": "com.ctre.phoenix6.replay", - "artifactId": "wpiapi-cpp-replay", - 
"version": "26.1.1", - "libName": "CTRE_Phoenix6_WPIReplay", - "headerClassifier": "headers", - "sharedLibrary": true, - "skipInvalidPlatforms": true, "binaryPlatforms": [ "windowsx86-64", "linuxx86-64", "linuxarm64", - "osxuniversal" - ], - "simMode": "hwsim" - }, - { - "groupId": "com.ctre.phoenix6.replay", - "artifactId": "tools-replay", - "version": "26.1.1", - "libName": "CTRE_PhoenixTools_Replay", - "headerClassifier": "headers", - "sharedLibrary": true, - "skipInvalidPlatforms": true, - "binaryPlatforms": [ - "windowsx86-64", - "linuxx86-64", - "linuxarm64", - "osxuniversal" + "linuxathena" ], "simMode": "hwsim" }, diff --git a/vendordeps/photonlib.json b/vendordeps/photonlib.json index b0ac8fb4..6279e58e 100644 --- a/vendordeps/photonlib.json +++ b/vendordeps/photonlib.json @@ -1,7 +1,7 @@ { "fileName": "photonlib.json", "name": "photonlib", - "version": "v2026.1.1", + "version": "v2026.2.2", "uuid": "515fe07e-bfc6-11fa-b3de-0242ac130004", "frcYear": "2026", "mavenUrls": [ @@ -13,7 +13,7 @@ { "groupId": "org.photonvision", "artifactId": "photontargeting-cpp", - "version": "v2026.1.1", + "version": "v2026.2.2", "skipInvalidPlatforms": true, "isJar": false, "validPlatforms": [ @@ -28,7 +28,7 @@ { "groupId": "org.photonvision", "artifactId": "photonlib-cpp", - "version": "v2026.1.1", + "version": "v2026.2.2", "libName": "photonlib", "headerClassifier": "headers", "sharedLibrary": true, @@ -43,7 +43,7 @@ { "groupId": "org.photonvision", "artifactId": "photontargeting-cpp", - "version": "v2026.1.1", + "version": "v2026.2.2", "libName": "photontargeting", "headerClassifier": "headers", "sharedLibrary": true, @@ -60,12 +60,12 @@ { "groupId": "org.photonvision", "artifactId": "photonlib-java", - "version": "v2026.1.1" + "version": "v2026.2.2" }, { "groupId": "org.photonvision", "artifactId": "photontargeting-java", - "version": "v2026.1.1" + "version": "v2026.2.2" } ] } diff --git a/vendordeps/yagsl-2026.1.30.json b/vendordeps/yagsl-2026.2.3.1.json similarity index 
64% rename from vendordeps/yagsl-2026.1.30.json rename to vendordeps/yagsl-2026.2.3.1.json index aaee5a3f..b783dcdb 100644 --- a/vendordeps/yagsl-2026.1.30.json +++ b/vendordeps/yagsl-2026.2.3.1.json @@ -1,7 +1,7 @@ { - "fileName": "yagsl-2026.1.30.json", + "fileName": "yagsl-2026.2.3.1.json", "name": "YAGSL", - "version": "2026.1.30", + "version": "2026.2.3.1", "frcYear": "2026", "uuid": "1ccce5a4-acd2-4d18-bca3-4b8047188400", "mavenUrls": [ @@ -13,7 +13,7 @@ { "groupId": "swervelib", "artifactId": "YAGSL-java", - "version": "2026.1.30" + "version": "2026.2.3.1" }, { "groupId": "org.dyn4j", @@ -34,18 +34,6 @@ "offlineFileName": "REVLib.json", "onlineUrl": "https://software-metadata.revrobotics.com/REVLib-2026.json" }, - { - "uuid": "e7900d8d-826f-4dca-a1ff-182f658e98af", - "errorMessage": "Phoenix6 Replay is required!", - "offlineFileName": "Phoenix6-replay-26.1.0.json", - "onlineUrl": "https://maven.ctr-electronics.com/release/com/ctre/phoenix6/latest/Phoenix6-replay-frc2026-latest.json" - }, - { - "uuid": "fbc886a4-2cec-40c0-9835-71086a8cc3df", - "errorMessage": "Phoenix5 Replay Compatibility is required!", - "offlineFileName": "Phoenix5-replay-5.36.0.json", - "onlineUrl": "https://maven.ctr-electronics.com/release/com/ctre/phoenix/Phoenix5-replay-frc2026-latest.json" - }, { "uuid": "60b2694b-9e6e-4026-81ee-6f167946f4b0", "errorMessage": "ThriftyLib is required!",