import edu.wpi.first.apriltag.AprilTagFieldLayout;
import edu.wpi.first.apriltag.AprilTagFields;
+ import edu.wpi.first.math.Matrix;
+ import edu.wpi.first.math.VecBuilder;
import edu.wpi.first.math.geometry.Pose2d;
import edu.wpi.first.math.geometry.Pose3d;
import edu.wpi.first.math.geometry.Rotation2d;
import edu.wpi.first.math.geometry.Transform3d;
import edu.wpi.first.math.geometry.Translation2d;
import edu.wpi.first.math.geometry.Translation3d;
+ import edu.wpi.first.math.numbers.N1;
+ import edu.wpi.first.math.numbers.N3;
import edu.wpi.first.math.util.Units;
import edu.wpi.first.wpilibj.smartdashboard.Field2d;
import frc.robot.Robot;
@@ -60,23 +64,26 @@ enum Cameras
           new Rotation3d(0, Math.toRadians(-24.094), Math.toRadians(30)),
           new Translation3d(Units.inchesToMeters(12.056),
                             Units.inchesToMeters(10.981),
-                            Units.inchesToMeters(8.44))),
+                            Units.inchesToMeters(8.44)),
+          VecBuilder.fill(4, 4, 8), VecBuilder.fill(0.5, 0.5, 1)),
  /**
   * Right Camera
   */
  RIGHT_CAM("right",
            new Rotation3d(0, Math.toRadians(-24.094), Math.toRadians(-30)),
            new Translation3d(Units.inchesToMeters(12.056),
                              Units.inchesToMeters(-10.981),
-                             Units.inchesToMeters(8.44))),
+                             Units.inchesToMeters(8.44)),
+           VecBuilder.fill(4, 4, 8), VecBuilder.fill(0.5, 0.5, 1)),
  /**
   * Center Camera
   */
  CENTER_CAM("center",
             new Rotation3d(0, Units.degreesToRadians(18), 0),
             new Translation3d(Units.inchesToMeters(-4.628),
                               Units.inchesToMeters(-10.687),
-                              Units.inchesToMeters(16.129)));
+                              Units.inchesToMeters(16.129)),
+            VecBuilder.fill(4, 4, 8), VecBuilder.fill(0.5, 0.5, 1));
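+  // Note: each std-dev vector above is [x (meters), y (meters), heading (radians)]; smaller values make the
+  // pose estimator trust that camera's measurements more. These numbers are rough placeholders and should be
+  // tuned on the actual robot.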

  /**
   * Transform of the camera rotation and translation relative to the center of the robot
@@ -101,15 +108,22 @@ enum Cameras
   */
  public final PhotonPoseEstimator poseEstimator;

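+  /** Standard deviations used for pose estimation when only a single AprilTag is visible. */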
+  public final Matrix<N3, N1> singleTagStdDevs;
+
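+  /** Standard deviations used for pose estimation when multiple AprilTags are visible. */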
+  public final Matrix<N3, N1> multiTagStdDevs;

  /**
-   * Construct a Photon Camera class with help.
+   * Construct a Photon Camera class with help. Standard deviations are placeholder values;
+   * experiment and determine the actual estimation noise on a real robot.
   *
   * @param name                  Name of the PhotonVision camera found in the PV UI.
   * @param robotToCamRotation    {@link Rotation3d} of the camera.
   * @param robotToCamTranslation {@link Translation3d} relative to the center of the robot.
+   * @param singleTagStdDevs      Standard deviations of the pose estimate from this camera when a single AprilTag is visible.
+   * @param multiTagStdDevsMatrix Standard deviations of the pose estimate from this camera when multiple AprilTags are visible.
   */
- Cameras(String name, Rotation3d robotToCamRotation, Translation3d robotToCamTranslation)
+ Cameras(String name, Rotation3d robotToCamRotation, Translation3d robotToCamTranslation,
+         Matrix<N3, N1> singleTagStdDevs, Matrix<N3, N1> multiTagStdDevsMatrix)
  {
    latencyAlert = new Alert("'" + name + "' Camera is experiencing high latency.", AlertType.WARNING);

@@ -123,6 +137,9 @@ enum Cameras
                                            robotToCamTransform);
    poseEstimator.setMultiTagFallbackStrategy(PoseStrategy.LOWEST_AMBIGUITY);

+   this.singleTagStdDevs = singleTagStdDevs;
+   this.multiTagStdDevs = multiTagStdDevsMatrix;
+
    if (Robot.isSimulation())
    {
      SimCameraProperties cameraProp = new SimCameraProperties();
@@ -211,42 +228,71 @@ public Vision(Supplier<Pose2d> currentPose, Field2d field)
   */
  public void updatePoseEstimation(SwerveDrive swerveDrive)
  {
-   ArrayList<EstimatedRobotPose> estimatedRobotPoses = getEstimatedGlobalPose();
    if (Robot.isReal()) {
-     for (EstimatedRobotPose i : estimatedRobotPoses)
+     for (Cameras camera : Cameras.values())
      {
-       swerveDrive.addVisionMeasurement(i.estimatedPose.toPose2d(), i.timestampSeconds);
+       Optional<EstimatedRobotPose> poseEst = getEstimatedGlobalPose(camera);
+       if (poseEst.isPresent()) {
+         var pose = poseEst.get();
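+         // The third argument supplies per-measurement standard deviations, so each camera's estimate is
+         // weighted by how many tags it sees and how far away they are (see getEstimationStdDevs below).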
+         swerveDrive.addVisionMeasurement(pose.estimatedPose.toPose2d(), pose.timestampSeconds, getEstimationStdDevs(camera));
+       }
      }
    } else visionSim.update(swerveDrive.getPose());
  }

  /**
-   * generates the estimated robot pose. Returns empty if:
+   * Generates the estimated robot pose. Returns empty if:
   * <ul>
   *  <li> No Pose Estimates could be generated</li>
   *  <li> The generated pose estimate was considered not accurate</li>
   * </ul>
   *
+   * @param camera Camera to generate the pose estimate from.
   * @return an {@link EstimatedRobotPose} with an estimated pose, timestamp, and targets used to create the estimate
   */
- public ArrayList<EstimatedRobotPose> getEstimatedGlobalPose()
+ public Optional<EstimatedRobotPose> getEstimatedGlobalPose(Cameras camera)
  {
-   ArrayList<EstimatedRobotPose> poses = new ArrayList<>();
-
-   for (Cameras c : Cameras.values())
+   // Alternative method if you want to use both a pose filter and standard deviations based on distance + tags seen.
+   // Optional<EstimatedRobotPose> poseEst = filterPose(camera.poseEstimator.update());
+   Optional<EstimatedRobotPose> poseEst = camera.poseEstimator.update();
+   if (poseEst.isPresent())
    {
-     Optional<EstimatedRobotPose> poseEst = filterPose(c.poseEstimator.update());
-
-     if (poseEst.isPresent())
-     {
-       poses.add(poseEst.get());
-       field2d.getObject(c + " est pose").setPose(poseEst.get().estimatedPose.toPose2d());
-     }
+     field2d.getObject(camera + " est pose").setPose(poseEst.get().estimatedPose.toPose2d());
    }
-
-   return poses;
+   return poseEst;
  }

+ /**
+  * The standard deviations of the estimated pose from {@link #getEstimatedGlobalPose(Cameras)}, for use
+  * with {@link edu.wpi.first.math.estimator.SwerveDrivePoseEstimator SwerveDrivePoseEstimator}.
+  * This should only be used when there are targets visible.
+  *
+  * @param camera Desired camera to get the standard deviation of the estimated pose from.
+  * @return Standard deviations to use for the vision measurement, scaled by visible tag count and distance.
+  */
+ public Matrix<N3, N1> getEstimationStdDevs(Cameras camera) {
+   var poseEst = getEstimatedGlobalPose(camera);
+   var estStdDevs = camera.singleTagStdDevs;
+   var targets = getLatestResult(camera).getTargets();
+   int numTags = 0;
+   double avgDist = 0;
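+   // Count how many of the visible targets map to known field tags and accumulate the distance from the
+   // estimated robot pose to each of those tags.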
+   for (var tgt : targets) {
+     var tagPose = camera.poseEstimator.getFieldTags().getTagPose(tgt.getFiducialId());
+     if (tagPose.isEmpty()) continue;
+     numTags++;
+     if (poseEst.isPresent()) {
+       avgDist += PhotonUtils.getDistanceToPose(poseEst.get().estimatedPose.toPose2d(), tagPose.get().toPose2d());
+     }
+   }
+   if (numTags == 0) return estStdDevs;
+   avgDist /= numTags;
+   // Decrease std devs if multiple targets are visible
+   if (numTags > 1) estStdDevs = camera.multiTagStdDevs;
+   // Increase std devs based on (average) distance
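+   // A single tag seen from more than 4 meters is treated as unreliable: MAX_VALUE standard deviations
+   // effectively tell the pose estimator to ignore that measurement.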
+   if (numTags == 1 && avgDist > 4)
+     estStdDevs = VecBuilder.fill(Double.MAX_VALUE, Double.MAX_VALUE, Double.MAX_VALUE);
+   else estStdDevs = estStdDevs.times(1 + (avgDist * avgDist / 30));
+
+   return estStdDevs;
+ }
+
  /**
   * Filter the pose via ambiguity and find the best estimate among all of the cameras, throwing out estimates more
   * than 10m away for a short amount of time.