// Copyright (c) FIRST and other WPILib contributors.
// Open Source Software; you can modify and/or share it under the terms of
// the WPILib BSD license file in the root directory of this project.

package edu.wpi.first.math.estimator;

import edu.wpi.first.math.MatBuilder;
import edu.wpi.first.math.Matrix;
import edu.wpi.first.math.Nat;
import edu.wpi.first.math.StateSpaceUtil;
import edu.wpi.first.math.VecBuilder;
import edu.wpi.first.math.geometry.Pose2d;
import edu.wpi.first.math.geometry.Rotation2d;
import edu.wpi.first.math.kinematics.DifferentialDriveWheelSpeeds;
import edu.wpi.first.math.numbers.N1;
import edu.wpi.first.math.numbers.N3;
import edu.wpi.first.math.numbers.N5;
import edu.wpi.first.util.WPIUtilJNI;
import java.util.function.BiConsumer;

/**
 * This class wraps an {@link edu.wpi.first.math.estimator.UnscentedKalmanFilter Unscented Kalman
 * Filter} to fuse latency-compensated vision measurements with differential drive encoder
 * measurements. It will correct for noisy vision measurements and encoder drift. It is intended to
 * be an easy drop-in for {@link edu.wpi.first.math.kinematics.DifferentialDriveOdometry}; in fact,
 * if you never call {@link DifferentialDrivePoseEstimator#addVisionMeasurement} and only call
 * {@link DifferentialDrivePoseEstimator#update} then this will behave exactly the same as
 * DifferentialDriveOdometry.
 *
 * <p>{@link DifferentialDrivePoseEstimator#update} should be called every robot loop (if your robot
 * loops are faster than the default then you should change the {@link
 * DifferentialDrivePoseEstimator#DifferentialDrivePoseEstimator(Rotation2d, Pose2d, Matrix, Matrix,
 * Matrix, double) nominal delta time}). {@link DifferentialDrivePoseEstimator#addVisionMeasurement}
 * can be called as infrequently as you want; if you never call it then this class will behave
 * exactly like regular encoder odometry.
 *
 * <p>The state-space system used internally has the following states (x), inputs (u), and outputs
 * (y):
 *
 * <p><strong> x = [x, y, theta, dist_l, dist_r]ᵀ </strong> in the field coordinate system
 * containing x position, y position, heading, left encoder distance, and right encoder distance.
 *
 * <p><strong> u = [v_l, v_r, dtheta]ᵀ </strong> containing left wheel velocity, right wheel
 * velocity, and change in gyro heading.
 *
 * <p>NB: Using velocities makes things considerably easier, because it means that teams don't have
 * to worry about getting an accurate model. Basically, we suspect that it's easier for teams to get
 * good encoder data than it is for them to perform system identification well enough to get a good
 * model.
 *
 * <p><strong> y = [x, y, theta]ᵀ </strong> from vision containing x position, y position, and
 * heading; or <strong> y = [dist_l, dist_r, theta]ᵀ </strong> containing left encoder position,
 * right encoder position, and gyro heading.
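 *
 * <p>As a rough usage sketch (the {@code m_gyro}, {@code m_leftEncoder}, and {@code m_rightEncoder}
 * objects are hypothetical user-supplied hardware, and the standard deviations are placeholder
 * values that should be tuned for your robot):
 *
 * <pre>{@code
 * var estimator =
 *     new DifferentialDrivePoseEstimator(
 *         m_gyro.getRotation2d(),
 *         new Pose2d(),
 *         VecBuilder.fill(0.02, 0.02, 0.01, 0.02, 0.02), // [x, y, theta, dist_l, dist_r]
 *         VecBuilder.fill(0.01, 0.01, 0.01), // [dist_l, dist_r, theta]
 *         VecBuilder.fill(0.1, 0.1, 0.1)); // [x, y, theta]
 *
 * // Every robot loop:
 * estimator.update(
 *     m_gyro.getRotation2d(),
 *     new DifferentialDriveWheelSpeeds(m_leftEncoder.getRate(), m_rightEncoder.getRate()),
 *     m_leftEncoder.getDistance(),
 *     m_rightEncoder.getDistance());
 *
 * // Whenever a vision pose arrives (timestamped with Timer.getFPGATimestamp's epoch):
 * estimator.addVisionMeasurement(visionPose, visionTimestampSeconds);
 * }</pre>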
 */
public class DifferentialDrivePoseEstimator {
  final UnscentedKalmanFilter<N5, N3, N3> m_observer; // Package-private to allow for unit testing
  private final BiConsumer<Matrix<N3, N1>, Matrix<N3, N1>> m_visionCorrect;
  private final KalmanFilterLatencyCompensator<N5, N3, N3> m_latencyCompensator;

  private final double m_nominalDt; // Seconds
  private double m_prevTimeSeconds = -1.0;

  private Rotation2d m_gyroOffset;
  private Rotation2d m_previousAngle;

  private Matrix<N3, N3> m_visionContR;

  /**
   * Constructs a DifferentialDrivePoseEstimator.
   *
   * @param gyroAngle The current gyro angle.
   * @param initialPoseMeters The starting pose estimate.
   * @param stateStdDevs Standard deviations of model states. Increase these numbers to trust your
   *     model's state estimates less. This matrix is in the form [x, y, theta, dist_l, dist_r]ᵀ,
   *     with units in meters and radians.
   * @param localMeasurementStdDevs Standard deviations of the encoder and gyro measurements.
   *     Increase these numbers to trust sensor readings from encoders and gyros less. This matrix
   *     is in the form [dist_l, dist_r, theta]ᵀ, with units in meters and radians.
   * @param visionMeasurementStdDevs Standard deviations of the vision measurements. Increase these
   *     numbers to trust global measurements from vision less. This matrix is in the form [x, y,
   *     theta]ᵀ, with units in meters and radians.
   */
  public DifferentialDrivePoseEstimator(
      Rotation2d gyroAngle,
      Pose2d initialPoseMeters,
      Matrix<N5, N1> stateStdDevs,
      Matrix<N3, N1> localMeasurementStdDevs,
      Matrix<N3, N1> visionMeasurementStdDevs) {
    this(
        gyroAngle,
        initialPoseMeters,
        stateStdDevs,
        localMeasurementStdDevs,
        visionMeasurementStdDevs,
        0.02);
  }

  /**
   * Constructs a DifferentialDrivePoseEstimator.
   *
   * @param gyroAngle The current gyro angle.
   * @param initialPoseMeters The starting pose estimate.
   * @param stateStdDevs Standard deviations of model states. Increase these numbers to trust your
   *     model's state estimates less. This matrix is in the form [x, y, theta, dist_l, dist_r]ᵀ,
   *     with units in meters and radians.
   * @param localMeasurementStdDevs Standard deviations of the encoder and gyro measurements.
   *     Increase these numbers to trust sensor readings from encoders and gyros less. This matrix
   *     is in the form [dist_l, dist_r, theta]ᵀ, with units in meters and radians.
   * @param visionMeasurementStdDevs Standard deviations of the vision measurements. Increase these
   *     numbers to trust global measurements from vision less. This matrix is in the form [x, y,
   *     theta]ᵀ, with units in meters and radians.
   * @param nominalDtSeconds The time in seconds between each robot loop.
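   *     <p>For example, a robot whose main loop runs at 100 Hz might pass 0.01 seconds here. A
   *     rough sketch ({@code gyro} and the standard deviation values are placeholders):
   *     <pre>{@code
   *     var estimator =
   *         new DifferentialDrivePoseEstimator(
   *             gyro.getRotation2d(),
   *             new Pose2d(),
   *             VecBuilder.fill(0.02, 0.02, 0.01, 0.02, 0.02),
   *             VecBuilder.fill(0.01, 0.01, 0.01),
   *             VecBuilder.fill(0.1, 0.1, 0.1),
   *             0.01); // 10 ms nominal loop time
   *     }</pre>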
   */
  @SuppressWarnings("ParameterName")
  public DifferentialDrivePoseEstimator(
      Rotation2d gyroAngle,
      Pose2d initialPoseMeters,
      Matrix<N5, N1> stateStdDevs,
      Matrix<N3, N1> localMeasurementStdDevs,
      Matrix<N3, N1> visionMeasurementStdDevs,
      double nominalDtSeconds) {
    m_nominalDt = nominalDtSeconds;

    m_observer =
        new UnscentedKalmanFilter<>(
            Nat.N5(),
            Nat.N3(),
            this::f,
            (x, u) -> VecBuilder.fill(x.get(3, 0), x.get(4, 0), x.get(2, 0)),
            stateStdDevs,
            localMeasurementStdDevs,
            AngleStatistics.angleMean(2),
            AngleStatistics.angleMean(2),
            AngleStatistics.angleResidual(2),
            AngleStatistics.angleResidual(2),
            AngleStatistics.angleAdd(2),
            m_nominalDt);
    m_latencyCompensator = new KalmanFilterLatencyCompensator<>();

    // Initialize vision R
    setVisionMeasurementStdDevs(visionMeasurementStdDevs);

    m_visionCorrect =
        (u, y) ->
            m_observer.correct(
                Nat.N3(),
                u,
                y,
                (x, u1) -> new Matrix<>(x.getStorage().extractMatrix(0, 3, 0, 1)),
                m_visionContR,
                AngleStatistics.angleMean(2),
                AngleStatistics.angleResidual(2),
                AngleStatistics.angleResidual(2),
                AngleStatistics.angleAdd(2));

    m_gyroOffset = initialPoseMeters.getRotation().minus(gyroAngle);
    m_previousAngle = initialPoseMeters.getRotation();
    m_observer.setXhat(fillStateVector(initialPoseMeters, 0.0, 0.0));
  }

  /**
   * Sets the pose estimator's trust of global measurements. This might be used to change trust in
   * vision measurements after the autonomous period, or to change trust as distance to a vision
   * target increases.
   *
   * @param visionMeasurementStdDevs Standard deviations of the vision measurements. Increase these
   *     numbers to trust global measurements from vision less. This matrix is in the form [x, y,
   *     theta]ᵀ, with units in meters and radians.
   */
  public void setVisionMeasurementStdDevs(Matrix<N3, N1> visionMeasurementStdDevs) {
    m_visionContR = StateSpaceUtil.makeCovarianceMatrix(Nat.N3(), visionMeasurementStdDevs);
  }

  @SuppressWarnings({"ParameterName", "MethodName"})
  private Matrix<N5, N1> f(Matrix<N5, N1> x, Matrix<N3, N1> u) {
    // Apply a rotation matrix. Note that we do *not* add x--Runge-Kutta does that for us. The
    // top-left 2x2 block rotates the chassis-relative translational velocity (u[0], u[1]) into the
    // field frame; the remaining identity rows pass the heading rate and the wheel distance rates
    // through unchanged.
    var theta = x.get(2, 0);
    var toFieldRotation =
        new MatBuilder<>(Nat.N5(), Nat.N5())
            .fill(
                Math.cos(theta), -Math.sin(theta), 0, 0, 0,
                Math.sin(theta), Math.cos(theta), 0, 0, 0,
                0, 0, 1, 0, 0,
                0, 0, 0, 1, 0,
                0, 0, 0, 0, 1);
    return toFieldRotation.times(
        VecBuilder.fill(u.get(0, 0), u.get(1, 0), u.get(2, 0), u.get(0, 0), u.get(1, 0)));
  }

  /**
   * Resets the robot's position on the field.
   *
   * <p>You NEED to reset your encoders (to zero) when calling this method.
   *
   * <p>The gyroscope angle does not need to be reset here on the user's robot code. The library
   * automatically takes care of offsetting the gyro angle.
   *
   * @param poseMeters The position on the field that your robot is at.
   * @param gyroAngle The angle reported by the gyroscope.
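   *     <p>A rough sketch of a full reset ({@code m_leftEncoder}, {@code m_rightEncoder}, and
   *     {@code m_gyro} are hypothetical user hardware):
   *     <pre>{@code
   *     m_leftEncoder.reset();
   *     m_rightEncoder.reset();
   *     m_estimator.resetPosition(newPose, m_gyro.getRotation2d());
   *     }</pre>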
   */
  public void resetPosition(Pose2d poseMeters, Rotation2d gyroAngle) {
    // Reset state estimate and error covariance
    m_observer.reset();
    m_latencyCompensator.reset();

    m_observer.setXhat(fillStateVector(poseMeters, 0.0, 0.0));

    m_gyroOffset = getEstimatedPosition().getRotation().minus(gyroAngle);
    m_previousAngle = poseMeters.getRotation();
  }

  /**
   * Gets the pose of the robot at the current time as estimated by the Unscented Kalman Filter.
   *
   * @return The estimated robot pose in meters.
   */
  public Pose2d getEstimatedPosition() {
    return new Pose2d(
        m_observer.getXhat(0), m_observer.getXhat(1), new Rotation2d(m_observer.getXhat(2)));
  }

  /**
   * Adds a vision measurement to the Unscented Kalman Filter. This will correct the odometry pose
   * estimate while still accounting for measurement noise.
   *
   * <p>This method can be called as infrequently as you want, as long as you are calling {@link
   * DifferentialDrivePoseEstimator#update} every loop.
   *
   * @param visionRobotPoseMeters The pose of the robot as measured by the vision camera.
   * @param timestampSeconds The timestamp of the vision measurement in seconds. Note that if you
   *     don't use your own time source by calling {@link
   *     DifferentialDrivePoseEstimator#updateWithTime} then you must use a timestamp with an epoch
   *     since FPGA startup (i.e., the epoch of this timestamp is the same epoch as
   *     Timer.getFPGATimestamp). This means that you should use Timer.getFPGATimestamp as your
   *     time source in this case.
   */
  public void addVisionMeasurement(Pose2d visionRobotPoseMeters, double timestampSeconds) {
    m_latencyCompensator.applyPastGlobalMeasurement(
        Nat.N3(),
        m_observer,
        m_nominalDt,
        StateSpaceUtil.poseTo3dVector(visionRobotPoseMeters),
        m_visionCorrect,
        timestampSeconds);
  }

  /**
   * Adds a vision measurement to the Unscented Kalman Filter. This will correct the odometry pose
   * estimate while still accounting for measurement noise.
   *
   * <p>This method can be called as infrequently as you want, as long as you are calling {@link
   * DifferentialDrivePoseEstimator#update} every loop.
   *
   * <p>Note that the vision measurement standard deviations passed into this method will continue
   * to apply to future measurements until a subsequent call to {@link
   * DifferentialDrivePoseEstimator#setVisionMeasurementStdDevs(Matrix)} or this method.
   *
   * @param visionRobotPoseMeters The pose of the robot as measured by the vision camera.
   * @param timestampSeconds The timestamp of the vision measurement in seconds. Note that if you
   *     don't use your own time source by calling {@link
   *     DifferentialDrivePoseEstimator#updateWithTime} then you must use a timestamp with an epoch
   *     since FPGA startup (i.e., the epoch of this timestamp is the same epoch as
   *     Timer.getFPGATimestamp). This means that you should use Timer.getFPGATimestamp as your
   *     time source in this case.
   * @param visionMeasurementStdDevs Standard deviations of the vision measurements. Increase these
   *     numbers to trust global measurements from vision less. This matrix is in the form [x, y,
   *     theta]ᵀ, with units in meters and radians.
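   *     <p>For example, to trust a vision pose less the farther away the target is (a rough
   *     sketch; the scaling constants are placeholders to tune):
   *     <pre>{@code
   *     double d = distanceToTargetMeters;
   *     m_estimator.addVisionMeasurement(
   *         visionPose, timestampSeconds, VecBuilder.fill(0.1 * d, 0.1 * d, 0.1));
   *     }</pre>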
   */
  public void addVisionMeasurement(
      Pose2d visionRobotPoseMeters,
      double timestampSeconds,
      Matrix<N3, N1> visionMeasurementStdDevs) {
    setVisionMeasurementStdDevs(visionMeasurementStdDevs);
    addVisionMeasurement(visionRobotPoseMeters, timestampSeconds);
  }

  /**
   * Updates the Unscented Kalman Filter using only wheel encoder information. Note that this
   * should be called every loop.
   *
   * @param gyroAngle The current gyro angle.
   * @param wheelVelocitiesMetersPerSecond The velocities of the wheels in meters per second.
   * @param distanceLeftMeters The total distance travelled by the left wheel in meters since the
   *     last time you called {@link DifferentialDrivePoseEstimator#resetPosition}.
   * @param distanceRightMeters The total distance travelled by the right wheel in meters since the
   *     last time you called {@link DifferentialDrivePoseEstimator#resetPosition}.
   * @return The estimated pose of the robot in meters.
   */
  public Pose2d update(
      Rotation2d gyroAngle,
      DifferentialDriveWheelSpeeds wheelVelocitiesMetersPerSecond,
      double distanceLeftMeters,
      double distanceRightMeters) {
    return updateWithTime(
        WPIUtilJNI.now() * 1.0e-6,
        gyroAngle,
        wheelVelocitiesMetersPerSecond,
        distanceLeftMeters,
        distanceRightMeters);
  }

  /**
   * Updates the Unscented Kalman Filter using only wheel encoder information. Note that this
   * should be called every loop.
   *
   * @param currentTimeSeconds Time at which this method was called, in seconds.
   * @param gyroAngle The current gyro angle.
   * @param wheelVelocitiesMetersPerSecond The velocities of the wheels in meters per second.
   * @param distanceLeftMeters The total distance travelled by the left wheel in meters since the
   *     last time you called {@link DifferentialDrivePoseEstimator#resetPosition}.
   * @param distanceRightMeters The total distance travelled by the right wheel in meters since the
   *     last time you called {@link DifferentialDrivePoseEstimator#resetPosition}.
   * @return The estimated pose of the robot in meters.
   */
  @SuppressWarnings({"LocalVariableName", "ParameterName"})
  public Pose2d updateWithTime(
      double currentTimeSeconds,
      Rotation2d gyroAngle,
      DifferentialDriveWheelSpeeds wheelVelocitiesMetersPerSecond,
      double distanceLeftMeters,
      double distanceRightMeters) {
    double dt = m_prevTimeSeconds >= 0 ? currentTimeSeconds - m_prevTimeSeconds : m_nominalDt;
    m_prevTimeSeconds = currentTimeSeconds;

    var angle = gyroAngle.plus(m_gyroOffset);
    // Diff drive forward kinematics:
    // v_c = (v_l + v_r) / 2
    var wheelVels = wheelVelocitiesMetersPerSecond;
    var u =
        VecBuilder.fill(
            (wheelVels.leftMetersPerSecond + wheelVels.rightMetersPerSecond) / 2,
            0,
            angle.minus(m_previousAngle).getRadians() / dt);
    m_previousAngle = angle;

    var localY = VecBuilder.fill(distanceLeftMeters, distanceRightMeters, angle.getRadians());
    m_latencyCompensator.addObserverState(m_observer, u, localY, currentTimeSeconds);
    m_observer.predict(u, dt);
    m_observer.correct(u, localY);

    return getEstimatedPosition();
  }

  private static Matrix<N5, N1> fillStateVector(Pose2d pose, double leftDist, double rightDist) {
    return VecBuilder.fill(
        pose.getTranslation().getX(),
        pose.getTranslation().getY(),
        pose.getRotation().getRadians(),
        leftDist,
        rightDist);
  }
}