forked from Team254/FRC-2019-Public
-
Notifications
You must be signed in to change notification settings - Fork 0
/
RobotState.java
330 lines (271 loc) · 13.6 KB
/
RobotState.java
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
package com.team254.frc2019;
import com.team254.frc2019.statemachines.SuperstructureCommands;
import com.team254.frc2019.subsystems.EndEffector;
import com.team254.frc2019.subsystems.Limelight;
import com.team254.lib.geometry.Pose2d;
import com.team254.lib.geometry.Rotation2d;
import com.team254.lib.geometry.Translation2d;
import com.team254.lib.geometry.Twist2d;
import com.team254.lib.util.InterpolatingDouble;
import com.team254.lib.util.InterpolatingTreeMap;
import com.team254.lib.util.MovingAverageTwist2d;
import com.team254.lib.vision.AimingParameters;
import com.team254.lib.vision.GoalTracker;
import com.team254.lib.vision.GoalTracker.TrackReportComparator;
import com.team254.lib.vision.TargetInfo;
import edu.wpi.first.wpilibj.Timer;
import edu.wpi.first.wpilibj.smartdashboard.SmartDashboard;
import java.util.*;
public class RobotState {
    private static RobotState mInstance;

    /**
     * Returns the process-wide singleton, lazily constructing it on first use.
     *
     * <p>Synchronized so two threads cannot race through the null check and
     * construct two instances — every other member of this class is already
     * {@code synchronized}, and this accessor was the one unguarded entry point.
     */
    public static synchronized RobotState getInstance() {
        if (mInstance == null) {
            mInstance = new RobotState();
        }
        return mInstance;
    }

    // Maximum number of timestamped pose/rotation observations retained in each
    // interpolating buffer.
    private static final int kObservationBufferSize = 100;

    /*
     * RobotState keeps track of the poses of various coordinate frames throughout
     * the match. A coordinate frame is simply a point and direction in space that
     * defines an (x,y) coordinate system. Transforms (or poses) keep track of the
     * spatial relationship between different frames.
     *
     * Robot frames of interest (from parent to child):
     *
     * 1. Field frame: origin is where the robot is turned on.
     *
     * 2. Vehicle frame: origin is the center of the robot wheelbase, facing
     * forwards
     *
     * 3. Turret frame: origin is the center of the turret, which is coincident with
     * the origin of the vehicle frame but with potentially different angle.
     *
     * 4. Camera frame: origin is the center of the Limelight imager relative to the
     * turret.
     *
     * 5. Goal frame: origin is the center of the vision target, facing outwards
     * along the normal. Also note that there can be multiple goal frames.
     *
     * As a kinematic chain with 5 frames, there are 4 transforms of interest:
     *
     * 1. Field-to-vehicle: This is tracked over time by integrating encoder and
     * gyro measurements. It will inevitably drift, but is usually accurate over
     * short time periods.
     *
     * 2. Vehicle-to-turret: Measured by the turret encoder. This is a pure
     * rotation.
     *
     * 3. Turret-to-camera: This is a constant (per camera).
     *
     * 4. Camera-to-goal: Measured by the vision system.
     */

    // FPGATimestamp -> Pose2d or Rotation2d
    private InterpolatingTreeMap<InterpolatingDouble, Pose2d> field_to_vehicle_;
    private InterpolatingTreeMap<InterpolatingDouble, Rotation2d> vehicle_to_turret_;
    // Velocity we commanded / expect (used for lookahead prediction).
    private Twist2d vehicle_velocity_predicted_;
    // Velocity derived from sensors on the most recent observation.
    private Twist2d vehicle_velocity_measured_;
    // Moving average of measured velocity, with outlier rejection on dtheta.
    private MovingAverageTwist2d vehicle_velocity_measured_filtered_;
    // Accumulated dx of all displacement observations since the last reset.
    private double distance_driven_;

    // Separate trackers for the two target heights (hatch-level vs. port-level).
    private GoalTracker vision_target_low_ = new GoalTracker();
    private GoalTracker vision_target_high_ = new GoalTracker();

    // Scratch lists reused every vision frame to avoid per-frame allocation.
    List<Translation2d> mCameraToVisionTargetPosesLow = new ArrayList<>();
    List<Translation2d> mCameraToVisionTargetPosesHigh = new ArrayList<>();

    private RobotState() {
        reset(0.0, Pose2d.identity(), Rotation2d.identity());
    }

    /**
     * Resets the field to robot transform (robot's position on the field)
     * and the vehicle-to-turret rotation history.
     *
     * @param start_time timestamp (seconds) to record the initial observations at
     * @param initial_field_to_vehicle robot pose in the field frame at start_time
     * @param initial_vehicle_to_turret turret rotation relative to the vehicle at start_time
     */
    public synchronized void reset(double start_time, Pose2d initial_field_to_vehicle,
                                   Rotation2d initial_vehicle_to_turret) {
        reset(start_time, initial_field_to_vehicle);
        vehicle_to_turret_ = new InterpolatingTreeMap<>(kObservationBufferSize);
        vehicle_to_turret_.put(new InterpolatingDouble(start_time), initial_vehicle_to_turret);
    }

    /**
     * Resets the field-to-vehicle history, velocity estimates, and distance driven.
     * Does NOT touch the vehicle-to-turret history (use the 3-arg overload for that).
     */
    public synchronized void reset(double start_time, Pose2d initial_field_to_vehicle) {
        field_to_vehicle_ = new InterpolatingTreeMap<>(kObservationBufferSize);
        field_to_vehicle_.put(new InterpolatingDouble(start_time), initial_field_to_vehicle);
        vehicle_velocity_predicted_ = Twist2d.identity();
        vehicle_velocity_measured_ = Twist2d.identity();
        vehicle_velocity_measured_filtered_ = new MovingAverageTwist2d(25);
        distance_driven_ = 0.0;
    }

    /** Resets all pose history to identity at the current FPGA time. */
    public synchronized void reset() {
        reset(Timer.getFPGATimestamp(), Pose2d.identity(), Rotation2d.identity());
    }

    /**
     * Returns the robot's position on the field at a certain time. Linearly interpolates between stored robot positions
     * to fill in the gaps.
     */
    public synchronized Pose2d getFieldToVehicle(double timestamp) {
        return field_to_vehicle_.getInterpolated(new InterpolatingDouble(timestamp));
    }

    /** Returns the (interpolated) turret rotation relative to the vehicle at {@code timestamp}. */
    public synchronized Rotation2d getVehicleToTurret(double timestamp) {
        return vehicle_to_turret_.getInterpolated(new InterpolatingDouble(timestamp));
    }

    /** Returns the turret pose in the field frame at {@code timestamp} (pure rotation on top of the vehicle pose). */
    public synchronized Pose2d getFieldToTurret(double timestamp) {
        return getFieldToVehicle(timestamp).transformBy(Pose2d.fromRotation(getVehicleToTurret(timestamp)));
    }

    /** Returns the most recent (timestamp, field-to-vehicle pose) observation. */
    public synchronized Map.Entry<InterpolatingDouble, Pose2d> getLatestFieldToVehicle() {
        return field_to_vehicle_.lastEntry();
    }

    /** Returns the most recent (timestamp, vehicle-to-turret rotation) observation. */
    public synchronized Map.Entry<InterpolatingDouble, Rotation2d> getLatestVehicleToTurret() {
        return vehicle_to_turret_.lastEntry();
    }

    /**
     * Extrapolates the latest pose forward by {@code lookahead_time} seconds using
     * the predicted velocity (constant-twist extrapolation via Pose2d.exp).
     */
    public synchronized Pose2d getPredictedFieldToVehicle(double lookahead_time) {
        return getLatestFieldToVehicle().getValue()
                .transformBy(Pose2d.exp(vehicle_velocity_predicted_.scaled(lookahead_time)));
    }

    public synchronized void addFieldToVehicleObservation(double timestamp, Pose2d observation) {
        field_to_vehicle_.put(new InterpolatingDouble(timestamp), observation);
    }

    public synchronized void addVehicleToTurretObservation(double timestamp, Rotation2d observation) {
        vehicle_to_turret_.put(new InterpolatingDouble(timestamp), observation);
    }

    /**
     * Integrates one odometry sample into the pose history and updates velocity
     * estimates.
     *
     * @param timestamp          FPGA time of the sample
     * @param displacement       twist the vehicle moved through since the last sample
     * @param measured_velocity  velocity derived from sensors
     * @param predicted_velocity velocity we expect (from commanded output)
     */
    public synchronized void addObservations(double timestamp, Twist2d displacement, Twist2d measured_velocity,
                                             Twist2d predicted_velocity) {
        distance_driven_ += displacement.dx;
        addFieldToVehicleObservation(timestamp,
                Kinematics.integrateForwardKinematics(getLatestFieldToVehicle().getValue(), displacement));
        vehicle_velocity_measured_ = measured_velocity;
        if (Math.abs(vehicle_velocity_measured_.dtheta) < 2.0 * Math.PI) {
            vehicle_velocity_measured_filtered_.add(vehicle_velocity_measured_);
        } else {
            // Reject really high angular velocities from the filter: keep the
            // translational component but zero out dtheta.
            vehicle_velocity_measured_filtered_.add(
                    new Twist2d(vehicle_velocity_measured_.dx, vehicle_velocity_measured_.dy, 0.0));
        }
        vehicle_velocity_predicted_ = predicted_velocity;
    }

    public synchronized double getDistanceDriven() {
        return distance_driven_;
    }

    public synchronized void resetDistanceDriven() {
        distance_driven_ = 0.0;
    }

    public synchronized Twist2d getPredictedVelocity() {
        return vehicle_velocity_predicted_;
    }

    public synchronized Twist2d getMeasuredVelocity() {
        return vehicle_velocity_measured_;
    }

    /** Returns the moving-average-filtered measured velocity. */
    public synchronized Twist2d getSmoothedVelocity() {
        return vehicle_velocity_measured_filtered_.getAverage();
    }

    /** Clears all tracks from both goal trackers. */
    public synchronized void resetVision() {
        vision_target_low_.reset();
        vision_target_high_.reset();
    }

    /**
     * Converts a raw camera observation into a camera-frame translation to the
     * vision target, or {@code null} if the geometry is inconsistent (the ray
     * from the lens cannot intersect the target plane).
     *
     * @param target raw angles reported by the Limelight
     * @param high   true for the high (port) target height, false for the low (hatch) height
     * @param source the Limelight that produced the observation (supplies lens height/pitch)
     */
    private Translation2d getCameraToVisionTargetPose(TargetInfo target, boolean high, Limelight source) {
        // Compensate for camera pitch
        Translation2d xz_plane_translation = new Translation2d(target.getX(), target.getZ()).rotateBy(source.getHorizontalPlaneToLens());
        double x = xz_plane_translation.x();
        double y = target.getY();
        double z = xz_plane_translation.y();

        // find intersection with the goal
        double differential_height = source.getLensHeight() - (high ? Constants.kPortTargetHeight : Constants.kHatchTargetHeight);
        if ((z < 0.0) == (differential_height > 0.0)) {
            double scaling = differential_height / -z;
            double distance = Math.hypot(x, y) * scaling;
            Rotation2d angle = new Rotation2d(x, y, true);
            return new Translation2d(distance * angle.cos(), distance * angle.sin());
        }
        return null;
    }

    /**
     * Feeds the tracker with the midpoint of the two corner observations of a
     * port-style target. Requires exactly two non-null corner poses; otherwise
     * the frame is dropped.
     */
    private void updatePortGoalTracker(double timestamp, List<Translation2d> cameraToVisionTargetPoses, GoalTracker tracker, Limelight source) {
        if (cameraToVisionTargetPoses.size() != 2 ||
                cameraToVisionTargetPoses.get(0) == null ||
                cameraToVisionTargetPoses.get(1) == null) return;
        Pose2d cameraToVisionTarget = Pose2d.fromTranslation(cameraToVisionTargetPoses.get(0).interpolate(
                cameraToVisionTargetPoses.get(1), 0.5));
        Pose2d fieldToVisionTarget = getFieldToTurret(timestamp).transformBy(source.getTurretToLens()).transformBy(cameraToVisionTarget);
        tracker.update(timestamp, List.of(new Pose2d(fieldToVisionTarget.getTranslation(), Rotation2d.identity())));
    }

    /**
     * Ingests one vision frame. An empty/null observation list still updates
     * both trackers (with no detections) so their track ages advance.
     */
    public synchronized void addVisionUpdate(double timestamp, List<TargetInfo> observations, Limelight source) {
        mCameraToVisionTargetPosesLow.clear();
        mCameraToVisionTargetPosesHigh.clear();

        if (observations == null || observations.isEmpty()) {
            vision_target_low_.update(timestamp, new ArrayList<>());
            vision_target_high_.update(timestamp, new ArrayList<>());
            return;
        }

        for (TargetInfo target : observations) {
            mCameraToVisionTargetPosesLow.add(getCameraToVisionTargetPose(target, false, source));
            mCameraToVisionTargetPosesHigh.add(getCameraToVisionTargetPose(target, true, source));
        }

        updatePortGoalTracker(timestamp, mCameraToVisionTargetPosesLow, vision_target_low_, source);
        updatePortGoalTracker(timestamp, mCameraToVisionTargetPosesHigh, vision_target_high_, source);
    }

    // use known field target orientations to compensate for inaccuracy, assumes robot starts pointing directly away
    // from and perpendicular to alliance wall
    private final double[] kPossibleTargetNormals = {0.0, 90.0, 180.0, 270.0, 30.0, 150.0, 210.0, 330.0};

    /**
     * Returns the tracked vision target pose in the field frame with its normal
     * snapped to the nearest known field-target orientation, or {@code null} if
     * no track exists.
     */
    public synchronized Pose2d getFieldToVisionTarget(boolean highTarget) {
        GoalTracker tracker = highTarget ? vision_target_high_ : vision_target_low_;
        if (!tracker.hasTracks()) {
            return null;
        }

        Pose2d fieldToTarget = tracker.getTracks().get(0).field_to_target;
        // Normalize into [0, 360) robustly (the double-mod form also handles
        // inputs below -360, unlike a single "+360 then %360").
        double normalPositive = ((fieldToTarget.getRotation().getDegrees() % 360) + 360) % 360;
        double normalClamped = kPossibleTargetNormals[0];
        for (double possible : kPossibleTargetNormals) {
            // Compare by true angular distance (shortest way around the circle)
            // so a heading near 360 deg snaps to 0 deg rather than 330 deg.
            if (angularDistanceDegrees(normalPositive, possible)
                    < angularDistanceDegrees(normalPositive, normalClamped)) {
                normalClamped = possible;
            }
        }

        return new Pose2d(fieldToTarget.getTranslation(), Rotation2d.fromDegrees(normalClamped));
    }

    /** Shortest angular distance in degrees between two headings in [0, 360). */
    private static double angularDistanceDegrees(double a, double b) {
        double diff = Math.abs(a - b) % 360.0;
        return diff > 180.0 ? 360.0 - diff : diff;
    }

    /**
     * Returns the snapped vision target pose in the vehicle frame at
     * {@code timestamp}, or {@code null} if there is no track.
     */
    public synchronized Pose2d getVehicleToVisionTarget(double timestamp, boolean highTarget) {
        Pose2d fieldToVisionTarget = getFieldToVisionTarget(highTarget);

        if (fieldToVisionTarget == null) {
            return null;
        }

        return getFieldToVehicle(timestamp).inverse().transformBy(fieldToVisionTarget);
    }

    /**
     * Selects the best sufficiently-recent track and packages it for aiming.
     *
     * @param highTarget    which tracker to query
     * @param prev_track_id id of the previously used track (biases selection toward continuity)
     * @param max_track_age maximum age in seconds for a track to be considered
     * @return aiming parameters, or {@link Optional#empty()} if no usable track
     */
    public synchronized Optional<AimingParameters> getAimingParameters(boolean highTarget, int prev_track_id, double max_track_age) {
        GoalTracker tracker = highTarget ? vision_target_high_ : vision_target_low_;
        List<GoalTracker.TrackReport> reports = tracker.getTracks();
        if (reports.isEmpty()) {
            return Optional.empty();
        }

        double timestamp = Timer.getFPGATimestamp();

        // Find the best track.
        TrackReportComparator comparator = new TrackReportComparator(
                Constants.kTrackStabilityWeight,
                Constants.kTrackAgeWeight,
                Constants.kTrackSwitchingWeight,
                prev_track_id, timestamp);
        reports.sort(comparator);

        GoalTracker.TrackReport report = null;
        for (GoalTracker.TrackReport track : reports) {
            if (track.latest_timestamp > timestamp - max_track_age) {
                report = track;
                break;
            }
        }
        if (report == null) {
            return Optional.empty();
        }

        Pose2d vehicleToGoal = getFieldToVehicle(timestamp).inverse().transformBy(report.field_to_target).transformBy(getVisionTargetToGoalOffset());

        AimingParameters params = new AimingParameters(vehicleToGoal,
                report.field_to_target,
                report.field_to_target.getRotation(),
                report.latest_timestamp, report.stability, report.id);
        return Optional.of(params);
    }

    // NOTE(review): placeholder — always returns an identity pose rather than
    // the tracked robot pose. Callers wanting the real pose should use
    // getLatestFieldToVehicle(); confirm whether this stub is still needed.
    public Pose2d getRobot() {
        return new Pose2d();
    }

    /**
     * True when we should aim at the high (port) target: holding a ball and not
     * in a cargo-ship scoring position.
     */
    public synchronized boolean useHighTarget() {
        return EndEffector.getInstance().getObservedGamePiece() == GamePiece.BALL &&
                !SuperstructureCommands.isInCargoShipPosition();
    }

    /** Offset from the vision target frame to the actual goal frame (currently identity). */
    public synchronized Pose2d getVisionTargetToGoalOffset() {
        // if (SuperstructureCommands.isInCargoShipPosition() && EndEffector.getInstance().getObservedGamePiece() == GamePiece.BALL) {
        //     return Pose2d.fromTranslation(new Translation2d(-6.0, 0.0));
        // }
        return Pose2d.identity();
    }

    public synchronized void outputToSmartDashboard() {
        SmartDashboard.putString("Robot Velocity", getMeasuredVelocity().toString());
    }
}