|
48 | 48 |
|
49 | 49 | import static org.firstinspires.ftc.robotcore.external.navigation.AngleUnit.DEGREES; |
50 | 50 | import static org.firstinspires.ftc.robotcore.external.navigation.AxesOrder.XYZ; |
51 | | -import static org.firstinspires.ftc.robotcore.external.navigation.AxesOrder.YZX; |
| 51 | +import static org.firstinspires.ftc.robotcore.external.navigation.AxesOrder.XZY; |
52 | 52 | import static org.firstinspires.ftc.robotcore.external.navigation.AxesReference.EXTRINSIC; |
53 | | -import static org.firstinspires.ftc.robotcore.external.navigation.VuforiaLocalizer.CameraDirection.BACK; |
54 | 53 |
|
55 | 54 | /** |
56 | 55 | * This 2020-2021 OpMode illustrates the basics of using the Vuforia localizer to determine |
|
89 | 88 | @Disabled |
90 | 89 | public class ConceptVuforiaUltimateGoalNavigationWebcam extends LinearOpMode { |
91 | 90 |
|
92 | | - // IMPORTANT: If you are using a USB WebCam, you must select CAMERA_CHOICE = BACK; and PHONE_IS_PORTRAIT = false; |
93 | | - private static final VuforiaLocalizer.CameraDirection CAMERA_CHOICE = BACK; |
94 | | - private static final boolean PHONE_IS_PORTRAIT = false ; |
95 | | - |
96 | 91 | /* |
97 | 92 | * IMPORTANT: You need to obtain your own license key to use Vuforia. The string below with which |
98 | 93 | * 'parameters.vuforiaLicenseKey' is initialized is for illustration only, and will not function. |
@@ -140,8 +135,9 @@ public class ConceptVuforiaUltimateGoalNavigationWebcam extends LinearOpMode { |
140 | 135 |
|
141 | 136 | /* |
142 | 137 | * Configure Vuforia by creating a Parameter object, and passing it to the Vuforia engine. |
143 | | - * We can pass Vuforia the handle to a camera preview resource (on the RC phone); |
| 138 | + * We can pass Vuforia the handle to a camera preview resource (on the RC screen); |
144 | 139 | * If no camera monitor is desired, use the parameter-less constructor instead (commented out below). |
| 140 | + * Note: A preview window is required if you want to view the camera stream on the Driver Station Phone. |
145 | 141 | */ |
146 | 142 | int cameraMonitorViewId = hardwareMap.appContext.getResources().getIdentifier("cameraMonitorViewId", "id", hardwareMap.appContext.getPackageName()); |
147 | 143 | VuforiaLocalizer.Parameters parameters = new VuforiaLocalizer.Parameters(cameraMonitorViewId); |
@@ -207,55 +203,46 @@ public class ConceptVuforiaUltimateGoalNavigationWebcam extends LinearOpMode { |
207 | 203 | .multiplied(Orientation.getRotationMatrix(EXTRINSIC, XYZ, DEGREES, 90, 0, 0))); |
208 | 204 | frontWallTarget.setLocation(OpenGLMatrix |
209 | 205 | .translation(-halfField, 0, mmTargetHeight) |
210 | | - .multiplied(Orientation.getRotationMatrix(EXTRINSIC, XYZ, DEGREES, 90, 0 , 90))); |
| 206 | + .multiplied(Orientation.getRotationMatrix(EXTRINSIC, XYZ, DEGREES, 90, 0, 90))); |
211 | 207 |
|
212 | 208 | // The tower goal targets are located a quarter field length from the ends of the back perimeter wall. |
213 | 209 | blueTowerGoalTarget.setLocation(OpenGLMatrix |
214 | 210 | .translation(halfField, quadField, mmTargetHeight) |
215 | | - .multiplied(Orientation.getRotationMatrix(EXTRINSIC, XYZ, DEGREES, 90, 0 , -90))); |
| 211 | + .multiplied(Orientation.getRotationMatrix(EXTRINSIC, XYZ, DEGREES, 90, 0, -90))); |
216 | 212 | redTowerGoalTarget.setLocation(OpenGLMatrix |
217 | 213 | .translation(halfField, -quadField, mmTargetHeight) |
218 | 214 | .multiplied(Orientation.getRotationMatrix(EXTRINSIC, XYZ, DEGREES, 90, 0, -90))); |
219 | 215 |
|
220 | 216 | // |
221 | 217 | // Create a transformation matrix describing where the phone is on the robot. |
222 | 218 | // |
223 | | - // NOTE !!!! It's very important that you turn OFF your phone's Auto-Screen-Rotation option. |
224 | | - // Lock it into Portrait for these numbers to work. |
225 | | - // |
226 | 219 | // Info: The coordinate frame for the robot looks the same as the field. |
227 | 220 | // The robot's "forward" direction is facing out along X axis, with the LEFT side facing out along the Y axis. |
228 | 221 | // Z is UP on the robot. This equates to a bearing angle of Zero degrees. |
229 | 222 | // |
230 | | - // The phone starts out lying flat, with the screen facing Up and with the physical top of the phone |
231 | | - // pointing to the LEFT side of the Robot. |
232 | | - // The two examples below assume that the camera is facing forward out the front of the robot. |
233 | | - |
234 | | - // We need to rotate the camera around it's long axis to bring the correct camera forward. |
235 | | - if (CAMERA_CHOICE == BACK) { |
236 | | - phoneYRotate = -90; |
237 | | - } else { |
238 | | - phoneYRotate = 90; |
239 | | - } |
240 | | - |
241 | | - // Rotate the phone vertical about the X axis if it's in portrait mode |
242 | | - if (PHONE_IS_PORTRAIT) { |
243 | | - phoneXRotate = 90 ; |
244 | | - } |
| 223 | + // For a WebCam, the default starting orientation of the camera is looking UP (pointing in the Z direction), |
| 224 | + // with the wide (horizontal) axis of the camera aligned with the X axis, and |
| 225 | + // the narrow (vertical) axis of the camera aligned with the Y axis |
| 226 | + // |
| 227 | + // But, this example assumes that the camera is actually facing forward out the front of the robot. |
| 228 | + // So, the "default" camera position requires two rotations to get it oriented correctly. |
| 229 | + // 1) First it must be rotated +90 degrees around the X axis to get it horizontal (it's now facing out the right side of the robot) |
| 230 | + // 2) Next it must be rotated +90 degrees (counter-clockwise) around the Z axis to face forward. |
| 231 | + // |
| 232 | + // Finally the camera can be translated to its actual mounting position on the robot. |
| 233 | + // In this example, it is centered (left to right), but 4" forward of the middle of the robot, and 8" above ground level. |
245 | 234 |
|
246 | | - // Next, translate the camera lens to where it is on the robot. |
247 | | - // In this example, it is centered (left to right), but forward of the middle of the robot, and above ground level. |
248 | 235 | final float CAMERA_FORWARD_DISPLACEMENT = 4.0f * mmPerInch; // eg: Camera is 4 Inches in front of robot-center |
249 | 236 | final float CAMERA_VERTICAL_DISPLACEMENT = 8.0f * mmPerInch; // eg: Camera is 8 Inches above ground |
250 | 237 | final float CAMERA_LEFT_DISPLACEMENT = 0; // eg: Camera is ON the robot's center line |
251 | 238 |
|
252 | | - OpenGLMatrix robotFromCamera = OpenGLMatrix |
| 239 | + OpenGLMatrix cameraLocationOnRobot = OpenGLMatrix |
253 | 240 | .translation(CAMERA_FORWARD_DISPLACEMENT, CAMERA_LEFT_DISPLACEMENT, CAMERA_VERTICAL_DISPLACEMENT) |
254 | | - .multiplied(Orientation.getRotationMatrix(EXTRINSIC, YZX, DEGREES, phoneYRotate, phoneZRotate, phoneXRotate)); |
| 241 | + .multiplied(Orientation.getRotationMatrix(EXTRINSIC, XZY, DEGREES, 90, 90, 0)); |
255 | 242 |
|
256 | 243 | /** Let all the trackable listeners know where the phone is. */ |
257 | 244 | for (VuforiaTrackable trackable : allTrackables) { |
258 | | - ((VuforiaTrackableDefaultListener) trackable.getListener()).setPhoneInformation(robotFromCamera, parameters.cameraDirection); |
| 245 | + ((VuforiaTrackableDefaultListener) trackable.getListener()).setCameraLocationOnRobot(parameters.cameraName, cameraLocationOnRobot); |
259 | 246 | } |
260 | 247 |
|
261 | 248 | // WARNING: |
|
0 commit comments