-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathVuforia.java0
311 lines (276 loc) · 17.6 KB
/
Vuforia.java0
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
/* Copyright (c) 2018 FIRST. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted (subject to the limitations in the disclaimer below) provided that
* the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this list
* of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice, this
* list of conditions and the following disclaimer in the documentation and/or
* other materials provided with the distribution.
*
* Neither the name of FIRST nor the names of its contributors may be used to endorse or
* promote products derived from this software without specific prior written permission.
*
* NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
* LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.firstinspires.ftc.teamcode;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.Autonomous;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import org.firstinspires.ftc.robotcore.external.ClassFactory;
import org.firstinspires.ftc.robotcore.external.matrices.OpenGLMatrix;
import org.firstinspires.ftc.robotcore.external.matrices.VectorF;
import org.firstinspires.ftc.robotcore.external.navigation.Orientation;
import org.firstinspires.ftc.robotcore.external.navigation.VuforiaLocalizer;
import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackable;
import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackableDefaultListener;
import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackables;
import static org.firstinspires.ftc.robotcore.external.navigation.AngleUnit.DEGREES;
import static org.firstinspires.ftc.robotcore.external.navigation.AxesOrder.XYZ;
import static org.firstinspires.ftc.robotcore.external.navigation.AxesOrder.YZX;
import static org.firstinspires.ftc.robotcore.external.navigation.AxesReference.EXTRINSIC;
import static org.firstinspires.ftc.robotcore.external.navigation.VuforiaLocalizer.CameraDirection.BACK;
import static org.firstinspires.ftc.robotcore.external.navigation.VuforiaLocalizer.CameraDirection.FRONT;
import java.util.ArrayList;
import java.util.List;
/**
* This 2018-2019 OpMode illustrates the basics of using the Vuforia localizer to determine
* positioning and orientation of robot on the FTC field.
* The code is structured as a LinearOpMode
*
* Vuforia uses the phone's camera to inspect its surroundings, and attempts to locate target images.
*
* When images are located, Vuforia is able to determine the position and orientation of the
* image relative to the camera. This sample code then combines that information with a
* knowledge of where the target images are on the field, to determine the location of the camera.
*
* This example assumes a "square" field configuration where the red and blue alliance stations
* are on opposite walls of each other.
*
* From the Audience perspective, the Red Alliance station is on the right and the
* Blue Alliance Station is on the left.
* The four vision targets are located in the center of each of the perimeter walls with
* the images facing inwards towards the robots:
* - BlueRover is the Mars Rover image target on the wall closest to the blue alliance wall
* - RedFootprint is the Lunar Footprint image target on the wall closest to the red alliance wall
* - FrontCraters is the Lunar Craters image target on the wall closest to the audience
* - BackSpace is the Deep Space image target on the wall farthest from the audience
*
* A final calculation then uses the location of the camera on the robot to determine the
* robot's location and orientation on the field.
*
* @see VuforiaLocalizer
* @see VuforiaTrackableDefaultListener
* see ftc_app/doc/tutorial/FTC_FieldCoordinateSystemDefinition.pdf
*
* Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
* Remove or comment out the @Disabled line to add this opmode to the Driver Station OpMode list.
*
* IMPORTANT: In order to use this OpMode, you need to obtain your own Vuforia license key as
* is explained below.
*/
@Autonomous(name="Concept: Vuforia Rover Nav", group ="Concept")
public class Vuforia extends LinearOpMode {

    /*
     * IMPORTANT: You need to obtain your own license key to use Vuforia; the key
     * below is team-specific and for illustration. A free 'Development' key can be
     * obtained from https://developer.vuforia.com/license-manager. Keys are 380
     * characters long and look like mostly random data; paste yours between the
     * quotes below.
     */
    private static final String VUFORIA_KEY = "AaIK0f//////AAAAGTsgmHszM030skFBvcnAJlNSWaH7oKLZhxAZeCi7ToBGSKkO7T3EvzsRVYQdyDp2X+TFK6TQs+3WoCHkZXDYPQd87f77D6kvcBr8zbJ07Fb31UKiXdUBvX+ZQSV3kBhdAoxhfMa0WPgys7DYaeiOmM49CsNra7nVh05ls0th3h07wwHz3s/PBZnQwpbfr260CDgqBv4e9D79Wg5Ja5p+HAOJvyqg2r/Z5dOyRvVI3f/jPBRZHvDgDF9KTcuJAPoDHxfewmGFOFtiUamRLvcrkK9rw2Vygi7w23HYlzFO7yap+jUk1bv0uWNc0j5HPJDAjqa2ijBN9aVDrxzmFJml5WMA3GJJp8WOd9gkGhtI/BIo";

    // ImageTarget trackables specify their dimensions in millimetres, so every
    // physical dimension below is expressed in mm.
    private static final float mmPerInch       = 25.4f;
    private static final float mmFTCFieldWidth = (12*6) * mmPerInch; // field centre to a perimeter wall
    private static final float mmTargetHeight  = (6) * mmPerInch;    // centre of target image above the floor

    // Which phone camera feeds Vuforia. FRONT is the camera on the screen side;
    // valid choices are BACK or FRONT.
    private static final VuforiaLocalizer.CameraDirection CAMERA_CHOICE = BACK;

    private OpenGLMatrix lastLocation  = null;  // most recent robot pose derived from a target
    private boolean      targetVisible = false; // true while at least one target is in view

    /**
     * {@link #vuforia} holds our instance of the Vuforia localization engine.
     */
    VuforiaLocalizer vuforia;

    @Override public void runOpMode() {
        /*
         * Configure Vuforia via a Parameters object. Passing the RC phone's
         * camera-monitor view id enables an on-screen preview; use the no-arg
         * Parameters() constructor instead if no preview is desired.
         */
        int monitorViewId = hardwareMap.appContext.getResources().getIdentifier(
                "cameraMonitorViewId", "id", hardwareMap.appContext.getPackageName());
        VuforiaLocalizer.Parameters vuforiaParams = new VuforiaLocalizer.Parameters(monitorViewId);
        // VuforiaLocalizer.Parameters vuforiaParams = new VuforiaLocalizer.Parameters();
        vuforiaParams.vuforiaLicenseKey = VUFORIA_KEY;
        vuforiaParams.cameraDirection   = CAMERA_CHOICE;

        // Instantiate the Vuforia engine.
        vuforia = ClassFactory.getInstance().createVuforia(vuforiaParams);

        // Load the Rover Ruckus trackable data set from the app's 'assets' folder
        // and give each of the four wall targets a readable name.
        VuforiaTrackables targetsRoverRuckus = this.vuforia.loadTrackablesFromAsset("RoverRuckus");
        VuforiaTrackable blueRover = targetsRoverRuckus.get(0);
        blueRover.setName("Blue-Rover");
        VuforiaTrackable redFootprint = targetsRoverRuckus.get(1);
        redFootprint.setName("Red-Footprint");
        VuforiaTrackable frontCraters = targetsRoverRuckus.get(2);
        frontCraters.setName("Front-Craters");
        VuforiaTrackable backSpace = targetsRoverRuckus.get(3);
        backSpace.setName("Back-Space");

        // Gather every trackable into one easily-iterable collection.
        List<VuforiaTrackable> allTrackables = new ArrayList<VuforiaTrackable>();
        allTrackables.addAll(targetsRoverRuckus);

        /*
         * Field coordinate system (standing in the Red Alliance Station looking
         * at the field centre): X grows to your right, Y grows away from you
         * towards the Blue Alliance Station, Z grows upward from the floor.
         *
         * Each target image conceptually starts at the field origin, lying flat
         * and facing up. To place it on its wall we rotate it 90 degrees about X
         * to stand it upright, rotate it about Z so it faces the field centre,
         * then translate it to the middle of its perimeter wall at target height.
         * The wallTarget() helper below performs exactly that transformation.
         */
        blueRover.setLocation(wallTarget(0, mmFTCFieldWidth, 0));        // blue wall, no Z spin needed
        redFootprint.setLocation(wallTarget(0, -mmFTCFieldWidth, 180));  // red wall, face back inward
        frontCraters.setLocation(wallTarget(-mmFTCFieldWidth, 0, 90));   // audience-side wall
        backSpace.setLocation(wallTarget(mmFTCFieldWidth, 0, -90));      // wall farthest from audience

        /*
         * Describe where the phone sits on the robot. The robot frame matches the
         * field frame: X forward, Y out the LEFT side, Z up (a bearing of zero).
         *
         * The phone starts lying flat, screen up, with its physical top pointing
         * to the robot's LEFT side — the rotation below is wrong if the phone is
         * flipped. Rotating about the phone's long (Y) axis brings the chosen
         * camera forward: +90 degrees for the FRONT (low-res) camera, -90 for the
         * BACK (high-res) one. Finally the lens is translated to its position on
         * the robot: here centred left-to-right, 110 mm ahead of robot centre and
         * 200 mm above the ground.
         */
        final int CAMERA_FORWARD_DISPLACEMENT  = 110; // mm in front of robot centre
        final int CAMERA_VERTICAL_DISPLACEMENT = 200; // mm above the ground
        final int CAMERA_LEFT_DISPLACEMENT     = 0;   // on the robot's centre line

        OpenGLMatrix phoneLocationOnRobot = OpenGLMatrix
                .translation(CAMERA_FORWARD_DISPLACEMENT, CAMERA_LEFT_DISPLACEMENT, CAMERA_VERTICAL_DISPLACEMENT)
                .multiplied(Orientation.getRotationMatrix(EXTRINSIC, YZX, DEGREES,
                        CAMERA_CHOICE == FRONT ? 90 : -90, 0, 0));

        // Let every trackable's listener know where the phone is.
        for (VuforiaTrackable trackable : allTrackables) {
            ((VuforiaTrackableDefaultListener) trackable.getListener())
                    .setPhoneInformation(phoneLocationOnRobot, vuforiaParams.cameraDirection);
        }

        // Wait for the game to begin.
        telemetry.addData(">", "Press Play to start tracking");
        telemetry.update();
        waitForStart();

        // Start tracking the data set we care about.
        targetsRoverRuckus.activate();

        while (opModeIsActive()) {
            // Scan for the first visible target (if any) and refresh the pose.
            targetVisible = false;
            for (VuforiaTrackable trackable : allTrackables) {
                VuforiaTrackableDefaultListener listener =
                        (VuforiaTrackableDefaultListener) trackable.getListener();
                if (listener.isVisible()) {
                    telemetry.addData("Visible Target", trackable.getName());
                    targetVisible = true;

                    // getUpdatedRobotLocation() returns null when no new
                    // information has arrived since the previous call, or the
                    // trackable is not currently visible.
                    OpenGLMatrix updatedLocation = listener.getUpdatedRobotLocation();
                    if (updatedLocation != null) {
                        lastLocation = updatedLocation;
                    }
                    break;
                }
            }

            // Report where the robot is, if we know.
            if (targetVisible) {
                // Position (translation) of the robot, converted to inches.
                VectorF pos = lastLocation.getTranslation();
                telemetry.addData("Pos (in)", "{X, Y, Z} = %.1f, %.1f, %.1f",
                        pos.get(0) / mmPerInch, pos.get(1) / mmPerInch, pos.get(2) / mmPerInch);

                // Rotation of the robot, in degrees.
                Orientation rot = Orientation.getOrientation(lastLocation, EXTRINSIC, XYZ, DEGREES);
                telemetry.addData("Rot (deg)", "{Roll, Pitch, Heading} = %.0f, %.0f, %.0f",
                        rot.firstAngle, rot.secondAngle, rot.thirdAngle);
            } else {
                telemetry.addData("Visible Target", "none");
            }
            telemetry.update();
        }
    }

    /**
     * Builds the field location of a wall-mounted target image: the image is
     * rotated 90 degrees about X to stand upright, rotated {@code zDegrees}
     * about Z to face the field centre, then translated to ({@code x},
     * {@code y}) at {@link #mmTargetHeight} above the floor.
     */
    private OpenGLMatrix wallTarget(float x, float y, float zDegrees) {
        return OpenGLMatrix
                .translation(x, y, mmTargetHeight)
                .multiplied(Orientation.getRotationMatrix(EXTRINSIC, XYZ, DEGREES, 90, 0, zDegrees));
    }
}