Advertisement
Guest User

vuforia code

a guest
Nov 21st, 2019
340
0
Never
Not a member of Pastebin yet? Sign up — it unlocks many cool features!
text 39.90 KB | None | 0 0
  1. package org.firstinspires.ftc.teamcode;
  2.  
  3. import com.qualcomm.hardware.bosch.BNO055IMU;
  4. import com.qualcomm.hardware.bosch.JustLoggingAccelerationIntegrator;
  5. import com.qualcomm.robotcore.eventloop.opmode.Autonomous;
  6. import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
  7. import com.qualcomm.robotcore.hardware.DcMotor;
  8. import com.qualcomm.robotcore.eventloop.opmode.Disabled;
  9. import com.qualcomm.robotcore.util.ElapsedTime;
  10.  
  11. import org.firstinspires.ftc.robotcore.external.navigation.Acceleration;
  12. import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit;
  13. import org.firstinspires.ftc.robotcore.external.navigation.AxesOrder;
  14. import org.firstinspires.ftc.robotcore.external.navigation.AxesReference;
  15. import org.firstinspires.ftc.robotcore.external.navigation.Orientation;
  16. import org.firstinspires.ftc.robotcore.external.navigation.Position;
  17. import org.firstinspires.ftc.robotcore.external.navigation.Velocity;
  18.  
  19. import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
  20. import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
  21.  
  22. import org.firstinspires.ftc.robotcore.external.ClassFactory;
  23. import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName;
  24. import org.firstinspires.ftc.robotcore.external.matrices.OpenGLMatrix;
  25. import org.firstinspires.ftc.robotcore.external.matrices.VectorF;
  26. import org.firstinspires.ftc.robotcore.external.navigation.Orientation;
  27. import org.firstinspires.ftc.robotcore.external.navigation.VuforiaLocalizer;
  28. import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackable;
  29. import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackableDefaultListener;
  30. import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackables;
  31.  
  32. import java.util.ArrayList;
  33. import java.util.List;
  34.  
  35. import static org.firstinspires.ftc.robotcore.external.navigation.AngleUnit.DEGREES;
  36. import static org.firstinspires.ftc.robotcore.external.navigation.AxesOrder.XYZ;
  37. import static org.firstinspires.ftc.robotcore.external.navigation.AxesOrder.YZX;
  38. import static org.firstinspires.ftc.robotcore.external.navigation.AxesReference.EXTRINSIC;
  39. import static org.firstinspires.ftc.robotcore.external.navigation.VuforiaLocalizer.CameraDirection.BACK;
  40.  
// Base autonomous opmode for the SKYSTONE season: owns the drive/arm hardware map,
// the BNO055 IMU, and the Vuforia configuration shared by the team's auton routines.
  41. @Autonomous(name="AutonDriving", group="Skystone")
  42. public class AutonDriving extends LinearOpMode {
  43.  
  44. /* Declare OpMode members. */
  45. SkyStoneHardware robot = new SkyStoneHardware();
  46. private ElapsedTime runtime = new ElapsedTime();
// Default IMU axis label used by turning helpers ("z" = heading).
  47. String xyz = "z";
  48. //CONTAINS ALL METHODS AND VARIABlES TO BE EXTENDED BY OTHER AUTON CLASSES
// Encoder/geometry constants used to convert inches to encoder ticks.
  49. static final double COUNTS_PER_MOTOR_REV = 1120; // Currently: Andymark Neverest 40
  50. static final double COUNTS_PER_REV_ARM = 1495; //torquenado
  51. static final double PULLEY_DIAMETER = 1.3;
  52. static final double COUNTS_PER_INCH_ARM = COUNTS_PER_REV_ARM/(PULLEY_DIAMETER * Math.PI);
  53. static final double DRIVE_GEAR_REDUCTION = .9; // This is < 1.0 if geared UP //On OUR CENTER MOTOR THE GEAR REDUCTION IS .5
  54. static final double WHEEL_DIAMETER_INCHES = 3.54331; // For figuring circumference
  55. static final double COUNTS_PER_INCH = (COUNTS_PER_MOTOR_REV * DRIVE_GEAR_REDUCTION) /
  56. (WHEEL_DIAMETER_INCHES * Math.PI);
  57.  
// Inertial measurement unit; configured and initialized in runOpMode().
  58. BNO055IMU imu;
  59.  
  60. private static final VuforiaLocalizer.CameraDirection CAMERA_CHOICE = BACK;
  61. private static final boolean PHONE_IS_PORTRAIT = false ;
  62.  
// NOTE(review): a Vuforia license key is a credential; consider loading it from a
// resource/config file instead of committing it to version-controlled source.
  63. private static final String VUFORIA_KEY =
  64. "AYy6NYn/////AAABmTW3q+TyLUMbg/IXWlIG3BkMMq0okH0hLmwj3CxhPhvUlEZHaOAmESqfePJ57KC2g6UdWLN7OYvc8ihGAZSUJ2JPWAsHQGv6GUAj4BlrMCjHvqhY0w3tV/Azw2wlPmls4FcUCRTzidzVEDy+dtxqQ7U5ZtiQhjBZetAcnLsCYb58dgwZEjTx2+36jiqcFYvS+FlNJBpbwmnPUyEEb32YBBZj4ra5jB0v4IW4wYYRKTNijAQKxco33VYSCbH0at99SqhXECURA55dtmmJxYpFlT/sMmj0iblOqoG/auapQmmyEEXt/T8hv9StyirabxhbVVSe7fPsAueiXOWVm0kCPO+KN/TyWYB9Hg/mSfnNu9i9";
  65.  
  66. // Since ImageTarget trackables use mm to specifiy their dimensions, we must use mm for all the physical dimension.
  67. // We will define some constants and conversions here
  68. private static final float mmPerInch = 25.4f;
  69. // the height of the center of the target image above the floor
  70.  
  71. // Constant for Stone Target
  72. private static final float stoneZ = 2.00f * mmPerInch;
  73.  
  74. // Constants for the center support targets
  75.  
  76. // Class Members
// Most recent robot pose reported by Vuforia; null until a target has been seen.
  77. private OpenGLMatrix lastLocation = null;
  78. private VuforiaLocalizer vuforia = null;
  79.  
  80. WebcamName webcamName = null;
  81.  
  82. private boolean targetVisible = false;
// Camera-mounting rotations (degrees) fed into the robot-from-camera transform.
  83. private float phoneXRotate = 0;
  84. private float phoneYRotate = 0;
  85. private float phoneZRotate = 0;
  86.  
  87. @Override
// Initializes hardware (IMU, drive encoders, webcam, Vuforia trackables), then
// waits for start and begins IMU acceleration integration. Subclasses are expected
// to extend this class and add their own motion after initialization.
  88. public void runOpMode() {
  89. robot.init(hardwareMap);
// IMU configuration: degrees + m/s^2, with a logging-only integration algorithm.
  90. BNO055IMU.Parameters p = new BNO055IMU.Parameters();
  91. p.angleUnit = BNO055IMU.AngleUnit.DEGREES;
  92. p.accelUnit = BNO055IMU.AccelUnit.METERS_PERSEC_PERSEC;
  93. p.calibrationDataFile = "BNO055IMUCalibration.json";
  94. p.loggingEnabled = true;
  95. p.loggingTag = "IMU";
  96. p.accelerationIntegrationAlgorithm = new JustLoggingAccelerationIntegrator();
  97. imu = hardwareMap.get(BNO055IMU.class, "imu");
  98. imu.initialize(p);
  99.  
  100. //side motors
// Zero the encoders, then return to closed-loop velocity mode.
  101. robot.fLMotor.setMode(DcMotor.RunMode.STOP_AND_RESET_ENCODER);
  102. robot.fRMotor.setMode(DcMotor.RunMode.STOP_AND_RESET_ENCODER);
  103.  
  104. robot.fLMotor.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
  105. robot.fRMotor.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
  106.  
  107. //lateral motors
  108. robot.bLMotor.setMode(DcMotor.RunMode.STOP_AND_RESET_ENCODER);
  109. robot.bRMotor.setMode(DcMotor.RunMode.STOP_AND_RESET_ENCODER);
  110.  
  111. robot.bLMotor.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
  112. robot.bRMotor.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
  113.  
  114. webcamName = hardwareMap.get(WebcamName.class, "Webcam 1");
  115.  
// Route the Vuforia preview to the camera monitor view on the RC screen.
  116. int cameraMonitorViewId = hardwareMap.appContext.getResources().getIdentifier("cameraMonitorViewId", "id", hardwareMap.appContext.getPackageName());
  117. VuforiaLocalizer.Parameters parameters = new VuforiaLocalizer.Parameters(cameraMonitorViewId);
  118.  
  119. // VuforiaLocalizer.Parameters parameters = new VuforiaLocalizer.Parameters();
  120.  
  121. parameters.vuforiaLicenseKey = VUFORIA_KEY;
  122.  
  123. parameters.cameraName = webcamName;
  124.  
  125. // Instantiate the Vuforia engine
  126. vuforia = ClassFactory.getInstance().createVuforia(parameters);
  127.  
  128. // Load the data sets for the trackable objects. These particular data
  129. // sets are stored in the 'assets' part of our application.
  130. VuforiaTrackables targetsSkyStone = this.vuforia.loadTrackablesFromAsset("Skystone");
  131.  
  132. VuforiaTrackable stoneTarget = targetsSkyStone.get(0);
  133. stoneTarget.setName("Stone Target");
  134.  
  135. // For convenience, gather together all the trackable objects in one easily-iterable collection */
  136. List<VuforiaTrackable> allTrackables = new ArrayList<VuforiaTrackable>();
  137. allTrackables.addAll(targetsSkyStone);
  138.  
// Field location of the stone target (mm): stoneZ above the floor, rotated so the
// image faces the camera.
  139. stoneTarget.setLocation(OpenGLMatrix
  140. .translation(0, 0, stoneZ)
  141. .multiplied(Orientation.getRotationMatrix(EXTRINSIC, XYZ, DEGREES, 90, 0, -90)));
  142.  
  143. if (CAMERA_CHOICE == BACK) {
  144. phoneYRotate = -90;
  145. } else {
  146. phoneYRotate = 90;
  147. }
  148.  
  149. // Rotate the phone vertical about the X axis if it's in portrait mode
  150. if (PHONE_IS_PORTRAIT) {
  151. phoneXRotate = 90 ;
  152. }
  153.  
  154. // Next, translate the camera lens to where it is on the robot.
  155. // In this example, it is centered (left to right), but forward of the middle of the robot, and above ground level.
  156. final float CAMERA_FORWARD_DISPLACEMENT = 4.0f * mmPerInch; // eg: Camera is 4 Inches in front of robot-center
  157. final float CAMERA_VERTICAL_DISPLACEMENT = 8.0f * mmPerInch; // eg: Camera is 8 Inches above ground
  158. final float CAMERA_LEFT_DISPLACEMENT = 0; // eg: Camera is ON the robot's center line
  159.  
  160. OpenGLMatrix robotFromCamera = OpenGLMatrix
  161. .translation(CAMERA_FORWARD_DISPLACEMENT, CAMERA_LEFT_DISPLACEMENT, CAMERA_VERTICAL_DISPLACEMENT)
  162. .multiplied(Orientation.getRotationMatrix(EXTRINSIC, YZX, DEGREES, phoneYRotate, phoneZRotate, phoneXRotate));
  163.  
  164. /** Let all the trackable listeners know where the phone is. */
  165. for (VuforiaTrackable trackable : allTrackables) {
  166. ((VuforiaTrackableDefaultListener) trackable.getListener()).setPhoneInformation(robotFromCamera, parameters.cameraDirection);
  167. }
  168.  
  169.  
  170. waitForStart();
  171. imu.startAccelerationIntegration(new Position(), new Velocity(), 1000);
  172.  
// NOTE(review): targetsSkyStone/allTrackables are local and are never passed to
// vuforia(...) here — presumably a subclass is meant to do that; confirm.
  173.  
  174. }
  175.  
// Tracking loop: activates the SkyStone trackables and, until stop is requested,
// reports on telemetry which target (if any) is visible plus the robot's pose
// (inches / degrees) derived from the last known location. Blocks the opmode for
// its entire duration; deactivates tracking on exit.
  176. public void vuforia(VuforiaTrackables targetsSkyStone, List<VuforiaTrackable> allTrackables)
  177. {
  178. //TODO: RETURN POSITION STRING
  179. targetsSkyStone.activate();
  180. while (!isStopRequested()) {
  181.  
  182. // check all the trackable targets to see which one (if any) is visible.
  183. targetVisible = false;
  184. for (VuforiaTrackable trackable : allTrackables) {
  185. if (((VuforiaTrackableDefaultListener)trackable.getListener()).isVisible()) {
  186. telemetry.addData("Visible Target", trackable.getName());
  187. targetVisible = true;
  188.  
  189. // getUpdatedRobotLocation() will return null if no new information is available since
  190. // the last time that call was made, or if the trackable is not currently visible.
  191. OpenGLMatrix robotLocationTransform = ((VuforiaTrackableDefaultListener)trackable.getListener()).getUpdatedRobotLocation();
  192. if (robotLocationTransform != null) {
  193. lastLocation = robotLocationTransform;
  194. }
// Only the first visible target is used each pass.
  195. break;
  196. }
  197. }
  198.  
  199. // Provide feedback as to where the robot is located (if we know).
  200. if (targetVisible) {
  201. // express position (translation) of robot in inches.
  202. VectorF translation = lastLocation.getTranslation();
  203. telemetry.addData("Pos (in)", "{X, Y, Z} = %.1f, %.1f, %.1f",
  204. translation.get(0) / mmPerInch, translation.get(1) / mmPerInch, translation.get(2) / mmPerInch);
  205.  
  206. // express the rotation of the robot in degrees.
  207. Orientation rotation = Orientation.getOrientation(lastLocation, EXTRINSIC, XYZ, DEGREES);
  208. telemetry.addData("Rot (deg)", "{Roll, Pitch, Heading} = %.0f, %.0f, %.0f", rotation.firstAngle, rotation.secondAngle, rotation.thirdAngle);
  209. }
  210. else {
  211. telemetry.addData("Visible Target", "none");
  212. }
  213. telemetry.update();
  214. }
  215.  
  216. // Disable Tracking when we are done;
  217. targetsSkyStone.deactivate();
  218. }
  219.  
  220. public static double counts(double inches)
  221. {
  222. double newInches = (inches - 3.7959) / 1.1239;
  223. return newInches;
  224. }
  225.  
  226.  
  227.  
  228. public void normalDrive(double lpower, double rpower) {
  229.  
  230. if (opModeIsActive()) {
  231. robot.fLMotor.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
  232. robot.fRMotor.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
  233. robot.bLMotor.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
  234. robot.bRMotor.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
  235. robot.fLMotor.setPower(lpower);
  236. robot.fRMotor.setPower(rpower);
  237. robot.bLMotor.setPower(lpower);
  238. robot.bRMotor.setPower(rpower);
  239. }
  240. }
  241.  
  242.  
  243. public void turnToPosition (double target, String xyz, double topPower, double timeoutS, boolean isCorrection) {
  244. //stopAndReset();
  245. target*= -1;
  246. double originalAngle = readAngle(xyz);
  247.  
  248.  
  249. runtime.reset();
  250.  
  251. double angle = readAngle(xyz); //variable for gyro correction around z axis
  252. double error = angle - target;
  253. double powerScaled = topPower;
  254. do {
  255. angle = readAngle(xyz);
  256. error = angle - target;
  257. if (!isCorrection) {
  258. powerScaled = topPower * (error / 180) * pidMultiplierTurning(error);
  259. }
  260.  
  261. //double powerScaled = power*pidMultiplier(error);
  262. telemetry.addData("original angle", originalAngle);
  263. telemetry.addData("current angle", readAngle(xyz));
  264. telemetry.addData("error", error);
  265. telemetry.update();
  266. if (error > 0) {
  267. if (xyz.equals("z")) {
  268. normalDrive(powerScaled, -powerScaled);
  269. }
  270. if (xyz.equals("y")) {
  271. if (opModeIsActive()) {
  272. robot.fLMotor.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
  273. robot.fRMotor.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
  274. robot.bLMotor.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
  275. robot.bRMotor.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
  276. robot.fLMotor.setPower(powerScaled);
  277. robot.fRMotor.setPower(powerScaled);
  278. robot.bLMotor.setPower(powerScaled);
  279. robot.bRMotor.setPower(powerScaled);
  280. }
  281. }
  282. } else if (error < 0) {
  283. if (xyz.equals("z")) {
  284. normalDrive(powerScaled, -powerScaled);
  285. }
  286. if (xyz.equals("y")) {
  287. if (opModeIsActive()) {
  288. robot.fLMotor.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
  289. robot.fRMotor.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
  290. robot.bLMotor.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
  291. robot.bRMotor.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
  292. robot.fLMotor.setPower(powerScaled);
  293. robot.fRMotor.setPower(powerScaled);
  294. robot.bLMotor.setPower(powerScaled);
  295. robot.bRMotor.setPower(powerScaled);
  296. }
  297. }
  298. }
  299. } while (opModeIsActive() && ((error > .3) || (error < -0.3)) && (runtime.seconds() < timeoutS));
  300. normalDrive(0, 0);
  301. //stopAndReset();
  302.  
  303. }
  304.  
  305. public void stopAndReset()
  306. {
  307. robot.bRMotor.setMode(DcMotor.RunMode.STOP_AND_RESET_ENCODER);
  308. robot.bLMotor.setMode(DcMotor.RunMode.STOP_AND_RESET_ENCODER);
  309. robot.fRMotor.setMode(DcMotor.RunMode.STOP_AND_RESET_ENCODER);
  310. robot.fLMotor.setMode(DcMotor.RunMode.STOP_AND_RESET_ENCODER);
  311. robot.armExt.setMode(DcMotor.RunMode.STOP_AND_RESET_ENCODER);
  312. robot.armLift.setMode(DcMotor.RunMode.STOP_AND_RESET_ENCODER);
  313. //robot.susan.setMode(DcMotor.RunMode.STOP_AND_RESET_ENCODER);
  314. //
  315. robot.bRMotor.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
  316. robot.bLMotor.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
  317. robot.fRMotor.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
  318. robot.fLMotor.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
  319. robot.armExt.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
  320. robot.armLift.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
  321. //robot.susan.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
  322. }
  323.  
  324. public double pidMultiplierDriving(double error) {
  325. //equation for power multiplier is x/sqrt(x^2 + C)
  326. int C = 100;
  327. return Math.abs(error / Math.sqrt((error * error) + C));
  328. }
  329. public double pidMultiplierTurning(double error) {
  330. //equation for power multiplier is x/sqrt(x^2 + C)
  331. double C = .1;
  332. return Math.abs(error / Math.sqrt((error * error) + C));
  333. }
  334.  
  335. public double readAngle(String xyz) {
  336. Orientation angles;
  337. Acceleration gravity;
  338. angles = imu.getAngularOrientation(AxesReference.INTRINSIC, AxesOrder.ZYX, AngleUnit.DEGREES);
  339. if (xyz.equals("x")) {
  340. return angles.thirdAngle;
  341. } else if (xyz.equals("y")) {
  342. return angles.secondAngle;
  343. } else if (xyz.equals("z")) {
  344. return angles.firstAngle;
  345. } else {
  346. return 0;
  347. }
  348. }
  349.  
  350. public void encoderDrive(double inches, String direction, double timeoutS, double topPower)
  351. {
  352. stopAndReset();
  353. int TargetFL = 0;
  354. int TargetFR = 0;
  355. int TargetBL = 0;
  356. int TargetBR = 0;
  357. double errorFL = 0;
  358. double errorFR = 0;
  359. double errorBL = 0;
  360. double errorBR = 0;
  361. double powerFL = 0;
  362. double powerFR = 0;
  363. double powerBL = 0;
  364. double powerBR = 0;
  365.  
  366.  
  367. String heading = direction;
  368.  
  369. // Ensure that the opmode is still active
  370. if (opModeIsActive()) {
  371. if(heading == "f")
  372. {
  373. TargetFL = robot.fLMotor.getCurrentPosition() + (int)( inches* COUNTS_PER_INCH);
  374. TargetFR = robot.fRMotor.getCurrentPosition() + (int)( inches* COUNTS_PER_INCH);
  375. TargetBL = robot.bLMotor.getCurrentPosition() + (int)( inches* COUNTS_PER_INCH);
  376. TargetBR = robot.bRMotor.getCurrentPosition() + (int)( inches* COUNTS_PER_INCH);
  377.  
  378. }
  379.  
  380. else if(heading == "b")
  381. {
  382. TargetFL = robot.fLMotor.getCurrentPosition() - (int)( inches* COUNTS_PER_INCH);
  383. TargetFR = robot.fRMotor.getCurrentPosition() - (int)( inches* COUNTS_PER_INCH);
  384. TargetBL = robot.bLMotor.getCurrentPosition() - (int)( inches* COUNTS_PER_INCH);
  385. TargetBR = robot.bRMotor.getCurrentPosition() - (int)( inches* COUNTS_PER_INCH);
  386.  
  387.  
  388. }
  389.  
  390. else if(heading == "r")
  391. {
  392. TargetFL = robot.fLMotor.getCurrentPosition() + (int)( inches* COUNTS_PER_INCH);
  393. TargetFR = robot.fRMotor.getCurrentPosition() - (int)( inches* COUNTS_PER_INCH);
  394. TargetBL = robot.bLMotor.getCurrentPosition() - (int)( inches* COUNTS_PER_INCH);
  395. TargetBR = robot.bRMotor.getCurrentPosition() + (int)( inches* COUNTS_PER_INCH); //weird should be +
  396.  
  397.  
  398. }
  399.  
  400. else if(heading == "l")
  401. {
  402. TargetFL = robot.fLMotor.getCurrentPosition() - (int)( inches* COUNTS_PER_INCH);
  403. TargetFR = robot.fRMotor.getCurrentPosition() + (int)( inches* COUNTS_PER_INCH);
  404. TargetBL = robot.bLMotor.getCurrentPosition() + (int)( inches* COUNTS_PER_INCH); // weird should be +
  405. TargetBR = robot.bRMotor.getCurrentPosition() - (int)( inches* COUNTS_PER_INCH);
  406.  
  407. }
  408.  
  409. else
  410. {
  411. telemetry.addData("not a valid direction", heading );
  412. }
  413.  
  414.  
  415.  
  416. // Determine new target position, and pass to motor controller
  417.  
  418. robot.fLMotor.setTargetPosition(TargetFL);
  419. robot.fRMotor.setTargetPosition(TargetFR);
  420. robot.bRMotor.setTargetPosition(TargetBR);
  421. robot.bLMotor.setTargetPosition(TargetBL);
  422.  
  423.  
  424. // Turn On RUN_TO_POSITION
  425. robot.fLMotor.setMode(DcMotor.RunMode.RUN_TO_POSITION);
  426. robot.fRMotor.setMode(DcMotor.RunMode.RUN_TO_POSITION);
  427. robot.bRMotor.setMode(DcMotor.RunMode.RUN_TO_POSITION);
  428. robot.bLMotor.setMode(DcMotor.RunMode.RUN_TO_POSITION);
  429.  
  430.  
  431. // reset the timeout time and start motion.
  432. runtime.reset();
  433. /*robot.fLMotor.setPower(Speed);
  434. robot.fRMotor.setPower(Speed);
  435. robot.bRMotor.setPower(Speed);
  436. robot.bLMotor.setPower(Speed);*/
  437.  
  438.  
  439. // keep looping while we are still active, and there is time left, and both motors are running.
  440. // Note: We use (isBusy() && isBusy()) in the loop test, which means that when EITHER motor hits
  441. // its target position, the motion will stop. This is "safer" in the event that the robot will
  442. // always end the motion as soon as possible.
  443. // However, if you require that BOTH motors have finished their moves before the robot continues
  444. // onto the next step, use (isBusy() || isBusy()) in the loop test.
  445. while (opModeIsActive() &&
  446. (runtime.seconds() < timeoutS) && ((robot.fLMotor.isBusy() && robot.fRMotor.isBusy()) && robot.bLMotor.isBusy() && robot.bRMotor.isBusy()))
  447. {
  448. errorFL = TargetFL - robot.fLMotor.getCurrentPosition();
  449. errorFR = TargetFR - robot.fRMotor.getCurrentPosition();
  450. errorBL = TargetBL - robot.bLMotor.getCurrentPosition();
  451. errorBR = TargetBR - robot.bRMotor.getCurrentPosition();
  452.  
  453. powerFL = topPower * pidMultiplierDriving(errorFL);
  454. powerFR = topPower * pidMultiplierDriving(errorFR);
  455. powerBL = topPower * pidMultiplierDriving(errorBL);
  456. powerBR = topPower* pidMultiplierDriving(errorBR);
  457.  
  458. robot.fLMotor.setPower(Math.abs(powerFL));
  459. robot.fRMotor.setPower(Math.abs(powerFR));
  460. robot.bRMotor.setPower(Math.abs(powerBL));
  461. robot.bLMotor.setPower(Math.abs(powerBR));
  462. telemetry.addData("Path1", "Running to %7d :%7d :%7d :%7d", TargetFL, TargetFR, TargetBL, TargetBR);
  463.  
  464. telemetry.addData("Path2", "Running at %7d :%7d :%7d :%7d", robot.fLMotor.getCurrentPosition(), robot.fRMotor.getCurrentPosition(), robot.bLMotor.getCurrentPosition(), robot.bRMotor.getCurrentPosition());
  465. //telemetry.addData("speeds", "Running to %7f :%7f :%7f :%7f", speedfL, speedfR, speedfL, speedbR);
  466. telemetry.update();
  467. }
  468.  
  469. // Stop all motion;
  470. robot.fLMotor.setPower(0);
  471. robot.bLMotor.setPower(0);
  472. robot.fRMotor.setPower(0);
  473. robot.bRMotor.setPower(0);
  474.  
  475. // Turn off RUN_TO_POSITION
  476. robot.bRMotor.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
  477. robot.bLMotor.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
  478. robot.fRMotor.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
  479. robot.fLMotor.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
  480. // sleep(250); // optional pause after each move
  481. }
  482. stopAndReset();
  483. }
  484. public void armExtend(double inches, double topPower, double timeoutS)
  485. {
  486. stopAndReset();
  487. int target = robot.armExt.getCurrentPosition() + (int)(inches * COUNTS_PER_INCH_ARM);
  488.  
  489. robot.armExt.setMode(DcMotor.RunMode.RUN_TO_POSITION);
  490.  
  491. robot.armExt.setTargetPosition(target);
  492.  
  493. runtime.reset();
  494. while(opModeIsActive() && runtime.seconds() < timeoutS && robot.armExt.isBusy())
  495. {
  496. double error = target - robot.armExt.getCurrentPosition();
  497. double power = topPower * pidMultiplierDriving(error);
  498. robot.armExt.setPower(power);
  499. telemetry.addData("Path1", "Running to %7d", target);
  500. telemetry.addData("Path2", "Running at %7d", robot.armExt.getCurrentPosition());
  501. telemetry.update();
  502. }
  503. robot.armExt.setPower(0);
  504. stopAndReset();
  505. }
  506.  
  507. public void armLift(double inches, double topPower, double timeoutS)
  508. {
  509. stopAndReset();
  510. int target = robot.armLift.getCurrentPosition() + (int)(inches * COUNTS_PER_INCH_ARM);
  511.  
  512. robot.armLift.setMode(DcMotor.RunMode.RUN_TO_POSITION);
  513.  
  514. robot.armLift.setTargetPosition(target);
  515.  
  516. runtime.reset();
  517. while(opModeIsActive() && runtime.seconds() < timeoutS && robot.armLift.isBusy())
  518. {
  519. double error = target - robot.armLift.getCurrentPosition();
  520. double power = topPower * pidMultiplierDriving(error);
  521. robot.armLift.setPower(power);
  522. telemetry.addData("Path1", "Running to %7d", target);
  523. telemetry.addData("Path2", "Running at %7d", robot.armLift.getCurrentPosition());
  524. telemetry.update();
  525. }
  526. robot.armLift.setPower(0);
  527. stopAndReset();
  528. }
  529. }
  530.  
  531.  
  532.  
  533.  
  534.  
  535.  
  536.  
  537.  
  538.  
  539.  
  540. /* Copyright (c) 2019 FIRST. All rights reserved.
  541. *
  542. * Redistribution and use in source and binary forms, with or without modification,
  543. * are permitted (subject to the limitations in the disclaimer below) provided that
  544. * the following conditions are met:
  545. *
  546. * Redistributions of source code must retain the above copyright notice, this list
  547. * of conditions and the following disclaimer.
  548. *
  549. * Redistributions in binary form must reproduce the above copyright notice, this
  550. * list of conditions and the following disclaimer in the documentation and/or
  551. * other materials provided with the distribution.
  552. *
  553. * Neither the name of FIRST nor the names of its contributors may be used to endorse or
  554. * promote products derived from this software without specific prior written permission.
  555. *
  556. * NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
  557. * LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
  558. * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
  559. * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
  560. * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
  561. * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
  562. * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
  563. * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
  564. * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
  565. * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
  566. * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
  567. */
  568.  
  569. package org.firstinspires.ftc.teamcode;
  570.  
  571. import com.qualcomm.robotcore.eventloop.opmode.Disabled;
  572. import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
  573. import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
  574.  
  575. import org.firstinspires.ftc.robotcore.external.ClassFactory;
  576. import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName;
  577. import org.firstinspires.ftc.robotcore.external.matrices.OpenGLMatrix;
  578. import org.firstinspires.ftc.robotcore.external.matrices.VectorF;
  579. import org.firstinspires.ftc.robotcore.external.navigation.Orientation;
  580. import org.firstinspires.ftc.robotcore.external.navigation.VuforiaLocalizer;
  581. import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackable;
  582. import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackableDefaultListener;
  583. import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackables;
  584.  
  585. import java.util.ArrayList;
  586. import java.util.List;
  587.  
  588. import static org.firstinspires.ftc.robotcore.external.navigation.AngleUnit.DEGREES;
  589. import static org.firstinspires.ftc.robotcore.external.navigation.AxesOrder.XYZ;
  590. import static org.firstinspires.ftc.robotcore.external.navigation.AxesOrder.YZX;
  591. import static org.firstinspires.ftc.robotcore.external.navigation.AxesReference.EXTRINSIC;
  592. import static org.firstinspires.ftc.robotcore.external.navigation.VuforiaLocalizer.CameraDirection.BACK;
  593.  
  594. /**
  595. * This 2019-2020 OpMode illustrates the basics of using the Vuforia localizer to determine
  596. * positioning and orientation of robot on the SKYSTONE FTC field.
  597. * The code is structured as a LinearOpMode
  598. *
  599. * When images are located, Vuforia is able to determine the position and orientation of the
  600. * image relative to the camera. This sample code then combines that information with a
  601. * knowledge of where the target images are on the field, to determine the location of the camera.
  602. *
  603. * From the Audience perspective, the Red Alliance station is on the right and the
  604. * Blue Alliance Station is on the left.
  605.  
  606. * Eight perimeter targets are distributed evenly around the four perimeter walls
  607. * Four Bridge targets are located on the bridge uprights.
  608. * Refer to the Field Setup manual for more specific location details
  609. *
  610. * A final calculation then uses the location of the camera on the robot to determine the
  611. * robot's location and orientation on the field.
  612. *
  613. * @see VuforiaLocalizer
  614. * @see VuforiaTrackableDefaultListener
  615. * see skystone/doc/tutorial/FTC_FieldCoordinateSystemDefinition.pdf
  616. *
  617. * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
  618. * Remove or comment out the @Disabled line to add this opmode to the Driver Station OpMode list.
  619. *
  620. * IMPORTANT: In order to use this OpMode, you need to obtain your own Vuforia license key as
  621. * is explained below.
  622. */
  623.  
  624. @TeleOp(name="VuforiaTest", group ="Concept")
  625. //@Disabled
  626. public class VuforiaTest extends LinearOpMode {
  627.  
  628. // IMPORTANT: If you are using a USB WebCam, you must select CAMERA_CHOICE = BACK; and PHONE_IS_PORTRAIT = false;
  629. private static final VuforiaLocalizer.CameraDirection CAMERA_CHOICE = BACK;
  630. private static final boolean PHONE_IS_PORTRAIT = false ;
  631.  
  632. /*
  633. * IMPORTANT: You need to obtain your own license key to use Vuforia. The string below with which
  634. * 'parameters.vuforiaLicenseKey' is initialized is for illustration only, and will not function.
  635. * A Vuforia 'Development' license key, can be obtained free of charge from the Vuforia developer
  636. * web site at https://developer.vuforia.com/license-manager.
  637. *
  638. * Vuforia license keys are always 380 characters long, and look as if they contain mostly
  639. * random data. As an example, here is a example of a fragment of a valid key:
  640. * ... yIgIzTqZ4mWjk9wd3cZO9T1axEqzuhxoGlfOOI2dRzKS4T0hQ8kT ...
  641. * Once you've obtained a license key, copy the string from the Vuforia web site
  642. * and paste it in to your code on the next line, between the double quotes.
  643. */
  644. private static final String VUFORIA_KEY =
  645. "AYy6NYn/////AAABmTW3q+TyLUMbg/IXWlIG3BkMMq0okH0hLmwj3CxhPhvUlEZHaOAmESqfePJ57KC2g6UdWLN7OYvc8ihGAZSUJ2JPWAsHQGv6GUAj4BlrMCjHvqhY0w3tV/Azw2wlPmls4FcUCRTzidzVEDy+dtxqQ7U5ZtiQhjBZetAcnLsCYb58dgwZEjTx2+36jiqcFYvS+FlNJBpbwmnPUyEEb32YBBZj4ra5jB0v4IW4wYYRKTNijAQKxco33VYSCbH0at99SqhXECURA55dtmmJxYpFlT/sMmj0iblOqoG/auapQmmyEEXt/T8hv9StyirabxhbVVSe7fPsAueiXOWVm0kCPO+KN/TyWYB9Hg/mSfnNu9i9";
  646.  
  647. // Since ImageTarget trackables use mm to specifiy their dimensions, we must use mm for all the physical dimension.
  648. // We will define some constants and conversions here
  649. private static final float mmPerInch = 25.4f;
  650. // the height of the center of the target image above the floor
  651.  
  652. // Constant for Stone Target
  653. private static final float stoneZ = 2.00f * mmPerInch;
  654.  
  655. // Constants for the center support targets
  656.  
  657. // Class Members
  658. private OpenGLMatrix lastLocation = null;
  659. private VuforiaLocalizer vuforia = null;
  660.  
  661. /**
  662. * This is the webcam we are to use. As with other hardware devices such as motors and
  663. * servos, this device is identified using the robot configuration tool in the FTC application.
  664. */
  665. WebcamName webcamName = null;
  666.  
  667. private boolean targetVisible = false;
  668. private float phoneXRotate = 0;
  669. private float phoneYRotate = 0;
  670. private float phoneZRotate = 0;
  671.  
  672. @Override public void runOpMode() {
  673. /*
  674. * Retrieve the camera we are to use.
  675. */
  676. webcamName = hardwareMap.get(WebcamName.class, "Webcam 1");
  677.  
  678. /*
  679. * Configure Vuforia by creating a Parameter object, and passing it to the Vuforia engine.
  680. * We can pass Vuforia the handle to a camera preview resource (on the RC phone);
  681. * If no camera monitor is desired, use the parameter-less constructor instead (commented out below).
  682. */
  683. int cameraMonitorViewId = hardwareMap.appContext.getResources().getIdentifier("cameraMonitorViewId", "id", hardwareMap.appContext.getPackageName());
  684. VuforiaLocalizer.Parameters parameters = new VuforiaLocalizer.Parameters(cameraMonitorViewId);
  685.  
  686. // VuforiaLocalizer.Parameters parameters = new VuforiaLocalizer.Parameters();
  687.  
  688. parameters.vuforiaLicenseKey = VUFORIA_KEY;
  689.  
  690. /**
  691. * We also indicate which camera on the RC we wish to use.
  692. */
  693. parameters.cameraName = webcamName;
  694.  
  695. // Instantiate the Vuforia engine
  696. vuforia = ClassFactory.getInstance().createVuforia(parameters);
  697.  
  698. // Load the data sets for the trackable objects. These particular data
  699. // sets are stored in the 'assets' part of our application.
  700. VuforiaTrackables targetsSkyStone = this.vuforia.loadTrackablesFromAsset("Skystone");
  701.  
  702. VuforiaTrackable stoneTarget = targetsSkyStone.get(0);
  703. stoneTarget.setName("Stone Target");
  704.  
  705. // For convenience, gather together all the trackable objects in one easily-iterable collection */
  706. List<VuforiaTrackable> allTrackables = new ArrayList<VuforiaTrackable>();
  707. allTrackables.addAll(targetsSkyStone);
  708.  
  709. /**
  710. * In order for localization to work, we need to tell the system where each target is on the field, and
  711. * where the phone resides on the robot. These specifications are in the form of <em>transformation matrices.</em>
  712. * Transformation matrices are a central, important concept in the math here involved in localization.
  713. * See <a href="https://en.wikipedia.org/wiki/Transformation_matrix">Transformation Matrix</a>
  714. * for detailed information. Commonly, you'll encounter transformation matrices as instances
  715. * of the {@link OpenGLMatrix} class.
  716. *
  717. * If you are standing in the Red Alliance Station looking towards the center of the field,
  718. * - The X axis runs from your left to the right. (positive from the center to the right)
  719. * - The Y axis runs from the Red Alliance Station towards the other side of the field
  720. * where the Blue Alliance Station is. (Positive is from the center, towards the BlueAlliance station)
  721. * - The Z axis runs from the floor, upwards towards the ceiling. (Positive is above the floor)
  722. *
  723. * Before being transformed, each target image is conceptually located at the origin of the field's
  724. * coordinate system (the center of the field), facing up.
  725. */
  726.  
  727. // Set the position of the Stone Target. Since it's not fixed in position, assume it's at the field origin.
  728. // Rotated it to to face forward, and raised it to sit on the ground correctly.
  729. // This can be used for generic target-centric approach algorithms
  730. stoneTarget.setLocation(OpenGLMatrix
  731. .translation(0, 0, stoneZ)
  732. .multiplied(Orientation.getRotationMatrix(EXTRINSIC, XYZ, DEGREES, 90, 0, -90)));
  733.  
  734.  
  735. //
  736. // Create a transformation matrix describing where the phone is on the robot.
  737. //
  738. // NOTE !!!! It's very important that you turn OFF your phone's Auto-Screen-Rotation option.
  739. // Lock it into Portrait for these numbers to work.
  740. //
  741. // Info: The coordinate frame for the robot looks the same as the field.
  742. // The robot's "forward" direction is facing out along X axis, with the LEFT side facing out along the Y axis.
  743. // Z is UP on the robot. This equates to a bearing angle of Zero degrees.
  744. //
  745. // The phone starts out lying flat, with the screen facing Up and with the physical top of the phone
  746. // pointing to the LEFT side of the Robot.
  747. // The two examples below assume that the camera is facing forward out the front of the robot.
  748.  
  749. // We need to rotate the camera around it's long axis to bring the correct camera forward.
  750. if (CAMERA_CHOICE == BACK) {
  751. phoneYRotate = -90;
  752. } else {
  753. phoneYRotate = 90;
  754. }
  755.  
  756. // Rotate the phone vertical about the X axis if it's in portrait mode
  757. if (PHONE_IS_PORTRAIT) {
  758. phoneXRotate = 90 ;
  759. }
  760.  
  761. // Next, translate the camera lens to where it is on the robot.
  762. // In this example, it is centered (left to right), but forward of the middle of the robot, and above ground level.
  763. final float CAMERA_FORWARD_DISPLACEMENT = 4.0f * mmPerInch; // eg: Camera is 4 Inches in front of robot-center
  764. final float CAMERA_VERTICAL_DISPLACEMENT = 8.0f * mmPerInch; // eg: Camera is 8 Inches above ground
  765. final float CAMERA_LEFT_DISPLACEMENT = 0; // eg: Camera is ON the robot's center line
  766.  
  767. OpenGLMatrix robotFromCamera = OpenGLMatrix
  768. .translation(CAMERA_FORWARD_DISPLACEMENT, CAMERA_LEFT_DISPLACEMENT, CAMERA_VERTICAL_DISPLACEMENT)
  769. .multiplied(Orientation.getRotationMatrix(EXTRINSIC, YZX, DEGREES, phoneYRotate, phoneZRotate, phoneXRotate));
  770.  
  771. /** Let all the trackable listeners know where the phone is. */
  772. for (VuforiaTrackable trackable : allTrackables) {
  773. ((VuforiaTrackableDefaultListener) trackable.getListener()).setPhoneInformation(robotFromCamera, parameters.cameraDirection);
  774. }
  775.  
  776. // WARNING:
  777. // In this sample, we do not wait for PLAY to be pressed. Target Tracking is started immediately when INIT is pressed.
  778. // This sequence is used to enable the new remote DS Camera Preview feature to be used with this sample.
  779. // CONSEQUENTLY do not put any driving commands in this loop.
  780. // To restore the normal opmode structure, just un-comment the following line:
  781.  
  782. // waitForStart();
  783.  
  784. // Note: To use the remote camera preview:
  785. // AFTER you hit Init on the Driver Station, use the "options menu" to select "Camera Stream"
  786. // Tap the preview window to receive a fresh image.
  787.  
  788. targetsSkyStone.activate();
  789. while (!isStarted()) {
  790.  
  791. // check all the trackable targets to see which one (if any) is visible.
  792. targetVisible = false;
  793. for (VuforiaTrackable trackable : allTrackables) {
  794. if (((VuforiaTrackableDefaultListener)trackable.getListener()).isVisible()) {
  795. telemetry.addData("Visible Target", trackable.getName());
  796. targetVisible = true;
  797.  
  798. // getUpdatedRobotLocation() will return null if no new information is available since
  799. // the last time that call was made, or if the trackable is not currently visible.
  800. OpenGLMatrix robotLocationTransform = ((VuforiaTrackableDefaultListener)trackable.getListener()).getUpdatedRobotLocation();
  801. if (robotLocationTransform != null) {
  802. lastLocation = robotLocationTransform;
  803. }
  804. break;
  805. }
  806. }
  807.  
  808. // Provide feedback as to where the robot is located (if we know).
  809. //TODO: ADD STATEMENTS W/ POSITION VALUE CORRESPONDING TO SKYSTONE POSITION
  810. if (targetVisible) {
  811. // express position (translation) of robot in inches.
  812. VectorF translation = lastLocation.getTranslation();
  813. telemetry.addData("Pos (in)", "{X, Y, Z} = %.1f, %.1f, %.1f",
  814. translation.get(0) / mmPerInch, translation.get(1) / mmPerInch, translation.get(2) / mmPerInch);
  815.  
  816. // express the rotation of the robot in degrees.
  817. Orientation rotation = Orientation.getOrientation(lastLocation, EXTRINSIC, XYZ, DEGREES);
  818. telemetry.addData("Rot (deg)", "{Roll, Pitch, Heading} = %.0f, %.0f, %.0f", rotation.firstAngle, rotation.secondAngle, rotation.thirdAngle);
  819. }
  820. else {
  821. telemetry.addData("Visible Target", "none");
  822. }
  823. telemetry.update();
  824. }
  825. telemetry.addData("stopped", "stopped");
  826. telemetry.update();
  827. sleep(2500);
  828.  
  829. // Disable Tracking when we are done;
  830. targetsSkyStone.deactivate();
  831. }
  832. }
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement