/* Copyright (c) 2019 FIRST. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without modification,
 * are permitted (subject to the limitations in the disclaimer below) provided that
 * the following conditions are met:
 *
 * Redistributions of source code must retain the above copyright notice, this list
 * of conditions and the following disclaimer.
 *
 * Redistributions in binary form must reproduce the above copyright notice, this
 * list of conditions and the following disclaimer in the documentation and/or
 * other materials provided with the distribution.
 *
 * Neither the name of FIRST nor the names of its contributors may be used to endorse or
 * promote products derived from this software without specific prior written permission.
 *
 * NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
 * LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
 * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
 * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
 * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
 * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
  29.  
  30. package com.team16488.opmodes.auto;
  31.  
  32. import android.app.Activity;
  33. import android.graphics.Color;
  34. import android.view.View;
  35.  
  36. import com.qualcomm.robotcore.eventloop.opmode.Autonomous;
  37. import com.qualcomm.robotcore.eventloop.opmode.Disabled;
  38. import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
  39. import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
  40. import com.qualcomm.robotcore.hardware.ColorSensor;
  41. import com.qualcomm.robotcore.hardware.DistanceSensor;
  42. import com.qualcomm.robotcore.util.ElapsedTime;
  43. import com.team16488.library.subsystems.MecanumDrive;
  44. import com.team16488.skystone.Robot;
  45.  
  46. import org.firstinspires.ftc.robotcore.external.ClassFactory;
  47. import org.firstinspires.ftc.robotcore.external.matrices.OpenGLMatrix;
  48. import org.firstinspires.ftc.robotcore.external.matrices.VectorF;
  49. import org.firstinspires.ftc.robotcore.external.navigation.DistanceUnit;
  50. import org.firstinspires.ftc.robotcore.external.navigation.Orientation;
  51. import org.firstinspires.ftc.robotcore.external.navigation.VuforiaLocalizer;
  52. import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackable;
  53. import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackableDefaultListener;
  54. import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackables;
  55.  
  56. import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName;
  57.  
  58. import java.util.ArrayList;
  59. import java.util.List;
  60. import java.lang.Math;
  61. import java.util.Locale;
  62.  
  63.  
  64. import static org.firstinspires.ftc.robotcore.external.navigation.AngleUnit.DEGREES;
  65. import static org.firstinspires.ftc.robotcore.external.navigation.AxesOrder.XYZ;
  66. import static org.firstinspires.ftc.robotcore.external.navigation.AxesOrder.YZX;
  67. import static org.firstinspires.ftc.robotcore.external.navigation.AxesReference.EXTRINSIC;
  68. import static org.firstinspires.ftc.robotcore.external.navigation.VuforiaLocalizer.CameraDirection.BACK;
  69.  
/**
 * This 2019-2020 OpMode illustrates the basics of using the Vuforia localizer to determine
 * the position and orientation of the robot on the SKYSTONE FTC field.
 * The code is structured as a LinearOpMode.
 *
 * When images are located, Vuforia is able to determine the position and orientation of the
 * image relative to the camera.  This sample code then combines that information with
 * knowledge of where the target images are on the field to determine the location of the camera.
 *
 * From the Audience perspective, the Red Alliance station is on the right and the
 * Blue Alliance Station is on the left.
 * Eight perimeter targets are distributed evenly around the four perimeter walls.
 * Four Bridge targets are located on the bridge uprights.
 * Refer to the Field Setup manual for more specific location details.
 *
 * A final calculation then uses the location of the camera on the robot to determine the
 * robot's location and orientation on the field.
 *
 * @see VuforiaLocalizer
 * @see VuforiaTrackableDefaultListener
 * see  skystone/doc/tutorial/FTC_FieldCoordinateSystemDefinition.pdf
 *
 * Use Android Studio to copy this class, and paste it into your team's code folder with a new name.
 * Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
 *
 * IMPORTANT: In order to use this OpMode, you need to obtain your own Vuforia license key, as
 * explained below.
 */

@Autonomous(name = "TestAuto", group = "Linear Opmode")
public class RedVision extends LinearOpMode {

    // IMPORTANT:  For a phone camera, set 1) the camera source and 2) the orientation, based on how your phone is mounted:
    // 1) Camera Source.  Valid choices are:  BACK (behind screen) or FRONT (selfie side)
    // 2) Phone Orientation. Choices are: PHONE_IS_PORTRAIT = true (portrait) or PHONE_IS_PORTRAIT = false (landscape)
    //
    // NOTE: If you are running on a CONTROL HUB with only one USB webcam, you must select CAMERA_CHOICE = BACK; and PHONE_IS_PORTRAIT = false;
    //
    ColorSensor sensorColor;
    DistanceSensor sensorDistance;

    private static final VuforiaLocalizer.CameraDirection CAMERA_CHOICE = BACK;
    private static final boolean PHONE_IS_PORTRAIT = false;

    // State flags for the autonomous sequence
    private boolean align = false;
    private boolean close = false;
    private boolean backedUp = false;
    private boolean otherSide = false;
    private boolean parked = false;
    /*
     * IMPORTANT: You need to obtain your own license key to use Vuforia. The string below with which
     * 'parameters.vuforiaLicenseKey' is initialized is for illustration only, and will not function.
     * A Vuforia 'Development' license key can be obtained free of charge from the Vuforia developer
     * web site at https://developer.vuforia.com/license-manager.
     *
     * Vuforia license keys are always 380 characters long, and look as if they contain mostly
     * random data. As an example, here is a fragment of a valid key:
     *      ... yIgIzTqZ4mWjk9wd3cZO9T1axEqzuhxoGlfOOI2dRzKS4T0hQ8kT ...
     * Once you've obtained a license key, copy the string from the Vuforia web site
     * and paste it into your code on the next line, between the double quotes.
     */
    private static final String VUFORIA_KEY =
            "AQLgl7n/////AAABme+dNMPhrUUJjKAoNuY8bohUPjuCocER5Fpn94nlG5wvrLJZsJabuSihGcb5US+gHaLRCt20n4q2opXCriEaa+vi2pb3kIMMLuFioUVynCEJrTa9Y/9wPELJUwvpTfq55v6pSWfU/LIFnkTVIqm5OuG6X/KDeA3nTg6ykBYErTSd1zOYUabMdTR+DBKBevHF9NsmHo3/Le3XgCfopFYw049yYAVmRYy+dx84wlLhgF1JBNtDqx4rjQgICRzKQmKuh4EBe39ygQDnFd85uxD6Lbo6VZ3IuQeIrb0nu9eaD4H8oE+jRIvho8d3WJWR8smec0ddud1UFTRdXt69njtluVDe9zSU5vMGOnDn/cw8lQAb";
    // Since ImageTarget trackables use mm to specify their dimensions, we must use mm for all physical dimensions.
    // We will define some constants and conversions here.
    private static final float mmPerInch        = 25.4f;
    private static final float mmTargetHeight   = (6) * mmPerInch;          // the height of the center of the target image above the floor
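    // Worked example of the conversion above: the 6 in target-center height
    // comes out to 6 * 25.4 = 152.4 mm.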

    // Constant for Stone Target
    private static final float stoneZ = 2.00f * mmPerInch;

    // Constants for the center support targets
    private static final float bridgeZ = 6.42f * mmPerInch;
    private static final float bridgeY = 23 * mmPerInch;
    private static final float bridgeX = 5.18f * mmPerInch;
    private static final float bridgeRotY = 59;                                 // Units are degrees
    private static final float bridgeRotZ = 180;

    // Constants for perimeter targets
    private static final float halfField = 72 * mmPerInch;
    private static final float quadField  = 36 * mmPerInch;

    // Class Members
    private OpenGLMatrix lastLocation = null;
    private VuforiaLocalizer vuforia = null;

    WebcamName webcamName = null;

    private boolean targetVisible = false;
    private float phoneXRotate    = 0;
    private float phoneYRotate    = 0;
    private float phoneZRotate    = 0;

    private ElapsedTime runtime = new ElapsedTime();
    @Override
    public void runOpMode() {

        /*
         * Configure Vuforia by creating a Parameter object, and passing it to the Vuforia engine.
         * We can pass Vuforia the handle to a camera preview resource (on the RC phone);
         * If no camera monitor is desired, use the parameter-less constructor instead (commented out below).
         */
        Robot robot = new Robot(this, telemetry);
        webcamName = hardwareMap.get(WebcamName.class, "Webcam 1");
        int cameraMonitorViewId = hardwareMap.appContext.getResources().getIdentifier("cameraMonitorViewId", "id", hardwareMap.appContext.getPackageName());
        VuforiaLocalizer.Parameters parameters = new VuforiaLocalizer.Parameters(cameraMonitorViewId);
        // If no on-screen camera monitor is wanted, use instead:
        // VuforiaLocalizer.Parameters parameters = new VuforiaLocalizer.Parameters();

        parameters.vuforiaLicenseKey = VUFORIA_KEY;
        parameters.cameraDirection   = CAMERA_CHOICE;
        // Do not use extended tracking; it is too noisy for this use.
        parameters.useExtendedTracking = false;
        parameters.cameraName = webcamName;

        // Instantiate the Vuforia engine
        vuforia = ClassFactory.getInstance().createVuforia(parameters);
        MecanumDrive mecanum = new MecanumDrive(hardwareMap);

        // Load the data sets for the trackable objects. These particular data
        // sets are stored in the 'assets' part of our application.
        VuforiaTrackables targetsSkyStone = this.vuforia.loadTrackablesFromAsset("Skystone");

        sensorColor = hardwareMap.get(ColorSensor.class, "colour");

        // get a reference to the distance sensor that shares the same name.
        sensorDistance = hardwareMap.get(DistanceSensor.class, "colour");

        // hsvValues is an array that will hold the hue, saturation, and value information.
        float[] hsvValues = {0F, 0F, 0F};

        // values is a reference to the hsvValues array.
        final float[] values = hsvValues;

        // sometimes it helps to multiply the raw RGB values by a scale factor
        // to amplify/attenuate the measured values.
        final double SCALE_FACTOR = 255;

        // get a reference to the RelativeLayout so we can change the background
        // color of the Robot Controller app to match the hue detected by the RGB sensor.
        int relativeLayoutId = hardwareMap.appContext.getResources().getIdentifier("RelativeLayout", "id", hardwareMap.appContext.getPackageName());
        final View relativeLayout = ((Activity) hardwareMap.appContext).findViewById(relativeLayoutId);
        VuforiaTrackable stoneTarget = targetsSkyStone.get(0);
        stoneTarget.setName("Stone Target");
//        VuforiaTrackable blueRearBridge = targetsSkyStone.get(1);
//        blueRearBridge.setName("Blue Rear Bridge");
//        VuforiaTrackable redRearBridge = targetsSkyStone.get(2);
//        redRearBridge.setName("Red Rear Bridge");
//        VuforiaTrackable redFrontBridge = targetsSkyStone.get(3);
//        redFrontBridge.setName("Red Front Bridge");
//        VuforiaTrackable blueFrontBridge = targetsSkyStone.get(4);
//        blueFrontBridge.setName("Blue Front Bridge");
//        VuforiaTrackable red1 = targetsSkyStone.get(5);
//        red1.setName("Red Perimeter 1");
//        VuforiaTrackable red2 = targetsSkyStone.get(6);
//        red2.setName("Red Perimeter 2");
//        VuforiaTrackable front1 = targetsSkyStone.get(7);
//        front1.setName("Front Perimeter 1");
//        VuforiaTrackable front2 = targetsSkyStone.get(8);
//        front2.setName("Front Perimeter 2");
//        VuforiaTrackable blue1 = targetsSkyStone.get(9);
//        blue1.setName("Blue Perimeter 1");
//        VuforiaTrackable blue2 = targetsSkyStone.get(10);
//        blue2.setName("Blue Perimeter 2");
//        VuforiaTrackable rear1 = targetsSkyStone.get(11);
//        rear1.setName("Rear Perimeter 1");
//        VuforiaTrackable rear2 = targetsSkyStone.get(12);
//        rear2.setName("Rear Perimeter 2");

        // For convenience, gather together all the trackable objects in one easily-iterable collection
//        List<VuforiaTrackable> allTrackables = new ArrayList<VuforiaTrackable>();
//        allTrackables.addAll(targetsSkyStone);
        /**
         * In order for localization to work, we need to tell the system where each target is on the field, and
         * where the phone resides on the robot.  These specifications are in the form of <em>transformation matrices</em>.
         * Transformation matrices are a central, important concept in the math involved in localization.
         * See <a href="https://en.wikipedia.org/wiki/Transformation_matrix">Transformation Matrix</a>
         * for detailed information. Commonly, you'll encounter transformation matrices as instances
         * of the {@link OpenGLMatrix} class.
         *
         * If you are standing in the Red Alliance Station looking towards the center of the field,
         *     - The X axis runs from your left to the right. (positive from the center to the right)
         *     - The Y axis runs from the Red Alliance Station towards the other side of the field
         *       where the Blue Alliance Station is. (Positive is from the center, towards the Blue Alliance station)
         *     - The Z axis runs from the floor, upwards towards the ceiling.  (Positive is above the floor)
         *
         * Before being transformed, each target image is conceptually located at the origin of the field's
         * coordinate system (the center of the field), facing up.
         */

        // Set the position of the Stone Target.  Since it's not fixed in position, assume it's at the field origin.
        // Rotate it to face forward, and raise it to sit on the ground correctly.
        // This can be used for generic target-centric approach algorithms.
        stoneTarget.setLocation(OpenGLMatrix
                .translation(0, 0, stoneZ)
                .multiplied(Orientation.getRotationMatrix(EXTRINSIC, XYZ, DEGREES, 90, 0, -90)));
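        // Reading the matrix above: conceptually the image starts flat at the field origin
        // facing up; the XYZ rotation (90, 0, -90) stands it upright, and the translation
        // lifts its center stoneZ (2 in) off the floor.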

//        //Set the position of the bridge support targets with relation to origin (center of field)
//        blueFrontBridge.setLocation(OpenGLMatrix
//                .translation(-bridgeX, bridgeY, bridgeZ)
//                .multiplied(Orientation.getRotationMatrix(EXTRINSIC, XYZ, DEGREES, 0, bridgeRotY, bridgeRotZ)));
//
//        blueRearBridge.setLocation(OpenGLMatrix
//                .translation(-bridgeX, bridgeY, bridgeZ)
//                .multiplied(Orientation.getRotationMatrix(EXTRINSIC, XYZ, DEGREES, 0, -bridgeRotY, bridgeRotZ)));
//
//        redFrontBridge.setLocation(OpenGLMatrix
//                .translation(-bridgeX, -bridgeY, bridgeZ)
//                .multiplied(Orientation.getRotationMatrix(EXTRINSIC, XYZ, DEGREES, 0, -bridgeRotY, 0)));
//
//        redRearBridge.setLocation(OpenGLMatrix
//                .translation(bridgeX, -bridgeY, bridgeZ)
//                .multiplied(Orientation.getRotationMatrix(EXTRINSIC, XYZ, DEGREES, 0, bridgeRotY, 0)));
//
//        //Set the position of the perimeter targets with relation to origin (center of field)
//        red1.setLocation(OpenGLMatrix
//                .translation(quadField, -halfField, mmTargetHeight)
//                .multiplied(Orientation.getRotationMatrix(EXTRINSIC, XYZ, DEGREES, 90, 0, 180)));
//
//        red2.setLocation(OpenGLMatrix
//                .translation(-quadField, -halfField, mmTargetHeight)
//                .multiplied(Orientation.getRotationMatrix(EXTRINSIC, XYZ, DEGREES, 90, 0, 180)));
//
//        front1.setLocation(OpenGLMatrix
//                .translation(-halfField, -quadField, mmTargetHeight)
//                .multiplied(Orientation.getRotationMatrix(EXTRINSIC, XYZ, DEGREES, 90, 0, 90)));
//
//        front2.setLocation(OpenGLMatrix
//                .translation(-halfField, quadField, mmTargetHeight)
//                .multiplied(Orientation.getRotationMatrix(EXTRINSIC, XYZ, DEGREES, 90, 0, 90)));
//
//        blue1.setLocation(OpenGLMatrix
//                .translation(-quadField, halfField, mmTargetHeight)
//                .multiplied(Orientation.getRotationMatrix(EXTRINSIC, XYZ, DEGREES, 90, 0, 0)));
//
//        blue2.setLocation(OpenGLMatrix
//                .translation(quadField, halfField, mmTargetHeight)
//                .multiplied(Orientation.getRotationMatrix(EXTRINSIC, XYZ, DEGREES, 90, 0, 0)));
//
//        rear1.setLocation(OpenGLMatrix
//                .translation(halfField, quadField, mmTargetHeight)
//                .multiplied(Orientation.getRotationMatrix(EXTRINSIC, XYZ, DEGREES, 90, 0, -90)));
//
//        rear2.setLocation(OpenGLMatrix
//                .translation(halfField, -quadField, mmTargetHeight)
//                .multiplied(Orientation.getRotationMatrix(EXTRINSIC, XYZ, DEGREES, 90, 0, -90)));
        //
        // Create a transformation matrix describing where the phone is on the robot.
        //
        // NOTE !!!!  It's very important that you turn OFF your phone's Auto-Screen-Rotation option.
        // Lock it into Portrait for these numbers to work.
        //
        // Info:  The coordinate frame for the robot looks the same as the field.
        // The robot's "forward" direction is facing out along the X axis, with the LEFT side facing out along the Y axis.
        // Z is UP on the robot.  This equates to a bearing angle of zero degrees.
        //
        // The phone starts out lying flat, with the screen facing up and with the physical top of the phone
        // pointing to the LEFT side of the robot.
        // The two examples below assume that the camera is facing forward out the front of the robot.

        // We need to rotate the camera around its long axis to bring the correct camera forward.
        if (CAMERA_CHOICE == BACK) {
            phoneYRotate = -90;
        } else {
            phoneYRotate = 90;
        }
  340.  
  341.         // Rotate the phone vertical about the X axis if it's in portrait mode
  342.         if (PHONE_IS_PORTRAIT) {
  343.             phoneXRotate = 90 ;
  344.         }
  345.  
  346.         // Next, translate the camera lens to where it is on the robot.
  347.         // In this example, it is centered (left to right), but forward of the middle of the robot, and above ground level.
  348. //        final float CAMERA_FORWARD_DISPLACEMENT  = 4.0f * mmPerInch;   // eg: Camera is 4 Inches in front of robot center
  349. //        final float CAMERA_VERTICAL_DISPLACEMENT = 8.0f * mmPerInch;   // eg: Camera is 8 Inches above ground
  350.         final float CAMERA_FORWARD_DISPLACEMENT  = 0;   // eg: Camera is 4 Inches in front of robot center
  351.         final float CAMERA_VERTICAL_DISPLACEMENT = 0;
  352.         final float CAMERA_LEFT_DISPLACEMENT     = 0;     // eg: Camera is ON the robot's center line

        OpenGLMatrix robotFromCamera = OpenGLMatrix
                .translation(CAMERA_FORWARD_DISPLACEMENT, CAMERA_LEFT_DISPLACEMENT, CAMERA_VERTICAL_DISPLACEMENT)
                .multiplied(Orientation.getRotationMatrix(EXTRINSIC, YZX, DEGREES, phoneYRotate, phoneZRotate, phoneXRotate));
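        /*
         * For intuition (a sketch, not code from the original sample): given robotFromCamera
         * and a camera-from-target pose reported by Vuforia, the listener effectively composes
         *
         *     robotOnField = targetOnField
         *             .multiplied(cameraFromTarget.inverted())   // camera's pose on the field
         *             .multiplied(robotFromCamera.inverted());   // robot's pose on the field
         *
         * setPhoneInformation() below supplies robotFromCamera so the listener can do this.
         */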

        // Let the trackable listener know where the phone is on the robot.
//        for (VuforiaTrackable trackable : allTrackables) {
        ((VuforiaTrackableDefaultListener) stoneTarget.getListener()).setPhoneInformation(robotFromCamera, parameters.cameraDirection);

        // Target tracking begins once PLAY is pressed.
        //
        // Note: To use the remote camera preview:
        // AFTER you hit Init on the Driver Station, use the "options menu" to select "Camera Stream".
        // Tap the preview window to receive a fresh image.

        waitForStart();

        robot.start();
        runtime.reset();
        targetsSkyStone.activate();

        // Timed lift cycle: raise the lift for the first second, drop the intake,
        // lower the lift for the next second, then release the intake.
        // The timer is re-read on every pass so each loop actually exits.
        while (opModeIsActive() && runtime.seconds() < 1) {
            robot.lift.setPower(0.85);
        }
        robot.alternateIntake.setDown(true);
        while (opModeIsActive() && runtime.seconds() < 2) {
            robot.lift.setPower(-0.85);
        }
        robot.lift.setPower(0);
        robot.alternateIntake.setDown(false);
        // One-shot timestamps for the back-up / traverse sequence inside the loop.
        // They must persist across loop iterations, so they are declared out here.
        double b = 0;
        double c = 0;
        double d = 0;

        while (!isStopRequested()) {

            // Check the trackable target to see whether it is visible.
            targetVisible = false;
//            for (VuforiaTrackable trackable : allTrackables) {
            if (((VuforiaTrackableDefaultListener) stoneTarget.getListener()).isVisible()) {
                telemetry.addData("Visible Target", stoneTarget.getName());
                targetVisible = true;

                // getUpdatedRobotLocation() will return null if no new information is available since
                // the last time that call was made, or if the trackable is not currently visible.
                OpenGLMatrix robotLocationTransform = ((VuforiaTrackableDefaultListener) stoneTarget.getListener()).getUpdatedRobotLocation();
                if (robotLocationTransform != null) {
                    lastLocation = robotLocationTransform;
                }
            }
//                break;
//            }

            // Provide feedback as to where the robot is located (if we know).
            if (targetVisible) {
                // express position (translation) of robot in inches.
                VectorF translation = lastLocation.getTranslation();
                double delta_x = translation.get(0);
                double delta_y = translation.get(1);
                // not very useful; the phone should not move vertically
                double delta_z = translation.get(2);

                telemetry.addData("Pos X", delta_x / mmPerInch);
                telemetry.addData("Pos Y", delta_y / mmPerInch);
//                telemetry.addData("Pos Z", delta_z / mmPerInch);
//                telemetry.addData("Pos (in)", "{X, Y, Z} = %.1f, %.1f, %.1f",
//                        translation.get(0) / mmPerInch, translation.get(1) / mmPerInch, translation.get(2) / mmPerInch);

                // get the rotation of the robot
                Orientation rotation = Orientation.getOrientation(lastLocation, EXTRINSIC, XYZ, DEGREES);
                double angleFromPhone = rotation.thirdAngle;
                // output the 'Z' angle rotation, or heading, of the robot:
                // the angle between a line parallel to the x axis and the current camera direction
                telemetry.addData("Rot (Heading)", angleFromPhone);
//                telemetry.addData("Rot X", rotation.firstAngle);
//                telemetry.addData("Rot Y", rotation.secondAngle);

                // compute straight-line distance to the target, in inches
                double targetRange = Math.hypot(delta_x, delta_y) / mmPerInch;

                // compute the angle from a line parallel to the x axis to the block
                // (negative when the block is to the left, positive when it is to the right)
                double xToBlock1 = Math.toDegrees(Math.atan(delta_y / delta_x));
//                double xToBlock2 = Math.toDegrees(Math.asin(delta_y / targetRange));

                // compute the angle of the block relative to the robot's heading
                double relativeAngle = xToBlock1 - angleFromPhone;
//                telemetry.addData("ToBlock Angle1", xToBlock1);
                telemetry.addData("Relative Angle", relativeAngle);
                telemetry.addData("Total distance", targetRange);
//                telemetry.addData("To Block Test 2", xToBlock2);
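
                // Note: Math.atan(delta_y / delta_x) above only gives the correct quadrant
                // when delta_x > 0. If the stone could end up behind the camera's x axis,
                // the four-quadrant form would be safer (a suggested variant, not in the
                // original code):
                // double xToBlock1 = Math.toDegrees(Math.atan2(delta_y, delta_x));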
                // convert the scaled RGB readings to HSV
                Color.RGBToHSV((int) (sensorColor.red() * SCALE_FACTOR),
                        (int) (sensorColor.green() * SCALE_FACTOR),
                        (int) (sensorColor.blue() * SCALE_FACTOR),
                        hsvValues);

                // send the info back to the driver station using the telemetry function.
                telemetry.addData("Distance (cm)",
                        String.format(Locale.US, "%.02f", sensorDistance.getDistance(DistanceUnit.CM)));
                telemetry.addData("Alpha", sensorColor.alpha());
                telemetry.addData("Red  ", sensorColor.red());
                telemetry.addData("Green", sensorColor.green());
                telemetry.addData("Blue ", sensorColor.blue());
                telemetry.addData("Hue", hsvValues[0]);

                // change the background color to match the color detected by the RGB sensor.
                // pass a reference to the hue, saturation, and value array as an argument
                // to the HSVToColor method.
                relativeLayout.post(new Runnable() {
                    public void run() {
                        relativeLayout.setBackgroundColor(Color.HSVToColor(0xff, values));
                    }
                });
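                // The same post, written as a lambda (assumes the project targets Java 8+):
                // relativeLayout.post(() -> relativeLayout.setBackgroundColor(Color.HSVToColor(0xff, values)));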

                telemetry.update();
                // Consider the robot aligned once the heading is within [89, 91] degrees.
                if (89 <= angleFromPhone && angleFromPhone <= 91) {
                    align = true;
                }
                // Otherwise strafe to bring the heading into that window.
                if (!align && angleFromPhone > 91) {
                    mecanum.setVelocity(0, -0.4, 0);
                }
                if (!align && angleFromPhone < 89) {
                    mecanum.setVelocity(0, 0.4, 0);
                }
                // Once aligned, drive toward the stone until it is close enough.
                if (align && !close) {
                    mecanum.setVelocity(-0.7, 0, 0);
                }

                // Tuned delta_x window that counts as "close" to the stone.
                if (3.35 >= delta_x && 3.15 <= delta_x) {
                    close = true;
                }

            } else {
                telemetry.addData("Visible Target", "none");
                // No target in sight: strafe until the stone comes into view.
                if (!align) {
                    mecanum.setVelocity(0, -0.3, 0);
                }
            }
            // Once aligned and close: drop the intake, pause 0.5 s, back away for 1 s,
            // strafe to the other side for 0.5 s and release, then crawl until the color
            // sensor sees the red line and park. Each timestamp (b, c, d) is captured
            // once, the first time its stage begins.
            if (align && close && !backedUp) {
                robot.alternateIntake.setDown(true);
                if (b == 0) {
                    b = runtime.seconds();
                }
            }
            if (align && close && !backedUp && b != 0 && runtime.seconds() > (b + 0.5)) {
                mecanum.setVelocity(0.7, 0, 0);
                if (c == 0) {
                    c = runtime.seconds();
                }
            }
            if (align && close && !backedUp && c != 0 && runtime.seconds() > (c + 1)) {
                mecanum.setVelocity(0, 0, 0);
                backedUp = true;
            }
            if (align && close && backedUp && !otherSide) {
                mecanum.setVelocity(0, 0.5, 0);
                if (d == 0) {
                    d = runtime.seconds();
                }
            }
            if (align && close && backedUp && !otherSide && d != 0 && runtime.seconds() > (d + 0.5)) {
                mecanum.setVelocity(0, 0, 0);
                otherSide = true;
                robot.alternateIntake.setDown(false);
            }
            if (align && close && backedUp && otherSide && sensorColor.red() < 500) {
                mecanum.setVelocity(0, -0.3, 0);
            }
            if (align && close && backedUp && otherSide && sensorColor.red() > 500) {
                // Assumption: stop on the red line and mark the robot parked.
                mecanum.setVelocity(0, 0, 0);
                parked = true;
            }

            telemetry.addData("align", align);
            telemetry.addData("close", close);
            telemetry.update();
            mecanum.update();
        }

        // Disable tracking when we are done.
        targetsSkyStone.deactivate();
    }
}