Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
- package org.firstinspires.ftc.teamcode;
- import com.acmerobotics.dashboard.FtcDashboard;
- import com.acmerobotics.dashboard.config.Config;
- import com.disnodeteam.dogecv.DogeCV;
- import com.disnodeteam.dogecv.detectors.skystone.SkystoneDetector;
- import com.disnodeteam.dogecv.detectors.skystone.StoneDetector;
- import com.disnodeteam.dogecv.filters.GrayscaleFilter;
- import com.disnodeteam.dogecv.filters.LeviColorFilter;
- import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
- import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
- import com.qualcomm.robotcore.util.ElapsedTime;
- import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName;
- import org.firstinspires.ftc.teamcode.drive.mecanum.SampleMecanumDriveREVOptimized;
- import org.opencv.core.Point;
- import org.openftc.easyopencv.OpenCvCamera;
- import org.openftc.easyopencv.OpenCvCameraRotation;
- import org.openftc.easyopencv.OpenCvInternalCamera;
- import org.openftc.easyopencv.OpenCvWebcam;
- import java.util.List;
- import java.util.Locale;
- /*
- * Thanks to EasyOpenCV for the great API (and most of the example)
- *
- * Original Work Copyright(c) 2019 OpenFTC Team
- * Derived Work Copyright(c) 2019 DogeDevs
- */
- @Config
- @TeleOp(name = "Skystone Detector OpenCv", group="DogeCV")
- public class OpenCvShit extends LinearOpMode {
- //private OpenCvCamera phoneCam;
- private OpenCvWebcam webcam;
- private SkystoneDetector skyStoneDetector;
- private StoneDetector stoneDetector;
- public static double yellowt = 100;
- public static int grayt = 40;
- @Override
- public void runOpMode() {
- /*
- * Instantiate an OpenCvCamera object for the camera we'll be using.
- * In this sample, we're using the phone's internal camera. We pass it a
- * CameraDirection enum indicating whether to use the front or back facing
- * camera, as well as the view that we wish to use for camera monitor (on
- * the RC phone). If no camera monitor is desired, use the alternate
- * single-parameter constructor instead (commented out below)
- */
- //int cameraMonitorViewId = hardwareMap.appContext.getResources().getIdentifier("cameraMonitorViewId", "id", hardwareMap.appContext.getPackageName());
- //phoneCam = new OpenCvInternalCamera(OpenCvInternalCamera.CameraDirection.BACK, cameraMonitorViewId);
- webcam = new OpenCvWebcam(hardwareMap.get(WebcamName.class,"camera porno"));
- // OR... Do Not Activate the Camera Monitor View
- //phoneCam = new OpenCvInternalCamera(OpenCvInternalCamera.CameraDirection.BACK);
- /*
- * Open the connection to the camera device
- */
- //phoneCam.openCameraDevice();
- webcam.openCameraDevice();
- /*
- * Specify the image processing pipeline we wish to invoke upon receipt
- * of a frame from the camera. Note that switching pipelines on-the-fly
- * (while a streaming session is in flight) *IS* supported.
- */
- //phoneCam.setPipeline(skyStoneDetector);
- stoneDetector = new StoneDetector();
- skyStoneDetector = new SkystoneDetector();
- webcam.setPipeline(skyStoneDetector);
- /*
- * Tell the camera to start streaming images to us! Note that you must make sure
- * the resolution you specify is supported by the camera. If it is not, an exception
- * will be thrown.
- *
- * Also, we specify the rotation that the camera is used in. This is so that the image
- * from the camera sensor can be rotated such that it is always displayed with the image upright.
- * For a front facing camera, rotation is defined assuming the user is looking at the screen.
- * For a rear facing camera or a webcam, rotation is defined assuming the camera is facing
- * away from the user.
- */
- webcam.startStreaming(640,480, OpenCvCameraRotation.UPRIGHT);
- skyStoneDetector.blackFilter = new GrayscaleFilter(0, grayt);
- skyStoneDetector.yellowFilter = new LeviColorFilter(LeviColorFilter.ColorPreset.YELLOW, yellowt);
- FtcDashboard ftcDashboard = FtcDashboard.getInstance();
- ftcDashboard.startCameraStream(webcam, 30);
- /*
- * Wait for the user to press start on the Driver Station
- */
- waitForStart();
- while (opModeIsActive())
- {
- /*
- * Send some stats to the telemetry
- */
- /* int nr =1;
- for(Point point : stoneDetector.foundScreenPositions()){
- telemetry.addData("stone" + nr, point.x);
- nr++;
- } */
- String position = detecteaza();
- telemetry.addData("poztie", position);
- telemetry.addData("Stone Position X", skyStoneDetector.getScreenPosition().x);
- telemetry.addData("Stone Position Y", skyStoneDetector.getScreenPosition().y);
- telemetry.addData("Frame Count", webcam.getFrameCount());
- telemetry.addData("FPS", String.format(Locale.US, "%.2f", webcam.getFps()));
- telemetry.addData("Total frame time ms", webcam.getTotalFrameTimeMs());
- telemetry.addData("Pipeline time ms", webcam.getPipelineTimeMs());
- telemetry.addData("Overhead time ms", webcam.getOverheadTimeMs());
- telemetry.addData("Theoretical max FPS", webcam.getCurrentPipelineMaxFps());
- telemetry.update();
- /*
- * NOTE: stopping the stream from the camera early (before the end of the OpMode
- * when it will be automatically stopped for you) *IS* supported. The "if" statement
- * below will stop streaming from the camera when the "A" button on gamepad 1 is pressed.
- */
- if(gamepad1.a)
- {
- /*
- * IMPORTANT NOTE: calling stopStreaming() will indeed stop the stream of images
- * from the camera (and, by extension, stop calling your vision pipeline). HOWEVER,
- * if the reason you wish to stop the stream early is to switch use of the camera
- * over to, say, Vuforia or TFOD, you will also need to call closeCameraDevice()
- * (commented out below), because according to the Android Camera API documentation:
- * "Your application should only have one Camera object active at a time for
- * a particular hardware camera."
- *
- * NB: calling closeCameraDevice() will internally call stopStreaming() if applicable,
- * but it doesn't hurt to call it anyway, if for no other reason than clarity.
- *
- * NB2: if you are stopping the camera stream to simply save some processing power
- * (or battery power) for a short while when you do not need your vision pipeline,
- * it is recommended to NOT call closeCameraDevice() as you will then need to re-open
- * it the next time you wish to activate your vision pipeline, which can take a bit of
- * time. Of course, this comment is irrelevant in light of the use case described in
- * the above "important note".
- */
- webcam.stopStreaming();
- //webcam.closeCameraDevice();
- }
- /*
- * The viewport (if one was specified in the constructor) can also be dynamically "paused"
- * and "resumed". The primary use case of this is to reduce CPU, memory, and power load
- * when you need your vision pipeline running, but do not require a live preview on the
- * robot controller screen. For instance, this could be useful if you wish to see the live
- * camera preview as you are initializing your robot, but you no longer require the live
- * preview after you have finished your initialization process; pausing the viewport does
- * not stop running your pipeline.
- *
- * The "if" statements below will pause the viewport if the "X" button on gamepad1 is pressed,
- * and resume the viewport if the "Y" button on gamepad1 is pressed.
- */
- else if(gamepad1.x) {
- webcam.pauseViewport();
- }
- else if(gamepad1.y) {
- webcam.resumeViewport();
- }
- }
- }
- public String detecteaza(){
- ElapsedTime runtime = new ElapsedTime();
- double x_position =0 , y_position= 0;
- sleep(200);
- runtime.reset();
- while (runtime.seconds() < 1.5){
- x_position = skyStoneDetector.getScreenPosition().x;
- y_position = skyStoneDetector.getScreenPosition().y;
- }
- if( x_position < 330){
- return "stanga";
- }else if( x_position > 380 && x_position< 500){
- return "mijloc";
- }else return "dreapta";
- }
- }
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement