Advertisement
Guest User

Untitled

a guest
May 24th, 2016
76
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
text 35.88 KB | None | 0 0
  1. package angers.m2.contour;
  2.  
  3. import android.annotation.SuppressLint;
  4. import android.content.Context;
  5. import android.content.Intent;
  6. import android.hardware.Camera;
  7. import android.hardware.Sensor;
  8. import android.hardware.SensorEvent;
  9. import android.hardware.SensorEventListener;
  10. import android.net.Uri;
  11. import android.os.Bundle;
  12. import android.os.Environment;
  13. import android.support.v7.app.AppCompatActivity;
  14. import android.util.DisplayMetrics;
  15. import android.view.Menu;
  16. import android.view.MenuItem;
  17. import android.view.MotionEvent;
  18. import android.view.SurfaceView;
  19. import android.view.WindowManager;
  20. import android.widget.LinearLayout.LayoutParams;
  21. import android.widget.Toast;
  22.  
  23. import com.m2sili.univangers.vistalibrary.Vista;
  24.  
  25. import org.opencv.android.BaseLoaderCallback;
  26. import org.opencv.android.CameraBridgeViewBase;
  27. import org.opencv.android.JavaCameraView;
  28. import org.opencv.android.LoaderCallbackInterface;
  29. import org.opencv.android.OpenCVLoader;
  30. import org.opencv.core.Core;
  31. import org.opencv.core.CvType;
  32. import org.opencv.core.Mat;
  33. import org.opencv.core.MatOfByte;
  34. import org.opencv.core.MatOfFloat;
  35. import org.opencv.core.MatOfInt;
  36. import org.opencv.core.MatOfPoint;
  37. import org.opencv.core.MatOfPoint2f;
  38. import org.opencv.core.MatOfRect;
  39. import org.opencv.core.Point;
  40. import org.opencv.core.Rect;
  41. import org.opencv.core.Scalar;
  42. import org.opencv.core.Size;
  43. import org.opencv.core.TermCriteria;
  44. import org.opencv.imgcodecs.Imgcodecs;
  45. import org.opencv.imgproc.Imgproc;
  46. import org.opencv.objdetect.CascadeClassifier;
  47. import org.opencv.utils.Converters;
  48. import org.opencv.video.Video;
  49.  
  50. import java.io.File;
  51. import java.io.FileOutputStream;
  52. import java.io.IOException;
  53. import java.io.InputStream;
  54. import java.text.SimpleDateFormat;
  55. import java.util.ArrayList;
  56. import java.util.Arrays;
  57. import java.util.Date;
  58. import java.util.HashMap;
  59. import java.util.List;
  60. import java.util.Map;
  61.  
  62. public class MainActivity extends AppCompatActivity implements CameraBridgeViewBase.CvCameraViewListener2, SensorEventListener {
  63.  
// View-mode selectors: each constant picks one image-processing pipeline
// in onCameraFrame's switch.
public static final int VIEW_MODE_RGBA = 0;
public static final int VIEW_MODE_HOUGHCIRCLES = 1;
public static final int VIEW_MODE_HOUGHLINES = 2;
public static final int VIEW_MODE_CANNY = 3;
public static final int VIEW_MODE_COLCONTOUR = 4;
public static final int VIEW_MODE_FACEDETECT = 5;
public static final int VIEW_MODE_YELLOW_QUAD_DETECT = 6;
public static final int VIEW_MODE_GFTT = 7;
public static final int VIEW_MODE_OPFLOW = 8;

// Currently-active pipeline; onResume() resets it to VIEW_MODE_RGBA.
public static int viewMode = VIEW_MODE_OPFLOW;

// Haar cascade for face detection; null until loaded in mLoaderCallback
// (and left null if loading fails).
private CascadeClassifier mCascade;

// bShootNow: set by onTouchEvent, consumed once per frame to save a screenshot.
// bDisplayTitle: whether to overlay mode title + FPS text.
// bFirstFaceSaved: true once the first detected face image has been written.
private boolean bShootNow = false, bDisplayTitle = true, bFirstFaceSaved = false;

// HSV centre of the colour being tracked (hue, sat, val) — set in onCameraViewStarted.
private byte[] byteColourTrackCentreHue;

// Scratch doubles reused across frames (contour area, text scale, line endpoints).
private double d, dTextScaleFactor, x1, x2, y1, y2;

// One row of HoughLinesP output: {x1, y1, x2, y2}.
private double[] vecHoughLines;

// Shared scratch points — several pipelines write these in place each frame.
private Point pt, pt1, pt2;

private int x, y, radius, iMinRadius, iMaxRadius, iCannyLowerThreshold,
        iCannyUpperThreshold, iAccumulator, iLineThickness = 3,
        iHoughLinesThreshold = 50, iHoughLinesMinLineSize = 20,
        iHoughLinesGap = 20, iMaxFaceHeight, iMaxFaceHeightIndex,
        iFileOrdinal = 0, iCamera = 0, iNumberOfCameras = 0, iGFFTMax = 40,
        iContourAreaMin = 1000;

// Two preview surfaces: view 0 is the default camera, view 1 the second
// camera (only wired up when the device reports more than one).
private JavaCameraView mOpenCvCameraView0;
private JavaCameraView mOpenCvCameraView1;

// Optical-flow bookkeeping: per-feature status bytes and point lists.
private List<Byte> byteStatus;
private List<Integer> iHueMap, channels;
private List<Float> ranges;
private List<Point> pts, corners, cornersThis, cornersPrev;
private List<MatOfPoint> contours;

// Frame counter and millisecond timestamps for the on-screen FPS readout.
private long lFrameCount = 0, lMilliStart = 0, lMilliNow = 0, lMilliShotTime = 0;

// Working Mats, allocated once in onCameraViewStarted and reused every frame
// to avoid per-frame native allocations.
private Mat mRgba, mGray, mIntermediateMat, mMatRed, mMatGreen, mMatBlue, mROIMat,
        mMatRedInv, mMatGreenInv, mMatBlueInv, mHSVMat, mErodeKernel, mContours,
        lines, mFaceDest, mFaceResized, matOpFlowPrev, matOpFlowThis,
        matFaceHistogramPrevious, matFaceHistogramThis, mHist;

private MatOfFloat mMOFerr, MOFrange;
private MatOfRect faces;
private MatOfByte mMOBStatus;
private MatOfPoint2f mMOP2f1, mMOP2f2, mMOP2fptsPrev, mMOP2fptsThis, mMOP2fptsSafe;
private MatOfPoint2f mApproxContour;
private MatOfPoint MOPcorners;
private MatOfInt MOIone, histSize;

private Rect rect, rDest;

private Scalar colorRed, colorGreen;
private Size sSize, sSize3, sSize5, sMatSize;
private String string, sShotText;

// Previous tracked y-coordinate used by the OPFLOW jitter filter.
private double lastValue;

// External helper library (com.m2sili.univangers.vistalibrary) driven from
// the lifecycle callbacks; semantics are opaque from this file.
private Vista vista = new Vista();
  129.  
  130. private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
  131. @Override
  132. public void onManagerConnected(int status) {
  133. switch (status) {
  134. case LoaderCallbackInterface.SUCCESS: {
  135. mOpenCvCameraView0.enableView();
  136.  
  137. if (iNumberOfCameras > 1)
  138. mOpenCvCameraView1.enableView();
  139.  
  140. try {
  141. // DO FACE CASCADE SETUP
  142.  
  143. Context context = getApplicationContext();
  144. InputStream is3 = context.getResources().openRawResource(R.raw.haarcascade_frontalface_default);
  145. File cascadeDir = context.getDir("cascade", Context.MODE_PRIVATE);
  146. File cascadeFile = new File(cascadeDir, "haarcascade_frontalface_default.xml");
  147.  
  148. FileOutputStream os = new FileOutputStream(cascadeFile);
  149.  
  150. byte[] buffer = new byte[4096];
  151. int bytesRead;
  152.  
  153. while ((bytesRead = is3.read(buffer)) != -1) {
  154. os.write(buffer, 0, bytesRead);
  155. }
  156.  
  157. is3.close();
  158. os.close();
  159.  
  160. mCascade = new CascadeClassifier(cascadeFile.getAbsolutePath());
  161.  
  162. if (mCascade.empty()) {
  163. //Log.d(TAG, "Failed to load cascade classifier");
  164. mCascade = null;
  165. }
  166.  
  167. cascadeFile.delete();
  168. cascadeDir.delete();
  169.  
  170. } catch (IOException e) {
  171. e.printStackTrace();
  172. // Log.d(TAG, "Failed to load cascade. Exception thrown: " + e);
  173. }
  174.  
  175. }
  176. break;
  177. default: {
  178. super.onManagerConnected(status);
  179. }
  180. break;
  181. }
  182. }
  183. };
  184.  
  185.  
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_main);

    // Number of physical cameras; drives all the dual-view logic below.
    iNumberOfCameras = Camera.getNumberOfCameras();

    // Keep the screen on while the preview is running.
    getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);

    mOpenCvCameraView0 = (JavaCameraView) findViewById(R.id.java_surface_view0);

    if (iNumberOfCameras > 1)
        mOpenCvCameraView1 = (JavaCameraView) findViewById(R.id.java_surface_view1);

    // Camera view 0 starts visible and delivers frames to onCameraFrame.
    mOpenCvCameraView0.setVisibility(SurfaceView.VISIBLE);
    mOpenCvCameraView0.setCvCameraViewListener(this);

    mOpenCvCameraView0.setLayoutParams(new LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT));

    // The second view is prepared but hidden until the user swaps cameras
    // via the options menu.
    if (iNumberOfCameras > 1) {
        mOpenCvCameraView1.setVisibility(SurfaceView.GONE);
        mOpenCvCameraView1.setCvCameraViewListener(this);
        mOpenCvCameraView1.setLayoutParams(new LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT));
    }

    try {
        // NOTE(review): Vista is an external library; initialize(this)
        // presumably hooks it up to this activity — confirm against its docs.
        vista.initialize(this);
    } catch (Exception e) {
        e.printStackTrace();
    }
}
  217.  
  218. @Override
  219. public void onPause() {
  220. super.onPause();
  221. if (mOpenCvCameraView0 != null)
  222. mOpenCvCameraView0.disableView();
  223. if (iNumberOfCameras > 1)
  224. if (mOpenCvCameraView1 != null)
  225. mOpenCvCameraView1.disableView();
  226. }
  227.  
  228.  
  229. public void onResume() {
  230. super.onResume();
  231. viewMode = VIEW_MODE_RGBA;
  232.  
  233. OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_4, this, mLoaderCallback);
  234. }
  235.  
@Override
public void onStart() {
    super.onStart();
    // NOTE(review): Vista is an external helper; start() presumably begins
    // its sensor/processing work — confirm against the library's docs.
    vista.start();
}
  241.  
@Override
public void onStop() {
    super.onStop();
    // Mirror of onStart(): shut the Vista helper down with the activity.
    vista.stop();
}
  247.  
  248. public void onDestroy() {
  249. super.onDestroy();
  250. if (mOpenCvCameraView0 != null)
  251. mOpenCvCameraView0.disableView();
  252. if (iNumberOfCameras > 1)
  253. if (mOpenCvCameraView1 != null)
  254. mOpenCvCameraView1.disableView();
  255. }
  256.  
@Override
public boolean onCreateOptionsMenu(Menu menu) {
    // Inflate the mode-selection menu; items are dispatched in
    // onOptionsItemSelected.
    getMenuInflater().inflate(R.menu.menu_main, menu);
    return true;
}
  262.  
  263. @Override
  264. public boolean onOptionsItemSelected(MenuItem item) {
  265. if (item.getItemId() == R.id.action_info) {
  266. Intent myIntent1 = new Intent(Intent.ACTION_VIEW, Uri.parse("http://www.barrythomas.co.uk/machinevision.html"));
  267. startActivity(myIntent1);
  268. } else if (item.getItemId() == R.id.action_rgbpreview) {
  269. viewMode = VIEW_MODE_RGBA;
  270. lFrameCount = 0;
  271. lMilliStart = 0;
  272. } else if (item.getItemId() == R.id.action_cannyedges) {
  273. viewMode = VIEW_MODE_CANNY;
  274. lFrameCount = 0;
  275. lMilliStart = 0;
  276. } else if (item.getItemId() == R.id.action_houghcircles) {
  277. viewMode = VIEW_MODE_HOUGHCIRCLES;
  278. lFrameCount = 0;
  279. lMilliStart = 0;
  280. } else if (item.getItemId() == R.id.action_houghlines) {
  281. viewMode = VIEW_MODE_HOUGHLINES;
  282. lFrameCount = 0;
  283. lMilliStart = 0;
  284. } else if (item.getItemId() == R.id.action_colourcontour) {
  285. viewMode = VIEW_MODE_COLCONTOUR;
  286. lFrameCount = 0;
  287. lMilliStart = 0;
  288. } else if (item.getItemId() == R.id.action_facedetect) {
  289. viewMode = VIEW_MODE_FACEDETECT;
  290. lFrameCount = 0;
  291. lMilliStart = 0;
  292. bFirstFaceSaved = false;
  293. } else if (item.getItemId() == R.id.action_colourquad) {
  294. viewMode = VIEW_MODE_YELLOW_QUAD_DETECT;
  295. lFrameCount = 0;
  296. lMilliStart = 0;
  297. } else if (item.getItemId() == R.id.action_gftt) {
  298. viewMode = VIEW_MODE_GFTT;
  299. lFrameCount = 0;
  300. lMilliStart = 0;
  301. } else if (item.getItemId() == R.id.action_opflow) {
  302. viewMode = VIEW_MODE_OPFLOW;
  303. lFrameCount = 0;
  304. lMilliStart = 0;
  305. } else if (item.getItemId() == R.id.action_toggletitles) {
  306. if (bDisplayTitle == true)
  307. bDisplayTitle = false;
  308. else
  309. bDisplayTitle = true;
  310. } else if (item.getItemId() == R.id.action_swapcamera) {
  311. if (iNumberOfCameras > 1) {
  312. if (iCamera == 0) {
  313. mOpenCvCameraView0.setVisibility(SurfaceView.GONE);
  314. mOpenCvCameraView1 = (JavaCameraView) findViewById(R.id.java_surface_view1);
  315. mOpenCvCameraView1.setCvCameraViewListener(this);
  316. mOpenCvCameraView1.setVisibility(SurfaceView.VISIBLE);
  317.  
  318. iCamera = 1;
  319. } else {
  320. mOpenCvCameraView1.setVisibility(SurfaceView.GONE);
  321. mOpenCvCameraView0 = (JavaCameraView) findViewById(R.id.java_surface_view0);
  322. mOpenCvCameraView0.setCvCameraViewListener(this);
  323. mOpenCvCameraView0.setVisibility(SurfaceView.VISIBLE);
  324.  
  325. iCamera = 0;
  326. }
  327. } else
  328. Toast.makeText(getApplicationContext(), "Sadly, your device does not have a second camera",
  329. Toast.LENGTH_LONG).show();
  330. }
  331.  
  332. return true;
  333. }
  334.  
  335.  
  336. public void onCameraViewStarted(int width, int height) {
  337. // TODO Auto-generated method stub
  338. byteColourTrackCentreHue = new byte[3];
  339. // green = 60 // mid yellow 27
  340. byteColourTrackCentreHue[0] = 27;
  341. byteColourTrackCentreHue[1] = 100;
  342. byteColourTrackCentreHue[2] = (byte) 255;
  343. byteStatus = new ArrayList<Byte>();
  344.  
  345. channels = new ArrayList<Integer>();
  346. channels.add(0);
  347. colorRed = new Scalar(255, 0, 0, 255);
  348. colorGreen = new Scalar(0, 255, 0, 255);
  349. contours = new ArrayList<MatOfPoint>();
  350. corners = new ArrayList<Point>();
  351. cornersThis = new ArrayList<Point>();
  352. cornersPrev = new ArrayList<Point>();
  353.  
  354. faces = new MatOfRect();
  355.  
  356. histSize = new MatOfInt(25);
  357.  
  358. iHueMap = new ArrayList<Integer>();
  359. iHueMap.add(0);
  360. iHueMap.add(0);
  361. lines = new Mat();
  362.  
  363. mApproxContour = new MatOfPoint2f();
  364. mContours = new Mat();
  365. mHist = new Mat();
  366. mGray = new Mat();
  367. mHSVMat = new Mat();
  368. mIntermediateMat = new Mat();
  369. mMatRed = new Mat();
  370. mMatGreen = new Mat();
  371. mMatBlue = new Mat();
  372. mMatRedInv = new Mat();
  373. mMatGreenInv = new Mat();
  374. mMatBlueInv = new Mat();
  375. MOIone = new MatOfInt(0);
  376.  
  377. MOFrange = new MatOfFloat(0f, 256f);
  378. mMOP2f1 = new MatOfPoint2f();
  379. mMOP2f2 = new MatOfPoint2f();
  380. mMOP2fptsPrev = new MatOfPoint2f();
  381. mMOP2fptsThis = new MatOfPoint2f();
  382. mMOP2fptsSafe = new MatOfPoint2f();
  383. mMOFerr = new MatOfFloat();
  384. mMOBStatus = new MatOfByte();
  385. MOPcorners = new MatOfPoint();
  386. mRgba = new Mat();
  387. mROIMat = new Mat();
  388. mFaceDest = new Mat();
  389. mFaceResized = new Mat();
  390. matFaceHistogramPrevious = new Mat();
  391. matFaceHistogramThis = new Mat();
  392. matOpFlowThis = new Mat();
  393. matOpFlowPrev = new Mat();
  394.  
  395. pt = new Point(0, 0);
  396. pt1 = new Point(0, 0);
  397. pt2 = new Point(0, 0);
  398.  
  399. pts = new ArrayList<Point>();
  400.  
  401. ranges = new ArrayList<Float>();
  402. ranges.add(50.0f);
  403. ranges.add(256.0f);
  404. rect = new Rect();
  405. rDest = new Rect();
  406.  
  407. sMatSize = new Size();
  408. sSize = new Size();
  409. sSize3 = new Size(3, 3);
  410. sSize5 = new Size(5, 5);
  411.  
  412. string = "";
  413.  
  414. DisplayMetrics dm = this.getResources().getDisplayMetrics();
  415. int densityDpi = dm.densityDpi;
  416. dTextScaleFactor = ((double) densityDpi / 240.0) * 0.9;
  417.  
  418. mRgba = new Mat(height, width, CvType.CV_8UC4);
  419. mIntermediateMat = new Mat(height, width, CvType.CV_8UC4);
  420. }
  421.  
public void onCameraViewStopped() {
    // Intentionally a no-op: releaseMats() exists but is left disabled,
    // presumably so the working Mats survive a camera swap/restart —
    // TODO(review): confirm this is deliberate and not a leak.
    // releaseMats();
}
  425.  
  426. public void releaseMats() {
  427. mRgba.release();
  428. mIntermediateMat.release();
  429. mGray.release();
  430. mMatRed.release();
  431. mMatGreen.release();
  432. mMatBlue.release();
  433. mROIMat.release();
  434. mMatRedInv.release();
  435. mMatGreenInv.release();
  436. mMatBlueInv.release();
  437. mHSVMat.release();
  438. mErodeKernel.release();
  439. mContours.release();
  440. lines.release();
  441. faces.release();
  442. MOPcorners.release();
  443. mMOP2f1.release();
  444. mMOP2f2.release();
  445. mApproxContour.release();
  446.  
  447. }
  448.  
/**
 * Per-frame entry point from the OpenCV camera bridge. Runs the pipeline
 * selected by {@link #viewMode} on the RGBA frame, overlays a title and
 * FPS counter when enabled, and handles the touch-triggered screenshot.
 * Returns the (possibly annotated) frame to be displayed.
 */
@Override
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
    // Per-frame parameter resets (some cases override these below).
    iMinRadius = 20;
    iMaxRadius = 400;
    iCannyLowerThreshold = 50;
    iCannyUpperThreshold = 180;
    iAccumulator = 300;
    // 3x3 cross kernel used by the colour-contour erode step.
    mErodeKernel = Imgproc.getStructuringElement(Imgproc.MORPH_CROSS, sSize3);

    // start the timing counter to put the framerate on screen
    // and make sure the start time is up to date, do
    // a reset every 10 seconds
    if (lMilliStart == 0)
        lMilliStart = System.currentTimeMillis();

    if ((lMilliNow - lMilliStart) > 10000) {
        lMilliStart = System.currentTimeMillis();
        lFrameCount = 0;
    }

    mRgba = inputFrame.rgba();

    sMatSize.width = mRgba.width();
    sMatSize.height = mRgba.height();

    switch (viewMode) {

        case VIEW_MODE_RGBA:
            // Plain pass-through preview.
            if (bDisplayTitle)
                ShowTitle("BGR Preview", 1, colorGreen);

            break;


        case VIEW_MODE_CANNY:
            // Grayscale -> blur -> Canny, then back to 4-channel for display.
            Imgproc.cvtColor(mRgba, mGray, Imgproc.COLOR_RGBA2GRAY);

            // doing a gaussian blur prevents getting a lot of false hits
            Imgproc.GaussianBlur(mGray, mGray, sSize5, 2, 2);

            iCannyLowerThreshold = 35;
            iCannyUpperThreshold = 75;

            Imgproc.Canny(mGray, mIntermediateMat, iCannyLowerThreshold, iCannyUpperThreshold);

            Imgproc.cvtColor(mIntermediateMat, mRgba, Imgproc.COLOR_GRAY2BGRA, 4);

            if (bDisplayTitle)
                ShowTitle("Canny Edges", 1, colorGreen);

            break;

        case VIEW_MODE_HOUGHCIRCLES:
            // Grayscale + blur, then Hough circle transform; draws at most
            // the first 10 circles found.
            Imgproc.cvtColor(mRgba, mGray, Imgproc.COLOR_RGBA2GRAY);

            // doing a gaussian blur prevents getting a lot of false hits
            Imgproc.GaussianBlur(mGray, mGray, sSize5, 2, 2);

            // the lower this figure the more spurious circles you get
            // 50 looks good in CANNY, but 100 is better when converting that into Hough circles
            iCannyUpperThreshold = 100;

            Imgproc.HoughCircles(mGray, mIntermediateMat, Imgproc.CV_HOUGH_GRADIENT, 2.0, mGray.rows() / 8,
                    iCannyUpperThreshold, iAccumulator, iMinRadius, iMaxRadius);

            // Each result column is one circle: {centre x, centre y, radius}.
            if (mIntermediateMat.cols() > 0)
                for (int x = 0; x < Math.min(mIntermediateMat.cols(), 10); x++) {
                    double vCircle[] = mIntermediateMat.get(0, x);

                    if (vCircle == null)
                        break;

                    pt.x = Math.round(vCircle[0]);
                    pt.y = Math.round(vCircle[1]);
                    radius = (int) Math.round(vCircle[2]);
                    // draw the found circle
                    Imgproc.circle(mRgba, pt, radius, colorRed, iLineThickness);

                    // draw a cross on the centre of the circle
                    DrawCross(mRgba, colorRed, pt);
                }

            if (bDisplayTitle)
                ShowTitle("Hough Circles", 1, colorGreen);

            break;

        case VIEW_MODE_HOUGHLINES:
            // Grayscale -> blur -> Canny -> probabilistic Hough; draws at
            // most the first 40 segments found.
            Imgproc.cvtColor(mRgba, mGray, Imgproc.COLOR_RGBA2GRAY);

            // doing a gaussian blur prevents getting a lot of false hits
            Imgproc.GaussianBlur(mGray, mGray, sSize5, 2, 2);


            // the lower this figure the more spurious circles you get
            // 50 upper looks good in CANNY, but 75 is better when converting that into Hough circles
            iCannyLowerThreshold = 45;
            iCannyUpperThreshold = 75;

            Imgproc.Canny(mGray, mGray, iCannyLowerThreshold, iCannyUpperThreshold);

            Imgproc.HoughLinesP(mGray, lines, 1, Math.PI / 180, iHoughLinesThreshold, iHoughLinesMinLineSize, iHoughLinesGap);

            for (int x = 0; x < Math.min(lines.cols(), 40); x++) {
                // One column = one segment: {x1, y1, x2, y2}.
                vecHoughLines = lines.get(0, x);

                if (vecHoughLines.length == 0)
                    break;

                x1 = vecHoughLines[0];
                y1 = vecHoughLines[1];
                x2 = vecHoughLines[2];
                y2 = vecHoughLines[3];

                pt1.x = x1;
                pt1.y = y1;
                pt2.x = x2;
                pt2.y = y2;

                Imgproc.line(mRgba, pt1, pt2, colorRed, 3);
            }

            if (bDisplayTitle)
                ShowTitle("Hough Lines", 1, colorGreen);

            break;

        case VIEW_MODE_COLCONTOUR:

            // Convert the image into an HSV image

            Imgproc.cvtColor(mRgba, mHSVMat, Imgproc.COLOR_RGB2HSV, 3);

            // Keep pixels within ±10 hue of the tracked colour.
            Core.inRange(mHSVMat, new Scalar(byteColourTrackCentreHue[0] - 10, 100, 100),
                    new Scalar(byteColourTrackCentreHue[0] + 10, 255, 255), mHSVMat);

            // Here i'm only using the external contours and by
            // eroding we make the draw a teeny bit faster and the result a lot smoother
            // on the rough edges where the colour fades out of range by losing a lot
            // of the little spiky corners.

            Imgproc.erode(mHSVMat, mHSVMat, mErodeKernel);
            contours.clear();

            Imgproc.findContours(mHSVMat, contours, mContours, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);

            for (x = 0; x < contours.size(); x++) {
                d = Imgproc.contourArea(contours.get(x));

                // get an approximation of the contour (last but one param is the min required
                // distance between the real points and the new approximation (in pixels)

                // contours is a List<MatOfPoint>
                // so contours.get(x) is a single MatOfPoint
                // but to use approxPolyDP we need to pass a MatOfPoint2f
                // so we need to do a conversion

                contours.get(x).convertTo(mMOP2f1, CvType.CV_32FC2);

                // Only draw contours bigger than the configured minimum area.
                if (d > iContourAreaMin) {

                    Imgproc.approxPolyDP(mMOP2f1, mMOP2f2, 2, true);

                    // convert back to MatOfPoint and put it back in the list
                    mMOP2f2.convertTo(contours.get(x), CvType.CV_32S);

                    // draw the contour itself
                    Imgproc.drawContours(mRgba, contours, x, colorRed, iLineThickness);

                }
            }

            if (bDisplayTitle)
                ShowTitle("Colour Contours", 1, colorGreen);

            break;


        case VIEW_MODE_YELLOW_QUAD_DETECT:
            // Like COLCONTOUR, but a coarser polygon approximation (eps 15)
            // and only quadrilaterals (4-row contours) are drawn, with their
            // diagonals.

            // Convert the image into an HSV image

            Imgproc.cvtColor(mRgba, mHSVMat, Imgproc.COLOR_RGB2HSV, 3);

            Core.inRange(mHSVMat, new Scalar(byteColourTrackCentreHue[0] - 12, 100, 100),
                    new Scalar(byteColourTrackCentreHue[0] + 12, 255, 255), mHSVMat);

            contours.clear();

            Imgproc.findContours(mHSVMat, contours, mContours, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);

            for (x = 0; x < contours.size(); x++) {
                d = Imgproc.contourArea(contours.get(x));

                if (d > iContourAreaMin) {
                    // get an approximation of the contour (last but one param is the min required
                    // distance between the real points and the new approximation (in pixels)

                    contours.get(x).convertTo(mMOP2f1, CvType.CV_32FC2);

                    Imgproc.approxPolyDP(mMOP2f1, mMOP2f2, 15, true);

                    // convert back to MatOfPoint and put it back in the list
                    mMOP2f2.convertTo(contours.get(x), CvType.CV_32S);

                    if (contours.get(x).rows() == 4) {

                        Converters.Mat_to_vector_Point2f(contours.get(x), pts);

                        Imgproc.drawContours(mRgba, contours, x, colorRed, iLineThickness);

                        // Draw the two diagonals of the quadrilateral.
                        Imgproc.line(mRgba, pts.get(0), pts.get(2), colorRed, iLineThickness);
                        Imgproc.line(mRgba, pts.get(1), pts.get(3), colorRed, iLineThickness);
                    }
                }
            }

            if (bDisplayTitle)
                ShowTitle("Colour quadrilateral", 1, colorGreen);

            break;


        case VIEW_MODE_FACEDETECT:
            // Haar-cascade face detection; the biggest face is shown as a
            // 100x100 thumbnail in the top-left corner and saved to disk
            // when its histogram differs enough from the previous face.

            // Convert the image into a gray image

            Imgproc.cvtColor(mRgba, mGray, Imgproc.COLOR_RGBA2GRAY);

            // Destination rect for the thumbnail overlay.
            rDest.x = 5;
            rDest.y = 5;
            rDest.width = 100;
            rDest.height = 100;

            mFaceDest = mRgba.submat(rDest);
            iMaxFaceHeight = 0;
            iMaxFaceHeightIndex = -1;

            // mCascade is null if the classifier failed to load.
            if (mCascade != null) {
                int height = mGray.rows();
                // Minimum face size: a quarter of the frame height.
                double faceSize = (double) height * 0.25;

                sSize.width = faceSize;
                sSize.height = faceSize;

                mCascade.detectMultiScale(mGray, faces, 1.1, 2, 2, sSize, new Size());

                Rect[] facesArray = faces.toArray();

                for (int i = 0; i < facesArray.length; i++) {

                    // draw the rectangle itself
                    Imgproc.rectangle(mRgba, facesArray[i].tl(), facesArray[i].br(), colorRed, 3);
                    // Track the tallest face for the thumbnail/save logic.
                    if (iMaxFaceHeight < facesArray[i].height) {
                        iMaxFaceHeight = facesArray[i].height;
                        iMaxFaceHeightIndex = i;
                    }
                }

                // now save the biggest face to a file
                if (iMaxFaceHeight > 0) {
                    // we have at least one face
                    rect = facesArray[iMaxFaceHeightIndex];

                    // get the submat of the rect containing the face
                    mROIMat = mRgba.submat(rect);

                    // The very first face is always saved.
                    if (bFirstFaceSaved == false) {
                        SaveImage(mROIMat);
                        bFirstFaceSaved = true;
                    }

                    // resize it to the dest rect size (100x100)
                    sSize.width = 100;
                    sSize.height = 100;
                    Imgproc.resize(mROIMat, mFaceResized, sSize);
                    // copy it to dest rect in main image

                    mFaceResized.copyTo(mFaceDest);

                    // compare the histogram of this face with the histogram
                    // of the previous face. If they are a good match (ie
                    // return val of comparison is > 0.9) then they are probably
                    // the same face. We already saved this face before, so
                    // don't save it again. If the previous version was blurry
                    // and this one is sharp, the coefficient of similarness
                    // is around 0.7 to 0.85 so the the most recent face will
                    // be saved this means if some pics of this face are blurry
                    // and some are sharp, we save both, so one result should be
                    // sharp.

                    matFaceHistogramThis.copyTo(matFaceHistogramPrevious);

                    matFaceHistogramThis = getHistogram(mFaceDest);

                    // this test makes sure we don't do a compare on the first face found
                    // because the width of the previous face will be zero
                    if (matFaceHistogramThis.width() == matFaceHistogramPrevious.width()) {

                        d = Imgproc.compareHist(matFaceHistogramThis, matFaceHistogramPrevious, Imgproc.CV_COMP_CORREL);
                        // Correlation below 0.95 => treat as a new/changed face.
                        if (d < 0.95) {
                            SaveImage(mROIMat);
                        }
                    }
                }
            }

            if (bDisplayTitle)
                ShowTitle("Face Detection", 1, colorGreen);

            break;

        case VIEW_MODE_GFTT:
            // Shi-Tomasi "good features to track": circle each corner found.

            Imgproc.cvtColor(mRgba, mGray, Imgproc.COLOR_RGBA2GRAY);

            // DON'T do a gaussian blur here, it makes the results poorer and
            // takes 0.5 off the fps rate

            Imgproc.goodFeaturesToTrack(mGray, MOPcorners, iGFFTMax, 0.01, 20);

            y = MOPcorners.rows();

            corners = MOPcorners.toList();

            for (int x = 0; x < y; x++)
                Imgproc.circle(mRgba, corners.get(x), 6, colorRed, iLineThickness - 1);
            //DrawCross (mRgba, colorRed, corners.get(x));


            break;


        case VIEW_MODE_OPFLOW:
            // Lucas-Kanade optical flow, plus k-means segmentation via
            // getSlipImage (its returned clusters are currently discarded).
            getSlipImage(mRgba, new Size(100, 100), 3);

            // NOTE(review): "prev" is a copy of the CURRENT frame, so flow is
            // computed between two identical images — confirm this is the
            // intended behaviour (the drawing calls below are commented out).
            Imgproc.cvtColor(mRgba, matOpFlowThis, Imgproc.COLOR_RGBA2GRAY);
            matOpFlowThis.copyTo(matOpFlowPrev);
            Imgproc.goodFeaturesToTrack(matOpFlowPrev, MOPcorners, iGFFTMax, 0.5, 20);
            mMOP2fptsPrev.fromArray(MOPcorners.toArray());
            mMOP2fptsPrev.copyTo(mMOP2fptsSafe);

            // Hand the frame to the external Vista library as well.
            vista.cameraFrame(mRgba, 90f, 10f, true);
            /*
            Parameters:
                prevImg   first 8-bit input image
                nextImg   second input image
                prevPts   vector of 2D points for which the flow needs to be found;
                          point coordinates must be single-precision floating-point numbers.
                nextPts   output vector of 2D points (with single-precision floating-point
                          coordinates) containing the calculated new positions of input
                          features in the second image.
                status    output status vector (of unsigned chars); each element is set to 1
                          if the flow for the corresponding feature has been found, else 0.
                err       output vector of errors for each feature.
            */
            Video.calcOpticalFlowPyrLK(matOpFlowPrev, matOpFlowThis, mMOP2fptsPrev, mMOP2fptsThis, mMOBStatus, mMOFerr);

            cornersPrev = mMOP2fptsPrev.toList();
            cornersThis = mMOP2fptsThis.toList();
            byteStatus = mMOBStatus.toList();

            x = byteStatus.size() - 1;

            // Only the first tracked feature (y == 0) is examined, and only
            // when its y-coordinate moved less than 3px since last frame.
            if (Math.abs(pt.y - lastValue) < 3)
                for (y = 0; y < 1; y++) {
                    if (byteStatus.get(y) == 1) {

                        pt = cornersThis.get(y);
                        pt2 = cornersPrev.get(y);

                        // NOTE(review): x-coords are forced to 0 / 15000,
                        // presumably to stretch the (commented-out) debug line
                        // across the screen — confirm intent.
                        pt.x = 0.0;
                        pt2.x = 15000;

                        //Imgproc.circle(mRgba, pt, 5, colorRed, iLineThickness - 1);

                        lastValue = (int) pt.y;

                        // Imgproc.line(mRgba, pt, pt2, colorRed, iLineThickness);
                    }
                }

            break;
    }

    // get the time now in every frame
    lMilliNow = System.currentTimeMillis();

    // update the frame counter
    lFrameCount++;

    if (bDisplayTitle) {
        string = String.format("FPS: %2.1f", (float) (lFrameCount * 1000) / (float) (lMilliNow - lMilliStart));

        ShowTitle(string, 2, colorGreen);
    }

    if (bShootNow) {
        // get the time of the attempt to save a screenshot
        lMilliShotTime = System.currentTimeMillis();
        bShootNow = false;

        // try it, and set the screen text accordingly.
        // this text is shown at the end of each frame until
        // 1.5 seconds has elapsed
        if (SaveImage(mRgba)) {
            sShotText = "SCREENSHOT SAVED";
        } else {
            sShotText = "SCREENSHOT FAILED";
        }
    }

    return mRgba;
}
  868.  
  869. public boolean onTouchEvent(final MotionEvent event) {
  870.  
  871. bShootNow = true;
  872. return false; // don't need more than one touch event
  873.  
  874. }
  875.  
  876. public void DrawCross(Mat mat, Scalar color, Point pt) {
  877. int iCentreCrossWidth = 24;
  878.  
  879. pt1.x = pt.x - (iCentreCrossWidth >> 1);
  880. pt1.y = pt.y;
  881. pt2.x = pt.x + (iCentreCrossWidth >> 1);
  882. pt2.y = pt.y;
  883.  
  884. Imgproc.line(mat, pt1, pt2, color, iLineThickness - 1);
  885.  
  886.  
  887. pt1.x = pt.x;
  888. pt1.y = pt.y + (iCentreCrossWidth >> 1);
  889. pt2.x = pt.x;
  890. pt2.y = pt.y - (iCentreCrossWidth >> 1);
  891.  
  892. Imgproc.line(mat, pt1, pt2, color, iLineThickness - 1);
  893.  
  894. }
  895.  
public Mat getHistogram(Mat mat) {
    // 25-bin histogram of channel 0 over [0, 256): MOIone selects the
    // channel, histSize = 25 bins, MOFrange = {0, 256}.
    Imgproc.calcHist(Arrays.asList(mat), MOIone, new Mat(), mHist, histSize, MOFrange);

    // Normalise so histograms of differently-sized patches are comparable.
    Core.normalize(mHist, mHist);

    // NOTE: returns the shared mHist field, not a copy — callers must copy
    // the result (as the face-detect path does) before the next call
    // overwrites it.
    return mHist;
}
  903.  
  904. @SuppressLint("SimpleDateFormat")
  905. public boolean SaveImage(Mat mat) {
  906.  
  907. Imgproc.cvtColor(mat, mIntermediateMat, Imgproc.COLOR_RGBA2BGR, 3);
  908.  
  909. File path = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_PICTURES);
  910.  
  911. String filename = "OpenCV_";
  912. SimpleDateFormat fmt = new SimpleDateFormat("yyyy-MM-dd_HH-mm-ss");
  913. Date date = new Date(System.currentTimeMillis());
  914. String dateString = fmt.format(date);
  915. filename += dateString + "-" + iFileOrdinal;
  916. filename += ".png";
  917.  
  918. File file = new File(path, filename);
  919.  
  920. Boolean bool = null;
  921. filename = file.toString();
  922. bool = Imgcodecs.imwrite(filename, mIntermediateMat);
  923.  
  924. return bool;
  925.  
  926. }
  927.  
// Overlays one line of text on the current frame; iLineNum (1-based)
// selects the row, scaled by the density-derived dTextScaleFactor.
private void ShowTitle(String s, int iLineNum, Scalar color) {
    Imgproc.putText(mRgba, s, new Point(10, (int) (dTextScaleFactor * 60 * iLineNum)),
            Core.FONT_HERSHEY_SIMPLEX, dTextScaleFactor, color, 2);
}
  932.  
@Override
public void onSensorChanged(SensorEvent sensorEvent) {
    // Forward raw sensor events to the external Vista helper library.
    vista.sensorChanged(sensorEvent);
}
  937.  
@Override
public void onAccuracyChanged(Sensor sensor, int i) {
    // Sensor accuracy changes are intentionally ignored.
}
  941.  
  942. public List<Mat> getSlipImage(Mat mat, Size size, int k) {
  943. // Resize
  944. Mat matResize = new Mat();
  945. Imgproc.resize(mat, matResize, size);
  946.  
  947. // Kmeans
  948. // Traitement
  949. Imgproc.cvtColor(matResize, matResize, Imgproc.COLOR_BGR2RGB, 3);
  950. Imgproc.cvtColor(matResize, matResize, Imgproc.COLOR_RGB2HSV, 3);
  951. Imgproc.dilate(matResize, matResize, new Mat());
  952.  
  953. // Calcul du kmeans
  954. Mat matResize32f = new Mat();
  955. Mat matResize32fTmp = matResize.reshape(1, matResize.rows() * matResize.cols());
  956. Mat labels = new Mat();
  957. Mat centers = new Mat();
  958.  
  959. matResize32fTmp.convertTo(matResize32f, CvType.CV_32F, 1.0 / 255.0);
  960. Core.kmeans(matResize32f, k, labels, new TermCriteria(TermCriteria.COUNT, 100, 1), 1, Core.KMEANS_PP_CENTERS, centers);
  961. centers.convertTo(centers, CvType.CV_8UC1, 255.0);
  962. centers.reshape(3);
  963.  
  964. List<Mat> clusters = new ArrayList<>();
  965. for (int i = 0; i < centers.rows(); i++) {
  966. clusters.add(Mat.zeros(matResize.size(), matResize.type()));
  967. }
  968.  
  969. Map<Integer, Integer> counts = new HashMap<>();
  970. for (int i = 0; i < centers.rows(); i++)
  971. counts.put(i, 0);
  972.  
  973. int rows = 0;
  974. for (int y = 0; y < matResize.rows(); y++) {
  975. for (int x = 0; x < matResize.cols(); x++) {
  976. int label = (int) labels.get(rows++, 0)[0];
  977. counts.put(label, counts.get(label + 1));
  978. clusters.get(label).put(y, x, (int) centers.get(label, 0)[0], (int) centers.get(label, 1)[0], (int) centers.get(label, 2)[0]);
  979. }
  980. }
  981.  
  982. return clusters;
  983. }
  984. }
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement