Advertisement
Guest User

baaaanananana

a guest
Jun 6th, 2019
102
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
  1. //
  2. //  AppViewController.m
  3. //  TATATA
  4. //
  5. //  Created by Mikolajczak Piotr on 19.10.2017.
  6. //  Copyright © 2017 Mikolajczak Piotr. All rights reserved.
  7. //
  8. #import <opencv2/opencv.hpp>
  9. #import "AppViewController.h"
  10. #import <opencv2/imgcodecs/ios.h>
  11. #include <iostream>
  12. #include "opencv2/opencv.hpp"
  13. #include <opencv2/imgproc/imgproc.hpp>
  14. #include <opencv2/highgui/highgui.hpp>
  15. #include <opencv2/objdetect/objdetect.hpp>
  16. #import <TesseractOCR/TesseractOCR.h>
  17. #include <sstream>
  18.  
// Define globals
int Sens = 35;   // Detector sensitivity; updated from the UI slider in sliderChange:.
int Count = 0;   // Frame cooldown counter used by the (currently commented-out) sign modules.
int VW = 0;      // Mode flag toggled by vwValue: — 1 selects the VW-car module, 0 the sign modules.
NSString *SPEAKbuiltUpArea = @"BANANAS";        // NOTE(review): looks like placeholder text; spoken when a built-up-area sign is detected — confirm intended phrase.
NSString *SPEAKCity = @"Wjechałeś do miasta.";  // Polish: "You have entered a town."
// NOTE(review): message sends in file-scope initializers require C++ dynamic
// initialization, i.e. this file must be compiled as Objective-C++ (.mm);
// a plain .m would reject these as non-constant initializers.
AVSpeechSynthesizer *synthesizer = [[AVSpeechSynthesizer alloc]init];
AVSpeechSynthesizer *synthesizer2 = [[AVSpeechSynthesizer alloc]init];
AVSpeechUtterance *speech = [AVSpeechUtterance speechUtteranceWithString:SPEAKbuiltUpArea];
AVSpeechUtterance *speech2 = [AVSpeechUtterance speechUtteranceWithString:SPEAKCity];
  29.  
  30.  
// Private class extension (no private properties or methods declared yet).
@interface AppViewController ()

@end
  34.  
@implementation AppViewController
// Explicit @synthesize for the camera and prompt label — presumably declared
// as properties in AppViewController.h (not visible here); verify.
@synthesize videoCamera, promp;
  37.  
  38. - (void)viewDidLoad {
  39.     [super viewDidLoad];
  40.  
  41.    
  42.     //Prepare the camera.
  43.     videoCamera = [[CvVideoCamera alloc] initWithParentView: imageView];
  44.     videoCamera.defaultAVCaptureDevicePosition = AVCaptureDevicePositionBack;
  45.     videoCamera.defaultAVCaptureSessionPreset = AVCaptureSessionPreset640x480;
  46.     videoCamera.defaultAVCaptureVideoOrientation = AVCaptureVideoOrientationPortrait;
  47.     videoCamera.defaultFPS = 30;
  48.     videoCamera.grayscaleMode = NO;
  49.     videoCamera.delegate = self;
  50.    
  51.    
  52.     //Load the CASCADE file
  53.    
  54.     NSString *faceCascadePath = [[NSBundle mainBundle] pathForResource:@"haarcascade_frontalface_alt" ofType:@"xml"];
  55.     const CFIndex CASCADE_NAME_LEN = 2048;
  56.     char *CASCADE_NAME = (char*) malloc(CASCADE_NAME_LEN);
  57.     CFStringGetFileSystemRepresentation((CFStringRef)faceCascadePath, CASCADE_NAME, CASCADE_NAME_LEN);
  58.     if(!face_cascade.load(CASCADE_NAME)) {
  59.         cout << " Unable to load face detector" << endl;
  60.         exit(-1);
  61.     };
  62.    
  63.     NSString *eyeCascadePath = [[NSBundle mainBundle] pathForResource:@"haarcascade_eye_tree_eyeglasses" ofType:@"xml"];
  64.     CFStringGetFileSystemRepresentation((CFStringRef)eyeCascadePath, CASCADE_NAME, CASCADE_NAME_LEN);
  65.     if(!eye_cascade.load(CASCADE_NAME)) {
  66.         cout << " Unable to load the eye detector" << endl;
  67.         exit(-1);
  68.     };
  69.    
  70.     NSString *bananaCascadePath = [[NSBundle mainBundle] pathForResource:@"banana_classifier" ofType:@"xml"];
  71.     CFStringGetFileSystemRepresentation((CFStringRef)bananaCascadePath, CASCADE_NAME, CASCADE_NAME_LEN);
  72.     if(!banana_cascade.load(CASCADE_NAME)) {
  73.         cout << " Unable to load the eye detector" << endl;
  74.         exit(-1);
  75.     };
  76.    
  77.    
  78.    
  79.     // Define the text2Speech
  80.    
  81.     [speech setRate:0.5f];
  82.     speech.voice = [AVSpeechSynthesisVoice voiceWithLanguage:@"pl-PL"];
  83.     speech.volume= 0.5;
  84.     [synthesizer speakUtterance:speech];
  85.     [synthesizer stopSpeakingAtBoundary:AVSpeechBoundaryImmediate];
  86.     [speech2 setRate:0.5f];
  87.     speech2.voice = [AVSpeechSynthesisVoice voiceWithLanguage:@"pl-PL"];
  88.     speech2.volume= 0.5;
  89.     [synthesizer2 speakUtterance:speech];
  90.     [synthesizer2 stopSpeakingAtBoundary:AVSpeechBoundaryImmediate];
  91.    
  92.    
  93.     // Define VW SOUND
  94.    
  95.     NSString *path = [NSString stringWithFormat:@"%@/anita.mp3", [[NSBundle mainBundle] resourcePath]];
  96.     NSURL *soundUrl = [NSURL fileURLWithPath:path];
  97.    
  98.    
  99.    
  100.     // Create audio player object and initialize with URL to sound
  101.     _audioPlayer = [[AVAudioPlayer alloc] initWithContentsOfURL:soundUrl error:nil];
  102.     [[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryPlayAndRecord error:nil];
  103.     NSError *error;
  104.     [[AVAudioSession sharedInstance] overrideOutputAudioPort:AVAudioSessionPortOverrideSpeaker
  105.                                                        error:&error];
  106.    
  107. }
  108.  
  109.  
  110.  
  111. -(void)viewDidAppear:(BOOL)animated
  112. {
  113.     [super viewDidAppear: animated];
  114.    
  115.     [videoCamera start];
  116.     imageView.hidden = YES;
  117.    
  118. }
  119.  
  120.  
  121. - (IBAction)vwValue:(id)sender {
  122.     switch(self.vwSwitch.selectedSegmentIndex){
  123.         case 0 :
  124.             VW=1;
  125.             break;
  126.         case 1 :  
  127.             VW=0;
  128.             break;
  129.         default :
  130.             break;
  131.     }
  132. }
  133.  
  134. - (IBAction)SegmentValue:(id)sender {
  135.     switch(self.SegmentSwitch.selectedSegmentIndex){
  136.         case 0 :
  137.                     imageView.hidden = YES;
  138.                     promp.text=@"You turned the camera off.";
  139.             break;
  140.         case 1 :   imageView.hidden = NO;
  141.                     promp.text=@"You turned the camera on.";
  142.             break;
  143.         default :
  144.             break;
  145.     }
  146.    
  147. }
  148.  
  149.  
  150. - (IBAction)sliderChange:(id)sender {
  151.     UISlider *slider =(UISlider *) sender;
  152.     float newValue = [ slider value ];
  153.     Sens = (int)(newValue);
  154.    
  155. }
  156.  
  157.  
  158.  
// Standard UIKit memory-warning hook; nothing extra is released here.
- (void)didReceiveMemoryWarning {
    [super didReceiveMemoryWarning];

}
  163.  
  164. #pragma mark - Protocol CvVideoCameraDelegate
  165.  
  166. #ifdef __cplusplus
  167.  
  168.  
  169. double angle( cv::Point pt1, cv::Point pt2, cv::Point pt0 ) {
  170.     double dx1 = pt1.x - pt0.x;
  171.     double dy1 = pt1.y - pt0.y;
  172.     double dx2 = pt2.x - pt0.x;
  173.     double dy2 = pt2.y - pt0.y;
  174.     return (dx1*dx2 + dy1*dy2)/sqrt((dx1*dx1 + dy1*dy1)*(dx2*dx2 + dy2*dy2) + 1e-10);
  175. }
  176.  
// Shared OCR engine used by the (commented-out) town-sign module to read
// Polish town names. NOTE(review): message send in a file-scope initializer
// — requires Objective-C++ compilation, same as the globals above.
G8Tesseract *tesseract = [[G8Tesseract alloc] initWithLanguage:@"pol"];

// CvVideoCameraDelegate callback — called once per camera frame with a
// mutable frame that is drawn into in place.
//
// Live path: convert to grayscale, run the face cascade, then — if the
// `bananas` rectangle vector already holds entries — re-check each stored
// rectangle with the banana cascade, outline it in blue, and play the alert
// sound. All remaining modules (face/eye overlay, VW-car detector,
// built-up-area sign detector, town-sign OCR via Tesseract) are disabled
// inside the /* ... */ blocks below.
- (void)processImage:(Mat&)image;
{
    Mat gray;
    // The camera delivers RGBA frames; the cascades expect one channel.
    cvtColor(image, gray, CV_RGBA2GRAY);
    // NOTE(review): `faces` is filled here but never read on the live path —
    // only the commented-out overlay code below consumes it.
    face_cascade.detectMultiScale(gray, faces, 1.1, 2, 0 | CV_HAAR_SCALE_IMAGE, cv::Size(50, 50));

    // NOTE(review): `bananas` is tested before anything fills it this frame,
    // so this loop iterates over results left from an earlier frame
    // (presumably an ivar declared in the header — confirm). The
    // detectMultiScale call in the loop body also REWRITES `bananas` while
    // `bananas[i]` and the loop bound are still in use, which can invalidate
    // the current rectangle and change the iteration count mid-loop — likely
    // a logic/out-of-range bug to verify.
    if(bananas.size() > 0){
        for(int i=0; i<bananas.size(); i++){
            Mat checkBananas = gray(bananas[i]);
            banana_cascade.detectMultiScale(checkBananas, bananas);
            rectangle(image, bananas[i], BLUE);



            /*
                    if(!synthesizer2.isSpeaking and !synthesizer.isSpeaking) {
                        dispatch_async(dispatch_get_main_queue(), ^{
                            //  [synthesizer speakUtterance:speech];
                            promp.text=@"\banana detected!";
                        });
                       */
                        // Play the alert once: only when neither synthesizer is
                        // speaking and exactly one banana rectangle is pending.
                        if(!synthesizer.isSpeaking and !synthesizer2.isSpeaking and bananas.size() == 1){
                            dispatch_async(dispatch_get_main_queue(), ^{
                                [_audioPlayer play];
                            });


                    }

                }

            }




          /*  dispatch_async(dispatch_get_main_queue(), ^{
                promp.text=@"\nNo face!";
            });

            if(eyes.size() > 0) {
                for(int i=0; i<eyes.size(); i++){
                    cv::Point center(faces[i].x + eyes[i].x + eyes[i].width/2, faces[i].y + eyes[i].y + eyes[i].height/2);
                    int radius = cvRound((eyes[i].width + eyes[i].height)/4);
                    cv::circle(image, center, radius, GREEN);
                    if(!synthesizer2.isSpeaking and !synthesizer.isSpeaking) {
                    dispatch_async(dispatch_get_main_queue(), ^{
                      //  [synthesizer speakUtterance:speech];
                        promp.text=@"\banana detected!";
                    });
                    }

                }

            }
        }

    }

           */
   /*

    ////////////////////////////   VW CARS MODULE  \\\\\\\\\\\\\\\\\\\\\\\\\\\\

    if(VW)
    {

    // Here we set and configure the processing area.


    Mat ProcessImageVW;
    image.copyTo(ProcessImageVW);
    cvtColor(ProcessImageVW, ProcessImageVW, CV_BGR2GRAY);

    VW_cascade.detectMultiScale(ProcessImageVW, VWs, 2, 10);

    for(int i = 0; i <VWs.size(); i++) {

        cv::Point a (VWs[i].x, VWs[i].y );
        cv::Point b(VWs[i].x + VWs[i].width, VWs[i].y + VWs[i].height);
        rectangle(image, a,  b, LIMETE, 3.5);
        putText(image, "VW CAR", cv::Point(a.x, a.y-5), FONT_HERSHEY_SIMPLEX, 0.65, LIMETE, 1, CV_AA);
        if(!synthesizer.isSpeaking and !synthesizer2.isSpeaking and VWs.size() == 1 and Count == 0){
            dispatch_async(dispatch_get_main_queue(), ^{
                [_audioPlayer play];
            });
        }

    }

    }


    if (!VW)

    {
    ////////////////////////////   BUILT-UP AREA SIGN MODULE  \\\\\\\\\\\\\\\\\\\\\\\\\\\\


    // The resolution is 640x480, here we set and configure the processing area.
    cv::Point x (240,100);
    cv::Point y (480,540);
    Mat ProcessImage;
    image(cv::Rect(x,y)).copyTo(ProcessImage);
    cvtColor(ProcessImage, ProcessImage, CV_BGR2GRAY);


    sign_cascade.detectMultiScale(ProcessImage, signs, 2, 4);


    for(int i = 0; i <signs.size(); i++) {

        cv::Point a (signs[i].x+240, signs[i].y+100 );
        cv::Point b(signs[i].x+240 + signs[i].width, signs[i].y+100 + signs[i].height);
        rectangle(image, a,  b, RED, 3);
        putText(image, "BUILT-UP AREA", cv::Point(a.x, a.y-5), FONT_HERSHEY_SIMPLEX, 0.8, RED, 1, CV_AA);

        if(!synthesizer.isSpeaking and !synthesizer2.isSpeaking and signs.size() == 1 and Count == 0){
        dispatch_async(dispatch_get_main_queue(), ^{
            [synthesizer speakUtterance:speech];
            promp.text=@"\nWjechałeś do terenu zabudowanego";
        });
        }

    }









    //////////////////////////// TOWN SIGN MODULE \\\\\\\\\\\\\\\\\\\\\\\\\\\\


    Mat image2;
    image(cv::Rect(x,y)).copyTo(image2);
    Mat mHSV;
    Mat mHSVThreshed;

    cvtColor(image2, mHSV, COLOR_BGR2HSV,3);
    cv::inRange(mHSV, Scalar(43, 69, 31), Scalar(102, 255, 127), mHSVThreshed);


    cvtColor(mHSVThreshed, image2, COLOR_GRAY2BGR, 0);


    std::vector<std::vector<cv::Point> > squares;
    cv::Mat pyr, timg, gray0(image2.size(), CV_8U), gray;
    int thresh = 50, N = 11;
    cv::pyrDown(image2, pyr, cv::Size(image2.cols/2, image2.rows/2));
    cv::pyrUp(pyr, timg, image2.size());
    std::vector<std::vector<cv::Point> > contours;
    for( int c = 0; c < 3; c++ ) {
        int ch[] = {c, 0};
        mixChannels(&timg, 1, &gray0, 1, ch, 1);
        for( int l = 0; l < N; l++ ) {
            if( l == 0 ) {
                cv::Canny(gray0, gray, 0, thresh, 5);
                cv::dilate(gray, gray, cv::Mat(), cv::Point(-1,-1));
            }
            else {
                gray = gray0 >= (l+1)*255/N;
            }
            cv::findContours(gray, contours, CV_RETR_LIST, CV_CHAIN_APPROX_SIMPLE);
            std::vector<cv::Point> approx;
            for( size_t i = 0; i < contours.size(); i++ )
            {
                cv::approxPolyDP(cv::Mat(contours[i]), approx, arcLength(cv::Mat(contours[i]), true)*0.02, true);
                if( approx.size() == 4 && fabs(contourArea(cv::Mat(approx))) > 1000 && cv::isContourConvex(cv::Mat(approx))) {
                    double maxCosine = 0;

                    for( int j = 2; j < 5; j++ )
                    {
                        double cosine = fabs(angle(approx[j%4], approx[j-2], approx[j-1]));
                        maxCosine = MAX(maxCosine, cosine);
                    }

                    if( maxCosine < 0.3 ) {

                        // Set the max size of Sign - Width max is 60 pix and max height is 230 pix.
                        int LT = 3;
                        int BR = 0;
                        int tmpMin = approx[0].x + approx[0].y;
                        int tmpMax = approx[0].x + approx[0].y;
                        for (int i = 0 ; i<4 ; i++) {
                            if (approx[i].x + approx[i].y >  approx[BR].x + approx[BR].y)
                                BR=i;
                            if ( approx[i].x + approx[i].y <  approx[LT].x + approx[LT].y)
                                LT=i;
                        }
                        if( ( ( approx[BR].x - approx[LT].x ) < 240 )  and ( ( approx[BR].y - approx[LT].y ) < 60 )  )
                        {

                        approx[0].x = approx[0].x + 240; approx[0].y = approx[0].y + 100;
                        approx[1].x = approx[1].x + 240; approx[1].y = approx[1].y + 100;
                        approx[2].x = approx[2].x + 240; approx[2].y = approx[2].y + 100;
                        approx[3].x = approx[3].x + 240; approx[3].y = approx[3].y + 100;
                        squares.push_back(approx);
                        }
                    }
                }
            }
        }
    }


    if (squares.size() > 0 and Count == 0) {
         Count=0;


        // Calculating the point of region. \\

        int a = squares.size() - 1 ;
        int min = 3;
        int max = 0;
        int tmpMin = squares[a][0].x + squares[a][0].y;
        int tmpMax = squares[a][0].x + squares[a][0].y;
        for (int i = 0 ; i<4 ; i++) {
            if ( squares[a][i].x + squares[a][i].y >  squares[a][max].x + squares[a][max].y)
                max=i;
            if ( squares[a][i].x + squares[a][i].y <  squares[a][min].x + squares[a][min].y)
                min=i;
        }

        cv::Point LT = squares[a][min];
        cv::Point RB = squares[a][max];
        LT.x+=10;
        LT.y+=10;
        RB.x-=10;
        RB.y-=10;


        Mat RecoImage;
        image(cv::Rect(LT,RB)).copyTo(RecoImage);


        // Count the white pixels in the sign, to be sure, thats a sign.

        int count_white=0;

        Mat grayImage;
        cvtColor(RecoImage, grayImage, CV_RGB2GRAY);

        // Then apply thresholding to make it binary.
        Mat binaryImage(grayImage.size(), grayImage.type());
        threshold(grayImage, grayImage, 128, 255, CV_THRESH_BINARY);

        for (int i = 0; i < binaryImage.rows; i++)
        {
            for (int j = 0; j < binaryImage.cols; j++)
            {
                int pixel = grayImage.at<uchar>(i,j);
                if (pixel==255)
                    count_white ++;
            }
        }

        //////////////////////////// TESSERACT MODULE \\\\\\\\\\\\\\\\\\\\\\\\\\\\


        if( count_white >= 0  and count_white < 1200 ) {

        tesseract.charWhitelist = @"qwertyuiopasdfghjklzxcvbnmQWERTYUIOPASDFGHJKLZXCVBNMęóąśłżźćńĘÓĄŚŁŻŹĆŃ";
        UIImage* templ= MatToUIImage(RecoImage);
        tesseract.delegate = self;
        tesseract.image = [templ g8_blackAndWhite];
        tesseract.maximumRecognitionTime = 60.0;

        [tesseract recognize];


        string city = string([tesseract.recognizedText UTF8String]);
        std::cout << "City to :" << city;

        ///The interface for detection

        LT.x-=15;
        LT.y-=15;
        RB.x+=15;
        RB.y+=15;

        rectangle(image, LT, RB, RED, 3);
        putText(image, city, cv::Point(LT.x, LT.y-5), FONT_HERSHEY_SIMPLEX, 0.8, RED, 1, CV_AA);


        dispatch_async(dispatch_get_main_queue(), ^{
                NSString *tmp = @"\nWjechałeś do miasta";
                promp.text=[NSString stringWithFormat:@"%@ %@", tmp, tesseract.recognizedText];
            });

        if(!synthesizer2.isSpeaking and !synthesizer.isSpeaking){

            dispatch_async(dispatch_get_main_queue(), ^{
                [synthesizer2 speakUtterance:speech2];
            });

          }
        }

    }
    if(Count>0)
    Count--;


    }
    */
}
  491. #endif
  492.  
  493.  
  494. #pragma mark - UI Actions
  495.  
  496.  
  497. @end
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement