Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
- //
- // AppViewController.m
- // TATATA
- //
- // Created by Mikolajczak Piotr on 19.10.2017.
- // Copyright © 2017 Mikolajczak Piotr. All rights reserved.
- //
- #import <opencv2/opencv.hpp>
- #import "AppViewController.h"
- #import <opencv2/imgcodecs/ios.h>
- #include <iostream>
- #include "opencv2/opencv.hpp"
- #include <opencv2/imgproc/imgproc.hpp>
- #include <opencv2/highgui/highgui.hpp>
- #include <opencv2/objdetect/objdetect.hpp>
- #import <TesseractOCR/TesseractOCR.h>
- #include <sstream>
// Define globals
// File-scope state shared between the UI action handlers and -processImage:.
// NOTE(review): non-constant global initializers (and initializing one global
// from another, e.g. `speech` from SPEAKbuiltUpArea) compile only because this
// file is Objective-C++; plain C / Objective-C would reject them.
int Sens = 35;  // detection sensitivity, updated by the slider (no live reader visible in this file — verify)
int Count = 0;  // cool-down counter used by the (commented-out) sign modules
int VW = 0;     // 1 = VW-car module selected, 0 = built-up-area/town-sign modules
NSString *SPEAKbuiltUpArea = @"BANANAS";        // text spoken by `speech`
NSString *SPEAKCity = @"Wjechałeś do miasta.";  // Polish: "You entered a town." — spoken by `speech2`
// Two synthesizers so the two announcements can be checked/queued independently.
AVSpeechSynthesizer *synthesizer = [[AVSpeechSynthesizer alloc]init];
AVSpeechSynthesizer *synthesizer2 = [[AVSpeechSynthesizer alloc]init];
// Pre-built utterances; rate/voice/volume are configured in -viewDidLoad.
// NOTE(review): the same AVSpeechUtterance instances are re-enqueued on every
// detection — confirm re-speaking an utterance is safe on the targeted iOS.
AVSpeechUtterance *speech = [AVSpeechUtterance speechUtteranceWithString:SPEAKbuiltUpArea];
AVSpeechUtterance *speech2 = [AVSpeechUtterance speechUtteranceWithString:SPEAKCity];
// Empty class extension — reserved for private declarations.
@interface AppViewController ()
@end

@implementation AppViewController
// NOTE(review): @synthesize is redundant under modern auto-synthesis unless
// these properties come from a protocol or need non-default ivar names.
@synthesize videoCamera, promp;
/// Sets up the OpenCV camera, loads the Haar cascade classifiers from the app
/// bundle, primes the two speech synthesizers, and prepares the alert sound.
/// Exits the process if any cascade file fails to load (original behavior).
- (void)viewDidLoad {
    [super viewDidLoad];

    // --- Camera ------------------------------------------------------------
    // Frames are delivered to -processImage: via CvVideoCameraDelegate.
    videoCamera = [[CvVideoCamera alloc] initWithParentView: imageView];
    videoCamera.defaultAVCaptureDevicePosition = AVCaptureDevicePositionBack;
    videoCamera.defaultAVCaptureSessionPreset = AVCaptureSessionPreset640x480;
    videoCamera.defaultAVCaptureVideoOrientation = AVCaptureVideoOrientationPortrait;
    videoCamera.defaultFPS = 30;
    videoCamera.grayscaleMode = NO;
    videoCamera.delegate = self;

    // --- Cascade classifiers -----------------------------------------------
    // One scratch buffer is reused to convert each bundle path to its
    // filesystem representation. BUGFIX: the original never freed this
    // malloc'd buffer; it is now released on every path out of this section.
    const CFIndex CASCADE_NAME_LEN = 2048;
    char *CASCADE_NAME = (char*) malloc(CASCADE_NAME_LEN);

    NSString *faceCascadePath = [[NSBundle mainBundle] pathForResource:@"haarcascade_frontalface_alt" ofType:@"xml"];
    CFStringGetFileSystemRepresentation((CFStringRef)faceCascadePath, CASCADE_NAME, CASCADE_NAME_LEN);
    if(!face_cascade.load(CASCADE_NAME)) {
        cout << " Unable to load face detector" << endl;
        free(CASCADE_NAME);
        exit(-1);
    }

    NSString *eyeCascadePath = [[NSBundle mainBundle] pathForResource:@"haarcascade_eye_tree_eyeglasses" ofType:@"xml"];
    CFStringGetFileSystemRepresentation((CFStringRef)eyeCascadePath, CASCADE_NAME, CASCADE_NAME_LEN);
    if(!eye_cascade.load(CASCADE_NAME)) {
        cout << " Unable to load the eye detector" << endl;
        free(CASCADE_NAME);
        exit(-1);
    }

    NSString *bananaCascadePath = [[NSBundle mainBundle] pathForResource:@"banana_classifier" ofType:@"xml"];
    CFStringGetFileSystemRepresentation((CFStringRef)bananaCascadePath, CASCADE_NAME, CASCADE_NAME_LEN);
    if(!banana_cascade.load(CASCADE_NAME)) {
        // BUGFIX: message previously said "eye detector" (copy/paste error).
        cout << " Unable to load the banana detector" << endl;
        free(CASCADE_NAME);
        exit(-1);
    }
    free(CASCADE_NAME);

    // --- Text to speech ----------------------------------------------------
    // Each synthesizer is "primed" by speaking its utterance once and
    // stopping immediately, so later speakUtterance: calls start promptly.
    [speech setRate:0.5f];
    speech.voice = [AVSpeechSynthesisVoice voiceWithLanguage:@"pl-PL"];
    speech.volume= 0.5;
    [synthesizer speakUtterance:speech];
    [synthesizer stopSpeakingAtBoundary:AVSpeechBoundaryImmediate];

    [speech2 setRate:0.5f];
    speech2.voice = [AVSpeechSynthesisVoice voiceWithLanguage:@"pl-PL"];
    speech2.volume= 0.5;
    // BUGFIX: synthesizer2 previously primed `speech` instead of `speech2`.
    [synthesizer2 speakUtterance:speech2];
    [synthesizer2 stopSpeakingAtBoundary:AVSpeechBoundaryImmediate];

    // --- Alert sound (VW module) --------------------------------------------
    NSString *path = [NSString stringWithFormat:@"%@/anita.mp3", [[NSBundle mainBundle] resourcePath]];
    NSURL *soundUrl = [NSURL fileURLWithPath:path];
    // Create audio player object and initialize with URL to sound.
    _audioPlayer = [[AVAudioPlayer alloc] initWithContentsOfURL:soundUrl error:nil];
    [[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryPlayAndRecord error:nil];
    NSError *error;
    // Route playback to the loudspeaker (PlayAndRecord defaults to receiver).
    [[AVAudioSession sharedInstance] overrideOutputAudioPort:AVAudioSessionPortOverrideSpeaker
                                                       error:&error];
}
/// Begins streaming camera frames once the view is on screen. The preview
/// stays hidden until the user enables it via the camera segment control.
-(void)viewDidAppear:(BOOL)animated
{
    [super viewDidAppear:animated];
    imageView.hidden = YES;
    [videoCamera start];
}
/// Toggles the VW-car detection module from its segmented control:
/// segment 0 enables it (VW = 1), segment 1 disables it (VW = 0);
/// any other index is ignored.
- (IBAction)vwValue:(id)sender {
    NSInteger selected = self.vwSwitch.selectedSegmentIndex;
    if (selected == 0) {
        VW = 1;
    } else if (selected == 1) {
        VW = 0;
    }
}
/// Shows or hides the camera preview from its segmented control and reports
/// the new state in the prompt label. Other segment indexes are ignored.
- (IBAction)SegmentValue:(id)sender {
    NSInteger selected = self.SegmentSwitch.selectedSegmentIndex;
    if (selected == 0) {
        imageView.hidden = YES;
        promp.text = @"You turned the camera off.";
    } else if (selected == 1) {
        imageView.hidden = NO;
        promp.text = @"You turned the camera on.";
    }
}
/// Stores the slider's current position in the global detection
/// sensitivity, truncating the float value to an int.
- (IBAction)sliderChange:(id)sender {
    UISlider *sensitivitySlider = (UISlider *)sender;
    Sens = (int)sensitivitySlider.value;
}
// Standard memory-warning hook; no caches are held by this controller,
// so only the superclass implementation runs.
- (void)didReceiveMemoryWarning {
    [super didReceiveMemoryWarning];
}
- #pragma mark - Protocol CvVideoCameraDelegate
- #ifdef __cplusplus
- double angle( cv::Point pt1, cv::Point pt2, cv::Point pt0 ) {
- double dx1 = pt1.x - pt0.x;
- double dy1 = pt1.y - pt0.y;
- double dx2 = pt2.x - pt0.x;
- double dy2 = pt2.y - pt0.y;
- return (dx1*dx2 + dy1*dy2)/sqrt((dx1*dx1 + dy1*dy1)*(dx2*dx2 + dy2*dy2) + 1e-10);
- }
- G8Tesseract *tesseract = [[G8Tesseract alloc] initWithLanguage:@"pol"];
/// CvVideoCameraDelegate callback — runs for every camera frame (RGBA Mat,
/// modified in place to draw detection overlays). Only the banana-detection
/// branch below is live; the face/eye, VW-car, built-up-area and town-sign/OCR
/// modules are commented out and kept for reference.
- (void)processImage:(Mat&)image;
{
    Mat gray;
    cvtColor(image, gray, CV_RGBA2GRAY);
    // Face detection fills `faces`, but nothing in the live code below reads
    // it — presumably left over from the disabled face/eye module. TODO confirm.
    face_cascade.detectMultiScale(gray, faces, 1.1, 2, 0 | CV_HAAR_SCALE_IMAGE, cv::Size(50, 50));
    // FIXME(review): `bananas` is never populated by the live code before this
    // check (presumably an ivar carrying results from an earlier frame), and
    // detectMultiScale below OVERWRITES `bananas` while this loop indexes into
    // it — the vector can shrink mid-iteration so bananas[i] may go out of
    // range. Detect into a separate result vector instead.
    if(bananas.size() > 0){
        for(int i=0; i<bananas.size(); i++){
            // Re-run the banana cascade inside the previously detected region.
            Mat checkBananas = gray(bananas[i]);
            banana_cascade.detectMultiScale(checkBananas, bananas);
            rectangle(image, bananas[i], BLUE);
            /*
            if(!synthesizer2.isSpeaking and !synthesizer.isSpeaking) {
            dispatch_async(dispatch_get_main_queue(), ^{
            // [synthesizer speakUtterance:speech];
            promp.text=@"\banana detected!";
            });
            */
            // Play the alert sound when exactly one banana is tracked and
            // neither speech synthesizer is currently talking.
            if(!synthesizer.isSpeaking and !synthesizer2.isSpeaking and bananas.size() == 1){
                dispatch_async(dispatch_get_main_queue(), ^{
                    [_audioPlayer play];
                });
            }
        }
    }
    // --- Disabled face/eye module (kept for reference) ----------------------
    /* dispatch_async(dispatch_get_main_queue(), ^{
    promp.text=@"\nNo face!";
    });
    if(eyes.size() > 0) {
    for(int i=0; i<eyes.size(); i++){
    cv::Point center(faces[i].x + eyes[i].x + eyes[i].width/2, faces[i].y + eyes[i].y + eyes[i].height/2);
    int radius = cvRound((eyes[i].width + eyes[i].height)/4);
    cv::circle(image, center, radius, GREEN);
    if(!synthesizer2.isSpeaking and !synthesizer.isSpeaking) {
    dispatch_async(dispatch_get_main_queue(), ^{
    // [synthesizer speakUtterance:speech];
    promp.text=@"\banana detected!";
    });
    }
    }
    }
    }
    }
    */
    // --- Disabled VW-car / built-up-area / town-sign OCR modules ------------
    /*
    //////////////////////////// VW CARS MODULE \\\\\\\\\\\\\\\\\\\\\\\\\\\\
    if(VW)
    {
    // Here we set and configure the processing area.
    Mat ProcessImageVW;
    image.copyTo(ProcessImageVW);
    cvtColor(ProcessImageVW, ProcessImageVW, CV_BGR2GRAY);
    VW_cascade.detectMultiScale(ProcessImageVW, VWs, 2, 10);
    for(int i = 0; i <VWs.size(); i++) {
    cv::Point a (VWs[i].x, VWs[i].y );
    cv::Point b(VWs[i].x + VWs[i].width, VWs[i].y + VWs[i].height);
    rectangle(image, a, b, LIMETE, 3.5);
    putText(image, "VW CAR", cv::Point(a.x, a.y-5), FONT_HERSHEY_SIMPLEX, 0.65, LIMETE, 1, CV_AA);
    if(!synthesizer.isSpeaking and !synthesizer2.isSpeaking and VWs.size() == 1 and Count == 0){
    dispatch_async(dispatch_get_main_queue(), ^{
    [_audioPlayer play];
    });
    }
    }
    }
    if (!VW)
    {
    //////////////////////////// BUILT-UP AREA SIGN MODULE \\\\\\\\\\\\\\\\\\\\\\\\\\\\
    // The resolution is 640x480, here we set and configure the processing area.
    cv::Point x (240,100);
    cv::Point y (480,540);
    Mat ProcessImage;
    image(cv::Rect(x,y)).copyTo(ProcessImage);
    cvtColor(ProcessImage, ProcessImage, CV_BGR2GRAY);
    sign_cascade.detectMultiScale(ProcessImage, signs, 2, 4);
    for(int i = 0; i <signs.size(); i++) {
    cv::Point a (signs[i].x+240, signs[i].y+100 );
    cv::Point b(signs[i].x+240 + signs[i].width, signs[i].y+100 + signs[i].height);
    rectangle(image, a, b, RED, 3);
    putText(image, "BUILT-UP AREA", cv::Point(a.x, a.y-5), FONT_HERSHEY_SIMPLEX, 0.8, RED, 1, CV_AA);
    if(!synthesizer.isSpeaking and !synthesizer2.isSpeaking and signs.size() == 1 and Count == 0){
    dispatch_async(dispatch_get_main_queue(), ^{
    [synthesizer speakUtterance:speech];
    promp.text=@"\nWjechałeś do terenu zabudowanego";
    });
    }
    }
    //////////////////////////// TOWN SIGN MODULE \\\\\\\\\\\\\\\\\\\\\\\\\\\\
    Mat image2;
    image(cv::Rect(x,y)).copyTo(image2);
    Mat mHSV;
    Mat mHSVThreshed;
    cvtColor(image2, mHSV, COLOR_BGR2HSV,3);
    cv::inRange(mHSV, Scalar(43, 69, 31), Scalar(102, 255, 127), mHSVThreshed);
    cvtColor(mHSVThreshed, image2, COLOR_GRAY2BGR, 0);
    std::vector<std::vector<cv::Point> > squares;
    cv::Mat pyr, timg, gray0(image2.size(), CV_8U), gray;
    int thresh = 50, N = 11;
    cv::pyrDown(image2, pyr, cv::Size(image2.cols/2, image2.rows/2));
    cv::pyrUp(pyr, timg, image2.size());
    std::vector<std::vector<cv::Point> > contours;
    for( int c = 0; c < 3; c++ ) {
    int ch[] = {c, 0};
    mixChannels(&timg, 1, &gray0, 1, ch, 1);
    for( int l = 0; l < N; l++ ) {
    if( l == 0 ) {
    cv::Canny(gray0, gray, 0, thresh, 5);
    cv::dilate(gray, gray, cv::Mat(), cv::Point(-1,-1));
    }
    else {
    gray = gray0 >= (l+1)*255/N;
    }
    cv::findContours(gray, contours, CV_RETR_LIST, CV_CHAIN_APPROX_SIMPLE);
    std::vector<cv::Point> approx;
    for( size_t i = 0; i < contours.size(); i++ )
    {
    cv::approxPolyDP(cv::Mat(contours[i]), approx, arcLength(cv::Mat(contours[i]), true)*0.02, true);
    if( approx.size() == 4 && fabs(contourArea(cv::Mat(approx))) > 1000 && cv::isContourConvex(cv::Mat(approx))) {
    double maxCosine = 0;
    for( int j = 2; j < 5; j++ )
    {
    double cosine = fabs(angle(approx[j%4], approx[j-2], approx[j-1]));
    maxCosine = MAX(maxCosine, cosine);
    }
    if( maxCosine < 0.3 ) {
    // Set the max size of Sign - Width max is 60 pix and max height is 230 pix.
    int LT = 3;
    int BR = 0;
    int tmpMin = approx[0].x + approx[0].y;
    int tmpMax = approx[0].x + approx[0].y;
    for (int i = 0 ; i<4 ; i++) {
    if (approx[i].x + approx[i].y > approx[BR].x + approx[BR].y)
    BR=i;
    if ( approx[i].x + approx[i].y < approx[LT].x + approx[LT].y)
    LT=i;
    }
    if( ( ( approx[BR].x - approx[LT].x ) < 240 ) and ( ( approx[BR].y - approx[LT].y ) < 60 ) )
    {
    approx[0].x = approx[0].x + 240; approx[0].y = approx[0].y + 100;
    approx[1].x = approx[1].x + 240; approx[1].y = approx[1].y + 100;
    approx[2].x = approx[2].x + 240; approx[2].y = approx[2].y + 100;
    approx[3].x = approx[3].x + 240; approx[3].y = approx[3].y + 100;
    squares.push_back(approx);
    }
    }
    }
    }
    }
    }
    if (squares.size() > 0 and Count == 0) {
    Count=0;
    // Calculating the point of region. \\
    int a = squares.size() - 1 ;
    int min = 3;
    int max = 0;
    int tmpMin = squares[a][0].x + squares[a][0].y;
    int tmpMax = squares[a][0].x + squares[a][0].y;
    for (int i = 0 ; i<4 ; i++) {
    if ( squares[a][i].x + squares[a][i].y > squares[a][max].x + squares[a][max].y)
    max=i;
    if ( squares[a][i].x + squares[a][i].y < squares[a][min].x + squares[a][min].y)
    min=i;
    }
    cv::Point LT = squares[a][min];
    cv::Point RB = squares[a][max];
    LT.x+=10;
    LT.y+=10;
    RB.x-=10;
    RB.y-=10;
    Mat RecoImage;
    image(cv::Rect(LT,RB)).copyTo(RecoImage);
    // Count the white pixels in the sign, to be sure, thats a sign.
    int count_white=0;
    Mat grayImage;
    cvtColor(RecoImage, grayImage, CV_RGB2GRAY);
    // Then apply thresholding to make it binary.
    Mat binaryImage(grayImage.size(), grayImage.type());
    threshold(grayImage, grayImage, 128, 255, CV_THRESH_BINARY);
    for (int i = 0; i < binaryImage.rows; i++)
    {
    for (int j = 0; j < binaryImage.cols; j++)
    {
    int pixel = grayImage.at<uchar>(i,j);
    if (pixel==255)
    count_white ++;
    }
    }
    //////////////////////////// TESSERACT MODULE \\\\\\\\\\\\\\\\\\\\\\\\\\\\
    if( count_white >= 0 and count_white < 1200 ) {
    tesseract.charWhitelist = @"qwertyuiopasdfghjklzxcvbnmQWERTYUIOPASDFGHJKLZXCVBNMęóąśłżźćńĘÓĄŚŁŻŹĆŃ";
    UIImage* templ= MatToUIImage(RecoImage);
    tesseract.delegate = self;
    tesseract.image = [templ g8_blackAndWhite];
    tesseract.maximumRecognitionTime = 60.0;
    [tesseract recognize];
    string city = string([tesseract.recognizedText UTF8String]);
    std::cout << "City to :" << city;
    ///The interface for detection
    LT.x-=15;
    LT.y-=15;
    RB.x+=15;
    RB.y+=15;
    rectangle(image, LT, RB, RED, 3);
    putText(image, city, cv::Point(LT.x, LT.y-5), FONT_HERSHEY_SIMPLEX, 0.8, RED, 1, CV_AA);
    dispatch_async(dispatch_get_main_queue(), ^{
    NSString *tmp = @"\nWjechałeś do miasta";
    promp.text=[NSString stringWithFormat:@"%@ %@", tmp, tesseract.recognizedText];
    });
    if(!synthesizer2.isSpeaking and !synthesizer.isSpeaking){
    dispatch_async(dispatch_get_main_queue(), ^{
    [synthesizer2 speakUtterance:speech2];
    });
    }
    }
    }
    if(Count>0)
    Count--;
    }
    */
}
- #endif
- #pragma mark - UI Actions
- @end
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement