//
//  OpenCVWrapper.m
//  Vyu360
//
//  Created by 2Mac on 22.03.2019.
//  Copyright © 2019 2Mac. All rights reserved.
//

#import "OpenCVWrapper.h"
#import "OpenCVUIImage.h"
#import <opencv2/opencv.hpp>
#import <opencv2/highgui/highgui_c.h>
#import <math.h>
#import <unistd.h>
#import <getopt.h>
#import <iostream>

@interface OpenCVWrapper ()

+(void) buildGaussianPyramid;

+(void) buildLaplacianPyramid:(cv::Mat&)img withLapPyr:(std::vector<cv::Mat_<cv::Vec3f>>&)lapPyr withSmallestLevel:(cv::Mat&)smallestLevel;

@end

@implementation OpenCVWrapper

static cv::Mat map1;
static cv::Mat map2;

float xscale;
float yscale;
float xshift;
float yshift;

cv::Mat_<cv::Vec3f> left;
cv::Mat_<cv::Vec3f> right;
cv::Mat_<float> blendMask;

cv::Mat leftSmallestLevel, rightSmallestLevel, resultSmallestLevel;
std::vector<cv::Mat_<cv::Vec3f>> leftLapPyr;
std::vector<cv::Mat_<cv::Vec3f>> rightLapPyr;
std::vector<cv::Mat_<cv::Vec3f>> resultLapPyr;
std::vector<cv::Mat_<cv::Vec3f>> maskGaussianPyramid; // masks are 3-channel for easier multiplication with RGB

int levels;

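// Laplacian-pyramid blending helpers.
// The five methods below implement multi-band blending: build a Laplacian
// pyramid for the "left" and "right" images, a Gaussian pyramid for the blend
// mask, blend each level, then collapse the result. State is passed through
// the file-scope globals (left, right, blendMask, levels, the *LapPyr vectors).
//
// Minimal usage sketch, assumed from the commented-out calls further down in
// inpaintImage:withMask: (not an official API of this class; `levels = 4` is
// an illustrative value only):
//
//   left      = sideFullImageBlend;   // CV_32FC3
//   right     = centerFullImageBlend; // CV_32FC3, same size
//   blendMask = m;                    // CV_32FC1, 1.0 where "left" should win
//   levels    = 4;                    // pyramid depth
//   cv::Mat blended = [OpenCVWrapper buildPyramid];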
+(void) buildGaussianPyramid
{
    CV_Assert(leftLapPyr.size() > 0);

    maskGaussianPyramid.clear();
    cv::Mat currentImg;
    cv::cvtColor(blendMask, currentImg, CV_GRAY2BGR);
    maskGaussianPyramid.push_back(currentImg); // highest level

    currentImg = blendMask;
    for (int l = 1; l < levels + 1; l++) {
        cv::Mat _down;
        if ((int)leftLapPyr.size() > l) {
            cv::pyrDown(currentImg, _down, leftLapPyr[l].size());
        } else {
            cv::pyrDown(currentImg, _down, leftSmallestLevel.size()); // smallest level
        }

        cv::Mat down;
        cv::cvtColor(_down, down, CV_GRAY2BGR);
        maskGaussianPyramid.push_back(down);
        currentImg = _down;
    }
}

+(void) buildLaplacianPyramid:(cv::Mat&)img withLapPyr:(std::vector<cv::Mat_<cv::Vec3f>>&)lapPyr withSmallestLevel:(cv::Mat&)smallestLevel
{
    lapPyr.clear();
    cv::Mat currentImg = img;
    for (int l = 0; l < levels; l++) {
        cv::Mat down, up;
        cv::pyrDown(currentImg, down);
        cv::pyrUp(down, up, currentImg.size());
        cv::Mat lap = currentImg - up;
        lapPyr.push_back(lap);
        currentImg = down;
    }
    currentImg.copyTo(smallestLevel);
}

+(cv::Mat_<cv::Vec3f>) reconstructImgFromLapPyramid {
    cv::Mat currentImg = resultSmallestLevel;
    for (int l = levels - 1; l >= 0; l--) {
        cv::Mat up;

        cv::pyrUp(currentImg, up, resultLapPyr[l].size());
        currentImg = up + resultLapPyr[l];
    }
    return currentImg;
}

+(void) blendLapPyrs {
    resultSmallestLevel = leftSmallestLevel.mul(maskGaussianPyramid.back()) +
        rightSmallestLevel.mul(cv::Scalar(1.0, 1.0, 1.0) - maskGaussianPyramid.back());
    for (int l = 0; l < levels; l++) {
        cv::Mat A = leftLapPyr[l].mul(maskGaussianPyramid[l]);
        cv::Mat antiMask = cv::Scalar(1.0, 1.0, 1.0) - maskGaussianPyramid[l];
        cv::Mat B = rightLapPyr[l].mul(antiMask);
        cv::Mat_<cv::Vec3f> blendedLevel = A + B;

        resultLapPyr.push_back(blendedLevel);
    }
}

+(cv::Mat_<cv::Vec3f>) buildPyramid {
    [OpenCVWrapper buildLaplacianPyramid:left withLapPyr:leftLapPyr withSmallestLevel:leftSmallestLevel];
    [OpenCVWrapper buildLaplacianPyramid:right withLapPyr:rightLapPyr withSmallestLevel:rightSmallestLevel];

    [OpenCVWrapper buildGaussianPyramid];
    [OpenCVWrapper blendLapPyrs];
    return [OpenCVWrapper reconstructImgFromLapPyramid];
}

+(NSString *) openCVVersionString
{
    return [NSString stringWithFormat:@"OpenCV Version %s", CV_VERSION];
}

+(int) detectLensRadius:(UIImage*)inputPhoto
{
    cv::Mat input = inputPhoto.CVMat;
    cv::Mat grayscale;

    cv::cvtColor(input, grayscale, cv::COLOR_BGRA2GRAY);

    std::vector<cv::Vec3f> circles;
    cv::HoughCircles(grayscale, circles, cv::HOUGH_GRADIENT, 1, 400);

    if (circles.empty()) {
        return 0; // no circle detected
    }

    cv::Vec3i circle = circles[0];
    int result = circle[2]; // radius of the first detected circle

    return result;
}

+(UIImage *) cropImage:(UIImage*)inputPhoto
{
    cv::Mat input = inputPhoto.CVMat;
    cv::Mat croppedMat;
    cv::Mat resultMat;

    input(cv::Rect(0, 9, 2048, 1014)).copyTo(croppedMat);

    cv::resize(croppedMat, resultMat, cv::Size(2048, 1024));

    UIImage * result = [UIImage imageWithCVMat:resultMat];

    return result;
}

+(UIImage *) resizeToNormalResolution:(UIImage*)inputPhoto :(int)width :(int)height
{
    cv::Mat input = inputPhoto.CVMat;
    cv::Mat resultMat;

    cv::resize(input, resultMat, cv::Size(width, height));

    UIImage * result = [UIImage imageWithCVMat:resultMat];

    return result;
}

+(UIImage *) reflectBorders:(UIImage*)inputPhoto
{
    cv::Mat input = inputPhoto.CVMat;

    cv::Mat reflected;

    cv::copyMakeBorder(input, reflected, 20, 20, 20, 20, cv::BORDER_REFLECT);

    UIImage * result = [UIImage imageWithCVMat:reflected];

    return result;
}

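// inpaintImage:withMask: — despite its name, this method appears to perform
// seam correction on a 2048x1024 equirectangular panorama rather than true
// inpainting (the mask argument is currently unused). Rough flow, as read from
// the code below:
//   1. split the pano into left / center / right strips and run OpenCV's GAIN
//      exposure compensator across them;
//   2. trim a few columns (sizeCrop) around each seam and re-assemble;
//   3. blend a small band (2*length px) around each seam per row with a linear
//      ramp to hide the remaining discontinuity.
// The Laplacian-pyramid blending passes are present but commented out.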
+ (UIImage *)inpaintImage:(UIImage*)inputImage withMask:(UIImage*)mask
{
    cv::Mat inputBGRA = inputImage.CVMat;
    cv::Mat input;
    cv::cvtColor(inputBGRA, input, CV_BGRA2BGR);
    input.convertTo(input, CV_8UC3);

    int sizeSide = 512;
    int sizeCenter = 1024;
    int sizeCrop = 3;
    int sizeHeight = 1024;
    int sizeWeight = 2048;
    int newSizeSide = sizeSide - sizeCrop;
    int newSizeCenter = sizeCenter - sizeCrop*2;
    int newSizeWeight = newSizeCenter + sizeSide*2;
    cv::Ptr<cv::detail::ExposureCompensator> compensator = cv::detail::ExposureCompensator::createDefault(cv::detail::ExposureCompensator::GAIN);
    std::vector<cv::Point> corners;
    std::vector<cv::UMat> images;
    std::vector<cv::UMat> masks;
    corners.push_back(cv::Point(0, 0));
    corners.push_back(cv::Point(512, 0));
    corners.push_back(cv::Point(1536, 0));
    cv::UMat sideULeft(sizeHeight, sizeSide, CV_8UC3, cv::Scalar(0,0,0)), sideURight(sizeHeight, sizeSide, CV_8UC3, cv::Scalar(0,0,0)), UCenter(sizeHeight, sizeCenter, CV_8UC3, cv::Scalar(0,0,0));
    cv::UMat maskCenter(sizeHeight, sizeCenter, CV_8U, cv::Scalar(255)), maskSide(sizeHeight, sizeSide, CV_8U, cv::Scalar(255));
    input(cv::Rect(0, 0, sizeSide, sizeHeight)).copyTo(sideULeft);
    input(cv::Rect(sizeSide + sizeCenter, 0, sizeSide, sizeHeight)).copyTo(sideURight);
    input(cv::Rect(sizeSide, 0, sizeCenter, sizeHeight)).copyTo(UCenter);
    images.push_back(sideULeft);
    images.push_back(UCenter);
    images.push_back(sideURight);
    masks.push_back(maskSide);
    masks.push_back(maskCenter);
    masks.push_back(maskSide);
    compensator->feed(corners, images, masks);
    for (auto i = 0; i < 3; i++)
    {
        compensator->apply(i, corners[i], images[i], masks[i]);
    }
    sideULeft.copyTo(input(cv::Rect(0, 0, sizeSide, sizeHeight)));
    sideURight.copyTo(input(cv::Rect(sizeSide + sizeCenter, 0, sizeSide, sizeHeight)));
    UCenter.copyTo(input(cv::Rect(sizeSide, 0, sizeCenter, sizeHeight)));
    input.convertTo(input, CV_32FC3);
    cv::Mat result(sizeHeight, sizeWeight - sizeCrop*4, CV_32FC3, cv::Scalar(0,0,0));
    input(cv::Rect(0, 0, sizeSide - sizeCrop, sizeHeight)).copyTo(result(cv::Rect(0, 0, sizeSide - sizeCrop, sizeHeight)));
    input(cv::Rect(sizeSide + sizeCrop, 0, sizeCenter - sizeCrop*2, sizeHeight)).copyTo(result(cv::Rect(sizeSide - sizeCrop, 0, sizeCenter - sizeCrop*2, sizeHeight)));

    input(cv::Rect(sizeCenter + sizeSide + sizeCrop, 0, sizeSide - sizeCrop, sizeHeight)).copyTo(result(cv::Rect(sizeSide + sizeCenter - sizeCrop*3, 0, sizeSide - sizeCrop, sizeHeight)));

    cv::Mat sideFullImageBlend(sizeHeight, newSizeWeight, CV_32FC3, cv::Scalar(0,0,0));
    cv::Mat centerFullImageBlend(sizeHeight, newSizeWeight, CV_32FC3, cv::Scalar(0,0,0));
    result(cv::Rect(0, 0, newSizeSide, sizeHeight)).copyTo(sideFullImageBlend(cv::Rect(0, 0, newSizeSide, sizeHeight)));
    result(cv::Rect(newSizeCenter + newSizeSide, 0, newSizeSide, sizeHeight)).copyTo(sideFullImageBlend(cv::Rect(newSizeSide, 0, newSizeSide, sizeHeight)));
    result(cv::Rect(newSizeSide, 0, newSizeCenter, sizeHeight)).copyTo(centerFullImageBlend(cv::Rect(newSizeSide*2, 0, newSizeCenter, sizeHeight)));

    cv::Mat_<float> m(sideFullImageBlend.rows, sideFullImageBlend.cols, 0.0);
    m(cv::Range::all(), cv::Range(0, newSizeCenter)) = 1.0;

    /*
    leftLapPyr = std::vector<cv::Mat_<cv::Vec3f>>();
    rightLapPyr = std::vector<cv::Mat_<cv::Vec3f>>();
    resultLapPyr = std::vector<cv::Mat_<cv::Vec3f>>();
    maskGaussianPyramid = std::vector<cv::Mat_<cv::Vec3f>>();
    left = sideFullImageBlend;
    right = centerFullImageBlend;
    blendMask = m;
    levels = 100;
    cv::Mat blendingResult = [OpenCVWrapper buildPyramid];
    blendingResult(cv::Rect(newSizeCenter, 0, newSizeCenter, sizeHeight)).copyTo(result(cv::Rect(newSizeSide, 0, newSizeCenter, sizeHeight)));
    blendingResult(cv::Rect(0, 0, newSizeSide, sizeHeight)).copyTo(result(cv::Rect(0, 0, newSizeSide, sizeHeight)));
    blendingResult(cv::Rect(newSizeSide, 0, newSizeSide, sizeHeight)).copyTo(result(cv::Rect(newSizeCenter + newSizeSide, 0, newSizeSide, sizeHeight)));
    */
    cv::resize(result, result, cv::Size(sizeWeight, sizeHeight));
    //m = cv::Mat_<float>(sideImageBlend.rows, sideImageBlend.cols, 0.0);
    m(cv::Range::all(), cv::Range(0, sizeSide)) = 1.0;
    for (int i = 1; i <= 50; i++)
    {
        // linear 50-px ramp from 1.0 down to 0.0 (float division; integer
        // division here would collapse the ramp to a hard step)
        m(cv::Range::all(), cv::Range(sizeSide - i - 1, sizeSide - i)) = 1.0f - i/50.0f;
    }

    leftLapPyr = std::vector<cv::Mat_<cv::Vec3f>>();
    rightLapPyr = std::vector<cv::Mat_<cv::Vec3f>>();
    resultLapPyr = std::vector<cv::Mat_<cv::Vec3f>>();
    maskGaussianPyramid = std::vector<cv::Mat_<cv::Vec3f>>();
    /*
    left = sideImageBlend;
    right = centerImageBlend;
    blendMask = m;
    levels = 10000;
    cv::Mat blendingResult = [OpenCVWrapper buildPyramid];
    */
    //blendingResult.copyTo(result(cv::Rect(0, 0, sizeSide + sizeCenter, sizeHeight)));

    cv::Mat rightImageBlend(sizeHeight, sizeSide + sizeCenter, CV_32FC3, cv::Scalar(0,0,0));
    cv::Mat centerRightImageBlend(sizeHeight, sizeSide + sizeCenter, CV_32FC3, cv::Scalar(0,0,0));

    result(cv::Rect(sizeSide + sizeCenter, 0, sizeSide, sizeHeight)).copyTo(rightImageBlend(cv::Rect(sizeCenter, 0, sizeSide, sizeHeight)));
    result(cv::Rect(sizeSide, 0, sizeCenter, sizeHeight)).copyTo(centerRightImageBlend(cv::Rect(0, 0, sizeCenter, sizeHeight)));

    m(cv::Range::all(), cv::Range::all()) = 0.0;
    m(cv::Range::all(), cv::Range(0, sizeCenter)) = 1.0;
    for (int i = 1; i <= 50; i++)
    {
        m(cv::Range::all(), cv::Range(sizeCenter - i - 1, sizeCenter - i)) = 1.0f - i/50.0f;
    }
    /*
    leftLapPyr = std::vector<cv::Mat_<cv::Vec3f>>();
    rightLapPyr = std::vector<cv::Mat_<cv::Vec3f>>();
    resultLapPyr = std::vector<cv::Mat_<cv::Vec3f>>();
    maskGaussianPyramid = std::vector<cv::Mat_<cv::Vec3f>>();
    left = centerRightImageBlend;
    right = rightImageBlend;
    blendMask = m;
    levels = 10000;
    blendingResult = [OpenCVWrapper buildPyramid];
    */
    //cv::cvtColor(blendingResult, blendingResult, CV_BGR2BGRA);
    cv::cvtColor(result, result, CV_BGR2BGRA);
    result.convertTo(result, CV_8UC4);
    //blendingResult.convertTo(blendingResult, CV_8UC4);
    //blendingResult.copyTo(result(cv::Rect(sizeSide, 0, sizeSide + sizeCenter, sizeHeight)));
    cv::Mat leftBluring = result(cv::Rect(500, 0, 24, sizeHeight));
    cv::Mat rightBluring = result(cv::Rect(1524, 0, 24, sizeHeight));
    //cv::GaussianBlur(result(cv::Rect(500, 0, 24, sizeHeight)), leftBluring, cv::Size(21,21), 0);
    //cv::GaussianBlur(result(cv::Rect(1524, 0, 24, sizeHeight)), rightBluring, cv::Size(21,21), 0);
    //leftBluring.copyTo(result(cv::Rect(500, 0, 24, sizeHeight)));
    //rightBluring.copyTo(result(cv::Rect(1524, 0, 24, sizeHeight)));
    cv::Mat finalResult(sizeHeight, 2038, CV_8UC4, cv::Scalar(0,0,0));
    int length = 5;
    int range = length*2;
    for (auto j = 0; j < sizeHeight; j++)
    {
        cv::Vec4b colorLeftBegin = result.at<cv::Vec4b>(cv::Point(sizeSide - length, j));
        cv::Vec4b colorLeftEnd = result.at<cv::Vec4b>(cv::Point(sizeSide + length, j));
        cv::Vec4b colorRightBegin = result.at<cv::Vec4b>(cv::Point(sizeSide + sizeCenter - length, j));
        cv::Vec4b colorRightEnd = result.at<cv::Vec4b>(cv::Point(sizeSide + sizeCenter + length, j));
        cv::Vec4b colorLeftRightBegin = result.at<cv::Vec4b>(cv::Point(sizeWeight - length, j));
        cv::Vec4b colorLeftRightEnd = result.at<cv::Vec4b>(cv::Point(length, j));

        for (float i = 1; i <= range; i++)
        {
            result.at<cv::Vec4b>(cv::Point(sizeSide - length + i, j)) = colorLeftBegin*(1 - i/range) + colorLeftEnd*(i/range);
            result.at<cv::Vec4b>(cv::Point(sizeSide + sizeCenter - length + i, j)) = colorRightBegin*(1 - i/range) + colorRightEnd*(i/range);
            /*
            if (length - i >= 0)
            {
                result.at<cv::Vec4b>(cv::Point(length - i, j)) = colorLeftRightEnd*(1 - i/range) + colorLeftRightBegin*(i/range);
            }
            else
            {
                result.at<cv::Vec4b>(cv::Point(sizeWeight + length - i, j)) = colorLeftRightEnd*(1 - i/range) + colorLeftRightBegin*(i/range);
            }
            */
        }
    }
    result(cv::Rect(0, 0, 510, sizeHeight)).copyTo(finalResult(cv::Rect(0, 0, 510, sizeHeight)));
    result(cv::Rect(514, 0, 1018, sizeHeight)).copyTo(finalResult(cv::Rect(510, 0, 1018, sizeHeight)));
    result(cv::Rect(1538, 0, 510, sizeHeight)).copyTo(finalResult(cv::Rect(1528, 0, 510, sizeHeight)));
    cv::resize(finalResult, finalResult, cv::Size(sizeWeight, sizeHeight));
    // note: finalResult (the seam-trimmed copy) is built and resized above,
    // but the method currently returns `result`
    UIImage * resultImage = [UIImage imageWithCVMat:result];
    // inputBGR
    return resultImage;
}

+(UIImage *) compensateExposures:(UIImage*)firstPhoto :(UIImage*)secondPhoto :(UIImage*)firstMask :(UIImage*)secondMask
{
    cv::Mat firstInput = firstPhoto.CVMat;
    cv::Mat secondInput = secondPhoto.CVMat;

    cv::Mat firstInputMask = firstMask.CVMat;
    cv::Mat secondInputMask = secondMask.CVMat;

    const std::vector<cv::Point> corners(2);

    std::vector<cv::Mat> photos;
    photos.push_back(firstInput);
    photos.push_back(secondInput);

    std::vector<cv::Mat> masks;
    masks.push_back(firstInputMask);
    masks.push_back(secondInputMask);

    cv::Ptr<cv::detail::ExposureCompensator> compensator = cv::detail::ExposureCompensator::createDefault(cv::detail::ExposureCompensator::GAIN_BLOCKS);

    //compensator->feed(corners, photos, masks);
    return nil; // not implemented: the compensator is never applied
}

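// matchColors:with: below shifts the first image by the difference of the two
// images' per-channel means (a crude colour transfer; the standard deviations
// are computed but not used). Worked example of the idea, with made-up numbers:
//   mean(first) = (120, 100, 90), mean(second) = (110, 105, 95)
//   mdiff = (10, -5, -5), so `first -= mdiff` brings the means in line.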
+(UIImage *) matchColors:(UIImage*)firstImage with:(UIImage*)secondImage
{
    cv::Mat first = firstImage.CVMat;
    cv::Mat second = secondImage.CVMat;

    cv::Scalar m1, d1;
    cv::meanStdDev(first, m1, d1);

    cv::Scalar m2, d2;
    cv::meanStdDev(second, m2, d2);

    cv::Scalar mdiff = m1 - m2;

    first -= mdiff;

    cv::Scalar m3, d3;
    cv::meanStdDev(first, m3, d3);

    UIImage * firstImageResult = [UIImage imageWithCVMat:first];

    first.release();
    second.release();

    return firstImageResult;
}

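// Fisheye rectification. calibrateMaps: fills the static map1/map2 remap
// tables from a hard-coded camera matrix K and distortion vector D (different
// intrinsics for the front and back camera); undistortImage: then applies
// them with cv::remap. Minimal usage sketch (method names are from this file;
// the calling order is assumed):
//
//   [OpenCVWrapper calibrateMaps:NO];                        // back-camera intrinsics
//   UIImage *flat = [OpenCVWrapper undistortImage:fisheyeShot];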
+(void) calibrateMaps:(bool)isFrontCamera { //:(float)dOne :(float)dTwo :(float)dThree :(float)dFour :(float)kOne :(float)kTwo :(float)kThree :(float)kFour {

    float K[3][3] = {
        {0, 0, 0},
        {0, 0, 0},
        {0, 0, 0}
    };

    if (isFrontCamera == false) {
        // K = {
        //     {175, 0, 380}, // 175 380 // 380 480
        //     {0, 134, 485}, // 134 485
        //     {0, 0, 1}
        // };
        K[0][0] = 160; //175; // 170
        K[0][1] = 0;
        K[0][2] = 400; //380;
        K[1][0] = 0;
        K[1][1] = 136; //134;
        K[1][2] = 485;
        K[2][0] = 0;
        K[2][1] = 0;
        K[2][2] = 1;
    } else {
        K[0][0] = 204;
        K[0][1] = 0;
        K[0][2] = 380;
        K[1][0] = 0;
        K[1][1] = 200;
        K[1][2] = 480;
        K[2][0] = 0;
        K[2][1] = 0;
        K[2][2] = 1;
    }

    // float K[3][3] = {
    //     {kOne, 0, kThree}, // 830 390
    //     {0, kTwo, kFour},
    //     {0, 0, 1}
    // };

    cv::Mat kMat(3, 3, CV_32FC1, K);
    //std::memcpy(kMat.data, K, 3*3*sizeof(float));

    float D[4] = {0, 0.165, 0, 0};
    //float D[4] = {dOne, dTwo, dThree, dFour};
    cv::Mat dMat(1, 4, CV_32FC1, D);
    //std::memcpy(dMat.data, D, 1*4*sizeof(float));

    cv::Mat Knew;
    kMat.copyTo(Knew);

    cv::fisheye::initUndistortRectifyMap(kMat, dMat, cv::Mat(), Knew, cv::Size(750, 1000), CV_32FC1, map1, map2);
    Knew.release();
    kMat.release();
    dMat.release();
}

+(UIImage *) undistortImage:(UIImage*)img
{
    //cv::Mat image = img.CVMat;
    cv::Mat image = img.CVMatAlpha;
    cv::resize(image, image, cv::Size(750, 1000));
    cv::Mat undistortedImage;

    cv::remap(image, undistortedImage, map1, map2, cv::INTER_LINEAR, cv::BORDER_CONSTANT);

    //UIImage * result = [UIImage imageWithCVMat:undistortedImage];
    UIImage * result = [UIImage imageWithCVMatAlpha:undistortedImage];
    undistortedImage.release();
    image.release();
    return result;
}

+(UIImage *) resizeImageToNormalSize:(UIImage*)inputImage {
    cv::Mat image = inputImage.CVMat;
    cv::resize(image, image, cv::Size(2048, 1024));

    UIImage * result = [UIImage imageWithCVMatAlpha:image];
    image.release();
    return result;
}

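// Manual barrel-distortion helpers. getRadialX/getRadialY apply the simple
// radial model
//   x' = x + (x - cx) * k * ((x - cx)^2 + (y - cy)^2)
// (and the analogue for y), optionally after the scale/shift packed in `props`.
// calc_shift bisects for the source offset so the distorted image still fills
// the frame, and fishEye builds full remap tables from these values and calls
// cv::remap. Used by distortImage: further below with k = 0.001.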
+(float) calc_shift:(float)x1 :(float)x2 :(float)cx :(float)k
{
    float thresh = 1;
    float x3 = x1 + (x2 - x1)*0.5f;
    float res1 = x1 + ((x1 - cx)*k*((x1 - cx)*(x1 - cx)));
    float res3 = x3 + ((x3 - cx)*k*((x3 - cx)*(x3 - cx)));

    if (res1 > -thresh && res1 < thresh) {
        return x1;
    }
    if (res3 < 0) {
        return [OpenCVWrapper calc_shift:x3 :x2 :cx :k];
    } else {
        return [OpenCVWrapper calc_shift:x1 :x3 :cx :k];
    }
}

+(float) getRadialX:(float)x :(float)y :(float)cx :(float)cy :(float)k :(bool)scale :(cv::Vec4f)props
{
    float result;
    if (scale)
    {
        float xshift = props[0];
        float yshift = props[1];
        float xscale = props[2];
        float yscale = props[3];

        x = (x*xscale + xshift);
        y = (y*yscale + yshift);
        result = x + ((x - cx)*k*((x - cx)*(x - cx) + (y - cy)*(y - cy)));
    } else {
        result = x + ((x - cx)*k*((x - cx)*(x - cx) + (y - cy)*(y - cy)));
    }
    return result;
}

+(float) getRadialY:(float)x :(float)y :(float)cx :(float)cy :(float)k :(bool)scale :(cv::Vec4f)props
{
    float result;
    if (scale)
    {
        float xshift = props[0];
        float yshift = props[1];
        float xscale = props[2];
        float yscale = props[3];

        x = (x*xscale + xshift);
        y = (y*yscale + yshift);
        result = y + ((y - cy)*k*((x - cx)*(x - cx) + (y - cy)*(y - cy)));
    } else {
        result = y + ((y - cy)*k*((x - cx)*(x - cx) + (y - cy)*(y - cy)));
    }
    return result;
}

+(void) fishEye:(cv::InputArray)_src :(cv::OutputArray)_dst :(double)Cx :(double)Cy :(double)k :(bool)scale
{
    CV_Assert(Cx >= 0 && Cy >= 0 && k >= 0);

    cv::Mat src = _src.getMat();

    cv::Mat mapx(src.size(), CV_32FC1);
    cv::Mat mapy(src.size(), CV_32FC1);

    int w = src.cols;
    int h = src.rows;

    cv::Vec4f props;
    float xShift = [OpenCVWrapper calc_shift:0 :Cx - 1 :Cx :k];
    props[0] = xShift;
    float newCenterX = w - Cx;
    float xShift2 = [OpenCVWrapper calc_shift:0 :newCenterX - 1 :newCenterX :k];

    float yShift = [OpenCVWrapper calc_shift:0 :Cy - 1 :Cy :k];
    props[1] = yShift;
    float newCenterY = h - Cy; // !!!
    float yShift2 = [OpenCVWrapper calc_shift:0 :newCenterY - 1 :newCenterY :k];

    float xScale = (w - xShift - xShift2)/w;
    props[2] = xScale;
    float yScale = (h - yShift - yShift2)/h;
    props[3] = yScale;

    float* p = mapx.ptr<float>(0);

    for (int y = 0; y < h; y++)
    {
        for (int x = 0; x < w; x++)
        {
            *p++ = [OpenCVWrapper getRadialX:(float)x :(float)y :Cx :Cy :k :scale :props];
        }
    }

    p = mapy.ptr<float>(0);
    for (int y = 0; y < h; y++)
    {
        for (int x = 0; x < w; x++)
        {
            *p++ = [OpenCVWrapper getRadialY:(float)x :(float)y :Cx :Cy :k :scale :props];
        }
    }

    cv::remap(src, _dst, mapx, mapy, cv::INTER_LINEAR, cv::BORDER_CONSTANT);
}

+(UIImage *) distortImage:(UIImage*)img
{
    cv::Mat undistortedImage = img.CVMatAlpha;

    cv::Mat distortedImage;

    [OpenCVWrapper fishEye:undistortedImage :distortedImage :undistortedImage.cols/2 :undistortedImage.rows/2 :0.001 :true];

    cv::Mat distortedImageWithoutAlpha;

    //cv::cvtColor(distortedImage, distortedImageWithoutAlpha, CV_RGBA2RGB);

    //UIImage * result = [UIImage imageWithCVMat:distortedImage];

    UIImage * result = [UIImage imageWithCVMatAlpha:distortedImage];

    return result;
}

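// removeBlackBackground: below thresholds a grayscale copy at 100 to build a
// mask and appends it as an extra (alpha) channel, so near-black regions are
// meant to come out transparent. Note this assumes the split of `src` yields
// three colour channels before the mask is appended.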
+(UIImage *) removeBlackBackground:(UIImage*)img
{
    cv::Mat src = img.CVMat;
    cv::Vec4b colorInPos = src.at<cv::Vec4b>(cv::Point(0,0));
    cv::Mat gray;

    cv::cvtColor(src, gray, CV_BGR2GRAY);

    cv::Mat mask;

    cv::threshold(gray, mask, 100, 255, CV_THRESH_BINARY);

    std::vector<cv::Mat> channels;
    cv::split(src, channels);
    channels.push_back(mask);

    cv::Mat dst;
    cv::merge(channels, dst);

    cv::Vec4b colorInDst = dst.at<cv::Vec4b>(cv::Point(0,0));

    UIImage * result = [UIImage imageWithCVMat:dst];

    return result;
}

+(UIImage *) addStickerToPanorama:(UIImage*)panorama :(UIImage*)sticker :(NSArray*)geoXPx :(NSArray*)geoYPx
{
    cv::Mat stickerMat = sticker.CVMatAlpha;
    cv::Mat panoramaMat = panorama.CVMatAlpha;

    for (int i = 0; i < stickerMat.rows; i++) {
        for (int j = 0; j < stickerMat.cols; j++) {
            NSArray *x = [geoXPx objectAtIndex:i];
            int xx = [(NSNumber*)[x objectAtIndex:j] intValue];
            NSArray *y = [geoYPx objectAtIndex:i];
            int yy = [(NSNumber*)[y objectAtIndex:j] intValue];
            //panoramaMat.at<cv::Point>(xx, yy) = stickerMat.at<cv::Point>(i,j);
            if (stickerMat.at<cv::Vec4b>(cv::Point(j,i))[3] != '\0') {
                panoramaMat.at<cv::Vec4b>(cv::Point(xx, yy)) = stickerMat.at<cv::Vec4b>(cv::Point(j, i));
            }
        }
    }

    UIImage * result = [UIImage imageWithCVMatAlpha:panoramaMat];

    return result;
}

//+(UIImage *) equirectToStereo:(UIImage*)img :(NSArray*)xe :(NSArray*)ye
//{
//    cv::Mat input = img.CVMatAlpha;
//    cv::Mat output; //= input;
//    input.copyTo(output);
//
//    for (int i = 0; i < input.cols; i++) {
//        for (int j = 0; j < input.rows; j++) {
//            NSArray *x = [xe objectAtIndex:i];
//            int xx = [(NSNumber*)[x objectAtIndex:j] intValue];
//            NSArray *y = [ye objectAtIndex:i];
//            int yy = [(NSNumber*)[y objectAtIndex:j] intValue];
//            output.at<cv::Vec3b>(cv::Point(xx, yy)) = input.at<cv::Vec3b>(cv::Point(i, j));
//        }
//    }
//
//    UIImage * result = [UIImage imageWithCVMatAlpha:output];
//
//    return result;
//}

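// renderProjection:: renders a square, "little planet"-style view of the
// equirectangular panorama: each output pixel is projected onto the unit
// sphere via getIntersection::, converted to spherical angles
// (theta = acos(Qz), phi = atan2(Qy, Qx) + pi), and those angles are mapped
// back to pano coordinates for a nearest-pixel lookup (the bilinear sampler
// below is written but currently unused).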
+(UIImage *) renderProjection:(UIImage*)inputImage :(long)len
{
    cv::Mat pano = inputImage.CVMat;
    cv::Mat convertedPano;

    pano.convertTo(convertedPano, CV_16UC3);

    cv::Mat output;
    output.create(len, len, CV_8UC4);
    long half_len = len/2;
    cv::Size sz = pano.size();

    double k_pi = 3.1415926535897932384626433832795;
    double k_pi_inverse = 0.31830988618379067153776752674503;

    for (long indexX = 0; indexX < len; ++indexX) {
        for (long indexY = 0; indexY < len; ++indexY) {
            double sphereX = (indexX - half_len) * 10.0 / len;
            double sphereY = (indexY - half_len) * 10.0 / len;
            double Qx, Qy, Qz;

            if ([OpenCVWrapper getIntersection:sphereX :sphereY :&Qx :&Qy :&Qz])
            {
                double theta = std::acos(Qz);
                double phi = std::atan2(Qy, Qx) + k_pi;
                theta = theta * k_pi_inverse;
                phi = phi * (0.5 * k_pi_inverse);
                double Sx = cv::min(sz.width - 2.0, sz.width * phi);
                double Sy = cv::min(sz.height - 2.0, sz.height * theta);

                output.at<cv::Vec4b>(cv::Point(indexY, indexX)) = pano.at<cv::Vec4b>(cv::Point(Sx, Sy)); //[OpenCVWrapper bilinearSample:convertedPano :Sx :Sy];
            }
        }
    }

    cv::Mat outputResult;
    output.convertTo(outputResult, CV_8UC4);
    cv::Mat outputResultBGR;
    //cv::cvtColor(outputResult, outputResultBGR, CV_BGR2RGB);

    UIImage * result = [UIImage imageWithCVMat:outputResult];
    return result;
}

+(bool) getIntersection:(double)u :(double)v :(double*)x :(double*)y :(double*)z
{
    double Nx = 0.0;
    double Ny = 0.0;
    double Nz = 1.0;
    double dir_x = u - Nx;
    double dir_y = v - Ny;
    double dir_z = -1.0 - Nz;

    double a = (dir_x*dir_x) + (dir_y*dir_y) + (dir_z*dir_z);
    double b = (dir_x*Nx) + (dir_y*Ny) + (dir_z*Nz);

    b *= 2;
    double d = b*b;
    double q = -0.5 * (b - std::sqrt(d));

    double t = q/a;

    *x = (dir_x*t) + Nx;
    *y = (dir_y*t) + Ny;
    *z = (dir_z*t) + Nz;

    return true;
}

+(cv::Vec3s) bilinearSample:(cv::Mat)image :(double)x :(double)y
{
    cv::Vec3s c00 = image.at<cv::Vec3s>((int)y, (int)x);
    cv::Vec3s c01 = image.at<cv::Vec3s>((int)y, (int)x + 1);
    cv::Vec3s c10 = image.at<cv::Vec3s>((int)y + 1, (int)x);
    cv::Vec3s c11 = image.at<cv::Vec3s>((int)y + 1, (int)x + 1);

    double X0 = x - floor(x);
    double X1 = 1.0 - X0;
    double Y0 = y - floor(y);
    double Y1 = 1.0 - Y0;

    double w00 = X0*Y0;
    double w01 = X1*Y0;
    double w10 = X0*Y1;
    double w11 = X1*Y1;

    // possibly the channel order is the other way around (RGB)
    short r = (c00[2] + c01[2] + c10[2] + c11[2])/4; //(short)(c00[2]*w00 + c01[2]*w01 + c10[2]*w10 + c11[2]*w11);
    short g = (c00[1] + c01[1] + c10[1] + c11[1])/4; //(short)(c00[1]*w00 + c01[1]*w01 + c10[1]*w10 + c11[1]*w11);
    short b = (c00[0] + c01[0] + c10[0] + c11[0])/4; //(short)(c00[0]*w00 + c01[0]*w01 + c10[0]*w10 + c11[0]*w11);

    cv::Vec3s result = [OpenCVWrapper make_BGR:r :g :b];

    return result;
}

+(cv::Vec3s) make_BGR:(short)blue :(short)green :(short)red
{
    cv::Vec3s result;
    result[0] = blue;
    result[1] = green;
    result[2] = red;

    return result;
}

+(UIImage *) addStickerToPanoImage:(UIImage*)pano :(UIImage*)sticker :(NSArray*)coordsToChange :(int)stickerWidth :(int)stickerHeight
{
    cv::Mat inputPano = pano.CVMatAlpha;
    cv::Mat stickerMat = sticker.CVMatAlpha;
    cv::resize(stickerMat, stickerMat, cv::Size(stickerWidth, stickerHeight));
    for (int i = 0; i < stickerWidth; i++) {
        for (int j = 0; j < stickerHeight; j++) {
            NSArray *x = [coordsToChange objectAtIndex:i];
            NSArray *y = [x objectAtIndex:j];
            int xx = [(NSNumber*)[y objectAtIndex:0] intValue];
            int yy = [(NSNumber*)[y objectAtIndex:1] intValue];
            if (stickerMat.at<cv::Vec4b>(cv::Point(i, j))[3] != '\0') {
                inputPano.at<cv::Vec4b>(cv::Point(xx, yy)) = stickerMat.at<cv::Vec4b>(cv::Point(i, j));
            }
        }
    }

    // for (int x = 0; x < xToChange.count; x++) {
    //     for (int y = 0; y < yToChange.count; y++)
    //     {
    //         int xPos = [(NSNumber*)[xToChange objectAtIndex:x] intValue];
    //         int yPos = [(NSNumber*)[yToChange objectAtIndex:y] intValue];
    //         if (stickerMat.at<cv::Vec4b>(cv::Point(y, x))[3] != '\0') {
    //             inputPano.at<cv::Vec4b>(cv::Point(xPos, yPos)) = stickerMat.at<cv::Vec4b>(cv::Point(y, x));
    //         }
    //     }
    // }

    UIImage * result = [UIImage imageWithCVMatAlpha:inputPano];

    return result;
}

+(UIImage *) getPanoScreenshot:(UIImage*)pano :(int)xStart :(int)xEnd :(int)yStart :(int)yEnd :(NSArray*)xToChange :(NSArray*)yToChange
{
    cv::Mat inputPano = pano.CVMatAlpha;

    //int xSize = xEnd - xStart;
    //int ySize = yEnd - yStart;

    int xSize = xToChange.count;
    int ySize = yToChange.count;

    cv::Mat outputPano(ySize, xSize, CV_8UC4);

    for (int i = 0; i < ySize; i++) {
        for (int j = 0; j < xSize; j++) {
            int y = [(NSNumber*)[yToChange objectAtIndex:i] intValue];
            int x = [(NSNumber*)[xToChange objectAtIndex:j] intValue];
            outputPano.at<cv::Vec4b>(cv::Point(j, i)) = inputPano.at<cv::Vec4b>(cv::Point(x, y));
        }
    }

    UIImage * result = [UIImage imageWithCVMatAlpha:outputPano];

    return result;
}

//
//+(cv::Mat) calculateLUT:(cv::Mat)in_cdf_mat :(cv::Mat)dst_cdf_mat {
//    int last = 0;
//    double epsilon = 0.01;
//    cv::Mat M(256, 1, CV_8UC1);
//    for (int j = 0; j < in_cdf_mat.rows; j++) {
//        double F1j = in_cdf_mat.ptr(j, 0)[0];
//        for (int k = last; k < dst_cdf_mat.rows; k++) {
//            double F2k = dst_cdf_mat.ptr(k, 0)[0];
//            if (abs(F2k - F1j) < epsilon || F2k > F1j) {
//                double data[1] = {(double)k};
//                M = cv::Mat(j, 0, CV_8UC1, data);
//                last = k;
//                break;
//            }
//        }
//    }
//    return M;
//}
//
//+(cv::Mat) calculateCDF:(cv::Mat)channel :(cv::Mat)cdf {
//    cv::Mat cdfResult;
//    for (int i = 1; i < 256; i++) {
//        double data[1] = {0};
//        data[0] = cdf.at<double[]>(i-1, 0)[1] + channel.at<double[]>(i, 0)[0];
//        cdf = cv::Mat(i, 0, CV_8UC1, data);
//    }
//    cdfResult = cdf;
//    return cdfResult;
//}
//
//+(cv::Mat) calcHistogram:(cv::Mat)image :(cv::Mat)y_hist :(cv::Mat)y_cdf {
//    cv::Mat ycrcb;
//
//    cv::cvtColor(image, ycrcb, CV_BGR2YCrCb);
//    image.release();
//
//    std::vector<cv::Mat> ycrcbChannels;
//    cv::split(ycrcb, ycrcbChannels);
//
//    std::vector<cv::Mat> yList;
//    yList.push_back(ycrcbChannels[0]);
//
//    std::vector<int> histSize(256);
//    std::vector<float> histRange(0, 256);
//
//    cv::calcHist(yList, std::vector<int>(0), cv::Mat(), y_hist, histSize, histRange);
//    cv::normalize(y_hist, y_hist, 3, cv::NORM_MINMAX);
//
//    cv::Mat cdfFinal;
//
//    cdfFinal = [OpenCVWrapper calculateCDF:y_hist :y_cdf];
//
//    cv::normalize(cdfFinal, cdfFinal, 3, cv::NORM_MINMAX);
//
//    ycrcb.release();
//
//    return cdfFinal;
//}
//
//+(void) transformLight:(cv::Mat)inputImage :(cv::Mat*)outputImage :(cv::Mat)ylut { // if needed, drop the *, and pass the image as &image
//    cv::Mat imageYCrCb;
//    cv::cvtColor(inputImage, imageYCrCb, CV_BGR2YCrCb);
//
//    cv::Mat y_channel;
//    cv::extractChannel(imageYCrCb, y_channel, 0);
//    cv::Mat cr_channel;
//    cv::extractChannel(imageYCrCb, cr_channel, 1);
//    cv::Mat cb_channel;
//    cv::extractChannel(imageYCrCb, cb_channel, 2);
//
//    cv::LUT(y_channel, ylut, y_channel);
//
//    std::vector<cv::Mat> ycrcbDest;
//    ycrcbDest.push_back(y_channel);
//    ycrcbDest.push_back(cr_channel);
//    ycrcbDest.push_back(cb_channel);
//
//    cv::merge(ycrcbDest, *outputImage);
//
//    cv::cvtColor(*outputImage, *outputImage, CV_YCrCb2BGR);
//
//    y_channel.release();
//    cr_channel.release();
//    cb_channel.release();
//    imageYCrCb.release();
//}
//
//+(UIImage *) matchHistograms:(UIImage*)inputImage :(UIImage*)targetImage {
//    cv::Mat input_y_hist;
//    cv::Mat target_y_hist;
//    cv::Mat input = inputImage.CVMat;
//    cv::Mat target = targetImage.CVMat;
//    cv::Mat input_y_cdf;
//    cv::Mat target_y_cdf;
//
//    input_y_cdf = [OpenCVWrapper calcHistogram:input :input_y_hist :input_y_cdf];
//    target_y_cdf = [OpenCVWrapper calcHistogram:input :target_y_hist :target_y_cdf];
//
//    cv::Mat ylut = [OpenCVWrapper calculateLUT:input_y_cdf :target_y_cdf];
//
//    cv::Mat dst;
//    [OpenCVWrapper transformLight:input :&dst :ylut];
//
//    UIImage * resultImage = [UIImage imageWithCVMat:dst];
//
//    input_y_hist.release();
//    target_y_hist.release();
//    input.release();
//    target.release();
//    input_y_cdf.release();
//    target_y_cdf.release();
//    ylut.release();
//
//    return resultImage;
//}

+(UIImage *) equalizeIntensity:(UIImage*)inputImage {
    cv::Mat input = inputImage.CVMat;

    if (input.channels() >= 3) {
        cv::Mat ycrcb;
        cv::cvtColor(input, ycrcb, CV_BGR2YCrCb);

        std::vector<cv::Mat> channels;
        cv::split(ycrcb, channels);

        cv::equalizeHist(channels[0], channels[0]);

        cv::Mat result;
        cv::merge(channels, ycrcb);

        cv::cvtColor(ycrcb, result, CV_YCrCb2BGR);

        UIImage * resultImage = [UIImage imageWithCVMat:result];

        return resultImage;
    }
    return nil;
}


/////////

+(void) do1ChnHist:(cv::Mat*)_i :(cv::Mat)mask :(double*)h :(double*)cdf {
    *_i = _i->reshape(1, 1);
    //cv::Mat _tm;
    //mask->copyTo(_tm);
    //mask = mask.reshape(1, 1);
    //for (int p = 0; p < _i->cols; p++) {
    //    *h += 1.0;
    //}

    cv::Mat _tmp(1, 256, CV_64FC1, h); // wrap the caller's 256-bin histogram buffer
    double minVal, maxVal;
    cv::minMaxLoc(_tmp, &minVal, &maxVal);
    _tmp = _tmp / maxVal;

    cdf[0] = h[0];
    for (int j = 1; j < 256; j++) {
        cdf[j] = cdf[j-1] + h[j];
    }

    _tmp.data = (uchar*)cdf;
    cv::minMaxLoc(_tmp, &minVal, &maxVal);
    _tmp = _tmp / maxVal;

    //_i->release();
    //_tm.release();
    //_tmp.release();
}

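// histMatchRGB:: — per-channel histogram matching: for every channel of the
// source and destination images it builds a (normalised) CDF via do1ChnHist,
// then derives a 256-entry lookup table M where M[j] is the first destination
// intensity whose CDF reaches the source CDF at j, and applies it with
// cv::LUT. Note the histogram-filling loop in do1ChnHist is commented out, so
// as it stands the CDFs are computed from whatever the zeroed buffers contain.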
+(UIImage *) histMatchRGB:(UIImage*)targetImage :(UIImage*)inputImage {
    cv::Mat srcMat = targetImage.CVMat;
    cv::Mat dstMat = inputImage.CVMat;

    cv::Mat src;
    cv::Mat dst;

    cv::cvtColor(srcMat, src, CV_BGRA2BGR);
    cv::cvtColor(dstMat, dst, CV_BGRA2BGR);

    cv::Mat src_mask;
    cv::Mat dst_mask;

    std::vector<cv::Mat> chns;
    cv::split(src, chns);
    std::vector<cv::Mat> chns1;
    cv::split(dst, chns1);

    cv::Mat src_hist = cv::Mat::zeros(1, 256, CV_64FC1);
    cv::Mat dst_hist = cv::Mat::zeros(1, 256, CV_64FC1);
    cv::Mat src_cdf = cv::Mat::zeros(1, 256, CV_64FC1);
    cv::Mat dst_cdf = cv::Mat::zeros(1, 256, CV_64FC1);

    cv::Mat Mv(1, 256, CV_8UC1);
    uchar *M = Mv.ptr<uchar>();

    for (int i = 0; i < 3; i++) {
        src_hist.setTo(cv::Scalar(0));
        dst_hist.setTo(cv::Scalar(0));
        src_cdf.setTo(cv::Scalar(0));
        dst_cdf.setTo(cv::Scalar(0));

        [OpenCVWrapper do1ChnHist:&chns[i] :src_mask :(double*)src_hist.data :(double*)src_cdf.data];
        [OpenCVWrapper do1ChnHist:&chns1[i] :dst_mask :(double*)dst_hist.data :(double*)dst_cdf.data];

        int last = 0;

        double *_src_cdf = src_cdf.ptr<double>();
        double *_dst_cdf = dst_cdf.ptr<double>();

        for (int j = 0; j < src_cdf.cols; j++) {
            double F1j = _src_cdf[j];

            for (int k = last; k < dst_cdf.cols; k++) {
                double F2k = _dst_cdf[k];
                if (cv::abs(F2k - F1j) < 1 || F2k > F1j) {
                    M[j] = (uchar)k;
                    last = k;
                    break;
                }
            }
        }

        cv::Mat lut(1, 256, CV_8UC1, M);
        cv::LUT(chns[i], lut, chns[i]);

        lut.release();
    }
    src_cdf.release();
    dst_cdf.release();
    src_hist.release();
    src_mask.release();
    dst_hist.release();
    dst_mask.release();

    cv::Mat res;
    cv::merge(chns, res);

    res.copyTo(src);

    UIImage * resultImage = [UIImage imageWithCVMat:dst];

    res.release();
    src.release();
    dst.release();
    srcMat.release();
    dstMat.release();

    return resultImage;
}

+(UIImage *) pyramideBlend:(UIImage*)firstImage :(UIImage*)secondImage {
    return nil;
}

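// simplestCB:: — "simplest color balance": for each BGR channel, clip the
// lowest and highest (percent/2)% of pixel values, then stretch the remaining
// range to 0..255 with cv::normalize. For example, percent = 1 clips 0.5% of
// the darkest and 0.5% of the brightest pixels per channel.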
+(UIImage *) simplestCB:(UIImage*)inputPhoto :(float)percent {
    cv::Mat inputBGRA = inputPhoto.CVMat;
    cv::Mat input;
    cv::cvtColor(inputBGRA, input, CV_BGRA2BGR);
    CV_Assert(input.channels() == 3);
    CV_Assert(percent > 0 && percent < 100);

    float half_percent = percent / 200;
    std::vector<cv::Mat> tmpsplit;
    cv::split(input, tmpsplit);

    for (int i = 0; i < 3; i++) {
        cv::Mat flat;
        tmpsplit[i].reshape(1, 1).copyTo(flat);
        cv::sort(flat, flat, CV_SORT_EVERY_ROW + CV_SORT_ASCENDING);
        int lowval = flat.at<uchar>(cvFloor(((float)flat.cols) * half_percent));
        int highval = flat.at<uchar>(cvCeil(((float)flat.cols) * (1 - half_percent)));

        tmpsplit[i].setTo(lowval, tmpsplit[i] < lowval);
        tmpsplit[i].setTo(highval, tmpsplit[i] > highval);

        cv::normalize(tmpsplit[i], tmpsplit[i], 0, 255, cv::NORM_MINMAX);
    }
    cv::Mat outputPhoto;
    cv::merge(tmpsplit, outputPhoto);

    UIImage * resultImage = [UIImage imageWithCVMat:outputPhoto];

    outputPhoto.release();
    input.release();
    inputBGRA.release();

    return resultImage;
}

+(UIImage *) removeGreen:(UIImage*)inputPhoto {
    cv::Mat dstPhoto = inputPhoto.CVMat;
    cv::Mat dst;
    cv::cvtColor(dstPhoto, dst, CV_BGRA2BGR);
    // Per-column colour tweak: brighten slightly (+7), boost blue/red and damp
    // green, with different coefficients for the centre band (columns
    // 501..1531) and the two side bands.
    for (int i = 501; i < 1532; i++) {
        for (int k = 0; k < dst.rows; k++) {
            dst.at<cv::Vec3b>(cv::Point(i, k))[0] = (uchar)(cv::min(255, (int)((int)dst.at<cv::Vec3b>(cv::Point(i, k))[0] * 1.03 + 7)));
            dst.at<cv::Vec3b>(cv::Point(i, k))[1] = (uchar)(cv::min(255, (int)((int)dst.at<cv::Vec3b>(cv::Point(i, k))[1] * 0.95 + 7)));
            dst.at<cv::Vec3b>(cv::Point(i, k))[2] = (uchar)(cv::min(255, (int)((int)dst.at<cv::Vec3b>(cv::Point(i, k))[2] * 1.02 + 7)));
        }
    }

    for (int i = 0; i < 501; i++) {
        for (int k = 0; k < dst.rows; k++) {
            dst.at<cv::Vec3b>(cv::Point(i, k))[0] = (uchar)(cv::min(255, (int)((int)dst.at<cv::Vec3b>(cv::Point(i, k))[0] * 1.01 + 7)));
            dst.at<cv::Vec3b>(cv::Point(i, k))[1] = (uchar)(cv::min(255, (int)((int)dst.at<cv::Vec3b>(cv::Point(i, k))[1] * 0.96 + 7)));
            dst.at<cv::Vec3b>(cv::Point(i, k))[2] = (uchar)(cv::min(255, (int)((int)dst.at<cv::Vec3b>(cv::Point(i, k))[2] * 1.02 + 7)));
        }
    }

    for (int i = 1532; i < 2048; i++) {
        for (int k = 0; k < dst.rows; k++) {
            dst.at<cv::Vec3b>(cv::Point(i, k))[0] = (uchar)(cv::min(255, (int)((int)dst.at<cv::Vec3b>(cv::Point(i, k))[0] * 1.01 + 7)));
            dst.at<cv::Vec3b>(cv::Point(i, k))[1] = (uchar)(cv::min(255, (int)((int)dst.at<cv::Vec3b>(cv::Point(i, k))[1] * 0.96 + 7)));
            dst.at<cv::Vec3b>(cv::Point(i, k))[2] = (uchar)(cv::min(255, (int)((int)dst.at<cv::Vec3b>(cv::Point(i, k))[2] * 1.02 + 7)));
        }
    }

    UIImage * resultImage = [UIImage imageWithCVMat:dst];
    dstPhoto.release();
    dst.release();
    return resultImage;
}

+(UIImage *) fixSaturation:(UIImage*)inputPhoto {
    cv::Mat dstPhoto = inputPhoto.CVMat;
    cv::Mat dstBGR;
    cv::cvtColor(dstPhoto, dstBGR, CV_BGRA2BGR);
    cv::Mat dstHSV;
    cv::cvtColor(dstBGR, dstHSV, CV_BGR2HSV);

    // Boost saturation ([1]) slightly; note that the value channel ([2]) is
    // then derived from the already-boosted saturation channel rather than
    // from [2] itself, which may be unintended.
    for (int i = 0; i < dstHSV.cols; i++) {
        for (int k = 0; k < dstHSV.rows; k++) {
            dstHSV.at<cv::Vec3b>(cv::Point(i, k))[1] = (uchar)((int)dstHSV.at<cv::Vec3b>(cv::Point(i, k))[1] * 1.02);
            dstHSV.at<cv::Vec3b>(cv::Point(i, k))[2] = (uchar)((int)dstHSV.at<cv::Vec3b>(cv::Point(i, k))[1] * 0.8);
        }
    }

    cv::Mat resultMat;
    cv::cvtColor(dstHSV, resultMat, CV_HSV2BGR);
    UIImage * resultImage = [UIImage imageWithCVMat:resultMat];
    dstPhoto.release();
    resultMat.release();
    dstHSV.release();
    return resultImage;
}


@end