Advertisement
gitanshu

Emgu CV panorama

Nov 24th, 2013
2,597
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
C# 11.65 KB | None | 0 0
  1. using System;
  2. using System.Collections.Generic;
  3. using System.ComponentModel;
  4. using System.Data;
  5. using System.Drawing;
  6. using System.Linq;
  7. using System.Text;
  8. using System.Threading.Tasks;
  9. using System.Windows.Forms;
  10. using Emgu.CV;
  11. using Emgu.CV.CvEnum;
  12. using Emgu.CV.Features2D;
  13. using Emgu.CV.Structure;
  14. using Emgu.CV.UI;
  15. using Emgu.CV.Util;
  16. using Emgu.CV.GPU;
  17.  
  18.  
  19. namespace Project
  20. {
  21.     public partial class Form1 : Form
  22.     {
  23.         public Form1()
  24.         {
  25.             InitializeComponent();
  26.         }
  27.  
  28.         private void Form1_Load(object sender, EventArgs e)
  29.         {
  30.             Image<Bgr, float> one = new Image<Bgr, float>("D:\\Venice_panorama_part_01.jpg");
  31.             Image<Bgr, float> two = new Image<Bgr, float>("D:\\Venice_panorama_part_02.jpg");
  32.             Image<Bgr, float> third = new Image<Bgr, float>("D:\\Venice_panorama_part_03.jpg");
  33.             Image<Bgr, float> fourth = new Image<Bgr, float>("D:\\Venice_panorama_part_04.jpg");
  34.             Image<Bgr, float> fifth = new Image<Bgr, float>("D:\\Venice_panorama_part_05.jpg");
  35.             Image<Bgr, float> sixth = new Image<Bgr, float>("D:\\Venice_panorama_part_06.jpg");
  36.             Image<Bgr, float> seventh = new Image<Bgr, float>("D:\\Venice_panorama_part_07.jpg");
  37.             Image<Bgr, float> eighth = new Image<Bgr, float>("D:\\Venice_panorama_part_08.jpg");
  38.  
  39.  
  40.  
  41.             Image<Bgr, Byte> result = FindMatch(two, third);
  42.             result = convert(result);
  43.             Image<Bgr, float> twoPlusThree = result.Convert<Bgr, float>();
  44.  
  45.            
  46.  
  47.  
  48.             Image<Bgr, Byte> result2 = FindMatch(fourth, fifth);
  49.             result2 = convert(result2);
  50.             Image<Bgr, float> fourPlusFive = result2.Convert<Bgr, float>();
  51.  
  52.            
  53.  
  54.             Image<Bgr, Byte> result3 = FindMatch(sixth, seventh);
  55.             result3 = convert(result3);
  56.             Image<Bgr, float> sixPlusSeven = result3.Convert<Bgr, float>();
  57.  
  58.            
  59.  
  60.             Image<Bgr, Byte> result4 = FindMatch(one, twoPlusThree);
  61.             result4 = convert(result4);
  62.             Image<Bgr, float> oneTwoThree = result4.Convert<Bgr, float>();
  63.  
  64.            
  65.  
  66.             Image<Bgr, Byte> result5 = FindMatch(oneTwoThree, fourPlusFive);
  67.             result5 = convert(result5);
  68.             Image<Bgr, float> oneTwoThreeFourFive = result5.Convert<Bgr, float>();
  69.  
  70.            
  71.  
  72.             Image<Bgr, Byte> result6 = FindMatch(sixPlusSeven, eighth);
  73.             result6 = convert(result6);
  74.             Image<Bgr, float> sixSevenEigth = result6.Convert<Bgr, float>();
  75.  
  76.            
  77.  
  78.             Image<Bgr, Byte> result7 = FindMatch(oneTwoThreeFourFive, sixSevenEigth);
  79.  
  80.             result7 = convert(result7);
  81.  
  82.             result.Save("D:\\result1.jpg");
  83.             result2.Save("D:\\result2.jpg");
  84.             result3.Save("D:\\result3.jpg");
  85.             result4.Save("D:\\result4.jpg");
  86.             result5.Save("D:\\result5.jpg");
  87.             result6.Save("D:\\result6.jpg");
  88.             result7.Save("D:\\result7.jpg");
  89.             this.Close();
  90.  
  91.         }
  92.  
  93.         public static Image<Bgr, Byte> FindMatch(Image<Bgr, float> fImage, Image<Bgr, float> lImage)
  94.         {
  95.             HomographyMatrix homography = null;
  96.             SURFDetector surfCPU = new SURFDetector(500, false);
  97.            
  98.            
  99.             int k = 2;
  100.             double uniquenessThreshold = 0.8;
  101.             Matrix<int> indices;
  102.  
  103.             Matrix<byte> mask;
  104.  
  105.             VectorOfKeyPoint modelKeyPoints;
  106.             VectorOfKeyPoint observedKeyPoints;
  107.             Image<Gray, Byte> fImageG = fImage.Convert<Gray, Byte>();
  108.             Image<Gray, Byte> lImageG = lImage.Convert<Gray, Byte>();
  109.  
  110.             if (GpuInvoke.HasCuda)
  111.             {
  112.                 GpuSURFDetector surfGPU = new GpuSURFDetector(surfCPU.SURFParams, 0.01f);
  113.                 using (GpuImage<Gray, Byte> gpuModelImage = new GpuImage<Gray, byte>(fImageG))
  114.                 //extract features from the object image
  115.                 using (GpuMat<float> gpuModelKeyPoints = surfGPU.DetectKeyPointsRaw(gpuModelImage, null))
  116.                 using (GpuMat<float> gpuModelDescriptors = surfGPU.ComputeDescriptorsRaw(gpuModelImage, null, gpuModelKeyPoints))
  117.                 using (GpuBruteForceMatcher<float> matcher = new GpuBruteForceMatcher<float>(DistanceType.L2))
  118.                 {
  119.                     modelKeyPoints = new VectorOfKeyPoint();
  120.                     surfGPU.DownloadKeypoints(gpuModelKeyPoints, modelKeyPoints);
  121.  
  122.                     // extract features from the observed image
  123.                     using (GpuImage<Gray, Byte> gpuObservedImage = new GpuImage<Gray, byte>(lImageG))
  124.                     using (GpuMat<float> gpuObservedKeyPoints = surfGPU.DetectKeyPointsRaw(gpuObservedImage, null))
  125.                     using (GpuMat<float> gpuObservedDescriptors = surfGPU.ComputeDescriptorsRaw(gpuObservedImage, null, gpuObservedKeyPoints))
  126.                     using (GpuMat<int> gpuMatchIndices = new GpuMat<int>(gpuObservedDescriptors.Size.Height, k, 1, true))
  127.                     using (GpuMat<float> gpuMatchDist = new GpuMat<float>(gpuObservedDescriptors.Size.Height, k, 1, true))
  128.                     using (GpuMat<Byte> gpuMask = new GpuMat<byte>(gpuMatchIndices.Size.Height, 1, 1))
  129.                     using (Stream stream = new Stream())
  130.                     {
  131.                         matcher.KnnMatchSingle(gpuObservedDescriptors, gpuModelDescriptors, gpuMatchIndices, gpuMatchDist, k, null, stream);
  132.                         indices = new Matrix<int>(gpuMatchIndices.Size);
  133.                         mask = new Matrix<byte>(gpuMask.Size);
  134.  
  135.                         //gpu implementation of voteForUniquess
  136.                         using (GpuMat<float> col0 = gpuMatchDist.Col(0))
  137.                         using (GpuMat<float> col1 = gpuMatchDist.Col(1))
  138.                         {
  139.                             GpuInvoke.Multiply(col1, new MCvScalar(uniquenessThreshold), col1, stream);
  140.                             GpuInvoke.Compare(col0, col1, gpuMask, CMP_TYPE.CV_CMP_LE, stream);
  141.                         }
  142.  
  143.                         observedKeyPoints = new VectorOfKeyPoint();
  144.                         surfGPU.DownloadKeypoints(gpuObservedKeyPoints, observedKeyPoints);
  145.  
  146.                         //wait for the stream to complete its tasks
  147.                         //We can perform some other CPU intesive stuffs here while we are waiting for the stream to complete.
  148.                         stream.WaitForCompletion();
  149.  
  150.                         gpuMask.Download(mask);
  151.                         gpuMatchIndices.Download(indices);
  152.  
  153.                         if (GpuInvoke.CountNonZero(gpuMask) >= 4)
  154.                         {
  155.                             int nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
  156.                             if (nonZeroCount >= 4)
  157.                                 homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
  158.                         }
  159.  
  160.                     }
  161.                 }
  162.             }
  163.             else
  164.             {
  165.                
  166.                
  167.  
  168.                 //extract features from the object image
  169.                 modelKeyPoints = new VectorOfKeyPoint();
  170.                 Matrix<float> modelDescriptors = surfCPU.DetectAndCompute(fImageG, null, modelKeyPoints);
  171.  
  172.  
  173.                 // extract features from the observed image
  174.                 observedKeyPoints = new VectorOfKeyPoint();
  175.                 Matrix<float> observedDescriptors = surfCPU.DetectAndCompute(lImageG, null, observedKeyPoints);
  176.                 BruteForceMatcher<float> matcher = new BruteForceMatcher<float>(DistanceType.L2);
  177.                 matcher.Add(modelDescriptors);
  178.  
  179.                 indices = new Matrix<int>(observedDescriptors.Rows, k);
  180.                 using (Matrix<float> dist = new Matrix<float>(observedDescriptors.Rows, k))
  181.                 {
  182.                     matcher.KnnMatch(observedDescriptors, indices, dist, k, null);
  183.                     mask = new Matrix<byte>(dist.Rows, 1);
  184.                     mask.SetValue(255);
  185.                     Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
  186.                 }
  187.  
  188.                 int nonZeroCount = CvInvoke.cvCountNonZero(mask);
  189.                 if (nonZeroCount >= 4)
  190.                 {
  191.                     nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
  192.                     if (nonZeroCount >= 4)
  193.                         homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
  194.                 }
  195.             }
  196.             Image<Bgr, Byte> mImage = fImage.Convert<Bgr, Byte>();
  197.             Image<Bgr, Byte> oImage = lImage.Convert<Bgr, Byte>();
  198.             Image<Bgr, Byte> result = new Image<Bgr, byte>(mImage.Width + oImage.Width, mImage.Height);
  199.  
  200.             if (homography != null)
  201.             {  //draw a rectangle along the projected model
  202.                 Rectangle rect = fImage.ROI;
  203.                 PointF[] pts = new PointF[] {
  204.                new PointF(rect.Left, rect.Bottom),
  205.                new PointF(rect.Right, rect.Bottom),
  206.                new PointF(rect.Right, rect.Top),
  207.                new PointF(rect.Left, rect.Top)};
  208.                 homography.ProjectPoints(pts);
  209.  
  210.                 HomographyMatrix origin = new HomographyMatrix();                //I perform a copy of the left image with a not real shift operation on the origin
  211.                 origin.SetIdentity();
  212.                 origin.Data[0, 2] = 0;
  213.                 origin.Data[1, 2] = 0;
  214.                 Image<Bgr, Byte> mosaic = new Image<Bgr, byte>(mImage.Width + oImage.Width + 2000, mImage.Height*2);
  215.  
  216.                 Image<Bgr, byte> warp_image = mosaic.Clone();
  217.  
  218.                 mosaic = mImage.WarpPerspective(origin, mosaic.Width, mosaic.Height, Emgu.CV.CvEnum.INTER.CV_INTER_LINEAR, Emgu.CV.CvEnum.WARP.CV_WARP_DEFAULT, new Bgr(0, 0, 0));
  219.  
  220.  
  221.                 warp_image = oImage.WarpPerspective(homography, warp_image.Width, warp_image.Height, Emgu.CV.CvEnum.INTER.CV_INTER_LINEAR, Emgu.CV.CvEnum.WARP.CV_WARP_INVERSE_MAP, new Bgr(200, 0, 0));
  222.                 Image<Gray, byte> warp_image_mask = oImage.Convert<Gray, byte>();
  223.                 warp_image_mask.SetValue(new Gray(255));
  224.                 Image<Gray, byte> warp_mosaic_mask = mosaic.Convert<Gray, byte>();
  225.                 warp_mosaic_mask.SetZero();
  226.                 warp_mosaic_mask = warp_image_mask.WarpPerspective(homography, warp_mosaic_mask.Width, warp_mosaic_mask.Height, Emgu.CV.CvEnum.INTER.CV_INTER_LINEAR, Emgu.CV.CvEnum.WARP.CV_WARP_INVERSE_MAP, new Gray(0));
  227.                 warp_image.Copy(mosaic, warp_mosaic_mask);
  228.                
  229.                 return mosaic;
  230.             }
  231.             return null;
  232.         }
  233.  
  234.         private Image<Bgr, Byte> convert(Image<Bgr, Byte> img)
  235.         {
  236.             Image<Gray, byte> imgGray = img.Convert<Gray, byte>();
  237.             Image<Gray, byte> mask = imgGray.CopyBlank();
  238.  
  239.             Contour<Point> largestContour = null;
  240.             double largestarea = 0;
  241.  
  242.             for (var contours = imgGray.FindContours(CHAIN_APPROX_METHOD.CV_CHAIN_APPROX_SIMPLE,
  243.                 RETR_TYPE.CV_RETR_EXTERNAL); contours != null; contours = contours.HNext)
  244.             {
  245.                 if (contours.Area > largestarea)
  246.                 {
  247.                     largestarea = contours.Area;
  248.                     largestContour = contours;
  249.                 }
  250.             }
  251.             CvInvoke.cvSetImageROI(img, largestContour.BoundingRectangle);
  252.             return img;
  253.         }
  254.     }
  255. }
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement