Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
- using System;
- using System.Collections.Generic;
- using System.ComponentModel;
- using System.Data;
- using System.Drawing;
- using System.Linq;
- using System.Text;
- using System.Threading.Tasks;
- using System.Windows.Forms;
- using Emgu.CV;
- using Emgu.CV.CvEnum;
- using Emgu.CV.Features2D;
- using Emgu.CV.Structure;
- using Emgu.CV.UI;
- using Emgu.CV.Util;
- using Emgu.CV.GPU;
- namespace Project
- {
- public partial class Form1 : Form
- {
public Form1()
{
    // Standard WinForms designer initialization. All stitching work is
    // kicked off from Form1_Load, not from the constructor.
    InitializeComponent();
}
/// <summary>
/// Loads the eight overlapping panorama tiles and stitches them pairwise
/// into a single mosaic, saving each intermediate to D:\resultN.jpg.
/// Stitch order: (2+3), (4+5), (6+7), then (1+23), (123+45), (67+8),
/// and finally (12345 + 678).
/// </summary>
private void Form1_Load(object sender, EventArgs e)
{
    // NOTE(review): input/output paths are hard-coded to D:\ — consider
    // making them configurable.
    Image<Bgr, float> one = new Image<Bgr, float>("D:\\Venice_panorama_part_01.jpg");
    Image<Bgr, float> two = new Image<Bgr, float>("D:\\Venice_panorama_part_02.jpg");
    Image<Bgr, float> third = new Image<Bgr, float>("D:\\Venice_panorama_part_03.jpg");
    Image<Bgr, float> fourth = new Image<Bgr, float>("D:\\Venice_panorama_part_04.jpg");
    Image<Bgr, float> fifth = new Image<Bgr, float>("D:\\Venice_panorama_part_05.jpg");
    Image<Bgr, float> sixth = new Image<Bgr, float>("D:\\Venice_panorama_part_06.jpg");
    Image<Bgr, float> seventh = new Image<Bgr, float>("D:\\Venice_panorama_part_07.jpg");
    Image<Bgr, float> eighth = new Image<Bgr, float>("D:\\Venice_panorama_part_08.jpg");

    // Pairwise reduction; each call saves its intermediate immediately so
    // partial progress survives a failure later in the chain.
    Image<Bgr, float> twoPlusThree = StitchAndSave(two, third, "D:\\result1.jpg");
    Image<Bgr, float> fourPlusFive = StitchAndSave(fourth, fifth, "D:\\result2.jpg");
    Image<Bgr, float> sixPlusSeven = StitchAndSave(sixth, seventh, "D:\\result3.jpg");
    Image<Bgr, float> oneTwoThree = StitchAndSave(one, twoPlusThree, "D:\\result4.jpg");
    Image<Bgr, float> oneToFive = StitchAndSave(oneTwoThree, fourPlusFive, "D:\\result5.jpg");
    Image<Bgr, float> sixToEight = StitchAndSave(sixPlusSeven, eighth, "D:\\result6.jpg");
    StitchAndSave(oneToFive, sixToEight, "D:\\result7.jpg");

    this.Close();
}

/// <summary>
/// Stitches <paramref name="left"/> onto <paramref name="right"/> via
/// <see cref="FindMatch"/>, crops the black border with <see cref="convert"/>,
/// saves the result to <paramref name="savePath"/>, and returns it as a
/// float image so it can be fed into the next stitching round.
/// </summary>
/// <exception cref="InvalidOperationException">
/// Thrown when no homography could be estimated (FindMatch returned null) —
/// previously this surfaced as a NullReferenceException inside convert().
/// </exception>
private Image<Bgr, float> StitchAndSave(Image<Bgr, float> left, Image<Bgr, float> right, string savePath)
{
    Image<Bgr, Byte> stitched = FindMatch(left, right);
    if (stitched == null)
        throw new InvalidOperationException(
            "Stitching failed for " + savePath + ": no homography could be estimated.");
    stitched = convert(stitched);
    stitched.Save(savePath);
    return stitched.Convert<Bgr, float>();
}
/// <summary>
/// Registers <paramref name="lImage"/> onto <paramref name="fImage"/> using
/// SURF features (GPU path when CUDA is available, CPU otherwise), estimates
/// a homography from the matched keypoints, and composites the two images
/// into one mosaic canvas.
/// </summary>
/// <param name="fImage">Reference (model) image; kept at the origin.</param>
/// <param name="lImage">Image to be warped onto the reference.</param>
/// <returns>The stitched mosaic, or null when no homography was found.</returns>
public static Image<Bgr, Byte> FindMatch(Image<Bgr, float> fImage, Image<Bgr, float> lImage)
{
    HomographyMatrix homography = null;
    SURFDetector surfCPU = new SURFDetector(500, false);
    int k = 2;                          // k-NN neighbours per descriptor
    double uniquenessThreshold = 0.8;   // Lowe-style ratio test threshold
    Matrix<int> indices;
    Matrix<byte> mask;
    VectorOfKeyPoint modelKeyPoints;
    VectorOfKeyPoint observedKeyPoints;

    // Feature detection runs on grayscale copies; dispose them when done
    // (previously leaked).
    using (Image<Gray, Byte> fImageG = fImage.Convert<Gray, Byte>())
    using (Image<Gray, Byte> lImageG = lImage.Convert<Gray, Byte>())
    {
        if (GpuInvoke.HasCuda)
        {
            GpuSURFDetector surfGPU = new GpuSURFDetector(surfCPU.SURFParams, 0.01f);
            // Extract features from the model image.
            using (GpuImage<Gray, Byte> gpuModelImage = new GpuImage<Gray, byte>(fImageG))
            using (GpuMat<float> gpuModelKeyPoints = surfGPU.DetectKeyPointsRaw(gpuModelImage, null))
            using (GpuMat<float> gpuModelDescriptors = surfGPU.ComputeDescriptorsRaw(gpuModelImage, null, gpuModelKeyPoints))
            using (GpuBruteForceMatcher<float> matcher = new GpuBruteForceMatcher<float>(DistanceType.L2))
            {
                modelKeyPoints = new VectorOfKeyPoint();
                surfGPU.DownloadKeypoints(gpuModelKeyPoints, modelKeyPoints);

                // Extract features from the observed image.
                using (GpuImage<Gray, Byte> gpuObservedImage = new GpuImage<Gray, byte>(lImageG))
                using (GpuMat<float> gpuObservedKeyPoints = surfGPU.DetectKeyPointsRaw(gpuObservedImage, null))
                using (GpuMat<float> gpuObservedDescriptors = surfGPU.ComputeDescriptorsRaw(gpuObservedImage, null, gpuObservedKeyPoints))
                using (GpuMat<int> gpuMatchIndices = new GpuMat<int>(gpuObservedDescriptors.Size.Height, k, 1, true))
                using (GpuMat<float> gpuMatchDist = new GpuMat<float>(gpuObservedDescriptors.Size.Height, k, 1, true))
                using (GpuMat<Byte> gpuMask = new GpuMat<byte>(gpuMatchIndices.Size.Height, 1, 1))
                using (Stream stream = new Stream())
                {
                    matcher.KnnMatchSingle(gpuObservedDescriptors, gpuModelDescriptors, gpuMatchIndices, gpuMatchDist, k, null, stream);
                    indices = new Matrix<int>(gpuMatchIndices.Size);
                    mask = new Matrix<byte>(gpuMask.Size);

                    // GPU implementation of VoteForUniqueness: keep a match
                    // when best distance <= threshold * second-best distance.
                    using (GpuMat<float> col0 = gpuMatchDist.Col(0))
                    using (GpuMat<float> col1 = gpuMatchDist.Col(1))
                    {
                        GpuInvoke.Multiply(col1, new MCvScalar(uniquenessThreshold), col1, stream);
                        GpuInvoke.Compare(col0, col1, gpuMask, CMP_TYPE.CV_CMP_LE, stream);
                    }

                    observedKeyPoints = new VectorOfKeyPoint();
                    surfGPU.DownloadKeypoints(gpuObservedKeyPoints, observedKeyPoints);

                    // Wait for the async stream before reading its outputs.
                    stream.WaitForCompletion();
                    gpuMask.Download(mask);
                    gpuMatchIndices.Download(indices);

                    // A homography needs at least 4 point correspondences.
                    if (GpuInvoke.CountNonZero(gpuMask) >= 4)
                    {
                        int nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                        if (nonZeroCount >= 4)
                            homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
                    }
                }
            }
        }
        else
        {
            // CPU path: detect + describe, brute-force k-NN match, ratio test.
            modelKeyPoints = new VectorOfKeyPoint();
            observedKeyPoints = new VectorOfKeyPoint();
            using (Matrix<float> modelDescriptors = surfCPU.DetectAndCompute(fImageG, null, modelKeyPoints))
            using (Matrix<float> observedDescriptors = surfCPU.DetectAndCompute(lImageG, null, observedKeyPoints))
            using (BruteForceMatcher<float> matcher = new BruteForceMatcher<float>(DistanceType.L2))
            {
                matcher.Add(modelDescriptors);
                indices = new Matrix<int>(observedDescriptors.Rows, k);
                using (Matrix<float> dist = new Matrix<float>(observedDescriptors.Rows, k))
                {
                    matcher.KnnMatch(observedDescriptors, indices, dist, k, null);
                    mask = new Matrix<byte>(dist.Rows, 1);
                    mask.SetValue(255);
                    Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
                }

                int nonZeroCount = CvInvoke.cvCountNonZero(mask);
                if (nonZeroCount >= 4)
                {
                    nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                    if (nonZeroCount >= 4)
                        homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
                }
            }
        }
    }

    if (homography == null)
        return null;

    using (Image<Bgr, Byte> mImage = fImage.Convert<Bgr, Byte>())
    using (Image<Bgr, Byte> oImage = lImage.Convert<Bgr, Byte>())
    {
        // Canvas large enough to hold both images after warping; the extra
        // 2000px / 2x height headroom matches the original implementation.
        int mosaicWidth = mImage.Width + oImage.Width + 2000;
        int mosaicHeight = mImage.Height * 2;

        // Identity transform keeps the reference image at the canvas origin.
        HomographyMatrix origin = new HomographyMatrix();
        origin.SetIdentity();

        Image<Bgr, Byte> mosaic = mImage.WarpPerspective(origin, mosaicWidth, mosaicHeight,
            Emgu.CV.CvEnum.INTER.CV_INTER_LINEAR, Emgu.CV.CvEnum.WARP.CV_WARP_DEFAULT, new Bgr(0, 0, 0));

        using (Image<Bgr, byte> warp_image = oImage.WarpPerspective(homography, mosaicWidth, mosaicHeight,
            Emgu.CV.CvEnum.INTER.CV_INTER_LINEAR, Emgu.CV.CvEnum.WARP.CV_WARP_INVERSE_MAP, new Bgr(200, 0, 0)))
        using (Image<Gray, byte> warp_image_mask = oImage.Convert<Gray, byte>())
        {
            // Warp a solid-white mask the same way as the observed image so
            // only its footprint is copied onto the mosaic.
            warp_image_mask.SetValue(new Gray(255));
            using (Image<Gray, byte> warp_mosaic_mask = warp_image_mask.WarpPerspective(homography, mosaicWidth, mosaicHeight,
                Emgu.CV.CvEnum.INTER.CV_INTER_LINEAR, Emgu.CV.CvEnum.WARP.CV_WARP_INVERSE_MAP, new Gray(0)))
            {
                warp_image.Copy(mosaic, warp_mosaic_mask);
            }
        }
        return mosaic;
    }
}
/// <summary>
/// Crops the black border left by stitching: finds the largest external
/// contour of the non-black region and restricts the image ROI to its
/// bounding rectangle.
/// </summary>
/// <param name="img">Stitched image; its ROI is modified in place.</param>
/// <returns>The same image instance, with ROI set when a contour was found.</returns>
private Image<Bgr, Byte> convert(Image<Bgr, Byte> img)
{
    // Contour detection works on a grayscale copy; dispose it afterwards
    // (previously leaked, along with an unused CopyBlank() mask).
    using (Image<Gray, byte> imgGray = img.Convert<Gray, byte>())
    {
        Contour<Point> largestContour = null;
        double largestArea = 0;
        for (var contours = imgGray.FindContours(CHAIN_APPROX_METHOD.CV_CHAIN_APPROX_SIMPLE,
                 RETR_TYPE.CV_RETR_EXTERNAL);
             contours != null;
             contours = contours.HNext)
        {
            if (contours.Area > largestArea)
            {
                largestArea = contours.Area;
                largestContour = contours;
            }
        }

        // Guard: an all-black image yields no contours; previously this
        // dereferenced null. Leave the image uncropped in that case.
        if (largestContour != null)
            img.ROI = largestContour.BoundingRectangle; // idiomatic form of cvSetImageROI
    }
    return img;
}
- }
- }
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement