Not a member of Pastebin yet? Sign up — it unlocks many cool features!
- import cv2
- import numpy as np
def find_segment(db_img, querry_img, threshold=100, return_area=False):
    """Locate the region of ``querry_img`` that matches ``db_img`` using SIFT.

    Keypoints are detected in both images, matched with a FLANN KD-tree
    matcher, and filtered with Lowe's ratio test. When enough good matches
    survive, the query image is warped into the reference frame via a
    RANSAC-estimated homography.

    Parameters
    ----------
    db_img : numpy.ndarray
        Reference (database) image, BGR.
    querry_img : numpy.ndarray
        Query image that may contain the reference content, BGR.
    threshold : int, optional
        Minimum number of ratio-test survivors required to accept the match.
    return_area : bool, optional
        If True, return ``cv2.minAreaRect`` of the matched keypoints in the
        query image instead of the warped segment.

    Returns
    -------
    numpy.ndarray or tuple or None
        The query image warped to ``db_img``'s size, the rotated bounding
        rect when ``return_area`` is True, or ``None`` when the match is
        too weak to be trusted.
    """
    grayscale_db = cv2.cvtColor(db_img, cv2.COLOR_BGR2GRAY)
    gray_scale_querry = cv2.cvtColor(querry_img, cv2.COLOR_BGR2GRAY)

    # SIFT moved from the xfeatures2d contrib module into the main cv2
    # namespace in OpenCV >= 4.4; support both layouts.
    if hasattr(cv2, "SIFT_create"):
        sift = cv2.SIFT_create()
    else:
        sift = cv2.xfeatures2d.SIFT_create()
    kp1, des1 = sift.detectAndCompute(grayscale_db, None)
    kp2, des2 = sift.detectAndCompute(gray_scale_querry, None)

    result_area = None
    h, w = grayscale_db.shape

    # knnMatch(k=2) needs at least 2 descriptors on each side; bail out
    # early instead of letting FLANN raise.
    if des1 is None or des2 is None or len(des1) < 2 or len(des2) < 2:
        return result_area

    FLANN_INDEX_KDTREE = 0
    index_params = dict(algorithm=FLANN_INDEX_KDTREE, trees=5)
    search_params = dict(checks=50)
    flann = cv2.FlannBasedMatcher(index_params, search_params)
    matches = flann.knnMatch(des1, des2, k=2)

    # Lowe's ratio test. Guard against pairs with fewer than 2 neighbours,
    # which knnMatch can legitimately return — unpacking `m, n` would raise.
    good = [pair[0] for pair in matches
            if len(pair) == 2 and pair[0].distance < 0.7 * pair[1].distance]

    if len(good) > threshold:
        dst_pts = np.float32([kp2[m.trainIdx].pt for m in good]).reshape(-1, 1, 2)
        if return_area:
            return cv2.minAreaRect(dst_pts)
        src_pts = np.float32([kp1[m.queryIdx].pt for m in good]).reshape(-1, 1, 2)
        # Homography mapping the query image into the reference (db) frame.
        M, mask = cv2.findHomography(dst_pts, src_pts, cv2.RANSAC, 5.0)
        # findHomography can fail and return None; warping with it would crash.
        if M is not None:
            result_area = cv2.warpPerspective(
                querry_img, M, (w, h),
                borderMode=cv2.BORDER_CONSTANT,
                borderValue=(255, 255, 255),
                flags=cv2.INTER_LINEAR)
    return result_area
# Script entry: diff the database image against the matched segment of the
# query image and save the result.
querry = cv2.imread("E:/querry.jpg")
db = cv2.imread("E:/db_img.jpg")
# cv2.imread returns None (it does not raise) for a missing/unreadable file.
if querry is None or db is None:
    raise FileNotFoundError("Could not read E:/querry.jpg or E:/db_img.jpg")
segment = find_segment(db, querry)
# find_segment returns None on a weak match; absdiff would fail cryptically.
if segment is None:
    raise RuntimeError("No sufficiently strong match between query and db image")
diff = cv2.absdiff(db, segment)
cv2.imwrite("E:/diff.jpg", diff)
Add comment
Please sign in to add a comment.