Advertisement
mechagical

Hand Recognition Example

Jun 25th, 2022 (edited)
630
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
Python 4.66 KB | None | 0 0
import numpy as np
import cv2
import math
import socket
import time

# Destination for one-shot UDP control messages (e.g. a game client
# listening locally for the "JUMP!" trigger sent in the main loop).
UDP_IP = "127.0.0.1"
UDP_PORT = 5065

# Fire-and-forget datagram socket; nothing is ever received on it.
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)

# Rolling history of defect counts from the most recent frames, used in
# the main loop to detect an open-hand -> fist transition.
last = []
  13.  
  14. # Open Camera
  15. try:
  16.     default = 0 # Try Changing it to 1 if webcam not found
  17.     capture = cv2.VideoCapture(default)
  18. except:
  19.     print("No Camera Source Found!")
  20.  
  21. while capture.isOpened():
  22.    
  23.     # Capture frames from the camera
  24.     ret, frame = capture.read()
  25.    
  26.     # Get hand data from the rectangle sub window  
  27.     cv2.rectangle(frame,(100,100),(300,300),(0,255,0),0)
  28.     crop_image = frame[100:500, 100:500]
  29.    
  30.     # Apply Gaussian blur
  31.     blur = cv2.GaussianBlur(crop_image, (3,3), 0)
  32.    
  33.     # Change color-space from BGR -> HSV
  34.     hsv = cv2.cvtColor(blur, cv2.COLOR_BGR2HSV)
  35.    
  36.     # Create a binary image with where white will be skin colors and rest is black
  37.     mask2 = cv2.inRange(hsv, np.array([2,0,0]), np.array([20,255,255]))
  38.    
  39.     # Kernel for morphological transformation    
  40.     kernel = np.ones((5,5))
  41.    
  42.     # Apply morphological transformations to filter out the background noise
  43.     dilation = cv2.dilate(mask2, kernel, iterations = 1)
  44.     erosion = cv2.erode(dilation, kernel, iterations = 1)    
  45.        
  46.     # Apply Gaussian Blur and Threshold
  47.     filtered = cv2.GaussianBlur(erosion, (3,3), 0)
  48.     ret,thresh = cv2.threshold(filtered, 127, 255, 0)
  49.    
  50.     # Show threshold image
  51.     # cv2.imshow("Thresholded", thresh)
  52.  
  53.     # Find contours
  54.     contours, hierarchy = cv2.findContours(thresh, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE )
  55.    
  56.     try:
  57.         # Find contour with maximum area
  58.         contour = max(contours, key = lambda x: cv2.contourArea(x))
  59.        
  60.         # Create bounding rectangle around the contour
  61.         x,y,w,h = cv2.boundingRect(contour)
  62.         cv2.rectangle(crop_image,(x,y),(x+w,y+h),(0,0,255),0)
  63.        
  64.         # Find convex hull
  65.         hull = cv2.convexHull(contour)
  66.        
  67.         # Draw contour
  68.         drawing = np.zeros(crop_image.shape, np.uint8)
  69.         cv2.drawContours(drawing,[contour],-1,(0,255,0),0)
  70.         cv2.drawContours(drawing,[hull],-1,(0,0,255),0)
  71.        
  72.         # Find convexity defects
  73.         hull = cv2.convexHull(contour, returnPoints=False)
  74.         defects = cv2.convexityDefects(contour,hull)
  75.        
  76.         # Use cosine rule to find angle of the far point from the start and end point i.e. the convex points (the finger
  77.         # tips) for all defects
  78.         count_defects = 0
  79.        
  80.         for i in range(defects.shape[0]):
  81.             s,e,f,d = defects[i,0]
  82.             start = tuple(contour[s][0])
  83.             end = tuple(contour[e][0])
  84.             far = tuple(contour[f][0])
  85.  
  86.             a = math.sqrt((end[0] - start[0])**2 + (end[1] - start[1])**2)
  87.             b = math.sqrt((far[0] - start[0])**2 + (far[1] - start[1])**2)
  88.             c = math.sqrt((end[0] - far[0])**2 + (end[1] - far[1])**2)
  89.             angle = (math.acos((b**2 + c**2 - a**2)/(2*b*c))*180)/3.14
  90.            
  91.             # if angle > 90 draw a circle at the far point
  92.             if angle <= 90:
  93.                 count_defects += 1
  94.                 cv2.circle(crop_image,far,1,[0,0,255],-1)
  95.  
  96.             cv2.line(crop_image,start,end,[0,255,0],2)
  97.  
  98.         # Print number of fingers
  99.  
  100.         print("Defects : ", count_defects)
  101.         if count_defects == 0:
  102.             cv2.putText(frame,"ZERO", (50,50), cv2.FONT_HERSHEY_SIMPLEX, 2, 2)
  103.         elif count_defects == 1:
  104.             cv2.putText(frame,"TWO", (50,50), cv2.FONT_HERSHEY_SIMPLEX, 2, 2)
  105.         elif count_defects == 2:
  106.             cv2.putText(frame, "THREE", (5,50), cv2.FONT_HERSHEY_SIMPLEX, 2, 2)
  107.         elif count_defects == 3:
  108.             cv2.putText(frame,"FOUR", (50,50), cv2.FONT_HERSHEY_SIMPLEX, 2, 2)
  109.         elif count_defects == 4:
  110.             cv2.putText(frame,"FIVE", (50,50), cv2.FONT_HERSHEY_SIMPLEX, 2, 2)
  111.         else:
  112.             pass
  113.  
  114.         # Show required images
  115.         cv2.imshow("Full Frame", frame)
  116.         all_image = np.hstack((drawing, crop_image))
  117.         cv2.imshow('Recognition', all_image)
  118.  
  119.         last.append(count_defects)
  120.         if(len(last) > 5):
  121.             last = last[-5:]
  122.        
  123.         # print(last)
  124.  
  125.         # Check if previously hand was wide open (3/4 fingers in previous frames), and is now a fist (0 fingers)
  126.         if(count_defects == 0 and 4 in last):
  127.             last = []
  128.             sock.sendto( ("JUMP!").encode(), (UDP_IP, UDP_PORT) )
  129.             print("_"*10, "Jump Action Triggered!", "_"*10)
  130.     except:
  131.         pass
  132.  
  133.     # Close the camera if 'q' is pressed
  134.     if cv2.waitKey(1) == ord('q'):
  135.         break
  136.  
# Release the camera handle and tear down all OpenCV windows on exit.
capture.release()
cv2.destroyAllWindows()
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement