Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
- import io
- import cv2
- import numpy as np
- from google.cloud import firestore, storage
- import pickle
- from enum import Enum
- import math
- from modules.utils import get_intersection, perspective_M
- # db = firestore.Client()
- # client = storage.Client()
- #
- # in_bucket = client.get_bucket('recognition-frames')
- # blobs = in_bucket.list_blobs(prefix='rcg-client-00005/2019-10-16.15-54-19-00005')
- # blobs = in_bucket.list_blobs(prefix='rcg-client-00005/2019-11-30.09-34-57-00005')
- #
- # for blob in blobs:
- # b = blob.download_as_string()
- # nparr = np.fromstring(b, np.uint8)
- # img_np = cv2.imdecode(nparr, cv2.IMREAD_COLOR) # cv2.IMREAD_COLOR in OpenCV 3.1
- # print('Saving: ', blob.name)
- # cv2.imwrite("images/big_session/" + blob.name.replace("/", ""), img_np)
- #
# Keypoint ordering produced by the pose model (OpenPose-style 18-point layout).
kpt_names = ['nose', 'neck',
             'r_sho', 'r_elb', 'r_wri', 'l_sho', 'l_elb', 'l_wri',
             'r_hip', 'r_knee', 'r_ank', 'l_hip', 'l_knee', 'l_ank',
             'r_eye', 'l_eye',
             'r_ear', 'l_ear']
# Fraction of the forearm vector added past the wrist to place the bbox center.
DIRECTION_FACTOR = 0.5
# Indices into kpt_names / pose.keypoints for the joints used below.
RIGHT_WRIST_INDEX = 4
RIGHT_ELBOW_INDEX = 3
LEFT_WRIST_INDEX = 7
LEFT_ELBOW_INDEX = 6
# Drawing colors. NOTE(review): OpenCV expects BGR order, so (255, 255, 0)
# renders as cyan and (0, 255, 255) as yellow -- the YELLOW/TEAL names look
# swapped relative to what they draw; confirm intent before relying on them.
YELLOW = (255, 255, 0)
TEAL = (0, 255, 255)
RED = (0, 0, 255)
GREEN = (36,255,12)
# Weights for the bbox-radius heuristic in draw_bbox_based_on_arm.
PROXIMITY_INFLUENCE = 1
HORIZONTAL_DIST_INFLUENCE = 0.3
# Presumably half the 1296-px frame width -- TODO confirm frame size.
HALF_POINT_X = 1296 / 2
# Pixel coordinates of the cart outline in the camera frame (debug overlay).
CART_FRONT_LEFT_CORNER = (490, 600)
CART_FRONT_RIGHT_CORNER = (795, 595)
CART_BACK_LEFT_CORNER = (300, 10)
CART_BACK_RIGHT_CORNER = (900, 10)
def keypoint_exists(keypoint):
    """Return True when a keypoint was actually detected.

    Undetected keypoints are encoded with non-positive coordinates.
    """
    x, y = keypoint[0], keypoint[1]
    return x > 0 and y > 0
def set_correct_bbox(coords, radius):
    """Compute the axis-aligned square centered on a point.

    Arguments:
        coords {list} -- (x, y) coordinates of the square's center
        radius {int} -- radius of the circle that would fit within the square

    Returns:
        tuple -- (x1, y1, x2, y2), integer top-left and bottom-right corners
    """
    center_x, center_y = coords
    top_left = (int(center_x - radius), int(center_y - radius))
    bottom_right = (int(center_x + radius), int(center_y + radius))
    return top_left[0], top_left[1], bottom_right[0], bottom_right[1]
def get_forearm_active_probability(img, wrist, elbow):
    """Annotate *img* with the forearm angle and cart-corner debug markers.

    Arguments:
        img {ndarray} -- BGR frame to draw on (modified in place)
        wrist {ndarray} -- (x, y) wrist keypoint
        elbow {ndarray} -- (x, y) elbow keypoint

    Returns:
        ndarray -- the annotated image

    NOTE(review): despite the name, no probability is computed here -- the
    function only draws debug overlays. Confirm the intended final behavior.
    """
    vec_dir = wrist - elbow
    # atan2 handles a vertical forearm (vec_dir[0] == 0) without dividing by
    # zero; taking abs() of both components keeps the result in [0, 90]
    # degrees, identical to the previous abs(atan(dy / dx)) for dx != 0.
    angle = math.degrees(math.atan2(abs(float(vec_dir[1])), abs(float(vec_dir[0]))))
    cv2.circle(img, CART_FRONT_LEFT_CORNER, 10, YELLOW, 3)  # Debug
    cv2.circle(img, CART_FRONT_RIGHT_CORNER, 10, YELLOW, 3)  # Debug
    cv2.circle(img, CART_BACK_LEFT_CORNER, 10, YELLOW, 3)
    cv2.circle(img, CART_BACK_RIGHT_CORNER, 10, YELLOW, 3)
    cv2.putText(img, str(angle),
                (wrist[0] + 10, wrist[1] + 10),
                cv2.FONT_HERSHEY_SIMPLEX,
                0.5,
                (255, 255, 255),
                2)
    return img
def draw_bbox_based_on_arm(img, wrist, elbow, min_distance, color):
    """Draw a bounding box projected ahead of the wrist along the forearm.

    Arguments:
        img {ndarray} -- frame to draw on (modified in place)
        wrist {ndarray} -- (x, y) wrist keypoint
        elbow {ndarray or tuple} -- (x, y) elbow keypoint, or (0, 0) if unknown
        min_distance {int} -- proximity-sensor distance; <= 0 means no reading
        color {tuple} -- BGR color for the rectangle outline
    """
    forearm = wrist - elbow
    # Closer objects (smaller sensor distance) yield a larger proximity radius;
    # with no reading the proximity term drops out entirely.
    if min_distance > 0:
        proximity_term = (2 * int(700 / min_distance * 35 + 50)) * PROXIMITY_INFLUENCE
    else:
        proximity_term = 0
    # NOTE(review): forearm[0] can be negative, shrinking (or inverting) the
    # radius -- confirm whether abs() was intended here.
    box_radius = proximity_term + forearm[0] * HORIZONTAL_DIST_INFLUENCE
    box_center = wrist + DIRECTION_FACTOR * forearm
    x1, y1, x2, y2 = set_correct_bbox(box_center, box_radius)
    img = get_forearm_active_probability(img, wrist, elbow)
    cv2.rectangle(img, (x1, y1), (x2, y2), color, 2)
if __name__ == "__main__":
    # Replay pickled pose-detection results over the saved session frames and
    # visualise wrist/elbow keypoints plus the derived interaction boxes.
    # `with` guarantees the pickle file is closed even if loading fails.
    with io.open("images/big_session/_out.pkl", 'rb') as filehandle:
        # filehandle = io.open("images/out.pkl", 'rb')
        unpickler = pickle.Unpickler(filehandle)
        while filehandle.peek(1):  # stop cleanly at end-of-file
            (filename, current_poses) = unpickler.load()
            # Rewrite the recorded path to the local big_session layout.
            filename = filename.split('/', 1)
            filename = filename[0] + '/' + "big_session/" + "rcg-client-00005" + filename[1].replace("/", "")
            # filename = filename[0] + '/' + filename[1].replace("/", "")
            img = cv2.imread(filename)
            # The last two dash-separated tokens of the file stem presumably
            # encode the left/right proximity-sensor readings -- TODO confirm.
            split = filename.rsplit('.', 1)[0].rsplit('-', 2)[-2:]
            try:
                left = int(split[0])
                right = int(split[1])
            except (AttributeError, ValueError):
                # Malformed filename: treat both sensor readings as missing.
                left = 0
                right = 0
            # A zero reading means "no detection"; prefer the other side.
            if left == 0:
                min_distance = right
            elif right == 0:
                min_distance = left
            else:
                min_distance = min(left, right)
            for pose in current_poses:
                if not hasattr(pose, 'keypoints'):
                    continue
                for keypt in pose.keypoints:  # Debug: mark every detected keypoint
                    x, y = keypt
                    if x > 0 and y > 0:
                        cv2.circle(img, (keypt[0], keypt[1]), 4, (255, 0, 0), -1)
                right_wrist, right_elbow = pose.keypoints[RIGHT_WRIST_INDEX], pose.keypoints[RIGHT_ELBOW_INDEX]
                if keypoint_exists(right_wrist) and keypoint_exists(right_elbow):
                    cv2.circle(img, (right_wrist[0], right_wrist[1]), 10, YELLOW, 3)  # Debug
                    cv2.circle(img, (right_elbow[0], right_elbow[1]), 10, YELLOW, 3)  # Debug
                    draw_bbox_based_on_arm(img, right_wrist, right_elbow, min_distance, YELLOW)
                    # TODO Detect if relevant
                elif keypoint_exists(right_wrist):
                    # Elbow missing: fall back to a wrist-only box.
                    cv2.circle(img, (right_wrist[0], right_wrist[1]), 10, RED, 3)  # Debug
                    draw_bbox_based_on_arm(img, right_wrist, (0, 0), min_distance, RED)
                left_wrist, left_elbow = pose.keypoints[LEFT_WRIST_INDEX], pose.keypoints[LEFT_ELBOW_INDEX]
                if keypoint_exists(left_wrist) and keypoint_exists(left_elbow):
                    cv2.circle(img, (left_wrist[0], left_wrist[1]), 10, TEAL, 3)  # Debug
                    cv2.circle(img, (left_elbow[0], left_elbow[1]), 10, TEAL, 3)  # Debug
                    draw_bbox_based_on_arm(img, left_wrist, left_elbow, min_distance, TEAL)
                elif keypoint_exists(left_wrist):
                    cv2.circle(img, (left_wrist[0], left_wrist[1]), 10, GREEN, 3)  # Debug
                    draw_bbox_based_on_arm(img, left_wrist, (0, 0), min_distance, GREEN)
                    # TODO Detect if relevant
            # Fall back when no pose detected
            # if not (drew_left or drew_right) and min_distance != 0:
            #     intersection, _ = get_intersection(left, right)
            #     if intersection:
            #         intersection = np.array([[intersection]], dtype='float32')
            #         intersection = cv2.perspectiveTransform(intersection, perspective_M)
            #         p = tuple(intersection[0][0])
            #         abs_center = [p[0], p[1]]
            #         radius = (2 * int(700 / min_distance * 35 + 50))
            #         x1, y1, x2, y2 = set_correct_bbox(abs_center, radius)
            #         cv2.rectangle(img, (x1, y1), (x2, y2), RED, 2)
            cv2.imshow("bla", img)
            cv2.waitKey(0)
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement