#!/usr/bin/env python3
import cv2
import math
import depthai as dai
import contextlib
import argparse
from datetime import timedelta
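
# Multi-device hardware frame-sync demo: every connected OAK device streams
# 800P ISP frames from up to four cameras. The 'left' camera drives the FSYNC
# signal (FrameSyncMode.OUTPUT) and the remaining cameras latch onto it
# (FrameSyncMode.INPUT). Frames are matched by timestamp across all queues and
# displayed with their timestamp, sequence number and exposure time overlaid.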

parser = argparse.ArgumentParser(epilog='Press Q in an image window to exit.')
parser.add_argument('-f', '--fps', type=float, default=30,
                    help='Camera sensor FPS, applied to all cams')
args = parser.parse_args()
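
# Stream name -> board socket. In current depthai releases RGB/LEFT/RIGHT are
# aliases for CAM_A/CAM_B/CAM_C, so this covers the four FFC-4P connectors.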
cam_socket_opts = {
    'rgb'  : dai.CameraBoardSocket.RGB,
    'left' : dai.CameraBoardSocket.LEFT,
    'right': dai.CameraBoardSocket.RIGHT,
    'camd' : dai.CameraBoardSocket.CAM_D,
}
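
# One ColorCamera + XLinkOut pair per sensor. The camera created with
# output=True generates the FSYNC pulse; the others are configured to wait for
# it, so all sensors start exposing at the same moment.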
def create_pipeline():
    # Start defining a pipeline
    pipeline = dai.Pipeline()

    def create(c, output=False):
        xout = pipeline.create(dai.node.XLinkOut)
        xout.setStreamName(c)
        cam = pipeline.create(dai.node.ColorCamera)
        cam.setResolution(dai.ColorCameraProperties.SensorResolution.THE_800_P)
        cam.isp.link(xout.input)
        # cam.initialControl.setManualExposure(1000, 800)
        if output:
            cam.initialControl.setFrameSyncMode(dai.CameraControl.FrameSyncMode.OUTPUT)
        else:
            cam.initialControl.setFrameSyncMode(dai.CameraControl.FrameSyncMode.INPUT)
        cam.setBoardSocket(cam_socket_opts[c])
        cam.setFps(args.fps)

    create('left', output=True)
    create('right')
    create('rgb')
    create('camd')
    return pipeline
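
# contextlib.ExitStack keeps every opened dai.Device alive until the end of
# the with-block, which lets us connect to multiple devices in a loop.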
# https://docs.python.org/3/library/contextlib.html#contextlib.ExitStack
with contextlib.ExitStack() as stack:
    device_infos = dai.Device.getAllAvailableDevices()
    if len(device_infos) == 0: raise RuntimeError("No devices found!")
    else: print("Found", len(device_infos), "devices")
    queues = []

    for device_info in device_infos:
        # for ip in ips:
        #     device_info = dai.DeviceInfo(ip)
        # Note: the pipeline isn't set here, as we don't know yet what device it is.
        # openvino_version and usb2_mode would only matter for other Device()
        # constructor overloads; they are unused with the Config-based constructor below.
        openvino_version = dai.OpenVINO.Version.VERSION_2021_4
        usb2_mode = False
        config = dai.Device.Config()
        # For FFC-4P R6 (revision 6) or higher, we changed FSIN_MODE_SELECT from GPIO6 to GPIO38!
        config.board.gpio[6] = dai.BoardConfig.GPIO(dai.BoardConfig.GPIO.OUTPUT,
                                                    dai.BoardConfig.GPIO.Level.HIGH)
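        # Per the note above, on FFC-4P R6 or newer drive gpio[38] instead of gpio[6].
        # No DeviceInfo is passed to dai.Device() below, so each iteration connects to
        # the next device that is still free; the connection order is not guaranteed
        # to match device_infos.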
        device = stack.enter_context(dai.Device(config=config))

        cam_list = {'left', 'right', 'rgb', 'camd'}
        # cam_list = {'left', 'right'}
        # cam_list = {'right', 'camd'}

        print('Starting pipeline for', device.getMxId())
        # Build the pipeline and start it on this device
        device.startPipeline(create_pipeline())

        # Output queues will be used to get the frames from each stream defined above
        for cam in cam_list:
            queues.append({
                'queue': device.getOutputQueue(name=cam, maxSize=4, blocking=False),
                'msgs': [],  # Frame msgs
                'mx': device.getMxId(),
                'cam': cam
            })
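
    # A set of frames counts as synced when every queue holds a frame whose
    # timestamp is within half a frame period of the reference timestamp
    # (~17 ms at 30 FPS). Older, unmatched frames are dropped.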
    def check_sync(queues, timestamp):
        matching_frames = []
        for q in queues:
            for i, msg in enumerate(q['msgs']):
                time_diff = abs(msg.getTimestamp() - timestamp)
                # So below 17ms @ 30 FPS => frames are in sync
                if time_diff <= timedelta(milliseconds=math.ceil(500 / args.fps)):
                    matching_frames.append(i)
                    break

        if len(matching_frames) == len(queues):
            # We have all frames synced. Remove the excess ones
            for i, q in enumerate(queues):
                q['msgs'] = q['msgs'][matching_frames[i]:]
            return True
        else:
            return False
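
    # Poll all queues; whenever a newly received frame completes a synced set,
    # pop one frame per queue and display it with its metadata overlaid.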
    while True:
        for q in queues:
            new_msg = q['queue'].tryGet()
            if new_msg is not None:
                print(f"[{q['mx']}] Got {q['cam']} frame, seq: {new_msg.getSequenceNum()} TS: {new_msg.getTimestamp()}")
                q['msgs'].append(new_msg)
                if check_sync(queues, new_msg.getTimestamp()):
                    for sq in queues:  # separate name to avoid shadowing the outer loop variable
                        imgFrame: dai.ImgFrame = sq['msgs'].pop(0)
                        frame = imgFrame.getCvFrame()
                        # If monochrome, convert to RGB
                        if frame.ndim == 2:
                            frame = cv2.cvtColor(frame, cv2.COLOR_GRAY2RGB)
                        # Overlay mid-exposure timestamp, sequence number and exposure time
                        frame = cv2.putText(frame, f"TS {imgFrame.getTimestamp(dai.CameraExposureOffset.MIDDLE)}",
                                            (10, 30), cv2.FONT_HERSHEY_TRIPLEX, 1.0, color=(127, 255, 0))
                        frame = cv2.putText(frame, f"Seq {imgFrame.getSequenceNum()}, Exposure: {imgFrame.getExposureTime().total_seconds() * 1000:.2f}ms",
                                            (10, 80), cv2.FONT_HERSHEY_TRIPLEX, 1.0, color=(127, 255, 0))
                        cv2.imshow(f"{sq['cam']} - {sq['mx']}", frame)
        if cv2.waitKey(1) == ord('q'):
            break