orgicus

oak-d-lite-rgb-d-sync

Apr 4th, 2022
--- C:\Users\george.profenza\Downloads\gp\depthai-experiments\gen2-syncing\host-multiple-OAK-sync.py
+++ C:\Users\george.profenza\Downloads\gp\OpenCVSpatialAI\dai\sync-depth.py
@@ -18,13 +18,8 @@
     'left' : dai.CameraBoardSocket.LEFT,
     'right': dai.CameraBoardSocket.RIGHT,
 }
-cam_instance = {
-    'rgb'  : 0,
-    'left' : 1,
-    'right': 2,
-}
 
-def create_pipeline(cam_list):
+def create_pipeline(cam_list, id):
     # Start defining a pipeline
     pipeline = dai.Pipeline()
     cam = {}
@@ -37,12 +32,43 @@
             cam[c].setResolution(dai.ColorCameraProperties.SensorResolution.THE_1080_P)
             cam[c].setIspScale(1, 3)  # 1920x1080 -> 1280x720
             cam[c].isp.link(xout[c].input)
-        else:
+        elif c == 'left' or c == 'right':
             cam[c] = pipeline.create(dai.node.MonoCamera)
             cam[c].setResolution(dai.MonoCameraProperties.SensorResolution.THE_400_P)
             cam[c].out.link(xout[c].input)
+        else:
+            print('disparity has no hardware links to setup')
+            continue
         cam[c].setBoardSocket(cam_socket_opts[c])
         cam[c].setFps(args.fps)
+    # depth setup
+    print('setting up depth for id', id)
+    # Configure stereo pair for depth estimation
+    stereo = pipeline.createStereoDepth()
+    # Checks occluded pixels and marks them as invalid
+    stereo.setLeftRightCheck(True)
+
+    # Configure left and right cameras to work as a stereo pair
+    cam['left'].out.link(stereo.left)
+    cam['right'].out.link(stereo.right)
+
+    # Set XLinkOut for disparity, rectifiedLeft, and rectifiedRight
+    xoutDisp = pipeline.createXLinkOut()
+    xoutDisp.setStreamName("disparity")
+
+    xoutRectifiedLeft = pipeline.createXLinkOut()
+    xoutRectifiedLeft.setStreamName("rectifiedLeft")
+
+    xoutRectifiedRight = pipeline.createXLinkOut()
+    xoutRectifiedRight.setStreamName("rectifiedRight")
+
+    stereo.disparity.link(xoutDisp.input)
+
+    stereo.rectifiedLeft.link(xoutRectifiedLeft.input)
+    stereo.rectifiedRight.link(xoutRectifiedRight.input)
+
+    print('depth for id', id, 'ready')
+
     return pipeline
 
 
@@ -62,10 +88,11 @@
         device = stack.enter_context(dai.Device(openvino_version, device_info, usb2_mode))
 
         stereo = 1 < len(device.getConnectedCameras())
-        cam_list = {'rgb', 'left', 'right'} if stereo else {'rgb'}
+        cam_list = {'rgb', 'left', 'right', 'disparity'} if stereo else {'rgb'}
 
         # Get a customized pipeline based on identified device type
-        device.startPipeline(create_pipeline(cam_list))
+        device.startPipeline(create_pipeline(cam_list, device.getMxId()))
+        # device.startPipeline(getDepthPipeline())
 
         # Output queue will be used to get the rgb frames from the output defined above
         for cam in cam_list:
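
Not part of the diff above: a minimal host-side sketch of how the new "disparity" stream could be read back and displayed, assuming a single device and StereoDepth's default 95-level disparity range. The stream name matches the one set in create_pipeline(); in the multi-device script the existing per-camera getOutputQueue loop picks the stream up automatically once 'disparity' is in cam_list.

    import cv2
    import numpy as np
    import depthai as dai

    # pipeline built as in create_pipeline() above
    with dai.Device(pipeline) as device:
        # non-blocking queue for the "disparity" XLinkOut stream
        q_disp = device.getOutputQueue(name="disparity", maxSize=4, blocking=False)

        while True:
            disp_frame = q_disp.get().getFrame()  # uint8 disparity map (0..95 by default)

            # scale to the full 8-bit range and colorize for display
            disp_vis = (disp_frame * (255.0 / 95)).astype(np.uint8)
            disp_vis = cv2.applyColorMap(disp_vis, cv2.COLORMAP_JET)

            cv2.imshow("disparity", disp_vis)
            if cv2.waitKey(1) == ord('q'):
                break

The rectifiedLeft and rectifiedRight streams can be consumed the same way, minus the scaling and color map, since they are plain grayscale frames.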