lamiastella

SR300Camera.cpp 5/23/2017

#define NOMINMAX
#define _WINSOCKAPI_

#include "SR300Camera.h"
#include "RealSense/SenseManager.h"
#include "RealSense/SampleReader.h"
#include "util_render.h"
#include "Visualizer.h"
#include "RealSense/Session.h"
#include <opencv2/opencv.hpp>

#include <iostream>

using namespace std;
//using namespace Intel::RealSense;

PXCSenseManager *pp = PXCSenseManager::CreateInstance();
PXCCapture::Device *device;
PXCCaptureManager *cm;

const int depth_fps = 30;
const int depth_width = 640;
const int depth_height = 480;
cv::Size bufferSize;
const PXCCapture::Sample *sample;
//std::vector<CV_32FC3> xyzBuffer;
std::vector<cv::Point3f> xyzBuffer;

/***
Private constructor for the SR300 Camera depth sensor
***/
SR300Camera::SR300Camera(bool use_live_sensor)
{
    CONFIDENCE_THRESHHOLD = (60.0f / 255.0f * 500.0f);
    //INVALID_FLAG_VALUE = PMD_FLAG_INVALID;
    X_DIMENSION = depth_width;
    Y_DIMENSION = depth_height;

    int num_pixels = X_DIMENSION * Y_DIMENSION;
    //PXCPoint3DF32* vertices = new PXCPoint3DF32[num_pixels]; // only needed by the commented-out QueryVertices experiment below

    if (!use_live_sensor) {
        return;
    }

    std::cout << "Trying to open SR300 camera\n";

    if (!pp) {
        wprintf_s(L"Unable to create the SenseManager\n");
    }

    /* Sets file recording or playback */
    cm = pp->QueryCaptureManager();
    //Status sts = STATUS_NO_ERROR;
    Status sts = STATUS_DATA_UNAVAILABLE; //mona is this the correct initialization?

    // Create stream renders
    UtilRender renderc(L"Color"), renderd(L"Depth"), renderi(L"IR"), renderr(L"Right"), renderl(L"Left");

    do {
        bool revert = false;
        pp->EnableStream(PXCCapture::StreamType::STREAM_TYPE_DEPTH,
            X_DIMENSION, Y_DIMENSION, depth_fps);
        revert = true;
        sts = pp->Init();
        if (sts < Status::STATUS_NO_ERROR) {
            if (revert) {
                // fall back to the default depth stream configuration
                pp->Close();
                pp->EnableStream(PXCCapture::STREAM_TYPE_DEPTH);
                sts = pp->Init();
                if (sts < Status::STATUS_NO_ERROR) {
                    pp->Close();
                    //pp->EnableStream(Capture::STREAM_TYPE_COLOR);
                    sts = pp->Init();
                }
            }
            if (sts < Status::STATUS_NO_ERROR) {
                wprintf_s(L"Failed to locate any video stream(s)\n");
                pp->Release();
                return; // nothing to stream from; don't keep using the released SenseManager
            }
        }

        device = pp->QueryCaptureManager()->QueryDevice();
        device->ResetProperties(PXCCapture::STREAM_TYPE_ANY);

        for (int nframes = 0; ; nframes++) {
            sts = pp->AcquireFrame(false);
            if (sts < Status::STATUS_NO_ERROR) {
                if (sts == Status::STATUS_STREAM_CONFIG_CHANGED) {
                    wprintf_s(L"Stream configuration was changed, re-initializing\n");
                    pp->Close();
                }
                break;
            }

            sample = pp->QuerySample();

            if (sample) {
                cv::Mat img;
                //ConvertPXCImageToOpenCVMat(sample->depth, &img);
                //cv::imshow("depth from OpenARK", Visualizer::visualizeDepthMap(img));
                if (sample->depth && !renderd.RenderFrame(sample->depth)) break;
                //if (sample->color && !renderc.RenderFrame(sample->color)) break;
                //if (sample->ir && !renderi.RenderFrame(sample->ir))    break;
                PXCImage::ImageData depthImage;
                PXCImage *depthMap = sample->depth;
                //depthMap->AcquireAccess(Image::ACCESS_READ, &depthImage);
                depthMap->AcquireAccess(PXCImage::ACCESS_WRITE, &depthImage);

                PXCImage::ImageInfo imgInfo = depthMap->QueryInfo();
                int depth_stride = depthImage.pitches[0] / sizeof(pxcU16);
                PXCProjection *projection = device->CreateProjection();
                cout << "projection is: " << projection << endl;
                cout << "device is: " << device << endl;
                //int num_pixels = depth_height * depth_width;

                //projection->QueryVertices(depthMap, vertices);
                pxcU16 *dpixels = (pxcU16*)depthImage.planes[0];
                unsigned int dpitch = depthImage.pitches[0] / sizeof(pxcU16);

                /*for (int k = 0; k < num_pixels; k++) {
                cout << "xx is " << vertices[k].x << endl;
                cout << "yy is " << vertices[k].y << endl;
                cout << "zz is " << vertices[k].z << endl;
                }*/
                //cout << "xx is " << vertices[0].x << endl;
                //cout << "yy is " << vertices[0].y << endl;
                //cout << "zz is " << vertices[0].z << endl;
                //cout << "*******************" << endl;

                // per-pixel camera-space vertices (in mm) for the current depth frame
                PXCPoint3DF32 *posUVZ = new PXCPoint3DF32[num_pixels];
                PXCPoint3DF32 *pos3D = new PXCPoint3DF32[num_pixels];

                projection->QueryVertices(depthMap, pos3D);

                for (int k = 0; k < num_pixels; k++) {
                    if (pos3D[k].x != 0) {
                        cout << " xx is: " << pos3D[k].x << endl;
                    }
                    if (pos3D[k].y != 0) {
                        cout << " yy is: " << pos3D[k].y << endl;
                    }
                    if (pos3D[k].z != 0) {
                        cout << " zz is: " << pos3D[k].z << endl;
                    }
                }

                /*
                for (int y = 0; y < Y_DIMENSION; y++) {
                    for (int x = 0; x < X_DIMENSION; x++) {
                        // populate the point with depth image pixel coordinates and the depth value
                        posUVZ[y*Y_DIMENSION + x].x = (pxcF32)x;
                        posUVZ[y*Y_DIMENSION + x].y = (pxcF32)y;
                        posUVZ[y*Y_DIMENSION + x].z = dpixels[nframes*X_DIMENSION*Y_DIMENSION + y*Y_DIMENSION + x];
                    }
                }
                */
                /*
                for (unsigned int yy = 0, k = 0; yy < depth_height; yy++)
                {
                    for (unsigned int xx = 0; xx < depth_width; xx++, k++)
                    {
                        posUVZ[k].x = (pxcF32)xx;
                        posUVZ[k].y = (pxcF32)yy;
                        posUVZ[k].z = (pxcF32)dpixels[yy * dpitch + xx];
                        //cout << "xx is " << posUVZ[k].x << endl;
                        //cout << "yy is " << posUVZ[k].y << endl;
                        //cout << "zz is " << posUVZ[k].z << endl;
                    }
                }
                */
                //projection->ProjectDepthToCamera(num_pixels, posUVZ, pos3D);

                /*
                for (unsigned int yy = 0, k = 0; yy < Y_DIMENSION; yy++)
                {
                    for (unsigned int xx = 0; xx < X_DIMENSION; xx++, k++)
                    {
                        posUVZ[k].x = (pxcF32)xx;
                        posUVZ[k].y = (pxcF32)yy;
                        posUVZ[k].z = (pxcF32)dpixels[yy * dpitch + xx];
                        //cout << "xx is " << posUVZ[k].x << endl;
                        //cout << "yy is " << posUVZ[k].y << endl;
                        //cout << "zz is " << posUVZ[k].z << endl;
                    }
                }
                */

                // convert the array of depth coordinates + depth value (posUVZ) into the world coordinates (pos3D) in mm

                //Step 1: Set up the point cloud in camera coordinates and do the projection

                //projection->ProjectDepthToCamera(num_pixels, posUVZ, pos3D);
                /*
                if (projection->ProjectDepthToCamera(num_pixels, posUVZ, pos3D) < PXC_STATUS_NO_ERROR)
                {
                    delete[] posUVZ;
                    delete[] pos3D;
                    cout << "projection unsuccessful";
                    return;
                }

                cout << "*************" << endl;
                for (unsigned int yy = 0, k = 0; yy < Y_DIMENSION; yy++)
                {
                    for (unsigned int xx = 0; xx < X_DIMENSION; xx++, k++)
                    {
                        //cout << "xx is " << pos3D[k].x << endl;
                        //cout << "yy is " << pos3D[k].y << endl;
                        //cout << "zz is " << pos3D[k].z << endl;
                        xyzBuffer.push_back(cv::Point3f(pos3D[k].x, pos3D[k].y, pos3D[k].z));
                    }
                }
                cout << "/////////\\\\\\\\" << endl;
                projection->Release();
                depthMap->ReleaseAccess(&depthImage); //is this in the right place mona?
                memcpy(xyzMap.data, xyzBuffer.data(), xyzBuffer.size() * sizeof(CV_32FC3));
                */

                // release the per-frame resources acquired above (these calls previously
                // existed only inside the commented-out block, so they never ran)
                projection->Release();
                depthMap->ReleaseAccess(&depthImage);
                delete[] posUVZ;
                delete[] pos3D;
            }
            printf("opened sensor\n");
            pp->ReleaseFrame();
            printf("acquired image\n");
            cout << "xyzMap = " << endl << " " << xyzMap << endl << endl;
        }

    } while (sts == Status::STATUS_STREAM_CONFIG_CHANGED);

    //numRows = width and numCols = height?
    //numPixels = dd.img.numRows * dd.img.numColumns; // Number of pixels in camera
    numPixels = depth_width * depth_height;
    dists = new float[3 * numPixels]; // dists holds XYZ values, so it needs to be 3x the size of numPixels
    amps = new float[numPixels];
    //frame = cvCreateImage(cvSize(dd.img.numColumns, dd.img.numRows), 8, 3); // Create the frame
    frame = cvCreateImage(cvSize(depth_width, depth_height), 8, 3); // Create the frame

    // Constant-velocity Kalman filter: state = [x y z vx vy vz], measurement = observed XYZ
    KF.init(6, 3, 0);
    KF.transitionMatrix = (cv::Mat_<float>(6, 6) << 1, 0, 0, 1, 0, 0,
        0, 1, 0, 0, 1, 0,
        0, 0, 1, 0, 0, 1,
        0, 0, 0, 1, 0, 0,
        0, 0, 0, 0, 1, 0,
        0, 0, 0, 0, 0, 1);
    measurement = cv::Mat_<float>::zeros(3, 1);

    // Initialize the Kalman filter state and covariances
    KF.statePre.setTo(0);
    setIdentity(KF.measurementMatrix);
    setIdentity(KF.processNoiseCov, cv::Scalar::all(.001)); // Adjust this for faster convergence - but higher noise
    setIdentity(KF.measurementNoiseCov, cv::Scalar::all(1e-1));
    setIdentity(KF.errorCovPost, cv::Scalar::all(.1));

    /* mona will test this
    setIdentity(KF.measurementMatrix);
    setIdentity(KF.processNoiseCov, Scalar::all(smoothness));
    setIdentity(KF.measurementNoiseCov, Scalar::all(rapidness));
    setIdentity(KF.errorCovPost, Scalar::all(.1));
    */
}
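
/***
Usage sketch (not part of the original file) for the constant-velocity Kalman filter
configured in the constructor above: state = [x y z vx vy vz], measurement = observed XYZ.
The function name kalmanStepSketch and the idea of calling it once per frame with one
observed point are illustrative assumptions only.
***/
static cv::Point3f kalmanStepSketch(cv::KalmanFilter &filter, const cv::Point3f &observed)
{
    filter.predict();                             // propagate the state one frame ahead
    cv::Mat_<float> meas(3, 1);                   // pack the observed XYZ into a 3x1 measurement
    meas(0) = observed.x;
    meas(1) = observed.y;
    meas(2) = observed.z;
    cv::Mat corrected = filter.correct(meas);     // fuse the prediction with the measurement
    return cv::Point3f(corrected.at<float>(0),    // smoothed position estimate
                       corrected.at<float>(1),
                       corrected.at<float>(2));
}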

/***
Public destructor for the SR300 Camera depth sensor
***/
SR300Camera::~SR300Camera() {}

void SR300Camera::destroyInstance()
{
    printf("closing sensor\n");
    pp->Release();
    printf("sensor closed\n");
}

/***
Create xyzMap, zMap, ampMap, and flagMap from sensor input
***/
void SR300Camera::update()
{
    initilizeImages();

    fillInAmps();
}

/***
Reads the amplitude data from the sensor and fills in the matrix
***/
void SR300Camera::fillInAmps()
{
    // Amplitude read-out is not implemented for the SR300 yet;
    // the PMD-style calls below are kept only for reference.
    //int res = pmdGetAmplitudes(hnd, amps, numPixels * sizeof(float));
    //float * dataPtr = amps;
    //ampMap.data = (uchar *)amps;
}

/*
void SR300Camera::ConvertPXCImageToOpenCVMat(PXCImage *inImg, cv::Mat *outImg) const
{
    int cvDataType;
    int cvDataWidth;

    PXCImage::ImageData data;
    inImg->AcquireAccess(PXCImage::ACCESS_READ, &data);
    PXCImage::ImageInfo imgInfo = inImg->QueryInfo();

    switch (data.format) {

    case PXCImage::PIXEL_FORMAT_YUY2:
    case PXCImage::PIXEL_FORMAT_NV12:
        throw(0); // Not implemented
    case PXCImage::PIXEL_FORMAT_RGB32:
        cvDataType = CV_8UC4;
        cvDataWidth = 4;
        break;
    case PXCImage::PIXEL_FORMAT_RGB24:
        cvDataType = CV_8UC3;
        cvDataWidth = 3;
        break;
    case PXCImage::PIXEL_FORMAT_Y8:
        cvDataType = CV_8U;
        cvDataWidth = 1;
        break;

    case PXCImage::PIXEL_FORMAT_DEPTH:
    case PXCImage::PIXEL_FORMAT_DEPTH_RAW:
        cvDataType = CV_16U;
        cvDataWidth = 2;
        break;
    case PXCImage::PIXEL_FORMAT_DEPTH_F32:
        cvDataType = CV_32F;
        cvDataWidth = 4;
        break;

    case PXCImage::PIXEL_FORMAT_Y16:
        cvDataType = CV_16U;
        cvDataWidth = 2;
        break;
    case PXCImage::PIXEL_FORMAT_Y8_IR_RELATIVE:
        cvDataType = CV_8U;
        cvDataWidth = 1;
        break;
    }

    // suppose that no other planes
    if (data.planes[1] != NULL) throw(0); // not implemented
    // suppose that no sub pixel padding needed
    if (data.pitches[0] % cvDataWidth != 0) throw(0); // not implemented

    outImg->create(imgInfo.height, data.pitches[0] / cvDataWidth, cvDataType);

    memcpy(outImg->data, data.planes[0], imgInfo.height*imgInfo.width*cvDataWidth * sizeof(pxcBYTE));

    inImg->ReleaseAccess(&data);
}
*/
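
/***
Minimal sketch (not part of the original file) of a lighter-weight alternative to the
commented-out converter above, for the depth case only: wrap the 16-bit depth plane in a
cv::Mat header without copying. The function name wrapDepthPlaneSketch is an assumption;
the caller must keep the image access acquired (AcquireAccess/ReleaseAccess) while the
returned Mat is in use, and should clone() it if the pixels need to outlive that window.
***/
static cv::Mat wrapDepthPlaneSketch(const PXCImage::ImageInfo &info, const PXCImage::ImageData &data)
{
    // pitches[0] is the row stride in bytes; cv::Mat accepts it as an explicit step argument
    return cv::Mat(info.height, info.width, CV_16U, data.planes[0], (size_t)data.pitches[0]);
}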

/***
Returns the X value at (i, j)
***/
float SR300Camera::getX(int i, int j) const
{
    //int flat = j * dd.img.numColumns * 3 + i * 3;
    int flat = j * depth_width * 3 + i * 3;
    return dists[flat];
}

/***
Returns the Y value at (i, j)
***/
float SR300Camera::getY(int i, int j) const
{
    //int flat = j * dd.img.numColumns * 3 + i * 3;
    int flat = j * depth_width * 3 + i * 3;
    return dists[flat + 1];
}

/***
Returns the Z value at (i, j)
***/
float SR300Camera::getZ(int i, int j) const
{
    //int flat = j * dd.img.numColumns * 3 + i * 3;
    int flat = j * depth_width * 3 + i * 3;
    return dists[flat + 2];
}
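
/***
Illustrative helper (not part of the original file): returns the full XYZ point at (i, j)
using the same flat "3 floats per pixel, row-major over depth_width" layout assumed by
getX/getY/getZ above. The name pointFromDistsSketch and passing the dists buffer
explicitly are assumptions for illustration only.
***/
static cv::Point3f pointFromDistsSketch(const float *dists, int i, int j)
{
    int flat = j * depth_width * 3 + i * 3;       // same indexing as the getters above
    return cv::Point3f(dists[flat], dists[flat + 1], dists[flat + 2]);
}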