Advertisement
lamiastella

SR300Camera.cpp

May 22nd, 2017
203
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
C++ 12.67 KB | None | 0 0
  1. #include "SR300Camera.h"
  2. #include "RealSense/SenseManager.h"
  3. #include "RealSense/SampleReader.h"
  4. #include "util_render.h"
  5.  
  6.  
  7. #include <iostream>
  8.  
/***
SR300Camera.cpp -- depth-sensor backend for the Intel RealSense SR300.
Implements pipeline construction/teardown and accessors for captured
XYZ data.
***/
// NOTE(review): file-scope `using namespace` is discouraged; tolerated here
// only because this is a .cpp translation unit, never a header.
using namespace std;
using namespace Intel::RealSense;
  14.  
  15.  
// --- File-scope camera state shared by the SR300Camera methods ---

// RealSense pipeline manager; owns the streaming session.
// Released in SR300Camera::destroyInstance().
SenseManager *pp = SenseManager::CreateInstance();
// Active capture device, assigned during construction.
Capture::Device *device;
// Capture manager queried from pp; used to configure streams.
CaptureManager *cm;

// Requested depth stream frame rate (frames per second).
const int depth_fps = 30;
// Depth image dimensions, filled in from the first acquired frame.
int depth_width;
int depth_height;
// Buffer dimensions for XYZ projection (see commented-out getXYZbuffer()).
cv::Size bufferSize;
// Most recently acquired sample (color/depth/ir image set).
const PXCCapture::Sample *sample;
// Scratch buffer for projected camera-space points.
std::vector<cv::Point3f> xyzBuffer;
  26.  
  27.  
  28.  
  29. SR300Camera::SR300Camera(bool use_live_sensor)
  30. {
  31.  
  32.     CONFIDENCE_THRESHHOLD = (60.0f / 255.0f*500.0f);
  33.     //INVALID_FLAG_VALUE = PMD_FLAG_INVALID;
  34.     X_DIMENSION = 176;
  35.     Y_DIMENSION = 120;
  36.  
  37.     if (!use_live_sensor) {
  38.         return;
  39.     }
  40.  
  41.     int res;
  42.     std::cout << "Trying to open SR300 camera\n";
  43.  
  44.    
  45.    
  46.     if (!pp) {
  47.         wprintf_s(L"Unable to create the SenseManager\n");
  48.     }
  49.  
  50.     /* Sets file recording or playback */
  51.     cm = pp->QueryCaptureManager();
  52.     Status sts= STATUS_NO_ERROR;
  53.     // Create stream renders
  54.     UtilRender renderc(L"Color"), renderd(L"Depth"), renderi(L"IR"), renderr(L"Right"), renderl(L"Left");
  55.    
  56.     do {
  57.         bool revert = false;
  58.        
  59.         DataDesc desc = {};
  60.        
  61.         if (cm->QueryCapture()) {
  62.             cm->QueryCapture()->QueryDeviceInfo(0, &desc.deviceInfo);
  63.         }
  64.         else {
  65.             desc.deviceInfo.streams = Capture::STREAM_TYPE_COLOR | Capture::STREAM_TYPE_DEPTH;
  66.             revert = true;
  67.         }
  68.         pp->EnableStreams(&desc);
  69.        
  70.         /*sts = pp->EnableStream(Capture::StreamType::STREAM_TYPE_DEPTH,
  71.             X_DIMENSION, Y_DIMENSION, depth_fps);
  72.        
  73.         if (sts < PXC_STATUS_NO_ERROR) {
  74.             throw std::runtime_error("depth stream encoutered an error!!");
  75.         }
  76.        
  77.        
  78.         VideoModule::DataDesc ddesc = {};
  79.         ddesc.deviceInfo.streams = PXCCapture::STREAM_TYPE_COLOR | PXCCapture::STREAM_TYPE_DEPTH;
  80.         pp->EnableStreams(&ddesc);
  81.         */
  82.  
  83.         sts = pp->Init();
  84.         if (sts < Status::STATUS_NO_ERROR) {
  85.             if (revert) {
  86.                
  87.                 pp->Close();
  88.                 pp->EnableStream(Capture::STREAM_TYPE_DEPTH);
  89.                 sts = pp->Init();
  90.                 if (sts < Status::STATUS_NO_ERROR) {
  91.                     pp->Close();
  92.                     //pp->EnableStream(Capture::STREAM_TYPE_COLOR);
  93.                     sts = pp->Init();
  94.                 }
  95.             }
  96.             if (sts < Status::STATUS_NO_ERROR) {
  97.                 wprintf_s(L"Failed to locate any video stream(s)\n");
  98.                 pp->Release();
  99.             }
  100.         }
  101.  
  102.  
  103.        
  104.         device = pp->QueryCaptureManager()->QueryDevice();
  105.  
  106.         device->ResetProperties(Capture::STREAM_TYPE_ANY);
  107.         cout << "accuracy is: "<<device->IVCAM_ACCURACY_MEDIAN;
  108.  
  109.  
  110.  
  111.         for (int nframes = 0; ; nframes++) {
  112.             sts = pp->AcquireFrame(false);
  113.  
  114.             if (sts < Status::STATUS_NO_ERROR) {
  115.                 if (sts == Status::STATUS_STREAM_CONFIG_CHANGED) {
  116.                     wprintf_s(L"Stream configuration was changed, re-initilizing\n");
  117.                     pp->Close();
  118.                 }
  119.                 break;
  120.             }
  121.  
  122.             sample = pp->QuerySample();
  123.  
  124.             //const Capture::Sample *sample = pp->QuerySample();
  125.             if (sample) {
  126.                 cv::Mat img;
  127.                 //Copy all the vertices from Point3DF32 to the mat image
  128.                 //QueryVertices(Image *depth, Point3DF32 *vertices)
  129.                 //sample->color->QueryInfo()
  130.                
  131.                 depth_width = sample->depth->QueryInfo().width;
  132.                 depth_height = sample->depth->QueryInfo().height;
  133.                 cout << "depth width is: " << depth_width << endl;
  134.                 cout << "depth heigth is: " << depth_height << endl;
  135.  
  136.                 //cv::imshow("depth from OpenARK", Visualizer::visualizeDepthMap(img))
  137.                 if (sample->depth && !renderd.RenderFrame(sample->depth)) break;
  138.                 if (sample->color && !renderc.RenderFrame(sample->color)) break;
  139.                 if (sample->ir && !renderi.RenderFrame(sample->ir))    break;
  140.  
  141.  
  142.                 Image::ImageData depthImage;
  143.                 Image *depthMap = sample->depth;
  144.                 depthMap->AcquireAccess(Image::ACCESS_READ, &depthImage);
  145.                 Image::ImageInfo imgInfo = depthMap->QueryInfo();
  146.                 int depth_stride = depthImage.pitches[0] / sizeof(pxcU16);
  147.  
  148.  
  149.                 Projection * projection = device->CreateProjection();
  150.                 cout << "projection is: " << projection;
  151.                 cout << "device is: " << device;
  152.                 //int num_pixels = depth_height * depth_width;
  153.                 int num_pixels = X_DIMENSION * Y_DIMENSION;
  154.                 Point3DF32* vertices = new Point3DF32[num_pixels];
  155.                 projection->QueryVertices(depthMap, vertices);
  156.                 pxcU16 *dpixels = (pxcU16*)depthImage.planes[0];
  157.                 unsigned int dpitch = depthImage.pitches[0] / sizeof(pxcU16);
  158.  
  159.                 for (int k = 0; k < num_pixels; k++) {
  160.                     cout << "xx is " << vertices[k].x << endl;
  161.                     cout << "yy is " << vertices[k].y << endl;
  162.                     cout << "zz is " << vertices[k].z << endl;
  163.                 }
  164.             }
  165.             printf("opened sensor\n");
  166.             pp->ReleaseFrame();
  167.             printf("acquired image\n");
  168.         }
  169.  
  170.     } while (sts == Status::STATUS_STREAM_CONFIG_CHANGED);
  171.    
  172.  
  173.  
  174.        
  175.     /*res = pmdOpen(&hnd, SOURCE_PLUGIN, SOURCE_PARAM, PROC_PLUGIN, PROC_PARAM); //Open the PMD sensor
  176.     if (res != PMD_OK)
  177.     {
  178.         pmdGetLastError(0, err, 128);
  179.         fprintf(stderr, "Could not connect: %s\n", err);
  180.         return;
  181.     }
  182.  
  183.     */
  184.  
  185.  
  186.     // res: Structure which contains various meta-information about the data delivered by your Nano.
  187.     // It is advisabe to always use the data delivered by this struct (for example the width and height of the imager
  188.     // and the image format). Please refer to the PMSDSK documentation for more information
  189.    
  190.     /*
  191.     res = pmdGetSourceDataDescription(hnd, &dd);
  192.     if (sts < STATUS_NO_ERROR) {
  193.         pp->Release();
  194.         return;
  195.     }
  196.     */
  197.  
  198.     /*
  199.     if (res != PMD_OK)
  200.     {
  201.         pmdGetLastError(hnd, err, 128);
  202.         fprintf(stderr, "Couldn't get data description: %s\n", err);
  203.         pp->Release();
  204.         //pmdClose(hnd);
  205.         return;
  206.     }
  207.    
  208.     printf("retrieved source data description\n");
  209.     if (dd.subHeaderType != PMD_IMAGE_DATA)
  210.     {
  211.         fprintf(stderr, "Source data is not an image!\n");
  212.         pmdClose(hnd);
  213.         return;
  214.     }
  215.     */
  216.     //numRows = width and numCol = height? ?
  217.     //numPixels = dd.img.numRows * dd.img.numColumns; // Number of pixels in camera
  218.     numPixels = depth_width * depth_height;
  219.     dists = new float[3 * numPixels]; // Dists contains XYZ values. needs to be 3x the size of numPixels
  220.     amps = new float[numPixels];
  221.     //frame = cvCreateImage(cvSize(dd.img.numColumns, dd.img.numRows), 8, 3); // Create the frame
  222.     frame = cvCreateImage(cvSize(depth_width, depth_height), 8, 3); // Create the frame
  223.  
  224.     KF.init(6, 3, 0);
  225.     KF.transitionMatrix = (cv::Mat_<float>(6, 6) << 1, 0, 0, 1, 0, 0,
  226.                                                     0, 1, 0, 0, 1, 0,
  227.                                                     0, 0, 1, 0, 0, 1,
  228.                                                     0, 0, 0, 1, 0, 0,
  229.                                                     0, 0, 0, 0, 1, 0,
  230.                                                     0, 0, 0, 0, 0, 1);
  231.     measurement = cv::Mat_<float>::zeros(3, 1);
  232.  
  233.     //Initaite Kalman
  234.     KF.statePre.setTo(0);
  235.     setIdentity(KF.measurementMatrix);
  236.     setIdentity(KF.processNoiseCov, cv::Scalar::all(.001)); // Adjust this for faster convergence - but higher noise
  237.     setIdentity(KF.measurementNoiseCov, cv::Scalar::all(1e-1));
  238.     setIdentity(KF.errorCovPost, cv::Scalar::all(.1));
  239.  
  240.     /* mona will test this
  241.     setIdentity(KF.measurementMatrix);
  242.     setIdentity(KF.processNoiseCov, Scalar::all(smoothness));
  243.     setIdentity(KF.measurementNoiseCov, Scalar::all(rapidness));
  244.     setIdentity(KF.errorCovPost, Scalar::all(.1));
  245.     */
  246. }
  247.  
  248. /***
  249. Public deconstructor for the SR300 Camera depth sensor
  250. ***/
  251. SR300Camera::~SR300Camera() {};
  252.  
  253. void SR300Camera::destroyInstance()
  254. {
  255.     printf("closing sensor\n");
  256.     pp->Release();
  257.     printf("sensor closed\n");
  258. }
  259.  
  260. /***
  261. Create xyzMap, zMap, ampMap, and flagMap from sensor input
  262. ***/
  263. void SR300Camera::update()
  264. {
  265.     initilizeImages();
  266.    
  267.     fillInAmps();
  268.     //fillInZCoords();
  269.     // Flags. Helps with denoising.
  270.     /*
  271.     unsigned *flags = new unsigned[ampMap.cols*ampMap.rows];
  272.     int res = pmdGetFlags(hnd, flags, numPixels * sizeof(unsigned));
  273.     flagMap.data = (uchar *)flags;
  274.  
  275.     pmdUpdate(hnd);
  276.     delete(flags);
  277.     */
  278.     //mona pp->ReleaseFrame(); //correct?
  279.     //mona pxcStatus sts = pp->AcquireFrame(true);
  280.     //if (sts < PXC_STATUS_NO_ERROR)
  281.     //  return;
  282.  
  283. }
  284.  
  285.  
  286. /***
  287. Reads the depth data from the sensor and fills in the matrix
  288. ***/
  289. /*
  290. void SR300Camera::fillInZCoords()
  291. {
  292.    
  293.     Image::ImageData depthImage;
  294.     Image *depthMap = sample->depth;
  295.     depthMap->AcquireAccess(Image::ACCESS_READ, &depthImage);
  296.     Image::ImageInfo imgInfo = depthMap->QueryInfo();
  297.     int depth_stride = depthImage.pitches[0] / sizeof(pxcU16);
  298.     unsigned int wxhDepth = depth_width * depth_height;
  299.  
  300.     auto sts = pp->EnableStream(Capture::StreamType::STREAM_TYPE_DEPTH,
  301.         depth_width, depth_height, depth_fps);
  302.     if (sts < PXC_STATUS_NO_ERROR) {
  303.         throw std::runtime_error("depth stream encoutered an error!!");
  304.     }
  305.     pp->Init();
  306.     device = pp->QueryCaptureManager()->QueryDevice();
  307.     //cout<<"accuracy is: "<<device->IVCAM_ACCURACY_MEDIAN;
  308.  
  309.     Projection * projection = device->CreateProjection();
  310.     cout << "projection is: " << projection;
  311.     cout << "device is: " << device;
  312.    
  313.     Point3DF32* vertices = new Point3DF32[wxhDepth];
  314.     projection->QueryVertices(depthMap, vertices);
  315.     pxcU16 *dpixels = (pxcU16*)depthImage.planes[0];
  316.     unsigned int dpitch = depthImage.pitches[0] / sizeof(pxcU16);
  317.    
  318.     for (int k = 0; k < wxhDepth; k++) {
  319.         cout << "xx is " << vertices[k].x << endl;
  320.         cout << "yy is " << vertices[k].y << endl;
  321.         cout << "zz is " << vertices[k].z << endl;
  322.     }
  323.    
  324.  
  325. }
  326.  
  327.     */
  328.  
  329.    
  330.     /*
  331.     for (unsigned int yy = 0, k = 0; yy < depth_height; yy++)
  332.     {
  333.         for (unsigned int xx = 0; xx < depth_width; xx++, k++)
  334.         {
  335.             cout << "xx is " << pos3D[k].x*1000.0 << endl;
  336.             cout << "yy is " << pos3D[k].y*1000.0 << endl;
  337.             cout << "zz is " << pos3D[k].z*1000.0 << endl;
  338.             xyzBuffer.push_back(cv::Point3f(pos3D[k].x, pos3D[k].y, pos3D[k].z));
  339.         }
  340.     }
  341.     */
  342.  
  343.     /*
  344.     for (int idx = 0; idx < wxhDepth; idx++) {
  345.  
  346.         cout << "x is " << pos3D[idx].x*1000.0 << endl;
  347.         cout << "y is " << pos3D[idx].y*1000.0 << endl;
  348.         cout << "z is " << pos3D[idx].z*1000.0 << endl;
  349.         xyzBuffer.push_back(cv::Point3f(pos3D[idx].x, pos3D[idx].y, pos3D[idx].z));
  350.     }
  351.     */
  352.    
  353.     //xyzMap = cv::Mat(xyzMap.size(), xyzMap.type, &pos3D);
  354.     //xyzMap = cv::Mat(xyzBuffer);
  355.     //cout << "xyzMap = " << endl << " " << xyzMap << endl << endl;
  356.  
  357.     //projection->Release();
  358.     //delete[] posUVZ;
  359.     //delete[] pos3D;
  360.    
  361. //};
  362.  
  363.  
  364.  
  365.  
/***
Reads the amplitude data from the sensor and fills in the matrix
***/
void SR300Camera::fillInAmps()
{
    // Currently a no-op for the SR300: the commented-out amplitude query
    // below is left over from the previous PMD sensor backend.
    // TODO(review): confirm whether ampMap should instead be filled from
    // RealSense IR data.
    //int res = pmdGetAmplitudes(hnd, amps, numPixels * sizeof(float));
    //float * dataPtr = amps;
    //ampMap.data = (uchar *)amps;
}
  375.  
  376. /***
  377. Returns the X value at (i, j)
  378. ***/
  379. float SR300Camera::getX(int i, int j) const
  380. {
  381.     //int flat = j * dd.img.numColumns * 3 + i * 3;
  382.     int flat = j * depth_width * 3 + i * 3;
  383.     return dists[flat];
  384. }
  385.  
  386. /***
  387. Returns the Y value at (i, j)
  388. ***/
  389. float SR300Camera::getY(int i, int j) const
  390. {
  391.     //int flat = j * dd.img.numColumns * 3 + i * 3;
  392.     int flat = j * depth_width * 3 + i * 3;
  393.     return dists[flat + 1];
  394. }
  395.  
  396. /***
  397. Returns the Z value at (i, j)
  398. ***/
  399. float SR300Camera::getZ(int i, int j) const
  400. {
  401.     //int flat = j * dd.img.numColumns * 3 + i * 3;
  402.     int flat = j * depth_width * 3 + i * 3;
  403.     return dists[flat + 2];
  404. }
  405.  
  406. /***
  407. Convert the depth coordinates to world coordinates
  408. Taken from the Projection.cpp from the RSSDK samples
  409. ***/
  410.  
  411. /*
  412. void SR300Camera::DepthToWorld(Image *depth, vector<PointF32> dcords, vector<PointF32> &wcords) const
  413. {
  414.     Image::ImageData ddata;
  415.     if (!projection) return;
  416.     if (Status::STATUS_NO_ERROR > depth->AcquireAccess(Image::ACCESS_READ, Image::PIXEL_FORMAT_DEPTH, &ddata))
  417.         return;
  418.     int dsize = dcords.size();
  419.     Point3DF32 invP3D = { -1.f, -1.f, 0.f };
  420.     vector<Point3DF32> dcordsAbs(dsize, invP3D);
  421.     vector<Point3DF32> wcords3D(dsize, invP3D);
  422.     PointF32 invP = { -1.f, -1.f };
  423.     wcords.resize(dsize, invP);
  424.     auto dwidth = static_cast<float>(depth->QueryInfo().width), dheight = static_cast<float>(depth->QueryInfo().height);
  425.     for (auto i = 0; i < dsize; i++) {
  426.         if (dcords[i].x < 0) continue;
  427.         dcordsAbs[i].x = dcords[i].x * dwidth, dcordsAbs[i].y = dcords[i].y * dheight;
  428.         dcordsAbs[i].z = static_cast<pxcF32>(reinterpret_cast<pxcI16*>(ddata.planes[0] + static_cast<int>(dcordsAbs[i].y) * ddata.pitches[0])[static_cast<int>(dcordsAbs[i].x)]);
  429.     }
  430.     depth->ReleaseAccess(&ddata);
  431.  
  432.     projection->ProjectDepthToCamera(dsize, &dcordsAbs[0], &wcords3D[0]);
  433.     for (auto i = 0; i < dsize; i++) {
  434.         if (dcordsAbs[i].z <= 0) wcords[i].x = wcords[i].y = -1.f;
  435.         else wcords[i].x = dcords[i].x; wcords[i].y = dcords[i].y;
  436.     }
  437. }
  438. */
  439.  
  440.  
  441. /*
  442. void SR300Camera::getXYZbuffer()
  443. {
  444.     Projection *projection = pp->QueryCaptureManager()->QueryDevice()->CreateProjection();
  445.     std::vector<Point3DF32> vertices;
  446.     vertices.resize(bufferSize.width * bufferSize.height);
  447.     projection->QueryVertices(sample->depth, &vertices[0]);
  448.     xyzBuffer.clear();
  449.    
  450.     for (int i = 0; i < bufferSize.width*bufferSize.height; i++) {
  451.         Point3f p;
  452.         p.x = vertices[i].x;
  453.         p.y = vertices[i].y;
  454.         p.z = vertices[i].z;
  455.         xyzBuffer.push_back(p);
  456.     }
  457.  
  458.     //xyzMap = cv::Mat(xyzMap.size(), xyzMap.type(), xyzBuffer);
  459.     //xyzMap = xyzBuffer;
  460.     cv::Mat xyzMat(xyzBuffer);
  461.     projection->Release();
  462. }
  463. */
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement