lamiastella

access violation vcruntime140.dll

May 23rd, 2017
#include "SR300Camera.h"
#include "RealSense/SenseManager.h"
#include "RealSense/SampleReader.h"
#include "util_render.h"

#include <iostream>
#include <vector>

using namespace std;
using namespace Intel::RealSense;


SenseManager *pp = SenseManager::CreateInstance();
Capture::Device *device;
CaptureManager *cm;

const int depth_fps = 30;
const int depth_width = 640;
const int depth_height = 480;
cv::Size bufferSize;
const PXCCapture::Sample *sample;
//std::vector<CV_32FC3> xyzBuffer;
std::vector<cv::Point3f> xyzBuffer;

/***
Private constructor for the SR300 Camera depth sensor
***/
SR300Camera::SR300Camera(bool use_live_sensor)
{
    CONFIDENCE_THRESHHOLD = (60.0f / 255.0f*500.0f);
    //INVALID_FLAG_VALUE = PMD_FLAG_INVALID;
    X_DIMENSION = depth_width;
    Y_DIMENSION = depth_height;

    if (!use_live_sensor) {
        return;
    }

    // Allocated after the early return above so it is not leaked when no live
    // sensor is requested; it is freed once the capture loop finishes.
    int num_pixels = X_DIMENSION * Y_DIMENSION;
    Point3DF32* vertices = new Point3DF32[num_pixels];

    std::cout << "Trying to open SR300 camera\n";

    if (!pp) {
        wprintf_s(L"Unable to create the SenseManager\n");
        delete[] vertices;
        return; // nothing can be done without a SenseManager
    }

    /* Sets file recording or playback */
    cm = pp->QueryCaptureManager();
    //Status sts= STATUS_NO_ERROR;
    Status sts = STATUS_DATA_UNAVAILABLE; // TODO(mona): is this the correct initialization?

    // Create stream renders
    UtilRender renderc(L"Color"), renderd(L"Depth"), renderi(L"IR"), renderr(L"Right"), renderl(L"Left");

    do {
        bool revert = false;
        /*
        DataDesc desc = {};

        if (cm->QueryCapture()) {
        cm->QueryCapture()->QueryDeviceInfo(0, &desc.deviceInfo);
        }
        else {
        //desc.deviceInfo.streams = Capture::STREAM_TYPE_COLOR | Capture::STREAM_TYPE_DEPTH;
        desc.deviceInfo.streams = Capture::STREAM_TYPE_DEPTH;
        revert = true;
        }
        */
        //pp->EnableStreams(&desc);
        pp->EnableStream(Capture::StreamType::STREAM_TYPE_DEPTH,
            X_DIMENSION, Y_DIMENSION, depth_fps);
        revert = true;
        sts = pp->Init();
        if (sts < Status::STATUS_NO_ERROR) {

            if (revert) {

                pp->Close();
                pp->EnableStream(Capture::STREAM_TYPE_DEPTH);
                sts = pp->Init();
                if (sts < Status::STATUS_NO_ERROR) {
                    pp->Close();
                    //pp->EnableStream(Capture::STREAM_TYPE_COLOR);
                    sts = pp->Init();
                }
            }
            if (sts < Status::STATUS_NO_ERROR) {
                wprintf_s(L"Failed to locate any video stream(s)\n");
                pp->Release();
                delete[] vertices;
                return; // do not keep using the released SenseManager below
            }
        }

        device = pp->QueryCaptureManager()->QueryDevice();

        device->ResetProperties(Capture::STREAM_TYPE_ANY);
        //cout << "accuracy is: "<<device->IVCAM_ACCURACY_MEDIAN;

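        // Main acquisition loop: grab frames until AcquireFrame() fails
        // (e.g. the stream configuration changes or the device is lost).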
        for (int nframes = 0; ; nframes++) {
            sts = pp->AcquireFrame(false);

            if (sts < Status::STATUS_NO_ERROR) {
                if (sts == Status::STATUS_STREAM_CONFIG_CHANGED) {
                    wprintf_s(L"Stream configuration was changed, re-initializing\n");
                    pp->Close();
                }
                break;
            }

            sample = pp->QuerySample();

            //const Capture::Sample *sample = pp->QuerySample();
            if (sample) {
                cv::Mat img;
                //Copy all the vertices from Point3DF32 to the mat image
                //QueryVertices(Image *depth, Point3DF32 *vertices)
                //sample->color->QueryInfo()

                //cv::imshow("depth from OpenARK", Visualizer::visualizeDepthMap(img))
                if (sample->depth && !renderd.RenderFrame(sample->depth)) break;
                //if (sample->color && !renderc.RenderFrame(sample->color)) break;
                //if (sample->ir && !renderi.RenderFrame(sample->ir))    break;
                Image::ImageData depthImage;
                Image *depthMap = sample->depth;
                // Read-only access is sufficient here; the depth pixels are only copied out.
                depthMap->AcquireAccess(Image::ACCESS_READ, &depthImage);

                Image::ImageInfo imgInfo = depthMap->QueryInfo();
                Projection * projection = device->CreateProjection();
                cout << "projection is: " << projection << endl;
                cout << "device is: " << device << endl;
                //int num_pixels = depth_height * depth_width;

                //projection->QueryVertices(depthMap, vertices);
                pxcU16 *dpixels = (pxcU16*)depthImage.planes[0];
                unsigned int dpitch = depthImage.pitches[0] / sizeof(pxcU16);
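                // Note: pitches[0] is the row stride of the depth plane in bytes, so
                // dpitch is the number of 16-bit depth pixels per row. planes[0] maps
                // only the frame just acquired, so pixels are addressed as
                // dpixels[y * dpitch + x] with no per-frame offset.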

                /*for (int k = 0; k < num_pixels; k++) {
                cout << "xx is " << vertices[k].x << endl;
                cout << "yy is " << vertices[k].y << endl;
                cout << "zz is " << vertices[k].z << endl;
                }*/
                //cout << "xx is " << vertices[0].x << endl;
                //cout << "yy is " << vertices[0].y << endl;
                //cout << "zz is " << vertices[0].z << endl;
                //cout << "*******************" << endl;

                Point3DF32* posUVZ = new Point3DF32[num_pixels];
                Point3DF32* pos3D = new Point3DF32[num_pixels];

                for (int y = 0; y < Y_DIMENSION; y++) {
                    for (int x = 0; x < X_DIMENSION; x++) {
                        // Populate each point with its depth-image pixel coordinates
                        // (row-major, width = X_DIMENSION) and the 16-bit depth value
                        // read through the row pitch of the current frame only.
                        posUVZ[y*X_DIMENSION + x].x = (pxcF32)x;
                        posUVZ[y*X_DIMENSION + x].y = (pxcF32)y;
                        posUVZ[y*X_DIMENSION + x].z = (pxcF32)dpixels[y*dpitch + x];
                    }
                }

                //projection->ProjectDepthToCamera(num_pixels, posUVZ, pos3D);

                /*
                for (unsigned int yy = 0, k = 0; yy < Y_DIMENSION; yy++)
                {
                for (unsigned int xx = 0; xx < X_DIMENSION; xx++, k++)
                {
                posUVZ[k].x = (pxcF32)xx;
                posUVZ[k].y = (pxcF32)yy;
                posUVZ[k].z = (pxcF32)dpixels[yy * dpitch + xx];
                //  cout << "xx is " << posUVZ[k].x << endl;
                // cout << "yy is " << posUVZ[k].y << endl;
                // cout << "zz is " << posUVZ[k].z<< endl;
                }
                }
                */

                // Convert the depth-image coordinates + depth values (posUVZ) into
                // camera-space world coordinates in mm (pos3D).
                if (projection->ProjectDepthToCamera(num_pixels, posUVZ, pos3D) < Status::STATUS_NO_ERROR)
                {
                    delete[] posUVZ;
                    delete[] pos3D;
                    delete[] vertices;
                    projection->Release();
                    depthMap->ReleaseAccess(&depthImage);
                    pp->ReleaseFrame();
                    cout << "projection unsuccessful" << endl;
                    return;
                }

                cout << "*************" << endl;
                // xyzBuffer is reused across frames, so clear it first; otherwise it
                // grows every frame and the memcpy below writes past the end of xyzMap.
                xyzBuffer.clear();
                for (unsigned int yy = 0, k = 0; yy < Y_DIMENSION; yy++)
                {
                    for (unsigned int xx = 0; xx < X_DIMENSION; xx++, k++)
                    {
                        //cout << "xx is " << pos3D[k].x << endl;
                        //cout << "yy is " << pos3D[k].y << endl;
                        //cout << "zz is " << pos3D[k].z << endl;
                        xyzBuffer.push_back(cv::Point3f(pos3D[k].x, pos3D[k].y, pos3D[k].z));
                    }
                }
                cout << "/////////\\\\\\\\" << endl;
                delete[] posUVZ;
                delete[] pos3D;
                projection->Release();
                depthMap->ReleaseAccess(&depthImage); // TODO(mona): is this in the right place?
                // Copy the points into xyzMap (assumed to be pre-allocated as an
                // X_DIMENSION x Y_DIMENSION CV_32FC3 cv::Mat). Size the copy with
                // sizeof(cv::Point3f): CV_32FC3 is only OpenCV's type constant, so
                // sizeof(CV_32FC3) is not the size of a 3-float pixel.
                memcpy(xyzMap.data, xyzBuffer.data(), xyzBuffer.size() * sizeof(cv::Point3f));
            }
            printf("opened sensor\n");
            pp->ReleaseFrame();
            printf("acquired image\n");
            cout << "xyzMap = " << endl << " " << xyzMap << endl << endl;

        }

    } while (sts == Status::STATUS_STREAM_CONFIG_CHANGED);

    delete[] vertices; // only needed by the commented-out QueryVertices path above

    // TODO: confirm whether numRows is the width and numColumns the height
    //numPixels = dd.img.numRows * dd.img.numColumns; // Number of pixels in camera
    numPixels = depth_width * depth_height;
    dists = new float[3 * numPixels]; // dists holds interleaved XYZ values, so it is 3x numPixels
    amps = new float[numPixels];
    //frame = cvCreateImage(cvSize(dd.img.numColumns, dd.img.numRows), 8, 3); // Create the frame
    frame = cvCreateImage(cvSize(depth_width, depth_height), 8, 3); // Create the frame

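    // Kalman filter setup: a 6-dimensional state [x, y, z, vx, vy, vz] with a
    // 3-dimensional position measurement. The transition matrix below is a
    // constant-velocity model with a unit time step: each position is advanced
    // by its velocity, and the velocities are carried over unchanged.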
    KF.init(6, 3, 0);
    KF.transitionMatrix = (cv::Mat_<float>(6, 6) << 1, 0, 0, 1, 0, 0,
        0, 1, 0, 0, 1, 0,
        0, 0, 1, 0, 0, 1,
        0, 0, 0, 1, 0, 0,
        0, 0, 0, 0, 1, 0,
        0, 0, 0, 0, 0, 1);
    measurement = cv::Mat_<float>::zeros(3, 1);

    // Initiate the Kalman filter
    KF.statePre.setTo(0);
    setIdentity(KF.measurementMatrix);
    setIdentity(KF.processNoiseCov, cv::Scalar::all(.001)); // Adjust this for faster convergence - but higher noise
    setIdentity(KF.measurementNoiseCov, cv::Scalar::all(1e-1));
    setIdentity(KF.errorCovPost, cv::Scalar::all(.1));

    /* TODO(mona): test this alternative initialization
    setIdentity(KF.measurementMatrix);
    setIdentity(KF.processNoiseCov, Scalar::all(smoothness));
    setIdentity(KF.measurementNoiseCov, Scalar::all(rapidness));
    setIdentity(KF.errorCovPost, Scalar::all(.1));
    */
}

/***
Public destructor for the SR300 Camera depth sensor
***/
SR300Camera::~SR300Camera() {}

void SR300Camera::destroyInstance()
{
    printf("closing sensor\n");
    pp->Release();
    printf("sensor closed\n");
}

/***
Create xyzMap, zMap, ampMap, and flagMap from sensor input
***/
void SR300Camera::update()
{
    initilizeImages();

    fillInAmps();
}

/***
Reads the amplitude data from the sensor and fills in the matrix
***/
void SR300Camera::fillInAmps()
{
    // Currently a stub: pmdGetAmplitudes() belongs to the PMD SDK, not to the
    // RealSense SDK, so ampMap is left untouched for the SR300.
    //int res = pmdGetAmplitudes(hnd, amps, numPixels * sizeof(float));
    //float * dataPtr = amps;
    //ampMap.data = (uchar *)amps;
}

/***
Returns the X value at (i, j)
***/
float SR300Camera::getX(int i, int j) const
{
    //int flat = j * dd.img.numColumns * 3 + i * 3;
    int flat = j * depth_width * 3 + i * 3;
    return dists[flat];
}

/***
Returns the Y value at (i, j)
***/
float SR300Camera::getY(int i, int j) const
{
    //int flat = j * dd.img.numColumns * 3 + i * 3;
    int flat = j * depth_width * 3 + i * 3;
    return dists[flat + 1];
}

/***
Returns the Z value at (i, j)
***/
float SR300Camera::getZ(int i, int j) const
{
    //int flat = j * dd.img.numColumns * 3 + i * 3;
    int flat = j * depth_width * 3 + i * 3;
    return dists[flat + 2];
}
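
// ---------------------------------------------------------------------------
// A minimal usage sketch (not part of the original file). It assumes the
// constructor, update(), getX()/getY()/getZ() and destroyInstance() shown above
// are publicly accessible as declared in SR300Camera.h, and that the rest of
// the project provides the xyzMap/ampMap members. Kept inside #if 0 so it does
// not interfere with the project's own entry point.
// ---------------------------------------------------------------------------
#if 0
int main()
{
    SR300Camera camera(true);   // open the live SR300 sensor

    camera.update();            // refresh the internal maps from the sensor

    // Sample the world coordinates (in mm) of the centre pixel.
    int cx = depth_width / 2, cy = depth_height / 2;
    cout << "centre point: " << camera.getX(cx, cy) << ", "
         << camera.getY(cx, cy) << ", " << camera.getZ(cx, cy) << endl;

    camera.destroyInstance();   // release the SenseManager
    return 0;
}
#endif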