Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
- #include "SR300Camera.h"
- #include "RealSense/SenseManager.h"
- #include "RealSense/SampleReader.h"
- #include "util_render.h"
- #include "Visualizer.h"
- #include <iostream>
/***
Private constructor for the SR300 Camera depth sensor
***/
using namespace std;
//using namespace Intel::RealSense;
// File-scope RealSense pipeline state shared by all SR300Camera instances.
// NOTE(review): mutable globals — this assumes a single camera/instance; confirm.
PXCSenseManager *pp = PXCSenseManager::CreateInstance(); // SDK pipeline/session manager (created at static-init time)
PXCCapture::Device *device;   // capture device handle; assigned in the constructor
PXCCaptureManager *cm;        // capture manager queried from pp in the constructor
const int depth_fps = 30;     // requested depth stream frame rate
const int depth_width = 640;  // requested depth stream width in pixels
const int depth_height = 480; // requested depth stream height in pixels
cv::Size bufferSize;
const PXCCapture::Sample *sample; // latest sample returned by pp->QuerySample()
//std::vector<CV_32FC3> xyzBuffer;
std::vector<cv::Point3f> xyzBuffer; // scratch buffer of projected camera-space points (mm)
- SR300Camera::SR300Camera(bool use_live_sensor)
- {
- CONFIDENCE_THRESHHOLD = (60.0f / 255.0f*500.0f);
- //INVALID_FLAG_VALUE = PMD_FLAG_INVALID;
- X_DIMENSION = depth_width;
- Y_DIMENSION = depth_height;
- int num_pixels = X_DIMENSION * Y_DIMENSION;
- PXCPoint3DF32* vertices = new PXCPoint3DF32[num_pixels];
- if (!use_live_sensor) {
- return;
- }
- int res;
- std::cout << "Trying to open SR300 camera\n";
- if (!pp) {
- wprintf_s(L"Unable to create the SenseManager\n");
- }
- /* Sets file recording or playback */
- cm = pp->QueryCaptureManager();
- //Status sts= STATUS_NO_ERROR;
- Status sts = STATUS_DATA_UNAVAILABLE; //mona is this the correct initialization?
- // Create stream renders
- UtilRender renderc(L"Color"), renderd(L"Depth"), renderi(L"IR"), renderr(L"Right"), renderl(L"Left");
- do {
- bool revert = false;
- pp->EnableStream(PXCCapture::StreamType::STREAM_TYPE_DEPTH,
- X_DIMENSION, Y_DIMENSION, depth_fps);
- revert = true;
- sts = pp->Init();
- if (sts < Status::STATUS_NO_ERROR) {
- if (revert) {
- pp->Close();
- pp->EnableStream(PXCCapture::STREAM_TYPE_DEPTH);
- sts = pp->Init();
- if (sts < Status::STATUS_NO_ERROR) {
- pp->Close();
- //pp->EnableStream(Capture::STREAM_TYPE_COLOR);
- sts = pp->Init();
- }
- }
- if (sts < Status::STATUS_NO_ERROR) {
- wprintf_s(L"Failed to locate any video stream(s)\n");
- pp->Release();
- }
- }
- device = pp->QueryCaptureManager()->QueryDevice();
- device->ResetProperties(PXCCapture::STREAM_TYPE_ANY);
- for (int nframes = 0; ; nframes++) {
- sts = pp->AcquireFrame(false);
- if (sts < Status::STATUS_NO_ERROR) {
- if (sts == Status::STATUS_STREAM_CONFIG_CHANGED) {
- wprintf_s(L"Stream configuration was changed, re-initilizing\n");
- pp->Close();
- }
- break;
- }
- sample = pp->QuerySample();
- if (sample) {
- cv::Mat img;
- SR300Camera::ConvertPXCImageToOpenCVMat(sample->depth, &img);
- cv::imshow("depth from OpenARK", Visualizer::visualizeDepthMap(img));
- if (sample->depth && !renderd.RenderFrame(sample->depth)) break;
- //if (sample->color && !renderc.RenderFrame(sample->color)) break;
- //if (sample->ir && !renderi.RenderFrame(sample->ir)) break;
- PXCImage::ImageData depthImage;
- PXCImage *depthMap = sample->depth;
- //depthMap->AcquireAccess(Image::ACCESS_READ, &depthImage);
- depthMap->AcquireAccess(PXCImage::ACCESS_WRITE, &depthImage);
- PXCImage::ImageInfo imgInfo = depthMap->QueryInfo();
- int depth_stride = depthImage.pitches[0] / sizeof(pxcU16);
- PXCProjection * projection = device->CreateProjection();
- cout << "projection is: " << projection;
- cout << "device is: " << device;
- //int num_pixels = depth_height * depth_width;
- //projection->QueryVertices(depthMap, vertices);
- pxcU16 *dpixels = (pxcU16*)depthImage.planes[0];
- unsigned int dpitch = depthImage.pitches[0] / sizeof(pxcU16);
- /*for (int k = 0; k < num_pixels; k++) {
- cout << "xx is " << vertices[k].x << endl;
- cout << "yy is " << vertices[k].y << endl;
- cout << "zz is " << vertices[k].z << endl;
- }*/
- //cout << "xx is " << vertices[0].x << endl;
- //cout << "yy is " << vertices[0].y << endl;
- //cout << "zz is " << vertices[0].z << endl;
- //cout << "*******************" << endl;
- PXCPoint3DF32* posUVZ = new PXCPoint3DF32[num_pixels];
- PXCPoint3DF32 *pos3D = new PXCPoint3DF32[num_pixels];
- for (int y = 0; y < Y_DIMENSION; y++) {
- for (int x = 0; x < X_DIMENSION; x++) {
- // populate the point with depth image pixel coordinates and the depth value
- posUVZ[y*Y_DIMENSION + x].x = (pxcF32)x;
- posUVZ[y*Y_DIMENSION + x].y = (pxcF32)y;
- posUVZ[y*Y_DIMENSION + x].z = dpixels[nframes*X_DIMENSION*Y_DIMENSION + y*Y_DIMENSION + x];
- }
- }
- //projection->ProjectDepthToCamera(num_pixels, posUVZ, pos3D);
- /*
- for (unsigned int yy = 0, k = 0; yy < Y_DIMENSION; yy++)
- {
- for (unsigned int xx = 0; xx < X_DIMENSION; xx++, k++)
- {
- posUVZ[k].x = (pxcF32)xx;
- posUVZ[k].y = (pxcF32)yy;
- posUVZ[k].z = (pxcF32)dpixels[yy * dpitch + xx];
- // cout << "xx is " << posUVZ[k].x << endl;
- // cout << "yy is " << posUVZ[k].y << endl;
- // cout << "zz is " << posUVZ[k].z<< endl;
- }
- }
- */
- // convert the array of depth coordinates + depth value (posUVZ) into the world coordinates (pos3D) in mm
- //Step 1: Set up the point cloud in camera coordinates and do the projection
- //projection->ProjectDepthToCamera(num_pixels, posUVZ, pos3D);
- if (projection->ProjectDepthToCamera(num_pixels, posUVZ, pos3D) < PXC_STATUS_NO_ERROR)
- {
- delete[] posUVZ;
- delete[] pos3D;
- cout << "projection unsucessful";
- return;
- }
- cout << "*************" << endl;
- for (unsigned int yy = 0, k = 0; yy < Y_DIMENSION; yy++)
- {
- for (unsigned int xx = 0; xx < X_DIMENSION; xx++, k++)
- {
- //cout << "xx is " << pos3D[k].x << endl;
- //cout << "yy is " << pos3D[k].y << endl;
- //cout << "zz is " << pos3D[k].z << endl;
- xyzBuffer.push_back(cv::Point3f(pos3D[k].x, pos3D[k].y, pos3D[k].z));
- }
- }
- cout << "/////////\\\\\\\\" << endl;
- projection->Release();
- depthMap->ReleaseAccess(&depthImage); //is this in the right place mona?
- memcpy(xyzMap.data, xyzBuffer.data(), xyzBuffer.size() * sizeof(CV_32FC3));
- }
- printf("opened sensor\n");
- pp->ReleaseFrame();
- printf("acquired image\n");
- cout << "xyzMap = " << endl << " " << xyzMap << endl << endl;
- }
- } while (sts == Status::STATUS_STREAM_CONFIG_CHANGED);
- //numRows = width and numCol = height? ?
- //numPixels = dd.img.numRows * dd.img.numColumns; // Number of pixels in camera
- numPixels = depth_width * depth_height;
- dists = new float[3 * numPixels]; // Dists contains XYZ values. needs to be 3x the size of numPixels
- amps = new float[numPixels];
- //frame = cvCreateImage(cvSize(dd.img.numColumns, dd.img.numRows), 8, 3); // Create the frame
- frame = cvCreateImage(cvSize(depth_width, depth_height), 8, 3); // Create the frame
- KF.init(6, 3, 0);
- KF.transitionMatrix = (cv::Mat_<float>(6, 6) << 1, 0, 0, 1, 0, 0,
- 0, 1, 0, 0, 1, 0,
- 0, 0, 1, 0, 0, 1,
- 0, 0, 0, 1, 0, 0,
- 0, 0, 0, 0, 1, 0,
- 0, 0, 0, 0, 0, 1);
- measurement = cv::Mat_<float>::zeros(3, 1);
- //Initaite Kalman
- KF.statePre.setTo(0);
- setIdentity(KF.measurementMatrix);
- setIdentity(KF.processNoiseCov, cv::Scalar::all(.001)); // Adjust this for faster convergence - but higher noise
- setIdentity(KF.measurementNoiseCov, cv::Scalar::all(1e-1));
- setIdentity(KF.errorCovPost, cv::Scalar::all(.1));
- /* mona will test this
- setIdentity(KF.measurementMatrix);
- setIdentity(KF.processNoiseCov, Scalar::all(smoothness));
- setIdentity(KF.measurementNoiseCov, Scalar::all(rapidness));
- setIdentity(KF.errorCovPost, Scalar::all(.1));
- */
- }
- /***
- Public deconstructor for the SR300 Camera depth sensor
- ***/
- SR300Camera::~SR300Camera() {};
- void SR300Camera::destroyInstance()
- {
- printf("closing sensor\n");
- pp->Release();
- printf("sensor closed\n");
- }
/***
Create xyzMap, zMap, ampMap, and flagMap from sensor input
***/
void SR300Camera::update()
{
    initilizeImages(); // reset the per-frame image buffers (declared outside this file)
    fillInAmps();      // populate the amplitude map (currently a no-op stub below)
}
/***
Reads the amplitude data from the sensor and fills in the matrix
***/
// NOTE(review): intentionally a stub — the PMD-style amplitude read below is
// commented out and ampMap is left untouched. Confirm whether the SR300
// exposes an equivalent amplitude/IR stream before implementing.
void SR300Camera::fillInAmps()
{
    //int res = pmdGetAmplitudes(hnd, amps, numPixels * sizeof(float));
    //float * dataPtr = amps;
    //ampMap.data = (uchar *)amps;
}
- void SR300Camera::ConvertPXCImageToOpenCVMat(PXCImage *inImg, cv::Mat *outImg) {
- int cvDataType;
- int cvDataWidth;
- PXCImage::ImageData data;
- inImg->AcquireAccess(PXCImage::ACCESS_READ, &data);
- PXCImage::ImageInfo imgInfo = inImg->QueryInfo();
- switch (data.format) {
- /* STREAM_TYPE_COLOR */
- case PXCImage::PIXEL_FORMAT_YUY2: /* YUY2 image */
- case PXCImage::PIXEL_FORMAT_NV12: /* NV12 image */
- throw(0); // Not implemented
- case PXCImage::PIXEL_FORMAT_RGB32: /* BGRA layout on a little-endian machine */
- cvDataType = CV_8UC4;
- cvDataWidth = 4;
- break;
- case PXCImage::PIXEL_FORMAT_RGB24: /* BGR layout on a little-endian machine */
- cvDataType = CV_8UC3;
- cvDataWidth = 3;
- break;
- case PXCImage::PIXEL_FORMAT_Y8: /* 8-Bit Gray Image, or IR 8-bit */
- cvDataType = CV_8U;
- cvDataWidth = 1;
- break;
- /* STREAM_TYPE_DEPTH */
- case PXCImage::PIXEL_FORMAT_DEPTH: /* 16-bit unsigned integer with precision mm. */
- case PXCImage::PIXEL_FORMAT_DEPTH_RAW: /* 16-bit unsigned integer with device specific precision (call device->QueryDepthUnit()) */
- cvDataType = CV_16U;
- cvDataWidth = 2;
- break;
- case PXCImage::PIXEL_FORMAT_DEPTH_F32: /* 32-bit float-point with precision mm. */
- cvDataType = CV_32F;
- cvDataWidth = 4;
- break;
- /* STREAM_TYPE_IR */
- case PXCImage::PIXEL_FORMAT_Y16: /* 16-Bit Gray Image */
- cvDataType = CV_16U;
- cvDataWidth = 2;
- break;
- case PXCImage::PIXEL_FORMAT_Y8_IR_RELATIVE: /* Relative IR Image */
- cvDataType = CV_8U;
- cvDataWidth = 1;
- break;
- }
- // suppose that no other planes
- if (data.planes[1] != NULL) throw(0); // not implemented
- // suppose that no sub pixel padding needed
- if (data.pitches[0] % cvDataWidth != 0) throw(0); // not implemented
- outImg->create(imgInfo.height, data.pitches[0] / cvDataWidth, cvDataType);
- memcpy(outImg->data, data.planes[0], imgInfo.height*imgInfo.width*cvDataWidth * sizeof(pxcBYTE));
- inImg->ReleaseAccess(&data);
- }
- /***
- Returns the X value at (i, j)
- ***/
- float SR300Camera::getX(int i, int j) const
- {
- //int flat = j * dd.img.numColumns * 3 + i * 3;
- int flat = j * depth_width * 3 + i * 3;
- return dists[flat];
- }
- /***
- Returns the Y value at (i, j)
- ***/
- float SR300Camera::getY(int i, int j) const
- {
- //int flat = j * dd.img.numColumns * 3 + i * 3;
- int flat = j * depth_width * 3 + i * 3;
- return dists[flat + 1];
- }
- /***
- Returns the Z value at (i, j)
- ***/
- float SR300Camera::getZ(int i, int j) const
- {
- //int flat = j * dd.img.numColumns * 3 + i * 3;
- int flat = j * depth_width * 3 + i * 3;
- return dists[flat + 2];
- }
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement