
Coin3D Oculus Rift

Feb 26th, 2014
  1. #include <GL/glew.h>
  2. #include <sstream>
  3. #include <fstream>
  4. #include <cassert>   // assert() is used throughout; include it explicitly
  5. #include <glm/glm.hpp>
  6. #include <glm/gtc/quaternion.hpp>
  7. #include <glm/gtc/matrix_transform.hpp>
  8. #include <glm/gtc/type_ptr.hpp>
  9. #include <glm/gtc/noise.hpp>
  10.  
  11. #include <iostream>
  12. #include <string>
  13. #include <map>
  14. //#include <stdint.h>
  15.  
  16. // Imports by Alexander
  17. #include <Inventor/nodes/SoSeparator.h>
  18. #include <Inventor/nodes/SoMaterial.h>
  19. #include <Inventor/nodes/SoSceneTexture2.h>
  20. #include <Inventor/nodes/SoCube.h>
  21. #include <Inventor/nodes/SoCone.h>
  22. #include <Inventor/nodes/SoSphere.h>
  23. #include <Inventor/nodes/SoCoordinate3.h>
  24. #include <Inventor/nodes/SoComplexity.h>
  25. #include <Inventor/nodes/SoCallback.h>
  26. #include <Inventor/nodes/SoTexture2.h>
  27. #include <Inventor/nodes/SoTextureCoordinate2.h>
  28. #include <Inventor/nodes/SoShaderProgram.h>
  29. #include <Inventor/nodes/SoFragmentShader.h>
  30. #include <Inventor/nodes/SoDirectionalLight.h>
  31. #include <Inventor/nodes/SoVertexShader.h>
  32. #include <Inventor/nodes/SoShaderObject.h>
  33. #include <Inventor/nodes/SoTriangleStripSet.h>
  34. #include <Inventor/nodes/SoPerspectiveCamera.h>
  35. #include <Inventor/nodes/SoTransform.h>
  36. #include <Inventor/nodes/SoShaderParameter.h>
  37. #include <Inventor/nodes/SoOrthographicCamera.h>
  38. #include <Inventor/fields/SoSFVec3f.h>
  39. #include <Inventor/SoSceneManager.h>
  40. #include <Inventor/SoOffscreenRenderer.h>
  41. #include <Inventor/actions/SoGLRenderAction.h>
  42.  
  43. #include <QApplication>
  44. #include <QKeyEvent>
  45.  
  46. #include <QGLShaderProgram>
  47. #include <QGLShader>
  48. #include <QGLWidget>
  49. //#include <qthread.h>
  50. #include "GLprogram.h"
  51.  
  52.  
  53. #ifdef __APPLE__
  54.     #include "CoreFoundation/CFBundle.h"
  55. #endif
  56.  
  57. #include "OVR.h"
  58. #undef new
  59.  
  60. #ifdef WIN32
  61.  
  62.     long millis() {
  63.         static long start = GetTickCount();
  64.         return GetTickCount() - start;
  65.     }
  66.  
  67. #else
  68.  
  69.     #include <sys/time.h>
  70.  
  71.     long millis() {
  72.         timeval time;
  73.         gettimeofday(&time, NULL);
  74.         long millis = (time.tv_sec * 1000) + (time.tv_usec / 1000);
  75.         static long start = millis;
  76.         return millis - start;
  77.     }
  78.  
  79. #endif
  80.  
  81. using namespace std;
  82. using namespace OVR;
  83. using namespace OVR::Util::Render;
  84.  
  85. // Some defines to make calculations below more transparent
  86. #define TRIANGLES_PER_FACE 2
  87. #define VERTICES_PER_TRIANGLE 3
  88. #define VERTICES_PER_EDGE 2
  89. #define FLOATS_PER_VERTEX 3
  90.  
  91. // Cube geometry
  92. #define VERT_COUNT 8
  93. #define FACE_COUNT 6
  94. #define EDGE_COUNT 12
  95.  
  96. #define CUBE_SIZE 0.4f
  97. #define CUBE_P (CUBE_SIZE / 2.0f)
  98. #define CUBE_N (-1.0f * CUBE_P)
  99.  
  100. #define ON 1.0
  101. #define PQ 0.25
  102.  
  103. #define RED 1, 0, 0
  104. #define GREEN 0, 1, 0
  105. #define BLUE 0, 0, 1
  106. #define YELLOW 1, 1, 0
  107. #define CYAN 0, 1, 1
  108. #define MAGENTA 1, 0, 1
  109.  
  110. // How big do we want our renderbuffer
  111. #define FRAMEBUFFER_OBJECT_SCALE 3
  112.  
  113. const glm::vec3 X_AXIS = glm::vec3(1.0f, 0.0f, 0.0f);
  114. const glm::vec3 Y_AXIS = glm::vec3(0.0f, 1.0f, 0.0f);
  115. const glm::vec3 Z_AXIS = glm::vec3(0.0f, 0.0f, 1.0f);
  116. const glm::vec3 CAMERA = glm::vec3(0.0f, 0.0f, 0.8f);
  117. const glm::vec3 ORIGIN = glm::vec3(0.0f, 0.0f, 0.0f);
  118. const glm::vec3 UP = Y_AXIS;
  119.  
  120. // Vertices for a unit cube centered at the origin
  121. const GLfloat CUBE_VERTEX_DATA[VERT_COUNT * FLOATS_PER_VERTEX] = {
  122.     CUBE_N, CUBE_N, CUBE_N, // Vertex 0 position
  123.     CUBE_P, CUBE_N, CUBE_N, // Vertex 1 position
  124.     CUBE_P, CUBE_P, CUBE_N, // Vertex 2 position
  125.     CUBE_N, CUBE_P, CUBE_N, // Vertex 3 position
  126.     CUBE_N, CUBE_N, CUBE_P, // Vertex 4 position
  127.     CUBE_P, CUBE_N, CUBE_P, // Vertex 5 position
  128.     CUBE_P, CUBE_P, CUBE_P, // Vertex 6 position
  129.     CUBE_N, CUBE_P, CUBE_P, // Vertex 7 position
  130. };
  131.  
  132.  
  133. const GLfloat CUBE_FACE_COLORS[] = {
  134.     RED, 1,
  135.     GREEN, 1,
  136.     BLUE, 1,
  137.     YELLOW, 1,
  138.     CYAN, 1,
  139.     MAGENTA, 1,
  140. };
  141.  
  142. // 6 sides * 2 triangles * 3 vertices
  143. const unsigned int CUBE_INDICES[FACE_COUNT * TRIANGLES_PER_FACE * VERTICES_PER_TRIANGLE ] = {
  144.    0, 4, 5, 0, 5, 1, // Face 0
  145.    1, 5, 6, 1, 6, 2, // Face 1
  146.    2, 6, 7, 2, 7, 3, // Face 2
  147.    3, 7, 4, 3, 4, 0, // Face 3
  148.    4, 7, 6, 4, 6, 5, // Face 4
  149.    3, 0, 1, 3, 1, 2  // Face 5
  150. };
  151.  
  152. //
  153. const unsigned int CUBE_WIRE_INDICES[EDGE_COUNT * VERTICES_PER_EDGE ] = {
  154.    0, 1, 1, 2, 2, 3, 3, 0, // square
  155.    4, 5, 5, 6, 6, 7, 7, 4, // facing square
  156.    0, 4, 1, 5, 2, 6, 3, 7, // transverse lines
  157. };
  158.  
  159. const GLfloat QUAD_VERTICES[] = { // x, y, u, v per vertex
  160.     -1, -1, 0, 0,
  161.      1, -1, 1, 0,
  162.      1,  1, 1, 1,
  163.     -1,  1, 0, 1,
  164. };
  165.  
  166. const GLuint QUAD_INDICES[] = {
  167.    2, 0, 3, 0, 1, 2,
  168. };
  169.  
  170.  
  171. #ifdef WIN32
  172.  
  173.     static string loadResource(const string& in) {
  174.         static HMODULE module = GetModuleHandle(NULL);
  175.         HRSRC res = FindResourceA(module, in.c_str(), "TEXTFILE");
  176.         HGLOBAL mem = LoadResource(module, res);
  177.         DWORD size = SizeofResource(module, res);
  178.         LPVOID data = LockResource(mem);
  179.         string result((const char*)data, size);
  180.         FreeResource(mem);
  181.         return result;
  182.     }
  183.  
  184. #else
  185.  
  186.     static string slurp(ifstream& in) {
  187.         stringstream sstr;
  188.         sstr << in.rdbuf();
  189.         string result = sstr.str();
  190.         assert(!result.empty());
  191.         return result;
  192.     }
  193.  
  194.     static string slurpFile(const string & in) {
  195.         ifstream ins(in.c_str());
  196.         assert(ins);
  197.         return slurp(ins);
  198.     }
  199.  
  200.     #ifdef __APPLE__
  201.         static string loadResource(const string& in) {
  202.             static CFBundleRef mainBundle = CFBundleGetMainBundle();
  203.             assert(mainBundle);
  204.  
  205.             CFStringRef stringRef = CFStringCreateWithCString(NULL, in.c_str(), kCFStringEncodingASCII);
  206.             assert(stringRef);
  207.             CFURLRef resourceURL = CFBundleCopyResourceURL(mainBundle, stringRef, NULL, NULL);
  208.             assert(resourceURL);
  209.             char *fileurl = new char[PATH_MAX];
  210.             auto result = CFURLGetFileSystemRepresentation(resourceURL, true, (UInt8*)fileurl, PATH_MAX);
  211.             assert(result);
  212.             return slurpFile(string(fileurl));
  213.         }
  214.  
  215.     #else
  216.         string executableDirectory(".");
  217.  
  218.         static string loadResource(const string& in) {
  219.             return slurpFile(executableDirectory + "/" + in);
  220.         }
  221.  
  222.     #endif // __APPLE__
  223.  
  224. #endif // WIN32
  225.  
  226. // A small class to encapsulate loading of shaders into a GL program
  227. class GLprogram {
  228.     static string getProgramLog(GLuint program) {
  229.         string log;
  230.         GLint infoLen = 0;
  231.         glGetProgramiv(program, GL_INFO_LOG_LENGTH, &infoLen);
  232.  
  233.         if (infoLen > 1) {
  234.             char* infoLog = new char[infoLen];
  235.             glGetProgramInfoLog(program, infoLen, NULL, infoLog);
  236.             log = string(infoLog);
  237.             delete[] infoLog;
  238.         }
  239.         return log;
  240.     }
  241.  
  242.     static string getShaderLog(GLuint shader) {
  243.         string log;
  244.         GLint infoLen = 0;
  245.         glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &infoLen);
  246.  
  247.         if (infoLen > 1) {
  248.             char* infoLog = new char[infoLen];
  249.             glGetShaderInfoLog(shader, infoLen, NULL, infoLog);
  250.             log = string(infoLog);
  251.             delete[] infoLog;
  252.         }
  253.         return log;
  254.     }
  255.  
  256.     static GLuint compileShader(GLuint type, const string shaderSrc) {
  257.         // Create the shader object
  258.         GLuint shader = glCreateShader(type);
  259.         assert(shader != 0);
  260.         const char * srcPtr = shaderSrc.c_str();
  261.         glShaderSource(shader, 1, &srcPtr, NULL);
  262.         glCompileShader(shader);
  263.         GLint compiled;
  264.         glGetShaderiv(shader, GL_COMPILE_STATUS, &compiled);
  265.         if (compiled == 0) {
  266.             string errorLog = getShaderLog(shader);
  267.             cerr << errorLog << endl;
  268.         }
  269.         assert(compiled != 0);
  270.         return shader;
  271.     }
  272.    
  273.     static GLuint linkProgram(GLuint vertexShader, GLuint fragmentShader) {
  274.         GLuint program = glCreateProgram();
  275.         assert(program != 0);
  276.         glAttachShader(program, vertexShader);
  277.         glAttachShader(program, fragmentShader);
  278.         // Link the newProgram
  279.         glLinkProgram(program);
  280.         // Check the link status
  281.         GLint linked;
  282.         glGetProgramiv(program, GL_LINK_STATUS, &linked);
  283.         if (linked == 0) {
  284.             cerr << getProgramLog(program) << endl;
  285.         }
  286.         assert(linked != 0);
  287.         return program;
  288.     }
  289.  
  290.     GLuint vertexShader;
  291.     GLuint fragmentShader;
  292.     GLuint program;
  293.     typedef map<string, GLint> Map;
  294.     Map attributes;
  295.     Map uniforms;
  296.  
  297. public:
  298.     GLprogram() : vertexShader(0), fragmentShader(0), program(0), wasCalled(false) { }
  299.     bool wasCalled;
  300.  
  301.     void use() {
  302.         glUseProgram(program);
  303.     }
  304.  
  305.     void close() {
  306.         if (0 != program) {
  307.             glDeleteProgram(program);
  308.             program = 0;
  309.         }
  310.         if (0 != vertexShader) {
  311.             glDeleteShader(vertexShader);
  312.         }
  313.         if (0 != fragmentShader) {
  314.             glDeleteShader(fragmentShader);
  315.         }
  316.     }
  317.  
  318.     void open(const string & name) {
  319.         // load shaders only once
  320.         if(!wasCalled){
  321.             cout << "open shader " << name << "\n";
  322.             open(name + ".vs", name + ".fs");
  323.             wasCalled = true;
  324.         }
  325.     }
  326.  
  327.     void open(const string & vertexShaderFile, const string & fragmentShaderFile) {
  328.         string source = loadResource(vertexShaderFile);
  329.         vertexShader = compileShader(GL_VERTEX_SHADER, source);
  330.         source = loadResource(fragmentShaderFile);
  331.         fragmentShader = compileShader(GL_FRAGMENT_SHADER, source);
  332.         program = linkProgram(vertexShader, fragmentShader);
  333.         attributes.clear();
  334.         static GLchar GL_OUTPUT_BUFFER[8192];
  335.         int numVars;
  336.         glGetProgramiv(program, GL_ACTIVE_ATTRIBUTES, &numVars);
  337.         for (int i = 0; i < numVars; ++i) {
  338.             GLsizei bufSize = 8192;
  339.             GLsizei size; GLenum type;
  340.             glGetActiveAttrib(program, i, bufSize, &bufSize, &size, &type, GL_OUTPUT_BUFFER);
  341.             string name = string(GL_OUTPUT_BUFFER, bufSize);
  342.             GLint location = glGetAttribLocation(program, name.c_str());
  343.             attributes[name] = location;
  344.             cout << "Found attribute " << name << " at location " << location << endl;
  345.         }
  346.  
  347.         uniforms.clear();
  348.         glGetProgramiv(program, GL_ACTIVE_UNIFORMS, &numVars);
  349.         for (int i = 0; i < numVars; ++i) {
  350.             GLsizei bufSize = 8192;
  351.             GLsizei size;
  352.             GLenum type;
  353.             glGetActiveUniform(program, i, bufSize, &bufSize, &size, &type, GL_OUTPUT_BUFFER);
  354.             string name = string(GL_OUTPUT_BUFFER, bufSize);
  355.             GLint location = glGetUniformLocation(program, name.c_str());
  356.             uniforms[name] = location;
  357.             cout << "Found uniform " << name << " at location " << location << endl;
  358.         }
  359.     }
  360.  
  361.     GLint getUniformLocation(const string & uniform) const {
  362.         auto itr = uniforms.find(uniform);
  363.         if (uniforms.end() != itr) {
  364.             return itr->second;
  365.         }
  366.         return -1;
  367.     }
  368.  
  369.     GLint getAttributeLocation(const string & attribute) const {
  370.         Map::const_iterator itr = attributes.find(attribute);
  371.         if (attributes.end() != itr) {
  372.             return itr->second;
  373.         }
  374.         return -1;
  375.     }
  376.  
  377.     void uniformMat4(const string & uniform, const glm::mat4 & mat) const {
  378.         glUniformMatrix4fv(getUniformLocation(uniform), 1, GL_FALSE, glm::value_ptr(mat));
  379.     }
  380.  
  381.     void uniform4f(const string & uniform, float a, float b, float c, float d) const{
  382.         glUniform4f(getUniformLocation(uniform), a, b, c, d);
  383.     }
  384.  
  385.     void uniform4f(const string & uniform, const float * fv) const {
  386.         uniform4f(uniform, fv[0], fv[1], fv[2], fv[3]);
  387.     }
  388.  
  389.     void uniform2f(const string & uniform, float a, float b) const {
  390.         glUniform2f(getUniformLocation(uniform), a, b);
  391.     }
  392.  
  393.     void uniform2f(const string & uniform, const glm::vec2 & vec) const {
  394.         uniform2f(uniform, vec.x, vec.y);
  395.     }
  396. };
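// A minimal usage sketch for GLprogram (illustration only, not part of the original
// program; the real calls are in Example00::loadShaders() and renderScene() below).
// It assumes a shader pair "Simple.vs" / "Simple.fs" reachable via loadResource():
//
//     GLprogram prog;
//     prog.open("Simple");                              // compiles and links Simple.vs + Simple.fs
//     prog.use();                                       // glUseProgram(program)
//     prog.uniformMat4("Projection", glm::mat4(1.0f));  // only useful if the shader declares "Projection"
//     glUseProgram(0);
//     prog.close();                                     // deletes the program and shaders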
  397. //
  398. void checkGlError() {
  399.     GLenum error = glGetError();
  400.     if (error != GL_NO_ERROR) {
  401.         switch(error){
  402.             case GL_INVALID_ENUM:
  403.                 qDebug() << "OpenGL Error: GL_INVALID_ENUM";
  404.                 break;
  405.             case GL_INVALID_VALUE:
  406.                 qDebug() << "OpenGL Error: GL_INVALID_VALUE";
  407.                 break;
  408.             case GL_INVALID_OPERATION:
  409.                 qDebug() << "OpenGL Error: GL_INVALID_OPERATION";
  410.                 break;
  411.             case GL_INVALID_FRAMEBUFFER_OPERATION:
  412.                 qDebug() << "OpenGL Error: GL_INVALID_FRAMEBUFFER_OPERATION";
  413.                 break;
  414.             case GL_OUT_OF_MEMORY:
  415.                 qDebug() << "OpenGL Error: GL_OUT_OF_MEMORY";
  416.                 break;
  417.             case GL_STACK_UNDERFLOW:
  418.                 qDebug() << "OpenGL Error: GL_STACK_UNDERFLOW";
  419.                 break;
  420.             case GL_STACK_OVERFLOW:
  421.                 qDebug() << "OpenGL Error GL_STACK_OVERFLOW";
  422.                 break;
  423.         }
  424.     }
  425.     //assert(error == 0);
  426. }
  427.  
  428. const StereoEye EYES[2] = { StereoEye_Left, StereoEye_Right };
  429.  
  430. //
  431. class Example00 : public QGLWidget {
  432.     public slots:
  433.  
  434. protected:
  435.     enum Mode {
  436.         MONO, STEREO, STEREO_DISTORT
  437.     };
  438.  
  439.     glm::mat4 projection;
  440.     glm::mat4 modelview;
  441.     Ptr<SensorDevice> ovrSensor;
  442.     SensorFusion sensorFusion;
  443.     StereoConfig stereoConfig;
  444.  
  445.     // Provides the resolution and location of the Rift
  446.     HMDInfo hmdInfo;
  447.     // Calculated width and height of the per-eye rendering area used
  448.     int eyeWidth, eyeHeight;
  449.     // Calculated width and height of the frame buffer object used to contain
  450.     // intermediate results for the multipass render
  451.     int fboWidth, fboHeight;
  452.  
  453.     Mode renderMode;
  454.     bool useTracker;
  455.     long elapsed;
  456.  
  457.     GLuint cubeVertexBuffer;
  458.     GLuint cubeIndexBuffer;
  459.     GLuint cubeWireIndexBuffer;
  460.  
  461.     GLuint quadVertexBuffer;
  462.     GLuint quadIndexBuffer;
  463.  
  464.     GLprogram renderProgram;
  465.     GLprogram textureProgram;
  466.     GLprogram distortProgram;
  467.  
  468.     GLuint frameBuffer;
  469.     GLuint frameBufferTexture;
  470.     GLuint depthBuffer;
  471.  
  472.     // additions by Alexander
  473.     SoPerspectiveCamera *m_perspCam;
  474.     SoSeparator *m_root;
  475.    
  476.     GLuint offscreenBufferTexture;
  477.     GLuint backgroundimage;
  478.     GLuint backgroundSceneImage;
  479.     GLuint backgroundBufferImage;
  480.  
  481.     GLuint testFrameBuffer;
  482.     GLprogram ipoProgram;
  483.  
  484.     QGLFramebufferObject *fbo;
  485.     SoSceneManager *sceneManager;
  486.  
  487.     SoSeparator *root;
  488.     SbViewportRegion vpRegion;
  489.     SoPerspectiveCamera *perscam;
  490.  
  491.     void initializeGL(){
  492.         qDebug() << "initializeGL";
  493.         char *ver = (char *) glGetString(GL_VERSION);
  494.         qDebug() << "GL Version" << ver;
  495.  
  496.         // Since QGLFunctions is bugged, use glew for openGL access
  497.         glewInit();
  498.  
  499.         initOpenGL();
  500.         loadShaders();
  501.  
  502.         checkGlError();
  503.         modelview = glm::lookAt(CAMERA, ORIGIN, UP);
  504.         projection = glm::perspective(60.0f, (float)hmdInfo.HResolution / (float)hmdInfo.VResolution, 0.1f, 100.f);
  505.  
  506.         // Use Window Resolution for correct distortion
  507.         resize(hmdInfo.HResolution, hmdInfo.VResolution);
  508.         move(0,0);
  509.         setWindowTitle("Oculus QGLWidget");
  510.  
  511.         makeCurrent();
  512.         loadCoinScene();
  513.     }
  514.     //
  515.     void resizeGL(int w, int h){
  516.         qDebug() << "resizeGL";
  517.     }
  518.     //
  519.     void paintGL(){
  520.         draw();
  521.         update();
  522.     }
  523.     //
  524.     void loadCoinScene(){
  525.         qDebug() << "loadCoinScene";
  526.  
  527.         // Init Coin
  528.         SoDB::init();
  529.         // The root node
  530.         root = new SoSeparator;
  531.         root->ref();
  532.  
  533.         // It is mandatory to have at least one light for the offscreen renderer
  534.         SoDirectionalLight * light = new SoDirectionalLight;
  535.         root->addChild(light);
  536.         light->direction = SbVec3f(0, -0.5, -0.3);
  537.  
  538.         vpRegion.setViewportPixels(0, 0, hmdInfo.HResolution, hmdInfo.VResolution);
  539.            
  540.         perscam = new SoPerspectiveCamera();
  541.         root->addChild(perscam);
  542.  
  543.         SoMaterial * greenmaterial = new SoMaterial;
  544.         greenmaterial->diffuseColor.setValue(0, 0.5, 0.5);
  545.         SoCube * cube = new SoCube;
  546.         root->addChild(greenmaterial);
  547.         root->addChild(cube);
  548.            
  549.         // same as projection = glm::perspective(60.0f,
  550.         //              (float)hmdInfo.HResolution / (float)hmdInfo.VResolution, 0.1f, 100.f);
  551.         perscam->nearDistance = 0.1;
  552.         perscam->farDistance = 100.0f;
  553.         perscam->heightAngle = glm::radians(60.0f);
  554.         perscam->aspectRatio = (float)hmdInfo.HResolution / (float)hmdInfo.VResolution;
  555.         perscam->viewportMapping = SoPerspectiveCamera::LEAVE_ALONE;
  556.  
  557.         sceneManager = new SoSceneManager();
  558.         sceneManager->setSceneGraph(root);
  559.     }
  560. public:
  561.     Example00(QWidget *parent) : QGLWidget(parent), renderMode(MONO), useTracker(false), elapsed(0),
  562.         cubeVertexBuffer(0), cubeIndexBuffer(0), cubeWireIndexBuffer(0), quadVertexBuffer(0), quadIndexBuffer(0),
  563.         frameBuffer(0), frameBufferTexture(0), depthBuffer(0) {
  564.  
  565.         qDebug() << "Example00(QWidget *parent)";
  566.         initOculus();
  567.     }
  568.  
  569.     Example00() : renderMode(MONO), useTracker(false), elapsed(0),
  570.         cubeVertexBuffer(0), cubeIndexBuffer(0), cubeWireIndexBuffer(0), quadVertexBuffer(0), quadIndexBuffer(0),
  571.         frameBuffer(0), frameBufferTexture(0), depthBuffer(0) {
  572.  
  573.         qDebug() << "Example00()";
  574.         initOculus();
  575.     }
  576.     //
  577.     void initOculus(){
  578.         // do the master initialization for the Oculus VR SDK
  579.         OVR::System::Init();
  580.  
  581.         sensorFusion.SetGravityEnabled(false);
  582.         sensorFusion.SetPredictionEnabled(false);
  583.         sensorFusion.SetYawCorrectionEnabled(false);
  584.  
  585.         hmdInfo.HResolution = 1280;
  586.         hmdInfo.VResolution = 800;
  587.         hmdInfo.HScreenSize = 0.149759993f;
  588.         hmdInfo.VScreenSize = 0.0935999975f;
  589.         hmdInfo.VScreenCenter = 0.0467999987f;
  590.         hmdInfo.EyeToScreenDistance    = 0.0410000011f;
  591.         hmdInfo.LensSeparationDistance = 0.0635000020f;
  592.         hmdInfo.InterpupillaryDistance = 0.0640000030f;
  593.         hmdInfo.DistortionK[0] = 1.00000000f;
  594.         hmdInfo.DistortionK[1] = 0.219999999f;
  595.         hmdInfo.DistortionK[2] = 0.239999995f;
  596.         hmdInfo.DistortionK[3] = 0.000000000f;
  597.         hmdInfo.ChromaAbCorrection[0] = 0.995999992f;
  598.         hmdInfo.ChromaAbCorrection[1] = -0.00400000019f;
  599.         hmdInfo.ChromaAbCorrection[2] = 1.01400006f;
  600.         hmdInfo.ChromaAbCorrection[3] = 0.000000000f;
  601.         hmdInfo.DesktopX = 0;
  602.         hmdInfo.DesktopY = 0;
  603.  
  604.  
  605.         ///////////////////////////////////////////////////////////////////////////
  606.         // Initialize Oculus VR SDK and hardware
  607.         Ptr<DeviceManager> ovrManager = *DeviceManager::Create();
  608.         if (ovrManager) {
  609.             ovrSensor = *ovrManager->EnumerateDevices<SensorDevice>().CreateDevice();
  610.             if (ovrSensor) {
  611.                 useTracker = true;
  612.                 sensorFusion.AttachToSensor(ovrSensor);
  613.             }
  614.             Ptr<HMDDevice> ovrHmd = *ovrManager->EnumerateDevices<HMDDevice>().CreateDevice();
  615.             if (ovrHmd) {
  616.                 ovrHmd->GetDeviceInfo(&hmdInfo);
  617.             }
  618.             // The HMDInfo structure contains everything we need for now, so no
  619.             // need to keep the device handle around
  620.             ovrHmd.Clear();
  621.         }
  622.         // The device manager is reference counted and will be released automatically
  623.         // when our sensorObject is destroyed.
  624.         ovrManager.Clear();
  625.         stereoConfig.SetHMDInfo(hmdInfo);
  626.         stereoConfig.SetStereoMode(StereoMode::Stereo_None);
  627.     }
  628.     //
  629.     void initOpenGL(){
  630.         // Enable the zbuffer test
  631.         glEnable(GL_DEPTH_TEST);
  632.         glLineWidth(2.0f);
  633.         glHint(GL_LINE_SMOOTH_HINT, GL_NICEST);
  634.         glClearColor(0.1f, 0.1f, 0.1f, 1.0f);
  635.  
  636.         glGenBuffers(1, &cubeVertexBuffer);
  637.         glBindBuffer(GL_ARRAY_BUFFER, cubeVertexBuffer);
  638.         glBufferData(GL_ARRAY_BUFFER,
  639.         sizeof(GLfloat) * VERT_COUNT * FLOATS_PER_VERTEX, CUBE_VERTEX_DATA, GL_STATIC_DRAW);
  640.         glBindBuffer(GL_ARRAY_BUFFER, 0);
  641.  
  642.         glGenBuffers(1, &cubeIndexBuffer);
  643.         glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, cubeIndexBuffer);
  644.         glBufferData(GL_ELEMENT_ARRAY_BUFFER,
  645.                 sizeof(GLuint) * FACE_COUNT * TRIANGLES_PER_FACE * VERTICES_PER_TRIANGLE,
  646.                 CUBE_INDICES, GL_STATIC_DRAW);
  647.  
  648.         glGenBuffers(1, &cubeWireIndexBuffer);
  649.         glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, cubeWireIndexBuffer);
  650.         glBufferData(GL_ELEMENT_ARRAY_BUFFER,
  651.                 sizeof(GLuint) * EDGE_COUNT * VERTICES_PER_EDGE,
  652.                 CUBE_WIRE_INDICES, GL_STATIC_DRAW);
  653.         glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
  654.  
  655.         glGenBuffers(1, &quadVertexBuffer);
  656.         glGenBuffers(1, &quadIndexBuffer);
  657.         glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, quadIndexBuffer);
  658.         glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(GLuint) * 6, QUAD_INDICES, GL_STATIC_DRAW);
  659.  
  660.  
  661.         eyeWidth = hmdInfo.HResolution / 2;
  662.         eyeHeight = hmdInfo.VResolution;
  663.         fboWidth = eyeWidth * FRAMEBUFFER_OBJECT_SCALE;
  664.         fboHeight = eyeHeight * FRAMEBUFFER_OBJECT_SCALE;
  665.  
  666.         glGenFramebuffers(1, &frameBuffer);
  667.         glBindFramebuffer(GL_FRAMEBUFFER, frameBuffer);
  668.  
  669.         glGenTextures(1, &frameBufferTexture);
  670.         glBindTexture(GL_TEXTURE_2D, frameBufferTexture);
  671.         glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
  672.         glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
  673.         glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
  674.         glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
  675.         // Allocate space for the texture
  676.         glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, fboWidth, fboHeight, 0, GL_RGBA, GL_UNSIGNED_BYTE, 0);
  677.  
  678.         glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, frameBufferTexture, 0);
  679.         glGenRenderbuffers(1, &depthBuffer);
  680.         glBindRenderbuffer(GL_RENDERBUFFER, depthBuffer);
  681.         glRenderbufferStorage(GL_RENDERBUFFER, GL_DEPTH_COMPONENT16, fboWidth, fboHeight);
  682.         glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_RENDERBUFFER, depthBuffer);
  683.         glEnable(GL_TEXTURE_2D);
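        // Optional sanity check (a sketch added here, not in the original code): before
        // unbinding, verify that the color texture and depth renderbuffer form a complete
        // framebuffer.
        //
        //     GLenum fboStatus = glCheckFramebufferStatus(GL_FRAMEBUFFER);
        //     if (fboStatus != GL_FRAMEBUFFER_COMPLETE) {
        //         qDebug() << "Framebuffer incomplete, status:" << fboStatus;
        //     }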
  684.         glBindFramebuffer(GL_FRAMEBUFFER, 0);
  685.     }
  686.     // Create the rendering shaders
  687.     void loadShaders(){
  688.         renderProgram.open("Simple");
  689.         textureProgram.open("Texture");
  690.         distortProgram.open("Distort");
  691.     }
  692.     //
  693.     virtual ~Example00() {
  694.         sensorFusion.AttachToSensor(nullptr);
  695.         ovrSensor.Clear();
  696.         OVR::System::Destroy();
  697.     }
  698.     //
  699.     void keyPressEvent( QKeyEvent * event ){
  700.         //QQuaternion absRot = Iv3dUtils::toQQuaternion(m_perspCam->orientation.getValue());
  701.        
  702.         switch (event->key()){
  703.             case Qt::Key_P:
  704.                 renderMode = static_cast<Mode>((renderMode + 1) % 3);
  705.                 if (renderMode == MONO) {
  706.                     projection = glm::perspective(60.0f,
  707.                             (float)hmdInfo.HResolution / (float)hmdInfo.VResolution, 0.1f, 100.f);
  708.  
  709.                     perscam->heightAngle = glm::radians(60.0f);
  710.                     perscam->aspectRatio = (float)hmdInfo.HResolution / (float)hmdInfo.VResolution;
  711.  
  712.                     stereoConfig.SetStereoMode(StereoMode::Stereo_None);
  713.                 } else if (renderMode == STEREO) {
  714.                     projection = glm::perspective(60.0f,
  715.                             (float)hmdInfo.HResolution / 2.0f / (float)hmdInfo.VResolution, 0.1f, 100.f);
  716.  
  717.                     perscam->heightAngle = glm::radians(60.0f);
  718.                     perscam->aspectRatio = (float)hmdInfo.HResolution / 2.0f / (float)hmdInfo.VResolution;
  719.  
  720.                     stereoConfig.SetStereoMode(StereoMode::Stereo_LeftRight_Multipass);
  721.                 } else if (renderMode == STEREO_DISTORT) {
  722.                     projection = glm::perspective(stereoConfig.GetYFOVDegrees(),
  723.                             (float)hmdInfo.HResolution / 2.0f / (float)hmdInfo.VResolution, 0.1f, 100.f);
  724.  
  725.                     perscam->heightAngle = stereoConfig.GetYFOVRadians();
  726.                     perscam->aspectRatio = (float)hmdInfo.HResolution / 2.0f / (float)hmdInfo.VResolution;
  727.  
  728.                     stereoConfig.SetStereoMode(StereoMode::Stereo_LeftRight_Multipass);
  729.                 }
  730.                 break;
  731.         }
  732.     QWidget::keyPressEvent(event);
  733.     }
  734.     //
  735.     virtual void update() {
  736.         long now = millis();
  737.         if (useTracker) {
  738.             // For some reason building the quaternion directly from the OVR
  739.             // x,y,z,w values does not work.  So instead we convert it into
  740.             // euler angles and construct our glm::quaternion from those
  741.  
  742.             // Fetch the pitch roll and yaw out of the sensorFusion device
  743.             glm::vec3 eulerAngles;
  744.             sensorFusion.GetOrientation().GetEulerAngles<Axis_X, Axis_Y, Axis_Z, Rotate_CW, Handed_R>(
  745.                 &eulerAngles.x, &eulerAngles.y, &eulerAngles.z);
  746.  
  747.             // Now convert it into a GLM quaternion.
  748.             glm::quat orientation = glm::quat(eulerAngles);
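            // For reference (added note, not original code): the direct construction would be
            //     glm::quat direct(q.w, q.x, q.y, q.z);   // glm::quat takes w first
            // for an OVR::Quatf q = sensorFusion.GetOrientation(). A common cause of the
            // "does not work" mentioned above is passing the components in x, y, z, w order
            // to a constructor that expects w first.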
  749.  
  750.             // Most applications would take a basic camera position and apply the
  751.             // orientation transform to it in this way:
  752.             // modelview = glm::mat4_cast(orientation) * glm::lookAt(CAMERA, ORIGIN, UP);
  753.  
  754.             // However for this demonstration we want the cube to remain
  755.             // centered in the viewport, and orbit our view around it.  This
  756.             // serves two purposes.
  757.             //
  758.             // First, it's not possible to see a blank screen in the event
  759.             // the HMD is oriented to point away from the origin of the scene.
  760.             //
  761.             // Second, a scene that has no points of reference other than a
  762.             // single small object can be disorienting, leaving the user
  763.             // feeling lost in a void.  Having a fixed object in the center
  764.             // of the screen that you appear to be moving around should
  765.             // provide less immersion, which in this instance is better
  766.             modelview = glm::lookAt(CAMERA, ORIGIN, UP) * glm::mat4_cast(orientation);
  767.  
  768.             //perscam->orientation = toSbRotation(getOrientation());    // experimental
  769.         } else {
  770.             // In the absence of head tracker information, we want to slowly
  771.             // rotate the cube so that the animation of the scene is apparent
  772.             static const float Y_ROTATION_RATE = 0.01f;
  773.             static const float Z_ROTATION_RATE = 0.05f;
  774.             modelview = glm::lookAt(CAMERA, ORIGIN, UP);
  775.             //modelview = glm::rotate(modelview, elapsed * Y_ROTATION_RATE, Y_AXIS);
  776.             //modelview = glm::rotate(modelview, elapsed * Z_ROTATION_RATE, Z_AXIS);
  777.         }
  778.         elapsed = now;
  779.  
  780.         QGLWidget::update();
  781.     }
  782.     //
  783.     virtual void draw() {
  784.         //perscam->viewAll(root, vpRegion); // experimental
  785.  
  786.         if (renderMode == MONO) {
  787.             // If we're not working stereo, we're just going to render the
  788.             // scene once, from a single position, directly to the back buffer
  789.             vpRegion.setViewportPixels(0, 0, hmdInfo.HResolution, hmdInfo.VResolution);
  790.  
  791.             glViewport(0, 0, hmdInfo.HResolution, hmdInfo.VResolution);
  792.             renderScene(glm::vec3(), glm::vec3(), stereoConfig.GetEyeRenderParams(StereoEye_Center));
  793.         } else {
  794.             // If we get here, we're rendering in stereo, so we have to render our output twice
  795.             // We have to explicitly clear the screen here; the clear command doesn't respect the viewport
  796.             // and the clear command inside renderScene will only target the active framebuffer object.
  797.             glClearColor(0, 1, 0, 1);
  798.             glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
  799.             for (int i = 0; i < 2; ++i) {
  800.                 StereoEye eye = EYES[i];
  801.                 glBindTexture(GL_TEXTURE_2D, 0);
  802.  
  803.                 // Compute the modelview and projection matrices for the rendered scene based on the eye and
  804.                 // whether or not we're doing side by side or rift rendering
  805.                 glm::vec3 eyeProjectionOffset;
  806.                 if (renderMode == STEREO_DISTORT) {
  807.                     eyeProjectionOffset = glm::vec3(-stereoConfig.GetProjectionCenterOffset() / 2.0f, 0, 0);
  808.                 }
  809.                 glm::vec3 eyeModelviewOffset = glm::vec3(-stereoConfig.GetIPD() / 2.0f, 0, 0);
  810.                 if (eye == StereoEye_Left) {
  811.                     eyeModelviewOffset *= -1;
  812.                     eyeProjectionOffset *= -1;
  813.                 }
  814.                
  815.         vpRegion.setViewportPixels(0, 0, fboWidth, fboHeight);
  816.  
  817.                 glViewport(0, 0, fboWidth, fboHeight);
  818.                 glBindFramebuffer(GL_FRAMEBUFFER, frameBuffer);
  819.                 renderScene(eyeProjectionOffset, eyeModelviewOffset, stereoConfig.GetEyeRenderParams(eye));
  820.                 glBindFramebuffer(GL_FRAMEBUFFER, 0);
  821.  
  822.         // Setup the viewport for the eye to which we're rendering
  823.                 glViewport(1 + (eye == StereoEye_Left ? 0 : eyeWidth), 1, eyeWidth - 2, eyeHeight - 2);
  824.                 GLprogram & program = (renderMode == STEREO_DISTORT) ? distortProgram : textureProgram;
  825.                 program.use();
  826.                 GLint positionLocation = program.getAttributeLocation("Position");
  827.                 assert(positionLocation > -1);
  828.                 GLint texCoordLocation = program.getAttributeLocation("TexCoord");
  829.                 assert(texCoordLocation > -1);
  830.  
  831.                 float texL = 0, texR = 1, texT = 1, texB = 0;
  832.                 if (renderMode == STEREO_DISTORT) {
  833.                     // Physical width of the viewport
  834.                     static float eyeScreenWidth = hmdInfo.HScreenSize / 2.0f;
  835.                     // The viewport goes from -1,1.  We want to get the offset
  836.                     // of the lens from the center of the viewport, so we only
  837.                     // want to look at the distance from 0, 1, so we divide in
  838.                     // half again
  839.                     static float halfEyeScreenWidth = eyeScreenWidth / 2.0f;
  840.  
  841.                     // The distance from the center of the display panel (NOT
  842.                     // the center of the viewport) to the lens axis
  843.                     static float lensDistanceFromScreenCenter = hmdInfo.LensSeparationDistance / 2.0f;
  844.  
  845.                     // Now we want to turn the measurement from
  846.                     // meters into the range 0, 1
  847.                     static float lensDistanceFromViewportEdge = lensDistanceFromScreenCenter / halfEyeScreenWidth;
  848.  
  849.                     // Finally, we want the distance from the center, not the
  850.                     // distance from the edge, so subtract the value from 1
  851.                     static float lensOffset = 1.0f - lensDistanceFromViewportEdge;
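                    // Worked example (added note) with the default DK1 values set in initOculus():
                    // eyeScreenWidth = 0.14976 / 2 = 0.07488, halfEyeScreenWidth = 0.03744,
                    // lensDistanceFromScreenCenter = 0.0635 / 2 = 0.03175,
                    // lensDistanceFromViewportEdge = 0.03175 / 0.03744 ~= 0.848,
                    // so lensOffset ~= 0.152, i.e. the lens axis sits about 15% of the
                    // half-viewport width away from the viewport center.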
  852.                     static glm::vec2 aspect(1.0, (float)eyeWidth / (float)eyeHeight);
  853.  
  854.                     glm::vec2 lensCenter(lensOffset, 0);
  855.  
  856.                     // Texture coordinates need to be in lens-space for the
  857.                     // distort shader
  858.                     texL = -1 - lensOffset;
  859.                     texR = 1 - lensOffset;
  860.                     texT = 1 / aspect.y;
  861.                     texB = -1 / aspect.y;
  862.                     // Flip the values for the right eye
  863.                     if (eye != StereoEye_Left) {
  864.                         swap(texL, texR);
  865.                         texL *= -1;
  866.                         texR *= -1;
  867.                         lensCenter *= -1;
  868.                     }
  869.  
  870.                     static glm::vec2 distortionScale(1.0f / stereoConfig.GetDistortionScale(),
  871.                         1.0f / stereoConfig.GetDistortionScale());
  872.                     program.uniform2f("LensCenter", lensCenter);
  873.                     program.uniform2f("Aspect", aspect);
  874.                     program.uniform2f("DistortionScale", distortionScale);
  875.                     program.uniform4f("K", hmdInfo.DistortionK);
  876.                 }
  877.  
  878.                 // Draw the quad vertices
  879.                 const GLfloat quadVertices[] = {
  880.                     -1, -1, texL, texB,
  881.                      1, -1, texR, texB,
  882.                      1,  1, texR, texT,
  883.                     -1,  1, texL, texT,
  884.                 };
  885.  
  886.                 glBindTexture(GL_TEXTURE_2D, frameBufferTexture);
  887.                 glBindBuffer(GL_ARRAY_BUFFER, quadVertexBuffer);
  888.                 glBufferData(GL_ARRAY_BUFFER, sizeof(GLfloat) * 2 * 2 * 4, quadVertices, GL_DYNAMIC_DRAW);
  889.  
  890.                 int stride = sizeof(GLfloat) * 2 * 2;
  891.                 glEnableVertexAttribArray(positionLocation);
  892.                 glVertexAttribPointer(positionLocation, 2, GL_FLOAT, GL_FALSE, stride, 0);
  893.                 glEnableVertexAttribArray(texCoordLocation);
  894.                 glVertexAttribPointer(texCoordLocation, 2, GL_FLOAT, GL_FALSE, stride, (GLvoid*)(sizeof(GLfloat) * 2));
  895.  
  896.                 glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, quadIndexBuffer);
  897.                 glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, (GLvoid*)0);
  898.                 glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
  899.  
  900.                 glBindBuffer(GL_ARRAY_BUFFER, 0);
  901.             } // for
  902.         } // if
  903.     }
  904.     //
  905.     virtual void renderScene(const glm::vec3 & projectionOffset, const glm::vec3 & modelviewOffset, const StereoEyeParams eyeParam) {
  906.         glm::mat4 sceneProjection = glm::translate(glm::mat4(), projectionOffset) * projection;
  907.         glm::mat4 sceneModelview = glm::translate(glm::mat4(), modelviewOffset) * modelview;
  908.     glm::mat4 sceneProjModelProduct = sceneProjection * sceneModelview;
  909.  
  910.         // Clear the buffer
  911.         glClearColor(0.1f, 0.1f, 0.1f, 1.0f);
  912.         glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
  913.  
  914.         // Configure the GL pipeline for rendering our geometry
  915.         renderProgram.use();
  916.  
  917.         // Load the projection and modelview matrices into the program
  918.         renderProgram.uniformMat4("Projection", sceneProjection);
  919.         renderProgram.uniformMat4("ModelView", sceneModelview);
  920.        
  921.         // Load up our cube geometry (vertices and indices)
  922.         glBindBuffer(GL_ARRAY_BUFFER, cubeVertexBuffer);
  923.         glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, cubeIndexBuffer);
  924.  
  925.         // Bind the vertex data to the program
  926.         GLint positionLocation = renderProgram.getAttributeLocation("Position");
  927.         GLint colorLocation = renderProgram.getUniformLocation("Color");
  928.        
  929.         glEnableVertexAttribArray(positionLocation);
  930.         glVertexAttribPointer(positionLocation, 3, GL_FLOAT, GL_FALSE, 12, (GLvoid*)0);
  931.    
  932.         // Draw the cube faces, two calls for each face in order to set the color and then draw the geometry
  933.         for (uintptr_t i = 0; i < FACE_COUNT; ++i) {
  934.             renderProgram.uniform4f("Color", CUBE_FACE_COLORS + (i * 4));
  935.             glDrawElements(GL_TRIANGLES, TRIANGLES_PER_FACE * VERTICES_PER_TRIANGLE, GL_UNSIGNED_INT, (void*)(i * 6 * 4));
  936.         }
  937.  
  938.         // Now scale the modelview matrix slightly, so we can draw the cube outline
  939.         //glm::mat4 scaledCamera = glm::scale(sceneModelview, glm::vec3(1.01f));
  940.         //renderProgram.uniformMat4("ModelView", scaledCamera);
  941.  
  942.         // Drawing a white wireframe around the cube
  943.         //glUniform4f(colorLocation, 1, 1, 1, 1);
  944.  
  945.         /*glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, cubeWireIndexBuffer);
  946.         glDrawElements(GL_LINES, EDGE_COUNT * VERTICES_PER_EDGE, GL_UNSIGNED_INT, (void*)0);*/
  947.         //glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
  948.  
  949.         glBindBuffer(GL_ARRAY_BUFFER, 0);
  950.  
  951.     glUseProgram(0);
  952.  
  953.  
  954.     ///////////////////////////////////////////////////////////////////////////////////////////
  955.     //perscam->viewAll(root, vpRegion);
  956.  
  957.     sceneManager->setViewportRegion(vpRegion);
  958.     sceneManager->render(false, false);
  959.  
  960.     SbMatrix SbSceneProjModelView = toSbMatrix(sceneProjModelProduct);
  961.  
  962.     SbVec3f translation;
  963.     SbRotation rotation;
  964.     SbVec3f scaleFactor;
  965.     SbRotation scaleOrientation;
  966.     SbSceneProjModelView.getTransform(translation, rotation, scaleFactor, scaleOrientation);
  967.  
  968.     perscam->position = translation;
  969.     perscam->orientation = rotation;
  970.     }
  971.    
  972.     QQuaternion getOrientation() const{
  973.         // Create identity quaternion (QQuaternion's constructor takes the scalar w first)
  974.         QQuaternion osgQuat(1.0f, 0.0f, 0.0f, 0.0f);
  975.  
  976.         if (sensorFusion.IsAttachedToSensor()) {
  977.             OVR::Quatf quat = sensorFusion.GetOrientation();
  978.             osgQuat = QQuaternion(quat.w, quat.x, quat.y, quat.z);
  979.         }
  980.  
  981.         return osgQuat;
  982.     }
  983.     SbRotation toSbRotation(QQuaternion &q){
  984.         return SbRotation(q.x(), q.y(), q.z(), q.scalar());
  985.     }
  986.     SbVec3f toSbVec3f(const QVector3D &v ){
  987.         return SbVec3f(v.x(), v.y(), v.z());
  988.     }
  989.     SbVec3f toSbVec3f(const glm::vec3 &v ){
  990.         return SbVec3f(v.x, v.y, v.z);
  991.     }
  992.     QVector3D toQVector3D(const glm::vec3 &v ){
  993.         return QVector3D(v.x, v.y, v.z);
  994.     }
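    // Note on the conversion below (added comment): glm::mat4 is column-major and uses
    // column vectors, while Coin's SbMatrix uses row vectors, so feeding each glm column
    // into an SbMatrix row effectively transposes the matrix, which is exactly the
    // conversion needed between the two conventions.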
  995.     SbMatrix toSbMatrix(const glm::mat4 &m){
  996.         return SbMatrix(
  997.             m[0][0], m[0][1], m[0][2], m[0][3],
  998.             m[1][0], m[1][1], m[1][2], m[1][3],
  999.             m[2][0], m[2][1], m[2][2], m[2][3],
  1000.             m[3][0], m[3][1], m[3][2], m[3][3]
  1001.         );
  1002.     }
  1003. };
  1004.  
  1005.  
  1006. int main(int argc, char ** argv) {
  1007.     QApplication app(argc, argv);
  1008.  
  1009.     Example00 exmpl;
  1010.  
  1011.     // widget
  1012.     exmpl.show();
  1013.     return app.exec();
  1014. }