#include <stdio.h>
#include <math.h>
#include "aruco.h"
#include "cvdrawingutils.h"
#include <fstream>
#include <iostream>
#include <stack>
#include <queue>    // std::queue, used for pose smoothing
#include <chrono>   // std::chrono, used by TimerAvrg
#include <cstring>  // memcpy
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/calib3d/calib3d.hpp>

#include <sstream>
#include <string>
#include <stdexcept>

#include <glad/glad.h>
#include <GLFW/glfw3.h>

#include <glm/glm.hpp>
#include <glm/gtc/matrix_transform.hpp>
#include <glm/gtc/type_ptr.hpp>
#include <glm/gtx/string_cast.hpp>
#include <glm/gtx/transform.hpp>
#include <glm/gtc/constants.hpp>

#include "shader.h"
#include "opengl_tools.h"

void framebuffer_size_callback(GLFWwindow* window, int width, int height);
void processInput(GLFWwindow *window);

// settings
const unsigned int SCR_WIDTH = 1280;
const unsigned int SCR_HEIGHT = 720;

#ifdef _WIN32
std::string BASE_PATH = "C:/Users/Elnidas/Desktop/RV/rv_practica_tracking_alumno/";
#else
std::string BASE_PATH = "/Users/dancasas/Work/dev/realidad_virtual_2020/rv_practica_tracking/";
#endif

// aruco
float f = 0.0;

aruco::MarkerDetector MDetector;
cv::VideoCapture TheVideoCapturer;
std::vector<aruco::Marker> TheMarkers;
cv::Mat TheInputImage, TheInputImageGrey, TheInputImageCopy;
aruco::CameraParameters TheCameraParameters;
int iDetectMode = 0, iMinMarkerSize = 0, iCorrectionRate = 0, iShowAllCandidates = 0, iEnclosed = 0, iThreshold, iCornerMode, iDictionaryIndex, iTrack = 0;

// minimal command-line parser: operator[] tests whether a flag is present, operator() returns the value that follows it
class CmdLineParser
{
    int argc; char** argv;
public:
    CmdLineParser(int _argc, char** _argv) : argc(_argc), argv(_argv) {}
    bool operator[](std::string param) { int idx = -1; for (int i = 0; i < argc && idx == -1; i++) if (std::string(argv[i]) == param) idx = i; return (idx != -1); }
    std::string operator()(std::string param, std::string defvalue = "-1") { int idx = -1; for (int i = 0; i < argc && idx == -1; i++) if (std::string(argv[i]) == param) idx = i; if (idx == -1) return defvalue; else return (argv[idx + 1]); }
};

// running average of the last n timed intervals (used for FPS measurement)
struct TimerAvrg
{
    std::vector<double> times;
    size_t curr = 0, n;
    std::chrono::high_resolution_clock::time_point begin, end;
    TimerAvrg(int _n = 30) { n = _n; times.reserve(n); }
    inline void start() { begin = std::chrono::high_resolution_clock::now(); }
    inline void stop() { end = std::chrono::high_resolution_clock::now(); double duration = double(std::chrono::duration_cast<std::chrono::microseconds>(end - begin).count()) * 1e-6; if (times.size() < n) times.push_back(duration); else { times[curr] = duration; curr++; if (curr >= times.size()) curr = 0; } }
    double getAvrg() { double sum = 0; for (auto t : times) sum += t; return sum / double(times.size()); }
};
TimerAvrg Fps;

// camera viewpoint
glm::vec3 current_pos = glm::vec3(0.1f, 0.1f, 0.25f);
std::queue<glm::vec3> previous_poses;
std::queue<cv::Mat> previous_rvec;
float data_orientation[10] = { 0,1,0 };
glm::vec4 orientation = glm::vec4(0.0, 1.0, 0.0, 1.0);
glm::vec3 light_position = glm::vec3(1.0f, 1.0f, 1.0f);
glm::vec3 current_pos_left = glm::vec3(0.2f, 0.2f, 0.5f);
glm::vec3 current_pos_right = glm::vec3(0.2f, 0.2f, 0.5f);
glm::vec3 object_hand = glm::vec3(0.0f, 0.0f, 0.0f);

bool USE_ANAGLYPH = false;
bool FRUSTRUM = false;

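// The two helpers below copy a 3x3 rotation matrix between OpenCV and GLM.
// OpenCV stores matrices in row-major order while GLM is column-major, so the
// raw memcpy is followed by a transpose to keep the same logical matrix.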
bool fromCV2GLM_3x3(const cv::Mat& cvmat, glm::mat3* glmmat) {
    if (cvmat.cols != 3 || cvmat.rows != 3 || cvmat.type() != CV_32FC1) {
        std::cout << "Matrix conversion error!" << std::endl;
        return false;
    }
    memcpy(glm::value_ptr(*glmmat), cvmat.data, 9 * sizeof(float));
    *glmmat = glm::transpose(*glmmat);
    return true;
}

bool fromGLM2CV_3x3(const glm::mat3& glmmat, cv::Mat* cvmat) {
    if (cvmat->cols != 3 || cvmat->rows != 3) {
        (*cvmat) = cv::Mat(3, 3, CV_32F);
    }
    memcpy(cvmat->data, glm::value_ptr(glmmat), 9 * sizeof(float));
    *cvmat = cvmat->t();
    return true;
}

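// Pose smoothing: the render loop keeps the last NUMBER_OF_FRAMES_TO_AVERAGE marker
// poses in a queue and uses their arithmetic mean as the current camera position,
// which damps frame-to-frame jitter in the raw ArUco pose estimate.
// compute_mean_cvMat performs the same averaging for cv::Mat translation/rotation vectors.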
cv::Mat compute_mean_cvMat(std::queue<cv::Mat> current_queue, int NUMBER_OF_FRAMES_TO_AVERAGE, bool VERBOSE = false)
{
    std::queue<cv::Mat> current_queue_copy = current_queue;
    cv::Mat mean_current_queue(cv::Size(1, 3), CV_32F);

    mean_current_queue.at<float>(0, 0) = 0.0f;
    mean_current_queue.at<float>(0, 1) = 0.0f;
    mean_current_queue.at<float>(0, 2) = 0.0f;

    if (VERBOSE)
        printf("Queue status: ");

    while (!current_queue_copy.empty())
    {
        if (VERBOSE)
        {
            printf("(% .02f ", current_queue_copy.front().at<float>(0, 0));
            printf("% .02f ", current_queue_copy.front().at<float>(0, 1));
            printf("% .02f), ", current_queue_copy.front().at<float>(0, 2));
        }

        mean_current_queue.at<float>(0, 0) += current_queue_copy.front().at<float>(0, 0);
        mean_current_queue.at<float>(0, 1) += current_queue_copy.front().at<float>(0, 1);
        mean_current_queue.at<float>(0, 2) += current_queue_copy.front().at<float>(0, 2);

        current_queue_copy.pop();
    }
    if (VERBOSE)
        printf("\n");

    cv::Mat mean_cvMat(cv::Size(1, 3), CV_32F);

    mean_cvMat.at<float>(0, 0) = mean_current_queue.at<float>(0, 0) / (float)NUMBER_OF_FRAMES_TO_AVERAGE;
    mean_cvMat.at<float>(0, 1) = mean_current_queue.at<float>(0, 1) / (float)NUMBER_OF_FRAMES_TO_AVERAGE;
    mean_cvMat.at<float>(0, 2) = mean_current_queue.at<float>(0, 2) / (float)NUMBER_OF_FRAMES_TO_AVERAGE;

    return mean_cvMat;
}

glm::vec3 compute_mean(std::queue<glm::vec3> previous_poses, int NUMBER_OF_FRAMES_TO_AVERAGE, bool VERBOSE = false)
{
    std::queue<glm::vec3> previous_poses_copy = previous_poses;

    glm::vec3 mean_current_pose = glm::vec3(0.0f, 0.0f, 0.0f);

    if (VERBOSE)
        printf("Queue status: ");

    while (!previous_poses_copy.empty())
    {
        if (VERBOSE)
        {
            printf("(% .02f ", previous_poses_copy.front().x);
            printf("% .02f ", previous_poses_copy.front().y);
            printf("% .02f), ", previous_poses_copy.front().z);
        }

        mean_current_pose.x += previous_poses_copy.front().x;
        mean_current_pose.y += previous_poses_copy.front().y;
        mean_current_pose.z += previous_poses_copy.front().z;

        previous_poses_copy.pop();
    }
    if (VERBOSE)
        printf("\n");

    glm::vec3 current_pos_in;

    current_pos_in.x = mean_current_pose.x / (float)NUMBER_OF_FRAMES_TO_AVERAGE;
    current_pos_in.y = mean_current_pose.y / (float)NUMBER_OF_FRAMES_TO_AVERAGE;
    current_pos_in.z = mean_current_pose.z / (float)NUMBER_OF_FRAMES_TO_AVERAGE;

    return current_pos_in;
}

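// Per-frame flow in main(): grab a webcam frame, detect ArUco markers, use marker 229
// to estimate the viewer's head position (smoothed over the last few frames) and
// marker 100 to move a cube, then render the scene twice (left/right eye) with
// complementary glColorMask settings to produce a red/cyan anaglyph, and finally
// draw the webcam image as a 2D overlay.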
int main(int argc, char **argv)
{
    ToolsC* tools = new ToolsC(BASE_PATH);

    CmdLineParser cml(argc, argv);
    // read camera parameters if passed
    if (cml["-c"])
        TheCameraParameters.readFromXMLFile(cml("-c"));

    // glfw: initialize and configure
    // ------------------------------
    glfwInit();
    glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
    glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
    glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);

#ifdef __APPLE__
    glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE); // required for core-profile contexts on macOS
#endif

    // glfw window creation
    // --------------------
    GLFWwindow* window = glfwCreateWindow(SCR_WIDTH, SCR_HEIGHT, "RV – Practica tracking", NULL, NULL);
    if (window == NULL)
    {
        std::cout << "Failed to create GLFW window" << std::endl;
        glfwTerminate();
        return -1;
    }
    glfwMakeContextCurrent(window);
    glfwSetFramebufferSizeCallback(window, framebuffer_size_callback);

    // glad: load all OpenGL function pointers
    if (!gladLoadGLLoader((GLADloadproc)glfwGetProcAddress))
    {
        std::cout << "Failed to initialize GLAD" << std::endl;
        return -1;
    }

    // configure global opengl state
    glEnable(GL_DEPTH_TEST);

    // build and compile shaders
    Shader ourShader(std::string(BASE_PATH + "vert.vs").c_str(), std::string(BASE_PATH + "frag.fs").c_str());
    Shader ourShader2D(std::string(BASE_PATH + "vert2D.vs").c_str(), std::string(BASE_PATH + "frag2D.fs").c_str());

    // load and create textures
    tools->loadTextures();

    // initialize vertex buffers
    tools->initRenderData();

    // set up 3D shader materials
    ourShader.use();
    ourShader.setInt("material.diffuse", 0);
    ourShader.setInt("material.specular", 1);

    // set up 2D shader texture unit
    ourShader2D.use();
    ourShader2D.setInt("image", 1);

    // open video input from the webcam
    TheVideoCapturer.open(0);

    // check the video stream is open
    if (!TheVideoCapturer.isOpened())
        throw std::runtime_error("Could not open video");

    // *********************************************
    // -- Initialization: up vector and pose queues --
    // *********************************************
    glm::vec3 up_vector = glm::vec3(0, 0, 1.0);
    glm::vec3 look_vector = glm::vec3(0, 0.0, 0.0);
    std::queue<glm::vec3> previous_poses;
    std::queue<cv::Mat> previous_rvec;
    int num_frames = 0;

    // *********************************************
    // -- Initialization: Block 2 (moving cube) --
    // *********************************************
    glm::mat4 model2 = glm::mat4(1.0f);
    glm::mat4 traslacion = glm::translate(glm::mat4(1.0f), glm::vec3(0.3f, 0.1f, 0.0f));

    // render loop
    while (!glfwWindowShouldClose(window))
    {
        num_frames++;

        // this will contain the image from the webcam
        cv::Mat frame;

        // capture the next frame from the webcam and swap the channel order
        TheVideoCapturer >> frame;
        cv::cvtColor(frame, frame, cv::COLOR_RGB2BGR);
        //std::cout << "Frame size: " << frame.cols << " x " << frame.rows << " x " << frame.channels() << "\n";

        //if (TheCameraParameters.isValid())
        //    std::cout << "Parameters OK\n";

        // *********************************************
        // -- Initialization Parameters --
        // *********************************************

        float TheMarkerSize = 0.40f;

        float EyeSeparation = 0.065f;  // eye separation (interpupillary distance)
        float top, bottom, left, right;
        float Convergence = 1.0f;      // convergence distance
        float AspectRatio = 1.3333f;   // aspect ratio
        float FOV = 45.0f;             // vertical field of view, in degrees
        float NearClipPlane = 0.01f;   // near clipping distance
        float FarClipPlane = 10.0f;    // far clipping distance
        float a, b, c;

        int NUMBER_OF_FRAMES_TO_AVERAGE = 10;

        //***************************************************
        // -- Start of TASK 1 --
        // -- Detect and Draw MARKERS --
        //***************************************************
        // detect markers
        TheMarkers = MDetector.detect(frame, TheCameraParameters, TheMarkerSize);

        // for each detected marker, draw its outline and 3D axes on the input image
        for (unsigned int i = 0; i < TheMarkers.size(); i++)
        {
            TheMarkers[i].draw(frame, cv::Scalar(255, 0, 255), 3);
            aruco::CvDrawingUtils::draw3dAxis(frame, TheMarkers[i], TheCameraParameters, 4);
        }

        //***************************************************
        // -- End of TASK 1 --
        //***************************************************

        //***************************************************
        // -- Start of TASKS 2 and 3 --
        // -- Transformations and Rotations --
        //***************************************************

        if (TheMarkers.size() > 0)
        {
            bool marc = false;
            int marcId = -1;

            // look for the head-tracking marker (id 229)
            for (size_t i = 0; i < TheMarkers.size(); i++)
            {
                if (TheMarkers[i].id == 229) {
                    marc = true;
                    marcId = i;
                }
            }

            if (marc) {

                cv::Mat posicionMarcador = TheMarkers[marcId].Tvec;

                float x0p = posicionMarcador.at<float>(0, 0);
                float y0p = posicionMarcador.at<float>(0, 1);
                float z0p = posicionMarcador.at<float>(0, 2);

                current_pos.x = TheMarkers[marcId].Tvec.at<float>(0, 0);
                current_pos.y = TheMarkers[marcId].Tvec.at<float>(0, 1);
                current_pos.z = TheMarkers[marcId].Tvec.at<float>(0, 2);

                previous_poses.push(current_pos);

                // once enough samples have accumulated, replace the raw pose with the queue average
                if (num_frames >= NUMBER_OF_FRAMES_TO_AVERAGE) {
                    current_pos = compute_mean(previous_poses, NUMBER_OF_FRAMES_TO_AVERAGE);
                    previous_poses.pop();
                }

                cv::Mat rotacionMarcador = TheMarkers[marcId].Rvec;
                cv::Mat rot;
                glm::mat3 rotacion;

                // Rodrigues converts the axis-angle rotation vector into a 3x3 rotation matrix
                cv::Rodrigues(rotacionMarcador, rot);

                fromCV2GLM_3x3(rot, &rotacion);
                up_vector = glm::vec3(0.0, 0.0, 1.0);
                // rotate the up vector by the marker orientation
                //printf("\nup_vector before: % .2f % .2f % .2f \n", up_vector.x, up_vector.y, up_vector.z);
                up_vector = rotacion * up_vector;
                //printf("up_vector after: % .2f % .2f % .2f \n\n", up_vector.x, up_vector.y, up_vector.z);
            }
        }

        //***************************************************
        // -- End of TASKS 2 and 3 --
        //***************************************************

        //***************************************************
        // -- Projections and Views --
        //***************************************************
        glm::mat4 current_view = glm::mat4(1.0f);
        glm::mat4 projection = glm::mat4(1.0f);

        projection = glm::perspective(glm::radians(45.0f), (float)SCR_WIDTH / (float)SCR_HEIGHT, NearClipPlane, FarClipPlane);

        //printf("current_pos : % .2f % .2f % .2f \n", current_pos.x, current_pos.y, current_pos.z);
        //printf("look_vector : % .2f % .2f % .2f \n", look_vector.x, look_vector.y, look_vector.z);
        //printf("up_vector   : % .2f % .2f % .2f \n\n", up_vector.x, up_vector.y, up_vector.z);

        /*
        current_view = glm::lookAt(
            current_pos,    // camera pos
            look_vector,    // look at
            up_vector       // up vector
        );
        //*/

        //***************************************************
        // -- Pre-Render --
        //***************************************************

        // copy the input image to m_textures[1]
        cv::flip(frame, frame, -1);
        glBindTexture(GL_TEXTURE_2D, tools->m_textures[1]);
        glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, frame.cols, frame.rows, 0, GL_RGB, GL_UNSIGNED_BYTE, frame.ptr());

        // GLFW process that reacts to input keys
        processInput(window);

        // render
        glClearColor(0.2f, 0.3f, 0.3f, 1.0f);
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); // also clear the depth buffer

        //***************************************************
        // -- RENDERING --
        //***************************************************

        // TEXTURES
        glActiveTexture(GL_TEXTURE0);
        glBindTexture(GL_TEXTURE_2D, tools->m_textures[0]);
        glActiveTexture(GL_TEXTURE1);
        glBindTexture(GL_TEXTURE_2D, tools->m_textures[1]);

        // activate shader
        ourShader.use();

        // set camera view matrix
        // This single camera view is no longer needed: a separate view is built per eye below.
        //ourShader.setMat4("view", current_view);

        // LIGHTS
        ourShader.setVec3("light_position", light_position);
        ourShader.setVec3("light.direction", 0.0f, 0.0f, -1.0f);
        ourShader.setVec3("light.ambient", 0.5f, 0.5f, 0.5f);
        ourShader.setVec3("light.diffuse", 0.75f, 0.75f, 0.75f);
        ourShader.setVec3("light.specular", 0.5f, 0.5f, 0.5f);

        // material properties
        ourShader.setFloat("material.shininess", 0.4f);

        // Not used here: a per-eye projection is set below.
        //ourShader.setMat4("projection", projection);

        // Compute the position of the moving cube

        if (TheMarkers.size() > 0) {
            bool esta = false;
            int marcadorPos = 0;
            // look for the cube marker (id 100)
            for (size_t i = 0; i < TheMarkers.size(); i++)
            {
                if (TheMarkers[i].id == 100) {
                    esta = true;
                    marcadorPos = i;
                }
            }

            if (esta) {
                double box_position[3];
                double box_orientation[4];
                cv::Mat poscionMarcador = TheMarkers[marcadorPos].Tvec.clone();
                TheMarkers[marcadorPos].OgreGetPoseParameters(box_position, box_orientation);

                float xPos = poscionMarcador.at<float>(0, 0);
                float yPos = poscionMarcador.at<float>(1, 0);
                float zPos = poscionMarcador.at<float>(2, 0);
                glm::vec4 MPos = glm::vec4(xPos, yPos, zPos, 0.0f);

                glm::vec3 box_p = glm::vec3(box_position[0], box_position[1], box_position[2]);
                //glm::vec3 o = glm::vec3(mark_orientation[0], mark_orientation[1], mark_orientation[2]);
                //glm::vec3 o = glm::vec3(mark_orientation[0], -1.0f, mark_orientation[2]);
                std::cout << "Moving the stick marker" << std::endl;
                std::cout << "Stick position (Tvec): " << MPos[0] << " " << MPos[1] << " " << MPos[2] << std::endl;
                std::cout << "Stick position (OgreGetPoseParameters): " << box_position[0] << " " << box_position[1] << " " << box_position[2] << std::endl;

                // update the x/y translation of the cube's model matrix from the marker position
                traslacion[3].x = MPos.x;
                traslacion[3].y = MPos.y;
                //traslacion[3].z = MPos.z;
            }
            //traslacion = glm::translate(traslacion, box_p);
        }

        /* *************************** */
        /*    LEFT-EYE RENDER BLOCK    */
        /* *************************** */

        // red channel only: the left eye of the red/cyan anaglyph
        glColorMask(true, false, false, false);

        // Left eye: off-axis (asymmetric) frustum bounds.
        // FOV is in degrees, so convert to radians before tan().
        top = NearClipPlane * tan(glm::radians(FOV) / 2.0f);
        bottom = -top;
        a = AspectRatio * tan(glm::radians(FOV) / 2.0f) * Convergence;
        b = a - EyeSeparation / 2;
        c = a + EyeSeparation / 2;
        left = -b * NearClipPlane / Convergence;
        right = c * NearClipPlane / Convergence;
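        // Stereo note: the active path below is "toe-in" stereo, i.e. each eye uses the same
        // symmetric perspective projection but a glm::lookAt view that converges on a point in
        // front of the viewer. The commented-out glm::frustum projections implement the
        // alternative "off-axis" approach, in which both eyes look straight ahead and the
        // frustum is skewed per eye; off-axis avoids the vertical parallax toe-in can introduce.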

        ourShader.use();

        // left-eye camera: shift the tracked head position half the eye separation to the left
        glm::vec3 leftEyePos = glm::vec3(current_pos.x - (EyeSeparation / 2), current_pos.y, current_pos.z);
        current_view = glm::lookAt(
            leftEyePos,                           // camera pos
            glm::vec3(-EyeSeparation / 2, 0, 0),  // look at
            up_vector                             // up vector
        );

        ourShader.setMat4("view", current_view); // bind the view matrix to the left eye

        // toe-in projection
        ourShader.setMat4("projection", projection);

        // off-axis projection (alternative)
        // glm::mat4 projection_left_eye = glm::frustum(left, right, bottom, top, NearClipPlane, FarClipPlane);

        // DRAW THE CUBES

        // BOX 1 (moving cube)
        glBindVertexArray(tools->m_VAOs[2]);

        // calculate the model matrix for this object and pass it to the shader before drawing
        model2 = traslacion;

        // set model matrix
        ourShader.setMat4("model", model2);

        // draw
        glDrawArrays(GL_TRIANGLES, 0, 36);

        // BOX 2 (static cube at the origin)
        glm::mat4 model = glm::mat4(1.0f);
        // set model matrix
        ourShader.setMat4("model", model);

        // draw
        glDrawArrays(GL_TRIANGLES, 0, 36);
        glBindVertexArray(0);

        ourShader.setInt("material.specular", 0);

        // FLOOR
        glActiveTexture(GL_TEXTURE0);
        glBindTexture(GL_TEXTURE_2D, tools->m_textures[2]);
        glBindVertexArray(tools->m_VAOs[1]);
        ourShader.setMat4("model", model);
        glDrawArrays(GL_TRIANGLES, 0, 6);
        glBindVertexArray(0);

        // clear the depth buffer so the right-eye pass is not occluded by the left-eye geometry
        glClear(GL_DEPTH_BUFFER_BIT);

        // Only last mask
        glUseProgram(0);

        /* *************************** */
        /*   RIGHT-EYE RENDER BLOCK    */
        /* *************************** */

        // re-bind the textures
        glActiveTexture(GL_TEXTURE0);
        glBindTexture(GL_TEXTURE_2D, tools->m_textures[0]);
        glActiveTexture(GL_TEXTURE1);
        glBindTexture(GL_TEXTURE_2D, tools->m_textures[1]);

        // green and blue channels only: the cyan eye of the anaglyph
        glColorMask(false, true, true, false);

        // Right eye: off-axis (asymmetric) frustum bounds
        top = NearClipPlane * tan(glm::radians(FOV) / 2.0f);
        bottom = -top;
        a = AspectRatio * tan(glm::radians(FOV) / 2.0f) * Convergence;
        b = a - EyeSeparation / 2;
        c = a + EyeSeparation / 2;
        left = -c * NearClipPlane / Convergence;
        right = b * NearClipPlane / Convergence;
        glm::mat4 projection_right_eye = glm::frustum(left, right, bottom, top, NearClipPlane, FarClipPlane);

        ourShader.use();

        // right-eye camera: shift the tracked head position half the eye separation to the right
        glm::vec3 rightEyePos = glm::vec3(current_pos.x + (EyeSeparation / 2), current_pos.y, current_pos.z);
        current_view = glm::lookAt(
            rightEyePos,                         // camera pos
            glm::vec3(EyeSeparation / 2, 0, 0),  // look at
            up_vector                            // up vector
        );

        ourShader.setMat4("view", current_view); // bind the view matrix to the right eye

        // toe-in projection
        ourShader.setMat4("projection", projection); // bind the projection matrix for this viewpoint

        // off-axis projection (alternative)
        //ourShader.setMat4("projection", projection_right_eye);

        // DRAW THE CUBES

        // BOX 1 (moving cube)
        glBindVertexArray(tools->m_VAOs[2]);

        // calculate the model matrix for this object and pass it to the shader before drawing
        model2 = traslacion;

        // set model matrix
        ourShader.setMat4("model", model2);

        // draw
        glDrawArrays(GL_TRIANGLES, 0, 36);

        // BOX 2 (static cube at the origin)
        model = glm::mat4(1.0f);
        // set model matrix
        ourShader.setMat4("model", model);

        // draw
        glDrawArrays(GL_TRIANGLES, 0, 36);
        glBindVertexArray(0);

        ourShader.setInt("material.specular", 0);

        // FLOOR
        glActiveTexture(GL_TEXTURE0);
        glBindTexture(GL_TEXTURE_2D, tools->m_textures[2]);
        glBindVertexArray(tools->m_VAOs[1]);
        ourShader.setMat4("model", model);
        glDrawArrays(GL_TRIANGLES, 0, 6);
        glBindVertexArray(0);

        // clear the depth buffer
        glClear(GL_DEPTH_BUFFER_BIT);

        // Only last mask
        glUseProgram(0);

        // ***************************
        // - WEBCAM -
        // ***************************

        // configure projection matrix
        projection = glm::perspective(glm::radians(45.0f), (float)SCR_WIDTH / (float)SCR_HEIGHT, 0.1f, 100.0f);

        glColorMask(true, true, true, false);
        glActiveTexture(GL_TEXTURE1);
        glBindTexture(GL_TEXTURE_2D, tools->m_textures[1]);

        ourShader2D.use();
        float screen_width = 2.0f;
        glm::mat4 projection2D = glm::ortho(0.0f, (float)SCR_WIDTH, 0.0f, (float)SCR_HEIGHT, -1.0f, 1.0f);
        glm::mat4 model2D = glm::translate(glm::mat4(1.0f), glm::vec3((float)SCR_WIDTH / 2.0f - 160.0f, 540.0f, 0.f));

        ourShader2D.setMat4("projection2D", projection2D);
        //std::cout << "glGetUniformLocation " << " :" << glGetUniformLocation(ourShader2D.ID, "projection2D");
        ourShader2D.setMat4("model2D", model2D);

        glBindVertexArray(tools->m_VAOs[0]); // VAOs[0] is the 2D quad for the webcam input
        glDrawArrays(GL_TRIANGLES, 0, 6);
        glBindVertexArray(0);

        glUseProgram(0);

        // glfw: swap buffers and poll IO events (keys pressed/released, mouse moved etc.)
        glfwSwapBuffers(window);
        glfwPollEvents();
    }

    // de-allocate all resources once they've outlived their purpose
    glDeleteVertexArrays(1, &(tools->m_VAOs[2]));
    glDeleteBuffers(1, &(tools->m_VBOs[2]));

    glDeleteVertexArrays(1, &(tools->m_VAOs[1]));
    glDeleteBuffers(1, &(tools->m_VBOs[1]));

    glDeleteVertexArrays(1, &(tools->m_VAOs[0]));
    glDeleteBuffers(1, &(tools->m_VBOs[0]));

    // glfw: terminate, clearing all previously allocated GLFW resources
    glfwTerminate();

    std::cout << "Bye!" << std::endl;
    return 0;
}

// process all input: query GLFW whether relevant keys are pressed/released this frame and react accordingly
void processInput(GLFWwindow *window)
{
    if (glfwGetKey(window, GLFW_KEY_ESCAPE) == GLFW_PRESS)
        glfwSetWindowShouldClose(window, true);
}

// glfw: whenever the window size changes (by OS or user resize) this callback executes
void framebuffer_size_callback(GLFWwindow* window, int width, int height)
{
    // make sure the viewport matches the new window dimensions; note that width and
    // height will be significantly larger than specified on retina displays
    glViewport(0, 0, width, height);
}