Guest User

Untitled

a guest
Oct 12th, 2020
9
318 days
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
  1. use crate::shaders::packed_fragment::PACKED_FRAGMENT_SHADER;
  2. use crate::shaders::planar_fragment::PLANAR_FRAGMENT_SHADER;
  3. use crate::shaders::video_vertex::VIDEO_VERTEX_SHADER;
  4. use gl::types::*;
  5. use liborwell_rust::common::pixel_formats::{PixelFormat, GL_SYMBOLS};
  6. use liborwell_rust::common::profiler::Profiler;
  7. use liborwell_rust::common::renderer::{OnConsume, Renderer};
  8. use liborwell_rust::common::stoppable_thread::StoppableThread;
  9. use std::cell::RefCell;
  10. use std::ffi::CStr;
  11. use std::ffi::CString;
  12. use std::fs::File;
  13. use std::io::Read;
  14. use std::sync::atomic::AtomicBool;
  15. use std::sync::{Arc, Mutex};
  16. use std::time::Duration;
  17. //use liborwell_rust::common::pixel_format::PixelFormat;
  18. use liborwell_rust::common::decoded_packet::DecodedPacket;
  19. use liborwell_rust::common::ffmpeg_decoded_packet::FfmpegDecodedPacket;
  20. use liborwell_rust::common::ffmpeg_decoded_packet_simulated::FfmpegDecodedPacketSimulated;
  21. use liborwell_rust::common::runnable::Runnable;
  22.  
  23. use super::renderer;
  24. use super::renderer_error::RendererError;
  25. use super::shader::Shader;
  26. use super::vertex_array_object::VertexArrayObject;
  27. use super::vertex_buffer_object::VertexBufferObject;
  28.  
//Window over which the FPS profiler aggregates samples (see GLRenderer::new).
//NOTE(review): Rust convention is SCREAMING_SNAKE_CASE for consts
//(FPS_MEASURE_INTERVAL); renaming requires updating its use in GLRenderer::new().
const fps_measure_interval: Duration = Duration::from_millis(1000);
  30.  
//Which fragment shader is currently active (see parse_frame / draw).
//NOTE(review): Rust convention is UpperCamelCase variants (Planar/Packed);
//renaming would require updating parse_frame and draw.
enum CurrentProgram {
    //Planar pixel layouts (separate planes, e.g. Y/U/V).
    PLANAR,
    //Packed pixel layouts (components interleaved).
    PACKED,
}
  35.  
//OpenGL state for rendering decoded video frames as a textured quad,
//one texture per plane. GL resources are created lazily by
//init_vertex_stuff/parse_frame, so most fields start as None/zero.
struct GLRenderer {
    //FPS profiler, aggregated over fps_measure_interval.
    fps: Arc<Mutex<Profiler<u32>>>,
    stoppable_thread: StoppableThread,
    pub decoded_frame: Option<DecodedPacket>,
    //Which of the two shader programs below is active.
    current_fragment_program: Option<CurrentProgram>,
    planar_program: Option<Shader>,
    packed_program: Option<Shader>,
    //Geometry/format of the last frame textures were built for;
    //None until the first parse_frame.
    current_frame_width: Option<u32>,
    current_frame_height: Option<u32>,
    current_pixel_format: Option<PixelFormat>,
    //One GL texture, sampler uniform location and pixel-buffer object
    //per plane (TEXTURE_NUMBER == 3).
    texture_id: [u32; GLRenderer::TEXTURE_NUMBER as usize],
    texture_location: [i32; GLRenderer::TEXTURE_NUMBER as usize],
    pixel_buffer_objects: [u32; GLRenderer::TEXTURE_NUMBER as usize],
    vertex_buffer_object: Option<VertexBufferObject>,
    vertex_array_object: Option<VertexArrayObject>,
    //Uniform locations: "alpha" blend factor and "tex_format" selector.
    alpha: i32,
    texture_format: i32,
    //Attribute locations for "aPos" and "aTexCoord" in the vertex shader.
    vertex_in_location: i32,
    texture_in_location: i32,
}
  56.  
  57. impl GLRenderer {
    //Number of planes/textures handled (Y, U, V).
    const TEXTURE_NUMBER: u8 = 3;
    //NOTE(review): VERTEX_POINTER/FRAGMENT_POINTER appear unused in this
    //file — confirm before removing.
    const VERTEX_POINTER: u8 = 0;
    const FRAGMENT_POINTER: u8 = 1;
    //Interleaved quad for TRIANGLE_STRIP: 4 vertices x (3 position floats
    //+ 2 texture-coordinate floats) = 20 floats; stride is 5 floats.
    const VERTICES_TEXTURES: [f32; 20] = [
        -1.0, -1.0, 0.0, 0.0, 1.0, 1.0, -1.0, 0.0, 1.0, 1.0, -1.0, 1.0, 0.0, 0.0, 0.0, 1.0, 1.0,
        0.0, 1.0, 0.0,
    ];
  65.     pub fn new() -> Self {
  66.         let r = GLRenderer {
  67.             stoppable_thread: StoppableThread::new(),
  68.             decoded_frame: None, //DecodedPacket::FFMPEG(FfmpegDecodedPacket::blank()),
  69.             fps: Arc::new(Mutex::new(Profiler::<u32>::new(fps_measure_interval))),
  70.             current_fragment_program: None,
  71.             planar_program: None,
  72.             packed_program: None,
  73.             current_frame_width: None,
  74.             current_frame_height: None,
  75.             current_pixel_format: None,
  76.             texture_id: [0; GLRenderer::TEXTURE_NUMBER as usize],
  77.             texture_location: [0; GLRenderer::TEXTURE_NUMBER as usize],
  78.             pixel_buffer_objects: [0; GLRenderer::TEXTURE_NUMBER as usize],
  79.             vertex_buffer_object: None,
  80.             vertex_array_object: None,
  81.             alpha: 0,
  82.             texture_format: 0,
  83.             vertex_in_location: 0,
  84.             texture_in_location: 0,
  85.         };
  86.         r
  87.     }
  88.  
    //TODO: separate in 2 cases: u32 and i32, there are constants that could have those types
    /// Maps a project-level GL symbol onto the raw `gl` crate constant,
    /// widened to `i32` (callers cast back to GLenum/GLint as needed).
    ///
    /// Panics on symbols that are not handled yet.
    pub fn translate_gl(s: &GL_SYMBOLS) -> i32 {
        match s {
            GL_SYMBOLS::GL__RED => gl::RED as i32,
            GL_SYMBOLS::GL__UNSIGNED_BYTE => gl::UNSIGNED_BYTE as i32,
            _ => panic!("unsupported gl symbol format"),
        }
    }
  97.  
  98.     pub fn init_vertex_stuff(&mut self) {
  99.         self.planar_program = Some(Shader::new());
  100.         self.planar_program
  101.             .as_mut()
  102.             .unwrap()
  103.             .compile(VIDEO_VERTEX_SHADER, PLANAR_FRAGMENT_SHADER)
  104.             .unwrap();
  105.         /*
  106.         self.packed_program = Some(Shader::new());
  107.         self.packed_program
  108.             .as_mut()
  109.             .unwrap()
  110.             .compile(VIDEO_VERTEX_SHADER, PACKED_FRAGMENT_SHADER)
  111.             .unwrap();
  112.         */
  113.     }
  114.  
  115.     pub fn dimension_info(
  116.         frame: &DecodedPacket,
  117.     ) -> ([i32; 3], [i32; 3], [usize; 3], [usize; 3], [isize; 3]) {
  118.         let ffmpeg_decoded_packet = match frame {
  119.             //DecodedPacket::FFMPEG(ffmpeg_decoded_packet) => ffmpeg_decoded_packet,
  120.             DecodedPacket::FFMPEG_SIMULATED(ffmpeg_decoded_packet) => ffmpeg_decoded_packet,
  121.             _ => panic!("packet not supported yet"),
  122.         };
  123.         //Width of each plane Y,U,V or R,G,B
  124.         let mut width: [i32; 3] = [0; 3];
  125.         //Height of each plane
  126.         let mut height: [i32; 3] = [0; 3];
  127.         //Stride of each plane (explained below)
  128.         let mut linesize: [usize; 3] = [0; 3];
  129.         //Planesize of each plane (explained below)
  130.         let mut plane_size: [usize; 3] = [0; 3];
  131.         //Texture size of each OpenGL texture
  132.         let mut texture_size: [isize; 3] = [0; 3];
  133.         /*
  134.             Our Frame called `frame` has a PixelFormat (example: AV_PIX_FMT_YUV420P).
  135.             We're gonna get, in the list of PixelFormats, for parameters for this format.
  136.             The parameters are things like the ratio of U and V components of the YUV
  137.             component, in the case of an YUV frame, or the details about RGB in the
  138.             case of an RGB decodedFrame->
  139.         */
  140.         //let pixel_format = PixelFormat::YUV420P;
  141.         let pixel_format_translator =
  142.             Some(liborwell_rust::common::pixel_formats_impl::pixel_format_yuv420p);
  143.         for i in 0..3 {
  144.             let width_ratio = &pixel_format_translator.as_ref().unwrap().yuv_widths[i];
  145.             let height_ratio = &pixel_format_translator.as_ref().unwrap().yuv_heights[i];
  146.             /*
  147.                 linesize[i] is the stride (https://docs.microsoft.com/en-us/windows/win32/medfound/image-stride)
  148.                 for each plane.
  149.                 Basically, the stride must be greater or equal to the width of the plane,
  150.                 and is there for performance purposes.
  151.                 We need to render to a texture. Each plane has a planeSize[i],
  152.                 which is basically linesize[i]*height[i]. We need to take linesize in consideration
  153.                 when copying to the texture, which is, of course, of size width[i]*height[i].
  154.                 So, basically, we need to copy the plane to the texture ignoring the padding bytes
  155.                 that come after the stride.
  156.             */
  157.             //TODO: can linesize be <0?
  158.             linesize[i] = ffmpeg_decoded_packet.get_line_size(i);
  159.             width[i] = (ffmpeg_decoded_packet.get_width() * width_ratio.numerator
  160.                 / width_ratio.denominator) as i32;
  161.             height[i] = (ffmpeg_decoded_packet.get_height() * height_ratio.numerator
  162.                 / height_ratio.denominator) as i32;
  163.             plane_size[i] = linesize[i] * (height[i] as usize);
  164.             texture_size[i] = (width[i] * height[i]) as isize;
  165.             /*
  166.                 Now that we setted width, height, linesize and planarSize, our renderer can
  167.                 render the image by doing the correct copy taking the stride into account.
  168.             */
  169.         }
  170.         (width, height, linesize, plane_size, texture_size)
  171.     }
  172.  
  173.     pub fn parse_frame(&mut self, frame: &DecodedPacket) {
  174.         let (width, height, linesize, plane_size, texture_size) =
  175.             GLRenderer::dimension_info(&frame);
  176.         let ffmpeg_decoded_packet = match frame {
  177.             //DecodedPacket::FFMPEG(ffmpeg_decoded_packet) => ffmpeg_decoded_packet,
  178.             DecodedPacket::FFMPEG_SIMULATED(ffmpeg_decoded_packet) => ffmpeg_decoded_packet,
  179.             _ => panic!("packet not supported yet"),
  180.         };
  181.         let (frame_width, frame_height) = (
  182.             ffmpeg_decoded_packet.get_width(),
  183.             ffmpeg_decoded_packet.get_height(),
  184.         );
  185.         let pixel_format = Some(PixelFormat::YUV420P);
  186.         let pixel_format_translator =
  187.             Some(liborwell_rust::common::pixel_formats_impl::pixel_format_yuv420p);
  188.         /*
  189.             If any of these change: current_frame_width, current_frame_height, current_pixel_format,
  190.             we obviously need to recreate all textures
  191.         */
  192.         if self.current_frame_width != Some(frame_width)
  193.             || self.current_frame_height != Some(frame_height)
  194.             || self.current_pixel_format != pixel_format
  195.         {
  196.             let current_fragment_program: Option<&Shader>;
  197.             if pixel_format_translator.as_ref().unwrap().is_planar {
  198.                 self.planar_program.as_ref().unwrap().activate();
  199.                 self.current_fragment_program = Some(CurrentProgram::PLANAR);
  200.                 current_fragment_program = self.planar_program.as_ref();
  201.             } else {
  202.                 self.packed_program.as_ref().unwrap().activate();
  203.                 self.current_fragment_program = Some(CurrentProgram::PACKED);
  204.                 current_fragment_program = self.packed_program.as_ref();
  205.             }
  206.             println!(
  207.                 "current_fragment_program: {}",
  208.                 current_fragment_program.as_ref().unwrap().program()
  209.             );
  210.             unsafe {
  211.                 self.vertex_in_location = gl::GetAttribLocation(
  212.                     current_fragment_program.as_ref().unwrap().program(),
  213.                     CString::new("aPos").unwrap().as_ptr(),
  214.                 );
  215.                 println!("vertex_in_location: {}", self.vertex_in_location);
  216.                 self.texture_in_location = gl::GetAttribLocation(
  217.                     current_fragment_program.as_ref().unwrap().program(),
  218.                     CString::new("aTexCoord").unwrap().as_ptr(),
  219.                 );
  220.                 println!("texture_in_location: {}", self.texture_in_location);
  221.             }
  222.  
  223.             self.vertex_array_object = Some(VertexArrayObject::new());
  224.             self.vertex_buffer_object = Some(VertexBufferObject::new());
  225.  
  226.             println!(
  227.                 "frame has dimensions w: {}, h: {}
  228.                Gonna create textures:
  229.                Y: {}x{}
  230.                U: {}x{}
  231.                V: {}x{}",
  232.                 frame_width,
  233.                 frame_height,
  234.                 width[0],
  235.                 height[0],
  236.                 width[1],
  237.                 height[1],
  238.                 width[2],
  239.                 height[2]
  240.             );
  241.             unsafe {
  242.                 //gl::BindBuffer(gl::ARRAY_BUFFER, self.vertex_buffer_object.as_ref().unwrap().inner_value().unwrap());
  243.                 self.vertex_array_object
  244.                     .as_ref()
  245.                     .unwrap()
  246.                     .activate()
  247.                     .unwrap();
  248.                 self.vertex_buffer_object
  249.                     .as_ref()
  250.                     .unwrap()
  251.                     .activate()
  252.                     .unwrap();
  253.                 //println!("did bind, vertex_in_location: {}, texture_in_location: {}", self.vertex_in_location, self.texture_in_location);
  254.                 gl::BufferData(
  255.                     gl::ARRAY_BUFFER,
  256.                     std::mem::size_of_val(&GLRenderer::VERTICES_TEXTURES) as isize,
  257.                     GLRenderer::VERTICES_TEXTURES.as_ptr() as *const libc::c_void,
  258.                     gl::STATIC_DRAW,
  259.                 );
  260.  
  261.                 gl::VertexAttribPointer(
  262.                     self.vertex_in_location as u32,
  263.                     3,
  264.                     gl::FLOAT,
  265.                     gl::FALSE,
  266.                     (5 * ::std::mem::size_of::<f32>()) as i32,
  267.                     0 as *const libc::c_void,
  268.                 );
  269.                 gl::EnableVertexAttribArray(self.vertex_in_location as u32);
  270.  
  271.                 gl::VertexAttribPointer(
  272.                     self.texture_in_location as u32,
  273.                     2,
  274.                     gl::FLOAT,
  275.                     gl::FALSE,
  276.                     (5 * ::std::mem::size_of::<f32>()) as i32,
  277.                     3 as *const libc::c_void,
  278.                 );
  279.                 gl::EnableVertexAttribArray(self.texture_in_location as u32);
  280.  
  281.                 self.texture_location[0] = gl::GetUniformLocation(
  282.                     current_fragment_program.as_ref().unwrap().program(),
  283.                     CString::new("tex_y").unwrap().as_ptr(),
  284.                 );
  285.                 self.texture_location[1] = gl::GetUniformLocation(
  286.                     current_fragment_program.as_ref().unwrap().program(),
  287.                     CString::new("tex_u").unwrap().as_ptr(),
  288.                 );
  289.                 self.texture_location[2] = gl::GetUniformLocation(
  290.                     current_fragment_program.as_ref().unwrap().program(),
  291.                     CString::new("tex_v").unwrap().as_ptr(),
  292.                 );
  293.  
  294.                 println!("tex_y: {}", self.texture_location[0]);
  295.                 println!("tex_u: {}", self.texture_location[1]);
  296.                 println!("tex_v: {}", self.texture_location[2]);
  297.  
  298.                 //alpha = program->uniformLocation("alpha");
  299.                 self.alpha = gl::GetUniformLocation(
  300.                     current_fragment_program.as_ref().unwrap().program(),
  301.                     CString::new("alpha").unwrap().as_ptr(),
  302.                 );
  303.                 println!("alpha: {}", self.alpha);
  304.  
  305.                 gl::Uniform1f(self.alpha, 1.0);
  306.                 self.texture_format = gl::GetUniformLocation(
  307.                     current_fragment_program.as_ref().unwrap().program(),
  308.                     CString::new("tex_format").unwrap().as_ptr(),
  309.                 );
  310.                 println!("tex_format: {}", self.texture_format);
  311.             }
  312.  
  313.             //TODO: delete these textures
  314.             unsafe {
  315.                 gl::GenTextures(
  316.                     GLRenderer::TEXTURE_NUMBER as i32,
  317.                     self.texture_id.as_mut_ptr(),
  318.                 );
  319.                 gl::GenBuffers(3, self.pixel_buffer_objects.as_mut_ptr());
  320.             }
  321.             for i in 0..GLRenderer::TEXTURE_NUMBER {
  322.                 println!("texture_id[i]= {}", self.texture_id[i as usize]);
  323.                 unsafe {
  324.                     gl::BindTexture(gl::TEXTURE_2D, self.texture_id[i as usize]);
  325.                     //gl::BindTexture(gl::TEXTURE_2D, self.texture_id[i as usize]);
  326.                     //We need to call glBufferData at least once, so then we can use glBufferSubData
  327.                     gl::BindBuffer(
  328.                         gl::PIXEL_UNPACK_BUFFER,
  329.                         self.pixel_buffer_objects[i as usize],
  330.                     );
  331.                     gl::BufferData(
  332.                         gl::PIXEL_UNPACK_BUFFER as GLenum,
  333.                         (texture_size[i as usize] * std::mem::size_of::<u8>() as isize)as gl::types::GLsizeiptr,
  334.                         //std::mem::transmute(ffmpeg_decoded_packet.data(i.into())),//.as_ptr() as *mut libc::c_void,
  335.                         std::ptr::null(),
  336.                         gl::STREAM_DRAW as GLenum,
  337.                     );
  338.                     gl::TexImage2D(
  339.                         gl::TEXTURE_2D,
  340.                         0,
  341.                         GLRenderer::translate_gl(
  342.                             &pixel_format_translator
  343.                                 .as_ref()
  344.                                 .unwrap()
  345.                                 .yuv_internal_formats[i as usize],
  346.                         ),
  347.                         width[i as usize],
  348.                         height[i as usize],
  349.                         0,
  350.                         GLRenderer::translate_gl(
  351.                             &pixel_format_translator.as_ref().unwrap().yuv_gl_format[i as usize],
  352.                         ) as GLenum,
  353.                         GLRenderer::translate_gl(
  354.                             &pixel_format_translator.as_ref().unwrap().data_type,
  355.                         ) as GLenum,
  356.                         std::ptr::null(),
  357.                     );
  358.                     gl::TexParameteri(gl::TEXTURE_2D, gl::TEXTURE_MIN_FILTER, gl::LINEAR as GLint);
  359.                     gl::TexParameteri(gl::TEXTURE_2D, gl::TEXTURE_MAG_FILTER, gl::LINEAR as GLint);
  360.                     gl::TexParameteri(
  361.                         gl::TEXTURE_2D,
  362.                         gl::TEXTURE_WRAP_S,
  363.                         gl::CLAMP_TO_EDGE as GLint,
  364.                     );
  365.                     gl::TexParameteri(
  366.                         gl::TEXTURE_2D,
  367.                         gl::TEXTURE_WRAP_T,
  368.                         gl::CLAMP_TO_EDGE as GLint,
  369.                     );
  370.                     gl::TexParameteri(
  371.                         gl::TEXTURE_2D,
  372.                         gl::TEXTURE_WRAP_R,
  373.                         gl::CLAMP_TO_EDGE as GLint,
  374.                     );
  375.                 }
  376.             }
  377.         }
  378.     }
  379.  
    /// Uploads the planes of `frame` into the per-plane PBOs/textures and
    /// draws the textured quad. Expects `parse_frame` to have run first so
    /// the shader, VAO/VBO, textures and PBOs already exist (unwraps panic
    /// otherwise).
    pub fn draw(&self, frame: &DecodedPacket) {
        //NOTE(review): linesize, plane_size and texture_size are currently
        //unused here — the stride-aware copy that used them is commented
        //out below.
        let (width, height, linesize, plane_size, texture_size) =
            GLRenderer::dimension_info(&frame);
        let pixel_format_translator =
            Some(liborwell_rust::common::pixel_formats_impl::pixel_format_yuv420p);
        let ffmpeg_decoded_packet = match frame {
            //DecodedPacket::FFMPEG(ffmpeg_decoded_packet) => ffmpeg_decoded_packet,
            DecodedPacket::FFMPEG_SIMULATED(ffmpeg_decoded_packet) => ffmpeg_decoded_packet,
            _ => panic!("packet not supported yet"),
        };
        unsafe {
            gl::ClearColor(0.0f32, 0.0f32, 0.0f32, 1.0f32);
            gl::Clear(gl::COLOR_BUFFER_BIT | gl::DEPTH_BUFFER_BIT);
        }

        //Re-activate whichever shader parse_frame selected.
        let current_fragment_program: Option<&Shader>;
        match self.current_fragment_program.as_ref().unwrap() {
            CurrentProgram::PLANAR => current_fragment_program = self.planar_program.as_ref(),
            CurrentProgram::PACKED => current_fragment_program = self.packed_program.as_ref(),
        }
        current_fragment_program.as_ref().unwrap().activate();
        self.vertex_array_object
            .as_ref()
            .unwrap()
            .activate()
            .unwrap();
        //To be done
        for i in 0..1 {
            for j in 0..GLRenderer::TEXTURE_NUMBER {
                //m_pbo[i][j] = QOpenGLBuffer(QOpenGLBuffer::PixelUnpackBuffer);
                //m_pbo[i][j].setUsagePattern(QOpenGLBuffer::StreamDraw);
                //m_pbo[i][j].create();
            }
        }
        for j in 0..GLRenderer::TEXTURE_NUMBER {
            unsafe {
                //Texture unit j <-> texture j <-> PBO j.
                gl::ActiveTexture(gl::TEXTURE0 + (j as u32));
                gl::BindTexture(gl::TEXTURE_2D, self.texture_id[j as usize]);
                gl::BindBuffer(
                    gl::PIXEL_UNPACK_BUFFER,
                    self.pixel_buffer_objects[j as usize],
                );
            }

            /*
                We're gonna write to our Pixel Buffer Object line by line ignoring the stride.
                There are height[j] lines for the current plane j, and linesize[j] is the stride
                for plane j.
            */
            unsafe {
                //NOTE(review): this uploads only width[j] bytes — i.e. a
                //single row of plane j, not the whole plane. The full,
                //stride-aware copy is the commented-out loop below; confirm
                //whether this truncated upload is intentional.
                gl::BufferSubData(
                    gl::PIXEL_UNPACK_BUFFER as GLenum,
                    0,
                    (width[j as usize] * std::mem::size_of::<u8>() as i32)as gl::types::GLsizeiptr,
                    ffmpeg_decoded_packet.data(j as usize).as_ptr()
                        as *const libc::c_void,
                );
                //println!("width[j]: {}", width[j as usize]);
            }
            /*
            for i in 0..(height[j as usize] - 1) {
                let offset = (i * (linesize[j as usize] as i32)) as isize;
                let plane_pointer = ffmpeg_decoded_packet.data(j.into()).as_ptr();
                unsafe {
                    gl::BufferSubData(
                        gl::PIXEL_UNPACK_BUFFER as GLenum,
                        offset,
                        width[j as usize] as usize as gl::types::GLsizeiptr,
                        plane_pointer.offset(offset) as *const u8 as *const libc::c_void,
                    );
                }
            }
            */
            unsafe {
                //With a PIXEL_UNPACK_BUFFER bound, the null data pointer
                //means "read pixels from the bound PBO at offset 0".
                gl::TexSubImage2D(
                    gl::TEXTURE_2D,
                    0,
                    0,
                    0,
                    width[j as usize],
                    height[j as usize],
                    GLRenderer::translate_gl(
                        &pixel_format_translator.as_ref().unwrap().yuv_gl_format[j as usize],
                    ) as GLenum,
                    GLRenderer::translate_gl(&pixel_format_translator.as_ref().unwrap().data_type)
                        as GLenum,
                    std::ptr::null(), //ffmpeg_decoded_packet.data(j as usize).as_ptr() as *const u8 as *const libc::c_void//
                );
                //Point sampler j at texture unit j.
                gl::Uniform1i(self.texture_location[j as usize], j as GLint);
            }
        }
        unsafe {
            gl::Uniform1i(self.texture_format, 0);

            self.vertex_array_object
                .as_ref()
                .unwrap()
                .activate()
                .unwrap();
            gl::DrawArrays(gl::TRIANGLE_STRIP, 0, 4);
        }
    }
  483. }
  484.  
//High-level video renderer: owns the GL scene and the frame-consumption
//callback.
pub struct SmartVideoRenderer {
    //Presumably invoked when a frame has been consumed (set via
    //set_on_consume); not yet called anywhere in this file — TODO confirm.
    on_consume: OnConsume,
    stoppable_thread: StoppableThread,
    //Interior mutability: render() receives &self but must mutate the scene.
    scene: RefCell<GLRenderer>,
}
  490.  
  491. impl Renderer for SmartVideoRenderer {
  492.     fn set_on_consume(&mut self, f: OnConsume) {
  493.         self.on_consume = f;
  494.     }
  495. }
  496.  
  497. impl SmartVideoRenderer {
  498.     pub fn new(on_consume: OnConsume) -> Self {
  499.         SmartVideoRenderer {
  500.             on_consume: on_consume,
  501.             stoppable_thread: StoppableThread::new(),
  502.             scene: RefCell::new(GLRenderer::new()),
  503.         }
  504.     }
  505. }
  506.  
  507. impl Runnable for SmartVideoRenderer {
  508.     fn run(&mut self) {
  509.         let width = 1280;
  510.         let height = 720;
  511.         let mut f = File::open("/home/dev/orwell/lab/orwell_gtk/assets/vaporwave.yuv")
  512.             .expect("Unable to open file");
  513.         let on_consume = self.on_consume.clone();
  514.         let mut decoded_packet =
  515.             DecodedPacket::FFMPEG_SIMULATED(FfmpegDecodedPacketSimulated::new(width, height));
  516.         //let decoded_packet = FfmpegDecodedPacketSimulated::new(width,height);
  517.         let ffmpeg_decoded_packet = match &mut decoded_packet {
  518.             DecodedPacket::FFMPEG_SIMULATED(f) => f,
  519.             _ => panic!("packet not supported"),
  520.         };
  521.         f.read_exact(ffmpeg_decoded_packet.y.as_mut_slice())
  522.             .unwrap();
  523.         f.read_exact(ffmpeg_decoded_packet.u.as_mut_slice())
  524.             .unwrap();
  525.         f.read_exact(ffmpeg_decoded_packet.v.as_mut_slice())
  526.             .unwrap();
  527.     }
  528. }
  529.  
  530. impl renderer::Renderer for SmartVideoRenderer {
  531.     fn initialize(&self) -> Result<(), RendererError> {
  532.         let renderer = unsafe {
  533.             let p = gl::GetString(gl::RENDERER);
  534.             CStr::from_ptr(p as *const i8)
  535.         };
  536.         println!("Renderer: {}", renderer.to_string_lossy());
  537.  
  538.         let version = unsafe {
  539.             let p = gl::GetString(gl::VERSION);
  540.             CStr::from_ptr(p as *const i8)
  541.         };
  542.         println!("OpenGL version supported: {}", version.to_string_lossy());
  543.         Ok(())
  544.     }
  545.  
  546.     fn finalize(&self) {
  547.         //self.scene.replace(SmartVideoRenderer::new());
  548.     }
  549.  
  550.     fn render(&self) {
  551.         println!("render called");
  552.         //self.scene.borrow_mut().draw();
  553.         let width = 1280;
  554.         let height = 720;
  555.         let mut f = File::open("/home/dev/orwell/lab/orwell_gtk/assets/vaporwave.yuv")
  556.             .expect("Unable to open file");
  557.  
  558.         let mut ffmpeg_decoded_packet = FfmpegDecodedPacketSimulated::new(width, height);
  559.        
  560.         f.read_exact(ffmpeg_decoded_packet.y.as_mut_slice())
  561.             .unwrap();
  562.         f.read_exact(ffmpeg_decoded_packet.u.as_mut_slice())
  563.             .unwrap();
  564.         f.read_exact(ffmpeg_decoded_packet.v.as_mut_slice())
  565.             .unwrap();
  566.        
  567.         let decoded_packet = DecodedPacket::FFMPEG_SIMULATED(ffmpeg_decoded_packet);
  568.         self.scene.borrow_mut().init_vertex_stuff();
  569.         self.scene.borrow_mut().parse_frame(&decoded_packet);
  570.         self.scene.borrow_mut().draw(&decoded_packet);
  571.         //Ok(());
  572.     }
  573. }
  574.  
RAW Paste Data