Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
- CryVideoOpenCV.cpp
- /*************************************************************************
- Cry Video Source File.
- Copyright (C), Marwin Misselhorn 2012
- -------------------------------------------------------------------------
- *************************************************************************/
- #include "StdAfx.h"
- #include "CryVideoOpenCV.h"
- #include <CryLibrary.h>
- #include <IGameFramework.h>
- #include <IRenderer.h>
- #include <windows.h>
- #include <iostream>
- #include <opencv/cxcore.h>
- #include <opencv/cv.h>
- #include <opencv/highgui.h>
- #include "pixfc-sse.h"
- #include <pthread.h>
- //Test Network Sound
- /*
- #include <SDL.h>
- #include <SDL_thread.h>
- extern "C" {
- #include <libavformat/avformat.h>
- #include <libavcodec/avcodec.h>
- }
- #define SDL_AUDIO_BUFFER_SIZE 1024
- typedef struct PacketQueue
- {
- AVPacketList *first_pkt, *last_pkt;
- int nb_packets;
- int size;
- SDL_mutex *mutex;
- SDL_cond *cond;
- } PacketQueue;
- PacketQueue audioq;
- int audioStream = -1;
- int videoStream = -1;
- int quit = 0;
- SDL_Surface* screen = NULL;
- SDL_Surface* surface = NULL;
- AVFormatContext* pFormatCtx = NULL;
- AVCodecContext* aCodecCtx = NULL;
- AVCodecContext* pCodecCtx = NULL;
- AVCodecContext *aCodec = NULL;
- */
- /*
- #include <SDL.h>
- #include <GL/glew.h>
- #include <GL/wglew.h>
- */
- // We need shell api for Current Root Extraction.
- #include "shlwapi.h"
- #pragma comment(lib, "shlwapi.lib")
- using namespace std;
- using namespace cv;
- //Multi-Threading support
// Shorthand for a raw 8-bit pixel byte.
typedef unsigned char byte;
// Per-thread work item describing one horizontal slice of a PixFC
// pixel-format conversion (see Thread() below).
typedef struct _work_t {
	struct PixFcSSE *pixfc;  // shared PixFC converter context (not owned by the worker)
	byte *in, *out;          // source / destination pixel buffers for this slice
	uint32_t startIndex;     // first pixel index of this thread's slice
} work_t;
- void* Thread(void *arg)
- {
- work_t *param = (work_t*) arg;
- param->pixfc->convert(param->pixfc, param->in, param->out, param->startIndex);
- return NULL;
- }
- //Test Network Sound
- /*
- void show_frame(IplImage* img)
- {
- if (!screen)
- {
- screen = SDL_SetVideoMode(img->width, img->height, 0, 0);
- if (!screen)
- {
- fprintf(stderr, "SDL: could not set video mode - exiting\n");
- exit(1);
- }
- }
- // Assuming IplImage packed as BGR 24bits
- SDL_Surface* surface = SDL_CreateRGBSurfaceFrom((void*)img->imageData,
- img->width,
- img->height,
- img->depth * img->nChannels,
- img->widthStep,
- 0xff0000, 0x00ff00, 0x0000ff, 0
- );
- SDL_BlitSurface(surface, 0, screen, 0);
- SDL_Flip(screen);
- }
- void packet_queue_init(PacketQueue *q)
- {
- memset(q, 0, sizeof(PacketQueue));
- q->mutex = SDL_CreateMutex();
- q->cond = SDL_CreateCond();
- }
- int packet_queue_put(PacketQueue *q, AVPacket *pkt)
- {
- AVPacketList *pkt1;
- if (av_dup_packet(pkt) < 0)
- {
- return -1;
- }
- //pkt1 = (AVPacketList*) av_malloc(sizeof(AVPacketList));
- pkt1 = (AVPacketList*) malloc(sizeof(AVPacketList));
- if (!pkt1) return -1;
- pkt1->pkt = *pkt;
- pkt1->next = NULL;
- SDL_LockMutex(q->mutex);
- if (!q->last_pkt)
- q->first_pkt = pkt1;
- else
- q->last_pkt->next = pkt1;
- q->last_pkt = pkt1;
- q->nb_packets++;
- q->size += pkt1->pkt.size;
- SDL_CondSignal(q->cond);
- SDL_UnlockMutex(q->mutex);
- return 0;
- }
- static int packet_queue_get(PacketQueue *q, AVPacket *pkt, int block)
- {
- AVPacketList *pkt1;
- int ret;
- SDL_LockMutex(q->mutex);
- for (;;)
- {
- if( quit)
- {
- ret = -1;
- break;
- }
- pkt1 = q->first_pkt;
- if (pkt1)
- {
- q->first_pkt = pkt1->next;
- if (!q->first_pkt)
- q->last_pkt = NULL;
- q->nb_packets--;
- q->size -= pkt1->pkt.size;
- *pkt = pkt1->pkt;
- //av_free(pkt1);
- free(pkt1);
- ret = 1;
- break;
- }
- else if (!block)
- {
- ret = 0;
- break;
- }
- else
- {
- SDL_CondWait(q->cond, q->mutex);
- }
- }
- SDL_UnlockMutex(q->mutex);
- return ret;
- }
- int audio_decode_frame(AVCodecContext *aCodecCtx, uint8_t *audio_buf, int buf_size)
- {
- static AVPacket pkt;
- static uint8_t *audio_pkt_data = NULL;
- static int audio_pkt_size = 0;
- int len1, data_size;
- for (;;)
- {
- while (audio_pkt_size > 0)
- {
- data_size = buf_size;
- len1 = avcodec_decode_audio2(aCodecCtx, (int16_t*)audio_buf, &data_size,
- audio_pkt_data, audio_pkt_size);
- if (len1 < 0)
- {
- // if error, skip frame
- audio_pkt_size = 0;
- break;
- }
- audio_pkt_data += len1;
- audio_pkt_size -= len1;
- if (data_size <= 0)
- {
- // No data yet, get more frames
- continue;
- }
- // We have data, return it and come back for more later
- return data_size;
- }
- if (pkt.data)
- av_free_packet(&pkt);
- if (quit) return -1;
- if (packet_queue_get(&audioq, &pkt, 1) < 0) return -1;
- audio_pkt_data = pkt.data;
- audio_pkt_size = pkt.size;
- }
- }
- void audio_callback(void *userdata, Uint8 *stream, int len)
- {
- }
- void setup_ffmpeg(char* filename)
- {
- if (av_open_input_file(&pFormatCtx, filename, NULL, 0, NULL) != 0)
- {
- fprintf(stderr, "FFmpeg failed to open file %s!\n", filename);
- exit(-1);
- }
- if (av_find_stream_info(pFormatCtx) < 0)
- {
- fprintf(stderr, "FFmpeg failed to retrieve stream info!\n");
- exit(-1);
- }
- // Dump information about file onto standard error
- dump_format(pFormatCtx, 0, filename, 0);
- // Find the first video stream
- int i = 0;
- for (i; i < pFormatCtx->nb_streams; i++)
- {
- if (pFormatCtx->streams[i]->codec->codec_type == CODEC_TYPE_VIDEO && videoStream < 0)
- {
- videoStream = i;
- }
- if (pFormatCtx->streams[i]->codec->codec_type == CODEC_TYPE_AUDIO && audioStream < 0)
- {
- audioStream = i;
- }
- }
- if (videoStream == -1)
- {
- fprintf(stderr, "No video stream found in %s!\n", filename);
- exit(-1);
- }
- if (audioStream == -1)
- {
- fprintf(stderr, "No audio stream found in %s!\n", filename);
- exit(-1);
- }
- // Get a pointer to the codec context for the audio stream
- aCodecCtx = pFormatCtx->streams[audioStream]->codec;
- // Set audio settings from codec info
- SDL_AudioSpec wanted_spec;
- wanted_spec.freq = aCodecCtx->sample_rate;
- wanted_spec.format = AUDIO_S16SYS;
- wanted_spec.channels = aCodecCtx->channels;
- wanted_spec.silence = 0;
- wanted_spec.samples = SDL_AUDIO_BUFFER_SIZE;
- wanted_spec.callback = audio_callback;
- wanted_spec.userdata = aCodecCtx;
- aCodec = aCodecCtx;
- SDL_AudioSpec spec;
- //if (SDL_OpenAudio(&wanted_spec, &spec) < 0)
- //{
- // fprintf(stderr, "SDL_OpenAudio: %s\n", SDL_GetError());
- //exit(-1);
- //}
- AVCodec* aCodec = avcodec_find_decoder(aCodecCtx->codec_id);
- if (!aCodec)
- {
- fprintf(stderr, "Unsupported codec!\n");
- exit(-1);
- }
- avcodec_open(aCodecCtx, aCodec);
- // audio_st = pFormatCtx->streams[index]
- packet_queue_init(&audioq);
- //SDL_PauseAudio(0);
- // Get a pointer to the codec context for the video stream
- pCodecCtx = pFormatCtx->streams[videoStream]->codec;
- // Find the decoder for the video stream
- AVCodec* pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
- if (pCodec == NULL)
- {
- fprintf(stderr, "Unsupported codec!\n");
- exit(-1); // Codec not found
- }
- // Open codec
- if (avcodec_open(pCodecCtx, pCodec) < 0)
- {
- fprintf(stderr, "Unsupported codec!\n");
- exit(-1); // Could not open codec
- }
- }
- int TestFunc2(char* fileName)
- {
- AVFormatContext *pFormatCtx;
- AVCodec *pCodec_audio;
- AVCodecContext *aCodecCtx_audio = NULL;
- AVPacket packet;
- char* outfilename = "D:\\CryVideo_CE3_Build3.4.0\\Game\\Videos\\output.wav";
- int audioStream;
- int out_size, len, size;
- FILE *outfile;
- int16_t *outbuf;
- uint8_t inbuf[4096 + FF_INPUT_BUFFER_PADDING_SIZE];
- avcodec_init();
- // register all the codecs
- av_register_all();
- if(av_open_input_file(&pFormatCtx, fileName, NULL, 0, NULL) != 0) {
- cerr << "Archivo no encontrado" << endl;
- return -1; // Couldn't open file
- }
- if(av_find_stream_info(pFormatCtx) < 0) {
- cerr << " No encontro el stream de info" << endl;
- return -1;// Couldn't find stream information
- }
- dump_format(pFormatCtx, 0, fileName, 0);
- for(unsigned int i=0; i < pFormatCtx->nb_streams; i++)
- if(pFormatCtx->streams[i]->codec->codec_type == CODEC_TYPE_AUDIO)
- audioStream = i;
- aCodecCtx_audio = pFormatCtx->streams[audioStream]->codec;
- pCodec_audio = avcodec_find_decoder(aCodecCtx_audio->codec_id);
- if(pCodec_audio == NULL) {
- cerr<< "Unsupported codec!" << endl;
- return -1; // Codec not found
- }
- if(avcodec_open(aCodecCtx_audio, pCodec_audio) < 0) {
- cerr << "No se pudo abrir el codec de audio" << endl;
- return -1;
- }
- outbuf = (int16_t*) av_malloc(AVCODEC_MAX_AUDIO_FRAME_SIZE);
- outfile = fopen(outfilename, "wb");
- if (!outfile) {
- exit(1);
- }
- av_init_packet(&packet);
- packet.data = inbuf;
- while(av_read_frame(pFormatCtx, &packet) == 0) {
- if(packet.stream_index == audioStream) {
- size = packet.size;
- if (size == 0) {
- cerr << "Size = 0 " << endl;
- break;
- }
- while(size > 0) {
- out_size = AVCODEC_MAX_AUDIO_FRAME_SIZE;
- len = avcodec_decode_audio3(aCodecCtx_audio, outbuf, &out_size , &packet);
- //av_free_packet(&packet);
- cout << len << endl;
- if(len == -1) {
- cerr << "Error while decoding" << endl;
- return 1;
- }
- if(out_size > 0) {
- fwrite(outbuf, 1, out_size, outfile);
- }
- size -= len;
- packet.data += len;
- }
- }
- }
- av_free_packet(&packet);
- fclose(outfile);
- free(outbuf);
- cout << "END CODE" << endl;
- avcodec_close(aCodecCtx_audio);
- av_free(aCodecCtx_audio);
- return 0;
- }
- int TestFunc(char* fileName)
- {
- av_register_all();
- // Init SDL
- if (SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER))
- {
- fprintf(stderr, "Could not initialize SDL - %s\n", SDL_GetError());
- return -1;
- }
- // Init ffmpeg and setup some SDL stuff related to Audio
- setup_ffmpeg(fileName);
- VideoCapture cap(fileName);
- if (!cap.isOpened()) // check if we succeeded
- {
- std::cout << "Failed to load file!" << std::endl;
- return -1;
- }
- return 0;
- }
- */
//////////////////////////////////////////////////////////////////////////
// Initializes Root folder of the game.
// Resolves the executable's path, strips the trailing \Bin32 / \Bin64
// component and makes the resulting game root the current directory so
// relative asset paths (e.g. Game\Videos\...) resolve correctly.
//////////////////////////////////////////////////////////////////////////
void InitRootDir()
{
#ifdef WIN32
	WCHAR szExeFileName[_MAX_PATH];
	// BUGFIX: GetModuleFileNameW takes the buffer size in WCHARs, not bytes.
	// Passing sizeof(szExeFileName) (bytes) overstated the capacity by 2x
	// and could allow a buffer overrun on very long install paths.
	GetModuleFileNameW( GetModuleHandle(NULL), szExeFileName,
	                    sizeof(szExeFileName) / sizeof(szExeFileName[0]) );
	PathRemoveFileSpecW(szExeFileName);
	// Remove Bin32/Bin64 folder so we land on the engine root.
	WCHAR* lpPath = StrStrIW(szExeFileName,L"\\Bin32");
	if (lpPath)
		*lpPath = 0;
	lpPath = StrStrIW(szExeFileName,L"\\Bin64");
	if (lpPath)
		*lpPath = 0;
	SetCurrentDirectoryW( szExeFileName );
#endif
}
- CCryVideoOpenCV::CCryVideoOpenCV()
- {
- started = false;
- g_pCryVideoOpenCV = this;
- g_pCryVideo = 0;
- swapBGRA = false;
- renderToWindow = false;
- manual_conversion = false;
- manual_conversion_OpenCV = false;
- force_manual_conversion = false;
- mode = 0;
- videoFileName = "";
- video_capture = NULL;
- source_image = NULL;
- dest_image = NULL;
- width = 0;
- height = 0;
- sizeTotal = 0;
- pixfc = NULL;
- pixfcOpenCV = NULL;
- yuv = NULL;
- rgb = NULL;
- rgba = NULL;
- pid = NULL;
- job = NULL;
- numThreads = 0;
- YUVSize = 0;
- RGBSize = 0;
- InitRootDir();
- }
// Destructor: clears the global singleton pointer. All other cleanup
// (WebM instance, PixFC contexts, OpenGL resources) is currently disabled;
// the commented-out code below is kept for reference.
// NOTE(review): pixfc/pixfcOpenCV created in Init() are never destroyed —
// the destroy_pixfc calls are commented out, so those contexts leak.
CCryVideoOpenCV::~CCryVideoOpenCV()
{
	//if (m_pCryVideoWebM && mode == 2 || mode == 1)
	//CloseVideoFile();
	//Free WebM instance
	//if (m_pCryVideoWebM)
	//SAFE_DELETE(m_pCryVideoWebM);
	/*
	//Cleanup OpenGL
	glUseProgramObjectARB(0);
	if (PHandle)
	glDeleteObjectARB(PHandle);
	if (mhRC)
	{
	wglMakeCurrent( NULL, NULL );
	wglDeleteContext( mhRC );
	}
	if (mhWnd && mhDC)
	{
	ReleaseDC( mhWnd, mhDC );
	}
	mhWnd = NULL;
	mhDC = NULL;
	mhRC = NULL;
	*/
	//SAFE_DELETE(videoFileName);
	//if (pixfc)
	//destroy_pixfc(pixfc);
	//if (pixfcOpenCV)
	//destroy_pixfc(pixfcOpenCV);
	g_pCryVideoOpenCV = 0;
}
// One-time engine hookup. Caches the global environment and framework,
// creates the WebM decoder instance, and builds two PixFC colour-conversion
// contexts: one for WebM playback (YUV420P -> BGR24/BGRA) and one for the
// OpenCV/AVI path (BGR24 -> BGRA/ARGB). For each context it tries SIMD
// variants from fastest to slowest (SSSE3 NNb -> SSSE3 -> SSE2 NNb -> NoSSE)
// and falls back to a manual-conversion flag if none is supported. Finally
// it sizes the conversion thread pool from the CPU core count.
// Always returns true; 'started' is the readiness flag.
bool CCryVideoOpenCV::Init(ISystem *pSystem, IGameFramework *pGameFramework, ICryVideo* pCryVideo)
{
	started = false;
	gEnv = pSystem->GetGlobalEnvironment();
	m_pFramework = pGameFramework;
	g_pCryVideo = pCryVideo;
	//Test Network Sound
	//TestFunc("D:\\CryVideo_CE3_Build3.4.0\\Game\\Videos\\Library\\FahrenheitJadeVision.avi");
	//Create new WebM instance
	m_pCryVideoWebM = pCryVideo->CreateWebMInstance();
	//Init YUV-RGB conversion process
	//Benchmarks recorded by the original author (Intel Quad):
	//1. SSSE3 Only+Fast = max. 59-61fps (Intel Quad)
	//2. SSSE3 Only+Slow = max. 59fps (Intel Quad)
	//3. SSE2 Only+Fast = max. 58fps (Intel Quad)
	//4. NOSSE +Fast = max. 53fps (Intel Quad)
	//5. Hamers conversion = max. 50fps (Intel Quad)
	PixFcPixelFormat input_format = PixFcYUV420P;
	PixFcPixelFormat output_format;
	// NOTE(review): PixFC contexts are created for a hard-coded 1280x720 here;
	// StartPlayback() later overwrites width/height/pixel_count from the
	// actual video — confirm PixFC supports that kind of in-place resize.
	width = 1280;
	height = 720;
	manual_conversion = false;
	// Output format depends on whether the renderer can consume 3-byte RGB.
	if (g_pCryVideo->IsRGB8Usable())
	{
		output_format = PixFcBGR24;
	}
	else
	{
		output_format = PixFcBGRA;
	}
	// Create struct pixfc WebM — cascade from fastest SIMD path to none.
	int result = create_pixfc(&pixfc, input_format, output_format, width, height, PixFcFlag_SSSE3OnlyNNb);
	if (result != 0)
	{
		result = create_pixfc(&pixfc, input_format, output_format, width, height, PixFcFlag_SSSE3Only);
		if (result != 0)
		{
			GameWarning("No SSSE3 support for WebM videos! Playback might be slower.");
			result = create_pixfc(&pixfc, input_format, output_format, width, height, PixFcFlag_SSE2OnlyNNb);
			if (result != 0)
			{
				GameWarning("No SSE2 support for WebM videos!! Playback is going to be slower.");
				result = create_pixfc(&pixfc, input_format, output_format, width, height, PixFcFlag_NoSSE);
				if (result != 0)
				{
					GameWarning("No SSE support for WebM videos!!! Doing manual conversion instead...");
					manual_conversion = true;
				}
			}
		}
	}
	// Create struct pixfc OpenCV (AVI path) — same SIMD cascade.
	PixFcPixelFormat input_formatOpenCV = PixFcBGR24;
	PixFcPixelFormat output_formatOpenCV;
	manual_conversion_OpenCV = false;
	//Check CVar: cv_swapBGRA swaps the channel order of the AVI output.
	ICVar* swapChannels = gEnv->pConsole->GetCVar("cv_swapBGRA");
	if (swapChannels && swapChannels->GetIVal() == 1)
		swapBGRA = true;
	else
		swapBGRA = false;
	if (swapBGRA)
	{
		output_formatOpenCV = PixFcARGB;
	}
	else
	{
		output_formatOpenCV = PixFcBGRA;
	}
	result = create_pixfc(&pixfcOpenCV, input_formatOpenCV, output_formatOpenCV, width, height, PixFcFlag_SSSE3OnlyNNb);
	if (result != 0)
	{
		result = create_pixfc(&pixfcOpenCV, input_formatOpenCV, output_formatOpenCV, width, height, PixFcFlag_SSSE3Only);
		if (result != 0)
		{
			GameWarning("No SSSE3 support for AVI videos! Playback might be slower.");
			result = create_pixfc(&pixfcOpenCV, input_formatOpenCV, output_formatOpenCV, width, height, PixFcFlag_SSE2OnlyNNb);
			if (result != 0)
			{
				GameWarning("No SSE2 support for AVI videos!! Playback is going to be slower.");
				result = create_pixfc(&pixfcOpenCV, input_formatOpenCV, output_formatOpenCV, width, height, PixFcFlag_NoSSE);
				if (result != 0)
				{
					GameWarning("No SSE support for AVI videos!!! Doing manual conversion instead...");
					manual_conversion_OpenCV = true;
				}
			}
		}
	}
	//Detect number of Cores: one conversion thread per logical processor.
	SYSTEM_INFO sysinfo;
	GetSystemInfo( &sysinfo );
	numThreads = sysinfo.dwNumberOfProcessors;
	CryLogAlways("CryVideo: Use %i thread(s) for conversion!", numThreads);
	// The AVI path only multi-threads on quad core or better; below that a
	// single conversion thread is used.
	if (numThreads < 4)
		numThreadsAVI = 1;
	else
		numThreadsAVI = numThreads;
	/*
	//Init OpenGL
	mhWnd =(HWND)gEnv->pRenderer->GetHWND();
	//get the device context (DC)
	mhDC = GetDC( mhWnd );
	// set the pixel format for the DC
	PIXELFORMATDESCRIPTOR pfd;
	ZeroMemory( &pfd, sizeof( pfd ) );
	pfd.nSize = sizeof( pfd );
	pfd.nVersion = 1;
	pfd.dwFlags = PFD_DRAW_TO_WINDOW | PFD_SUPPORT_OPENGL |
	PFD_DOUBLEBUFFER;
	//pfd.iPixelType = PFD_TYPE_RGBA;
	pfd.cColorBits = 24;
	pfd.cDepthBits = 16;
	pfd.iLayerType = PFD_MAIN_PLANE;
	int format = ChoosePixelFormat( mhDC, &pfd );
	SetPixelFormat( mhDC, format, &pfd );
	//create the render context (RC)
	mhRC = wglCreateContext( mhDC );
	//make it the current render context
	wglMakeCurrent( mhDC, mhRC );
	GLenum err = glewInit();
	if (GLEW_OK != err)
	{
	// Problem: glewInit failed, something is seriously wrong.
	CryLogAlways("Error: %s\n", glewGetErrorString(err));
	}
	wglMakeCurrent( NULL, NULL );
	*/
	started = true;
	return true;
}
// Opens a video file for playback.
// format == 1 selects the OpenCV/AVI path (creates the capture plus the
// source/destination IplImages sized to the first frame); any other value
// selects the WebM path (delegates to the WebM decoder, swapping U/V planes
// on DX11 renderers). Also samples the cv_renderToSecondWindow debug CVar.
// Returns true on success, false if the file could not be opened/decoded.
// NOTE(review): videoFileName stores the caller's pointer via a const-cast —
// it is only valid as long as the caller keeps the string alive; StartPlayback
// re-opens the capture from it later. Confirm callers guarantee this.
bool CCryVideoOpenCV::LoadVideoFile(const char* fileName, int format)
{
	mode = format;
	//Check Debug CVars
	ICVar* renderToSecondWindow = gEnv->pConsole->GetCVar("cv_renderToSecondWindow");
	if (renderToSecondWindow && renderToSecondWindow->GetIVal() == 1)
		renderToWindow = true;
	else
		renderToWindow = false;
	//ICVar* forceConversion = gEnv->pConsole->GetCVar("cv_forceManualConversion");
	//if (forceConversion && forceConversion->GetIVal() == 1)
	//force_manual_conversion = true;
	//else
	//force_manual_conversion = false;
	if (mode == 1) //AVI
	{
		if (video_capture)
			CloseVideoFile();
		videoFileName = (char*)fileName;
		try
		{
			video_capture = cvCreateFileCapture(fileName);
			if (!video_capture)
			{
				CryLogAlways("OpenCV: Couldn't create file capture. Something is wrong with the video file. Maybe the right codec is not installed on the system?");
				return false;
			}
			// Pull one frame to learn the video dimensions, then size the
			// working images; dest gets 3 or 4 channels depending on whether
			// the renderer accepts RGB8 directly.
			source_image = cvCreateImage(cvGetSize(cvQueryFrame(video_capture)),IPL_DEPTH_8U,3);
			if (g_pCryVideo->IsRGB8Usable())
			{
				dest_image = cvCreateImage(cvGetSize(source_image),IPL_DEPTH_8U,3);
			}
			else
			{
				dest_image = cvCreateImage(cvGetSize(source_image),IPL_DEPTH_8U,4);
			}
			// Rewind to the start since a frame was consumed above.
			cvSetCaptureProperty(video_capture, CV_CAP_PROP_POS_AVI_RATIO, 0);
			return true;
		}
		catch (...)
		{
			// NOTE(review): OpenCV failures are silently swallowed here; the
			// function falls through to return false below.
		}
	}
	else //WebM
	{
		m_pCryVideoWebM->CloseWebMFile();
		videoFileName = (char*)fileName;
		// DX11 expects the U and V planes swapped relative to DX9.
		bool swapuv = false;
		if (gEnv->pRenderer->GetRenderType() == eRT_DX11)
			swapuv = true;
		bool result = m_pCryVideoWebM->LoadWebMFile(fileName, swapuv);
		if (result)
			return true;
	}
	return false;
}
- bool CCryVideoOpenCV::CloseVideoFile()
- {
- if (mode == 1) //AVI
- {
- try
- {
- if (video_capture)
- {
- cvReleaseCapture(&video_capture);
- }
- }
- catch (...)
- {
- }
- //Render to Window
- ICVar* renderWindow = gEnv->pConsole->GetCVar("cv_renderToSecondWindow");
- if (renderWindow && renderWindow->GetIVal() == 1)
- cvDestroyAllWindows();
- video_capture = NULL;
- videoFileName = NULL;
- try
- {
- delete(rgb);
- delete(rgba);
- delete(job);
- delete(pid);
- delete(source_image);
- delete(dest_image);
- }
- catch (...)
- {
- }
- return true;
- }
- else //WebM
- {
- bool result = m_pCryVideoWebM->CloseWebMFile();
- videoFileName = NULL;
- //Render to Window
- ICVar* renderWindow = gEnv->pConsole->GetCVar("cv_renderToSecondWindow");
- if (renderWindow && renderWindow->GetIVal() == 1)
- cvDestroyAllWindows();
- /*
- //Cleanup OpenGL
- glUseProgramObjectARB(0);
- if (PHandle)
- glDeleteObjectARB(PHandle);
- if (mhRC)
- wglMakeCurrent( NULL, NULL );
- */
- try
- {
- delete(yuv);
- delete(rgba);
- delete(job);
- delete(pid);
- }
- catch (...)
- {
- }
- if (result)
- return true;
- }
- return false;
- }
// Prepares playback of the currently loaded file and returns a pointer to
// the first frame's pixel data (NULL on failure). The returned buffer is
// owned by this object / OpenCV — callers must not free it.
// AVI path: reads the first frame to learn width/height, allocates the
// conversion buffers and thread pool, performs one conversion to prime the
// output, then re-opens the capture from videoFileName to rewind.
// WebM path: allocates YUV/RGB buffers, sets up the thread pool, decodes the
// first frame via GetNextFrame(), then closes and re-opens the WebM file.
char* CCryVideoOpenCV::StartPlayback()
{
	char* imageData = NULL;
	if (mode == 1) //AVI
	{
		CryLogAlways("OpenCV: Schedule AVI Playback");
		try
		{
			// source_image now points at the capture-owned frame buffer,
			// leaking the IplImage allocated in LoadVideoFile().
			source_image=cvQueryFrame(video_capture);
			width = source_image->width;
			height = source_image->height;
			//Prepare conversion buffers (3 bytes/px in, 4 bytes/px out).
			rgb = new char[(width*height)*3];
			rgba = new char[(width*height)*4];
			//Update conversion pointer
			// NOTE(review): this mixes the two PixFC contexts — height and
			// pixel_count are read/written on 'pixfc' (the WebM context) while
			// width is set on 'pixfcOpenCV'. It only works if both contexts
			// happen to agree; confirm whether 'pixfc->height' should be
			// 'pixfcOpenCV->height' here.
			pixfcOpenCV->width = width;
			pixfc->height = height/numThreadsAVI;
			pixfcOpenCV->pixel_count = pixfcOpenCV->width * pixfc->height;
			YUVSize = pixfcOpenCV->pixel_count*3;
			RGBSize = pixfcOpenCV->pixel_count*4;
			//Create multi thread object
			job = new work_t[numThreadsAVI];
			pid = new pthread_t[numThreadsAVI];
			for (int k=0;k<numThreadsAVI;k++)
			{
				job[k].pixfc = pixfcOpenCV;
				// NOTE(review): startIndex is 0 for every AVI job while the
				// WebM path uses k*YUVSize — verify the AVI slices are not all
				// converting the same region.
				job[k].startIndex = 0;
				job[k].out = (byte*)rgba + (k*RGBSize);
			}
			// Prime one conversion so 'imageData' below points at valid pixels.
			if (g_pCryVideo->IsRGB8Usable())
			{
				if (swapBGRA)
					cvCvtColor(source_image, dest_image, CV_BGR2RGB);
			}
			else
			{
				if (manual_conversion_OpenCV || force_manual_conversion)
				{
					if (swapBGRA)
						cvCvtColor(source_image, dest_image, CV_BGR2RGBA);
					else
						cvCvtColor(source_image, dest_image, CV_BGR2BGRA);
				}
				else
				{
					// NOTE(review): this re-points 'rgb' at OpenCV's frame
					// buffer, leaking the new[] block allocated above.
					rgb = source_image->imageData;
					pixfcOpenCV->convert(pixfcOpenCV, rgb, rgba, 0);
				}
			}
			// Rewind to the beginning of the clip.
			cvSetCaptureProperty(video_capture, CV_CAP_PROP_POS_AVI_RATIO, 0);
			// Pick which buffer the caller receives, depending on path taken.
			if (manual_conversion_OpenCV || force_manual_conversion || g_pCryVideo->IsRGB8Usable())
				if (!g_pCryVideo->IsRGB8Usable() || swapBGRA)
					imageData = dest_image->imageData;
				else
					imageData = source_image->imageData;
			else
				imageData = rgba;
			// Re-open the capture from the stored filename as a hard rewind.
			cvReleaseCapture(&video_capture);
			video_capture = cvCreateFileCapture(videoFileName);
			//Render to Window (debug preview at half renderer resolution)
			if (renderToWindow)
			{
				cvNamedWindow("CryVideo Player",CV_WINDOW_FREERATIO);
				cvResizeWindow("CryVideo Player", gEnv->pRenderer->GetWidth() / 2, gEnv->pRenderer->GetHeight() / 2);
				cvMoveWindow("CryVideo Player", 0,0);
			}
			if (!video_capture)
			{
				CryLogAlways("OpenCV: Couldn't create file capture. Something is wrong with the video file. Maybe the right codec is not installed on the system?");
				imageData = NULL;
			}
		}
		catch (...)
		{
			// Any OpenCV/allocation failure maps to a NULL frame.
			imageData = NULL;
		}
	}
	else //WebM
	{
		CryLogAlways("OpenCV: Schedule WebM Playback");
		width = GetVideoWidth();
		height = GetVideoHeight();
		sizeTotal = width * height;
		//Init YUV-RGB Vars: YUV420 is 1.5 bytes per pixel (Y + U/4 + V/4).
		yuv = new char[sizeTotal + ((sizeTotal / 4) * 2)];
		if (!g_pCryVideo->IsRGB8Usable())
			rgba = new char[sizeTotal * 4];
		else
			rgba = new char[sizeTotal * 3];
		//Render to Window (debug preview)
		if (renderToWindow)
		{
			cvNamedWindow("CryVideo Player",CV_WINDOW_FREERATIO);
			cvResizeWindow("CryVideo Player", gEnv->pRenderer->GetWidth() / 2, gEnv->pRenderer->GetHeight() / 2);
			cvMoveWindow("CryVideo Player", 0,0);
		}
		//Update conversion pointer: each thread converts a horizontal slice.
		pixfc->width = width;
		pixfc->height = height/numThreads;
		pixfc->pixel_count = sizeTotal;
		YUVSize = (uint32_t)(sizeTotal/numThreads);
		RGBSize = (sizeTotal*4)/numThreads;
		//Create multi thread object
		job = new work_t[numThreads];
		pid = new pthread_t[numThreads];
		for (int k=0;k<numThreads;k++)
		{
			job[k].pixfc = pixfc;
			job[k].startIndex = k*YUVSize;
			job[k].out = (byte*)rgba + (k*RGBSize);
		}
		//Get first frame
		imageData = GetNextFrame();
		//Re-init file: close and re-open so playback starts from frame 0.
		m_pCryVideoWebM->CloseWebMFile();
		bool swapuv = false;
		if (gEnv->pRenderer->GetRenderType() == eRT_DX11)
			swapuv = true;
		m_pCryVideoWebM->LoadWebMFile(videoFileName, swapuv);
	}
	/*
	//Setup OpenGL
	useOpenGL = false;
	ICVar* OpenGL = gEnv->pConsole->GetCVar("cv_useOpenGL");
	if (OpenGL && OpenGL->GetIVal() == 1)
	useOpenGL = true;
	//make it the current render context
	wglMakeCurrent( mhDC, mhRC );
	GLhandleARB FSHandle;
	char *FProgram=
	"uniform sampler2D Ytex;\n"
	"void main(void) {\n"
	"  float nx, ny, r, g, b, y, u, v;\n"
	"  float u1,u2,v1,v2;\n"
	"  nx = gl_TexCoord[0].x;\n"
	"  ny = gl_TexCoord[0].y;\n"
	"  y = texture2D(Ytex, vec2( (nx), (ny)*(4.0/6.0) )).r;\n"
	"  u1 = texture2D(Ytex, vec2( (nx/2.0), (ny+4.0)/6.0 )).r;\n"
	"  u2 = texture2D(Ytex, vec2( (nx/2.0)+0.5, (ny+4.0)/6.0 )).r;\n"
	"  v1 = texture2D(Ytex, vec2( (nx/2.0), (ny+5.0)/6.0 )).r;\n"
	"  v2 = texture2D(Ytex, vec2( (nx/2.0)+0.5, (ny+5.0)/6.0 )).r;\n"
	"  y = 1.1643 * (y - 0.0625);\n"
	"  u = (u1+u2)/2.0 - 0.5;\n"
	"  v = (v1+v2)/2.0 - 0.5;\n"
	"  r = y + 1.5958 * v;\n"
	"  g = y - 0.39173 * u - 0.8129 * v;\n"
	"  b = y + 2.017 * u;\n"
	"  gl_FragColor=vec4(r,g,b,1.0);\n"
	"}\n";
	glMatrixMode(GL_PROJECTION);
	glLoadIdentity();
	glOrtho(-width,width,-height,height,-1,1);
	glViewport(0,0,width,height);
	glClearColor(0,0,0,0);
	glHint(GL_POLYGON_SMOOTH_HINT,GL_NICEST);
	// Set up program objects.
	PHandle=glCreateProgramObjectARB();
	FSHandle= glCreateShaderObjectARB(GL_FRAGMENT_SHADER_ARB);
	// Compile the shader.
	glShaderSourceARB(FSHandle,1,(const GLcharARB**)&FProgram,NULL);
	glCompileShaderARB(FSHandle);
	// Create a complete program object.
	glAttachObjectARB(PHandle,FSHandle);
	glLinkProgramARB(PHandle);
	// Finally, use the program.
	glUseProgramObjectARB(PHandle);
	// This might not be required, but should not hurt.
	glEnable(GL_TEXTURE_2D);
	glGenTextures(1, &texture); // Generate the YUV 4:2:0 handle
	glBindTexture(GL_TEXTURE_2D, texture); // and use it
	//glTexEnvf(GL_TEXTURE_2D, GL_TEXTURE_ENV_MODE, GL_REPLACE); // note that GL_REPLACE is certainly not the best thing for video mixing ...
	glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE,GL_DECAL);
	//glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); // Linear Filtering seem a good compromise between speed/quality
	//glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); // this seem the same thing for the magnification and minification
	if (mhRC)
	wglMakeCurrent( NULL, NULL );
	*/
	//Test Network Sound
	/*
	AVPacket packet;
	VideoCapture cap("D:\\CryVideo_CE3_Build3.4.0\\Game\\Videos\\Library\\FahrenheitJadeVision.avi");
	bool exit = false;
	while (exit == false)
	{
	if (av_read_frame(pFormatCtx, &packet) >= 0)
	{
	if (packet.stream_index == videoStream)
	{
	// Actually this is were SYNC between audio/video would happen.
	// Right now I assume that every VIDEO packet contains an entire video frame, and that's not true. A video frame can be made by multiple packets!
	// But for the time being, assume 1 video frame == 1 video packet,
	// so instead of reading the frame through ffmpeg, I read it through OpenCV.
	Mat frame;
	cap >> frame; // get a new frame from camera
	// do some processing on the frame, either as a Mat or as IplImage.
	// For educational purposes, applying a lame grayscale conversion
	IplImage ipl_frame = frame;
	for (int i = 0; i < ipl_frame.width * ipl_frame.height * ipl_frame.nChannels; i += ipl_frame.nChannels)
	{
	ipl_frame.imageData[i] = (ipl_frame.imageData[i] + ipl_frame.imageData[i+1] + ipl_frame.imageData[i+2])/3; //B
	ipl_frame.imageData[i+1] = (ipl_frame.imageData[i] + ipl_frame.imageData[i+1] + ipl_frame.imageData[i+2])/3; //G
	ipl_frame.imageData[i+2] = (ipl_frame.imageData[i] + ipl_frame.imageData[i+1] + ipl_frame.imageData[i+2])/3; //R
	}
	// Display it on SDL window
	show_frame(&ipl_frame);
	av_free_packet(&packet);
	}
	else if (packet.stream_index == audioStream)
	{
	packet_queue_put(&audioq, &packet);
	exit = true;
	}
	else
	{
	av_free_packet(&packet);
	}
	SDL_Event event;
	SDL_PollEvent(&event);
	}
	}
	EntityId playerId = gEnv->pGame->GetIGameFramework()->GetClientActorId();
	CryLogAlways("OpenCV: Playback should now be ready");
	//ISound *pNetworkSound = gEnv->pSoundSystem->CreateNetworkSound(this, 16, 44100, SDL_AUDIO_BUFFER_SIZE, playerId);
	ISound *pNetworkSound = gEnv->pSoundSystem->CreateNetworkSound(this, 16, 3000, SDL_AUDIO_BUFFER_SIZE, playerId);
	if (pNetworkSound)
	{
	CryLogAlways("Netzwerk Sound wird gestartet!");
	pNetworkSound->Play();
	}
	*/
	return imageData;
}
- //Test Network Sound
- /*
- bool CCryVideoOpenCV::FillDataBuffer(unsigned int nBitsPerSample, unsigned int nSamplesPerSecond, unsigned int nNumSamples, void* pData )
- {
- CryLogAlways("Audio Data requested!");
- CryLogAlways("Number Samples: %i!", nNumSamples);
- Uint8 *stream = new Uint8[nNumSamples*nBitsPerSample];
- CryLogAlways("Fill in Data!");
- AVCodecContext *aCodecCtx = aCodec;
- int len1, audio_size;
- static uint8_t audio_buf[(AVCODEC_MAX_AUDIO_FRAME_SIZE * 3) / 2];
- static unsigned int audio_buf_size = 0;
- static unsigned int audio_buf_index = 0;
- while (nNumSamples > 0)
- {
- if (audio_buf_index >= audio_buf_size)
- {
- // We have already sent all our data; get more
- audio_size = audio_decode_frame(aCodecCtx, audio_buf, sizeof(audio_buf));
- if(audio_size < 0)
- {
- // If error, output silence
- audio_buf_size = 1024; // arbitrary?
- memset(audio_buf, 0, audio_buf_size);
- }
- else
- {
- audio_buf_size = audio_size;
- }
- audio_buf_index = 0;
- }
- len1 = audio_buf_size - audio_buf_index;
- if (len1 > nNumSamples)
- len1 = nNumSamples;
- memcpy(stream, (uint8_t *)audio_buf + audio_buf_index, len1);
- nNumSamples -= len1;
- stream += len1;
- audio_buf_index += len1;
- }
- CryLogAlways("Send Data!");
- pData = stream;
- CryLogAlways("Free Var!");
- free(stream);
- CryLogAlways("Finished!");
- return true;
- }
- */
- void CCryVideoOpenCV::SkipFrame()
- {
- if (mode == 1) //AVI
- {
- cvGrabFrame(video_capture);
- }
- else //WebM
- {
- m_pCryVideoWebM->GetNextFrame();
- }
- }
- int CCryVideoOpenCV::GetVideoWidth()
- {
- if (mode == 1) //AVI
- {
- int width = (int)cvGetCaptureProperty(video_capture, CV_CAP_PROP_FRAME_WIDTH);
- return width;
- }
- else //WebM
- {
- int width = m_pCryVideoWebM->GetVideoWidth();
- return width;
- }
- }
- int CCryVideoOpenCV::GetVideoHeight()
- {
- if (mode == 1) //AVI
- {
- int height = (int)cvGetCaptureProperty(video_capture, CV_CAP_PROP_FRAME_HEIGHT);
- return height;
- }
- else //WebM
- {
- int height = m_pCryVideoWebM->GetVideoHeight();
- return height;
- }
- }
- float CCryVideoOpenCV::GetVideoFrameRate()
- {
- if (mode == 1) //AVI
- {
- float frameRate = (float)cvGetCaptureProperty(video_capture, CV_CAP_PROP_FPS);
- return frameRate;
- }
- else //WebM
- {
- float frameRate = m_pCryVideoWebM->GetVideoFrameRate();
- return frameRate;
- }
- }
- int CCryVideoOpenCV::GetFrameCount()
- {
- if (mode == 1) //AVI
- {
- int frameCount = (int)cvGetCaptureProperty(video_capture, CV_CAP_PROP_FRAME_COUNT);
- return frameCount;
- }
- else //WebM
- {
- return -1;
- }
- }
- char* CCryVideoOpenCV::GetNextFrame()
- {
- char* imageData = NULL;
- //CryLogAlways("Get Next Frame!");
- //Test Network Sound
- /*
- AVPacket packet;
- VideoCapture cap("D:\\CryVideo_CE3_Build3.4.0\\Game\\Videos\\Library\\FahrenheitJadeVision.avi");
- bool exit = false;
- while (exit == false)
- {
- if (av_read_frame(pFormatCtx, &packet) >= 0)
- {
- if (packet.stream_index == videoStream)
- {
- // Actually this is were SYNC between audio/video would happen.
- // Right now I assume that every VIDEO packet contains an entire video frame, and that's not true. A video frame can be made by multiple packets!
- // But for the time being, assume 1 video frame == 1 video packet,
- // so instead of reading the frame through ffmpeg, I read it through OpenCV.
- Mat frame;
- cap >> frame; // get a new frame from camera
- // do some processing on the frame, either as a Mat or as IplImage.
- // For educational purposes, applying a lame grayscale conversion
- IplImage ipl_frame = frame;
- for (int i = 0; i < ipl_frame.width * ipl_frame.height * ipl_frame.nChannels; i += ipl_frame.nChannels)
- {
- ipl_frame.imageData[i] = (ipl_frame.imageData[i] + ipl_frame.imageData[i+1] + ipl_frame.imageData[i+2])/3; //B
- ipl_frame.imageData[i+1] = (ipl_frame.imageData[i] + ipl_frame.imageData[i+1] + ipl_frame.imageData[i+2])/3; //G
- ipl_frame.imageData[i+2] = (ipl_frame.imageData[i] + ipl_frame.imageData[i+1] + ipl_frame.imageData[i+2])/3; //R
- }
- // Display it on SDL window
- show_frame(&ipl_frame);
- av_free_packet(&packet);
- }
- else if (packet.stream_index == audioStream)
- {
- packet_queue_put(&audioq, &packet);
- exit = true;
- }
- else
- {
- av_free_packet(&packet);
- }
- SDL_Event event;
- SDL_PollEvent(&event);
- }
- else
- {
- SDL_FreeSurface(surface);
- SDL_Quit();
- // the camera will be deinitialized automatically in VideoCapture destructor
- // Close the codec
- avcodec_close(pCodecCtx);
- // Close the video file
- av_close_input_file(pFormatCtx);
- break;
- }
- }
- */
- if (mode == 1) //AVI
- {
- source_image=cvQueryFrame(video_capture);
- if (source_image)
- {
- if (g_pCryVideo->IsRGB8Usable())
- {
- if (swapBGRA)
- {
- cvCvtColor(source_image, dest_image, CV_BGR2RGB);
- imageData = dest_image->imageData;
- if (renderToWindow)
- cvShowImage("CryVideo Player",dest_image);
- }
- else
- {
- imageData = source_image->imageData;
- if (renderToWindow)
- cvShowImage("CryVideo Player",source_image);
- }
- }
- else
- {
- if (manual_conversion_OpenCV || force_manual_conversion)
- {
- if (swapBGRA)
- {
- cvCvtColor(source_image, dest_image, CV_BGR2RGBA);
- }
- else
- {
- cvCvtColor(source_image, dest_image, CV_BGR2BGRA);
- }
- imageData = dest_image->imageData;
- }
- else
- {
- rgb = source_image->imageData;
- //pixfcOpenCV->convert(pixfcOpenCV, rgb, rgba, 0);
- for (int k=0;k<numThreadsAVI;k++)
- {
- job[k].in = (byte*)rgb + (k*YUVSize);
- pthread_create(&pid[k], NULL, Thread, &job[k]);
- }
- // Join all the threads
- for (int k=0;k<numThreadsAVI;k++)
- {
- pthread_join(pid[k], NULL);
- }
- imageData = rgba;
- }
- if (renderToWindow)
- cvShowImage("CryVideo Player",dest_image);
- }
- }
- }
- else //WebM
- {
- //CryLogAlways("GetNextFrame!");
- bool success = m_pCryVideoWebM->GetNextFrame();
- if (success)
- {
- if (renderToWindow)
- if (!g_pCryVideo->IsRGB8Usable())
- dest_image = cvCreateImage(cvSize(width, height),IPL_DEPTH_8U,4);
- else
- dest_image = cvCreateImage(cvSize(width, height),IPL_DEPTH_8U,3);
- //Retrieve Next Frame
- bool result = m_pCryVideoWebM->RetrieveNextFrame(NULL, NULL, NULL, true, yuv);
- if (!result)
- goto fail;
- /*
- if (useOpenGL)
- {
- //make it the current render context
- wglMakeCurrent( mhDC, mhRC );
- glActiveTexture(texture);
- glBindTexture(GL_TEXTURE_2D, texture); // update the YUV video texturing unit
- //Update texture
- glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, width, (height*3/2), 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, yuv);
- const GLfloat vertexCoord[8] = {
- -width, +height,
- +width, +height,
- -width, -height,
- +width, -height,
- };
- const GLfloat textureCoord[8] = {
- 0, 1,
- 1, 1,
- 0, 0,
- 1, 0,
- };
- glEnableClientState(GL_VERTEX_ARRAY);
- glEnableClientState(GL_TEXTURE_COORD_ARRAY);
- glVertexPointer(2, GL_FLOAT, 0, vertexCoord);
- glTexCoordPointer(2, GL_FLOAT, 0, textureCoord);
- //Draw image
- glClear(GL_COLOR_BUFFER_BIT);
- glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
- glReadPixels(0,0,width,height,GL_BGRA,GL_UNSIGNED_BYTE,rgba);
- if (mhRC)
- wglMakeCurrent( NULL, NULL );
- goto end;
- }*/
- // Do conversion
- if (!manual_conversion && !force_manual_conversion)
- {
- //pixfc->convert(pixfc, yuv, rgba);
- for (int k=0;k<numThreads;k++)
- {
- job[k].in = (byte*)yuv;
- pthread_create(&pid[k], NULL, Thread, &job[k]);
- }
- // Join all the threads
- for (int k=0;k<numThreads;k++)
- {
- pthread_join(pid[k], NULL);
- }
- }
- else
- {
- if (!g_pCryVideo->IsRGB8Usable()) //RGBA || BGRA
- {
- long currentPos;
- int widthHelper;
- long sizeTotalHelper;
- unsigned char y2,u,v;
- int r,g,b,c,d,e;
- widthHelper = width / 2;
- sizeTotalHelper = sizeTotal + (sizeTotal / 4);
- for (int y=0; y < height; y=y+2)
- {
- for (int x=0; x < width; x=x+2)
- {
- //x=0; y=0
- y2 = yuv[y * width + x];
- u = yuv[(y / 2) * widthHelper + (x / 2) + sizeTotalHelper];
- v = yuv[(y / 2) * widthHelper + (x / 2) + sizeTotal];
- c = y2 - 16;
- d = u - 128;
- e = v - 128;
- r = ( 298 * c + 409 * e + 128) >> 8;
- g = ( 298 * c - 100 * d - 208 * e + 128) >> 8;
- b = ( 298 * c + 516 * d + 128) >> 8;
- if (r < 0) r = 0;
- else if (r > 255) r = 255;
- if (g < 0) g = 0;
- else if (g > 255) g = 255;
- if (b < 0) b = 0;
- else if (b > 255) b = 255;
- currentPos = (y*width)*4 + (x*4);
- rgba[currentPos+0] = r;
- rgba[currentPos+1] = g;
- rgba[currentPos+2] = b;
- rgba[currentPos+3] = 255;
- //x=0; y=1
- y2 = yuv[(y+1) * width + x];
- c = y2 - 16;
- r = ( 298 * c + 409 * e + 128) >> 8;
- g = ( 298 * c - 100 * d - 208 * e + 128) >> 8;
- b = ( 298 * c + 516 * d + 128) >> 8;
- if (r < 0) r = 0;
- else if (r > 255) r = 255;
- if (g < 0) g = 0;
- else if (g > 255) g = 255;
- if (b < 0) b = 0;
- else if (b > 255) b = 255;
- currentPos = ((y+1)*width)*4 + (x*4);
- rgba[currentPos+0] = r;
- rgba[currentPos+1] = g;
- rgba[currentPos+2] = b;
- rgba[currentPos+3] = 255;
- //x=1; y=0
- y2 = yuv[y * width + (x+1)];
- c = y2 - 16;
- r = ( 298 * c + 409 * e + 128) >> 8;
- g = ( 298 * c - 100 * d - 208 * e + 128) >> 8;
- b = ( 298 * c + 516 * d + 128) >> 8;
- if (r < 0) r = 0;
- else if (r > 255) r = 255;
- if (g < 0) g = 0;
- else if (g > 255) g = 255;
- if (b < 0) b = 0;
- else if (b > 255) b = 255;
- currentPos = (y*width)*4 + ((x+1)*4);
- rgba[currentPos+0] = r;
- rgba[currentPos+1] = g;
- rgba[currentPos+2] = b;
- rgba[currentPos+3] = 255;
- //x=1; y=1
- y2 = yuv[(y+1) * width + (x+1)];
- c = y2 - 16;
- r = ( 298 * c + 409 * e + 128) >> 8;
- g = ( 298 * c - 100 * d - 208 * e + 128) >> 8;
- b = ( 298 * c + 516 * d + 128) >> 8;
- if (r < 0) r = 0;
- else if (r > 255) r = 255;
- if (g < 0) g = 0;
- else if (g > 255) g = 255;
- if (b < 0) b = 0;
- else if (b > 255) b = 255;
- currentPos = ((y+1)*width)*4 + ((x+1)*4);
- rgba[currentPos+0] = r;
- rgba[currentPos+1] = g;
- rgba[currentPos+2] = b;
- rgba[currentPos+3] = 255;
- }
- }
- }
- else //RGB || BGR
- {
- long currentPos;
- int widthHelper;
- long sizeTotalHelper;
- unsigned char y2,u,v;
- int r,g,b,c,d,e;
- widthHelper = width / 2;
- sizeTotalHelper = sizeTotal + (sizeTotal / 4);
- for (int y=0; y < height; y=y+2)
- {
- for (int x=0; x < width; x=x+2)
- {
- //x=0; y=0
- y2 = yuv[y * width + x];
- u = yuv[(y / 2) * widthHelper + (x / 2) + sizeTotalHelper];
- v = yuv[(y / 2) * widthHelper + (x / 2) + sizeTotal];
- c = y2 - 16;
- d = u - 128;
- e = v - 128;
- r = ( 298 * c + 409 * e + 128) >> 8;
- g = ( 298 * c - 100 * d - 208 * e + 128) >> 8;
- b = ( 298 * c + 516 * d + 128) >> 8;
- if (r < 0) r = 0;
- else if (r > 255) r = 255;
- if (g < 0) g = 0;
- else if (g > 255) g = 255;
- if (b < 0) b = 0;
- else if (b > 255) b = 255;
- currentPos = (y*width)*3 + (x*3);
- rgba[currentPos+0] = r;
- rgba[currentPos+1] = g;
- rgba[currentPos+2] = b;
- //x=0; y=1
- y2 = yuv[(y+1) * width + x];
- c = y2 - 16;
- r = ( 298 * c + 409 * e + 128) >> 8;
- g = ( 298 * c - 100 * d - 208 * e + 128) >> 8;
- b = ( 298 * c + 516 * d + 128) >> 8;
- if (r < 0) r = 0;
- else if (r > 255) r = 255;
- if (g < 0) g = 0;
- else if (g > 255) g = 255;
- if (b < 0) b = 0;
- else if (b > 255) b = 255;
- currentPos = ((y+1)*width)*3 + (x*3);
- rgba[currentPos+0] = r;
- rgba[currentPos+1] = g;
- rgba[currentPos+2] = b;
- //x=1; y=0
- y2 = yuv[y * width + (x+1)];
- c = y2 - 16;
- r = ( 298 * c + 409 * e + 128) >> 8;
- g = ( 298 * c - 100 * d - 208 * e + 128) >> 8;
- b = ( 298 * c + 516 * d + 128) >> 8;
- if (r < 0) r = 0;
- else if (r > 255) r = 255;
- if (g < 0) g = 0;
- else if (g > 255) g = 255;
- if (b < 0) b = 0;
- else if (b > 255) b = 255;
- currentPos = (y*width)*3 + ((x+1)*3);
- rgba[currentPos+0] = r;
- rgba[currentPos+1] = g;
- rgba[currentPos+2] = b;
- //x=1; y=1
- y2 = yuv[(y+1) * width + (x+1)];
- c = y2 - 16;
- r = ( 298 * c + 409 * e + 128) >> 8;
- g = ( 298 * c - 100 * d - 208 * e + 128) >> 8;
- b = ( 298 * c + 516 * d + 128) >> 8;
- if (r < 0) r = 0;
- else if (r > 255) r = 255;
- if (g < 0) g = 0;
- else if (g > 255) g = 255;
- if (b < 0) b = 0;
- else if (b > 255) b = 255;
- currentPos = ((y+1)*width)*3 + ((x+1)*3);
- rgba[currentPos+0] = r;
- rgba[currentPos+1] = g;
- rgba[currentPos+2] = b;
- }
- }
- }
- }
- imageData = rgba;
- if (renderToWindow)
- {
- dest_image->imageData = imageData;
- cvShowImage("CryVideo Player",dest_image);
- cvReleaseImage(&dest_image);
- }
- }
- else
- {
- fail:
- imageData = NULL;
- }
- }
- return imageData;
- }
- void CCryVideoOpenCV::Shutdown()
- {
- this->~CCryVideoOpenCV();
- }
/// Reports this object's memory footprint to the engine memory profiler.
///
/// @param s Engine-supplied sizer that accumulates per-subsystem memory usage.
///
/// NOTE(review): `s->Add(*this)` accounts only for the shallow size of the
/// object itself (sizeof(*this)); any buffers reached through raw pointers
/// (frame/conversion buffers seen elsewhere in this file) are not counted
/// here — confirm whether they should be added too.
void CCryVideoOpenCV::GetMemoryStatistics(ICrySizer * s) const
{
	s->Add(*this);
}
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement