//icaclient/develop/main/UKH/NetClient/main/linux/obj.h264/FFDecode.cpp#13 - edit change 297198 (ktext)
/*****************************************************************************
 *
 *   FFDecode.cpp
 *
 *   H.264 v3 interface software fallback implementation.
 *
 *   $Id: //icaclient/unix13.3/client/unix/CitrixPluginSDK/H264_sample/FFDecode.cpp#1 $
 *
 *   Copyright 2013-2015 Citrix Systems, Inc.  All Rights Reserved.
 *
 *****************************************************************************/

#include <dlfcn.h>
#include <linux/limits.h>

#include "FFDecode.h"

#define X11_SUPPORT // For now ...
#ifdef X11_SUPPORT
#include <X11/Xlib.h>
#endif /* X11_SUPPORT */

static int buffer_open(URLContext* h, const char* buffname, int flags);
static int buffer_read(URLContext* h, unsigned char *buf, int size);
static int buffer_close(URLContext* h);

// Globals.

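/*
 * Custom libavformat protocol: "buffer" reads H.264 data straight from an
 * in-memory buffer rather than a file or socket.  The buffer's address is
 * passed in place of a URL string (see buffer_open() and InitOnce()).
 */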
URLProtocol buffer_protocol = {
    "buffer",                   // name
    buffer_open,                // open
    buffer_read,                // read
    NULL,                       // write
    NULL,                       // seek
    buffer_close,               // close
    NULL,                       // next
    NULL,                       // read_pause
    NULL,                       // read_seek
    NULL,                       // get_file_handle
};

AVInputFormat           *g_pH264Fmt = NULL;
AVFrame                 *g_pAVFrame = NULL;
AVFormatContext         *g_pFormatCtx = NULL;

HSO                      g_pAVUtil;
DLFFMPEG_FORMAT          g_FFLibFormat;
DLFFMPEG_CODEC           g_FFLibCodec;
DLFFMPEG_SWSCALE         g_FFLibSwScale;

PFFMPEG_FORMAT_CALL_TABLE   g_pFFMPEGFormatCallTable  = NULL;
PFFMPEG_CODEC_CALL_TABLE    g_pFFMPEGCodecCallTable   = NULL;
PFFMPEG_SWSCALE_CALL_TABLE  g_pFFMPEGSwscaleCallTable = NULL;

/* Start-code prefix plus delimiter bytes appended after each input buffer
 * in ff_decode() so the demuxer sees a terminating NAL unit delimiter.
 */
BYTE DefaultH264MagicBytes[H264_NAL_UNIT_DELIM_SIZE] = {0, 0, 0, 1, 33, 225};

static HSO OpenDll(const char *ICARoot, const char* pDll);

static void LoadPreReqFunctions(const char *ICARoot)
{
    g_pAVUtil = OpenDll(ICARoot, "libavutil.so");
    if (g_pAVUtil) {
        /* Set logging level to QUIET unless instructed otherwise. */

        if (!getenv("CTXH264_FB_FFMPEGLOGGING")) {
            PFN_av_log_set_level p_av_log_set_level;
            p_av_log_set_level =
                (PFN_av_log_set_level)dlsym(g_pAVUtil, "av_log_set_level");
            if (p_av_log_set_level) {
                p_av_log_set_level(AV_LOG_QUIET);
            }
        }
    }

    g_FFLibCodec.dll = OpenDll(ICARoot, "libavcodec.so");
    g_FFLibFormat.dll = OpenDll(ICARoot, "libavformat.so");
    g_FFLibSwScale.dll = OpenDll(ICARoot, "libswscale.so");

    GET_FN_ADDR(g_FFLibCodec, avcodec_init);
    GET_FN_ADDR(g_FFLibCodec, avcodec_find_decoder);
    GET_FN_ADDR(g_FFLibCodec, avcodec_open);
    GET_FN_ADDR(g_FFLibCodec, avcodec_alloc_frame);
    GET_FN_ADDR(g_FFLibCodec, avcodec_decode_video);
//  GET_FN_ADDR(g_FFLibCodec, avpicture_get_size);
    GET_FN_ADDR(g_FFLibCodec, avpicture_fill);
    GET_FN_ADDR(g_FFLibCodec, avcodec_close);

    GET_FN_ADDR(g_FFLibFormat, av_register_all);
    GET_FN_ADDR(g_FFLibFormat, av_find_input_format);
    GET_FN_ADDR(g_FFLibFormat, av_open_input_file);
    GET_FN_ADDR(g_FFLibFormat, av_find_stream_info);
    GET_FN_ADDR(g_FFLibFormat, av_read_frame);
//  GET_FN_ADDR(g_FFLibFormat, av_destruct_packet);
    GET_FN_ADDR(g_FFLibFormat, av_register_protocol);
    GET_FN_ADDR(g_FFLibFormat, av_free);
    GET_FN_ADDR(g_FFLibFormat, av_close_input_file);

    GET_FN_ADDR(g_FFLibSwScale, sws_getContext);
    GET_FN_ADDR(g_FFLibSwScale, sws_scale);

    g_pFFMPEGCodecCallTable   = &g_FFLibCodec.fns;
    g_pFFMPEGFormatCallTable  = &g_FFLibFormat.fns;
    g_pFFMPEGSwscaleCallTable = &g_FFLibSwScale.fns;
}

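/*
 * The macros below redirect the FFmpeg API names used in the rest of this
 * file through the call tables filled in by LoadPreReqFunctions(), so the
 * decode paths read like ordinary FFmpeg code while every call is resolved
 * via dlsym() at run time.
 */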
#define av_register_all \
    (g_pFFMPEGFormatCallTable->pfn_av_register_all)
#define av_register_protocol \
    (g_pFFMPEGFormatCallTable->pfn_av_register_protocol)
#define av_find_input_format \
    (g_pFFMPEGFormatCallTable->pfn_av_find_input_format)
#define av_open_input_file \
    (g_pFFMPEGFormatCallTable->pfn_av_open_input_file)

#define av_find_stream_info \
    (g_pFFMPEGFormatCallTable->pfn_av_find_stream_info)
#define av_read_frame \
    (g_pFFMPEGFormatCallTable->pfn_av_read_frame)
#define av_free \
    (g_pFFMPEGFormatCallTable->pfn_av_free)
#define av_close_input_file \
    (g_pFFMPEGFormatCallTable->pfn_av_close_input_file)

#define avcodec_init \
    (g_pFFMPEGCodecCallTable->pfn_avcodec_init)
#define avcodec_open \
    (g_pFFMPEGCodecCallTable->pfn_avcodec_open)
#define avcodec_find_decoder \
    (g_pFFMPEGCodecCallTable->pfn_avcodec_find_decoder)
#define avcodec_alloc_frame \
    (g_pFFMPEGCodecCallTable->pfn_avcodec_alloc_frame)
#define avpicture_fill \
    (g_pFFMPEGCodecCallTable->pfn_avpicture_fill)
#define avcodec_decode_video \
    (g_pFFMPEGCodecCallTable->pfn_avcodec_decode_video)
#define avcodec_close \
    (g_pFFMPEGCodecCallTable->pfn_avcodec_close)

#define sws_scale \
    (g_pFFMPEGSwscaleCallTable->pfn_sws_scale)
#define sws_getContext \
    (g_pFFMPEGSwscaleCallTable->pfn_sws_getContext)

static int buffer_open(URLContext* h, const char* buffname, int flags)
{
    REFERENCE_VAR(flags);

    // buffname is really a BufferContext pointer; stash it in h->priv_data
    // so that buffer_read() can find it.

    h->priv_data = (void *)buffname;
    return 0;
}

static int buffer_read(URLContext* h, unsigned char* buf, int size)
{
    int            iRet = size;
    BufferContext *pBufferCtx = (BufferContext *)h->priv_data;
    REFERENCE_VAR(buf);

    /* Nothing is copied into buf: ff_decode() points the demuxer's I/O
     * buffer directly at the input data, so this callback only tracks how
     * many bytes have been consumed.
     */

    if (size <= (pBufferCtx->iBufferSize - pBufferCtx->iBytesRead)) {
        pBufferCtx->iBytesRead += size;
    } else {
        iRet = -1;
    }
    return iRet;
}

static int buffer_close(URLContext* h)
{
    REFERENCE_VAR(h);
    return 0;
}

static HSO OpenDll(const char *ICARoot, const char* pDll)
{
    HSO  hShared;
    char DllPath[PATH_MAX];

    snprintf(DllPath, sizeof(DllPath), "%s/%s", ICARoot, pDll);

    /* Reuse the library if this process has already loaded it ... */

    hShared = dlopen(DllPath, RTLD_NOLOAD);
    if (hShared != NULL) {
        return hShared;
    }

    /* ... otherwise load it now with its symbols globally visible. */

    hShared = dlopen(DllPath, RTLD_NOW | RTLD_GLOBAL);
    return hShared;
}

static void InitOnce(FFMPEGCtx *pCtx, UINT32 uWidth, UINT32 uHeight)
{
    BufferContext  *pBufferCtx = &pCtx->BufferCtx;
    AVCodec        *pCodec;
    AVCodecContext *pCodecCtx = NULL;

    pBufferCtx->iBufferSize += H264_NAL_UNIT_DELIM_SIZE;
    av_open_input_file(&pCtx->pFormatCtx, (const char *)pBufferCtx,
                       g_pH264Fmt, 0, NULL);
    av_find_stream_info(pCtx->pFormatCtx);

    // Find the first video stream; pCodecCtx stays NULL if there is none.

    for (unsigned int i = 0; i < pCtx->pFormatCtx->nb_streams; i++) {
        AVCodecContext *pStreamCtx = pCtx->pFormatCtx->streams[i]->codec;
        if (pStreamCtx->codec_type == CODEC_TYPE_VIDEO) {
            pCtx->VideoStreamIdx = i;
            pCtx->pCodecCtx = pCodecCtx = pStreamCtx;
            break;
        }
    }

    if (pCodecCtx) {
        // Find the decoder for the video stream. It should be CODEC_ID_H264.

        pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
        avcodec_open(pCodecCtx, pCodec);

        /* Use SWS_FAST_BILINEAR over SWS_BICUBIC: no noticeable
         * image degradation but should perform better.
         */

        pCtx->pImgConvCtx = sws_getContext(uWidth, uHeight, pCodecCtx->pix_fmt,
                                           uWidth, uHeight,
                                           PIX_FMT_BGRA, SWS_FAST_BILINEAR,
                                           NULL, NULL, NULL);
    } else {
        pCodec = NULL;
    }
    pCtx->pCodec = pCodec;
}

static bool ff_decode(FFMPEGCtx *pCtx, PBYTE pInData, UINT32 uInSize,
                      UINT32 uWidth, UINT32 uHeight)
{
    int temp = 0;
    BufferContext *pBufferCtx = &pCtx->BufferCtx;

    uInSize -= H264_NAL_UNIT_DELIM_SIZE; /* Discount extra allocation. */
    pBufferCtx->iBufferSize = uInSize;
    pBufferCtx->pBufferAddress = pInData;
    memcpy(pCtx->ActualMagicBytes, pInData, H264_NAL_UNIT_DELIM_SIZE);

    /* Append the Magic Bytes to source. */

    memcpy(pInData + uInSize, DefaultH264MagicBytes, H264_NAL_UNIT_DELIM_SIZE);

    /* Check for one time init. */

    if (pCtx->pFormatCtx == NULL) {
        InitOnce(pCtx, uWidth, uHeight);
    } else {
        PBYTE pNewBuf = pInData;

        /* Skip initial Frame Delimiter. */

        pNewBuf += H264_NAL_UNIT_DELIM_SIZE;
        pBufferCtx->pBufferAddress = pNewBuf;
        pCtx->pFormatCtx->pb->buffer_size = uInSize;
        pCtx->pFormatCtx->pb->buffer = pNewBuf;
    }

    AVPacket *pPacket = &pCtx->CtxAVPacket;
    av_read_frame(pCtx->pFormatCtx, pPacket);

    /* Put back the input's original leading bytes (saved above) at the
     * start of the packet before decoding.
     */

    memcpy(pPacket->data, pCtx->ActualMagicBytes, H264_NAL_UNIT_DELIM_SIZE);

    avcodec_decode_video(pCtx->pCodecCtx, pCtx->pFrameYUV, &temp,
                         pPacket->data, pPacket->size);

    if (pPacket->destruct != NULL) {
        pPacket->data = NULL;
        pPacket->size = 0;
    }
    pBufferCtx->iBytesRead = 0;
    return temp ? true : false;
}

bool ff_decode_yuv(FFMPEGCtx *pCtx, PBYTE pInData, UINT32 uInSize,
                   char *data, int *offsets, int *pitches,
                   UINT32 uWidth, UINT32 uHeight)
{
    if (ff_decode(pCtx, pInData, uInSize, uWidth, uHeight)) {
        UINT32    y;
        AVPicture *p = (AVPicture *)pCtx->pFrameYUV;

        /* Copy full resolution Y (luminance) channel. */

        for (y = 0; y < uHeight; y++) {
            memcpy(&data[offsets[0] + (y * pitches[0])],
                   &p->data[0][y * p->linesize[0]], pitches[0]);
        }

        /* Copy U and V planes (each having quarter resolution). */

        for (y = 0; y < uHeight / 2; y++) {
            memcpy(&data[offsets[1] + (y * pitches[1])],
                   &p->data[1][y * p->linesize[1]], pitches[1]);
            memcpy(&data[offsets[2] + (y * pitches[2])],
                   &p->data[2][y * p->linesize[2]], pitches[2]);
        }
        return true;
    }
    return false;
}

#ifdef X11_SUPPORT
bool ff_decode_rgb(FFMPEGCtx* pCtx, PBYTE pInData, UINT32 uInSize,
                   XImage *img, UINT32 uWidth, UINT32 uHeight)
{
    if (ff_decode(pCtx, pInData, uInSize, uWidth, uHeight)) {
        avpicture_fill((AVPicture*)pCtx->pFrameBGRA, (uint8_t *)img->data,
                       PIX_FMT_BGRA, uWidth, uHeight);
        pCtx->pFrameBGRA->linesize[0] =
            (pCtx->pFrameBGRA->linesize[0] + 3) & ~3;

        /* Convert the image from its native format to BGRA. */

        sws_scale(pCtx->pImgConvCtx, &pCtx->pFrameYUV->data[0],
                  &pCtx->pFrameYUV->linesize[0], 0, uHeight,
                  &pCtx->pFrameBGRA->data[0], &pCtx->pFrameBGRA->linesize[0]);

        return true;
    }
    return false;
}
#endif /* X11_SUPPORT */

FFMPEGCtx *ff_open_ctx()
{
    FFMPEGCtx *pCtx = new FFMPEGCtx;

    memset(pCtx, 0, sizeof(*pCtx));
    strcpy(&pCtx->BufferCtx.szProtoName[0], "buffer:");
    pCtx->pFrameYUV = avcodec_alloc_frame();
    pCtx->pFrameBGRA = avcodec_alloc_frame();
    return pCtx;
}

void ff_close_ctx(FFMPEGCtx *pCtx)
{
    av_free(pCtx->pFrameYUV);
    av_free(pCtx->pFrameBGRA);
    avcodec_close(pCtx->pCodecCtx);

    /* Detach the borrowed input buffer before closing the format context. */

    pCtx->pFormatCtx->pb->buffer      = NULL;
    pCtx->pFormatCtx->pb->buffer_size = 0;
    pCtx->pFormatCtx->pb->buf_ptr     = NULL;
    pCtx->pFormatCtx->pb->buf_end     = NULL;
    pCtx->pFormatCtx->pb->opaque      = 0;
    av_close_input_file(pCtx->pFormatCtx);
    delete pCtx;
}

void ff_init(const char *ICARoot)
{
    LoadPreReqFunctions(ICARoot);
    avcodec_init();
    av_register_all();
    av_register_protocol(&buffer_protocol);
    g_pH264Fmt = av_find_input_format("h264");
}

void ff_end(void)
{
    if (g_FFLibSwScale.dll)
        dlclose(g_FFLibSwScale.dll);
    if (g_FFLibFormat.dll)
        dlclose(g_FFLibFormat.dll);
    if (g_FFLibCodec.dll)
        dlclose(g_FFLibCodec.dll);
    if (g_pAVUtil)
        dlclose(g_pAVUtil);
}
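
/*
 * Illustrative usage sketch, not part of the original file: the call order
 * below is inferred from the functions defined above.  "icaRoot", "img",
 * "frameData" and "frameSize" are hypothetical caller-supplied values; the
 * input buffer is assumed to have H264_NAL_UNIT_DELIM_SIZE writable bytes
 * past the encoded data (and frameSize counts them), since ff_decode()
 * overwrites that tail with a delimiter.
 */
#if defined(FFDECODE_USAGE_EXAMPLE) && defined(X11_SUPPORT)
static void ExampleDecodeFrame(const char *icaRoot, XImage *img,
                               PBYTE frameData, UINT32 frameSize,
                               UINT32 uWidth, UINT32 uHeight)
{
    /* One-time setup: dlopen the FFmpeg libraries under icaRoot and
     * register the in-memory "buffer" protocol.
     */
    ff_init(icaRoot);

    FFMPEGCtx *pCtx = ff_open_ctx();

    /* Decode one H.264 access unit straight into the XImage as BGRA. */

    if (ff_decode_rgb(pCtx, frameData, frameSize, img, uWidth, uHeight)) {
        /* img->data now holds the decoded frame. */
    }

    ff_close_ctx(pCtx);
    ff_end();
}
#endif /* FFDECODE_USAGE_EXAMPLE && X11_SUPPORT */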