xmixahlx

ffmpeg v4l2-request-hwaccel-4.2.2-rkvdec 20200531 diff

May 31st, 2020
  1. diff -uNr '--exclude=.git' '--exclude=sunxi' ffmpeg_n4.2.2/configure ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/configure
  2. --- ffmpeg_n4.2.2/configure 2020-05-21 20:25:05.153847173 -0700
  3. +++ ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/configure 2020-05-26 03:16:38.383175374 -0700
  4. @@ -271,6 +271,7 @@
  5. --enable-libtls enable LibreSSL (via libtls), needed for https support
  6. if openssl, gnutls or mbedtls is not used [no]
  7. --enable-libtwolame enable MP2 encoding via libtwolame [no]
  8. + --enable-libudev enable libudev [no]
  9. --enable-libv4l2 enable libv4l2/v4l-utils [no]
  10. --enable-libvidstab enable video stabilization using vid.stab [no]
  11. --enable-libvmaf enable vmaf filter via libvmaf [no]
  12. @@ -337,6 +338,7 @@
  13. --enable-omx-rpi enable OpenMAX IL code for Raspberry Pi [no]
  14. --enable-rkmpp enable Rockchip Media Process Platform code [no]
  15. --disable-v4l2-m2m disable V4L2 mem2mem code [autodetect]
  16. + --enable-v4l2-request enable V4L2 request API code [no]
  17. --disable-vaapi disable Video Acceleration API (mainly Unix/Intel) code [autodetect]
  18. --disable-vdpau disable Nvidia Video Decode and Presentation API for Unix code [autodetect]
  19. --disable-videotoolbox disable VideoToolbox code [autodetect]
  20. @@ -1797,6 +1799,7 @@
  21. libtesseract
  22. libtheora
  23. libtwolame
  24. + libudev
  25. libv4l2
  26. libvorbis
  27. libvpx
  28. @@ -1851,6 +1854,7 @@
  29. mmal
  30. omx
  31. opencl
  32. + v4l2_request
  33. "
  34.  
  35. DOCUMENT_LIST="
  36. @@ -2873,6 +2877,7 @@
  37. dxva2_deps="dxva2api_h DXVA2_ConfigPictureDecode ole32 user32"
  38. ffnvcodec_deps_any="libdl LoadLibrary"
  39. nvdec_deps="ffnvcodec"
  40. +v4l2_request_deps="linux_videodev2_h linux_media_h v4l2_timeval_to_ns libdrm libudev"
  41. vaapi_x11_deps="xlib"
  42. videotoolbox_hwaccel_deps="videotoolbox pthreads"
  43. videotoolbox_hwaccel_extralibs="-framework QuartzCore"
  44. @@ -2890,6 +2895,8 @@
  45. h264_dxva2_hwaccel_select="h264_decoder"
  46. h264_nvdec_hwaccel_deps="nvdec"
  47. h264_nvdec_hwaccel_select="h264_decoder"
  48. +h264_v4l2request_hwaccel_deps="v4l2_request"
  49. +h264_v4l2request_hwaccel_select="h264_decoder"
  50. h264_vaapi_hwaccel_deps="vaapi"
  51. h264_vaapi_hwaccel_select="h264_decoder"
  52. h264_vdpau_hwaccel_deps="vdpau"
  53. @@ -2904,6 +2911,8 @@
  54. hevc_dxva2_hwaccel_select="hevc_decoder"
  55. hevc_nvdec_hwaccel_deps="nvdec"
  56. hevc_nvdec_hwaccel_select="hevc_decoder"
  57. +hevc_v4l2request_hwaccel_deps="v4l2_request"
  58. +hevc_v4l2request_hwaccel_select="hevc_decoder"
  59. hevc_vaapi_hwaccel_deps="vaapi VAPictureParameterBufferHEVC"
  60. hevc_vaapi_hwaccel_select="hevc_decoder"
  61. hevc_vdpau_hwaccel_deps="vdpau VdpPictureInfoHEVC"
  62. @@ -2932,6 +2941,8 @@
  63. mpeg2_dxva2_hwaccel_select="mpeg2video_decoder"
  64. mpeg2_nvdec_hwaccel_deps="nvdec"
  65. mpeg2_nvdec_hwaccel_select="mpeg2video_decoder"
  66. +mpeg2_v4l2request_hwaccel_deps="v4l2_request mpeg2_v4l2_request"
  67. +mpeg2_v4l2request_hwaccel_select="mpeg2video_decoder"
  68. mpeg2_vaapi_hwaccel_deps="vaapi"
  69. mpeg2_vaapi_hwaccel_select="mpeg2video_decoder"
  70. mpeg2_vdpau_hwaccel_deps="vdpau"
  71. @@ -2962,6 +2973,8 @@
  72. vc1_vdpau_hwaccel_select="vc1_decoder"
  73. vp8_nvdec_hwaccel_deps="nvdec"
  74. vp8_nvdec_hwaccel_select="vp8_decoder"
  75. +vp8_v4l2request_hwaccel_deps="v4l2_request"
  76. +vp8_v4l2request_hwaccel_select="vp8_decoder"
  77. vp8_vaapi_hwaccel_deps="vaapi"
  78. vp8_vaapi_hwaccel_select="vp8_decoder"
  79. vp9_d3d11va_hwaccel_deps="d3d11va DXVA_PicParams_VP9"
  80. @@ -2972,6 +2985,8 @@
  81. vp9_dxva2_hwaccel_select="vp9_decoder"
  82. vp9_nvdec_hwaccel_deps="nvdec"
  83. vp9_nvdec_hwaccel_select="vp9_decoder"
  84. +vp9_v4l2request_hwaccel_deps="v4l2_request"
  85. +vp9_v4l2request_hwaccel_select="vp9_decoder"
  86. vp9_vaapi_hwaccel_deps="vaapi VADecPictureParameterBufferVP9_bit_depth"
  87. vp9_vaapi_hwaccel_select="vp9_decoder"
  88. wmv3_d3d11va_hwaccel_select="vc1_d3d11va_hwaccel"
  89. @@ -6270,6 +6285,7 @@
  90. enabled libtwolame && require libtwolame twolame.h twolame_init -ltwolame &&
  91. { check_lib libtwolame twolame.h twolame_encode_buffer_float32_interleaved -ltwolame ||
  92. die "ERROR: libtwolame must be installed and version must be >= 0.3.10"; }
  93. +enabled libudev && require_pkg_config libudev libudev libudev.h udev_new
  94. enabled libv4l2 && require_pkg_config libv4l2 libv4l2 libv4l2.h v4l2_ioctl
  95. enabled libvidstab && require_pkg_config libvidstab "vidstab >= 0.98" vid.stab/libvidstab.h vsMotionDetectInit
  96. enabled libvmaf && require_pkg_config libvmaf "libvmaf >= 1.3.9" libvmaf.h compute_vmaf
  97. @@ -6365,6 +6381,10 @@
  98. { enabled libdrm ||
  99. die "ERROR: rkmpp requires --enable-libdrm"; }
  100. }
  101. +enabled v4l2_request && { enabled libdrm ||
  102. + die "ERROR: v4l2-request requires --enable-libdrm"; } &&
  103. + { enabled libudev ||
  104. + die "ERROR: v4l2-request requires --enable-libudev"; }
  105. enabled vapoursynth && require_pkg_config vapoursynth "vapoursynth-script >= 42" VSScript.h vsscript_init
  106.  
  107.  
  108. @@ -6444,6 +6464,13 @@
  109. check_cc vp8_v4l2_m2m linux/videodev2.h "int i = V4L2_PIX_FMT_VP8;"
  110. check_cc vp9_v4l2_m2m linux/videodev2.h "int i = V4L2_PIX_FMT_VP9;"
  111.  
  112. +check_func_headers "linux/media.h linux/videodev2.h" v4l2_timeval_to_ns
  113. +check_cc h264_v4l2_request linux/videodev2.h "int i = V4L2_PIX_FMT_H264_SLICE;"
  114. +check_cc hevc_v4l2_request linux/videodev2.h "int i = V4L2_PIX_FMT_HEVC_SLICE;"
  115. +check_cc mpeg2_v4l2_request linux/videodev2.h "int i = V4L2_PIX_FMT_MPEG2_SLICE;"
  116. +check_cc vp8_v4l2_request linux/videodev2.h "int i = V4L2_PIX_FMT_VP8_FRAME;"
  117. +check_cc vp9_v4l2_request linux/videodev2.h "int i = V4L2_PIX_FMT_VP9_FRAME;"
  118. +
  119. check_headers sys/videoio.h
  120. test_code cc sys/videoio.h "struct v4l2_frmsizeenum vfse; vfse.discrete.width = 0;" && enable_sanitized struct_v4l2_frmivalenum_discrete
  121.  
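The new check_cc probes above are what gate each hwaccel on the installed kernel headers: configure tries to compile a one-liner roughly like the sketch below and silently drops the corresponding v4l2-request hwaccel if the pixel-format macro is missing. The sketch illustrates the probe; it is not part of the patch.

/* Roughly what "check_cc h264_v4l2_request linux/videodev2.h ..." compiles;
 * it only builds against kernel headers that define the stateless H.264
 * pixel format, which is how older uapi headers get skipped cleanly. */
#include <linux/videodev2.h>

int main(void)
{
    int i = V4L2_PIX_FMT_H264_SLICE;
    (void)i;
    return 0;
}
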
  122. diff -uNr '--exclude=.git' '--exclude=sunxi' ffmpeg_n4.2.2/ffbuild/common.mak ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/ffbuild/common.mak
  123. --- ffmpeg_n4.2.2/ffbuild/common.mak 2020-05-21 20:25:05.183846597 -0700
  124. +++ ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/ffbuild/common.mak 2020-05-26 03:16:38.463175255 -0700
  125. @@ -79,7 +79,7 @@
  126.  
  127. %.o: %.asm
  128. $(COMPILE_X86ASM)
  129. - -$(if $(ASMSTRIPFLAGS), $(STRIP) $(ASMSTRIPFLAGS) $@)
  130. + $(if $(STRIP), $(if $(ASMSTRIPFLAGS), $(STRIP) $(ASMSTRIPFLAGS) $@))
  131.  
  132. %.o: %.rc
  133. $(WINDRES) $(IFLAGS) --preprocessor "$(DEPWINDRES) -E -xc-header -DRC_INVOKED $(CC_DEPFLAGS)" -o $@ $<
  134. diff -uNr '--exclude=.git' '--exclude=sunxi' ffmpeg_n4.2.2/ffbuild/version.sh ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/ffbuild/version.sh
  135. --- ffmpeg_n4.2.2/ffbuild/version.sh 2020-05-21 20:25:05.183846597 -0700
  136. +++ ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/ffbuild/version.sh 2020-05-26 03:16:38.463175255 -0700
  137. @@ -2,6 +2,7 @@
  138.  
  139. # Usage: version.sh <ffmpeg-root-dir> <output-version.h> <extra-version>
  140.  
  141. +if [ -d $1/.git ]; then # only check for a git rev, if the src tree is in a git repo
  142. # check for git short hash
  143. if ! test "$revision"; then
  144. if (cd "$1" && grep git RELEASE 2> /dev/null >/dev/null) ; then
  145. @@ -27,6 +28,7 @@
  146. git_hash="${srcdir##*-}";;
  147. esac
  148. fi
  149. +fi
  150.  
  151. # no revision number found
  152. test "$revision" || revision=$(cd "$1" && cat RELEASE 2> /dev/null)
  153. diff -uNr '--exclude=.git' '--exclude=sunxi' ffmpeg_n4.2.2/libavcodec/avcodec.h ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/avcodec.h
  154. --- ffmpeg_n4.2.2/libavcodec/avcodec.h 2020-05-21 20:25:05.263845060 -0700
  155. +++ ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/avcodec.h 2020-05-26 03:16:38.703174900 -0700
  156. @@ -3750,6 +3750,11 @@
  157. int (*end_frame)(AVCodecContext *avctx);
  158.  
  159. /**
  160. + * Called when frame is returned to api user.
  161. + */
  162. + int (*output_frame)(AVCodecContext *avctx, AVFrame *frame);
  163. +
  164. + /**
  165. * Size of per-frame hardware accelerator private data.
  166. *
  167. * Private data is allocated with av_mallocz() before
  168. diff -uNr '--exclude=.git' '--exclude=sunxi' ffmpeg_n4.2.2/libavcodec/decode.c ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/decode.c
  169. --- ffmpeg_n4.2.2/libavcodec/decode.c 2020-05-21 20:25:05.293844483 -0700
  170. +++ ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/decode.c 2020-05-26 03:16:38.813174738 -0700
  171. @@ -450,6 +450,9 @@
  172. emms_c();
  173. actual_got_frame = got_frame;
  174.  
  175. + if (got_frame && avctx->hwaccel && avctx->hwaccel->output_frame)
  176. + avctx->hwaccel->output_frame(avctx, frame);
  177. +
  178. if (avctx->codec->type == AVMEDIA_TYPE_VIDEO) {
  179. if (frame->flags & AV_FRAME_FLAG_DISCARD)
  180. got_frame = 0;
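
The avcodec.h and decode.c hunks above add a per-hwaccel output_frame() hook that the generic decode path calls once for every frame about to be handed to the API user. A minimal sketch of the shape such a callback can take follows; the function name and the commented-out helpers are placeholders for illustration, not the implementation that appears later in this diff.

/* Hypothetical hwaccel-side hook matching the new AVHWAccel.output_frame
 * signature added in avcodec.h above. It runs after decoding, right before
 * the frame is returned to the caller, so late per-frame work (waiting for
 * the hardware request to finish, attaching export data, ...) can live here. */
#include <libavcodec/avcodec.h>

static int v4l2request_output_frame(AVCodecContext *avctx, AVFrame *frame)
{
    if (!frame->data[0])
        return 0;                       /* nothing was decoded into this frame */

    /* e.g. block until the V4L2 request backing this frame has completed,
     * then export it; both helpers are placeholders for illustration. */
    // wait_for_request_completion(avctx, frame);
    // export_drm_prime_descriptor(avctx, frame);

    return 0;
}
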
  181. diff -uNr '--exclude=.git' '--exclude=sunxi' ffmpeg_n4.2.2/libavcodec/dxva2.c ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/dxva2.c
  182. --- ffmpeg_n4.2.2/libavcodec/dxva2.c 2020-05-21 20:25:05.303844291 -0700
  183. +++ ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/dxva2.c 2020-05-26 03:16:38.853174679 -0700
  184. @@ -771,16 +771,18 @@
  185. #if CONFIG_D3D11VA
  186. if (avctx->pix_fmt == AV_PIX_FMT_D3D11)
  187. return (intptr_t)frame->data[1];
  188. - if (avctx->pix_fmt == AV_PIX_FMT_D3D11VA_VLD) {
  189. + if (avctx->pix_fmt == AV_PIX_FMT_D3D11VA_VLD && surface) {
  190. D3D11_VIDEO_DECODER_OUTPUT_VIEW_DESC viewDesc;
  191. ID3D11VideoDecoderOutputView_GetDesc((ID3D11VideoDecoderOutputView*) surface, &viewDesc);
  192. return viewDesc.Texture2D.ArraySlice;
  193. }
  194. #endif
  195. #if CONFIG_DXVA2
  196. - for (i = 0; i < DXVA_CONTEXT_COUNT(avctx, ctx); i++) {
  197. - if (avctx->pix_fmt == AV_PIX_FMT_DXVA2_VLD && ctx->dxva2.surface[i] == surface)
  198. - return i;
  199. + if (avctx->pix_fmt == AV_PIX_FMT_DXVA2_VLD) {
  200. + for (i = 0; i < DXVA_CONTEXT_COUNT(avctx, ctx); i++) {
  201. + if (ctx->dxva2.surface[i] == surface)
  202. + return i;
  203. + }
  204. }
  205. #endif
  206.  
  207. diff -uNr '--exclude=.git' '--exclude=sunxi' ffmpeg_n4.2.2/libavcodec/dxva2_h264.c ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/dxva2_h264.c
  208. --- ffmpeg_n4.2.2/libavcodec/dxva2_h264.c 2020-05-21 20:25:05.313844099 -0700
  209. +++ ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/dxva2_h264.c 2020-05-26 03:16:38.853174679 -0700
  210. @@ -504,6 +504,14 @@
  211.  
  212. if (ctx_pic->slice_count <= 0 || ctx_pic->bitstream_size <= 0)
  213. return -1;
  214. +
  215. + // Wait for an I-frame before start decoding. Workaround for ATI UVD and UVD+ GPUs
  216. + if (!h->got_first_iframe) {
  217. + if (!(ctx_pic->pp.wBitFields & (1 << 15)))
  218. + return -1;
  219. + h->got_first_iframe = 1;
  220. + }
  221. +
  222. ret = ff_dxva2_common_end_frame(avctx, h->cur_pic_ptr->f,
  223. &ctx_pic->pp, sizeof(ctx_pic->pp),
  224. &ctx_pic->qm, sizeof(ctx_pic->qm),
  225. diff -uNr '--exclude=.git' '--exclude=sunxi' ffmpeg_n4.2.2/libavcodec/h264-ctrls.h ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/h264-ctrls.h
  226. --- ffmpeg_n4.2.2/libavcodec/h264-ctrls.h 1969-12-31 16:00:00.000000000 -0800
  227. +++ ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/h264-ctrls.h 2020-05-26 03:16:38.933174560 -0700
  228. @@ -0,0 +1,219 @@
  229. +/* SPDX-License-Identifier: GPL-2.0 */
  230. +/*
  231. + * These are the H.264 state controls for use with stateless H.264
  232. + * codec drivers.
  233. + *
  234. + * It turns out that these structs are not stable yet and will undergo
  235. + * more changes. So keep them private until they are stable and ready to
  236. + * become part of the official public API.
  237. + */
  238. +
  239. +#ifndef _H264_CTRLS_H_
  240. +#define _H264_CTRLS_H_
  241. +
  242. +#include <linux/videodev2.h>
  243. +
  244. +/*
  245. + * Maximum DPB size, as specified by section 'A.3.1 Level limits
  246. + * common to the Baseline, Main, and Extended profiles'.
  247. + */
  248. +#define V4L2_H264_NUM_DPB_ENTRIES 16
  249. +
  250. +/* Our pixel format isn't stable at the moment */
  251. +#define V4L2_PIX_FMT_H264_SLICE v4l2_fourcc('S', '2', '6', '4') /* H264 parsed slices */
  252. +
  253. +/*
  254. + * This is put insanely high to avoid conflicting with controls that
  255. + * would be added during the phase where those controls are not
  256. + * stable. It should be fixed eventually.
  257. + */
  258. +#define V4L2_CID_MPEG_VIDEO_H264_SPS (V4L2_CID_MPEG_BASE+1000)
  259. +#define V4L2_CID_MPEG_VIDEO_H264_PPS (V4L2_CID_MPEG_BASE+1001)
  260. +#define V4L2_CID_MPEG_VIDEO_H264_SCALING_MATRIX (V4L2_CID_MPEG_BASE+1002)
  261. +#define V4L2_CID_MPEG_VIDEO_H264_SLICE_PARAMS (V4L2_CID_MPEG_BASE+1003)
  262. +#define V4L2_CID_MPEG_VIDEO_H264_DECODE_PARAMS (V4L2_CID_MPEG_BASE+1004)
  263. +#define V4L2_CID_MPEG_VIDEO_H264_DECODE_MODE (V4L2_CID_MPEG_BASE+1005)
  264. +#define V4L2_CID_MPEG_VIDEO_H264_START_CODE (V4L2_CID_MPEG_BASE+1006)
  265. +
  266. +/* enum v4l2_ctrl_type type values */
  267. +#define V4L2_CTRL_TYPE_H264_SPS 0x0110
  268. +#define V4L2_CTRL_TYPE_H264_PPS 0x0111
  269. +#define V4L2_CTRL_TYPE_H264_SCALING_MATRIX 0x0112
  270. +#define V4L2_CTRL_TYPE_H264_SLICE_PARAMS 0x0113
  271. +#define V4L2_CTRL_TYPE_H264_DECODE_PARAMS 0x0114
  272. +
  273. +enum v4l2_mpeg_video_h264_decode_mode {
  274. + V4L2_MPEG_VIDEO_H264_DECODE_MODE_SLICE_BASED,
  275. + V4L2_MPEG_VIDEO_H264_DECODE_MODE_FRAME_BASED,
  276. +};
  277. +
  278. +enum v4l2_mpeg_video_h264_start_code {
  279. + V4L2_MPEG_VIDEO_H264_START_CODE_NONE,
  280. + V4L2_MPEG_VIDEO_H264_START_CODE_ANNEX_B,
  281. +};
  282. +
  283. +#define V4L2_H264_SPS_CONSTRAINT_SET0_FLAG 0x01
  284. +#define V4L2_H264_SPS_CONSTRAINT_SET1_FLAG 0x02
  285. +#define V4L2_H264_SPS_CONSTRAINT_SET2_FLAG 0x04
  286. +#define V4L2_H264_SPS_CONSTRAINT_SET3_FLAG 0x08
  287. +#define V4L2_H264_SPS_CONSTRAINT_SET4_FLAG 0x10
  288. +#define V4L2_H264_SPS_CONSTRAINT_SET5_FLAG 0x20
  289. +
  290. +#define V4L2_H264_SPS_FLAG_SEPARATE_COLOUR_PLANE 0x01
  291. +#define V4L2_H264_SPS_FLAG_QPPRIME_Y_ZERO_TRANSFORM_BYPASS 0x02
  292. +#define V4L2_H264_SPS_FLAG_DELTA_PIC_ORDER_ALWAYS_ZERO 0x04
  293. +#define V4L2_H264_SPS_FLAG_GAPS_IN_FRAME_NUM_VALUE_ALLOWED 0x08
  294. +#define V4L2_H264_SPS_FLAG_FRAME_MBS_ONLY 0x10
  295. +#define V4L2_H264_SPS_FLAG_MB_ADAPTIVE_FRAME_FIELD 0x20
  296. +#define V4L2_H264_SPS_FLAG_DIRECT_8X8_INFERENCE 0x40
  297. +
  298. +struct v4l2_ctrl_h264_sps {
  299. + __u8 profile_idc;
  300. + __u8 constraint_set_flags;
  301. + __u8 level_idc;
  302. + __u8 seq_parameter_set_id;
  303. + __u8 chroma_format_idc;
  304. + __u8 bit_depth_luma_minus8;
  305. + __u8 bit_depth_chroma_minus8;
  306. + __u8 log2_max_frame_num_minus4;
  307. + __u8 pic_order_cnt_type;
  308. + __u8 log2_max_pic_order_cnt_lsb_minus4;
  309. + __u8 max_num_ref_frames;
  310. + __u8 num_ref_frames_in_pic_order_cnt_cycle;
  311. + __s32 offset_for_ref_frame[255];
  312. + __s32 offset_for_non_ref_pic;
  313. + __s32 offset_for_top_to_bottom_field;
  314. + __u16 pic_width_in_mbs_minus1;
  315. + __u16 pic_height_in_map_units_minus1;
  316. + __u32 flags;
  317. +};
  318. +
  319. +#define V4L2_H264_PPS_FLAG_ENTROPY_CODING_MODE 0x0001
  320. +#define V4L2_H264_PPS_FLAG_BOTTOM_FIELD_PIC_ORDER_IN_FRAME_PRESENT 0x0002
  321. +#define V4L2_H264_PPS_FLAG_WEIGHTED_PRED 0x0004
  322. +#define V4L2_H264_PPS_FLAG_DEBLOCKING_FILTER_CONTROL_PRESENT 0x0008
  323. +#define V4L2_H264_PPS_FLAG_CONSTRAINED_INTRA_PRED 0x0010
  324. +#define V4L2_H264_PPS_FLAG_REDUNDANT_PIC_CNT_PRESENT 0x0020
  325. +#define V4L2_H264_PPS_FLAG_TRANSFORM_8X8_MODE 0x0040
  326. +#define V4L2_H264_PPS_FLAG_PIC_SCALING_MATRIX_PRESENT 0x0080
  327. +
  328. +struct v4l2_ctrl_h264_pps {
  329. + __u8 pic_parameter_set_id;
  330. + __u8 seq_parameter_set_id;
  331. + __u8 num_slice_groups_minus1;
  332. + __u8 num_ref_idx_l0_default_active_minus1;
  333. + __u8 num_ref_idx_l1_default_active_minus1;
  334. + __u8 weighted_bipred_idc;
  335. + __s8 pic_init_qp_minus26;
  336. + __s8 pic_init_qs_minus26;
  337. + __s8 chroma_qp_index_offset;
  338. + __s8 second_chroma_qp_index_offset;
  339. + __u16 flags;
  340. +};
  341. +
  342. +struct v4l2_ctrl_h264_scaling_matrix {
  343. + __u8 scaling_list_4x4[6][16];
  344. + __u8 scaling_list_8x8[6][64];
  345. +};
  346. +
  347. +struct v4l2_h264_weight_factors {
  348. + __s16 luma_weight[32];
  349. + __s16 luma_offset[32];
  350. + __s16 chroma_weight[32][2];
  351. + __s16 chroma_offset[32][2];
  352. +};
  353. +
  354. +struct v4l2_h264_pred_weight_table {
  355. + __u16 luma_log2_weight_denom;
  356. + __u16 chroma_log2_weight_denom;
  357. + struct v4l2_h264_weight_factors weight_factors[2];
  358. +};
  359. +
  360. +#define V4L2_H264_SLICE_TYPE_P 0
  361. +#define V4L2_H264_SLICE_TYPE_B 1
  362. +#define V4L2_H264_SLICE_TYPE_I 2
  363. +#define V4L2_H264_SLICE_TYPE_SP 3
  364. +#define V4L2_H264_SLICE_TYPE_SI 4
  365. +
  366. +#define V4L2_H264_SLICE_FLAG_FIELD_PIC 0x01
  367. +#define V4L2_H264_SLICE_FLAG_BOTTOM_FIELD 0x02
  368. +#define V4L2_H264_SLICE_FLAG_DIRECT_SPATIAL_MV_PRED 0x04
  369. +#define V4L2_H264_SLICE_FLAG_SP_FOR_SWITCH 0x08
  370. +
  371. +struct v4l2_ctrl_h264_slice_params {
  372. + /* Size in bytes, including header */
  373. + __u32 size;
  374. +
  375. + /* Offset in bytes to the start of slice in the OUTPUT buffer. */
  376. + __u32 start_byte_offset;
  377. +
  378. + /* Offset in bits to slice_data() from the beginning of this slice. */
  379. + __u32 header_bit_size;
  380. +
  381. + __u16 first_mb_in_slice;
  382. + __u8 slice_type;
  383. + __u8 pic_parameter_set_id;
  384. + __u8 colour_plane_id;
  385. + __u8 redundant_pic_cnt;
  386. + __u16 frame_num;
  387. + __u16 idr_pic_id;
  388. + __u16 pic_order_cnt_lsb;
  389. + __s32 delta_pic_order_cnt_bottom;
  390. + __s32 delta_pic_order_cnt0;
  391. + __s32 delta_pic_order_cnt1;
  392. +
  393. + struct v4l2_h264_pred_weight_table pred_weight_table;
  394. + /* Size in bits of dec_ref_pic_marking() syntax element. */
  395. + __u32 dec_ref_pic_marking_bit_size;
  396. + /* Size in bits of pic order count syntax. */
  397. + __u32 pic_order_cnt_bit_size;
  398. +
  399. + __u8 cabac_init_idc;
  400. + __s8 slice_qp_delta;
  401. + __s8 slice_qs_delta;
  402. + __u8 disable_deblocking_filter_idc;
  403. + __s8 slice_alpha_c0_offset_div2;
  404. + __s8 slice_beta_offset_div2;
  405. + __u8 num_ref_idx_l0_active_minus1;
  406. + __u8 num_ref_idx_l1_active_minus1;
  407. + __u32 slice_group_change_cycle;
  408. +
  409. + /*
  410. + * Entries on each list are indices into
  411. + * v4l2_ctrl_h264_decode_params.dpb[].
  412. + */
  413. + __u8 ref_pic_list0[32];
  414. + __u8 ref_pic_list1[32];
  415. +
  416. + __u32 flags;
  417. +};
  418. +
  419. +#define V4L2_H264_DPB_ENTRY_FLAG_TOP_REF 0x01
  420. +#define V4L2_H264_DPB_ENTRY_FLAG_BOTTOM_REF 0x02
  421. +#define V4L2_H264_DPB_ENTRY_FLAG_ACTIVE (0x01|0x02)
  422. +#define V4L2_H264_DPB_ENTRY_FLAG_LONG_TERM 0x04
  423. +#define V4L2_H264_DPB_ENTRY_FLAG_FIELD_PIC 0x08
  424. +#define V4L2_H264_DPB_ENTRY_FLAG_VALID 0x10
  425. +
  426. +struct v4l2_h264_dpb_entry {
  427. + __u64 reference_ts;
  428. + __u16 frame_num;
  429. + __u16 pic_num;
  430. + /* Note that field is indicated by v4l2_buffer.field */
  431. + __s32 top_field_order_cnt;
  432. + __s32 bottom_field_order_cnt;
  433. + __u32 flags; /* V4L2_H264_DPB_ENTRY_FLAG_* */
  434. +};
  435. +
  436. +#define V4L2_H264_DECODE_PARAM_FLAG_IDR_PIC 0x01
  437. +
  438. +struct v4l2_ctrl_h264_decode_params {
  439. + struct v4l2_h264_dpb_entry dpb[V4L2_H264_NUM_DPB_ENTRIES];
  440. + __u16 num_slices;
  441. + __u16 nal_ref_idc;
  442. + __s32 top_field_order_cnt;
  443. + __s32 bottom_field_order_cnt;
  444. + __u32 flags; /* V4L2_H264_DECODE_PARAM_FLAG_* */
  445. +};
  446. +
  447. +#endif
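
For context on how the controls declared in this header reach a driver: they are packed into v4l2_ext_control entries and staged into a media request with VIDIOC_S_EXT_CTRLS and V4L2_CTRL_WHICH_REQUEST_VAL, the same pattern v4l2_request.c uses further down. A hedged sketch with placeholder file descriptors:

/* Sketch: hand the per-frame H.264 SPS state from this header to a stateless
 * driver. The video and request fds are placeholders; the real plumbing is
 * in v4l2_request.c later in this diff. */
#include <sys/ioctl.h>
#include <linux/videodev2.h>
#include "h264-ctrls.h"

static int submit_h264_sps(int video_fd, int request_fd,
                           const struct v4l2_ctrl_h264_sps *sps)
{
    struct v4l2_ext_control control = {
        .id   = V4L2_CID_MPEG_VIDEO_H264_SPS,
        .size = sizeof(*sps),
        .ptr  = (void *)sps,
    };
    struct v4l2_ext_controls controls = {
        .controls   = &control,
        .count      = 1,
        .request_fd = request_fd,
        .which      = V4L2_CTRL_WHICH_REQUEST_VAL, /* stage into the request */
    };

    return ioctl(video_fd, VIDIOC_S_EXT_CTRLS, &controls);
}
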
  448. diff -uNr '--exclude=.git' '--exclude=sunxi' ffmpeg_n4.2.2/libavcodec/h264dec.c ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/h264dec.c
  449. --- ffmpeg_n4.2.2/libavcodec/h264dec.c 2020-05-21 20:25:05.343843522 -0700
  450. +++ ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/h264dec.c 2020-05-26 03:16:38.963174516 -0700
  451. @@ -486,6 +486,7 @@
  452.  
  453. h->next_outputed_poc = INT_MIN;
  454. h->prev_interlaced_frame = 1;
  455. + h->got_first_iframe = 0;
  456. idr(h);
  457.  
  458. h->poc.prev_frame_num = -1;
  459. @@ -1081,6 +1082,9 @@
  460. #if CONFIG_H264_VIDEOTOOLBOX_HWACCEL
  461. HWACCEL_VIDEOTOOLBOX(h264),
  462. #endif
  463. +#if CONFIG_H264_V4L2REQUEST_HWACCEL
  464. + HWACCEL_V4L2REQUEST(h264),
  465. +#endif
  466. NULL
  467. },
  468. .caps_internal = FF_CODEC_CAP_INIT_THREADSAFE | FF_CODEC_CAP_EXPORTS_CROPPING,
  469. diff -uNr '--exclude=.git' '--exclude=sunxi' ffmpeg_n4.2.2/libavcodec/h264dec.h ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/h264dec.h
  470. --- ffmpeg_n4.2.2/libavcodec/h264dec.h 2020-05-21 20:25:05.343843522 -0700
  471. +++ ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/h264dec.h 2020-05-26 03:16:38.963174516 -0700
  472. @@ -184,6 +184,8 @@
  473. int slice_type_nos; ///< S free slice type (SI/SP are remapped to I/P)
  474. int slice_type_fixed;
  475.  
  476. + int idr_pic_id;
  477. +
  478. int qscale;
  479. int chroma_qp[2]; // QPc
  480. int qp_thresh; ///< QP threshold to skip loopfilter
  481. @@ -322,11 +324,13 @@
  482. MMCO mmco[MAX_MMCO_COUNT];
  483. int nb_mmco;
  484. int explicit_ref_marking;
  485. + int ref_pic_marking_size_in_bits;
  486.  
  487. int frame_num;
  488. int poc_lsb;
  489. int delta_poc_bottom;
  490. int delta_poc[2];
  491. + int pic_order_cnt_bit_size;
  492. int curr_pic_num;
  493. int max_pic_num;
  494. } H264SliceContext;
  495. @@ -533,6 +537,8 @@
  496. * slices) anymore */
  497. int setup_finished;
  498.  
  499. + int got_first_iframe;
  500. +
  501. int cur_chroma_format_idc;
  502. int cur_bit_depth_luma;
  503. int16_t slice_row[MAX_SLICES]; ///< to detect when MAX_SLICES is too low
  504. diff -uNr '--exclude=.git' '--exclude=sunxi' ffmpeg_n4.2.2/libavcodec/h264_slice.c ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/h264_slice.c
  505. --- ffmpeg_n4.2.2/libavcodec/h264_slice.c 2020-05-21 20:25:05.343843522 -0700
  506. +++ ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/h264_slice.c 2020-05-26 03:16:38.963174516 -0700
  507. @@ -765,6 +765,7 @@
  508. #define HWACCEL_MAX (CONFIG_H264_DXVA2_HWACCEL + \
  509. (CONFIG_H264_D3D11VA_HWACCEL * 2) + \
  510. CONFIG_H264_NVDEC_HWACCEL + \
  511. + CONFIG_H264_V4L2REQUEST_HWACCEL + \
  512. CONFIG_H264_VAAPI_HWACCEL + \
  513. CONFIG_H264_VIDEOTOOLBOX_HWACCEL + \
  514. CONFIG_H264_VDPAU_HWACCEL)
  515. @@ -790,10 +791,17 @@
  516. *fmt++ = AV_PIX_FMT_GBRP10;
  517. } else
  518. *fmt++ = AV_PIX_FMT_YUV444P10;
  519. - } else if (CHROMA422(h))
  520. + } else if (CHROMA422(h)) {
  521. +#if CONFIG_H264_V4L2REQUEST_HWACCEL
  522. + *fmt++ = AV_PIX_FMT_DRM_PRIME;
  523. +#endif
  524. *fmt++ = AV_PIX_FMT_YUV422P10;
  525. - else
  526. + } else {
  527. +#if CONFIG_H264_V4L2REQUEST_HWACCEL
  528. + *fmt++ = AV_PIX_FMT_DRM_PRIME;
  529. +#endif
  530. *fmt++ = AV_PIX_FMT_YUV420P10;
  531. + }
  532. break;
  533. case 12:
  534. if (CHROMA444(h)) {
  535. @@ -832,6 +840,9 @@
  536. else
  537. *fmt++ = AV_PIX_FMT_YUV444P;
  538. } else if (CHROMA422(h)) {
  539. +#if CONFIG_H264_V4L2REQUEST_HWACCEL
  540. + *fmt++ = AV_PIX_FMT_DRM_PRIME;
  541. +#endif
  542. if (h->avctx->color_range == AVCOL_RANGE_JPEG)
  543. *fmt++ = AV_PIX_FMT_YUVJ422P;
  544. else
  545. @@ -850,6 +861,9 @@
  546. #if CONFIG_H264_VIDEOTOOLBOX_HWACCEL
  547. *fmt++ = AV_PIX_FMT_VIDEOTOOLBOX;
  548. #endif
  549. +#if CONFIG_H264_V4L2REQUEST_HWACCEL
  550. + *fmt++ = AV_PIX_FMT_DRM_PRIME;
  551. +#endif
  552. if (h->avctx->codec->pix_fmts)
  553. choices = h->avctx->codec->pix_fmts;
  554. else if (h->avctx->color_range == AVCOL_RANGE_JPEG)
  555. @@ -936,6 +950,7 @@
  556.  
  557. h->first_field = 0;
  558. h->prev_interlaced_frame = 1;
  559. + h->got_first_iframe = 0;
  560.  
  561. init_scan_tables(h);
  562. ret = ff_h264_alloc_tables(h);
  563. @@ -1731,7 +1746,7 @@
  564. unsigned int slice_type, tmp, i;
  565. int field_pic_flag, bottom_field_flag;
  566. int first_slice = sl == h->slice_ctx && !h->current_slice;
  567. - int picture_structure;
  568. + int picture_structure, pos;
  569.  
  570. if (first_slice)
  571. av_assert0(!h->setup_finished);
  572. @@ -1819,8 +1834,9 @@
  573. }
  574.  
  575. if (nal->type == H264_NAL_IDR_SLICE)
  576. - get_ue_golomb_long(&sl->gb); /* idr_pic_id */
  577. + sl->idr_pic_id = get_ue_golomb_long(&sl->gb);
  578.  
  579. + pos = sl->gb.index;
  580. if (sps->poc_type == 0) {
  581. sl->poc_lsb = get_bits(&sl->gb, sps->log2_max_poc_lsb);
  582.  
  583. @@ -1834,6 +1850,7 @@
  584. if (pps->pic_order_present == 1 && picture_structure == PICT_FRAME)
  585. sl->delta_poc[1] = get_se_golomb(&sl->gb);
  586. }
  587. + sl->pic_order_cnt_bit_size = sl->gb.index - pos;
  588.  
  589. sl->redundant_pic_count = 0;
  590. if (pps->redundant_pic_cnt_present)
  591. @@ -1873,9 +1890,11 @@
  592.  
  593. sl->explicit_ref_marking = 0;
  594. if (nal->ref_idc) {
  595. + int bit_pos = sl->gb.index;
  596. ret = ff_h264_decode_ref_pic_marking(sl, &sl->gb, nal, h->avctx);
  597. if (ret < 0 && (h->avctx->err_recognition & AV_EF_EXPLODE))
  598. return AVERROR_INVALIDDATA;
  599. + sl->ref_pic_marking_size_in_bits = sl->gb.index - bit_pos;
  600. }
  601.  
  602. if (sl->slice_type_nos != AV_PICTURE_TYPE_I && pps->cabac) {
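
The h264_slice.c hunks above do no decoding themselves; they only record where certain slice-header syntax elements sit (idr_pic_id, the bit size of the picture-order-count fields, the bit size of dec_ref_pic_marking()), because the slice-params control in h264-ctrls.h expects exactly those values. A sketch of that mapping, assuming a helper of this shape exists on the hwaccel side:

/* Illustrative only: copying the newly recorded parser state into the
 * v4l2_ctrl_h264_slice_params control defined in h264-ctrls.h above.
 * The H264SliceContext fields match the hunks in this diff; the helper
 * itself is a placeholder. */
#include "h264dec.h"
#include "h264-ctrls.h"

static void fill_slice_sizes(const H264SliceContext *sl,
                             struct v4l2_ctrl_h264_slice_params *sp)
{
    sp->idr_pic_id                   = sl->idr_pic_id;
    sp->pic_order_cnt_bit_size       = sl->pic_order_cnt_bit_size;
    sp->dec_ref_pic_marking_bit_size = sl->ref_pic_marking_size_in_bits;
}
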
  603. diff -uNr '--exclude=.git' '--exclude=sunxi' ffmpeg_n4.2.2/libavcodec/hevc-ctrls.h ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/hevc-ctrls.h
  604. --- ffmpeg_n4.2.2/libavcodec/hevc-ctrls.h 1969-12-31 16:00:00.000000000 -0800
  605. +++ ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/hevc-ctrls.h 2020-05-26 03:16:38.973174501 -0700
  606. @@ -0,0 +1,238 @@
  607. +/* SPDX-License-Identifier: GPL-2.0 */
  608. +/*
  609. + * These are the HEVC state controls for use with stateless HEVC
  610. + * codec drivers.
  611. + *
  612. + * It turns out that these structs are not stable yet and will undergo
  613. + * more changes. So keep them private until they are stable and ready to
  614. + * become part of the official public API.
  615. + */
  616. +
  617. +#ifndef _HEVC_CTRLS_H_
  618. +#define _HEVC_CTRLS_H_
  619. +
  620. +#include <linux/videodev2.h>
  621. +
  622. +/* The pixel format isn't stable at the moment and will likely be renamed. */
  623. +#define V4L2_PIX_FMT_HEVC_SLICE v4l2_fourcc('S', '2', '6', '5') /* HEVC parsed slices */
  624. +
  625. +#define V4L2_CID_MPEG_VIDEO_HEVC_SPS (V4L2_CID_MPEG_BASE + 1008)
  626. +#define V4L2_CID_MPEG_VIDEO_HEVC_PPS (V4L2_CID_MPEG_BASE + 1009)
  627. +#define V4L2_CID_MPEG_VIDEO_HEVC_SLICE_PARAMS (V4L2_CID_MPEG_BASE + 1010)
  628. +#define V4L2_CID_MPEG_VIDEO_HEVC_SCALING_MATRIX (V4L2_CID_MPEG_BASE + 1011)
  629. +#define V4L2_CID_MPEG_VIDEO_HEVC_DECODE_MODE (V4L2_CID_MPEG_BASE + 1015)
  630. +#define V4L2_CID_MPEG_VIDEO_HEVC_START_CODE (V4L2_CID_MPEG_BASE + 1016)
  631. +
  632. +/* enum v4l2_ctrl_type type values */
  633. +#define V4L2_CTRL_TYPE_HEVC_SPS 0x0120
  634. +#define V4L2_CTRL_TYPE_HEVC_PPS 0x0121
  635. +#define V4L2_CTRL_TYPE_HEVC_SLICE_PARAMS 0x0122
  636. +#define V4L2_CTRL_TYPE_HEVC_SCALING_MATRIX 0x0123
  637. +
  638. +enum v4l2_mpeg_video_hevc_decode_mode {
  639. + V4L2_MPEG_VIDEO_HEVC_DECODE_MODE_SLICE_BASED,
  640. + V4L2_MPEG_VIDEO_HEVC_DECODE_MODE_FRAME_BASED,
  641. +};
  642. +
  643. +enum v4l2_mpeg_video_hevc_start_code {
  644. + V4L2_MPEG_VIDEO_HEVC_START_CODE_NONE,
  645. + V4L2_MPEG_VIDEO_HEVC_START_CODE_ANNEX_B,
  646. +};
  647. +
  648. +#define V4L2_HEVC_SLICE_TYPE_B 0
  649. +#define V4L2_HEVC_SLICE_TYPE_P 1
  650. +#define V4L2_HEVC_SLICE_TYPE_I 2
  651. +
  652. +#define V4L2_HEVC_SPS_FLAG_SEPARATE_COLOUR_PLANE (1ULL << 0)
  653. +#define V4L2_HEVC_SPS_FLAG_SCALING_LIST_ENABLED (1ULL << 1)
  654. +#define V4L2_HEVC_SPS_FLAG_AMP_ENABLED (1ULL << 2)
  655. +#define V4L2_HEVC_SPS_FLAG_SAMPLE_ADAPTIVE_OFFSET (1ULL << 3)
  656. +#define V4L2_HEVC_SPS_FLAG_PCM_ENABLED (1ULL << 4)
  657. +#define V4L2_HEVC_SPS_FLAG_PCM_LOOP_FILTER_DISABLED (1ULL << 5)
  658. +#define V4L2_HEVC_SPS_FLAG_LONG_TERM_REF_PICS_PRESENT (1ULL << 6)
  659. +#define V4L2_HEVC_SPS_FLAG_SPS_TEMPORAL_MVP_ENABLED (1ULL << 7)
  660. +#define V4L2_HEVC_SPS_FLAG_STRONG_INTRA_SMOOTHING_ENABLED (1ULL << 8)
  661. +
  662. +/* The controls are not stable at the moment and will likely be reworked. */
  663. +struct v4l2_ctrl_hevc_sps {
  664. + /* ISO/IEC 23008-2, ITU-T Rec. H.265: Sequence parameter set */
  665. + __u8 video_parameter_set_id;
  666. + __u8 seq_parameter_set_id;
  667. + __u8 chroma_format_idc;
  668. + __u16 pic_width_in_luma_samples;
  669. + __u16 pic_height_in_luma_samples;
  670. + __u8 bit_depth_luma_minus8;
  671. + __u8 bit_depth_chroma_minus8;
  672. + __u8 log2_max_pic_order_cnt_lsb_minus4;
  673. + __u8 sps_max_dec_pic_buffering_minus1;
  674. + __u8 sps_max_num_reorder_pics;
  675. + __u8 sps_max_latency_increase_plus1;
  676. + __u8 log2_min_luma_coding_block_size_minus3;
  677. + __u8 log2_diff_max_min_luma_coding_block_size;
  678. + __u8 log2_min_luma_transform_block_size_minus2;
  679. + __u8 log2_diff_max_min_luma_transform_block_size;
  680. + __u8 max_transform_hierarchy_depth_inter;
  681. + __u8 max_transform_hierarchy_depth_intra;
  682. + __u8 pcm_sample_bit_depth_luma_minus1;
  683. + __u8 pcm_sample_bit_depth_chroma_minus1;
  684. + __u8 log2_min_pcm_luma_coding_block_size_minus3;
  685. + __u8 log2_diff_max_min_pcm_luma_coding_block_size;
  686. + __u8 num_short_term_ref_pic_sets;
  687. + __u8 num_long_term_ref_pics_sps;
  688. +
  689. + __u8 num_slices;
  690. + __u8 padding[6];
  691. +
  692. + __u64 flags;
  693. +};
  694. +
  695. +#define V4L2_HEVC_PPS_FLAG_DEPENDENT_SLICE_SEGMENT (1ULL << 0)
  696. +#define V4L2_HEVC_PPS_FLAG_OUTPUT_FLAG_PRESENT (1ULL << 1)
  697. +#define V4L2_HEVC_PPS_FLAG_SIGN_DATA_HIDING_ENABLED (1ULL << 2)
  698. +#define V4L2_HEVC_PPS_FLAG_CABAC_INIT_PRESENT (1ULL << 3)
  699. +#define V4L2_HEVC_PPS_FLAG_CONSTRAINED_INTRA_PRED (1ULL << 4)
  700. +#define V4L2_HEVC_PPS_FLAG_TRANSFORM_SKIP_ENABLED (1ULL << 5)
  701. +#define V4L2_HEVC_PPS_FLAG_CU_QP_DELTA_ENABLED (1ULL << 6)
  702. +#define V4L2_HEVC_PPS_FLAG_PPS_SLICE_CHROMA_QP_OFFSETS_PRESENT (1ULL << 7)
  703. +#define V4L2_HEVC_PPS_FLAG_WEIGHTED_PRED (1ULL << 8)
  704. +#define V4L2_HEVC_PPS_FLAG_WEIGHTED_BIPRED (1ULL << 9)
  705. +#define V4L2_HEVC_PPS_FLAG_TRANSQUANT_BYPASS_ENABLED (1ULL << 10)
  706. +#define V4L2_HEVC_PPS_FLAG_TILES_ENABLED (1ULL << 11)
  707. +#define V4L2_HEVC_PPS_FLAG_ENTROPY_CODING_SYNC_ENABLED (1ULL << 12)
  708. +#define V4L2_HEVC_PPS_FLAG_LOOP_FILTER_ACROSS_TILES_ENABLED (1ULL << 13)
  709. +#define V4L2_HEVC_PPS_FLAG_PPS_LOOP_FILTER_ACROSS_SLICES_ENABLED (1ULL << 14)
  710. +#define V4L2_HEVC_PPS_FLAG_DEBLOCKING_FILTER_OVERRIDE_ENABLED (1ULL << 15)
  711. +#define V4L2_HEVC_PPS_FLAG_PPS_DISABLE_DEBLOCKING_FILTER (1ULL << 16)
  712. +#define V4L2_HEVC_PPS_FLAG_LISTS_MODIFICATION_PRESENT (1ULL << 17)
  713. +#define V4L2_HEVC_PPS_FLAG_SLICE_SEGMENT_HEADER_EXTENSION_PRESENT (1ULL << 18)
  714. +
  715. +struct v4l2_ctrl_hevc_pps {
  716. + /* ISO/IEC 23008-2, ITU-T Rec. H.265: Picture parameter set */
  717. + __u8 pic_parameter_set_id;
  718. + __u8 num_extra_slice_header_bits;
  719. + __u8 num_ref_idx_l0_default_active_minus1;
  720. + __u8 num_ref_idx_l1_default_active_minus1;
  721. + __s8 init_qp_minus26;
  722. + __u8 diff_cu_qp_delta_depth;
  723. + __s8 pps_cb_qp_offset;
  724. + __s8 pps_cr_qp_offset;
  725. + __u8 num_tile_columns_minus1;
  726. + __u8 num_tile_rows_minus1;
  727. + __u8 column_width_minus1[20];
  728. + __u8 row_height_minus1[22];
  729. + __s8 pps_beta_offset_div2;
  730. + __s8 pps_tc_offset_div2;
  731. + __u8 log2_parallel_merge_level_minus2;
  732. +
  733. + __u8 padding;
  734. + __u64 flags;
  735. +};
  736. +
  737. +#define V4L2_HEVC_DPB_ENTRY_RPS_ST_CURR_BEFORE 0x01
  738. +#define V4L2_HEVC_DPB_ENTRY_RPS_ST_CURR_AFTER 0x02
  739. +#define V4L2_HEVC_DPB_ENTRY_RPS_LT_CURR 0x03
  740. +
  741. +#define V4L2_HEVC_DPB_ENTRIES_NUM_MAX 16
  742. +
  743. +struct v4l2_hevc_dpb_entry {
  744. + __u64 timestamp;
  745. + __u8 rps;
  746. + __u8 field_pic;
  747. + __u16 pic_order_cnt[2];
  748. + __u8 padding[2];
  749. +};
  750. +
  751. +struct v4l2_hevc_pred_weight_table {
  752. + __s8 delta_luma_weight_l0[V4L2_HEVC_DPB_ENTRIES_NUM_MAX];
  753. + __s8 luma_offset_l0[V4L2_HEVC_DPB_ENTRIES_NUM_MAX];
  754. + __s8 delta_chroma_weight_l0[V4L2_HEVC_DPB_ENTRIES_NUM_MAX][2];
  755. + __s8 chroma_offset_l0[V4L2_HEVC_DPB_ENTRIES_NUM_MAX][2];
  756. +
  757. + __s8 delta_luma_weight_l1[V4L2_HEVC_DPB_ENTRIES_NUM_MAX];
  758. + __s8 luma_offset_l1[V4L2_HEVC_DPB_ENTRIES_NUM_MAX];
  759. + __s8 delta_chroma_weight_l1[V4L2_HEVC_DPB_ENTRIES_NUM_MAX][2];
  760. + __s8 chroma_offset_l1[V4L2_HEVC_DPB_ENTRIES_NUM_MAX][2];
  761. +
  762. + __u8 padding[6];
  763. +
  764. + __u8 luma_log2_weight_denom;
  765. + __s8 delta_chroma_log2_weight_denom;
  766. +};
  767. +
  768. +#define V4L2_HEVC_SLICE_PARAMS_FLAG_SLICE_SAO_LUMA (1ULL << 0)
  769. +#define V4L2_HEVC_SLICE_PARAMS_FLAG_SLICE_SAO_CHROMA (1ULL << 1)
  770. +#define V4L2_HEVC_SLICE_PARAMS_FLAG_SLICE_TEMPORAL_MVP_ENABLED (1ULL << 2)
  771. +#define V4L2_HEVC_SLICE_PARAMS_FLAG_MVD_L1_ZERO (1ULL << 3)
  772. +#define V4L2_HEVC_SLICE_PARAMS_FLAG_CABAC_INIT (1ULL << 4)
  773. +#define V4L2_HEVC_SLICE_PARAMS_FLAG_COLLOCATED_FROM_L0 (1ULL << 5)
  774. +#define V4L2_HEVC_SLICE_PARAMS_FLAG_USE_INTEGER_MV (1ULL << 6)
  775. +#define V4L2_HEVC_SLICE_PARAMS_FLAG_SLICE_DEBLOCKING_FILTER_DISABLED (1ULL << 7)
  776. +#define V4L2_HEVC_SLICE_PARAMS_FLAG_SLICE_LOOP_FILTER_ACROSS_SLICES_ENABLED (1ULL << 8)
  777. +
  778. +struct v4l2_ctrl_hevc_slice_params {
  779. + __u32 bit_size;
  780. + __u32 data_bit_offset;
  781. +
  782. + /* ISO/IEC 23008-2, ITU-T Rec. H.265: General slice segment header */
  783. + __u32 slice_segment_addr;
  784. + __u32 num_entry_point_offsets;
  785. +
  786. + /* ISO/IEC 23008-2, ITU-T Rec. H.265: NAL unit header */
  787. + __u8 nal_unit_type;
  788. + __u8 nuh_temporal_id_plus1;
  789. +
  790. + /* ISO/IEC 23008-2, ITU-T Rec. H.265: General slice segment header */
  791. + __u8 slice_type;
  792. + __u8 colour_plane_id;
  793. + __u16 slice_pic_order_cnt;
  794. + __u8 num_ref_idx_l0_active_minus1;
  795. + __u8 num_ref_idx_l1_active_minus1;
  796. + __u8 collocated_ref_idx;
  797. + __u8 five_minus_max_num_merge_cand;
  798. + __s8 slice_qp_delta;
  799. + __s8 slice_cb_qp_offset;
  800. + __s8 slice_cr_qp_offset;
  801. + __s8 slice_act_y_qp_offset;
  802. + __s8 slice_act_cb_qp_offset;
  803. + __s8 slice_act_cr_qp_offset;
  804. + __s8 slice_beta_offset_div2;
  805. + __s8 slice_tc_offset_div2;
  806. +
  807. + /* ISO/IEC 23008-2, ITU-T Rec. H.265: Picture timing SEI message */
  808. + __u8 pic_struct;
  809. +
  810. + /* ISO/IEC 23008-2, ITU-T Rec. H.265: General slice segment header */
  811. + __u8 num_active_dpb_entries;
  812. + __u8 ref_idx_l0[V4L2_HEVC_DPB_ENTRIES_NUM_MAX];
  813. + __u8 ref_idx_l1[V4L2_HEVC_DPB_ENTRIES_NUM_MAX];
  814. +
  815. + __u8 num_rps_poc_st_curr_before;
  816. + __u8 num_rps_poc_st_curr_after;
  817. + __u8 num_rps_poc_lt_curr;
  818. +
  819. + __u16 short_term_ref_pic_set_size;
  820. + __u16 long_term_ref_pic_set_size;
  821. +
  822. + __u8 padding[5];
  823. +
  824. + __u32 entry_point_offset_minus1[256];
  825. +
  826. + /* ISO/IEC 23008-2, ITU-T Rec. H.265: General slice segment header */
  827. + struct v4l2_hevc_dpb_entry dpb[V4L2_HEVC_DPB_ENTRIES_NUM_MAX];
  828. +
  829. + /* ISO/IEC 23008-2, ITU-T Rec. H.265: Weighted prediction parameter */
  830. + struct v4l2_hevc_pred_weight_table pred_weight_table;
  831. +
  832. + __u64 flags;
  833. +};
  834. +
  835. +struct v4l2_ctrl_hevc_scaling_matrix {
  836. + __u8 scaling_list_4x4[6][16];
  837. + __u8 scaling_list_8x8[6][64];
  838. + __u8 scaling_list_16x16[6][64];
  839. + __u8 scaling_list_32x32[2][64];
  840. + __u8 scaling_list_dc_coef_16x16[6];
  841. + __u8 scaling_list_dc_coef_32x32[2];
  842. +};
  843. +
  844. +#endif
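
Worth noting about the DPB layout above: each v4l2_hevc_dpb_entry identifies its reference picture by the nanosecond timestamp of the V4L2 capture buffer it was decoded into, which is why configure now probes for v4l2_timeval_to_ns() and why v4l2_request.c exposes a capture-timestamp helper further down. A hedged sketch of filling one entry; the rps value and field handling are illustrative only:

/* Sketch: a reference picture is keyed by the nanosecond timestamp of the
 * capture buffer that holds it, not by a buffer index. */
#include <linux/videodev2.h>
#include "hevc-ctrls.h"

static void fill_dpb_entry(struct v4l2_hevc_dpb_entry *entry,
                           const struct v4l2_buffer *capture_buf,
                           int poc)
{
    entry->timestamp        = v4l2_timeval_to_ns(&capture_buf->timestamp);
    entry->rps              = V4L2_HEVC_DPB_ENTRY_RPS_ST_CURR_BEFORE; /* example */
    entry->field_pic        = 0;
    entry->pic_order_cnt[0] = poc;
    entry->pic_order_cnt[1] = poc;
}
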
  845. diff -uNr '--exclude=.git' '--exclude=sunxi' ffmpeg_n4.2.2/libavcodec/hevcdec.c ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/hevcdec.c
  846. --- ffmpeg_n4.2.2/libavcodec/hevcdec.c 2020-05-21 20:25:05.353843330 -0700
  847. +++ ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/hevcdec.c 2020-05-26 03:16:38.983174486 -0700
  848. @@ -373,6 +373,7 @@
  849. #define HWACCEL_MAX (CONFIG_HEVC_DXVA2_HWACCEL + \
  850. CONFIG_HEVC_D3D11VA_HWACCEL * 2 + \
  851. CONFIG_HEVC_NVDEC_HWACCEL + \
  852. + CONFIG_HEVC_V4L2REQUEST_HWACCEL + \
  853. CONFIG_HEVC_VAAPI_HWACCEL + \
  854. CONFIG_HEVC_VIDEOTOOLBOX_HWACCEL + \
  855. CONFIG_HEVC_VDPAU_HWACCEL)
  856. @@ -400,6 +401,9 @@
  857. #if CONFIG_HEVC_VIDEOTOOLBOX_HWACCEL
  858. *fmt++ = AV_PIX_FMT_VIDEOTOOLBOX;
  859. #endif
  860. +#if CONFIG_HEVC_V4L2REQUEST_HWACCEL
  861. + *fmt++ = AV_PIX_FMT_DRM_PRIME;
  862. +#endif
  863. break;
  864. case AV_PIX_FMT_YUV420P10:
  865. #if CONFIG_HEVC_DXVA2_HWACCEL
  866. @@ -418,6 +422,9 @@
  867. #if CONFIG_HEVC_NVDEC_HWACCEL
  868. *fmt++ = AV_PIX_FMT_CUDA;
  869. #endif
  870. +#if CONFIG_HEVC_V4L2REQUEST_HWACCEL
  871. + *fmt++ = AV_PIX_FMT_DRM_PRIME;
  872. +#endif
  873. break;
  874. case AV_PIX_FMT_YUV444P:
  875. #if CONFIG_HEVC_VDPAU_HWACCEL
  876. @@ -3593,6 +3600,9 @@
  877. #if CONFIG_HEVC_VIDEOTOOLBOX_HWACCEL
  878. HWACCEL_VIDEOTOOLBOX(hevc),
  879. #endif
  880. +#if CONFIG_HEVC_V4L2REQUEST_HWACCEL
  881. + HWACCEL_V4L2REQUEST(hevc),
  882. +#endif
  883. NULL
  884. },
  885. };
  886. diff -uNr '--exclude=.git' '--exclude=sunxi' ffmpeg_n4.2.2/libavcodec/hwaccel.h ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/hwaccel.h
  887. --- ffmpeg_n4.2.2/libavcodec/hwaccel.h 2020-05-21 20:25:05.363843138 -0700
  888. +++ ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/hwaccel.h 2020-05-26 03:16:39.023174427 -0700
  889. @@ -80,5 +80,7 @@
  890. HW_CONFIG_HWACCEL(0, 0, 1, D3D11VA_VLD, NONE, ff_ ## codec ## _d3d11va_hwaccel)
  891. #define HWACCEL_XVMC(codec) \
  892. HW_CONFIG_HWACCEL(0, 0, 1, XVMC, NONE, ff_ ## codec ## _xvmc_hwaccel)
  893. +#define HWACCEL_V4L2REQUEST(codec) \
  894. + HW_CONFIG_HWACCEL(1, 0, 0, DRM_PRIME, DRM, ff_ ## codec ## _v4l2request_hwaccel)
  895.  
  896. #endif /* AVCODEC_HWACCEL_H */
  897. diff -uNr '--exclude=.git' '--exclude=sunxi' ffmpeg_n4.2.2/libavcodec/hwaccels.h ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/hwaccels.h
  898. --- ffmpeg_n4.2.2/libavcodec/hwaccels.h 2020-05-21 20:25:05.363843138 -0700
  899. +++ ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/hwaccels.h 2020-05-26 03:16:39.023174427 -0700
  900. @@ -27,6 +27,7 @@
  901. extern const AVHWAccel ff_h264_d3d11va2_hwaccel;
  902. extern const AVHWAccel ff_h264_dxva2_hwaccel;
  903. extern const AVHWAccel ff_h264_nvdec_hwaccel;
  904. +extern const AVHWAccel ff_h264_v4l2request_hwaccel;
  905. extern const AVHWAccel ff_h264_vaapi_hwaccel;
  906. extern const AVHWAccel ff_h264_vdpau_hwaccel;
  907. extern const AVHWAccel ff_h264_videotoolbox_hwaccel;
  908. @@ -34,6 +35,7 @@
  909. extern const AVHWAccel ff_hevc_d3d11va2_hwaccel;
  910. extern const AVHWAccel ff_hevc_dxva2_hwaccel;
  911. extern const AVHWAccel ff_hevc_nvdec_hwaccel;
  912. +extern const AVHWAccel ff_hevc_v4l2request_hwaccel;
  913. extern const AVHWAccel ff_hevc_vaapi_hwaccel;
  914. extern const AVHWAccel ff_hevc_vdpau_hwaccel;
  915. extern const AVHWAccel ff_hevc_videotoolbox_hwaccel;
  916. @@ -47,6 +49,7 @@
  917. extern const AVHWAccel ff_mpeg2_d3d11va2_hwaccel;
  918. extern const AVHWAccel ff_mpeg2_nvdec_hwaccel;
  919. extern const AVHWAccel ff_mpeg2_dxva2_hwaccel;
  920. +extern const AVHWAccel ff_mpeg2_v4l2request_hwaccel;
  921. extern const AVHWAccel ff_mpeg2_vaapi_hwaccel;
  922. extern const AVHWAccel ff_mpeg2_vdpau_hwaccel;
  923. extern const AVHWAccel ff_mpeg2_videotoolbox_hwaccel;
  924. @@ -62,11 +65,13 @@
  925. extern const AVHWAccel ff_vc1_vaapi_hwaccel;
  926. extern const AVHWAccel ff_vc1_vdpau_hwaccel;
  927. extern const AVHWAccel ff_vp8_nvdec_hwaccel;
  928. +extern const AVHWAccel ff_vp8_v4l2request_hwaccel;
  929. extern const AVHWAccel ff_vp8_vaapi_hwaccel;
  930. extern const AVHWAccel ff_vp9_d3d11va_hwaccel;
  931. extern const AVHWAccel ff_vp9_d3d11va2_hwaccel;
  932. extern const AVHWAccel ff_vp9_dxva2_hwaccel;
  933. extern const AVHWAccel ff_vp9_nvdec_hwaccel;
  934. +extern const AVHWAccel ff_vp9_v4l2request_hwaccel;
  935. extern const AVHWAccel ff_vp9_vaapi_hwaccel;
  936. extern const AVHWAccel ff_wmv3_d3d11va_hwaccel;
  937. extern const AVHWAccel ff_wmv3_d3d11va2_hwaccel;
  938. diff -uNr '--exclude=.git' '--exclude=sunxi' ffmpeg_n4.2.2/libavcodec/Makefile ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/Makefile
  939. --- ffmpeg_n4.2.2/libavcodec/Makefile 2020-05-21 20:25:05.193846405 -0700
  940. +++ ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/Makefile 2020-05-26 03:16:38.493175211 -0700
  941. @@ -147,6 +147,7 @@
  942. OBJS-$(CONFIG_VP56DSP) += vp56dsp.o
  943. OBJS-$(CONFIG_VP8DSP) += vp8dsp.o
  944. OBJS-$(CONFIG_V4L2_M2M) += v4l2_m2m.o v4l2_context.o v4l2_buffers.o v4l2_fmt.o
  945. +OBJS-$(CONFIG_V4L2_REQUEST) += v4l2_request.o
  946. OBJS-$(CONFIG_WMA_FREQS) += wma_freqs.o
  947. OBJS-$(CONFIG_WMV2DSP) += wmv2dsp.o
  948.  
  949. @@ -871,6 +872,7 @@
  950. OBJS-$(CONFIG_H264_DXVA2_HWACCEL) += dxva2_h264.o
  951. OBJS-$(CONFIG_H264_NVDEC_HWACCEL) += nvdec_h264.o
  952. OBJS-$(CONFIG_H264_QSV_HWACCEL) += qsvdec_h2645.o
  953. +OBJS-$(CONFIG_H264_V4L2REQUEST_HWACCEL) += v4l2_request_h264.o
  954. OBJS-$(CONFIG_H264_VAAPI_HWACCEL) += vaapi_h264.o
  955. OBJS-$(CONFIG_H264_VDPAU_HWACCEL) += vdpau_h264.o
  956. OBJS-$(CONFIG_H264_VIDEOTOOLBOX_HWACCEL) += videotoolbox.o
  957. @@ -878,6 +880,7 @@
  958. OBJS-$(CONFIG_HEVC_DXVA2_HWACCEL) += dxva2_hevc.o
  959. OBJS-$(CONFIG_HEVC_NVDEC_HWACCEL) += nvdec_hevc.o
  960. OBJS-$(CONFIG_HEVC_QSV_HWACCEL) += qsvdec_h2645.o
  961. +OBJS-$(CONFIG_HEVC_V4L2REQUEST_HWACCEL) += v4l2_request_hevc.o
  962. OBJS-$(CONFIG_HEVC_VAAPI_HWACCEL) += vaapi_hevc.o
  963. OBJS-$(CONFIG_HEVC_VDPAU_HWACCEL) += vdpau_hevc.o
  964. OBJS-$(CONFIG_MJPEG_NVDEC_HWACCEL) += nvdec_mjpeg.o
  965. @@ -890,6 +893,7 @@
  966. OBJS-$(CONFIG_MPEG2_DXVA2_HWACCEL) += dxva2_mpeg2.o
  967. OBJS-$(CONFIG_MPEG2_NVDEC_HWACCEL) += nvdec_mpeg12.o
  968. OBJS-$(CONFIG_MPEG2_QSV_HWACCEL) += qsvdec_other.o
  969. +OBJS-$(CONFIG_MPEG2_V4L2REQUEST_HWACCEL) += v4l2_request_mpeg2.o
  970. OBJS-$(CONFIG_MPEG2_VAAPI_HWACCEL) += vaapi_mpeg2.o
  971. OBJS-$(CONFIG_MPEG2_VDPAU_HWACCEL) += vdpau_mpeg12.o
  972. OBJS-$(CONFIG_MPEG2_VIDEOTOOLBOX_HWACCEL) += videotoolbox.o
  973. @@ -905,10 +909,12 @@
  974. OBJS-$(CONFIG_VC1_VAAPI_HWACCEL) += vaapi_vc1.o
  975. OBJS-$(CONFIG_VC1_VDPAU_HWACCEL) += vdpau_vc1.o
  976. OBJS-$(CONFIG_VP8_NVDEC_HWACCEL) += nvdec_vp8.o
  977. +OBJS-$(CONFIG_VP8_V4L2REQUEST_HWACCEL) += v4l2_request_vp8.o
  978. OBJS-$(CONFIG_VP8_VAAPI_HWACCEL) += vaapi_vp8.o
  979. OBJS-$(CONFIG_VP9_D3D11VA_HWACCEL) += dxva2_vp9.o
  980. OBJS-$(CONFIG_VP9_DXVA2_HWACCEL) += dxva2_vp9.o
  981. OBJS-$(CONFIG_VP9_NVDEC_HWACCEL) += nvdec_vp9.o
  982. +OBJS-$(CONFIG_VP9_V4L2REQUEST_HWACCEL) += v4l2_request_vp9.o
  983. OBJS-$(CONFIG_VP9_VAAPI_HWACCEL) += vaapi_vp9.o
  984. OBJS-$(CONFIG_VP8_QSV_HWACCEL) += qsvdec_other.o
  985.  
  986. diff -uNr '--exclude=.git' '--exclude=sunxi' ffmpeg_n4.2.2/libavcodec/mpeg12dec.c ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/mpeg12dec.c
  987. --- ffmpeg_n4.2.2/libavcodec/mpeg12dec.c 2020-05-21 20:25:05.453841409 -0700
  988. +++ ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/mpeg12dec.c 2020-05-26 03:16:39.303174012 -0700
  989. @@ -1157,6 +1157,9 @@
  990. #if CONFIG_MPEG2_VIDEOTOOLBOX_HWACCEL
  991. AV_PIX_FMT_VIDEOTOOLBOX,
  992. #endif
  993. +#if CONFIG_MPEG2_V4L2REQUEST_HWACCEL
  994. + AV_PIX_FMT_DRM_PRIME,
  995. +#endif
  996. AV_PIX_FMT_YUV420P,
  997. AV_PIX_FMT_NONE
  998. };
  999. @@ -2942,6 +2945,9 @@
  1000. #if CONFIG_MPEG2_XVMC_HWACCEL
  1001. HWACCEL_XVMC(mpeg2),
  1002. #endif
  1003. +#if CONFIG_MPEG2_V4L2REQUEST_HWACCEL
  1004. + HWACCEL_V4L2REQUEST(mpeg2),
  1005. +#endif
  1006. NULL
  1007. },
  1008. };
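
With the decoder-side lists above now advertising AV_PIX_FMT_DRM_PRIME, an application opts into the v4l2-request path through the normal get_format negotiation. A minimal application-side sketch, with error handling and fallback policy omitted:

/* Pick AV_PIX_FMT_DRM_PRIME when the decoder offers it; any other choice
 * keeps the software path. Assign with avctx->get_format = pick_drm_prime; */
#include <libavcodec/avcodec.h>

static enum AVPixelFormat pick_drm_prime(AVCodecContext *avctx,
                                         const enum AVPixelFormat *fmts)
{
    for (const enum AVPixelFormat *p = fmts; *p != AV_PIX_FMT_NONE; p++) {
        if (*p == AV_PIX_FMT_DRM_PRIME)
            return *p;               /* request the hardware path */
    }
    return fmts[0];                  /* fall back to the first offered format */
}
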
  1009. diff -uNr '--exclude=.git' '--exclude=sunxi' ffmpeg_n4.2.2/libavcodec/mpeg2-ctrls.h ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/mpeg2-ctrls.h
  1010. --- ffmpeg_n4.2.2/libavcodec/mpeg2-ctrls.h 1969-12-31 16:00:00.000000000 -0800
  1011. +++ ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/mpeg2-ctrls.h 2020-05-26 03:16:39.303174012 -0700
  1012. @@ -0,0 +1,82 @@
  1013. +/* SPDX-License-Identifier: GPL-2.0 */
  1014. +/*
  1015. + * These are the MPEG2 state controls for use with stateless MPEG-2
  1016. + * codec drivers.
  1017. + *
  1018. + * It turns out that these structs are not stable yet and will undergo
  1019. + * more changes. So keep them private until they are stable and ready to
  1020. + * become part of the official public API.
  1021. + */
  1022. +
  1023. +#ifndef _MPEG2_CTRLS_H_
  1024. +#define _MPEG2_CTRLS_H_
  1025. +
  1026. +#define V4L2_CID_MPEG_VIDEO_MPEG2_SLICE_PARAMS (V4L2_CID_MPEG_BASE+250)
  1027. +#define V4L2_CID_MPEG_VIDEO_MPEG2_QUANTIZATION (V4L2_CID_MPEG_BASE+251)
  1028. +
  1029. +/* enum v4l2_ctrl_type type values */
  1030. +#define V4L2_CTRL_TYPE_MPEG2_SLICE_PARAMS 0x0103
  1031. +#define V4L2_CTRL_TYPE_MPEG2_QUANTIZATION 0x0104
  1032. +
  1033. +#define V4L2_MPEG2_PICTURE_CODING_TYPE_I 1
  1034. +#define V4L2_MPEG2_PICTURE_CODING_TYPE_P 2
  1035. +#define V4L2_MPEG2_PICTURE_CODING_TYPE_B 3
  1036. +#define V4L2_MPEG2_PICTURE_CODING_TYPE_D 4
  1037. +
  1038. +struct v4l2_mpeg2_sequence {
  1039. + /* ISO/IEC 13818-2, ITU-T Rec. H.262: Sequence header */
  1040. + __u16 horizontal_size;
  1041. + __u16 vertical_size;
  1042. + __u32 vbv_buffer_size;
  1043. +
  1044. + /* ISO/IEC 13818-2, ITU-T Rec. H.262: Sequence extension */
  1045. + __u16 profile_and_level_indication;
  1046. + __u8 progressive_sequence;
  1047. + __u8 chroma_format;
  1048. +};
  1049. +
  1050. +struct v4l2_mpeg2_picture {
  1051. + /* ISO/IEC 13818-2, ITU-T Rec. H.262: Picture header */
  1052. + __u8 picture_coding_type;
  1053. +
  1054. + /* ISO/IEC 13818-2, ITU-T Rec. H.262: Picture coding extension */
  1055. + __u8 f_code[2][2];
  1056. + __u8 intra_dc_precision;
  1057. + __u8 picture_structure;
  1058. + __u8 top_field_first;
  1059. + __u8 frame_pred_frame_dct;
  1060. + __u8 concealment_motion_vectors;
  1061. + __u8 q_scale_type;
  1062. + __u8 intra_vlc_format;
  1063. + __u8 alternate_scan;
  1064. + __u8 repeat_first_field;
  1065. + __u16 progressive_frame;
  1066. +};
  1067. +
  1068. +struct v4l2_ctrl_mpeg2_slice_params {
  1069. + __u32 bit_size;
  1070. + __u32 data_bit_offset;
  1071. + __u64 backward_ref_ts;
  1072. + __u64 forward_ref_ts;
  1073. +
  1074. + struct v4l2_mpeg2_sequence sequence;
  1075. + struct v4l2_mpeg2_picture picture;
  1076. +
  1077. + /* ISO/IEC 13818-2, ITU-T Rec. H.262: Slice */
  1078. + __u32 quantiser_scale_code;
  1079. +};
  1080. +
  1081. +struct v4l2_ctrl_mpeg2_quantization {
  1082. + /* ISO/IEC 13818-2, ITU-T Rec. H.262: Quant matrix extension */
  1083. + __u8 load_intra_quantiser_matrix;
  1084. + __u8 load_non_intra_quantiser_matrix;
  1085. + __u8 load_chroma_intra_quantiser_matrix;
  1086. + __u8 load_chroma_non_intra_quantiser_matrix;
  1087. +
  1088. + __u8 intra_quantiser_matrix[64];
  1089. + __u8 non_intra_quantiser_matrix[64];
  1090. + __u8 chroma_intra_quantiser_matrix[64];
  1091. + __u8 chroma_non_intra_quantiser_matrix[64];
  1092. +};
  1093. +
  1094. +#endif
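
A short illustration of how the quantisation control above could be loaded from parser output; the source matrices stand in for whatever the software MPEG-2 parser extracted from the sequence and quant-matrix headers:

/* Sketch: load the intra/non-intra quantiser matrices into the control
 * declared in mpeg2-ctrls.h above. */
#include <stdint.h>
#include <string.h>
#include <linux/videodev2.h>
#include "mpeg2-ctrls.h"

static void fill_mpeg2_quantization(struct v4l2_ctrl_mpeg2_quantization *q,
                                    const uint8_t intra[64],
                                    const uint8_t non_intra[64])
{
    memset(q, 0, sizeof(*q));
    q->load_intra_quantiser_matrix     = 1;
    q->load_non_intra_quantiser_matrix = 1;
    memcpy(q->intra_quantiser_matrix,     intra,     64);
    memcpy(q->non_intra_quantiser_matrix, non_intra, 64);
}
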
  1095. diff -uNr '--exclude=.git' '--exclude=sunxi' ffmpeg_n4.2.2/libavcodec/v4l2_request.c ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/v4l2_request.c
  1096. --- ffmpeg_n4.2.2/libavcodec/v4l2_request.c 1969-12-31 16:00:00.000000000 -0800
  1097. +++ ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/v4l2_request.c 2020-05-26 03:16:39.653173493 -0700
  1098. @@ -0,0 +1,1055 @@
  1099. +/*
  1100. + * This file is part of FFmpeg.
  1101. + *
  1102. + * FFmpeg is free software; you can redistribute it and/or
  1103. + * modify it under the terms of the GNU Lesser General Public
  1104. + * License as published by the Free Software Foundation; either
  1105. + * version 2.1 of the License, or (at your option) any later version.
  1106. + *
  1107. + * FFmpeg is distributed in the hope that it will be useful,
  1108. + * but WITHOUT ANY WARRANTY; without even the implied warranty of
  1109. + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  1110. + * Lesser General Public License for more details.
  1111. + *
  1112. + * You should have received a copy of the GNU Lesser General Public
  1113. + * License along with FFmpeg; if not, write to the Free Software
  1114. + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
  1115. + */
  1116. +
  1117. +#include <drm_fourcc.h>
  1118. +#include <linux/media.h>
  1119. +#include <sys/mman.h>
  1120. +#include <sys/types.h>
  1121. +#include <sys/stat.h>
  1122. +#include <fcntl.h>
  1123. +
  1124. +#include <sys/sysmacros.h>
  1125. +#include <libudev.h>
  1126. +
  1127. +#include "decode.h"
  1128. +#include "internal.h"
  1129. +#include "libavutil/pixdesc.h"
  1130. +#include "v4l2_request.h"
  1131. +
  1132. +#ifndef DRM_FORMAT_NV15
  1133. +#define DRM_FORMAT_NV15 fourcc_code('N', 'V', '1', '5')
  1134. +#endif
  1135. +
  1136. +#ifndef DRM_FORMAT_NV20
  1137. +#define DRM_FORMAT_NV20 fourcc_code('N', 'V', '2', '0')
  1138. +#endif
  1139. +
  1140. +uint64_t ff_v4l2_request_get_capture_timestamp(AVFrame *frame)
  1141. +{
  1142. + V4L2RequestDescriptor *req = (V4L2RequestDescriptor*)frame->data[0];
  1143. + return req ? v4l2_timeval_to_ns(&req->capture.buffer.timestamp) : 0;
  1144. +}
  1145. +
  1146. +int ff_v4l2_request_reset_frame(AVCodecContext *avctx, AVFrame *frame)
  1147. +{
  1148. + V4L2RequestDescriptor *req = (V4L2RequestDescriptor*)frame->data[0];
  1149. + memset(&req->drm, 0, sizeof(AVDRMFrameDescriptor));
  1150. + req->output.used = 0;
  1151. + return 0;
  1152. +}
  1153. +
  1154. +int ff_v4l2_request_append_output_buffer(AVCodecContext *avctx, AVFrame *frame, const uint8_t *data, uint32_t size)
  1155. +{
  1156. + V4L2RequestDescriptor *req = (V4L2RequestDescriptor*)frame->data[0];
  1157. + if (req->output.used + size + (AV_INPUT_BUFFER_PADDING_SIZE * 4) <= req->output.size) {
  1158. + memcpy(req->output.addr + req->output.used, data, size);
  1159. + req->output.used += size;
  1160. + } else {
  1161. + av_log(avctx, AV_LOG_ERROR, "%s: output.used=%u output.size=%u size=%u\n", __func__, req->output.used, req->output.size, size);
  1162. + }
  1163. + return 0;
  1164. +}
  1165. +
  1166. +static int v4l2_request_controls(V4L2RequestContext *ctx, int request_fd, unsigned long type, struct v4l2_ext_control *control, int count)
  1167. +{
  1168. + struct v4l2_ext_controls controls = {
  1169. + .controls = control,
  1170. + .count = count,
  1171. + .request_fd = request_fd,
  1172. + .which = (request_fd >= 0) ? V4L2_CTRL_WHICH_REQUEST_VAL : 0,
  1173. + };
  1174. +
  1175. + if (!control || !count)
  1176. + return 0;
  1177. +
  1178. + return ioctl(ctx->video_fd, type, &controls);
  1179. +}
  1180. +
  1181. +static int v4l2_request_set_controls(V4L2RequestContext *ctx, int request_fd, struct v4l2_ext_control *control, int count)
  1182. +{
  1183. + return v4l2_request_controls(ctx, request_fd, VIDIOC_S_EXT_CTRLS, control, count);
  1184. +}
  1185. +
  1186. +int ff_v4l2_request_set_controls(AVCodecContext *avctx, struct v4l2_ext_control *control, int count)
  1187. +{
  1188. + V4L2RequestContext *ctx = avctx->internal->hwaccel_priv_data;
  1189. + int ret;
  1190. +
  1191. + ret = v4l2_request_controls(ctx, -1, VIDIOC_S_EXT_CTRLS, control, count);
  1192. + if (ret < 0) {
  1193. + av_log(avctx, AV_LOG_ERROR, "%s: set controls failed, %s (%d)\n", __func__, strerror(errno), errno);
  1194. + return AVERROR(EINVAL);
  1195. + }
  1196. +
  1197. + return ret;
  1198. +}
  1199. +
  1200. +int ff_v4l2_request_get_controls(AVCodecContext *avctx, struct v4l2_ext_control *control, int count)
  1201. +{
  1202. + V4L2RequestContext *ctx = avctx->internal->hwaccel_priv_data;
  1203. + int ret;
  1204. +
  1205. + ret = v4l2_request_controls(ctx, -1, VIDIOC_G_EXT_CTRLS, control, count);
  1206. + if (ret < 0) {
  1207. + av_log(avctx, AV_LOG_ERROR, "%s: get controls failed, %s (%d)\n", __func__, strerror(errno), errno);
  1208. + return AVERROR(EINVAL);
  1209. + }
  1210. +
  1211. + return ret;
  1212. +}
  1213. +
  1214. +int ff_v4l2_request_query_control(AVCodecContext *avctx, struct v4l2_query_ext_ctrl *control)
  1215. +{
  1216. + int ret;
  1217. + V4L2RequestContext *ctx = avctx->internal->hwaccel_priv_data;
  1218. +
  1219. + ret = ioctl(ctx->video_fd, VIDIOC_QUERY_EXT_CTRL, control);
  1220. + if (ret < 0) {
  1221. + av_log(avctx, AV_LOG_ERROR, "%s: query control failed, %s (%d)\n", __func__, strerror(errno), errno);
  1222. + return AVERROR(EINVAL);
  1223. + }
  1224. +
  1225. + return 0;
  1226. +}
  1227. +
  1228. +int ff_v4l2_request_query_control_default_value(AVCodecContext *avctx, uint32_t id)
  1229. +{
  1230. + int ret;
  1231. + V4L2RequestContext *ctx = avctx->internal->hwaccel_priv_data;
  1232. + struct v4l2_queryctrl control = {
  1233. + .id = id,
  1234. + };
  1235. +
  1236. + ret = ioctl(ctx->video_fd, VIDIOC_QUERYCTRL, &control);
  1237. + if (ret < 0) {
  1238. + av_log(avctx, AV_LOG_ERROR, "%s: query control failed, %s (%d)\n", __func__, strerror(errno), errno);
  1239. + return AVERROR(EINVAL);
  1240. + }
  1241. +
  1242. + return control.default_value;
  1243. +}
  1244. +
  1245. +static int v4l2_request_queue_buffer(V4L2RequestContext *ctx, int request_fd, V4L2RequestBuffer *buf, uint32_t flags)
  1246. +{
  1247. + struct v4l2_plane planes[1] = {};
  1248. + struct v4l2_buffer buffer = {
  1249. + .type = buf->buffer.type,
  1250. + .memory = buf->buffer.memory,
  1251. + .index = buf->index,
  1252. + .timestamp.tv_usec = ctx->timestamp,
  1253. + .bytesused = buf->used,
  1254. + .request_fd = request_fd,
  1255. + .flags = ((request_fd >= 0) ? V4L2_BUF_FLAG_REQUEST_FD : 0) | flags,
  1256. + };
  1257. +
  1258. + buf->buffer.timestamp = buffer.timestamp;
  1259. +
  1260. + if (V4L2_TYPE_IS_MULTIPLANAR(buf->buffer.type)) {
  1261. + planes[0].bytesused = buf->used;
  1262. + buffer.bytesused = 0;
  1263. + buffer.length = 1;
  1264. + buffer.m.planes = planes;
  1265. + }
  1266. +
  1267. + return ioctl(ctx->video_fd, VIDIOC_QBUF, &buffer);
  1268. +}
  1269. +
  1270. +static int v4l2_request_dequeue_buffer(V4L2RequestContext *ctx, V4L2RequestBuffer *buf)
  1271. +{
  1272. + int ret;
  1273. + struct v4l2_plane planes[1] = {};
  1274. + struct v4l2_buffer buffer = {
  1275. + .type = buf->buffer.type,
  1276. + .memory = buf->buffer.memory,
  1277. + .index = buf->index,
  1278. + };
  1279. +
  1280. + if (V4L2_TYPE_IS_MULTIPLANAR(buf->buffer.type)) {
  1281. + buffer.length = 1;
  1282. + buffer.m.planes = planes;
  1283. + }
  1284. +
  1285. + ret = ioctl(ctx->video_fd, VIDIOC_DQBUF, &buffer);
  1286. + if (ret < 0)
  1287. + return ret;
  1288. +
  1289. + buf->buffer.timestamp = buffer.timestamp;
  1290. + return 0;
  1291. +}
  1292. +
  1293. +const uint32_t v4l2_request_capture_pixelformats_420[] = {
  1294. + V4L2_PIX_FMT_NV12,
  1295. +#ifdef DRM_FORMAT_MOD_ALLWINNER_TILED
  1296. + V4L2_PIX_FMT_SUNXI_TILED_NV12,
  1297. +#endif
  1298. +};
  1299. +
  1300. +const uint32_t v4l2_request_capture_pixelformats_420_10[] = {
  1301. +#ifdef V4L2_PIX_FMT_NV15
  1302. + V4L2_PIX_FMT_NV15,
  1303. +#endif
  1304. +};
  1305. +
  1306. +const uint32_t v4l2_request_capture_pixelformats_422[] = {
  1307. + V4L2_PIX_FMT_NV16,
  1308. +};
  1309. +
  1310. +const uint32_t v4l2_request_capture_pixelformats_422_10[] = {
  1311. +#ifdef V4L2_PIX_FMT_NV20
  1312. + V4L2_PIX_FMT_NV20,
  1313. +#endif
  1314. +};
  1315. +
  1316. +static int v4l2_request_set_drm_descriptor(V4L2RequestDescriptor *req, struct v4l2_format *format)
  1317. +{
  1318. + AVDRMFrameDescriptor *desc = &req->drm;
  1319. + AVDRMLayerDescriptor *layer = &desc->layers[0];
  1320. + uint32_t pixelformat = V4L2_TYPE_IS_MULTIPLANAR(format->type) ? format->fmt.pix_mp.pixelformat : format->fmt.pix.pixelformat;
  1321. +
  1322. + switch (pixelformat) {
  1323. + case V4L2_PIX_FMT_NV12:
  1324. + layer->format = DRM_FORMAT_NV12;
  1325. + desc->objects[0].format_modifier = DRM_FORMAT_MOD_LINEAR;
  1326. + break;
  1327. +#ifdef DRM_FORMAT_MOD_ALLWINNER_TILED
  1328. + case V4L2_PIX_FMT_SUNXI_TILED_NV12:
  1329. + layer->format = DRM_FORMAT_NV12;
  1330. + desc->objects[0].format_modifier = DRM_FORMAT_MOD_ALLWINNER_TILED;
  1331. + break;
  1332. +#endif
  1333. +#ifdef V4L2_PIX_FMT_NV15
  1334. + case V4L2_PIX_FMT_NV15:
  1335. + layer->format = DRM_FORMAT_NV15;
  1336. + desc->objects[0].format_modifier = DRM_FORMAT_MOD_LINEAR;
  1337. + break;
  1338. +#endif
  1339. + case V4L2_PIX_FMT_NV16:
  1340. + layer->format = DRM_FORMAT_NV16;
  1341. + desc->objects[0].format_modifier = DRM_FORMAT_MOD_LINEAR;
  1342. + break;
  1343. +#ifdef V4L2_PIX_FMT_NV20
  1344. + case V4L2_PIX_FMT_NV20:
  1345. + layer->format = DRM_FORMAT_NV20;
  1346. + desc->objects[0].format_modifier = DRM_FORMAT_MOD_LINEAR;
  1347. + break;
  1348. +#endif
  1349. + default:
  1350. + return -1;
  1351. + }
  1352. +
  1353. + desc->nb_objects = 1;
  1354. + desc->objects[0].fd = req->capture.fd;
  1355. + desc->objects[0].size = req->capture.size;
  1356. +
  1357. + desc->nb_layers = 1;
  1358. + layer->nb_planes = 2;
  1359. +
  1360. + layer->planes[0].object_index = 0;
  1361. + layer->planes[0].offset = 0;
  1362. + layer->planes[0].pitch = V4L2_TYPE_IS_MULTIPLANAR(format->type) ? format->fmt.pix_mp.plane_fmt[0].bytesperline : format->fmt.pix.bytesperline;
  1363. +
  1364. + layer->planes[1].object_index = 0;
  1365. + layer->planes[1].offset = layer->planes[0].pitch * (V4L2_TYPE_IS_MULTIPLANAR(format->type) ? format->fmt.pix_mp.height : format->fmt.pix.height);
  1366. + layer->planes[1].pitch = layer->planes[0].pitch;
  1367. +
  1368. + return 0;
  1369. +}
  1370. +
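+ /*
+  * Core submission path: apply the codec controls to the request, queue the
+  * output (bitstream) buffer while holding the capture buffer between
+  * slices, queue the capture buffer on the first slice, then queue the media
+  * request and wait (select on the request fd, 2 second timeout) for the
+  * driver to complete it before dequeuing the output buffer (and the capture
+  * buffer on the last slice) and reinitializing the request for reuse.
+  */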
  1371. +static int v4l2_request_queue_decode(AVCodecContext *avctx, AVFrame *frame, struct v4l2_ext_control *control, int count, int first_slice, int last_slice)
  1372. +{
  1373. + V4L2RequestContext *ctx = avctx->internal->hwaccel_priv_data;
  1374. + V4L2RequestDescriptor *req = (V4L2RequestDescriptor*)frame->data[0];
  1375. + struct timeval tv = { 2, 0 };
  1376. + fd_set except_fds;
  1377. + int ret;
  1378. +
  1379. + av_log(avctx, AV_LOG_DEBUG, "%s: avctx=%p used=%u controls=%d index=%d fd=%d request_fd=%d first_slice=%d last_slice=%d\n", __func__, avctx, req->output.used, count, req->capture.index, req->capture.fd, req->request_fd, first_slice, last_slice);
  1380. +
  1381. + if (first_slice)
  1382. + ctx->timestamp++;
  1383. +
  1384. + ret = v4l2_request_set_controls(ctx, req->request_fd, control, count);
  1385. + if (ret < 0) {
  1386. + av_log(avctx, AV_LOG_ERROR, "%s: set controls failed for request %d, %s (%d)\n", __func__, req->request_fd, strerror(errno), errno);
  1387. + return -1;
  1388. + }
  1389. +
  1390. + memset(req->output.addr + req->output.used, 0, AV_INPUT_BUFFER_PADDING_SIZE * 4);
  1391. +
  1392. + ret = v4l2_request_queue_buffer(ctx, req->request_fd, &req->output, last_slice ? 0 : V4L2_BUF_FLAG_M2M_HOLD_CAPTURE_BUF);
  1393. + if (ret < 0) {
  1394. + av_log(avctx, AV_LOG_ERROR, "%s: queue output buffer %d failed for request %d, %s (%d)\n", __func__, req->output.index, req->request_fd, strerror(errno), errno);
  1395. + return -1;
  1396. + }
  1397. +
  1398. + if (first_slice) {
  1399. + ret = v4l2_request_queue_buffer(ctx, -1, &req->capture, 0);
  1400. + if (ret < 0) {
  1401. + av_log(avctx, AV_LOG_ERROR, "%s: queue capture buffer %d failed for request %d, %s (%d)\n", __func__, req->capture.index, req->request_fd, strerror(errno), errno);
  1402. + return -1;
  1403. + }
  1404. + }
  1405. +
  1406. + // NOTE: do we need to dequeue when the request fails or times out?
  1407. +
  1408. + // 4. queue request and wait
  1409. + ret = ioctl(req->request_fd, MEDIA_REQUEST_IOC_QUEUE, NULL);
  1410. + if (ret < 0) {
  1411. + av_log(avctx, AV_LOG_ERROR, "%s: queue request %d failed, %s (%d)\n", __func__, req->request_fd, strerror(errno), errno);
  1412. + goto fail;
  1413. + }
  1414. +
  1415. + FD_ZERO(&except_fds);
  1416. + FD_SET(req->request_fd, &except_fds);
  1417. +
  1418. + ret = select(req->request_fd + 1, NULL, NULL, &except_fds, &tv);
  1419. + if (ret == 0) {
  1420. + av_log(avctx, AV_LOG_ERROR, "%s: request %d timeout\n", __func__, req->request_fd);
  1421. + goto fail;
  1422. + } else if (ret < 0) {
  1423. + av_log(avctx, AV_LOG_ERROR, "%s: select request %d failed, %s (%d)\n", __func__, req->request_fd, strerror(errno), errno);
  1424. + goto fail;
  1425. + }
  1426. +
  1427. + ret = v4l2_request_dequeue_buffer(ctx, &req->output);
  1428. + if (ret < 0) {
  1429. + av_log(avctx, AV_LOG_ERROR, "%s: dequeue output buffer %d failed for request %d, %s (%d)\n", __func__, req->output.index, req->request_fd, strerror(errno), errno);
  1430. + return -1;
  1431. + }
  1432. +
  1433. + if (last_slice) {
  1434. + ret = v4l2_request_dequeue_buffer(ctx, &req->capture);
  1435. + if (ret < 0) {
  1436. + av_log(avctx, AV_LOG_ERROR, "%s: dequeue capture buffer %d failed for request %d, %s (%d)\n", __func__, req->capture.index, req->request_fd, strerror(errno), errno);
  1437. + return -1;
  1438. + }
  1439. + }
  1440. +
  1441. + // TODO: check errors
  1442. + // buffer.flags & V4L2_BUF_FLAG_ERROR
  1443. +
  1444. + ret = ioctl(req->request_fd, MEDIA_REQUEST_IOC_REINIT, NULL);
  1445. + if (ret < 0) {
  1446. + av_log(avctx, AV_LOG_ERROR, "%s: reinit request %d failed, %s (%d)\n", __func__, req->request_fd, strerror(errno), errno);
  1447. + return -1;
  1448. + }
  1449. +
  1450. + if (last_slice)
  1451. + return v4l2_request_set_drm_descriptor(req, &ctx->format);
  1452. +
  1453. + return 0;
  1454. +
  1455. +fail:
  1456. + ret = v4l2_request_dequeue_buffer(ctx, &req->output);
  1457. + if (ret < 0)
  1458. + av_log(avctx, AV_LOG_ERROR, "%s: dequeue output buffer %d failed for request %d, %s (%d)\n", __func__, req->output.index, req->request_fd, strerror(errno), errno);
  1459. +
  1460. + ret = v4l2_request_dequeue_buffer(ctx, &req->capture);
  1461. + if (ret < 0)
  1462. + av_log(avctx, AV_LOG_ERROR, "%s: dequeue capture buffer %d failed for request %d, %s (%d)\n", __func__, req->capture.index, req->request_fd, strerror(errno), errno);
  1463. +
  1464. + ret = ioctl(req->request_fd, MEDIA_REQUEST_IOC_REINIT, NULL);
  1465. + if (ret < 0)
  1466. + av_log(avctx, AV_LOG_ERROR, "%s: reinit request %d failed, %s (%d)\n", __func__, req->request_fd, strerror(errno), errno);
  1467. +
  1468. + return -1;
  1469. +}
  1470. +
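+ /*
+  * Slice-level submission only works when the driver can hold the capture
+  * buffer across output buffers (V4L2_BUF_CAP_SUPPORTS_M2M_HOLD_CAPTURE_BUF);
+  * otherwise every slice is queued as a complete frame.
+  */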
  1471. +int ff_v4l2_request_decode_slice(AVCodecContext *avctx, AVFrame *frame, struct v4l2_ext_control *control, int count, int first_slice, int last_slice)
  1472. +{
  1473. + V4L2RequestDescriptor *req = (V4L2RequestDescriptor*)frame->data[0];
  1474. +
  1475. + // fall back to queue each slice as a full frame
  1476. + if ((req->output.capabilities & V4L2_BUF_CAP_SUPPORTS_M2M_HOLD_CAPTURE_BUF) != V4L2_BUF_CAP_SUPPORTS_M2M_HOLD_CAPTURE_BUF)
  1477. + return v4l2_request_queue_decode(avctx, frame, control, count, 1, 1);
  1478. +
  1479. + return v4l2_request_queue_decode(avctx, frame, control, count, first_slice, last_slice);
  1480. +}
  1481. +
  1482. +int ff_v4l2_request_decode_frame(AVCodecContext *avctx, AVFrame *frame, struct v4l2_ext_control *control, int count)
  1483. +{
  1484. + return v4l2_request_queue_decode(avctx, frame, control, count, 1, 1);
  1485. +}
  1486. +
  1487. +int ff_v4l2_request_output_frame(AVCodecContext *avctx, AVFrame *frame)
  1488. +{
  1489. + V4L2RequestContext *ctx = avctx->internal->hwaccel_priv_data;
  1490. + V4L2RequestDescriptor *req = (V4L2RequestDescriptor*)frame->data[0];
  1491. +
  1492. + av_log(avctx, AV_LOG_DEBUG, "%s: avctx=%p used=%u index=%d fd=%d request_fd=%d\n", __func__, avctx, req->output.used, req->capture.index, req->capture.fd, req->request_fd);
  1493. + return 0;
  1494. +}
  1495. +
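+ /*
+  * Check whether a pixelformat is supported on the given queue. For the
+  * output (bitstream) queue a zero-count VIDIOC_CREATE_BUFS is issued first,
+  * only to learn whether the queue advertises V4L2_BUF_CAP_SUPPORTS_REQUESTS.
+  */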
  1496. +static int v4l2_request_try_format(AVCodecContext *avctx, enum v4l2_buf_type type, uint32_t pixelformat)
  1497. +{
  1498. + V4L2RequestContext *ctx = avctx->internal->hwaccel_priv_data;
  1499. + struct v4l2_fmtdesc fmtdesc = {
  1500. + .index = 0,
  1501. + .type = type,
  1502. + };
  1503. +
  1504. + if (V4L2_TYPE_IS_OUTPUT(type)) {
  1505. + struct v4l2_create_buffers buffers = {
  1506. + .count = 0,
  1507. + .memory = V4L2_MEMORY_MMAP,
  1508. + .format.type = type,
  1509. + };
  1510. +
  1511. + if (ioctl(ctx->video_fd, VIDIOC_CREATE_BUFS, &buffers) < 0) {
  1512. + av_log(avctx, AV_LOG_ERROR, "%s: create buffers failed for type %u, %s (%d)\n", __func__, type, strerror(errno), errno);
  1513. + return -1;
  1514. + }
  1515. +
  1516. + if ((buffers.capabilities & V4L2_BUF_CAP_SUPPORTS_REQUESTS) != V4L2_BUF_CAP_SUPPORTS_REQUESTS) {
  1517. + av_log(avctx, AV_LOG_INFO, "%s: output buffer type does not support requests, capabilities %u\n", __func__, buffers.capabilities);
  1518. + return -1;
  1519. + }
  1520. + }
  1521. +
  1522. + while (ioctl(ctx->video_fd, VIDIOC_ENUM_FMT, &fmtdesc) >= 0) {
  1523. + if (fmtdesc.pixelformat == pixelformat)
  1524. + return 0;
  1525. +
  1526. + fmtdesc.index++;
  1527. + }
  1528. +
  1529. + av_log(avctx, AV_LOG_INFO, "%s: pixelformat %u not supported for type %u\n", __func__, pixelformat, type);
  1530. + return -1;
  1531. +}
  1532. +
  1533. +static int v4l2_request_set_format(AVCodecContext *avctx, enum v4l2_buf_type type, uint32_t pixelformat, uint32_t buffersize)
  1534. +{
  1535. + V4L2RequestContext *ctx = avctx->internal->hwaccel_priv_data;
  1536. + struct v4l2_format format = {
  1537. + .type = type,
  1538. + };
  1539. +
  1540. + if (V4L2_TYPE_IS_MULTIPLANAR(type)) {
  1541. + format.fmt.pix_mp.width = avctx->coded_width;
  1542. + format.fmt.pix_mp.height = avctx->coded_height;
  1543. + format.fmt.pix_mp.pixelformat = pixelformat;
  1544. + format.fmt.pix_mp.plane_fmt[0].sizeimage = buffersize;
  1545. + format.fmt.pix_mp.num_planes = 1;
  1546. + } else {
  1547. + format.fmt.pix.width = avctx->coded_width;
  1548. + format.fmt.pix.height = avctx->coded_height;
  1549. + format.fmt.pix.pixelformat = pixelformat;
  1550. + format.fmt.pix.sizeimage = buffersize;
  1551. + }
  1552. +
  1553. + return ioctl(ctx->video_fd, VIDIOC_S_FMT, &format);
  1554. +}
  1555. +
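+ /*
+  * Pick a capture format matching the stream's software pixel format
+  * (8- or 10-bit, 4:2:0 or 4:2:2) from the tables above. The disabled
+  * #if 0 branch keeps an older strategy that simply accepted whatever
+  * format the driver reported by default.
+  */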
  1556. +static int v4l2_request_select_capture_format(AVCodecContext *avctx)
  1557. +{
  1558. + V4L2RequestContext *ctx = avctx->internal->hwaccel_priv_data;
  1559. + enum v4l2_buf_type type = ctx->format.type;
  1560. +
  1561. +#if 0
  1562. + struct v4l2_format format = {
  1563. + .type = type,
  1564. + };
  1565. + struct v4l2_fmtdesc fmtdesc = {
  1566. + .index = 0,
  1567. + .type = type,
  1568. + };
  1569. + uint32_t pixelformat;
  1570. + int i;
  1571. +
  1572. + if (ioctl(ctx->video_fd, VIDIOC_G_FMT, &format) < 0) {
  1573. + av_log(avctx, AV_LOG_ERROR, "%s: get capture format failed, %s (%d)\n", __func__, strerror(errno), errno);
  1574. + return -1;
  1575. + }
  1576. +
  1577. + pixelformat = V4L2_TYPE_IS_MULTIPLANAR(type) ? format.fmt.pix_mp.pixelformat : format.fmt.pix.pixelformat;
  1578. +
  1579. + for (i = 0; i < FF_ARRAY_ELEMS(v4l2_request_capture_pixelformats); i++) {
  1580. + if (pixelformat == v4l2_request_capture_pixelformats[i])
  1581. + return v4l2_request_set_format(avctx, type, pixelformat, 0);
  1582. + }
  1583. +
  1584. + while (ioctl(ctx->video_fd, VIDIOC_ENUM_FMT, &fmtdesc) >= 0) {
  1585. + for (i = 0; i < FF_ARRAY_ELEMS(v4l2_request_capture_pixelformats); i++) {
  1586. + if (fmtdesc.pixelformat == v4l2_request_capture_pixelformats[i])
  1587. + return v4l2_request_set_format(avctx, type, fmtdesc.pixelformat, 0);
  1588. + }
  1589. +
  1590. + fmtdesc.index++;
  1591. + }
  1592. +#else
  1593. + av_log(avctx, AV_LOG_DEBUG, "%s: avctx=%p pix_fmt=%s sw_pix_fmt=%s\n", __func__, avctx, av_get_pix_fmt_name(avctx->pix_fmt), av_get_pix_fmt_name(avctx->sw_pix_fmt));
  1594. + if (avctx->sw_pix_fmt == AV_PIX_FMT_YUV420P || avctx->sw_pix_fmt == AV_PIX_FMT_YUVJ420P) {
  1595. + for (int i = 0; i < FF_ARRAY_ELEMS(v4l2_request_capture_pixelformats_420); i++) {
  1596. + uint32_t pixelformat = v4l2_request_capture_pixelformats_420[i];
  1597. + if (!v4l2_request_try_format(avctx, type, pixelformat))
  1598. + return v4l2_request_set_format(avctx, type, pixelformat, 0);
  1599. + }
  1600. + } else if (avctx->sw_pix_fmt == AV_PIX_FMT_YUV420P10) {
  1601. + for (int i = 0; i < FF_ARRAY_ELEMS(v4l2_request_capture_pixelformats_420_10); i++) {
  1602. + uint32_t pixelformat = v4l2_request_capture_pixelformats_420_10[i];
  1603. + if (!v4l2_request_try_format(avctx, type, pixelformat))
  1604. + return v4l2_request_set_format(avctx, type, pixelformat, 0);
  1605. + }
  1606. + } else if (avctx->sw_pix_fmt == AV_PIX_FMT_YUV422P || avctx->sw_pix_fmt == AV_PIX_FMT_YUVJ422P) {
  1607. + for (int i = 0; i < FF_ARRAY_ELEMS(v4l2_request_capture_pixelformats_422); i++) {
  1608. + uint32_t pixelformat = v4l2_request_capture_pixelformats_422[i];
  1609. + if (!v4l2_request_try_format(avctx, type, pixelformat))
  1610. + return v4l2_request_set_format(avctx, type, pixelformat, 0);
  1611. + }
  1612. + } else if (avctx->sw_pix_fmt == AV_PIX_FMT_YUV422P10) {
  1613. + for (int i = 0; i < FF_ARRAY_ELEMS(v4l2_request_capture_pixelformats_422_10); i++) {
  1614. + uint32_t pixelformat = v4l2_request_capture_pixelformats_422_10[i];
  1615. + if (!v4l2_request_try_format(avctx, type, pixelformat))
  1616. + return v4l2_request_set_format(avctx, type, pixelformat, 0);
  1617. + }
  1618. + }
  1619. +#endif
  1620. +
  1621. + return -1;
  1622. +}
  1623. +
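+ /*
+  * Probe one video devnode: require streaming and mem2mem capabilities,
+  * select single- or multi-planar buffer types, verify the slice
+  * pixelformat and codec controls are accepted, set the output format and
+  * choose a capture format. On failure the fd is closed so the caller can
+  * try the next candidate device.
+  */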
  1624. +static int v4l2_request_probe_video_device(struct udev_device *device, AVCodecContext *avctx, uint32_t pixelformat, uint32_t buffersize, struct v4l2_ext_control *control, int count)
  1625. +{
  1626. + V4L2RequestContext *ctx = avctx->internal->hwaccel_priv_data;
  1627. + int ret = AVERROR(EINVAL);
  1628. + struct v4l2_capability capability = {0};
  1629. + unsigned int capabilities = 0;
  1630. +
  1631. + const char *path = udev_device_get_devnode(device);
  1632. + if (!path) {
  1633. + av_log(avctx, AV_LOG_ERROR, "%s: get video device devnode failed\n", __func__);
  1634. + ret = AVERROR(EINVAL);
  1635. + goto fail;
  1636. + }
  1637. +
  1638. + ctx->video_fd = open(path, O_RDWR | O_NONBLOCK, 0);
  1639. + if (ctx->video_fd < 0) {
  1640. + av_log(avctx, AV_LOG_ERROR, "%s: opening %s failed, %s (%d)\n", __func__, path, strerror(errno), errno);
  1641. + ret = AVERROR(EINVAL);
  1642. + goto fail;
  1643. + }
  1644. +
  1645. + ret = ioctl(ctx->video_fd, VIDIOC_QUERYCAP, &capability);
  1646. + if (ret < 0) {
  1647. + av_log(avctx, AV_LOG_ERROR, "%s: get video capability failed, %s (%d)\n", __func__, strerror(errno), errno);
  1648. + ret = AVERROR(EINVAL);
  1649. + goto fail;
  1650. + }
  1651. +
  1652. + if (capability.capabilities & V4L2_CAP_DEVICE_CAPS)
  1653. + capabilities = capability.device_caps;
  1654. + else
  1655. + capabilities = capability.capabilities;
  1656. +
  1657. + av_log(avctx, AV_LOG_DEBUG, "%s: avctx=%p ctx=%p path=%s capabilities=%u\n", __func__, avctx, ctx, path, capabilities);
  1658. +
  1659. + if ((capabilities & V4L2_CAP_STREAMING) != V4L2_CAP_STREAMING) {
  1660. + av_log(avctx, AV_LOG_ERROR, "%s: missing required streaming capability\n", __func__);
  1661. + ret = AVERROR(EINVAL);
  1662. + goto fail;
  1663. + }
  1664. +
  1665. + if ((capabilities & V4L2_CAP_VIDEO_M2M_MPLANE) == V4L2_CAP_VIDEO_M2M_MPLANE) {
  1666. + ctx->output_type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
  1667. + ctx->format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
  1668. + } else if ((capabilities & V4L2_CAP_VIDEO_M2M) == V4L2_CAP_VIDEO_M2M) {
  1669. + ctx->output_type = V4L2_BUF_TYPE_VIDEO_OUTPUT;
  1670. + ctx->format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  1671. + } else {
  1672. + av_log(avctx, AV_LOG_ERROR, "%s: missing required mem2mem capability\n", __func__);
  1673. + ret = AVERROR(EINVAL);
  1674. + goto fail;
  1675. + }
  1676. +
  1677. + ret = v4l2_request_try_format(avctx, ctx->output_type, pixelformat);
  1678. + if (ret < 0) {
  1679. + av_log(avctx, AV_LOG_WARNING, "%s: try output format failed\n", __func__);
  1680. + ret = AVERROR(EINVAL);
  1681. + goto fail;
  1682. + }
  1683. +
  1684. + ret = v4l2_request_set_controls(ctx, -1, control, count);
  1685. + if (ret < 0) {
  1686. + av_log(avctx, AV_LOG_ERROR, "%s: set controls failed, %s (%d)\n", __func__, strerror(errno), errno);
  1687. + ret = AVERROR(EINVAL);
  1688. + goto fail;
  1689. + }
  1690. +
  1691. + ret = v4l2_request_set_format(avctx, ctx->output_type, pixelformat, buffersize);
  1692. + if (ret < 0) {
  1693. + av_log(avctx, AV_LOG_ERROR, "%s: set output format failed, %s (%d)\n", __func__, strerror(errno), errno);
  1694. + ret = AVERROR(EINVAL);
  1695. + goto fail;
  1696. + }
  1697. +
  1698. + ret = v4l2_request_select_capture_format(avctx);
  1699. + if (ret < 0) {
  1700. + av_log(avctx, AV_LOG_WARNING, "%s: select capture format failed\n", __func__);
  1701. + ret = AVERROR(EINVAL);
  1702. + goto fail;
  1703. + }
  1704. +
  1705. + return 0;
  1706. +
  1707. +fail:
  1708. + if (ctx->video_fd >= 0) {
  1709. + close(ctx->video_fd);
  1710. + ctx->video_fd = -1;
  1711. + }
  1712. + return ret;
  1713. +}
  1714. +
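+ /*
+  * Final setup once a device has been selected: read back the negotiated
+  * capture format, create the DRM PRIME hw frames context and start
+  * streaming on both queues.
+  */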
  1715. +static int v4l2_request_init_context(AVCodecContext *avctx)
  1716. +{
  1717. + V4L2RequestContext *ctx = avctx->internal->hwaccel_priv_data;
  1718. + int ret;
  1719. +
  1720. + ret = ioctl(ctx->video_fd, VIDIOC_G_FMT, &ctx->format);
  1721. + if (ret < 0) {
  1722. + av_log(avctx, AV_LOG_ERROR, "%s: get capture format failed, %s (%d)\n", __func__, strerror(errno), errno);
  1723. + ret = AVERROR(EINVAL);
  1724. + goto fail;
  1725. + }
  1726. +
  1727. + if (V4L2_TYPE_IS_MULTIPLANAR(ctx->format.type)) {
  1728. + av_log(avctx, AV_LOG_DEBUG, "%s: pixelformat=%d width=%u height=%u bytesperline=%u sizeimage=%u num_planes=%u\n", __func__, ctx->format.fmt.pix_mp.pixelformat, ctx->format.fmt.pix_mp.width, ctx->format.fmt.pix_mp.height, ctx->format.fmt.pix_mp.plane_fmt[0].bytesperline, ctx->format.fmt.pix_mp.plane_fmt[0].sizeimage, ctx->format.fmt.pix_mp.num_planes);
  1729. + } else {
  1730. + av_log(avctx, AV_LOG_DEBUG, "%s: pixelformat=%d width=%u height=%u bytesperline=%u sizeimage=%u\n", __func__, ctx->format.fmt.pix.pixelformat, ctx->format.fmt.pix.width, ctx->format.fmt.pix.height, ctx->format.fmt.pix.bytesperline, ctx->format.fmt.pix.sizeimage);
  1731. + }
  1732. +
  1733. + ret = ff_decode_get_hw_frames_ctx(avctx, AV_HWDEVICE_TYPE_DRM);
  1734. + if (ret < 0)
  1735. + goto fail;
  1736. +
  1737. + ret = ioctl(ctx->video_fd, VIDIOC_STREAMON, &ctx->output_type);
  1738. + if (ret < 0) {
  1739. + av_log(avctx, AV_LOG_ERROR, "%s: output stream on failed, %s (%d)\n", __func__, strerror(errno), errno);
  1740. + ret = AVERROR(EINVAL);
  1741. + goto fail;
  1742. + }
  1743. +
  1744. + ret = ioctl(ctx->video_fd, VIDIOC_STREAMON, &ctx->format.type);
  1745. + if (ret < 0) {
  1746. + av_log(avctx, AV_LOG_ERROR, "%s: capture stream on failed, %s (%d)\n", __func__, strerror(errno), errno);
  1747. + ret = AVERROR(EINVAL);
  1748. + goto fail;
  1749. + }
  1750. +
  1751. + return 0;
  1752. +
  1753. +fail:
  1754. + ff_v4l2_request_uninit(avctx);
  1755. + return ret;
  1756. +}
  1757. +
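+ /*
+  * Probe a media controller devnode: MEDIA_IOC_G_TOPOLOGY is called twice,
+  * first to learn the interface count and then to fetch the interfaces,
+  * and every MEDIA_INTF_T_V4L_VIDEO interface is resolved back to a udev
+  * video device and probed in turn until one succeeds.
+  */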
  1758. +static int v4l2_request_probe_media_device(struct udev_device *device, AVCodecContext *avctx, uint32_t pixelformat, uint32_t buffersize, struct v4l2_ext_control *control, int count)
  1759. +{
  1760. + V4L2RequestContext *ctx = avctx->internal->hwaccel_priv_data;
  1761. + int ret;
  1762. + struct media_device_info device_info = {0};
  1763. + struct media_v2_topology topology = {0};
  1764. + struct media_v2_interface *interfaces = NULL;
  1765. + struct udev *udev = udev_device_get_udev(device);
  1766. + struct udev_device *video_device;
  1767. + dev_t devnum;
  1768. +
  1769. + const char *path = udev_device_get_devnode(device);
  1770. + if (!path) {
  1771. + av_log(avctx, AV_LOG_ERROR, "%s: get media device devnode failed\n", __func__);
  1772. + ret = AVERROR(EINVAL);
  1773. + goto fail;
  1774. + }
  1775. +
  1776. + ctx->media_fd = open(path, O_RDWR, 0);
  1777. + if (ctx->media_fd < 0) {
  1778. + av_log(avctx, AV_LOG_ERROR, "%s: opening %s failed, %s (%d)\n", __func__, path, strerror(errno), errno);
  1779. + ret = AVERROR(EINVAL);
  1780. + goto fail;
  1781. + }
  1782. +
  1783. + ret = ioctl(ctx->media_fd, MEDIA_IOC_DEVICE_INFO, &device_info);
  1784. + if (ret < 0) {
  1785. + av_log(avctx, AV_LOG_ERROR, "%s: get media device info failed, %s (%d)\n", __func__, strerror(errno), errno);
  1786. + ret = AVERROR(EINVAL);
  1787. + goto fail;
  1788. + }
  1789. +
  1790. + av_log(avctx, AV_LOG_DEBUG, "%s: avctx=%p ctx=%p path=%s driver=%s\n", __func__, avctx, ctx, path, device_info.driver);
  1791. +
  1792. + ret = ioctl(ctx->media_fd, MEDIA_IOC_G_TOPOLOGY, &topology);
  1793. + if (ret < 0) {
  1794. + av_log(avctx, AV_LOG_ERROR, "%s: get media topology failed, %s (%d)\n", __func__, strerror(errno), errno);
  1795. + ret = AVERROR(EINVAL);
  1796. + goto fail;
  1797. + }
  1798. +
  1799. + if (topology.num_interfaces <= 0) {
  1800. + av_log(avctx, AV_LOG_ERROR, "%s: media device has no interfaces\n", __func__);
  1801. + ret = AVERROR(EINVAL);
  1802. + goto fail;
  1803. + }
  1804. +
  1805. + interfaces = av_mallocz(topology.num_interfaces * sizeof(struct media_v2_interface));
  1806. + if (!interfaces) {
  1807. + av_log(avctx, AV_LOG_ERROR, "%s: allocating media interface struct failed\n", __func__);
  1808. + ret = AVERROR(ENOMEM);
  1809. + goto fail;
  1810. + }
  1811. +
  1812. + topology.ptr_interfaces = (__u64)(uintptr_t)interfaces;
  1813. + ret = ioctl(ctx->media_fd, MEDIA_IOC_G_TOPOLOGY, &topology);
  1814. + if (ret < 0) {
  1815. + av_log(avctx, AV_LOG_ERROR, "%s: get media topology failed, %s (%d)\n", __func__, strerror(errno), errno);
  1816. + ret = AVERROR(EINVAL);
  1817. + goto fail;
  1818. + }
  1819. +
  1820. + ret = AVERROR(EINVAL);
  1821. + for (int i = 0; i < topology.num_interfaces; i++) {
  1822. + if (interfaces[i].intf_type != MEDIA_INTF_T_V4L_VIDEO)
  1823. + continue;
  1824. +
  1825. + devnum = makedev(interfaces[i].devnode.major, interfaces[i].devnode.minor);
  1826. + video_device = udev_device_new_from_devnum(udev, 'c', devnum);
  1827. + if (!video_device) {
  1828. + av_log(avctx, AV_LOG_ERROR, "%s: video_device=%p\n", __func__, video_device);
  1829. + continue;
  1830. + }
  1831. +
  1832. + ret = v4l2_request_probe_video_device(video_device, avctx, pixelformat, buffersize, control, count);
  1833. + udev_device_unref(video_device);
  1834. +
  1835. + if (!ret)
  1836. + break;
  1837. + }
  1838. +
  1839. + av_freep(&interfaces);
  1840. + return ret;
  1841. +
  1842. +fail:
  1843. + av_freep(&interfaces);
  1844. + if (ctx->media_fd >= 0) {
  1845. + close(ctx->media_fd);
  1846. + ctx->media_fd = -1;
  1847. + }
  1848. + return ret;
  1849. +}
  1850. +
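+ /*
+  * Entry point used by the per-codec hwaccels: enumerate all media devices
+  * via udev and keep the first one whose video interface accepts the
+  * requested slice pixelformat and controls, then start streaming on it.
+  */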
  1851. +int ff_v4l2_request_init(AVCodecContext *avctx, uint32_t pixelformat, uint32_t buffersize, struct v4l2_ext_control *control, int count)
  1852. +{
  1853. + V4L2RequestContext *ctx = avctx->internal->hwaccel_priv_data;
  1854. + int ret = AVERROR(EINVAL);
  1855. + struct udev *udev;
  1856. + struct udev_enumerate *enumerate;
  1857. + struct udev_list_entry *devices;
  1858. + struct udev_list_entry *entry;
  1859. + struct udev_device *device;
  1860. +
  1861. + av_log(avctx, AV_LOG_DEBUG, "%s: avctx=%p hw_device_ctx=%p hw_frames_ctx=%p\n", __func__, avctx, avctx->hw_device_ctx, avctx->hw_frames_ctx);
  1862. +
  1863. + ctx->media_fd = -1;
  1864. + ctx->video_fd = -1;
  1865. + ctx->timestamp = 0;
  1866. +
  1867. + udev = udev_new();
  1868. + if (!udev) {
  1869. + av_log(avctx, AV_LOG_ERROR, "%s: allocating udev context failed\n", __func__);
  1870. + ret = AVERROR(ENOMEM);
  1871. + goto fail;
  1872. + }
  1873. +
  1874. + enumerate = udev_enumerate_new(udev);
  1875. + if (!enumerate) {
  1876. + av_log(avctx, AV_LOG_ERROR, "%s: allocating udev enumerator failed\n", __func__);
  1877. + ret = AVERROR(ENOMEM);
  1878. + goto fail;
  1879. + }
  1880. +
  1881. + udev_enumerate_add_match_subsystem(enumerate, "media");
  1882. + udev_enumerate_scan_devices(enumerate);
  1883. +
  1884. + devices = udev_enumerate_get_list_entry(enumerate);
  1885. + udev_list_entry_foreach(entry, devices) {
  1886. + const char *path = udev_list_entry_get_name(entry);
  1887. + if (!path)
  1888. + continue;
  1889. +
  1890. + device = udev_device_new_from_syspath(udev, path);
  1891. + if (!device)
  1892. + continue;
  1893. +
  1894. + ret = v4l2_request_probe_media_device(device, avctx, pixelformat, buffersize, control, count);
  1895. + udev_device_unref(device);
  1896. +
  1897. + if (!ret)
  1898. + break;
  1899. + }
  1900. +
  1901. + udev_enumerate_unref(enumerate);
  1902. +
  1903. + if (!ret)
  1904. + ret = v4l2_request_init_context(avctx);
  1905. +
  1906. +fail:
  1907. + udev_unref(udev);
  1908. + return ret;
  1909. +}
  1910. +
  1911. +int ff_v4l2_request_uninit(AVCodecContext *avctx)
  1912. +{
  1913. + V4L2RequestContext *ctx = avctx->internal->hwaccel_priv_data;
  1914. + int ret;
  1915. +
  1916. + av_log(avctx, AV_LOG_DEBUG, "%s: avctx=%p ctx=%p\n", __func__, avctx, ctx);
  1917. +
  1918. + if (ctx->video_fd >= 0) {
  1919. + ret = ioctl(ctx->video_fd, VIDIOC_STREAMOFF, &ctx->output_type);
  1920. + if (ret < 0)
  1921. + av_log(avctx, AV_LOG_ERROR, "%s: output stream off failed, %s (%d)\n", __func__, strerror(errno), errno);
  1922. +
  1923. + ret = ioctl(ctx->video_fd, VIDIOC_STREAMOFF, &ctx->format.type);
  1924. + if (ret < 0)
  1925. + av_log(avctx, AV_LOG_ERROR, "%s: capture stream off failed, %s (%d)\n", __func__, strerror(errno), errno);
  1926. + }
  1927. +
  1928. + if (avctx->hw_frames_ctx) {
  1929. + AVHWFramesContext *hwfc = (AVHWFramesContext*)avctx->hw_frames_ctx->data;
  1930. + av_buffer_pool_flush(hwfc->pool);
  1931. + }
  1932. +
  1933. + if (ctx->video_fd >= 0)
  1934. + close(ctx->video_fd);
  1935. +
  1936. + if (ctx->media_fd >= 0)
  1937. + close(ctx->media_fd);
  1938. +
  1939. + return 0;
  1940. +}
  1941. +
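+ /*
+  * Allocate a single MMAP buffer on the given queue with VIDIOC_CREATE_BUFS.
+  * Output (bitstream) buffers are mmap()ed so slice data can be appended;
+  * capture buffers are instead exported with VIDIOC_EXPBUF to a dma-buf fd
+  * that later backs the DRM frame descriptor.
+  */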
  1942. +static int v4l2_request_buffer_alloc(AVCodecContext *avctx, V4L2RequestBuffer *buf, enum v4l2_buf_type type)
  1943. +{
  1944. + V4L2RequestContext *ctx = avctx->internal->hwaccel_priv_data;
  1945. + int ret;
  1946. + struct v4l2_plane planes[1] = {};
  1947. + struct v4l2_create_buffers buffers = {
  1948. + .count = 1,
  1949. + .memory = V4L2_MEMORY_MMAP,
  1950. + .format.type = type,
  1951. + };
  1952. +
  1953. + av_log(avctx, AV_LOG_DEBUG, "%s: avctx=%p buf=%p type=%u\n", __func__, avctx, buf, type);
  1954. +
  1955. + ret = ioctl(ctx->video_fd, VIDIOC_G_FMT, &buffers.format);
  1956. + if (ret < 0) {
  1957. + av_log(avctx, AV_LOG_ERROR, "%s: get format failed for type %u, %s (%d)\n", __func__, type, strerror(errno), errno);
  1958. + return ret;
  1959. + }
  1960. +
  1961. + if (V4L2_TYPE_IS_MULTIPLANAR(buffers.format.type)) {
  1962. + av_log(avctx, AV_LOG_DEBUG, "%s: pixelformat=%d width=%u height=%u bytesperline=%u sizeimage=%u num_planes=%u\n", __func__, buffers.format.fmt.pix_mp.pixelformat, buffers.format.fmt.pix_mp.width, buffers.format.fmt.pix_mp.height, buffers.format.fmt.pix_mp.plane_fmt[0].bytesperline, buffers.format.fmt.pix_mp.plane_fmt[0].sizeimage, buffers.format.fmt.pix_mp.num_planes);
  1963. + } else {
  1964. + av_log(avctx, AV_LOG_DEBUG, "%s: pixelformat=%d width=%u height=%u bytesperline=%u sizeimage=%u\n", __func__, buffers.format.fmt.pix.pixelformat, buffers.format.fmt.pix.width, buffers.format.fmt.pix.height, buffers.format.fmt.pix.bytesperline, buffers.format.fmt.pix.sizeimage);
  1965. + }
  1966. +
  1967. + ret = ioctl(ctx->video_fd, VIDIOC_CREATE_BUFS, &buffers);
  1968. + if (ret < 0) {
  1969. + av_log(avctx, AV_LOG_ERROR, "%s: create buffers failed for type %u, %s (%d)\n", __func__, type, strerror(errno), errno);
  1970. + return ret;
  1971. + }
  1972. +
  1973. + if (V4L2_TYPE_IS_MULTIPLANAR(type)) {
  1974. + buf->width = buffers.format.fmt.pix_mp.width;
  1975. + buf->height = buffers.format.fmt.pix_mp.height;
  1976. + buf->size = buffers.format.fmt.pix_mp.plane_fmt[0].sizeimage;
  1977. + buf->buffer.length = 1;
  1978. + buf->buffer.m.planes = planes;
  1979. + } else {
  1980. + buf->width = buffers.format.fmt.pix.width;
  1981. + buf->height = buffers.format.fmt.pix.height;
  1982. + buf->size = buffers.format.fmt.pix.sizeimage;
  1983. + }
  1984. +
  1985. + buf->index = buffers.index;
  1986. + buf->capabilities = buffers.capabilities;
  1987. + buf->used = 0;
  1988. +
  1989. + buf->buffer.type = type;
  1990. + buf->buffer.memory = V4L2_MEMORY_MMAP;
  1991. + buf->buffer.index = buf->index;
  1992. +
  1993. + ret = ioctl(ctx->video_fd, VIDIOC_QUERYBUF, &buf->buffer);
  1994. + if (ret < 0) {
  1995. + av_log(avctx, AV_LOG_ERROR, "%s: query buffer %d failed, %s (%d)\n", __func__, buf->index, strerror(errno), errno);
  1996. + return ret;
  1997. + }
  1998. +
  1999. + if (V4L2_TYPE_IS_OUTPUT(type)) {
  2000. + void *addr = mmap(NULL, buf->size, PROT_READ | PROT_WRITE, MAP_SHARED, ctx->video_fd, V4L2_TYPE_IS_MULTIPLANAR(type) ? buf->buffer.m.planes[0].m.mem_offset : buf->buffer.m.offset);
  2001. + if (addr == MAP_FAILED) {
  2002. + av_log(avctx, AV_LOG_ERROR, "%s: mmap failed, %s (%d)\n", __func__, strerror(errno), errno);
  2003. + return -1;
  2004. + }
  2005. +
  2006. + buf->addr = (uint8_t*)addr;
  2007. + } else {
  2008. + struct v4l2_exportbuffer exportbuffer = {
  2009. + .type = type,
  2010. + .index = buf->index,
  2011. + .flags = O_RDONLY,
  2012. + };
  2013. +
  2014. + ret = ioctl(ctx->video_fd, VIDIOC_EXPBUF, &exportbuffer);
  2015. + if (ret < 0) {
  2016. + av_log(avctx, AV_LOG_ERROR, "%s: export buffer %d failed, %s (%d)\n", __func__, buf->index, strerror(errno), errno);
  2017. + return ret;
  2018. + }
  2019. +
  2020. + buf->fd = exportbuffer.fd;
  2021. + }
  2022. +
  2023. + av_log(avctx, AV_LOG_DEBUG, "%s: buf=%p index=%d fd=%d addr=%p width=%u height=%u size=%u\n", __func__, buf, buf->index, buf->fd, buf->addr, buf->width, buf->height, buf->size);
  2024. + return 0;
  2025. +}
  2026. +
  2027. +static void v4l2_request_buffer_free(V4L2RequestBuffer *buf)
  2028. +{
  2029. + av_log(NULL, AV_LOG_DEBUG, "%s: buf=%p index=%d fd=%d addr=%p width=%u height=%u size=%u\n", __func__, buf, buf->index, buf->fd, buf->addr, buf->width, buf->height, buf->size);
  2030. +
  2031. + if (buf->addr)
  2032. + munmap(buf->addr, buf->size);
  2033. +
  2034. + if (buf->fd >= 0)
  2035. + close(buf->fd);
  2036. +}
  2037. +
  2038. +static void v4l2_request_frame_free(void *opaque, uint8_t *data)
  2039. +{
  2040. + AVCodecContext *avctx = opaque;
  2041. + V4L2RequestDescriptor *req = (V4L2RequestDescriptor*)data;
  2042. +
  2043. + av_log(NULL, AV_LOG_DEBUG, "%s: avctx=%p data=%p request_fd=%d\n", __func__, avctx, data, req->request_fd);
  2044. +
  2045. + if (req->request_fd >= 0)
  2046. + close(req->request_fd);
  2047. +
  2048. + v4l2_request_buffer_free(&req->capture);
  2049. + v4l2_request_buffer_free(&req->output);
  2050. +
  2051. + av_free(data);
  2052. +}
  2053. +
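+ /*
+  * Buffer pool allocator for hw frames: every pool entry is a
+  * V4L2RequestDescriptor holding an output buffer, a capture buffer and its
+  * own media request fd (MEDIA_IOC_REQUEST_ALLOC), all released again in
+  * v4l2_request_frame_free().
+  */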
  2054. +static AVBufferRef *v4l2_request_frame_alloc(void *opaque, int size)
  2055. +{
  2056. + AVCodecContext *avctx = opaque;
  2057. + V4L2RequestContext *ctx = avctx->internal->hwaccel_priv_data;
  2058. + V4L2RequestDescriptor *req;
  2059. + AVBufferRef *ref;
  2060. + uint8_t *data;
  2061. + int ret;
  2062. +
  2063. + data = av_mallocz(size);
  2064. + if (!data)
  2065. + return NULL;
  2066. +
  2067. + av_log(avctx, AV_LOG_DEBUG, "%s: avctx=%p size=%d data=%p\n", __func__, avctx, size, data);
  2068. +
  2069. + ref = av_buffer_create(data, size, v4l2_request_frame_free, avctx, 0);
  2070. + if (!ref) {
  2071. + av_freep(&data);
  2072. + return NULL;
  2073. + }
  2074. +
  2075. + req = (V4L2RequestDescriptor*)data;
  2076. + req->request_fd = -1;
  2077. + req->output.fd = -1;
  2078. + req->capture.fd = -1;
  2079. +
  2080. + ret = v4l2_request_buffer_alloc(avctx, &req->output, ctx->output_type);
  2081. + if (ret < 0) {
  2082. + av_buffer_unref(&ref);
  2083. + return NULL;
  2084. + }
  2085. +
  2086. + ret = v4l2_request_buffer_alloc(avctx, &req->capture, ctx->format.type);
  2087. + if (ret < 0) {
  2088. + av_buffer_unref(&ref);
  2089. + return NULL;
  2090. + }
  2091. +
  2092. + ret = ioctl(ctx->media_fd, MEDIA_IOC_REQUEST_ALLOC, &req->request_fd);
  2093. + if (ret < 0) {
  2094. + av_log(avctx, AV_LOG_ERROR, "%s: request alloc failed, %s (%d)\n", __func__, strerror(errno), errno);
  2095. + av_buffer_unref(&ref);
  2096. + return NULL;
  2097. + }
  2098. +
  2099. + av_log(avctx, AV_LOG_DEBUG, "%s: avctx=%p size=%d data=%p request_fd=%d\n", __func__, avctx, size, data, req->request_fd);
  2100. + return ref;
  2101. +}
  2102. +
  2103. +static void v4l2_request_pool_free(void *opaque)
  2104. +{
  2105. + av_log(NULL, AV_LOG_DEBUG, "%s: opaque=%p\n", __func__, opaque);
  2106. +}
  2107. +
  2108. +static void v4l2_request_hwframe_ctx_free(AVHWFramesContext *hwfc)
  2109. +{
  2110. + av_log(NULL, AV_LOG_DEBUG, "%s: hwfc=%p pool=%p\n", __func__, hwfc, hwfc->pool);
  2111. +
  2112. + av_buffer_pool_flush(hwfc->pool);
  2113. + av_buffer_pool_uninit(&hwfc->pool);
  2114. +}
  2115. +
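+ /*
+  * Describe the DRM PRIME hw frames pool: dimensions come from the
+  * negotiated capture format, sw_format is reported as NV12, and the
+  * initial pool size is enlarged for codecs that keep more references
+  * (VP9, then VP8, then the rest).
+  */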
  2116. +int ff_v4l2_request_frame_params(AVCodecContext *avctx, AVBufferRef *hw_frames_ctx)
  2117. +{
  2118. + V4L2RequestContext *ctx = avctx->internal->hwaccel_priv_data;
  2119. + AVHWFramesContext *hwfc = (AVHWFramesContext*)hw_frames_ctx->data;
  2120. +
  2121. + hwfc->format = AV_PIX_FMT_DRM_PRIME;
  2122. + hwfc->sw_format = AV_PIX_FMT_NV12;
  2123. + if (V4L2_TYPE_IS_MULTIPLANAR(ctx->format.type)) {
  2124. + hwfc->width = ctx->format.fmt.pix_mp.width;
  2125. + hwfc->height = ctx->format.fmt.pix_mp.height;
  2126. + } else {
  2127. + hwfc->width = ctx->format.fmt.pix.width;
  2128. + hwfc->height = ctx->format.fmt.pix.height;
  2129. + }
  2130. +
  2131. + hwfc->pool = av_buffer_pool_init2(sizeof(V4L2RequestDescriptor), avctx, v4l2_request_frame_alloc, v4l2_request_pool_free);
  2132. + if (!hwfc->pool)
  2133. + return AVERROR(ENOMEM);
  2134. +
  2135. + hwfc->free = v4l2_request_hwframe_ctx_free;
  2136. +
  2137. + hwfc->initial_pool_size = 1;
  2138. +
  2139. + switch (avctx->codec_id) {
  2140. + case AV_CODEC_ID_VP9:
  2141. + hwfc->initial_pool_size += 8;
  2142. + break;
  2143. + case AV_CODEC_ID_VP8:
  2144. + hwfc->initial_pool_size += 3;
  2145. + break;
  2146. + default:
  2147. + hwfc->initial_pool_size += 2;
  2148. + }
  2149. +
  2150. + av_log(avctx, AV_LOG_DEBUG, "%s: avctx=%p ctx=%p hw_frames_ctx=%p hwfc=%p pool=%p width=%d height=%d initial_pool_size=%d\n", __func__, avctx, ctx, hw_frames_ctx, hwfc, hwfc->pool, hwfc->width, hwfc->height, hwfc->initial_pool_size);
  2151. +
  2152. + return 0;
  2153. +}
  2154. diff -uNr '--exclude=.git' '--exclude=sunxi' ffmpeg_n4.2.2/libavcodec/v4l2_request.h ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/v4l2_request.h
  2155. --- ffmpeg_n4.2.2/libavcodec/v4l2_request.h 1969-12-31 16:00:00.000000000 -0800
  2156. +++ ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/v4l2_request.h 2020-05-26 03:16:39.653173493 -0700
  2157. @@ -0,0 +1,79 @@
  2158. +/*
  2159. + * This file is part of FFmpeg.
  2160. + *
  2161. + * FFmpeg is free software; you can redistribute it and/or
  2162. + * modify it under the terms of the GNU Lesser General Public
  2163. + * License as published by the Free Software Foundation; either
  2164. + * version 2.1 of the License, or (at your option) any later version.
  2165. + *
  2166. + * FFmpeg is distributed in the hope that it will be useful,
  2167. + * but WITHOUT ANY WARRANTY; without even the implied warranty of
  2168. + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  2169. + * Lesser General Public License for more details.
  2170. + *
  2171. + * You should have received a copy of the GNU Lesser General Public
  2172. + * License along with FFmpeg; if not, write to the Free Software
  2173. + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
  2174. + */
  2175. +
  2176. +#ifndef AVCODEC_V4L2_REQUEST_H
  2177. +#define AVCODEC_V4L2_REQUEST_H
  2178. +
  2179. +#include <linux/videodev2.h>
  2180. +
  2181. +#include "libavutil/hwcontext_drm.h"
  2182. +
  2183. +typedef struct V4L2RequestContext {
  2184. + int video_fd;
  2185. + int media_fd;
  2186. + enum v4l2_buf_type output_type;
  2187. + struct v4l2_format format;
  2188. + int timestamp;
  2189. +} V4L2RequestContext;
  2190. +
  2191. +typedef struct V4L2RequestBuffer {
  2192. + int index;
  2193. + int fd;
  2194. + uint8_t *addr;
  2195. + uint32_t width;
  2196. + uint32_t height;
  2197. + uint32_t size;
  2198. + uint32_t used;
  2199. + uint32_t capabilities;
  2200. + struct v4l2_buffer buffer;
  2201. +} V4L2RequestBuffer;
  2202. +
  2203. +typedef struct V4L2RequestDescriptor {
  2204. + AVDRMFrameDescriptor drm;
  2205. + int request_fd;
  2206. + V4L2RequestBuffer output;
  2207. + V4L2RequestBuffer capture;
  2208. +} V4L2RequestDescriptor;
  2209. +
  2210. +uint64_t ff_v4l2_request_get_capture_timestamp(AVFrame *frame);
  2211. +
  2212. +int ff_v4l2_request_reset_frame(AVCodecContext *avctx, AVFrame *frame);
  2213. +
  2214. +int ff_v4l2_request_append_output_buffer(AVCodecContext *avctx, AVFrame *frame, const uint8_t *data, uint32_t size);
  2215. +
  2216. +int ff_v4l2_request_set_controls(AVCodecContext *avctx, struct v4l2_ext_control *control, int count);
  2217. +
  2218. +int ff_v4l2_request_get_controls(AVCodecContext *avctx, struct v4l2_ext_control *control, int count);
  2219. +
  2220. +int ff_v4l2_request_query_control(AVCodecContext *avctx, struct v4l2_query_ext_ctrl *control);
  2221. +
  2222. +int ff_v4l2_request_query_control_default_value(AVCodecContext *avctx, uint32_t id);
  2223. +
  2224. +int ff_v4l2_request_decode_slice(AVCodecContext *avctx, AVFrame *frame, struct v4l2_ext_control *control, int count, int first_slice, int last_slice);
  2225. +
  2226. +int ff_v4l2_request_decode_frame(AVCodecContext *avctx, AVFrame *frame, struct v4l2_ext_control *control, int count);
  2227. +
  2228. +int ff_v4l2_request_output_frame(AVCodecContext *avctx, AVFrame *frame);
  2229. +
  2230. +int ff_v4l2_request_init(AVCodecContext *avctx, uint32_t pixelformat, uint32_t buffersize, struct v4l2_ext_control *control, int count);
  2231. +
  2232. +int ff_v4l2_request_uninit(AVCodecContext *avctx);
  2233. +
  2234. +int ff_v4l2_request_frame_params(AVCodecContext *avctx, AVBufferRef *hw_frames_ctx);
  2235. +
  2236. +#endif /* AVCODEC_V4L2_REQUEST_H */
  2237. diff -uNr '--exclude=.git' '--exclude=sunxi' ffmpeg_n4.2.2/libavcodec/v4l2_request_h264.c ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/v4l2_request_h264.c
  2238. --- ffmpeg_n4.2.2/libavcodec/v4l2_request_h264.c 1969-12-31 16:00:00.000000000 -0800
  2239. +++ ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/v4l2_request_h264.c 2020-05-26 03:16:39.653173493 -0700
  2240. @@ -0,0 +1,467 @@
  2241. +/*
  2242. + * This file is part of FFmpeg.
  2243. + *
  2244. + * FFmpeg is free software; you can redistribute it and/or
  2245. + * modify it under the terms of the GNU Lesser General Public
  2246. + * License as published by the Free Software Foundation; either
  2247. + * version 2.1 of the License, or (at your option) any later version.
  2248. + *
  2249. + * FFmpeg is distributed in the hope that it will be useful,
  2250. + * but WITHOUT ANY WARRANTY; without even the implied warranty of
  2251. + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  2252. + * Lesser General Public License for more details.
  2253. + *
  2254. + * You should have received a copy of the GNU Lesser General Public
  2255. + * License along with FFmpeg; if not, write to the Free Software
  2256. + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
  2257. + */
  2258. +
  2259. +#include "h264dec.h"
  2260. +#include "hwaccel.h"
  2261. +#include "v4l2_request.h"
  2262. +#include "h264-ctrls.h"
  2263. +
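+ /*
+  * Per-picture control payloads (SPS, PPS, scaling matrix, decode and slice
+  * parameters) accumulated during parsing and submitted together with the
+  * slice data.
+  */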
  2264. +typedef struct V4L2RequestControlsH264 {
  2265. + struct v4l2_ctrl_h264_sps sps;
  2266. + struct v4l2_ctrl_h264_pps pps;
  2267. + struct v4l2_ctrl_h264_scaling_matrix scaling_matrix;
  2268. + struct v4l2_ctrl_h264_decode_params decode_params;
  2269. + struct v4l2_ctrl_h264_slice_params slice_params[MAX_SLICES];
  2270. + int first_slice;
  2271. +} V4L2RequestControlsH264;
  2272. +
  2273. +typedef struct V4L2RequestContextH264 {
  2274. + V4L2RequestContext base;
  2275. + int decode_mode;
  2276. + int start_code;
  2277. + int max_slices;
  2278. +} V4L2RequestContextH264;
  2279. +
  2280. +static uint8_t nalu_slice_start_code[] = { 0x00, 0x00, 0x01 };
  2281. +
  2282. +static void fill_weight_factors(struct v4l2_h264_weight_factors *factors, int list, const H264SliceContext *sl)
  2283. +{
  2284. + for (int i = 0; i < sl->ref_count[list]; i++) {
  2285. + if (sl->pwt.luma_weight_flag[list]) {
  2286. + factors->luma_weight[i] = sl->pwt.luma_weight[i][list][0];
  2287. + factors->luma_offset[i] = sl->pwt.luma_weight[i][list][1];
  2288. + } else {
  2289. + factors->luma_weight[i] = 1 << sl->pwt.luma_log2_weight_denom;
  2290. + factors->luma_offset[i] = 0;
  2291. + }
  2292. + for (int j = 0; j < 2; j++) {
  2293. + if (sl->pwt.chroma_weight_flag[list]) {
  2294. + factors->chroma_weight[i][j] = sl->pwt.chroma_weight[i][list][j][0];
  2295. + factors->chroma_offset[i][j] = sl->pwt.chroma_weight[i][list][j][1];
  2296. + } else {
  2297. + factors->chroma_weight[i][j] = 1 << sl->pwt.chroma_log2_weight_denom;
  2298. + factors->chroma_offset[i][j] = 0;
  2299. + }
  2300. + }
  2301. + }
  2302. +}
  2303. +
  2304. +static void fill_dpb_entry(struct v4l2_h264_dpb_entry *entry, const H264Picture *pic)
  2305. +{
  2306. + entry->reference_ts = ff_v4l2_request_get_capture_timestamp(pic->f);
  2307. + entry->frame_num = pic->frame_num;
  2308. + entry->pic_num = pic->pic_id;
  2309. + entry->flags = V4L2_H264_DPB_ENTRY_FLAG_VALID;
  2310. + if (pic->reference & PICT_TOP_FIELD)
  2311. + entry->flags |= V4L2_H264_DPB_ENTRY_FLAG_TOP_REF;
  2312. + if (pic->reference & PICT_BOTTOM_FIELD)
  2313. + entry->flags |= V4L2_H264_DPB_ENTRY_FLAG_BOTTOM_REF;
  2314. + if (pic->long_ref)
  2315. + entry->flags |= V4L2_H264_DPB_ENTRY_FLAG_LONG_TERM;
  2316. + if (pic->field_picture)
  2317. + entry->flags |= V4L2_H264_DPB_ENTRY_FLAG_FIELD_PIC;
  2318. + if (pic->field_poc[0] != INT_MAX)
  2319. + entry->top_field_order_cnt = pic->field_poc[0];
  2320. + if (pic->field_poc[1] != INT_MAX)
  2321. + entry->bottom_field_order_cnt = pic->field_poc[1];
  2322. +}
  2323. +
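+ /*
+  * Build the DPB from FFmpeg's short- and long-term reference lists; each
+  * entry is keyed by the capture buffer timestamp (reference_ts) so the
+  * driver can match references against previously decoded buffers.
+  */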
  2324. +static void fill_dpb(struct v4l2_ctrl_h264_decode_params *decode, const H264Context *h)
  2325. +{
  2326. + int entries = 0;
  2327. +
  2328. + for (int i = 0; i < h->short_ref_count; i++) {
  2329. + const H264Picture *pic = h->short_ref[i];
  2330. + if (pic && (pic->field_poc[0] != INT_MAX || pic->field_poc[1] != INT_MAX))
  2331. + fill_dpb_entry(&decode->dpb[entries++], pic);
  2332. + }
  2333. +
  2334. + if (!h->long_ref_count)
  2335. + return;
  2336. +
  2337. + for (int i = 0; i < FF_ARRAY_ELEMS(h->long_ref); i++) {
  2338. + const H264Picture *pic = h->long_ref[i];
  2339. + if (pic && (pic->field_poc[0] != INT_MAX || pic->field_poc[1] != INT_MAX))
  2340. + fill_dpb_entry(&decode->dpb[entries++], pic);
  2341. + }
  2342. +}
  2343. +
  2344. +static uint8_t get_dpb_index(struct v4l2_ctrl_h264_decode_params *decode, const H264Ref *ref)
  2345. +{
  2346. + uint64_t timestamp;
  2347. +
  2348. + if (!ref->parent)
  2349. + return 0;
  2350. +
  2351. + timestamp = ff_v4l2_request_get_capture_timestamp(ref->parent->f);
  2352. +
  2353. + for (uint8_t i = 0; i < FF_ARRAY_ELEMS(decode->dpb); i++) {
  2354. + struct v4l2_h264_dpb_entry *entry = &decode->dpb[i];
  2355. + if ((entry->flags & V4L2_H264_DPB_ENTRY_FLAG_VALID) &&
  2356. + entry->reference_ts == timestamp)
  2357. + // TODO: signal reference type, possibly using the top 2 bits
  2358. + return i | ((ref->reference & 3) << 6);
  2359. + }
  2360. +
  2361. + return 0;
  2362. +}
  2363. +
  2364. +static void fill_sps(struct v4l2_ctrl_h264_sps *ctrl, const H264Context *h)
  2365. +{
  2366. + const SPS *sps = h->ps.sps;
  2367. +
  2368. + *ctrl = (struct v4l2_ctrl_h264_sps) {
  2369. + .profile_idc = sps->profile_idc,
  2370. + .constraint_set_flags = sps->constraint_set_flags,
  2371. + .level_idc = sps->level_idc,
  2372. + .seq_parameter_set_id = sps->sps_id,
  2373. + .chroma_format_idc = sps->chroma_format_idc,
  2374. + .bit_depth_luma_minus8 = sps->bit_depth_luma - 8,
  2375. + .bit_depth_chroma_minus8 = sps->bit_depth_chroma - 8,
  2376. + .log2_max_frame_num_minus4 = sps->log2_max_frame_num - 4,
  2377. + .pic_order_cnt_type = sps->poc_type,
  2378. + .log2_max_pic_order_cnt_lsb_minus4 = sps->log2_max_poc_lsb - 4,
  2379. + .max_num_ref_frames = sps->ref_frame_count,
  2380. + .num_ref_frames_in_pic_order_cnt_cycle = sps->poc_cycle_length,
  2381. + //.offset_for_ref_frame[255] - not required? not set by libva-v4l2-request - copy sps->offset_for_ref_frame
  2382. + .offset_for_non_ref_pic = sps->offset_for_non_ref_pic,
  2383. + .offset_for_top_to_bottom_field = sps->offset_for_top_to_bottom_field,
  2384. + .pic_width_in_mbs_minus1 = h->mb_width - 1,
  2385. + .pic_height_in_map_units_minus1 = sps->frame_mbs_only_flag ? h->mb_height - 1 : h->mb_height / 2 - 1,
  2386. + };
  2387. +
  2388. + if (sps->residual_color_transform_flag)
  2389. + ctrl->flags |= V4L2_H264_SPS_FLAG_SEPARATE_COLOUR_PLANE;
  2390. + if (sps->transform_bypass)
  2391. + ctrl->flags |= V4L2_H264_SPS_FLAG_QPPRIME_Y_ZERO_TRANSFORM_BYPASS;
  2392. + if (sps->delta_pic_order_always_zero_flag)
  2393. + ctrl->flags |= V4L2_H264_SPS_FLAG_DELTA_PIC_ORDER_ALWAYS_ZERO;
  2394. + if (sps->gaps_in_frame_num_allowed_flag)
  2395. + ctrl->flags |= V4L2_H264_SPS_FLAG_GAPS_IN_FRAME_NUM_VALUE_ALLOWED;
  2396. + if (sps->frame_mbs_only_flag)
  2397. + ctrl->flags |= V4L2_H264_SPS_FLAG_FRAME_MBS_ONLY;
  2398. + if (sps->mb_aff)
  2399. + ctrl->flags |= V4L2_H264_SPS_FLAG_MB_ADAPTIVE_FRAME_FIELD;
  2400. + if (sps->direct_8x8_inference_flag)
  2401. + ctrl->flags |= V4L2_H264_SPS_FLAG_DIRECT_8X8_INFERENCE;
  2402. +}
  2403. +
  2404. +static void fill_pps(struct v4l2_ctrl_h264_pps *ctrl, const H264Context *h)
  2405. +{
  2406. + const SPS *sps = h->ps.sps;
  2407. + const PPS *pps = h->ps.pps;
  2408. + const H264SliceContext *sl = &h->slice_ctx[0];
  2409. + int qp_bd_offset = 6 * (sps->bit_depth_luma - 8);
  2410. +
  2411. + *ctrl = (struct v4l2_ctrl_h264_pps) {
  2412. + .pic_parameter_set_id = sl->pps_id,
  2413. + .seq_parameter_set_id = pps->sps_id,
  2414. + .num_slice_groups_minus1 = pps->slice_group_count - 1,
  2415. + .num_ref_idx_l0_default_active_minus1 = pps->ref_count[0] - 1,
  2416. + .num_ref_idx_l1_default_active_minus1 = pps->ref_count[1] - 1,
  2417. + .weighted_bipred_idc = pps->weighted_bipred_idc,
  2418. + .pic_init_qp_minus26 = pps->init_qp - 26 - qp_bd_offset,
  2419. + .pic_init_qs_minus26 = pps->init_qs - 26 - qp_bd_offset,
  2420. + .chroma_qp_index_offset = pps->chroma_qp_index_offset[0],
  2421. + .second_chroma_qp_index_offset = pps->chroma_qp_index_offset[1],
  2422. + };
  2423. +
  2424. + if (pps->cabac)
  2425. + ctrl->flags |= V4L2_H264_PPS_FLAG_ENTROPY_CODING_MODE;
  2426. + if (pps->pic_order_present)
  2427. + ctrl->flags |= V4L2_H264_PPS_FLAG_BOTTOM_FIELD_PIC_ORDER_IN_FRAME_PRESENT;
  2428. + if (pps->weighted_pred)
  2429. + ctrl->flags |= V4L2_H264_PPS_FLAG_WEIGHTED_PRED;
  2430. + if (pps->deblocking_filter_parameters_present)
  2431. + ctrl->flags |= V4L2_H264_PPS_FLAG_DEBLOCKING_FILTER_CONTROL_PRESENT;
  2432. + if (pps->constrained_intra_pred)
  2433. + ctrl->flags |= V4L2_H264_PPS_FLAG_CONSTRAINED_INTRA_PRED;
  2434. + if (pps->redundant_pic_cnt_present)
  2435. + ctrl->flags |= V4L2_H264_PPS_FLAG_REDUNDANT_PIC_CNT_PRESENT;
  2436. + if (pps->transform_8x8_mode)
  2437. + ctrl->flags |= V4L2_H264_PPS_FLAG_TRANSFORM_8X8_MODE;
  2438. +}
  2439. +
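+ /*
+  * start_frame: fill the SPS, PPS, scaling matrix and decode parameters for
+  * the current picture, build its DPB and reset the per-frame request so
+  * slice data can be appended.
+  */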
  2440. +static int v4l2_request_h264_start_frame(AVCodecContext *avctx,
  2441. + av_unused const uint8_t *buffer,
  2442. + av_unused uint32_t size)
  2443. +{
  2444. + const H264Context *h = avctx->priv_data;
  2445. + const PPS *pps = h->ps.pps;
  2446. + const SPS *sps = h->ps.sps;
  2447. + V4L2RequestControlsH264 *controls = h->cur_pic_ptr->hwaccel_picture_private;
  2448. +
  2449. + fill_sps(&controls->sps, h);
  2450. + fill_pps(&controls->pps, h);
  2451. +
  2452. + memcpy(controls->scaling_matrix.scaling_list_4x4, pps->scaling_matrix4, sizeof(controls->scaling_matrix.scaling_list_4x4));
  2453. + memcpy(controls->scaling_matrix.scaling_list_8x8[0], pps->scaling_matrix8[0], sizeof(controls->scaling_matrix.scaling_list_8x8[0]));
  2454. + memcpy(controls->scaling_matrix.scaling_list_8x8[1], pps->scaling_matrix8[3], sizeof(controls->scaling_matrix.scaling_list_8x8[1]));
  2455. +
  2456. + if (sps->chroma_format_idc == 3) {
  2457. + memcpy(controls->scaling_matrix.scaling_list_8x8[2], pps->scaling_matrix8[1], sizeof(controls->scaling_matrix.scaling_list_8x8[2]));
  2458. + memcpy(controls->scaling_matrix.scaling_list_8x8[3], pps->scaling_matrix8[4], sizeof(controls->scaling_matrix.scaling_list_8x8[3]));
  2459. + memcpy(controls->scaling_matrix.scaling_list_8x8[4], pps->scaling_matrix8[2], sizeof(controls->scaling_matrix.scaling_list_8x8[4]));
  2460. + memcpy(controls->scaling_matrix.scaling_list_8x8[5], pps->scaling_matrix8[5], sizeof(controls->scaling_matrix.scaling_list_8x8[5]));
  2461. + }
  2462. +
  2463. + controls->decode_params = (struct v4l2_ctrl_h264_decode_params) {
  2464. + .num_slices = 0,
  2465. + .nal_ref_idc = h->nal_ref_idc,
  2466. + .top_field_order_cnt = h->cur_pic_ptr->field_poc[0] != INT_MAX ? h->cur_pic_ptr->field_poc[0] : 0,
  2467. + .bottom_field_order_cnt = h->cur_pic_ptr->field_poc[1] != INT_MAX ? h->cur_pic_ptr->field_poc[1] : 0,
  2468. + };
  2469. +
  2470. + if (h->picture_idr)
  2471. + controls->decode_params.flags |= V4L2_H264_DECODE_PARAM_FLAG_IDR_PIC;
  2472. +
  2473. + fill_dpb(&controls->decode_params, h);
  2474. +
  2475. + controls->first_slice = !FIELD_PICTURE(h) || h->first_field;
  2476. +
  2477. + return ff_v4l2_request_reset_frame(avctx, h->cur_pic_ptr->f);
  2478. +}
  2479. +
  2480. +static int v4l2_request_h264_queue_decode(AVCodecContext *avctx, int last_slice)
  2481. +{
  2482. + const H264Context *h = avctx->priv_data;
  2483. + V4L2RequestControlsH264 *controls = h->cur_pic_ptr->hwaccel_picture_private;
  2484. + V4L2RequestContextH264 *ctx = avctx->internal->hwaccel_priv_data;
  2485. +
  2486. + struct v4l2_ext_control control[] = {
  2487. + {
  2488. + .id = V4L2_CID_MPEG_VIDEO_H264_SPS,
  2489. + .ptr = &controls->sps,
  2490. + .size = sizeof(controls->sps),
  2491. + },
  2492. + {
  2493. + .id = V4L2_CID_MPEG_VIDEO_H264_PPS,
  2494. + .ptr = &controls->pps,
  2495. + .size = sizeof(controls->pps),
  2496. + },
  2497. + {
  2498. + .id = V4L2_CID_MPEG_VIDEO_H264_SCALING_MATRIX,
  2499. + .ptr = &controls->scaling_matrix,
  2500. + .size = sizeof(controls->scaling_matrix),
  2501. + },
  2502. + {
  2503. + .id = V4L2_CID_MPEG_VIDEO_H264_SLICE_PARAMS,
  2504. + .ptr = &controls->slice_params,
  2505. + .size = sizeof(controls->slice_params[0]) * FFMAX(FFMIN(controls->decode_params.num_slices, MAX_SLICES), ctx->max_slices),
  2506. + },
  2507. + {
  2508. + .id = V4L2_CID_MPEG_VIDEO_H264_DECODE_PARAMS,
  2509. + .ptr = &controls->decode_params,
  2510. + .size = sizeof(controls->decode_params),
  2511. + },
  2512. + };
  2513. +
  2514. + if (ctx->decode_mode == V4L2_MPEG_VIDEO_H264_DECODE_MODE_SLICE_BASED)
  2515. + return ff_v4l2_request_decode_slice(avctx, h->cur_pic_ptr->f, control, FF_ARRAY_ELEMS(control), controls->first_slice, last_slice);
  2516. +
  2517. + return ff_v4l2_request_decode_frame(avctx, h->cur_pic_ptr->f, control, FF_ARRAY_ELEMS(control));
  2518. +}
  2519. +
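+ /*
+  * decode_slice: in slice-based mode any previously accumulated slice is
+  * flushed first (with the capture buffer held), then the new slice's
+  * parameters are filled and its bitstream appended to the output buffer,
+  * prefixed with an Annex-B start code when the driver expects one.
+  */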
  2520. +static int v4l2_request_h264_decode_slice(AVCodecContext *avctx, const uint8_t *buffer, uint32_t size)
  2521. +{
  2522. + const H264Context *h = avctx->priv_data;
  2523. + const PPS *pps = h->ps.pps;
  2524. + const H264SliceContext *sl = &h->slice_ctx[0];
  2525. + V4L2RequestControlsH264 *controls = h->cur_pic_ptr->hwaccel_picture_private;
  2526. + V4L2RequestContextH264 *ctx = avctx->internal->hwaccel_priv_data;
  2527. + V4L2RequestDescriptor *req = (V4L2RequestDescriptor*)h->cur_pic_ptr->f->data[0];
  2528. + int i, ret, count, slice = FFMIN(controls->decode_params.num_slices, MAX_SLICES - 1);
  2529. +
  2530. + if (ctx->decode_mode == V4L2_MPEG_VIDEO_H264_DECODE_MODE_SLICE_BASED && slice) {
  2531. + ret = v4l2_request_h264_queue_decode(avctx, 0);
  2532. + if (ret)
  2533. + return ret;
  2534. +
  2535. + ff_v4l2_request_reset_frame(avctx, h->cur_pic_ptr->f);
  2536. + slice = controls->decode_params.num_slices = 0;
  2537. + controls->first_slice = 0;
  2538. + }
  2539. +
  2540. + controls->slice_params[slice] = (struct v4l2_ctrl_h264_slice_params) {
  2541. + /* Size in bytes, including header */
  2542. + .size = 0,
  2543. + .start_byte_offset = req->output.used,
  2544. + /* Offset in bits to slice_data() from the beginning of this slice. */
  2545. + .header_bit_size = get_bits_count(&sl->gb),
  2546. +
  2547. + .first_mb_in_slice = sl->first_mb_addr,
  2548. + .slice_type = ff_h264_get_slice_type(sl),
  2549. + .pic_parameter_set_id = sl->pps_id,
  2550. + .colour_plane_id = 0, /* what is this? */
  2551. + .frame_num = h->poc.frame_num,
  2552. + .idr_pic_id = sl->idr_pic_id,
  2553. + .pic_order_cnt_lsb = sl->poc_lsb,
  2554. + .delta_pic_order_cnt_bottom = sl->delta_poc_bottom,
  2555. + .delta_pic_order_cnt0 = sl->delta_poc[0],
  2556. + .delta_pic_order_cnt1 = sl->delta_poc[1],
  2557. + .redundant_pic_cnt = sl->redundant_pic_count,
  2558. +
  2559. + /* Size in bits of dec_ref_pic_marking() syntax element. */
  2560. + .dec_ref_pic_marking_bit_size = sl->ref_pic_marking_size_in_bits,
  2561. + /* Size in bits of pic order count syntax. */
  2562. + .pic_order_cnt_bit_size = sl->pic_order_cnt_bit_size,
  2563. +
  2564. + .cabac_init_idc = sl->cabac_init_idc,
  2565. + .slice_qp_delta = sl->qscale - pps->init_qp,
  2566. + .slice_qs_delta = 0, /* XXX not implemented by FFmpeg */
  2567. + .disable_deblocking_filter_idc = sl->deblocking_filter < 2 ? !sl->deblocking_filter : sl->deblocking_filter,
  2568. + .slice_alpha_c0_offset_div2 = sl->slice_alpha_c0_offset / 2,
  2569. + .slice_beta_offset_div2 = sl->slice_beta_offset / 2,
  2570. + .slice_group_change_cycle = 0, /* what is this? */
  2571. +
  2572. + .num_ref_idx_l0_active_minus1 = sl->list_count > 0 ? sl->ref_count[0] - 1 : 0,
  2573. + .num_ref_idx_l1_active_minus1 = sl->list_count > 1 ? sl->ref_count[1] - 1 : 0,
  2574. + };
  2575. +
  2576. + if (FIELD_PICTURE(h))
  2577. + controls->slice_params[slice].flags |= V4L2_H264_SLICE_FLAG_FIELD_PIC;
  2578. + if (h->picture_structure == PICT_BOTTOM_FIELD)
  2579. + controls->slice_params[slice].flags |= V4L2_H264_SLICE_FLAG_BOTTOM_FIELD;
  2580. + if (sl->slice_type == AV_PICTURE_TYPE_B && sl->direct_spatial_mv_pred)
  2581. + controls->slice_params[slice].flags |= V4L2_H264_SLICE_FLAG_DIRECT_SPATIAL_MV_PRED;
  2582. +
  2583. + controls->slice_params[slice].pred_weight_table.chroma_log2_weight_denom = sl->pwt.chroma_log2_weight_denom;
  2584. + controls->slice_params[slice].pred_weight_table.luma_log2_weight_denom = sl->pwt.luma_log2_weight_denom;
  2585. +
  2586. + count = sl->list_count > 0 ? sl->ref_count[0] : 0;
  2587. + for (i = 0; i < count; i++)
  2588. + controls->slice_params[slice].ref_pic_list0[i] = get_dpb_index(&controls->decode_params, &sl->ref_list[0][i]);
  2589. + if (count)
  2590. + fill_weight_factors(&controls->slice_params[slice].pred_weight_table.weight_factors[0], 0, sl);
  2591. +
  2592. + count = sl->list_count > 1 ? sl->ref_count[1] : 0;
  2593. + for (i = 0; i < count; i++)
  2594. + controls->slice_params[slice].ref_pic_list1[i] = get_dpb_index(&controls->decode_params, &sl->ref_list[1][i]);
  2595. + if (count)
  2596. + fill_weight_factors(&controls->slice_params[slice].pred_weight_table.weight_factors[1], 1, sl);
  2597. +
  2598. + if (ctx->start_code == V4L2_MPEG_VIDEO_H264_START_CODE_ANNEX_B) {
  2599. + ret = ff_v4l2_request_append_output_buffer(avctx, h->cur_pic_ptr->f, nalu_slice_start_code, 3);
  2600. + if (ret)
  2601. + return ret;
  2602. + }
  2603. +
  2604. + ret = ff_v4l2_request_append_output_buffer(avctx, h->cur_pic_ptr->f, buffer, size);
  2605. + if (ret)
  2606. + return ret;
  2607. +
  2608. + controls->slice_params[slice].size = req->output.used - controls->slice_params[slice].start_byte_offset;
  2609. + controls->decode_params.num_slices++;
  2610. + return 0;
  2611. +}
  2612. +
  2613. +static int v4l2_request_h264_end_frame(AVCodecContext *avctx)
  2614. +{
  2615. + const H264Context *h = avctx->priv_data;
  2616. + return v4l2_request_h264_queue_decode(avctx, !FIELD_PICTURE(h) || !h->first_field);
  2617. +}
  2618. +
  2619. +static int v4l2_request_h264_set_controls(AVCodecContext *avctx)
  2620. +{
  2621. + V4L2RequestContextH264 *ctx = avctx->internal->hwaccel_priv_data;
  2622. + int ret;
  2623. +
  2624. + struct v4l2_ext_control control[] = {
  2625. + { .id = V4L2_CID_MPEG_VIDEO_H264_DECODE_MODE, },
  2626. + { .id = V4L2_CID_MPEG_VIDEO_H264_START_CODE, },
  2627. + };
  2628. + struct v4l2_query_ext_ctrl slice_params = {
  2629. + .id = V4L2_CID_MPEG_VIDEO_H264_SLICE_PARAMS,
  2630. + };
  2631. +
  2632. + ctx->decode_mode = ff_v4l2_request_query_control_default_value(avctx, V4L2_CID_MPEG_VIDEO_H264_DECODE_MODE);
  2633. + if (ctx->decode_mode != V4L2_MPEG_VIDEO_H264_DECODE_MODE_SLICE_BASED &&
  2634. + ctx->decode_mode != V4L2_MPEG_VIDEO_H264_DECODE_MODE_FRAME_BASED) {
  2635. + av_log(avctx, AV_LOG_ERROR, "%s: unsupported decode mode, %d\n", __func__, ctx->decode_mode);
  2636. + return AVERROR(EINVAL);
  2637. + }
  2638. +
  2639. + ctx->start_code = ff_v4l2_request_query_control_default_value(avctx, V4L2_CID_MPEG_VIDEO_H264_START_CODE);
  2640. + if (ctx->start_code != V4L2_MPEG_VIDEO_H264_START_CODE_NONE &&
  2641. + ctx->start_code != V4L2_MPEG_VIDEO_H264_START_CODE_ANNEX_B) {
  2642. + av_log(avctx, AV_LOG_ERROR, "%s: unsupported start code, %d\n", __func__, ctx->start_code);
  2643. + return AVERROR(EINVAL);
  2644. + }
  2645. +
  2646. + ret = ff_v4l2_request_query_control(avctx, &slice_params);
  2647. + if (ret)
  2648. + return ret;
  2649. +
  2650. + ctx->max_slices = slice_params.elems;
  2651. + if (ctx->max_slices > MAX_SLICES) {
  2652. + av_log(avctx, AV_LOG_ERROR, "%s: unsupported max slices, %d\n", __func__, ctx->max_slices);
  2653. + return AVERROR(EINVAL);
  2654. + }
  2655. +
  2656. + control[0].value = ctx->decode_mode;
  2657. + control[1].value = ctx->start_code;
  2658. +
  2659. + return ff_v4l2_request_set_controls(avctx, control, FF_ARRAY_ELEMS(control));
  2660. +}
  2661. +
  2662. +static int v4l2_request_h264_init(AVCodecContext *avctx)
  2663. +{
  2664. + const H264Context *h = avctx->priv_data;
  2665. + struct v4l2_ctrl_h264_sps sps;
  2666. + struct v4l2_ctrl_h264_pps pps;
  2667. + int ret;
  2668. +
  2669. + struct v4l2_ext_control control[] = {
  2670. + {
  2671. + .id = V4L2_CID_MPEG_VIDEO_H264_SPS,
  2672. + .ptr = &sps,
  2673. + .size = sizeof(sps),
  2674. + },
  2675. + {
  2676. + .id = V4L2_CID_MPEG_VIDEO_H264_PPS,
  2677. + .ptr = &pps,
  2678. + .size = sizeof(pps),
  2679. + },
  2680. + };
  2681. +
  2682. + fill_sps(&sps, h);
  2683. + fill_pps(&pps, h);
  2684. +
  2685. + ret = ff_v4l2_request_init(avctx, V4L2_PIX_FMT_H264_SLICE, 4 * 1024 * 1024, control, FF_ARRAY_ELEMS(control));
  2686. + if (ret)
  2687. + return ret;
  2688. +
  2689. + return v4l2_request_h264_set_controls(avctx);
  2690. +}
  2691. +
  2692. +const AVHWAccel ff_h264_v4l2request_hwaccel = {
  2693. + .name = "h264_v4l2request",
  2694. + .type = AVMEDIA_TYPE_VIDEO,
  2695. + .id = AV_CODEC_ID_H264,
  2696. + .pix_fmt = AV_PIX_FMT_DRM_PRIME,
  2697. + .start_frame = v4l2_request_h264_start_frame,
  2698. + .decode_slice = v4l2_request_h264_decode_slice,
  2699. + .end_frame = v4l2_request_h264_end_frame,
  2700. + .output_frame = ff_v4l2_request_output_frame,
  2701. + .frame_priv_data_size = sizeof(V4L2RequestControlsH264),
  2702. + .init = v4l2_request_h264_init,
  2703. + .uninit = ff_v4l2_request_uninit,
  2704. + .priv_data_size = sizeof(V4L2RequestContextH264),
  2705. + .frame_params = ff_v4l2_request_frame_params,
  2706. + .caps_internal = HWACCEL_CAP_ASYNC_SAFE,
  2707. +};
  2708. diff -uNr '--exclude=.git' '--exclude=sunxi' ffmpeg_n4.2.2/libavcodec/v4l2_request_hevc.c ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/v4l2_request_hevc.c
  2709. --- ffmpeg_n4.2.2/libavcodec/v4l2_request_hevc.c 1969-12-31 16:00:00.000000000 -0800
  2710. +++ ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/v4l2_request_hevc.c 2020-05-26 03:16:39.653173493 -0700
  2711. @@ -0,0 +1,581 @@
  2712. +/*
  2713. + * This file is part of FFmpeg.
  2714. + *
  2715. + * FFmpeg is free software; you can redistribute it and/or
  2716. + * modify it under the terms of the GNU Lesser General Public
  2717. + * License as published by the Free Software Foundation; either
  2718. + * version 2.1 of the License, or (at your option) any later version.
  2719. + *
  2720. + * FFmpeg is distributed in the hope that it will be useful,
  2721. + * but WITHOUT ANY WARRANTY; without even the implied warranty of
  2722. + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  2723. + * Lesser General Public License for more details.
  2724. + *
  2725. + * You should have received a copy of the GNU Lesser General Public
  2726. + * License along with FFmpeg; if not, write to the Free Software
  2727. + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
  2728. + */
  2729. +
  2730. +#include "hevcdec.h"
  2731. +#include "hwaccel.h"
  2732. +#include "v4l2_request.h"
  2733. +#include "hevc-ctrls.h"
  2734. +
  2735. +#define MAX_SLICES 16
  2736. +
  2737. +typedef struct V4L2RequestControlsHEVC {
  2738. + struct v4l2_ctrl_hevc_sps sps;
  2739. + struct v4l2_ctrl_hevc_pps pps;
  2740. + struct v4l2_ctrl_hevc_scaling_matrix scaling_matrix;
  2741. + struct v4l2_ctrl_hevc_slice_params slice_params[MAX_SLICES];
  2742. + int first_slice;
  2743. + int num_slices; //TODO: this should be in control
  2744. +} V4L2RequestControlsHEVC;
  2745. +
  2746. +typedef struct V4L2RequestContextHEVC {
  2747. + V4L2RequestContext base;
  2748. + int decode_mode;
  2749. + int start_code;
  2750. + int max_slices;
  2751. +} V4L2RequestContextHEVC;
  2752. +
  2753. +static uint8_t nalu_slice_start_code[] = { 0x00, 0x00, 0x01 };
  2754. +
  2755. +static void v4l2_request_hevc_fill_pred_table(const HEVCContext *h, struct v4l2_hevc_pred_weight_table *table)
  2756. +{
  2757. + int32_t luma_weight_denom, chroma_weight_denom;
  2758. + const SliceHeader *sh = &h->sh;
  2759. +
  2760. + if (sh->slice_type == HEVC_SLICE_I ||
  2761. + (sh->slice_type == HEVC_SLICE_P && !h->ps.pps->weighted_pred_flag) ||
  2762. + (sh->slice_type == HEVC_SLICE_B && !h->ps.pps->weighted_bipred_flag))
  2763. + return;
  2764. +
  2765. + table->luma_log2_weight_denom = sh->luma_log2_weight_denom;
  2766. +
  2767. + if (h->ps.sps->chroma_format_idc)
  2768. + table->delta_chroma_log2_weight_denom = sh->chroma_log2_weight_denom - sh->luma_log2_weight_denom;
  2769. +
  2770. + luma_weight_denom = (1 << sh->luma_log2_weight_denom);
  2771. + chroma_weight_denom = (1 << sh->chroma_log2_weight_denom);
  2772. +
  2773. + for (int i = 0; i < 15 && i < sh->nb_refs[L0]; i++) {
  2774. + table->delta_luma_weight_l0[i] = sh->luma_weight_l0[i] - luma_weight_denom;
  2775. + table->luma_offset_l0[i] = sh->luma_offset_l0[i];
  2776. + table->delta_chroma_weight_l0[i][0] = sh->chroma_weight_l0[i][0] - chroma_weight_denom;
  2777. + table->delta_chroma_weight_l0[i][1] = sh->chroma_weight_l0[i][1] - chroma_weight_denom;
  2778. + table->chroma_offset_l0[i][0] = sh->chroma_offset_l0[i][0];
  2779. + table->chroma_offset_l0[i][1] = sh->chroma_offset_l0[i][1];
  2780. + }
  2781. +
  2782. + if (sh->slice_type != HEVC_SLICE_B)
  2783. + return;
  2784. +
  2785. + for (int i = 0; i < 15 && i < sh->nb_refs[L1]; i++) {
  2786. + table->delta_luma_weight_l1[i] = sh->luma_weight_l1[i] - luma_weight_denom;
  2787. + table->luma_offset_l1[i] = sh->luma_offset_l1[i];
  2788. + table->delta_chroma_weight_l1[i][0] = sh->chroma_weight_l1[i][0] - chroma_weight_denom;
  2789. + table->delta_chroma_weight_l1[i][1] = sh->chroma_weight_l1[i][1] - chroma_weight_denom;
  2790. + table->chroma_offset_l1[i][0] = sh->chroma_offset_l1[i][0];
  2791. + table->chroma_offset_l1[i][1] = sh->chroma_offset_l1[i][1];
  2792. + }
  2793. +}
  2794. +
  2795. +static int find_frame_rps_type(const HEVCContext *h, uint64_t timestamp)
  2796. +{
  2797. + const HEVCFrame *frame;
  2798. + int i;
  2799. +
  2800. + for (i = 0; i < h->rps[ST_CURR_BEF].nb_refs; i++) {
  2801. + frame = h->rps[ST_CURR_BEF].ref[i];
  2802. + if (frame && timestamp == ff_v4l2_request_get_capture_timestamp(frame->frame))
  2803. + return V4L2_HEVC_DPB_ENTRY_RPS_ST_CURR_BEFORE;
  2804. + }
  2805. +
  2806. + for (i = 0; i < h->rps[ST_CURR_AFT].nb_refs; i++) {
  2807. + frame = h->rps[ST_CURR_AFT].ref[i];
  2808. + if (frame && timestamp == ff_v4l2_request_get_capture_timestamp(frame->frame))
  2809. + return V4L2_HEVC_DPB_ENTRY_RPS_ST_CURR_AFTER;
  2810. + }
  2811. +
  2812. + for (i = 0; i < h->rps[LT_CURR].nb_refs; i++) {
  2813. + frame = h->rps[LT_CURR].ref[i];
  2814. + if (frame && timestamp == ff_v4l2_request_get_capture_timestamp(frame->frame))
  2815. + return V4L2_HEVC_DPB_ENTRY_RPS_LT_CURR;
  2816. + }
  2817. +
  2818. + return 0;
  2819. +}
  2820. +
  2821. +static uint8_t get_ref_pic_index(const HEVCContext *h, const HEVCFrame *frame,
  2822. + struct v4l2_ctrl_hevc_slice_params *slice_params)
  2823. +{
  2824. + uint64_t timestamp;
  2825. +
  2826. + if (!frame)
  2827. + return 0;
  2828. +
  2829. + timestamp = ff_v4l2_request_get_capture_timestamp(frame->frame);
  2830. +
  2831. + for (uint8_t i = 0; i < slice_params->num_active_dpb_entries; i++) {
  2832. + struct v4l2_hevc_dpb_entry *entry = &slice_params->dpb[i];
  2833. + if (entry->timestamp == timestamp)
  2834. + return i;
  2835. + }
  2836. +
  2837. + return 0;
  2838. +}
  2839. +
  2840. +static void v4l2_request_hevc_fill_slice_params(const HEVCContext *h,
  2841. + struct v4l2_ctrl_hevc_slice_params *slice_params)
  2842. +{
  2843. + const HEVCFrame *pic = h->ref;
  2844. + const SliceHeader *sh = &h->sh;
  2845. + int i, entries = 0;
  2846. + RefPicList *rpl;
  2847. +
  2848. + *slice_params = (struct v4l2_ctrl_hevc_slice_params) {
  2849. + .bit_size = 0,
  2850. + .data_bit_offset = get_bits_count(&h->HEVClc->gb),
  2851. +
  2852. + /* ISO/IEC 23008-2, ITU-T Rec. H.265: General slice segment header */
  2853. + .slice_segment_addr = sh->slice_segment_addr,
  2854. +
  2855. + /* ISO/IEC 23008-2, ITU-T Rec. H.265: NAL unit header */
  2856. + .nal_unit_type = h->nal_unit_type,
  2857. + .nuh_temporal_id_plus1 = h->temporal_id + 1,
  2858. +
  2859. + /* ISO/IEC 23008-2, ITU-T Rec. H.265: General slice segment header */
  2860. + .slice_type = sh->slice_type,
  2861. + .colour_plane_id = sh->colour_plane_id,
  2862. + .slice_pic_order_cnt = pic->poc,
  2863. + .num_ref_idx_l0_active_minus1 = sh->nb_refs[L0] ? sh->nb_refs[L0] - 1 : 0,
  2864. + .num_ref_idx_l1_active_minus1 = sh->nb_refs[L1] ? sh->nb_refs[L1] - 1 : 0,
  2865. + .collocated_ref_idx = sh->slice_temporal_mvp_enabled_flag ? sh->collocated_ref_idx : 0,
  2866. + .five_minus_max_num_merge_cand = sh->slice_type == HEVC_SLICE_I ? 0 : 5 - sh->max_num_merge_cand,
  2867. + .slice_qp_delta = sh->slice_qp_delta,
  2868. + .slice_cb_qp_offset = sh->slice_cb_qp_offset,
  2869. + .slice_cr_qp_offset = sh->slice_cr_qp_offset,
  2870. + .slice_act_y_qp_offset = 0,
  2871. + .slice_act_cb_qp_offset = 0,
  2872. + .slice_act_cr_qp_offset = 0,
  2873. + .slice_beta_offset_div2 = sh->beta_offset / 2,
  2874. + .slice_tc_offset_div2 = sh->tc_offset / 2,
  2875. +
  2876. + /* ISO/IEC 23008-2, ITU-T Rec. H.265: Picture timing SEI message */
  2877. + .pic_struct = h->sei.picture_timing.picture_struct,
  2878. +
  2879. + /* ISO/IEC 23008-2, ITU-T Rec. H.265: General slice segment header */
  2880. + .num_rps_poc_st_curr_before = h->rps[ST_CURR_BEF].nb_refs,
  2881. + .num_rps_poc_st_curr_after = h->rps[ST_CURR_AFT].nb_refs,
  2882. + .num_rps_poc_lt_curr = h->rps[LT_CURR].nb_refs,
  2883. +
  2884. + .short_term_ref_pic_set_size = sh->short_term_ref_pic_set_size,
  2885. + .long_term_ref_pic_set_size = sh->long_term_ref_pic_set_size,
  2886. + };
  2887. +
  2888. + if (sh->slice_sample_adaptive_offset_flag[0])
  2889. + slice_params->flags |= V4L2_HEVC_SLICE_PARAMS_FLAG_SLICE_SAO_LUMA;
  2890. +
  2891. + if (sh->slice_sample_adaptive_offset_flag[1])
  2892. + slice_params->flags |= V4L2_HEVC_SLICE_PARAMS_FLAG_SLICE_SAO_CHROMA;
  2893. +
  2894. + if (sh->slice_temporal_mvp_enabled_flag)
  2895. + slice_params->flags |= V4L2_HEVC_SLICE_PARAMS_FLAG_SLICE_TEMPORAL_MVP_ENABLED;
  2896. +
  2897. + if (sh->mvd_l1_zero_flag)
  2898. + slice_params->flags |= V4L2_HEVC_SLICE_PARAMS_FLAG_MVD_L1_ZERO;
  2899. +
  2900. + if (sh->cabac_init_flag)
  2901. + slice_params->flags |= V4L2_HEVC_SLICE_PARAMS_FLAG_CABAC_INIT;
  2902. +
  2903. + if (sh->collocated_list == L0)
  2904. + slice_params->flags |= V4L2_HEVC_SLICE_PARAMS_FLAG_COLLOCATED_FROM_L0;
  2905. +
  2906. + if (sh->disable_deblocking_filter_flag)
  2907. + slice_params->flags |= V4L2_HEVC_SLICE_PARAMS_FLAG_SLICE_DEBLOCKING_FILTER_DISABLED;
  2908. +
  2909. + if (sh->slice_loop_filter_across_slices_enabled_flag)
  2910. + slice_params->flags |= V4L2_HEVC_SLICE_PARAMS_FLAG_SLICE_LOOP_FILTER_ACROSS_SLICES_ENABLED;
  2911. +
  2912. + for (i = 0; i < FF_ARRAY_ELEMS(h->DPB); i++) {
  2913. + const HEVCFrame *frame = &h->DPB[i];
  2914. + if (frame != pic && (frame->flags & (HEVC_FRAME_FLAG_LONG_REF | HEVC_FRAME_FLAG_SHORT_REF))) {
  2915. + struct v4l2_hevc_dpb_entry *entry = &slice_params->dpb[entries++];
  2916. +
  2917. + entry->timestamp = ff_v4l2_request_get_capture_timestamp(frame->frame);
  2918. + entry->rps = find_frame_rps_type(h, entry->timestamp);
  2919. + entry->field_pic = frame->frame->interlaced_frame;
  2920. +
  2921. + /* TODO: Interleaved: Get the POC for each field. */
  2922. + entry->pic_order_cnt[0] = frame->poc;
  2923. + entry->pic_order_cnt[1] = frame->poc;
  2924. + }
  2925. + }
  2926. +
  2927. + slice_params->num_active_dpb_entries = entries;
  2928. +
  2929. + if (sh->slice_type != HEVC_SLICE_I) {
  2930. + rpl = &h->ref->refPicList[0];
  2931. + for (i = 0; i < rpl->nb_refs; i++)
  2932. + slice_params->ref_idx_l0[i] = get_ref_pic_index(h, rpl->ref[i], slice_params);
  2933. + }
  2934. +
  2935. + if (sh->slice_type == HEVC_SLICE_B) {
  2936. + rpl = &h->ref->refPicList[1];
  2937. + for (i = 0; i < rpl->nb_refs; i++)
  2938. + slice_params->ref_idx_l1[i] = get_ref_pic_index(h, rpl->ref[i], slice_params);
  2939. + }
  2940. +
  2941. + v4l2_request_hevc_fill_pred_table(h, &slice_params->pred_weight_table);
  2942. +
  2943. + slice_params->num_entry_point_offsets = sh->num_entry_point_offsets;
  2944. + if (slice_params->num_entry_point_offsets > 256) {
  2945. + slice_params->num_entry_point_offsets = 256;
  2946. + av_log(NULL, AV_LOG_ERROR, "%s: Currently only 256 entry points are supported, but slice has %d entry points.\n", __func__, sh->num_entry_point_offsets);
  2947. + }
  2948. +
  2949. + for (i = 0; i < slice_params->num_entry_point_offsets; i++)
  2950. + slice_params->entry_point_offset_minus1[i] = sh->entry_point_offset[i] - 1;
  2951. +}
  2952. +
  2953. +static void fill_sps(struct v4l2_ctrl_hevc_sps *ctrl, const HEVCContext *h)
  2954. +{
  2955. + const HEVCSPS *sps = h->ps.sps;
  2956. + const HEVCPPS *pps = h->ps.pps;
  2957. +
  2958. + /* ISO/IEC 23008-2, ITU-T Rec. H.265: Sequence parameter set */
  2959. + *ctrl = (struct v4l2_ctrl_hevc_sps) {
  2960. + .video_parameter_set_id = sps->vps_id,
  2961. + .seq_parameter_set_id = pps->sps_id,
  2962. + .chroma_format_idc = sps->chroma_format_idc,
  2963. + .pic_width_in_luma_samples = sps->width,
  2964. + .pic_height_in_luma_samples = sps->height,
  2965. + .bit_depth_luma_minus8 = sps->bit_depth - 8,
  2966. + .bit_depth_chroma_minus8 = sps->bit_depth - 8,
  2967. + .log2_max_pic_order_cnt_lsb_minus4 = sps->log2_max_poc_lsb - 4,
  2968. + .sps_max_dec_pic_buffering_minus1 = sps->temporal_layer[sps->max_sub_layers - 1].max_dec_pic_buffering - 1,
  2969. + .sps_max_num_reorder_pics = sps->temporal_layer[sps->max_sub_layers - 1].num_reorder_pics,
  2970. + .sps_max_latency_increase_plus1 = sps->temporal_layer[sps->max_sub_layers - 1].max_latency_increase + 1,
  2971. + .log2_min_luma_coding_block_size_minus3 = sps->log2_min_cb_size - 3,
  2972. + .log2_diff_max_min_luma_coding_block_size = sps->log2_diff_max_min_coding_block_size,
  2973. + .log2_min_luma_transform_block_size_minus2 = sps->log2_min_tb_size - 2,
  2974. + .log2_diff_max_min_luma_transform_block_size = sps->log2_max_trafo_size - sps->log2_min_tb_size,
  2975. + .max_transform_hierarchy_depth_inter = sps->max_transform_hierarchy_depth_inter,
  2976. + .max_transform_hierarchy_depth_intra = sps->max_transform_hierarchy_depth_intra,
  2977. + .pcm_sample_bit_depth_luma_minus1 = sps->pcm.bit_depth - 1,
  2978. + .pcm_sample_bit_depth_chroma_minus1 = sps->pcm.bit_depth_chroma - 1,
  2979. + .log2_min_pcm_luma_coding_block_size_minus3 = sps->pcm.log2_min_pcm_cb_size - 3,
  2980. + .log2_diff_max_min_pcm_luma_coding_block_size = sps->pcm.log2_max_pcm_cb_size - sps->pcm.log2_min_pcm_cb_size,
  2981. + .num_short_term_ref_pic_sets = sps->nb_st_rps,
  2982. + .num_long_term_ref_pics_sps = sps->num_long_term_ref_pics_sps,
  2983. + };
  2984. +
  2985. + if (sps->separate_colour_plane_flag)
  2986. + ctrl->flags |= V4L2_HEVC_SPS_FLAG_SEPARATE_COLOUR_PLANE;
  2987. +
  2988. + if (sps->scaling_list_enable_flag)
  2989. + ctrl->flags |= V4L2_HEVC_SPS_FLAG_SCALING_LIST_ENABLED;
  2990. +
  2991. + if (sps->amp_enabled_flag)
  2992. + ctrl->flags |= V4L2_HEVC_SPS_FLAG_AMP_ENABLED;
  2993. +
  2994. + if (sps->sao_enabled)
  2995. + ctrl->flags |= V4L2_HEVC_SPS_FLAG_SAMPLE_ADAPTIVE_OFFSET;
  2996. +
  2997. + if (sps->pcm_enabled_flag)
  2998. + ctrl->flags |= V4L2_HEVC_SPS_FLAG_PCM_ENABLED;
  2999. +
  3000. + if (sps->pcm.loop_filter_disable_flag)
  3001. + ctrl->flags |= V4L2_HEVC_SPS_FLAG_PCM_LOOP_FILTER_DISABLED;
  3002. +
  3003. + if (sps->long_term_ref_pics_present_flag)
  3004. + ctrl->flags |= V4L2_HEVC_SPS_FLAG_LONG_TERM_REF_PICS_PRESENT;
  3005. +
  3006. + if (sps->sps_temporal_mvp_enabled_flag)
  3007. + ctrl->flags |= V4L2_HEVC_SPS_FLAG_SPS_TEMPORAL_MVP_ENABLED;
  3008. +
  3009. + if (sps->sps_strong_intra_smoothing_enable_flag)
  3010. + ctrl->flags |= V4L2_HEVC_SPS_FLAG_STRONG_INTRA_SMOOTHING_ENABLED;
  3011. +}
  3012. +
  3013. +static int v4l2_request_hevc_start_frame(AVCodecContext *avctx,
  3014. + av_unused const uint8_t *buffer,
  3015. + av_unused uint32_t size)
  3016. +{
  3017. + const HEVCContext *h = avctx->priv_data;
  3018. + const HEVCSPS *sps = h->ps.sps;
  3019. + const HEVCPPS *pps = h->ps.pps;
  3020. + const SliceHeader *sh = &h->sh;
  3021. + const ScalingList *sl = pps->scaling_list_data_present_flag ?
  3022. + &pps->scaling_list :
  3023. + sps->scaling_list_enable_flag ?
  3024. + &sps->scaling_list : NULL;
  3025. + V4L2RequestControlsHEVC *controls = h->ref->hwaccel_picture_private;
  3026. +
  3027. + fill_sps(&controls->sps, h);
  3028. +
  3029. + if (sl) {
  3030. + for (int i = 0; i < 6; i++) {
  3031. + for (int j = 0; j < 16; j++)
  3032. + controls->scaling_matrix.scaling_list_4x4[i][j] = sl->sl[0][i][j];
  3033. + for (int j = 0; j < 64; j++) {
  3034. + controls->scaling_matrix.scaling_list_8x8[i][j] = sl->sl[1][i][j];
  3035. + controls->scaling_matrix.scaling_list_16x16[i][j] = sl->sl[2][i][j];
  3036. + if (i < 2)
  3037. + controls->scaling_matrix.scaling_list_32x32[i][j] = sl->sl[3][i * 3][j];
  3038. + }
  3039. + controls->scaling_matrix.scaling_list_dc_coef_16x16[i] = sl->sl_dc[0][i];
  3040. + if (i < 2)
  3041. + controls->scaling_matrix.scaling_list_dc_coef_32x32[i] = sl->sl_dc[1][i * 3];
  3042. + }
  3043. + }
  3044. +
  3045. + /* ISO/IEC 23008-2, ITU-T Rec. H.265: Picture parameter set */
  3046. + controls->pps = (struct v4l2_ctrl_hevc_pps) {
  3047. + .pic_parameter_set_id = sh->pps_id,
  3048. + .num_ref_idx_l0_default_active_minus1 = pps->num_ref_idx_l0_default_active - 1,
  3049. + .num_ref_idx_l1_default_active_minus1 = pps->num_ref_idx_l1_default_active - 1,
  3050. + .num_extra_slice_header_bits = pps->num_extra_slice_header_bits,
  3051. + .init_qp_minus26 = pps->pic_init_qp_minus26,
  3052. + .diff_cu_qp_delta_depth = pps->diff_cu_qp_delta_depth,
  3053. + .pps_cb_qp_offset = pps->cb_qp_offset,
  3054. + .pps_cr_qp_offset = pps->cr_qp_offset,
  3055. + .pps_beta_offset_div2 = pps->beta_offset / 2,
  3056. + .pps_tc_offset_div2 = pps->tc_offset / 2,
  3057. + .log2_parallel_merge_level_minus2 = pps->log2_parallel_merge_level - 2,
  3058. + };
  3059. +
  3060. + if (pps->dependent_slice_segments_enabled_flag)
  3061. + controls->pps.flags |= V4L2_HEVC_PPS_FLAG_DEPENDENT_SLICE_SEGMENT;
  3062. +
  3063. + if (pps->output_flag_present_flag)
  3064. + controls->pps.flags |= V4L2_HEVC_PPS_FLAG_OUTPUT_FLAG_PRESENT;
  3065. +
  3066. + if (pps->sign_data_hiding_flag)
  3067. + controls->pps.flags |= V4L2_HEVC_PPS_FLAG_SIGN_DATA_HIDING_ENABLED;
  3068. +
  3069. + if (pps->cabac_init_present_flag)
  3070. + controls->pps.flags |= V4L2_HEVC_PPS_FLAG_CABAC_INIT_PRESENT;
  3071. +
  3072. + if (pps->constrained_intra_pred_flag)
  3073. + controls->pps.flags |= V4L2_HEVC_PPS_FLAG_CONSTRAINED_INTRA_PRED;
  3074. +
  3075. + if (pps->transform_skip_enabled_flag)
  3076. + controls->pps.flags |= V4L2_HEVC_PPS_FLAG_TRANSFORM_SKIP_ENABLED;
  3077. +
  3078. + if (pps->cu_qp_delta_enabled_flag)
  3079. + controls->pps.flags |= V4L2_HEVC_PPS_FLAG_CU_QP_DELTA_ENABLED;
  3080. +
  3081. + if (pps->pic_slice_level_chroma_qp_offsets_present_flag)
  3082. + controls->pps.flags |= V4L2_HEVC_PPS_FLAG_PPS_SLICE_CHROMA_QP_OFFSETS_PRESENT;
  3083. +
  3084. + if (pps->weighted_pred_flag)
  3085. + controls->pps.flags |= V4L2_HEVC_PPS_FLAG_WEIGHTED_PRED;
  3086. +
  3087. + if (pps->weighted_bipred_flag)
  3088. + controls->pps.flags |= V4L2_HEVC_PPS_FLAG_WEIGHTED_BIPRED;
  3089. +
  3090. + if (pps->transquant_bypass_enable_flag)
  3091. + controls->pps.flags |= V4L2_HEVC_PPS_FLAG_TRANSQUANT_BYPASS_ENABLED;
  3092. +
  3093. + if (pps->tiles_enabled_flag)
  3094. + controls->pps.flags |= V4L2_HEVC_PPS_FLAG_TILES_ENABLED;
  3095. +
  3096. + if (pps->entropy_coding_sync_enabled_flag)
  3097. + controls->pps.flags |= V4L2_HEVC_PPS_FLAG_ENTROPY_CODING_SYNC_ENABLED;
  3098. +
  3099. + if (pps->loop_filter_across_tiles_enabled_flag)
  3100. + controls->pps.flags |= V4L2_HEVC_PPS_FLAG_LOOP_FILTER_ACROSS_TILES_ENABLED;
  3101. +
  3102. + if (pps->seq_loop_filter_across_slices_enabled_flag)
  3103. + controls->pps.flags |= V4L2_HEVC_PPS_FLAG_PPS_LOOP_FILTER_ACROSS_SLICES_ENABLED;
  3104. +
  3105. + if (pps->deblocking_filter_override_enabled_flag)
  3106. + controls->pps.flags |= V4L2_HEVC_PPS_FLAG_DEBLOCKING_FILTER_OVERRIDE_ENABLED;
  3107. +
  3108. + if (pps->disable_dbf)
  3109. + controls->pps.flags |= V4L2_HEVC_PPS_FLAG_PPS_DISABLE_DEBLOCKING_FILTER;
  3110. +
  3111. + if (pps->lists_modification_present_flag)
  3112. + controls->pps.flags |= V4L2_HEVC_PPS_FLAG_LISTS_MODIFICATION_PRESENT;
  3113. +
  3114. + if (pps->slice_header_extension_present_flag)
  3115. + controls->pps.flags |= V4L2_HEVC_PPS_FLAG_SLICE_SEGMENT_HEADER_EXTENSION_PRESENT;
  3116. +
  3117. + if (pps->tiles_enabled_flag) {
  3118. + controls->pps.num_tile_columns_minus1 = pps->num_tile_columns - 1;
  3119. + controls->pps.num_tile_rows_minus1 = pps->num_tile_rows - 1;
  3120. +
  3121. + for (int i = 0; i < pps->num_tile_columns; i++)
  3122. + controls->pps.column_width_minus1[i] = pps->column_width[i] - 1;
  3123. +
  3124. + for (int i = 0; i < pps->num_tile_rows; i++)
  3125. + controls->pps.row_height_minus1[i] = pps->row_height[i] - 1;
  3126. + }
  3127. +
  3128. + controls->first_slice = 1;
  3129. + controls->num_slices = 0;
  3130. +
  3131. + return ff_v4l2_request_reset_frame(avctx, h->ref->frame);
  3132. +}
  3133. +
  3134. +static int v4l2_request_hevc_queue_decode(AVCodecContext *avctx, int last_slice)
  3135. +{
  3136. + const HEVCContext *h = avctx->priv_data;
  3137. + V4L2RequestControlsHEVC *controls = h->ref->hwaccel_picture_private;
  3138. + V4L2RequestContextHEVC *ctx = avctx->internal->hwaccel_priv_data;
  3139. +
  3140. + struct v4l2_ext_control control[] = {
  3141. + {
  3142. + .id = V4L2_CID_MPEG_VIDEO_HEVC_SPS,
  3143. + .ptr = &controls->sps,
  3144. + .size = sizeof(controls->sps),
  3145. + },
  3146. + {
  3147. + .id = V4L2_CID_MPEG_VIDEO_HEVC_PPS,
  3148. + .ptr = &controls->pps,
  3149. + .size = sizeof(controls->pps),
  3150. + },
  3151. + {
  3152. + .id = V4L2_CID_MPEG_VIDEO_HEVC_SCALING_MATRIX,
  3153. + .ptr = &controls->scaling_matrix,
  3154. + .size = sizeof(controls->scaling_matrix),
  3155. + },
  3156. + {
  3157. + .id = V4L2_CID_MPEG_VIDEO_HEVC_SLICE_PARAMS,
  3158. + .ptr = &controls->slice_params,
  3159. + .size = sizeof(controls->slice_params[0]) * FFMAX(FFMIN(controls->num_slices, MAX_SLICES), ctx->max_slices),
  3160. + },
  3161. + };
  3162. +
  3163. + if (ctx->decode_mode == V4L2_MPEG_VIDEO_HEVC_DECODE_MODE_SLICE_BASED)
  3164. + return ff_v4l2_request_decode_slice(avctx, h->ref->frame, control, FF_ARRAY_ELEMS(control), controls->first_slice, last_slice);
  3165. +
  3166. + controls->sps.num_slices = controls->num_slices;
  3167. +
  3168. + return ff_v4l2_request_decode_frame(avctx, h->ref->frame, control, FF_ARRAY_ELEMS(control));
  3169. +}
  3170. +
  3171. +static int v4l2_request_hevc_decode_slice(AVCodecContext *avctx, const uint8_t *buffer, uint32_t size)
  3172. +{
  3173. + const HEVCContext *h = avctx->priv_data;
  3174. + V4L2RequestControlsHEVC *controls = h->ref->hwaccel_picture_private;
  3175. + V4L2RequestContextHEVC *ctx = avctx->internal->hwaccel_priv_data;
  3176. + V4L2RequestDescriptor *req = (V4L2RequestDescriptor*)h->ref->frame->data[0];
  3177. + int ret, slice = FFMIN(controls->num_slices, MAX_SLICES - 1);
  3178. +
  3179. + if (ctx->decode_mode == V4L2_MPEG_VIDEO_HEVC_DECODE_MODE_SLICE_BASED && slice) {
  3180. + ret = v4l2_request_hevc_queue_decode(avctx, 0);
  3181. + if (ret)
  3182. + return ret;
  3183. +
  3184. + ff_v4l2_request_reset_frame(avctx, h->ref->frame);
  3185. + slice = controls->num_slices = 0;
  3186. + controls->first_slice = 0;
  3187. + }
  3188. +
  3189. + v4l2_request_hevc_fill_slice_params(h, &controls->slice_params[slice]);
  3190. +
  3191. + if (ctx->start_code == V4L2_MPEG_VIDEO_HEVC_START_CODE_ANNEX_B) {
  3192. + ret = ff_v4l2_request_append_output_buffer(avctx, h->ref->frame, nalu_slice_start_code, 3);
  3193. + if (ret)
  3194. + return ret;
  3195. + }
  3196. +
  3197. + ret = ff_v4l2_request_append_output_buffer(avctx, h->ref->frame, buffer, size);
  3198. + if (ret)
  3199. + return ret;
  3200. +
  3201. + controls->slice_params[slice].bit_size = req->output.used * 8; //FIXME
  3202. + controls->num_slices++;
  3203. + return 0;
  3204. +}
  3205. +
  3206. +static int v4l2_request_hevc_end_frame(AVCodecContext *avctx)
  3207. +{
  3208. + return v4l2_request_hevc_queue_decode(avctx, 1);
  3209. +}
  3210. +
  3211. +static int v4l2_request_hevc_set_controls(AVCodecContext *avctx)
  3212. +{
  3213. + V4L2RequestContextHEVC *ctx = avctx->internal->hwaccel_priv_data;
  3214. + int ret;
  3215. +
  3216. + struct v4l2_ext_control control[] = {
  3217. + { .id = V4L2_CID_MPEG_VIDEO_HEVC_DECODE_MODE, },
  3218. + { .id = V4L2_CID_MPEG_VIDEO_HEVC_START_CODE, },
  3219. + };
  3220. + struct v4l2_query_ext_ctrl slice_params = {
  3221. + .id = V4L2_CID_MPEG_VIDEO_HEVC_SLICE_PARAMS,
  3222. + };
  3223. +
  3224. + ctx->decode_mode = ff_v4l2_request_query_control_default_value(avctx, V4L2_CID_MPEG_VIDEO_HEVC_DECODE_MODE);
  3225. + if (ctx->decode_mode != V4L2_MPEG_VIDEO_HEVC_DECODE_MODE_SLICE_BASED &&
  3226. + ctx->decode_mode != V4L2_MPEG_VIDEO_HEVC_DECODE_MODE_FRAME_BASED) {
  3227. + av_log(avctx, AV_LOG_ERROR, "%s: unsupported decode mode, %d\n", __func__, ctx->decode_mode);
  3228. + return AVERROR(EINVAL);
  3229. + }
  3230. +
  3231. + ctx->start_code = ff_v4l2_request_query_control_default_value(avctx, V4L2_CID_MPEG_VIDEO_HEVC_START_CODE);
  3232. + if (ctx->start_code != V4L2_MPEG_VIDEO_HEVC_START_CODE_NONE &&
  3233. + ctx->start_code != V4L2_MPEG_VIDEO_HEVC_START_CODE_ANNEX_B) {
  3234. + av_log(avctx, AV_LOG_ERROR, "%s: unsupported start code, %d\n", __func__, ctx->start_code);
  3235. + return AVERROR(EINVAL);
  3236. + }
  3237. +
  3238. + ret = ff_v4l2_request_query_control(avctx, &slice_params);
  3239. + if (ret)
  3240. + return ret;
  3241. +
  3242. + ctx->max_slices = slice_params.elems;
  3243. + if (ctx->max_slices > MAX_SLICES) {
  3244. + av_log(avctx, AV_LOG_ERROR, "%s: unsupported max slices, %d\n", __func__, ctx->max_slices);
  3245. + return AVERROR(EINVAL);
  3246. + }
  3247. +
  3248. + control[0].value = ctx->decode_mode;
  3249. + control[1].value = ctx->start_code;
  3250. +
  3251. + return ff_v4l2_request_set_controls(avctx, control, FF_ARRAY_ELEMS(control));
  3252. +}
  3253. +
  3254. +static int v4l2_request_hevc_init(AVCodecContext *avctx)
  3255. +{
  3256. + const HEVCContext *h = avctx->priv_data;
  3257. + struct v4l2_ctrl_hevc_sps sps;
  3258. + int ret;
  3259. +
  3260. + struct v4l2_ext_control control[] = {
  3261. + {
  3262. + .id = V4L2_CID_MPEG_VIDEO_HEVC_SPS,
  3263. + .ptr = &sps,
  3264. + .size = sizeof(sps),
  3265. + },
  3266. + };
  3267. +
  3268. + fill_sps(&sps, h);
  3269. +
  3270. + ret = ff_v4l2_request_init(avctx, V4L2_PIX_FMT_HEVC_SLICE, 4 * 1024 * 1024, control, FF_ARRAY_ELEMS(control));
  3271. + if (ret)
  3272. + return ret;
  3273. +
  3274. + return v4l2_request_hevc_set_controls(avctx);
  3275. +}
  3276. +
  3277. +const AVHWAccel ff_hevc_v4l2request_hwaccel = {
  3278. + .name = "hevc_v4l2request",
  3279. + .type = AVMEDIA_TYPE_VIDEO,
  3280. + .id = AV_CODEC_ID_HEVC,
  3281. + .pix_fmt = AV_PIX_FMT_DRM_PRIME,
  3282. + .start_frame = v4l2_request_hevc_start_frame,
  3283. + .decode_slice = v4l2_request_hevc_decode_slice,
  3284. + .end_frame = v4l2_request_hevc_end_frame,
  3285. + .output_frame = ff_v4l2_request_output_frame,
  3286. + .frame_priv_data_size = sizeof(V4L2RequestControlsHEVC),
  3287. + .init = v4l2_request_hevc_init,
  3288. + .uninit = ff_v4l2_request_uninit,
  3289. + .priv_data_size = sizeof(V4L2RequestContextHEVC),
  3290. + .frame_params = ff_v4l2_request_frame_params,
  3291. + .caps_internal = HWACCEL_CAP_ASYNC_SAFE,
  3292. +};
  3293. diff -uNr '--exclude=.git' '--exclude=sunxi' ffmpeg_n4.2.2/libavcodec/v4l2_request_mpeg2.c ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/v4l2_request_mpeg2.c
  3294. --- ffmpeg_n4.2.2/libavcodec/v4l2_request_mpeg2.c 1969-12-31 16:00:00.000000000 -0800
  3295. +++ ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/v4l2_request_mpeg2.c 2020-05-26 03:16:39.653173493 -0700
  3296. @@ -0,0 +1,156 @@
  3297. +/*
  3298. + * This file is part of FFmpeg.
  3299. + *
  3300. + * FFmpeg is free software; you can redistribute it and/or
  3301. + * modify it under the terms of the GNU Lesser General Public
  3302. + * License as published by the Free Software Foundation; either
  3303. + * version 2.1 of the License, or (at your option) any later version.
  3304. + *
  3305. + * FFmpeg is distributed in the hope that it will be useful,
  3306. + * but WITHOUT ANY WARRANTY; without even the implied warranty of
  3307. + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  3308. + * Lesser General Public License for more details.
  3309. + *
  3310. + * You should have received a copy of the GNU Lesser General Public
  3311. + * License along with FFmpeg; if not, write to the Free Software
  3312. + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
  3313. + */
  3314. +
  3315. +#include "hwaccel.h"
  3316. +#include "mpegvideo.h"
  3317. +#include "v4l2_request.h"
  3318. +#include "mpeg2-ctrls.h"
  3319. +
  3320. +typedef struct V4L2RequestControlsMPEG2 {
  3321. + struct v4l2_ctrl_mpeg2_slice_params slice_params;
  3322. + struct v4l2_ctrl_mpeg2_quantization quantization;
  3323. +} V4L2RequestControlsMPEG2;
  3324. +
  3325. +static int v4l2_request_mpeg2_start_frame(AVCodecContext *avctx,
  3326. + av_unused const uint8_t *buffer,
  3327. + av_unused uint32_t size)
  3328. +{
  3329. + const MpegEncContext *s = avctx->priv_data;
  3330. + V4L2RequestControlsMPEG2 *controls = s->current_picture_ptr->hwaccel_picture_private;
  3331. + V4L2RequestDescriptor *req = (V4L2RequestDescriptor*)s->current_picture_ptr->f->data[0];
  3332. +
  3333. + controls->slice_params = (struct v4l2_ctrl_mpeg2_slice_params) {
  3334. + .bit_size = 0,
  3335. + .data_bit_offset = 0,
  3336. +
  3337. + /* ISO/IEC 13818-2, ITU-T Rec. H.262: Slice */
  3338. + .quantiser_scale_code = s->qscale >> 1,
  3339. +
  3340. + .sequence = {
  3341. + /* ISO/IEC 13818-2, ITU-T Rec. H.262: Sequence header */
  3342. + .horizontal_size = s->width,
  3343. + .vertical_size = s->height,
  3344. + .vbv_buffer_size = req->output.size,
  3345. +
  3346. + /* ISO/IEC 13818-2, ITU-T Rec. H.262: Sequence extension */
  3347. + .profile_and_level_indication = 0,
  3348. + .progressive_sequence = s->progressive_sequence,
  3349. + .chroma_format = s->chroma_format,
  3350. + },
  3351. +
  3352. + .picture = {
  3353. + /* ISO/IEC 13818-2, ITU-T Rec. H.262: Picture header */
  3354. + .picture_coding_type = s->pict_type,
  3355. +
  3356. + /* ISO/IEC 13818-2, ITU-T Rec. H.262: Picture coding extension */
  3357. + .f_code[0][0] = s->mpeg_f_code[0][0],
  3358. + .f_code[0][1] = s->mpeg_f_code[0][1],
  3359. + .f_code[1][0] = s->mpeg_f_code[1][0],
  3360. + .f_code[1][1] = s->mpeg_f_code[1][1],
  3361. + .intra_dc_precision = s->intra_dc_precision,
  3362. + .picture_structure = s->picture_structure,
  3363. + .top_field_first = s->top_field_first,
  3364. + .frame_pred_frame_dct = s->frame_pred_frame_dct,
  3365. + .concealment_motion_vectors = s->concealment_motion_vectors,
  3366. + .q_scale_type = s->q_scale_type,
  3367. + .intra_vlc_format = s->intra_vlc_format,
  3368. + .alternate_scan = s->alternate_scan,
  3369. + .repeat_first_field = s->repeat_first_field,
  3370. + .progressive_frame = s->progressive_frame,
  3371. + },
  3372. + };
  3373. +
  3374. + switch (s->pict_type) {
  3375. + case AV_PICTURE_TYPE_B:
  3376. + controls->slice_params.backward_ref_ts = ff_v4l2_request_get_capture_timestamp(s->next_picture.f);
  3377. + // fall-through
  3378. + case AV_PICTURE_TYPE_P:
  3379. + controls->slice_params.forward_ref_ts = ff_v4l2_request_get_capture_timestamp(s->last_picture.f);
  3380. + }
  3381. +
  3382. + controls->quantization = (struct v4l2_ctrl_mpeg2_quantization) {
  3383. + /* ISO/IEC 13818-2, ITU-T Rec. H.262: Quant matrix extension */
  3384. + .load_intra_quantiser_matrix = 1,
  3385. + .load_non_intra_quantiser_matrix = 1,
  3386. + .load_chroma_intra_quantiser_matrix = 1,
  3387. + .load_chroma_non_intra_quantiser_matrix = 1,
  3388. + };
  3389. +
  3390. + for (int i = 0; i < 64; i++) {
  3391. + int n = s->idsp.idct_permutation[ff_zigzag_direct[i]];
  3392. + controls->quantization.intra_quantiser_matrix[i] = s->intra_matrix[n];
  3393. + controls->quantization.non_intra_quantiser_matrix[i] = s->inter_matrix[n];
  3394. + controls->quantization.chroma_intra_quantiser_matrix[i] = s->chroma_intra_matrix[n];
  3395. + controls->quantization.chroma_non_intra_quantiser_matrix[i] = s->chroma_inter_matrix[n];
  3396. + }
  3397. +
  3398. + return ff_v4l2_request_reset_frame(avctx, s->current_picture_ptr->f);
  3399. +}
  3400. +
  3401. +static int v4l2_request_mpeg2_decode_slice(AVCodecContext *avctx, const uint8_t *buffer, uint32_t size)
  3402. +{
  3403. + const MpegEncContext *s = avctx->priv_data;
  3404. +
  3405. + return ff_v4l2_request_append_output_buffer(avctx, s->current_picture_ptr->f, buffer, size);
  3406. +}
  3407. +
  3408. +static int v4l2_request_mpeg2_end_frame(AVCodecContext *avctx)
  3409. +{
  3410. + const MpegEncContext *s = avctx->priv_data;
  3411. + V4L2RequestControlsMPEG2 *controls = s->current_picture_ptr->hwaccel_picture_private;
  3412. + V4L2RequestDescriptor *req = (V4L2RequestDescriptor*)s->current_picture_ptr->f->data[0];
  3413. +
  3414. + struct v4l2_ext_control control[] = {
  3415. + {
  3416. + .id = V4L2_CID_MPEG_VIDEO_MPEG2_SLICE_PARAMS,
  3417. + .ptr = &controls->slice_params,
  3418. + .size = sizeof(controls->slice_params),
  3419. + },
  3420. + {
  3421. + .id = V4L2_CID_MPEG_VIDEO_MPEG2_QUANTIZATION,
  3422. + .ptr = &controls->quantization,
  3423. + .size = sizeof(controls->quantization),
  3424. + },
  3425. + };
  3426. +
  3427. + controls->slice_params.bit_size = req->output.used * 8;
  3428. +
  3429. + return ff_v4l2_request_decode_frame(avctx, s->current_picture_ptr->f, control, FF_ARRAY_ELEMS(control));
  3430. +}
  3431. +
  3432. +static int v4l2_request_mpeg2_init(AVCodecContext *avctx)
  3433. +{
  3434. + return ff_v4l2_request_init(avctx, V4L2_PIX_FMT_MPEG2_SLICE, 1024 * 1024, NULL, 0);
  3435. +}
  3436. +
  3437. +const AVHWAccel ff_mpeg2_v4l2request_hwaccel = {
  3438. + .name = "mpeg2_v4l2request",
  3439. + .type = AVMEDIA_TYPE_VIDEO,
  3440. + .id = AV_CODEC_ID_MPEG2VIDEO,
  3441. + .pix_fmt = AV_PIX_FMT_DRM_PRIME,
  3442. + .start_frame = v4l2_request_mpeg2_start_frame,
  3443. + .decode_slice = v4l2_request_mpeg2_decode_slice,
  3444. + .end_frame = v4l2_request_mpeg2_end_frame,
  3445. + .output_frame = ff_v4l2_request_output_frame,
  3446. + .frame_priv_data_size = sizeof(V4L2RequestControlsMPEG2),
  3447. + .init = v4l2_request_mpeg2_init,
  3448. + .uninit = ff_v4l2_request_uninit,
  3449. + .priv_data_size = sizeof(V4L2RequestContext),
  3450. + .frame_params = ff_v4l2_request_frame_params,
  3451. + .caps_internal = HWACCEL_CAP_ASYNC_SAFE,
  3452. +};
  3453. diff -uNr '--exclude=.git' '--exclude=sunxi' ffmpeg_n4.2.2/libavcodec/v4l2_request_vp8.c ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/v4l2_request_vp8.c
  3454. --- ffmpeg_n4.2.2/libavcodec/v4l2_request_vp8.c 1969-12-31 16:00:00.000000000 -0800
  3455. +++ ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/v4l2_request_vp8.c 2020-05-26 03:16:39.653173493 -0700
  3456. @@ -0,0 +1,182 @@
  3457. +/*
  3458. + * This file is part of FFmpeg.
  3459. + *
  3460. + * FFmpeg is free software; you can redistribute it and/or
  3461. + * modify it under the terms of the GNU Lesser General Public
  3462. + * License as published by the Free Software Foundation; either
  3463. + * version 2.1 of the License, or (at your option) any later version.
  3464. + *
  3465. + * FFmpeg is distributed in the hope that it will be useful,
  3466. + * but WITHOUT ANY WARRANTY; without even the implied warranty of
  3467. + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  3468. + * Lesser General Public License for more details.
  3469. + *
  3470. + * You should have received a copy of the GNU Lesser General Public
  3471. + * License along with FFmpeg; if not, write to the Free Software
  3472. + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
  3473. + */
  3474. +
  3475. +#include "hwaccel.h"
  3476. +#include "v4l2_request.h"
  3477. +#include "vp8.h"
  3478. +#include "vp8-ctrls.h"
  3479. +
  3480. +typedef struct V4L2RequestControlsVP8 {
  3481. + struct v4l2_ctrl_vp8_frame_header ctrl;
  3482. +} V4L2RequestControlsVP8;
  3483. +
  3484. +static int v4l2_request_vp8_start_frame(AVCodecContext *avctx,
  3485. + av_unused const uint8_t *buffer,
  3486. + av_unused uint32_t size)
  3487. +{
  3488. + const VP8Context *s = avctx->priv_data;
  3489. + V4L2RequestControlsVP8 *controls = s->framep[VP56_FRAME_CURRENT]->hwaccel_picture_private;
  3490. +
  3491. + memset(&controls->ctrl, 0, sizeof(controls->ctrl));
  3492. + return ff_v4l2_request_reset_frame(avctx, s->framep[VP56_FRAME_CURRENT]->tf.f);
  3493. +}
  3494. +
  3495. +static int v4l2_request_vp8_end_frame(AVCodecContext *avctx)
  3496. +{
  3497. + const VP8Context *s = avctx->priv_data;
  3498. + V4L2RequestControlsVP8 *controls = s->framep[VP56_FRAME_CURRENT]->hwaccel_picture_private;
  3499. + struct v4l2_ext_control control[] = {
  3500. + {
  3501. + .id = V4L2_CID_MPEG_VIDEO_VP8_FRAME_HEADER,
  3502. + .ptr = &controls->ctrl,
  3503. + .size = sizeof(controls->ctrl),
  3504. + },
  3505. + };
  3506. +
  3507. + return ff_v4l2_request_decode_frame(avctx, s->framep[VP56_FRAME_CURRENT]->tf.f,
  3508. + control, FF_ARRAY_ELEMS(control));
  3509. +}
  3510. +
  3511. +static int v4l2_request_vp8_decode_slice(AVCodecContext *avctx,
  3512. + const uint8_t *buffer,
  3513. + uint32_t size)
  3514. +{
  3515. + const VP8Context *s = avctx->priv_data;
  3516. + V4L2RequestControlsVP8 *controls = s->framep[VP56_FRAME_CURRENT]->hwaccel_picture_private;
  3517. + struct v4l2_ctrl_vp8_frame_header *hdr = &controls->ctrl;
  3518. + const uint8_t *data = buffer + 3 + 7 * s->keyframe;
  3519. + unsigned int i, j, k;
  3520. +
  3521. + hdr->version = s->profile & 0x3;
  3522. + hdr->width = avctx->width;
  3523. + hdr->height = avctx->height;
  3524. + /* FIXME: set ->xx_scale */
  3525. + hdr->prob_skip_false = s->prob->mbskip;
  3526. + hdr->prob_intra = s->prob->intra;
  3527. + hdr->prob_gf = s->prob->golden;
  3528. + hdr->prob_last = s->prob->last;
  3529. + hdr->first_part_size = s->header_partition_size;
  3530. + hdr->first_part_header_bits = (8 * (s->coder_state_at_header_end.input - data) -
  3531. + s->coder_state_at_header_end.bit_count - 8);
  3532. + hdr->num_dct_parts = s->num_coeff_partitions;
  3533. + for (i = 0; i < 8; i++)
  3534. + hdr->dct_part_sizes[i] = s->coeff_partition_size[i];
  3535. +
  3536. + hdr->coder_state.range = s->coder_state_at_header_end.range;
  3537. + hdr->coder_state.value = s->coder_state_at_header_end.value;
  3538. + hdr->coder_state.bit_count = s->coder_state_at_header_end.bit_count;
  3539. + if (s->framep[VP56_FRAME_PREVIOUS])
  3540. + hdr->last_frame_ts = ff_v4l2_request_get_capture_timestamp(s->framep[VP56_FRAME_PREVIOUS]->tf.f);
  3541. + if (s->framep[VP56_FRAME_GOLDEN])
  3542. + hdr->golden_frame_ts = ff_v4l2_request_get_capture_timestamp(s->framep[VP56_FRAME_GOLDEN]->tf.f);
  3543. + if (s->framep[VP56_FRAME_GOLDEN2])
  3544. + hdr->alt_frame_ts = ff_v4l2_request_get_capture_timestamp(s->framep[VP56_FRAME_GOLDEN2]->tf.f);
  3545. + hdr->flags |= s->invisible ? 0 : V4L2_VP8_FRAME_HEADER_FLAG_SHOW_FRAME;
  3546. + hdr->flags |= s->mbskip_enabled ? V4L2_VP8_FRAME_HEADER_FLAG_MB_NO_SKIP_COEFF : 0;
  3547. + hdr->flags |= (s->profile & 0x4) ? V4L2_VP8_FRAME_HEADER_FLAG_EXPERIMENTAL : 0;
  3548. + hdr->flags |= s->keyframe ? V4L2_VP8_FRAME_HEADER_FLAG_KEY_FRAME : 0;
  3549. + hdr->flags |= s->sign_bias[VP56_FRAME_GOLDEN] ? V4L2_VP8_FRAME_HEADER_FLAG_SIGN_BIAS_GOLDEN : 0;
  3550. + hdr->flags |= s->sign_bias[VP56_FRAME_GOLDEN2] ? V4L2_VP8_FRAME_HEADER_FLAG_SIGN_BIAS_ALT : 0;
  3551. + hdr->segment_header.flags |= s->segmentation.enabled ? V4L2_VP8_SEGMENT_HEADER_FLAG_ENABLED : 0;
  3552. + hdr->segment_header.flags |= s->segmentation.update_map ? V4L2_VP8_SEGMENT_HEADER_FLAG_UPDATE_MAP : 0;
  3553. + hdr->segment_header.flags |= s->segmentation.update_feature_data ? V4L2_VP8_SEGMENT_HEADER_FLAG_UPDATE_FEATURE_DATA : 0;
  3554. + hdr->segment_header.flags |= s->segmentation.absolute_vals ? 0 : V4L2_VP8_SEGMENT_HEADER_FLAG_DELTA_VALUE_MODE;
  3555. + for (i = 0; i < 4; i++) {
  3556. + hdr->segment_header.quant_update[i] = s->segmentation.base_quant[i];
  3557. + hdr->segment_header.lf_update[i] = s->segmentation.filter_level[i];
  3558. + }
  3559. +
  3560. + for (i = 0; i < 3; i++)
  3561. + hdr->segment_header.segment_probs[i] = s->prob->segmentid[i];
  3562. +
  3563. + hdr->lf_header.level = s->filter.level;
  3564. + hdr->lf_header.sharpness_level = s->filter.sharpness;
  3565. + hdr->lf_header.flags |= s->lf_delta.enabled ? V4L2_VP8_LF_HEADER_ADJ_ENABLE : 0;
  3566. + hdr->lf_header.flags |= s->lf_delta.update ? V4L2_VP8_LF_HEADER_DELTA_UPDATE : 0;
  3567. + hdr->lf_header.flags |= s->filter.simple ? V4L2_VP8_LF_FILTER_TYPE_SIMPLE : 0;
  3568. + for (i = 0; i < 4; i++) {
  3569. + hdr->lf_header.ref_frm_delta[i] = s->lf_delta.ref[i];
  3570. + hdr->lf_header.mb_mode_delta[i] = s->lf_delta.mode[i + MODE_I4x4];
  3571. + }
  3572. +
  3573. + // Probabilities
  3574. + if (s->keyframe) {
  3575. + static const uint8_t keyframe_y_mode_probs[4] = {
  3576. + 145, 156, 163, 128
  3577. + };
  3578. + static const uint8_t keyframe_uv_mode_probs[3] = {
  3579. + 142, 114, 183
  3580. + };
  3581. +
  3582. + memcpy(hdr->entropy_header.y_mode_probs, keyframe_y_mode_probs, 4);
  3583. + memcpy(hdr->entropy_header.uv_mode_probs, keyframe_uv_mode_probs, 3);
  3584. + } else {
  3585. + for (i = 0; i < 4; i++)
  3586. + hdr->entropy_header.y_mode_probs[i] = s->prob->pred16x16[i];
  3587. + for (i = 0; i < 3; i++)
  3588. + hdr->entropy_header.uv_mode_probs[i] = s->prob->pred8x8c[i];
  3589. + }
  3590. + for (i = 0; i < 2; i++)
  3591. + for (j = 0; j < 19; j++)
  3592. + hdr->entropy_header.mv_probs[i][j] = s->prob->mvc[i][j];
  3593. +
  3594. + for (i = 0; i < 4; i++) {
  3595. + for (j = 0; j < 8; j++) {
  3596. + static const int coeff_bands_inverse[8] = {
  3597. + 0, 1, 2, 3, 5, 6, 4, 15
  3598. + };
  3599. + int coeff_pos = coeff_bands_inverse[j];
  3600. +
  3601. + for (k = 0; k < 3; k++) {
  3602. + memcpy(hdr->entropy_header.coeff_probs[i][j][k],
  3603. + s->prob->token[i][coeff_pos][k], 11);
  3604. + }
  3605. + }
  3606. + }
  3607. +
  3608. + hdr->quant_header.y_ac_qi = s->quant.yac_qi;
  3609. + hdr->quant_header.y_dc_delta = s->quant.ydc_delta;
  3610. + hdr->quant_header.y2_dc_delta = s->quant.y2dc_delta;
  3611. + hdr->quant_header.y2_ac_delta = s->quant.y2ac_delta;
  3612. + hdr->quant_header.uv_dc_delta = s->quant.uvdc_delta;
  3613. + hdr->quant_header.uv_ac_delta = s->quant.uvac_delta;
  3614. +
  3615. + return ff_v4l2_request_append_output_buffer(avctx, s->framep[VP56_FRAME_CURRENT]->tf.f, buffer, size);
  3616. +}
  3617. +
  3618. +static int v4l2_request_vp8_init(AVCodecContext *avctx)
  3619. +{
  3620. + return ff_v4l2_request_init(avctx, V4L2_PIX_FMT_VP8_FRAME, 2 * 1024 * 1024, NULL, 0);
  3621. +}
  3622. +
  3623. +const AVHWAccel ff_vp8_v4l2request_hwaccel = {
  3624. + .name = "vp8_v4l2request",
  3625. + .type = AVMEDIA_TYPE_VIDEO,
  3626. + .id = AV_CODEC_ID_VP8,
  3627. + .pix_fmt = AV_PIX_FMT_DRM_PRIME,
  3628. + .start_frame = v4l2_request_vp8_start_frame,
  3629. + .decode_slice = v4l2_request_vp8_decode_slice,
  3630. + .end_frame = v4l2_request_vp8_end_frame,
  3631. + .output_frame = ff_v4l2_request_output_frame,
  3632. + .frame_priv_data_size = sizeof(V4L2RequestControlsVP8),
  3633. + .init = v4l2_request_vp8_init,
  3634. + .uninit = ff_v4l2_request_uninit,
  3635. + .priv_data_size = sizeof(V4L2RequestContext),
  3636. + .frame_params = ff_v4l2_request_frame_params,
  3637. + .caps_internal = HWACCEL_CAP_ASYNC_SAFE,
  3638. +};
  3639. diff -uNr '--exclude=.git' '--exclude=sunxi' ffmpeg_n4.2.2/libavcodec/v4l2_request_vp9.c ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/v4l2_request_vp9.c
  3640. --- ffmpeg_n4.2.2/libavcodec/v4l2_request_vp9.c 1969-12-31 16:00:00.000000000 -0800
  3641. +++ ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/v4l2_request_vp9.c 2020-05-26 03:16:39.653173493 -0700
  3642. @@ -0,0 +1,354 @@
  3643. +/*
  3644. + * This file is part of FFmpeg.
  3645. + *
  3646. + * FFmpeg is free software; you can redistribute it and/or
  3647. + * modify it under the terms of the GNU Lesser General Public
  3648. + * License as published by the Free Software Foundation; either
  3649. + * version 2.1 of the License, or (at your option) any later version.
  3650. + *
  3651. + * FFmpeg is distributed in the hope that it will be useful,
  3652. + * but WITHOUT ANY WARRANTY; without even the implied warranty of
  3653. + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  3654. + * Lesser General Public License for more details.
  3655. + *
  3656. + * You should have received a copy of the GNU Lesser General Public
  3657. + * License along with FFmpeg; if not, write to the Free Software
  3658. + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
  3659. + */
  3660. +
  3661. +#include "hwaccel.h"
  3662. +#include "v4l2_request.h"
  3663. +#include "vp9dec.h"
  3664. +#include "vp9-ctrls.h"
  3665. +
  3666. +typedef struct V4L2RequestControlsVP9 {
  3667. + struct v4l2_ctrl_vp9_frame_decode_params decode_params;
  3668. +} V4L2RequestControlsVP9;
  3669. +
  3670. +static const uint8_t ff_to_v4l2_intramode[] = {
  3671. + [VERT_PRED] = V4L2_VP9_INTRA_PRED_MODE_V,
  3672. + [HOR_PRED] = V4L2_VP9_INTRA_PRED_MODE_H,
  3673. + [DC_PRED] = V4L2_VP9_INTRA_PRED_MODE_DC,
  3674. + [DIAG_DOWN_LEFT_PRED] = V4L2_VP9_INTRA_PRED_MODE_D45,
  3675. + [DIAG_DOWN_RIGHT_PRED] = V4L2_VP9_INTRA_PRED_MODE_D135,
  3676. + [VERT_RIGHT_PRED] = V4L2_VP9_INTRA_PRED_MODE_D117,
  3677. + [HOR_DOWN_PRED] = V4L2_VP9_INTRA_PRED_MODE_D153,
  3678. + [VERT_LEFT_PRED] = V4L2_VP9_INTRA_PRED_MODE_D63,
  3679. + [HOR_UP_PRED] = V4L2_VP9_INTRA_PRED_MODE_D207,
  3680. + [TM_VP8_PRED] = V4L2_VP9_INTRA_PRED_MODE_TM,
  3681. +};
  3682. +
  3683. +static int v4l2_request_vp9_set_frame_ctx(AVCodecContext *avctx, unsigned int id)
  3684. +{
  3685. + VP9Context *s = avctx->priv_data;
  3686. + struct v4l2_ctrl_vp9_frame_ctx fctx = {};
  3687. + struct v4l2_ext_control control[] = {
  3688. + {
  3689. + .id = V4L2_CID_MPEG_VIDEO_VP9_FRAME_CONTEXT(id),
  3690. + .ptr = &fctx,
  3691. + .size = sizeof(fctx),
  3692. + },
  3693. + };
  3694. +
  3695. + memcpy(fctx.probs.tx8, s->prob_ctx[id].p.tx8p, sizeof(s->prob_ctx[id].p.tx8p));
  3696. + memcpy(fctx.probs.tx16, s->prob_ctx[id].p.tx16p, sizeof(s->prob_ctx[id].p.tx16p));
  3697. + memcpy(fctx.probs.tx32, s->prob_ctx[id].p.tx32p, sizeof(s->prob_ctx[id].p.tx32p));
  3698. + memcpy(fctx.probs.coef, s->prob_ctx[id].coef, sizeof(s->prob_ctx[id].coef));
  3699. + memcpy(fctx.probs.skip, s->prob_ctx[id].p.skip, sizeof(s->prob_ctx[id].p.skip));
  3700. + memcpy(fctx.probs.inter_mode, s->prob_ctx[id].p.mv_mode, sizeof(s->prob_ctx[id].p.mv_mode));
  3701. + memcpy(fctx.probs.interp_filter, s->prob_ctx[id].p.filter, sizeof(s->prob_ctx[id].p.filter));
  3702. + memcpy(fctx.probs.is_inter, s->prob_ctx[id].p.intra, sizeof(s->prob_ctx[id].p.intra));
  3703. + memcpy(fctx.probs.comp_mode, s->prob_ctx[id].p.comp, sizeof(s->prob_ctx[id].p.comp));
  3704. + memcpy(fctx.probs.single_ref, s->prob_ctx[id].p.single_ref, sizeof(s->prob_ctx[id].p.single_ref));
  3705. + memcpy(fctx.probs.comp_ref, s->prob_ctx[id].p.comp_ref, sizeof(s->prob_ctx[id].p.comp_ref));
  3706. + memcpy(fctx.probs.y_mode, s->prob_ctx[id].p.y_mode, sizeof(s->prob_ctx[id].p.y_mode));
  3707. + for (unsigned i = 0; i < 10; i++)
  3708. + memcpy(fctx.probs.uv_mode[ff_to_v4l2_intramode[i]], s->prob_ctx[id].p.uv_mode[i], sizeof(s->prob_ctx[id].p.uv_mode[0]));
  3709. + for (unsigned i = 0; i < 4; i++)
  3710. + memcpy(fctx.probs.partition[i * 4], s->prob_ctx[id].p.partition[3 - i], sizeof(s->prob_ctx[id].p.partition[0]));
  3711. + memcpy(fctx.probs.mv.joint, s->prob_ctx[id].p.mv_joint, sizeof(s->prob_ctx[id].p.mv_joint));
  3712. + for (unsigned i = 0; i < 2; i++) {
  3713. + fctx.probs.mv.sign[i] = s->prob_ctx[id].p.mv_comp[i].sign;
  3714. + memcpy(fctx.probs.mv.class[i], s->prob_ctx[id].p.mv_comp[i].classes, sizeof(s->prob_ctx[id].p.mv_comp[0].classes));
  3715. + fctx.probs.mv.class0_bit[i] = s->prob_ctx[id].p.mv_comp[i].class0;
  3716. + memcpy(fctx.probs.mv.bits[i], s->prob_ctx[id].p.mv_comp[i].bits, sizeof(s->prob_ctx[id].p.mv_comp[0].bits));
  3717. + memcpy(fctx.probs.mv.class0_fr[i], s->prob_ctx[id].p.mv_comp[i].class0_fp, sizeof(s->prob_ctx[id].p.mv_comp[0].class0_fp));
  3718. + memcpy(fctx.probs.mv.fr[i], s->prob_ctx[id].p.mv_comp[i].fp, sizeof(s->prob_ctx[id].p.mv_comp[0].fp));
  3719. + fctx.probs.mv.class0_hp[i] = s->prob_ctx[id].p.mv_comp[i].class0_hp;
  3720. + fctx.probs.mv.hp[i] = s->prob_ctx[id].p.mv_comp[i].hp;
  3721. + }
  3722. +
  3723. + return ff_v4l2_request_set_controls(avctx, control, FF_ARRAY_ELEMS(control));
  3724. +}
  3725. +
  3726. +static int v4l2_request_vp9_get_frame_ctx(AVCodecContext *avctx, unsigned int id)
  3727. +{
  3728. + VP9Context *s = avctx->priv_data;
  3729. + struct v4l2_ctrl_vp9_frame_ctx fctx = {};
  3730. + struct v4l2_ext_control control[] = {
  3731. + {
  3732. + .id = V4L2_CID_MPEG_VIDEO_VP9_FRAME_CONTEXT(id),
  3733. + .ptr = &fctx,
  3734. + .size = sizeof(fctx),
  3735. + },
  3736. + };
  3737. +
  3738. + int ret = ff_v4l2_request_get_controls(avctx, control, FF_ARRAY_ELEMS(control));
  3739. + if (ret)
  3740. + return ret;
  3741. +
  3742. + memcpy(s->prob_ctx[id].p.tx8p, fctx.probs.tx8, sizeof(s->prob_ctx[id].p.tx8p));
  3743. + memcpy(s->prob_ctx[id].p.tx16p, fctx.probs.tx16, sizeof(s->prob_ctx[id].p.tx16p));
  3744. + memcpy(s->prob_ctx[id].p.tx32p, fctx.probs.tx32, sizeof(s->prob_ctx[id].p.tx32p));
  3745. + memcpy(s->prob_ctx[id].coef, fctx.probs.coef, sizeof(s->prob_ctx[id].coef));
  3746. + memcpy(s->prob_ctx[id].p.skip, fctx.probs.skip, sizeof(s->prob_ctx[id].p.skip));
  3747. + memcpy(s->prob_ctx[id].p.mv_mode, fctx.probs.inter_mode, sizeof(s->prob_ctx[id].p.mv_mode));
  3748. + memcpy(s->prob_ctx[id].p.filter, fctx.probs.interp_filter, sizeof(s->prob_ctx[id].p.filter));
  3749. + memcpy(s->prob_ctx[id].p.intra, fctx.probs.is_inter, sizeof(s->prob_ctx[id].p.intra));
  3750. + memcpy(s->prob_ctx[id].p.comp, fctx.probs.comp_mode, sizeof(s->prob_ctx[id].p.comp));
  3751. + memcpy(s->prob_ctx[id].p.single_ref, fctx.probs.single_ref, sizeof(s->prob_ctx[id].p.single_ref));
  3752. + memcpy(s->prob_ctx[id].p.comp_ref, fctx.probs.comp_ref, sizeof(s->prob_ctx[id].p.comp_ref));
  3753. + memcpy(s->prob_ctx[id].p.y_mode, fctx.probs.y_mode, sizeof(s->prob_ctx[id].p.y_mode));
  3754. + for (unsigned i = 0; i < 10; i++)
  3755. + memcpy(s->prob_ctx[id].p.uv_mode[i], fctx.probs.uv_mode[ff_to_v4l2_intramode[i]], sizeof(s->prob_ctx[id].p.uv_mode[0]));
  3756. + for (unsigned i = 0; i < 4; i++)
  3757. + memcpy(s->prob_ctx[id].p.partition[3 - i], fctx.probs.partition[i * 4], sizeof(s->prob_ctx[id].p.partition[0]));
  3758. + memcpy(s->prob_ctx[id].p.mv_joint, fctx.probs.mv.joint, sizeof(s->prob_ctx[id].p.mv_joint));
  3759. + for (unsigned i = 0; i < 2; i++) {
  3760. + s->prob_ctx[id].p.mv_comp[i].sign = fctx.probs.mv.sign[i];
  3761. + memcpy(s->prob_ctx[id].p.mv_comp[i].classes, fctx.probs.mv.class[i], sizeof(s->prob_ctx[id].p.mv_comp[0].classes));
  3762. + s->prob_ctx[id].p.mv_comp[i].class0 = fctx.probs.mv.class0_bit[i];
  3763. + memcpy(s->prob_ctx[id].p.mv_comp[i].bits, fctx.probs.mv.bits[i], sizeof(s->prob_ctx[id].p.mv_comp[0].bits));
  3764. + memcpy(s->prob_ctx[id].p.mv_comp[i].class0_fp, fctx.probs.mv.class0_fr[i], sizeof(s->prob_ctx[id].p.mv_comp[0].class0_fp));
  3765. + memcpy(s->prob_ctx[id].p.mv_comp[i].fp, fctx.probs.mv.fr[i], sizeof(s->prob_ctx[id].p.mv_comp[0].fp));
  3766. + s->prob_ctx[id].p.mv_comp[i].class0_hp = fctx.probs.mv.class0_hp[i];
  3767. + s->prob_ctx[id].p.mv_comp[i].hp = fctx.probs.mv.hp[i];
  3768. + }
  3769. +
  3770. + return 0;
  3771. +}
  3772. +
  3773. +static int v4l2_request_vp9_start_frame(AVCodecContext *avctx,
  3774. + av_unused const uint8_t *buffer,
  3775. + av_unused uint32_t size)
  3776. +{
  3777. + const VP9Context *s = avctx->priv_data;
  3778. + const VP9Frame *f = &s->s.frames[CUR_FRAME];
  3779. + V4L2RequestControlsVP9 *controls = f->hwaccel_picture_private;
  3780. + struct v4l2_ctrl_vp9_frame_decode_params *dec_params = &controls->decode_params;
  3781. + int ret;
  3782. +
  3783. + if (s->s.h.keyframe || s->s.h.errorres || (s->s.h.intraonly && s->s.h.resetctx == 3)) {
  3784. + for (unsigned i = 0; i < 4; i++) {
  3785. + ret = v4l2_request_vp9_set_frame_ctx(avctx, i);
  3786. + if (ret)
  3787. + return ret;
  3788. + }
  3789. + } else if (s->s.h.intraonly && s->s.h.resetctx == 2) {
  3790. + ret = v4l2_request_vp9_set_frame_ctx(avctx, s->s.h.framectxid);
  3791. + if (ret)
  3792. + return ret;
  3793. + }
  3794. +
  3795. + if (s->s.h.keyframe)
  3796. + dec_params->flags |= V4L2_VP9_FRAME_FLAG_KEY_FRAME;
  3797. + if (!s->s.h.invisible)
  3798. + dec_params->flags |= V4L2_VP9_FRAME_FLAG_SHOW_FRAME;
  3799. + if (s->s.h.errorres)
  3800. + dec_params->flags |= V4L2_VP9_FRAME_FLAG_ERROR_RESILIENT;
  3801. + if (s->s.h.intraonly)
  3802. + dec_params->flags |= V4L2_VP9_FRAME_FLAG_INTRA_ONLY;
  3803. + if (!s->s.h.keyframe && s->s.h.highprecisionmvs)
  3804. + dec_params->flags |= V4L2_VP9_FRAME_FLAG_ALLOW_HIGH_PREC_MV;
  3805. + if (s->s.h.refreshctx)
  3806. + dec_params->flags |= V4L2_VP9_FRAME_FLAG_REFRESH_FRAME_CTX;
  3807. + if (s->s.h.parallelmode)
  3808. + dec_params->flags |= V4L2_VP9_FRAME_FLAG_PARALLEL_DEC_MODE;
  3809. + if (s->ss_h)
  3810. + dec_params->flags |= V4L2_VP9_FRAME_FLAG_X_SUBSAMPLING;
  3811. + if (s->ss_v)
  3812. + dec_params->flags |= V4L2_VP9_FRAME_FLAG_Y_SUBSAMPLING;
  3813. + if (avctx->color_range == AVCOL_RANGE_JPEG)
  3814. + dec_params->flags |= V4L2_VP9_FRAME_FLAG_COLOR_RANGE_FULL_SWING;
  3815. +
  3816. + dec_params->compressed_header_size = s->s.h.compressed_header_size;
  3817. + dec_params->uncompressed_header_size = s->s.h.uncompressed_header_size;
  3818. + dec_params->profile = s->s.h.profile;
  3819. + dec_params->reset_frame_context = s->s.h.resetctx > 0 ? s->s.h.resetctx - 1 : 0;
  3820. + dec_params->frame_context_idx = s->s.h.framectxid;
  3821. + dec_params->bit_depth = s->s.h.bpp;
  3822. +
  3823. + dec_params->interpolation_filter = s->s.h.filtermode ^ (s->s.h.filtermode <= 1);
  3824. + dec_params->tile_cols_log2 = s->s.h.tiling.log2_tile_cols;
  3825. + dec_params->tile_rows_log2 = s->s.h.tiling.log2_tile_rows;
  3826. + dec_params->tx_mode = s->s.h.txfmmode;
  3827. + dec_params->reference_mode = s->s.h.comppredmode;
  3828. + dec_params->frame_width_minus_1 = s->w - 1;
  3829. + dec_params->frame_height_minus_1 = s->h - 1;
  3830. + //dec_params->render_width_minus_1 = avctx->width - 1;
  3831. + //dec_params->render_height_minus_1 = avctx->height - 1;
  3832. +
  3833. + for (unsigned i = 0; i < 3; i++) {
  3834. + const ThreadFrame *ref = &s->s.refs[s->s.h.refidx[i]];
  3835. + if (ref->f && ref->f->buf[0])
  3836. + dec_params->refs[i] = ff_v4l2_request_get_capture_timestamp(ref->f);
  3837. + }
  3838. +
  3839. + if (s->s.h.lf_delta.enabled)
  3840. + dec_params->lf.flags |= V4L2_VP9_LOOP_FILTER_FLAG_DELTA_ENABLED;
  3841. + if (s->s.h.lf_delta.updated)
  3842. + dec_params->lf.flags |= V4L2_VP9_LOOP_FILTER_FLAG_DELTA_UPDATE;
  3843. +
  3844. + dec_params->lf.level = s->s.h.filter.level;
  3845. + dec_params->lf.sharpness = s->s.h.filter.sharpness;
  3846. + for (unsigned i = 0; i < 4; i++)
  3847. + dec_params->lf.ref_deltas[i] = s->s.h.lf_delta.ref[i];
  3848. + for (unsigned i = 0; i < 2; i++)
  3849. + dec_params->lf.mode_deltas[i] = s->s.h.lf_delta.mode[i];
  3850. + for (unsigned i = 0; i < 8; i++) {
  3851. + for (unsigned j = 0; j < 4; j++)
  3852. + memcpy(dec_params->lf.level_lookup[i][j], s->s.h.segmentation.feat[i].lflvl[j], sizeof(dec_params->lf.level_lookup[0][0]));
  3853. + }
  3854. +
  3855. + dec_params->quant.base_q_idx = s->s.h.yac_qi;
  3856. + dec_params->quant.delta_q_y_dc = s->s.h.ydc_qdelta;
  3857. + dec_params->quant.delta_q_uv_dc = s->s.h.uvdc_qdelta;
  3858. + dec_params->quant.delta_q_uv_ac = s->s.h.uvac_qdelta;
  3859. +
  3860. + if (s->s.h.segmentation.enabled)
  3861. + dec_params->seg.flags |= V4L2_VP9_SEGMENTATION_FLAG_ENABLED;
  3862. + if (s->s.h.segmentation.update_map)
  3863. + dec_params->seg.flags |= V4L2_VP9_SEGMENTATION_FLAG_UPDATE_MAP;
  3864. + if (s->s.h.segmentation.temporal)
  3865. + dec_params->seg.flags |= V4L2_VP9_SEGMENTATION_FLAG_TEMPORAL_UPDATE;
  3866. + if (s->s.h.segmentation.update_data)
  3867. + dec_params->seg.flags |= V4L2_VP9_SEGMENTATION_FLAG_UPDATE_DATA;
  3868. + if (s->s.h.segmentation.absolute_vals)
  3869. + dec_params->seg.flags |= V4L2_VP9_SEGMENTATION_FLAG_ABS_OR_DELTA_UPDATE;
  3870. +
  3871. + for (unsigned i = 0; i < 7; i++)
  3872. + dec_params->seg.tree_probs[i] = s->s.h.segmentation.prob[i];
  3873. +
  3874. + if (s->s.h.segmentation.temporal) {
  3875. + for (unsigned i = 0; i < 3; i++)
  3876. + dec_params->seg.pred_probs[i] = s->s.h.segmentation.pred_prob[i];
  3877. + } else {
  3878. + memset(dec_params->seg.pred_probs, 255, sizeof(dec_params->seg.pred_probs));
  3879. + }
  3880. +
  3881. + for (unsigned i = 0; i < 8; i++) {
  3882. + if (s->s.h.segmentation.feat[i].q_enabled) {
  3883. + dec_params->seg.feature_enabled[i] |= 1 << V4L2_VP9_SEGMENT_FEATURE_QP_DELTA;
  3884. + dec_params->seg.feature_data[i][V4L2_VP9_SEGMENT_FEATURE_QP_DELTA] = s->s.h.segmentation.feat[i].q_val;
  3885. + }
  3886. +
  3887. + if (s->s.h.segmentation.feat[i].lf_enabled) {
  3888. + dec_params->seg.feature_enabled[i] |= 1 << V4L2_VP9_SEGMENT_FEATURE_LF;
  3889. + dec_params->seg.feature_data[i][V4L2_VP9_SEGMENT_FEATURE_LF] = s->s.h.segmentation.feat[i].lf_val;
  3890. + }
  3891. +
  3892. + if (s->s.h.segmentation.feat[i].ref_enabled) {
  3893. + dec_params->seg.feature_enabled[i] |= 1 << V4L2_VP9_SEGMENT_FEATURE_REF_FRAME;
  3894. + dec_params->seg.feature_data[i][V4L2_VP9_SEGMENT_FEATURE_REF_FRAME] = s->s.h.segmentation.feat[i].ref_val;
  3895. + }
  3896. +
  3897. + if (s->s.h.segmentation.feat[i].skip_enabled)
  3898. + dec_params->seg.feature_enabled[i] |= 1 << V4L2_VP9_SEGMENT_FEATURE_SKIP;
  3899. + }
  3900. +
  3901. + memcpy(dec_params->probs.tx8, s->prob.p.tx8p, sizeof(s->prob.p.tx8p));
  3902. + memcpy(dec_params->probs.tx16, s->prob.p.tx16p, sizeof(s->prob.p.tx16p));
  3903. + memcpy(dec_params->probs.tx32, s->prob.p.tx32p, sizeof(s->prob.p.tx32p));
  3904. + for (unsigned i = 0; i < 4; i++) {
  3905. + for (unsigned j = 0; j < 2; j++) {
  3906. + for (unsigned k = 0; k < 2; k++) {
  3907. + for (unsigned l = 0; l < 6; l++) {
  3908. + for (unsigned m = 0; m < 6; m++) {
  3909. + memcpy(dec_params->probs.coef[i][j][k][l][m], s->prob.coef[i][j][k][l][m], sizeof(dec_params->probs.coef[0][0][0][0][0]));
  3910. + }
  3911. + }
  3912. + }
  3913. + }
  3914. + }
  3915. + memcpy(dec_params->probs.skip, s->prob.p.skip, sizeof(s->prob.p.skip));
  3916. + memcpy(dec_params->probs.inter_mode, s->prob.p.mv_mode, sizeof(s->prob.p.mv_mode));
  3917. + memcpy(dec_params->probs.interp_filter, s->prob.p.filter, sizeof(s->prob.p.filter));
  3918. + memcpy(dec_params->probs.is_inter, s->prob.p.intra, sizeof(s->prob.p.intra));
  3919. + memcpy(dec_params->probs.comp_mode, s->prob.p.comp, sizeof(s->prob.p.comp));
  3920. + memcpy(dec_params->probs.single_ref, s->prob.p.single_ref, sizeof(s->prob.p.single_ref));
  3921. + memcpy(dec_params->probs.comp_ref, s->prob.p.comp_ref, sizeof(s->prob.p.comp_ref));
  3922. + memcpy(dec_params->probs.y_mode, s->prob.p.y_mode, sizeof(s->prob.p.y_mode));
  3923. + for (unsigned i = 0; i < 10; i++)
  3924. + memcpy(dec_params->probs.uv_mode[ff_to_v4l2_intramode[i]], s->prob.p.uv_mode[i], sizeof(s->prob.p.uv_mode[0]));
  3925. + for (unsigned i = 0; i < 4; i++)
  3926. + memcpy(dec_params->probs.partition[i * 4], s->prob.p.partition[3 - i], sizeof(s->prob.p.partition[0]));
  3927. + memcpy(dec_params->probs.mv.joint, s->prob.p.mv_joint, sizeof(s->prob.p.mv_joint));
  3928. + for (unsigned i = 0; i < 2; i++) {
  3929. + dec_params->probs.mv.sign[i] = s->prob.p.mv_comp[i].sign;
  3930. + memcpy(dec_params->probs.mv.class[i], s->prob.p.mv_comp[i].classes, sizeof(s->prob.p.mv_comp[0].classes));
  3931. + dec_params->probs.mv.class0_bit[i] = s->prob.p.mv_comp[i].class0;
  3932. + memcpy(dec_params->probs.mv.bits[i], s->prob.p.mv_comp[i].bits, sizeof(s->prob.p.mv_comp[0].bits));
  3933. + memcpy(dec_params->probs.mv.class0_fr[i], s->prob.p.mv_comp[i].class0_fp, sizeof(s->prob.p.mv_comp[0].class0_fp));
  3934. + memcpy(dec_params->probs.mv.fr[i], s->prob.p.mv_comp[i].fp, sizeof(s->prob.p.mv_comp[0].fp));
  3935. + dec_params->probs.mv.class0_hp[i] = s->prob.p.mv_comp[i].class0_hp;
  3936. + dec_params->probs.mv.hp[i] = s->prob.p.mv_comp[i].hp;
  3937. + }
  3938. +
  3939. + return ff_v4l2_request_reset_frame(avctx, f->tf.f);
  3940. +}
  3941. +
  3942. +static int v4l2_request_vp9_decode_slice(AVCodecContext *avctx, const uint8_t *buffer, uint32_t size)
  3943. +{
  3944. + const VP9Context *s = avctx->priv_data;
  3945. + const VP9Frame *f = &s->s.frames[CUR_FRAME];
  3946. +
  3947. + return ff_v4l2_request_append_output_buffer(avctx, f->tf.f, buffer, size);
  3948. +}
  3949. +
  3950. +static int v4l2_request_vp9_end_frame(AVCodecContext *avctx)
  3951. +{
  3952. + const VP9Context *s = avctx->priv_data;
  3953. + const VP9Frame *f = &s->s.frames[CUR_FRAME];
  3954. + V4L2RequestControlsVP9 *controls = f->hwaccel_picture_private;
  3955. + int ret;
  3956. +
  3957. + struct v4l2_ext_control control[] = {
  3958. + {
  3959. + .id = V4L2_CID_MPEG_VIDEO_VP9_FRAME_DECODE_PARAMS,
  3960. + .ptr = &controls->decode_params,
  3961. + .size = sizeof(controls->decode_params),
  3962. + },
  3963. + };
  3964. +
  3965. + ret = ff_v4l2_request_decode_frame(avctx, f->tf.f, control, FF_ARRAY_ELEMS(control));
  3966. + if (ret)
  3967. + return ret;
  3968. +
  3969. + if (!s->s.h.refreshctx)
  3970. + return 0;
  3971. +
  3972. + return v4l2_request_vp9_get_frame_ctx(avctx, s->s.h.framectxid);
  3973. +}
  3974. +
  3975. +static int v4l2_request_vp9_init(AVCodecContext *avctx)
  3976. +{
  3977. + // TODO: check V4L2_CID_MPEG_VIDEO_VP9_PROFILE
  3978. + return ff_v4l2_request_init(avctx, V4L2_PIX_FMT_VP9_FRAME, 3 * 1024 * 1024, NULL, 0);
  3979. +}
  3980. +
  3981. +const AVHWAccel ff_vp9_v4l2request_hwaccel = {
  3982. + .name = "vp9_v4l2request",
  3983. + .type = AVMEDIA_TYPE_VIDEO,
  3984. + .id = AV_CODEC_ID_VP9,
  3985. + .pix_fmt = AV_PIX_FMT_DRM_PRIME,
  3986. + .start_frame = v4l2_request_vp9_start_frame,
  3987. + .decode_slice = v4l2_request_vp9_decode_slice,
  3988. + .end_frame = v4l2_request_vp9_end_frame,
  3989. + .output_frame = ff_v4l2_request_output_frame,
  3990. + .frame_priv_data_size = sizeof(V4L2RequestControlsVP9),
  3991. + .init = v4l2_request_vp9_init,
  3992. + .uninit = ff_v4l2_request_uninit,
  3993. + .priv_data_size = sizeof(V4L2RequestContext),
  3994. + .frame_params = ff_v4l2_request_frame_params,
  3995. + .caps_internal = HWACCEL_CAP_ASYNC_SAFE,
  3996. +};
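
For reference, a minimal sketch (not part of the diff) of how an application opts into this hwaccel: the decoder advertises AV_PIX_FMT_DRM_PRIME through its get_format callback (see the vp9.c hunk further down), and picking that format is what activates ff_vp9_v4l2request_hwaccel. The helper names below are invented; the calls (avcodec_find_decoder, avcodec_alloc_context3, avcodec_default_get_format, avcodec_open2) are standard libavcodec API.

    #include <libavcodec/avcodec.h>

    /* Prefer DRM PRIME output so the v4l2-request hwaccel gets selected;
     * otherwise fall back to FFmpeg's default format choice. */
    static enum AVPixelFormat pick_drm_prime(AVCodecContext *avctx,
                                             const enum AVPixelFormat *fmts)
    {
        for (const enum AVPixelFormat *p = fmts; *p != AV_PIX_FMT_NONE; p++) {
            if (*p == AV_PIX_FMT_DRM_PRIME)
                return *p;
        }
        return avcodec_default_get_format(avctx, fmts);
    }

    static int open_vp9_decoder(AVCodecContext **out)
    {
        const AVCodec *codec = avcodec_find_decoder(AV_CODEC_ID_VP9);
        AVCodecContext *avctx = avcodec_alloc_context3(codec);

        if (!avctx)
            return AVERROR(ENOMEM);
        avctx->get_format = pick_drm_prime;
        *out = avctx;
        return avcodec_open2(avctx, codec, NULL);
    }
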
  3997. diff -uNr '--exclude=.git' '--exclude=sunxi' ffmpeg_n4.2.2/libavcodec/vaapi_h264.c ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/vaapi_h264.c
  3998. --- ffmpeg_n4.2.2/libavcodec/vaapi_h264.c 2020-05-21 20:25:05.563839295 -0700
  3999. +++ ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/vaapi_h264.c 2020-05-26 03:16:39.663173479 -0700
  4000. @@ -314,6 +314,11 @@
  4001. H264SliceContext *sl = &h->slice_ctx[0];
  4002. int ret;
  4003.  
  4004. + if (pic->nb_slices == 0) {
  4005. + ret = AVERROR_INVALIDDATA;
  4006. + goto finish;
  4007. + }
  4008. +
  4009. ret = ff_vaapi_decode_issue(avctx, pic);
  4010. if (ret < 0)
  4011. goto finish;
  4012. diff -uNr '--exclude=.git' '--exclude=sunxi' ffmpeg_n4.2.2/libavcodec/vp8.c ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/vp8.c
  4013. --- ffmpeg_n4.2.2/libavcodec/vp8.c 2020-05-21 20:25:05.583838911 -0700
  4014. +++ ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/vp8.c 2020-05-26 03:16:39.723173390 -0700
  4015. @@ -176,6 +176,9 @@
  4016. #if CONFIG_VP8_NVDEC_HWACCEL
  4017. AV_PIX_FMT_CUDA,
  4018. #endif
  4019. +#if CONFIG_VP8_V4L2REQUEST_HWACCEL
  4020. + AV_PIX_FMT_DRM_PRIME,
  4021. +#endif
  4022. AV_PIX_FMT_YUV420P,
  4023. AV_PIX_FMT_NONE,
  4024. };
  4025. @@ -198,7 +201,7 @@
  4026. return ret;
  4027. }
  4028.  
  4029. - if (!s->actually_webp && !is_vp7) {
  4030. + if (!s->actually_webp && !is_vp7 && s->pix_fmt == AV_PIX_FMT_NONE) {
  4031. s->pix_fmt = get_pixel_format(s);
  4032. if (s->pix_fmt < 0)
  4033. return AVERROR(EINVAL);
  4034. @@ -2982,6 +2985,9 @@
  4035. #if CONFIG_VP8_NVDEC_HWACCEL
  4036. HWACCEL_NVDEC(vp8),
  4037. #endif
  4038. +#if CONFIG_VP8_V4L2REQUEST_HWACCEL
  4039. + HWACCEL_V4L2REQUEST(vp8),
  4040. +#endif
  4041. NULL
  4042. },
  4043. };
  4044. diff -uNr '--exclude=.git' '--exclude=sunxi' ffmpeg_n4.2.2/libavcodec/vp8-ctrls.h ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/vp8-ctrls.h
  4045. --- ffmpeg_n4.2.2/libavcodec/vp8-ctrls.h 1969-12-31 16:00:00.000000000 -0800
  4046. +++ ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/vp8-ctrls.h 2020-05-26 03:16:39.723173390 -0700
  4047. @@ -0,0 +1,112 @@
  4048. +/* SPDX-License-Identifier: GPL-2.0 */
  4049. +/*
  4050. + * These are the VP8 state controls for use with stateless VP8
  4051. + * codec drivers.
  4052. + *
  4053. + * It turns out that these structs are not stable yet and will undergo
  4054. + * more changes. So keep them private until they are stable and ready to
  4055. + * become part of the official public API.
  4056. + */
  4057. +
  4058. +#ifndef _VP8_CTRLS_H_
  4059. +#define _VP8_CTRLS_H_
  4060. +
  4061. +#include <linux/types.h>
  4062. +
  4063. +#define V4L2_PIX_FMT_VP8_FRAME v4l2_fourcc('V', 'P', '8', 'F')
  4064. +
  4065. +#define V4L2_CID_MPEG_VIDEO_VP8_FRAME_HEADER (V4L2_CID_MPEG_BASE + 2000)
  4066. +#define V4L2_CTRL_TYPE_VP8_FRAME_HEADER 0x301
  4067. +
  4068. +#define V4L2_VP8_SEGMENT_HEADER_FLAG_ENABLED 0x01
  4069. +#define V4L2_VP8_SEGMENT_HEADER_FLAG_UPDATE_MAP 0x02
  4070. +#define V4L2_VP8_SEGMENT_HEADER_FLAG_UPDATE_FEATURE_DATA 0x04
  4071. +#define V4L2_VP8_SEGMENT_HEADER_FLAG_DELTA_VALUE_MODE 0x08
  4072. +
  4073. +struct v4l2_vp8_segment_header {
  4074. + __s8 quant_update[4];
  4075. + __s8 lf_update[4];
  4076. + __u8 segment_probs[3];
  4077. + __u8 padding;
  4078. + __u32 flags;
  4079. +};
  4080. +
  4081. +#define V4L2_VP8_LF_HEADER_ADJ_ENABLE 0x01
  4082. +#define V4L2_VP8_LF_HEADER_DELTA_UPDATE 0x02
  4083. +#define V4L2_VP8_LF_FILTER_TYPE_SIMPLE 0x04
  4084. +struct v4l2_vp8_loopfilter_header {
  4085. + __s8 ref_frm_delta[4];
  4086. + __s8 mb_mode_delta[4];
  4087. + __u8 sharpness_level;
  4088. + __u8 level;
  4089. + __u16 padding;
  4090. + __u32 flags;
  4091. +};
  4092. +
  4093. +struct v4l2_vp8_quantization_header {
  4094. + __u8 y_ac_qi;
  4095. + __s8 y_dc_delta;
  4096. + __s8 y2_dc_delta;
  4097. + __s8 y2_ac_delta;
  4098. + __s8 uv_dc_delta;
  4099. + __s8 uv_ac_delta;
  4100. + __u16 padding;
  4101. +};
  4102. +
  4103. +struct v4l2_vp8_entropy_header {
  4104. + __u8 coeff_probs[4][8][3][11];
  4105. + __u8 y_mode_probs[4];
  4106. + __u8 uv_mode_probs[3];
  4107. + __u8 mv_probs[2][19];
  4108. + __u8 padding[3];
  4109. +};
  4110. +
  4111. +struct v4l2_vp8_entropy_coder_state {
  4112. + __u8 range;
  4113. + __u8 value;
  4114. + __u8 bit_count;
  4115. + __u8 padding;
  4116. +};
  4117. +
  4118. +#define V4L2_VP8_FRAME_HEADER_FLAG_KEY_FRAME 0x01
  4119. +#define V4L2_VP8_FRAME_HEADER_FLAG_EXPERIMENTAL 0x02
  4120. +#define V4L2_VP8_FRAME_HEADER_FLAG_SHOW_FRAME 0x04
  4121. +#define V4L2_VP8_FRAME_HEADER_FLAG_MB_NO_SKIP_COEFF 0x08
  4122. +#define V4L2_VP8_FRAME_HEADER_FLAG_SIGN_BIAS_GOLDEN 0x10
  4123. +#define V4L2_VP8_FRAME_HEADER_FLAG_SIGN_BIAS_ALT 0x20
  4124. +
  4125. +#define VP8_FRAME_IS_KEY_FRAME(hdr) \
  4126. + (!!((hdr)->flags & V4L2_VP8_FRAME_HEADER_FLAG_KEY_FRAME))
  4127. +
  4128. +struct v4l2_ctrl_vp8_frame_header {
  4129. + struct v4l2_vp8_segment_header segment_header;
  4130. + struct v4l2_vp8_loopfilter_header lf_header;
  4131. + struct v4l2_vp8_quantization_header quant_header;
  4132. + struct v4l2_vp8_entropy_header entropy_header;
  4133. + struct v4l2_vp8_entropy_coder_state coder_state;
  4134. +
  4135. + __u16 width;
  4136. + __u16 height;
  4137. +
  4138. + __u8 horizontal_scale;
  4139. + __u8 vertical_scale;
  4140. +
  4141. + __u8 version;
  4142. + __u8 prob_skip_false;
  4143. + __u8 prob_intra;
  4144. + __u8 prob_last;
  4145. + __u8 prob_gf;
  4146. + __u8 num_dct_parts;
  4147. +
  4148. + __u32 first_part_size;
  4149. + __u32 first_part_header_bits;
  4150. + __u32 dct_part_sizes[8];
  4151. +
  4152. + __u64 last_frame_ts;
  4153. + __u64 golden_frame_ts;
  4154. + __u64 alt_frame_ts;
  4155. +
  4156. + __u64 flags;
  4157. +};
  4158. +
  4159. +#endif
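
The control defined above is meant to be attached to a media request together with the bitstream buffer. As a rough illustration of typical usage (an assumption, not part of the diff), userspace hands the parsed frame header to a stateless decoder roughly like this; video_fd is the decoder's V4L2 node and req_fd a request previously allocated with MEDIA_IOC_REQUEST_ALLOC:

    #include <sys/ioctl.h>
    #include <linux/videodev2.h>
    #include "vp8-ctrls.h"

    static int set_vp8_frame_header(int video_fd, int req_fd,
                                    const struct v4l2_ctrl_vp8_frame_header *hdr)
    {
        struct v4l2_ext_control ctrl = {
            .id   = V4L2_CID_MPEG_VIDEO_VP8_FRAME_HEADER,
            .ptr  = (void *)hdr,
            .size = sizeof(*hdr),
        };
        struct v4l2_ext_controls ctrls = {
            .which      = V4L2_CTRL_WHICH_REQUEST_VAL,
            .request_fd = req_fd,
            .count      = 1,
            .controls   = &ctrl,
        };

        /* The control only takes effect once the request is queued. */
        return ioctl(video_fd, VIDIOC_S_EXT_CTRLS, &ctrls);
    }
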
  4160. diff -uNr '--exclude=.git' '--exclude=sunxi' ffmpeg_n4.2.2/libavcodec/vp9.c ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/vp9.c
  4161. --- ffmpeg_n4.2.2/libavcodec/vp9.c 2020-05-21 20:25:05.583838911 -0700
  4162. +++ ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/vp9.c 2020-05-26 03:16:39.733173375 -0700
  4163. @@ -173,6 +173,7 @@
  4164. #define HWACCEL_MAX (CONFIG_VP9_DXVA2_HWACCEL + \
  4165. CONFIG_VP9_D3D11VA_HWACCEL * 2 + \
  4166. CONFIG_VP9_NVDEC_HWACCEL + \
  4167. + CONFIG_VP9_V4L2REQUEST_HWACCEL + \
  4168. CONFIG_VP9_VAAPI_HWACCEL)
  4169. enum AVPixelFormat pix_fmts[HWACCEL_MAX + 2], *fmtp = pix_fmts;
  4170. VP9Context *s = avctx->priv_data;
  4171. @@ -202,6 +203,9 @@
  4172. #if CONFIG_VP9_VAAPI_HWACCEL
  4173. *fmtp++ = AV_PIX_FMT_VAAPI;
  4174. #endif
  4175. +#if CONFIG_VP9_V4L2REQUEST_HWACCEL
  4176. + *fmtp++ = AV_PIX_FMT_DRM_PRIME;
  4177. +#endif
  4178. break;
  4179. case AV_PIX_FMT_YUV420P12:
  4180. #if CONFIG_VP9_NVDEC_HWACCEL
  4181. @@ -210,6 +214,9 @@
  4182. #if CONFIG_VP9_VAAPI_HWACCEL
  4183. *fmtp++ = AV_PIX_FMT_VAAPI;
  4184. #endif
  4185. +#if CONFIG_VP9_V4L2REQUEST_HWACCEL
  4186. + *fmtp++ = AV_PIX_FMT_DRM_PRIME;
  4187. +#endif
  4188. break;
  4189. }
  4190.  
  4191. @@ -672,7 +679,8 @@
  4192. get_bits(&s->gb, 8) : 255;
  4193. }
  4194.  
  4195. - if (get_bits1(&s->gb)) {
  4196. + s->s.h.segmentation.update_data = get_bits1(&s->gb);
  4197. + if (s->s.h.segmentation.update_data) {
  4198. s->s.h.segmentation.absolute_vals = get_bits1(&s->gb);
  4199. for (i = 0; i < 8; i++) {
  4200. if ((s->s.h.segmentation.feat[i].q_enabled = get_bits1(&s->gb)))
  4201. @@ -1817,6 +1825,9 @@
  4202. #if CONFIG_VP9_VAAPI_HWACCEL
  4203. HWACCEL_VAAPI(vp9),
  4204. #endif
  4205. +#if CONFIG_VP9_V4L2REQUEST_HWACCEL
  4206. + HWACCEL_V4L2REQUEST(vp9),
  4207. +#endif
  4208. NULL
  4209. },
  4210. };
  4211. diff -uNr '--exclude=.git' '--exclude=sunxi' ffmpeg_n4.2.2/libavcodec/vp9-ctrls.h ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/vp9-ctrls.h
  4212. --- ffmpeg_n4.2.2/libavcodec/vp9-ctrls.h 1969-12-31 16:00:00.000000000 -0800
  4213. +++ ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/vp9-ctrls.h 2020-05-26 03:16:39.733173375 -0700
  4214. @@ -0,0 +1,485 @@
  4215. +/* SPDX-License-Identifier: GPL-2.0 */
  4216. +/*
  4217. + * These are the VP9 state controls for use with stateless VP9
  4218. + * codec drivers.
  4219. + *
  4220. + * It turns out that these structs are not stable yet and will undergo
  4221. + * more changes. So keep them private until they are stable and ready to
  4222. + * become part of the official public API.
  4223. + */
  4224. +
  4225. +#ifndef _VP9_CTRLS_H_
  4226. +#define _VP9_CTRLS_H_
  4227. +
  4228. +#include <linux/types.h>
  4229. +
  4230. +#define V4L2_PIX_FMT_VP9_FRAME v4l2_fourcc('V', 'P', '9', 'F')
  4231. +
  4232. +#define V4L2_CID_MPEG_VIDEO_VP9_FRAME_CONTEXT(i) (V4L2_CID_MPEG_BASE + 4000 + (i))
  4233. +#define V4L2_CID_MPEG_VIDEO_VP9_FRAME_DECODE_PARAMS (V4L2_CID_MPEG_BASE + 4004)
  4234. +#define V4L2_CTRL_TYPE_VP9_FRAME_CONTEXT 0x400
  4235. +#define V4L2_CTRL_TYPE_VP9_FRAME_DECODE_PARAMS 0x404
  4236. +
  4237. +/**
  4238. + * enum v4l2_vp9_loop_filter_flags - VP9 loop filter flags
  4239. + *
  4240. + * @V4L2_VP9_LOOP_FILTER_FLAG_DELTA_ENABLED: the filter level depends on
  4241. + * the mode and reference frame used
  4242. + * to predict a block
  4243. + * @V4L2_VP9_LOOP_FILTER_FLAG_DELTA_UPDATE: the bitstream contains additional
  4244. + * syntax elements that specify which
  4245. + * mode and reference frame deltas
  4246. + * are to be updated
  4247. + *
  4248. + * Those are the flags you should pass to &v4l2_vp9_loop_filter.flags. See
  4249. + * section '7.2.8 Loop filter semantics' of the VP9 specification for more
  4250. + * details.
  4251. + */
  4252. +enum v4l2_vp9_loop_filter_flags {
  4253. + V4L2_VP9_LOOP_FILTER_FLAG_DELTA_ENABLED = 1 << 0,
  4254. + V4L2_VP9_LOOP_FILTER_FLAG_DELTA_UPDATE = 1 << 1,
  4255. +};
  4256. +
  4257. +/**
  4258. + * struct v4l2_vp9_loop_filter - VP9 loop filter parameters
  4259. + *
  4260. + * @flags: combination of V4L2_VP9_LOOP_FILTER_FLAG_* flags
  4261. + * @level: indicates the loop filter strength
  4262. + * @sharpness: indicates the sharpness level
  4263. + * @ref_deltas: contains the adjustment needed for the filter level based on
  4264. + * the chosen reference frame
  4265. + * @mode_deltas: contains the adjustment needed for the filter level based on
  4266. + * the chosen mode
  4267. + * @level_lookup: level lookup table
  4268. + *
  4269. + * This structure contains all loop filter related parameters. See sections
  4270. + * '7.2.8 Loop filter semantics' and '8.8.1 Loop filter frame init process'
  4271. + * of the VP9 specification for more details.
  4272. + */
  4273. +struct v4l2_vp9_loop_filter {
  4274. + __u8 flags;
  4275. + __u8 level;
  4276. + __u8 sharpness;
  4277. + __s8 ref_deltas[4];
  4278. + __s8 mode_deltas[2];
  4279. + __u8 level_lookup[8][4][2];
  4280. +};
  4281. +
  4282. +/**
  4283. + * struct v4l2_vp9_quantization - VP9 quantization parameters
  4284. + *
  4285. + * @base_q_idx: indicates the base frame qindex
  4286. + * @delta_q_y_dc: indicates the Y DC quantizer relative to base_q_idx
  4287. + * @delta_q_uv_dc: indicates the UV DC quantizer relative to base_q_idx
  4288. + * @delta_q_uv_ac: indicates the UV AC quantizer relative to base_q_idx
  4289. + * @padding: padding bytes to align things on 64 bits. Must be set to 0
  4290. + *
  4291. + * Encodes the quantization parameters. See section '7.2.9 Quantization params
  4292. + * syntax' of the VP9 specification for more details.
  4293. + */
  4294. +struct v4l2_vp9_quantization {
  4295. + __u8 base_q_idx;
  4296. + __s8 delta_q_y_dc;
  4297. + __s8 delta_q_uv_dc;
  4298. + __s8 delta_q_uv_ac;
  4299. + __u8 padding[4];
  4300. +};
  4301. +
  4302. +/**
  4303. + * enum v4l2_vp9_segmentation_flags - VP9 segmentation flags
  4304. + *
  4305. + * @V4L2_VP9_SEGMENTATION_FLAG_ENABLED: indicates that this frame makes use of
  4306. + * the segmentation tool
  4307. + * @V4L2_VP9_SEGMENTATION_FLAG_UPDATE_MAP: indicates that the segmentation map
  4308. + * should be updated during the
  4309. + * decoding of this frame
  4310. + * @V4L2_VP9_SEGMENTATION_FLAG_TEMPORAL_UPDATE: indicates that the updates to
  4311. + * the segmentation map are coded
  4312. + * relative to the existing
  4313. + * segmentation map
  4314. + * @V4L2_VP9_SEGMENTATION_FLAG_UPDATE_DATA: indicates that new parameters are
  4315. + * about to be specified for each
  4316. + * segment
  4317. + * @V4L2_VP9_SEGMENTATION_FLAG_ABS_OR_DELTA_UPDATE: indicates that the
  4318. + * segmentation parameters
  4319. + * represent the actual values
  4320. + * to be used
  4321. + *
  4322. + * Those are the flags you should pass to &v4l2_vp9_segmentation.flags. See
  4323. + * section '7.2.10 Segmentation params syntax' of the VP9 specification for
  4324. + * more details.
  4325. + */
  4326. +enum v4l2_vp9_segmentation_flags {
  4327. + V4L2_VP9_SEGMENTATION_FLAG_ENABLED = 1 << 0,
  4328. + V4L2_VP9_SEGMENTATION_FLAG_UPDATE_MAP = 1 << 1,
  4329. + V4L2_VP9_SEGMENTATION_FLAG_TEMPORAL_UPDATE = 1 << 2,
  4330. + V4L2_VP9_SEGMENTATION_FLAG_UPDATE_DATA = 1 << 3,
  4331. + V4L2_VP9_SEGMENTATION_FLAG_ABS_OR_DELTA_UPDATE = 1 << 4,
  4332. +};
  4333. +
  4334. +#define V4L2_VP9_SEGMENT_FEATURE_ENABLED(id) (1 << (id))
  4335. +#define V4L2_VP9_SEGMENT_FEATURE_ENABLED_MASK 0xf
  4336. +
  4337. +/**
  4338. + * enum v4l2_vp9_segment_feature - VP9 segment feature IDs
  4339. + *
  4340. + * @V4L2_VP9_SEGMENT_FEATURE_QP_DELTA: QP delta segment feature
  4341. + * @V4L2_VP9_SEGMENT_FEATURE_LF: loop filter segment feature
  4342. + * @V4L2_VP9_SEGMENT_FEATURE_REF_FRAME: reference frame segment feature
  4343. + * @V4L2_VP9_SEGMENT_FEATURE_SKIP: skip segment feature
  4344. + * @V4L2_VP9_SEGMENT_FEATURE_CNT: number of segment features
  4345. + *
  4346. + * Segment feature IDs. See section '7.2.10 Segmentation params syntax' of the
  4347. + * VP9 specification for more details.
  4348. + */
  4349. +enum v4l2_vp9_segment_feature {
  4350. + V4L2_VP9_SEGMENT_FEATURE_QP_DELTA,
  4351. + V4L2_VP9_SEGMENT_FEATURE_LF,
  4352. + V4L2_VP9_SEGMENT_FEATURE_REF_FRAME,
  4353. + V4L2_VP9_SEGMENT_FEATURE_SKIP,
  4354. + V4L2_VP9_SEGMENT_FEATURE_CNT,
  4355. +};
  4356. +
  4357. +/**
  4358. + * struct v4l2_vp9_segmentation - VP9 segmentation parameters
  4359. + *
  4360. + * @flags: combination of V4L2_VP9_SEGMENTATION_FLAG_* flags
  4361. + * @tree_probs: specifies the probability values to be used when
  4362. + * decoding a Segment-ID. See '5.15. Segmentation map'
  4363. + * section of the VP9 specification for more details.
  4364. + * @pred_probs: specifies the probability values to be used when decoding a
  4365. + * Predicted-Segment-ID. See section '6.4.14 Get segment id syntax'
  4366. + * of the VP9 specification for more details.
  4367. + * @padding: padding used to make things aligned on 64 bits. Shall be zero
  4368. + * filled
  4369. + * @feature_enabled: bitmask defining which features are enabled in each
  4370. + * segment
  4371. + * @feature_data: data attached to each feature. Data entry is only valid if
  4372. + * the feature is enabled
  4373. + *
  4374. + * Encodes the segmentation parameters. See section '7.2.10 Segmentation
  4375. + * params syntax' of the VP9 specification for more details.
  4376. + */
  4377. +struct v4l2_vp9_segmentation {
  4378. + __u8 flags;
  4379. + __u8 tree_probs[7];
  4380. + __u8 pred_probs[3];
  4381. + __u8 padding[5];
  4382. + __u8 feature_enabled[8];
  4383. + __s16 feature_data[8][4];
  4384. +};
  4385. +
  4386. +/**
  4387. + * enum v4l2_vp9_intra_prediction_mode - VP9 Intra prediction modes
  4388. + *
  4389. + * @V4L2_VP9_INTRA_PRED_DC: DC intra prediction
  4390. + * @V4L2_VP9_INTRA_PRED_MODE_V: vertical intra prediction
  4391. + * @V4L2_VP9_INTRA_PRED_MODE_H: horizontal intra prediction
  4392. + * @V4L2_VP9_INTRA_PRED_MODE_D45: D45 intra prediction
  4393. + * @V4L2_VP9_INTRA_PRED_MODE_D135: D135 intra prediction
  4394. + * @V4L2_VP9_INTRA_PRED_MODE_D117: D117 intra prediction
  4395. + * @V4L2_VP9_INTRA_PRED_MODE_D153: D153 intra prediction
  4396. + * @V4L2_VP9_INTRA_PRED_MODE_D207: D207 intra prediction
  4397. + * @V4L2_VP9_INTRA_PRED_MODE_D63: D63 intra prediction
  4398. + * @V4L2_VP9_INTRA_PRED_MODE_TM: True Motion intra prediction
  4399. + *
  4400. + * See section '7.4.5 Intra frame mode info semantics' for more details.
  4401. + */
  4402. +enum v4l2_vp9_intra_prediction_mode {
  4403. + V4L2_VP9_INTRA_PRED_MODE_DC,
  4404. + V4L2_VP9_INTRA_PRED_MODE_V,
  4405. + V4L2_VP9_INTRA_PRED_MODE_H,
  4406. + V4L2_VP9_INTRA_PRED_MODE_D45,
  4407. + V4L2_VP9_INTRA_PRED_MODE_D135,
  4408. + V4L2_VP9_INTRA_PRED_MODE_D117,
  4409. + V4L2_VP9_INTRA_PRED_MODE_D153,
  4410. + V4L2_VP9_INTRA_PRED_MODE_D207,
  4411. + V4L2_VP9_INTRA_PRED_MODE_D63,
  4412. + V4L2_VP9_INTRA_PRED_MODE_TM,
  4413. +};
  4414. +
  4415. +/**
  4416. + * struct v4l2_vp9_mv_probabilities - VP9 Motion vector probabilities
  4417. + * @joint: motion vector joint probabilities
  4418. + * @sign: motion vector sign probabilities
  4419. + * @class: motion vector class probabilities
  4420. + * @class0_bit: motion vector class0 bit probabilities
  4421. + * @bits: motion vector bits probabilities
  4422. + * @class0_fr: motion vector class0 fractional bit probabilities
  4423. + * @fr: motion vector fractional bit probabilities
  4424. + * @class0_hp: motion vector class0 high precision fractional bit probabilities
  4425. + * @hp: motion vector high precision fractional bit probabilities
  4426. + */
  4427. +struct v4l2_vp9_mv_probabilities {
  4428. + __u8 joint[3];
  4429. + __u8 sign[2];
  4430. + __u8 class[2][10];
  4431. + __u8 class0_bit[2];
  4432. + __u8 bits[2][10];
  4433. + __u8 class0_fr[2][2][3];
  4434. + __u8 fr[2][3];
  4435. + __u8 class0_hp[2];
  4436. + __u8 hp[2];
  4437. +};
  4438. +
  4439. +/**
  4440. + * struct v4l2_vp9_probabilities - VP9 Probabilities
  4441. + *
  4442. + * @tx8: TX 8x8 probabilities
  4443. + * @tx16: TX 16x16 probabilities
  4444. + * @tx32: TX 32x32 probabilities
  4445. + * @coef: coefficient probabilities
  4446. + * @skip: skip probabilities
  4447. + * @inter_mode: inter mode probabilities
  4448. + * @interp_filter: interpolation filter probabilities
  4449. + * @is_inter: is inter-block probabilities
  4450. + * @comp_mode: compound prediction mode probabilities
  4451. + * @single_ref: single ref probabilities
  4452. + * @comp_ref: compound ref probabilities
  4453. + * @y_mode: Y prediction mode probabilities
  4454. + * @uv_mode: UV prediction mode probabilities
  4455. + * @partition: partition probabilities
  4456. + * @mv: motion vector probabilities
  4457. + *
  4458. + * Structure containing most VP9 probabilities. See the VP9 specification
  4459. + * for more details.
  4460. + */
  4461. +struct v4l2_vp9_probabilities {
  4462. + __u8 tx8[2][1];
  4463. + __u8 tx16[2][2];
  4464. + __u8 tx32[2][3];
  4465. + __u8 coef[4][2][2][6][6][3];
  4466. + __u8 skip[3];
  4467. + __u8 inter_mode[7][3];
  4468. + __u8 interp_filter[4][2];
  4469. + __u8 is_inter[4];
  4470. + __u8 comp_mode[5];
  4471. + __u8 single_ref[5][2];
  4472. + __u8 comp_ref[5];
  4473. + __u8 y_mode[4][9];
  4474. + __u8 uv_mode[10][9];
  4475. + __u8 partition[16][3];
  4476. +
  4477. + struct v4l2_vp9_mv_probabilities mv;
  4478. +};
  4479. +
  4480. +/**
  4481. + * enum v4l2_vp9_reset_frame_context - Valid values for
  4482. + * &v4l2_ctrl_vp9_frame_decode_params->reset_frame_context
  4483. + *
  4484. + * @V4L2_VP9_RESET_FRAME_CTX_NONE: don't reset any frame context
  4485. + * @V4L2_VP9_RESET_FRAME_CTX_SPEC: reset the frame context pointed to by
  4486. + * &v4l2_ctrl_vp9_frame_decode_params.frame_context_idx
  4487. + * @V4L2_VP9_RESET_FRAME_CTX_ALL: reset all frame contexts
  4488. + *
  4489. + * See section '7.2 Uncompressed header semantics' of the VP9 specification
  4490. + * for more details.
  4491. + */
  4492. +enum v4l2_vp9_reset_frame_context {
  4493. + V4L2_VP9_RESET_FRAME_CTX_NONE,
  4494. + V4L2_VP9_RESET_FRAME_CTX_SPEC,
  4495. + V4L2_VP9_RESET_FRAME_CTX_ALL,
  4496. +};
  4497. +
  4498. +/**
  4499. + * enum v4l2_vp9_interpolation_filter - VP9 interpolation filter types
  4500. + *
  4501. + * @V4L2_VP9_INTERP_FILTER_8TAP: eight tap filter
  4502. + * @V4L2_VP9_INTERP_FILTER_8TAP_SMOOTH: eight tap smooth filter
  4503. + * @V4L2_VP9_INTERP_FILTER_8TAP_SHARP: eight tap sharp filter
  4504. + * @V4L2_VP9_INTERP_FILTER_BILINEAR: bilinear filter
  4505. + * @V4L2_VP9_INTERP_FILTER_SWITCHABLE: filter selection is signaled at the
  4506. + * block level
  4507. + *
  4508. + * See section '7.2.7 Interpolation filter semantics' of the VP9 specification
  4509. + * for more details.
  4510. + */
  4511. +enum v4l2_vp9_interpolation_filter {
  4512. + V4L2_VP9_INTERP_FILTER_8TAP,
  4513. + V4L2_VP9_INTERP_FILTER_8TAP_SMOOTH,
  4514. + V4L2_VP9_INTERP_FILTER_8TAP_SHARP,
  4515. + V4L2_VP9_INTERP_FILTER_BILINEAR,
  4516. + V4L2_VP9_INTERP_FILTER_SWITCHABLE,
  4517. +};
  4518. +
  4519. +/**
  4520. + * enum v4l2_vp9_reference_mode - VP9 reference modes
  4521. + *
  4522. + * @V4L2_VP9_REF_MODE_SINGLE: indicates that all the inter blocks use only a
  4523. + * single reference frame to generate motion
  4524. + * compensated prediction
  4525. + * @V4L2_VP9_REF_MODE_COMPOUND: requires all the inter blocks to use compound
  4526. + * mode. Single reference frame prediction is not
  4527. + * allowed
  4528. + * @V4L2_VP9_REF_MODE_SELECT: allows each individual inter block to select
  4529. + * between single and compound prediction modes
  4530. + *
  4531. + * See section '7.3.6 Frame reference mode semantics' of the VP9 specification
  4532. + * for more details.
  4533. + */
  4534. +enum v4l2_vp9_reference_mode {
  4535. + V4L2_VP9_REF_MODE_SINGLE,
  4536. + V4L2_VP9_REF_MODE_COMPOUND,
  4537. + V4L2_VP9_REF_MODE_SELECT,
  4538. +};
  4539. +
  4540. +/**
  4541. + * enum v4l2_vp9_tx_mode - VP9 TX modes
  4542. + *
  4543. + * @V4L2_VP9_TX_MODE_ONLY_4X4: transform size is 4x4
  4544. + * @V4L2_VP9_TX_MODE_ALLOW_8X8: transform size can be up to 8x8
  4545. + * @V4L2_VP9_TX_MODE_ALLOW_16X16: transform size can be up to 16x16
  4546. + * @V4L2_VP9_TX_MODE_ALLOW_32X32: transform size can be up to 32x32
  4547. + * @V4L2_VP9_TX_MODE_SELECT: bitstream contains transform size for each block
  4548. + *
  4549. + * See section '7.3.1 Tx mode semantics' of the VP9 specification for more
  4550. + * details.
  4551. + */
  4552. +enum v4l2_vp9_tx_mode {
  4553. + V4L2_VP9_TX_MODE_ONLY_4X4,
  4554. + V4L2_VP9_TX_MODE_ALLOW_8X8,
  4555. + V4L2_VP9_TX_MODE_ALLOW_16X16,
  4556. + V4L2_VP9_TX_MODE_ALLOW_32X32,
  4557. + V4L2_VP9_TX_MODE_SELECT,
  4558. +};
  4559. +
  4560. +/**
  4561. + * enum v4l2_vp9_ref_id - VP9 Reference frame IDs
  4562. + *
  4563. + * @V4L2_REF_ID_LAST: last reference frame
  4564. + * @V4L2_REF_ID_GOLDEN: golden reference frame
  4565. + * @V4L2_REF_ID_ALTREF: alternative reference frame
  4566. + * @V4L2_REF_ID_CNT: number of reference frames
  4567. + *
  4568. + * See section '7.4.12 Ref frames semantics' of the VP9 specification for more
  4569. + * details.
  4570. + */
  4571. +enum v4l2_vp9_ref_id {
  4572. + V4L2_REF_ID_LAST,
  4573. + V4L2_REF_ID_GOLDEN,
  4574. + V4L2_REF_ID_ALTREF,
  4575. + V4L2_REF_ID_CNT,
  4576. +};
  4577. +
  4578. +/**
  4579. + * enum v4l2_vp9_frame_flags - VP9 frame flags
  4580. + * @V4L2_VP9_FRAME_FLAG_KEY_FRAME: the frame is a key frame
  4581. + * @V4L2_VP9_FRAME_FLAG_SHOW_FRAME: the frame should be displayed
  4582. + * @V4L2_VP9_FRAME_FLAG_ERROR_RESILIENT: the decoding should be error resilient
  4583. + * @V4L2_VP9_FRAME_FLAG_INTRA_ONLY: the frame does not reference other frames
  4584. + * @V4L2_VP9_FRAME_FLAG_ALLOW_HIGH_PREC_MV: the frame may use high precision
  4585. + * motion vectors
  4586. + * @V4L2_VP9_FRAME_FLAG_REFRESH_FRAME_CTX: frame context should be updated
  4587. + * after decoding
  4588. + * @V4L2_VP9_FRAME_FLAG_PARALLEL_DEC_MODE: parallel decoding is used
  4589. + * @V4L2_VP9_FRAME_FLAG_X_SUBSAMPLING: horizontal subsampling is enabled
  4590. + * @V4L2_VP9_FRAME_FLAG_Y_SUBSAMPLING: vertical subsampling is enabled
  4591. + * @V4L2_VP9_FRAME_FLAG_COLOR_RANGE_FULL_SWING: full UV range is used
  4592. + *
  4593. + * Check the VP9 specification for more details.
  4594. + */
  4595. +enum v4l2_vp9_frame_flags {
  4596. + V4L2_VP9_FRAME_FLAG_KEY_FRAME = 1 << 0,
  4597. + V4L2_VP9_FRAME_FLAG_SHOW_FRAME = 1 << 1,
  4598. + V4L2_VP9_FRAME_FLAG_ERROR_RESILIENT = 1 << 2,
  4599. + V4L2_VP9_FRAME_FLAG_INTRA_ONLY = 1 << 3,
  4600. + V4L2_VP9_FRAME_FLAG_ALLOW_HIGH_PREC_MV = 1 << 4,
  4601. + V4L2_VP9_FRAME_FLAG_REFRESH_FRAME_CTX = 1 << 5,
  4602. + V4L2_VP9_FRAME_FLAG_PARALLEL_DEC_MODE = 1 << 6,
  4603. + V4L2_VP9_FRAME_FLAG_X_SUBSAMPLING = 1 << 7,
  4604. + V4L2_VP9_FRAME_FLAG_Y_SUBSAMPLING = 1 << 8,
  4605. + V4L2_VP9_FRAME_FLAG_COLOR_RANGE_FULL_SWING = 1 << 9,
  4606. +};
  4607. +
  4608. +#define V4L2_VP9_PROFILE_MAX 3
  4609. +
  4610. +/**
  4611. + * struct v4l2_ctrl_vp9_frame_decode_params - VP9 frame decoding control
  4612. + *
  4613. + * @flags: combination of V4L2_VP9_FRAME_FLAG_* flags
  4614. + * @compressed_header_size: compressed header size in bytes
  4615. + * @uncompressed_header_size: uncompressed header size in bytes
  4616. + * @profile: VP9 profile. Can be 0, 1, 2 or 3
  4617. + * @reset_frame_context: specifies whether the frame context should be reset
  4618. + * to default values. See &v4l2_vp9_reset_frame_context
  4619. + * for more details
  4620. + * @frame_context_idx: frame context that should be used/updated
  4621. + * @bit_depth: bits per components. Can be 8, 10 or 12. Note that not all
  4622. + * profiles support 10 and/or 12 bits depths
  4623. + * @interpolation_filter: specifies the filter selection used for performing
  4624. + * inter prediction. See &v4l2_vp9_interpolation_filter
  4625. + * for more details
  4626. + * @tile_cols_log2: specifies the base 2 logarithm of the width of each tile
  4627. + * (where the width is measured in units of 8x8 blocks).
  4628. + * Shall be less than or equal to 6
  4629. + * @tile_rows_log2: specifies the base 2 logarithm of the height of each tile
  4630. + * (where the height is measured in units of 8x8 blocks)
  4631. + * @tx_mode: specifies the TX mode. See &v4l2_vp9_tx_mode for more details
  4632. + * @reference_mode: specifies the type of inter prediction to be used. See
  4633. + * &v4l2_vp9_reference_mode for more details
  4634. + * @padding: needed to make this struct 64 bit aligned. Shall be filled with
  4635. + * zeros
  4636. + * @frame_width_minus_1: add 1 to it and you'll get the frame width expressed
  4637. + * in pixels
  4638. + * @frame_height_minus_1: add 1 to it and you'll get the frame height expressed
  4639. + * in pixels
  4640. + * @render_width_minus_1: add 1 to it and you'll get the expected render width
  4641. + * expressed in pixels. This is not used during the
  4642. + * decoding process but might be used by HW scalers to
  4643. + * prepare a frame that's ready for scanout
  4644. + * @render_height_minus_1: add 1 to it and you'll get the expected render height
  4645. + * expressed in pixels. This is not used during the
  4646. + * decoding process but might be used by HW scalers to
  4647. + * prepare a frame that's ready for scanout
  4648. + * @refs: array of reference frames. See &v4l2_vp9_ref_id for more details
  4649. + * @lf: loop filter parameters. See &v4l2_vp9_loop_filter for more details
  4650. + * @quant: quantization parameters. See &v4l2_vp9_quantization for more details
  4651. + * @seg: segmentation parameters. See &v4l2_vp9_segmentation for more details
  4652. + * @probs: probabilities. See &v4l2_vp9_probabilities for more details
  4653. + */
  4654. +struct v4l2_ctrl_vp9_frame_decode_params {
  4655. + __u32 flags;
  4656. + __u16 compressed_header_size;
  4657. + __u16 uncompressed_header_size;
  4658. + __u8 profile;
  4659. + __u8 reset_frame_context;
  4660. + __u8 frame_context_idx;
  4661. + __u8 bit_depth;
  4662. + __u8 interpolation_filter;
  4663. + __u8 tile_cols_log2;
  4664. + __u8 tile_rows_log2;
  4665. + __u8 tx_mode;
  4666. + __u8 reference_mode;
  4667. + __u8 padding[6];
  4668. + __u16 frame_width_minus_1;
  4669. + __u16 frame_height_minus_1;
  4670. + __u16 render_width_minus_1;
  4671. + __u16 render_height_minus_1;
  4672. + __u64 refs[V4L2_REF_ID_CNT];
  4673. + struct v4l2_vp9_loop_filter lf;
  4674. + struct v4l2_vp9_quantization quant;
  4675. + struct v4l2_vp9_segmentation seg;
  4676. + struct v4l2_vp9_probabilities probs;
  4677. +};
  4678. +
  4679. +#define V4L2_VP9_NUM_FRAME_CTX 4
  4680. +
  4681. +/**
  4682. + * struct v4l2_ctrl_vp9_frame_ctx - VP9 frame context control
  4683. + *
  4684. + * @probs: VP9 probabilities
  4685. + *
  4686. + * This control is accessed in both directions. The user should initialize the
  4687. + * 4 contexts with default values just after starting the stream. Then before
  4688. + * decoding a frame it should query the current frame context (the one passed
  4689. + * through &v4l2_ctrl_vp9_frame_decode_params.frame_context_idx) to initialize
  4690. + * &v4l2_ctrl_vp9_frame_decode_params.probs. The probs are then adjusted based
  4691. + * on the bitstream info and passed to the kernel. The codec should update
  4692. + * the frame context after the frame has been decoded, so that next time
  4693. + * userspace queries this context it contains the updated probabilities.
  4694. + */
  4695. +struct v4l2_ctrl_vp9_frame_ctx {
  4696. + struct v4l2_vp9_probabilities probs;
  4697. +};
  4698. +
  4699. +#endif /* _VP9_CTRLS_H_ */
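
To make the two-way frame-context handshake described above concrete, here is a small sketch of the read-back half (an assumed usage pattern, not part of the diff): after a frame flagged with V4L2_VP9_FRAME_FLAG_REFRESH_FRAME_CTX has been decoded, userspace fetches the context the driver just updated, which is what v4l2_request_vp9_get_frame_ctx() in the hwaccel above relies on.

    #include <sys/ioctl.h>
    #include <linux/videodev2.h>
    #include "vp9-ctrls.h"

    static int get_vp9_frame_ctx(int video_fd, unsigned int idx,
                                 struct v4l2_ctrl_vp9_frame_ctx *fctx)
    {
        struct v4l2_ext_control ctrl = {
            .id   = V4L2_CID_MPEG_VIDEO_VP9_FRAME_CONTEXT(idx),
            .ptr  = fctx,
            .size = sizeof(*fctx),
        };
        struct v4l2_ext_controls ctrls = {
            .which    = V4L2_CTRL_WHICH_CUR_VAL,
            .count    = 1,
            .controls = &ctrl,
        };

        /* Read back the probabilities the driver stored after decoding. */
        return ioctl(video_fd, VIDIOC_G_EXT_CTRLS, &ctrls);
    }
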
  4700. diff -uNr '--exclude=.git' '--exclude=sunxi' ffmpeg_n4.2.2/libavcodec/vp9shared.h ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/vp9shared.h
  4701. --- ffmpeg_n4.2.2/libavcodec/vp9shared.h 2020-05-21 20:25:05.593838719 -0700
  4702. +++ ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/vp9shared.h 2020-05-26 03:16:39.753173345 -0700
  4703. @@ -131,6 +131,7 @@
  4704. uint8_t temporal;
  4705. uint8_t absolute_vals;
  4706. uint8_t update_map;
  4707. + uint8_t update_data;
  4708. uint8_t prob[7];
  4709. uint8_t pred_prob[3];
  4710. struct {
  4711. diff -uNr '--exclude=.git' '--exclude=sunxi' ffmpeg_n4.2.2/libavformat/rtsp.c ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavformat/rtsp.c
  4712. --- ffmpeg_n4.2.2/libavformat/rtsp.c 2020-05-21 20:25:05.823834299 -0700
  4713. +++ ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavformat/rtsp.c 2020-05-26 03:16:40.503172231 -0700
  4714. @@ -2334,7 +2334,9 @@
  4715. RTSPStream *rtsp_st;
  4716. int size, i, err;
  4717. char *content;
  4718. + const char *p, *sp="", *sources="", *sp2, *sources2;
  4719. char url[1024];
  4720. + char sources_buf[1024];
  4721.  
  4722. if (!ff_network_init())
  4723. return AVERROR(EIO);
  4724. @@ -2360,6 +2362,16 @@
  4725. av_freep(&content);
  4726. if (err) goto fail;
  4727.  
  4728. + /* Search for sources= tag in original URL for rtp protocol only */
  4729. + if (strncmp(s->url, "rtp://", 6) == 0) {
  4730. + p = strchr(s->url, '?');
  4731. + if (p && av_find_info_tag(sources_buf, sizeof(sources_buf), "sources", p)) {
  4732. + /* av_log(s, AV_LOG_VERBOSE, "sdp_read_header found sources %s\n", sources_buf); */
  4733. + sp = sources_buf;
  4734. + sources = "&sources=";
  4735. + }
  4736. + }
  4737. +
  4738. /* open each RTP stream */
  4739. for (i = 0; i < rt->nb_rtsp_streams; i++) {
  4740. char namebuf[50];
  4741. @@ -2377,12 +2389,22 @@
  4742. av_dict_free(&opts);
  4743. goto fail;
  4744. }
  4745. +
  4746. + /* Prepare to add sources to the url to be opened.
  4747. + Otherwise the join to the source-specific multicast will be missing */
  4748. + sources2 = sources;
  4749. + sp2 = sp;
  4750. + /* ignore sources from original URL, when sources are already set in rtsp_st */
  4751. + if (rtsp_st->nb_include_source_addrs > 0)
  4752. + sources2 = sp2 = "";
  4753. +
  4754. ff_url_join(url, sizeof(url), "rtp", NULL,
  4755. namebuf, rtsp_st->sdp_port,
  4756. - "?localport=%d&ttl=%d&connect=%d&write_to_source=%d",
  4757. + "?localport=%d&ttl=%d&connect=%d&write_to_source=%d%s%s",
  4758. rtsp_st->sdp_port, rtsp_st->sdp_ttl,
  4759. rt->rtsp_flags & RTSP_FLAG_FILTER_SRC ? 1 : 0,
  4760. - rt->rtsp_flags & RTSP_FLAG_RTCP_TO_SOURCE ? 1 : 0);
  4761. + rt->rtsp_flags & RTSP_FLAG_RTCP_TO_SOURCE ? 1 : 0,
  4762. + sources2, sp2);
  4763.  
  4764. append_source_addrs(url, sizeof(url), "sources",
  4765. rtsp_st->nb_include_source_addrs,
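
The hunk above forwards a sources= filter from the original rtp:// URL to the per-stream RTP URLs so the source-specific multicast join is not lost. A tiny standalone sketch of the tag extraction it relies on (the URL is an invented example; av_find_info_tag() is the same libavutil helper used above by sdp_read_header()):

    #include <stdio.h>
    #include <string.h>
    #include <libavutil/parseutils.h>

    int main(void)
    {
        const char *url = "rtp://239.1.2.3:5004?sources=192.168.1.10";
        char sources_buf[1024];
        const char *q = strchr(url, '?');

        /* Pulls "192.168.1.10" out of the query string, as sdp_read_header()
         * now does before rebuilding each stream's rtp:// URL. */
        if (q && av_find_info_tag(sources_buf, sizeof(sources_buf), "sources", q))
            printf("restrict multicast join to source %s\n", sources_buf);
        return 0;
    }
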
  4766. diff -uNr '--exclude=.git' '--exclude=sunxi' ffmpeg_n4.2.2/libavutil/buffer.c ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavutil/buffer.c
  4767. --- ffmpeg_n4.2.2/libavutil/buffer.c 2020-05-21 20:25:05.863833530 -0700
  4768. +++ ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavutil/buffer.c 2020-05-26 03:16:40.613172068 -0700
  4769. @@ -272,6 +272,19 @@
  4770. av_freep(&pool);
  4771. }
  4772.  
  4773. +void av_buffer_pool_flush(AVBufferPool *pool)
  4774. +{
  4775. + ff_mutex_lock(&pool->mutex);
  4776. + while (pool->pool) {
  4777. + BufferPoolEntry *buf = pool->pool;
  4778. + pool->pool = buf->next;
  4779. +
  4780. + buf->free(buf->opaque, buf->data);
  4781. + av_freep(&buf);
  4782. + }
  4783. + ff_mutex_unlock(&pool->mutex);
  4784. +}
  4785. +
  4786. void av_buffer_pool_uninit(AVBufferPool **ppool)
  4787. {
  4788. AVBufferPool *pool;
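
av_buffer_pool_flush() above releases every cached (currently unused) entry of a pool without destroying the pool itself, presumably so the v4l2-request code can drop cached buffers that still pin device resources. A minimal usage sketch (the size and the surrounding function are illustrative only):

    #include <libavutil/buffer.h>

    static void flush_example(void)
    {
        AVBufferPool *pool = av_buffer_pool_init(64 * 1024, NULL);
        AVBufferRef *buf;

        if (!pool)
            return;
        buf = av_buffer_pool_get(pool);
        av_buffer_unref(&buf);        /* entry goes back on the pool's free list */
        av_buffer_pool_flush(pool);   /* free-listed entries are released now */
        av_buffer_pool_uninit(&pool); /* pool teardown works as before */
    }
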
  4789. diff -uNr '--exclude=.git' '--exclude=sunxi' ffmpeg_n4.2.2/libavutil/buffer.h ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavutil/buffer.h
  4790. --- ffmpeg_n4.2.2/libavutil/buffer.h 2020-05-21 20:25:05.863833530 -0700
  4791. +++ ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavutil/buffer.h 2020-05-26 03:16:40.613172068 -0700
  4792. @@ -267,6 +267,11 @@
  4793. void (*pool_free)(void *opaque));
  4794.  
  4795. /**
  4796. + * Free all available buffers in a buffer pool.
  4797. + */
  4798. +void av_buffer_pool_flush(AVBufferPool *pool);
  4799. +
  4800. +/**
  4801. * Mark the pool as being available for freeing. It will actually be freed only
  4802. * once all the allocated buffers associated with the pool are released. Thus it
  4803. * is safe to call this function while some of the allocated buffers are still
  4804. diff -uNr '--exclude=.git' '--exclude=sunxi' ffmpeg_n4.2.2/libavutil/hwcontext_drm.c ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavutil/hwcontext_drm.c
  4805. --- ffmpeg_n4.2.2/libavutil/hwcontext_drm.c 2020-05-21 20:25:05.873833338 -0700
  4806. +++ ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavutil/hwcontext_drm.c 2020-05-26 03:16:40.643172023 -0700
  4807. @@ -43,6 +43,11 @@
  4808. AVDRMDeviceContext *hwctx = hwdev->hwctx;
  4809. drmVersionPtr version;
  4810.  
  4811. + if (device == NULL) {
  4812. + hwctx->fd = -1;
  4813. + return 0;
  4814. + }
  4815. +
  4816. hwctx->fd = open(device, O_RDWR);
  4817. if (hwctx->fd < 0)
  4818. return AVERROR(errno);
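
With the NULL-device case added above, a DRM hwdevice context can be created without opening any /dev/dri node (hwctx->fd stays -1), presumably so the v4l2-request hwaccel can expose the decoder's DMA-BUFs as DRM PRIME frames without needing a render node. A one-call sketch using the standard libavutil API (the wrapper function is illustrative):

    #include <libavutil/hwcontext.h>

    static int create_fdless_drm_device(AVBufferRef **out)
    {
        /* device == NULL is accepted after this change; no DRM node is opened */
        return av_hwdevice_ctx_create(out, AV_HWDEVICE_TYPE_DRM, NULL, NULL, 0);
    }
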
  4819. diff -uNr '--exclude=.git' '--exclude=sunxi' ffmpeg_n4.2.2/Makefile ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/Makefile
  4820. --- ffmpeg_n4.2.2/Makefile 2020-05-21 20:25:05.143847365 -0700
  4821. +++ ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/Makefile 2020-05-26 03:16:38.363175403 -0700
  4822. @@ -117,7 +117,7 @@
  4823. .version: M=@
  4824.  
  4825. libavutil/ffversion.h .version:
  4826. - $(M)$(VERSION_SH) $(SRC_PATH) libavutil/ffversion.h $(EXTRA_VERSION)
  4827. + $(M)$(VERSION_SH) $(SRC_PATH) libavutil/ffversion.h Kodi
  4828. $(Q)touch .version
  4829.  
  4830. # force version.sh to run whenever version might have changed