- diff -uNr '--exclude=.git' '--exclude=sunxi' ffmpeg_n4.2.2/configure ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/configure
- --- ffmpeg_n4.2.2/configure 2020-05-21 20:25:05.153847173 -0700
- +++ ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/configure 2020-05-26 03:16:38.383175374 -0700
- @@ -271,6 +271,7 @@
- --enable-libtls enable LibreSSL (via libtls), needed for https support
- if openssl, gnutls or mbedtls is not used [no]
- --enable-libtwolame enable MP2 encoding via libtwolame [no]
- + --enable-libudev enable libudev [no]
- --enable-libv4l2 enable libv4l2/v4l-utils [no]
- --enable-libvidstab enable video stabilization using vid.stab [no]
- --enable-libvmaf enable vmaf filter via libvmaf [no]
- @@ -337,6 +338,7 @@
- --enable-omx-rpi enable OpenMAX IL code for Raspberry Pi [no]
- --enable-rkmpp enable Rockchip Media Process Platform code [no]
- --disable-v4l2-m2m disable V4L2 mem2mem code [autodetect]
- + --enable-v4l2-request enable V4L2 request API code [no]
- --disable-vaapi disable Video Acceleration API (mainly Unix/Intel) code [autodetect]
- --disable-vdpau disable Nvidia Video Decode and Presentation API for Unix code [autodetect]
- --disable-videotoolbox disable VideoToolbox code [autodetect]
- @@ -1797,6 +1799,7 @@
- libtesseract
- libtheora
- libtwolame
- + libudev
- libv4l2
- libvorbis
- libvpx
- @@ -1851,6 +1854,7 @@
- mmal
- omx
- opencl
- + v4l2_request
- "
- DOCUMENT_LIST="
- @@ -2873,6 +2877,7 @@
- dxva2_deps="dxva2api_h DXVA2_ConfigPictureDecode ole32 user32"
- ffnvcodec_deps_any="libdl LoadLibrary"
- nvdec_deps="ffnvcodec"
- +v4l2_request_deps="linux_videodev2_h linux_media_h v4l2_timeval_to_ns libdrm libudev"
- vaapi_x11_deps="xlib"
- videotoolbox_hwaccel_deps="videotoolbox pthreads"
- videotoolbox_hwaccel_extralibs="-framework QuartzCore"
- @@ -2890,6 +2895,8 @@
- h264_dxva2_hwaccel_select="h264_decoder"
- h264_nvdec_hwaccel_deps="nvdec"
- h264_nvdec_hwaccel_select="h264_decoder"
- +h264_v4l2request_hwaccel_deps="v4l2_request"
- +h264_v4l2request_hwaccel_select="h264_decoder"
- h264_vaapi_hwaccel_deps="vaapi"
- h264_vaapi_hwaccel_select="h264_decoder"
- h264_vdpau_hwaccel_deps="vdpau"
- @@ -2904,6 +2911,8 @@
- hevc_dxva2_hwaccel_select="hevc_decoder"
- hevc_nvdec_hwaccel_deps="nvdec"
- hevc_nvdec_hwaccel_select="hevc_decoder"
- +hevc_v4l2request_hwaccel_deps="v4l2_request"
- +hevc_v4l2request_hwaccel_select="hevc_decoder"
- hevc_vaapi_hwaccel_deps="vaapi VAPictureParameterBufferHEVC"
- hevc_vaapi_hwaccel_select="hevc_decoder"
- hevc_vdpau_hwaccel_deps="vdpau VdpPictureInfoHEVC"
- @@ -2932,6 +2941,8 @@
- mpeg2_dxva2_hwaccel_select="mpeg2video_decoder"
- mpeg2_nvdec_hwaccel_deps="nvdec"
- mpeg2_nvdec_hwaccel_select="mpeg2video_decoder"
- +mpeg2_v4l2request_hwaccel_deps="v4l2_request mpeg2_v4l2_request"
- +mpeg2_v4l2request_hwaccel_select="mpeg2video_decoder"
- mpeg2_vaapi_hwaccel_deps="vaapi"
- mpeg2_vaapi_hwaccel_select="mpeg2video_decoder"
- mpeg2_vdpau_hwaccel_deps="vdpau"
- @@ -2962,6 +2973,8 @@
- vc1_vdpau_hwaccel_select="vc1_decoder"
- vp8_nvdec_hwaccel_deps="nvdec"
- vp8_nvdec_hwaccel_select="vp8_decoder"
- +vp8_v4l2request_hwaccel_deps="v4l2_request"
- +vp8_v4l2request_hwaccel_select="vp8_decoder"
- vp8_vaapi_hwaccel_deps="vaapi"
- vp8_vaapi_hwaccel_select="vp8_decoder"
- vp9_d3d11va_hwaccel_deps="d3d11va DXVA_PicParams_VP9"
- @@ -2972,6 +2985,8 @@
- vp9_dxva2_hwaccel_select="vp9_decoder"
- vp9_nvdec_hwaccel_deps="nvdec"
- vp9_nvdec_hwaccel_select="vp9_decoder"
- +vp9_v4l2request_hwaccel_deps="v4l2_request"
- +vp9_v4l2request_hwaccel_select="vp9_decoder"
- vp9_vaapi_hwaccel_deps="vaapi VADecPictureParameterBufferVP9_bit_depth"
- vp9_vaapi_hwaccel_select="vp9_decoder"
- wmv3_d3d11va_hwaccel_select="vc1_d3d11va_hwaccel"
- @@ -6270,6 +6285,7 @@
- enabled libtwolame && require libtwolame twolame.h twolame_init -ltwolame &&
- { check_lib libtwolame twolame.h twolame_encode_buffer_float32_interleaved -ltwolame ||
- die "ERROR: libtwolame must be installed and version must be >= 0.3.10"; }
- +enabled libudev && require_pkg_config libudev libudev libudev.h udev_new
- enabled libv4l2 && require_pkg_config libv4l2 libv4l2 libv4l2.h v4l2_ioctl
- enabled libvidstab && require_pkg_config libvidstab "vidstab >= 0.98" vid.stab/libvidstab.h vsMotionDetectInit
- enabled libvmaf && require_pkg_config libvmaf "libvmaf >= 1.3.9" libvmaf.h compute_vmaf
- @@ -6365,6 +6381,10 @@
- { enabled libdrm ||
- die "ERROR: rkmpp requires --enable-libdrm"; }
- }
- +enabled v4l2_request && { enabled libdrm ||
- + die "ERROR: v4l2-request requires --enable-libdrm"; } &&
- + { enabled libudev ||
- + die "ERROR: v4l2-request requires --enable-libudev"; }
- enabled vapoursynth && require_pkg_config vapoursynth "vapoursynth-script >= 42" VSScript.h vsscript_init
- @@ -6444,6 +6464,13 @@
- check_cc vp8_v4l2_m2m linux/videodev2.h "int i = V4L2_PIX_FMT_VP8;"
- check_cc vp9_v4l2_m2m linux/videodev2.h "int i = V4L2_PIX_FMT_VP9;"
- +check_func_headers "linux/media.h linux/videodev2.h" v4l2_timeval_to_ns
- +check_cc h264_v4l2_request linux/videodev2.h "int i = V4L2_PIX_FMT_H264_SLICE;"
- +check_cc hevc_v4l2_request linux/videodev2.h "int i = V4L2_PIX_FMT_HEVC_SLICE;"
- +check_cc mpeg2_v4l2_request linux/videodev2.h "int i = V4L2_PIX_FMT_MPEG2_SLICE;"
- +check_cc vp8_v4l2_request linux/videodev2.h "int i = V4L2_PIX_FMT_VP8_FRAME;"
- +check_cc vp9_v4l2_request linux/videodev2.h "int i = V4L2_PIX_FMT_VP9_FRAME;"
- +
- check_headers sys/videoio.h
- test_code cc sys/videoio.h "struct v4l2_frmsizeenum vfse; vfse.discrete.width = 0;" && enable_sanitized struct_v4l2_frmivalenum_discrete
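Note: the configure hunks above add two new options (--enable-libudev and --enable-v4l2-request) and register the per-codec *_v4l2request hwaccels against them. A minimal sketch of configuring the patched tree (assuming libdrm and libudev development packages are installed; any cross-compile or codec flags you would normally pass are omitted):

    ./configure --enable-libdrm --enable-libudev --enable-v4l2-request

configure itself enforces the dependency chain (it dies if v4l2-request is enabled without libdrm or libudev), and the V4L2_PIX_FMT_*_SLICE / *_FRAME compile checks above decide which of the per-codec request hwaccels actually get built.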
- diff -uNr '--exclude=.git' '--exclude=sunxi' ffmpeg_n4.2.2/ffbuild/common.mak ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/ffbuild/common.mak
- --- ffmpeg_n4.2.2/ffbuild/common.mak 2020-05-21 20:25:05.183846597 -0700
- +++ ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/ffbuild/common.mak 2020-05-26 03:16:38.463175255 -0700
- @@ -79,7 +79,7 @@
- %.o: %.asm
- $(COMPILE_X86ASM)
- - -$(if $(ASMSTRIPFLAGS), $(STRIP) $(ASMSTRIPFLAGS) $@)
- + $(if $(STRIP), $(if $(ASMSTRIPFLAGS), $(STRIP) $(ASMSTRIPFLAGS) $@))
- %.o: %.rc
- $(WINDRES) $(IFLAGS) --preprocessor "$(DEPWINDRES) -E -xc-header -DRC_INVOKED $(CC_DEPFLAGS)" -o $@ $<
- diff -uNr '--exclude=.git' '--exclude=sunxi' ffmpeg_n4.2.2/ffbuild/version.sh ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/ffbuild/version.sh
- --- ffmpeg_n4.2.2/ffbuild/version.sh 2020-05-21 20:25:05.183846597 -0700
- +++ ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/ffbuild/version.sh 2020-05-26 03:16:38.463175255 -0700
- @@ -2,6 +2,7 @@
- # Usage: version.sh <ffmpeg-root-dir> <output-version.h> <extra-version>
- +if [ -d $1/.git ]; then # only check for a git rev, if the src tree is in a git repo
- # check for git short hash
- if ! test "$revision"; then
- if (cd "$1" && grep git RELEASE 2> /dev/null >/dev/null) ; then
- @@ -27,6 +28,7 @@
- git_hash="${srcdir##*-}";;
- esac
- fi
- +fi
- # no revision number found
- test "$revision" || revision=$(cd "$1" && cat RELEASE 2> /dev/null)
- diff -uNr '--exclude=.git' '--exclude=sunxi' ffmpeg_n4.2.2/libavcodec/avcodec.h ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/avcodec.h
- --- ffmpeg_n4.2.2/libavcodec/avcodec.h 2020-05-21 20:25:05.263845060 -0700
- +++ ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/avcodec.h 2020-05-26 03:16:38.703174900 -0700
- @@ -3750,6 +3750,11 @@
- int (*end_frame)(AVCodecContext *avctx);
- /**
- + * Called when frame is returned to api user.
- + */
- + int (*output_frame)(AVCodecContext *avctx, AVFrame *frame);
- +
- + /**
- * Size of per-frame hardware accelerator private data.
- *
- * Private data is allocated with av_mallocz() before
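Note: the avcodec.h hunk above adds an output_frame() hook to AVHWAccel, and the decode.c change right below calls it whenever a decoded frame is handed back to the API user. The v4l2-request hwaccel implementations themselves (v4l2_request_h264.c and friends) are not part of this paste; a purely hypothetical sketch of how one of them might wire the hook up, using only names that appear elsewhere in this patch, looks like:

    /* hypothetical sketch only -- the real table lives in v4l2_request_h264.c,
     * which is not included in this paste */
    const AVHWAccel ff_h264_v4l2request_hwaccel = {
        .name         = "h264_v4l2request",
        .type         = AVMEDIA_TYPE_VIDEO,
        .id           = AV_CODEC_ID_H264,
        .pix_fmt      = AV_PIX_FMT_DRM_PRIME,
        .output_frame = ff_v4l2_request_output_frame,
        /* .start_frame / .decode_slice / .end_frame / .frame_priv_data_size
         * omitted; they carry the per-codec request logic */
    };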
- diff -uNr '--exclude=.git' '--exclude=sunxi' ffmpeg_n4.2.2/libavcodec/decode.c ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/decode.c
- --- ffmpeg_n4.2.2/libavcodec/decode.c 2020-05-21 20:25:05.293844483 -0700
- +++ ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/decode.c 2020-05-26 03:16:38.813174738 -0700
- @@ -450,6 +450,9 @@
- emms_c();
- actual_got_frame = got_frame;
- + if (got_frame && avctx->hwaccel && avctx->hwaccel->output_frame)
- + avctx->hwaccel->output_frame(avctx, frame);
- +
- if (avctx->codec->type == AVMEDIA_TYPE_VIDEO) {
- if (frame->flags & AV_FRAME_FLAG_DISCARD)
- got_frame = 0;
- diff -uNr '--exclude=.git' '--exclude=sunxi' ffmpeg_n4.2.2/libavcodec/dxva2.c ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/dxva2.c
- --- ffmpeg_n4.2.2/libavcodec/dxva2.c 2020-05-21 20:25:05.303844291 -0700
- +++ ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/dxva2.c 2020-05-26 03:16:38.853174679 -0700
- @@ -771,16 +771,18 @@
- #if CONFIG_D3D11VA
- if (avctx->pix_fmt == AV_PIX_FMT_D3D11)
- return (intptr_t)frame->data[1];
- - if (avctx->pix_fmt == AV_PIX_FMT_D3D11VA_VLD) {
- + if (avctx->pix_fmt == AV_PIX_FMT_D3D11VA_VLD && surface) {
- D3D11_VIDEO_DECODER_OUTPUT_VIEW_DESC viewDesc;
- ID3D11VideoDecoderOutputView_GetDesc((ID3D11VideoDecoderOutputView*) surface, &viewDesc);
- return viewDesc.Texture2D.ArraySlice;
- }
- #endif
- #if CONFIG_DXVA2
- - for (i = 0; i < DXVA_CONTEXT_COUNT(avctx, ctx); i++) {
- - if (avctx->pix_fmt == AV_PIX_FMT_DXVA2_VLD && ctx->dxva2.surface[i] == surface)
- - return i;
- + if (avctx->pix_fmt == AV_PIX_FMT_DXVA2_VLD) {
- + for (i = 0; i < DXVA_CONTEXT_COUNT(avctx, ctx); i++) {
- + if (ctx->dxva2.surface[i] == surface)
- + return i;
- + }
- }
- #endif
- diff -uNr '--exclude=.git' '--exclude=sunxi' ffmpeg_n4.2.2/libavcodec/dxva2_h264.c ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/dxva2_h264.c
- --- ffmpeg_n4.2.2/libavcodec/dxva2_h264.c 2020-05-21 20:25:05.313844099 -0700
- +++ ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/dxva2_h264.c 2020-05-26 03:16:38.853174679 -0700
- @@ -504,6 +504,14 @@
- if (ctx_pic->slice_count <= 0 || ctx_pic->bitstream_size <= 0)
- return -1;
- +
- + // Wait for an I-frame before start decoding. Workaround for ATI UVD and UVD+ GPUs
- + if (!h->got_first_iframe) {
- + if (!(ctx_pic->pp.wBitFields & (1 << 15)))
- + return -1;
- + h->got_first_iframe = 1;
- + }
- +
- ret = ff_dxva2_common_end_frame(avctx, h->cur_pic_ptr->f,
- &ctx_pic->pp, sizeof(ctx_pic->pp),
- &ctx_pic->qm, sizeof(ctx_pic->qm),
- diff -uNr '--exclude=.git' '--exclude=sunxi' ffmpeg_n4.2.2/libavcodec/h264-ctrls.h ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/h264-ctrls.h
- --- ffmpeg_n4.2.2/libavcodec/h264-ctrls.h 1969-12-31 16:00:00.000000000 -0800
- +++ ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/h264-ctrls.h 2020-05-26 03:16:38.933174560 -0700
- @@ -0,0 +1,219 @@
- +/* SPDX-License-Identifier: GPL-2.0 */
- +/*
- + * These are the H.264 state controls for use with stateless H.264
- + * codec drivers.
- + *
- + * It turns out that these structs are not stable yet and will undergo
- + * more changes. So keep them private until they are stable and ready to
- + * become part of the official public API.
- + */
- +
- +#ifndef _H264_CTRLS_H_
- +#define _H264_CTRLS_H_
- +
- +#include <linux/videodev2.h>
- +
- +/*
- + * Maximum DPB size, as specified by section 'A.3.1 Level limits
- + * common to the Baseline, Main, and Extended profiles'.
- + */
- +#define V4L2_H264_NUM_DPB_ENTRIES 16
- +
- +/* Our pixel format isn't stable at the moment */
- +#define V4L2_PIX_FMT_H264_SLICE v4l2_fourcc('S', '2', '6', '4') /* H264 parsed slices */
- +
- +/*
- + * This is put insanely high to avoid conflicting with controls that
- + * would be added during the phase where those controls are not
- + * stable. It should be fixed eventually.
- + */
- +#define V4L2_CID_MPEG_VIDEO_H264_SPS (V4L2_CID_MPEG_BASE+1000)
- +#define V4L2_CID_MPEG_VIDEO_H264_PPS (V4L2_CID_MPEG_BASE+1001)
- +#define V4L2_CID_MPEG_VIDEO_H264_SCALING_MATRIX (V4L2_CID_MPEG_BASE+1002)
- +#define V4L2_CID_MPEG_VIDEO_H264_SLICE_PARAMS (V4L2_CID_MPEG_BASE+1003)
- +#define V4L2_CID_MPEG_VIDEO_H264_DECODE_PARAMS (V4L2_CID_MPEG_BASE+1004)
- +#define V4L2_CID_MPEG_VIDEO_H264_DECODE_MODE (V4L2_CID_MPEG_BASE+1005)
- +#define V4L2_CID_MPEG_VIDEO_H264_START_CODE (V4L2_CID_MPEG_BASE+1006)
- +
- +/* enum v4l2_ctrl_type type values */
- +#define V4L2_CTRL_TYPE_H264_SPS 0x0110
- +#define V4L2_CTRL_TYPE_H264_PPS 0x0111
- +#define V4L2_CTRL_TYPE_H264_SCALING_MATRIX 0x0112
- +#define V4L2_CTRL_TYPE_H264_SLICE_PARAMS 0x0113
- +#define V4L2_CTRL_TYPE_H264_DECODE_PARAMS 0x0114
- +
- +enum v4l2_mpeg_video_h264_decode_mode {
- + V4L2_MPEG_VIDEO_H264_DECODE_MODE_SLICE_BASED,
- + V4L2_MPEG_VIDEO_H264_DECODE_MODE_FRAME_BASED,
- +};
- +
- +enum v4l2_mpeg_video_h264_start_code {
- + V4L2_MPEG_VIDEO_H264_START_CODE_NONE,
- + V4L2_MPEG_VIDEO_H264_START_CODE_ANNEX_B,
- +};
- +
- +#define V4L2_H264_SPS_CONSTRAINT_SET0_FLAG 0x01
- +#define V4L2_H264_SPS_CONSTRAINT_SET1_FLAG 0x02
- +#define V4L2_H264_SPS_CONSTRAINT_SET2_FLAG 0x04
- +#define V4L2_H264_SPS_CONSTRAINT_SET3_FLAG 0x08
- +#define V4L2_H264_SPS_CONSTRAINT_SET4_FLAG 0x10
- +#define V4L2_H264_SPS_CONSTRAINT_SET5_FLAG 0x20
- +
- +#define V4L2_H264_SPS_FLAG_SEPARATE_COLOUR_PLANE 0x01
- +#define V4L2_H264_SPS_FLAG_QPPRIME_Y_ZERO_TRANSFORM_BYPASS 0x02
- +#define V4L2_H264_SPS_FLAG_DELTA_PIC_ORDER_ALWAYS_ZERO 0x04
- +#define V4L2_H264_SPS_FLAG_GAPS_IN_FRAME_NUM_VALUE_ALLOWED 0x08
- +#define V4L2_H264_SPS_FLAG_FRAME_MBS_ONLY 0x10
- +#define V4L2_H264_SPS_FLAG_MB_ADAPTIVE_FRAME_FIELD 0x20
- +#define V4L2_H264_SPS_FLAG_DIRECT_8X8_INFERENCE 0x40
- +
- +struct v4l2_ctrl_h264_sps {
- + __u8 profile_idc;
- + __u8 constraint_set_flags;
- + __u8 level_idc;
- + __u8 seq_parameter_set_id;
- + __u8 chroma_format_idc;
- + __u8 bit_depth_luma_minus8;
- + __u8 bit_depth_chroma_minus8;
- + __u8 log2_max_frame_num_minus4;
- + __u8 pic_order_cnt_type;
- + __u8 log2_max_pic_order_cnt_lsb_minus4;
- + __u8 max_num_ref_frames;
- + __u8 num_ref_frames_in_pic_order_cnt_cycle;
- + __s32 offset_for_ref_frame[255];
- + __s32 offset_for_non_ref_pic;
- + __s32 offset_for_top_to_bottom_field;
- + __u16 pic_width_in_mbs_minus1;
- + __u16 pic_height_in_map_units_minus1;
- + __u32 flags;
- +};
- +
- +#define V4L2_H264_PPS_FLAG_ENTROPY_CODING_MODE 0x0001
- +#define V4L2_H264_PPS_FLAG_BOTTOM_FIELD_PIC_ORDER_IN_FRAME_PRESENT 0x0002
- +#define V4L2_H264_PPS_FLAG_WEIGHTED_PRED 0x0004
- +#define V4L2_H264_PPS_FLAG_DEBLOCKING_FILTER_CONTROL_PRESENT 0x0008
- +#define V4L2_H264_PPS_FLAG_CONSTRAINED_INTRA_PRED 0x0010
- +#define V4L2_H264_PPS_FLAG_REDUNDANT_PIC_CNT_PRESENT 0x0020
- +#define V4L2_H264_PPS_FLAG_TRANSFORM_8X8_MODE 0x0040
- +#define V4L2_H264_PPS_FLAG_PIC_SCALING_MATRIX_PRESENT 0x0080
- +
- +struct v4l2_ctrl_h264_pps {
- + __u8 pic_parameter_set_id;
- + __u8 seq_parameter_set_id;
- + __u8 num_slice_groups_minus1;
- + __u8 num_ref_idx_l0_default_active_minus1;
- + __u8 num_ref_idx_l1_default_active_minus1;
- + __u8 weighted_bipred_idc;
- + __s8 pic_init_qp_minus26;
- + __s8 pic_init_qs_minus26;
- + __s8 chroma_qp_index_offset;
- + __s8 second_chroma_qp_index_offset;
- + __u16 flags;
- +};
- +
- +struct v4l2_ctrl_h264_scaling_matrix {
- + __u8 scaling_list_4x4[6][16];
- + __u8 scaling_list_8x8[6][64];
- +};
- +
- +struct v4l2_h264_weight_factors {
- + __s16 luma_weight[32];
- + __s16 luma_offset[32];
- + __s16 chroma_weight[32][2];
- + __s16 chroma_offset[32][2];
- +};
- +
- +struct v4l2_h264_pred_weight_table {
- + __u16 luma_log2_weight_denom;
- + __u16 chroma_log2_weight_denom;
- + struct v4l2_h264_weight_factors weight_factors[2];
- +};
- +
- +#define V4L2_H264_SLICE_TYPE_P 0
- +#define V4L2_H264_SLICE_TYPE_B 1
- +#define V4L2_H264_SLICE_TYPE_I 2
- +#define V4L2_H264_SLICE_TYPE_SP 3
- +#define V4L2_H264_SLICE_TYPE_SI 4
- +
- +#define V4L2_H264_SLICE_FLAG_FIELD_PIC 0x01
- +#define V4L2_H264_SLICE_FLAG_BOTTOM_FIELD 0x02
- +#define V4L2_H264_SLICE_FLAG_DIRECT_SPATIAL_MV_PRED 0x04
- +#define V4L2_H264_SLICE_FLAG_SP_FOR_SWITCH 0x08
- +
- +struct v4l2_ctrl_h264_slice_params {
- + /* Size in bytes, including header */
- + __u32 size;
- +
- + /* Offset in bytes to the start of slice in the OUTPUT buffer. */
- + __u32 start_byte_offset;
- +
- + /* Offset in bits to slice_data() from the beginning of this slice. */
- + __u32 header_bit_size;
- +
- + __u16 first_mb_in_slice;
- + __u8 slice_type;
- + __u8 pic_parameter_set_id;
- + __u8 colour_plane_id;
- + __u8 redundant_pic_cnt;
- + __u16 frame_num;
- + __u16 idr_pic_id;
- + __u16 pic_order_cnt_lsb;
- + __s32 delta_pic_order_cnt_bottom;
- + __s32 delta_pic_order_cnt0;
- + __s32 delta_pic_order_cnt1;
- +
- + struct v4l2_h264_pred_weight_table pred_weight_table;
- + /* Size in bits of dec_ref_pic_marking() syntax element. */
- + __u32 dec_ref_pic_marking_bit_size;
- + /* Size in bits of pic order count syntax. */
- + __u32 pic_order_cnt_bit_size;
- +
- + __u8 cabac_init_idc;
- + __s8 slice_qp_delta;
- + __s8 slice_qs_delta;
- + __u8 disable_deblocking_filter_idc;
- + __s8 slice_alpha_c0_offset_div2;
- + __s8 slice_beta_offset_div2;
- + __u8 num_ref_idx_l0_active_minus1;
- + __u8 num_ref_idx_l1_active_minus1;
- + __u32 slice_group_change_cycle;
- +
- + /*
- + * Entries on each list are indices into
- + * v4l2_ctrl_h264_decode_params.dpb[].
- + */
- + __u8 ref_pic_list0[32];
- + __u8 ref_pic_list1[32];
- +
- + __u32 flags;
- +};
- +
- +#define V4L2_H264_DPB_ENTRY_FLAG_TOP_REF 0x01
- +#define V4L2_H264_DPB_ENTRY_FLAG_BOTTOM_REF 0x02
- +#define V4L2_H264_DPB_ENTRY_FLAG_ACTIVE (0x01|0x02)
- +#define V4L2_H264_DPB_ENTRY_FLAG_LONG_TERM 0x04
- +#define V4L2_H264_DPB_ENTRY_FLAG_FIELD_PIC 0x08
- +#define V4L2_H264_DPB_ENTRY_FLAG_VALID 0x10
- +
- +struct v4l2_h264_dpb_entry {
- + __u64 reference_ts;
- + __u16 frame_num;
- + __u16 pic_num;
- + /* Note that field is indicated by v4l2_buffer.field */
- + __s32 top_field_order_cnt;
- + __s32 bottom_field_order_cnt;
- + __u32 flags; /* V4L2_H264_DPB_ENTRY_FLAG_* */
- +};
- +
- +#define V4L2_H264_DECODE_PARAM_FLAG_IDR_PIC 0x01
- +
- +struct v4l2_ctrl_h264_decode_params {
- + struct v4l2_h264_dpb_entry dpb[V4L2_H264_NUM_DPB_ENTRIES];
- + __u16 num_slices;
- + __u16 nal_ref_idc;
- + __s32 top_field_order_cnt;
- + __s32 bottom_field_order_cnt;
- + __u32 flags; /* V4L2_H264_DECODE_PARAM_FLAG_* */
- +};
- +
- +#endif
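Note: h264-ctrls.h above is a private copy of the (still unstable) kernel uAPI header; the hwaccel parses the bitstream with FFmpeg's own H.264 parser and hands the results to the kernel driver through these control structures. The code that fills them, v4l2_request_h264.c, is not included in this paste; a purely hypothetical fragment, using field names from the header above and from FFmpeg's SPS struct, illustrates the intended mapping:

    /* hypothetical illustration, not the patch's v4l2_request_h264.c */
    static void fill_sps(struct v4l2_ctrl_h264_sps *ctrl, const SPS *sps)
    {
        memset(ctrl, 0, sizeof(*ctrl));
        ctrl->profile_idc                       = sps->profile_idc;
        ctrl->level_idc                         = sps->level_idc;
        ctrl->seq_parameter_set_id              = sps->sps_id;
        ctrl->chroma_format_idc                 = sps->chroma_format_idc;
        ctrl->bit_depth_luma_minus8             = sps->bit_depth_luma - 8;
        ctrl->bit_depth_chroma_minus8           = sps->bit_depth_chroma - 8;
        ctrl->log2_max_frame_num_minus4         = sps->log2_max_frame_num - 4;
        ctrl->pic_order_cnt_type                = sps->poc_type;
        ctrl->log2_max_pic_order_cnt_lsb_minus4 = sps->log2_max_poc_lsb - 4;
        ctrl->max_num_ref_frames                = sps->ref_frame_count;
        ctrl->pic_width_in_mbs_minus1           = sps->mb_width - 1;
        ctrl->pic_height_in_map_units_minus1    = sps->mb_height - 1;
        if (sps->frame_mbs_only_flag)
            ctrl->flags |= V4L2_H264_SPS_FLAG_FRAME_MBS_ONLY;
    }

The filled control is then attached to a media request through VIDIOC_S_EXT_CTRLS with which = V4L2_CTRL_WHICH_REQUEST_VAL, which is exactly what v4l2_request_set_controls() in v4l2_request.c further down does.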
- diff -uNr '--exclude=.git' '--exclude=sunxi' ffmpeg_n4.2.2/libavcodec/h264dec.c ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/h264dec.c
- --- ffmpeg_n4.2.2/libavcodec/h264dec.c 2020-05-21 20:25:05.343843522 -0700
- +++ ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/h264dec.c 2020-05-26 03:16:38.963174516 -0700
- @@ -486,6 +486,7 @@
- h->next_outputed_poc = INT_MIN;
- h->prev_interlaced_frame = 1;
- + h->got_first_iframe = 0;
- idr(h);
- h->poc.prev_frame_num = -1;
- @@ -1081,6 +1082,9 @@
- #if CONFIG_H264_VIDEOTOOLBOX_HWACCEL
- HWACCEL_VIDEOTOOLBOX(h264),
- #endif
- +#if CONFIG_H264_V4L2REQUEST_HWACCEL
- + HWACCEL_V4L2REQUEST(h264),
- +#endif
- NULL
- },
- .caps_internal = FF_CODEC_CAP_INIT_THREADSAFE | FF_CODEC_CAP_EXPORTS_CROPPING,
- diff -uNr '--exclude=.git' '--exclude=sunxi' ffmpeg_n4.2.2/libavcodec/h264dec.h ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/h264dec.h
- --- ffmpeg_n4.2.2/libavcodec/h264dec.h 2020-05-21 20:25:05.343843522 -0700
- +++ ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/h264dec.h 2020-05-26 03:16:38.963174516 -0700
- @@ -184,6 +184,8 @@
- int slice_type_nos; ///< S free slice type (SI/SP are remapped to I/P)
- int slice_type_fixed;
- + int idr_pic_id;
- +
- int qscale;
- int chroma_qp[2]; // QPc
- int qp_thresh; ///< QP threshold to skip loopfilter
- @@ -322,11 +324,13 @@
- MMCO mmco[MAX_MMCO_COUNT];
- int nb_mmco;
- int explicit_ref_marking;
- + int ref_pic_marking_size_in_bits;
- int frame_num;
- int poc_lsb;
- int delta_poc_bottom;
- int delta_poc[2];
- + int pic_order_cnt_bit_size;
- int curr_pic_num;
- int max_pic_num;
- } H264SliceContext;
- @@ -533,6 +537,8 @@
- * slices) anymore */
- int setup_finished;
- + int got_first_iframe;
- +
- int cur_chroma_format_idc;
- int cur_bit_depth_luma;
- int16_t slice_row[MAX_SLICES]; ///< to detect when MAX_SLICES is too low
- diff -uNr '--exclude=.git' '--exclude=sunxi' ffmpeg_n4.2.2/libavcodec/h264_slice.c ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/h264_slice.c
- --- ffmpeg_n4.2.2/libavcodec/h264_slice.c 2020-05-21 20:25:05.343843522 -0700
- +++ ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/h264_slice.c 2020-05-26 03:16:38.963174516 -0700
- @@ -765,6 +765,7 @@
- #define HWACCEL_MAX (CONFIG_H264_DXVA2_HWACCEL + \
- (CONFIG_H264_D3D11VA_HWACCEL * 2) + \
- CONFIG_H264_NVDEC_HWACCEL + \
- + CONFIG_H264_V4L2REQUEST_HWACCEL + \
- CONFIG_H264_VAAPI_HWACCEL + \
- CONFIG_H264_VIDEOTOOLBOX_HWACCEL + \
- CONFIG_H264_VDPAU_HWACCEL)
- @@ -790,10 +791,17 @@
- *fmt++ = AV_PIX_FMT_GBRP10;
- } else
- *fmt++ = AV_PIX_FMT_YUV444P10;
- - } else if (CHROMA422(h))
- + } else if (CHROMA422(h)) {
- +#if CONFIG_H264_V4L2REQUEST_HWACCEL
- + *fmt++ = AV_PIX_FMT_DRM_PRIME;
- +#endif
- *fmt++ = AV_PIX_FMT_YUV422P10;
- - else
- + } else {
- +#if CONFIG_H264_V4L2REQUEST_HWACCEL
- + *fmt++ = AV_PIX_FMT_DRM_PRIME;
- +#endif
- *fmt++ = AV_PIX_FMT_YUV420P10;
- + }
- break;
- case 12:
- if (CHROMA444(h)) {
- @@ -832,6 +840,9 @@
- else
- *fmt++ = AV_PIX_FMT_YUV444P;
- } else if (CHROMA422(h)) {
- +#if CONFIG_H264_V4L2REQUEST_HWACCEL
- + *fmt++ = AV_PIX_FMT_DRM_PRIME;
- +#endif
- if (h->avctx->color_range == AVCOL_RANGE_JPEG)
- *fmt++ = AV_PIX_FMT_YUVJ422P;
- else
- @@ -850,6 +861,9 @@
- #if CONFIG_H264_VIDEOTOOLBOX_HWACCEL
- *fmt++ = AV_PIX_FMT_VIDEOTOOLBOX;
- #endif
- +#if CONFIG_H264_V4L2REQUEST_HWACCEL
- + *fmt++ = AV_PIX_FMT_DRM_PRIME;
- +#endif
- if (h->avctx->codec->pix_fmts)
- choices = h->avctx->codec->pix_fmts;
- else if (h->avctx->color_range == AVCOL_RANGE_JPEG)
- @@ -936,6 +950,7 @@
- h->first_field = 0;
- h->prev_interlaced_frame = 1;
- + h->got_first_iframe = 0;
- init_scan_tables(h);
- ret = ff_h264_alloc_tables(h);
- @@ -1731,7 +1746,7 @@
- unsigned int slice_type, tmp, i;
- int field_pic_flag, bottom_field_flag;
- int first_slice = sl == h->slice_ctx && !h->current_slice;
- - int picture_structure;
- + int picture_structure, pos;
- if (first_slice)
- av_assert0(!h->setup_finished);
- @@ -1819,8 +1834,9 @@
- }
- if (nal->type == H264_NAL_IDR_SLICE)
- - get_ue_golomb_long(&sl->gb); /* idr_pic_id */
- + sl->idr_pic_id = get_ue_golomb_long(&sl->gb);
- + pos = sl->gb.index;
- if (sps->poc_type == 0) {
- sl->poc_lsb = get_bits(&sl->gb, sps->log2_max_poc_lsb);
- @@ -1834,6 +1850,7 @@
- if (pps->pic_order_present == 1 && picture_structure == PICT_FRAME)
- sl->delta_poc[1] = get_se_golomb(&sl->gb);
- }
- + sl->pic_order_cnt_bit_size = sl->gb.index - pos;
- sl->redundant_pic_count = 0;
- if (pps->redundant_pic_cnt_present)
- @@ -1873,9 +1890,11 @@
- sl->explicit_ref_marking = 0;
- if (nal->ref_idc) {
- + int bit_pos = sl->gb.index;
- ret = ff_h264_decode_ref_pic_marking(sl, &sl->gb, nal, h->avctx);
- if (ret < 0 && (h->avctx->err_recognition & AV_EF_EXPLODE))
- return AVERROR_INVALIDDATA;
- + sl->ref_pic_marking_size_in_bits = sl->gb.index - bit_pos;
- }
- if (sl->slice_type_nos != AV_PICTURE_TYPE_I && pps->cabac) {
- diff -uNr '--exclude=.git' '--exclude=sunxi' ffmpeg_n4.2.2/libavcodec/hevc-ctrls.h ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/hevc-ctrls.h
- --- ffmpeg_n4.2.2/libavcodec/hevc-ctrls.h 1969-12-31 16:00:00.000000000 -0800
- +++ ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/hevc-ctrls.h 2020-05-26 03:16:38.973174501 -0700
- @@ -0,0 +1,238 @@
- +/* SPDX-License-Identifier: GPL-2.0 */
- +/*
- + * These are the HEVC state controls for use with stateless HEVC
- + * codec drivers.
- + *
- + * It turns out that these structs are not stable yet and will undergo
- + * more changes. So keep them private until they are stable and ready to
- + * become part of the official public API.
- + */
- +
- +#ifndef _HEVC_CTRLS_H_
- +#define _HEVC_CTRLS_H_
- +
- +#include <linux/videodev2.h>
- +
- +/* The pixel format isn't stable at the moment and will likely be renamed. */
- +#define V4L2_PIX_FMT_HEVC_SLICE v4l2_fourcc('S', '2', '6', '5') /* HEVC parsed slices */
- +
- +#define V4L2_CID_MPEG_VIDEO_HEVC_SPS (V4L2_CID_MPEG_BASE + 1008)
- +#define V4L2_CID_MPEG_VIDEO_HEVC_PPS (V4L2_CID_MPEG_BASE + 1009)
- +#define V4L2_CID_MPEG_VIDEO_HEVC_SLICE_PARAMS (V4L2_CID_MPEG_BASE + 1010)
- +#define V4L2_CID_MPEG_VIDEO_HEVC_SCALING_MATRIX (V4L2_CID_MPEG_BASE + 1011)
- +#define V4L2_CID_MPEG_VIDEO_HEVC_DECODE_MODE (V4L2_CID_MPEG_BASE + 1015)
- +#define V4L2_CID_MPEG_VIDEO_HEVC_START_CODE (V4L2_CID_MPEG_BASE + 1016)
- +
- +/* enum v4l2_ctrl_type type values */
- +#define V4L2_CTRL_TYPE_HEVC_SPS 0x0120
- +#define V4L2_CTRL_TYPE_HEVC_PPS 0x0121
- +#define V4L2_CTRL_TYPE_HEVC_SLICE_PARAMS 0x0122
- +#define V4L2_CTRL_TYPE_HEVC_SCALING_MATRIX 0x0123
- +
- +enum v4l2_mpeg_video_hevc_decode_mode {
- + V4L2_MPEG_VIDEO_HEVC_DECODE_MODE_SLICE_BASED,
- + V4L2_MPEG_VIDEO_HEVC_DECODE_MODE_FRAME_BASED,
- +};
- +
- +enum v4l2_mpeg_video_hevc_start_code {
- + V4L2_MPEG_VIDEO_HEVC_START_CODE_NONE,
- + V4L2_MPEG_VIDEO_HEVC_START_CODE_ANNEX_B,
- +};
- +
- +#define V4L2_HEVC_SLICE_TYPE_B 0
- +#define V4L2_HEVC_SLICE_TYPE_P 1
- +#define V4L2_HEVC_SLICE_TYPE_I 2
- +
- +#define V4L2_HEVC_SPS_FLAG_SEPARATE_COLOUR_PLANE (1ULL << 0)
- +#define V4L2_HEVC_SPS_FLAG_SCALING_LIST_ENABLED (1ULL << 1)
- +#define V4L2_HEVC_SPS_FLAG_AMP_ENABLED (1ULL << 2)
- +#define V4L2_HEVC_SPS_FLAG_SAMPLE_ADAPTIVE_OFFSET (1ULL << 3)
- +#define V4L2_HEVC_SPS_FLAG_PCM_ENABLED (1ULL << 4)
- +#define V4L2_HEVC_SPS_FLAG_PCM_LOOP_FILTER_DISABLED (1ULL << 5)
- +#define V4L2_HEVC_SPS_FLAG_LONG_TERM_REF_PICS_PRESENT (1ULL << 6)
- +#define V4L2_HEVC_SPS_FLAG_SPS_TEMPORAL_MVP_ENABLED (1ULL << 7)
- +#define V4L2_HEVC_SPS_FLAG_STRONG_INTRA_SMOOTHING_ENABLED (1ULL << 8)
- +
- +/* The controls are not stable at the moment and will likely be reworked. */
- +struct v4l2_ctrl_hevc_sps {
- + /* ISO/IEC 23008-2, ITU-T Rec. H.265: Sequence parameter set */
- + __u8 video_parameter_set_id;
- + __u8 seq_parameter_set_id;
- + __u8 chroma_format_idc;
- + __u16 pic_width_in_luma_samples;
- + __u16 pic_height_in_luma_samples;
- + __u8 bit_depth_luma_minus8;
- + __u8 bit_depth_chroma_minus8;
- + __u8 log2_max_pic_order_cnt_lsb_minus4;
- + __u8 sps_max_dec_pic_buffering_minus1;
- + __u8 sps_max_num_reorder_pics;
- + __u8 sps_max_latency_increase_plus1;
- + __u8 log2_min_luma_coding_block_size_minus3;
- + __u8 log2_diff_max_min_luma_coding_block_size;
- + __u8 log2_min_luma_transform_block_size_minus2;
- + __u8 log2_diff_max_min_luma_transform_block_size;
- + __u8 max_transform_hierarchy_depth_inter;
- + __u8 max_transform_hierarchy_depth_intra;
- + __u8 pcm_sample_bit_depth_luma_minus1;
- + __u8 pcm_sample_bit_depth_chroma_minus1;
- + __u8 log2_min_pcm_luma_coding_block_size_minus3;
- + __u8 log2_diff_max_min_pcm_luma_coding_block_size;
- + __u8 num_short_term_ref_pic_sets;
- + __u8 num_long_term_ref_pics_sps;
- +
- + __u8 num_slices;
- + __u8 padding[6];
- +
- + __u64 flags;
- +};
- +
- +#define V4L2_HEVC_PPS_FLAG_DEPENDENT_SLICE_SEGMENT (1ULL << 0)
- +#define V4L2_HEVC_PPS_FLAG_OUTPUT_FLAG_PRESENT (1ULL << 1)
- +#define V4L2_HEVC_PPS_FLAG_SIGN_DATA_HIDING_ENABLED (1ULL << 2)
- +#define V4L2_HEVC_PPS_FLAG_CABAC_INIT_PRESENT (1ULL << 3)
- +#define V4L2_HEVC_PPS_FLAG_CONSTRAINED_INTRA_PRED (1ULL << 4)
- +#define V4L2_HEVC_PPS_FLAG_TRANSFORM_SKIP_ENABLED (1ULL << 5)
- +#define V4L2_HEVC_PPS_FLAG_CU_QP_DELTA_ENABLED (1ULL << 6)
- +#define V4L2_HEVC_PPS_FLAG_PPS_SLICE_CHROMA_QP_OFFSETS_PRESENT (1ULL << 7)
- +#define V4L2_HEVC_PPS_FLAG_WEIGHTED_PRED (1ULL << 8)
- +#define V4L2_HEVC_PPS_FLAG_WEIGHTED_BIPRED (1ULL << 9)
- +#define V4L2_HEVC_PPS_FLAG_TRANSQUANT_BYPASS_ENABLED (1ULL << 10)
- +#define V4L2_HEVC_PPS_FLAG_TILES_ENABLED (1ULL << 11)
- +#define V4L2_HEVC_PPS_FLAG_ENTROPY_CODING_SYNC_ENABLED (1ULL << 12)
- +#define V4L2_HEVC_PPS_FLAG_LOOP_FILTER_ACROSS_TILES_ENABLED (1ULL << 13)
- +#define V4L2_HEVC_PPS_FLAG_PPS_LOOP_FILTER_ACROSS_SLICES_ENABLED (1ULL << 14)
- +#define V4L2_HEVC_PPS_FLAG_DEBLOCKING_FILTER_OVERRIDE_ENABLED (1ULL << 15)
- +#define V4L2_HEVC_PPS_FLAG_PPS_DISABLE_DEBLOCKING_FILTER (1ULL << 16)
- +#define V4L2_HEVC_PPS_FLAG_LISTS_MODIFICATION_PRESENT (1ULL << 17)
- +#define V4L2_HEVC_PPS_FLAG_SLICE_SEGMENT_HEADER_EXTENSION_PRESENT (1ULL << 18)
- +
- +struct v4l2_ctrl_hevc_pps {
- + /* ISO/IEC 23008-2, ITU-T Rec. H.265: Picture parameter set */
- + __u8 pic_parameter_set_id;
- + __u8 num_extra_slice_header_bits;
- + __u8 num_ref_idx_l0_default_active_minus1;
- + __u8 num_ref_idx_l1_default_active_minus1;
- + __s8 init_qp_minus26;
- + __u8 diff_cu_qp_delta_depth;
- + __s8 pps_cb_qp_offset;
- + __s8 pps_cr_qp_offset;
- + __u8 num_tile_columns_minus1;
- + __u8 num_tile_rows_minus1;
- + __u8 column_width_minus1[20];
- + __u8 row_height_minus1[22];
- + __s8 pps_beta_offset_div2;
- + __s8 pps_tc_offset_div2;
- + __u8 log2_parallel_merge_level_minus2;
- +
- + __u8 padding;
- + __u64 flags;
- +};
- +
- +#define V4L2_HEVC_DPB_ENTRY_RPS_ST_CURR_BEFORE 0x01
- +#define V4L2_HEVC_DPB_ENTRY_RPS_ST_CURR_AFTER 0x02
- +#define V4L2_HEVC_DPB_ENTRY_RPS_LT_CURR 0x03
- +
- +#define V4L2_HEVC_DPB_ENTRIES_NUM_MAX 16
- +
- +struct v4l2_hevc_dpb_entry {
- + __u64 timestamp;
- + __u8 rps;
- + __u8 field_pic;
- + __u16 pic_order_cnt[2];
- + __u8 padding[2];
- +};
- +
- +struct v4l2_hevc_pred_weight_table {
- + __s8 delta_luma_weight_l0[V4L2_HEVC_DPB_ENTRIES_NUM_MAX];
- + __s8 luma_offset_l0[V4L2_HEVC_DPB_ENTRIES_NUM_MAX];
- + __s8 delta_chroma_weight_l0[V4L2_HEVC_DPB_ENTRIES_NUM_MAX][2];
- + __s8 chroma_offset_l0[V4L2_HEVC_DPB_ENTRIES_NUM_MAX][2];
- +
- + __s8 delta_luma_weight_l1[V4L2_HEVC_DPB_ENTRIES_NUM_MAX];
- + __s8 luma_offset_l1[V4L2_HEVC_DPB_ENTRIES_NUM_MAX];
- + __s8 delta_chroma_weight_l1[V4L2_HEVC_DPB_ENTRIES_NUM_MAX][2];
- + __s8 chroma_offset_l1[V4L2_HEVC_DPB_ENTRIES_NUM_MAX][2];
- +
- + __u8 padding[6];
- +
- + __u8 luma_log2_weight_denom;
- + __s8 delta_chroma_log2_weight_denom;
- +};
- +
- +#define V4L2_HEVC_SLICE_PARAMS_FLAG_SLICE_SAO_LUMA (1ULL << 0)
- +#define V4L2_HEVC_SLICE_PARAMS_FLAG_SLICE_SAO_CHROMA (1ULL << 1)
- +#define V4L2_HEVC_SLICE_PARAMS_FLAG_SLICE_TEMPORAL_MVP_ENABLED (1ULL << 2)
- +#define V4L2_HEVC_SLICE_PARAMS_FLAG_MVD_L1_ZERO (1ULL << 3)
- +#define V4L2_HEVC_SLICE_PARAMS_FLAG_CABAC_INIT (1ULL << 4)
- +#define V4L2_HEVC_SLICE_PARAMS_FLAG_COLLOCATED_FROM_L0 (1ULL << 5)
- +#define V4L2_HEVC_SLICE_PARAMS_FLAG_USE_INTEGER_MV (1ULL << 6)
- +#define V4L2_HEVC_SLICE_PARAMS_FLAG_SLICE_DEBLOCKING_FILTER_DISABLED (1ULL << 7)
- +#define V4L2_HEVC_SLICE_PARAMS_FLAG_SLICE_LOOP_FILTER_ACROSS_SLICES_ENABLED (1ULL << 8)
- +
- +struct v4l2_ctrl_hevc_slice_params {
- + __u32 bit_size;
- + __u32 data_bit_offset;
- +
- + /* ISO/IEC 23008-2, ITU-T Rec. H.265: General slice segment header */
- + __u32 slice_segment_addr;
- + __u32 num_entry_point_offsets;
- +
- + /* ISO/IEC 23008-2, ITU-T Rec. H.265: NAL unit header */
- + __u8 nal_unit_type;
- + __u8 nuh_temporal_id_plus1;
- +
- + /* ISO/IEC 23008-2, ITU-T Rec. H.265: General slice segment header */
- + __u8 slice_type;
- + __u8 colour_plane_id;
- + __u16 slice_pic_order_cnt;
- + __u8 num_ref_idx_l0_active_minus1;
- + __u8 num_ref_idx_l1_active_minus1;
- + __u8 collocated_ref_idx;
- + __u8 five_minus_max_num_merge_cand;
- + __s8 slice_qp_delta;
- + __s8 slice_cb_qp_offset;
- + __s8 slice_cr_qp_offset;
- + __s8 slice_act_y_qp_offset;
- + __s8 slice_act_cb_qp_offset;
- + __s8 slice_act_cr_qp_offset;
- + __s8 slice_beta_offset_div2;
- + __s8 slice_tc_offset_div2;
- +
- + /* ISO/IEC 23008-2, ITU-T Rec. H.265: Picture timing SEI message */
- + __u8 pic_struct;
- +
- + /* ISO/IEC 23008-2, ITU-T Rec. H.265: General slice segment header */
- + __u8 num_active_dpb_entries;
- + __u8 ref_idx_l0[V4L2_HEVC_DPB_ENTRIES_NUM_MAX];
- + __u8 ref_idx_l1[V4L2_HEVC_DPB_ENTRIES_NUM_MAX];
- +
- + __u8 num_rps_poc_st_curr_before;
- + __u8 num_rps_poc_st_curr_after;
- + __u8 num_rps_poc_lt_curr;
- +
- + __u16 short_term_ref_pic_set_size;
- + __u16 long_term_ref_pic_set_size;
- +
- + __u8 padding[5];
- +
- + __u32 entry_point_offset_minus1[256];
- +
- + /* ISO/IEC 23008-2, ITU-T Rec. H.265: General slice segment header */
- + struct v4l2_hevc_dpb_entry dpb[V4L2_HEVC_DPB_ENTRIES_NUM_MAX];
- +
- + /* ISO/IEC 23008-2, ITU-T Rec. H.265: Weighted prediction parameter */
- + struct v4l2_hevc_pred_weight_table pred_weight_table;
- +
- + __u64 flags;
- +};
- +
- +struct v4l2_ctrl_hevc_scaling_matrix {
- + __u8 scaling_list_4x4[6][16];
- + __u8 scaling_list_8x8[6][64];
- + __u8 scaling_list_16x16[6][64];
- + __u8 scaling_list_32x32[2][64];
- + __u8 scaling_list_dc_coef_16x16[6];
- + __u8 scaling_list_dc_coef_32x32[2];
- +};
- +
- +#endif
- diff -uNr '--exclude=.git' '--exclude=sunxi' ffmpeg_n4.2.2/libavcodec/hevcdec.c ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/hevcdec.c
- --- ffmpeg_n4.2.2/libavcodec/hevcdec.c 2020-05-21 20:25:05.353843330 -0700
- +++ ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/hevcdec.c 2020-05-26 03:16:38.983174486 -0700
- @@ -373,6 +373,7 @@
- #define HWACCEL_MAX (CONFIG_HEVC_DXVA2_HWACCEL + \
- CONFIG_HEVC_D3D11VA_HWACCEL * 2 + \
- CONFIG_HEVC_NVDEC_HWACCEL + \
- + CONFIG_HEVC_V4L2REQUEST_HWACCEL + \
- CONFIG_HEVC_VAAPI_HWACCEL + \
- CONFIG_HEVC_VIDEOTOOLBOX_HWACCEL + \
- CONFIG_HEVC_VDPAU_HWACCEL)
- @@ -400,6 +401,9 @@
- #if CONFIG_HEVC_VIDEOTOOLBOX_HWACCEL
- *fmt++ = AV_PIX_FMT_VIDEOTOOLBOX;
- #endif
- +#if CONFIG_HEVC_V4L2REQUEST_HWACCEL
- + *fmt++ = AV_PIX_FMT_DRM_PRIME;
- +#endif
- break;
- case AV_PIX_FMT_YUV420P10:
- #if CONFIG_HEVC_DXVA2_HWACCEL
- @@ -418,6 +422,9 @@
- #if CONFIG_HEVC_NVDEC_HWACCEL
- *fmt++ = AV_PIX_FMT_CUDA;
- #endif
- +#if CONFIG_HEVC_V4L2REQUEST_HWACCEL
- + *fmt++ = AV_PIX_FMT_DRM_PRIME;
- +#endif
- break;
- case AV_PIX_FMT_YUV444P:
- #if CONFIG_HEVC_VDPAU_HWACCEL
- @@ -3593,6 +3600,9 @@
- #if CONFIG_HEVC_VIDEOTOOLBOX_HWACCEL
- HWACCEL_VIDEOTOOLBOX(hevc),
- #endif
- +#if CONFIG_HEVC_V4L2REQUEST_HWACCEL
- + HWACCEL_V4L2REQUEST(hevc),
- +#endif
- NULL
- },
- };
- diff -uNr '--exclude=.git' '--exclude=sunxi' ffmpeg_n4.2.2/libavcodec/hwaccel.h ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/hwaccel.h
- --- ffmpeg_n4.2.2/libavcodec/hwaccel.h 2020-05-21 20:25:05.363843138 -0700
- +++ ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/hwaccel.h 2020-05-26 03:16:39.023174427 -0700
- @@ -80,5 +80,7 @@
- HW_CONFIG_HWACCEL(0, 0, 1, D3D11VA_VLD, NONE, ff_ ## codec ## _d3d11va_hwaccel)
- #define HWACCEL_XVMC(codec) \
- HW_CONFIG_HWACCEL(0, 0, 1, XVMC, NONE, ff_ ## codec ## _xvmc_hwaccel)
- +#define HWACCEL_V4L2REQUEST(codec) \
- + HW_CONFIG_HWACCEL(1, 0, 0, DRM_PRIME, DRM, ff_ ## codec ## _v4l2request_hwaccel)
- #endif /* AVCODEC_HWACCEL_H */
- diff -uNr '--exclude=.git' '--exclude=sunxi' ffmpeg_n4.2.2/libavcodec/hwaccels.h ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/hwaccels.h
- --- ffmpeg_n4.2.2/libavcodec/hwaccels.h 2020-05-21 20:25:05.363843138 -0700
- +++ ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/hwaccels.h 2020-05-26 03:16:39.023174427 -0700
- @@ -27,6 +27,7 @@
- extern const AVHWAccel ff_h264_d3d11va2_hwaccel;
- extern const AVHWAccel ff_h264_dxva2_hwaccel;
- extern const AVHWAccel ff_h264_nvdec_hwaccel;
- +extern const AVHWAccel ff_h264_v4l2request_hwaccel;
- extern const AVHWAccel ff_h264_vaapi_hwaccel;
- extern const AVHWAccel ff_h264_vdpau_hwaccel;
- extern const AVHWAccel ff_h264_videotoolbox_hwaccel;
- @@ -34,6 +35,7 @@
- extern const AVHWAccel ff_hevc_d3d11va2_hwaccel;
- extern const AVHWAccel ff_hevc_dxva2_hwaccel;
- extern const AVHWAccel ff_hevc_nvdec_hwaccel;
- +extern const AVHWAccel ff_hevc_v4l2request_hwaccel;
- extern const AVHWAccel ff_hevc_vaapi_hwaccel;
- extern const AVHWAccel ff_hevc_vdpau_hwaccel;
- extern const AVHWAccel ff_hevc_videotoolbox_hwaccel;
- @@ -47,6 +49,7 @@
- extern const AVHWAccel ff_mpeg2_d3d11va2_hwaccel;
- extern const AVHWAccel ff_mpeg2_nvdec_hwaccel;
- extern const AVHWAccel ff_mpeg2_dxva2_hwaccel;
- +extern const AVHWAccel ff_mpeg2_v4l2request_hwaccel;
- extern const AVHWAccel ff_mpeg2_vaapi_hwaccel;
- extern const AVHWAccel ff_mpeg2_vdpau_hwaccel;
- extern const AVHWAccel ff_mpeg2_videotoolbox_hwaccel;
- @@ -62,11 +65,13 @@
- extern const AVHWAccel ff_vc1_vaapi_hwaccel;
- extern const AVHWAccel ff_vc1_vdpau_hwaccel;
- extern const AVHWAccel ff_vp8_nvdec_hwaccel;
- +extern const AVHWAccel ff_vp8_v4l2request_hwaccel;
- extern const AVHWAccel ff_vp8_vaapi_hwaccel;
- extern const AVHWAccel ff_vp9_d3d11va_hwaccel;
- extern const AVHWAccel ff_vp9_d3d11va2_hwaccel;
- extern const AVHWAccel ff_vp9_dxva2_hwaccel;
- extern const AVHWAccel ff_vp9_nvdec_hwaccel;
- +extern const AVHWAccel ff_vp9_v4l2request_hwaccel;
- extern const AVHWAccel ff_vp9_vaapi_hwaccel;
- extern const AVHWAccel ff_wmv3_d3d11va_hwaccel;
- extern const AVHWAccel ff_wmv3_d3d11va2_hwaccel;
- diff -uNr '--exclude=.git' '--exclude=sunxi' ffmpeg_n4.2.2/libavcodec/Makefile ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/Makefile
- --- ffmpeg_n4.2.2/libavcodec/Makefile 2020-05-21 20:25:05.193846405 -0700
- +++ ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/Makefile 2020-05-26 03:16:38.493175211 -0700
- @@ -147,6 +147,7 @@
- OBJS-$(CONFIG_VP56DSP) += vp56dsp.o
- OBJS-$(CONFIG_VP8DSP) += vp8dsp.o
- OBJS-$(CONFIG_V4L2_M2M) += v4l2_m2m.o v4l2_context.o v4l2_buffers.o v4l2_fmt.o
- +OBJS-$(CONFIG_V4L2_REQUEST) += v4l2_request.o
- OBJS-$(CONFIG_WMA_FREQS) += wma_freqs.o
- OBJS-$(CONFIG_WMV2DSP) += wmv2dsp.o
- @@ -871,6 +872,7 @@
- OBJS-$(CONFIG_H264_DXVA2_HWACCEL) += dxva2_h264.o
- OBJS-$(CONFIG_H264_NVDEC_HWACCEL) += nvdec_h264.o
- OBJS-$(CONFIG_H264_QSV_HWACCEL) += qsvdec_h2645.o
- +OBJS-$(CONFIG_H264_V4L2REQUEST_HWACCEL) += v4l2_request_h264.o
- OBJS-$(CONFIG_H264_VAAPI_HWACCEL) += vaapi_h264.o
- OBJS-$(CONFIG_H264_VDPAU_HWACCEL) += vdpau_h264.o
- OBJS-$(CONFIG_H264_VIDEOTOOLBOX_HWACCEL) += videotoolbox.o
- @@ -878,6 +880,7 @@
- OBJS-$(CONFIG_HEVC_DXVA2_HWACCEL) += dxva2_hevc.o
- OBJS-$(CONFIG_HEVC_NVDEC_HWACCEL) += nvdec_hevc.o
- OBJS-$(CONFIG_HEVC_QSV_HWACCEL) += qsvdec_h2645.o
- +OBJS-$(CONFIG_HEVC_V4L2REQUEST_HWACCEL) += v4l2_request_hevc.o
- OBJS-$(CONFIG_HEVC_VAAPI_HWACCEL) += vaapi_hevc.o
- OBJS-$(CONFIG_HEVC_VDPAU_HWACCEL) += vdpau_hevc.o
- OBJS-$(CONFIG_MJPEG_NVDEC_HWACCEL) += nvdec_mjpeg.o
- @@ -890,6 +893,7 @@
- OBJS-$(CONFIG_MPEG2_DXVA2_HWACCEL) += dxva2_mpeg2.o
- OBJS-$(CONFIG_MPEG2_NVDEC_HWACCEL) += nvdec_mpeg12.o
- OBJS-$(CONFIG_MPEG2_QSV_HWACCEL) += qsvdec_other.o
- +OBJS-$(CONFIG_MPEG2_V4L2REQUEST_HWACCEL) += v4l2_request_mpeg2.o
- OBJS-$(CONFIG_MPEG2_VAAPI_HWACCEL) += vaapi_mpeg2.o
- OBJS-$(CONFIG_MPEG2_VDPAU_HWACCEL) += vdpau_mpeg12.o
- OBJS-$(CONFIG_MPEG2_VIDEOTOOLBOX_HWACCEL) += videotoolbox.o
- @@ -905,10 +909,12 @@
- OBJS-$(CONFIG_VC1_VAAPI_HWACCEL) += vaapi_vc1.o
- OBJS-$(CONFIG_VC1_VDPAU_HWACCEL) += vdpau_vc1.o
- OBJS-$(CONFIG_VP8_NVDEC_HWACCEL) += nvdec_vp8.o
- +OBJS-$(CONFIG_VP8_V4L2REQUEST_HWACCEL) += v4l2_request_vp8.o
- OBJS-$(CONFIG_VP8_VAAPI_HWACCEL) += vaapi_vp8.o
- OBJS-$(CONFIG_VP9_D3D11VA_HWACCEL) += dxva2_vp9.o
- OBJS-$(CONFIG_VP9_DXVA2_HWACCEL) += dxva2_vp9.o
- OBJS-$(CONFIG_VP9_NVDEC_HWACCEL) += nvdec_vp9.o
- +OBJS-$(CONFIG_VP9_V4L2REQUEST_HWACCEL) += v4l2_request_vp9.o
- OBJS-$(CONFIG_VP9_VAAPI_HWACCEL) += vaapi_vp9.o
- OBJS-$(CONFIG_VP8_QSV_HWACCEL) += qsvdec_other.o
- diff -uNr '--exclude=.git' '--exclude=sunxi' ffmpeg_n4.2.2/libavcodec/mpeg12dec.c ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/mpeg12dec.c
- --- ffmpeg_n4.2.2/libavcodec/mpeg12dec.c 2020-05-21 20:25:05.453841409 -0700
- +++ ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/mpeg12dec.c 2020-05-26 03:16:39.303174012 -0700
- @@ -1157,6 +1157,9 @@
- #if CONFIG_MPEG2_VIDEOTOOLBOX_HWACCEL
- AV_PIX_FMT_VIDEOTOOLBOX,
- #endif
- +#if CONFIG_MPEG2_V4L2REQUEST_HWACCEL
- + AV_PIX_FMT_DRM_PRIME,
- +#endif
- AV_PIX_FMT_YUV420P,
- AV_PIX_FMT_NONE
- };
- @@ -2942,6 +2945,9 @@
- #if CONFIG_MPEG2_XVMC_HWACCEL
- HWACCEL_XVMC(mpeg2),
- #endif
- +#if CONFIG_MPEG2_V4L2REQUEST_HWACCEL
- + HWACCEL_V4L2REQUEST(mpeg2),
- +#endif
- NULL
- },
- };
- diff -uNr '--exclude=.git' '--exclude=sunxi' ffmpeg_n4.2.2/libavcodec/mpeg2-ctrls.h ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/mpeg2-ctrls.h
- --- ffmpeg_n4.2.2/libavcodec/mpeg2-ctrls.h 1969-12-31 16:00:00.000000000 -0800
- +++ ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/mpeg2-ctrls.h 2020-05-26 03:16:39.303174012 -0700
- @@ -0,0 +1,82 @@
- +/* SPDX-License-Identifier: GPL-2.0 */
- +/*
- + * These are the MPEG2 state controls for use with stateless MPEG-2
- + * codec drivers.
- + *
- + * It turns out that these structs are not stable yet and will undergo
- + * more changes. So keep them private until they are stable and ready to
- + * become part of the official public API.
- + */
- +
- +#ifndef _MPEG2_CTRLS_H_
- +#define _MPEG2_CTRLS_H_
- +
- +#define V4L2_CID_MPEG_VIDEO_MPEG2_SLICE_PARAMS (V4L2_CID_MPEG_BASE+250)
- +#define V4L2_CID_MPEG_VIDEO_MPEG2_QUANTIZATION (V4L2_CID_MPEG_BASE+251)
- +
- +/* enum v4l2_ctrl_type type values */
- +#define V4L2_CTRL_TYPE_MPEG2_SLICE_PARAMS 0x0103
- +#define V4L2_CTRL_TYPE_MPEG2_QUANTIZATION 0x0104
- +
- +#define V4L2_MPEG2_PICTURE_CODING_TYPE_I 1
- +#define V4L2_MPEG2_PICTURE_CODING_TYPE_P 2
- +#define V4L2_MPEG2_PICTURE_CODING_TYPE_B 3
- +#define V4L2_MPEG2_PICTURE_CODING_TYPE_D 4
- +
- +struct v4l2_mpeg2_sequence {
- + /* ISO/IEC 13818-2, ITU-T Rec. H.262: Sequence header */
- + __u16 horizontal_size;
- + __u16 vertical_size;
- + __u32 vbv_buffer_size;
- +
- + /* ISO/IEC 13818-2, ITU-T Rec. H.262: Sequence extension */
- + __u16 profile_and_level_indication;
- + __u8 progressive_sequence;
- + __u8 chroma_format;
- +};
- +
- +struct v4l2_mpeg2_picture {
- + /* ISO/IEC 13818-2, ITU-T Rec. H.262: Picture header */
- + __u8 picture_coding_type;
- +
- + /* ISO/IEC 13818-2, ITU-T Rec. H.262: Picture coding extension */
- + __u8 f_code[2][2];
- + __u8 intra_dc_precision;
- + __u8 picture_structure;
- + __u8 top_field_first;
- + __u8 frame_pred_frame_dct;
- + __u8 concealment_motion_vectors;
- + __u8 q_scale_type;
- + __u8 intra_vlc_format;
- + __u8 alternate_scan;
- + __u8 repeat_first_field;
- + __u16 progressive_frame;
- +};
- +
- +struct v4l2_ctrl_mpeg2_slice_params {
- + __u32 bit_size;
- + __u32 data_bit_offset;
- + __u64 backward_ref_ts;
- + __u64 forward_ref_ts;
- +
- + struct v4l2_mpeg2_sequence sequence;
- + struct v4l2_mpeg2_picture picture;
- +
- + /* ISO/IEC 13818-2, ITU-T Rec. H.262: Slice */
- + __u32 quantiser_scale_code;
- +};
- +
- +struct v4l2_ctrl_mpeg2_quantization {
- + /* ISO/IEC 13818-2, ITU-T Rec. H.262: Quant matrix extension */
- + __u8 load_intra_quantiser_matrix;
- + __u8 load_non_intra_quantiser_matrix;
- + __u8 load_chroma_intra_quantiser_matrix;
- + __u8 load_chroma_non_intra_quantiser_matrix;
- +
- + __u8 intra_quantiser_matrix[64];
- + __u8 non_intra_quantiser_matrix[64];
- + __u8 chroma_intra_quantiser_matrix[64];
- + __u8 chroma_non_intra_quantiser_matrix[64];
- +};
- +
- +#endif
- diff -uNr '--exclude=.git' '--exclude=sunxi' ffmpeg_n4.2.2/libavcodec/v4l2_request.c ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/v4l2_request.c
- --- ffmpeg_n4.2.2/libavcodec/v4l2_request.c 1969-12-31 16:00:00.000000000 -0800
- +++ ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/v4l2_request.c 2020-05-26 03:16:39.653173493 -0700
- @@ -0,0 +1,1055 @@
- +/*
- + * This file is part of FFmpeg.
- + *
- + * FFmpeg is free software; you can redistribute it and/or
- + * modify it under the terms of the GNU Lesser General Public
- + * License as published by the Free Software Foundation; either
- + * version 2.1 of the License, or (at your option) any later version.
- + *
- + * FFmpeg is distributed in the hope that it will be useful,
- + * but WITHOUT ANY WARRANTY; without even the implied warranty of
- + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- + * Lesser General Public License for more details.
- + *
- + * You should have received a copy of the GNU Lesser General Public
- + * License along with FFmpeg; if not, write to the Free Software
- + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
- + */
- +
- +#include <drm_fourcc.h>
- +#include <linux/media.h>
- +#include <sys/mman.h>
- +#include <sys/types.h>
- +#include <sys/stat.h>
- +#include <fcntl.h>
- +
- +#include <sys/sysmacros.h>
- +#include <libudev.h>
- +
- +#include "decode.h"
- +#include "internal.h"
- +#include "libavutil/pixdesc.h"
- +#include "v4l2_request.h"
- +
- +#ifndef DRM_FORMAT_NV15
- +#define DRM_FORMAT_NV15 fourcc_code('N', 'V', '1', '5')
- +#endif
- +
- +#ifndef DRM_FORMAT_NV20
- +#define DRM_FORMAT_NV20 fourcc_code('N', 'V', '2', '0')
- +#endif
- +
- +uint64_t ff_v4l2_request_get_capture_timestamp(AVFrame *frame)
- +{
- + V4L2RequestDescriptor *req = (V4L2RequestDescriptor*)frame->data[0];
- + return req ? v4l2_timeval_to_ns(&req->capture.buffer.timestamp) : 0;
- +}
- +
- +int ff_v4l2_request_reset_frame(AVCodecContext *avctx, AVFrame *frame)
- +{
- + V4L2RequestDescriptor *req = (V4L2RequestDescriptor*)frame->data[0];
- + memset(&req->drm, 0, sizeof(AVDRMFrameDescriptor));
- + req->output.used = 0;
- + return 0;
- +}
- +
- +int ff_v4l2_request_append_output_buffer(AVCodecContext *avctx, AVFrame *frame, const uint8_t *data, uint32_t size)
- +{
- + V4L2RequestDescriptor *req = (V4L2RequestDescriptor*)frame->data[0];
- + if (req->output.used + size + (AV_INPUT_BUFFER_PADDING_SIZE * 4) <= req->output.size) {
- + memcpy(req->output.addr + req->output.used, data, size);
- + req->output.used += size;
- + } else {
- + av_log(avctx, AV_LOG_ERROR, "%s: output.used=%u output.size=%u size=%u\n", __func__, req->output.used, req->output.size, size);
- + }
- + return 0;
- +}
- +
- +static int v4l2_request_controls(V4L2RequestContext *ctx, int request_fd, unsigned long type, struct v4l2_ext_control *control, int count)
- +{
- + struct v4l2_ext_controls controls = {
- + .controls = control,
- + .count = count,
- + .request_fd = request_fd,
- + .which = (request_fd >= 0) ? V4L2_CTRL_WHICH_REQUEST_VAL : 0,
- + };
- +
- + if (!control || !count)
- + return 0;
- +
- + return ioctl(ctx->video_fd, type, &controls);
- +}
- +
- +static int v4l2_request_set_controls(V4L2RequestContext *ctx, int request_fd, struct v4l2_ext_control *control, int count)
- +{
- + return v4l2_request_controls(ctx, request_fd, VIDIOC_S_EXT_CTRLS, control, count);
- +}
- +
- +int ff_v4l2_request_set_controls(AVCodecContext *avctx, struct v4l2_ext_control *control, int count)
- +{
- + V4L2RequestContext *ctx = avctx->internal->hwaccel_priv_data;
- + int ret;
- +
- + ret = v4l2_request_controls(ctx, -1, VIDIOC_S_EXT_CTRLS, control, count);
- + if (ret < 0) {
- + av_log(avctx, AV_LOG_ERROR, "%s: set controls failed, %s (%d)\n", __func__, strerror(errno), errno);
- + return AVERROR(EINVAL);
- + }
- +
- + return ret;
- +}
- +
- +int ff_v4l2_request_get_controls(AVCodecContext *avctx, struct v4l2_ext_control *control, int count)
- +{
- + V4L2RequestContext *ctx = avctx->internal->hwaccel_priv_data;
- + int ret;
- +
- + ret = v4l2_request_controls(ctx, -1, VIDIOC_G_EXT_CTRLS, control, count);
- + if (ret < 0) {
- + av_log(avctx, AV_LOG_ERROR, "%s: get controls failed, %s (%d)\n", __func__, strerror(errno), errno);
- + return AVERROR(EINVAL);
- + }
- +
- + return ret;
- +}
- +
- +int ff_v4l2_request_query_control(AVCodecContext *avctx, struct v4l2_query_ext_ctrl *control)
- +{
- + int ret;
- + V4L2RequestContext *ctx = avctx->internal->hwaccel_priv_data;
- +
- + ret = ioctl(ctx->video_fd, VIDIOC_QUERY_EXT_CTRL, control);
- + if (ret < 0) {
- + av_log(avctx, AV_LOG_ERROR, "%s: query control failed, %s (%d)\n", __func__, strerror(errno), errno);
- + return AVERROR(EINVAL);
- + }
- +
- + return 0;
- +}
- +
- +int ff_v4l2_request_query_control_default_value(AVCodecContext *avctx, uint32_t id)
- +{
- + int ret;
- + V4L2RequestContext *ctx = avctx->internal->hwaccel_priv_data;
- + struct v4l2_queryctrl control = {
- + .id = id,
- + };
- +
- + ret = ioctl(ctx->video_fd, VIDIOC_QUERYCTRL, &control);
- + if (ret < 0) {
- + av_log(avctx, AV_LOG_ERROR, "%s: query control failed, %s (%d)\n", __func__, strerror(errno), errno);
- + return AVERROR(EINVAL);
- + }
- +
- + return control.default_value;
- +}
- +
- +static int v4l2_request_queue_buffer(V4L2RequestContext *ctx, int request_fd, V4L2RequestBuffer *buf, uint32_t flags)
- +{
- + struct v4l2_plane planes[1] = {};
- + struct v4l2_buffer buffer = {
- + .type = buf->buffer.type,
- + .memory = buf->buffer.memory,
- + .index = buf->index,
- + .timestamp.tv_usec = ctx->timestamp,
- + .bytesused = buf->used,
- + .request_fd = request_fd,
- + .flags = ((request_fd >= 0) ? V4L2_BUF_FLAG_REQUEST_FD : 0) | flags,
- + };
- +
- + buf->buffer.timestamp = buffer.timestamp;
- +
- + if (V4L2_TYPE_IS_MULTIPLANAR(buf->buffer.type)) {
- + planes[0].bytesused = buf->used;
- + buffer.bytesused = 0;
- + buffer.length = 1;
- + buffer.m.planes = planes;
- + }
- +
- + return ioctl(ctx->video_fd, VIDIOC_QBUF, &buffer);
- +}
- +
- +static int v4l2_request_dequeue_buffer(V4L2RequestContext *ctx, V4L2RequestBuffer *buf)
- +{
- + int ret;
- + struct v4l2_plane planes[1] = {};
- + struct v4l2_buffer buffer = {
- + .type = buf->buffer.type,
- + .memory = buf->buffer.memory,
- + .index = buf->index,
- + };
- +
- + if (V4L2_TYPE_IS_MULTIPLANAR(buf->buffer.type)) {
- + buffer.length = 1;
- + buffer.m.planes = planes;
- + }
- +
- + ret = ioctl(ctx->video_fd, VIDIOC_DQBUF, &buffer);
- + if (ret < 0)
- + return ret;
- +
- + buf->buffer.timestamp = buffer.timestamp;
- + return 0;
- +}
- +
- +const uint32_t v4l2_request_capture_pixelformats_420[] = {
- + V4L2_PIX_FMT_NV12,
- +#ifdef DRM_FORMAT_MOD_ALLWINNER_TILED
- + V4L2_PIX_FMT_SUNXI_TILED_NV12,
- +#endif
- +};
- +
- +const uint32_t v4l2_request_capture_pixelformats_420_10[] = {
- +#ifdef V4L2_PIX_FMT_NV15
- + V4L2_PIX_FMT_NV15,
- +#endif
- +};
- +
- +const uint32_t v4l2_request_capture_pixelformats_422[] = {
- + V4L2_PIX_FMT_NV16,
- +};
- +
- +const uint32_t v4l2_request_capture_pixelformats_422_10[] = {
- +#ifdef V4L2_PIX_FMT_NV20
- + V4L2_PIX_FMT_NV20,
- +#endif
- +};
- +
- +static int v4l2_request_set_drm_descriptor(V4L2RequestDescriptor *req, struct v4l2_format *format)
- +{
- + AVDRMFrameDescriptor *desc = &req->drm;
- + AVDRMLayerDescriptor *layer = &desc->layers[0];
- + uint32_t pixelformat = V4L2_TYPE_IS_MULTIPLANAR(format->type) ? format->fmt.pix_mp.pixelformat : format->fmt.pix.pixelformat;
- +
- + switch (pixelformat) {
- + case V4L2_PIX_FMT_NV12:
- + layer->format = DRM_FORMAT_NV12;
- + desc->objects[0].format_modifier = DRM_FORMAT_MOD_LINEAR;
- + break;
- +#ifdef DRM_FORMAT_MOD_ALLWINNER_TILED
- + case V4L2_PIX_FMT_SUNXI_TILED_NV12:
- + layer->format = DRM_FORMAT_NV12;
- + desc->objects[0].format_modifier = DRM_FORMAT_MOD_ALLWINNER_TILED;
- + break;
- +#endif
- +#ifdef V4L2_PIX_FMT_NV15
- + case V4L2_PIX_FMT_NV15:
- + layer->format = DRM_FORMAT_NV15;
- + desc->objects[0].format_modifier = DRM_FORMAT_MOD_LINEAR;
- + break;
- +#endif
- + case V4L2_PIX_FMT_NV16:
- + layer->format = DRM_FORMAT_NV16;
- + desc->objects[0].format_modifier = DRM_FORMAT_MOD_LINEAR;
- + break;
- +#ifdef V4L2_PIX_FMT_NV20
- + case V4L2_PIX_FMT_NV20:
- + layer->format = DRM_FORMAT_NV20;
- + desc->objects[0].format_modifier = DRM_FORMAT_MOD_LINEAR;
- + break;
- +#endif
- + default:
- + return -1;
- + }
- +
- + desc->nb_objects = 1;
- + desc->objects[0].fd = req->capture.fd;
- + desc->objects[0].size = req->capture.size;
- +
- + desc->nb_layers = 1;
- + layer->nb_planes = 2;
- +
- + layer->planes[0].object_index = 0;
- + layer->planes[0].offset = 0;
- + layer->planes[0].pitch = V4L2_TYPE_IS_MULTIPLANAR(format->type) ? format->fmt.pix_mp.plane_fmt[0].bytesperline : format->fmt.pix.bytesperline;
- +
- + layer->planes[1].object_index = 0;
- + layer->planes[1].offset = layer->planes[0].pitch * (V4L2_TYPE_IS_MULTIPLANAR(format->type) ? format->fmt.pix_mp.height : format->fmt.pix.height);
- + layer->planes[1].pitch = layer->planes[0].pitch;
- +
- + return 0;
- +}
- +
- +static int v4l2_request_queue_decode(AVCodecContext *avctx, AVFrame *frame, struct v4l2_ext_control *control, int count, int first_slice, int last_slice)
- +{
- + V4L2RequestContext *ctx = avctx->internal->hwaccel_priv_data;
- + V4L2RequestDescriptor *req = (V4L2RequestDescriptor*)frame->data[0];
- + struct timeval tv = { 2, 0 };
- + fd_set except_fds;
- + int ret;
- +
- + av_log(avctx, AV_LOG_DEBUG, "%s: avctx=%p used=%u controls=%d index=%d fd=%d request_fd=%d first_slice=%d last_slice=%d\n", __func__, avctx, req->output.used, count, req->capture.index, req->capture.fd, req->request_fd, first_slice, last_slice);
- +
- + if (first_slice)
- + ctx->timestamp++;
- +
- + ret = v4l2_request_set_controls(ctx, req->request_fd, control, count);
- + if (ret < 0) {
- + av_log(avctx, AV_LOG_ERROR, "%s: set controls failed for request %d, %s (%d)\n", __func__, req->request_fd, strerror(errno), errno);
- + return -1;
- + }
- +
- + memset(req->output.addr + req->output.used, 0, AV_INPUT_BUFFER_PADDING_SIZE * 4);
- +
- + ret = v4l2_request_queue_buffer(ctx, req->request_fd, &req->output, last_slice ? 0 : V4L2_BUF_FLAG_M2M_HOLD_CAPTURE_BUF);
- + if (ret < 0) {
- + av_log(avctx, AV_LOG_ERROR, "%s: queue output buffer %d failed for request %d, %s (%d)\n", __func__, req->output.index, req->request_fd, strerror(errno), errno);
- + return -1;
- + }
- +
- + if (first_slice) {
- + ret = v4l2_request_queue_buffer(ctx, -1, &req->capture, 0);
- + if (ret < 0) {
- + av_log(avctx, AV_LOG_ERROR, "%s: queue capture buffer %d failed for request %d, %s (%d)\n", __func__, req->capture.index, req->request_fd, strerror(errno), errno);
- + return -1;
- + }
- + }
- +
- + // NOTE: do we need to dequeue when request fails/timeout?
- +
- + // 4. queue request and wait
- + ret = ioctl(req->request_fd, MEDIA_REQUEST_IOC_QUEUE, NULL);
- + if (ret < 0) {
- + av_log(avctx, AV_LOG_ERROR, "%s: queue request %d failed, %s (%d)\n", __func__, req->request_fd, strerror(errno), errno);
- + goto fail;
- + }
- +
- + FD_ZERO(&except_fds);
- + FD_SET(req->request_fd, &except_fds);
- +
- + ret = select(req->request_fd + 1, NULL, NULL, &except_fds, &tv);
- + if (ret == 0) {
- + av_log(avctx, AV_LOG_ERROR, "%s: request %d timeout\n", __func__, req->request_fd);
- + goto fail;
- + } else if (ret < 0) {
- + av_log(avctx, AV_LOG_ERROR, "%s: select request %d failed, %s (%d)\n", __func__, req->request_fd, strerror(errno), errno);
- + goto fail;
- + }
- +
- + ret = v4l2_request_dequeue_buffer(ctx, &req->output);
- + if (ret < 0) {
- + av_log(avctx, AV_LOG_ERROR, "%s: dequeue output buffer %d failed for request %d, %s (%d)\n", __func__, req->output.index, req->request_fd, strerror(errno), errno);
- + return -1;
- + }
- +
- + if (last_slice) {
- + ret = v4l2_request_dequeue_buffer(ctx, &req->capture);
- + if (ret < 0) {
- + av_log(avctx, AV_LOG_ERROR, "%s: dequeue capture buffer %d failed for request %d, %s (%d)\n", __func__, req->capture.index, req->request_fd, strerror(errno), errno);
- + return -1;
- + }
- + }
- +
- + // TODO: check errors
- + // buffer.flags & V4L2_BUF_FLAG_ERROR
- +
- + ret = ioctl(req->request_fd, MEDIA_REQUEST_IOC_REINIT, NULL);
- + if (ret < 0) {
- + av_log(avctx, AV_LOG_ERROR, "%s: reinit request %d failed, %s (%d)\n", __func__, req->request_fd, strerror(errno), errno);
- + return -1;
- + }
- +
- + if (last_slice)
- + return v4l2_request_set_drm_descriptor(req, &ctx->format);
- +
- + return 0;
- +
- +fail:
- + ret = v4l2_request_dequeue_buffer(ctx, &req->output);
- + if (ret < 0)
- + av_log(avctx, AV_LOG_ERROR, "%s: dequeue output buffer %d failed for request %d, %s (%d)\n", __func__, req->output.index, req->request_fd, strerror(errno), errno);
- +
- + ret = v4l2_request_dequeue_buffer(ctx, &req->capture);
- + if (ret < 0)
- + av_log(avctx, AV_LOG_ERROR, "%s: dequeue capture buffer %d failed for request %d, %s (%d)\n", __func__, req->capture.index, req->request_fd, strerror(errno), errno);
- +
- + ret = ioctl(req->request_fd, MEDIA_REQUEST_IOC_REINIT, NULL);
- + if (ret < 0)
- + av_log(avctx, AV_LOG_ERROR, "%s: reinit request %d failed, %s (%d)\n", __func__, req->request_fd, strerror(errno), errno);
- +
- + return -1;
- +}
- +
- +int ff_v4l2_request_decode_slice(AVCodecContext *avctx, AVFrame *frame, struct v4l2_ext_control *control, int count, int first_slice, int last_slice)
- +{
- + V4L2RequestDescriptor *req = (V4L2RequestDescriptor*)frame->data[0];
- +
- + // fall back to queue each slice as a full frame
- + if ((req->output.capabilities & V4L2_BUF_CAP_SUPPORTS_M2M_HOLD_CAPTURE_BUF) != V4L2_BUF_CAP_SUPPORTS_M2M_HOLD_CAPTURE_BUF)
- + return v4l2_request_queue_decode(avctx, frame, control, count, 1, 1);
- +
- + return v4l2_request_queue_decode(avctx, frame, control, count, first_slice, last_slice);
- +}
- +
- +int ff_v4l2_request_decode_frame(AVCodecContext *avctx, AVFrame *frame, struct v4l2_ext_control *control, int count)
- +{
- + return v4l2_request_queue_decode(avctx, frame, control, count, 1, 1);
- +}
- +
- +int ff_v4l2_request_output_frame(AVCodecContext *avctx, AVFrame *frame)
- +{
- + V4L2RequestContext *ctx = avctx->internal->hwaccel_priv_data;
- + V4L2RequestDescriptor *req = (V4L2RequestDescriptor*)frame->data[0];
- +
- + av_log(avctx, AV_LOG_DEBUG, "%s: avctx=%p used=%u index=%d fd=%d request_fd=%d\n", __func__, avctx, req->output.used, req->capture.index, req->capture.fd, req->request_fd);
- + return 0;
- +}
- +
- +static int v4l2_request_try_format(AVCodecContext *avctx, enum v4l2_buf_type type, uint32_t pixelformat)
- +{
- + V4L2RequestContext *ctx = avctx->internal->hwaccel_priv_data;
- + struct v4l2_fmtdesc fmtdesc = {
- + .index = 0,
- + .type = type,
- + };
- +
- + if (V4L2_TYPE_IS_OUTPUT(type)) {
- + struct v4l2_create_buffers buffers = {
- + .count = 0,
- + .memory = V4L2_MEMORY_MMAP,
- + .format.type = type,
- + };
- +
- + if (ioctl(ctx->video_fd, VIDIOC_CREATE_BUFS, &buffers) < 0) {
- + av_log(avctx, AV_LOG_ERROR, "%s: create buffers failed for type %u, %s (%d)\n", __func__, type, strerror(errno), errno);
- + return -1;
- + }
- +
- + if ((buffers.capabilities & V4L2_BUF_CAP_SUPPORTS_REQUESTS) != V4L2_BUF_CAP_SUPPORTS_REQUESTS) {
- + av_log(avctx, AV_LOG_INFO, "%s: output buffer type does not support requests, capabilities %u\n", __func__, buffers.capabilities);
- + return -1;
- + }
- + }
- +
- + while (ioctl(ctx->video_fd, VIDIOC_ENUM_FMT, &fmtdesc) >= 0) {
- + if (fmtdesc.pixelformat == pixelformat)
- + return 0;
- +
- + fmtdesc.index++;
- + }
- +
- + av_log(avctx, AV_LOG_INFO, "%s: pixelformat %u not supported for type %u\n", __func__, pixelformat, type);
- + return -1;
- +}
- +
- +static int v4l2_request_set_format(AVCodecContext *avctx, enum v4l2_buf_type type, uint32_t pixelformat, uint32_t buffersize)
- +{
- + V4L2RequestContext *ctx = avctx->internal->hwaccel_priv_data;
- + struct v4l2_format format = {
- + .type = type,
- + };
- +
- + if (V4L2_TYPE_IS_MULTIPLANAR(type)) {
- + format.fmt.pix_mp.width = avctx->coded_width;
- + format.fmt.pix_mp.height = avctx->coded_height;
- + format.fmt.pix_mp.pixelformat = pixelformat;
- + format.fmt.pix_mp.plane_fmt[0].sizeimage = buffersize;
- + format.fmt.pix_mp.num_planes = 1;
- + } else {
- + format.fmt.pix.width = avctx->coded_width;
- + format.fmt.pix.height = avctx->coded_height;
- + format.fmt.pix.pixelformat = pixelformat;
- + format.fmt.pix.sizeimage = buffersize;
- + }
- +
- + return ioctl(ctx->video_fd, VIDIOC_S_FMT, &format);
- +}
- +
- +static int v4l2_request_select_capture_format(AVCodecContext *avctx)
- +{
- + V4L2RequestContext *ctx = avctx->internal->hwaccel_priv_data;
- + enum v4l2_buf_type type = ctx->format.type;
- +
- +#if 0
- + struct v4l2_format format = {
- + .type = type,
- + };
- + struct v4l2_fmtdesc fmtdesc = {
- + .index = 0,
- + .type = type,
- + };
- + uint32_t pixelformat;
- + int i;
- +
- + if (ioctl(ctx->video_fd, VIDIOC_G_FMT, &format) < 0) {
- + av_log(avctx, AV_LOG_ERROR, "%s: get capture format failed, %s (%d)\n", __func__, strerror(errno), errno);
- + return -1;
- + }
- +
- + pixelformat = V4L2_TYPE_IS_MULTIPLANAR(type) ? format.fmt.pix_mp.pixelformat : format.fmt.pix.pixelformat;
- +
- + for (i = 0; i < FF_ARRAY_ELEMS(v4l2_request_capture_pixelformats); i++) {
- + if (pixelformat == v4l2_request_capture_pixelformats[i])
- + return v4l2_request_set_format(avctx, type, pixelformat, 0);
- + }
- +
- + while (ioctl(ctx->video_fd, VIDIOC_ENUM_FMT, &fmtdesc) >= 0) {
- + for (i = 0; i < FF_ARRAY_ELEMS(v4l2_request_capture_pixelformats); i++) {
- + if (fmtdesc.pixelformat == v4l2_request_capture_pixelformats[i])
- + return v4l2_request_set_format(avctx, type, fmtdesc.pixelformat, 0);
- + }
- +
- + fmtdesc.index++;
- + }
- +#else
- + av_log(avctx, AV_LOG_DEBUG, "%s: avctx=%p pix_fmt=%s sw_pix_fmt=%s\n", __func__, avctx, av_get_pix_fmt_name(avctx->pix_fmt), av_get_pix_fmt_name(avctx->sw_pix_fmt));
- + if (avctx->sw_pix_fmt == AV_PIX_FMT_YUV420P || avctx->sw_pix_fmt == AV_PIX_FMT_YUVJ420P) {
- + for (int i = 0; i < FF_ARRAY_ELEMS(v4l2_request_capture_pixelformats_420); i++) {
- + uint32_t pixelformat = v4l2_request_capture_pixelformats_420[i];
- + if (!v4l2_request_try_format(avctx, type, pixelformat))
- + return v4l2_request_set_format(avctx, type, pixelformat, 0);
- + }
- + } else if (avctx->sw_pix_fmt == AV_PIX_FMT_YUV420P10) {
- + for (int i = 0; i < FF_ARRAY_ELEMS(v4l2_request_capture_pixelformats_420_10); i++) {
- + uint32_t pixelformat = v4l2_request_capture_pixelformats_420_10[i];
- + if (!v4l2_request_try_format(avctx, type, pixelformat))
- + return v4l2_request_set_format(avctx, type, pixelformat, 0);
- + }
- + } else if (avctx->sw_pix_fmt == AV_PIX_FMT_YUV422P || avctx->sw_pix_fmt == AV_PIX_FMT_YUVJ422P) {
- + for (int i = 0; i < FF_ARRAY_ELEMS(v4l2_request_capture_pixelformats_422); i++) {
- + uint32_t pixelformat = v4l2_request_capture_pixelformats_422[i];
- + if (!v4l2_request_try_format(avctx, type, pixelformat))
- + return v4l2_request_set_format(avctx, type, pixelformat, 0);
- + }
- + } else if (avctx->sw_pix_fmt == AV_PIX_FMT_YUV422P10) {
- + for (int i = 0; i < FF_ARRAY_ELEMS(v4l2_request_capture_pixelformats_422_10); i++) {
- + uint32_t pixelformat = v4l2_request_capture_pixelformats_422_10[i];
- + if (!v4l2_request_try_format(avctx, type, pixelformat))
- + return v4l2_request_set_format(avctx, type, pixelformat, 0);
- + }
- + }
- +#endif
- +
- + return -1;
- +}
- +
- +static int v4l2_request_probe_video_device(struct udev_device *device, AVCodecContext *avctx, uint32_t pixelformat, uint32_t buffersize, struct v4l2_ext_control *control, int count)
- +{
- + V4L2RequestContext *ctx = avctx->internal->hwaccel_priv_data;
- + int ret = AVERROR(EINVAL);
- + struct v4l2_capability capability = {0};
- + unsigned int capabilities = 0;
- +
- + const char *path = udev_device_get_devnode(device);
- + if (!path) {
- + av_log(avctx, AV_LOG_ERROR, "%s: get video device devnode failed\n", __func__);
- + ret = AVERROR(EINVAL);
- + goto fail;
- + }
- +
- + ctx->video_fd = open(path, O_RDWR | O_NONBLOCK, 0);
- + if (ctx->video_fd < 0) {
- + av_log(avctx, AV_LOG_ERROR, "%s: opening %s failed, %s (%d)\n", __func__, path, strerror(errno), errno);
- + ret = AVERROR(EINVAL);
- + goto fail;
- + }
- +
- + ret = ioctl(ctx->video_fd, VIDIOC_QUERYCAP, &capability);
- + if (ret < 0) {
- + av_log(avctx, AV_LOG_ERROR, "%s: get video capability failed, %s (%d)\n", __func__, strerror(errno), errno);
- + ret = AVERROR(EINVAL);
- + goto fail;
- + }
- +
- + if (capability.capabilities & V4L2_CAP_DEVICE_CAPS)
- + capabilities = capability.device_caps;
- + else
- + capabilities = capability.capabilities;
- +
- + av_log(avctx, AV_LOG_DEBUG, "%s: avctx=%p ctx=%p path=%s capabilities=%u\n", __func__, avctx, ctx, path, capabilities);
- +
- + if ((capabilities & V4L2_CAP_STREAMING) != V4L2_CAP_STREAMING) {
- + av_log(avctx, AV_LOG_ERROR, "%s: missing required streaming capability\n", __func__);
- + ret = AVERROR(EINVAL);
- + goto fail;
- + }
- +
- + if ((capabilities & V4L2_CAP_VIDEO_M2M_MPLANE) == V4L2_CAP_VIDEO_M2M_MPLANE) {
- + ctx->output_type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
- + ctx->format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
- + } else if ((capabilities & V4L2_CAP_VIDEO_M2M) == V4L2_CAP_VIDEO_M2M) {
- + ctx->output_type = V4L2_BUF_TYPE_VIDEO_OUTPUT;
- + ctx->format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
- + } else {
- + av_log(avctx, AV_LOG_ERROR, "%s: missing required mem2mem capability\n", __func__);
- + ret = AVERROR(EINVAL);
- + goto fail;
- + }
- +
- + ret = v4l2_request_try_format(avctx, ctx->output_type, pixelformat);
- + if (ret < 0) {
- + av_log(avctx, AV_LOG_WARNING, "%s: try output format failed\n", __func__);
- + ret = AVERROR(EINVAL);
- + goto fail;
- + }
- +
- + ret = v4l2_request_set_controls(ctx, -1, control, count);
- + if (ret < 0) {
- + av_log(avctx, AV_LOG_ERROR, "%s: set controls failed, %s (%d)\n", __func__, strerror(errno), errno);
- + ret = AVERROR(EINVAL);
- + goto fail;
- + }
- +
- + ret = v4l2_request_set_format(avctx, ctx->output_type, pixelformat, buffersize);
- + if (ret < 0) {
- + av_log(avctx, AV_LOG_ERROR, "%s: set output format failed, %s (%d)\n", __func__, strerror(errno), errno);
- + ret = AVERROR(EINVAL);
- + goto fail;
- + }
- +
- + ret = v4l2_request_select_capture_format(avctx);
- + if (ret < 0) {
- + av_log(avctx, AV_LOG_WARNING, "%s: select capture format failed\n", __func__);
- + ret = AVERROR(EINVAL);
- + goto fail;
- + }
- +
- + return 0;
- +
- +fail:
- + if (ctx->video_fd >= 0) {
- + close(ctx->video_fd);
- + ctx->video_fd = -1;
- + }
- + return ret;
- +}
- +
- +static int v4l2_request_init_context(AVCodecContext *avctx)
- +{
- + V4L2RequestContext *ctx = avctx->internal->hwaccel_priv_data;
- + int ret;
- +
- + ret = ioctl(ctx->video_fd, VIDIOC_G_FMT, &ctx->format);
- + if (ret < 0) {
- + av_log(avctx, AV_LOG_ERROR, "%s: get capture format failed, %s (%d)\n", __func__, strerror(errno), errno);
- + ret = AVERROR(EINVAL);
- + goto fail;
- + }
- +
- + if (V4L2_TYPE_IS_MULTIPLANAR(ctx->format.type)) {
- + av_log(avctx, AV_LOG_DEBUG, "%s: pixelformat=%d width=%u height=%u bytesperline=%u sizeimage=%u num_planes=%u\n", __func__, ctx->format.fmt.pix_mp.pixelformat, ctx->format.fmt.pix_mp.width, ctx->format.fmt.pix_mp.height, ctx->format.fmt.pix_mp.plane_fmt[0].bytesperline, ctx->format.fmt.pix_mp.plane_fmt[0].sizeimage, ctx->format.fmt.pix_mp.num_planes);
- + } else {
- + av_log(avctx, AV_LOG_DEBUG, "%s: pixelformat=%d width=%u height=%u bytesperline=%u sizeimage=%u\n", __func__, ctx->format.fmt.pix.pixelformat, ctx->format.fmt.pix.width, ctx->format.fmt.pix.height, ctx->format.fmt.pix.bytesperline, ctx->format.fmt.pix.sizeimage);
- + }
- +
- + ret = ff_decode_get_hw_frames_ctx(avctx, AV_HWDEVICE_TYPE_DRM);
- + if (ret < 0)
- + goto fail;
- +
- + ret = ioctl(ctx->video_fd, VIDIOC_STREAMON, &ctx->output_type);
- + if (ret < 0) {
- + av_log(avctx, AV_LOG_ERROR, "%s: output stream on failed, %s (%d)\n", __func__, strerror(errno), errno);
- + ret = AVERROR(EINVAL);
- + goto fail;
- + }
- +
- + ret = ioctl(ctx->video_fd, VIDIOC_STREAMON, &ctx->format.type);
- + if (ret < 0) {
- + av_log(avctx, AV_LOG_ERROR, "%s: capture stream on failed, %s (%d)\n", __func__, strerror(errno), errno);
- + ret = AVERROR(EINVAL);
- + goto fail;
- + }
- +
- + return 0;
- +
- +fail:
- + ff_v4l2_request_uninit(avctx);
- + return ret;
- +}
- +
- +static int v4l2_request_probe_media_device(struct udev_device *device, AVCodecContext *avctx, uint32_t pixelformat, uint32_t buffersize, struct v4l2_ext_control *control, int count)
- +{
- + V4L2RequestContext *ctx = avctx->internal->hwaccel_priv_data;
- + int ret;
- + struct media_device_info device_info = {0};
- + struct media_v2_topology topology = {0};
- + struct media_v2_interface *interfaces = NULL;
- + struct udev *udev = udev_device_get_udev(device);
- + struct udev_device *video_device;
- + dev_t devnum;
- +
- + const char *path = udev_device_get_devnode(device);
- + if (!path) {
- + av_log(avctx, AV_LOG_ERROR, "%s: get media device devnode failed\n", __func__);
- + ret = AVERROR(EINVAL);
- + goto fail;
- + }
- +
- + ctx->media_fd = open(path, O_RDWR, 0);
- + if (ctx->media_fd < 0) {
- + av_log(avctx, AV_LOG_ERROR, "%s: opening %s failed, %s (%d)\n", __func__, path, strerror(errno), errno);
- + ret = AVERROR(EINVAL);
- + goto fail;
- + }
- +
- + ret = ioctl(ctx->media_fd, MEDIA_IOC_DEVICE_INFO, &device_info);
- + if (ret < 0) {
- + av_log(avctx, AV_LOG_ERROR, "%s: get media device info failed, %s (%d)\n", __func__, strerror(errno), errno);
- + ret = AVERROR(EINVAL);
- + goto fail;
- + }
- +
- + av_log(avctx, AV_LOG_DEBUG, "%s: avctx=%p ctx=%p path=%s driver=%s\n", __func__, avctx, ctx, path, device_info.driver);
- +
- + ret = ioctl(ctx->media_fd, MEDIA_IOC_G_TOPOLOGY, &topology);
- + if (ret < 0) {
- + av_log(avctx, AV_LOG_ERROR, "%s: get media topology failed, %s (%d)\n", __func__, strerror(errno), errno);
- + ret = AVERROR(EINVAL);
- + goto fail;
- + }
- +
- + if (topology.num_interfaces <= 0) {
- + av_log(avctx, AV_LOG_ERROR, "%s: media device has no interfaces\n", __func__);
- + ret = AVERROR(EINVAL);
- + goto fail;
- + }
- +
- + interfaces = av_mallocz(topology.num_interfaces * sizeof(struct media_v2_interface));
- + if (!interfaces) {
- + av_log(avctx, AV_LOG_ERROR, "%s: allocating media interface struct failed\n", __func__);
- + ret = AVERROR(ENOMEM);
- + goto fail;
- + }
- +
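- + // second MEDIA_IOC_G_TOPOLOGY call fills the interface array now that the count is known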
- + topology.ptr_interfaces = (__u64)(uintptr_t)interfaces;
- + ret = ioctl(ctx->media_fd, MEDIA_IOC_G_TOPOLOGY, &topology);
- + if (ret < 0) {
- + av_log(avctx, AV_LOG_ERROR, "%s: get media topology failed, %s (%d)\n", __func__, strerror(errno), errno);
- + ret = AVERROR(EINVAL);
- + goto fail;
- + }
- +
- + ret = AVERROR(EINVAL);
- + for (int i = 0; i < topology.num_interfaces; i++) {
- + if (interfaces[i].intf_type != MEDIA_INTF_T_V4L_VIDEO)
- + continue;
- +
- + devnum = makedev(interfaces[i].devnode.major, interfaces[i].devnode.minor);
- + video_device = udev_device_new_from_devnum(udev, 'c', devnum);
- + if (!video_device) {
- + av_log(avctx, AV_LOG_ERROR, "%s: video_device=%p\n", __func__, video_device);
- + continue;
- + }
- +
- + ret = v4l2_request_probe_video_device(video_device, avctx, pixelformat, buffersize, control, count);
- + udev_device_unref(video_device);
- +
- + if (!ret)
- + break;
- + }
- +
- + av_freep(&interfaces);
- + return ret;
- +
- +fail:
- + av_freep(&interfaces);
- + if (ctx->media_fd >= 0) {
- + close(ctx->media_fd);
- + ctx->media_fd = -1;
- + }
- + return ret;
- +}
- +
- +int ff_v4l2_request_init(AVCodecContext *avctx, uint32_t pixelformat, uint32_t buffersize, struct v4l2_ext_control *control, int count)
- +{
- + V4L2RequestContext *ctx = avctx->internal->hwaccel_priv_data;
- + int ret = AVERROR(EINVAL);
- + struct udev *udev;
- + struct udev_enumerate *enumerate;
- + struct udev_list_entry *devices;
- + struct udev_list_entry *entry;
- + struct udev_device *device;
- +
- + av_log(avctx, AV_LOG_DEBUG, "%s: avctx=%p hw_device_ctx=%p hw_frames_ctx=%p\n", __func__, avctx, avctx->hw_device_ctx, avctx->hw_frames_ctx);
- +
- + ctx->media_fd = -1;
- + ctx->video_fd = -1;
- + ctx->timestamp = 0;
- +
- + udev = udev_new();
- + if (!udev) {
- + av_log(avctx, AV_LOG_ERROR, "%s: allocating udev context failed\n", __func__);
- + ret = AVERROR(ENOMEM);
- + goto fail;
- + }
- +
- + enumerate = udev_enumerate_new(udev);
- + if (!enumerate) {
- + av_log(avctx, AV_LOG_ERROR, "%s: allocating udev enumerator failed\n", __func__);
- + ret = AVERROR(ENOMEM);
- + goto fail;
- + }
- +
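- + // enumerate all media controller devices and probe each one for a usable stateless decoder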
- + udev_enumerate_add_match_subsystem(enumerate, "media");
- + udev_enumerate_scan_devices(enumerate);
- +
- + devices = udev_enumerate_get_list_entry(enumerate);
- + udev_list_entry_foreach(entry, devices) {
- + const char *path = udev_list_entry_get_name(entry);
- + if (!path)
- + continue;
- +
- + device = udev_device_new_from_syspath(udev, path);
- + if (!device)
- + continue;
- +
- + ret = v4l2_request_probe_media_device(device, avctx, pixelformat, buffersize, control, count);
- + udev_device_unref(device);
- +
- + if (!ret)
- + break;
- + }
- +
- + udev_enumerate_unref(enumerate);
- +
- + if (!ret)
- + ret = v4l2_request_init_context(avctx);
- +
- +fail:
- + udev_unref(udev);
- + return ret;
- +}
- +
- +int ff_v4l2_request_uninit(AVCodecContext *avctx)
- +{
- + V4L2RequestContext *ctx = avctx->internal->hwaccel_priv_data;
- + int ret;
- +
- + av_log(avctx, AV_LOG_DEBUG, "%s: avctx=%p ctx=%p\n", __func__, avctx, ctx);
- +
- + if (ctx->video_fd >= 0) {
- + ret = ioctl(ctx->video_fd, VIDIOC_STREAMOFF, &ctx->output_type);
- + if (ret < 0)
- + av_log(avctx, AV_LOG_ERROR, "%s: output stream off failed, %s (%d)\n", __func__, strerror(errno), errno);
- +
- + ret = ioctl(ctx->video_fd, VIDIOC_STREAMOFF, &ctx->format.type);
- + if (ret < 0)
- + av_log(avctx, AV_LOG_ERROR, "%s: capture stream off failed, %s (%d)\n", __func__, strerror(errno), errno);
- + }
- +
- + if (avctx->hw_frames_ctx) {
- + AVHWFramesContext *hwfc = (AVHWFramesContext*)avctx->hw_frames_ctx->data;
- + av_buffer_pool_flush(hwfc->pool);
- + }
- +
- + if (ctx->video_fd >= 0)
- + close(ctx->video_fd);
- +
- + if (ctx->media_fd >= 0)
- + close(ctx->media_fd);
- +
- + return 0;
- +}
- +
- +static int v4l2_request_buffer_alloc(AVCodecContext *avctx, V4L2RequestBuffer *buf, enum v4l2_buf_type type)
- +{
- + V4L2RequestContext *ctx = avctx->internal->hwaccel_priv_data;
- + int ret;
- + struct v4l2_plane planes[1] = {};
- + struct v4l2_create_buffers buffers = {
- + .count = 1,
- + .memory = V4L2_MEMORY_MMAP,
- + .format.type = type,
- + };
- +
- + av_log(avctx, AV_LOG_DEBUG, "%s: avctx=%p buf=%p type=%u\n", __func__, avctx, buf, type);
- +
- + ret = ioctl(ctx->video_fd, VIDIOC_G_FMT, &buffers.format);
- + if (ret < 0) {
- + av_log(avctx, AV_LOG_ERROR, "%s: get format failed for type %u, %s (%d)\n", __func__, type, strerror(errno), errno);
- + return ret;
- + }
- +
- + if (V4L2_TYPE_IS_MULTIPLANAR(buffers.format.type)) {
- + av_log(avctx, AV_LOG_DEBUG, "%s: pixelformat=%d width=%u height=%u bytesperline=%u sizeimage=%u num_planes=%u\n", __func__, buffers.format.fmt.pix_mp.pixelformat, buffers.format.fmt.pix_mp.width, buffers.format.fmt.pix_mp.height, buffers.format.fmt.pix_mp.plane_fmt[0].bytesperline, buffers.format.fmt.pix_mp.plane_fmt[0].sizeimage, buffers.format.fmt.pix_mp.num_planes);
- + } else {
- + av_log(avctx, AV_LOG_DEBUG, "%s: pixelformat=%d width=%u height=%u bytesperline=%u sizeimage=%u\n", __func__, buffers.format.fmt.pix.pixelformat, buffers.format.fmt.pix.width, buffers.format.fmt.pix.height, buffers.format.fmt.pix.bytesperline, buffers.format.fmt.pix.sizeimage);
- + }
- +
- + ret = ioctl(ctx->video_fd, VIDIOC_CREATE_BUFS, &buffers);
- + if (ret < 0) {
- + av_log(avctx, AV_LOG_ERROR, "%s: create buffers failed for type %u, %s (%d)\n", __func__, type, strerror(errno), errno);
- + return ret;
- + }
- +
- + if (V4L2_TYPE_IS_MULTIPLANAR(type)) {
- + buf->width = buffers.format.fmt.pix_mp.width;
- + buf->height = buffers.format.fmt.pix_mp.height;
- + buf->size = buffers.format.fmt.pix_mp.plane_fmt[0].sizeimage;
- + buf->buffer.length = 1;
- + buf->buffer.m.planes = planes;
- + } else {
- + buf->width = buffers.format.fmt.pix.width;
- + buf->height = buffers.format.fmt.pix.height;
- + buf->size = buffers.format.fmt.pix.sizeimage;
- + }
- +
- + buf->index = buffers.index;
- + buf->capabilities = buffers.capabilities;
- + buf->used = 0;
- +
- + buf->buffer.type = type;
- + buf->buffer.memory = V4L2_MEMORY_MMAP;
- + buf->buffer.index = buf->index;
- +
- + ret = ioctl(ctx->video_fd, VIDIOC_QUERYBUF, &buf->buffer);
- + if (ret < 0) {
- + av_log(avctx, AV_LOG_ERROR, "%s: query buffer %d failed, %s (%d)\n", __func__, buf->index, strerror(errno), errno);
- + return ret;
- + }
- +
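- + // OUTPUT buffers are mmap'ed for writing bitstream data, CAPTURE buffers are exported as dmabuf for DRM PRIME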
- + if (V4L2_TYPE_IS_OUTPUT(type)) {
- + void *addr = mmap(NULL, buf->size, PROT_READ | PROT_WRITE, MAP_SHARED, ctx->video_fd, V4L2_TYPE_IS_MULTIPLANAR(type) ? buf->buffer.m.planes[0].m.mem_offset : buf->buffer.m.offset);
- + if (addr == MAP_FAILED) {
- + av_log(avctx, AV_LOG_ERROR, "%s: mmap failed, %s (%d)\n", __func__, strerror(errno), errno);
- + return -1;
- + }
- +
- + buf->addr = (uint8_t*)addr;
- + } else {
- + struct v4l2_exportbuffer exportbuffer = {
- + .type = type,
- + .index = buf->index,
- + .flags = O_RDONLY,
- + };
- +
- + ret = ioctl(ctx->video_fd, VIDIOC_EXPBUF, &exportbuffer);
- + if (ret < 0) {
- + av_log(avctx, AV_LOG_ERROR, "%s: export buffer %d failed, %s (%d)\n", __func__, buf->index, strerror(errno), errno);
- + return ret;
- + }
- +
- + buf->fd = exportbuffer.fd;
- + }
- +
- + av_log(avctx, AV_LOG_DEBUG, "%s: buf=%p index=%d fd=%d addr=%p width=%u height=%u size=%u\n", __func__, buf, buf->index, buf->fd, buf->addr, buf->width, buf->height, buf->size);
- + return 0;
- +}
- +
- +static void v4l2_request_buffer_free(V4L2RequestBuffer *buf)
- +{
- + av_log(NULL, AV_LOG_DEBUG, "%s: buf=%p index=%d fd=%d addr=%p width=%u height=%u size=%u\n", __func__, buf, buf->index, buf->fd, buf->addr, buf->width, buf->height, buf->size);
- +
- + if (buf->addr)
- + munmap(buf->addr, buf->size);
- +
- + if (buf->fd >= 0)
- + close(buf->fd);
- +}
- +
- +static void v4l2_request_frame_free(void *opaque, uint8_t *data)
- +{
- + AVCodecContext *avctx = opaque;
- + V4L2RequestDescriptor *req = (V4L2RequestDescriptor*)data;
- +
- + av_log(NULL, AV_LOG_DEBUG, "%s: avctx=%p data=%p request_fd=%d\n", __func__, avctx, data, req->request_fd);
- +
- + if (req->request_fd >= 0)
- + close(req->request_fd);
- +
- + v4l2_request_buffer_free(&req->capture);
- + v4l2_request_buffer_free(&req->output);
- +
- + av_free(data);
- +}
- +
- +static AVBufferRef *v4l2_request_frame_alloc(void *opaque, int size)
- +{
- + AVCodecContext *avctx = opaque;
- + V4L2RequestContext *ctx = avctx->internal->hwaccel_priv_data;
- + V4L2RequestDescriptor *req;
- + AVBufferRef *ref;
- + uint8_t *data;
- + int ret;
- +
- + data = av_mallocz(size);
- + if (!data)
- + return NULL;
- +
- + av_log(avctx, AV_LOG_DEBUG, "%s: avctx=%p size=%d data=%p\n", __func__, avctx, size, data);
- +
- + ref = av_buffer_create(data, size, v4l2_request_frame_free, avctx, 0);
- + if (!ref) {
- + av_freep(&data);
- + return NULL;
- + }
- +
- + req = (V4L2RequestDescriptor*)data;
- + req->request_fd = -1;
- + req->output.fd = -1;
- + req->capture.fd = -1;
- +
- + ret = v4l2_request_buffer_alloc(avctx, &req->output, ctx->output_type);
- + if (ret < 0) {
- + av_buffer_unref(&ref);
- + return NULL;
- + }
- +
- + ret = v4l2_request_buffer_alloc(avctx, &req->capture, ctx->format.type);
- + if (ret < 0) {
- + av_buffer_unref(&ref);
- + return NULL;
- + }
- +
- + ret = ioctl(ctx->media_fd, MEDIA_IOC_REQUEST_ALLOC, &req->request_fd);
- + if (ret < 0) {
- + av_log(avctx, AV_LOG_ERROR, "%s: request alloc failed, %s (%d)\n", __func__, strerror(errno), errno);
- + av_buffer_unref(&ref);
- + return NULL;
- + }
- +
- + av_log(avctx, AV_LOG_DEBUG, "%s: avctx=%p size=%d data=%p request_fd=%d\n", __func__, avctx, size, data, req->request_fd);
- + return ref;
- +}
- +
- +static void v4l2_request_pool_free(void *opaque)
- +{
- + av_log(NULL, AV_LOG_DEBUG, "%s: opaque=%p\n", __func__, opaque);
- +}
- +
- +static void v4l2_request_hwframe_ctx_free(AVHWFramesContext *hwfc)
- +{
- + av_log(NULL, AV_LOG_DEBUG, "%s: hwfc=%p pool=%p\n", __func__, hwfc, hwfc->pool);
- +
- + av_buffer_pool_flush(hwfc->pool);
- + av_buffer_pool_uninit(&hwfc->pool);
- +}
- +
- +int ff_v4l2_request_frame_params(AVCodecContext *avctx, AVBufferRef *hw_frames_ctx)
- +{
- + V4L2RequestContext *ctx = avctx->internal->hwaccel_priv_data;
- + AVHWFramesContext *hwfc = (AVHWFramesContext*)hw_frames_ctx->data;
- +
- + hwfc->format = AV_PIX_FMT_DRM_PRIME;
- + hwfc->sw_format = AV_PIX_FMT_NV12;
- + if (V4L2_TYPE_IS_MULTIPLANAR(ctx->format.type)) {
- + hwfc->width = ctx->format.fmt.pix_mp.width;
- + hwfc->height = ctx->format.fmt.pix_mp.height;
- + } else {
- + hwfc->width = ctx->format.fmt.pix.width;
- + hwfc->height = ctx->format.fmt.pix.height;
- + }
- +
- + hwfc->pool = av_buffer_pool_init2(sizeof(V4L2RequestDescriptor), avctx, v4l2_request_frame_alloc, v4l2_request_pool_free);
- + if (!hwfc->pool)
- + return AVERROR(ENOMEM);
- +
- + hwfc->free = v4l2_request_hwframe_ctx_free;
- +
- + hwfc->initial_pool_size = 1;
- +
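- + // add extra buffers to cover the reference frames each codec may keep (up to 8 for VP9, 3 for VP8)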
- + switch (avctx->codec_id) {
- + case AV_CODEC_ID_VP9:
- + hwfc->initial_pool_size += 8;
- + break;
- + case AV_CODEC_ID_VP8:
- + hwfc->initial_pool_size += 3;
- + break;
- + default:
- + hwfc->initial_pool_size += 2;
- + }
- +
- + av_log(avctx, AV_LOG_DEBUG, "%s: avctx=%p ctx=%p hw_frames_ctx=%p hwfc=%p pool=%p width=%d height=%d initial_pool_size=%d\n", __func__, avctx, ctx, hw_frames_ctx, hwfc, hwfc->pool, hwfc->width, hwfc->height, hwfc->initial_pool_size);
- +
- + return 0;
- +}
- diff -uNr '--exclude=.git' '--exclude=sunxi' ffmpeg_n4.2.2/libavcodec/v4l2_request.h ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/v4l2_request.h
- --- ffmpeg_n4.2.2/libavcodec/v4l2_request.h 1969-12-31 16:00:00.000000000 -0800
- +++ ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/v4l2_request.h 2020-05-26 03:16:39.653173493 -0700
- @@ -0,0 +1,79 @@
- +/*
- + * This file is part of FFmpeg.
- + *
- + * FFmpeg is free software; you can redistribute it and/or
- + * modify it under the terms of the GNU Lesser General Public
- + * License as published by the Free Software Foundation; either
- + * version 2.1 of the License, or (at your option) any later version.
- + *
- + * FFmpeg is distributed in the hope that it will be useful,
- + * but WITHOUT ANY WARRANTY; without even the implied warranty of
- + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- + * Lesser General Public License for more details.
- + *
- + * You should have received a copy of the GNU Lesser General Public
- + * License along with FFmpeg; if not, write to the Free Software
- + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
- + */
- +
- +#ifndef AVCODEC_V4L2_REQUEST_H
- +#define AVCODEC_V4L2_REQUEST_H
- +
- +#include <linux/videodev2.h>
- +
- +#include "libavutil/hwcontext_drm.h"
- +
- +typedef struct V4L2RequestContext {
- + int video_fd;
- + int media_fd;
- + enum v4l2_buf_type output_type;
- + struct v4l2_format format;
- + int timestamp;
- +} V4L2RequestContext;
- +
- +typedef struct V4L2RequestBuffer {
- + int index;
- + int fd;
- + uint8_t *addr;
- + uint32_t width;
- + uint32_t height;
- + uint32_t size;
- + uint32_t used;
- + uint32_t capabilities;
- + struct v4l2_buffer buffer;
- +} V4L2RequestBuffer;
- +
- +typedef struct V4L2RequestDescriptor {
- + AVDRMFrameDescriptor drm;
- + int request_fd;
- + V4L2RequestBuffer output;
- + V4L2RequestBuffer capture;
- +} V4L2RequestDescriptor;
- +
- +uint64_t ff_v4l2_request_get_capture_timestamp(AVFrame *frame);
- +
- +int ff_v4l2_request_reset_frame(AVCodecContext *avctx, AVFrame *frame);
- +
- +int ff_v4l2_request_append_output_buffer(AVCodecContext *avctx, AVFrame *frame, const uint8_t *data, uint32_t size);
- +
- +int ff_v4l2_request_set_controls(AVCodecContext *avctx, struct v4l2_ext_control *control, int count);
- +
- +int ff_v4l2_request_get_controls(AVCodecContext *avctx, struct v4l2_ext_control *control, int count);
- +
- +int ff_v4l2_request_query_control(AVCodecContext *avctx, struct v4l2_query_ext_ctrl *control);
- +
- +int ff_v4l2_request_query_control_default_value(AVCodecContext *avctx, uint32_t id);
- +
- +int ff_v4l2_request_decode_slice(AVCodecContext *avctx, AVFrame *frame, struct v4l2_ext_control *control, int count, int first_slice, int last_slice);
- +
- +int ff_v4l2_request_decode_frame(AVCodecContext *avctx, AVFrame *frame, struct v4l2_ext_control *control, int count);
- +
- +int ff_v4l2_request_output_frame(AVCodecContext *avctx, AVFrame *frame);
- +
- +int ff_v4l2_request_init(AVCodecContext *avctx, uint32_t pixelformat, uint32_t buffersize, struct v4l2_ext_control *control, int count);
- +
- +int ff_v4l2_request_uninit(AVCodecContext *avctx);
- +
- +int ff_v4l2_request_frame_params(AVCodecContext *avctx, AVBufferRef *hw_frames_ctx);
- +
- +#endif /* AVCODEC_V4L2_REQUEST_H */
- diff -uNr '--exclude=.git' '--exclude=sunxi' ffmpeg_n4.2.2/libavcodec/v4l2_request_h264.c ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/v4l2_request_h264.c
- --- ffmpeg_n4.2.2/libavcodec/v4l2_request_h264.c 1969-12-31 16:00:00.000000000 -0800
- +++ ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/v4l2_request_h264.c 2020-05-26 03:16:39.653173493 -0700
- @@ -0,0 +1,467 @@
- +/*
- + * This file is part of FFmpeg.
- + *
- + * FFmpeg is free software; you can redistribute it and/or
- + * modify it under the terms of the GNU Lesser General Public
- + * License as published by the Free Software Foundation; either
- + * version 2.1 of the License, or (at your option) any later version.
- + *
- + * FFmpeg is distributed in the hope that it will be useful,
- + * but WITHOUT ANY WARRANTY; without even the implied warranty of
- + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- + * Lesser General Public License for more details.
- + *
- + * You should have received a copy of the GNU Lesser General Public
- + * License along with FFmpeg; if not, write to the Free Software
- + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
- + */
- +
- +#include "h264dec.h"
- +#include "hwaccel.h"
- +#include "v4l2_request.h"
- +#include "h264-ctrls.h"
- +
- +typedef struct V4L2RequestControlsH264 {
- + struct v4l2_ctrl_h264_sps sps;
- + struct v4l2_ctrl_h264_pps pps;
- + struct v4l2_ctrl_h264_scaling_matrix scaling_matrix;
- + struct v4l2_ctrl_h264_decode_params decode_params;
- + struct v4l2_ctrl_h264_slice_params slice_params[MAX_SLICES];
- + int first_slice;
- +} V4L2RequestControlsH264;
- +
- +typedef struct V4L2RequestContextH264 {
- + V4L2RequestContext base;
- + int decode_mode;
- + int start_code;
- + int max_slices;
- +} V4L2RequestContextH264;
- +
- +static uint8_t nalu_slice_start_code[] = { 0x00, 0x00, 0x01 };
- +
- +static void fill_weight_factors(struct v4l2_h264_weight_factors *factors, int list, const H264SliceContext *sl)
- +{
- + for (int i = 0; i < sl->ref_count[list]; i++) {
- + if (sl->pwt.luma_weight_flag[list]) {
- + factors->luma_weight[i] = sl->pwt.luma_weight[i][list][0];
- + factors->luma_offset[i] = sl->pwt.luma_weight[i][list][1];
- + } else {
- + factors->luma_weight[i] = 1 << sl->pwt.luma_log2_weight_denom;
- + factors->luma_offset[i] = 0;
- + }
- + for (int j = 0; j < 2; j++) {
- + if (sl->pwt.chroma_weight_flag[list]) {
- + factors->chroma_weight[i][j] = sl->pwt.chroma_weight[i][list][j][0];
- + factors->chroma_offset[i][j] = sl->pwt.chroma_weight[i][list][j][1];
- + } else {
- + factors->chroma_weight[i][j] = 1 << sl->pwt.chroma_log2_weight_denom;
- + factors->chroma_offset[i][j] = 0;
- + }
- + }
- + }
- +}
- +
- +static void fill_dpb_entry(struct v4l2_h264_dpb_entry *entry, const H264Picture *pic)
- +{
- + entry->reference_ts = ff_v4l2_request_get_capture_timestamp(pic->f);
- + entry->frame_num = pic->frame_num;
- + entry->pic_num = pic->pic_id;
- + entry->flags = V4L2_H264_DPB_ENTRY_FLAG_VALID;
- + if (pic->reference & PICT_TOP_FIELD)
- + entry->flags |= V4L2_H264_DPB_ENTRY_FLAG_TOP_REF;
- + if (pic->reference & PICT_BOTTOM_FIELD)
- + entry->flags |= V4L2_H264_DPB_ENTRY_FLAG_BOTTOM_REF;
- + if (pic->long_ref)
- + entry->flags |= V4L2_H264_DPB_ENTRY_FLAG_LONG_TERM;
- + if (pic->field_picture)
- + entry->flags |= V4L2_H264_DPB_ENTRY_FLAG_FIELD_PIC;
- + if (pic->field_poc[0] != INT_MAX)
- + entry->top_field_order_cnt = pic->field_poc[0];
- + if (pic->field_poc[1] != INT_MAX)
- + entry->bottom_field_order_cnt = pic->field_poc[1];
- +}
- +
- +static void fill_dpb(struct v4l2_ctrl_h264_decode_params *decode, const H264Context *h)
- +{
- + int entries = 0;
- +
- + for (int i = 0; i < h->short_ref_count; i++) {
- + const H264Picture *pic = h->short_ref[i];
- + if (pic && (pic->field_poc[0] != INT_MAX || pic->field_poc[1] != INT_MAX))
- + fill_dpb_entry(&decode->dpb[entries++], pic);
- + }
- +
- + if (!h->long_ref_count)
- + return;
- +
- + for (int i = 0; i < FF_ARRAY_ELEMS(h->long_ref); i++) {
- + const H264Picture *pic = h->long_ref[i];
- + if (pic && (pic->field_poc[0] != INT_MAX || pic->field_poc[1] != INT_MAX))
- + fill_dpb_entry(&decode->dpb[entries++], pic);
- + }
- +}
- +
- +static uint8_t get_dpb_index(struct v4l2_ctrl_h264_decode_params *decode, const H264Ref *ref)
- +{
- + uint64_t timestamp;
- +
- + if (!ref->parent)
- + return 0;
- +
- + timestamp = ff_v4l2_request_get_capture_timestamp(ref->parent->f);
- +
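- + // match the reference picture against DPB entries by its capture buffer timestamp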
- + for (uint8_t i = 0; i < FF_ARRAY_ELEMS(decode->dpb); i++) {
- + struct v4l2_h264_dpb_entry *entry = &decode->dpb[i];
- + if ((entry->flags & V4L2_H264_DPB_ENTRY_FLAG_VALID) &&
- + entry->reference_ts == timestamp)
- + // TODO: signal reference type, possibly using the top 2 bits
- + return i | ((ref->reference & 3) << 6);
- + }
- +
- + return 0;
- +}
- +
- +static void fill_sps(struct v4l2_ctrl_h264_sps *ctrl, const H264Context *h)
- +{
- + const SPS *sps = h->ps.sps;
- +
- + *ctrl = (struct v4l2_ctrl_h264_sps) {
- + .profile_idc = sps->profile_idc,
- + .constraint_set_flags = sps->constraint_set_flags,
- + .level_idc = sps->level_idc,
- + .seq_parameter_set_id = sps->sps_id,
- + .chroma_format_idc = sps->chroma_format_idc,
- + .bit_depth_luma_minus8 = sps->bit_depth_luma - 8,
- + .bit_depth_chroma_minus8 = sps->bit_depth_chroma - 8,
- + .log2_max_frame_num_minus4 = sps->log2_max_frame_num - 4,
- + .pic_order_cnt_type = sps->poc_type,
- + .log2_max_pic_order_cnt_lsb_minus4 = sps->log2_max_poc_lsb - 4,
- + .max_num_ref_frames = sps->ref_frame_count,
- + .num_ref_frames_in_pic_order_cnt_cycle = sps->poc_cycle_length,
- + //.offset_for_ref_frame[255] - not required? not set by libva-v4l2-request - copy sps->offset_for_ref_frame
- + .offset_for_non_ref_pic = sps->offset_for_non_ref_pic,
- + .offset_for_top_to_bottom_field = sps->offset_for_top_to_bottom_field,
- + .pic_width_in_mbs_minus1 = h->mb_width - 1,
- + .pic_height_in_map_units_minus1 = sps->frame_mbs_only_flag ? h->mb_height - 1 : h->mb_height / 2 - 1,
- + };
- +
- + if (sps->residual_color_transform_flag)
- + ctrl->flags |= V4L2_H264_SPS_FLAG_SEPARATE_COLOUR_PLANE;
- + if (sps->transform_bypass)
- + ctrl->flags |= V4L2_H264_SPS_FLAG_QPPRIME_Y_ZERO_TRANSFORM_BYPASS;
- + if (sps->delta_pic_order_always_zero_flag)
- + ctrl->flags |= V4L2_H264_SPS_FLAG_DELTA_PIC_ORDER_ALWAYS_ZERO;
- + if (sps->gaps_in_frame_num_allowed_flag)
- + ctrl->flags |= V4L2_H264_SPS_FLAG_GAPS_IN_FRAME_NUM_VALUE_ALLOWED;
- + if (sps->frame_mbs_only_flag)
- + ctrl->flags |= V4L2_H264_SPS_FLAG_FRAME_MBS_ONLY;
- + if (sps->mb_aff)
- + ctrl->flags |= V4L2_H264_SPS_FLAG_MB_ADAPTIVE_FRAME_FIELD;
- + if (sps->direct_8x8_inference_flag)
- + ctrl->flags |= V4L2_H264_SPS_FLAG_DIRECT_8X8_INFERENCE;
- +}
- +
- +static void fill_pps(struct v4l2_ctrl_h264_pps *ctrl, const H264Context *h)
- +{
- + const SPS *sps = h->ps.sps;
- + const PPS *pps = h->ps.pps;
- + const H264SliceContext *sl = &h->slice_ctx[0];
- + int qp_bd_offset = 6 * (sps->bit_depth_luma - 8);
- +
- + *ctrl = (struct v4l2_ctrl_h264_pps) {
- + .pic_parameter_set_id = sl->pps_id,
- + .seq_parameter_set_id = pps->sps_id,
- + .num_slice_groups_minus1 = pps->slice_group_count - 1,
- + .num_ref_idx_l0_default_active_minus1 = pps->ref_count[0] - 1,
- + .num_ref_idx_l1_default_active_minus1 = pps->ref_count[1] - 1,
- + .weighted_bipred_idc = pps->weighted_bipred_idc,
- + .pic_init_qp_minus26 = pps->init_qp - 26 - qp_bd_offset,
- + .pic_init_qs_minus26 = pps->init_qs - 26 - qp_bd_offset,
- + .chroma_qp_index_offset = pps->chroma_qp_index_offset[0],
- + .second_chroma_qp_index_offset = pps->chroma_qp_index_offset[1],
- + };
- +
- + if (pps->cabac)
- + ctrl->flags |= V4L2_H264_PPS_FLAG_ENTROPY_CODING_MODE;
- + if (pps->pic_order_present)
- + ctrl->flags |= V4L2_H264_PPS_FLAG_BOTTOM_FIELD_PIC_ORDER_IN_FRAME_PRESENT;
- + if (pps->weighted_pred)
- + ctrl->flags |= V4L2_H264_PPS_FLAG_WEIGHTED_PRED;
- + if (pps->deblocking_filter_parameters_present)
- + ctrl->flags |= V4L2_H264_PPS_FLAG_DEBLOCKING_FILTER_CONTROL_PRESENT;
- + if (pps->constrained_intra_pred)
- + ctrl->flags |= V4L2_H264_PPS_FLAG_CONSTRAINED_INTRA_PRED;
- + if (pps->redundant_pic_cnt_present)
- + ctrl->flags |= V4L2_H264_PPS_FLAG_REDUNDANT_PIC_CNT_PRESENT;
- + if (pps->transform_8x8_mode)
- + ctrl->flags |= V4L2_H264_PPS_FLAG_TRANSFORM_8X8_MODE;
- +}
- +
- +static int v4l2_request_h264_start_frame(AVCodecContext *avctx,
- + av_unused const uint8_t *buffer,
- + av_unused uint32_t size)
- +{
- + const H264Context *h = avctx->priv_data;
- + const PPS *pps = h->ps.pps;
- + const SPS *sps = h->ps.sps;
- + V4L2RequestControlsH264 *controls = h->cur_pic_ptr->hwaccel_picture_private;
- +
- + fill_sps(&controls->sps, h);
- + fill_pps(&controls->pps, h);
- +
- + memcpy(controls->scaling_matrix.scaling_list_4x4, pps->scaling_matrix4, sizeof(controls->scaling_matrix.scaling_list_4x4));
- + memcpy(controls->scaling_matrix.scaling_list_8x8[0], pps->scaling_matrix8[0], sizeof(controls->scaling_matrix.scaling_list_8x8[0]));
- + memcpy(controls->scaling_matrix.scaling_list_8x8[1], pps->scaling_matrix8[3], sizeof(controls->scaling_matrix.scaling_list_8x8[1]));
- +
- + if (sps->chroma_format_idc == 3) {
- + memcpy(controls->scaling_matrix.scaling_list_8x8[2], pps->scaling_matrix8[1], sizeof(controls->scaling_matrix.scaling_list_8x8[2]));
- + memcpy(controls->scaling_matrix.scaling_list_8x8[3], pps->scaling_matrix8[4], sizeof(controls->scaling_matrix.scaling_list_8x8[3]));
- + memcpy(controls->scaling_matrix.scaling_list_8x8[4], pps->scaling_matrix8[2], sizeof(controls->scaling_matrix.scaling_list_8x8[4]));
- + memcpy(controls->scaling_matrix.scaling_list_8x8[5], pps->scaling_matrix8[5], sizeof(controls->scaling_matrix.scaling_list_8x8[5]));
- + }
- +
- + controls->decode_params = (struct v4l2_ctrl_h264_decode_params) {
- + .num_slices = 0,
- + .nal_ref_idc = h->nal_ref_idc,
- + .top_field_order_cnt = h->cur_pic_ptr->field_poc[0] != INT_MAX ? h->cur_pic_ptr->field_poc[0] : 0,
- + .bottom_field_order_cnt = h->cur_pic_ptr->field_poc[1] != INT_MAX ? h->cur_pic_ptr->field_poc[1] : 0,
- + };
- +
- + if (h->picture_idr)
- + controls->decode_params.flags |= V4L2_H264_DECODE_PARAM_FLAG_IDR_PIC;
- +
- + fill_dpb(&controls->decode_params, h);
- +
- + controls->first_slice = !FIELD_PICTURE(h) || h->first_field;
- +
- + return ff_v4l2_request_reset_frame(avctx, h->cur_pic_ptr->f);
- +}
- +
- +static int v4l2_request_h264_queue_decode(AVCodecContext *avctx, int last_slice)
- +{
- + const H264Context *h = avctx->priv_data;
- + V4L2RequestControlsH264 *controls = h->cur_pic_ptr->hwaccel_picture_private;
- + V4L2RequestContextH264 *ctx = avctx->internal->hwaccel_priv_data;
- +
- + struct v4l2_ext_control control[] = {
- + {
- + .id = V4L2_CID_MPEG_VIDEO_H264_SPS,
- + .ptr = &controls->sps,
- + .size = sizeof(controls->sps),
- + },
- + {
- + .id = V4L2_CID_MPEG_VIDEO_H264_PPS,
- + .ptr = &controls->pps,
- + .size = sizeof(controls->pps),
- + },
- + {
- + .id = V4L2_CID_MPEG_VIDEO_H264_SCALING_MATRIX,
- + .ptr = &controls->scaling_matrix,
- + .size = sizeof(controls->scaling_matrix),
- + },
- + {
- + .id = V4L2_CID_MPEG_VIDEO_H264_SLICE_PARAMS,
- + .ptr = &controls->slice_params,
- + .size = sizeof(controls->slice_params[0]) * FFMAX(FFMIN(controls->decode_params.num_slices, MAX_SLICES), ctx->max_slices),
- + },
- + {
- + .id = V4L2_CID_MPEG_VIDEO_H264_DECODE_PARAMS,
- + .ptr = &controls->decode_params,
- + .size = sizeof(controls->decode_params),
- + },
- + };
- +
- + if (ctx->decode_mode == V4L2_MPEG_VIDEO_H264_DECODE_MODE_SLICE_BASED)
- + return ff_v4l2_request_decode_slice(avctx, h->cur_pic_ptr->f, control, FF_ARRAY_ELEMS(control), controls->first_slice, last_slice);
- +
- + return ff_v4l2_request_decode_frame(avctx, h->cur_pic_ptr->f, control, FF_ARRAY_ELEMS(control));
- +}
- +
- +static int v4l2_request_h264_decode_slice(AVCodecContext *avctx, const uint8_t *buffer, uint32_t size)
- +{
- + const H264Context *h = avctx->priv_data;
- + const PPS *pps = h->ps.pps;
- + const H264SliceContext *sl = &h->slice_ctx[0];
- + V4L2RequestControlsH264 *controls = h->cur_pic_ptr->hwaccel_picture_private;
- + V4L2RequestContextH264 *ctx = avctx->internal->hwaccel_priv_data;
- + V4L2RequestDescriptor *req = (V4L2RequestDescriptor*)h->cur_pic_ptr->f->data[0];
- + int i, ret, count, slice = FFMIN(controls->decode_params.num_slices, MAX_SLICES - 1);
- +
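- + // in slice-based mode, submit any previously accumulated slice before starting a new one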
- + if (ctx->decode_mode == V4L2_MPEG_VIDEO_H264_DECODE_MODE_SLICE_BASED && slice) {
- + ret = v4l2_request_h264_queue_decode(avctx, 0);
- + if (ret)
- + return ret;
- +
- + ff_v4l2_request_reset_frame(avctx, h->cur_pic_ptr->f);
- + slice = controls->decode_params.num_slices = 0;
- + controls->first_slice = 0;
- + }
- +
- + controls->slice_params[slice] = (struct v4l2_ctrl_h264_slice_params) {
- + /* Size in bytes, including header */
- + .size = 0,
- + .start_byte_offset = req->output.used,
- + /* Offset in bits to slice_data() from the beginning of this slice. */
- + .header_bit_size = get_bits_count(&sl->gb),
- +
- + .first_mb_in_slice = sl->first_mb_addr,
- + .slice_type = ff_h264_get_slice_type(sl),
- + .pic_parameter_set_id = sl->pps_id,
- + .colour_plane_id = 0, /* what is this? */
- + .frame_num = h->poc.frame_num,
- + .idr_pic_id = sl->idr_pic_id,
- + .pic_order_cnt_lsb = sl->poc_lsb,
- + .delta_pic_order_cnt_bottom = sl->delta_poc_bottom,
- + .delta_pic_order_cnt0 = sl->delta_poc[0],
- + .delta_pic_order_cnt1 = sl->delta_poc[1],
- + .redundant_pic_cnt = sl->redundant_pic_count,
- +
- + /* Size in bits of dec_ref_pic_marking() syntax element. */
- + .dec_ref_pic_marking_bit_size = sl->ref_pic_marking_size_in_bits,
- + /* Size in bits of pic order count syntax. */
- + .pic_order_cnt_bit_size = sl->pic_order_cnt_bit_size,
- +
- + .cabac_init_idc = sl->cabac_init_idc,
- + .slice_qp_delta = sl->qscale - pps->init_qp,
- + .slice_qs_delta = 0, /* XXX not implemented by FFmpeg */
- + .disable_deblocking_filter_idc = sl->deblocking_filter < 2 ? !sl->deblocking_filter : sl->deblocking_filter,
- + .slice_alpha_c0_offset_div2 = sl->slice_alpha_c0_offset / 2,
- + .slice_beta_offset_div2 = sl->slice_beta_offset / 2,
- + .slice_group_change_cycle = 0, /* what is this? */
- +
- + .num_ref_idx_l0_active_minus1 = sl->list_count > 0 ? sl->ref_count[0] - 1 : 0,
- + .num_ref_idx_l1_active_minus1 = sl->list_count > 1 ? sl->ref_count[1] - 1 : 0,
- + };
- +
- + if (FIELD_PICTURE(h))
- + controls->slice_params[slice].flags |= V4L2_H264_SLICE_FLAG_FIELD_PIC;
- + if (h->picture_structure == PICT_BOTTOM_FIELD)
- + controls->slice_params[slice].flags |= V4L2_H264_SLICE_FLAG_BOTTOM_FIELD;
- + if (sl->slice_type == AV_PICTURE_TYPE_B && sl->direct_spatial_mv_pred)
- + controls->slice_params[slice].flags |= V4L2_H264_SLICE_FLAG_DIRECT_SPATIAL_MV_PRED;
- +
- + controls->slice_params[slice].pred_weight_table.chroma_log2_weight_denom = sl->pwt.chroma_log2_weight_denom;
- + controls->slice_params[slice].pred_weight_table.luma_log2_weight_denom = sl->pwt.luma_log2_weight_denom;
- +
- + count = sl->list_count > 0 ? sl->ref_count[0] : 0;
- + for (i = 0; i < count; i++)
- + controls->slice_params[slice].ref_pic_list0[i] = get_dpb_index(&controls->decode_params, &sl->ref_list[0][i]);
- + if (count)
- + fill_weight_factors(&controls->slice_params[slice].pred_weight_table.weight_factors[0], 0, sl);
- +
- + count = sl->list_count > 1 ? sl->ref_count[1] : 0;
- + for (i = 0; i < count; i++)
- + controls->slice_params[slice].ref_pic_list1[i] = get_dpb_index(&controls->decode_params, &sl->ref_list[1][i]);
- + if (count)
- + fill_weight_factors(&controls->slice_params[slice].pred_weight_table.weight_factors[1], 1, sl);
- +
- + if (ctx->start_code == V4L2_MPEG_VIDEO_H264_START_CODE_ANNEX_B) {
- + ret = ff_v4l2_request_append_output_buffer(avctx, h->cur_pic_ptr->f, nalu_slice_start_code, 3);
- + if (ret)
- + return ret;
- + }
- +
- + ret = ff_v4l2_request_append_output_buffer(avctx, h->cur_pic_ptr->f, buffer, size);
- + if (ret)
- + return ret;
- +
- + controls->slice_params[slice].size = req->output.used - controls->slice_params[slice].start_byte_offset;
- + controls->decode_params.num_slices++;
- + return 0;
- +}
- +
- +static int v4l2_request_h264_end_frame(AVCodecContext *avctx)
- +{
- + const H264Context *h = avctx->priv_data;
- + return v4l2_request_h264_queue_decode(avctx, !FIELD_PICTURE(h) || !h->first_field);
- +}
- +
- +static int v4l2_request_h264_set_controls(AVCodecContext *avctx)
- +{
- + V4L2RequestContextH264 *ctx = avctx->internal->hwaccel_priv_data;
- + int ret;
- +
- + struct v4l2_ext_control control[] = {
- + { .id = V4L2_CID_MPEG_VIDEO_H264_DECODE_MODE, },
- + { .id = V4L2_CID_MPEG_VIDEO_H264_START_CODE, },
- + };
- + struct v4l2_query_ext_ctrl slice_params = {
- + .id = V4L2_CID_MPEG_VIDEO_H264_SLICE_PARAMS,
- + };
- +
- + ctx->decode_mode = ff_v4l2_request_query_control_default_value(avctx, V4L2_CID_MPEG_VIDEO_H264_DECODE_MODE);
- + if (ctx->decode_mode != V4L2_MPEG_VIDEO_H264_DECODE_MODE_SLICE_BASED &&
- + ctx->decode_mode != V4L2_MPEG_VIDEO_H264_DECODE_MODE_FRAME_BASED) {
- + av_log(avctx, AV_LOG_ERROR, "%s: unsupported decode mode, %d\n", __func__, ctx->decode_mode);
- + return AVERROR(EINVAL);
- + }
- +
- + ctx->start_code = ff_v4l2_request_query_control_default_value(avctx, V4L2_CID_MPEG_VIDEO_H264_START_CODE);
- + if (ctx->start_code != V4L2_MPEG_VIDEO_H264_START_CODE_NONE &&
- + ctx->start_code != V4L2_MPEG_VIDEO_H264_START_CODE_ANNEX_B) {
- + av_log(avctx, AV_LOG_ERROR, "%s: unsupported start code, %d\n", __func__, ctx->start_code);
- + return AVERROR(EINVAL);
- + }
- +
- + ret = ff_v4l2_request_query_control(avctx, &slice_params);
- + if (ret)
- + return ret;
- +
- + ctx->max_slices = slice_params.elems;
- + if (ctx->max_slices > MAX_SLICES) {
- + av_log(avctx, AV_LOG_ERROR, "%s: unsupported max slices, %d\n", __func__, ctx->max_slices);
- + return AVERROR(EINVAL);
- + }
- +
- + control[0].value = ctx->decode_mode;
- + control[1].value = ctx->start_code;
- +
- + return ff_v4l2_request_set_controls(avctx, control, FF_ARRAY_ELEMS(control));
- +}
- +
- +static int v4l2_request_h264_init(AVCodecContext *avctx)
- +{
- + const H264Context *h = avctx->priv_data;
- + struct v4l2_ctrl_h264_sps sps;
- + struct v4l2_ctrl_h264_pps pps;
- + int ret;
- +
- + struct v4l2_ext_control control[] = {
- + {
- + .id = V4L2_CID_MPEG_VIDEO_H264_SPS,
- + .ptr = &sps,
- + .size = sizeof(sps),
- + },
- + {
- + .id = V4L2_CID_MPEG_VIDEO_H264_PPS,
- + .ptr = &pps,
- + .size = sizeof(pps),
- + },
- + };
- +
- + fill_sps(&sps, h);
- + fill_pps(&pps, h);
- +
- + ret = ff_v4l2_request_init(avctx, V4L2_PIX_FMT_H264_SLICE, 4 * 1024 * 1024, control, FF_ARRAY_ELEMS(control));
- + if (ret)
- + return ret;
- +
- + return v4l2_request_h264_set_controls(avctx);
- +}
- +
- +const AVHWAccel ff_h264_v4l2request_hwaccel = {
- + .name = "h264_v4l2request",
- + .type = AVMEDIA_TYPE_VIDEO,
- + .id = AV_CODEC_ID_H264,
- + .pix_fmt = AV_PIX_FMT_DRM_PRIME,
- + .start_frame = v4l2_request_h264_start_frame,
- + .decode_slice = v4l2_request_h264_decode_slice,
- + .end_frame = v4l2_request_h264_end_frame,
- + .output_frame = ff_v4l2_request_output_frame,
- + .frame_priv_data_size = sizeof(V4L2RequestControlsH264),
- + .init = v4l2_request_h264_init,
- + .uninit = ff_v4l2_request_uninit,
- + .priv_data_size = sizeof(V4L2RequestContextH264),
- + .frame_params = ff_v4l2_request_frame_params,
- + .caps_internal = HWACCEL_CAP_ASYNC_SAFE,
- +};
- diff -uNr '--exclude=.git' '--exclude=sunxi' ffmpeg_n4.2.2/libavcodec/v4l2_request_hevc.c ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/v4l2_request_hevc.c
- --- ffmpeg_n4.2.2/libavcodec/v4l2_request_hevc.c 1969-12-31 16:00:00.000000000 -0800
- +++ ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/v4l2_request_hevc.c 2020-05-26 03:16:39.653173493 -0700
- @@ -0,0 +1,581 @@
- +/*
- + * This file is part of FFmpeg.
- + *
- + * FFmpeg is free software; you can redistribute it and/or
- + * modify it under the terms of the GNU Lesser General Public
- + * License as published by the Free Software Foundation; either
- + * version 2.1 of the License, or (at your option) any later version.
- + *
- + * FFmpeg is distributed in the hope that it will be useful,
- + * but WITHOUT ANY WARRANTY; without even the implied warranty of
- + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- + * Lesser General Public License for more details.
- + *
- + * You should have received a copy of the GNU Lesser General Public
- + * License along with FFmpeg; if not, write to the Free Software
- + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
- + */
- +
- +#include "hevcdec.h"
- +#include "hwaccel.h"
- +#include "v4l2_request.h"
- +#include "hevc-ctrls.h"
- +
- +#define MAX_SLICES 16
- +
- +typedef struct V4L2RequestControlsHEVC {
- + struct v4l2_ctrl_hevc_sps sps;
- + struct v4l2_ctrl_hevc_pps pps;
- + struct v4l2_ctrl_hevc_scaling_matrix scaling_matrix;
- + struct v4l2_ctrl_hevc_slice_params slice_params[MAX_SLICES];
- + int first_slice;
- + int num_slices; //TODO: this should be in control
- +} V4L2RequestControlsHEVC;
- +
- +typedef struct V4L2RequestContextHEVC {
- + V4L2RequestContext base;
- + int decode_mode;
- + int start_code;
- + int max_slices;
- +} V4L2RequestContextHEVC;
- +
- +static uint8_t nalu_slice_start_code[] = { 0x00, 0x00, 0x01 };
- +
- +static void v4l2_request_hevc_fill_pred_table(const HEVCContext *h, struct v4l2_hevc_pred_weight_table *table)
- +{
- + int32_t luma_weight_denom, chroma_weight_denom;
- + const SliceHeader *sh = &h->sh;
- +
- + if (sh->slice_type == HEVC_SLICE_I ||
- + (sh->slice_type == HEVC_SLICE_P && !h->ps.pps->weighted_pred_flag) ||
- + (sh->slice_type == HEVC_SLICE_B && !h->ps.pps->weighted_bipred_flag))
- + return;
- +
- + table->luma_log2_weight_denom = sh->luma_log2_weight_denom;
- +
- + if (h->ps.sps->chroma_format_idc)
- + table->delta_chroma_log2_weight_denom = sh->chroma_log2_weight_denom - sh->luma_log2_weight_denom;
- +
- + luma_weight_denom = (1 << sh->luma_log2_weight_denom);
- + chroma_weight_denom = (1 << sh->chroma_log2_weight_denom);
- +
- + for (int i = 0; i < 15 && i < sh->nb_refs[L0]; i++) {
- + table->delta_luma_weight_l0[i] = sh->luma_weight_l0[i] - luma_weight_denom;
- + table->luma_offset_l0[i] = sh->luma_offset_l0[i];
- + table->delta_chroma_weight_l0[i][0] = sh->chroma_weight_l0[i][0] - chroma_weight_denom;
- + table->delta_chroma_weight_l0[i][1] = sh->chroma_weight_l0[i][1] - chroma_weight_denom;
- + table->chroma_offset_l0[i][0] = sh->chroma_offset_l0[i][0];
- + table->chroma_offset_l0[i][1] = sh->chroma_offset_l0[i][1];
- + }
- +
- + if (sh->slice_type != HEVC_SLICE_B)
- + return;
- +
- + for (int i = 0; i < 15 && i < sh->nb_refs[L1]; i++) {
- + table->delta_luma_weight_l1[i] = sh->luma_weight_l1[i] - luma_weight_denom;
- + table->luma_offset_l1[i] = sh->luma_offset_l1[i];
- + table->delta_chroma_weight_l1[i][0] = sh->chroma_weight_l1[i][0] - chroma_weight_denom;
- + table->delta_chroma_weight_l1[i][1] = sh->chroma_weight_l1[i][1] - chroma_weight_denom;
- + table->chroma_offset_l1[i][0] = sh->chroma_offset_l1[i][0];
- + table->chroma_offset_l1[i][1] = sh->chroma_offset_l1[i][1];
- + }
- +}
- +
- +static int find_frame_rps_type(const HEVCContext *h, uint64_t timestamp)
- +{
- + const HEVCFrame *frame;
- + int i;
- +
- + for (i = 0; i < h->rps[ST_CURR_BEF].nb_refs; i++) {
- + frame = h->rps[ST_CURR_BEF].ref[i];
- + if (frame && timestamp == ff_v4l2_request_get_capture_timestamp(frame->frame))
- + return V4L2_HEVC_DPB_ENTRY_RPS_ST_CURR_BEFORE;
- + }
- +
- + for (i = 0; i < h->rps[ST_CURR_AFT].nb_refs; i++) {
- + frame = h->rps[ST_CURR_AFT].ref[i];
- + if (frame && timestamp == ff_v4l2_request_get_capture_timestamp(frame->frame))
- + return V4L2_HEVC_DPB_ENTRY_RPS_ST_CURR_AFTER;
- + }
- +
- + for (i = 0; i < h->rps[LT_CURR].nb_refs; i++) {
- + frame = h->rps[LT_CURR].ref[i];
- + if (frame && timestamp == ff_v4l2_request_get_capture_timestamp(frame->frame))
- + return V4L2_HEVC_DPB_ENTRY_RPS_LT_CURR;
- + }
- +
- + return 0;
- +}
- +
- +static uint8_t get_ref_pic_index(const HEVCContext *h, const HEVCFrame *frame,
- + struct v4l2_ctrl_hevc_slice_params *slice_params)
- +{
- + uint64_t timestamp;
- +
- + if (!frame)
- + return 0;
- +
- + timestamp = ff_v4l2_request_get_capture_timestamp(frame->frame);
- +
- + for (uint8_t i = 0; i < slice_params->num_active_dpb_entries; i++) {
- + struct v4l2_hevc_dpb_entry *entry = &slice_params->dpb[i];
- + if (entry->timestamp == timestamp)
- + return i;
- + }
- +
- + return 0;
- +}
- +
- +static void v4l2_request_hevc_fill_slice_params(const HEVCContext *h,
- + struct v4l2_ctrl_hevc_slice_params *slice_params)
- +{
- + const HEVCFrame *pic = h->ref;
- + const SliceHeader *sh = &h->sh;
- + int i, entries = 0;
- + RefPicList *rpl;
- +
- + *slice_params = (struct v4l2_ctrl_hevc_slice_params) {
- + .bit_size = 0,
- + .data_bit_offset = get_bits_count(&h->HEVClc->gb),
- +
- + /* ISO/IEC 23008-2, ITU-T Rec. H.265: General slice segment header */
- + .slice_segment_addr = sh->slice_segment_addr,
- +
- + /* ISO/IEC 23008-2, ITU-T Rec. H.265: NAL unit header */
- + .nal_unit_type = h->nal_unit_type,
- + .nuh_temporal_id_plus1 = h->temporal_id + 1,
- +
- + /* ISO/IEC 23008-2, ITU-T Rec. H.265: General slice segment header */
- + .slice_type = sh->slice_type,
- + .colour_plane_id = sh->colour_plane_id,
- + .slice_pic_order_cnt = pic->poc,
- + .num_ref_idx_l0_active_minus1 = sh->nb_refs[L0] ? sh->nb_refs[L0] - 1 : 0,
- + .num_ref_idx_l1_active_minus1 = sh->nb_refs[L1] ? sh->nb_refs[L1] - 1 : 0,
- + .collocated_ref_idx = sh->slice_temporal_mvp_enabled_flag ? sh->collocated_ref_idx : 0,
- + .five_minus_max_num_merge_cand = sh->slice_type == HEVC_SLICE_I ? 0 : 5 - sh->max_num_merge_cand,
- + .slice_qp_delta = sh->slice_qp_delta,
- + .slice_cb_qp_offset = sh->slice_cb_qp_offset,
- + .slice_cr_qp_offset = sh->slice_cr_qp_offset,
- + .slice_act_y_qp_offset = 0,
- + .slice_act_cb_qp_offset = 0,
- + .slice_act_cr_qp_offset = 0,
- + .slice_beta_offset_div2 = sh->beta_offset / 2,
- + .slice_tc_offset_div2 = sh->tc_offset / 2,
- +
- + /* ISO/IEC 23008-2, ITU-T Rec. H.265: Picture timing SEI message */
- + .pic_struct = h->sei.picture_timing.picture_struct,
- +
- + /* ISO/IEC 23008-2, ITU-T Rec. H.265: General slice segment header */
- + .num_rps_poc_st_curr_before = h->rps[ST_CURR_BEF].nb_refs,
- + .num_rps_poc_st_curr_after = h->rps[ST_CURR_AFT].nb_refs,
- + .num_rps_poc_lt_curr = h->rps[LT_CURR].nb_refs,
- +
- + .short_term_ref_pic_set_size = sh->short_term_ref_pic_set_size,
- + .long_term_ref_pic_set_size = sh->long_term_ref_pic_set_size,
- + };
- +
- + if (sh->slice_sample_adaptive_offset_flag[0])
- + slice_params->flags |= V4L2_HEVC_SLICE_PARAMS_FLAG_SLICE_SAO_LUMA;
- +
- + if (sh->slice_sample_adaptive_offset_flag[1])
- + slice_params->flags |= V4L2_HEVC_SLICE_PARAMS_FLAG_SLICE_SAO_CHROMA;
- +
- + if (sh->slice_temporal_mvp_enabled_flag)
- + slice_params->flags |= V4L2_HEVC_SLICE_PARAMS_FLAG_SLICE_TEMPORAL_MVP_ENABLED;
- +
- + if (sh->mvd_l1_zero_flag)
- + slice_params->flags |= V4L2_HEVC_SLICE_PARAMS_FLAG_MVD_L1_ZERO;
- +
- + if (sh->cabac_init_flag)
- + slice_params->flags |= V4L2_HEVC_SLICE_PARAMS_FLAG_CABAC_INIT;
- +
- + if (sh->collocated_list == L0)
- + slice_params->flags |= V4L2_HEVC_SLICE_PARAMS_FLAG_COLLOCATED_FROM_L0;
- +
- + if (sh->disable_deblocking_filter_flag)
- + slice_params->flags |= V4L2_HEVC_SLICE_PARAMS_FLAG_SLICE_DEBLOCKING_FILTER_DISABLED;
- +
- + if (sh->slice_loop_filter_across_slices_enabled_flag)
- + slice_params->flags |= V4L2_HEVC_SLICE_PARAMS_FLAG_SLICE_LOOP_FILTER_ACROSS_SLICES_ENABLED;
- +
- + for (i = 0; i < FF_ARRAY_ELEMS(h->DPB); i++) {
- + const HEVCFrame *frame = &h->DPB[i];
- + if (frame != pic && (frame->flags & (HEVC_FRAME_FLAG_LONG_REF | HEVC_FRAME_FLAG_SHORT_REF))) {
- + struct v4l2_hevc_dpb_entry *entry = &slice_params->dpb[entries++];
- +
- + entry->timestamp = ff_v4l2_request_get_capture_timestamp(frame->frame);
- + entry->rps = find_frame_rps_type(h, entry->timestamp);
- + entry->field_pic = frame->frame->interlaced_frame;
- +
- + /* TODO: Interleaved: Get the POC for each field. */
- + entry->pic_order_cnt[0] = frame->poc;
- + entry->pic_order_cnt[1] = frame->poc;
- + }
- + }
- +
- + slice_params->num_active_dpb_entries = entries;
- +
- + if (sh->slice_type != HEVC_SLICE_I) {
- + rpl = &h->ref->refPicList[0];
- + for (i = 0; i < rpl->nb_refs; i++)
- + slice_params->ref_idx_l0[i] = get_ref_pic_index(h, rpl->ref[i], slice_params);
- + }
- +
- + if (sh->slice_type == HEVC_SLICE_B) {
- + rpl = &h->ref->refPicList[1];
- + for (i = 0; i < rpl->nb_refs; i++)
- + slice_params->ref_idx_l1[i] = get_ref_pic_index(h, rpl->ref[i], slice_params);
- + }
- +
- + v4l2_request_hevc_fill_pred_table(h, &slice_params->pred_weight_table);
- +
- + slice_params->num_entry_point_offsets = sh->num_entry_point_offsets;
- + if (slice_params->num_entry_point_offsets > 256) {
- + slice_params->num_entry_point_offsets = 256;
- + av_log(NULL, AV_LOG_ERROR, "%s: Currently only 256 entry points are supported, but slice has %d entry points.\n", __func__, sh->num_entry_point_offsets);
- + }
- +
- + for (i = 0; i < slice_params->num_entry_point_offsets; i++)
- + slice_params->entry_point_offset_minus1[i] = sh->entry_point_offset[i] - 1;
- +}
- +
- +static void fill_sps(struct v4l2_ctrl_hevc_sps *ctrl, const HEVCContext *h)
- +{
- + const HEVCSPS *sps = h->ps.sps;
- + const HEVCPPS *pps = h->ps.pps;
- +
- + /* ISO/IEC 23008-2, ITU-T Rec. H.265: Sequence parameter set */
- + *ctrl = (struct v4l2_ctrl_hevc_sps) {
- + .video_parameter_set_id = sps->vps_id,
- + .seq_parameter_set_id = pps->sps_id,
- + .chroma_format_idc = sps->chroma_format_idc,
- + .pic_width_in_luma_samples = sps->width,
- + .pic_height_in_luma_samples = sps->height,
- + .bit_depth_luma_minus8 = sps->bit_depth - 8,
- + .bit_depth_chroma_minus8 = sps->bit_depth - 8,
- + .log2_max_pic_order_cnt_lsb_minus4 = sps->log2_max_poc_lsb - 4,
- + .sps_max_dec_pic_buffering_minus1 = sps->temporal_layer[sps->max_sub_layers - 1].max_dec_pic_buffering - 1,
- + .sps_max_num_reorder_pics = sps->temporal_layer[sps->max_sub_layers - 1].num_reorder_pics,
- + .sps_max_latency_increase_plus1 = sps->temporal_layer[sps->max_sub_layers - 1].max_latency_increase + 1,
- + .log2_min_luma_coding_block_size_minus3 = sps->log2_min_cb_size - 3,
- + .log2_diff_max_min_luma_coding_block_size = sps->log2_diff_max_min_coding_block_size,
- + .log2_min_luma_transform_block_size_minus2 = sps->log2_min_tb_size - 2,
- + .log2_diff_max_min_luma_transform_block_size = sps->log2_max_trafo_size - sps->log2_min_tb_size,
- + .max_transform_hierarchy_depth_inter = sps->max_transform_hierarchy_depth_inter,
- + .max_transform_hierarchy_depth_intra = sps->max_transform_hierarchy_depth_intra,
- + .pcm_sample_bit_depth_luma_minus1 = sps->pcm.bit_depth - 1,
- + .pcm_sample_bit_depth_chroma_minus1 = sps->pcm.bit_depth_chroma - 1,
- + .log2_min_pcm_luma_coding_block_size_minus3 = sps->pcm.log2_min_pcm_cb_size - 3,
- + .log2_diff_max_min_pcm_luma_coding_block_size = sps->pcm.log2_max_pcm_cb_size - sps->pcm.log2_min_pcm_cb_size,
- + .num_short_term_ref_pic_sets = sps->nb_st_rps,
- + .num_long_term_ref_pics_sps = sps->num_long_term_ref_pics_sps,
- + };
- +
- + if (sps->separate_colour_plane_flag)
- + ctrl->flags |= V4L2_HEVC_SPS_FLAG_SEPARATE_COLOUR_PLANE;
- +
- + if (sps->scaling_list_enable_flag)
- + ctrl->flags |= V4L2_HEVC_SPS_FLAG_SCALING_LIST_ENABLED;
- +
- + if (sps->amp_enabled_flag)
- + ctrl->flags |= V4L2_HEVC_SPS_FLAG_AMP_ENABLED;
- +
- + if (sps->sao_enabled)
- + ctrl->flags |= V4L2_HEVC_SPS_FLAG_SAMPLE_ADAPTIVE_OFFSET;
- +
- + if (sps->pcm_enabled_flag)
- + ctrl->flags |= V4L2_HEVC_SPS_FLAG_PCM_ENABLED;
- +
- + if (sps->pcm.loop_filter_disable_flag)
- + ctrl->flags |= V4L2_HEVC_SPS_FLAG_PCM_LOOP_FILTER_DISABLED;
- +
- + if (sps->long_term_ref_pics_present_flag)
- + ctrl->flags |= V4L2_HEVC_SPS_FLAG_LONG_TERM_REF_PICS_PRESENT;
- +
- + if (sps->sps_temporal_mvp_enabled_flag)
- + ctrl->flags |= V4L2_HEVC_SPS_FLAG_SPS_TEMPORAL_MVP_ENABLED;
- +
- + if (sps->sps_strong_intra_smoothing_enable_flag)
- + ctrl->flags |= V4L2_HEVC_SPS_FLAG_STRONG_INTRA_SMOOTHING_ENABLED;
- +}
- +
- +static int v4l2_request_hevc_start_frame(AVCodecContext *avctx,
- + av_unused const uint8_t *buffer,
- + av_unused uint32_t size)
- +{
- + const HEVCContext *h = avctx->priv_data;
- + const HEVCSPS *sps = h->ps.sps;
- + const HEVCPPS *pps = h->ps.pps;
- + const SliceHeader *sh = &h->sh;
- + const ScalingList *sl = pps->scaling_list_data_present_flag ?
- + &pps->scaling_list :
- + sps->scaling_list_enable_flag ?
- + &sps->scaling_list : NULL;
- + V4L2RequestControlsHEVC *controls = h->ref->hwaccel_picture_private;
- +
- + fill_sps(&controls->sps, h);
- +
- + if (sl) {
- + for (int i = 0; i < 6; i++) {
- + for (int j = 0; j < 16; j++)
- + controls->scaling_matrix.scaling_list_4x4[i][j] = sl->sl[0][i][j];
- + for (int j = 0; j < 64; j++) {
- + controls->scaling_matrix.scaling_list_8x8[i][j] = sl->sl[1][i][j];
- + controls->scaling_matrix.scaling_list_16x16[i][j] = sl->sl[2][i][j];
- + if (i < 2)
- + controls->scaling_matrix.scaling_list_32x32[i][j] = sl->sl[3][i * 3][j];
- + }
- + controls->scaling_matrix.scaling_list_dc_coef_16x16[i] = sl->sl_dc[0][i];
- + if (i < 2)
- + controls->scaling_matrix.scaling_list_dc_coef_32x32[i] = sl->sl_dc[1][i * 3];
- + }
- + }
- +
- + /* ISO/IEC 23008-2, ITU-T Rec. H.265: Picture parameter set */
- + controls->pps = (struct v4l2_ctrl_hevc_pps) {
- + .pic_parameter_set_id = sh->pps_id,
- + .num_ref_idx_l0_default_active_minus1 = pps->num_ref_idx_l0_default_active - 1,
- + .num_ref_idx_l1_default_active_minus1 = pps->num_ref_idx_l1_default_active - 1,
- + .num_extra_slice_header_bits = pps->num_extra_slice_header_bits,
- + .init_qp_minus26 = pps->pic_init_qp_minus26,
- + .diff_cu_qp_delta_depth = pps->diff_cu_qp_delta_depth,
- + .pps_cb_qp_offset = pps->cb_qp_offset,
- + .pps_cr_qp_offset = pps->cr_qp_offset,
- + .pps_beta_offset_div2 = pps->beta_offset / 2,
- + .pps_tc_offset_div2 = pps->tc_offset / 2,
- + .log2_parallel_merge_level_minus2 = pps->log2_parallel_merge_level - 2,
- + };
- +
- + if (pps->dependent_slice_segments_enabled_flag)
- + controls->pps.flags |= V4L2_HEVC_PPS_FLAG_DEPENDENT_SLICE_SEGMENT;
- +
- + if (pps->output_flag_present_flag)
- + controls->pps.flags |= V4L2_HEVC_PPS_FLAG_OUTPUT_FLAG_PRESENT;
- +
- + if (pps->sign_data_hiding_flag)
- + controls->pps.flags |= V4L2_HEVC_PPS_FLAG_SIGN_DATA_HIDING_ENABLED;
- +
- + if (pps->cabac_init_present_flag)
- + controls->pps.flags |= V4L2_HEVC_PPS_FLAG_CABAC_INIT_PRESENT;
- +
- + if (pps->constrained_intra_pred_flag)
- + controls->pps.flags |= V4L2_HEVC_PPS_FLAG_CONSTRAINED_INTRA_PRED;
- +
- + if (pps->transform_skip_enabled_flag)
- + controls->pps.flags |= V4L2_HEVC_PPS_FLAG_TRANSFORM_SKIP_ENABLED;
- +
- + if (pps->cu_qp_delta_enabled_flag)
- + controls->pps.flags |= V4L2_HEVC_PPS_FLAG_CU_QP_DELTA_ENABLED;
- +
- + if (pps->pic_slice_level_chroma_qp_offsets_present_flag)
- + controls->pps.flags |= V4L2_HEVC_PPS_FLAG_PPS_SLICE_CHROMA_QP_OFFSETS_PRESENT;
- +
- + if (pps->weighted_pred_flag)
- + controls->pps.flags |= V4L2_HEVC_PPS_FLAG_WEIGHTED_PRED;
- +
- + if (pps->weighted_bipred_flag)
- + controls->pps.flags |= V4L2_HEVC_PPS_FLAG_WEIGHTED_BIPRED;
- +
- + if (pps->transquant_bypass_enable_flag)
- + controls->pps.flags |= V4L2_HEVC_PPS_FLAG_TRANSQUANT_BYPASS_ENABLED;
- +
- + if (pps->tiles_enabled_flag)
- + controls->pps.flags |= V4L2_HEVC_PPS_FLAG_TILES_ENABLED;
- +
- + if (pps->entropy_coding_sync_enabled_flag)
- + controls->pps.flags |= V4L2_HEVC_PPS_FLAG_ENTROPY_CODING_SYNC_ENABLED;
- +
- + if (pps->loop_filter_across_tiles_enabled_flag)
- + controls->pps.flags |= V4L2_HEVC_PPS_FLAG_LOOP_FILTER_ACROSS_TILES_ENABLED;
- +
- + if (pps->seq_loop_filter_across_slices_enabled_flag)
- + controls->pps.flags |= V4L2_HEVC_PPS_FLAG_PPS_LOOP_FILTER_ACROSS_SLICES_ENABLED;
- +
- + if (pps->deblocking_filter_override_enabled_flag)
- + controls->pps.flags |= V4L2_HEVC_PPS_FLAG_DEBLOCKING_FILTER_OVERRIDE_ENABLED;
- +
- + if (pps->disable_dbf)
- + controls->pps.flags |= V4L2_HEVC_PPS_FLAG_PPS_DISABLE_DEBLOCKING_FILTER;
- +
- + if (pps->lists_modification_present_flag)
- + controls->pps.flags |= V4L2_HEVC_PPS_FLAG_LISTS_MODIFICATION_PRESENT;
- +
- + if (pps->slice_header_extension_present_flag)
- + controls->pps.flags |= V4L2_HEVC_PPS_FLAG_SLICE_SEGMENT_HEADER_EXTENSION_PRESENT;
- +
- + if (pps->tiles_enabled_flag) {
- + controls->pps.num_tile_columns_minus1 = pps->num_tile_columns - 1;
- + controls->pps.num_tile_rows_minus1 = pps->num_tile_rows - 1;
- +
- + for (int i = 0; i < pps->num_tile_columns; i++)
- + controls->pps.column_width_minus1[i] = pps->column_width[i] - 1;
- +
- + for (int i = 0; i < pps->num_tile_rows; i++)
- + controls->pps.row_height_minus1[i] = pps->row_height[i] - 1;
- + }
- +
- + controls->first_slice = 1;
- + controls->num_slices = 0;
- +
- + return ff_v4l2_request_reset_frame(avctx, h->ref->frame);
- +}
- +
- +static int v4l2_request_hevc_queue_decode(AVCodecContext *avctx, int last_slice)
- +{
- + const HEVCContext *h = avctx->priv_data;
- + V4L2RequestControlsHEVC *controls = h->ref->hwaccel_picture_private;
- + V4L2RequestContextHEVC *ctx = avctx->internal->hwaccel_priv_data;
- +
- + struct v4l2_ext_control control[] = {
- + {
- + .id = V4L2_CID_MPEG_VIDEO_HEVC_SPS,
- + .ptr = &controls->sps,
- + .size = sizeof(controls->sps),
- + },
- + {
- + .id = V4L2_CID_MPEG_VIDEO_HEVC_PPS,
- + .ptr = &controls->pps,
- + .size = sizeof(controls->pps),
- + },
- + {
- + .id = V4L2_CID_MPEG_VIDEO_HEVC_SCALING_MATRIX,
- + .ptr = &controls->scaling_matrix,
- + .size = sizeof(controls->scaling_matrix),
- + },
- + {
- + .id = V4L2_CID_MPEG_VIDEO_HEVC_SLICE_PARAMS,
- + .ptr = &controls->slice_params,
- + .size = sizeof(controls->slice_params[0]) * FFMAX(FFMIN(controls->num_slices, MAX_SLICES), ctx->max_slices),
- + },
- + };
- +
- + if (ctx->decode_mode == V4L2_MPEG_VIDEO_HEVC_DECODE_MODE_SLICE_BASED)
- + return ff_v4l2_request_decode_slice(avctx, h->ref->frame, control, FF_ARRAY_ELEMS(control), controls->first_slice, last_slice);
- +
- + controls->sps.num_slices = controls->num_slices;
- +
- + return ff_v4l2_request_decode_frame(avctx, h->ref->frame, control, FF_ARRAY_ELEMS(control));
- +}
- +
- +static int v4l2_request_hevc_decode_slice(AVCodecContext *avctx, const uint8_t *buffer, uint32_t size)
- +{
- + const HEVCContext *h = avctx->priv_data;
- + V4L2RequestControlsHEVC *controls = h->ref->hwaccel_picture_private;
- + V4L2RequestContextHEVC *ctx = avctx->internal->hwaccel_priv_data;
- + V4L2RequestDescriptor *req = (V4L2RequestDescriptor*)h->ref->frame->data[0];
- + int ret, slice = FFMIN(controls->num_slices, MAX_SLICES - 1);
- +
- + if (ctx->decode_mode == V4L2_MPEG_VIDEO_HEVC_DECODE_MODE_SLICE_BASED && slice) {
- + ret = v4l2_request_hevc_queue_decode(avctx, 0);
- + if (ret)
- + return ret;
- +
- + ff_v4l2_request_reset_frame(avctx, h->ref->frame);
- + slice = controls->num_slices = 0;
- + controls->first_slice = 0;
- + }
- +
- + v4l2_request_hevc_fill_slice_params(h, &controls->slice_params[slice]);
- +
- + if (ctx->start_code == V4L2_MPEG_VIDEO_HEVC_START_CODE_ANNEX_B) {
- + ret = ff_v4l2_request_append_output_buffer(avctx, h->ref->frame, nalu_slice_start_code, 3);
- + if (ret)
- + return ret;
- + }
- +
- + ret = ff_v4l2_request_append_output_buffer(avctx, h->ref->frame, buffer, size);
- + if (ret)
- + return ret;
- +
- + controls->slice_params[slice].bit_size = req->output.used * 8; //FIXME
- + controls->num_slices++;
- + return 0;
- +}
- +
- +static int v4l2_request_hevc_end_frame(AVCodecContext *avctx)
- +{
- + return v4l2_request_hevc_queue_decode(avctx, 1);
- +}
- +
- +static int v4l2_request_hevc_set_controls(AVCodecContext *avctx)
- +{
- + V4L2RequestContextHEVC *ctx = avctx->internal->hwaccel_priv_data;
- + int ret;
- +
- + struct v4l2_ext_control control[] = {
- + { .id = V4L2_CID_MPEG_VIDEO_HEVC_DECODE_MODE, },
- + { .id = V4L2_CID_MPEG_VIDEO_HEVC_START_CODE, },
- + };
- + struct v4l2_query_ext_ctrl slice_params = {
- + .id = V4L2_CID_MPEG_VIDEO_HEVC_SLICE_PARAMS,
- + };
- +
- + ctx->decode_mode = ff_v4l2_request_query_control_default_value(avctx, V4L2_CID_MPEG_VIDEO_HEVC_DECODE_MODE);
- + if (ctx->decode_mode != V4L2_MPEG_VIDEO_HEVC_DECODE_MODE_SLICE_BASED &&
- + ctx->decode_mode != V4L2_MPEG_VIDEO_HEVC_DECODE_MODE_FRAME_BASED) {
- + av_log(avctx, AV_LOG_ERROR, "%s: unsupported decode mode, %d\n", __func__, ctx->decode_mode);
- + return AVERROR(EINVAL);
- + }
- +
- + ctx->start_code = ff_v4l2_request_query_control_default_value(avctx, V4L2_CID_MPEG_VIDEO_HEVC_START_CODE);
- + if (ctx->start_code != V4L2_MPEG_VIDEO_HEVC_START_CODE_NONE &&
- + ctx->start_code != V4L2_MPEG_VIDEO_HEVC_START_CODE_ANNEX_B) {
- + av_log(avctx, AV_LOG_ERROR, "%s: unsupported start code, %d\n", __func__, ctx->start_code);
- + return AVERROR(EINVAL);
- + }
- +
- + ret = ff_v4l2_request_query_control(avctx, &slice_params);
- + if (ret)
- + return ret;
- +
- + ctx->max_slices = slice_params.elems;
- + if (ctx->max_slices > MAX_SLICES) {
- + av_log(avctx, AV_LOG_ERROR, "%s: unsupported max slices, %d\n", __func__, ctx->max_slices);
- + return AVERROR(EINVAL);
- + }
- +
- + control[0].value = ctx->decode_mode;
- + control[1].value = ctx->start_code;
- +
- + return ff_v4l2_request_set_controls(avctx, control, FF_ARRAY_ELEMS(control));
- +}
- +
- +static int v4l2_request_hevc_init(AVCodecContext *avctx)
- +{
- + const HEVCContext *h = avctx->priv_data;
- + struct v4l2_ctrl_hevc_sps sps;
- + int ret;
- +
- + struct v4l2_ext_control control[] = {
- + {
- + .id = V4L2_CID_MPEG_VIDEO_HEVC_SPS,
- + .ptr = &sps,
- + .size = sizeof(sps),
- + },
- + };
- +
- + fill_sps(&sps, h);
- +
- + ret = ff_v4l2_request_init(avctx, V4L2_PIX_FMT_HEVC_SLICE, 4 * 1024 * 1024, control, FF_ARRAY_ELEMS(control));
- + if (ret)
- + return ret;
- +
- + return v4l2_request_hevc_set_controls(avctx);
- +}
- +
- +const AVHWAccel ff_hevc_v4l2request_hwaccel = {
- + .name = "hevc_v4l2request",
- + .type = AVMEDIA_TYPE_VIDEO,
- + .id = AV_CODEC_ID_HEVC,
- + .pix_fmt = AV_PIX_FMT_DRM_PRIME,
- + .start_frame = v4l2_request_hevc_start_frame,
- + .decode_slice = v4l2_request_hevc_decode_slice,
- + .end_frame = v4l2_request_hevc_end_frame,
- + .output_frame = ff_v4l2_request_output_frame,
- + .frame_priv_data_size = sizeof(V4L2RequestControlsHEVC),
- + .init = v4l2_request_hevc_init,
- + .uninit = ff_v4l2_request_uninit,
- + .priv_data_size = sizeof(V4L2RequestContextHEVC),
- + .frame_params = ff_v4l2_request_frame_params,
- + .caps_internal = HWACCEL_CAP_ASYNC_SAFE,
- +};
- diff -uNr '--exclude=.git' '--exclude=sunxi' ffmpeg_n4.2.2/libavcodec/v4l2_request_mpeg2.c ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/v4l2_request_mpeg2.c
- --- ffmpeg_n4.2.2/libavcodec/v4l2_request_mpeg2.c 1969-12-31 16:00:00.000000000 -0800
- +++ ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/v4l2_request_mpeg2.c 2020-05-26 03:16:39.653173493 -0700
- @@ -0,0 +1,156 @@
- +/*
- + * This file is part of FFmpeg.
- + *
- + * FFmpeg is free software; you can redistribute it and/or
- + * modify it under the terms of the GNU Lesser General Public
- + * License as published by the Free Software Foundation; either
- + * version 2.1 of the License, or (at your option) any later version.
- + *
- + * FFmpeg is distributed in the hope that it will be useful,
- + * but WITHOUT ANY WARRANTY; without even the implied warranty of
- + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- + * Lesser General Public License for more details.
- + *
- + * You should have received a copy of the GNU Lesser General Public
- + * License along with FFmpeg; if not, write to the Free Software
- + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
- + */
- +
- +#include "hwaccel.h"
- +#include "mpegvideo.h"
- +#include "v4l2_request.h"
- +#include "mpeg2-ctrls.h"
- +
- +typedef struct V4L2RequestControlsMPEG2 {
- + struct v4l2_ctrl_mpeg2_slice_params slice_params;
- + struct v4l2_ctrl_mpeg2_quantization quantization;
- +} V4L2RequestControlsMPEG2;
- +
- +static int v4l2_request_mpeg2_start_frame(AVCodecContext *avctx,
- + av_unused const uint8_t *buffer,
- + av_unused uint32_t size)
- +{
- + const MpegEncContext *s = avctx->priv_data;
- + V4L2RequestControlsMPEG2 *controls = s->current_picture_ptr->hwaccel_picture_private;
- + V4L2RequestDescriptor *req = (V4L2RequestDescriptor*)s->current_picture_ptr->f->data[0];
- +
- + controls->slice_params = (struct v4l2_ctrl_mpeg2_slice_params) {
- + .bit_size = 0,
- + .data_bit_offset = 0,
- +
- + /* ISO/IEC 13818-2, ITU-T Rec. H.262: Slice */
- + .quantiser_scale_code = s->qscale >> 1,
- +
- + .sequence = {
- + /* ISO/IEC 13818-2, ITU-T Rec. H.262: Sequence header */
- + .horizontal_size = s->width,
- + .vertical_size = s->height,
- + .vbv_buffer_size = req->output.size,
- +
- + /* ISO/IEC 13818-2, ITU-T Rec. H.262: Sequence extension */
- + .profile_and_level_indication = 0,
- + .progressive_sequence = s->progressive_sequence,
- + .chroma_format = s->chroma_format,
- + },
- +
- + .picture = {
- + /* ISO/IEC 13818-2, ITU-T Rec. H.262: Picture header */
- + .picture_coding_type = s->pict_type,
- +
- + /* ISO/IEC 13818-2, ITU-T Rec. H.262: Picture coding extension */
- + .f_code[0][0] = s->mpeg_f_code[0][0],
- + .f_code[0][1] = s->mpeg_f_code[0][1],
- + .f_code[1][0] = s->mpeg_f_code[1][0],
- + .f_code[1][1] = s->mpeg_f_code[1][1],
- + .intra_dc_precision = s->intra_dc_precision,
- + .picture_structure = s->picture_structure,
- + .top_field_first = s->top_field_first,
- + .frame_pred_frame_dct = s->frame_pred_frame_dct,
- + .concealment_motion_vectors = s->concealment_motion_vectors,
- + .q_scale_type = s->q_scale_type,
- + .intra_vlc_format = s->intra_vlc_format,
- + .alternate_scan = s->alternate_scan,
- + .repeat_first_field = s->repeat_first_field,
- + .progressive_frame = s->progressive_frame,
- + },
- + };
- +
- + switch (s->pict_type) {
- + case AV_PICTURE_TYPE_B:
- + controls->slice_params.backward_ref_ts = ff_v4l2_request_get_capture_timestamp(s->next_picture.f);
- + // fall-through
- + case AV_PICTURE_TYPE_P:
- + controls->slice_params.forward_ref_ts = ff_v4l2_request_get_capture_timestamp(s->last_picture.f);
- + }
- +
- + controls->quantization = (struct v4l2_ctrl_mpeg2_quantization) {
- + /* ISO/IEC 13818-2, ITU-T Rec. H.262: Quant matrix extension */
- + .load_intra_quantiser_matrix = 1,
- + .load_non_intra_quantiser_matrix = 1,
- + .load_chroma_intra_quantiser_matrix = 1,
- + .load_chroma_non_intra_quantiser_matrix = 1,
- + };
- +
- + for (int i = 0; i < 64; i++) {
- + int n = s->idsp.idct_permutation[ff_zigzag_direct[i]];
- + controls->quantization.intra_quantiser_matrix[i] = s->intra_matrix[n];
- + controls->quantization.non_intra_quantiser_matrix[i] = s->inter_matrix[n];
- + controls->quantization.chroma_intra_quantiser_matrix[i] = s->chroma_intra_matrix[n];
- + controls->quantization.chroma_non_intra_quantiser_matrix[i] = s->chroma_inter_matrix[n];
- + }
- +
- + return ff_v4l2_request_reset_frame(avctx, s->current_picture_ptr->f);
- +}
- +
- +static int v4l2_request_mpeg2_decode_slice(AVCodecContext *avctx, const uint8_t *buffer, uint32_t size)
- +{
- + const MpegEncContext *s = avctx->priv_data;
- +
- + return ff_v4l2_request_append_output_buffer(avctx, s->current_picture_ptr->f, buffer, size);
- +}
- +
- +static int v4l2_request_mpeg2_end_frame(AVCodecContext *avctx)
- +{
- + const MpegEncContext *s = avctx->priv_data;
- + V4L2RequestControlsMPEG2 *controls = s->current_picture_ptr->hwaccel_picture_private;
- + V4L2RequestDescriptor *req = (V4L2RequestDescriptor*)s->current_picture_ptr->f->data[0];
- +
- + struct v4l2_ext_control control[] = {
- + {
- + .id = V4L2_CID_MPEG_VIDEO_MPEG2_SLICE_PARAMS,
- + .ptr = &controls->slice_params,
- + .size = sizeof(controls->slice_params),
- + },
- + {
- + .id = V4L2_CID_MPEG_VIDEO_MPEG2_QUANTIZATION,
- + .ptr = &controls->quantization,
- + .size = sizeof(controls->quantization),
- + },
- + };
- +
- + controls->slice_params.bit_size = req->output.used * 8;
- +
- + return ff_v4l2_request_decode_frame(avctx, s->current_picture_ptr->f, control, FF_ARRAY_ELEMS(control));
- +}
- +
- +static int v4l2_request_mpeg2_init(AVCodecContext *avctx)
- +{
- + return ff_v4l2_request_init(avctx, V4L2_PIX_FMT_MPEG2_SLICE, 1024 * 1024, NULL, 0);
- +}
- +
- +const AVHWAccel ff_mpeg2_v4l2request_hwaccel = {
- + .name = "mpeg2_v4l2request",
- + .type = AVMEDIA_TYPE_VIDEO,
- + .id = AV_CODEC_ID_MPEG2VIDEO,
- + .pix_fmt = AV_PIX_FMT_DRM_PRIME,
- + .start_frame = v4l2_request_mpeg2_start_frame,
- + .decode_slice = v4l2_request_mpeg2_decode_slice,
- + .end_frame = v4l2_request_mpeg2_end_frame,
- + .output_frame = ff_v4l2_request_output_frame,
- + .frame_priv_data_size = sizeof(V4L2RequestControlsMPEG2),
- + .init = v4l2_request_mpeg2_init,
- + .uninit = ff_v4l2_request_uninit,
- + .priv_data_size = sizeof(V4L2RequestContext),
- + .frame_params = ff_v4l2_request_frame_params,
- + .caps_internal = HWACCEL_CAP_ASYNC_SAFE,
- +};
- diff -uNr '--exclude=.git' '--exclude=sunxi' ffmpeg_n4.2.2/libavcodec/v4l2_request_vp8.c ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/v4l2_request_vp8.c
- --- ffmpeg_n4.2.2/libavcodec/v4l2_request_vp8.c 1969-12-31 16:00:00.000000000 -0800
- +++ ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/v4l2_request_vp8.c 2020-05-26 03:16:39.653173493 -0700
- @@ -0,0 +1,182 @@
- +/*
- + * This file is part of FFmpeg.
- + *
- + * FFmpeg is free software; you can redistribute it and/or
- + * modify it under the terms of the GNU Lesser General Public
- + * License as published by the Free Software Foundation; either
- + * version 2.1 of the License, or (at your option) any later version.
- + *
- + * FFmpeg is distributed in the hope that it will be useful,
- + * but WITHOUT ANY WARRANTY; without even the implied warranty of
- + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- + * Lesser General Public License for more details.
- + *
- + * You should have received a copy of the GNU Lesser General Public
- + * License along with FFmpeg; if not, write to the Free Software
- + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
- + */
- +
- +#include "hwaccel.h"
- +#include "v4l2_request.h"
- +#include "vp8.h"
- +#include "vp8-ctrls.h"
- +
- +typedef struct V4L2RequestControlsVP8 {
- + struct v4l2_ctrl_vp8_frame_header ctrl;
- +} V4L2RequestControlsVP8;
- +
- +static int v4l2_request_vp8_start_frame(AVCodecContext *avctx,
- + av_unused const uint8_t *buffer,
- + av_unused uint32_t size)
- +{
- + const VP8Context *s = avctx->priv_data;
- + V4L2RequestControlsVP8 *controls = s->framep[VP56_FRAME_CURRENT]->hwaccel_picture_private;
- +
- + memset(&controls->ctrl, 0, sizeof(controls->ctrl));
- + return ff_v4l2_request_reset_frame(avctx, s->framep[VP56_FRAME_CURRENT]->tf.f);
- +}
- +
- +static int v4l2_request_vp8_end_frame(AVCodecContext *avctx)
- +{
- + const VP8Context *s = avctx->priv_data;
- + V4L2RequestControlsVP8 *controls = s->framep[VP56_FRAME_CURRENT]->hwaccel_picture_private;
- + struct v4l2_ext_control control[] = {
- + {
- + .id = V4L2_CID_MPEG_VIDEO_VP8_FRAME_HEADER,
- + .ptr = &controls->ctrl,
- + .size = sizeof(controls->ctrl),
- + },
- + };
- +
- + return ff_v4l2_request_decode_frame(avctx, s->framep[VP56_FRAME_CURRENT]->tf.f,
- + control, FF_ARRAY_ELEMS(control));
- +}
- +
- +static int v4l2_request_vp8_decode_slice(AVCodecContext *avctx,
- + const uint8_t *buffer,
- + uint32_t size)
- +{
- + const VP8Context *s = avctx->priv_data;
- + V4L2RequestControlsVP8 *controls = s->framep[VP56_FRAME_CURRENT]->hwaccel_picture_private;
- + struct v4l2_ctrl_vp8_frame_header *hdr = &controls->ctrl;
- + const uint8_t *data = buffer + 3 + 7 * s->keyframe;
- + unsigned int i, j, k;
- +
- + hdr->version = s->profile & 0x3;
- + hdr->width = avctx->width;
- + hdr->height = avctx->height;
- + /* FIXME: set ->xx_scale */
- + hdr->prob_skip_false = s->prob->mbskip;
- + hdr->prob_intra = s->prob->intra;
- + hdr->prob_gf = s->prob->golden;
- + hdr->prob_last = s->prob->last;
- + hdr->first_part_size = s->header_partition_size;
- + hdr->first_part_header_bits = (8 * (s->coder_state_at_header_end.input - data) -
- + s->coder_state_at_header_end.bit_count - 8);
- + hdr->num_dct_parts = s->num_coeff_partitions;
- + for (i = 0; i < 8; i++)
- + hdr->dct_part_sizes[i] = s->coeff_partition_size[i];
- +
- + hdr->coder_state.range = s->coder_state_at_header_end.range;
- + hdr->coder_state.value = s->coder_state_at_header_end.value;
- + hdr->coder_state.bit_count = s->coder_state_at_header_end.bit_count;
- + if (s->framep[VP56_FRAME_PREVIOUS])
- + hdr->last_frame_ts = ff_v4l2_request_get_capture_timestamp(s->framep[VP56_FRAME_PREVIOUS]->tf.f);
- + if (s->framep[VP56_FRAME_GOLDEN])
- + hdr->golden_frame_ts = ff_v4l2_request_get_capture_timestamp(s->framep[VP56_FRAME_GOLDEN]->tf.f);
- + if (s->framep[VP56_FRAME_GOLDEN2])
- + hdr->alt_frame_ts = ff_v4l2_request_get_capture_timestamp(s->framep[VP56_FRAME_GOLDEN2]->tf.f);
- + hdr->flags |= s->invisible ? 0 : V4L2_VP8_FRAME_HEADER_FLAG_SHOW_FRAME;
- + hdr->flags |= s->mbskip_enabled ? V4L2_VP8_FRAME_HEADER_FLAG_MB_NO_SKIP_COEFF : 0;
- + hdr->flags |= (s->profile & 0x4) ? V4L2_VP8_FRAME_HEADER_FLAG_EXPERIMENTAL : 0;
- + hdr->flags |= s->keyframe ? V4L2_VP8_FRAME_HEADER_FLAG_KEY_FRAME : 0;
- + hdr->flags |= s->sign_bias[VP56_FRAME_GOLDEN] ? V4L2_VP8_FRAME_HEADER_FLAG_SIGN_BIAS_GOLDEN : 0;
- + hdr->flags |= s->sign_bias[VP56_FRAME_GOLDEN2] ? V4L2_VP8_FRAME_HEADER_FLAG_SIGN_BIAS_ALT : 0;
- + hdr->segment_header.flags |= s->segmentation.enabled ? V4L2_VP8_SEGMENT_HEADER_FLAG_ENABLED : 0;
- + hdr->segment_header.flags |= s->segmentation.update_map ? V4L2_VP8_SEGMENT_HEADER_FLAG_UPDATE_MAP : 0;
- + hdr->segment_header.flags |= s->segmentation.update_feature_data ? V4L2_VP8_SEGMENT_HEADER_FLAG_UPDATE_FEATURE_DATA : 0;
- + hdr->segment_header.flags |= s->segmentation.absolute_vals ? 0 : V4L2_VP8_SEGMENT_HEADER_FLAG_DELTA_VALUE_MODE;
- + for (i = 0; i < 4; i++) {
- + hdr->segment_header.quant_update[i] = s->segmentation.base_quant[i];
- + hdr->segment_header.lf_update[i] = s->segmentation.filter_level[i];
- + }
- +
- + for (i = 0; i < 3; i++)
- + hdr->segment_header.segment_probs[i] = s->prob->segmentid[i];
- +
- + hdr->lf_header.level = s->filter.level;
- + hdr->lf_header.sharpness_level = s->filter.sharpness;
- + hdr->lf_header.flags |= s->lf_delta.enabled ? V4L2_VP8_LF_HEADER_ADJ_ENABLE : 0;
- + hdr->lf_header.flags |= s->lf_delta.update ? V4L2_VP8_LF_HEADER_DELTA_UPDATE : 0;
- + hdr->lf_header.flags |= s->filter.simple ? V4L2_VP8_LF_FILTER_TYPE_SIMPLE : 0;
- + for (i = 0; i < 4; i++) {
- + hdr->lf_header.ref_frm_delta[i] = s->lf_delta.ref[i];
- + hdr->lf_header.mb_mode_delta[i] = s->lf_delta.mode[i + MODE_I4x4];
- + }
- +
- + // Probabilities
- + if (s->keyframe) {
- + static const uint8_t keyframe_y_mode_probs[4] = {
- + 145, 156, 163, 128
- + };
- + static const uint8_t keyframe_uv_mode_probs[3] = {
- + 142, 114, 183
- + };
- +
- + memcpy(hdr->entropy_header.y_mode_probs, keyframe_y_mode_probs, 4);
- + memcpy(hdr->entropy_header.uv_mode_probs, keyframe_uv_mode_probs, 3);
- + } else {
- + for (i = 0; i < 4; i++)
- + hdr->entropy_header.y_mode_probs[i] = s->prob->pred16x16[i];
- + for (i = 0; i < 3; i++)
- + hdr->entropy_header.uv_mode_probs[i] = s->prob->pred8x8c[i];
- + }
- + for (i = 0; i < 2; i++)
- + for (j = 0; j < 19; j++)
- + hdr->entropy_header.mv_probs[i][j] = s->prob->mvc[i][j];
- +
- + for (i = 0; i < 4; i++) {
- + for (j = 0; j < 8; j++) {
- + static const int coeff_bands_inverse[8] = {
- + 0, 1, 2, 3, 5, 6, 4, 15
- + };
- + int coeff_pos = coeff_bands_inverse[j];
- +
- + for (k = 0; k < 3; k++) {
- + memcpy(hdr->entropy_header.coeff_probs[i][j][k],
- + s->prob->token[i][coeff_pos][k], 11);
- + }
- + }
- + }
- +
- + hdr->quant_header.y_ac_qi = s->quant.yac_qi;
- + hdr->quant_header.y_dc_delta = s->quant.ydc_delta;
- + hdr->quant_header.y2_dc_delta = s->quant.y2dc_delta;
- + hdr->quant_header.y2_ac_delta = s->quant.y2ac_delta;
- + hdr->quant_header.uv_dc_delta = s->quant.uvdc_delta;
- + hdr->quant_header.uv_ac_delta = s->quant.uvac_delta;
- +
- + return ff_v4l2_request_append_output_buffer(avctx, s->framep[VP56_FRAME_CURRENT]->tf.f, buffer, size);
- +}
- +
- +static int v4l2_request_vp8_init(AVCodecContext *avctx)
- +{
- + return ff_v4l2_request_init(avctx, V4L2_PIX_FMT_VP8_FRAME, 2 * 1024 * 1024, NULL, 0);
- +}
- +
- +const AVHWAccel ff_vp8_v4l2request_hwaccel = {
- + .name = "vp8_v4l2request",
- + .type = AVMEDIA_TYPE_VIDEO,
- + .id = AV_CODEC_ID_VP8,
- + .pix_fmt = AV_PIX_FMT_DRM_PRIME,
- + .start_frame = v4l2_request_vp8_start_frame,
- + .decode_slice = v4l2_request_vp8_decode_slice,
- + .end_frame = v4l2_request_vp8_end_frame,
- + .output_frame = ff_v4l2_request_output_frame,
- + .frame_priv_data_size = sizeof(V4L2RequestControlsVP8),
- + .init = v4l2_request_vp8_init,
- + .uninit = ff_v4l2_request_uninit,
- + .priv_data_size = sizeof(V4L2RequestContext),
- + .frame_params = ff_v4l2_request_frame_params,
- + .caps_internal = HWACCEL_CAP_ASYNC_SAFE,
- +};
- diff -uNr '--exclude=.git' '--exclude=sunxi' ffmpeg_n4.2.2/libavcodec/v4l2_request_vp9.c ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/v4l2_request_vp9.c
- --- ffmpeg_n4.2.2/libavcodec/v4l2_request_vp9.c 1969-12-31 16:00:00.000000000 -0800
- +++ ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/v4l2_request_vp9.c 2020-05-26 03:16:39.653173493 -0700
- @@ -0,0 +1,354 @@
- +/*
- + * This file is part of FFmpeg.
- + *
- + * FFmpeg is free software; you can redistribute it and/or
- + * modify it under the terms of the GNU Lesser General Public
- + * License as published by the Free Software Foundation; either
- + * version 2.1 of the License, or (at your option) any later version.
- + *
- + * FFmpeg is distributed in the hope that it will be useful,
- + * but WITHOUT ANY WARRANTY; without even the implied warranty of
- + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- + * Lesser General Public License for more details.
- + *
- + * You should have received a copy of the GNU Lesser General Public
- + * License along with FFmpeg; if not, write to the Free Software
- + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
- + */
- +
- +#include "hwaccel.h"
- +#include "v4l2_request.h"
- +#include "vp9dec.h"
- +#include "vp9-ctrls.h"
- +
- +typedef struct V4L2RequestControlsVP9 {
- + struct v4l2_ctrl_vp9_frame_decode_params decode_params;
- +} V4L2RequestControlsVP9;
- +
- +static const uint8_t ff_to_v4l2_intramode[] = {
- + [VERT_PRED] = V4L2_VP9_INTRA_PRED_MODE_V,
- + [HOR_PRED] = V4L2_VP9_INTRA_PRED_MODE_H,
- + [DC_PRED] = V4L2_VP9_INTRA_PRED_MODE_DC,
- + [DIAG_DOWN_LEFT_PRED] = V4L2_VP9_INTRA_PRED_MODE_D45,
- + [DIAG_DOWN_RIGHT_PRED] = V4L2_VP9_INTRA_PRED_MODE_D135,
- + [VERT_RIGHT_PRED] = V4L2_VP9_INTRA_PRED_MODE_D117,
- + [HOR_DOWN_PRED] = V4L2_VP9_INTRA_PRED_MODE_D153,
- + [VERT_LEFT_PRED] = V4L2_VP9_INTRA_PRED_MODE_D63,
- + [HOR_UP_PRED] = V4L2_VP9_INTRA_PRED_MODE_D207,
- + [TM_VP8_PRED] = V4L2_VP9_INTRA_PRED_MODE_TM,
- +};
- +
- +static int v4l2_request_vp9_set_frame_ctx(AVCodecContext *avctx, unsigned int id)
- +{
- + VP9Context *s = avctx->priv_data;
- + struct v4l2_ctrl_vp9_frame_ctx fctx = {};
- + struct v4l2_ext_control control[] = {
- + {
- + .id = V4L2_CID_MPEG_VIDEO_VP9_FRAME_CONTEXT(id),
- + .ptr = &fctx,
- + .size = sizeof(fctx),
- + },
- + };
- +
- + memcpy(fctx.probs.tx8, s->prob_ctx[id].p.tx8p, sizeof(s->prob_ctx[id].p.tx8p));
- + memcpy(fctx.probs.tx16, s->prob_ctx[id].p.tx16p, sizeof(s->prob_ctx[id].p.tx16p));
- + memcpy(fctx.probs.tx32, s->prob_ctx[id].p.tx32p, sizeof(s->prob_ctx[id].p.tx32p));
- + memcpy(fctx.probs.coef, s->prob_ctx[id].coef, sizeof(s->prob_ctx[id].coef));
- + memcpy(fctx.probs.skip, s->prob_ctx[id].p.skip, sizeof(s->prob_ctx[id].p.skip));
- + memcpy(fctx.probs.inter_mode, s->prob_ctx[id].p.mv_mode, sizeof(s->prob_ctx[id].p.mv_mode));
- + memcpy(fctx.probs.interp_filter, s->prob_ctx[id].p.filter, sizeof(s->prob_ctx[id].p.filter));
- + memcpy(fctx.probs.is_inter, s->prob_ctx[id].p.intra, sizeof(s->prob_ctx[id].p.intra));
- + memcpy(fctx.probs.comp_mode, s->prob_ctx[id].p.comp, sizeof(s->prob_ctx[id].p.comp));
- + memcpy(fctx.probs.single_ref, s->prob_ctx[id].p.single_ref, sizeof(s->prob_ctx[id].p.single_ref));
- + memcpy(fctx.probs.comp_ref, s->prob_ctx[id].p.comp_ref, sizeof(s->prob_ctx[id].p.comp_ref));
- + memcpy(fctx.probs.y_mode, s->prob_ctx[id].p.y_mode, sizeof(s->prob_ctx[id].p.y_mode));
- + for (unsigned i = 0; i < 10; i++)
- + memcpy(fctx.probs.uv_mode[ff_to_v4l2_intramode[i]], s->prob_ctx[id].p.uv_mode[i], sizeof(s->prob_ctx[id].p.uv_mode[0]));
- + for (unsigned i = 0; i < 4; i++)
- + memcpy(fctx.probs.partition[i * 4], s->prob_ctx[id].p.partition[3 - i], sizeof(s->prob_ctx[id].p.partition[0]));
- + memcpy(fctx.probs.mv.joint, s->prob_ctx[id].p.mv_joint, sizeof(s->prob_ctx[id].p.mv_joint));
- + for (unsigned i = 0; i < 2; i++) {
- + fctx.probs.mv.sign[i] = s->prob_ctx[id].p.mv_comp[i].sign;
- + memcpy(fctx.probs.mv.class[i], s->prob_ctx[id].p.mv_comp[i].classes, sizeof(s->prob_ctx[id].p.mv_comp[0].classes));
- + fctx.probs.mv.class0_bit[i] = s->prob_ctx[id].p.mv_comp[i].class0;
- + memcpy(fctx.probs.mv.bits[i], s->prob_ctx[id].p.mv_comp[i].bits, sizeof(s->prob_ctx[id].p.mv_comp[0].bits));
- + memcpy(fctx.probs.mv.class0_fr[i], s->prob_ctx[id].p.mv_comp[i].class0_fp, sizeof(s->prob_ctx[id].p.mv_comp[0].class0_fp));
- + memcpy(fctx.probs.mv.fr[i], s->prob_ctx[id].p.mv_comp[i].fp, sizeof(s->prob_ctx[id].p.mv_comp[0].fp));
- + fctx.probs.mv.class0_hp[i] = s->prob_ctx[id].p.mv_comp[i].class0_hp;
- + fctx.probs.mv.hp[i] = s->prob_ctx[id].p.mv_comp[i].hp;
- + }
- +
- + return ff_v4l2_request_set_controls(avctx, control, FF_ARRAY_ELEMS(control));
- +}
- +
- +static int v4l2_request_vp9_get_frame_ctx(AVCodecContext *avctx, unsigned int id)
- +{
- + VP9Context *s = avctx->priv_data;
- + struct v4l2_ctrl_vp9_frame_ctx fctx = {};
- + struct v4l2_ext_control control[] = {
- + {
- + .id = V4L2_CID_MPEG_VIDEO_VP9_FRAME_CONTEXT(id),
- + .ptr = &fctx,
- + .size = sizeof(fctx),
- + },
- + };
- +
- + int ret = ff_v4l2_request_get_controls(avctx, control, FF_ARRAY_ELEMS(control));
- + if (ret)
- + return ret;
- +
- + memcpy(s->prob_ctx[id].p.tx8p, fctx.probs.tx8, sizeof(s->prob_ctx[id].p.tx8p));
- + memcpy(s->prob_ctx[id].p.tx16p, fctx.probs.tx16, sizeof(s->prob_ctx[id].p.tx16p));
- + memcpy(s->prob_ctx[id].p.tx32p, fctx.probs.tx32, sizeof(s->prob_ctx[id].p.tx32p));
- + memcpy(s->prob_ctx[id].coef, fctx.probs.coef, sizeof(s->prob_ctx[id].coef));
- + memcpy(s->prob_ctx[id].p.skip, fctx.probs.skip, sizeof(s->prob_ctx[id].p.skip));
- + memcpy(s->prob_ctx[id].p.mv_mode, fctx.probs.inter_mode, sizeof(s->prob_ctx[id].p.mv_mode));
- + memcpy(s->prob_ctx[id].p.filter, fctx.probs.interp_filter, sizeof(s->prob_ctx[id].p.filter));
- + memcpy(s->prob_ctx[id].p.intra, fctx.probs.is_inter, sizeof(s->prob_ctx[id].p.intra));
- + memcpy(s->prob_ctx[id].p.comp, fctx.probs.comp_mode, sizeof(s->prob_ctx[id].p.comp));
- + memcpy(s->prob_ctx[id].p.single_ref, fctx.probs.single_ref, sizeof(s->prob_ctx[id].p.single_ref));
- + memcpy(s->prob_ctx[id].p.comp_ref, fctx.probs.comp_ref, sizeof(s->prob_ctx[id].p.comp_ref));
- + memcpy(s->prob_ctx[id].p.y_mode, fctx.probs.y_mode, sizeof(s->prob_ctx[id].p.y_mode));
- + for (unsigned i = 0; i < 10; i++)
- + memcpy(s->prob_ctx[id].p.uv_mode[i], fctx.probs.uv_mode[ff_to_v4l2_intramode[i]], sizeof(s->prob_ctx[id].p.uv_mode[0]));
- + for (unsigned i = 0; i < 4; i++)
- + memcpy(s->prob_ctx[id].p.partition[3 - i], fctx.probs.partition[i * 4], sizeof(s->prob_ctx[id].p.partition[0]));
- + memcpy(s->prob_ctx[id].p.mv_joint, fctx.probs.mv.joint, sizeof(s->prob_ctx[id].p.mv_joint));
- + for (unsigned i = 0; i < 2; i++) {
- + s->prob_ctx[id].p.mv_comp[i].sign = fctx.probs.mv.sign[i];
- + memcpy(s->prob_ctx[id].p.mv_comp[i].classes, fctx.probs.mv.class[i], sizeof(s->prob_ctx[id].p.mv_comp[0].classes));
- + s->prob_ctx[id].p.mv_comp[i].class0 = fctx.probs.mv.class0_bit[i];
- + memcpy(s->prob_ctx[id].p.mv_comp[i].bits, fctx.probs.mv.bits[i], sizeof(s->prob_ctx[id].p.mv_comp[0].bits));
- + memcpy(s->prob_ctx[id].p.mv_comp[i].class0_fp, fctx.probs.mv.class0_fr[i], sizeof(s->prob_ctx[id].p.mv_comp[0].class0_fp));
- + memcpy(s->prob_ctx[id].p.mv_comp[i].fp, fctx.probs.mv.fr[i], sizeof(s->prob_ctx[id].p.mv_comp[0].fp));
- + s->prob_ctx[id].p.mv_comp[i].class0_hp = fctx.probs.mv.class0_hp[i];
- + s->prob_ctx[id].p.mv_comp[i].hp = fctx.probs.mv.hp[i];
- + }
- +
- + return 0;
- +}
- +
- +static int v4l2_request_vp9_start_frame(AVCodecContext *avctx,
- + av_unused const uint8_t *buffer,
- + av_unused uint32_t size)
- +{
- + const VP9Context *s = avctx->priv_data;
- + const VP9Frame *f = &s->s.frames[CUR_FRAME];
- + V4L2RequestControlsVP9 *controls = f->hwaccel_picture_private;
- + struct v4l2_ctrl_vp9_frame_decode_params *dec_params = &controls->decode_params;
- + int ret;
- +
- + if (s->s.h.keyframe || s->s.h.errorres || (s->s.h.intraonly && s->s.h.resetctx == 3)) {
- + for (unsigned i = 0; i < 4; i++) {
- + ret = v4l2_request_vp9_set_frame_ctx(avctx, i);
- + if (ret)
- + return ret;
- + }
- + } else if (s->s.h.intraonly && s->s.h.resetctx == 2) {
- + ret = v4l2_request_vp9_set_frame_ctx(avctx, s->s.h.framectxid);
- + if (ret)
- + return ret;
- + }
- +
- + if (s->s.h.keyframe)
- + dec_params->flags |= V4L2_VP9_FRAME_FLAG_KEY_FRAME;
- + if (!s->s.h.invisible)
- + dec_params->flags |= V4L2_VP9_FRAME_FLAG_SHOW_FRAME;
- + if (s->s.h.errorres)
- + dec_params->flags |= V4L2_VP9_FRAME_FLAG_ERROR_RESILIENT;
- + if (s->s.h.intraonly)
- + dec_params->flags |= V4L2_VP9_FRAME_FLAG_INTRA_ONLY;
- + if (!s->s.h.keyframe && s->s.h.highprecisionmvs)
- + dec_params->flags |= V4L2_VP9_FRAME_FLAG_ALLOW_HIGH_PREC_MV;
- + if (s->s.h.refreshctx)
- + dec_params->flags |= V4L2_VP9_FRAME_FLAG_REFRESH_FRAME_CTX;
- + if (s->s.h.parallelmode)
- + dec_params->flags |= V4L2_VP9_FRAME_FLAG_PARALLEL_DEC_MODE;
- + if (s->ss_h)
- + dec_params->flags |= V4L2_VP9_FRAME_FLAG_X_SUBSAMPLING;
- + if (s->ss_v)
- + dec_params->flags |= V4L2_VP9_FRAME_FLAG_Y_SUBSAMPLING;
- + if (avctx->color_range == AVCOL_RANGE_JPEG)
- + dec_params->flags |= V4L2_VP9_FRAME_FLAG_COLOR_RANGE_FULL_SWING;
- +
- + dec_params->compressed_header_size = s->s.h.compressed_header_size;
- + dec_params->uncompressed_header_size = s->s.h.uncompressed_header_size;
- + dec_params->profile = s->s.h.profile;
- + dec_params->reset_frame_context = s->s.h.resetctx > 0 ? s->s.h.resetctx - 1 : 0;
- + dec_params->frame_context_idx = s->s.h.framectxid;
- + dec_params->bit_depth = s->s.h.bpp;
- +
- + dec_params->interpolation_filter = s->s.h.filtermode ^ (s->s.h.filtermode <= 1);
- + dec_params->tile_cols_log2 = s->s.h.tiling.log2_tile_cols;
- + dec_params->tile_rows_log2 = s->s.h.tiling.log2_tile_rows;
- + dec_params->tx_mode = s->s.h.txfmmode;
- + dec_params->reference_mode = s->s.h.comppredmode;
- + dec_params->frame_width_minus_1 = s->w - 1;
- + dec_params->frame_height_minus_1 = s->h - 1;
- + //dec_params->render_width_minus_1 = avctx->width - 1;
- + //dec_params->render_height_minus_1 = avctx->height - 1;
- +
- + for (unsigned i = 0; i < 3; i++) {
- + const ThreadFrame *ref = &s->s.refs[s->s.h.refidx[i]];
- + if (ref->f && ref->f->buf[0])
- + dec_params->refs[i] = ff_v4l2_request_get_capture_timestamp(ref->f);
- + }
- +
- + if (s->s.h.lf_delta.enabled)
- + dec_params->lf.flags |= V4L2_VP9_LOOP_FILTER_FLAG_DELTA_ENABLED;
- + if (s->s.h.lf_delta.updated)
- + dec_params->lf.flags |= V4L2_VP9_LOOP_FILTER_FLAG_DELTA_UPDATE;
- +
- + dec_params->lf.level = s->s.h.filter.level;
- + dec_params->lf.sharpness = s->s.h.filter.sharpness;
- + for (unsigned i = 0; i < 4; i++)
- + dec_params->lf.ref_deltas[i] = s->s.h.lf_delta.ref[i];
- + for (unsigned i = 0; i < 2; i++)
- + dec_params->lf.mode_deltas[i] = s->s.h.lf_delta.mode[i];
- + for (unsigned i = 0; i < 8; i++) {
- + for (unsigned j = 0; j < 4; j++)
- + memcpy(dec_params->lf.level_lookup[i][j], s->s.h.segmentation.feat[i].lflvl[j], sizeof(dec_params->lf.level_lookup[0][0]));
- + }
- +
- + dec_params->quant.base_q_idx = s->s.h.yac_qi;
- + dec_params->quant.delta_q_y_dc = s->s.h.ydc_qdelta;
- + dec_params->quant.delta_q_uv_dc = s->s.h.uvdc_qdelta;
- + dec_params->quant.delta_q_uv_ac = s->s.h.uvac_qdelta;
- +
- + if (s->s.h.segmentation.enabled)
- + dec_params->seg.flags |= V4L2_VP9_SEGMENTATION_FLAG_ENABLED;
- + if (s->s.h.segmentation.update_map)
- + dec_params->seg.flags |= V4L2_VP9_SEGMENTATION_FLAG_UPDATE_MAP;
- + if (s->s.h.segmentation.temporal)
- + dec_params->seg.flags |= V4L2_VP9_SEGMENTATION_FLAG_TEMPORAL_UPDATE;
- + if (s->s.h.segmentation.update_data)
- + dec_params->seg.flags |= V4L2_VP9_SEGMENTATION_FLAG_UPDATE_DATA;
- + if (s->s.h.segmentation.absolute_vals)
- + dec_params->seg.flags |= V4L2_VP9_SEGMENTATION_FLAG_ABS_OR_DELTA_UPDATE;
- +
- + for (unsigned i = 0; i < 7; i++)
- + dec_params->seg.tree_probs[i] = s->s.h.segmentation.prob[i];
- +
- + if (s->s.h.segmentation.temporal) {
- + for (unsigned i = 0; i < 3; i++)
- + dec_params->seg.pred_probs[i] = s->s.h.segmentation.pred_prob[i];
- + } else {
- + memset(dec_params->seg.pred_probs, 255, sizeof(dec_params->seg.pred_probs));
- + }
- +
- + for (unsigned i = 0; i < 8; i++) {
- + if (s->s.h.segmentation.feat[i].q_enabled) {
- + dec_params->seg.feature_enabled[i] |= 1 << V4L2_VP9_SEGMENT_FEATURE_QP_DELTA;
- + dec_params->seg.feature_data[i][V4L2_VP9_SEGMENT_FEATURE_QP_DELTA] = s->s.h.segmentation.feat[i].q_val;
- + }
- +
- + if (s->s.h.segmentation.feat[i].lf_enabled) {
- + dec_params->seg.feature_enabled[i] |= 1 << V4L2_VP9_SEGMENT_FEATURE_LF;
- + dec_params->seg.feature_data[i][V4L2_VP9_SEGMENT_FEATURE_LF] = s->s.h.segmentation.feat[i].lf_val;
- + }
- +
- + if (s->s.h.segmentation.feat[i].ref_enabled) {
- + dec_params->seg.feature_enabled[i] |= 1 << V4L2_VP9_SEGMENT_FEATURE_REF_FRAME;
- + dec_params->seg.feature_data[i][V4L2_VP9_SEGMENT_FEATURE_REF_FRAME] = s->s.h.segmentation.feat[i].ref_val;
- + }
- +
- + if (s->s.h.segmentation.feat[i].skip_enabled)
- + dec_params->seg.feature_enabled[i] |= 1 << V4L2_VP9_SEGMENT_FEATURE_SKIP;
- + }
- +
- + memcpy(dec_params->probs.tx8, s->prob.p.tx8p, sizeof(s->prob.p.tx8p));
- + memcpy(dec_params->probs.tx16, s->prob.p.tx16p, sizeof(s->prob.p.tx16p));
- + memcpy(dec_params->probs.tx32, s->prob.p.tx32p, sizeof(s->prob.p.tx32p));
- + for (unsigned i = 0; i < 4; i++) {
- + for (unsigned j = 0; j < 2; j++) {
- + for (unsigned k = 0; k < 2; k++) {
- + for (unsigned l = 0; l < 6; l++) {
- + for (unsigned m = 0; m < 6; m++) {
- + memcpy(dec_params->probs.coef[i][j][k][l][m], s->prob.coef[i][j][k][l][m], sizeof(dec_params->probs.coef[0][0][0][0][0]));
- + }
- + }
- + }
- + }
- + }
- + memcpy(dec_params->probs.skip, s->prob.p.skip, sizeof(s->prob.p.skip));
- + memcpy(dec_params->probs.inter_mode, s->prob.p.mv_mode, sizeof(s->prob.p.mv_mode));
- + memcpy(dec_params->probs.interp_filter, s->prob.p.filter, sizeof(s->prob.p.filter));
- + memcpy(dec_params->probs.is_inter, s->prob.p.intra, sizeof(s->prob.p.intra));
- + memcpy(dec_params->probs.comp_mode, s->prob.p.comp, sizeof(s->prob.p.comp));
- + memcpy(dec_params->probs.single_ref, s->prob.p.single_ref, sizeof(s->prob.p.single_ref));
- + memcpy(dec_params->probs.comp_ref, s->prob.p.comp_ref, sizeof(s->prob.p.comp_ref));
- + memcpy(dec_params->probs.y_mode, s->prob.p.y_mode, sizeof(s->prob.p.y_mode));
- + for (unsigned i = 0; i < 10; i++)
- + memcpy(dec_params->probs.uv_mode[ff_to_v4l2_intramode[i]], s->prob.p.uv_mode[i], sizeof(s->prob.p.uv_mode[0]));
- + for (unsigned i = 0; i < 4; i++)
- + memcpy(dec_params->probs.partition[i * 4], s->prob.p.partition[3 - i], sizeof(s->prob.p.partition[0]));
- + memcpy(dec_params->probs.mv.joint, s->prob.p.mv_joint, sizeof(s->prob.p.mv_joint));
- + for (unsigned i = 0; i < 2; i++) {
- + dec_params->probs.mv.sign[i] = s->prob.p.mv_comp[i].sign;
- + memcpy(dec_params->probs.mv.class[i], s->prob.p.mv_comp[i].classes, sizeof(s->prob.p.mv_comp[0].classes));
- + dec_params->probs.mv.class0_bit[i] = s->prob.p.mv_comp[i].class0;
- + memcpy(dec_params->probs.mv.bits[i], s->prob.p.mv_comp[i].bits, sizeof(s->prob.p.mv_comp[0].bits));
- + memcpy(dec_params->probs.mv.class0_fr[i], s->prob.p.mv_comp[i].class0_fp, sizeof(s->prob.p.mv_comp[0].class0_fp));
- + memcpy(dec_params->probs.mv.fr[i], s->prob.p.mv_comp[i].fp, sizeof(s->prob.p.mv_comp[0].fp));
- + dec_params->probs.mv.class0_hp[i] = s->prob.p.mv_comp[i].class0_hp;
- + dec_params->probs.mv.hp[i] = s->prob.p.mv_comp[i].hp;
- + }
- +
- + return ff_v4l2_request_reset_frame(avctx, f->tf.f);
- +}
- +
- +static int v4l2_request_vp9_decode_slice(AVCodecContext *avctx, const uint8_t *buffer, uint32_t size)
- +{
- + const VP9Context *s = avctx->priv_data;
- + const VP9Frame *f = &s->s.frames[CUR_FRAME];
- +
- + return ff_v4l2_request_append_output_buffer(avctx, f->tf.f, buffer, size);
- +}
- +
- +static int v4l2_request_vp9_end_frame(AVCodecContext *avctx)
- +{
- + const VP9Context *s = avctx->priv_data;
- + const VP9Frame *f = &s->s.frames[CUR_FRAME];
- + V4L2RequestControlsVP9 *controls = f->hwaccel_picture_private;
- + int ret;
- +
- + struct v4l2_ext_control control[] = {
- + {
- + .id = V4L2_CID_MPEG_VIDEO_VP9_FRAME_DECODE_PARAMS,
- + .ptr = &controls->decode_params,
- + .size = sizeof(controls->decode_params),
- + },
- + };
- +
- + ret = ff_v4l2_request_decode_frame(avctx, f->tf.f, control, FF_ARRAY_ELEMS(control));
- + if (ret)
- + return ret;
- +
- + if (!s->s.h.refreshctx)
- + return 0;
- +
- + return v4l2_request_vp9_get_frame_ctx(avctx, s->s.h.framectxid);
- +}
- +
- +static int v4l2_request_vp9_init(AVCodecContext *avctx)
- +{
- + // TODO: check V4L2_CID_MPEG_VIDEO_VP9_PROFILE
- + return ff_v4l2_request_init(avctx, V4L2_PIX_FMT_VP9_FRAME, 3 * 1024 * 1024, NULL, 0);
- +}
- +
- +const AVHWAccel ff_vp9_v4l2request_hwaccel = {
- + .name = "vp9_v4l2request",
- + .type = AVMEDIA_TYPE_VIDEO,
- + .id = AV_CODEC_ID_VP9,
- + .pix_fmt = AV_PIX_FMT_DRM_PRIME,
- + .start_frame = v4l2_request_vp9_start_frame,
- + .decode_slice = v4l2_request_vp9_decode_slice,
- + .end_frame = v4l2_request_vp9_end_frame,
- + .output_frame = ff_v4l2_request_output_frame,
- + .frame_priv_data_size = sizeof(V4L2RequestControlsVP9),
- + .init = v4l2_request_vp9_init,
- + .uninit = ff_v4l2_request_uninit,
- + .priv_data_size = sizeof(V4L2RequestContext),
- + .frame_params = ff_v4l2_request_frame_params,
- + .caps_internal = HWACCEL_CAP_ASYNC_SAFE,
- +};
- diff -uNr '--exclude=.git' '--exclude=sunxi' ffmpeg_n4.2.2/libavcodec/vaapi_h264.c ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/vaapi_h264.c
- --- ffmpeg_n4.2.2/libavcodec/vaapi_h264.c 2020-05-21 20:25:05.563839295 -0700
- +++ ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/vaapi_h264.c 2020-05-26 03:16:39.663173479 -0700
- @@ -314,6 +314,11 @@
- H264SliceContext *sl = &h->slice_ctx[0];
- int ret;
- + if (pic->nb_slices == 0) {
- + ret = AVERROR_INVALIDDATA;
- + goto finish;
- + }
- +
- ret = ff_vaapi_decode_issue(avctx, pic);
- if (ret < 0)
- goto finish;
- diff -uNr '--exclude=.git' '--exclude=sunxi' ffmpeg_n4.2.2/libavcodec/vp8.c ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/vp8.c
- --- ffmpeg_n4.2.2/libavcodec/vp8.c 2020-05-21 20:25:05.583838911 -0700
- +++ ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/vp8.c 2020-05-26 03:16:39.723173390 -0700
- @@ -176,6 +176,9 @@
- #if CONFIG_VP8_NVDEC_HWACCEL
- AV_PIX_FMT_CUDA,
- #endif
- +#if CONFIG_VP8_V4L2REQUEST_HWACCEL
- + AV_PIX_FMT_DRM_PRIME,
- +#endif
- AV_PIX_FMT_YUV420P,
- AV_PIX_FMT_NONE,
- };
- @@ -198,7 +201,7 @@
- return ret;
- }
- - if (!s->actually_webp && !is_vp7) {
- + if (!s->actually_webp && !is_vp7 && s->pix_fmt == AV_PIX_FMT_NONE) {
- s->pix_fmt = get_pixel_format(s);
- if (s->pix_fmt < 0)
- return AVERROR(EINVAL);
- @@ -2982,6 +2985,9 @@
- #if CONFIG_VP8_NVDEC_HWACCEL
- HWACCEL_NVDEC(vp8),
- #endif
- +#if CONFIG_VP8_V4L2REQUEST_HWACCEL
- + HWACCEL_V4L2REQUEST(vp8),
- +#endif
- NULL
- },
- };
- diff -uNr '--exclude=.git' '--exclude=sunxi' ffmpeg_n4.2.2/libavcodec/vp8-ctrls.h ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/vp8-ctrls.h
- --- ffmpeg_n4.2.2/libavcodec/vp8-ctrls.h 1969-12-31 16:00:00.000000000 -0800
- +++ ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/vp8-ctrls.h 2020-05-26 03:16:39.723173390 -0700
- @@ -0,0 +1,112 @@
- +/* SPDX-License-Identifier: GPL-2.0 */
- +/*
- + * These are the VP8 state controls for use with stateless VP8
- + * codec drivers.
- + *
- + * It turns out that these structs are not stable yet and will undergo
- + * more changes. So keep them private until they are stable and ready to
- + * become part of the official public API.
- + */
- +
- +#ifndef _VP8_CTRLS_H_
- +#define _VP8_CTRLS_H_
- +
- +#include <linux/types.h>
- +
- +#define V4L2_PIX_FMT_VP8_FRAME v4l2_fourcc('V', 'P', '8', 'F')
- +
- +#define V4L2_CID_MPEG_VIDEO_VP8_FRAME_HEADER (V4L2_CID_MPEG_BASE + 2000)
- +#define V4L2_CTRL_TYPE_VP8_FRAME_HEADER 0x301
- +
- +#define V4L2_VP8_SEGMENT_HEADER_FLAG_ENABLED 0x01
- +#define V4L2_VP8_SEGMENT_HEADER_FLAG_UPDATE_MAP 0x02
- +#define V4L2_VP8_SEGMENT_HEADER_FLAG_UPDATE_FEATURE_DATA 0x04
- +#define V4L2_VP8_SEGMENT_HEADER_FLAG_DELTA_VALUE_MODE 0x08
- +
- +struct v4l2_vp8_segment_header {
- + __s8 quant_update[4];
- + __s8 lf_update[4];
- + __u8 segment_probs[3];
- + __u8 padding;
- + __u32 flags;
- +};
- +
- +#define V4L2_VP8_LF_HEADER_ADJ_ENABLE 0x01
- +#define V4L2_VP8_LF_HEADER_DELTA_UPDATE 0x02
- +#define V4L2_VP8_LF_FILTER_TYPE_SIMPLE 0x04
- +struct v4l2_vp8_loopfilter_header {
- + __s8 ref_frm_delta[4];
- + __s8 mb_mode_delta[4];
- + __u8 sharpness_level;
- + __u8 level;
- + __u16 padding;
- + __u32 flags;
- +};
- +
- +struct v4l2_vp8_quantization_header {
- + __u8 y_ac_qi;
- + __s8 y_dc_delta;
- + __s8 y2_dc_delta;
- + __s8 y2_ac_delta;
- + __s8 uv_dc_delta;
- + __s8 uv_ac_delta;
- + __u16 padding;
- +};
- +
- +struct v4l2_vp8_entropy_header {
- + __u8 coeff_probs[4][8][3][11];
- + __u8 y_mode_probs[4];
- + __u8 uv_mode_probs[3];
- + __u8 mv_probs[2][19];
- + __u8 padding[3];
- +};
- +
- +struct v4l2_vp8_entropy_coder_state {
- + __u8 range;
- + __u8 value;
- + __u8 bit_count;
- + __u8 padding;
- +};
- +
- +#define V4L2_VP8_FRAME_HEADER_FLAG_KEY_FRAME 0x01
- +#define V4L2_VP8_FRAME_HEADER_FLAG_EXPERIMENTAL 0x02
- +#define V4L2_VP8_FRAME_HEADER_FLAG_SHOW_FRAME 0x04
- +#define V4L2_VP8_FRAME_HEADER_FLAG_MB_NO_SKIP_COEFF 0x08
- +#define V4L2_VP8_FRAME_HEADER_FLAG_SIGN_BIAS_GOLDEN 0x10
- +#define V4L2_VP8_FRAME_HEADER_FLAG_SIGN_BIAS_ALT 0x20
- +
- +#define VP8_FRAME_IS_KEY_FRAME(hdr) \
- + (!!((hdr)->flags & V4L2_VP8_FRAME_HEADER_FLAG_KEY_FRAME))
- +
- +struct v4l2_ctrl_vp8_frame_header {
- + struct v4l2_vp8_segment_header segment_header;
- + struct v4l2_vp8_loopfilter_header lf_header;
- + struct v4l2_vp8_quantization_header quant_header;
- + struct v4l2_vp8_entropy_header entropy_header;
- + struct v4l2_vp8_entropy_coder_state coder_state;
- +
- + __u16 width;
- + __u16 height;
- +
- + __u8 horizontal_scale;
- + __u8 vertical_scale;
- +
- + __u8 version;
- + __u8 prob_skip_false;
- + __u8 prob_intra;
- + __u8 prob_last;
- + __u8 prob_gf;
- + __u8 num_dct_parts;
- +
- + __u32 first_part_size;
- + __u32 first_part_header_bits;
- + __u32 dct_part_sizes[8];
- +
- + __u64 last_frame_ts;
- + __u64 golden_frame_ts;
- + __u64 alt_frame_ts;
- +
- + __u64 flags;
- +};
- +
- +#endif
- diff -uNr '--exclude=.git' '--exclude=sunxi' ffmpeg_n4.2.2/libavcodec/vp9.c ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/vp9.c
- --- ffmpeg_n4.2.2/libavcodec/vp9.c 2020-05-21 20:25:05.583838911 -0700
- +++ ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/vp9.c 2020-05-26 03:16:39.733173375 -0700
- @@ -173,6 +173,7 @@
- #define HWACCEL_MAX (CONFIG_VP9_DXVA2_HWACCEL + \
- CONFIG_VP9_D3D11VA_HWACCEL * 2 + \
- CONFIG_VP9_NVDEC_HWACCEL + \
- + CONFIG_VP9_V4L2REQUEST_HWACCEL + \
- CONFIG_VP9_VAAPI_HWACCEL)
- enum AVPixelFormat pix_fmts[HWACCEL_MAX + 2], *fmtp = pix_fmts;
- VP9Context *s = avctx->priv_data;
- @@ -202,6 +203,9 @@
- #if CONFIG_VP9_VAAPI_HWACCEL
- *fmtp++ = AV_PIX_FMT_VAAPI;
- #endif
- +#if CONFIG_VP9_V4L2REQUEST_HWACCEL
- + *fmtp++ = AV_PIX_FMT_DRM_PRIME;
- +#endif
- break;
- case AV_PIX_FMT_YUV420P12:
- #if CONFIG_VP9_NVDEC_HWACCEL
- @@ -210,6 +214,9 @@
- #if CONFIG_VP9_VAAPI_HWACCEL
- *fmtp++ = AV_PIX_FMT_VAAPI;
- #endif
- +#if CONFIG_VP9_V4L2REQUEST_HWACCEL
- + *fmtp++ = AV_PIX_FMT_DRM_PRIME;
- +#endif
- break;
- }
- @@ -672,7 +679,8 @@
- get_bits(&s->gb, 8) : 255;
- }
- - if (get_bits1(&s->gb)) {
- + s->s.h.segmentation.update_data = get_bits1(&s->gb);
- + if (s->s.h.segmentation.update_data) {
- s->s.h.segmentation.absolute_vals = get_bits1(&s->gb);
- for (i = 0; i < 8; i++) {
- if ((s->s.h.segmentation.feat[i].q_enabled = get_bits1(&s->gb)))
- @@ -1817,6 +1825,9 @@
- #if CONFIG_VP9_VAAPI_HWACCEL
- HWACCEL_VAAPI(vp9),
- #endif
- +#if CONFIG_VP9_V4L2REQUEST_HWACCEL
- + HWACCEL_V4L2REQUEST(vp9),
- +#endif
- NULL
- },
- };
- diff -uNr '--exclude=.git' '--exclude=sunxi' ffmpeg_n4.2.2/libavcodec/vp9-ctrls.h ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/vp9-ctrls.h
- --- ffmpeg_n4.2.2/libavcodec/vp9-ctrls.h 1969-12-31 16:00:00.000000000 -0800
- +++ ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/vp9-ctrls.h 2020-05-26 03:16:39.733173375 -0700
- @@ -0,0 +1,485 @@
- +/* SPDX-License-Identifier: GPL-2.0 */
- +/*
- + * These are the VP9 state controls for use with stateless VP9
- + * codec drivers.
- + *
- + * It turns out that these structs are not stable yet and will undergo
- + * more changes. So keep them private until they are stable and ready to
- + * become part of the official public API.
- + */
- +
- +#ifndef _VP9_CTRLS_H_
- +#define _VP9_CTRLS_H_
- +
- +#include <linux/types.h>
- +
- +#define V4L2_PIX_FMT_VP9_FRAME v4l2_fourcc('V', 'P', '9', 'F')
- +
- +#define V4L2_CID_MPEG_VIDEO_VP9_FRAME_CONTEXT(i) (V4L2_CID_MPEG_BASE + 4000 + (i))
- +#define V4L2_CID_MPEG_VIDEO_VP9_FRAME_DECODE_PARAMS (V4L2_CID_MPEG_BASE + 4004)
- +#define V4L2_CTRL_TYPE_VP9_FRAME_CONTEXT 0x400
- +#define V4L2_CTRL_TYPE_VP9_FRAME_DECODE_PARAMS 0x404
- +
- +/**
- + * enum v4l2_vp9_loop_filter_flags - VP9 loop filter flags
- + *
- + * @V4L2_VP9_LOOP_FILTER_FLAG_DELTA_ENABLED: the filter level depends on
- + * the mode and reference frame used
- + * to predict a block
- + * @V4L2_VP9_LOOP_FILTER_FLAG_DELTA_UPDATE: the bitstream contains additional
- + * syntax elements that specify which
- + * mode and reference frame deltas
- + * are to be updated
- + *
- + * Those are the flags you should pass to &v4l2_vp9_loop_filter.flags. See
- + * section '7.2.8 Loop filter semantics' of the VP9 specification for more
- + * details.
- + */
- +enum v4l2_vp9_loop_filter_flags {
- + V4L2_VP9_LOOP_FILTER_FLAG_DELTA_ENABLED = 1 << 0,
- + V4L2_VP9_LOOP_FILTER_FLAG_DELTA_UPDATE = 1 << 1,
- +};
- +
- +/**
- + * struct v4l2_vp9_loop_filter - VP9 loop filter parameters
- + *
- + * @flags: combination of V4L2_VP9_LOOP_FILTER_FLAG_* flags
- + * @level: indicates the loop filter strength
- + * @sharpness: indicates the sharpness level
- + * @ref_deltas: contains the adjustment needed for the filter level based on
- + * the chosen reference frame
- + * @mode_deltas: contains the adjustment needed for the filter level based on
- + * the chosen mode
- + * @level_lookup: level lookup table
- + *
- + * This structure contains all loop filter related parameters. See sections
- + * '7.2.8 Loop filter semantics' and '8.8.1 Loop filter frame init process'
- + * of the VP9 specification for more details.
- + */
- +struct v4l2_vp9_loop_filter {
- + __u8 flags;
- + __u8 level;
- + __u8 sharpness;
- + __s8 ref_deltas[4];
- + __s8 mode_deltas[2];
- + __u8 level_lookup[8][4][2];
- +};
- +
- +/**
- + * struct v4l2_vp9_quantization - VP9 quantization parameters
- + *
- + * @base_q_idx: indicates the base frame qindex
- + * @delta_q_y_dc: indicates the Y DC quantizer relative to base_q_idx
- + * @delta_q_uv_dc: indicates the UV DC quantizer relative to base_q_idx
- + * @delta_q_uv_ac: indicates the UV AC quantizer relative to base_q_idx
- + * @padding: padding bytes to align things on 64 bits. Must be set to 0
- + *
- + * Encodes the quantization parameters. See section '7.2.9 Quantization params
- + * syntax' of the VP9 specification for more details.
- + */
- +struct v4l2_vp9_quantization {
- + __u8 base_q_idx;
- + __s8 delta_q_y_dc;
- + __s8 delta_q_uv_dc;
- + __s8 delta_q_uv_ac;
- + __u8 padding[4];
- +};
- +
- +/**
- + * enum v4l2_vp9_segmentation_flags - VP9 segmentation flags
- + *
- + * @V4L2_VP9_SEGMENTATION_FLAG_ENABLED: indicates that this frame makes use of
- + * the segmentation tool
- + * @V4L2_VP9_SEGMENTATION_FLAG_UPDATE_MAP: indicates that the segmentation map
- + * should be updated during the
- + * decoding of this frame
- + * @V4L2_VP9_SEGMENTATION_FLAG_TEMPORAL_UPDATE: indicates that the updates to
- + * the segmentation map are coded
- + * relative to the existing
- + * segmentation map
- + * @V4L2_VP9_SEGMENTATION_FLAG_UPDATE_DATA: indicates that new parameters are
- + * about to be specified for each
- + * segment
- + * @V4L2_VP9_SEGMENTATION_FLAG_ABS_OR_DELTA_UPDATE: indicates that the
- + * segmentation parameters
- + * represent the actual values
- + * to be used
- + *
- + * Those are the flags you should pass to &v4l2_vp9_segmentation.flags. See
- + * section '7.2.10 Segmentation params syntax' of the VP9 specification for
- + * more details.
- + */
- +enum v4l2_vp9_segmentation_flags {
- + V4L2_VP9_SEGMENTATION_FLAG_ENABLED = 1 << 0,
- + V4L2_VP9_SEGMENTATION_FLAG_UPDATE_MAP = 1 << 1,
- + V4L2_VP9_SEGMENTATION_FLAG_TEMPORAL_UPDATE = 1 << 2,
- + V4L2_VP9_SEGMENTATION_FLAG_UPDATE_DATA = 1 << 3,
- + V4L2_VP9_SEGMENTATION_FLAG_ABS_OR_DELTA_UPDATE = 1 << 4,
- +};
- +
- +#define V4L2_VP9_SEGMENT_FEATURE_ENABLED(id) (1 << (id))
- +#define V4L2_VP9_SEGMENT_FEATURE_ENABLED_MASK 0xf
- +
- +/**
- + * enum v4l2_vp9_segment_feature - VP9 segment feature IDs
- + *
- + * @V4L2_VP9_SEGMENT_FEATURE_QP_DELTA: QP delta segment feature
- + * @V4L2_VP9_SEGMENT_FEATURE_LF: loop filter segment feature
- + * @V4L2_VP9_SEGMENT_FEATURE_REF_FRAME: reference frame segment feature
- + * @V4L2_VP9_SEGMENT_FEATURE_SKIP: skip segment feature
- + * @V4L2_VP9_SEGMENT_FEATURE_CNT: number of segment features
- + *
- + * Segment feature IDs. See section '7.2.10 Segmentation params syntax' of the
- + * VP9 specification for more details.
- + */
- +enum v4l2_vp9_segment_feature {
- + V4L2_VP9_SEGMENT_FEATURE_QP_DELTA,
- + V4L2_VP9_SEGMENT_FEATURE_LF,
- + V4L2_VP9_SEGMENT_FEATURE_REF_FRAME,
- + V4L2_VP9_SEGMENT_FEATURE_SKIP,
- + V4L2_VP9_SEGMENT_FEATURE_CNT,
- +};
- +
- +/**
- + * struct v4l2_vp9_segmentation - VP9 segmentation parameters
- + *
- + * @flags: combination of V4L2_VP9_SEGMENTATION_FLAG_* flags
- + * @tree_probs: specifies the probability values to be used when
- + * decoding a Segment-ID. See '5.15. Segmentation map'
- + * section of the VP9 specification for more details.
- + * @pred_prob: specifies the probability values to be used when decoding a
- + * Predicted-Segment-ID. See '6.4.14. Get segment id syntax'
- + * section of :ref:`vp9` for more details.
- + * @padding: padding used to make things aligned on 64 bits. Shall be zero
- + * filled
- + * @feature_enabled: bitmask defining which features are enabled in each
- + * segment
- + * @feature_data: data attached to each feature. Data entry is only valid if
- + * the feature is enabled
- + *
- + * Encodes the segmentation parameters. See section '7.2.10 Segmentation
- + * params syntax' of the VP9 specification for more details.
- + */
- +struct v4l2_vp9_segmentation {
- + __u8 flags;
- + __u8 tree_probs[7];
- + __u8 pred_probs[3];
- + __u8 padding[5];
- + __u8 feature_enabled[8];
- + __s16 feature_data[8][4];
- +};
- +
- +/**
- + * enum v4l2_vp9_intra_prediction_mode - VP9 Intra prediction modes
- + *
- + * @V4L2_VP9_INTRA_PRED_MODE_DC: DC intra prediction
- + * @V4L2_VP9_INTRA_PRED_MODE_V: vertical intra prediction
- + * @V4L2_VP9_INTRA_PRED_MODE_H: horizontal intra prediction
- + * @V4L2_VP9_INTRA_PRED_MODE_D45: D45 intra prediction
- + * @V4L2_VP9_INTRA_PRED_MODE_D135: D135 intra prediction
- + * @V4L2_VP9_INTRA_PRED_MODE_D117: D117 intra prediction
- + * @V4L2_VP9_INTRA_PRED_MODE_D153: D153 intra prediction
- + * @V4L2_VP9_INTRA_PRED_MODE_D207: D207 intra prediction
- + * @V4L2_VP9_INTRA_PRED_MODE_D63: D63 intra prediction
- + * @V4L2_VP9_INTRA_PRED_MODE_TM: True Motion intra prediction
- + *
- + * See section '7.4.5 Intra frame mode info semantics' for more details.
- + */
- +enum v4l2_vp9_intra_prediction_mode {
- + V4L2_VP9_INTRA_PRED_MODE_DC,
- + V4L2_VP9_INTRA_PRED_MODE_V,
- + V4L2_VP9_INTRA_PRED_MODE_H,
- + V4L2_VP9_INTRA_PRED_MODE_D45,
- + V4L2_VP9_INTRA_PRED_MODE_D135,
- + V4L2_VP9_INTRA_PRED_MODE_D117,
- + V4L2_VP9_INTRA_PRED_MODE_D153,
- + V4L2_VP9_INTRA_PRED_MODE_D207,
- + V4L2_VP9_INTRA_PRED_MODE_D63,
- + V4L2_VP9_INTRA_PRED_MODE_TM,
- +};
- +
- +/**
- + * struct v4l2_vp9_mv_probabilities - VP9 Motion vector probabilities
- + * @joint: motion vector joint probabilities
- + * @sign: motion vector sign probabilities
- + * @class: motion vector class probabilities
- + * @class0_bit: motion vector class0 bit probabilities
- + * @bits: motion vector bits probabilities
- + * @class0_fr: motion vector class0 fractional bit probabilities
- + * @fr: motion vector fractional bit probabilities
- + * @class0_hp: motion vector class0 high precision fractional bit probabilities
- + * @hp: motion vector high precision fractional bit probabilities
- + */
- +struct v4l2_vp9_mv_probabilities {
- + __u8 joint[3];
- + __u8 sign[2];
- + __u8 class[2][10];
- + __u8 class0_bit[2];
- + __u8 bits[2][10];
- + __u8 class0_fr[2][2][3];
- + __u8 fr[2][3];
- + __u8 class0_hp[2];
- + __u8 hp[2];
- +};
- +
- +/**
- + * struct v4l2_vp9_probabilities - VP9 Probabilities
- + *
- + * @tx8: TX 8x8 probabilities
- + * @tx16: TX 16x16 probabilities
- + * @tx32: TX 32x32 probabilities
- + * @coef: coefficient probabilities
- + * @skip: skip probabilities
- + * @inter_mode: inter mode probabilities
- + * @interp_filter: interpolation filter probabilities
- + * @is_inter: is inter-block probabilities
- + * @comp_mode: compound prediction mode probabilities
- + * @single_ref: single ref probabilities
- + * @comp_ref: compound ref probabilities
- + * @y_mode: Y prediction mode probabilities
- + * @uv_mode: UV prediction mode probabilities
- + * @partition: partition probabilities
- + * @mv: motion vector probabilities
- + *
- + * Structure containing most VP9 probabilities. See the VP9 specification
- + * for more details.
- + */
- +struct v4l2_vp9_probabilities {
- + __u8 tx8[2][1];
- + __u8 tx16[2][2];
- + __u8 tx32[2][3];
- + __u8 coef[4][2][2][6][6][3];
- + __u8 skip[3];
- + __u8 inter_mode[7][3];
- + __u8 interp_filter[4][2];
- + __u8 is_inter[4];
- + __u8 comp_mode[5];
- + __u8 single_ref[5][2];
- + __u8 comp_ref[5];
- + __u8 y_mode[4][9];
- + __u8 uv_mode[10][9];
- + __u8 partition[16][3];
- +
- + struct v4l2_vp9_mv_probabilities mv;
- +};
- +
- +/**
- + * enum v4l2_vp9_reset_frame_context - Valid values for
- + * &v4l2_ctrl_vp9_frame_decode_params->reset_frame_context
- + *
- + * @V4L2_VP9_RESET_FRAME_CTX_NONE: don't reset any frame context
- + * @V4L2_VP9_RESET_FRAME_CTX_SPEC: reset the frame context pointed to by
- + * &v4l2_ctrl_vp9_frame_decode_params.frame_context_idx
- + * @V4L2_VP9_RESET_FRAME_CTX_ALL: reset all frame contexts
- + *
- + * See section '7.2 Uncompressed header semantics' of the VP9 specification
- + * for more details.
- + */
- +enum v4l2_vp9_reset_frame_context {
- + V4L2_VP9_RESET_FRAME_CTX_NONE,
- + V4L2_VP9_RESET_FRAME_CTX_SPEC,
- + V4L2_VP9_RESET_FRAME_CTX_ALL,
- +};
- +
- +/**
- + * enum v4l2_vp9_interpolation_filter - VP9 interpolation filter types
- + *
- + * @V4L2_VP9_INTERP_FILTER_8TAP: eight tap filter
- + * @V4L2_VP9_INTERP_FILTER_8TAP_SMOOTH: eight tap smooth filter
- + * @V4L2_VP9_INTERP_FILTER_8TAP_SHARP: eight tap sharp filter
- + * @V4L2_VP9_INTERP_FILTER_BILINEAR: bilinear filter
- + * @V4L2_VP9_INTERP_FILTER_SWITCHABLE: filter selection is signaled at the
- + * block level
- + *
- + * See section '7.2.7 Interpolation filter semantics' of the VP9 specification
- + * for more details.
- + */
- +enum v4l2_vp9_interpolation_filter {
- + V4L2_VP9_INTERP_FILTER_8TAP,
- + V4L2_VP9_INTERP_FILTER_8TAP_SMOOTH,
- + V4L2_VP9_INTERP_FILTER_8TAP_SHARP,
- + V4L2_VP9_INTERP_FILTER_BILINEAR,
- + V4L2_VP9_INTERP_FILTER_SWITCHABLE,
- +};
- +
- +/**
- + * enum v4l2_vp9_reference_mode - VP9 reference modes
- + *
- + * @V4L2_VP9_REF_MODE_SINGLE: indicates that all the inter blocks use only a
- + * single reference frame to generate motion
- + * compensated prediction
- + * @V4L2_VP9_REF_MODE_COMPOUND: requires all the inter blocks to use compound
- + * mode. Single reference frame prediction is not
- + * allowed
- + * @V4L2_VP9_REF_MODE_SELECT: allows each individual inter block to select
- + * between single and compound prediction modes
- + *
- + * See section '7.3.6 Frame reference mode semantics' of the VP9 specification
- + * for more details.
- + */
- +enum v4l2_vp9_reference_mode {
- + V4L2_VP9_REF_MODE_SINGLE,
- + V4L2_VP9_REF_MODE_COMPOUND,
- + V4L2_VP9_REF_MODE_SELECT,
- +};
- +
- +/**
- + * enum v4l2_vp9_tx_mode - VP9 TX modes
- + *
- + * @V4L2_VP9_TX_MODE_ONLY_4X4: transform size is 4x4
- + * @V4L2_VP9_TX_MODE_ALLOW_8X8: transform size can be up to 8x8
- + * @V4L2_VP9_TX_MODE_ALLOW_16X16: transform size can be up to 16x16
- + * @V4L2_VP9_TX_MODE_ALLOW_32X32: transform size can be up to 32x32
- + * @V4L2_VP9_TX_MODE_SELECT: bitstream contains transform size for each block
- + *
- + * See section '7.3.1 Tx mode semantics' of the VP9 specification for more
- + * details.
- + */
- +enum v4l2_vp9_tx_mode {
- + V4L2_VP9_TX_MODE_ONLY_4X4,
- + V4L2_VP9_TX_MODE_ALLOW_8X8,
- + V4L2_VP9_TX_MODE_ALLOW_16X16,
- + V4L2_VP9_TX_MODE_ALLOW_32X32,
- + V4L2_VP9_TX_MODE_SELECT,
- +};
- +
- +/**
- + * enum v4l2_vp9_ref_id - VP9 Reference frame IDs
- + *
- + * @V4L2_REF_ID_LAST: last reference frame
- + * @V4L2_REF_ID_GOLDEN: golden reference frame
- + * @V4L2_REF_ID_ALTREF: alternative reference frame
- + * @V4L2_REF_ID_CNT: number of reference frames
- + *
- + * See section '7.4.12 Ref frames semantics' of the VP9 specification for more
- + * details.
- + */
- +enum v4l2_vp9_ref_id {
- + V4L2_REF_ID_LAST,
- + V4L2_REF_ID_GOLDEN,
- + V4L2_REF_ID_ALTREF,
- + V4L2_REF_ID_CNT,
- +};
- +
- +/**
- + * enum v4l2_vp9_frame_flags - VP9 frame flags
- + * @V4L2_VP9_FRAME_FLAG_KEY_FRAME: the frame is a key frame
- + * @V4L2_VP9_FRAME_FLAG_SHOW_FRAME: the frame should be displayed
- + * @V4L2_VP9_FRAME_FLAG_ERROR_RESILIENT: the decoding should be error resilient
- + * @V4L2_VP9_FRAME_FLAG_INTRA_ONLY: the frame does not reference other frames
- + * @V4L2_VP9_FRAME_FLAG_ALLOW_HIGH_PREC_MV: the frame can use high precision
- + * motion vectors
- + * @V4L2_VP9_FRAME_FLAG_REFRESH_FRAME_CTX: frame context should be updated
- + * after decoding
- + * @V4L2_VP9_FRAME_FLAG_PARALLEL_DEC_MODE: parallel decoding is used
- + * @V4L2_VP9_FRAME_FLAG_X_SUBSAMPLING: horizontal subsampling is enabled
- + * @V4L2_VP9_FRAME_FLAG_Y_SUBSAMPLING: vertical subsampling is enabled
- + * @V4L2_VP9_FRAME_FLAG_COLOR_RANGE_FULL_SWING: full color range is used
- + *
- + * Check the VP9 specification for more details.
- + */
- +enum v4l2_vp9_frame_flags {
- + V4L2_VP9_FRAME_FLAG_KEY_FRAME = 1 << 0,
- + V4L2_VP9_FRAME_FLAG_SHOW_FRAME = 1 << 1,
- + V4L2_VP9_FRAME_FLAG_ERROR_RESILIENT = 1 << 2,
- + V4L2_VP9_FRAME_FLAG_INTRA_ONLY = 1 << 3,
- + V4L2_VP9_FRAME_FLAG_ALLOW_HIGH_PREC_MV = 1 << 4,
- + V4L2_VP9_FRAME_FLAG_REFRESH_FRAME_CTX = 1 << 5,
- + V4L2_VP9_FRAME_FLAG_PARALLEL_DEC_MODE = 1 << 6,
- + V4L2_VP9_FRAME_FLAG_X_SUBSAMPLING = 1 << 7,
- + V4L2_VP9_FRAME_FLAG_Y_SUBSAMPLING = 1 << 8,
- + V4L2_VP9_FRAME_FLAG_COLOR_RANGE_FULL_SWING = 1 << 9,
- +};
- +
- +#define V4L2_VP9_PROFILE_MAX 3
- +
- +/**
- + * struct v4l2_ctrl_vp9_frame_decode_params - VP9 frame decoding control
- + *
- + * @flags: combination of V4L2_VP9_FRAME_FLAG_* flags
- + * @compressed_header_size: compressed header size in bytes
- + * @uncompressed_header_size: uncompressed header size in bytes
- + * @profile: VP9 profile. Can be 0, 1, 2 or 3
- + * @reset_frame_context: specifies whether the frame context should be reset
- + * to default values. See &v4l2_vp9_reset_frame_context
- + * for more details
- + * @frame_context_idx: frame context that should be used/updated
- + * @bit_depth: bits per component. Can be 8, 10 or 12. Note that not all
- + * profiles support 10 and/or 12 bits depths
- + * @interpolation_filter: specifies the filter selection used for performing
- + * inter prediction. See &v4l2_vp9_interpolation_filter
- + * for more details
- + * @tile_cols_log2: specifies the base 2 logarithm of the width of each tile
- + * (where the width is measured in units of 8x8 blocks).
- + * Shall be less than or equal to 6
- + * @tile_rows_log2: specifies the base 2 logarithm of the height of each tile
- + * (where the height is measured in units of 8x8 blocks)
- + * @tx_mode: specifies the TX mode. See &v4l2_vp9_tx_mode for more details
- + * @reference_mode: specifies the type of inter prediction to be used. See
- + * &v4l2_vp9_reference_mode for more details
- + * @padding: needed to make this struct 64 bit aligned. Shall be filled with
- + * zeros
- + * @frame_width_minus_1: add 1 to it and you'll get the frame width expressed
- + * in pixels
- + * @frame_height_minus_1: add 1 to it and you'll get the frame height expressed
- + * in pixels
- + * @render_width_minus_1: add 1 to it and you'll get the expected render width
- + * expressed in pixels. This is not used during the
- + * decoding process but might be used by HW scalers to
- + * prepare a frame that's ready for scanout
- + * @render_height_minus_1: add 1 to it and you'll get the expected render height
- + * expressed in pixels. This is not used during the
- + * decoding process but might be used by HW scalers to
- + * prepare a frame that's ready for scanout
- + * @refs: array of reference frames. See &v4l2_vp9_ref_id for more details
- + * @lf: loop filter parameters. See &v4l2_vp9_loop_filter for more details
- + * @quant: quantization parameters. See &v4l2_vp9_quantization for more details
- + * @seg: segmentation parameters. See &v4l2_vp9_segmentation for more details
- + * @probs: probabilities. See &v4l2_vp9_probabilities for more details
- + */
- +struct v4l2_ctrl_vp9_frame_decode_params {
- + __u32 flags;
- + __u16 compressed_header_size;
- + __u16 uncompressed_header_size;
- + __u8 profile;
- + __u8 reset_frame_context;
- + __u8 frame_context_idx;
- + __u8 bit_depth;
- + __u8 interpolation_filter;
- + __u8 tile_cols_log2;
- + __u8 tile_rows_log2;
- + __u8 tx_mode;
- + __u8 reference_mode;
- + __u8 padding[6];
- + __u16 frame_width_minus_1;
- + __u16 frame_height_minus_1;
- + __u16 render_width_minus_1;
- + __u16 render_height_minus_1;
- + __u64 refs[V4L2_REF_ID_CNT];
- + struct v4l2_vp9_loop_filter lf;
- + struct v4l2_vp9_quantization quant;
- + struct v4l2_vp9_segmentation seg;
- + struct v4l2_vp9_probabilities probs;
- +};
- +
- +#define V4L2_VP9_NUM_FRAME_CTX 4
- +
- +/**
- + * struct v4l2_ctrl_vp9_frame_ctx - VP9 frame context control
- + *
- + * @probs: VP9 probabilities
- + *
- + * This control is accessed in both directions. The user should initialize the
- + * 4 contexts with default values just after starting the stream. Then before
- + * decoding a frame it should query the current frame context (the one passed
- + * through &v4l2_ctrl_vp9_frame_decode_params.frame_context_idx) to initialize
- + * &v4l2_ctrl_vp9_frame_decode_params.probs. The probs are then adjusted based
- + * on the bitstream info and passed to the kernel. The codec should update
- + * the frame context after the frame has been decoded, so that next time
- + * userspace queries this context it contains the updated probabilities.
- + */
- +struct v4l2_ctrl_vp9_frame_ctx {
- + struct v4l2_vp9_probabilities probs;
- +};
- +
- +#endif /* _VP9_CTRLS_H_ */
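The kerneldoc for struct v4l2_ctrl_vp9_frame_ctx above describes a two-way protocol: userspace seeds the four frame contexts with defaults, reads back the context selected by frame_context_idx before each frame to fill v4l2_ctrl_vp9_frame_decode_params.probs, and the driver refreshes that context once the frame is decoded. A minimal userspace sketch of that flow follows; it assumes the control IDs V4L2_CID_MPEG_VIDEO_VP9_FRAME_DECODE_PARAMS and V4L2_CID_MPEG_VIDEO_VP9_FRAME_CONTEXT(i) are defined earlier in vp9-ctrls.h (not shown in this excerpt), so treat those names as illustrative only.

    /* Hedged sketch: the control ID macros are assumed from vp9-ctrls.h, not confirmed here. */
    #include <sys/ioctl.h>
    #include <linux/videodev2.h>
    #include "vp9-ctrls.h"

    static int vp9_get_frame_ctx(int video_fd, unsigned int idx,
                                 struct v4l2_ctrl_vp9_frame_ctx *fctx)
    {
        struct v4l2_ext_control ctrl = {
            .id   = V4L2_CID_MPEG_VIDEO_VP9_FRAME_CONTEXT(idx), /* assumed name */
            .size = sizeof(*fctx),
            .ptr  = fctx,
        };
        struct v4l2_ext_controls ctrls = {
            .which    = V4L2_CTRL_WHICH_CUR_VAL,
            .count    = 1,
            .controls = &ctrl,
        };

        /* Read back the context the driver updated after the previous frame;
         * its probabilities seed v4l2_ctrl_vp9_frame_decode_params.probs. */
        return ioctl(video_fd, VIDIOC_G_EXT_CTRLS, &ctrls);
    }

    static int vp9_set_decode_params(int video_fd, int request_fd,
                                     struct v4l2_ctrl_vp9_frame_decode_params *dec)
    {
        struct v4l2_ext_control ctrl = {
            .id   = V4L2_CID_MPEG_VIDEO_VP9_FRAME_DECODE_PARAMS, /* assumed name */
            .size = sizeof(*dec),
            .ptr  = dec,
        };
        struct v4l2_ext_controls ctrls = {
            .which      = V4L2_CTRL_WHICH_REQUEST_VAL,
            .request_fd = request_fd,
            .count      = 1,
            .controls   = &ctrl,
        };

        /* The adjusted probabilities travel with the media request,
         * together with the buffer holding the frame's bitstream. */
        return ioctl(video_fd, VIDIOC_S_EXT_CTRLS, &ctrls);
    }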
- diff -uNr '--exclude=.git' '--exclude=sunxi' ffmpeg_n4.2.2/libavcodec/vp9shared.h ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/vp9shared.h
- --- ffmpeg_n4.2.2/libavcodec/vp9shared.h 2020-05-21 20:25:05.593838719 -0700
- +++ ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavcodec/vp9shared.h 2020-05-26 03:16:39.753173345 -0700
- @@ -131,6 +131,7 @@
- uint8_t temporal;
- uint8_t absolute_vals;
- uint8_t update_map;
- + uint8_t update_data;
- uint8_t prob[7];
- uint8_t pred_prob[3];
- struct {
- diff -uNr '--exclude=.git' '--exclude=sunxi' ffmpeg_n4.2.2/libavformat/rtsp.c ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavformat/rtsp.c
- --- ffmpeg_n4.2.2/libavformat/rtsp.c 2020-05-21 20:25:05.823834299 -0700
- +++ ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavformat/rtsp.c 2020-05-26 03:16:40.503172231 -0700
- @@ -2334,7 +2334,9 @@
- RTSPStream *rtsp_st;
- int size, i, err;
- char *content;
- + const char *p, *sp="", *sources="", *sp2, *sources2;
- char url[1024];
- + char sources_buf[1024];
- if (!ff_network_init())
- return AVERROR(EIO);
- @@ -2360,6 +2362,16 @@
- av_freep(&content);
- if (err) goto fail;
- + /* Search for sources= tag in original URL for rtp protocol only */
- + if (strncmp(s->url, "rtp://", 6) == 0) {
- + p = strchr(s->url, '?');
- + if (p && av_find_info_tag(sources_buf, sizeof(sources_buf), "sources", p)) {
- + /* av_log(s, AV_LOG_VERBOSE, "sdp_read_header found sources %s\n", sources_buf); */
- + sp = sources_buf;
- + sources = "&sources=";
- + }
- + }
- +
- /* open each RTP stream */
- for (i = 0; i < rt->nb_rtsp_streams; i++) {
- char namebuf[50];
- @@ -2377,12 +2389,22 @@
- av_dict_free(&opts);
- goto fail;
- }
- +
- + /* Prepare to add sources to the url to be opened.
- + Otherwise the join to the source-specific multicast will be missing */
- + sources2 = sources;
- + sp2 = sp;
- + /* ignore sources from original URL, when sources are already set in rtsp_st */
- + if (rtsp_st->nb_include_source_addrs > 0)
- + sources2 = sp2 = "";
- +
- ff_url_join(url, sizeof(url), "rtp", NULL,
- namebuf, rtsp_st->sdp_port,
- - "?localport=%d&ttl=%d&connect=%d&write_to_source=%d",
- + "?localport=%d&ttl=%d&connect=%d&write_to_source=%d%s%s",
- rtsp_st->sdp_port, rtsp_st->sdp_ttl,
- rt->rtsp_flags & RTSP_FLAG_FILTER_SRC ? 1 : 0,
- - rt->rtsp_flags & RTSP_FLAG_RTCP_TO_SOURCE ? 1 : 0);
- + rt->rtsp_flags & RTSP_FLAG_RTCP_TO_SOURCE ? 1 : 0,
- + sources2, sp2);
- append_source_addrs(url, sizeof(url), "sources",
- rtsp_st->nb_include_source_addrs,
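The rtsp.c hunk above forwards a sources= filter found in the original rtp:// URL into the per-stream URLs built by ff_url_join, so the source-specific multicast join is not lost when the SDP is expanded. As a rough illustration with a made-up address, the tag extraction it relies on behaves like this standalone snippet:

    /* Illustrative only: the URL and address below are made up. */
    #include <stdio.h>
    #include <string.h>
    #include <libavutil/parseutils.h>

    int main(void)
    {
        const char *url = "rtp://239.1.1.1:5004?sources=10.0.0.5";
        const char *p = strchr(url, '?');
        char buf[1024];

        /* av_find_info_tag() scans the query string for "sources=...";
         * on success the per-stream URL gets "&sources=<value>" appended,
         * which lets the RTP protocol perform a source-specific multicast join. */
        if (p && av_find_info_tag(buf, sizeof(buf), "sources", p))
            printf("append \"&sources=%s\" to the per-stream rtp:// URL\n", buf);
        return 0;
    }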
- diff -uNr '--exclude=.git' '--exclude=sunxi' ffmpeg_n4.2.2/libavutil/buffer.c ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavutil/buffer.c
- --- ffmpeg_n4.2.2/libavutil/buffer.c 2020-05-21 20:25:05.863833530 -0700
- +++ ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavutil/buffer.c 2020-05-26 03:16:40.613172068 -0700
- @@ -272,6 +272,19 @@
- av_freep(&pool);
- }
- +void av_buffer_pool_flush(AVBufferPool *pool)
- +{
- + ff_mutex_lock(&pool->mutex);
- + while (pool->pool) {
- + BufferPoolEntry *buf = pool->pool;
- + pool->pool = buf->next;
- +
- + buf->free(buf->opaque, buf->data);
- + av_freep(&buf);
- + }
- + ff_mutex_unlock(&pool->mutex);
- +}
- +
- void av_buffer_pool_uninit(AVBufferPool **ppool)
- {
- AVBufferPool *pool;
- diff -uNr '--exclude=.git' '--exclude=sunxi' ffmpeg_n4.2.2/libavutil/buffer.h ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavutil/buffer.h
- --- ffmpeg_n4.2.2/libavutil/buffer.h 2020-05-21 20:25:05.863833530 -0700
- +++ ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavutil/buffer.h 2020-05-26 03:16:40.613172068 -0700
- @@ -267,6 +267,11 @@
- void (*pool_free)(void *opaque));
- /**
- + * Free all available buffers in a buffer pool.
- + */
- + void av_buffer_pool_flush(AVBufferPool *pool);
- +
- +/**
- * Mark the pool as being available for freeing. It will actually be freed only
- * once all the allocated buffers associated with the pool are released. Thus it
- * is safe to call this function while some of the allocated buffers are still
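The av_buffer_pool_flush() helper added above frees only the buffers currently sitting unused in the pool; buffers still referenced elsewhere are untouched and simply return to the now-empty cache when released. A minimal usage sketch, assuming nothing beyond the declaration introduced by this patch:

    #include <libavutil/buffer.h>

    int main(void)
    {
        AVBufferPool *pool = av_buffer_pool_init(4096, NULL);
        AVBufferRef  *buf  = av_buffer_pool_get(pool);

        av_buffer_unref(&buf);        /* buffer goes back into the pool cache    */
        av_buffer_pool_flush(pool);   /* cached buffers are freed immediately,
                                       * e.g. to drop hardware memory before a
                                       * stream reconfiguration (assumed use)    */
        av_buffer_pool_uninit(&pool); /* pool itself is torn down as usual       */
        return 0;
    }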
- diff -uNr '--exclude=.git' '--exclude=sunxi' ffmpeg_n4.2.2/libavutil/hwcontext_drm.c ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavutil/hwcontext_drm.c
- --- ffmpeg_n4.2.2/libavutil/hwcontext_drm.c 2020-05-21 20:25:05.873833338 -0700
- +++ ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/libavutil/hwcontext_drm.c 2020-05-26 03:16:40.643172023 -0700
- @@ -43,6 +43,11 @@
- AVDRMDeviceContext *hwctx = hwdev->hwctx;
- drmVersionPtr version;
- + if (device == NULL) {
- + hwctx->fd = -1;
- + return 0;
- + }
- +
- hwctx->fd = open(device, O_RDWR);
- if (hwctx->fd < 0)
- return AVERROR(errno);
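With the hwcontext_drm.c change above, creating a DRM hwdevice context without naming a render node succeeds and leaves AVDRMDeviceContext.fd at -1 instead of failing in open(); presumably the v4l2-request code relies on this when no DRM device path is known up front. A minimal sketch of how a caller might use it:

    /* Sketch only: relies on the NULL-device behaviour introduced by this hunk. */
    #include <libavutil/hwcontext.h>
    #include <libavutil/hwcontext_drm.h>

    static AVBufferRef *create_drm_device_without_node(void)
    {
        AVBufferRef *device_ref = NULL;

        /* Passing NULL as the device path now yields a context with fd == -1. */
        if (av_hwdevice_ctx_create(&device_ref, AV_HWDEVICE_TYPE_DRM,
                                   NULL, NULL, 0) < 0)
            return NULL;

        return device_ref;
    }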
- diff -uNr '--exclude=.git' '--exclude=sunxi' ffmpeg_n4.2.2/Makefile ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/Makefile
- --- ffmpeg_n4.2.2/Makefile 2020-05-21 20:25:05.143847365 -0700
- +++ ffmpeg_v4l2-request-hwaccel-4.2.2-rkvdec/Makefile 2020-05-26 03:16:38.363175403 -0700
- @@ -117,7 +117,7 @@
- .version: M=@
- libavutil/ffversion.h .version:
- - $(M)$(VERSION_SH) $(SRC_PATH) libavutil/ffversion.h $(EXTRA_VERSION)
- + $(M)$(VERSION_SH) $(SRC_PATH) libavutil/ffversion.h Kodi
- $(Q)touch .version
- # force version.sh to run whenever version might have changed