On Tue, Jun 6, 2017 at 6:51 PM, wm4 <[email protected]> wrote:
> This also adds support to avconv (which is trivial due to the new
> hwaccel API being generic enough). For now, this keeps avconv_dxva2.c as
> "dxva2-old", although it doesn't work as avconv.c can't handle multiple
> hwaccels with the same pixfmt. It will be removed in a later commit.
>
> The new decoder setup code in dxva2.c is significantly based on work by
> Steve Lhomme <[email protected]>, but with heavy changes/rewrites.
> ---
> Changelog | 1 +
> avtools/avconv.h | 2 +
> avtools/avconv_opt.c | 8 +-
> configure | 10 +
> doc/APIchanges | 6 +
> libavcodec/allcodecs.c | 5 +
> libavcodec/dxva2.c | 663 +++++++++++++++++++++++++++++++++++++++++++-
> libavcodec/dxva2_h264.c | 22 ++
> libavcodec/dxva2_hevc.c | 22 ++
> libavcodec/dxva2_internal.h | 43 ++-
> libavcodec/dxva2_mpeg2.c | 22 ++
> libavcodec/dxva2_vc1.c | 44 +++
> libavcodec/h264_slice.c | 3 +-
> libavcodec/hevcdec.c | 3 +-
> libavcodec/mpeg12dec.c | 1 +
> libavcodec/vc1dec.c | 1 +
> libavcodec/version.h | 4 +-
> libavutil/hwcontext_dxva2.h | 3 +
> 18 files changed, 852 insertions(+), 11 deletions(-)
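For reference, with the hwaccels[] entries added below this can be exercised directly from the avconv command line, e.g. with something like "avconv -hwaccel d3d11va -i input.mkv -f null -" (assuming the default D3D11 adapter can decode the stream), while "-hwaccel dxva2-old" keeps the previous code path reachable until it is removed.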
>
> diff --git a/Changelog b/Changelog
> index 6fd30fddb9..e44df54c93 100644
> --- a/Changelog
> +++ b/Changelog
> @@ -15,6 +15,7 @@ version <next>:
> - VP9 superframe split/merge bitstream filters
> - FM Screen Capture Codec decoder
> - ClearVideo decoder (I-frames only)
> +- support for decoding through D3D11VA in avconv
>
>
> version 12:
> diff --git a/avtools/avconv.h b/avtools/avconv.h
> index 3354c50444..fe2bb313b7 100644
> --- a/avtools/avconv.h
> +++ b/avtools/avconv.h
> @@ -54,9 +54,11 @@ enum HWAccelID {
> HWACCEL_AUTO,
> HWACCEL_VDPAU,
> HWACCEL_DXVA2,
> + HWACCEL_DXVA2_OLD,
> HWACCEL_VDA,
> HWACCEL_QSV,
> HWACCEL_VAAPI,
> + HWACCEL_D3D11VA,
> };
>
> typedef struct HWAccel {
> diff --git a/avtools/avconv_opt.c b/avtools/avconv_opt.c
> index 9839a2269e..7e4f124610 100644
> --- a/avtools/avconv_opt.c
> +++ b/avtools/avconv_opt.c
> @@ -60,8 +60,14 @@ const HWAccel hwaccels[] = {
> { "vdpau", hwaccel_decode_init, HWACCEL_VDPAU, AV_PIX_FMT_VDPAU,
> AV_HWDEVICE_TYPE_VDPAU },
> #endif
> +#if CONFIG_D3D11VA
> + { "d3d11va", hwaccel_decode_init, HWACCEL_D3D11VA, AV_PIX_FMT_D3D11,
> + AV_HWDEVICE_TYPE_D3D11VA },
> +#endif
> #if HAVE_DXVA2_LIB
> - { "dxva2", dxva2_init, HWACCEL_DXVA2, AV_PIX_FMT_DXVA2_VLD,
> + { "dxva2", hwaccel_decode_init, HWACCEL_DXVA2, AV_PIX_FMT_DXVA2_VLD,
> + AV_HWDEVICE_TYPE_DXVA2 },
> + { "dxva2-old", dxva2_init, HWACCEL_DXVA2_OLD, AV_PIX_FMT_DXVA2_VLD,
> AV_HWDEVICE_TYPE_NONE },
> #endif
> #if CONFIG_VDA
> diff --git a/configure b/configure
> index 481ce2674a..5fd535b80d 100755
> --- a/configure
> +++ b/configure
> @@ -2175,6 +2175,8 @@ h263_vaapi_hwaccel_deps="vaapi"
> h263_vaapi_hwaccel_select="h263_decoder"
> h264_d3d11va_hwaccel_deps="d3d11va"
> h264_d3d11va_hwaccel_select="h264_decoder"
> +h264_d3d11va2_hwaccel_deps="d3d11va"
> +h264_d3d11va2_hwaccel_select="h264_decoder"
> h264_dxva2_hwaccel_deps="dxva2"
> h264_dxva2_hwaccel_select="h264_decoder"
> h264_mmal_hwaccel_deps="mmal"
> @@ -2189,6 +2191,8 @@ h264_vdpau_hwaccel_deps="vdpau"
> h264_vdpau_hwaccel_select="h264_decoder"
> hevc_d3d11va_hwaccel_deps="d3d11va DXVA_PicParams_HEVC"
> hevc_d3d11va_hwaccel_select="hevc_decoder"
> +hevc_d3d11va2_hwaccel_deps="d3d11va DXVA_PicParams_HEVC"
> +hevc_d3d11va2_hwaccel_select="hevc_decoder"
> hevc_dxva2_hwaccel_deps="dxva2 DXVA_PicParams_HEVC"
> hevc_dxva2_hwaccel_select="hevc_decoder"
> hevc_qsv_hwaccel_deps="libmfx"
> @@ -2200,6 +2204,8 @@ mpeg1_vdpau_hwaccel_deps="vdpau"
> mpeg1_vdpau_hwaccel_select="mpeg1video_decoder"
> mpeg2_d3d11va_hwaccel_deps="d3d11va"
> mpeg2_d3d11va_hwaccel_select="mpeg2video_decoder"
> +mpeg2_d3d11va2_hwaccel_deps="d3d11va"
> +mpeg2_d3d11va2_hwaccel_select="mpeg2video_decoder"
> mpeg2_dxva2_hwaccel_deps="dxva2"
> mpeg2_dxva2_hwaccel_select="mpeg2video_decoder"
> mpeg2_mmal_hwaccel_deps="mmal"
> @@ -2214,6 +2220,8 @@ mpeg4_vdpau_hwaccel_deps="vdpau"
> mpeg4_vdpau_hwaccel_select="mpeg4_decoder"
> vc1_d3d11va_hwaccel_deps="d3d11va"
> vc1_d3d11va_hwaccel_select="vc1_decoder"
> +vc1_d3d11va2_hwaccel_deps="d3d11va"
> +vc1_d3d11va2_hwaccel_select="vc1_decoder"
> vc1_dxva2_hwaccel_deps="dxva2"
> vc1_dxva2_hwaccel_select="vc1_decoder"
> vc1_mmal_hwaccel_deps="mmal"
> @@ -2226,6 +2234,7 @@ vp8_qsv_hwaccel_deps="libmfx"
> vp8_vaapi_hwaccel_deps="vaapi VAPictureParameterBufferVP8"
> vp8_vaapi_hwaccel_select="vp8_decoder"
> wmv3_d3d11va_hwaccel_select="vc1_d3d11va_hwaccel"
> +wmv3_d3d11va2_hwaccel_select="vc1_d3d11va2_hwaccel"
> wmv3_dxva2_hwaccel_select="vc1_dxva2_hwaccel"
> wmv3_vaapi_hwaccel_select="vc1_vaapi_hwaccel"
> wmv3_vdpau_hwaccel_select="vc1_vdpau_hwaccel"
> @@ -4673,6 +4682,7 @@ check_builtin stdatomic_h stdatomic.h "atomic_int foo; atomic_store(&foo, 0)"
> check_lib shell32 "windows.h shellapi.h" CommandLineToArgvW -lshell32
> check_lib wincrypt "windows.h wincrypt.h" CryptGenRandom -ladvapi32
> check_lib psapi "windows.h psapi.h" GetProcessMemoryInfo -lpsapi
> +check_lib ole32 "windows.h" CoTaskMemFree -lole32
>
> check_struct "sys/time.h sys/resource.h" "struct rusage" ru_maxrss
>
> diff --git a/doc/APIchanges b/doc/APIchanges
> index a81e41833d..0f7c839573 100644
> --- a/doc/APIchanges
> +++ b/doc/APIchanges
> @@ -13,6 +13,12 @@ libavutil: 2017-03-23
>
> API changes, most recent first:
>
> +2017-xx-xx - xxxxxxx - lavc 58.4.0 - avcodec.h
> +  DXVA2 and D3D11 hardware accelerated decoding now supports the new hwaccel API,
> +  which can create the decoder context and allocate hardware frames automatically.
> +  See AVCodecContext.hw_device_ctx and AVCodecContext.hw_frames_ctx. For D3D11,
> +  the new AV_PIX_FMT_D3D11 pixfmt must be used with the new API.
> +
> 2017-xx-xx - xxxxxxx - lavu 56.2.0 - hwcontext.h
> Add AV_HWDEVICE_TYPE_D3D11VA and AV_PIX_FMT_D3D11.
>
> diff --git a/libavcodec/allcodecs.c b/libavcodec/allcodecs.c
> index dc9a961440..70c35e9b4d 100644
> --- a/libavcodec/allcodecs.c
> +++ b/libavcodec/allcodecs.c
> @@ -69,6 +69,7 @@ void avcodec_register_all(void)
> /* hardware accelerators */
> REGISTER_HWACCEL(H263_VAAPI, h263_vaapi);
> REGISTER_HWACCEL(H264_D3D11VA, h264_d3d11va);
> + REGISTER_HWACCEL(H264_D3D11VA2, h264_d3d11va2);
> REGISTER_HWACCEL(H264_DXVA2, h264_dxva2);
> REGISTER_HWACCEL(H264_MMAL, h264_mmal);
> REGISTER_HWACCEL(H264_QSV, h264_qsv);
> @@ -77,12 +78,14 @@ void avcodec_register_all(void)
> REGISTER_HWACCEL(H264_VDA_OLD, h264_vda_old);
> REGISTER_HWACCEL(H264_VDPAU, h264_vdpau);
> REGISTER_HWACCEL(HEVC_D3D11VA, hevc_d3d11va);
> + REGISTER_HWACCEL(HEVC_D3D11VA2, hevc_d3d11va2);
> REGISTER_HWACCEL(HEVC_DXVA2, hevc_dxva2);
> REGISTER_HWACCEL(HEVC_QSV, hevc_qsv);
> REGISTER_HWACCEL(HEVC_VAAPI, hevc_vaapi);
> REGISTER_HWACCEL(HEVC_VDPAU, hevc_vdpau);
> REGISTER_HWACCEL(MPEG1_VDPAU, mpeg1_vdpau);
> REGISTER_HWACCEL(MPEG2_D3D11VA, mpeg2_d3d11va);
> + REGISTER_HWACCEL(MPEG2_D3D11VA2, mpeg2_d3d11va2);
> REGISTER_HWACCEL(MPEG2_DXVA2, mpeg2_dxva2);
> REGISTER_HWACCEL(MPEG2_MMAL, mpeg2_mmal);
> REGISTER_HWACCEL(MPEG2_QSV, mpeg2_qsv);
> @@ -91,6 +94,7 @@ void avcodec_register_all(void)
> REGISTER_HWACCEL(MPEG4_VAAPI, mpeg4_vaapi);
> REGISTER_HWACCEL(MPEG4_VDPAU, mpeg4_vdpau);
> REGISTER_HWACCEL(VC1_D3D11VA, vc1_d3d11va);
> + REGISTER_HWACCEL(VC1_D3D11VA2, vc1_d3d11va2);
> REGISTER_HWACCEL(VC1_DXVA2, vc1_dxva2);
> REGISTER_HWACCEL(VC1_QSV, vc1_qsv);
> REGISTER_HWACCEL(VC1_VAAPI, vc1_vaapi);
> @@ -99,6 +103,7 @@ void avcodec_register_all(void)
> REGISTER_HWACCEL(VP8_QSV, vp8_qsv);
> REGISTER_HWACCEL(VP8_VAAPI, vp8_vaapi);
> REGISTER_HWACCEL(WMV3_D3D11VA, wmv3_d3d11va);
> + REGISTER_HWACCEL(WMV3_D3D11VA2, wmv3_d3d11va2);
> REGISTER_HWACCEL(WMV3_DXVA2, wmv3_dxva2);
> REGISTER_HWACCEL(WMV3_VAAPI, wmv3_vaapi);
> REGISTER_HWACCEL(WMV3_VDPAU, wmv3_vdpau);
> diff --git a/libavcodec/dxva2.c b/libavcodec/dxva2.c
> index 0d4effd228..4c8a5c4136 100644
> --- a/libavcodec/dxva2.c
> +++ b/libavcodec/dxva2.c
> @@ -22,20 +22,446 @@
>
> #include <assert.h>
> #include <string.h>
> +#include <initguid.h>
>
> +#include "libavutil/common.h"
> #include "libavutil/log.h"
> #include "libavutil/time.h"
>
> #include "avcodec.h"
> #include "dxva2_internal.h"
>
> +/* define all the GUIDs used directly here,
> +   to avoid problems with inconsistent dxva2api.h versions in mingw-w64 and different MSVC version */
> +DEFINE_GUID(ff_DXVA2_ModeMPEG2_VLD, 0xee27417f, 0x5e28,0x4e65,0xbe,0xea,0x1d,0x26,0xb5,0x08,0xad,0xc9);
> +DEFINE_GUID(ff_DXVA2_ModeMPEG2and1_VLD, 0x86695f12, 0x340e,0x4f04,0x9f,0xd3,0x92,0x53,0xdd,0x32,0x74,0x60);
> +DEFINE_GUID(ff_DXVA2_ModeH264_E, 0x1b81be68, 0xa0c7,0x11d3,0xb9,0x84,0x00,0xc0,0x4f,0x2e,0x73,0xc5);
> +DEFINE_GUID(ff_DXVA2_ModeH264_F, 0x1b81be69, 0xa0c7,0x11d3,0xb9,0x84,0x00,0xc0,0x4f,0x2e,0x73,0xc5);
> +DEFINE_GUID(ff_DXVADDI_Intel_ModeH264_E, 0x604F8E68, 0x4951,0x4C54,0x88,0xFE,0xAB,0xD2,0x5C,0x15,0xB3,0xD6);
> +DEFINE_GUID(ff_DXVA2_ModeVC1_D, 0x1b81beA3, 0xa0c7,0x11d3,0xb9,0x84,0x00,0xc0,0x4f,0x2e,0x73,0xc5);
> +DEFINE_GUID(ff_DXVA2_ModeVC1_D2010, 0x1b81beA4, 0xa0c7,0x11d3,0xb9,0x84,0x00,0xc0,0x4f,0x2e,0x73,0xc5);
> +DEFINE_GUID(ff_DXVA2_ModeHEVC_VLD_Main, 0x5b11d51b, 0x2f4c,0x4452,0xbc,0xc3,0x09,0xf2,0xa1,0x16,0x0c,0xc0);
> +DEFINE_GUID(ff_DXVA2_ModeHEVC_VLD_Main10,0x107af0e0, 0xef1a,0x4d19,0xab,0xa8,0x67,0xa1,0x63,0x07,0x3d,0x13);
> +DEFINE_GUID(ff_DXVA2_NoEncrypt, 0x1b81beD0, 0xa0c7,0x11d3,0xb9,0x84,0x00,0xc0,0x4f,0x2e,0x73,0xc5);
> +DEFINE_GUID(ff_GUID_NULL, 0x00000000, 0x0000,0x0000,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00);
> +DEFINE_GUID(ff_IID_IDirectXVideoDecoderService, 0xfc51a551,0xd5e7,0x11d9,0xaf,0x55,0x00,0x05,0x4e,0x43,0xff,0x02);
> +
> +
> +typedef struct dxva_mode {
> + const GUID *guid;
> + enum AVCodecID codec;
> +} dxva_mode;
> +
> +static const dxva_mode dxva_modes[] = {
> + /* MPEG-2 */
> + { &ff_DXVA2_ModeMPEG2_VLD, AV_CODEC_ID_MPEG2VIDEO },
> + { &ff_DXVA2_ModeMPEG2and1_VLD, AV_CODEC_ID_MPEG2VIDEO },
> +
> + /* H.264 */
> + { &ff_DXVA2_ModeH264_F, AV_CODEC_ID_H264 },
> + { &ff_DXVA2_ModeH264_E, AV_CODEC_ID_H264 },
> + /* Intel specific H.264 mode */
> + { &ff_DXVADDI_Intel_ModeH264_E, AV_CODEC_ID_H264 },
> +
> + /* VC-1 / WMV3 */
> + { &ff_DXVA2_ModeVC1_D2010, AV_CODEC_ID_VC1 },
> + { &ff_DXVA2_ModeVC1_D2010, AV_CODEC_ID_WMV3 },
> + { &ff_DXVA2_ModeVC1_D, AV_CODEC_ID_VC1 },
> + { &ff_DXVA2_ModeVC1_D, AV_CODEC_ID_WMV3 },
> +
> + /* HEVC/H.265 */
> + { &ff_DXVA2_ModeHEVC_VLD_Main, AV_CODEC_ID_HEVC },
> + { &ff_DXVA2_ModeHEVC_VLD_Main10, AV_CODEC_ID_HEVC },
> +
> + { NULL, 0 },
> +};
> +
> +static int dxva_get_decoder_configuration(AVCodecContext *avctx,
> + const void *cfg_list,
> + unsigned cfg_count)
> +{
> + FFDXVASharedContext *sctx = DXVA_SHARED_CONTEXT(avctx);
> + unsigned i, best_score = 0;
> + int best_cfg = -1;
> +
> + for (i = 0; i < cfg_count; i++) {
> + unsigned score;
> + UINT ConfigBitstreamRaw;
> + GUID guidConfigBitstreamEncryption;
> +
> +#if CONFIG_D3D11VA
> + if (sctx->pix_fmt == AV_PIX_FMT_D3D11) {
> + D3D11_VIDEO_DECODER_CONFIG *cfg = &((D3D11_VIDEO_DECODER_CONFIG *)cfg_list)[i];
> + ConfigBitstreamRaw = cfg->ConfigBitstreamRaw;
> + guidConfigBitstreamEncryption = cfg->guidConfigBitstreamEncryption;
> + }
> +#endif
> +#if CONFIG_DXVA2
> + if (sctx->pix_fmt == AV_PIX_FMT_DXVA2_VLD) {
> + DXVA2_ConfigPictureDecode *cfg = &((DXVA2_ConfigPictureDecode *)cfg_list)[i];
> + ConfigBitstreamRaw = cfg->ConfigBitstreamRaw;
> + guidConfigBitstreamEncryption = cfg->guidConfigBitstreamEncryption;
> + }
> +#endif
> +
> + if (ConfigBitstreamRaw == 1)
> + score = 1;
> + else if (avctx->codec_id == AV_CODEC_ID_H264 && ConfigBitstreamRaw == 2)
> + score = 2;
> + else
> + continue;
> + if (IsEqualGUID(&guidConfigBitstreamEncryption, &ff_DXVA2_NoEncrypt))
> + score += 16;
> + if (score > best_score) {
> + best_score = score;
> + best_cfg = i;
> + }
> + }
> +
> + if (!best_score) {
> + av_log(avctx, AV_LOG_VERBOSE, "No valid decoder configuration
> available\n");
> + return AVERROR(EINVAL);
> + }
> +
> + return best_cfg;
> +}
> +
> +#if CONFIG_D3D11VA
> +static int d3d11va_validate_output(void *service, GUID guid, void *surface_format)
> +{
> + HRESULT hr;
> + BOOL is_supported = FALSE;
> + hr = ID3D11VideoDevice_CheckVideoDecoderFormat((ID3D11VideoDevice *)service,
> +                                                &guid,
> +                                                *(DXGI_FORMAT *)surface_format,
> +                                                &is_supported);
> + return SUCCEEDED(hr) && is_supported;
> +}
> +#endif
> +
> +#if CONFIG_DXVA2
> +static int dxva2_validate_output(void *decoder_service, GUID guid, void *surface_format)
> +{
> + HRESULT hr;
> + int ret = 0;
> + unsigned j, target_count;
> + D3DFORMAT *target_list;
> + hr = IDirectXVideoDecoderService_GetDecoderRenderTargets((IDirectXVideoDecoderService *)decoder_service, &guid, &target_count, &target_list);
> + if (SUCCEEDED(hr)) {
> + for (j = 0; j < target_count; j++) {
> + const D3DFORMAT format = target_list[j];
> + if (format == *(D3DFORMAT *)surface_format) {
> + ret = 1;
> + break;
> + }
> + }
> + CoTaskMemFree(target_list);
> + }
> + return ret;
> +}
> +#endif
> +
> +static int dxva_get_decoder_guid(AVCodecContext *avctx, void *service, void *surface_format,
> +                                 unsigned guid_count, const GUID *guid_list, GUID *decoder_guid)
> +{
> + FFDXVASharedContext *sctx = DXVA_SHARED_CONTEXT(avctx);
> + unsigned i, j;
> +
> + *decoder_guid = ff_GUID_NULL;
> + for (i = 0; dxva_modes[i].guid; i++) {
> + const dxva_mode *mode = &dxva_modes[i];
> + int validate;
> + if (mode->codec != avctx->codec_id)
> + continue;
> +
> + for (j = 0; j < guid_count; j++) {
> + if (IsEqualGUID(mode->guid, &guid_list[j]))
> + break;
> + }
> + if (j == guid_count)
> + continue;
> +
> +#if CONFIG_D3D11VA
> + if (sctx->pix_fmt == AV_PIX_FMT_D3D11)
> + validate = d3d11va_validate_output(service, *mode->guid, surface_format);
> +#endif
> +#if CONFIG_DXVA2
> + if (sctx->pix_fmt == AV_PIX_FMT_DXVA2_VLD)
> + validate = dxva2_validate_output(service, *mode->guid, surface_format);
> +#endif
> + if (validate) {
> + *decoder_guid = *mode->guid;
> + break;
> + }
> + }
> +
> + if (IsEqualGUID(decoder_guid, &ff_GUID_NULL)) {
> + av_log(avctx, AV_LOG_VERBOSE, "No decoder device for codec found\n");
> + return AVERROR(EINVAL);
> + }
> +
> + if (IsEqualGUID(decoder_guid, &ff_DXVADDI_Intel_ModeH264_E))
> + sctx->workaround |= FF_DXVA2_WORKAROUND_INTEL_CLEARVIDEO;
> +
> + return 0;
> +}
> +
> +static void bufref_free_interface(void *opaque, uint8_t *data)
> +{
> + IUnknown_Release((IUnknown *)opaque);
> +}
> +
> +static AVBufferRef *bufref_wrap_interface(IUnknown *iface)
> +{
> + return av_buffer_create((uint8_t*)iface, 1, bufref_free_interface, iface, 0);
> +}
> +
> +#if CONFIG_DXVA2
> +
> +static int dxva2_get_decoder_configuration(AVCodecContext *avctx, const GUID *device_guid,
> +                                           const DXVA2_VideoDesc *desc,
> +                                           DXVA2_ConfigPictureDecode *config)
> +{
> + FFDXVASharedContext *sctx = DXVA_SHARED_CONTEXT(avctx);
> + unsigned cfg_count;
> + DXVA2_ConfigPictureDecode *cfg_list;
> + HRESULT hr;
> + int ret;
> +
> + hr = IDirectXVideoDecoderService_GetDecoderConfigurations(sctx->dxva2_service, device_guid, desc, NULL, &cfg_count, &cfg_list);
> + if (FAILED(hr)) {
> + av_log(avctx, AV_LOG_ERROR, "Unable to retrieve decoder configurations\n");
> + return AVERROR(EINVAL);
> + }
> +
> + ret = dxva_get_decoder_configuration(avctx, cfg_list, cfg_count);
> + if (ret >= 0)
> + *config = cfg_list[ret];
> + CoTaskMemFree(cfg_list);
> + return ret;
> +}
> +
> +static int dxva2_create_decoder(AVCodecContext *avctx)
> +{
> + FFDXVASharedContext *sctx = DXVA_SHARED_CONTEXT(avctx);
> + GUID *guid_list;
> + unsigned guid_count;
> + GUID device_guid;
> + D3DFORMAT surface_format = avctx->sw_pix_fmt == AV_PIX_FMT_YUV420P10 ?
> +                            MKTAG('P', '0', '1', '0') : MKTAG('N', 'V', '1', '2');
> + DXVA2_VideoDesc desc = { 0 };
> + DXVA2_ConfigPictureDecode config;
> + HRESULT hr;
> + int ret;
> + HANDLE device_handle;
> + AVHWFramesContext *frames_ctx = (AVHWFramesContext*)avctx->hw_frames_ctx->data;
> + AVDXVA2FramesContext *frames_hwctx = frames_ctx->hwctx;
> + AVDXVA2DeviceContext *device_hwctx = frames_ctx->device_ctx->hwctx;
> +
> + hr = IDirect3DDeviceManager9_OpenDeviceHandle(device_hwctx->devmgr,
> + &device_handle);
> + if (FAILED(hr)) {
> + av_log(avctx, AV_LOG_ERROR, "Failed to open a device handle\n");
> + goto fail;
> + }
> +
> + hr = IDirect3DDeviceManager9_GetVideoService(device_hwctx->devmgr, device_handle,
> +                                              &ff_IID_IDirectXVideoDecoderService,
> +                                              (void **)&sctx->dxva2_service);
> + IDirect3DDeviceManager9_CloseDeviceHandle(device_hwctx->devmgr, device_handle);
> + if (FAILED(hr)) {
> + av_log(avctx, AV_LOG_ERROR, "Failed to create IDirectXVideoDecoderService\n");
> + goto fail;
> + }
> +
> + hr = IDirectXVideoDecoderService_GetDecoderDeviceGuids(sctx->dxva2_service, &guid_count, &guid_list);
> + if (FAILED(hr)) {
> + av_log(avctx, AV_LOG_ERROR, "Failed to retrieve decoder device GUIDs\n");
> + goto fail;
> + }
> +
> + ret = dxva_get_decoder_guid(avctx, sctx->dxva2_service, &surface_format,
> + guid_count, guid_list, &device_guid);
> + CoTaskMemFree(guid_list);
> + if (ret < 0) {
> + goto fail;
> + }
> +
> + desc.SampleWidth = avctx->coded_width;
> + desc.SampleHeight = avctx->coded_height;
> + desc.Format = surface_format;
> +
> + ret = dxva2_get_decoder_configuration(avctx, &device_guid, &desc, &config);
> + if (ret < 0) {
> + goto fail;
> + }
> +
> + hr = IDirectXVideoDecoderService_CreateVideoDecoder(sctx->dxva2_service, &device_guid,
> +                                                     &desc, &config, frames_hwctx->surfaces,
> +                                                     frames_hwctx->nb_surfaces, &sctx->dxva2_decoder);
> + if (FAILED(hr)) {
> + av_log(avctx, AV_LOG_ERROR, "Failed to create DXVA2 video decoder\n");
> + goto fail;
> + }
> +
> + sctx->dxva2_config = config;
> +
> + sctx->decoder_ref = bufref_wrap_interface((IUnknown *)sctx->dxva2_decoder);
> + if (!sctx->decoder_ref)
> + return AVERROR(ENOMEM);
> +
> + return 0;
> +fail:
> + return AVERROR(EINVAL);
> +}
> +
> +#endif
> +
> +#if CONFIG_D3D11VA
> +
> +static int d3d11va_get_decoder_configuration(AVCodecContext *avctx,
> +                                             ID3D11VideoDevice *video_device,
> +                                             const D3D11_VIDEO_DECODER_DESC *desc,
> +                                             D3D11_VIDEO_DECODER_CONFIG *config)
> +{
> + FFDXVASharedContext *sctx = DXVA_SHARED_CONTEXT(avctx);
> + unsigned cfg_count = 0;
> + D3D11_VIDEO_DECODER_CONFIG *cfg_list = NULL;
> + HRESULT hr;
> + int i, ret;
> +
> + hr = ID3D11VideoDevice_GetVideoDecoderConfigCount(video_device, desc, &cfg_count);
> + if (FAILED(hr)) {
> + av_log(avctx, AV_LOG_ERROR, "Unable to retrieve decoder configurations\n");
> + return AVERROR(EINVAL);
> + }
> +
> + cfg_list = av_malloc_array(cfg_count, sizeof(D3D11_VIDEO_DECODER_CONFIG));
> + if (cfg_list == NULL)
> + return AVERROR(ENOMEM);
> + for (i = 0; i < cfg_count; i++) {
> + hr = ID3D11VideoDevice_GetVideoDecoderConfig(video_device, desc, i, &cfg_list[i]);
> + if (FAILED(hr)) {
> + av_log(avctx, AV_LOG_ERROR, "Unable to retrieve decoder configurations. (hr=0x%lX)\n", hr);
> + av_free(cfg_list);
> + return AVERROR(EINVAL);
> + }
> + }
> +
> + ret = dxva_get_decoder_configuration(avctx, cfg_list, cfg_count);
> + if (ret >= 0)
> + *config = cfg_list[ret];
> + av_free(cfg_list);
> + return ret;
> +}
> +
> +static int d3d11va_create_decoder(AVCodecContext *avctx)
> +{
> + FFDXVASharedContext *sctx = DXVA_SHARED_CONTEXT(avctx);
> + GUID *guid_list;
> + unsigned guid_count, i;
> + GUID decoder_guid;
> + DXGI_FORMAT surface_format = avctx->sw_pix_fmt == AV_PIX_FMT_YUV420P10 ?
> + DXGI_FORMAT_P010 : DXGI_FORMAT_NV12;
> + D3D11_VIDEO_DECODER_DESC desc = { 0 };
> + D3D11_VIDEO_DECODER_CONFIG config;
> + AVHWFramesContext *frames_ctx = (AVHWFramesContext *)avctx->hw_frames_ctx->data;
> + AVD3D11VADeviceContext *device_hwctx = frames_ctx->device_ctx->hwctx;
> + AVD3D11VAFramesContext *frames_hwctx = frames_ctx->hwctx;
> + D3D11_TEXTURE2D_DESC texdesc;
> + HRESULT hr;
> + int ret;
> +
> + if (!frames_hwctx->texture) {
> + av_log(avctx, AV_LOG_ERROR, "AVD3D11VAFramesContext.texture not
> set.\n");
> + return AVERROR(EINVAL);
> + }
> + ID3D11Texture2D_GetDesc(frames_hwctx->texture, &texdesc);
> +
> + guid_count = ID3D11VideoDevice_GetVideoDecoderProfileCount(device_hwctx->video_device);
> + guid_list = av_malloc_array(guid_count, sizeof(*guid_list));
> + if (guid_list == NULL || guid_count == 0) {
> + av_log(avctx, AV_LOG_ERROR, "Failed to get the decoder GUIDs\n");
> + av_free(guid_list);
> + return AVERROR(EINVAL);
> + }
> + for (i = 0; i < guid_count; i++) {
> + hr = ID3D11VideoDevice_GetVideoDecoderProfile(device_hwctx->video_device, i, &guid_list[i]);
> + if (FAILED(hr)) {
> + av_log(avctx, AV_LOG_ERROR, "Failed to retrieve decoder GUID %d\n", i);
> + av_free(guid_list);
> + return AVERROR(EINVAL);
> + }
> + }
> +
> + ret = dxva_get_decoder_guid(avctx, device_hwctx->video_device, &surface_format,
> +                             guid_count, guid_list, &decoder_guid);
> + av_free(guid_list);
> + if (ret < 0)
> + return AVERROR(EINVAL);
> +
> + desc.SampleWidth = avctx->coded_width;
> + desc.SampleHeight = avctx->coded_height;
> + desc.OutputFormat = surface_format;
> + desc.Guid = decoder_guid;
> +
> + ret = d3d11va_get_decoder_configuration(avctx, device_hwctx->video_device, &desc, &config);
> + if (ret < 0)
> + return AVERROR(EINVAL);
> +
> + sctx->d3d11_views = av_mallocz_array(texdesc.ArraySize, sizeof(sctx->d3d11_views[0]));
> + if (!sctx->d3d11_views)
> + return AVERROR(ENOMEM);
> + sctx->nb_d3d11_views = texdesc.ArraySize;
> +
> + for (i = 0; i < sctx->nb_d3d11_views; i++) {
> + D3D11_VIDEO_DECODER_OUTPUT_VIEW_DESC viewDesc = {
> + .DecodeProfile = decoder_guid,
> + .ViewDimension = D3D11_VDOV_DIMENSION_TEXTURE2D,
> + .Texture2D = {
> + .ArraySlice = i,
> + }
> + };
> + hr = ID3D11VideoDevice_CreateVideoDecoderOutputView(device_hwctx->video_device,
> +                                                     (ID3D11Resource*) frames_hwctx->texture,
> +                                                     &viewDesc,
> +                                                     (ID3D11VideoDecoderOutputView**) &sctx->d3d11_views[i]);
> + if (FAILED(hr)) {
> + av_log(avctx, AV_LOG_ERROR, "Could not create the decoder output view %d\n", i);
> + return AVERROR_UNKNOWN;
> + }
> + }
> +
> + hr = ID3D11VideoDevice_CreateVideoDecoder(device_hwctx->video_device, &desc,
> +                                           &config, &sctx->d3d11_decoder);
> + if (FAILED(hr)) {
> + av_log(avctx, AV_LOG_ERROR, "Failed to create D3D11VA video decoder\n");
> + return AVERROR(EINVAL);
> + }
> +
> + sctx->d3d11_config = config;
> + sctx->d3d11_texture = frames_hwctx->texture;
> +
> + sctx->decoder_ref = bufref_wrap_interface((IUnknown *)sctx->d3d11_decoder);
> + if (!sctx->decoder_ref)
> + return AVERROR(ENOMEM);
> +
> + return 0;
> +}
> +
> +#endif
> +
> static void ff_dxva2_lock(AVCodecContext *avctx)
> {
> #if CONFIG_D3D11VA
> if (ff_dxva2_is_d3d11(avctx)) {
> + FFDXVASharedContext *sctx = DXVA_SHARED_CONTEXT(avctx);
> AVDXVAContext *ctx = DXVA_CONTEXT(avctx);
> if (D3D11VA_CONTEXT(ctx)->context_mutex != INVALID_HANDLE_VALUE)
> WaitForSingleObjectEx(D3D11VA_CONTEXT(ctx)->context_mutex, INFINITE, FALSE);
> + if (sctx->device_ctx) {
> + AVD3D11VADeviceContext *hwctx = sctx->device_ctx->hwctx;
> + hwctx->lock(hwctx->lock_ctx);
> + }
> }
> #endif
> }
> @@ -44,15 +470,216 @@ static void ff_dxva2_unlock(AVCodecContext *avctx)
> {
> #if CONFIG_D3D11VA
> if (ff_dxva2_is_d3d11(avctx)) {
> + FFDXVASharedContext *sctx = DXVA_SHARED_CONTEXT(avctx);
> AVDXVAContext *ctx = DXVA_CONTEXT(avctx);
> if (D3D11VA_CONTEXT(ctx)->context_mutex != INVALID_HANDLE_VALUE)
> ReleaseMutex(D3D11VA_CONTEXT(ctx)->context_mutex);
> + if (sctx->device_ctx) {
> + AVD3D11VADeviceContext *hwctx = sctx->device_ctx->hwctx;
> + hwctx->unlock(hwctx->lock_ctx);
> + }
> }
> #endif
> }
>
> -static void *get_surface(const AVFrame *frame)
> +// This must work before the decoder is created.
> +// This somehow needs to be exported to the user.
> +static void dxva_adjust_hwframes(AVCodecContext *avctx, AVHWFramesContext *frames_ctx)
> {
> + FFDXVASharedContext *sctx = DXVA_SHARED_CONTEXT(avctx);
> + int surface_alignment, num_surfaces;
> +
> + frames_ctx->format = sctx->pix_fmt;
> +
> + /* decoding MPEG-2 requires additional alignment on some Intel GPUs,
> + but it causes issues for H.264 on certain AMD GPUs..... */
> + if (avctx->codec_id == AV_CODEC_ID_MPEG2VIDEO)
> + surface_alignment = 32;
> + /* the HEVC DXVA2 spec asks for 128 pixel aligned surfaces to ensure
> + all coding features have enough room to work with */
> + else if (avctx->codec_id == AV_CODEC_ID_HEVC)
> + surface_alignment = 128;
> + else
> + surface_alignment = 16;
> +
> + /* 4 base work surfaces */
> + num_surfaces = 4;
> +
> + /* add surfaces based on number of possible refs */
> + if (avctx->codec_id == AV_CODEC_ID_H264 || avctx->codec_id == AV_CODEC_ID_HEVC)
> + num_surfaces += 16;
> + else
> + num_surfaces += 2;
> +
> + /* add extra surfaces for frame threading */
> + if (avctx->active_thread_type & FF_THREAD_FRAME)
> + num_surfaces += avctx->thread_count;
> +
> + frames_ctx->sw_format = avctx->sw_pix_fmt == AV_PIX_FMT_YUV420P10 ?
> + AV_PIX_FMT_P010 : AV_PIX_FMT_NV12;
> + frames_ctx->width = FFALIGN(avctx->coded_width, surface_alignment);
> + frames_ctx->height = FFALIGN(avctx->coded_height, surface_alignment);
> + frames_ctx->initial_pool_size = num_surfaces;
> +
> +
> +#if CONFIG_DXVA2
> + if (frames_ctx->format == AV_PIX_FMT_DXVA2_VLD) {
> + AVDXVA2FramesContext *frames_hwctx = frames_ctx->hwctx;
> +
> + frames_hwctx->surface_type = DXVA2_VideoDecoderRenderTarget;
> + }
> +#endif
> +
> +#if CONFIG_D3D11VA
> + if (frames_ctx->format == AV_PIX_FMT_D3D11) {
> + AVD3D11VAFramesContext *frames_hwctx = frames_ctx->hwctx;
> +
> + frames_hwctx->BindFlags |= D3D11_BIND_DECODER;
> + }
> +#endif
> +}
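Regarding the "somehow needs to be exported to the user" note above: until that happens, a caller that supplies its own hw_frames_ctx has to mirror these numbers by hand. A rough, untested sketch of what that looks like for 8-bit D3D11 decoding (helper name invented, constants copied from dxva_adjust_hwframes above):

#include <libavcodec/avcodec.h>
#include <libavutil/common.h>
#include <libavutil/hwcontext.h>
#include <libavutil/hwcontext_d3d11va.h>

/* Hypothetical caller-side frame pool setup mirroring dxva_adjust_hwframes(). */
static int alloc_d3d11_frames(AVCodecContext *avctx, AVBufferRef *device_ref)
{
    AVBufferRef *frames_ref = av_hwframe_ctx_alloc(device_ref);
    AVHWFramesContext *fc;
    AVD3D11VAFramesContext *hwctx;

    if (!frames_ref)
        return AVERROR(ENOMEM);
    fc    = (AVHWFramesContext *)frames_ref->data;
    hwctx = fc->hwctx;

    fc->format    = AV_PIX_FMT_D3D11;
    fc->sw_format = AV_PIX_FMT_NV12;                  /* AV_PIX_FMT_P010 for 10 bit */
    fc->width     = FFALIGN(avctx->coded_width,  16); /* 32 for MPEG-2, 128 for HEVC */
    fc->height    = FFALIGN(avctx->coded_height, 16);
    /* 4 work surfaces + worst-case refs + extra surfaces for frame threads */
    fc->initial_pool_size = 4 + 16 + avctx->thread_count;
    hwctx->BindFlags |= D3D11_BIND_DECODER;

    if (av_hwframe_ctx_init(frames_ref) < 0) {
        av_buffer_unref(&frames_ref);
        return AVERROR(EINVAL);
    }
    avctx->hw_frames_ctx = frames_ref;
    return 0;
}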
> +
> +int ff_dxva2_decode_init(AVCodecContext *avctx)
> +{
> + FFDXVASharedContext *sctx = DXVA_SHARED_CONTEXT(avctx);
> + AVHWFramesContext *frames_ctx = NULL;
> + int ret = 0;
> +
> + // Old API.
> + if (avctx->hwaccel_context)
> + return 0;
> +
> + // (avctx->pix_fmt is not updated yet at this point)
> + sctx->pix_fmt = avctx->hwaccel->pix_fmt;
> +
> + if (avctx->codec_id == AV_CODEC_ID_H264 &&
> +     (avctx->profile & ~FF_PROFILE_H264_CONSTRAINED) > FF_PROFILE_H264_HIGH) {
> + av_log(avctx, AV_LOG_VERBOSE, "Unsupported H.264 profile for DXVA HWAccel: %d\n",avctx->profile);
> + return AVERROR(ENOTSUP);
> + }
> +
> + if (avctx->codec_id == AV_CODEC_ID_HEVC &&
> +     avctx->profile != FF_PROFILE_HEVC_MAIN && avctx->profile != FF_PROFILE_HEVC_MAIN_10) {
> + av_log(avctx, AV_LOG_VERBOSE, "Unsupported HEVC profile for DXVA HWAccel: %d\n", avctx->profile);
> + return AVERROR(ENOTSUP);
> + }
MPEG2 should also be limited to FF_PROFILE_MPEG2_SIMPLE and
FF_PROFILE_MPEG2_MAIN profiles. And only one of the decoders can
support both.
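Concretely, something along these lines, mirroring the H.264/HEVC checks above (untested sketch):

    if (avctx->codec_id == AV_CODEC_ID_MPEG2VIDEO &&
        avctx->profile != FF_PROFILE_MPEG2_SIMPLE &&
        avctx->profile != FF_PROFILE_MPEG2_MAIN) {
        av_log(avctx, AV_LOG_VERBOSE, "Unsupported MPEG-2 profile for DXVA HWAccel: %d\n",
               avctx->profile);
        return AVERROR(ENOTSUP);
    }

(Streams that leave the profile at FF_PROFILE_UNKNOWN may need to be let through as well.)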
> + if (!avctx->hw_frames_ctx && !avctx->hw_device_ctx) {
> + av_log(avctx, AV_LOG_ERROR, "Either a hw_frames_ctx or a
> hw_device_ctx needs to be set for hardware decoding.\n");
> + return AVERROR(EINVAL);
> + }
> +
> + if (avctx->hw_frames_ctx) {
> + frames_ctx = (AVHWFramesContext*)avctx->hw_frames_ctx->data;
> + } else {
> + avctx->hw_frames_ctx = av_hwframe_ctx_alloc(avctx->hw_device_ctx);
> + if (!avctx->hw_frames_ctx)
> + return AVERROR(ENOMEM);
> +
> + frames_ctx = (AVHWFramesContext*)avctx->hw_frames_ctx->data;
> +
> + dxva_adjust_hwframes(avctx, frames_ctx);
> +
> + ret = av_hwframe_ctx_init(avctx->hw_frames_ctx);
> + if (ret < 0)
> + goto fail;
> + }
> +
> + sctx->device_ctx = frames_ctx->device_ctx;
> +
> + if (frames_ctx->format != sctx->pix_fmt ||
> + !((sctx->pix_fmt == AV_PIX_FMT_D3D11 && CONFIG_D3D11VA) ||
> + (sctx->pix_fmt == AV_PIX_FMT_DXVA2_VLD && CONFIG_DXVA2))) {
> + av_log(avctx, AV_LOG_ERROR, "Invalid pixfmt for hwaccel!\n");
> + ret = AVERROR(EINVAL);
> + goto fail;
> + }
> +
> +#if CONFIG_D3D11VA
> + if (sctx->pix_fmt == AV_PIX_FMT_D3D11) {
> + AVD3D11VADeviceContext *device_hwctx = frames_ctx->device_ctx->hwctx;
> + AVD3D11VAContext *d3d11_ctx = &sctx->ctx.d3d11va;
> + HRESULT hr;
> +
> + ff_dxva2_lock(avctx);
> + ret = d3d11va_create_decoder(avctx);
> + ff_dxva2_unlock(avctx);
> + if (ret < 0)
> + goto fail;
> +
> + d3d11_ctx->decoder = sctx->d3d11_decoder;
> + d3d11_ctx->video_context = device_hwctx->video_context;
> + d3d11_ctx->cfg = &sctx->d3d11_config;
> + d3d11_ctx->surface_count = sctx->nb_d3d11_views;
> + d3d11_ctx->surface = sctx->d3d11_views;
> + d3d11_ctx->workaround = sctx->workaround;
> + d3d11_ctx->context_mutex = INVALID_HANDLE_VALUE;
> + }
> +#endif
> +
> +#if CONFIG_DXVA2
> + if (sctx->pix_fmt == AV_PIX_FMT_DXVA2_VLD) {
> + AVDXVA2FramesContext *frames_hwctx = frames_ctx->hwctx;
> + struct dxva_context *dxva_ctx = &sctx->ctx.dxva2;
> +
> + ff_dxva2_lock(avctx);
> + ret = dxva2_create_decoder(avctx);
> + ff_dxva2_unlock(avctx);
> + if (ret < 0)
> + goto fail;
> +
> + dxva_ctx->decoder = sctx->dxva2_decoder;
> + dxva_ctx->cfg = &sctx->dxva2_config;
> + dxva_ctx->surface = frames_hwctx->surfaces;
> + dxva_ctx->surface_count = frames_hwctx->nb_surfaces;
> + dxva_ctx->workaround = sctx->workaround;
> + }
> +#endif
> +
> + return 0;
> +
> +fail:
> + ff_dxva2_decode_uninit(avctx);
> + return ret;
> +}
> +
> +int ff_dxva2_decode_uninit(AVCodecContext *avctx)
> +{
> + FFDXVASharedContext *sctx = DXVA_SHARED_CONTEXT(avctx);
> + int i;
> +
> + av_buffer_unref(&sctx->decoder_ref);
> +
> +#if CONFIG_D3D11VA
> + for (i = 0; i < sctx->nb_d3d11_views; i++) {
> + if (sctx->d3d11_views[i])
> + ID3D11VideoDecoderOutputView_Release(sctx->d3d11_views[i]);
> + }
> + av_freep(&sctx->d3d11_views);
> +#endif
> +
> +#if CONFIG_DXVA2
> + if (sctx->dxva2_service)
> + IDirectXVideoDecoderService_Release(sctx->dxva2_service);
> +#endif
> +
> + return 0;
> +}
> +
> +static void *get_surface(AVCodecContext *avctx, const AVFrame *frame)
> +{
> +#if CONFIG_D3D11VA
> + if (frame->format == AV_PIX_FMT_D3D11) {
> + FFDXVASharedContext *sctx = DXVA_SHARED_CONTEXT(avctx);
> + intptr_t index = (intptr_t)frame->data[1];
> + if (index < 0 || index >= sctx->nb_d3d11_views ||
> + sctx->d3d11_texture != (ID3D11Texture2D *)frame->data[0]) {
> + av_log(avctx, AV_LOG_ERROR, "get_buffer frame is invalid!\n");
> + return NULL;
> + }
> + return sctx->d3d11_views[index];
> + }
> +#endif
> return frame->data[3];
> }
>
> @@ -60,10 +687,12 @@ unsigned ff_dxva2_get_surface_index(const AVCodecContext *avctx,
> const AVDXVAContext *ctx,
> const AVFrame *frame)
> {
> - void *surface = get_surface(frame);
> + void *surface = get_surface(avctx, frame);
> unsigned i;
>
> #if CONFIG_D3D11VA
> + if (avctx->pix_fmt == AV_PIX_FMT_D3D11)
> + return (intptr_t)frame->data[1];
> if (avctx->pix_fmt == AV_PIX_FMT_D3D11VA_VLD) {
> D3D11_VIDEO_DECODER_OUTPUT_VIEW_DESC viewDesc;
> ID3D11VideoDecoderOutputView_GetDesc((ID3D11VideoDecoderOutputView*) surface, &viewDesc);
> @@ -154,6 +783,22 @@ int ff_dxva2_commit_buffer(AVCodecContext *avctx,
> return result;
> }
>
> +static int frame_add_buf(AVFrame *frame, AVBufferRef *ref)
> +{
> + int i;
> +
> + for (i = 0; i < AV_NUM_DATA_POINTERS; i++) {
> + if (!frame->buf[i]) {
> + frame->buf[i] = av_buffer_ref(ref);
> + return frame->buf[i] ? 0 : AVERROR(ENOMEM);
> + }
> + }
> +
> + // For now we expect that the caller does not use more than
> + // AV_NUM_DATA_POINTERS-1 buffers if the user uses a custom pool.
> + return AVERROR(EINVAL);
> +}
> +
> int ff_dxva2_common_end_frame(AVCodecContext *avctx, AVFrame *frame,
> const void *pp, unsigned pp_size,
> const void *qm, unsigned qm_size,
> @@ -173,19 +818,26 @@ int ff_dxva2_common_end_frame(AVCodecContext *avctx, AVFrame *frame,
> int result, runs = 0;
> HRESULT hr;
> unsigned type;
> + FFDXVASharedContext *sctx = DXVA_SHARED_CONTEXT(avctx);
> +
> + if (sctx->decoder_ref) {
> + result = frame_add_buf(frame, sctx->decoder_ref);
> + if (result < 0)
> + return result;
> + }
>
> do {
> ff_dxva2_lock(avctx);
> #if CONFIG_D3D11VA
> if (ff_dxva2_is_d3d11(avctx))
> hr = ID3D11VideoContext_DecoderBeginFrame(D3D11VA_CONTEXT(ctx)->video_context,
>                                           D3D11VA_CONTEXT(ctx)->decoder,
> -                                         get_surface(frame),
> +                                         get_surface(avctx, frame),
>                                           0, NULL);
> #endif
> #if CONFIG_DXVA2
> if (avctx->pix_fmt == AV_PIX_FMT_DXVA2_VLD)
> hr = IDirectXVideoDecoder_BeginFrame(DXVA2_CONTEXT(ctx)->decoder,
> - get_surface(frame),
> + get_surface(avctx, frame),
> NULL);
> #endif
> if (hr != E_PENDING || ++runs > 50)
> @@ -315,7 +967,8 @@ end:
> int ff_dxva2_is_d3d11(const AVCodecContext *avctx)
> {
> #if CONFIG_D3D11VA
> - return avctx->pix_fmt == AV_PIX_FMT_D3D11VA_VLD;
> + return avctx->pix_fmt == AV_PIX_FMT_D3D11VA_VLD ||
> + avctx->pix_fmt == AV_PIX_FMT_D3D11;
> #else
> return 0;
> #endif
> diff --git a/libavcodec/dxva2_h264.c b/libavcodec/dxva2_h264.c
> index f58a45807f..de0885058a 100644
> --- a/libavcodec/dxva2_h264.c
> +++ b/libavcodec/dxva2_h264.c
> @@ -518,10 +518,13 @@ AVHWAccel ff_h264_dxva2_hwaccel = {
> .type = AVMEDIA_TYPE_VIDEO,
> .id = AV_CODEC_ID_H264,
> .pix_fmt = AV_PIX_FMT_DXVA2_VLD,
> + .init = ff_dxva2_decode_init,
> + .uninit = ff_dxva2_decode_uninit,
> .start_frame = dxva2_h264_start_frame,
> .decode_slice = dxva2_h264_decode_slice,
> .end_frame = dxva2_h264_end_frame,
> .frame_priv_data_size = sizeof(struct dxva2_picture_context),
> + .priv_data_size = sizeof(FFDXVASharedContext),
> };
> #endif
>
> @@ -531,9 +534,28 @@ AVHWAccel ff_h264_d3d11va_hwaccel = {
> .type = AVMEDIA_TYPE_VIDEO,
> .id = AV_CODEC_ID_H264,
> .pix_fmt = AV_PIX_FMT_D3D11VA_VLD,
> + .init = ff_dxva2_decode_init,
> + .uninit = ff_dxva2_decode_uninit,
> .start_frame = dxva2_h264_start_frame,
> .decode_slice = dxva2_h264_decode_slice,
> .end_frame = dxva2_h264_end_frame,
> .frame_priv_data_size = sizeof(struct dxva2_picture_context),
> + .priv_data_size = sizeof(FFDXVASharedContext),
> +};
> +#endif
> +
> +#if CONFIG_H264_D3D11VA2_HWACCEL
> +AVHWAccel ff_h264_d3d11va2_hwaccel = {
> + .name = "h264_d3d11va2",
> + .type = AVMEDIA_TYPE_VIDEO,
> + .id = AV_CODEC_ID_H264,
> + .pix_fmt = AV_PIX_FMT_D3D11,
> + .init = ff_dxva2_decode_init,
> + .uninit = ff_dxva2_decode_uninit,
> + .start_frame = dxva2_h264_start_frame,
> + .decode_slice = dxva2_h264_decode_slice,
> + .end_frame = dxva2_h264_end_frame,
> + .frame_priv_data_size = sizeof(struct dxva2_picture_context),
> + .priv_data_size = sizeof(FFDXVASharedContext),
> };
> #endif
> diff --git a/libavcodec/dxva2_hevc.c b/libavcodec/dxva2_hevc.c
> index f2bb8b26a5..4bff26d6a8 100644
> --- a/libavcodec/dxva2_hevc.c
> +++ b/libavcodec/dxva2_hevc.c
> @@ -427,10 +427,13 @@ AVHWAccel ff_hevc_dxva2_hwaccel = {
> .type = AVMEDIA_TYPE_VIDEO,
> .id = AV_CODEC_ID_HEVC,
> .pix_fmt = AV_PIX_FMT_DXVA2_VLD,
> + .init = ff_dxva2_decode_init,
> + .uninit = ff_dxva2_decode_uninit,
> .start_frame = dxva2_hevc_start_frame,
> .decode_slice = dxva2_hevc_decode_slice,
> .end_frame = dxva2_hevc_end_frame,
> .frame_priv_data_size = sizeof(struct hevc_dxva2_picture_context),
> + .priv_data_size = sizeof(FFDXVASharedContext),
> };
> #endif
>
> @@ -440,9 +443,28 @@ AVHWAccel ff_hevc_d3d11va_hwaccel = {
> .type = AVMEDIA_TYPE_VIDEO,
> .id = AV_CODEC_ID_HEVC,
> .pix_fmt = AV_PIX_FMT_D3D11VA_VLD,
> + .init = ff_dxva2_decode_init,
> + .uninit = ff_dxva2_decode_uninit,
> .start_frame = dxva2_hevc_start_frame,
> .decode_slice = dxva2_hevc_decode_slice,
> .end_frame = dxva2_hevc_end_frame,
> .frame_priv_data_size = sizeof(struct hevc_dxva2_picture_context),
> + .priv_data_size = sizeof(FFDXVASharedContext),
> +};
> +#endif
> +
> +#if CONFIG_HEVC_D3D11VA2_HWACCEL
> +AVHWAccel ff_hevc_d3d11va2_hwaccel = {
> + .name = "hevc_d3d11va2",
> + .type = AVMEDIA_TYPE_VIDEO,
> + .id = AV_CODEC_ID_HEVC,
> + .pix_fmt = AV_PIX_FMT_D3D11,
> + .init = ff_dxva2_decode_init,
> + .uninit = ff_dxva2_decode_uninit,
> + .start_frame = dxva2_hevc_start_frame,
> + .decode_slice = dxva2_hevc_decode_slice,
> + .end_frame = dxva2_hevc_end_frame,
> + .frame_priv_data_size = sizeof(struct hevc_dxva2_picture_context),
> + .priv_data_size = sizeof(FFDXVASharedContext),
> };
> #endif
> diff --git a/libavcodec/dxva2_internal.h b/libavcodec/dxva2_internal.h
> index 04d20493f9..affb675de5 100644
> --- a/libavcodec/dxva2_internal.h
> +++ b/libavcodec/dxva2_internal.h
> @@ -32,9 +32,11 @@
>
> #if CONFIG_DXVA2
> #include "dxva2.h"
> +#include "libavutil/hwcontext_dxva2.h"
> #endif
> #if CONFIG_D3D11VA
> #include "d3d11va.h"
> +#include "libavutil/hwcontext_d3d11va.h"
> #endif
> #if HAVE_DXVA_H
> /* When targeting WINAPI_FAMILY_PHONE_APP or WINAPI_FAMILY_APP, dxva.h
> @@ -46,7 +48,10 @@
> #include <dxva.h>
> #endif
>
> +#include "libavutil/hwcontext.h"
> +
> #include "avcodec.h"
> +#include "internal.h"
>
> typedef void DECODER_BUFFER_DESC;
>
> @@ -59,7 +64,39 @@ typedef union {
> #endif
> } AVDXVAContext;
>
> -#define DXVA_CONTEXT(avctx) ((AVDXVAContext*)(avctx)->hwaccel_context)
> +typedef struct FFDXVASharedContext {
> + AVBufferRef *decoder_ref;
> +
> + // FF_DXVA2_WORKAROUND_* flags
> + uint64_t workaround;
> +
> + // E.g. AV_PIX_FMT_D3D11 (same as AVCodecContext.pix_fmt, except during init)
> + enum AVPixelFormat pix_fmt;
> +
> + AVHWDeviceContext *device_ctx;
> +
> +#if CONFIG_D3D11VA
> + ID3D11VideoDecoder *d3d11_decoder;
> + D3D11_VIDEO_DECODER_CONFIG d3d11_config;
> + ID3D11VideoDecoderOutputView **d3d11_views;
> + int nb_d3d11_views;
> + ID3D11Texture2D *d3d11_texture;
> +#endif
> +
> +#if CONFIG_DXVA2
> + IDirectXVideoDecoder *dxva2_decoder;
> + IDirectXVideoDecoderService *dxva2_service;
> + DXVA2_ConfigPictureDecode dxva2_config;
> +#endif
> +
> + // Legacy (but used by code outside of setup)
> + // In generic mode, DXVA_CONTEXT() will return a pointer to this.
> + AVDXVAContext ctx;
> +} FFDXVASharedContext;
> +
> +#define DXVA_SHARED_CONTEXT(avctx) ((FFDXVASharedContext*)((avctx)->internal->hwaccel_priv_data))
> +
> +#define DXVA_CONTEXT(avctx) (AVDXVAContext*)((avctx)->hwaccel_context ? (avctx)->hwaccel_context : (&(DXVA_SHARED_CONTEXT(avctx)->ctx)))
>
> #define D3D11VA_CONTEXT(ctx) (&ctx->d3d11va)
> #define DXVA2_CONTEXT(ctx) (&ctx->dxva2)
> @@ -115,6 +152,10 @@ int ff_dxva2_common_end_frame(AVCodecContext *, AVFrame *,
>                               DECODER_BUFFER_DESC *bs,
>                               DECODER_BUFFER_DESC *slice));
>
> +int ff_dxva2_decode_init(AVCodecContext *avctx);
> +
> +int ff_dxva2_decode_uninit(AVCodecContext *avctx);
> +
> int ff_dxva2_is_d3d11(const AVCodecContext *avctx);
>
> #endif /* AVCODEC_DXVA2_INTERNAL_H */
> diff --git a/libavcodec/dxva2_mpeg2.c b/libavcodec/dxva2_mpeg2.c
> index 6ae30a5773..ab80ca300a 100644
> --- a/libavcodec/dxva2_mpeg2.c
> +++ b/libavcodec/dxva2_mpeg2.c
> @@ -323,10 +323,13 @@ AVHWAccel ff_mpeg2_dxva2_hwaccel = {
> .type = AVMEDIA_TYPE_VIDEO,
> .id = AV_CODEC_ID_MPEG2VIDEO,
> .pix_fmt = AV_PIX_FMT_DXVA2_VLD,
> + .init = ff_dxva2_decode_init,
> + .uninit = ff_dxva2_decode_uninit,
> .start_frame = dxva2_mpeg2_start_frame,
> .decode_slice = dxva2_mpeg2_decode_slice,
> .end_frame = dxva2_mpeg2_end_frame,
> .frame_priv_data_size = sizeof(struct dxva2_picture_context),
> + .priv_data_size = sizeof(FFDXVASharedContext),
> };
> #endif
>
> @@ -336,9 +339,28 @@ AVHWAccel ff_mpeg2_d3d11va_hwaccel = {
> .type = AVMEDIA_TYPE_VIDEO,
> .id = AV_CODEC_ID_MPEG2VIDEO,
> .pix_fmt = AV_PIX_FMT_D3D11VA_VLD,
> + .init = ff_dxva2_decode_init,
> + .uninit = ff_dxva2_decode_uninit,
> .start_frame = dxva2_mpeg2_start_frame,
> .decode_slice = dxva2_mpeg2_decode_slice,
> .end_frame = dxva2_mpeg2_end_frame,
> .frame_priv_data_size = sizeof(struct dxva2_picture_context),
> + .priv_data_size = sizeof(FFDXVASharedContext),
> +};
> +#endif
> +
> +#if CONFIG_MPEG2_D3D11VA2_HWACCEL
> +AVHWAccel ff_mpeg2_d3d11va2_hwaccel = {
> + .name = "mpeg2_d3d11va2",
> + .type = AVMEDIA_TYPE_VIDEO,
> + .id = AV_CODEC_ID_MPEG2VIDEO,
> + .pix_fmt = AV_PIX_FMT_D3D11,
> + .init = ff_dxva2_decode_init,
> + .uninit = ff_dxva2_decode_uninit,
> + .start_frame = dxva2_mpeg2_start_frame,
> + .decode_slice = dxva2_mpeg2_decode_slice,
> + .end_frame = dxva2_mpeg2_end_frame,
> + .frame_priv_data_size = sizeof(struct dxva2_picture_context),
> + .priv_data_size = sizeof(FFDXVASharedContext),
> };
> #endif
> diff --git a/libavcodec/dxva2_vc1.c b/libavcodec/dxva2_vc1.c
> index b63580ed2e..22d3d299b6 100644
> --- a/libavcodec/dxva2_vc1.c
> +++ b/libavcodec/dxva2_vc1.c
> @@ -323,10 +323,13 @@ AVHWAccel ff_wmv3_dxva2_hwaccel = {
> .type = AVMEDIA_TYPE_VIDEO,
> .id = AV_CODEC_ID_WMV3,
> .pix_fmt = AV_PIX_FMT_DXVA2_VLD,
> + .init = ff_dxva2_decode_init,
> + .uninit = ff_dxva2_decode_uninit,
> .start_frame = dxva2_vc1_start_frame,
> .decode_slice = dxva2_vc1_decode_slice,
> .end_frame = dxva2_vc1_end_frame,
> .frame_priv_data_size = sizeof(struct dxva2_picture_context),
> + .priv_data_size = sizeof(FFDXVASharedContext),
> };
> #endif
>
> @@ -336,10 +339,13 @@ AVHWAccel ff_vc1_dxva2_hwaccel = {
> .type = AVMEDIA_TYPE_VIDEO,
> .id = AV_CODEC_ID_VC1,
> .pix_fmt = AV_PIX_FMT_DXVA2_VLD,
> + .init = ff_dxva2_decode_init,
> + .uninit = ff_dxva2_decode_uninit,
> .start_frame = dxva2_vc1_start_frame,
> .decode_slice = dxva2_vc1_decode_slice,
> .end_frame = dxva2_vc1_end_frame,
> .frame_priv_data_size = sizeof(struct dxva2_picture_context),
> + .priv_data_size = sizeof(FFDXVASharedContext),
> };
> #endif
>
> @@ -349,10 +355,29 @@ AVHWAccel ff_wmv3_d3d11va_hwaccel = {
> .type = AVMEDIA_TYPE_VIDEO,
> .id = AV_CODEC_ID_WMV3,
> .pix_fmt = AV_PIX_FMT_D3D11VA_VLD,
> + .init = ff_dxva2_decode_init,
> + .uninit = ff_dxva2_decode_uninit,
> .start_frame = dxva2_vc1_start_frame,
> .decode_slice = dxva2_vc1_decode_slice,
> .end_frame = dxva2_vc1_end_frame,
> .frame_priv_data_size = sizeof(struct dxva2_picture_context),
> + .priv_data_size = sizeof(FFDXVASharedContext),
> +};
> +#endif
> +
> +#if CONFIG_WMV3_D3D11VA2_HWACCEL
> +AVHWAccel ff_wmv3_d3d11va2_hwaccel = {
> + .name = "wmv3_d3d11va2",
> + .type = AVMEDIA_TYPE_VIDEO,
> + .id = AV_CODEC_ID_WMV3,
> + .pix_fmt = AV_PIX_FMT_D3D11,
> + .init = ff_dxva2_decode_init,
> + .uninit = ff_dxva2_decode_uninit,
> + .start_frame = dxva2_vc1_start_frame,
> + .decode_slice = dxva2_vc1_decode_slice,
> + .end_frame = dxva2_vc1_end_frame,
> + .frame_priv_data_size = sizeof(struct dxva2_picture_context),
> + .priv_data_size = sizeof(FFDXVASharedContext),
> };
> #endif
>
> @@ -362,9 +387,28 @@ AVHWAccel ff_vc1_d3d11va_hwaccel = {
> .type = AVMEDIA_TYPE_VIDEO,
> .id = AV_CODEC_ID_VC1,
> .pix_fmt = AV_PIX_FMT_D3D11VA_VLD,
> + .init = ff_dxva2_decode_init,
> + .uninit = ff_dxva2_decode_uninit,
> + .start_frame = dxva2_vc1_start_frame,
> + .decode_slice = dxva2_vc1_decode_slice,
> + .end_frame = dxva2_vc1_end_frame,
> + .frame_priv_data_size = sizeof(struct dxva2_picture_context),
> + .priv_data_size = sizeof(FFDXVASharedContext),
> +};
> +#endif
> +
> +#if CONFIG_VC1_D3D11VA2_HWACCEL
> +AVHWAccel ff_vc1_d3d11va2_hwaccel = {
> + .name = "vc1_d3d11va2",
> + .type = AVMEDIA_TYPE_VIDEO,
> + .id = AV_CODEC_ID_VC1,
> + .pix_fmt = AV_PIX_FMT_D3D11,
> + .init = ff_dxva2_decode_init,
> + .uninit = ff_dxva2_decode_uninit,
> .start_frame = dxva2_vc1_start_frame,
> .decode_slice = dxva2_vc1_decode_slice,
> .end_frame = dxva2_vc1_end_frame,
> .frame_priv_data_size = sizeof(struct dxva2_picture_context),
> + .priv_data_size = sizeof(FFDXVASharedContext),
> };
> #endif
> diff --git a/libavcodec/h264_slice.c b/libavcodec/h264_slice.c
> index 95e366605c..c9f1dbb86f 100644
> --- a/libavcodec/h264_slice.c
> +++ b/libavcodec/h264_slice.c
> @@ -717,7 +717,7 @@ static void init_scan_tables(H264Context *h)
> static enum AVPixelFormat get_pixel_format(H264Context *h)
> {
> #define HWACCEL_MAX (CONFIG_H264_DXVA2_HWACCEL + \
> - CONFIG_H264_D3D11VA_HWACCEL + \
> + (CONFIG_H264_D3D11VA_HWACCEL * 2) + \
> CONFIG_H264_VAAPI_HWACCEL + \
> (CONFIG_H264_VDA_HWACCEL * 2) + \
> CONFIG_H264_VDPAU_HWACCEL)
> @@ -769,6 +769,7 @@ static enum AVPixelFormat get_pixel_format(H264Context *h)
> #endif
> #if CONFIG_H264_D3D11VA_HWACCEL
> *fmt++ = AV_PIX_FMT_D3D11VA_VLD;
> + *fmt++ = AV_PIX_FMT_D3D11;
> #endif
> #if CONFIG_H264_VAAPI_HWACCEL
> *fmt++ = AV_PIX_FMT_VAAPI;
> diff --git a/libavcodec/hevcdec.c b/libavcodec/hevcdec.c
> index 69d5908551..7a9182af9b 100644
> --- a/libavcodec/hevcdec.c
> +++ b/libavcodec/hevcdec.c
> @@ -383,7 +383,7 @@ static void export_stream_params(AVCodecContext *avctx, const HEVCParamSets *ps,
>
> static enum AVPixelFormat get_format(HEVCContext *s, const HEVCSPS *sps)
> {
> -    #define HWACCEL_MAX (CONFIG_HEVC_DXVA2_HWACCEL + CONFIG_HEVC_D3D11VA_HWACCEL + \
> +    #define HWACCEL_MAX (CONFIG_HEVC_DXVA2_HWACCEL + CONFIG_HEVC_D3D11VA_HWACCEL * 2 + \
>                           CONFIG_HEVC_VAAPI_HWACCEL + CONFIG_HEVC_VDPAU_HWACCEL)
> enum AVPixelFormat pix_fmts[HWACCEL_MAX + 2], *fmt = pix_fmts;
>
> @@ -391,6 +391,7 @@ static enum AVPixelFormat get_format(HEVCContext *s, const HEVCSPS *sps)
> sps->pix_fmt == AV_PIX_FMT_YUV420P10) {
> #if CONFIG_HEVC_D3D11VA_HWACCEL
> *fmt++ = AV_PIX_FMT_D3D11VA_VLD;
> + *fmt++ = AV_PIX_FMT_D3D11;
Can this be configured at build time? Since I will not support the
new format anytime soon, I'd rather not try it for every file we
decode for nothing.
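Until there is a configure switch, an application that does not want the new path can also just skip it in its get_format callback, roughly like this (callback name invented; the list order matches the pixfmt lists patched above, so the old D3D11VA entry still comes first):

static enum AVPixelFormat skip_new_d3d11(AVCodecContext *avctx,
                                         const enum AVPixelFormat *fmts)
{
    const enum AVPixelFormat *p;
    for (p = fmts; *p != AV_PIX_FMT_NONE; p++) {
        if (*p != AV_PIX_FMT_D3D11) /* ignore the new pixfmt, take the next offer */
            return *p;
    }
    return AV_PIX_FMT_NONE;
}

A real callback would of course also check the remaining entries against the formats the application actually supports.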
> #endif
> #if CONFIG_HEVC_DXVA2_HWACCEL
> *fmt++ = AV_PIX_FMT_DXVA2_VLD;
> diff --git a/libavcodec/mpeg12dec.c b/libavcodec/mpeg12dec.c
> index 6702ad1ed7..9a9a92701a 100644
> --- a/libavcodec/mpeg12dec.c
> +++ b/libavcodec/mpeg12dec.c
> @@ -1111,6 +1111,7 @@ static const enum AVPixelFormat mpeg12_hwaccel_pixfmt_list_420[] = {
> #endif
> #if CONFIG_MPEG2_D3D11VA_HWACCEL
> AV_PIX_FMT_D3D11VA_VLD,
> + AV_PIX_FMT_D3D11,
> #endif
> #if CONFIG_MPEG2_VAAPI_HWACCEL
> AV_PIX_FMT_VAAPI,
> diff --git a/libavcodec/vc1dec.c b/libavcodec/vc1dec.c
> index 51745c12ec..5e00a33e3d 100644
> --- a/libavcodec/vc1dec.c
> +++ b/libavcodec/vc1dec.c
> @@ -962,6 +962,7 @@ static const enum AVPixelFormat vc1_hwaccel_pixfmt_list_420[] = {
> #endif
> #if CONFIG_VC1_D3D11VA_HWACCEL
> AV_PIX_FMT_D3D11VA_VLD,
> + AV_PIX_FMT_D3D11,
> #endif
> #if CONFIG_VC1_VAAPI_HWACCEL
> AV_PIX_FMT_VAAPI,
> diff --git a/libavcodec/version.h b/libavcodec/version.h
> index 88f17a1f78..bc5b8304bd 100644
> --- a/libavcodec/version.h
> +++ b/libavcodec/version.h
> @@ -28,8 +28,8 @@
> #include "libavutil/version.h"
>
> #define LIBAVCODEC_VERSION_MAJOR 58
> -#define LIBAVCODEC_VERSION_MINOR 3
> -#define LIBAVCODEC_VERSION_MICRO 1
> +#define LIBAVCODEC_VERSION_MINOR 4
> +#define LIBAVCODEC_VERSION_MICRO 0
>
> #define LIBAVCODEC_VERSION_INT AV_VERSION_INT(LIBAVCODEC_VERSION_MAJOR, \
> LIBAVCODEC_VERSION_MINOR, \
> diff --git a/libavutil/hwcontext_dxva2.h b/libavutil/hwcontext_dxva2.h
> index 2290c26066..c8e7a5c978 100644
> --- a/libavutil/hwcontext_dxva2.h
> +++ b/libavutil/hwcontext_dxva2.h
> @@ -65,6 +65,9 @@ typedef struct AVDXVA2FramesContext {
> *
> * If it is non-NULL, libavutil will call IDirectXVideoDecoder_Release() on
> * it just before the internal surface pool is freed.
> + *
> + * This is for convenience only. Some code uses other methods to manage the
> + * decoder reference.
> */
> IDirectXVideoDecoder *decoder_to_release;
> } AVDXVA2FramesContext;
> --
> 2.11.0
>
_______________________________________________
libav-devel mailing list
[email protected]
https://lists.libav.org/mailman/listinfo/libav-devel