On Tue, Jun 06, 2017 at 06:51:07PM +0200, wm4 wrote:
> --- a/configure
> +++ b/configure
> @@ -2166,7 +2166,7 @@ zmbv_encoder_deps="zlib"
>  
>  # hardware accelerators
>  d3d11va_deps="d3d11_h dxva_h ID3D11VideoDecoder"
> -dxva2_deps="dxva2api_h DXVA2_ConfigPictureDecode"
> +dxva2_deps="dxva2api_h DXVA2_ConfigPictureDecode ole32"

This looks wrong. Why does dxva2 suddenly depend on ole32?

> @@ -4877,6 +4877,10 @@ if enabled libxcb; then
>          check_pkg_config libxcb_xfixes xcb-xfixes xcb/xfixes.h xcb_xfixes_get_cursor_image
>  fi
>  
> +enabled d3d11va &&
> +    check_type "windows.h d3d11.h" ID3D11VideoDevice ||
> +    disable d3d11va

This should be below

  check_type "d3d9.h dxva2api.h" DXVA2_ConfigPictureDecode -D_WIN32_WINNT=0x0602

> --- /dev/null
> +++ b/libavutil/hwcontext_d3d11va.c
> @@ -0,0 +1,490 @@
> +static void free_texture(void *opaque, uint8_t *data)
> +{
> +    ID3D11Texture2D_Release((ID3D11Texture2D *)opaque);

pointless void* cast.

> +static AVBufferRef *d3d11va_pool_alloc(void *opaque, int size)
> +{
> +    AVHWFramesContext        *ctx = (AVHWFramesContext*)opaque;

same
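
For illustration, the cast-free forms would look something like this (a sketch only; in C, void * converts implicitly to any object pointer type, and the typed local also keeps the COM call macro happy, since ID3D11Texture2D_Release() dereferences its argument):

  static void free_texture(void *opaque, uint8_t *data)
  {
      ID3D11Texture2D *texture = opaque; /* implicit void * conversion */
      ID3D11Texture2D_Release(texture);
      /* (rest of the cleanup, if any, unchanged) */
  }

and in d3d11va_pool_alloc() a plain "AVHWFramesContext *ctx = opaque;" suffices.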

> +static int d3d11va_get_buffer(AVHWFramesContext *ctx, AVFrame *frame)
> +{
> +    frame->data[0] = (uint8_t *)desc->texture;
> +    frame->data[1] = (uint8_t *)(intptr_t)desc->index;

Is this double cast necessary?
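
For reference, the intermediate intptr_t cast is the usual idiom for storing an integer in a pointer field without a "cast to pointer from integer of different size" warning on 64-bit targets. A minimal sketch, with made-up helper names:

  #include <stdint.h>

  /* Widening through intptr_t makes the int-to-pointer conversion explicit;
   * a direct cast can trigger -Wint-to-pointer-cast where the sizes differ. */
  static uint8_t *index_to_ptr(int index)
  {
      return (uint8_t *)(intptr_t)index;
  }

  static int ptr_to_index(const uint8_t *ptr)
  {
      return (int)(intptr_t)ptr;
  }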

> +static int d3d11va_transfer_data(AVHWFramesContext *ctx, AVFrame *dst,
> +                                 const AVFrame *src)
> +{
> +    // (The interface types are compatible.)
> +    ID3D11Resource *texture = (ID3D11Resource *)(ID3D11Texture2D *)frame->data[0];

.. another shady double cast ..
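
If the double cast is to be avoided, the COM-proper route would be QueryInterface — a sketch only, assuming the C bindings from d3d11.h, with error handling abbreviated:

  /* Ask for the ID3D11Resource interface explicitly instead of relying on
   * the prefix-compatible vtable layout; the COM contract guarantees that
   * this QueryInterface succeeds for a texture. */
  ID3D11Texture2D *texture  = (ID3D11Texture2D *)frame->data[0];
  ID3D11Resource  *resource = NULL;
  HRESULT hr = ID3D11Texture2D_QueryInterface(texture, &IID_ID3D11Resource,
                                              (void **)&resource);
  if (FAILED(hr))
      return AVERROR_UNKNOWN;
  /* ... use resource, then drop the extra reference ... */
  ID3D11Resource_Release(resource);

The plain cast does work with the C bindings, since ID3D11Texture2D starts with the ID3D11Resource vtable, so this is mostly a style question.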

> +static int d3d11va_device_init(AVHWDeviceContext *hwdev)
> +{
> +    if (!device_hwctx->lock) {
> +        device_hwctx->lock_ctx = CreateMutex(NULL, 0, NULL);
> +        if (device_hwctx->lock_ctx == INVALID_HANDLE_VALUE) {
> +            av_log(NULL, AV_LOG_ERROR, "Failed to create a mutex\n");
> +            return AVERROR(EINVAL);
> +        }
> +        device_hwctx->lock = d3d11va_default_lock;
> +        device_hwctx->unlock = d3d11va_default_unlock;

nit: align
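
i.e. something like the sketch below. As an aside, MSDN documents CreateMutex() as returning NULL on failure — INVALID_HANDLE_VALUE is the CreateFile() convention — so the error check above probably wants adjusting too:

  device_hwctx->lock_ctx = CreateMutex(NULL, 0, NULL);
  if (!device_hwctx->lock_ctx) { /* NULL on failure, not INVALID_HANDLE_VALUE */
      av_log(NULL, AV_LOG_ERROR, "Failed to create a mutex\n");
      return AVERROR(EINVAL);
  }
  device_hwctx->lock   = d3d11va_default_lock;
  device_hwctx->unlock = d3d11va_default_unlock;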

> --- /dev/null
> +++ b/libavutil/hwcontext_d3d11va.h
> @@ -0,0 +1,160 @@
> +typedef struct AVD3D11FrameDescriptor {
> +    /**
> +     * The texture in which the frame is located in. The reference count is

  The texture in which the frame is located.

or

  The texture the frame is located in.

> +    /**
> +     * The index into the array texture element representing the frame, or 0
> +     * if the texture is not an array texture.
> +     *
> +     * Normally stored in AVFrame.data[1] (casted from intptr_t).

s/casted/cast/

> --- a/libavutil/pixfmt.h
> +++ b/libavutil/pixfmt.h
> @@ -237,6 +237,18 @@ enum AVPixelFormat {
>      AV_PIX_FMT_GBRAP10BE,  ///< planar GBR 4:4:4:4 40bpp, big-endian
>      AV_PIX_FMT_GBRAP10LE,  ///< planar GBR 4:4:4:4 40bpp, little-endian
>  
> +    /**
> +     * Hardware surfaces for Direct3D11.
> +     *
> +     * This is preferred over the legacy AV_PIX_FMT_D3D11VA_VLD. The new D3D11
> +     * hwaccel API and filtering support AV_PIX_FMT_D3D11 only.
> +     *
> +     * data[0] contains a ID3D11Texture2D pointer, and data[1] contains the
> +     * texture array index of the frame as intptr_t if the ID3D11Texture2D is
> +     * an array texture (or always 0 if it's a normal texture)

Missing period at the end of the sentence.
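
For reference, consuming the documented layout would look something like this (hypothetical user code, not part of the patch):

  /* data[0] carries the ID3D11Texture2D, data[1] the array slice index
   * (always 0 for a non-array texture). */
  ID3D11Texture2D *texture = (ID3D11Texture2D *)frame->data[0];
  intptr_t         index   = (intptr_t)frame->data[1];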

Diego