Araz Iusubov:
> From: ffmpeg-devel <ffmpeg-devel-boun...@ffmpeg.org> On Behalf Of Araz
> Iusubov
> Sent: Thursday, July 17, 2025 9:30 PM
> To: ffmpeg-devel@ffmpeg.org
> Cc: Araz Iusubov <primeadv...@gmail.com>
> Subject: [FFmpeg-devel] [PATCH, v6] avcodec/d3d12va_encode: texture array
> support for HEVC
> 
> This patch adds support for the texture array feature used by AMD boards
> in the D3D12 HEVC encoder.
> In texture array mode, a single texture array is shared for all reference
> and reconstructed pictures, using different subresources.
> The implementation ensures compatibility and has been successfully tested
> on AMD, Intel, and NVIDIA GPUs.
> 
> v2 updates:
> 1. The reference to MaxL1ReferencesForB for the H.264 codec was updated to
> use the corresponding H.264 field instead of the HEVC one.
> 2. The max_subresource_array_size calculation was adjusted by removing the
> D3D12VA_VIDEO_ENC_ASYNC_DEPTH offset.
> 
> v3 updates:
> 1. Fixed a type mismatch by explicitly casting AVD3D12VAFrame* to
> (uint8_t*) when assigning to data[0].
> 2. Adjusted logging format specifier for HRESULT to use `%lx`.
> 
> v4 updates:
> 1. Moved texture array management to hwcontext_d3d12va for proper
> abstraction.
> 2. Added `texture_array` and `texture_array_size` fields to
> AVD3D12VAFramesContext.
> 3. Implemented shared texture array allocation during `av_hwframe_ctx_init`.
> 4. Frames now receive unique subresource indices via
> `d3d12va_pool_alloc_texture_array`.
> 5. Removed `d3d12va_create_texture_array`; allocation is now handled
> entirely within hwcontext.
> 6. Encoder now uses subresource indices provided by hwcontext instead of
> managing them manually.
> 
> v5 updates:
> No changes; resubmitted because v4 was missed by Patchwork.
> 
> v6 updates:
> 1. Minor cosmetic fixes according to the review of v5.
> 2. Bumped lavu version to 60.5.100 and updated APIchanges.
> 
> ---
>  doc/APIchanges                   |   4 +
>  libavcodec/d3d12va_encode.c      | 184 +++++++++++++++++++++++--------
>  libavcodec/d3d12va_encode.h      |  12 ++
>  libavcodec/d3d12va_encode_hevc.c |   5 +-
>  libavutil/hwcontext_d3d12va.c    |  65 ++++++++++-
>  libavutil/hwcontext_d3d12va.h    |  18 +++
>  libavutil/version.h              |   4 +-
>  7 files changed, 240 insertions(+), 52 deletions(-)
> 
> diff --git a/doc/APIchanges b/doc/APIchanges
> index d6e38245f8..eab06cd251 100644
> --- a/doc/APIchanges
> +++ b/doc/APIchanges
> @@ -2,6 +2,10 @@ The last version increases of all libraries were on 2025-03-28
> 
>  API changes, most recent first:
> 
> +2025-07-xx - xxxxxxxxxx - lavu 60.5.100 - hwcontext_d3d12va.h
> +  Add AVD3D12VAFrame.subresource_index, AVD3D12VAFramesContext.texture_array
> +  and AVD3D12VAFramesContext.texture_array_size to support texture array mode.
> +
>  2025-07-xx - xxxxxxxxxd - lavfi 11.2.100 - avfilter.h
>    Add AVFilterGraph->max_buffered_frames.
> 
> diff --git a/libavcodec/d3d12va_encode.c b/libavcodec/d3d12va_encode.c
> index e24a5b8d24..1f202f512c 100644
> --- a/libavcodec/d3d12va_encode.c
> +++ b/libavcodec/d3d12va_encode.c
> @@ -191,7 +191,8 @@ static int d3d12va_encode_issue(AVCodecContext *avctx,
>      FFHWBaseEncodeContext *base_ctx = avctx->priv_data;
>      D3D12VAEncodeContext       *ctx = avctx->priv_data;
>      D3D12VAEncodePicture       *pic = base_pic->priv;
> -    AVD3D12VAFramesContext *frames_hwctx = base_ctx->input_frames->hwctx;
> +    AVD3D12VAFramesContext     *frames_hwctx_input = base_ctx->input_frames->hwctx;
> +    AVD3D12VAFramesContext     *frames_hwctx_recon = base_ctx->recon_frames->hwctx;
>      int err, i, j;
>      HRESULT hr;
>      char data[MAX_PARAM_BUFFER_SIZE];
> @@ -221,7 +222,7 @@ static int d3d12va_encode_issue(AVCodecContext *avctx,
>      D3D12_VIDEO_ENCODER_RESOLVE_METADATA_INPUT_ARGUMENTS input_metadata = {
>          .EncoderCodec = ctx->codec->d3d12_codec,
>          .EncoderProfile = ctx->profile->d3d12_profile,
> -        .EncoderInputFormat = frames_hwctx->format,
> +        .EncoderInputFormat = frames_hwctx_input->format,
>          .EncodedPictureEffectiveResolution = ctx->resolution,
>      };
> 
> @@ -268,6 +269,8 @@ static int d3d12va_encode_issue(AVCodecContext *avctx,
>      av_log(avctx, AV_LOG_DEBUG, "Recon surface is %p.\n",
>             pic->recon_surface->texture);
> 
> +    pic->subresource_index = ctx->is_texture_array ? pic->recon_surface->subresource_index : 0;
> +
>      pic->output_buffer_ref = av_buffer_pool_get(ctx->output_buffer_pool);
>      if (!pic->output_buffer_ref) {
>          err = AVERROR(ENOMEM);
> @@ -325,11 +328,26 @@ static int d3d12va_encode_issue(AVCodecContext *avctx,
>              goto fail;
>          }
> 
> +        if (ctx->is_texture_array) {
> +            d3d12_refs.pSubresources = av_calloc(d3d12_refs.NumTexture2Ds,
> +                                                 sizeof(*d3d12_refs.pSubresources));
> +            if (!d3d12_refs.pSubresources) {
> +                err = AVERROR(ENOMEM);
> +                goto fail;
> +            }
> +        }
> +
>          i = 0;
> -        for (j = 0; j < base_pic->nb_refs[0]; j++)
> -            d3d12_refs.ppTexture2Ds[i++] = ((D3D12VAEncodePicture *)base_pic->refs[0][j]->priv)->recon_surface->texture;
> -        for (j = 0; j < base_pic->nb_refs[1]; j++)
> -            d3d12_refs.ppTexture2Ds[i++] = ((D3D12VAEncodePicture *)base_pic->refs[1][j]->priv)->recon_surface->texture;
> +        for (j = 0; j < base_pic->nb_refs[0]; j++) {
> +            d3d12_refs.ppTexture2Ds[i]  = ((D3D12VAEncodePicture *)base_pic->refs[0][j]->priv)->recon_surface->texture;
> +            d3d12_refs.pSubresources[i] = ctx->is_texture_array ? ((D3D12VAEncodePicture *)base_pic->refs[0][j]->priv)->subresource_index : 0;
> +            i++;
> +        }
> +        for (j = 0; j < base_pic->nb_refs[1]; j++) {
> +            d3d12_refs.ppTexture2Ds[i]  = ((D3D12VAEncodePicture *)base_pic->refs[1][j]->priv)->recon_surface->texture;
> +            d3d12_refs.pSubresources[i] = ctx->is_texture_array ? ((D3D12VAEncodePicture *)base_pic->refs[1][j]->priv)->subresource_index : 0;
> +            i++;
> +        }
>      }
> 
>      input_args.PictureControlDesc.IntraRefreshFrameIndex  = 0;
> @@ -343,7 +361,7 @@ static int d3d12va_encode_issue(AVCodecContext *avctx,
>      output_args.Bitstream.pBuffer                                    = pic->output_buffer;
>      output_args.Bitstream.FrameStartOffset                           = pic->aligned_header_size;
>      output_args.ReconstructedPicture.pReconstructedPicture           = pic->recon_surface->texture;
> -    output_args.ReconstructedPicture.ReconstructedPictureSubresource = 0;
> +    output_args.ReconstructedPicture.ReconstructedPictureSubresource = ctx->is_texture_array ? pic->subresource_index : 0;
>      output_args.EncoderOutputMetadata.pBuffer                        = pic->encoded_metadata;
>      output_args.EncoderOutputMetadata.Offset                         = 0;
> 
> @@ -369,52 +387,99 @@ static int d3d12va_encode_issue(AVCodecContext *avctx,
>          goto fail;
>      }
> 
> -#define TRANSITION_BARRIER(res, before, after)                      \
> +#define TRANSITION_BARRIER(res, subres, before, after)              \
>      (D3D12_RESOURCE_BARRIER) {                                      \
>          .Type  = D3D12_RESOURCE_BARRIER_TYPE_TRANSITION,            \
>          .Flags = D3D12_RESOURCE_BARRIER_FLAG_NONE,                  \
>          .Transition = {                                             \
>              .pResource   = res,                                     \
> -            .Subresource = D3D12_RESOURCE_BARRIER_ALL_SUBRESOURCES, \
> +            .Subresource = subres,                                  \
>              .StateBefore = before,                                  \
>              .StateAfter  = after,                                   \
>          },                                                          \
>      }
> 
>      barriers[0] = TRANSITION_BARRIER(pic->input_surface->texture,
> +                                     D3D12_RESOURCE_BARRIER_ALL_SUBRESOURCES,
>                                       D3D12_RESOURCE_STATE_COMMON,
>                                       D3D12_RESOURCE_STATE_VIDEO_ENCODE_READ);
>      barriers[1] = TRANSITION_BARRIER(pic->output_buffer,
> +                                     D3D12_RESOURCE_BARRIER_ALL_SUBRESOURCES,
>                                       D3D12_RESOURCE_STATE_COMMON,
>                                       D3D12_RESOURCE_STATE_VIDEO_ENCODE_WRITE);
> -    barriers[2] = TRANSITION_BARRIER(pic->recon_surface->texture,
> -                                     D3D12_RESOURCE_STATE_COMMON,
> -                                     D3D12_RESOURCE_STATE_VIDEO_ENCODE_WRITE);
> -    barriers[3] = TRANSITION_BARRIER(pic->encoded_metadata,
> +    barriers[2] = TRANSITION_BARRIER(pic->encoded_metadata,
> +                                     D3D12_RESOURCE_BARRIER_ALL_SUBRESOURCES,
>                                       D3D12_RESOURCE_STATE_COMMON,
>                                       D3D12_RESOURCE_STATE_VIDEO_ENCODE_WRITE);
> -    barriers[4] = TRANSITION_BARRIER(pic->resolved_metadata,
> +    barriers[3] = TRANSITION_BARRIER(pic->resolved_metadata,
> +                                     D3D12_RESOURCE_BARRIER_ALL_SUBRESOURCES,
>                                       D3D12_RESOURCE_STATE_COMMON,
>                                       D3D12_RESOURCE_STATE_VIDEO_ENCODE_WRITE);
> 
> -    ID3D12VideoEncodeCommandList2_ResourceBarrier(cmd_list, 5, barriers);
> +    ID3D12VideoEncodeCommandList2_ResourceBarrier(cmd_list, 4, barriers);
> 
> -    if (d3d12_refs.NumTexture2Ds) {
> -        D3D12_RESOURCE_BARRIER refs_barriers[3];
> -
> -        for (i = 0; i < d3d12_refs.NumTexture2Ds; i++)
> -            refs_barriers[i] = TRANSITION_BARRIER(d3d12_refs.ppTexture2Ds[i],
> -                                                  D3D12_RESOURCE_STATE_COMMON,
> -                                                  D3D12_RESOURCE_STATE_VIDEO_ENCODE_READ);
> -
> -        ID3D12VideoEncodeCommandList2_ResourceBarrier(cmd_list, d3d12_refs.NumTexture2Ds,
> -                                                      refs_barriers);
> +    // Set transition barriers for the reference and reconstructed pictures.
> +    int barriers_ref_index = 0;
> +    D3D12_RESOURCE_BARRIER *barriers_ref = NULL;
> +    if (ctx->is_texture_array) {
> +        barriers_ref = av_calloc(frames_hwctx_recon->texture_array_size * ctx->plane_count,
> +            sizeof(D3D12_RESOURCE_BARRIER));
> +    } else {
> +        barriers_ref = av_calloc(MAX_DPB_SIZE, sizeof(D3D12_RESOURCE_BARRIER));
> +    }
> +
> +    if (ctx->is_texture_array) {
> +        // In texture array mode, D3D12 uses the same texture array (resource) for all
> +        // the reference pics in ppTexture2Ds and also for the pReconstructedPicture,
> +        // just different subresources.
> +        D3D12_RESOURCE_DESC references_tex_array_desc = { 0 };
> +
> +        pic->recon_surface->texture->lpVtbl->GetDesc(pic->recon_surface->texture, &references_tex_array_desc);
> +
> +        for (uint32_t reference_subresource = 0; reference_subresource < references_tex_array_desc.DepthOrArraySize;
> +            reference_subresource++) {
> +
> +            // D3D12 DecomposeSubresource
> +            uint32_t mip_slice, plane_slice, array_slice, array_size;
> +            array_size = references_tex_array_desc.DepthOrArraySize;
> +            mip_slice = reference_subresource % references_tex_array_desc.MipLevels;
> +            array_slice = (reference_subresource / references_tex_array_desc.MipLevels) % array_size;
> +
> +            for (plane_slice = 0; plane_slice < ctx->plane_count; plane_slice++) {
> +                // Calculate the subresource index
> +                uint32_t planeOutputSubresource = mip_slice + array_slice * references_tex_array_desc.MipLevels +
> +                                        plane_slice * references_tex_array_desc.MipLevels * array_size;
> +                if (reference_subresource == pic->subresource_index) {
> +                    barriers_ref[barriers_ref_index++] = TRANSITION_BARRIER(pic->recon_surface->texture, planeOutputSubresource,
> +                                        D3D12_RESOURCE_STATE_COMMON,
> +                                        D3D12_RESOURCE_STATE_VIDEO_ENCODE_WRITE);
> +                } else {
> +                    barriers_ref[barriers_ref_index++] = TRANSITION_BARRIER(pic->recon_surface->texture, planeOutputSubresource,
> +                                        D3D12_RESOURCE_STATE_COMMON,
> +                                        D3D12_RESOURCE_STATE_VIDEO_ENCODE_READ);
> +                }
> +            }
> +        }
> +    } else {
> +        barriers_ref[barriers_ref_index++] = TRANSITION_BARRIER(pic->recon_surface->texture,
> +                                        D3D12_RESOURCE_BARRIER_ALL_SUBRESOURCES,
> +                                        D3D12_RESOURCE_STATE_COMMON,
> +                                        D3D12_RESOURCE_STATE_VIDEO_ENCODE_WRITE);
> +
> +        if (d3d12_refs.NumTexture2Ds) {
> +            for (i = 0; i < d3d12_refs.NumTexture2Ds; i++)
> +                barriers_ref[barriers_ref_index++] = TRANSITION_BARRIER(d3d12_refs.ppTexture2Ds[i],
> +                                                    D3D12_RESOURCE_BARRIER_ALL_SUBRESOURCES,
> +                                                    D3D12_RESOURCE_STATE_COMMON,
> +                                                    D3D12_RESOURCE_STATE_VIDEO_ENCODE_READ);
> +        }
>      }
> +    ID3D12VideoEncodeCommandList2_ResourceBarrier(cmd_list, barriers_ref_index, barriers_ref);
> 
>      ID3D12VideoEncodeCommandList2_EncodeFrame(cmd_list, ctx->encoder, ctx->encoder_heap,
>                                                &input_args, &output_args);
> 
>      barriers[3] = TRANSITION_BARRIER(pic->encoded_metadata,
> +                                     D3D12_RESOURCE_BARRIER_ALL_SUBRESOURCES,
>                                       D3D12_RESOURCE_STATE_VIDEO_ENCODE_WRITE,
>                                       D3D12_RESOURCE_STATE_VIDEO_ENCODE_READ);
> 
> @@ -422,35 +487,35 @@ static int d3d12va_encode_issue(AVCodecContext *avctx,
> 
>      ID3D12VideoEncodeCommandList2_ResolveEncoderOutputMetadata(cmd_list, &input_metadata, &output_metadata);
> 
> -    if (d3d12_refs.NumTexture2Ds) {
> -        D3D12_RESOURCE_BARRIER refs_barriers[3];
> -
> -        for (i = 0; i < d3d12_refs.NumTexture2Ds; i++)
> -            refs_barriers[i] = TRANSITION_BARRIER(d3d12_refs.ppTexture2Ds[i],
> -                                                  D3D12_RESOURCE_STATE_VIDEO_ENCODE_READ,
> -                                                  D3D12_RESOURCE_STATE_COMMON);
> -
> -        ID3D12VideoEncodeCommandList2_ResourceBarrier(cmd_list, d3d12_refs.NumTexture2Ds,
> -                                                      refs_barriers);
> +    // Swap the barriers_ref transition states.
> +    if (barriers_ref_index > 0) {
> +        for (i = 0; i < barriers_ref_index; i++) {
> +            D3D12_RESOURCE_STATES temp_state = barriers_ref[i].Transition.StateBefore;
> +            barriers_ref[i].Transition.StateBefore = barriers_ref[i].Transition.StateAfter;
> +            barriers_ref[i].Transition.StateAfter = temp_state;
> +        }
> +        ID3D12VideoEncodeCommandList2_ResourceBarrier(cmd_list, barriers_ref_index,
> +                                                      barriers_ref);
>      }
> 
>      barriers[0] = TRANSITION_BARRIER(pic->input_surface->texture,
> +                                     D3D12_RESOURCE_BARRIER_ALL_SUBRESOURCES,
>                                       D3D12_RESOURCE_STATE_VIDEO_ENCODE_READ,
>                                       D3D12_RESOURCE_STATE_COMMON);
>      barriers[1] = TRANSITION_BARRIER(pic->output_buffer,
> +                                     D3D12_RESOURCE_BARRIER_ALL_SUBRESOURCES,
>                                       D3D12_RESOURCE_STATE_VIDEO_ENCODE_WRITE,
>                                       D3D12_RESOURCE_STATE_COMMON);
> -    barriers[2] = TRANSITION_BARRIER(pic->recon_surface->texture,
> -                                     D3D12_RESOURCE_STATE_VIDEO_ENCODE_WRITE,
> -                                     D3D12_RESOURCE_STATE_COMMON);
> -    barriers[3] = TRANSITION_BARRIER(pic->encoded_metadata,
> +    barriers[2] = TRANSITION_BARRIER(pic->encoded_metadata,
> +                                     D3D12_RESOURCE_BARRIER_ALL_SUBRESOURCES,
>                                       D3D12_RESOURCE_STATE_VIDEO_ENCODE_READ,
>                                       D3D12_RESOURCE_STATE_COMMON);
> -    barriers[4] = TRANSITION_BARRIER(pic->resolved_metadata,
> +    barriers[3] = TRANSITION_BARRIER(pic->resolved_metadata,
> +                                     D3D12_RESOURCE_BARRIER_ALL_SUBRESOURCES,
>                                       D3D12_RESOURCE_STATE_VIDEO_ENCODE_WRITE,
>                                       D3D12_RESOURCE_STATE_COMMON);
> 
> -    ID3D12VideoEncodeCommandList2_ResourceBarrier(cmd_list, 5, barriers);
> +    ID3D12VideoEncodeCommandList2_ResourceBarrier(cmd_list, 4, barriers);
> 
>      hr = ID3D12VideoEncodeCommandList2_Close(cmd_list);
>      if (FAILED(hr)) {
> @@ -489,6 +554,14 @@ static int d3d12va_encode_issue(AVCodecContext *avctx,
>      if (d3d12_refs.ppTexture2Ds)
>          av_freep(&d3d12_refs.ppTexture2Ds);
> 
> +    if (ctx->is_texture_array) {
> +        if (d3d12_refs.pSubresources)
> +            av_freep(&d3d12_refs.pSubresources);
> +    }
> +
> +    if (barriers_ref)
> +        av_freep(&barriers_ref);
> +
>      return 0;
> 
>  fail:
> @@ -498,6 +571,14 @@ fail:
>      if (d3d12_refs.ppTexture2Ds)
>          av_freep(&d3d12_refs.ppTexture2Ds);
> 
> +    if (ctx->is_texture_array) {
> +        if (d3d12_refs.pSubresources)
> +            av_freep(&d3d12_refs.pSubresources);
> +    }
> +
> +    if (barriers_ref)
> +        av_freep(&barriers_ref);
> +
>      if (ctx->codec->free_picture_params)
>          ctx->codec->free_picture_params(pic);
> 
> @@ -1341,6 +1422,7 @@ fail:
>  static int d3d12va_encode_create_recon_frames(AVCodecContext *avctx)
>  {
>      FFHWBaseEncodeContext *base_ctx = avctx->priv_data;
> +    D3D12VAEncodeContext       *ctx = avctx->priv_data;
>      AVD3D12VAFramesContext *hwctx;
>      enum AVPixelFormat recon_format;
>      int err;
> @@ -1364,6 +1446,8 @@ static int d3d12va_encode_create_recon_frames(AVCodecContext *avctx)
>      hwctx->flags = D3D12_RESOURCE_FLAG_VIDEO_ENCODE_REFERENCE_ONLY |
>                     D3D12_RESOURCE_FLAG_DENY_SHADER_RESOURCE;
> 
> +    hwctx->texture_array_size = ctx->is_texture_array ? MAX_DPB_SIZE + 1 : 0;
> +
>      err = av_hwframe_ctx_init(base_ctx->recon_frames_ref);
>      if (err < 0) {
>          av_log(avctx, AV_LOG_ERROR, "Failed to initialise reconstructed "
> @@ -1396,6 +1480,7 @@ int ff_d3d12va_encode_init(AVCodecContext *avctx)
>      FFHWBaseEncodeContext *base_ctx = avctx->priv_data;
>      D3D12VAEncodeContext       *ctx = avctx->priv_data;
>      D3D12_FEATURE_DATA_VIDEO_FEATURE_AREA_SUPPORT support = { 0 };
> +    D3D12_FEATURE_DATA_FORMAT_INFO format_info = {0};
>      int err;
>      HRESULT hr;
> 
> @@ -1431,6 +1516,15 @@ int ff_d3d12va_encode_init(AVCodecContext *avctx)
>          goto fail;
>      }
> 
> +    format_info.Format = ((AVD3D12VAFramesContext*)base_ctx->input_frames->hwctx)->format;
> +    if (FAILED(hr = ID3D12VideoDevice_CheckFeatureSupport(ctx->hwctx->device, D3D12_FEATURE_FORMAT_INFO,
> +        &format_info, sizeof(format_info)))) {
> +        av_log(avctx, AV_LOG_ERROR, "Failed to query format plane count: 0x%lx\n", hr);
> +        err = AVERROR_EXTERNAL;
> +        goto fail;
> +    }
> +    ctx->plane_count = format_info.PlaneCount;
> +
>      err = d3d12va_encode_set_profile(avctx);
>      if (err < 0)
>          goto fail;
> @@ -1458,10 +1552,6 @@ int ff_d3d12va_encode_init(AVCodecContext *avctx)
>      if (err < 0)
>          goto fail;
> 
> -    err = d3d12va_encode_create_recon_frames(avctx);
> -    if (err < 0)
> -        goto fail;
> -
>      err = d3d12va_encode_prepare_output_buffers(avctx);
>      if (err < 0)
>          goto fail;
> @@ -1487,6 +1577,10 @@ int ff_d3d12va_encode_init(AVCodecContext *avctx)
>              goto fail;
>      }
> 
> +    err = d3d12va_encode_create_recon_frames(avctx);
> +    if (err < 0)
> +        goto fail;
> +
>      base_ctx->output_delay = base_ctx->b_per_p;
>      base_ctx->decode_delay = base_ctx->max_b_depth;
> 
> diff --git a/libavcodec/d3d12va_encode.h b/libavcodec/d3d12va_encode.h
> index 3b0b8153d5..c8e64ddffd 100644
> --- a/libavcodec/d3d12va_encode.h
> +++ b/libavcodec/d3d12va_encode.h
> @@ -52,6 +52,8 @@ typedef struct D3D12VAEncodePicture {
>      ID3D12Resource *encoded_metadata;
>      ID3D12Resource *resolved_metadata;
> 
> +    int            subresource_index;
> +
>      D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA pic_ctl;
> 
>      int             fence_value;
> @@ -189,6 +191,16 @@ typedef struct D3D12VAEncodeContext {
>       */
>      AVBufferPool *output_buffer_pool;
> 
> +    /**
> +     * Flag indicating whether the hardware requires texture array mode.
> +     */
> +    int is_texture_array;
> +
> +    /**
> +     * The number of planes in the input DXGI format.
> +     */
> +    int plane_count;
> +
>      /**
>       * D3D12 video encoder.
>       */
> diff --git a/libavcodec/d3d12va_encode_hevc.c b/libavcodec/d3d12va_encode_hevc.c
> index 938ba01f54..7e1d973f7e 100644
> --- a/libavcodec/d3d12va_encode_hevc.c
> +++ b/libavcodec/d3d12va_encode_hevc.c
> @@ -280,9 +280,8 @@ static int d3d12va_encode_hevc_init_sequence_params(AVCodecContext *avctx)
>      }
> 
>      if (support.SupportFlags & D3D12_VIDEO_ENCODER_SUPPORT_FLAG_RECONSTRUCTED_FRAMES_REQUIRE_TEXTURE_ARRAYS) {
> -        av_log(avctx, AV_LOG_ERROR, "D3D12 video encode on this device requires texture array support, "
> -               "but it's not implemented.\n");
> -        return AVERROR_PATCHWELCOME;
> +        ctx->is_texture_array = 1;
> +        av_log(avctx, AV_LOG_DEBUG, "D3D12 video encode on this device uses texture array mode.\n");
>      }
> 
>      desc = av_pix_fmt_desc_get(base_ctx->input_frames->sw_format);
> diff --git a/libavutil/hwcontext_d3d12va.c b/libavutil/hwcontext_d3d12va.c
> index 6507cf69c1..05418c9d94 100644
> --- a/libavutil/hwcontext_d3d12va.c
> +++ b/libavutil/hwcontext_d3d12va.c
> @@ -49,6 +49,7 @@ typedef struct D3D12VAFramesContext {
>      ID3D12GraphicsCommandList *command_list;
>      AVD3D12VASyncContext       sync_ctx;
>      UINT                       luma_component_size;
> +    int                        nb_surfaces_used;
>  } D3D12VAFramesContext;
> 
>  typedef struct D3D12VADevicePriv {
> @@ -174,7 +175,8 @@ fail:
> 
>  static void d3d12va_frames_uninit(AVHWFramesContext *ctx)
>  {
> -    D3D12VAFramesContext *s = ctx->hwctx;
> +    D3D12VAFramesContext   *s     = ctx->hwctx;
> +    AVD3D12VAFramesContext *hwctx = ctx->hwctx;
> 
>      D3D12_OBJECT_RELEASE(s->sync_ctx.fence);
>      if (s->sync_ctx.event)
> @@ -185,6 +187,11 @@ static void d3d12va_frames_uninit(AVHWFramesContext *ctx)
>      D3D12_OBJECT_RELEASE(s->command_allocator);
>      D3D12_OBJECT_RELEASE(s->command_list);
>      D3D12_OBJECT_RELEASE(s->command_queue);
> +
> +    if (hwctx->texture_array) {
> +        D3D12_OBJECT_RELEASE(hwctx->texture_array);
> +        hwctx->texture_array = NULL;
> +    }
>  }
> 
>  static int d3d12va_frames_get_constraints(AVHWDeviceContext *ctx, const void *hwconfig, AVHWFramesConstraints *constraints)
> @@ -228,6 +235,28 @@ static void free_texture(void *opaque, uint8_t *data)
>      av_freep(&data);
>  }
> 
> +static AVBufferRef *d3d12va_pool_alloc_texture_array(AVHWFramesContext *ctx)
> +{
> +    AVD3D12VAFrame         *desc  = av_mallocz(sizeof(*desc));
> +    D3D12VAFramesContext   *s     = ctx->hwctx;
> +    AVD3D12VAFramesContext *hwctx = ctx->hwctx;
> +    AVBufferRef *buf;
> +
> +    // In texture array mode, D3D12 uses the same texture address for all the pictures,
> +    // just different subresources.
> +    desc->subresource_index = s->nb_surfaces_used;
> +    desc->texture = hwctx->texture_array;
> +
> +    buf = av_buffer_create((uint8_t *)desc, sizeof(*desc), NULL, NULL, 0);
> +
> +    if (!buf) {
> +        av_free(desc);
> +        return NULL;
> +    }
> +    s->nb_surfaces_used++;
> +    return buf;
> +}
> +
>  static AVBufferRef *d3d12va_pool_alloc(void *opaque, size_t size)
>  {
>      AVHWFramesContext      *ctx          = (AVHWFramesContext *)opaque;
> @@ -236,6 +265,11 @@ static AVBufferRef *d3d12va_pool_alloc(void *opaque, size_t size)
> 
>      AVBufferRef *buf;
>      AVD3D12VAFrame *frame;
> +
> +    //For texture array mode, no need to create texture.
> +    if (hwctx->texture_array_size > 0)
> +        return d3d12va_pool_alloc_texture_array(ctx);
> +
>      D3D12_HEAP_PROPERTIES props = { .Type = D3D12_HEAP_TYPE_DEFAULT };
>      D3D12_RESOURCE_DESC desc = {
>          .Dimension        = D3D12_RESOURCE_DIMENSION_TEXTURE2D,
> @@ -280,7 +314,9 @@ fail:
> 
>  static int d3d12va_frames_init(AVHWFramesContext *ctx)
>  {
> -    AVD3D12VAFramesContext *hwctx = ctx->hwctx;
> +    AVD3D12VAFramesContext *hwctx        = ctx->hwctx;
> +    AVD3D12VADeviceContext *device_hwctx = ctx->device_ctx->hwctx;
> +
>      int i;
> 
>      for (i = 0; i < FF_ARRAY_ELEMS(supported_formats); i++) {
> @@ -298,6 +334,31 @@ static int d3d12va_frames_init(AVHWFramesContext *ctx)
>          return AVERROR(EINVAL);
>      }
> 
> +    // For texture array mode, create the texture array resource in the init stage.
> +    // This texture array will be used for all the pictures, but with different subresources.
> +    if (hwctx->texture_array_size > 0) {
> +        D3D12_HEAP_PROPERTIES props = { .Type = D3D12_HEAP_TYPE_DEFAULT };
> +
> +        D3D12_RESOURCE_DESC desc = {
> +            .Dimension        = D3D12_RESOURCE_DIMENSION_TEXTURE2D,
> +            .Alignment        = 0,
> +            .Width            = ctx->width,
> +            .Height           = ctx->height,
> +            .DepthOrArraySize = hwctx->texture_array_size,
> +            .MipLevels        = 1,
> +            .Format           = hwctx->format,
> +            .SampleDesc       = {.Count = 1, .Quality = 0 },
> +            .Layout           = D3D12_TEXTURE_LAYOUT_UNKNOWN,
> +            .Flags            = hwctx->flags,
> +        };
> +
> +        if (FAILED(ID3D12Device_CreateCommittedResource(device_hwctx->device, &props, D3D12_HEAP_FLAG_NONE, &desc,
> +            D3D12_RESOURCE_STATE_COMMON, NULL, &IID_ID3D12Resource, (void **)&hwctx->texture_array))) {
> +            av_log(ctx, AV_LOG_ERROR, "Could not create the texture array\n");
> +            return AVERROR(EINVAL);
> +        }
> +    }
> +
>      ffhwframesctx(ctx)->pool_internal = av_buffer_pool_init2(sizeof(AVD3D12VAFrame),
>          ctx, d3d12va_pool_alloc, NULL);
> 
> diff --git a/libavutil/hwcontext_d3d12va.h b/libavutil/hwcontext_d3d12va.h
> index 212a6a6146..d48d847d11 100644
> --- a/libavutil/hwcontext_d3d12va.h
> +++ b/libavutil/hwcontext_d3d12va.h
> @@ -111,6 +111,11 @@ typedef struct AVD3D12VAFrame {
>       */
>      ID3D12Resource *texture;
> 
> +    /**
> +     * In texture array mode, the index of the subresource used by this frame.
> +     */
> +    int subresource_index;
> +
>      /**
>       * The sync context for the texture
>       *
> @@ -137,6 +142,19 @@ typedef struct AVD3D12VAFramesContext {
>       * @see https://learn.microsoft.com/en-us/windows/win32/api/d3d12/ne-d3d12-d3d12_resource_flags
>       */
>      D3D12_RESOURCE_FLAGS flags;
> +
> +    /**
> +     * In texture array mode, D3D12 uses the same texture array (resource) for all
> +     * pictures.
> +     */
> +    ID3D12Resource *texture_array;
> +
> +    /**
> +     * In texture array mode, D3D12 uses the same texture array (resource) for all
> +     * pictures, but different subresources to represent each picture.
> +     * This is the size of the texture array (in number of subresources).
> +     */
> +    int texture_array_size;
>  } AVD3D12VAFramesContext;
> 
>  #endif /* AVUTIL_HWCONTEXT_D3D12VA_H */
> diff --git a/libavutil/version.h b/libavutil/version.h
> index 2c0aa3e35e..29cd31e229 100644
> --- a/libavutil/version.h
> +++ b/libavutil/version.h
> @@ -79,8 +79,8 @@
>   */
> 
>  #define LIBAVUTIL_VERSION_MAJOR  60
> -#define LIBAVUTIL_VERSION_MINOR   4
> -#define LIBAVUTIL_VERSION_MICRO 101
> +#define LIBAVUTIL_VERSION_MINOR   5
> +#define LIBAVUTIL_VERSION_MICRO 100
> 
>  #define LIBAVUTIL_VERSION_INT   AV_VERSION_INT(LIBAVUTIL_VERSION_MAJOR, \
>                                                 LIBAVUTIL_VERSION_MINOR, \
> --
> 2.45.2.windows.1
> 
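
One note for readers following the texture array logic: the plane-aware
subresource index the barrier loop computes follows the standard D3D12
subresource layout. Below is a minimal standalone sketch of that arithmetic;
the helper name d3d12_calc_subresource is hypothetical and only mirrors what
D3D12CalcSubresource / the planeOutputSubresource computation in the patch do:

    #include <stdint.h>

    /* Illustrative only: maps (mip, array slice, plane) to a flat
     * subresource index, matching the patch's planeOutputSubresource. */
    static inline uint32_t d3d12_calc_subresource(uint32_t mip_slice,
                                                  uint32_t array_slice,
                                                  uint32_t plane_slice,
                                                  uint32_t mip_levels,
                                                  uint32_t array_size)
    {
        return mip_slice + array_slice * mip_levels +
               plane_slice * mip_levels * array_size;
    }

With MipLevels == 1, as the shared array is created here, array slice N of an
NV12/P010 texture uses subresource N for the luma plane and N + array_size for
the chroma plane, which is exactly the pair of barriers emitted per slice.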

LGTM, will apply
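
On the API side, a caller opts in by setting texture_array_size on the frames
context before av_hwframe_ctx_init(); every frame allocated from the pool then
shares hwctx->texture_array and exposes its slot through
AVD3D12VAFrame.subresource_index. A rough usage sketch (device_ref, the
resolution and the pool size are placeholders, error handling omitted):

    AVBufferRef *frames_ref = av_hwframe_ctx_alloc(device_ref);
    AVHWFramesContext      *fc    = (AVHWFramesContext *)frames_ref->data;
    AVD3D12VAFramesContext *hwctx = fc->hwctx;

    fc->format    = AV_PIX_FMT_D3D12;
    fc->sw_format = AV_PIX_FMT_NV12;
    fc->width     = 1920;
    fc->height    = 1080;
    hwctx->texture_array_size = 17; /* e.g. MAX_DPB_SIZE + 1, as the encoder does */

    if (av_hwframe_ctx_init(frames_ref) >= 0) {
        AVFrame *frame = av_frame_alloc();
        av_hwframe_get_buffer(frames_ref, frame, 0);
        /* All frames share the one array texture; each carries its slice: */
        AVD3D12VAFrame *f = (AVD3D12VAFrame *)frame->data[0];
        /* f->texture == hwctx->texture_array, f->subresource_index == slice */
    }
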
_______________________________________________
ffmpeg-devel mailing list
ffmpeg-devel@ffmpeg.org
https://ffmpeg.org/mailman/listinfo/ffmpeg-devel

To unsubscribe, visit link above, or email
ffmpeg-devel-requ...@ffmpeg.org with subject "unsubscribe".
