On 4/23/2025 5:45 PM, Mark Thompson wrote:
---
  configure                |   1 +
  libavcodec/Makefile      |   1 +
  libavcodec/allcodecs.c   |   1 +
  libavcodec/apv_decode.c  | 403 +++++++++++++++++++++++++++++++++++++++
  libavcodec/apv_decode.h  |  80 ++++++++
  libavcodec/apv_dsp.c     | 136 +++++++++++++
  libavcodec/apv_dsp.h     |  37 ++++
  libavcodec/apv_entropy.c | 200 +++++++++++++++++++
  8 files changed, 859 insertions(+)
  create mode 100644 libavcodec/apv_decode.c
  create mode 100644 libavcodec/apv_decode.h
  create mode 100644 libavcodec/apv_dsp.c
  create mode 100644 libavcodec/apv_dsp.h
  create mode 100644 libavcodec/apv_entropy.c

diff --git a/configure b/configure
index ca404d2797..ee270b770c 100755
--- a/configure
+++ b/configure
@@ -2935,6 +2935,7 @@ apng_decoder_select="inflate_wrapper"
  apng_encoder_select="deflate_wrapper llvidencdsp"
  aptx_encoder_select="audio_frame_queue"
  aptx_hd_encoder_select="audio_frame_queue"
+apv_decoder_select="cbs_apv"
  asv1_decoder_select="blockdsp bswapdsp idctdsp"
  asv1_encoder_select="aandcttables bswapdsp fdctdsp pixblockdsp"
  asv2_decoder_select="blockdsp bswapdsp idctdsp"
diff --git a/libavcodec/Makefile b/libavcodec/Makefile
index a5f5c4e904..e674671460 100644
--- a/libavcodec/Makefile
+++ b/libavcodec/Makefile
@@ -244,6 +244,7 @@ OBJS-$(CONFIG_APTX_HD_DECODER)         += aptxdec.o aptx.o
  OBJS-$(CONFIG_APTX_HD_ENCODER)         += aptxenc.o aptx.o
  OBJS-$(CONFIG_APNG_DECODER)            += png.o pngdec.o pngdsp.o
  OBJS-$(CONFIG_APNG_ENCODER)            += png.o pngenc.o
+OBJS-$(CONFIG_APV_DECODER)             += apv_decode.o apv_entropy.o apv_dsp.o
  OBJS-$(CONFIG_ARBC_DECODER)            += arbc.o
  OBJS-$(CONFIG_ARGO_DECODER)            += argo.o
  OBJS-$(CONFIG_SSA_DECODER)             += assdec.o ass.o
diff --git a/libavcodec/allcodecs.c b/libavcodec/allcodecs.c
index f10519617e..09f06c71d6 100644
--- a/libavcodec/allcodecs.c
+++ b/libavcodec/allcodecs.c
@@ -47,6 +47,7 @@ extern const FFCodec ff_anm_decoder;
  extern const FFCodec ff_ansi_decoder;
  extern const FFCodec ff_apng_encoder;
  extern const FFCodec ff_apng_decoder;
+extern const FFCodec ff_apv_decoder;
  extern const FFCodec ff_arbc_decoder;
  extern const FFCodec ff_argo_decoder;
  extern const FFCodec ff_asv1_encoder;
diff --git a/libavcodec/apv_decode.c b/libavcodec/apv_decode.c
new file mode 100644
index 0000000000..0cc4f57dab
--- /dev/null
+++ b/libavcodec/apv_decode.c
@@ -0,0 +1,403 @@
+/*
+ * This file is part of FFmpeg.
+ *
+ * FFmpeg is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * FFmpeg is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with FFmpeg; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ */
+
+#include "libavutil/mastering_display_metadata.h"
+#include "libavutil/mem_internal.h"
+#include "libavutil/pixdesc.h"
+
+#include "apv.h"
+#include "apv_decode.h"
+#include "apv_dsp.h"
+#include "avcodec.h"
+#include "cbs.h"
+#include "cbs_apv.h"
+#include "codec_internal.h"
+#include "decode.h"
+#include "thread.h"
+
+
+typedef struct APVDecodeContext {
+    CodedBitstreamContext *cbc;
+    APVDSPContext dsp;
+
+    CodedBitstreamFragment au;
+    APVDerivedTileInfo tile_info;
+
+    APVVLCLUT decode_lut;
+
+    AVFrame *output_frame;
+} APVDecodeContext;
+
+static const enum AVPixelFormat apv_format_table[5][5] = {
+    { AV_PIX_FMT_GRAY8,    AV_PIX_FMT_GRAY10,     AV_PIX_FMT_GRAY12,     AV_PIX_FMT_GRAY14, AV_PIX_FMT_GRAY16 },
+    { 0 }, // 4:2:0 is not valid.
+    { AV_PIX_FMT_YUV422P,  AV_PIX_FMT_YUV422P10,  AV_PIX_FMT_YUV422P12,  AV_PIX_FMT_GRAY14, AV_PIX_FMT_YUV422P16 },
+    { AV_PIX_FMT_YUV444P,  AV_PIX_FMT_YUV444P10,  AV_PIX_FMT_YUV444P12,  AV_PIX_FMT_GRAY14, AV_PIX_FMT_YUV444P16 },
+    { AV_PIX_FMT_YUVA444P, AV_PIX_FMT_YUVA444P10, AV_PIX_FMT_YUVA444P12, AV_PIX_FMT_GRAY14, AV_PIX_FMT_YUVA444P16 },
+};
+
+static int apv_decode_check_format(AVCodecContext *avctx,
+                                   const APVRawFrameHeader *header)
+{
+    int err, bit_depth;
+
+    avctx->profile = header->frame_info.profile_idc;
+    avctx->level   = header->frame_info.level_idc;
+
+    bit_depth = header->frame_info.bit_depth_minus8 + 8;
+    if (bit_depth < 8 || bit_depth > 16 || bit_depth % 2) {
+        avpriv_request_sample(avctx, "Bit depth %d", bit_depth);
+        return AVERROR_PATCHWELCOME;
+    }
+    avctx->pix_fmt =
+        apv_format_table[header->frame_info.chroma_format_idc][bit_depth - 4 >> 2];
+
+    err = ff_set_dimensions(avctx,
+                            FFALIGN(header->frame_info.frame_width,  16),
+                            FFALIGN(header->frame_info.frame_height, 16));
+    if (err < 0) {
+        // Unsupported frame size.
+        return err;
+    }
+    avctx->width  = header->frame_info.frame_width;
+    avctx->height = header->frame_info.frame_height;
+
+    avctx->sample_aspect_ratio = (AVRational){ 1, 1 };
+
+    avctx->color_primaries = header->color_primaries;
+    avctx->color_trc       = header->transfer_characteristics;
+    avctx->colorspace      = header->matrix_coefficients;
+    avctx->color_range     = header->full_range_flag ? AVCOL_RANGE_JPEG
+                                                     : AVCOL_RANGE_MPEG;
+    avctx->chroma_sample_location = AVCHROMA_LOC_TOPLEFT;
+
+    avctx->refs = 0;
+    avctx->has_b_frames = 0;
+
+    return 0;
+}
+
+static av_cold int apv_decode_init(AVCodecContext *avctx)
+{
+    APVDecodeContext *apv = avctx->priv_data;
+    int err;
+
+    err = ff_cbs_init(&apv->cbc, AV_CODEC_ID_APV, avctx);
+    if (err < 0)
+        return err;
+
+    ff_apv_entropy_build_decode_lut(&apv->decode_lut);
+
+    ff_apv_dsp_init(&apv->dsp);
+
+    if (avctx->extradata) {
+        av_log(avctx, AV_LOG_WARNING,
+               "APV does not support extradata.\n");

Either remove this in preparation for extradata being exported/generated, or only print it if avctx->internal->is_copy is false. Otherwise it will be printed thread_count times when frame threading is used.
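
For the second option, an untested sketch (may need "internal.h" included for AVCodecInternal if it is not already pulled in indirectly):

    if (avctx->extradata && !avctx->internal->is_copy) {
        av_log(avctx, AV_LOG_WARNING,
               "APV does not support extradata.\n");
    }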

+    }
+
+    return 0;
+}
+
+static av_cold int apv_decode_close(AVCodecContext *avctx)
+{
+    APVDecodeContext *apv = avctx->priv_data;
+
+    ff_cbs_fragment_free(&apv->au);
+    ff_cbs_close(&apv->cbc);
+
+    return 0;
+}
+
+static int apv_decode_block(AVCodecContext *avctx,
+                            void *output,
+                            ptrdiff_t pitch,
+                            GetBitContext *gbc,
+                            APVEntropyState *entropy_state,
+                            int bit_depth,
+                            int qp_shift,
+                            const uint16_t *qmatrix)
+{
+    APVDecodeContext *apv = avctx->priv_data;
+    int err;
+
+    LOCAL_ALIGNED_32(int16_t, coeff, [64]);
+
+    err = ff_apv_entropy_decode_block(coeff, gbc, entropy_state);
+    if (err < 0)
+        return 0;
+
+    apv->dsp.decode_transquant(output, pitch,
+                               coeff, qmatrix,
+                               bit_depth, qp_shift);
+
+    return 0;
+}
+
+static int apv_decode_tile_component(AVCodecContext *avctx, void *data,
+                                     int job, int thread)
+{
+    APVRawFrame                      *input = data;
+    APVDecodeContext                   *apv = avctx->priv_data;
+    const CodedBitstreamAPVContext *apv_cbc = apv->cbc->priv_data;
+    const APVDerivedTileInfo     *tile_info = &apv_cbc->tile_info;
+
+    int tile_index = job / apv_cbc->num_comp;
+    int comp_index = job % apv_cbc->num_comp;
+
+    const AVPixFmtDescriptor *pix_fmt_desc =
+        av_pix_fmt_desc_get(avctx->pix_fmt);
+
+    int sub_w = comp_index == 0 ? 1 : pix_fmt_desc->log2_chroma_w + 1;
+    int sub_h = comp_index == 0 ? 1 : pix_fmt_desc->log2_chroma_h + 1;
+
+    APVRawTile *tile = &input->tile[tile_index];
+
+    int tile_y = tile_index / tile_info->tile_cols;
+    int tile_x = tile_index % tile_info->tile_cols;
+
+    int tile_start_x = tile_info->col_starts[tile_x];
+    int tile_start_y = tile_info->row_starts[tile_y];
+
+    int tile_width  = tile_info->col_starts[tile_x + 1] - tile_start_x;
+    int tile_height = tile_info->row_starts[tile_y + 1] - tile_start_y;
+
+    int tile_mb_width  = tile_width  / APV_MB_WIDTH;
+    int tile_mb_height = tile_height / APV_MB_HEIGHT;
+
+    int blk_mb_width  = 2 / sub_w;
+    int blk_mb_height = 2 / sub_h;
+
+    int bit_depth;
+    int qp_shift;
+    LOCAL_ALIGNED_32(uint16_t, qmatrix_scaled, [64]);
+
+    GetBitContext gbc;
+
+    APVEntropyState entropy_state = {
+        .log_ctx           = avctx,
+        .decode_lut        = &apv->decode_lut,
+        .prev_dc           = 0,
+        .prev_dc_diff      = 20,
+        .prev_1st_ac_level = 0,
+    };
+
+    init_get_bits8(&gbc, tile->tile_data[comp_index],
+                   tile->tile_header.tile_data_size[comp_index]);
+
+    // Combine the bitstream quantisation matrix with the qp scaling
+    // in advance.  (Including qp_shift as well would overflow 16 bits.)
+    // Fix the row ordering at the same time.
+    {
+        static const uint8_t apv_level_scale[6] = { 40, 45, 51, 57, 64, 71 };
+        int qp = tile->tile_header.tile_qp[comp_index];
+        int level_scale = apv_level_scale[qp % 6];
+
+        bit_depth = apv_cbc->bit_depth;
+        qp_shift  = qp / 6;
+
+        for (int y = 0; y < 8; y++) {
+            for (int x = 0; x < 8; x++)
+                qmatrix_scaled[y * 8 + x] = level_scale *
+                    input->frame_header.quantization_matrix.q_matrix[comp_index][x][y];
+        }
+    }
+
+    for (int mb_y = 0; mb_y < tile_mb_height; mb_y++) {
+        for (int mb_x = 0; mb_x < tile_mb_width; mb_x++) {
+            for (int blk_y = 0; blk_y < blk_mb_height; blk_y++) {
+                for (int blk_x = 0; blk_x < blk_mb_width; blk_x++) {
+                    int frame_y = (tile_start_y +
+                                   APV_MB_HEIGHT * mb_y +
+                                   APV_TR_SIZE * blk_y) / sub_h;
+                    int frame_x = (tile_start_x +
+                                   APV_MB_WIDTH * mb_x +
+                                   APV_TR_SIZE * blk_x) / sub_w;
+
+                    ptrdiff_t frame_pitch = apv->output_frame->linesize[comp_index];
+                    uint8_t  *block_start = apv->output_frame->data[comp_index] +
+                                            frame_y * frame_pitch + 2 * frame_x;
+
+                    apv_decode_block(avctx,
+                                     block_start, frame_pitch,
+                                     &gbc, &entropy_state,
+                                     bit_depth,
+                                     qp_shift,
+                                     qmatrix_scaled);
+                }
+            }
+        }
+    }
+
+    av_log(avctx, AV_LOG_DEBUG,
+           "Decoded tile %d component %d: %dx%d MBs starting at (%d,%d)\n",
+           tile_index, comp_index, tile_mb_width, tile_mb_height,
+           tile_start_x, tile_start_y);
+
+    return 0;
+}
+
+static int apv_decode(AVCodecContext *avctx, AVFrame *output,
+                      APVRawFrame *input)
+{
+    APVDecodeContext                   *apv = avctx->priv_data;
+    const CodedBitstreamAPVContext *apv_cbc = apv->cbc->priv_data;
+    const APVDerivedTileInfo     *tile_info = &apv_cbc->tile_info;
+    int err, job_count;
+
+    err = apv_decode_check_format(avctx, &input->frame_header);
+    if (err < 0) {
+        av_log(avctx, AV_LOG_ERROR, "Unsupported format parameters.\n");
+        return err;
+    }
+
+    err = ff_thread_get_buffer(avctx, output, 0);
+    if (err) {
+        av_log(avctx, AV_LOG_ERROR, "No output frame supplied.\n");
+        return err;
+    }
+
+    apv->output_frame = output;
+
+    // Each component within a tile is independent of every other,
+    // so we can decode all in parallel.
+    job_count = tile_info->num_tiles * apv_cbc->num_comp;
+
+    avctx->execute2(avctx, apv_decode_tile_component,
+                    input, NULL, job_count);
+
+    return 0;
+}
+
+static int apv_decode_metadata(AVCodecContext *avctx, AVFrame *frame,
+                               const APVRawMetadata *md)
+{
+    int err;
+
+    for (int i = 0; i < md->metadata_count; i++) {
+        const APVRawMetadataPayload *pl = &md->payloads[i];
+
+        switch (pl->payload_type) {
+        case APV_METADATA_MDCV:
+            {
+                const APVRawMetadataMDCV *mdcv = &pl->mdcv;
+                AVMasteringDisplayMetadata *mdm;
+
+                err = ff_decode_mastering_display_new(avctx, frame, &mdm);
+                if (err < 0)
+                    return err;
+
+                if (mdm) {
+                    for (int i = 0; i < 3; i++) {
+                        mdm->display_primaries[i][0] =
+                            av_make_q(mdcv->primary_chromaticity_x[i], 1 << 16);
+                        mdm->display_primaries[i][1] =
+                            av_make_q(mdcv->primary_chromaticity_y[i], 1 << 16);
+                    }
+
+                    mdm->white_point[0] =
+                        av_make_q(mdcv->white_point_chromaticity_x, 1 << 16);
+                    mdm->white_point[1] =
+                        av_make_q(mdcv->white_point_chromaticity_y, 1 << 16);
+
+                    mdm->max_luminance =
+                        av_make_q(mdcv->max_mastering_luminance, 1 << 8);
+                    mdm->min_luminance =
+                        av_make_q(mdcv->min_mastering_luminance, 1 << 14);
+
+                    mdm->has_primaries = 1;
+                    mdm->has_luminance = 1;
+                }
+            }
+            break;
+        case APV_METADATA_CLL:
+            {
+                const APVRawMetadataCLL *cll = &pl->cll;
+                AVContentLightMetadata *clm;
+
+                err = ff_decode_content_light_new(avctx, frame, &clm);
+                if (err < 0)
+                    return err;
+
+                if (clm) {
+                    clm->MaxCLL  = cll->max_cll;
+                    clm->MaxFALL = cll->max_fall;
+                }
+            }
+            break;
+        default:
+            // Ignore other types of metadata.
+            break;
+        }
+    }
+
+    return 0;
+}
+
+static int apv_decode_frame(AVCodecContext *avctx, AVFrame *frame,
+                            int *got_frame, AVPacket *packet)
+{
+    APVDecodeContext      *apv = avctx->priv_data;
+    CodedBitstreamFragment *au = &apv->au;
+    int err;
+
+    err = ff_cbs_read_packet(apv->cbc, au, packet);
+    if (err < 0) {
+        av_log(avctx, AV_LOG_ERROR, "Failed to read packet.\n");
+        return err;
+    }
+
+    for (int i = 0; i < au->nb_units; i++) {
+        CodedBitstreamUnit *pbu = &au->units[i];
+
+        switch (pbu->type) {
+        case APV_PBU_PRIMARY_FRAME:

If the other frame types are not going to be supported for now, then define decompose_unit_types so that CBS never decomposes them in the first place (see the sketch after the next comment).

+            err = apv_decode(avctx, frame, pbu->content);
+            if (err < 0)
+                return err;
+            *got_frame = 1;
+            break;
+        case APV_PBU_METADATA:
+            apv_decode_metadata(avctx, frame, pbu->content);
+            break;
+        case APV_PBU_ACCESS_UNIT_INFORMATION:
+        case APV_PBU_FILLER:

And add these too.
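
Untested sketch of what that could look like, keeping only the two PBU types the decoder actually parses:

    static const CodedBitstreamUnitType apv_decompose_unit_types[] = {
        APV_PBU_PRIMARY_FRAME,
        APV_PBU_METADATA,
    };

and then in apv_decode_init(), after ff_cbs_init():

    apv->cbc->decompose_unit_types    = apv_decompose_unit_types;
    apv->cbc->nb_decompose_unit_types = FF_ARRAY_ELEMS(apv_decompose_unit_types);

Units of any other type are then left undecomposed (content == NULL) and can simply be skipped in apv_decode_frame().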

+            // Ignored by the decoder.
+            break;
+        default:
+            av_log(avctx, AV_LOG_WARNING,

Maybe VERBOSE instead? If a sample contains unsupported frame types, this will spam the log at the default log level.

If anything, print at WARNING level only for PBU types that are not currently defined, as in the sketch below.
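
Untested sketch, assuming apv.h defines the remaining pbu_type names along these lines (APV_PBU_NON_PRIMARY_FRAME, APV_PBU_PREVIEW_FRAME, APV_PBU_DEPTH_FRAME, APV_PBU_ALPHA_FRAME):

        case APV_PBU_NON_PRIMARY_FRAME:
        case APV_PBU_PREVIEW_FRAME:
        case APV_PBU_DEPTH_FRAME:
        case APV_PBU_ALPHA_FRAME:
            av_log(avctx, AV_LOG_VERBOSE,
                   "Ignoring unsupported PBU type %d.\n", pbu->type);
            break;
        default:
            av_log(avctx, AV_LOG_WARNING,
                   "Ignoring undefined PBU type %d.\n", pbu->type);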

+                   "Ignoring unsupported PBU type %d.\n", pbu->type);
+        }
+    }
+
+    ff_cbs_fragment_reset(au);
+
+    return packet->size;
+}
+
+const FFCodec ff_apv_decoder = {
+    .p.name                = "apv",
+    CODEC_LONG_NAME("Advanced Professional Video"),
+    .p.type                = AVMEDIA_TYPE_VIDEO,
+    .p.id                  = AV_CODEC_ID_APV,
+    .priv_data_size        = sizeof(APVDecodeContext),
+    .init                  = apv_decode_init,
+    .close                 = apv_decode_close,
+    FF_CODEC_DECODE_CB(apv_decode_frame),
+    .p.capabilities        = AV_CODEC_CAP_DR1 |
+                             AV_CODEC_CAP_SLICE_THREADS |
+                             AV_CODEC_CAP_FRAME_THREADS,
+};
