Author: pebender
Date: Mon Jun 22 13:15:29 2009
New Revision: 5013
Added:
trunk/gar-minimyth/script/lib/ffmpeg/files/ffmpeg-h264_vaapi_take_19.patch
Modified:
trunk/gar-minimyth/html/minimyth/document-changelog.txt
trunk/gar-minimyth/script/lib/ffmpeg/Makefile
trunk/gar-minimyth/script/lib/ffmpeg/checksums
Log:
- Added H.264 VAAPI patch to FFmpeg.
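For reference, a quick way to confirm the new hardware accelerator is compiled in is to enumerate libavcodec's registered hwaccels. This is a minimal sketch against the 2009-era libavcodec API (avcodec_register_all() and av_hwaccel_next()); it is not part of this commit:

    /* Hedged sketch: list registered hwaccels; after this patch,
     * "h264_vaapi" should appear alongside the existing VAAPI entries. */
    #include <stdio.h>
    #include <libavcodec/avcodec.h>

    int main(void)
    {
        AVHWAccel *hwaccel = NULL;

        avcodec_register_all();                     /* also registers hwaccels (allcodecs.c) */
        while ((hwaccel = av_hwaccel_next(hwaccel)))
            printf("hwaccel: %s\n", hwaccel->name); /* expect "h264_vaapi" in the list */
        return 0;
    }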
Modified: trunk/gar-minimyth/html/minimyth/document-changelog.txt
==============================================================================
--- trunk/gar-minimyth/html/minimyth/document-changelog.txt (original)
+++ trunk/gar-minimyth/html/minimyth/document-changelog.txt Mon Jun 22 13:15:29 2009
@@ -34,6 +34,7 @@
- Modified Xine so that it does not leave defunct xscreensaver-command processes while it is running.
- Enabled VAAPI support in FFmpeg.
+ - Added H.264 VAAPI patch to FFmpeg.
- Added VAAPI support to VLC, but it is currently rather broken.
- You will need to set ffmpeg-hw=1 in
/home/minimyth/.confic/vlc/vlcrc
in order to enable VAAPI.
Modified: trunk/gar-minimyth/script/lib/ffmpeg/Makefile
==============================================================================
--- trunk/gar-minimyth/script/lib/ffmpeg/Makefile (original)
+++ trunk/gar-minimyth/script/lib/ffmpeg/Makefile Mon Jun 22 13:15:29 2009
@@ -3,7 +3,7 @@
CATEGORIES = lib
MASTER_SITES = svn://svn.ffmpeg.org/
DISTFILES = $(DISTNAME).tar.bz2
-PATCHFILES = $(GARNAME).patch.gar
+PATCHFILES = $(GARNAME)-h264_vaapi_take_19.patch $(GARNAME).patch.gar
LICENSE = GPL2/LGPL2_1
DESCRIPTION =
Modified: trunk/gar-minimyth/script/lib/ffmpeg/checksums
==============================================================================
--- trunk/gar-minimyth/script/lib/ffmpeg/checksums (original)
+++ trunk/gar-minimyth/script/lib/ffmpeg/checksums Mon Jun 22 13:15:29 2009
@@ -1 +1,2 @@
+1a0774e392c05307a54bf15bcf3c88ef download/ffmpeg-h264_vaapi_take_19.patch
aa5b869686f75dab01eacb45653aa1c4 download/ffmpeg.patch.gar
Added: trunk/gar-minimyth/script/lib/ffmpeg/files/ffmpeg-h264_vaapi_take_19.patch
==============================================================================
--- (empty file)
+++ trunk/gar-minimyth/script/lib/ffmpeg/files/ffmpeg-h264_vaapi_take_19.patch Mon Jun 22 13:15:29 2009
@@ -0,0 +1,386 @@
+diff -Naur ffmpeg-19228.29374-old/libavcodec/allcodecs.c ffmpeg-19228.29374-new/libavcodec/allcodecs.c
+--- ffmpeg-19228.29374-old/libavcodec/allcodecs.c 2009-06-19 17:11:23.000000000 -0700
++++ ffmpeg-19228.29374-new/libavcodec/allcodecs.c 2009-06-22 09:12:33.000000000 -0700
+@@ -55,6 +55,7 @@
+
+ /* hardware accelerators */
+ REGISTER_HWACCEL (H263_VAAPI, h263_vaapi);
++ REGISTER_HWACCEL (H264_VAAPI, h264_vaapi);
+ REGISTER_HWACCEL (MPEG2_VAAPI, mpeg2_vaapi);
+ REGISTER_HWACCEL (MPEG4_VAAPI, mpeg4_vaapi);
+ REGISTER_HWACCEL (VC1_VAAPI, vc1_vaapi);
+diff -Naur ffmpeg-19228.29374-old/libavcodec/Makefile ffmpeg-19228.29374-new/libavcodec/Makefile
+--- ffmpeg-19228.29374-old/libavcodec/Makefile 2009-06-19 17:11:23.000000000 -0700
++++ ffmpeg-19228.29374-new/libavcodec/Makefile 2009-06-22 09:13:28.000000000 -0700
+@@ -108,6 +108,7 @@
+ OBJS-$(CONFIG_H263P_ENCODER) += mpegvideo_enc.o motion_est.o ratecontrol.o h263.o mpeg12data.o mpegvideo.o error_resilience.o
+ OBJS-$(CONFIG_H264_DECODER) += h264.o h264idct.o h264pred.o h264_parser.o cabac.o mpegvideo.o error_resilience.o
+ OBJS-$(CONFIG_H264_ENCODER) += h264enc.o h264dspenc.o
++OBJS-$(CONFIG_H264_VAAPI_HWACCEL) += h264.o h264idct.o h264pred.o h264_parser.o cabac.o mpegvideo.o error_resilience.o vaapi.o vaapi_h264.o
+ OBJS-$(CONFIG_H264_VDPAU_DECODER) += h264.o h264idct.o h264pred.o h264_parser.o cabac.o mpegvideo.o error_resilience.o
+ OBJS-$(CONFIG_HUFFYUV_DECODER) += huffyuv.o
+ OBJS-$(CONFIG_HUFFYUV_ENCODER) += huffyuv.o
+diff -Naur ffmpeg-19228.29374-old/libavcodec/vaapi_h264.c ffmpeg-19228.29374-new/libavcodec/vaapi_h264.c
+--- ffmpeg-19228.29374-old/libavcodec/vaapi_h264.c 1969-12-31 16:00:00.000000000 -0800
++++ ffmpeg-19228.29374-new/libavcodec/vaapi_h264.c 2009-06-22 09:12:33.000000000 -0700
+@@ -0,0 +1,360 @@
++/*
++ * H.264 HW decode acceleration through VAAPI
++ *
++ * Copyright (C) 2008-2009 Splitted-Desktop Systems
++ *
++ * This file is part of FFmpeg.
++ *
++ * FFmpeg is free software; you can redistribute it and/or
++ * modify it under the terms of the GNU Lesser General Public
++ * License as published by the Free Software Foundation; either
++ * version 2.1 of the License, or (at your option) any later version.
++ *
++ * FFmpeg is distributed in the hope that it will be useful,
++ * but WITHOUT ANY WARRANTY; without even the implied warranty of
++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
++ * Lesser General Public License for more details.
++ *
++ * You should have received a copy of the GNU Lesser General Public
++ * License along with FFmpeg; if not, write to the Free Software
++ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
++ */
++
++#include "vaapi_internal.h"
++#include "h264.h"
++
++/** @file
++ * This file implements the glue code between FFmpeg's and VAAPI's structures.
++ */
++
++/** Reconstruct bitstream slice_type.
++ */
++static int get_slice_type(H264Context *h)
++{
++ switch (h->slice_type) {
++ case FF_P_TYPE: return 0;
++ case FF_B_TYPE: return 1;
++ case FF_I_TYPE: return 2;
++ case FF_SP_TYPE: return 3;
++ case FF_SI_TYPE: return 4;
++ default: return -1;
++ }
++}
++
++/** Initialize an empty VAAPI picture.
++ */
++static void init_vaapi_pic(VAPictureH264 *va_pic)
++{
++ va_pic->picture_id = 0xffffffff;
++ va_pic->flags = VA_PICTURE_H264_INVALID;
++ va_pic->TopFieldOrderCnt = 0;
++ va_pic->BottomFieldOrderCnt = 0;
++}
++
++/** Translate an FFmpeg Picture into its VAAPI form.
++ * @param[out] va_pic A pointer to VAAPI's own picture struct
++ * @param[in] pic A pointer to the FFmpeg picture struct to convert
++ * @param[in] pic_structure The picture field type (as defined in mpegvideo.h),
++ * supersede pic's field type if nonzero.
++ */
++static void fill_vaapi_pic(VAPictureH264 *va_pic,
++ Picture *pic,
++ int pic_structure)
++{
++ if (pic_structure == 0)
++ pic_structure = pic->reference;
++
++ va_pic->picture_id = ff_vaapi_get_surface(pic);
++ va_pic->frame_idx = pic->long_ref ? pic->pic_id : pic->frame_num;
++
++ va_pic->flags = 0;
++ if (pic_structure != PICT_FRAME)
++ va_pic->flags |= (pic_structure & PICT_TOP_FIELD) ? VA_PICTURE_H264_TOP_FIELD : VA_PICTURE_H264_BOTTOM_FIELD;
++ if (pic->reference)
++ va_pic->flags |= pic->long_ref ? VA_PICTURE_H264_LONG_TERM_REFERENCE : VA_PICTURE_H264_SHORT_TERM_REFERENCE;
++
++ va_pic->TopFieldOrderCnt = 0;
++ if ((pic_structure & PICT_TOP_FIELD) && pic->field_poc[0] != INT_MAX)
++ va_pic->TopFieldOrderCnt = pic->field_poc[0];
++
++ va_pic->BottomFieldOrderCnt = 0;
++ if ((pic_structure & PICT_BOTTOM_FIELD) && pic->field_poc[1] != INT_MAX)
++ va_pic->BottomFieldOrderCnt = pic->field_poc[1];
++}
++
++/** Decoded picture buffer.
++ */
++typedef struct DPB {
++ unsigned int size;
++ unsigned int max_size;
++ VAPictureH264 *pics;
++} DPB;
++
++/** Append picture to the decoded picture buffer, in a VAAPI form that
++ * merges the second field picture attributes with the first, if available.
++ * The decoded picture buffer's size must be large enough
++ * to receive the new VAAPI picture object.
++ *
++ * VAAPI decoded pictures merge both fields when present.
++ */
++static int append_to_vaapi_decoded_pic_buf(DPB *dpb, Picture *pic)
++{
++ unsigned int i;
++
++ if (dpb->size >= dpb->max_size)
++ return -1;
++
++ for (i = 0; i < dpb->size; i++) {
++ VAPictureH264 * const merged_va_pic = &dpb->pics[i];
++
++ /* Check if the reference picture was already referred to in a previous pass */
++ if (merged_va_pic->picture_id == ff_vaapi_get_surface(pic)) {
++ /* Don't overwrite our reference, use a temporary */
++ VAPictureH264 tmp_va_pic;
++ fill_vaapi_pic(&tmp_va_pic, pic, 0);
++
++ if ((tmp_va_pic.flags ^ merged_va_pic->flags) & (VA_PICTURE_H264_TOP_FIELD | VA_PICTURE_H264_BOTTOM_FIELD)) {
++ /* Merge second field. */
++ merged_va_pic->flags |= tmp_va_pic.flags & (VA_PICTURE_H264_TOP_FIELD | VA_PICTURE_H264_BOTTOM_FIELD);
++
++ if (tmp_va_pic.flags & VA_PICTURE_H264_TOP_FIELD) {
++ merged_va_pic->TopFieldOrderCnt = tmp_va_pic.TopFieldOrderCnt;
++ } else {
++ merged_va_pic->BottomFieldOrderCnt = tmp_va_pic.BottomFieldOrderCnt;
++ }
++ }
++ return 0;
++ }
++ }
++
++ fill_vaapi_pic(&dpb->pics[dpb->size++], pic, 0);
++ return 0;
++}
++
++/** Prepare VAAPI's reference frames array.
++ * Called when starting decoding a frame, this function fills the
++ * reference frame array with per-frame values.
++ * This is required because VAAPI needs interlaced field references
++ * to be merged in its reference array.
++ */
++static int prepare_vaapi_reference_frame_array(VAPictureParameterBufferH264 *pic_param,
++ H264Context *h)
++{
++ DPB dpb;
++ unsigned int i, list;
++
++ dpb.size = 0;
++ dpb.max_size = FF_ARRAY_ELEMS(pic_param->ReferenceFrames);
++ dpb.pics = pic_param->ReferenceFrames;
++ for (i = 0; i < dpb.max_size; i++)
++ init_vaapi_pic(&dpb.pics[i]);
++
++ /* For each referred picture in the current decoding context,
++ prepare a decoded picture buffer to hold it */
++ for (list = 0; list < h->list_count; list++)
++ for (i = 0; i < h->ref_count[list]; i++) {
++ Picture * const pic = &h->ref_list[list][i];
++ if (pic->reference && append_to_vaapi_decoded_pic_buf(&dpb, pic) < 0)
++ return -1;
++ }
++ return 0;
++}
++
++/** Fill VAAPI reference picture lists.
++ * Called for each field in slice, it fills the reference picture
++ * list for that field.
++ * @param[out] RefPicList VAAPI internal reference picture list
++ * @param[in] ref_list A pointer to the FFmpeg reference list
++ * @param[in] ref_count The number of reference pictures in ref_list
++ */
++static void fill_vaapi_reference_pic_list(VAPictureH264 RefPicList[32],
++ Picture * ref_list,
++ unsigned int ref_count)
++{
++ unsigned int i, n = 0;
++ for (i = 0; i < ref_count; i++)
++ if (ref_list[i].reference)
++ fill_vaapi_pic(&RefPicList[n++], &ref_list[i], 0);
++
++ for (; n < 32; n++)
++ init_vaapi_pic(&RefPicList[n]);
++}
++
++/** Initialize prediction weight table.
++ * VAAPI requires a plain prediction weight table as it does not infer
++ * any value.
++ * @param[in] h A pointer to the current H.264 context
++ * @param[in] list The reference frame list index to use
++ * @param[out] luma_weight_flag VAAPI plain luma weight flag
++ * @param[out] luma_weight VAAPI plain luma weight table
++ * @param[out] luma_offset VAAPI plain luma offset table
++ * @param[out] chroma_weight_flag VAAPI plain chroma weight flag
++ * @param[out] chroma_weight VAAPI plain chroma weight table
++ * @param[out] chroma_offset VAAPI plain chroma offset table
++ */
++static void fill_vaapi_plain_pred_weight_table(H264Context *h, int list,
++ unsigned char *luma_weight_flag,
++ short luma_weight[32],
++ short luma_offset[32],
++ unsigned char *chroma_weight_flag,
++ short chroma_weight[32][2],
++ short chroma_offset[32][2])
++{
++ unsigned int i, j;
++
++ *luma_weight_flag = h->luma_weight_flag[list];
++ *chroma_weight_flag = h->chroma_weight_flag[list];
++
++ for (i = 0; i < h->ref_count[list]; i++) {
++ /* VAAPI also wants the inferred (default) values, not
++ only what is available in the bitstream (7.4.3.2). */
++ if (h->luma_weight_flag[list]) {
++ luma_weight[i] = h->luma_weight[list][i];
++ luma_offset[i] = h->luma_offset[list][i];
++ } else {
++ luma_weight[i] = 1 << h->luma_log2_weight_denom;
++ luma_offset[i] = 0;
++ }
++ for (j = 0; j < 2; j++) {
++ if (h->chroma_weight_flag[list]) {
++ chroma_weight[i][j] = h->chroma_weight[list][i][j];
++ chroma_offset[i][j] = h->chroma_offset[list][i][j];
++ } else {
++ chroma_weight[i][j] = 1 << h->chroma_log2_weight_denom;
++ chroma_offset[i][j] = 0;
++ }
++ }
++ }
++}
++
++/** Initialize and start decoding a frame with VAAPI.
++ */
++static int start_frame(AVCodecContext * avctx,
++ av_unused const uint8_t * buffer,
++ av_unused uint32_t size)
++{
++ H264Context * const h = avctx->priv_data;
++ MpegEncContext * const s = &h->s;
++ struct vaapi_context * const vactx = avctx->hwaccel_context;
++ VAPictureParameterBufferH264 *pic_param;
++ VAIQMatrixBufferH264 *iq_matrix;
++
++ dprintf(avctx, "start_frame()\n");
++
++ vactx->slice_param_size = sizeof(VASliceParameterBufferH264);
++
++ /* Fill in VAPictureParameterBufferH264. */
++ pic_param = ff_vaapi_alloc_picture(vactx, sizeof(VAPictureParameterBufferH264));
++ if (!pic_param)
++ return -1;
++ fill_vaapi_pic(&pic_param->CurrPic, s->current_picture_ptr, s->picture_structure);
++ if (prepare_vaapi_reference_frame_array(pic_param, h) < 0)
++ return -1;
++ pic_param->picture_width_in_mbs_minus1 = s->mb_width - 1;
++ pic_param->picture_height_in_mbs_minus1 = s->mb_height - 1;
++ pic_param->bit_depth_luma_minus8 = h->sps.bit_depth_luma - 8;
++ pic_param->bit_depth_chroma_minus8 = h->sps.bit_depth_chroma - 8;
++ pic_param->num_ref_frames = h->sps.ref_frame_count;
++ pic_param->seq_fields.value = 0; /* reset all bits */
++ pic_param->seq_fields.bits.chroma_format_idc = h->sps.chroma_format_idc;
++ pic_param->seq_fields.bits.residual_colour_transform_flag = h->sps.residual_color_transform_flag; /* XXX: only for 4:4:4 high profile? */
++ pic_param->seq_fields.bits.gaps_in_frame_num_value_allowed_flag = h->sps.gaps_in_frame_num_allowed_flag;
++ pic_param->seq_fields.bits.frame_mbs_only_flag = h->sps.frame_mbs_only_flag;
++ pic_param->seq_fields.bits.mb_adaptive_frame_field_flag = h->sps.mb_aff;
++ pic_param->seq_fields.bits.direct_8x8_inference_flag = h->sps.direct_8x8_inference_flag;
++ pic_param->seq_fields.bits.MinLumaBiPredSize8x8 = h->sps.level_idc >= 31; /* A.3.3.2 */
++ pic_param->seq_fields.bits.log2_max_frame_num_minus4 = h->sps.log2_max_frame_num - 4;
++ pic_param->seq_fields.bits.pic_order_cnt_type = h->sps.poc_type;
++ pic_param->seq_fields.bits.log2_max_pic_order_cnt_lsb_minus4 = h->sps.log2_max_poc_lsb - 4;
++ pic_param->seq_fields.bits.delta_pic_order_always_zero_flag = h->sps.delta_pic_order_always_zero_flag;
++ pic_param->num_slice_groups_minus1 = h->pps.slice_group_count - 1;
++ pic_param->slice_group_map_type = h->pps.mb_slice_group_map_type;
++ pic_param->slice_group_change_rate_minus1 = 0; /* XXX: unimplemented in FFmpeg */
++ pic_param->pic_init_qp_minus26 = h->pps.init_qp - 26;
++ pic_param->pic_init_qs_minus26 = h->pps.init_qs - 26;
++ pic_param->chroma_qp_index_offset = h->pps.chroma_qp_index_offset[0];
++ pic_param->second_chroma_qp_index_offset = h->pps.chroma_qp_index_offset[1];
++ pic_param->pic_fields.value = 0; /* reset all bits */
++ pic_param->pic_fields.bits.entropy_coding_mode_flag = h->pps.cabac;
++ pic_param->pic_fields.bits.weighted_pred_flag = h->pps.weighted_pred;
++ pic_param->pic_fields.bits.weighted_bipred_idc = h->pps.weighted_bipred_idc;
++ pic_param->pic_fields.bits.transform_8x8_mode_flag = h->pps.transform_8x8_mode;
++ pic_param->pic_fields.bits.field_pic_flag = h->s.picture_structure != PICT_FRAME;
++ pic_param->pic_fields.bits.constrained_intra_pred_flag = h->pps.constrained_intra_pred;
++ pic_param->pic_fields.bits.pic_order_present_flag = h->pps.pic_order_present;
++ pic_param->pic_fields.bits.deblocking_filter_control_present_flag = h->pps.deblocking_filter_parameters_present;
++ pic_param->pic_fields.bits.redundant_pic_cnt_present_flag = h->pps.redundant_pic_cnt_present;
++ pic_param->pic_fields.bits.reference_pic_flag = h->nal_ref_idc != 0;
++ pic_param->frame_num = h->frame_num;
++
++ /* Fill in VAIQMatrixBufferH264. */
++ iq_matrix = ff_vaapi_alloc_iq_matrix(vactx, sizeof(VAIQMatrixBufferH264));
++ if (!iq_matrix)
++ return -1;
++ memcpy(iq_matrix->ScalingList4x4, h->pps.scaling_matrix4, sizeof(iq_matrix->ScalingList4x4));
++ memcpy(iq_matrix->ScalingList8x8, h->pps.scaling_matrix8, sizeof(iq_matrix->ScalingList8x8));
++ return 0;
++}
++
++/** Finish hardware decoding of the frame.
++ */
++static int end_frame(AVCodecContext *avctx)
++{
++ H264Context * const h = avctx->priv_data;
++
++ dprintf(avctx, "end_frame()\n");
++ return ff_vaapi_common_end_frame(&h->s);
++}
++
++/** Decode the given H.264 slice with VAAPI.
++ */
++static int decode_slice(AVCodecContext *avctx,
++ const uint8_t *buffer,
++ uint32_t size)
++{
++ H264Context * const h = avctx->priv_data;
++ MpegEncContext * const s = &h->s;
++ VASliceParameterBufferH264 *slice_param;
++
++ dprintf(avctx, "decode_slice(): buffer %p, size %d\n", buffer, size);
++
++ /* Fill in VASliceParameterBufferH264. */
++ slice_param = (VASliceParameterBufferH264 *)ff_vaapi_alloc_slice(avctx->hwaccel_context, buffer, size);
++ if (!slice_param)
++ return -1;
++ slice_param->slice_data_bit_offset = get_bits_count(&h->s.gb) + 8; /* bit buffer started beyond nal_unit_type */
++ slice_param->first_mb_in_slice = (s->mb_y >> FIELD_OR_MBAFF_PICTURE) * s->mb_width + s->mb_x;
++ slice_param->slice_type = get_slice_type(h);
++ slice_param->direct_spatial_mv_pred_flag = h->slice_type == FF_B_TYPE ? h->direct_spatial_mv_pred : 0;
++ slice_param->num_ref_idx_l0_active_minus1 = h->ref_count[0] - 1;
++ slice_param->num_ref_idx_l1_active_minus1 = h->ref_count[1] - 1;
++ slice_param->cabac_init_idc = h->cabac_init_idc;
++ slice_param->slice_qp_delta = s->qscale - h->pps.init_qp;
++ slice_param->disable_deblocking_filter_idc = h->deblocking_filter < 2 ? !h->deblocking_filter : h->deblocking_filter;
++ slice_param->slice_alpha_c0_offset_div2 = h->slice_alpha_c0_offset / 2;
++ slice_param->slice_beta_offset_div2 = h->slice_beta_offset / 2;
++ fill_vaapi_reference_pic_list(slice_param->RefPicList0, h->ref_list[0], h->list_count > 0 ? h->ref_count[0] : 0);
++ fill_vaapi_reference_pic_list(slice_param->RefPicList1, h->ref_list[1], h->list_count > 1 ? h->ref_count[1] : 0);
++ slice_param->luma_log2_weight_denom = h->luma_log2_weight_denom;
++ slice_param->chroma_log2_weight_denom = h->chroma_log2_weight_denom;
++
++ fill_vaapi_plain_pred_weight_table(h, 0,
++ &slice_param->luma_weight_l0_flag, slice_param->luma_weight_l0, slice_param->luma_offset_l0,
++ &slice_param->chroma_weight_l0_flag, slice_param->chroma_weight_l0, slice_param->chroma_offset_l0);
++ fill_vaapi_plain_pred_weight_table(h, 1,
++ &slice_param->luma_weight_l1_flag, slice_param->luma_weight_l1, slice_param->luma_offset_l1,
++ &slice_param->chroma_weight_l1_flag, slice_param->chroma_weight_l1, slice_param->chroma_offset_l1);
++ return 0;
++}
++
++AVHWAccel h264_vaapi_hwaccel = {
++ .name = "h264_vaapi",
++ .type = CODEC_TYPE_VIDEO,
++ .id = CODEC_ID_H264,
++ .pix_fmt = PIX_FMT_VAAPI_VLD,
++ .capabilities = 0,
++ .start_frame = start_frame,
++ .end_frame = end_frame,
++ .decode_slice = decode_slice,
++ .priv_data_size = 0,
++};