Steve Lhomme pushed to branch master at VideoLAN / VLC
Commits:
cfc2424d by Thomas Guillem at 2026-02-05T14:02:45+00:00
mediacodec: refactor, split CreateVideoContext()
No functional changes.
- - - - -
6618d1c8 by Thomas Guillem at 2026-02-05T14:02:45+00:00
mediacodec: return early
No functional changes.
- - - - -
6a763bf5 by Thomas Guillem at 2026-02-05T14:02:45+00:00
mediacodec: init android_video_context_t from CreateSurface()
No functional changes.
- - - - -
4a7a8f16 by Thomas Guillem at 2026-02-05T14:02:45+00:00
mediacodec: clean video only if video
- - - - -
ba91526d by Thomas Guillem at 2026-02-05T14:02:45+00:00
mediacodec: rename android_picture_ctx to asurface_picture_ctx
No functional changes.
- - - - -
802800f1 by Thomas Guillem at 2026-02-05T14:02:45+00:00
android: utils: explicit check for VLC_SUCCESS
- - - - -
b2c74101 by Thomas Guillem at 2026-02-05T14:02:45+00:00
android: utils: add AImageReader and ASurfaceControl APIs
Both APIs are available since API 29, but some new functions were added
in API 31. As I don't want to add a third behavior to test (legacy,
API 29, API 31), I force usage of API 31+ to enable the new AImageReader /
ASurfaceControl path.
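
Editorial note (not part of this push): the gating is done at runtime by
probing the NDK entry points with dlsym(), as LoadAimageReaderAPI() and
LoadASurfaceControlAPI() do further down. A minimal sketch of that kind of
check, probing two of the symbols the patch lists as API 31+:

#include <dlfcn.h>
#include <stdbool.h>

/* Hedged sketch, not the patch's code: the AImageReader/ASurfaceControl path
 * is only enabled when the API 31+ symbols resolve at runtime, avoiding a
 * third (API 29-only) code path.  Symbol names are taken from the patch. */
static bool has_api31_asurfacecontrol(void)
{
    void *lib = dlopen("libandroid.so", RTLD_NOW);
    if (lib == NULL)
        return false;
    bool ok = dlsym(lib, "ASurfaceTransaction_setScale") != NULL
           && dlsym(lib, "ASurfaceTransaction_setCrop") != NULL;
    dlclose(lib);
    return ok;
}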
- - - - -
f85300f6 by Thomas Guillem at 2026-02-05T14:02:45+00:00
android: utils: add android_picture_ctx
- - - - -
87df70aa by Thomas Guillem at 2026-02-05T14:02:45+00:00
android: display: add AImage support via ASurfaceControl
Will replace MediaCodec direct rendering once AImageReader is handled by
MediaCodec.
- Support HDR metadata (should be the same as the legacy vout); see the
  sketch after this list.
- Fully support vout crop/scale/size controls, without the need for
  external JNI calls to the libvlcjni AWindow class.
- Support picture re-display (this was not possible before).
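
Editorial sketch (not in the patch): the HDR metadata mentioned above can be
attached per frame through the asurface_control_api function pointers added
by this series. The values below are BT.2020/PQ-like placeholders; the
conversion from VLC's mastering/lighting metadata is not shown.

#include <android/surface_control.h>
#include <android/hdr_metadata.h>
#include "utils.h"   /* struct asurface_control_api, added by this series */

/* Hedged sketch: attach SMPTE ST 2086 and CTA-861.3 metadata to a frame via
 * an ASurfaceTransaction.  Placeholder values, not derived from the stream. */
static void SetHdrMetadataExample(struct asurface_control_api *asc_api,
                                  ASurfaceTransaction *txn, ASurfaceControl *sc)
{
    struct AHdrMetadata_smpte2086 smpte = {
        .displayPrimaryRed   = { .x = 0.708f,  .y = 0.292f },
        .displayPrimaryGreen = { .x = 0.170f,  .y = 0.797f },
        .displayPrimaryBlue  = { .x = 0.131f,  .y = 0.046f },
        .whitePoint          = { .x = 0.3127f, .y = 0.3290f },
        .maxLuminance = 1000.f, /* cd/m2 */
        .minLuminance = 0.005f, /* cd/m2 */
    };
    struct AHdrMetadata_cta861_3 cta = {
        .maxContentLightLevel      = 1000.f,
        .maxFrameAverageLightLevel = 400.f,
    };
    asc_api->ASurfaceTransaction.setHdrMetadata_smpte2086(txn, sc, &smpte);
    asc_api->ASurfaceTransaction.setHdrMetadata_cta861_3(txn, sc, &cta);
}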
- - - - -
952bc064 by Thomas Guillem at 2026-02-05T14:02:45+00:00
opengl: rename interop_android to interop_asurface
No functional changes.
- - - - -
bbeb4e88 by Thomas Guillem at 2026-02-05T14:02:45+00:00
opengl: android: add AImage support
- - - - -
19f621bd by Thomas Guillem at 2026-02-05T14:02:45+00:00
mediacodec: add NewPicture()
This will be called asynchronously, so it needs to handle timestamp_FifoGet.
- - - - -
2d49f204 by Thomas Guillem at 2026-02-05T14:02:45+00:00
mediacodec: create picture only on required paths
No functional changes, as all paths create a picture for now.
- - - - -
f924965c by Thomas Guillem at 2026-02-05T14:02:45+00:00
mediacodec: add mc_video_color_info
No functional changes.
- - - - -
f81ecd70 by Thomas Guillem at 2026-02-05T14:02:45+00:00
mediacodec: add and use vlc_to_mc_color_*()
No functional changes.
- - - - -
fa42f2ae by Thomas Guillem at 2026-02-05T14:02:45+00:00
mediacodec: forward color info to fmt_out if needed
fmt_out color should already be set by the demuxer or the packetizer.
I don't know whether the HDR metadata can be found by the decoder but
not by our demuxer or packetizer.
- - - - -
0aadefe7 by Thomas Guillem at 2026-02-05T14:02:45+00:00
mediacodec: plug to AImageReader
There are now 2 paths: the legacy path (before API 31) and the AImage
path (API 31+).
The new AImage path:
- No more in-flight pictures: a picture can outlive the decoder and be
  displayed more than once (the AImageReader needs to outlive all
  pictures, but this is handled naturally by the video context; see the
  sketch after this list).
- Can be rendered by OpenGL or directly (via ASurfaceControl), without
  hardcoding the choice before creating MediaCodec (unlike the legacy path).
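
Editorial sketch (condensed from the patch below): the lifetime rule in the
first bullet works because each queued picture holds a reference on the video
context, and the AImageReader is only deleted from the context destructor.

/* Each AImage picture holds one ref on its picture context and one on the
 * video context, so it can outlive the decoder and be re-displayed. */
static int QueueAImagePictureSketch(decoder_t *p_dec, picture_t *p_pic,
                                    struct android_picture_ctx *apctx)
{
    p_pic->context = &apctx->s;
    vlc_video_context_Hold(apctx->s.vctx);
    decoder_QueueVideo(p_dec, p_pic);
    return VLC_SUCCESS;
}

/* Video context destructor: runs only once every picture has released its
 * reference, so the AImageReader naturally outlives all in-flight pictures. */
static void CleanFromVideoContextSketch(void *priv)
{
    android_video_context_t *avctx = priv;
    avctx->air_api->AImageReader.delete(avctx->air);
}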
- - - - -
9 changed files:
- modules/codec/omxil/mediacodec.c
- modules/codec/omxil/mediacodec.h
- modules/codec/omxil/mediacodec_ndk.c
- modules/video_output/Makefile.am
- modules/video_output/android/display.c
- modules/video_output/android/utils.c
- modules/video_output/android/utils.h
- + modules/video_output/opengl/interop_aimage.c
- modules/video_output/opengl/interop_android.c →
modules/video_output/opengl/interop_asurface.c
Changes:
=====================================
modules/codec/omxil/mediacodec.c
=====================================
@@ -72,7 +72,7 @@ typedef void (*dec_on_flush_cb)(struct decoder_sys_t *);
typedef int (*dec_process_output_cb)(decoder_t *, mc_api_out *, picture_t **,
block_t **);
-struct android_picture_ctx
+struct asurface_picture_ctx
{
picture_context_t s;
atomic_uint refs;
@@ -128,7 +128,7 @@ typedef struct decoder_sys_t
struct
{
vlc_video_context *ctx;
- struct android_picture_ctx apic_ctxs[MAX_PIC];
+ struct asurface_picture_ctx apic_ctxs[MAX_PIC];
void *p_surface;
unsigned i_angle;
unsigned i_input_offset_x, i_input_offset_y;
@@ -140,6 +140,9 @@ typedef struct decoder_sys_t
timestamp_fifo_t *timestamp_fifo;
int i_mpeg_dar_num, i_mpeg_dar_den;
struct vlc_asurfacetexture *surfacetexture;
+ bool use_air;
+ vlc_cond_t air_cond;
+ unsigned air_waiting_count;
} video;
struct {
date_t i_end_date;
@@ -441,6 +444,122 @@ static int ParseExtra(decoder_t *p_dec)
return VLC_SUCCESS;
}
+static enum mc_media_format_color_range_t
+vlc_to_mc_color_range(video_color_range_t vlc_range)
+{
+ switch (vlc_range)
+ {
+ case COLOR_RANGE_FULL:
+ return MC_COLOR_RANGE_FULL;
+ case COLOR_RANGE_LIMITED:
+ return MC_COLOR_RANGE_LIMITED;
+ default:
+ return MC_COLOR_RANGE_UNSPECIFIED;
+ }
+}
+
+static video_color_range_t
+mc_to_vlc_color_range(enum mc_media_format_color_range_t mc_range)
+{
+ switch (mc_range)
+ {
+ case MC_COLOR_RANGE_FULL:
+ return COLOR_RANGE_FULL;
+ case MC_COLOR_RANGE_LIMITED:
+ return COLOR_RANGE_LIMITED;
+ default:
+ return COLOR_RANGE_UNDEF;
+ }
+}
+
+static enum mc_media_format_color_standard_t
+vlc_to_mc_color_standard(video_color_primaries_t vlc_primaries)
+{
+ switch (vlc_primaries)
+ {
+ case COLOR_PRIMARIES_BT601_525:
+ return MC_COLOR_STANDARD_BT601_NTSC;
+ case COLOR_PRIMARIES_BT601_625:
+ return MC_COLOR_STANDARD_BT601_PAL;
+ case COLOR_PRIMARIES_BT709:
+ return MC_COLOR_STANDARD_BT709;
+ case COLOR_PRIMARIES_BT2020:
+ return MC_COLOR_STANDARD_BT2020;
+ default:
+ return MC_COLOR_STANDARD_UNSPECIFIED;
+ }
+}
+
+static video_color_primaries_t
+mc_to_vlc_primaries(enum mc_media_format_color_standard_t mc_standard)
+{
+ switch (mc_standard)
+ {
+ case MC_COLOR_STANDARD_BT709:
+ return COLOR_PRIMARIES_BT709;
+ case MC_COLOR_STANDARD_BT601_PAL:
+ case MC_COLOR_STANDARD_BT601_NTSC:
+ return COLOR_PRIMARIES_BT601_525;
+ case MC_COLOR_STANDARD_BT2020:
+ return COLOR_PRIMARIES_BT2020;
+ default:
+ return COLOR_PRIMARIES_UNDEF;
+ }
+}
+
+static video_color_space_t
+mc_to_vlc_color_space(enum mc_media_format_color_standard_t mc_standard)
+{
+ switch (mc_standard)
+ {
+ case MC_COLOR_STANDARD_BT709:
+ return COLOR_SPACE_BT709;
+ case MC_COLOR_STANDARD_BT601_PAL:
+ case MC_COLOR_STANDARD_BT601_NTSC:
+ return COLOR_SPACE_BT601;
+ case MC_COLOR_STANDARD_BT2020:
+ return COLOR_SPACE_BT2020;
+ default:
+ return COLOR_SPACE_UNDEF;
+ }
+}
+
+static enum mc_media_format_color_transfer_t
+vlc_to_mc_color_transfer(video_transfer_func_t vlc_transfer)
+{
+ switch (vlc_transfer)
+ {
+ case TRANSFER_FUNC_LINEAR:
+ return MC_COLOR_TRANSFER_LINEAR;
+ case TRANSFER_FUNC_SMPTE_ST2084:
+ return MC_COLOR_TRANSFER_ST2084;
+ case TRANSFER_FUNC_HLG:
+ return MC_COLOR_TRANSFER_HLG;
+ case TRANSFER_FUNC_BT709:
+ return MC_COLOR_TRANSFER_SDR_VIDEO;
+ default:
+ return MC_COLOR_TRANSFER_UNSPECIFIED;
+ }
+}
+
+static video_transfer_func_t
+mc_to_vlc_color_transfer(enum mc_media_format_color_transfer_t mc_transfer)
+{
+ switch (mc_transfer)
+ {
+ case MC_COLOR_TRANSFER_LINEAR:
+ return TRANSFER_FUNC_LINEAR;
+ case MC_COLOR_TRANSFER_SDR_VIDEO:
+ return TRANSFER_FUNC_BT709;
+ case MC_COLOR_TRANSFER_ST2084:
+ return TRANSFER_FUNC_SMPTE_ST2084;
+ case MC_COLOR_TRANSFER_HLG:
+ return TRANSFER_FUNC_HLG;
+ default:
+ return TRANSFER_FUNC_UNDEF;
+ }
+}
+
/*****************************************************************************
* StartMediaCodec: Create the mediacodec instance
*****************************************************************************/
@@ -457,56 +576,10 @@ static int StartMediaCodec(decoder_t *p_dec)
args.video.p_surface = p_sys->video.p_surface;
- switch (p_dec->fmt_out.video.color_range)
- {
- case COLOR_RANGE_FULL:
- args.video.color_range = MC_COLOR_RANGE_FULL;
- break;
- case COLOR_RANGE_LIMITED:
- args.video.color_range = MC_COLOR_RANGE_LIMITED;
- break;
- default:
- args.video.color_range = MC_COLOR_RANGE_UNSPECIFIED;
- break;
- }
-
- switch (p_dec->fmt_out.video.primaries)
- {
- case COLOR_PRIMARIES_BT601_525:
- args.video.color_standard = MC_COLOR_STANDARD_BT601_NTSC;
- break;
- case COLOR_PRIMARIES_BT601_625:
- args.video.color_standard = MC_COLOR_STANDARD_BT601_PAL;
- break;
- case COLOR_PRIMARIES_BT709:
- args.video.color_standard = MC_COLOR_STANDARD_BT709;
- break;
- case COLOR_PRIMARIES_BT2020:
- args.video.color_standard = MC_COLOR_STANDARD_BT2020;
- break;
- default:
- args.video.color_standard = MC_COLOR_STANDARD_UNSPECIFIED;
- break;
- }
+ args.video.color.range =
vlc_to_mc_color_range(p_dec->fmt_out.video.color_range);
+ args.video.color.standard =
vlc_to_mc_color_standard(p_dec->fmt_out.video.primaries);
+ args.video.color.transfer =
vlc_to_mc_color_transfer(p_dec->fmt_out.video.transfer);
- switch (p_dec->fmt_out.video.transfer)
- {
- case TRANSFER_FUNC_LINEAR:
- args.video.color_transfer = MC_COLOR_TRANSFER_LINEAR;
- break;
- case TRANSFER_FUNC_SMPTE_ST2084:
- args.video.color_transfer = MC_COLOR_TRANSFER_ST2084;
- break;
- case TRANSFER_FUNC_HLG:
- args.video.color_transfer = MC_COLOR_TRANSFER_HLG;
- break;
- case TRANSFER_FUNC_BT709:
- args.video.color_transfer = MC_COLOR_TRANSFER_SDR_VIDEO;
- break;
- default:
- args.video.color_transfer = MC_COLOR_TRANSFER_UNSPECIFIED;
- break;
- }
args.video.b_tunneled_playback = args.video.p_surface ?
var_InheritBool(p_dec, CFG_PREFIX "tunneled-playback") : false;
@@ -544,7 +617,7 @@ static void StopMediaCodec(decoder_sys_t *p_sys)
p_sys->api.stop(&p_sys->api);
}
-static bool AndroidPictureContextRelease(struct android_picture_ctx *apctx,
+static bool AndroidPictureContextRelease(struct asurface_picture_ctx *apctx,
bool render)
{
int index = atomic_exchange(&apctx->index, -1);
@@ -562,8 +635,8 @@ static bool AndroidPictureContextRelease(struct
android_picture_ctx *apctx,
static bool PictureContextRenderPic(struct picture_context_t *ctx)
{
- struct android_picture_ctx *apctx =
- container_of(ctx, struct android_picture_ctx, s);
+ struct asurface_picture_ctx *apctx =
+ container_of(ctx, struct asurface_picture_ctx, s);
return AndroidPictureContextRelease(apctx, true);
}
@@ -571,8 +644,8 @@ static bool PictureContextRenderPic(struct
picture_context_t *ctx)
static bool PictureContextRenderPicTs(struct picture_context_t *ctx,
vlc_tick_t ts)
{
- struct android_picture_ctx *apctx =
- container_of(ctx, struct android_picture_ctx, s);
+ struct asurface_picture_ctx *apctx =
+ container_of(ctx, struct asurface_picture_ctx, s);
int index = atomic_exchange(&apctx->index, -1);
if (index >= 0)
@@ -597,19 +670,19 @@ PictureContextGetTexture(picture_context_t *context)
return p_sys->video.surfacetexture;
}
-static void PictureContextDestroy(struct picture_context_t *ctx)
+static void ASurfacePictureContextDestroy(struct picture_context_t *ctx)
{
- struct android_picture_ctx *apctx =
- container_of(ctx, struct android_picture_ctx, s);
+ struct asurface_picture_ctx *apctx =
+ container_of(ctx, struct asurface_picture_ctx, s);
if (atomic_fetch_sub_explicit(&apctx->refs, 1, memory_order_acq_rel) == 1)
AndroidPictureContextRelease(apctx, false);
}
-static struct picture_context_t *PictureContextCopy(struct picture_context_t
*ctx)
+static struct picture_context_t *ASurfacePictureContextCopy(struct
picture_context_t *ctx)
{
- struct android_picture_ctx *apctx =
- container_of(ctx, struct android_picture_ctx, s);
+ struct asurface_picture_ctx *apctx =
+ container_of(ctx, struct asurface_picture_ctx, s);
atomic_fetch_add_explicit(&apctx->refs, 1, memory_order_relaxed);
vlc_video_context_Hold(ctx->vctx);
@@ -617,11 +690,13 @@ static struct picture_context_t
*PictureContextCopy(struct picture_context_t *ct
}
static void AbortDecoderLocked(decoder_sys_t *p_dec);
-static void CleanFromVideoContext(void *priv)
+static void CleanFromLegacyVideoContext(void *priv)
{
android_video_context_t *avctx = priv;
decoder_sys_t *p_sys = avctx->dec_opaque;
+ assert(!p_sys->video.use_air);
+
vlc_mutex_lock(&p_sys->lock);
/* Unblock output thread waiting in dequeue_out */
DecodeFlushLocked(p_sys);
@@ -636,13 +711,13 @@ static void CleanFromVideoContext(void *priv)
static void ReleaseAllPictureContexts(decoder_sys_t *p_sys)
{
- /* No picture context if no direct rendering. */
- if (p_sys->video.ctx == NULL)
+ /* No picture context if no direct rendering or using air. */
+ if (p_sys->video.ctx == NULL || p_sys->video.use_air)
return;
for (size_t i = 0; i < ARRAY_SIZE(p_sys->video.apic_ctxs); ++i)
{
- struct android_picture_ctx *apctx = &p_sys->video.apic_ctxs[i];
+ struct asurface_picture_ctx *apctx = &p_sys->video.apic_ctxs[i];
/* Don't decrement apctx->refs, the picture_context should stay valid
* even if the underlying buffer is released since it might still be
@@ -651,7 +726,7 @@ static void ReleaseAllPictureContexts(decoder_sys_t *p_sys)
}
}
-static struct android_picture_ctx *
+static struct asurface_picture_ctx *
GetPictureContext(decoder_t *p_dec, unsigned index)
{
decoder_sys_t *p_sys = p_dec->p_sys;
@@ -661,7 +736,7 @@ GetPictureContext(decoder_t *p_dec, unsigned index)
{
for (size_t i = 0; i < ARRAY_SIZE(p_sys->video.apic_ctxs); ++i)
{
- struct android_picture_ctx *apctx = &p_sys->video.apic_ctxs[i];
+ struct asurface_picture_ctx *apctx = &p_sys->video.apic_ctxs[i];
/* Find an available picture context (ie. refs == 0) */
unsigned expected_refs = 0;
if (atomic_compare_exchange_strong(&apctx->refs, &expected_refs,
1))
@@ -674,7 +749,7 @@ GetPictureContext(decoder_t *p_dec, unsigned index)
/* Unlikely: Restore the ref count and try a next one, since
* this picture context is being released. Cf.
- * PictureContextDestroy(), this function first decrement the
+ * ASurfacePictureContextDestroy(), this function first
decrement the
* ref count before releasing the index. */
atomic_store(&apctx->refs, 0);
}
@@ -689,56 +764,192 @@ GetPictureContext(decoder_t *p_dec, unsigned index)
}
}
+static picture_t*
+NewPicture(decoder_t *p_dec, vlc_tick_t ts)
+{
+ decoder_sys_t *p_sys = p_dec->p_sys;
+
+ /* If the oldest input block had no PTS, the timestamp of
+ * the frame returned by MediaCodec might be wrong so we
+ * overwrite it with the corresponding dts. Call FifoGet
+ * first in order to avoid a gap if buffers are released
+ * due to an invalid format or a preroll */
+ vlc_tick_t forced_ts = timestamp_FifoGet(p_sys->video.timestamp_fifo);
+
+ picture_t *p_pic = decoder_NewPicture(p_dec);
+ if (p_pic == NULL)
+ return NULL;
+
+ if (forced_ts == VLC_TICK_INVALID)
+ p_pic->date = ts;
+ else
+ p_pic->date = forced_ts;
+ p_pic->b_progressive = true;
+ return p_pic;
+}
+
+static void CleanFromVideoContext(void *priv)
+{
+ android_video_context_t *avctx = priv;
+ assert(avctx->dec_opaque == NULL);
+ assert(avctx->air != NULL);
+ avctx->air_api->AImageReader.delete(avctx->air);
+}
+
+static void
+android_picture_ctx_destroy(picture_context_t *context)
+{
+ struct android_picture_ctx *apctx = container_of(context, struct
android_picture_ctx, s);
+
+ if (!vlc_atomic_rc_dec(&apctx->rc))
+ return;
+
+ android_video_context_t *avctx =
+ vlc_video_context_GetPrivate(apctx->s.vctx, VLC_VIDEO_CONTEXT_AWINDOW);
+
+ if (apctx->fence_fd >= 0)
+ close(apctx->fence_fd);
+
+ avctx->air_api->AImage.deleteAsync(apctx->image, apctx->read_fence_fd);
+
+ free(apctx);
+}
+
+static picture_context_t *
+android_picture_ctx_copy(picture_context_t *src)
+{
+ struct android_picture_ctx *src_ctx = container_of(src, struct
android_picture_ctx, s);
+ vlc_atomic_rc_inc(&src_ctx->rc);
+ return &src_ctx->s;
+}
+
static int
-CreateVideoContext(decoder_t *p_dec)
+QueueAImagePicture(decoder_t *p_dec, android_video_context_t *avctx,
+ AImage *image, int fence_fd)
{
decoder_sys_t *p_sys = p_dec->p_sys;
- vlc_decoder_device *dec_dev = decoder_GetDecoderDevice(p_dec);
- if (!dec_dev || dec_dev->type != VLC_DECODER_DEVICE_AWINDOW)
+ /* Same value as p_out->buf.i_ts (propagated from the MediaCodec input to
+ * the output, then to AImageReader) */
+ int64_t timestamp;
+ int32_t status = avctx->air_api->AImage.getTimestamp(image, ×tamp);
+
+ vlc_tick_t date = status == 0 ? VLC_TICK_FROM_NS(timestamp) : 0;
+
+ picture_t *p_pic = NewPicture(p_dec, date);
+ if (p_pic == NULL)
+ goto error;
+
+ if (p_pic->date == VLC_TICK_INVALID)
{
- msg_Err(p_dec, "Could not find an AWINDOW decoder device");
- return VLC_EGENERIC;
+ msg_Warn(p_dec, "invalid ts from AImageReader");
+ goto error;
}
- assert(dec_dev->opaque);
- AWindowHandler *awh = dec_dev->opaque;
+ struct android_picture_ctx *apctx = malloc(sizeof(*apctx));
+ if (apctx == NULL)
+ goto error;
- /* Force OpenGL interop (via AWindow_SurfaceTexture) if there is a
- * projection or an orientation to handle, if the Surface owner is not able
- * to modify its layout. */
+ apctx->image = image;
+ apctx->fence_fd = fence_fd;
+ apctx->read_fence_fd = -1;
+ apctx->sc = NULL;
+ vlc_atomic_rc_init(&apctx->rc);
- p_sys->video.surfacetexture = NULL;
- int awh_caps = AWindowHandler_getCapabilities(awh);
- bool can_set_video_layout = awh_caps & AWH_CAPS_SET_VIDEO_LAYOUT;
- bool can_use_surfacetexture = awh_caps & AWH_CAPS_SURFACE_VIEW;
+ apctx->s = (picture_context_t) {
+ android_picture_ctx_destroy, android_picture_ctx_copy,
p_sys->video.ctx,
+ };
+ p_pic->context = &apctx->s;
+ vlc_video_context_Hold(apctx->s.vctx);
+ decoder_QueueVideo(p_dec, p_pic);
+
+ return VLC_SUCCESS;
- bool use_surfacetexture = can_use_surfacetexture
- && (p_dec->fmt_out.video.projection_mode != PROJECTION_MODE_RECTANGULAR
- || (!p_sys->api.b_support_rotation && p_dec->fmt_out.video.orientation
!= ORIENT_NORMAL)
- || !can_set_video_layout);
+error:
+ if (p_pic != NULL)
+ picture_Release(p_pic);
+ if (fence_fd > 0)
+ close(fence_fd);
+ avctx->air_api->AImage.deleteAsync(image, -1);
+ return VLC_EGENERIC;
+}
- if (!use_surfacetexture)
+static void
+AImageReader_OnImageAvailable(void *context, AImageReader *reader)
+{
+ decoder_t *p_dec = context;
+ decoder_sys_t *p_sys = p_dec->p_sys;
+ assert(p_sys->video.use_air);
+ android_video_context_t *avctx =
+ vlc_video_context_GetPrivate(p_sys->video.ctx,
VLC_VIDEO_CONTEXT_AWINDOW);
+ assert(avctx->air != NULL);
+ assert(reader == avctx->air); (void) reader;
+
+ AImage *image = NULL;
+ int fence_fd = -1;
+ int32_t status =
+ avctx->air_api->AImageReader.acquireNextImageAsync(avctx->air, &image,
+ &fence_fd);
+ if (status != 0)
{
- p_sys->video.p_surface = AWindowHandler_getANativeWindow(awh,
AWindow_Video);
- assert (p_sys->video.p_surface);
- if (!p_sys->video.p_surface)
- {
- msg_Err(p_dec, "Could not find a valid ANativeWindow");
- goto error;
- }
+ msg_Warn(p_dec, "AImageReader_acquireNextImageAsync failed: %d",
+ status);
+ return;
}
+ vlc_mutex_lock(&p_sys->lock);
+ assert(p_sys->video.air_waiting_count > 0);
+ p_sys->video.air_waiting_count--;
+ vlc_cond_signal(&p_sys->video.air_cond);
+ QueueAImagePicture(p_dec, avctx, image, fence_fd);
+ vlc_mutex_unlock(&p_sys->lock);
+}
+
+static int
+CreateSurfaceFromAImageReader(decoder_t *p_dec, vlc_decoder_device *dec_dev,
+ AWindowHandler *awh, bool need_gpu)
+{
+ decoder_sys_t *p_sys = p_dec->p_sys;
+ struct aimage_reader_api *air_api = AWindowHandler_getAImageReaderApi(awh);
+ struct asurface_control_api *asc_api =
AWindowHandler_getASurfaceControlApi(awh);
+ if (air_api == NULL || asc_api == NULL)
+ return VLC_EGENERIC;
+
+ int32_t width = 1, height = 1; /* Ignored by MediaCodec */
+ int32_t format = AIMAGE_FORMAT_PRIVATE;
+ uint64_t usage = 0;
+ int32_t max_images = 32;
+
+ if (need_gpu)
+ usage |= AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;
+
+ AImageReader *reader;
+ int32_t status = air_api->AImageReader.newWithUsage(width, height, format,
+ usage, max_images,
+ &reader);
- if (use_surfacetexture || p_sys->video.p_surface == NULL)
+ if (status != 0)
{
- p_sys->video.surfacetexture = vlc_asurfacetexture_New(awh, false);
- assert(p_sys->video.surfacetexture);
- if (p_sys->video.surfacetexture == NULL)
- goto error;
- p_sys->video.p_surface = p_sys->video.surfacetexture->window;
- assert(p_sys->video.p_surface);
+ msg_Warn(p_dec, "AImageReader_newWithUsage failed: %d", status);
+ return VLC_EGENERIC;
+ }
+
+ ANativeWindow *window;
+ status = air_api->AImageReader.getWindow(reader, &window);
+
+ if (status != 0)
+ {
+ air_api->AImageReader.delete(reader);
+ msg_Warn(p_dec, "AImageReader_getWindow failed: %d", status);
+ return VLC_EGENERIC;
}
+ struct AImageReader_ImageListener listener = {
+ .context = p_dec,
+ .onImageAvailable = AImageReader_OnImageAvailable,
+ };
+
+ air_api->AImageReader.setImageListener(reader, &listener);
+
static const struct vlc_video_context_operations ops =
{
.destroy = CleanFromVideoContext,
@@ -746,14 +957,74 @@ CreateVideoContext(decoder_t *p_dec)
p_sys->video.ctx =
vlc_video_context_Create(dec_dev, VLC_VIDEO_CONTEXT_AWINDOW,
sizeof(android_video_context_t), &ops);
- vlc_decoder_device_Release(dec_dev);
+
+ if (!p_sys->video.ctx)
+ {
+ air_api->AImageReader.delete(reader);
+ return VLC_EGENERIC;
+ }
+
+ android_video_context_t *avctx =
+ vlc_video_context_GetPrivate(p_sys->video.ctx,
VLC_VIDEO_CONTEXT_AWINDOW);
+ avctx->dec_opaque = NULL;
+ avctx->air_api = air_api;
+ avctx->asc_api = asc_api;
+ avctx->air = reader;
+ avctx->render = NULL;
+ avctx->render_ts = NULL;
+ avctx->get_texture = NULL;
+ avctx->texture = NULL;
+ p_sys->video.p_surface = window;
+ p_sys->video.use_air = true;
+ p_sys->video.air_waiting_count = 0;
+ assert(window != NULL);
+ return VLC_SUCCESS;
+}
+
+static int
+CreateSurface(decoder_t *p_dec, vlc_decoder_device *dec_dev,
+ AWindowHandler *awh, bool use_surfacetexture)
+{
+ decoder_sys_t *p_sys = p_dec->p_sys;
+
+ static const struct vlc_video_context_operations ops =
+ {
+ .destroy = CleanFromLegacyVideoContext,
+ };
+ p_sys->video.ctx =
+ vlc_video_context_Create(dec_dev, VLC_VIDEO_CONTEXT_AWINDOW,
+ sizeof(android_video_context_t), &ops);
if (!p_sys->video.ctx)
return VLC_EGENERIC;
android_video_context_t *avctx =
vlc_video_context_GetPrivate(p_sys->video.ctx,
VLC_VIDEO_CONTEXT_AWINDOW);
+
+ if (!use_surfacetexture)
+ {
+ p_sys->video.p_surface = AWindowHandler_getANativeWindow(awh,
AWindow_Video);
+ assert (p_sys->video.p_surface);
+ if (!p_sys->video.p_surface)
+ {
+ msg_Err(p_dec, "Could not find a valid ANativeWindow");
+ goto error;
+ }
+ goto end;
+ }
+
+ p_sys->video.surfacetexture = vlc_asurfacetexture_New(awh, false);
+ assert(p_sys->video.surfacetexture);
+ if (p_sys->video.surfacetexture == NULL)
+ goto error;
+
+ p_sys->video.p_surface = p_sys->video.surfacetexture->window;
+ assert(p_sys->video.p_surface);
+
+end:
avctx->dec_opaque = p_dec->p_sys;
+ avctx->air_api = NULL;
+ avctx->air = NULL;
avctx->render = PictureContextRenderPic;
avctx->render_ts = p_sys->api.release_out_ts ? PictureContextRenderPicTs :
NULL;
avctx->get_texture = p_sys->video.surfacetexture ?
PictureContextGetTexture : NULL;
@@ -761,10 +1032,10 @@ CreateVideoContext(decoder_t *p_dec)
for (size_t i = 0; i < ARRAY_SIZE(p_sys->video.apic_ctxs); ++i)
{
- struct android_picture_ctx *apctx = &p_sys->video.apic_ctxs[i];
+ struct asurface_picture_ctx *apctx = &p_sys->video.apic_ctxs[i];
apctx->s = (picture_context_t) {
- PictureContextDestroy, PictureContextCopy,
+ ASurfacePictureContextDestroy, ASurfacePictureContextCopy,
p_sys->video.ctx,
};
atomic_init(&apctx->index, -1);
@@ -774,10 +1045,55 @@ CreateVideoContext(decoder_t *p_dec)
return VLC_SUCCESS;
error:
- vlc_decoder_device_Release(dec_dev);
+ vlc_video_context_Release(p_sys->video.ctx);
+ p_sys->video.ctx = NULL;
return VLC_EGENERIC;
}
+static int
+CreateVideoContext(decoder_t *p_dec)
+{
+ decoder_sys_t *p_sys = p_dec->p_sys;
+
+ vlc_decoder_device *dec_dev = decoder_GetDecoderDevice(p_dec);
+ if (!dec_dev || dec_dev->type != VLC_DECODER_DEVICE_AWINDOW)
+ {
+ msg_Err(p_dec, "Could not find an AWINDOW decoder device");
+ return VLC_EGENERIC;
+ }
+
+ assert(dec_dev->opaque);
+ AWindowHandler *awh = dec_dev->opaque;
+
+ /* Force OpenGL interop (via AWindow_SurfaceTexture) if there is a
+ * projection or an orientation to handle, if the Surface owner is not able
+ * to modify its layout. */
+ p_sys->video.surfacetexture = NULL;
+ int awh_caps = AWindowHandler_getCapabilities(awh);
+ bool can_set_video_layout = awh_caps & AWH_CAPS_SET_VIDEO_LAYOUT;
+ bool can_use_surfacetexture = awh_caps & AWH_CAPS_SURFACE_VIEW;
+
+ bool need_gpu_transform =
+ p_dec->fmt_out.video.projection_mode != PROJECTION_MODE_RECTANGULAR
+ || (!p_sys->api.b_support_rotation && p_dec->fmt_out.video.orientation
!= ORIENT_NORMAL)
+ || !can_set_video_layout;
+
+ bool use_surfacetexture = need_gpu_transform && can_use_surfacetexture;
+
+ int ret = CreateSurfaceFromAImageReader(p_dec, dec_dev, awh,
+ need_gpu_transform);
+ if (ret == VLC_SUCCESS)
+ {
+ vlc_decoder_device_Release(dec_dev);
+ return VLC_SUCCESS;
+ }
+
+ ret = CreateSurface(p_dec, dec_dev, awh, use_surfacetexture);
+ vlc_decoder_device_Release(dec_dev);
+
+ return ret;
+}
+
static void CleanInputVideo(decoder_t *p_dec)
{
decoder_sys_t *p_sys = p_dec->p_sys;
@@ -940,6 +1256,10 @@ static int OpenDecoder(vlc_object_t *p_this,
pf_MediaCodecApi_init pf_init)
if (p_dec->fmt_in->i_cat == VIDEO_ES)
{
+ vlc_cond_init(&p_sys->video.air_cond);
+ p_sys->video.use_air = false;
+ p_sys->video.air_waiting_count = 0;
+
switch (p_dec->fmt_in->i_codec)
{
case VLC_CODEC_H264:
@@ -1085,6 +1405,8 @@ static void AbortDecoderLocked(decoder_sys_t *p_sys)
{
p_sys->b_aborted = true;
vlc_cond_broadcast(&p_sys->cond);
+ if (p_sys->cat == VIDEO_ES && p_sys->video.use_air)
+ vlc_cond_signal(&p_sys->video.air_cond);
}
}
@@ -1095,11 +1417,18 @@ static void CleanDecoder(decoder_sys_t *p_sys)
CSDFree(p_sys);
p_sys->api.clean(&p_sys->api);
- if (p_sys->video.surfacetexture)
- vlc_asurfacetexture_Delete(p_sys->video.surfacetexture);
+ switch (p_sys->cat)
+ {
+ case VIDEO_ES:
+ if (p_sys->video.surfacetexture)
+ vlc_asurfacetexture_Delete(p_sys->video.surfacetexture);
- if (p_sys->video.timestamp_fifo)
- timestamp_FifoRelease(p_sys->video.timestamp_fifo);
+ if (p_sys->video.timestamp_fifo)
+ timestamp_FifoRelease(p_sys->video.timestamp_fifo);
+ break;
+ default:
+ break;
+ }
free(p_sys);
}
@@ -1116,15 +1445,22 @@ static void CloseDecoder(vlc_object_t *p_this)
p_sys->b_decoder_dead = true;
vlc_mutex_unlock(&p_sys->lock);
- if (p_sys->video.ctx)
+ if (p_sys->cat == VIDEO_ES && p_sys->video.ctx)
{
- /* If we have a video context, we're using Surface with inflight
- * pictures, which might already have been queued, and flushing
- * them would make them invalid, breaking mechanism like waiting
- * on OnFrameAvailableListener.*/
+ if (!p_sys->video.use_air)
+ {
+ vlc_video_context_Release(p_sys->video.ctx);
+ /* If we have a video context, we're using Surface with inflight
+ * pictures, which might already have been queued, and flushing
+ * them would make them invalid, breaking mechanism like waiting
+ * on OnFrameAvailableListener.*/
+ CleanInputVideo(p_dec);
+ return;
+ }
+ android_video_context_t *avctx =
+ vlc_video_context_GetPrivate(p_sys->video.ctx,
VLC_VIDEO_CONTEXT_AWINDOW);
+ avctx->air_api->AImageReader.setImageListener(avctx->air, NULL);
vlc_video_context_Release(p_sys->video.ctx);
- CleanInputVideo(p_dec);
- return;
}
vlc_mutex_lock(&p_sys->lock);
@@ -1149,51 +1485,64 @@ static int Video_ProcessOutput(decoder_t *p_dec,
mc_api_out *p_out,
if (p_out->type == MC_OUT_TYPE_BUF)
{
- picture_t *p_pic = NULL;
-
- /* If the oldest input block had no PTS, the timestamp of
- * the frame returned by MediaCodec might be wrong so we
- * overwrite it with the corresponding dts. Call FifoGet
- * first in order to avoid a gap if buffers are released
- * due to an invalid format or a preroll */
- int64_t forced_ts = timestamp_FifoGet(p_sys->video.timestamp_fifo);
-
if (!p_sys->b_has_format) {
msg_Warn(p_dec, "Buffers returned before output format is set,
dropping frame");
+ timestamp_FifoGet(p_sys->video.timestamp_fifo); /* Remove
timestamp */
return p_sys->api.release_out(&p_sys->api, p_out->buf.i_index,
false);
}
if (p_out->buf.i_ts <= p_sys->i_preroll_end)
+ {
+ timestamp_FifoGet(p_sys->video.timestamp_fifo); /* Remove
timestamp */
return p_sys->api.release_out(&p_sys->api, p_out->buf.i_index,
false);
+ }
if (!p_sys->api.b_direct_rendering && p_out->buf.p_ptr == NULL)
{
/* This can happen when receiving an EOS buffer */
msg_Warn(p_dec, "Invalid buffer, dropping frame");
+ timestamp_FifoGet(p_sys->video.timestamp_fifo); /* Remove
timestamp */
return p_sys->api.release_out(&p_sys->api, p_out->buf.i_index,
false);
}
- p_pic = decoder_NewPicture(p_dec);
- if (!p_pic) {
- msg_Warn(p_dec, "NewPicture failed");
- return p_sys->api.release_out(&p_sys->api, p_out->buf.i_index,
false);
- }
-
- if (forced_ts == VLC_TICK_INVALID)
- p_pic->date = p_out->buf.i_ts;
- else
- p_pic->date = forced_ts;
- p_pic->b_progressive = true;
-
if (p_sys->api.b_direct_rendering)
{
- struct android_picture_ctx *apctx =
- GetPictureContext(p_dec,p_out->buf.i_index);
- assert(apctx);
- assert(apctx->s.vctx);
- vlc_video_context_Hold(apctx->s.vctx);
- p_pic->context = &apctx->s;
+ if (p_sys->video.use_air)
+ {
+ /* We need to wait for AImageReader. Otherwise, we might
+ * overwrite a picture (even when using acquireNextImageAsync)
*/
+ while (p_sys->video.air_waiting_count != 0 &&
!p_sys->b_aborted)
+ vlc_cond_wait(&p_sys->video.air_cond, &p_sys->lock);
+ p_sys->video.air_waiting_count++;
+
+ p_sys->api.release_out(&p_sys->api, p_out->buf.i_index,
+ !p_sys->b_aborted);
+ /* picture will be wrapped from AImageReader callback */
+ assert(*pp_out_pic == NULL);
+ }
+ else
+ {
+ picture_t *p_pic = NewPicture(p_dec, p_out->buf.i_ts);
+ if (!p_pic) {
+ msg_Warn(p_dec, "NewPicture failed");
+ return p_sys->api.release_out(&p_sys->api,
p_out->buf.i_index, false);
+ }
+
+ struct asurface_picture_ctx *apctx =
+ GetPictureContext(p_dec,p_out->buf.i_index);
+ assert(apctx);
+ assert(apctx->s.vctx);
+ vlc_video_context_Hold(apctx->s.vctx);
+ p_pic->context = &apctx->s;
+ *pp_out_pic = p_pic;
+ }
} else {
+ picture_t *p_pic = NewPicture(p_dec, p_out->buf.i_ts);
+ if (!p_pic) {
+ msg_Warn(p_dec, "NewPicture failed");
+ return p_sys->api.release_out(&p_sys->api, p_out->buf.i_index,
false);
+ }
+
unsigned int chroma_div;
GetVlcChromaSizes(p_dec->fmt_out.i_codec,
p_dec->fmt_out.video.i_width,
@@ -1208,9 +1557,8 @@ static int Video_ProcessOutput(decoder_t *p_dec,
mc_api_out *p_out,
picture_Release(p_pic);
return -1;
}
+ *pp_out_pic = p_pic;
}
- assert(!(*pp_out_pic));
- *pp_out_pic = p_pic;
return 1;
} else {
assert(p_out->type == MC_OUT_TYPE_CONF);
@@ -1236,6 +1584,23 @@ static int Video_ProcessOutput(decoder_t *p_dec,
mc_api_out *p_out,
p_out->conf.video.crop_left, p_out->conf.video.crop_top,
p_out->conf.video.crop_right, p_out->conf.video.crop_bottom);
+ /* Only use MediaCodec output as fallback when container/input is
unspecified */
+ if (p_dec->fmt_out.video.primaries == COLOR_PRIMARIES_UNDEF)
+ p_dec->fmt_out.video.primaries =
+ mc_to_vlc_primaries(p_out->conf.video.color.standard);
+
+ if (p_dec->fmt_out.video.space == COLOR_SPACE_UNDEF)
+ p_dec->fmt_out.video.space =
+ mc_to_vlc_color_space(p_out->conf.video.color.standard);
+
+ if (p_dec->fmt_out.video.transfer == TRANSFER_FUNC_UNDEF)
+ p_dec->fmt_out.video.transfer =
+ mc_to_vlc_color_transfer(p_out->conf.video.color.transfer);
+
+ if (p_dec->fmt_out.video.color_range == COLOR_RANGE_UNDEF)
+ p_dec->fmt_out.video.color_range =
+ mc_to_vlc_color_range(p_out->conf.video.color.range);
+
int i_width = p_out->conf.video.crop_right + 1
- p_out->conf.video.crop_left;
int i_height = p_out->conf.video.crop_bottom + 1
=====================================
modules/codec/omxil/mediacodec.h
=====================================
@@ -75,6 +75,13 @@ enum mc_media_format_color_transfer_t
MC_COLOR_TRANSFER_HLG = 0x7,
};
+struct mc_video_color_info
+{
+ enum mc_media_format_color_range_t range;
+ enum mc_media_format_color_standard_t standard;
+ enum mc_media_format_color_transfer_t transfer;
+};
+
struct mc_api_out
{
enum {
@@ -103,6 +110,7 @@ struct mc_api_out
int crop_top;
int crop_right;
int crop_bottom;
+ struct mc_video_color_info color;
} video;
struct
{
@@ -125,9 +133,7 @@ union mc_api_args
bool b_tunneled_playback;
bool b_adaptive_playback;
bool b_low_latency;
- enum mc_media_format_color_transfer_t color_transfer;
- enum mc_media_format_color_range_t color_range;
- enum mc_media_format_color_standard_t color_standard;
+ struct mc_video_color_info color;
} video;
struct
{
=====================================
modules/codec/omxil/mediacodec_ndk.c
=====================================
@@ -103,9 +103,9 @@ static int ConfigureDecoder(mc_api *api, union mc_api_args
*p_args)
AMediaFormat_setInt32(p_sys->p_format, "height",
p_args->video.i_height);
AMediaFormat_setInt32(p_sys->p_format, "rotation-degrees",
p_args->video.i_angle);
- AMediaFormat_setInt32(p_sys->p_format, "color-range",
p_args->video.color_range);
- AMediaFormat_setInt32(p_sys->p_format, "color-standard",
p_args->video.color_standard);
- AMediaFormat_setInt32(p_sys->p_format, "color-transfer",
p_args->video.color_transfer);
+ AMediaFormat_setInt32(p_sys->p_format, "color-range",
p_args->video.color.range);
+ AMediaFormat_setInt32(p_sys->p_format, "color-standard",
p_args->video.color.standard);
+ AMediaFormat_setInt32(p_sys->p_format, "color-transfer",
p_args->video.color.transfer);
if (p_args->video.p_surface)
{
@@ -341,6 +341,11 @@ static int GetOutput(mc_api *api, int i_index, mc_api_out
*p_out)
p_out->conf.video.crop_top = GetFormatInteger(format,
"crop-top");
p_out->conf.video.crop_right = GetFormatInteger(format,
"crop-right");
p_out->conf.video.crop_bottom = GetFormatInteger(format,
"crop-bottom");
+
+ /* Extract color info from output format (API 28+) */
+ p_out->conf.video.color.range = GetFormatInteger(format,
"color-range");
+ p_out->conf.video.color.standard = GetFormatInteger(format,
"color-standard");
+ p_out->conf.video.color.transfer = GetFormatInteger(format,
"color-transfer");
}
else
{
=====================================
modules/video_output/Makefile.am
=====================================
@@ -351,16 +351,23 @@ libandroid_display_plugin_la_SOURCES =
video_output/android/display.c \
libandroid_display_plugin_la_CFLAGS = $(AM_CFLAGS) $(GLES2_CFLAGS)
-DUSE_OPENGL_ES2
libandroid_display_plugin_la_LIBADD = libvlc_opengles.la $(EGL_LIBS)
$(GLES2_LIBS) libandroid_utils.la libandroid_env.la
-libglinterop_android_plugin_la_SOURCES = video_output/opengl/interop_android.c
\
+libglinterop_asurface_plugin_la_SOURCES =
video_output/opengl/interop_asurface.c \
video_output/opengl/interop.h
-libglinterop_android_plugin_la_CFLAGS = $(AM_CFLAGS) -DUSE_OPENGL_ES2
-libglinterop_android_plugin_la_LIBADD = libandroid_env.la libandroid_utils.la \
+libglinterop_asurface_plugin_la_CFLAGS = $(AM_CFLAGS) -DUSE_OPENGL_ES2
+libglinterop_asurface_plugin_la_LIBADD = libandroid_env.la libandroid_utils.la
\
+ $(EGL_LIBS) $(GLES2_LIBS)
+
+libglinterop_aimage_plugin_la_SOURCES = video_output/opengl/interop_aimage.c \
+ video_output/opengl/interop.h
+libglinterop_aimage_plugin_la_CFLAGS = $(AM_CFLAGS) -DUSE_OPENGL_ES2
+libglinterop_aimage_plugin_la_LIBADD = libandroid_utils.la \
$(EGL_LIBS) $(GLES2_LIBS)
if HAVE_ANDROID
vout_LTLIBRARIES += libandroid_window_plugin.la libandroid_display_plugin.la
if HAVE_EGL
-vout_LTLIBRARIES += libegl_android_plugin.la libglinterop_android_plugin.la
+vout_LTLIBRARIES += libegl_android_plugin.la libglinterop_asurface_plugin.la \
+ libglinterop_aimage_plugin.la
endif
endif
=====================================
modules/video_output/android/display.c
=====================================
@@ -28,6 +28,8 @@
# include "config.h"
#endif
+#include <unistd.h>
+
#include <vlc_common.h>
#include <vlc_threads.h>
#include <vlc_plugin.h>
@@ -40,6 +42,67 @@
#include "../opengl/gl_api.h"
#include "../opengl/sub_renderer.h"
+static int32_t video_format_to_adataspace(const video_format_t *fmt)
+{
+ int32_t standard, transfer, range;
+
+ switch (fmt->primaries) {
+ case COLOR_PRIMARIES_BT709:
+ standard = ADATASPACE_STANDARD_BT709;
+ break;
+ case COLOR_PRIMARIES_BT601_625:
+ standard = ADATASPACE_STANDARD_BT601_625;
+ break;
+ case COLOR_PRIMARIES_BT601_525:
+ standard = ADATASPACE_STANDARD_BT601_525;
+ break;
+ case COLOR_PRIMARIES_BT2020:
+ standard = ADATASPACE_STANDARD_BT2020;
+ break;
+ case COLOR_PRIMARIES_DCI_P3:
+ standard = ADATASPACE_STANDARD_DCI_P3;
+ break;
+ default:
+ standard = ADATASPACE_STANDARD_UNSPECIFIED;
+ break;
+ }
+
+ switch (fmt->transfer) {
+ case TRANSFER_FUNC_LINEAR:
+ transfer = ADATASPACE_TRANSFER_LINEAR;
+ break;
+ case TRANSFER_FUNC_SRGB:
+ transfer = ADATASPACE_TRANSFER_SRGB;
+ break;
+ case TRANSFER_FUNC_BT709:
+ transfer = ADATASPACE_TRANSFER_SMPTE_170M;
+ break;
+ case TRANSFER_FUNC_SMPTE_ST2084:
+ transfer = ADATASPACE_TRANSFER_ST2084;
+ break;
+ case TRANSFER_FUNC_HLG:
+ transfer = ADATASPACE_TRANSFER_HLG;
+ break;
+ default:
+ transfer = ADATASPACE_TRANSFER_UNSPECIFIED;
+ break;
+ }
+
+ switch (fmt->color_range) {
+ case COLOR_RANGE_FULL:
+ range = ADATASPACE_RANGE_FULL;
+ break;
+ case COLOR_RANGE_LIMITED:
+ range = ADATASPACE_RANGE_LIMITED;
+ break;
+ default:
+ range = ADATASPACE_RANGE_UNSPECIFIED;
+ break;
+ }
+
+ return standard | transfer | range;
+}
+
struct subpicture
{
vlc_window_t *window;
@@ -65,6 +128,11 @@ struct sys
bool can_set_video_layout;
android_video_context_t *avctx;
struct subpicture sub;
+
+ struct {
+ ASurfaceControl *sc;
+ picture_t *previous_picture;
+ } asc;
};
static void subpicture_SetDisplaySize(vout_display_t *vd, unsigned width,
unsigned height)
@@ -365,12 +433,91 @@ delete_win:
return -1;
}
+static void ASC_OnComplete(void *context, ASurfaceTransactionStats *stats)
+{
+ picture_t *pic = context;
+ struct android_picture_ctx *apctx =
+ container_of(pic->context, struct android_picture_ctx, s);
+ vlc_video_context *vctx = picture_GetVideoContext(pic);
+ assert(vctx != NULL);
+ android_video_context_t *avctx =
+ vlc_video_context_GetPrivate(vctx, VLC_VIDEO_CONTEXT_AWINDOW);
+
+ /* See ASurfaceTransactionStats_getPreviousReleaseFenceFd documentation.
+ * When buffer n is displayed, this callback is called with picture n-1:
+ * update the read fence fd from the previous transaction (if it is valid,
+ * i.e. the buffer is not yet released) and release the VLC picture. */
+ int release_fd =
+
avctx->asc_api->ASurfaceTransactionStats.getPreviousReleaseFenceFd(stats,
apctx->sc);
+ if (release_fd >= 0)
+ android_picture_ctx_set_read_fence(apctx, release_fd);
+ picture_Release(pic);
+}
+
+static void PrepareWithASC(vout_display_t *vd, picture_t *pic,
+ const vlc_render_subpicture *subpicture, vlc_tick_t
date)
+{
+ struct sys *sys = vd->sys;
+ assert(sys->asc.sc != NULL);
+ assert(pic->context != NULL);
+ struct aimage_reader_api *air_api = sys->avctx->air_api;
+
+ picture_context_t *ctx = pic->context;
+ struct android_picture_ctx *apctx = container_of(ctx, struct
android_picture_ctx, s);
+ apctx->sc = sys->asc.sc;
+
+ AHardwareBuffer *buffer = NULL;
+ int32_t status = air_api->AImage.getHardwareBuffer(apctx->image, &buffer);
+ if (status != 0)
+ {
+ msg_Warn(vd, "PrepareWithASC: AImage_getHardwareBuffer failed: %d",
status);
+ return;
+ }
+ assert(buffer != NULL);
+
+ struct asurface_control_api *asc_api = sys->avctx->asc_api;
+ ASurfaceTransaction *txn = asc_api->ASurfaceTransaction.create();
+ if (txn == NULL)
+ return;
+
+ if (sys->asc.previous_picture != NULL)
+ {
+ asc_api->ASurfaceTransaction.setOnComplete(txn,
sys->asc.previous_picture,
+ ASC_OnComplete);
+ sys->asc.previous_picture = NULL;
+ }
+
+ int fence_fd = android_picture_ctx_get_fence_fd(apctx);
+
+ asc_api->ASurfaceTransaction.setBuffer(txn, sys->asc.sc, buffer, fence_fd);
+ asc_api->ASurfaceTransaction.setDesiredPresentTime(txn,
NS_FROM_VLC_TICK(date));
+ asc_api->ASurfaceTransaction.apply(txn);
+ asc_api->ASurfaceTransaction.delete(txn);
+
+ sys->asc.previous_picture = picture_Hold(pic);
+
+ if (sys->sub.window != NULL)
+ subpicture_Prepare(vd, subpicture);
+}
+
+static void DisplayWithASC(vout_display_t *vd, picture_t *picture)
+{
+ struct sys *sys = vd->sys;
+ assert(picture->context);
+ assert(sys->asc.sc != NULL);
+ /* Nothing to do for ASC (the display date was set in the transaction) */
+
+ if (sys->sub.window != NULL)
+ subpicture_Display(vd);
+}
+
static void Prepare(vout_display_t *vd, picture_t *picture,
const vlc_render_subpicture *subpicture, vlc_tick_t date)
{
struct sys *sys = vd->sys;
assert(picture->context);
+ assert(sys->asc.sc == NULL);
if (sys->avctx->render_ts != NULL)
sys->avctx->render_ts(picture->context, date);
@@ -382,6 +529,8 @@ static void Display(vout_display_t *vd, picture_t *picture)
{
struct sys *sys = vd->sys;
assert(picture->context);
+ assert(sys->asc.sc == NULL);
+
sys->avctx->render(picture->context);
if (sys->sub.window != NULL)
@@ -402,6 +551,35 @@ static void SetVideoLayout(vout_display_t *vd)
rot_fmt.i_sar_num, rot_fmt.i_sar_den);
}
+static void UpdateASCGeometry(vout_display_t *vd)
+{
+ struct sys *sys = vd->sys;
+ assert(sys->asc.sc != NULL);
+
+ struct asurface_control_api *asc_api = sys->avctx->asc_api;
+ ASurfaceTransaction *txn = asc_api->ASurfaceTransaction.create();
+ if (txn == NULL)
+ return;
+
+ const ARect crop = {
+ .left = vd->source->i_x_offset,
+ .top = vd->source->i_y_offset,
+ .right = vd->source->i_x_offset + vd->source->i_visible_width,
+ .bottom = vd->source->i_y_offset + vd->source->i_visible_height,
+ };
+ asc_api->ASurfaceTransaction.setCrop(txn, sys->asc.sc, &crop);
+
+ asc_api->ASurfaceTransaction.setPosition(txn, sys->asc.sc,
+ vd->place->x, vd->place->y);
+
+ float x_scale = vd->place->width / (float) vd->source->i_visible_width;
+ float y_scale = vd->place->height / (float) vd->source->i_visible_height;
+ asc_api->ASurfaceTransaction.setScale(txn, sys->asc.sc, x_scale, y_scale);
+
+ asc_api->ASurfaceTransaction.apply(txn);
+ asc_api->ASurfaceTransaction.delete(txn);
+}
+
static int SetDisplaySize(vout_display_t *vd, unsigned width, unsigned height)
{
struct sys *sys = vd->sys;
@@ -409,6 +587,8 @@ static int SetDisplaySize(vout_display_t *vd, unsigned
width, unsigned height)
subpicture_SetDisplaySize(vd, width, height);
msg_Dbg(vd, "change display size: %dx%d", width, height);
+ if (sys->asc.sc != NULL)
+ UpdateASCGeometry(vd);
return VLC_SUCCESS;
}
@@ -429,11 +609,18 @@ static int Control(vout_display_t *vd, int query)
vd->source->i_visible_height,
vd->source->i_sar_num,
vd->source->i_sar_den);
-
- SetVideoLayout(vd);
+ if (sys->asc.sc != NULL)
+ UpdateASCGeometry(vd);
+ else
+ SetVideoLayout(vd);
return VLC_SUCCESS;
}
case VOUT_DISPLAY_CHANGE_SOURCE_PLACE:
+ msg_Dbg(vd, "change source place: %dx%d @ %ux%u",
+ vd->place->x, vd->place->y,
+ vd->place->width, vd->place->height);
+ if (sys->asc.sc != NULL)
+ UpdateASCGeometry(vd);
return VLC_SUCCESS;
default:
msg_Warn(vd, "Unknown request in android-display: %d", query);
@@ -448,12 +635,64 @@ static void Close(vout_display_t *vd)
if (sys->can_set_video_layout)
AWindowHandler_setVideoLayout(sys->awh, 0, 0, 0, 0, 0, 0);
+ if (sys->asc.sc != NULL)
+ {
+ struct asurface_control_api *asc_api = sys->avctx->asc_api;
+
+ if (sys->asc.previous_picture != NULL)
+ picture_Release(sys->asc.previous_picture);
+ asc_api->ASurfaceControl.release(sys->asc.sc);
+ }
+
if (sys->sub.window != NULL)
subpicture_CloseDisplay(vd);
free(sys);
}
+static int CreateSurfaceControl(vout_display_t *vd)
+{
+ struct sys *sys = vd->sys;
+ struct asurface_control_api *asc_api = sys->avctx->asc_api;
+ assert(asc_api != NULL); /* If AIR is used, then ASC must be available */
+
+ /* Connect to the SurfaceView */
+ ANativeWindow *video = AWindowHandler_getANativeWindow(sys->awh,
AWindow_Video);
+ if (video == NULL)
+ return VLC_EGENERIC;
+
+ ASurfaceControl *sc =
+ asc_api->ASurfaceControl.createFromWindow(video, "vlc_video_control");
+
+ if (sc == NULL)
+ return VLC_EGENERIC;
+
+ ASurfaceTransaction *txn = asc_api->ASurfaceTransaction.create();
+ if (txn == NULL)
+ {
+ asc_api->ASurfaceControl.release(sc);
+ return VLC_EGENERIC;
+ }
+
+ asc_api->ASurfaceTransaction.setVisibility(txn, sc,
+ ASURFACE_TRANSACTION_VISIBILITY_SHOW);
+ asc_api->ASurfaceTransaction.setBufferTransparency(txn, sc,
+ ASURFACE_TRANSACTION_TRANSPARENCY_OPAQUE);
+
+ /* Set colorspace */
+ int32_t dataspace = video_format_to_adataspace(vd->source);
+ if (dataspace != ADATASPACE_UNKNOWN)
+ asc_api->ASurfaceTransaction.setBufferDataSpace(txn, sc, dataspace);
+
+ asc_api->ASurfaceTransaction.apply(txn);
+ asc_api->ASurfaceTransaction.delete(txn);
+
+ sys->asc.sc = sc;
+ sys->asc.previous_picture = NULL;
+
+ return VLC_SUCCESS;
+}
+
static int Open(vout_display_t *vd,
video_format_t *fmtp, vlc_video_context *context)
{
@@ -482,8 +721,10 @@ static int Open(vout_display_t *vd,
sys->awh = awh;
sys->can_set_video_layout = can_set_video_layout;
+ sys->asc.sc = NULL;
sys->avctx = vlc_video_context_GetPrivate(context,
VLC_VIDEO_CONTEXT_AWINDOW);
assert(sys->avctx);
+
if (sys->avctx->texture != NULL)
{
/* video context configured for opengl */
@@ -491,6 +732,17 @@ static int Open(vout_display_t *vd,
return VLC_EGENERIC;
}
+ if (sys->avctx->air != NULL)
+ {
+ int ret = CreateSurfaceControl(vd);
+ if (ret == VLC_EGENERIC)
+ {
+ free(sys);
+ return VLC_EGENERIC;
+ }
+ msg_Dbg(vd, "Using new ASurfaceControl");
+ }
+
const bool has_subtitle_surface =
AWindowHandler_getANativeWindow(sys->awh, AWindow_Subtitles) != NULL;
if (has_subtitle_surface)
@@ -510,18 +762,33 @@ static int Open(vout_display_t *vd,
sys->sub.window = NULL;
}
- SetVideoLayout(vd);
-
- static const struct vlc_display_operations ops = {
- .close = Close,
- .prepare = Prepare,
- .display = Display,
- .set_display_size = SetDisplaySize,
- .control = Control,
- .set_viewpoint = NULL,
- };
+ if (sys->asc.sc != NULL)
+ UpdateASCGeometry(vd);
+ else
+ SetVideoLayout(vd);
- vd->ops = &ops;
+ if (sys->asc.sc != NULL)
+ {
+ static const struct vlc_display_operations ops = {
+ .close = Close,
+ .prepare = PrepareWithASC,
+ .display = DisplayWithASC,
+ .set_display_size = SetDisplaySize,
+ .control = Control,
+ };
+ vd->ops = &ops;
+ }
+ else
+ {
+ static const struct vlc_display_operations ops = {
+ .close = Close,
+ .prepare = Prepare,
+ .display = Display,
+ .set_display_size = SetDisplaySize,
+ .control = Control,
+ };
+ vd->ops = &ops;
+ }
return VLC_SUCCESS;
}
=====================================
modules/video_output/android/utils.c
=====================================
@@ -138,6 +138,14 @@ struct AWindowHandler
jfloatArray jtransform_mtx_array;
jfloat *jtransform_mtx;
} stex;
+
+ struct aimage_reader_api ndk_air_api;
+ void *ndk_air_lib;
+ void *ndk_sync_lib;
+ bool b_has_ndk_air_api;
+
+ struct asurface_control_api ndk_asc_api;
+ bool b_has_ndk_asc_api;
};
#define JNI_CALL(what, obj, method, ...) \
@@ -379,10 +387,101 @@ LoadNDKSurfaceTextureAPI(AWindowHandler *p_awh, void
*p_library)
return VLC_SUCCESS;
}
+static int
+LoadAimageReaderAPI(AWindowHandler *p_awh, void *p_android_lib)
+{
+ /* AHardwareBuffer functions are in libandroid.so, API 31+ */
+#define LOAD(object, name) \
+ p_awh->ndk_air_api.object.name = dlsym(p_android_lib, #object "_" #name); \
+ if (p_awh->ndk_air_api.object.name == NULL) return VLC_EGENERIC
+
+ LOAD(AHardwareBuffer, getId);
+ LOAD(AHardwareBuffer, describe);
+
+#undef LOAD
+
+ void *p_library = dlopen("libmediandk.so", RTLD_NOW);
+ if (p_library == NULL)
+ return VLC_EGENERIC;
+
+#define LOAD(object, name) \
+ p_awh->ndk_air_api.object.name = dlsym(p_library, #object "_" #name); \
+ if (p_awh->ndk_air_api.object.name == NULL) goto error
+
+ /* API 29+ */
+ LOAD(AImageReader, newWithUsage);
+ LOAD(AImageReader, delete);
+ LOAD(AImageReader, getWindow);
+ LOAD(AImageReader, acquireNextImageAsync);
+ LOAD(AImageReader, setImageListener);
+
+ LOAD(AImage, deleteAsync);
+ LOAD(AImage, getHardwareBuffer);
+ LOAD(AImage, getTimestamp);
+ LOAD(AImage, getCropRect);
+ LOAD(AImage, getWidth);
+ LOAD(AImage, getHeight);
+
+#undef LOAD
+
+ /* sync_merge is in libsync.so */
+ p_awh->ndk_sync_lib = dlopen("libsync.so", RTLD_NOW);
+ if (p_awh->ndk_sync_lib != NULL)
+ {
+ p_awh->ndk_air_api.sync_merge = dlsym(p_awh->ndk_sync_lib,
"sync_merge");
+ if (p_awh->ndk_air_api.sync_merge == NULL)
+ {
+ dlclose(p_awh->ndk_sync_lib);
+ p_awh->ndk_sync_lib = NULL;
+ goto error;
+ }
+ }
+
+ p_awh->ndk_air_lib = p_library;
+ return VLC_SUCCESS;
+
+error:
+ dlclose(p_library);
+ return VLC_EGENERIC;
+}
+
+static int
+LoadASurfaceControlAPI(AWindowHandler *p_awh, void *p_library)
+{
+#define LOAD(object, name) \
+ p_awh->ndk_asc_api.object.name = dlsym(p_library, #object "_" #name); \
+ if (p_awh->ndk_asc_api.object.name == NULL) return VLC_EGENERIC
+
+ /* API 31+ functions */
+ LOAD(ASurfaceTransaction, setCrop);
+ LOAD(ASurfaceTransaction, setPosition);
+ LOAD(ASurfaceTransaction, setBufferTransform);
+ LOAD(ASurfaceTransaction, setScale);
+ LOAD(ASurfaceTransactionStats, getPreviousReleaseFenceFd);
+
+ /* API 29+ functions */
+ LOAD(ASurfaceControl, createFromWindow);
+ LOAD(ASurfaceControl, release);
+ LOAD(ASurfaceTransaction, create);
+ LOAD(ASurfaceTransaction, delete);
+ LOAD(ASurfaceTransaction, apply);
+ LOAD(ASurfaceTransaction, setBuffer);
+ LOAD(ASurfaceTransaction, setVisibility);
+ LOAD(ASurfaceTransaction, setBufferTransparency);
+ LOAD(ASurfaceTransaction, setBufferDataSpace);
+ LOAD(ASurfaceTransaction, setHdrMetadata_smpte2086);
+ LOAD(ASurfaceTransaction, setHdrMetadata_cta861_3);
+ LOAD(ASurfaceTransaction, setOnComplete);
+ LOAD(ASurfaceTransaction, setDesiredPresentTime);
+
+#undef LOAD
+
+ return VLC_SUCCESS;
+}
+
/*
* Android NativeWindow (post android 2.3)
*/
-
static void
LoadNativeWindowAPI(AWindowHandler *p_awh)
{
@@ -390,7 +489,9 @@ LoadNativeWindowAPI(AWindowHandler *p_awh)
if (!p_library)
return;
- p_awh->b_has_ndk_ast_api = !LoadNDKSurfaceTextureAPI(p_awh, p_library);
+ p_awh->b_has_ndk_ast_api = LoadNDKSurfaceTextureAPI(p_awh, p_library) ==
VLC_SUCCESS;
+ p_awh->b_has_ndk_air_api = LoadAimageReaderAPI(p_awh, p_library) ==
VLC_SUCCESS;
+ p_awh->b_has_ndk_asc_api = LoadASurfaceControlAPI(p_awh, p_library) ==
VLC_SUCCESS;
p_awh->p_anw_dl = p_library;
}
@@ -622,6 +723,9 @@ AWindowHandler_new(vlc_object_t *obj, vlc_window_t *wnd,
awh_events_t *p_events)
}
}
LoadNativeWindowAPI(p_awh);
+ msg_Dbg(obj, "has_anw: %d has_ast: %d has_air: %d has_asc: %d",
+ p_awh->p_anw_dl != NULL, p_awh->b_has_ndk_ast_api,
+ p_awh->b_has_ndk_air_api, p_awh->b_has_ndk_asc_api);
p_awh->capabilities = 0;
@@ -638,7 +742,9 @@ AWindowHandler_new(vlc_object_t *obj, vlc_window_t *wnd,
awh_events_t *p_events)
if (vout_modules
&& (strncmp(vout_modules, "gles2", sizeof("gles2") - 1) == 0
|| strncmp(vout_modules, "opengles2", sizeof("opengles2") - 1) == 0))
+ {
p_awh->capabilities &= ~AWH_CAPS_SET_VIDEO_LAYOUT;
+ }
free(vout_modules);
}
@@ -720,11 +826,29 @@ AWindowHandler_destroy(AWindowHandler *p_awh)
(*p_env)->DeleteGlobalRef(p_env, p_awh->stex.jtransform_mtx_array);
}
+ if (p_awh->ndk_sync_lib)
+ dlclose(p_awh->ndk_sync_lib);
+ if (p_awh->ndk_air_lib)
+ dlclose(p_awh->ndk_air_lib);
if (p_awh->p_anw_dl)
dlclose(p_awh->p_anw_dl);
free(p_awh);
}
+struct aimage_reader_api *
+AWindowHandler_getAImageReaderApi(AWindowHandler *p_awh)
+{
+ return p_awh->b_has_ndk_air_api && p_awh->b_has_ndk_asc_api ?
+ &p_awh->ndk_air_api : NULL;
+}
+
+struct asurface_control_api *
+AWindowHandler_getASurfaceControlApi(AWindowHandler *p_awh)
+{
+ return p_awh->b_has_ndk_air_api && p_awh->b_has_ndk_asc_api ?
+ &p_awh->ndk_asc_api : NULL;
+}
+
static struct vlc_asurfacetexture_priv* CreateSurfaceTexture(
AWindowHandler *p_awh, JNIEnv *p_env, bool single_buffer)
{
=====================================
modules/video_output/android/utils.h
=====================================
@@ -25,10 +25,18 @@
# include "config.h"
#endif
+#include <unistd.h>
+#include <assert.h>
+
#include <jni.h>
#include <android/native_window.h>
#include <android/native_window_jni.h>
+#include <android/hardware_buffer.h>
#include <android/input.h>
+#include <android/surface_control.h>
+#include <android/hdr_metadata.h>
+#include <android/data_space.h>
+#include <media/NdkImageReader.h>
#include <vlc_vout_display.h>
#include <vlc_common.h>
@@ -38,6 +46,138 @@
/* AWH backed by a Android SurfaceView */
#define AWH_CAPS_SURFACE_VIEW 0x2
+/*
+ * AImageReader function pointers
+ */
+typedef int32_t (*pfn_AImageReader_newWithUsage)(
+ int32_t width, int32_t height, int32_t format, uint64_t usage,
+ int32_t maxImages, AImageReader **reader);
+typedef void (*pfn_AImageReader_delete)(AImageReader *reader);
+typedef int32_t (*pfn_AImageReader_getWindow)(
+ AImageReader *reader, ANativeWindow **window);
+typedef int32_t (*pfn_AImageReader_acquireNextImageAsync)(
+ AImageReader *reader, AImage **image, int *acquireFenceFd);
+typedef void (*pfn_AImage_deleteAsync)(AImage *image, int releaseFenceFd);
+typedef int32_t (*pfn_AImage_getHardwareBuffer)(
+ const AImage *image, struct AHardwareBuffer **buffer);
+typedef int32_t (*pfn_AImage_getTimestamp)(
+ const AImage *image, int64_t *timestampNs);
+typedef int32_t (*pfn_AImage_getCropRect)(
+ const AImage *image, AImageCropRect *rect);
+typedef int32_t (*pfn_AImage_getWidth)(
+ const AImage *image, int32_t *width);
+typedef int32_t (*pfn_AImage_getHeight)(
+ const AImage *image, int32_t *height);
+typedef int32_t (*pfn_AImageReader_setImageListener)(
+ AImageReader *reader, AImageReader_ImageListener *listener);
+typedef int (*pfn_sync_merge)(const char *name, int fd1, int fd2);
+
+typedef int32_t (*pfn_AHardwareBuffer_getId)(
+ const struct AHardwareBuffer *buffer, uint64_t *outId);
+typedef void (*pfn_AHardwareBuffer_describe)(
+ const struct AHardwareBuffer *buffer, AHardwareBuffer_Desc *outDesc);
+
+/*
+* ASurfaceControl function pointers
+*/
+typedef ASurfaceControl* (*pfn_ASurfaceControl_createFromWindow)(
+ ANativeWindow *parent, const char *debug_name);
+typedef void (*pfn_ASurfaceControl_release)(ASurfaceControl *surface_control);
+typedef ASurfaceTransaction* (*pfn_ASurfaceTransaction_create)(void);
+typedef void (*pfn_ASurfaceTransaction_delete)(ASurfaceTransaction
*transaction);
+typedef void (*pfn_ASurfaceTransaction_apply)(ASurfaceTransaction
*transaction);
+typedef void (*pfn_ASurfaceTransaction_setBuffer)(
+ ASurfaceTransaction *transaction, ASurfaceControl *surface_control,
+ struct AHardwareBuffer *buffer, int acquire_fence_fd);
+typedef void (*pfn_ASurfaceTransaction_setVisibility)(
+ ASurfaceTransaction *transaction, ASurfaceControl *surface_control,
+ int8_t visibility);
+typedef void (*pfn_ASurfaceTransaction_setBufferTransparency)(
+ ASurfaceTransaction *transaction, ASurfaceControl *surface_control,
+ int8_t transparency);
+typedef void (*pfn_ASurfaceTransaction_setBufferDataSpace)(
+ ASurfaceTransaction *transaction, ASurfaceControl *surface_control,
+ int32_t data_space);
+typedef void (*pfn_ASurfaceTransaction_setHdrMetadata_smpte2086)(
+ ASurfaceTransaction *transaction, ASurfaceControl *surface_control,
+ struct AHdrMetadata_smpte2086 *metadata);
+typedef void (*pfn_ASurfaceTransaction_setHdrMetadata_cta861_3)(
+ ASurfaceTransaction *transaction, ASurfaceControl *surface_control,
+ struct AHdrMetadata_cta861_3 *metadata);
+typedef void (*pfn_ASurfaceTransaction_setOnComplete)(
+ ASurfaceTransaction *transaction, void *context,
+ void (*func)(void *context, ASurfaceTransactionStats *stats));
+typedef void (*pfn_ASurfaceTransaction_setCrop)(
+ ASurfaceTransaction *transaction, ASurfaceControl *surface_control,
+ const ARect *crop);
+typedef void (*pfn_ASurfaceTransaction_setPosition)(
+ ASurfaceTransaction *transaction, ASurfaceControl *surface_control,
+ int32_t x, int32_t y);
+typedef void (*pfn_ASurfaceTransaction_setBufferTransform)(
+ ASurfaceTransaction *transaction, ASurfaceControl *surface_control,
+ int32_t transform);
+typedef void (*pfn_ASurfaceTransaction_setScale)(
+ ASurfaceTransaction *transaction, ASurfaceControl *surface_control,
+ float xScale, float yScale);
+typedef void (*pfn_ASurfaceTransaction_setDesiredPresentTime)(
+ ASurfaceTransaction* transaction,
+ int64_t desiredPresentTime);
+typedef int (*pfn_ASurfaceTransactionStats_getPreviousReleaseFenceFd)(
+ ASurfaceTransactionStats *stats, ASurfaceControl *surface_control);
+
+struct aimage_reader_api
+{
+ /* AImageReader, API 31+ (because AHardwareBuffer) */
+ struct {
+ pfn_AImageReader_newWithUsage newWithUsage;
+ pfn_AImageReader_delete delete;
+ pfn_AImageReader_getWindow getWindow;
+ pfn_AImageReader_acquireNextImageAsync acquireNextImageAsync;
+ pfn_AImageReader_setImageListener setImageListener;
+ } AImageReader;
+ struct {
+ pfn_AImage_deleteAsync deleteAsync;
+ pfn_AImage_getHardwareBuffer getHardwareBuffer;
+ pfn_AImage_getTimestamp getTimestamp;
+ pfn_AImage_getCropRect getCropRect;
+ pfn_AImage_getWidth getWidth;
+ pfn_AImage_getHeight getHeight;
+ } AImage;
+ pfn_sync_merge sync_merge;
+ struct {
+ pfn_AHardwareBuffer_getId getId;
+ pfn_AHardwareBuffer_describe describe;
+ } AHardwareBuffer;
+};
+
+struct asurface_control_api
+{
+ struct {
+ pfn_ASurfaceControl_createFromWindow createFromWindow;
+ pfn_ASurfaceControl_release release;
+ } ASurfaceControl;
+ struct {
+ pfn_ASurfaceTransaction_create create;
+ pfn_ASurfaceTransaction_delete delete;
+ pfn_ASurfaceTransaction_apply apply;
+ pfn_ASurfaceTransaction_setBuffer setBuffer;
+ pfn_ASurfaceTransaction_setVisibility setVisibility;
+ pfn_ASurfaceTransaction_setBufferTransparency setBufferTransparency;
+ pfn_ASurfaceTransaction_setBufferDataSpace setBufferDataSpace;
+ pfn_ASurfaceTransaction_setHdrMetadata_smpte2086 setHdrMetadata_smpte2086;
+ pfn_ASurfaceTransaction_setHdrMetadata_cta861_3 setHdrMetadata_cta861_3;
+ pfn_ASurfaceTransaction_setOnComplete setOnComplete;
+ pfn_ASurfaceTransaction_setCrop setCrop;
+ pfn_ASurfaceTransaction_setPosition setPosition;
+ pfn_ASurfaceTransaction_setBufferTransform setBufferTransform;
+ pfn_ASurfaceTransaction_setScale setScale;
+ pfn_ASurfaceTransaction_setDesiredPresentTime setDesiredPresentTime;
+ } ASurfaceTransaction;
+ struct {
+ pfn_ASurfaceTransactionStats_getPreviousReleaseFenceFd getPreviousReleaseFenceFd;
+ } ASurfaceTransactionStats;
+};
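Both API tables above are plain function-pointer structs, so they can be filled once at runtime when the process knows it is on API 31+. Below is a minimal sketch of how such a table could be resolved with dlopen()/dlsym(); the helper name load_asc_api() and its error handling are illustrative only and are not part of this patch (the real loader presumably sits next to AWindowHandler_getASurfaceControlApi() in modules/video_output/android/utils.c, and VLC_SUCCESS/VLC_EGENERIC come from vlc_common.h).

#include <dlfcn.h>

/* Hypothetical loader: the NDK symbols are assumed to be exported by
 * libandroid.so on API 31+ devices; only a few members are shown. */
static int load_asc_api(struct asurface_control_api *api)
{
    /* The handle is deliberately kept open for the process lifetime. */
    void *lib = dlopen("libandroid.so", RTLD_NOW);
    if (lib == NULL)
        return VLC_EGENERIC;

#define LOAD(group, member) \
    do { \
        api->group.member = (pfn_##group##_##member) \
            dlsym(lib, #group "_" #member); \
        if (api->group.member == NULL) \
            return VLC_EGENERIC; \
    } while (0)

    LOAD(ASurfaceControl, createFromWindow);
    LOAD(ASurfaceControl, release);
    LOAD(ASurfaceTransaction, create);
    LOAD(ASurfaceTransaction, apply);
    LOAD(ASurfaceTransaction, setBuffer);
    /* ...remaining members resolved the same way... */
#undef LOAD

    return VLC_SUCCESS;
}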
+
typedef struct AWindowHandler AWindowHandler;
typedef struct ASurfaceTexture ASurfaceTexture;
@@ -67,6 +207,10 @@ typedef struct android_video_context_t android_video_context_t;
struct android_video_context_t
{
+ struct aimage_reader_api *air_api;
+ struct asurface_control_api *asc_api;
+ AImageReader *air;
+
struct vlc_asurfacetexture *texture;
void *dec_opaque;
bool (*render)(struct picture_context_t *ctx);
@@ -76,6 +220,40 @@ struct android_video_context_t
(*get_texture)(struct picture_context_t *ctx);
};
+struct android_picture_ctx
+{
+ picture_context_t s;
+ AImage *image;
+ int fence_fd;
+ int read_fence_fd;
+ vlc_atomic_rc_t rc;
+ ASurfaceControl *sc;
+};
+
+static inline int
+android_picture_ctx_get_fence_fd(struct android_picture_ctx *apctx)
+{
+ return (apctx->fence_fd >= 0) ? dup(apctx->fence_fd) : -1;
+}
+
+static inline void
+android_picture_ctx_set_read_fence(struct android_picture_ctx *apctx, int fd)
+{
+ assert(fd >= 0);
+ if (apctx->read_fence_fd < 0)
+ apctx->read_fence_fd = fd;
+ else
+ {
+ android_video_context_t *avctx =
+ vlc_video_context_GetPrivate(apctx->s.vctx, VLC_VIDEO_CONTEXT_AWINDOW);
+
+ int merged = avctx->air_api->sync_merge("vlc_read_fence", apctx->read_fence_fd, fd);
+ close(apctx->read_fence_fd);
+ close(fd);
+ apctx->read_fence_fd = merged;
+ }
+}
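For reference, here is a hedged usage sketch of the two helpers above from a consumer's point of view: wait on the decoder's acquire fence before reading the AHardwareBuffer, then register a read fence so the image is not recycled while it is still being sampled. consume_picture() and gpu_done_fd are made-up names; the OpenGL interop further down does the same thing through EGL fences instead of poll().

#include <poll.h>
#include <unistd.h>

static void consume_picture(struct android_picture_ctx *apctx, int gpu_done_fd)
{
    /* Wait for the producer (MediaCodec) to finish writing the buffer. */
    int acquire_fd = android_picture_ctx_get_fence_fd(apctx);
    if (acquire_fd >= 0)
    {
        struct pollfd pfd = { .fd = acquire_fd, .events = POLLIN };
        poll(&pfd, 1, -1); /* a signalled sync_file fd becomes readable */
        close(acquire_fd);
    }

    /* ...read or sample the AHardwareBuffer here... */

    /* Publish our completion fence; it is merged with any previous one. */
    if (gpu_done_fd >= 0)
        android_picture_ctx_set_read_fence(apctx, gpu_done_fd);
}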
+
struct vlc_asurfacetexture
{
struct ANativeWindow *window;
@@ -124,6 +302,12 @@ AWindowHandler *
AWindowHandler_newFromANWs(vlc_object_t *obj, ANativeWindow *video,
ANativeWindow *subtitle);
+struct aimage_reader_api *
+AWindowHandler_getAImageReaderApi(AWindowHandler *p_awh);
+
+struct asurface_control_api *
+AWindowHandler_getASurfaceControlApi(AWindowHandler *p_awh);
+
/**
* Get the Video or the Subtitles ANativeWindow
*
=====================================
modules/video_output/opengl/interop_aimage.c
=====================================
@@ -0,0 +1,467 @@
+/*****************************************************************************
+ * interop_aimage.c: OpenGL AImage/EGL interop
+ *****************************************************************************
+ * Copyright (C) 2026 VLC authors and VideoLAN
+ *
+ * This program is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU Lesser General Public License as published by
+ * the Free Software Foundation; either version 2.1 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this program; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin Street, Fifth Floor, Boston MA 02110-1301, USA.
+ *****************************************************************************/
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#ifndef __ANDROID__
+# error this file must be built for Android
+#endif
+
+#include <vlc_common.h>
+#include <vlc_plugin.h>
+#include "interop.h"
+#include "../android/utils.h"
+#include "gl_api.h"
+#include "gl_util.h"
+
+#include <EGL/egl.h>
+#include <EGL/eglext.h>
+
+/* AHardwareBuffer -> OpenGL/Vulkan image cache */
+#define AHB_CACHE_SIZE 64
+struct ahb_slot
+{
+ /* From AHardwareBuffer_getId */
+ uint64_t ahb_id;
+ bool valid;
+ EGLImageKHR image;
+};
+
+struct ahb_cache {
+ struct ahb_slot slots[AHB_CACHE_SIZE];
+};
+struct priv
+{
+ float mtx_2x3[2*3];
+
+ struct aimage_reader_api *air_api;
+ struct ahb_cache ahb_cache; /* EGLImage cache for AHardwareBuffers */
+
+ struct {
+ PFNGLACTIVETEXTUREPROC ActiveTexture;
+ PFNGLBINDTEXTUREPROC BindTexture;
+ PFNGLFLUSHPROC Flush;
+ PFNGLEGLIMAGETARGETTEXTURE2DOESPROC EGLImageTargetTexture2DOES;
+ } gl;
+
+ struct {
+ EGLDisplay display;
+ PFNEGLGETCURRENTDISPLAYPROC GetCurrentDisplay;
+ PFNEGLQUERYSTRINGPROC QueryString;
+ PFNEGLCREATEIMAGEKHRPROC CreateImageKHR;
+ PFNEGLDESTROYIMAGEKHRPROC DestroyImageKHR;
+ PFNEGLGETNATIVECLIENTBUFFERANDROIDPROC GetNativeClientBufferANDROID;
+ PFNEGLCREATESYNCKHRPROC CreateSyncKHR;
+ PFNEGLWAITSYNCKHRPROC WaitSyncKHR;
+ PFNEGLDESTROYSYNCKHRPROC DestroySyncKHR;
+ PFNEGLDUPNATIVEFENCEFDANDROIDPROC DupNativeFenceFDANDROID;
+ } egl;
+};
+
+
+static void
+ahb_cache_init(struct ahb_cache *pool)
+{
+ for (int i = 0; i < AHB_CACHE_SIZE; i++)
+ {
+ pool->slots[i].ahb_id = 0;
+ pool->slots[i].valid = false;
+ pool->slots[i].image = NULL;
+ }
+}
+
+static struct ahb_slot *
+ahb_cache_get(struct ahb_cache *pool, uint64_t ahb_id, bool *is_new)
+{
+ struct ahb_slot *free_slot = NULL;
+ for (size_t i = 0; i < AHB_CACHE_SIZE; i++)
+ {
+ struct ahb_slot *slot = &pool->slots[i];
+
+ if (slot->valid && slot->ahb_id == ahb_id)
+ {
+ *is_new = false;
+ return slot;
+ }
+
+ if (free_slot == NULL && !slot->valid)
+ free_slot = slot;
+ }
+
+ *is_new = true;
+ if (free_slot == NULL)
+ free_slot = &pool->slots[0];
+
+ free_slot->ahb_id = ahb_id;
+ free_slot->valid = true;
+ return free_slot;
+}
+
+static int
+ComputeAImageTransformMatrix(const struct vlc_gl_interop *interop, AImage *image)
+{
+ struct priv *priv = interop->priv;
+ AImageCropRect crop;
+ int32_t buf_width, buf_height;
+
+ if (priv->air_api->AImage.getCropRect(image, &crop) != 0 ||
+ priv->air_api->AImage.getWidth(image, &buf_width) != 0 ||
+ priv->air_api->AImage.getHeight(image, &buf_height) != 0)
+ return VLC_EGENERIC;
+
+ if (buf_width <= 0 || buf_height <= 0 ||
+ crop.right <= crop.left || crop.bottom <= crop.top)
+ return VLC_EGENERIC;
+
+ float crop_w = crop.right - crop.left;
+ float crop_h = crop.bottom - crop.top;
+
+ /* Scale + Y-flip: apply flip first (t' = 1-t), then crop */
+ float sx = crop_w / buf_width;
+ float sy = -crop_h / buf_height; /* negative for Y-flip */
+
+ float tx = (float)crop.left / buf_width;
+ float ty = (float)crop.bottom / buf_height; /* bottom, not top */
+
+ priv->mtx_2x3[0] = sx; priv->mtx_2x3[2] = 0.0f; priv->mtx_2x3[4] = tx;
+ priv->mtx_2x3[1] = 0.0f; priv->mtx_2x3[3] = sy; priv->mtx_2x3[5] = ty;
+
+ return VLC_SUCCESS;
+}
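The matrix above is stored column-major, so a normalized texture coordinate (u, v) is mapped to (sx*u + tx, sy*v + ty). The small helper and the worked numbers below are illustrative only; apply_mtx_2x3() is not part of the patch.

/* Illustrative only: apply the column-major 2x3 matrix to (u, v). */
static void apply_mtx_2x3(const float m[2 * 3], float u, float v,
                          float *out_u, float *out_v)
{
    *out_u = m[0] * u + m[2] * v + m[4];
    *out_v = m[1] * u + m[3] * v + m[5];
}

/* Worked example: a 1920x1088 buffer cropped to {left=0, top=0,
 * right=1920, bottom=1080} yields sx = 1.0, sy = -1080/1088, tx = 0.0,
 * ty = 1080/1088.  (u, v) = (0, 0) maps to (0, 1080/1088), the bottom of
 * the crop, and (u, v) = (0, 1) maps to (0, 0), the top: the Y axis is
 * flipped and the 8 padding rows at the end of the buffer are never
 * sampled. */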
+
+static int
+tc_aimage_allocate_textures(const struct vlc_gl_interop *interop, uint32_t textures[],
+ const int32_t tex_width[], const int32_t tex_height[])
+{
+ (void) interop; (void) tex_width; (void) tex_height;
+ assert(textures[0] != 0); (void) textures;
+
+ return VLC_SUCCESS;
+}
+
+static EGLImageKHR
+GetCachedEGLImage(const struct vlc_gl_interop *interop, AHardwareBuffer *ahb)
+{
+ struct priv *priv = interop->priv;
+ vlc_object_t *o = VLC_OBJECT(interop->gl);
+
+ uint64_t ahb_id;
+ int32_t status = priv->air_api->AHardwareBuffer.getId(ahb, &ahb_id);
+
+ bool is_new = false;
+ struct ahb_slot *slot;
+ if (status != 0)
+ {
+ /* If we can't identify buffers, always use slot 0 as "new" */
+ is_new = true;
+ slot = &priv->ahb_cache.slots[0];
+ }
+ else
+ slot = ahb_cache_get(&priv->ahb_cache, ahb_id, &is_new);
+
+ if (!is_new)
+ {
+ assert(slot->image != NULL);
+ /* Reuse existing EGLImage */
+ return slot->image;
+ }
+ /* else : cache miss or first init */
+
+ EGLImageKHR egl_image;
+
+ /* Destroy old EGLImage if present */
+ if (slot->image != NULL)
+ {
+ msg_Dbg(o, "cache miss: creating a new EGLImageKHR");
+ priv->egl.DestroyImageKHR(priv->egl.display, slot->image);
+ }
+
+ EGLClientBuffer client_buffer = priv->egl.GetNativeClientBufferANDROID(ahb);
+ if (client_buffer == NULL)
+ {
+ msg_Err(o, "eglGetNativeClientBufferANDROID failed");
+ slot->image = NULL;
+ return NULL;
+ }
+
+ /* Create EGLImage from AHardwareBuffer */
+ EGLint attrs[] = { EGL_NONE };
+ egl_image = priv->egl.CreateImageKHR(priv->egl.display, EGL_NO_CONTEXT,
+ EGL_NATIVE_BUFFER_ANDROID,
+ client_buffer, attrs);
+
+ if (egl_image == EGL_NO_IMAGE_KHR)
+ {
+ msg_Err(o, "eglCreateImageKHR failed: 0x%x", eglGetError());
+ slot->image = NULL;
+ return NULL;
+ }
+
+ slot->image = egl_image;
+ return egl_image;
+}
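For comparison, here is a sketch (not part of the patch) of the per-frame path this cache avoids: without the AHardwareBuffer id as a key, the interop would have to create, and later destroy, one EGLImageKHR per update even though MediaCodec keeps cycling through the same small pool of buffers. create_image_uncached() is a hypothetical name.

/* Hypothetical uncached variant, for illustration only. */
static EGLImageKHR create_image_uncached(struct priv *priv, AHardwareBuffer *ahb)
{
    EGLClientBuffer cbuf = priv->egl.GetNativeClientBufferANDROID(ahb);
    if (cbuf == NULL)
        return NULL;

    static const EGLint attrs[] = { EGL_NONE };
    EGLImageKHR img = priv->egl.CreateImageKHR(priv->egl.display, EGL_NO_CONTEXT,
                                               EGL_NATIVE_BUFFER_ANDROID,
                                               cbuf, attrs);
    /* The caller would then have to DestroyImageKHR() after every frame. */
    return img == EGL_NO_IMAGE_KHR ? NULL : img;
}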
+
+static int
+tc_aimage_update(const struct vlc_gl_interop *interop, uint32_t textures[],
+ const int32_t tex_width[], const int32_t tex_height[],
+ picture_t *pic, const size_t plane_offset[])
+{
+ struct priv *priv = interop->priv;
+ vlc_object_t *o = VLC_OBJECT(interop->gl);
+
+ (void) tex_width; (void) tex_height; (void) plane_offset;
+ assert(pic->context);
+ assert(textures[0] != 0);
+
+ struct android_picture_ctx *apctx =
+ container_of(pic->context, struct android_picture_ctx, s);
+
+ AHardwareBuffer *ahb = NULL;
+ int32_t status = priv->air_api->AImage.getHardwareBuffer(apctx->image, &ahb);
+ if (status != 0)
+ {
+ msg_Err(o, "AImage_getHardwareBuffer failed: %d", status);
+ goto error;
+ }
+ assert(ahb != NULL);
+
+ /* Get fence_fd and wait using EGL sync */
+ int fence_fd = android_picture_ctx_get_fence_fd(apctx);
+ if (fence_fd >= 0)
+ {
+ const EGLint sync_attribs[] = {
+ EGL_SYNC_NATIVE_FENCE_FD_ANDROID, fence_fd,
+ EGL_NONE
+ };
+ EGLSyncKHR sync = priv->egl.CreateSyncKHR(
+ priv->egl.display, EGL_SYNC_NATIVE_FENCE_ANDROID, sync_attribs);
+
+ if (sync != EGL_NO_SYNC_KHR)
+ {
+ priv->egl.WaitSyncKHR(priv->egl.display, sync, 0);
+ priv->egl.DestroySyncKHR(priv->egl.display, sync);
+ }
+ else
+ {
+ msg_Warn(o, "eglCreateSyncKHR failed");
+ close(fence_fd);
+ }
+ }
+
+ EGLImageKHR egl_image = GetCachedEGLImage(interop, ahb);
+ if (egl_image == NULL)
+ goto error;
+
+ priv->gl.ActiveTexture(GL_TEXTURE0);
+ priv->gl.BindTexture(GL_TEXTURE_EXTERNAL_OES, textures[0]);
+ priv->gl.EGLImageTargetTexture2DOES(GL_TEXTURE_EXTERNAL_OES, egl_image);
+
+ /* Create read fence for this picture */
+ const EGLint attribs[] = {
+ EGL_SYNC_NATIVE_FENCE_FD_ANDROID, EGL_NO_NATIVE_FENCE_FD_ANDROID,
+ EGL_NONE
+ };
+ EGLSyncKHR sync = priv->egl.CreateSyncKHR(
+ priv->egl.display, EGL_SYNC_NATIVE_FENCE_ANDROID, attribs);
+ if (sync != EGL_NO_SYNC_KHR)
+ {
+ priv->gl.Flush();
+ int read_fence_fd = priv->egl.DupNativeFenceFDANDROID(priv->egl.display,
+ sync);
+ priv->egl.DestroySyncKHR(priv->egl.display, sync);
+ if (read_fence_fd >= 0)
+ android_picture_ctx_set_read_fence(apctx, read_fence_fd);
+ else
+ msg_Warn(o, "eglDupNativeFenceFDANDROID failed");
+ }
+
+ int ret = ComputeAImageTransformMatrix(interop, apctx->image);
+ if (ret != VLC_SUCCESS)
+ {
+ msg_Warn(o, "Failed to get AImage dimensions/crop");
+ goto error;
+ }
+
+ return VLC_SUCCESS;
+
+error:
+ return VLC_EGENERIC;
+}
+
+static const float *
+tc_aimage_transform_matrix(const struct vlc_gl_interop *interop)
+{
+ struct priv *priv = interop->priv;
+ return priv->mtx_2x3;
+}
+
+static void
+Close(struct vlc_gl_interop *interop)
+{
+ struct priv *priv = interop->priv;
+
+ for (int i = 0; i < AHB_CACHE_SIZE; i++)
+ {
+ struct ahb_slot *slot = &priv->ahb_cache.slots[i];
+ if (slot->valid)
+ {
+ assert(slot->image != NULL);
+ priv->egl.DestroyImageKHR(priv->egl.display, slot->image);
+ }
+ }
+
+ free(priv);
+}
+
+static int
+InitAImage(struct vlc_gl_interop *interop, android_video_context_t *avctx)
+{
+ struct priv *priv = interop->priv;
+
+#define LOAD_GL_SYMBOL(name) \
+ priv->gl.name = vlc_gl_GetProcAddress(interop->gl, "gl" # name); \
+ if (priv->gl.name == NULL) return VLC_EGENERIC
+
+#define LOAD_EGL_SYMBOL(name) \
+ priv->egl.name = vlc_gl_GetProcAddress(interop->gl, "egl" # name); \
+ if (priv->egl.name == NULL) return VLC_EGENERIC
+
+ LOAD_GL_SYMBOL(ActiveTexture);
+ LOAD_GL_SYMBOL(BindTexture);
+ LOAD_GL_SYMBOL(Flush);
+ LOAD_GL_SYMBOL(EGLImageTargetTexture2DOES);
+
+ LOAD_EGL_SYMBOL(GetCurrentDisplay);
+ LOAD_EGL_SYMBOL(QueryString);
+ LOAD_EGL_SYMBOL(CreateImageKHR);
+ LOAD_EGL_SYMBOL(DestroyImageKHR);
+ LOAD_EGL_SYMBOL(GetNativeClientBufferANDROID);
+ LOAD_EGL_SYMBOL(CreateSyncKHR);
+ LOAD_EGL_SYMBOL(WaitSyncKHR);
+ LOAD_EGL_SYMBOL(DestroySyncKHR);
+ LOAD_EGL_SYMBOL(DupNativeFenceFDANDROID);
+
+#undef LOAD_GL_SYMBOL
+#undef LOAD_EGL_SYMBOL
+
+ priv->egl.display = priv->egl.GetCurrentDisplay();
+ if (priv->egl.display == NULL)
+ return VLC_EGENERIC;
+
+ const char *eglexts = priv->egl.QueryString(priv->egl.display,
+ EGL_EXTENSIONS);
+ if (eglexts == NULL)
+ return VLC_EGENERIC;
+ if (!vlc_gl_StrHasToken(eglexts, "EGL_ANDROID_get_native_client_buffer")
+ || !vlc_gl_StrHasToken(eglexts, "EGL_ANDROID_image_native_buffer")
+ || !vlc_gl_StrHasToken(eglexts, "EGL_ANDROID_native_fence_sync"))
+ return VLC_EGENERIC;
+
+ priv->air_api = avctx->air_api;
+ ahb_cache_init(&priv->ahb_cache);
+ return VLC_SUCCESS;
+}
+
+static int
+Open(struct vlc_gl_interop *interop)
+{
+ if (interop->fmt_in.i_chroma != VLC_CODEC_ANDROID_OPAQUE
+ || !interop->vctx)
+ return VLC_EGENERIC;
+
+ struct vlc_gl_extension_vt extension_vt;
+ vlc_gl_LoadExtensionFunctions(interop->gl, &extension_vt);
+
+ if (!vlc_gl_HasExtension(&extension_vt, "GL_OES_EGL_image_external"))
+ {
+ msg_Warn(&interop->obj, "GL_OES_EGL_image_external is not available,"
+ " disabling android interop.");
+ return VLC_EGENERIC;
+ }
+
+ android_video_context_t *avctx =
+ vlc_video_context_GetPrivate(interop->vctx, VLC_VIDEO_CONTEXT_AWINDOW);
+ assert(avctx != NULL);
+
+ bool has_aimage = avctx->air != NULL && avctx->air_api != NULL;
+ if (!has_aimage)
+ return VLC_EGENERIC;
+
+ interop->priv = malloc(sizeof(struct priv));
+ if (unlikely(interop->priv == NULL))
+ return VLC_ENOMEM;
+
+ struct priv *priv = interop->priv;
+ priv->air_api = NULL;
+
+ /* Try to use AImage path if available */
+ int ret = InitAImage(interop, avctx);
+ if (ret != VLC_SUCCESS)
+ {
+ free(priv);
+ return ret;
+ }
+
+ static const struct vlc_gl_interop_ops aimage_ops = {
+ .allocate_textures = tc_aimage_allocate_textures,
+ .update_textures = tc_aimage_update,
+ .get_transform_matrix = tc_aimage_transform_matrix,
+ .close = Close,
+ };
+ interop->ops = &aimage_ops;
+
+ interop->tex_target = GL_TEXTURE_EXTERNAL_OES;
+ if (vlc_gl_HasExtension(&extension_vt, "GL_EXT_YUV_target"))
+ {
+ msg_Warn(&interop->obj, "GL_EXT_YUV_target is available,"
+ " using it.");
+ /* We represent the output as packed YUV 4:4:4 since there is a single
+ * texture target available. */
+ interop->fmt_out.i_chroma = VLC_CODEC_V308;
+ interop->fmt_out.space = interop->fmt_in.space;
+ interop->fmt_out.primaries = interop->fmt_in.primaries;
+ interop->fmt_out.transfer = interop->fmt_in.transfer;
+ }
+ else
+ {
+ interop->fmt_out.i_chroma = VLC_CODEC_RGBA;
+ interop->fmt_out.space = COLOR_SPACE_UNDEF;
+ }
+
+ interop->tex_count = 1;
+ interop->texs[0] = (struct vlc_gl_tex_cfg) {
+ .w = {1, 1},
+ .h = {1, 1},
+ .internal = GL_RGBA,
+ .format = GL_RGBA,
+ .type = GL_UNSIGNED_BYTE,
+ };
+
+ return VLC_SUCCESS;
+}
+
+vlc_module_begin ()
+ set_description("Android OpenGL AImage converter")
+ set_capability("glinterop", 2)
+ set_callback(Open)
+ set_subcategory(SUBCAT_VIDEO_VOUT)
+vlc_module_end ()
=====================================
modules/video_output/opengl/interop_android.c → modules/video_output/opengl/interop_asurface.c
=====================================
@@ -1,5 +1,5 @@
/*****************************************************************************
- * converter_android.c: OpenGL Android opaque converter
+ * interop_asurface.c: OpenGL Android Surface interop
*****************************************************************************
* Copyright (C) 2016 VLC authors and VideoLAN
*
View it on GitLab:
https://code.videolan.org/videolan/vlc/-/compare/a588d1ea7025c20a40fd755aa3699db7e73a0a78...0aadefe758b10697b6c0b2515ac94be9186cd1f7