Diff
Modified: trunk/Source/WebCore/ChangeLog (291356 => 291357)
--- trunk/Source/WebCore/ChangeLog 2022-03-16 17:10:37 UTC (rev 291356)
+++ trunk/Source/WebCore/ChangeLog 2022-03-16 17:33:02 UTC (rev 291357)
@@ -1,3 +1,48 @@
+2022-03-16 Philippe Normand <[email protected]>
+
+ Migrate use of MediaSampleGStreamer to VideoFrame in WebRTC pipelines
+ https://bugs.webkit.org/show_bug.cgi?id=237885
+
+ Reviewed by Youenn Fablet.
+
+ Introduce VideoFrameGStreamer, meant to be used by mediastream producers and consumers.
+ A VideoFrameGStreamer internally manages a GstSample storing the actual video frame data,
+ which can be passed around between RealtimeMediaSources and the mediastream GStreamer source element.
+
+ Covered by existing layout tests.
+
+ * html/HTMLCanvasElement.cpp:
+ (WebCore::HTMLCanvasElement::toMediaSample):
+ * platform/GStreamer.cmake:
+ * platform/graphics/gstreamer/MediaSampleGStreamer.cpp:
+ (WebCore::MediaSampleGStreamer::MediaSampleGStreamer):
+ (WebCore::MediaSampleGStreamer::createFakeSample):
+ (WebCore::MediaSampleGStreamer::createImageSample): Deleted.
+ (WebCore::MediaSampleGStreamer::initializeFromBuffer): Deleted.
+ (WebCore::MediaSampleGStreamer::getRGBAImageData const): Deleted.
+ * platform/graphics/gstreamer/MediaSampleGStreamer.h:
+ (WebCore::MediaSampleGStreamer::create):
+ (WebCore::MediaSampleGStreamer::createWrappedSample): Deleted.
+ (WebCore::MediaSampleGStreamer::createImageSample): Deleted.
+ * platform/graphics/gstreamer/VideoFrameGStreamer.cpp: Added.
+ (WebCore::VideoFrameGStreamer::createFromPixelBuffer):
+ (WebCore::VideoFrameGStreamer::VideoFrameGStreamer):
+ (WebCore::VideoFrameGStreamer::getRGBAImageData const):
+ * platform/graphics/gstreamer/VideoFrameGStreamer.h: Added.
+ * platform/graphics/texmap/GraphicsContextGLTextureMapper.cpp:
+ (WebCore::GraphicsContextGLTextureMapper::paintCompositedResultsToMediaSample):
+ * platform/mediastream/gstreamer/GStreamerMediaStreamSource.cpp:
+ * platform/mediastream/gstreamer/GStreamerVideoCaptureSource.cpp:
+ (WebCore::GStreamerVideoCaptureSource::processNewFrame):
+ (WebCore::GStreamerVideoCaptureSource::newSampleCallback):
+ * platform/mediastream/gstreamer/GStreamerVideoCaptureSource.h:
+ * platform/mediastream/gstreamer/MockRealtimeVideoSourceGStreamer.cpp:
+ (WebCore::MockRealtimeVideoSourceGStreamer::updateSampleBuffer):
+ * platform/mediastream/libwebrtc/gstreamer/RealtimeIncomingVideoSourceLibWebRTC.cpp:
+ (WebCore::RealtimeIncomingVideoSourceLibWebRTC::OnFrame):
+ * platform/mediastream/libwebrtc/gstreamer/RealtimeOutgoingVideoSourceLibWebRTC.cpp:
+ (WebCore::RealtimeOutgoingVideoSourceLibWebRTC::videoSampleAvailable):
+
2022-03-16 Alan Bujtas <[email protected]>
[IFC][Integration] Move firstSelectedBox/lastSelectedBox out of InlineIterator::Line
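
To make the new API concrete, here is a minimal sketch (not part of this commit) of the producer/consumer flow the ChangeLog describes. VideoFrameGStreamer, fromGstClockTime() and FloatSize are the WebKit internals this patch uses; the rest is plain GStreamer.

```cpp
#include "GStreamerCommon.h"
#include "VideoFrameGStreamer.h"

// Producer side: wrap a captured GstSample, deriving the presentation time
// from the buffer PTS, as GStreamerVideoCaptureSource::newSampleCallback() now does.
static Ref<WebCore::VideoFrameGStreamer> wrapCapturedSample(GRefPtr<GstSample>&& gstSample)
{
    auto presentationTime = WebCore::fromGstClockTime(GST_BUFFER_PTS(gst_sample_get_buffer(gstSample.get())));
    return WebCore::VideoFrameGStreamer::create(WTFMove(gstSample), WebCore::FloatSize(), presentationTime);
}

// Consumer side: fetch the GstSample back directly, without the old
// platformSample().sample.gstSample indirection.
static GstSample* consumeFrame(WebCore::VideoFrameGStreamer& videoFrame)
{
    return videoFrame.sample();
}
```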
Modified: trunk/Source/WebCore/html/HTMLCanvasElement.cpp (291356 => 291357)
--- trunk/Source/WebCore/html/HTMLCanvasElement.cpp 2022-03-16 17:10:37 UTC (rev 291356)
+++ trunk/Source/WebCore/html/HTMLCanvasElement.cpp 2022-03-16 17:33:02 UTC (rev 291357)
@@ -94,7 +94,7 @@
#endif
#if USE(GSTREAMER)
-#include "MediaSampleGStreamer.h"
+#include "VideoFrameGStreamer.h"
#endif
#if PLATFORM(COCOA)
@@ -800,7 +800,7 @@
#if PLATFORM(COCOA)
return MediaSampleAVFObjC::createFromPixelBuffer(WTFMove(*pixelBuffer));
#elif USE(GSTREAMER)
- return MediaSampleGStreamer::createImageSample(WTFMove(*pixelBuffer));
+ return VideoFrameGStreamer::createFromPixelBuffer(WTFMove(*pixelBuffer));
#endif
#else
return nullptr;
Modified: trunk/Source/WebCore/platform/GStreamer.cmake (291356 => 291357)
--- trunk/Source/WebCore/platform/GStreamer.cmake 2022-03-16 17:10:37 UTC (rev 291356)
+++ trunk/Source/WebCore/platform/GStreamer.cmake 2022-03-16 17:33:02 UTC (rev 291357)
@@ -27,6 +27,7 @@
platform/graphics/gstreamer/TextCombinerPadGStreamer.cpp
platform/graphics/gstreamer/TextSinkGStreamer.cpp
platform/graphics/gstreamer/TrackPrivateBaseGStreamer.cpp
+ platform/graphics/gstreamer/VideoFrameGStreamer.cpp
platform/graphics/gstreamer/VideoFrameMetadataGStreamer.cpp
platform/graphics/gstreamer/VideoSinkGStreamer.cpp
platform/graphics/gstreamer/VideoTrackPrivateGStreamer.cpp
Modified: trunk/Source/WebCore/platform/graphics/gstreamer/MediaSampleGStreamer.cpp (291356 => 291357)
--- trunk/Source/WebCore/platform/graphics/gstreamer/MediaSampleGStreamer.cpp 2022-03-16 17:10:37 UTC (rev 291356)
+++ trunk/Source/WebCore/platform/graphics/gstreamer/MediaSampleGStreamer.cpp 2022-03-16 17:33:02 UTC (rev 291357)
@@ -23,10 +23,7 @@
#include "MediaSampleGStreamer.h"
#include "GStreamerCommon.h"
-#include "PixelBuffer.h"
#include "VideoFrameMetadataGStreamer.h"
-#include <JavaScriptCore/JSCInlines.h>
-#include <JavaScriptCore/TypedArrayInlines.h>
#include <algorithm>
#if ENABLE(VIDEO) && USE(GSTREAMER)
@@ -33,108 +30,15 @@
namespace WebCore {
-MediaSampleGStreamer::MediaSampleGStreamer(GRefPtr<GstSample>&& sample, const FloatSize& presentationSize, const AtomString& trackId, VideoRotation videoRotation, bool videoMirrored, std::optional<VideoFrameTimeMetadata>&& metadata)
+MediaSampleGStreamer::MediaSampleGStreamer(GRefPtr<GstSample>&& sample, const FloatSize& presentationSize, const AtomString& trackId)
: m_pts(MediaTime::zeroTime())
, m_dts(MediaTime::zeroTime())
, m_duration(MediaTime::zeroTime())
, m_trackId(trackId)
, m_presentationSize(presentationSize)
- , m_videoRotation(videoRotation)
- , m_videoMirrored(videoMirrored)
{
ASSERT(sample);
- GstBuffer* buffer = gst_sample_get_buffer(sample.get());
- RELEASE_ASSERT(buffer);
-
- if (metadata)
- buffer = webkitGstBufferSetVideoFrameTimeMetadata(buffer, WTFMove(metadata));
-
m_sample = sample;
- initializeFromBuffer();
-}
-
-MediaSampleGStreamer::MediaSampleGStreamer(const FloatSize& presentationSize, const AtomString& trackId)
- : m_pts(MediaTime::zeroTime())
- , m_dts(MediaTime::zeroTime())
- , m_duration(MediaTime::zeroTime())
- , m_trackId(trackId)
- , m_presentationSize(presentationSize)
-{
-}
-
-MediaSampleGStreamer::MediaSampleGStreamer(const GRefPtr<GstSample>& sample, VideoRotation videoRotation)
- : m_sample(sample)
- , m_videoRotation(videoRotation)
-{
- initializeFromBuffer();
-}
-
-Ref<MediaSampleGStreamer> MediaSampleGStreamer::createFakeSample(GstCaps*, MediaTime pts, MediaTime dts, MediaTime duration, const FloatSize& presentationSize, const AtomString& trackId)
-{
- MediaSampleGStreamer* gstreamerMediaSample = new MediaSampleGStreamer(presentationSize, trackId);
- gstreamerMediaSample->m_pts = pts;
- gstreamerMediaSample->m_dts = dts;
- gstreamerMediaSample->m_duration = duration;
- gstreamerMediaSample->m_flags = MediaSample::IsNonDisplaying;
- return adoptRef(*gstreamerMediaSample);
-}
-
-Ref<MediaSampleGStreamer> MediaSampleGStreamer::createImageSample(PixelBuffer&& pixelBuffer, const IntSize& destinationSize, double frameRate, VideoRotation videoRotation, bool videoMirrored, std::optional<VideoFrameTimeMetadata>&& metadata)
-{
- ensureGStreamerInitialized();
-
- auto size = pixelBuffer.size();
-
- auto data = pixelBuffer.takeData();
- auto sizeInBytes = data->byteLength();
- auto dataBaseAddress = data->data();
- auto leakedData = &data.leakRef();
-
- auto buffer = adoptGRef(gst_buffer_new_wrapped_full(GST_MEMORY_FLAG_READONLY, dataBaseAddress, sizeInBytes, 0, sizeInBytes, leakedData, [](gpointer userData) {
- static_cast<JSC::Uint8ClampedArray*>(userData)->deref();
- }));
-
- auto width = size.width();
- auto height = size.height();
- gst_buffer_add_video_meta(buffer.get(), GST_VIDEO_FRAME_FLAG_NONE, GST_VIDEO_FORMAT_BGRA, width, height);
-
- if (metadata)
- webkitGstBufferSetVideoFrameTimeMetadata(buffer.get(), *metadata);
-
- int frameRateNumerator, frameRateDenominator;
- gst_util_double_to_fraction(frameRate, &frameRateNumerator, &frameRateDenominator);
-
- auto caps = adoptGRef(gst_caps_new_simple("video/x-raw", "format", G_TYPE_STRING, "BGRA", "width", G_TYPE_INT, width,
- "height", G_TYPE_INT, height, "framerate", GST_TYPE_FRACTION, frameRateNumerator, frameRateDenominator, nullptr));
- auto sample = adoptGRef(gst_sample_new(buffer.get(), caps.get(), nullptr, nullptr));
-
- // Optionally resize the video frame to fit destinationSize. This code path is used mostly by
- // the mock realtime video source when the gUM constraints specifically required exact width
- // and/or height values.
- if (!destinationSize.isZero()) {
- GstVideoInfo inputInfo;
- gst_video_info_from_caps(&inputInfo, caps.get());
-
- width = destinationSize.width();
- height = destinationSize.height();
- auto outputCaps = adoptGRef(gst_caps_new_simple("video/x-raw", "format", G_TYPE_STRING, "BGRA", "width", G_TYPE_INT, width,
- "height", G_TYPE_INT, height, "framerate", GST_TYPE_FRACTION, frameRateNumerator, frameRateDenominator, nullptr));
- GstVideoInfo outputInfo;
- gst_video_info_from_caps(&outputInfo, outputCaps.get());
-
- auto outputBuffer = adoptGRef(gst_buffer_new_allocate(nullptr, GST_VIDEO_INFO_SIZE(&outputInfo), nullptr));
- gst_buffer_add_video_meta(outputBuffer.get(), GST_VIDEO_FRAME_FLAG_NONE, GST_VIDEO_FORMAT_BGRA, width, height);
- GUniquePtr<GstVideoConverter> converter(gst_video_converter_new(&inputInfo, &outputInfo, nullptr));
- GstMappedFrame inputFrame(gst_sample_get_buffer(sample.get()), inputInfo, GST_MAP_READ);
- GstMappedFrame outputFrame(outputBuffer.get(), outputInfo, GST_MAP_WRITE);
- gst_video_converter_frame(converter.get(), inputFrame.get(), outputFrame.get());
- sample = adoptGRef(gst_sample_new(outputBuffer.get(), outputCaps.get(), nullptr, nullptr));
- }
- return create(WTFMove(sample), FloatSize(width, height), { }, videoRotation, videoMirrored);
-}
-
-void MediaSampleGStreamer::initializeFromBuffer()
-{
const GstClockTime minimumDuration = 1000; // 1 us
auto* buffer = gst_sample_get_buffer(m_sample.get());
RELEASE_ASSERT(buffer);
@@ -168,38 +72,23 @@
m_flags = static_cast<MediaSample::SampleFlags>(m_flags | MediaSample::IsNonDisplaying);
}
-RefPtr<JSC::Uint8ClampedArray> MediaSampleGStreamer::getRGBAImageData() const
+MediaSampleGStreamer::MediaSampleGStreamer(const FloatSize& presentationSize, const AtomString& trackId)
+ : m_pts(MediaTime::zeroTime())
+ , m_dts(MediaTime::zeroTime())
+ , m_duration(MediaTime::zeroTime())
+ , m_trackId(trackId)
+ , m_presentationSize(presentationSize)
{
- auto* caps = gst_sample_get_caps(m_sample.get());
- GstVideoInfo inputInfo;
- if (!gst_video_info_from_caps(&inputInfo, caps))
- return nullptr;
+}
- // We could check the input format is RGBA before attempting a conversion, but it is very
- // unlikely to pay off. The input format is likely to be BGRA (when the samples are created as a
- // result of mediastream captureStream) or some YUV format if the sample is from a video capture
- // device. This method is called only by internals during layout tests, it is thus not critical
- // to optimize this code path.
-
- auto outputCaps = adoptGRef(gst_caps_copy(caps));
- gst_caps_set_simple(outputCaps.get(), "format", G_TYPE_STRING, "RGBA", nullptr);
-
- GstVideoInfo outputInfo;
- if (!gst_video_info_from_caps(&outputInfo, outputCaps.get()))
- return nullptr;
-
- int width = GST_VIDEO_INFO_WIDTH(&inputInfo);
- int height = GST_VIDEO_INFO_HEIGHT(&inputInfo);
- unsigned byteLength = GST_VIDEO_INFO_SIZE(&inputInfo);
- auto bufferStorage = JSC::ArrayBuffer::create(width * height, 4);
- auto outputBuffer = adoptGRef(gst_buffer_new_wrapped_full(GST_MEMORY_FLAG_NO_SHARE, bufferStorage->data(), byteLength, 0, byteLength, nullptr, [](gpointer) { }));
- gst_buffer_add_video_meta(outputBuffer.get(), GST_VIDEO_FRAME_FLAG_NONE, GST_VIDEO_FORMAT_RGBA, width, height);
- GstMappedFrame outputFrame(outputBuffer.get(), outputInfo, GST_MAP_WRITE);
-
- GUniquePtr<GstVideoConverter> converter(gst_video_converter_new(&inputInfo, &outputInfo, nullptr));
- GstMappedFrame inputFrame(gst_sample_get_buffer(m_sample.get()), inputInfo, GST_MAP_READ);
- gst_video_converter_frame(converter.get(), inputFrame.get(), outputFrame.get());
- return JSC::Uint8ClampedArray::tryCreate(WTFMove(bufferStorage), 0, byteLength);
+Ref<MediaSampleGStreamer> MediaSampleGStreamer::createFakeSample(GstCaps*, const MediaTime& pts, const MediaTime& dts, const MediaTime& duration, const FloatSize& presentationSize, const AtomString& trackId)
+{
+ MediaSampleGStreamer* gstreamerMediaSample = new MediaSampleGStreamer(presentationSize, trackId);
+ gstreamerMediaSample->m_pts = pts;
+ gstreamerMediaSample->m_dts = dts;
+ gstreamerMediaSample->m_duration = duration;
+ gstreamerMediaSample->m_flags = MediaSample::IsNonDisplaying;
+ return adoptRef(*gstreamerMediaSample);
}
void MediaSampleGStreamer::extendToTheBeginning()
Modified: trunk/Source/WebCore/platform/graphics/gstreamer/MediaSampleGStreamer.h (291356 => 291357)
--- trunk/Source/WebCore/platform/graphics/gstreamer/MediaSampleGStreamer.h 2022-03-16 17:10:37 UTC (rev 291356)
+++ trunk/Source/WebCore/platform/graphics/gstreamer/MediaSampleGStreamer.h 2022-03-16 17:33:02 UTC (rev 291357)
@@ -31,23 +31,15 @@
namespace WebCore {
-class PixelBuffer;
-
class MediaSampleGStreamer : public MediaSample {
public:
- static Ref<MediaSampleGStreamer> create(GRefPtr<GstSample>&& sample, const FloatSize& presentationSize, const AtomString& trackId, VideoRotation videoRotation = VideoRotation::None, bool videoMirrored = false, std::optional<VideoFrameTimeMetadata>&& metadata = std::nullopt)
+ static Ref<MediaSampleGStreamer> create(GRefPtr<GstSample>&& sample, const FloatSize& presentationSize, const AtomString& trackId)
{
- return adoptRef(*new MediaSampleGStreamer(WTFMove(sample), presentationSize, trackId, videoRotation, videoMirrored, WTFMove(metadata)));
+ return adoptRef(*new MediaSampleGStreamer(WTFMove(sample), presentationSize, trackId));
}
- static Ref<MediaSampleGStreamer> createWrappedSample(const GRefPtr<GstSample>& sample, VideoRotation videoRotation = VideoRotation::None)
- {
- return adoptRef(*new MediaSampleGStreamer(sample, videoRotation));
- }
+ static Ref<MediaSampleGStreamer> createFakeSample(GstCaps*, const MediaTime& pts, const MediaTime& dts, const MediaTime& duration, const FloatSize& presentationSize, const AtomString& trackId);
- static Ref<MediaSampleGStreamer> createFakeSample(GstCaps*, MediaTime pts, MediaTime dts, MediaTime duration, const FloatSize& presentationSize, const AtomString& trackId);
- static Ref<MediaSampleGStreamer> createImageSample(PixelBuffer&&, const IntSize& destinationSize = { }, double frameRate = 1, VideoRotation videoRotation = VideoRotation::None, bool videoMirrored = false, std::optional<VideoFrameTimeMetadata>&& metadata = std::nullopt);
-
void extendToTheBeginning();
MediaTime presentationTime() const override { return m_pts; }
MediaTime decodeTime() const override { return m_dts; }
@@ -62,20 +54,14 @@
PlatformSample platformSample() const override;
PlatformSample::Type platformSampleType() const override { return PlatformSample::GStreamerSampleType; }
void dump(PrintStream&) const override;
- RefPtr<JSC::Uint8ClampedArray> getRGBAImageData() const final;
- VideoRotation videoRotation() const override { return m_videoRotation; }
- bool videoMirrored() const override { return m_videoMirrored; }
protected:
- MediaSampleGStreamer(GRefPtr<GstSample>&&, const FloatSize& presentationSize, const AtomString& trackId, VideoRotation = VideoRotation::None, bool videoMirrored = false, std::optional<VideoFrameTimeMetadata>&& = std::nullopt);
- MediaSampleGStreamer(const GRefPtr<GstSample>&, VideoRotation = VideoRotation::None);
+ MediaSampleGStreamer(GRefPtr<GstSample>&&, const FloatSize& presentationSize, const AtomString& trackId);
virtual ~MediaSampleGStreamer() = default;
private:
MediaSampleGStreamer(const FloatSize& presentationSize, const AtomString& trackId);
- void initializeFromBuffer();
-
MediaTime m_pts;
MediaTime m_dts;
MediaTime m_duration;
@@ -84,8 +70,6 @@
GRefPtr<GstSample> m_sample;
FloatSize m_presentationSize;
MediaSample::SampleFlags m_flags { MediaSample::IsSync };
- VideoRotation m_videoRotation { VideoRotation::None };
- bool m_videoMirrored { false };
};
} // namespace WebCore.
Added: trunk/Source/WebCore/platform/graphics/gstreamer/VideoFrameGStreamer.cpp (0 => 291357)
--- trunk/Source/WebCore/platform/graphics/gstreamer/VideoFrameGStreamer.cpp (rev 0)
+++ trunk/Source/WebCore/platform/graphics/gstreamer/VideoFrameGStreamer.cpp 2022-03-16 17:33:02 UTC (rev 291357)
@@ -0,0 +1,143 @@
+/*
+ * Copyright (C) 2022 Igalia S.L
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public License
+ * along with this library; see the file COPYING.LIB. If not, write to
+ * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+
+#include "config.h"
+#include "VideoFrameGStreamer.h"
+
+#include "GStreamerCommon.h"
+#include "PixelBuffer.h"
+#include <JavaScriptCore/JSCInlines.h>
+#include <JavaScriptCore/TypedArrayInlines.h>
+
+#if ENABLE(VIDEO) && USE(GSTREAMER)
+
+namespace WebCore {
+
+Ref<VideoFrameGStreamer> VideoFrameGStreamer::createFromPixelBuffer(PixelBuffer&& pixelBuffer, const MediaTime& presentationTime, const IntSize& destinationSize, double frameRate, VideoRotation videoRotation, bool videoMirrored, std::optional<VideoFrameTimeMetadata>&& metadata)
+{
+ ensureGStreamerInitialized();
+
+ auto size = pixelBuffer.size();
+
+ auto data = pixelBuffer.takeData();
+ auto sizeInBytes = data->byteLength();
+ auto dataBaseAddress = data->data();
+ auto leakedData = &data.leakRef();
+
+ auto buffer = adoptGRef(gst_buffer_new_wrapped_full(GST_MEMORY_FLAG_READONLY, dataBaseAddress, sizeInBytes, 0, sizeInBytes, leakedData, [](gpointer userData) {
+ static_cast<JSC::Uint8ClampedArray*>(userData)->deref();
+ }));
+
+ auto width = size.width();
+ auto height = size.height();
+ gst_buffer_add_video_meta(buffer.get(), GST_VIDEO_FRAME_FLAG_NONE, GST_VIDEO_FORMAT_BGRA, width, height);
+
+ if (metadata)
+ webkitGstBufferSetVideoFrameTimeMetadata(buffer.get(), *metadata);
+
+ int frameRateNumerator, frameRateDenominator;
+ gst_util_double_to_fraction(frameRate, &frameRateNumerator, &frameRateDenominator);
+
+ auto caps = adoptGRef(gst_caps_new_simple("video/x-raw", "format", G_TYPE_STRING, "BGRA", "width", G_TYPE_INT, width,
+ "height", G_TYPE_INT, height, "framerate", GST_TYPE_FRACTION, frameRateNumerator, frameRateDenominator, nullptr));
+ auto sample = adoptGRef(gst_sample_new(buffer.get(), caps.get(), nullptr, nullptr));
+
+ // Optionally resize the video frame to fit destinationSize. This code path is used mostly by
+ // the mock realtime video source when the gUM constraints specifically required exact width
+ // and/or height values.
+ if (!destinationSize.isZero()) {
+ GstVideoInfo inputInfo;
+ gst_video_info_from_caps(&inputInfo, caps.get());
+
+ width = destinationSize.width();
+ height = destinationSize.height();
+ auto outputCaps = adoptGRef(gst_caps_new_simple("video/x-raw", "format", G_TYPE_STRING, "BGRA", "width", G_TYPE_INT, width,
+ "height", G_TYPE_INT, height, "framerate", GST_TYPE_FRACTION, frameRateNumerator, frameRateDenominator, nullptr));
+ GstVideoInfo outputInfo;
+ gst_video_info_from_caps(&outputInfo, outputCaps.get());
+
+ auto outputBuffer = adoptGRef(gst_buffer_new_allocate(nullptr, GST_VIDEO_INFO_SIZE(&outputInfo), nullptr));
+ gst_buffer_add_video_meta(outputBuffer.get(), GST_VIDEO_FRAME_FLAG_NONE, GST_VIDEO_FORMAT_BGRA, width, height);
+ GUniquePtr<GstVideoConverter> converter(gst_video_converter_new(&inputInfo, &outputInfo, nullptr));
+ GstMappedFrame inputFrame(gst_sample_get_buffer(sample.get()), inputInfo, GST_MAP_READ);
+ GstMappedFrame outputFrame(outputBuffer.get(), outputInfo, GST_MAP_WRITE);
+ gst_video_converter_frame(converter.get(), inputFrame.get(), outputFrame.get());
+ sample = adoptGRef(gst_sample_new(outputBuffer.get(), outputCaps.get(), nullptr, nullptr));
+ }
+
+ return adoptRef(*new VideoFrameGStreamer(WTFMove(sample), FloatSize(width, height), presentationTime, videoRotation, videoMirrored, WTFMove(metadata)));
+}
+
+VideoFrameGStreamer::VideoFrameGStreamer(GRefPtr<GstSample>&& sample, const FloatSize& presentationSize, const MediaTime& presentationTime, VideoRotation videoRotation, bool videoMirrored, std::optional<VideoFrameTimeMetadata>&& metadata)
+ : VideoFrame(presentationTime, videoMirrored, videoRotation)
+ , m_sample(WTFMove(sample))
+ , m_presentationSize(presentationSize)
+{
+ ASSERT(m_sample);
+ GstBuffer* buffer = gst_sample_get_buffer(m_sample.get());
+ RELEASE_ASSERT(buffer);
+
+ if (metadata)
+ buffer = webkitGstBufferSetVideoFrameTimeMetadata(buffer, WTFMove(metadata));
+}
+
+VideoFrameGStreamer::VideoFrameGStreamer(const GRefPtr<GstSample>& sample, const MediaTime& presentationTime, VideoRotation videoRotation)
+ : VideoFrame(presentationTime, false, videoRotation)
+ , m_sample(sample)
+{
+}
+
+RefPtr<JSC::Uint8ClampedArray> VideoFrameGStreamer::getRGBAImageData() const
+{
+ auto* caps = gst_sample_get_caps(m_sample.get());
+ GstVideoInfo inputInfo;
+ if (!gst_video_info_from_caps(&inputInfo, caps))
+ return nullptr;
+
+ // We could check the input format is RGBA before attempting a conversion, but it is very
+ // unlikely to pay off. The input format is likely to be BGRA (when the samples are created as a
+ // result of mediastream captureStream) or some YUV format if the sample is from a video capture
+ // device. This method is called only by internals during layout tests, it is thus not critical
+ // to optimize this code path.
+
+ auto outputCaps = adoptGRef(gst_caps_copy(caps));
+ gst_caps_set_simple(outputCaps.get(), "format", G_TYPE_STRING, "RGBA", nullptr);
+
+ GstVideoInfo outputInfo;
+ if (!gst_video_info_from_caps(&outputInfo, outputCaps.get()))
+ return nullptr;
+
+ int width = GST_VIDEO_INFO_WIDTH(&inputInfo);
+ int height = GST_VIDEO_INFO_HEIGHT(&inputInfo);
+ unsigned byteLength = GST_VIDEO_INFO_SIZE(&inputInfo);
+ auto bufferStorage = JSC::ArrayBuffer::create(width * height, 4);
+ auto outputBuffer = adoptGRef(gst_buffer_new_wrapped_full(GST_MEMORY_FLAG_NO_SHARE, bufferStorage->data(), byteLength, 0, byteLength, nullptr, [](gpointer) { }));
+ gst_buffer_add_video_meta(outputBuffer.get(), GST_VIDEO_FRAME_FLAG_NONE, GST_VIDEO_FORMAT_RGBA, width, height);
+ GstMappedFrame outputFrame(outputBuffer.get(), outputInfo, GST_MAP_WRITE);
+
+ GUniquePtr<GstVideoConverter> converter(gst_video_converter_new(&inputInfo, &outputInfo, nullptr));
+ GstMappedFrame inputFrame(gst_sample_get_buffer(m_sample.get()), inputInfo, GST_MAP_READ);
+ gst_video_converter_frame(converter.get(), inputFrame.get(), outputFrame.get());
+ return JSC::Uint8ClampedArray::tryCreate(WTFMove(bufferStorage), 0, byteLength);
+}
+
+} // namespace WebCore
+
+#endif // ENABLE(VIDEO) && USE(GSTREAMER)
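
For reference, a hedged usage sketch of createFromPixelBuffer(): the non-default destinationSize below deliberately triggers the gst_video_converter_new() resize branch described in the comment above. It assumes a BGRA WebCore::PixelBuffer named pixelBuffer is in scope; all concrete values are illustrative only.

```cpp
// Initialize the optional before writing through it, then tag the capture time.
std::optional<VideoFrameTimeMetadata> metadata = VideoFrameTimeMetadata { };
metadata->captureTime = MonotonicTime::now().secondsSinceEpoch();

auto videoFrame = VideoFrameGStreamer::createFromPixelBuffer(WTFMove(pixelBuffer),
    MediaTime::createWithDouble(0.5),   // presentationTime
    IntSize { 640, 480 },               // destinationSize: forces the resize branch
    30,                                 // frameRate, stored in the caps as a fraction
    MediaSample::VideoRotation::None,
    false,                              // videoMirrored
    WTFMove(metadata));
```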
Added: trunk/Source/WebCore/platform/graphics/gstreamer/VideoFrameGStreamer.h (0 => 291357)
--- trunk/Source/WebCore/platform/graphics/gstreamer/VideoFrameGStreamer.h (rev 0)
+++ trunk/Source/WebCore/platform/graphics/gstreamer/VideoFrameGStreamer.h 2022-03-16 17:33:02 UTC (rev 291357)
@@ -0,0 +1,60 @@
+/*
+ * Copyright (C) 2022 Igalia S.L
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public License
+ * along with this library; see the file COPYING.LIB. If not, write to
+ * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#pragma once
+
+#if ENABLE(VIDEO) && USE(GSTREAMER)
+
+#include "VideoFrame.h"
+#include "VideoFrameMetadataGStreamer.h"
+
+namespace WebCore {
+
+class PixelBuffer;
+
+class VideoFrameGStreamer final : public VideoFrame {
+public:
+ static Ref<VideoFrameGStreamer> create(GRefPtr<GstSample>&& sample, const FloatSize& presentationSize, const MediaTime& presentationTime = MediaTime::invalidTime(), VideoRotation videoRotation = VideoRotation::None, bool videoMirrored = false, std::optional<VideoFrameTimeMetadata>&& metadata = std::nullopt)
+ {
+ return adoptRef(*new VideoFrameGStreamer(WTFMove(sample), presentationSize, presentationTime, videoRotation, videoMirrored, WTFMove(metadata)));
+ }
+
+ static Ref<VideoFrameGStreamer> createWrappedSample(const GRefPtr<GstSample>& sample, const MediaTime& presentationTime, VideoRotation videoRotation = VideoRotation::None)
+ {
+ return adoptRef(*new VideoFrameGStreamer(sample, presentationTime, videoRotation));
+ }
+
+ static Ref<VideoFrameGStreamer> createFromPixelBuffer(PixelBuffer&&, const MediaTime& presentationTime = MediaTime::invalidTime(), const IntSize& destinationSize = { }, double frameRate = 1, VideoRotation videoRotation = VideoRotation::None, bool videoMirrored = false, std::optional<VideoFrameTimeMetadata>&& metadata = std::nullopt);
+
+ GstSample* sample() const { return m_sample.get(); }
+
+private:
+ VideoFrameGStreamer(GRefPtr<GstSample>&&, const FloatSize& presentationSize, const MediaTime& presentationTime = MediaTime::invalidTime(), VideoRotation = VideoRotation::None, bool videoMirrored = false, std::optional<VideoFrameTimeMetadata>&& = std::nullopt);
+ VideoFrameGStreamer(const GRefPtr<GstSample>&, const MediaTime& presentationTime, VideoRotation = VideoRotation::None);
+
+ FloatSize presentationSize() const final { return m_presentationSize; }
+ RefPtr<JSC::Uint8ClampedArray> getRGBAImageData() const final;
+
+ GRefPtr<GstSample> m_sample;
+ FloatSize m_presentationSize;
+};
+
+} // namespace WebCore
+
+#endif // ENABLE(VIDEO) && USE(GSTREAMER)
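
getRGBAImageData() is only exercised by testing internals during layout tests; a short sketch of what a caller sees, assuming videoFrame holds a frame in any raw format (item() is standard JSC typed-array API):

```cpp
if (auto pixels = videoFrame->getRGBAImageData()) {
    // Pixels are converted to tightly packed RGBA regardless of the source format.
    g_print("first pixel RGBA: %u %u %u %u\n",
        pixels->item(0), pixels->item(1), pixels->item(2), pixels->item(3));
}
```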
Modified: trunk/Source/WebCore/platform/graphics/texmap/GraphicsContextGLTextureMapper.cpp (291356 => 291357)
--- trunk/Source/WebCore/platform/graphics/texmap/GraphicsContextGLTextureMapper.cpp 2022-03-16 17:10:37 UTC (rev 291356)
+++ trunk/Source/WebCore/platform/graphics/texmap/GraphicsContextGLTextureMapper.cpp 2022-03-16 17:33:02 UTC (rev 291357)
@@ -63,7 +63,7 @@
#endif
#if USE(GSTREAMER) && ENABLE(MEDIA_STREAM)
-#include "MediaSampleGStreamer.h"
+#include "VideoFrameGStreamer.h"
#endif
#if ENABLE(VIDEO)
@@ -122,7 +122,7 @@
{
#if USE(GSTREAMER)
if (auto pixelBuffer = readCompositedResults())
- return MediaSampleGStreamer::createImageSample(WTFMove(*pixelBuffer));
+ return VideoFrameGStreamer::createFromPixelBuffer(WTFMove(*pixelBuffer));
#endif
return nullptr;
}
Modified: trunk/Source/WebCore/platform/mediastream/gstreamer/GStreamerMediaStreamSource.cpp (291356 => 291357)
--- trunk/Source/WebCore/platform/mediastream/gstreamer/GStreamerMediaStreamSource.cpp 2022-03-16 17:10:37 UTC (rev 291356)
+++ trunk/Source/WebCore/platform/mediastream/gstreamer/GStreamerMediaStreamSource.cpp 2022-03-16 17:33:02 UTC (rev 291357)
@@ -29,8 +29,8 @@
#include "AudioTrackPrivateMediaStream.h"
#include "GStreamerAudioData.h"
#include "GStreamerCommon.h"
-#include "MediaSampleGStreamer.h"
#include "MediaStreamPrivate.h"
+#include "VideoFrameGStreamer.h"
#include "VideoFrameMetadataGStreamer.h"
#include "VideoTrackPrivateMediaStream.h"
@@ -283,7 +283,6 @@
if (!m_configuredSize.height())
m_configuredSize.setHeight(captureSize.height());
- auto* mediaSample = static_cast<MediaSampleGStreamer*>(&sample);
auto videoRotation = sample.videoRotation();
bool videoMirrored = sample.videoMirrored();
if (m_videoRotation != videoRotation || m_videoMirrored != videoMirrored) {
@@ -296,7 +295,8 @@
gst_pad_push_event(pad.get(), gst_event_new_tag(gst_tag_list_new(GST_TAG_IMAGE_ORIENTATION, orientation.utf8().data(), nullptr)));
}
- auto* gstSample = mediaSample->platformSample().sample.gstSample;
+ auto* videoFrame = static_cast<VideoFrameGStreamer*>(&sample);
+ auto* gstSample = videoFrame->sample();
if (!m_configuredSize.isEmpty() && m_lastKnownSize != m_configuredSize) {
m_lastKnownSize = m_configuredSize;
updateBlackFrame(gst_sample_get_caps(gstSample));
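
The static_cast above is safe because, on the GStreamer port, every video MediaSample reaching webkitmediastreamsrc is a VideoFrameGStreamer. A sketch making that assumption explicit (pushVideoSample is a hypothetical helper, not code from this patch):

```cpp
void pushVideoSample(WebCore::MediaSample& sample, GstPad* pad)
{
    ASSERT(sample.platformSampleType() == WebCore::PlatformSample::GStreamerSampleType);
    auto* videoFrame = static_cast<WebCore::VideoFrameGStreamer*>(&sample);
    // Push the frame's buffer downstream; the caps travel separately via the pad.
    gst_pad_push(pad, gst_buffer_ref(gst_sample_get_buffer(videoFrame->sample())));
}
```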
Modified: trunk/Source/WebCore/platform/mediastream/gstreamer/GStreamerVideoCaptureSource.cpp (291356 => 291357)
--- trunk/Source/WebCore/platform/mediastream/gstreamer/GStreamerVideoCaptureSource.cpp 2022-03-16 17:10:37 UTC (rev 291356)
+++ trunk/Source/WebCore/platform/mediastream/gstreamer/GStreamerVideoCaptureSource.cpp 2022-03-16 17:33:02 UTC (rev 291357)
@@ -27,7 +27,6 @@
#include "DisplayCaptureManager.h"
#include "GStreamerCaptureDeviceManager.h"
-#include "MediaSampleGStreamer.h"
#include <gst/app/gstappsink.h>
@@ -191,21 +190,22 @@
m_capturer->play();
}
-void GStreamerVideoCaptureSource::processNewFrame(Ref<MediaSample>&& sample)
+void GStreamerVideoCaptureSource::processNewFrame(Ref<VideoFrameGStreamer>&& videoFrame)
{
if (!isProducingData() || muted())
return;
- dispatchMediaSampleToObservers(WTFMove(sample), { });
+ dispatchMediaSampleToObservers(WTFMove(videoFrame), { });
}
GstFlowReturn GStreamerVideoCaptureSource::newSampleCallback(GstElement* sink, GStreamerVideoCaptureSource* source)
{
auto gstSample = adoptGRef(gst_app_sink_pull_sample(GST_APP_SINK(sink)));
- auto mediaSample = MediaSampleGStreamer::create(WTFMove(gstSample), WebCore::FloatSize(), String());
+ auto presentationTime = fromGstClockTime(GST_BUFFER_PTS(gst_sample_get_buffer(gstSample.get())));
+ auto videoFrame = VideoFrameGStreamer::create(WTFMove(gstSample), WebCore::FloatSize(), presentationTime);
- source->scheduleDeferredTask([source, sample = WTFMove(mediaSample)] () mutable {
- source->processNewFrame(WTFMove(sample));
+ source->scheduleDeferredTask([source, videoFrame = WTFMove(videoFrame)] () mutable {
+ source->processNewFrame(WTFMove(videoFrame));
});
return GST_FLOW_OK;
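
For context, newSampleCallback() is wired to the capture pipeline's appsink with the standard GStreamer signal machinery, roughly as below (the actual connection lives in the capturer, outside this diff):

```cpp
// "new-sample" is only emitted when emit-signals is enabled on the appsink.
g_object_set(sink, "emit-signals", TRUE, nullptr);
g_signal_connect(sink, "new-sample",
    G_CALLBACK(GStreamerVideoCaptureSource::newSampleCallback), source);
```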
Modified: trunk/Source/WebCore/platform/mediastream/gstreamer/GStreamerVideoCaptureSource.h (291356 => 291357)
--- trunk/Source/WebCore/platform/mediastream/gstreamer/GStreamerVideoCaptureSource.h 2022-03-16 17:10:37 UTC (rev 291356)
+++ trunk/Source/WebCore/platform/mediastream/gstreamer/GStreamerVideoCaptureSource.h 2022-03-16 17:33:02 UTC (rev 291357)
@@ -26,6 +26,7 @@
#include "CaptureDevice.h"
#include "GStreamerVideoCapturer.h"
#include "RealtimeVideoCaptureSource.h"
+#include "VideoFrameGStreamer.h"
namespace WebCore {
@@ -42,7 +43,7 @@
const RealtimeMediaSourceSettings& settings() override;
GstElement* pipeline() { return m_capturer->pipeline(); }
GStreamerCapturer* capturer() { return m_capturer.get(); }
- void processNewFrame(Ref<MediaSample>&&);
+ void processNewFrame(Ref<VideoFrameGStreamer>&&);
// GStreamerCapturer::Observer
void sourceCapsChanged(const GstCaps*) final;
Modified: trunk/Source/WebCore/platform/mediastream/gstreamer/MockRealtimeVideoSourceGStreamer.cpp (291356 => 291357)
--- trunk/Source/WebCore/platform/mediastream/gstreamer/MockRealtimeVideoSourceGStreamer.cpp 2022-03-16 17:10:37 UTC (rev 291356)
+++ trunk/Source/WebCore/platform/mediastream/gstreamer/MockRealtimeVideoSourceGStreamer.cpp 2022-03-16 17:33:02 UTC (rev 291357)
@@ -26,9 +26,9 @@
#if ENABLE(MEDIA_STREAM) && USE(GSTREAMER)
#include "MockRealtimeVideoSourceGStreamer.h"
-#include "MediaSampleGStreamer.h"
#include "MockRealtimeMediaSourceCenter.h"
#include "PixelBuffer.h"
+#include "VideoFrameGStreamer.h"
namespace WebCore {
@@ -164,9 +164,9 @@
std::optional<VideoFrameTimeMetadata> metadata;
metadata->captureTime = MonotonicTime::now().secondsSinceEpoch();
- auto sample = MediaSampleGStreamer::createImageSample(WTFMove(*pixelBuffer), size(), frameRate(), sampleRotation(), false, WTFMove(metadata));
- sample->offsetTimestampsBy(MediaTime::createWithDouble((elapsedTime() + 100_ms).seconds()));
- dispatchMediaSampleToObservers(sample.get(), { });
+ auto presentationTime = MediaTime::createWithDouble((elapsedTime() + 100_ms).seconds());
+ auto videoFrame = VideoFrameGStreamer::createFromPixelBuffer(WTFMove(*pixelBuffer), presentationTime, size(), frameRate(), sampleRotation(), false, WTFMove(metadata));
+ dispatchMediaSampleToObservers(videoFrame.get(), { });
}
} // namespace WebCore
Modified: trunk/Source/WebCore/platform/mediastream/libwebrtc/gstreamer/RealtimeIncomingVideoSourceLibWebRTC.cpp (291356 => 291357)
--- trunk/Source/WebCore/platform/mediastream/libwebrtc/gstreamer/RealtimeIncomingVideoSourceLibWebRTC.cpp 2022-03-16 17:10:37 UTC (rev 291356)
+++ trunk/Source/WebCore/platform/mediastream/libwebrtc/gstreamer/RealtimeIncomingVideoSourceLibWebRTC.cpp 2022-03-16 17:33:02 UTC (rev 291357)
@@ -32,7 +32,7 @@
#include "RealtimeIncomingVideoSourceLibWebRTC.h"
#include "GStreamerVideoFrameLibWebRTC.h"
-#include "MediaSampleGStreamer.h"
+#include "VideoFrameGStreamer.h"
namespace WebCore {
@@ -58,13 +58,14 @@
if (!isProducingData())
return;
+ auto presentationTime = fromGstClockTime(frame.timestamp_us());
if (frame.video_frame_buffer()->type() == webrtc::VideoFrameBuffer::Type::kNative) {
auto* framebuffer = static_cast<GStreamerVideoFrameLibWebRTC*>(frame.video_frame_buffer().get());
- videoSampleAvailable(MediaSampleGStreamer::createWrappedSample(framebuffer->getSample(), static_cast<MediaSample::VideoRotation>(frame.rotation())), { });
+ videoSampleAvailable(VideoFrameGStreamer::createWrappedSample(framebuffer->getSample(), presentationTime, static_cast<MediaSample::VideoRotation>(frame.rotation())), { });
} else {
auto gstSample = convertLibWebRTCVideoFrameToGStreamerSample(frame);
auto metadata = std::make_optional(metadataFromVideoFrame(frame));
- videoSampleAvailable(MediaSampleGStreamer::create(WTFMove(gstSample), { }, { }, static_cast<MediaSample::VideoRotation>(frame.rotation()), false, WTFMove(metadata)), { });
+ videoSampleAvailable(VideoFrameGStreamer::create(WTFMove(gstSample), { }, presentationTime, static_cast<MediaSample::VideoRotation>(frame.rotation()), false, WTFMove(metadata)), { });
}
}
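
The static_cast<MediaSample::VideoRotation>(frame.rotation()) above relies on the two enums sharing numeric values; a compile-time check of that assumption would look like this:

```cpp
// webrtc::VideoRotation encodes degrees directly (0, 90, 180, 270), matching
// MediaSample::VideoRotation { None = 0, Right = 90, UpsideDown = 180, Left = 270 }.
static_assert(static_cast<int>(webrtc::kVideoRotation_90)
    == static_cast<int>(WebCore::MediaSample::VideoRotation::Right), "rotation enums must align");
```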
Modified: trunk/Source/WebCore/platform/mediastream/libwebrtc/gstreamer/RealtimeOutgoingVideoSourceLibWebRTC.cpp (291356 => 291357)
--- trunk/Source/WebCore/platform/mediastream/libwebrtc/gstreamer/RealtimeOutgoingVideoSourceLibWebRTC.cpp 2022-03-16 17:10:37 UTC (rev 291356)
+++ trunk/Source/WebCore/platform/mediastream/libwebrtc/gstreamer/RealtimeOutgoingVideoSourceLibWebRTC.cpp 2022-03-16 17:33:02 UTC (rev 291357)
@@ -31,7 +31,7 @@
#include "RealtimeOutgoingVideoSourceLibWebRTC.h"
#include "GStreamerVideoFrameLibWebRTC.h"
-#include "MediaSampleGStreamer.h"
+#include "VideoFrameGStreamer.h"
namespace WebCore {
@@ -67,9 +67,8 @@
break;
}
- ASSERT(sample.platformSample().type == PlatformSample::GStreamerSampleType);
- auto& mediaSample = static_cast<MediaSampleGStreamer&>(sample);
- auto frameBuffer = GStreamerVideoFrameLibWebRTC::create(mediaSample.platformSample().sample.gstSample);
+ auto& videoFrame = static_cast<VideoFrameGStreamer&>(sample);
+ auto frameBuffer = GStreamerVideoFrameLibWebRTC::create(videoFrame.sample());
sendFrame(WTFMove(frameBuffer));
}