Modified: trunk/Source/WebCore/ChangeLog (271196 => 271197)
--- trunk/Source/WebCore/ChangeLog 2021-01-06 09:12:16 UTC (rev 271196)
+++ trunk/Source/WebCore/ChangeLog 2021-01-06 09:56:37 UTC (rev 271197)
@@ -1,3 +1,44 @@
+2021-01-06 Philippe Normand <[email protected]>
+
+ REGRESSION(r270947) [GStreamer]: Deadlocks audio rendering
+ https://bugs.webkit.org/show_bug.cgi?id=220112
+
+ Reviewed by Chris Dumez.
+
+ This patch fixes the regression by making the webaudiosrc element wait on the
+ dispatch condition only when the dispatched render job did not complete
+ synchronously. Additionally, several race conditions in the AudioDestination
+ are fixed by ensuring the start and stop completion handlers are invoked only
+ after the corresponding pipeline state has been reached. The AudioDestination
+ also now correctly notifies its parent node of is-playing changes.
+
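+ For context, the deadlock fix in webKitWebAudioSrcRenderIteration (see the
+ hunks below) boils down to a guarded condition wait: the GStreamer task
+ thread must not block on the dispatch condition when the dispatched render
+ job already ran synchronously. A minimal sketch of the pattern follows, with
+ simplified, hypothetical names (renderAndPush stands in for the real buffer
+ rendering and pushing work); it is not the actual element code:
+
+     #include <wtf/Condition.h>
+     #include <wtf/Function.h>
+     #include <wtf/Lock.h>
+
+     static Lock dispatchLock;
+     static Condition dispatchCondition;
+     static bool dispatchDone { false };
+
+     void renderAndPush(); // Hypothetical render work.
+
+     // Runs on the GStreamer task thread.
+     void renderIteration(Function<void(Function<void()>&&)>& dispatchToRenderThread)
+     {
+         {
+             LockHolder lock(dispatchLock);
+             dispatchDone = false;
+         }
+
+         dispatchToRenderThread([] {
+             renderAndPush(); // May run inline (synchronously) or on the render thread.
+             LockHolder lock(dispatchLock);
+             dispatchDone = true;
+             dispatchCondition.notifyOne();
+         });
+
+         LockHolder lock(dispatchLock);
+         // The fix: wait only if the dispatch has not already completed.
+         // Waiting unconditionally deadlocks when the job ran inline, because
+         // the notification fired before this thread started waiting.
+         if (!dispatchDone)
+             dispatchCondition.wait(dispatchLock);
+     }
+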
+ * platform/audio/gstreamer/AudioDestinationGStreamer.cpp:
+ (WebCore::AudioDestinationGStreamer::AudioDestinationGStreamer):
+ (WebCore::AudioDestinationGStreamer::~AudioDestinationGStreamer):
+ (WebCore::AudioDestinationGStreamer::handleMessage):
+ (WebCore::AudioDestinationGStreamer::start):
+ (WebCore::AudioDestinationGStreamer::startRendering):
+ (WebCore::AudioDestinationGStreamer::stop):
+ (WebCore::AudioDestinationGStreamer::stopRendering):
+ (WebCore::AudioDestinationGStreamer::notifyStartupResult):
+ (WebCore::AudioDestinationGStreamer::notifyStopResult):
+ (WebCore::AudioDestinationGStreamer::notifyIsPlaying):
+ * platform/audio/gstreamer/AudioDestinationGStreamer.h:
+ * platform/audio/gstreamer/WebKitWebAudioSourceGStreamer.cpp:
+ (webkit_web_audio_src_class_init):
+ (webKitWebAudioSrcConstructed):
+ (webKitWebAudioSrcSetProperty):
+ (webKitWebAudioSrcGetProperty):
+ (webKitWebAudioSrcAllocateBuffers):
+ (webKitWebAudioSrcRenderAndPushFrames):
+ (webKitWebAudioSrcRenderIteration):
+ (webKitWebAudioSrcChangeState):
+ (webkitWebAudioSourceSetDispatchToRenderThreadFunction):
+ * platform/audio/gstreamer/WebKitWebAudioSourceGStreamer.h:
+ * platform/graphics/gstreamer/GStreamerCommon.cpp:
+ (WebCore::webkitGstSetElementStateSynchronously):
+ * platform/graphics/gstreamer/GStreamerCommon.h:
+ (WebCore::webkitGstSetElementStateSynchronously):
+
2021-01-05 Eric Carlson <[email protected]>
[Cocoa] WebM format reader doesn't work with a url in a <source> element
Modified: trunk/Source/WebCore/platform/audio/gstreamer/AudioDestinationGStreamer.cpp (271196 => 271197)
--- trunk/Source/WebCore/platform/audio/gstreamer/AudioDestinationGStreamer.cpp 2021-01-06 09:12:16 UTC (rev 271196)
+++ trunk/Source/WebCore/platform/audio/gstreamer/AudioDestinationGStreamer.cpp 2021-01-06 09:56:37 UTC (rev 271197)
@@ -32,8 +32,10 @@
#include "WebKitWebAudioSourceGStreamer.h"
#include <gst/audio/gstaudiobasesink.h>
#include <gst/gst.h>
+#include <wtf/PrintStream.h>
#include <wtf/glib/GUniquePtr.h>
#include <wtf/glib/RunLoopSourcePriority.h>
+#include <wtf/text/StringConcatenateNumbers.h>
namespace WebCore {
@@ -120,14 +122,15 @@
, m_renderBus(AudioBus::create(numberOfOutputChannels, AudioUtilities::renderQuantumSize, false))
, m_sampleRate(sampleRate)
{
- m_pipeline = gst_pipeline_new("audio-destination");
- GRefPtr<GstBus> bus = adoptGRef(gst_pipeline_get_bus(GST_PIPELINE(m_pipeline.get())));
+ static Atomic<uint32_t> pipelineId;
+ m_pipeline = gst_pipeline_new(makeString("audio-destination-", pipelineId.exchangeAdd(1)).ascii().data());
+ auto bus = adoptGRef(gst_pipeline_get_bus(GST_PIPELINE(m_pipeline.get())));
ASSERT(bus);
gst_bus_add_signal_watch_full(bus.get(), RunLoopSourcePriority::RunLoopDispatcher);
g_signal_connect(bus.get(), "message", G_CALLBACK(messageCallback), this);
- m_src = reinterpret_cast<GstElement*>(g_object_new(WEBKIT_TYPE_WEB_AUDIO_SRC, "rate", sampleRate,
- "bus", m_renderBus.get(), "provider", this, "frames", AudioUtilities::renderQuantumSize, nullptr));
+ m_src = GST_ELEMENT_CAST(g_object_new(WEBKIT_TYPE_WEB_AUDIO_SRC, "rate", sampleRate,
+ "bus", m_renderBus.get(), "destination", this, "frames", AudioUtilities::renderQuantumSize, nullptr));
GRefPtr<GstElement> audioSink = createPlatformAudioSink();
m_audioSinkAvailable = audioSink;
@@ -165,12 +168,14 @@
AudioDestinationGStreamer::~AudioDestinationGStreamer()
{
- GRefPtr<GstBus> bus = adoptGRef(gst_pipeline_get_bus(GST_PIPELINE(m_pipeline.get())));
+ GST_DEBUG_OBJECT(m_pipeline.get(), "Disposing");
+ auto bus = adoptGRef(gst_pipeline_get_bus(GST_PIPELINE(m_pipeline.get())));
ASSERT(bus);
g_signal_handlers_disconnect_by_func(bus.get(), reinterpret_cast<gpointer>(messageCallback), this);
gst_bus_remove_signal_watch(bus.get());
gst_element_set_state(m_pipeline.get(), GST_STATE_NULL);
+ notifyStopResult(true);
}
unsigned AudioDestinationGStreamer::framesPerBuffer() const
@@ -192,7 +197,7 @@
gst_message_parse_error(message, &error.outPtr(), &debug.outPtr());
g_warning("Error: %d, %s. Debug output: %s", error->code, error->message, debug.get());
gst_element_set_state(m_pipeline.get(), GST_STATE_NULL);
- m_isPlaying = false;
+ notifyIsPlaying(false);
break;
case GST_MESSAGE_STATE_CHANGED:
if (GST_MESSAGE_SRC(message) == GST_OBJECT(m_pipeline.get())) {
@@ -200,18 +205,16 @@
gst_message_parse_state_changed(message, &oldState, &newState, &pending);
GST_INFO_OBJECT(m_pipeline.get(), "State changed (old: %s, new: %s, pending: %s)",
- gst_element_state_get_name(oldState),
- gst_element_state_get_name(newState),
- gst_element_state_get_name(pending));
+ gst_element_state_get_name(oldState), gst_element_state_get_name(newState), gst_element_state_get_name(pending));
WTF::String dotFileName = makeString(GST_OBJECT_NAME(m_pipeline.get()), '_',
- gst_element_state_get_name(oldState), '_',
- gst_element_state_get_name(newState));
+ gst_element_state_get_name(oldState), '_', gst_element_state_get_name(newState));
GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN_CAST(m_pipeline.get()), GST_DEBUG_GRAPH_SHOW_ALL, dotFileName.utf8().data());
}
break;
default:
+ GST_DEBUG_OBJECT(m_pipeline.get(), "Unhandled message: %s", GST_MESSAGE_TYPE_NAME(message));
break;
}
return TRUE;
@@ -219,7 +222,7 @@
void AudioDestinationGStreamer::start(Function<void(Function<void()>&&)>&& dispatchToRenderThread, CompletionHandler<void(bool)>&& completionHandler)
{
- webkitWebAudioSourceSetDispatchToRenderThreadCallback(WEBKIT_WEB_AUDIO_SRC(m_src.get()), WTFMove(dispatchToRenderThread));
+ webkitWebAudioSourceSetDispatchToRenderThreadFunction(WEBKIT_WEB_AUDIO_SRC(m_src.get()), WTFMove(dispatchToRenderThread));
startRendering(WTFMove(completionHandler));
}
@@ -226,43 +229,84 @@
void AudioDestinationGStreamer::startRendering(CompletionHandler<void(bool)>&& completionHandler)
{
ASSERT(m_audioSinkAvailable);
- bool success = false;
- if (m_audioSinkAvailable) {
- GST_DEBUG("Starting");
- if (gst_element_set_state(m_pipeline.get(), GST_STATE_PLAYING) == GST_STATE_CHANGE_FAILURE) {
- g_warning("Error: Failed to set pipeline to playing");
- m_isPlaying = false;
- } else {
- m_isPlaying = true;
- success = true;
- }
+ m_startupCompletionHandler = WTFMove(completionHandler);
+ GST_DEBUG_OBJECT(m_pipeline.get(), "Starting audio rendering, sink %s", m_audioSinkAvailable ? "available" : "not available");
+
+ if (m_isPlaying) {
+ notifyStartupResult(true);
+ return;
}
- callOnMainThread([completionHandler = WTFMove(completionHandler), success]() mutable {
- completionHandler(success);
- });
+ if (!m_audioSinkAvailable) {
+ notifyStartupResult(false);
+ return;
+ }
+
+ notifyStartupResult(webkitGstSetElementStateSynchronously(m_pipeline.get(), GST_STATE_PLAYING, [this](GstMessage* message) -> bool {
+ return handleMessage(message);
+ }));
}
void AudioDestinationGStreamer::stop(CompletionHandler<void(bool)>&& completionHandler)
{
stopRendering(WTFMove(completionHandler));
+ webkitWebAudioSourceSetDispatchToRenderThreadFunction(WEBKIT_WEB_AUDIO_SRC(m_src.get()), nullptr);
}
void AudioDestinationGStreamer::stopRendering(CompletionHandler<void(bool)>&& completionHandler)
{
ASSERT(m_audioSinkAvailable);
- bool success = false;
- if (m_audioSinkAvailable) {
- GST_DEBUG("Stopping");
- gst_element_set_state(m_pipeline.get(), GST_STATE_PAUSED);
- m_isPlaying = false;
- success = true;
+ m_stopCompletionHandler = WTFMove(completionHandler);
+ GST_DEBUG_OBJECT(m_pipeline.get(), "Stopping audio rendering, sink %s", m_audioSinkAvailable ? "available" : "not available");
+
+ if (!m_isPlaying) {
+ GST_DEBUG_OBJECT(m_pipeline.get(), "Already stopped");
+ notifyStopResult(true);
+ return;
}
- callOnMainThread([completionHandler = WTFMove(completionHandler), success]() mutable {
- completionHandler(success);
+
+ if (!m_audioSinkAvailable) {
+ notifyStopResult(false);
+ return;
+ }
+
+ notifyStopResult(webkitGstSetElementStateSynchronously(m_pipeline.get(), GST_STATE_READY, [this](GstMessage* message) -> bool {
+ return handleMessage(message);
+ }));
+}
+
+void AudioDestinationGStreamer::notifyStartupResult(bool success)
+{
+ callOnMainThreadAndWait([this, completionHandler = WTFMove(m_startupCompletionHandler), success]() mutable {
+ GST_DEBUG_OBJECT(m_pipeline.get(), "Has start completion handler: %s", boolForPrinting(!!completionHandler));
+ if (completionHandler)
+ completionHandler(success);
});
}
+void AudioDestinationGStreamer::notifyStopResult(bool success)
+{
+ if (success)
+ notifyIsPlaying(false);
+
+ callOnMainThreadAndWait([this, completionHandler = WTFMove(m_stopCompletionHandler), success]() mutable {
+ GST_DEBUG_OBJECT(m_pipeline.get(), "Has stop completion handler: %s", boolForPrinting(!!completionHandler));
+ if (completionHandler)
+ completionHandler(success);
+ });
+}
+
+void AudioDestinationGStreamer::notifyIsPlaying(bool isPlaying)
+{
+ if (m_isPlaying == isPlaying)
+ return;
+
+ GST_DEBUG("Is playing: %s", boolForPrinting(isPlaying));
+ m_isPlaying = isPlaying;
+ if (m_callback)
+ m_callback->isPlayingDidChange();
+}
+
} // namespace WebCore
#endif // ENABLE(WEB_AUDIO)
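One detail of notifyStartupResult/notifyStopResult above that is easy to miss: the completion handler member is moved out into the lambda capture, so a second notification (for example notifyStopResult(true) from the destructor after an earlier stopRendering already consumed the handler) finds a null handler and becomes a no-op. A reduced sketch of that pattern, with hypothetical names and omitting the debug logging:

    #include <wtf/CompletionHandler.h>
    #include <wtf/MainThread.h>

    class Destination {
    public:
        void notifyStopResult(bool success)
        {
            // Moving the member into the capture leaves m_handler null, so
            // calling notifyStopResult() again is safe: the guard below fires.
            callOnMainThreadAndWait([handler = WTFMove(m_handler), success]() mutable {
                if (handler)
                    handler(success);
            });
        }

    private:
        CompletionHandler<void(bool)> m_handler;
    };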
Modified: trunk/Source/WebCore/platform/audio/gstreamer/AudioDestinationGStreamer.h (271196 => 271197)
--- trunk/Source/WebCore/platform/audio/gstreamer/AudioDestinationGStreamer.h 2021-01-06 09:12:16 UTC (rev 271196)
+++ trunk/Source/WebCore/platform/audio/gstreamer/AudioDestinationGStreamer.h 2021-01-06 09:56:37 UTC (rev 271197)
@@ -21,12 +21,9 @@
#include "AudioBus.h"
#include "AudioDestination.h"
#include "GRefPtrGStreamer.h"
+#include <wtf/Condition.h>
#include <wtf/Forward.h>
-typedef struct _GstElement GstElement;
-typedef struct _GstPad GstPad;
-typedef struct _GstMessage GstMessage;
-
namespace WebCore {
class AudioDestinationGStreamer : public AudioDestination {
@@ -42,6 +39,7 @@
unsigned framesPerBuffer() const final;
gboolean handleMessage(GstMessage*);
+ void notifyIsPlaying(bool);
protected:
virtual void startRendering(CompletionHandler<void(bool)>&&);
@@ -48,6 +46,9 @@
virtual void stopRendering(CompletionHandler<void(bool)>&&);
private:
+ void notifyStartupResult(bool);
+ void notifyStopResult(bool);
+
RefPtr<AudioBus> m_renderBus;
float m_sampleRate;
@@ -55,6 +56,10 @@
bool m_audioSinkAvailable { false };
GRefPtr<GstElement> m_pipeline;
GRefPtr<GstElement> m_src;
+ CompletionHandler<void(bool)> m_startupCompletionHandler;
+ CompletionHandler<void(bool)> m_stopCompletionHandler;
+ Lock m_setStateLock;
+ Condition m_setStateCondition;
};
} // namespace WebCore
Modified: trunk/Source/WebCore/platform/audio/gstreamer/WebKitWebAudioSourceGStreamer.cpp (271196 => 271197)
--- trunk/Source/WebCore/platform/audio/gstreamer/WebKitWebAudioSourceGStreamer.cpp 2021-01-06 09:12:16 UTC (rev 271196)
+++ trunk/Source/WebCore/platform/audio/gstreamer/WebKitWebAudioSourceGStreamer.cpp 2021-01-06 09:56:37 UTC (rev 271197)
@@ -24,7 +24,7 @@
#if ENABLE(WEB_AUDIO) && USE(GSTREAMER)
#include "AudioBus.h"
-#include "AudioDestination.h"
+#include "AudioDestinationGStreamer.h"
#include "AudioIOCallback.h"
#include "AudioUtilities.h"
#include "GStreamerCommon.h"
@@ -59,7 +59,7 @@
struct _WebKitWebAudioSrcPrivate {
gfloat sampleRate;
AudioBus* bus;
- AudioDestination* provider;
+ AudioDestinationGStreamer* destination;
guint framesToPull;
guint bufferSize;
@@ -79,9 +79,13 @@
GRefPtr<GstBufferPool> pool;
bool enableGapBufferSupport;
+ bool hasRenderedAudibleFrame { false };
- Optional<Function<void(Function<void()>&&)>> dispatchToRenderThreadCallback;
- Lock dispatchMutex;
+ Lock dispatchToRenderThreadLock;
+ Function<void(Function<void()>&&)> dispatchToRenderThreadFunction;
+
+ bool dispatchDone;
+ Lock dispatchLock;
Condition dispatchCondition;
_WebKitWebAudioSrcPrivate()
@@ -105,7 +109,7 @@
enum {
PROP_RATE = 1,
PROP_BUS,
- PROP_PROVIDER,
+ PROP_DESTINATION,
PROP_FRAMES
};
@@ -116,7 +120,7 @@
static void webKitWebAudioSrcSetProperty(GObject*, guint propertyId, const GValue*, GParamSpec*);
static void webKitWebAudioSrcGetProperty(GObject*, guint propertyId, GValue*, GParamSpec*);
static GstStateChangeReturn webKitWebAudioSrcChangeState(GstElement*, GstStateChange);
-static void webKitWebAudioSrcLoop(WebKitWebAudioSrc*);
+static void webKitWebAudioSrcRenderIteration(WebKitWebAudioSrc*);
static GstCaps* getGStreamerMonoAudioCaps(float sampleRate)
{
@@ -185,10 +189,8 @@
g_param_spec_pointer("bus", "bus",
"Bus", flags));
- g_object_class_install_property(objectClass,
- PROP_PROVIDER,
- g_param_spec_pointer("provider", "provider",
- "Provider", flags));
+ g_object_class_install_property(objectClass, PROP_DESTINATION, g_param_spec_pointer("destination", "destination",
+ "Destination", flags));
g_object_class_install_property(objectClass,
PROP_FRAMES,
@@ -203,12 +205,12 @@
WebKitWebAudioSrcPrivate* priv = src->priv;
ASSERT(priv->bus);
- ASSERT(priv->provider);
+ ASSERT(priv->destination);
ASSERT(priv->sampleRate);
gst_element_add_pad(GST_ELEMENT(src), priv->sourcePad);
- priv->task = adoptGRef(gst_task_new(reinterpret_cast<GstTaskFunction>(webKitWebAudioSrcLoop), src, nullptr));
+ priv->task = adoptGRef(gst_task_new(reinterpret_cast<GstTaskFunction>(webKitWebAudioSrcRenderIteration), src, nullptr));
gst_task_set_lock(priv->task.get(), &priv->mutex);
priv->interleave = gst_element_factory_make("audiointerleave", nullptr);
@@ -260,8 +262,8 @@
case PROP_BUS:
priv->bus = static_cast<AudioBus*>(g_value_get_pointer(value));
break;
- case PROP_PROVIDER:
- priv->provider = static_cast<AudioDestination*>(g_value_get_pointer(value));
+ case PROP_DESTINATION:
+ priv->destination = static_cast<AudioDestinationGStreamer*>(g_value_get_pointer(value));
break;
case PROP_FRAMES:
priv->framesToPull = g_value_get_uint(value);
@@ -285,8 +287,8 @@
case PROP_BUS:
g_value_set_pointer(value, priv->bus);
break;
- case PROP_PROVIDER:
- g_value_set_pointer(value, priv->provider);
+ case PROP_DESTINATION:
+ g_value_set_pointer(value, priv->destination);
break;
case PROP_FRAMES:
g_value_set_uint(value, priv->framesToPull);
@@ -297,22 +299,19 @@
}
}
-static Optional<Vector<GRefPtr<GstBuffer>>> webKitWebAudioSrcAllocateBuffersAndRenderAudio(WebKitWebAudioSrc* src)
+static Optional<Vector<GRefPtr<GstBuffer>>> webKitWebAudioSrcAllocateBuffers(WebKitWebAudioSrc* src)
{
WebKitWebAudioSrcPrivate* priv = src->priv;
ASSERT(priv->bus);
- ASSERT(priv->provider);
- if (!priv->provider || !priv->bus) {
- GST_ELEMENT_ERROR(src, CORE, FAILED, ("Internal WebAudioSrc error"), ("Can't start without provider or bus"));
+ ASSERT(priv->destination);
+ if (!priv->destination || !priv->bus) {
+ GST_ELEMENT_ERROR(src, CORE, FAILED, ("Internal WebAudioSrc error"), ("Can't start without destination or bus"));
gst_task_stop(src->priv->task.get());
return WTF::nullopt;
}
ASSERT(priv->pool);
- GstClockTime timestamp = gst_util_uint64_scale(priv->numberOfSamples, GST_SECOND, priv->sampleRate);
- priv->numberOfSamples += priv->framesToPull;
- GstClockTime duration = gst_util_uint64_scale(priv->numberOfSamples, GST_SECOND, priv->sampleRate) - timestamp;
Vector<GRefPtr<GstBuffer>> channelBufferList;
channelBufferList.reserveInitialCapacity(priv->sources.size());
@@ -329,8 +328,6 @@
}
ASSERT(buffer);
- GST_BUFFER_TIMESTAMP(buffer.get()) = timestamp;
- GST_BUFFER_DURATION(buffer.get()) = duration;
GstMappedBuffer mappedBuffer(buffer.get(), GST_MAP_READWRITE);
ASSERT(mappedBuffer);
mappedBuffers.uncheckedAppend(WTFMove(mappedBuffer));
@@ -338,9 +335,22 @@
channelBufferList.uncheckedAppend(WTFMove(buffer));
}
+ return makeOptional(channelBufferList);
+}
+
+static void webKitWebAudioSrcRenderAndPushFrames(GRefPtr<GstElement>&& element, Vector<GRefPtr<GstBuffer>>&& channelBufferList)
+{
+ auto* src = ""
+ auto* priv = src->priv;
+
+ ASSERT(channelBufferList.size() == priv->sources.size());
+
+ GstClockTime timestamp = gst_util_uint64_scale(priv->numberOfSamples, GST_SECOND, priv->sampleRate);
+ priv->numberOfSamples += priv->framesToPull;
+ GstClockTime duration = gst_util_uint64_scale(priv->numberOfSamples, GST_SECOND, priv->sampleRate) - timestamp;
+
AudioIOPosition outputTimestamp;
- auto clock = adoptGRef(gst_element_get_clock(GST_ELEMENT_CAST(src)));
- if (clock) {
+ if (auto clock = adoptGRef(gst_element_get_clock(element.get()))) {
auto clockTime = gst_clock_get_time(clock.get());
outputTimestamp.position = Seconds::fromNanoseconds(timestamp);
outputTimestamp.timestamp = MonotonicTime::fromRawSeconds(static_cast<double>((g_get_monotonic_time() + GST_TIME_AS_USECONDS(clockTime)) / 1000000.0));
@@ -347,36 +357,18 @@
}
// FIXME: Add support for local/live audio input.
+ priv->destination->callRenderCallback(nullptr, priv->bus, priv->framesToPull, outputTimestamp);
- if (src->priv->dispatchToRenderThreadCallback.hasValue()) {
- LockHolder holder(priv->dispatchMutex);
- (*priv->dispatchToRenderThreadCallback)([src, outputTimestamp]() mutable {
- auto* priv = src->priv;
- priv->provider->callRenderCallback(nullptr, priv->bus, priv->framesToPull, outputTimestamp);
- priv->dispatchCondition.notifyOne();
- });
- priv->dispatchCondition.wait(priv->dispatchMutex);
- } else
- priv->provider->callRenderCallback(nullptr, priv->bus, priv->framesToPull, outputTimestamp);
-
- return makeOptional(channelBufferList);
-}
-
-static void webKitWebAudioSrcLoop(WebKitWebAudioSrc* src)
-{
- WebKitWebAudioSrcPrivate* priv = src->priv;
-
- Optional<Vector<GRefPtr<GstBuffer>>> channelBufferList = webKitWebAudioSrcAllocateBuffersAndRenderAudio(src);
- if (!channelBufferList) {
- gst_task_stop(src->priv->task.get());
- return;
+ if (!priv->hasRenderedAudibleFrame && !priv->bus->isSilent()) {
+ priv->destination->notifyIsPlaying(true);
+ priv->hasRenderedAudibleFrame = true;
}
- ASSERT(channelBufferList->size() == priv->sources.size());
-
bool failed = false;
for (unsigned i = 0; i < priv->sources.size(); ++i) {
- auto& buffer = channelBufferList.value()[i];
+ auto& buffer = channelBufferList[i];
+ GST_BUFFER_TIMESTAMP(buffer.get()) = outputTimestamp.position.nanoseconds();
+ GST_BUFFER_DURATION(buffer.get()) = duration;
if (priv->enableGapBufferSupport && priv->bus->channel(i)->isSilent())
GST_BUFFER_FLAG_SET(buffer.get(), GST_BUFFER_FLAG_GAP);
@@ -391,25 +383,65 @@
// FLUSHING and EOS are not errors.
if (ret < GST_FLOW_EOS || ret == GST_FLOW_NOT_LINKED)
GST_ELEMENT_ERROR(src, CORE, PAD, ("Internal WebAudioSrc error"), ("Failed to push buffer on %s flow: %s", GST_OBJECT_NAME(appsrc.get()), gst_flow_get_name(ret)));
- gst_task_stop(src->priv->task.get());
+ gst_task_stop(priv->task.get());
failed = true;
}
}
+
+ {
+ LockHolder lock(priv->dispatchLock);
+ priv->dispatchDone = true;
+ priv->dispatchCondition.notifyOne();
+ }
}
+static void webKitWebAudioSrcRenderIteration(WebKitWebAudioSrc* src)
+{
+ auto* priv = src->priv;
+ auto channelBufferList = webKitWebAudioSrcAllocateBuffers(src);
+ if (!channelBufferList) {
+ gst_task_stop(priv->task.get());
+ return;
+ }
+
+ {
+ LockHolder lock(priv->dispatchLock);
+ priv->dispatchDone = false;
+ }
+
+ auto locker = tryHoldLock(priv->dispatchToRenderThreadLock);
+ if (!locker || !priv->dispatchToRenderThreadFunction)
+ return;
+
+ priv->dispatchToRenderThreadFunction([channels = WTFMove(*channelBufferList), protectedThis = GRefPtr<GstElement>(GST_ELEMENT_CAST(src))]() mutable {
+ webKitWebAudioSrcRenderAndPushFrames(WTFMove(protectedThis), WTFMove(channels));
+ });
+
+ {
+ LockHolder lock(priv->dispatchLock);
+ if (!priv->dispatchDone)
+ priv->dispatchCondition.wait(priv->dispatchLock);
+ }
+}
+
static GstStateChangeReturn webKitWebAudioSrcChangeState(GstElement* element, GstStateChange transition)
{
GstStateChangeReturn returnValue = GST_STATE_CHANGE_SUCCESS;
- WebKitWebAudioSrc* src = ""
+ auto* src = ""
+ auto* priv = src->priv;
+#if GST_CHECK_VERSION(1, 14, 0)
+ GST_DEBUG_OBJECT(element, "%s", gst_state_change_get_name(transition));
+#endif
+
switch (transition) {
case GST_STATE_CHANGE_NULL_TO_READY:
- if (!src->priv->interleave) {
+ if (!priv->interleave) {
gst_element_post_message(element, gst_missing_element_message_new(element, "audiointerleave"));
GST_ELEMENT_ERROR(src, CORE, MISSING_PLUGIN, (nullptr), ("no audiointerleave"));
return GST_STATE_CHANGE_FAILURE;
}
- src->priv->numberOfSamples = 0;
+ priv->numberOfSamples = 0;
break;
default:
break;
@@ -423,26 +455,28 @@
switch (transition) {
case GST_STATE_CHANGE_READY_TO_PAUSED: {
- GST_DEBUG_OBJECT(src, "READY->PAUSED");
-
- src->priv->pool = gst_buffer_pool_new();
- GstStructure* config = gst_buffer_pool_get_config(src->priv->pool.get());
- gst_buffer_pool_config_set_params(config, nullptr, src->priv->bufferSize, 0, 0);
- gst_buffer_pool_set_config(src->priv->pool.get(), config);
- if (!gst_buffer_pool_set_active(src->priv->pool.get(), TRUE))
+ priv->pool = gst_buffer_pool_new();
+ GstStructure* config = gst_buffer_pool_get_config(priv->pool.get());
+ gst_buffer_pool_config_set_params(config, nullptr, priv->bufferSize, 0, 0);
+ gst_buffer_pool_set_config(priv->pool.get(), config);
+ if (!gst_buffer_pool_set_active(priv->pool.get(), TRUE))
returnValue = GST_STATE_CHANGE_FAILURE;
- else if (!gst_task_start(src->priv->task.get()))
+ else if (!gst_task_start(priv->task.get()))
returnValue = GST_STATE_CHANGE_FAILURE;
break;
}
case GST_STATE_CHANGE_PAUSED_TO_READY:
- GST_DEBUG_OBJECT(src, "PAUSED->READY");
+ {
+ LockHolder lock(priv->dispatchLock);
+ priv->dispatchDone = false;
+ priv->dispatchCondition.notifyAll();
+ }
+ gst_buffer_pool_set_flushing(priv->pool.get(), TRUE);
+ if (!gst_task_join(priv->task.get()))
+ returnValue = GST_STATE_CHANGE_FAILURE;
- gst_buffer_pool_set_flushing(src->priv->pool.get(), TRUE);
- if (!gst_task_join(src->priv->task.get()))
- returnValue = GST_STATE_CHANGE_FAILURE;
- gst_buffer_pool_set_active(src->priv->pool.get(), FALSE);
- src->priv->pool = nullptr;
+ gst_buffer_pool_set_active(priv->pool.get(), FALSE);
+ priv->pool = nullptr;
break;
default:
break;
@@ -451,10 +485,10 @@
return returnValue;
}
-void webkitWebAudioSourceSetDispatchToRenderThreadCallback(WebKitWebAudioSrc* src, Function<void(Function<void()>&&)>&& function)
+void webkitWebAudioSourceSetDispatchToRenderThreadFunction(WebKitWebAudioSrc* src, Function<void(Function<void()>&&)>&& function)
{
- ASSERT(function);
- src->priv->dispatchToRenderThreadCallback = WTFMove(function);
+ auto locker = holdLock(src->priv->dispatchToRenderThreadLock);
+ src->priv->dispatchToRenderThreadFunction = WTFMove(function);
}
#endif // ENABLE(WEB_AUDIO) && USE(GSTREAMER)
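One subtlety in the hunks above: the render loop takes dispatchToRenderThreadLock with tryHoldLock rather than a blocking holdLock, so a concurrent webkitWebAudioSourceSetDispatchToRenderThreadFunction(src, nullptr) call during teardown cannot deadlock against an in-flight render iteration; the iteration simply bails out. A reduced sketch of the idiom, with hypothetical names, assuming WTF's Locker utilities:

    #include <wtf/Function.h>
    #include <wtf/Lock.h>

    // Producer side: if the teardown path currently holds the lock in order
    // to clear the callback, skip this iteration instead of blocking on it.
    void renderIteration(Lock& callbackLock, Function<void(Function<void()>&&)>& callback)
    {
        auto locker = tryHoldLock(callbackLock); // Non-blocking acquisition.
        if (!locker || !callback)
            return; // Being torn down, or no callback installed; bail out.
        callback([] { /* render and push frames */ });
    }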
Modified: trunk/Source/WebCore/platform/audio/gstreamer/WebKitWebAudioSourceGStreamer.h (271196 => 271197)
--- trunk/Source/WebCore/platform/audio/gstreamer/WebKitWebAudioSourceGStreamer.h 2021-01-06 09:12:16 UTC (rev 271196)
+++ trunk/Source/WebCore/platform/audio/gstreamer/WebKitWebAudioSourceGStreamer.h 2021-01-06 09:56:37 UTC (rev 271197)
@@ -30,7 +30,7 @@
GType webkit_web_audio_src_get_type();
-void webkitWebAudioSourceSetDispatchToRenderThreadCallback(WebKitWebAudioSrc*, Function<void(Function<void()>&&)>&&);
+void webkitWebAudioSourceSetDispatchToRenderThreadFunction(WebKitWebAudioSrc*, Function<void(Function<void()>&&)>&&);
#endif // USE(GSTREAMER)
Modified: trunk/Source/WebCore/platform/graphics/gstreamer/GStreamerCommon.cpp (271196 => 271197)
--- trunk/Source/WebCore/platform/graphics/gstreamer/GStreamerCommon.cpp 2021-01-06 09:12:16 UTC (rev 271196)
+++ trunk/Source/WebCore/platform/graphics/gstreamer/GStreamerCommon.cpp 2021-01-06 09:56:37 UTC (rev 271197)
@@ -32,6 +32,7 @@
#include <gst/audio/audio-info.h>
#include <gst/gst.h>
#include <mutex>
+#include <wtf/Scope.h>
#include <wtf/glib/GLibUtilities.h>
#include <wtf/glib/GUniquePtr.h>
#include <wtf/glib/RunLoopSourcePriority.h>
@@ -454,6 +455,47 @@
return audioSink;
}
+bool webkitGstSetElementStateSynchronously(GstElement* pipeline, GstState targetState, Function<bool(GstMessage*)>&& messageHandler)
+{
+ GST_DEBUG_OBJECT(pipeline, "Setting state to %s", gst_element_state_get_name(targetState));
+
+ GstState currentState;
+ auto result = gst_element_get_state(pipeline, &currentState, nullptr, 10);
+ if (result == GST_STATE_CHANGE_SUCCESS && currentState >= targetState) {
+ GST_DEBUG_OBJECT(pipeline, "Target state already reached");
+ return true;
+ }
+
+ auto bus = adoptGRef(gst_pipeline_get_bus(GST_PIPELINE(pipeline)));
+ gst_bus_enable_sync_message_emission(bus.get());
+
+ auto cleanup = makeScopeExit([bus = GRefPtr<GstBus>(bus), pipeline, targetState] {
+ gst_bus_disable_sync_message_emission(bus.get());
+ GstState currentState;
+ auto result = gst_element_get_state(pipeline, &currentState, nullptr, 0);
+ GST_DEBUG_OBJECT(pipeline, "Task finished, result: %s, target state reached: %s", gst_element_state_change_return_get_name(result), boolForPrinting(currentState == targetState));
+ });
+
+ result = gst_element_set_state(pipeline, targetState);
+ if (result == GST_STATE_CHANGE_FAILURE)
+ return false;
+
+ if (result == GST_STATE_CHANGE_ASYNC) {
+ while (auto message = adoptGRef(gst_bus_timed_pop_filtered(bus.get(), GST_CLOCK_TIME_NONE, GST_MESSAGE_STATE_CHANGED))) {
+ if (!messageHandler(message.get()))
+ return false;
+
+ result = gst_element_get_state(pipeline, &currentState, nullptr, 10);
+ if (result == GST_STATE_CHANGE_FAILURE)
+ return false;
+
+ if (currentState == targetState)
+ return true;
+ }
+ }
+ return true;
+}
+
}
#endif // USE(GSTREAMER)
Modified: trunk/Source/WebCore/platform/graphics/gstreamer/GStreamerCommon.h (271196 => 271197)
--- trunk/Source/WebCore/platform/graphics/gstreamer/GStreamerCommon.h 2021-01-06 09:12:16 UTC (rev 271196)
+++ trunk/Source/WebCore/platform/graphics/gstreamer/GStreamerCommon.h 2021-01-06 09:56:37 UTC (rev 271197)
@@ -294,6 +294,10 @@
GstElement* createPlatformAudioSink();
+bool webkitGstSetElementStateSynchronously(GstElement*, GstState, Function<bool(GstMessage*)>&& = [](GstMessage*) -> bool {
+ return true;
+});
+
}
#ifndef GST_BUFFER_DTS_OR_PTS
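A usage note on webkitGstSetElementStateSynchronously, declared above: the message-handler argument defaults to a function that accepts every message, so a caller needing no custom bus handling can invoke it directly. An illustrative sketch (the pipeline name is hypothetical); AudioDestinationGStreamer instead passes a lambda forwarding each message to handleMessage, as shown in the hunks above:

    // Synchronously bring a pipeline to PAUSED, relying on the defaulted
    // handler that accepts every message popped while waiting.
    GstElement* pipeline = gst_pipeline_new("example-pipeline");
    if (!WebCore::webkitGstSetElementStateSynchronously(pipeline, GST_STATE_PAUSED))
        GST_WARNING_OBJECT(pipeline, "Failed to reach PAUSED synchronously");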