Modified: trunk/Source/WebCore/platform/mediastream/gstreamer/GStreamerMediaStreamSource.cpp (276196 => 276197)
--- trunk/Source/WebCore/platform/mediastream/gstreamer/GStreamerMediaStreamSource.cpp 2021-04-17 09:03:51 UTC (rev 276196)
+++ trunk/Source/WebCore/platform/mediastream/gstreamer/GStreamerMediaStreamSource.cpp 2021-04-17 11:23:09 UTC (rev 276197)
@@ -26,13 +26,12 @@
#if ENABLE(VIDEO) && ENABLE(MEDIA_STREAM) && USE(GSTREAMER)
-#include "AudioTrackPrivate.h"
+#include "AudioTrackPrivateMediaStream.h"
#include "GStreamerAudioData.h"
#include "GStreamerCommon.h"
#include "MediaSampleGStreamer.h"
#include "MediaStreamPrivate.h"
-#include "MediaStreamTrackPrivate.h"
-#include "VideoTrackPrivate.h"
+#include "VideoTrackPrivateMediaStream.h"
#include <gst/app/gstappsrc.h>
#include <wtf/UUID.h>
@@ -40,24 +39,16 @@
using namespace WebCore;
-static void webkitMediaStreamSrcPushVideoSample(WebKitMediaStreamSrc*, GstSample*);
-static void webkitMediaStreamSrcPushAudioSample(WebKitMediaStreamSrc*, const GRefPtr<GstSample>&);
-static void webkitMediaStreamSrcTrackEnded(WebKitMediaStreamSrc*, MediaStreamTrackPrivate&);
-static void webkitMediaStreamSrcRemoveTrackByType(WebKitMediaStreamSrc*, RealtimeMediaSource::Type);
-static void webkitMediaStreamSrcRemoveTrackObserver(WebKitMediaStreamSrc*, MediaStreamTrackPrivate&);
-static void webkitMediaStreamSrcPostStreamCollection(WebKitMediaStreamSrc*);
-static void webkitMediaStreamSrcRemovePad(WebKitMediaStreamSrc*, const char* padName);
-
-static GstStaticPadTemplate videoSrcTemplate = GST_STATIC_PAD_TEMPLATE("video_src", GST_PAD_SRC, GST_PAD_SOMETIMES,
+static GstStaticPadTemplate videoSrcTemplate = GST_STATIC_PAD_TEMPLATE("video_src%u", GST_PAD_SRC, GST_PAD_SOMETIMES,
GST_STATIC_CAPS("video/x-raw;video/x-h264;video/x-vp8"));
-static GstStaticPadTemplate audioSrcTemplate = GST_STATIC_PAD_TEMPLATE("audio_src", GST_PAD_SRC, GST_PAD_SOMETIMES,
+static GstStaticPadTemplate audioSrcTemplate = GST_STATIC_PAD_TEMPLATE("audio_src%u", GST_PAD_SRC, GST_PAD_SOMETIMES,
GST_STATIC_CAPS("audio/x-raw(ANY);"));
GST_DEBUG_CATEGORY_STATIC(webkitMediaStreamSrcDebug);
#define GST_CAT_DEFAULT webkitMediaStreamSrcDebug
-GRefPtr<GstTagList> mediaStreamTrackPrivateGetTags(MediaStreamTrackPrivate* track)
+GRefPtr<GstTagList> mediaStreamTrackPrivateGetTags(const MediaStreamTrackPrivate* track)
{
auto tagList = adoptGRef(gst_tag_list_new_empty());
@@ -100,55 +91,6 @@
return stream;
}
-class WebKitMediaStreamTrackObserver
- : public MediaStreamTrackPrivate::Observer
- , public RealtimeMediaSource::AudioSampleObserver
- , public RealtimeMediaSource::VideoSampleObserver {
- WTF_MAKE_FAST_ALLOCATED;
-public:
- virtual ~WebKitMediaStreamTrackObserver() { };
- WebKitMediaStreamTrackObserver(GstElement* src)
- : m_src(src) { }
- void trackStarted(MediaStreamTrackPrivate&) final { };
-
- void trackEnded(MediaStreamTrackPrivate& track) final
- {
- if (m_src)
- webkitMediaStreamSrcTrackEnded(WEBKIT_MEDIA_STREAM_SRC(m_src), track);
- }
-
- void trackEnabledChanged(MediaStreamTrackPrivate& track) final
- {
- m_enabled = track.enabled();
- }
-
- void trackMutedChanged(MediaStreamTrackPrivate&) final { };
- void trackSettingsChanged(MediaStreamTrackPrivate&) final { };
- void readyStateChanged(MediaStreamTrackPrivate&) final { };
-
- void videoSampleAvailable(MediaSample& sample) final
- {
- if (!m_enabled || !m_src)
- return;
-
- auto* gstSample = static_cast<MediaSampleGStreamer*>(&sample)->platformSample().sample.gstSample;
- webkitMediaStreamSrcPushVideoSample(WEBKIT_MEDIA_STREAM_SRC(m_src), gstSample);
- }
-
- void audioSamplesAvailable(const MediaTime&, const PlatformAudioData& audioData, const AudioStreamDescription&, size_t) final
- {
- if (!m_enabled || !m_src)
- return;
-
- auto data = static_cast<const GStreamerAudioData&>(audioData);
- webkitMediaStreamSrcPushAudioSample(WEBKIT_MEDIA_STREAM_SRC(m_src), data.getSample());
- }
-
-private:
- GstElement* m_src { nullptr };
- bool m_enabled { true };
-};
-
class WebKitMediaStreamObserver : public MediaStreamPrivate::Observer {
WTF_MAKE_FAST_ALLOCATED;
public:
@@ -166,28 +108,31 @@
void didAddTrack(MediaStreamTrackPrivate& track) final
{
if (m_src)
- webkitMediaStreamSrcAddTrack(WEBKIT_MEDIA_STREAM_SRC(m_src), &track, false);
+ webkitMediaStreamSrcAddTrack(WEBKIT_MEDIA_STREAM_SRC_CAST(m_src), &track, false);
}
- void didRemoveTrack(MediaStreamTrackPrivate& track) final
- {
- if (!m_src)
- return;
+ void didRemoveTrack(MediaStreamTrackPrivate&) final;
- webkitMediaStreamSrcRemoveTrackObserver(WEBKIT_MEDIA_STREAM_SRC(m_src), track);
- webkitMediaStreamSrcRemoveTrackByType(WEBKIT_MEDIA_STREAM_SRC(m_src), track.type());
- }
-
private:
GstElement* m_src;
};
-class InternalSource {
+class InternalSource final : public MediaStreamTrackPrivate::Observer,
+ public RealtimeMediaSource::AudioSampleObserver,
+ public RealtimeMediaSource::VideoSampleObserver {
WTF_MAKE_FAST_ALLOCATED;
public:
- InternalSource(bool isCaptureTrack, RealtimeMediaSource::Type type)
- : m_type(type)
+ InternalSource(GstElement* parent, MediaStreamTrackPrivate& track, const String& padName)
+ : m_parent(parent)
+ , m_track(track)
+ , m_padName(padName)
{
+ if (track.type() == RealtimeMediaSource::Type::Audio)
+ m_audioTrack = AudioTrackPrivateMediaStream::create(track);
+ else if (track.type() == RealtimeMediaSource::Type::Video)
+ m_videoTrack = VideoTrackPrivateMediaStream::create(track);
+
+ bool isCaptureTrack = track.isCaptureTrack();
m_src = gst_element_factory_make("appsrc", nullptr);
RELEASE_ASSERT_WITH_MESSAGE(GST_IS_APP_SRC(m_src.get()), "GStreamer appsrc element not found. Please make sure to install gst-plugins-base");
@@ -201,25 +146,66 @@
}), this);
}
- ~InternalSource()
+ virtual ~InternalSource()
{
- g_signal_handlers_disconnect_matched(m_src.get(), G_SIGNAL_MATCH_DATA, 0, 0, nullptr, nullptr, this);
+ stopObserving();
- if (auto parent = adoptGRef(gst_object_get_parent(GST_OBJECT_CAST(m_src.get())))) {
- GST_STATE_LOCK(GST_ELEMENT_CAST(parent.get()));
- gst_element_set_locked_state(m_src.get(), true);
- gst_element_set_state(m_src.get(), GST_STATE_NULL);
- gst_bin_remove(GST_BIN_CAST(parent.get()), m_src.get());
- gst_element_set_locked_state(m_src.get(), false);
- GST_STATE_UNLOCK(GST_ELEMENT_CAST(parent.get()));
+ if (m_src)
+ g_signal_handlers_disconnect_matched(m_src.get(), G_SIGNAL_MATCH_DATA, 0, 0, nullptr, nullptr, this);
+ }
- const char* padName = m_type == RealtimeMediaSource::Type::Video ? "video_src" : "audio_src";
- webkitMediaStreamSrcRemovePad(WEBKIT_MEDIA_STREAM_SRC(parent.get()), padName);
+ const MediaStreamTrackPrivate& track() const { return m_track; }
+ const String& padName() const { return m_padName; }
+ GstElement* get() const { return m_src.get(); }
+
+ void startObserving()
+ {
+ if (m_isObserving)
+ return;
+
+ m_track.addObserver(*this);
+ switch (m_track.type()) {
+ case RealtimeMediaSource::Type::Audio:
+ m_track.source().addAudioSampleObserver(*this);
+ break;
+ case RealtimeMediaSource::Type::Video:
+ m_track.source().addVideoSampleObserver(*this);
+ break;
+ case RealtimeMediaSource::Type::None:
+ ASSERT_NOT_REACHED();
}
+ m_isObserving = true;
}
- GstElement* get() const { return m_src.get(); }
+ void stopObserving()
+ {
+ if (!m_isObserving)
+ return;
+ m_isObserving = false;
+ switch (m_track.type()) {
+ case RealtimeMediaSource::Type::Audio:
+ m_track.source().removeAudioSampleObserver(*this);
+ break;
+ case RealtimeMediaSource::Type::Video:
+ m_track.source().removeVideoSampleObserver(*this);
+ break;
+ case RealtimeMediaSource::Type::None:
+ ASSERT_NOT_REACHED();
+ }
+ m_track.removeObserver(*this);
+ }
+
+ void configureAudioTrack(float volume, bool isMuted, bool isPlaying)
+ {
+ ASSERT(m_track.type() == RealtimeMediaSource::Type::Audio);
+ m_audioTrack->setVolume(volume);
+ m_audioTrack->setMuted(isMuted);
+ m_audioTrack->setEnabled(m_audioTrack->streamTrack().enabled() && !m_audioTrack->streamTrack().muted());
+ if (isPlaying)
+ m_audioTrack->play();
+ }
+
void pushSample(GstSample* sample)
{
ASSERT(m_src);
@@ -235,12 +221,12 @@
gst_pad_set_offset(pad.get(), -m_firstBufferPts);
}
- if (m_type == RealtimeMediaSource::Type::Video && drop)
+ if (m_track.type() == RealtimeMediaSource::Type::Video && drop)
drop = doCapsHaveType(caps, "video/x-raw") || GST_BUFFER_FLAG_IS_SET(buffer, GST_BUFFER_FLAG_DELTA_UNIT);
if (drop) {
m_needsDiscont = true;
- GST_INFO_OBJECT(m_src.get(), "%s queue full already... not pushing", m_type == RealtimeMediaSource::Type::Video ? "Video" : "Audio");
+ GST_INFO_OBJECT(m_src.get(), "%s queue full already... not pushing", m_track.type() == RealtimeMediaSource::Type::Video ? "Video" : "Audio");
return;
}
@@ -252,24 +238,61 @@
gst_app_src_push_sample(GST_APP_SRC(m_src.get()), sample);
}
+ void trackStarted(MediaStreamTrackPrivate&) final { };
+ void trackEnded(MediaStreamTrackPrivate&) final;
+
+ void trackEnabledChanged(MediaStreamTrackPrivate& track) final
+ {
+ m_enabled = track.enabled();
+ }
+
+ void trackMutedChanged(MediaStreamTrackPrivate&) final { };
+ void trackSettingsChanged(MediaStreamTrackPrivate&) final { };
+ void readyStateChanged(MediaStreamTrackPrivate&) final { };
+
+ void videoSampleAvailable(MediaSample& sample) final
+ {
+ if (!m_enabled || !m_parent)
+ return;
+
+ auto* gstSample = static_cast<MediaSampleGStreamer*>(&sample)->platformSample().sample.gstSample;
+ pushSample(gstSample);
+ }
+
+ void audioSamplesAvailable(const MediaTime&, const PlatformAudioData& audioData, const AudioStreamDescription&, size_t) final
+ {
+ if (!m_enabled || !m_parent)
+ return;
+
+ const auto& data = static_cast<const GStreamerAudioData&>(audioData);
+ auto sample = data.getSample();
+ pushSample(sample.get());
+ }
+
private:
- RealtimeMediaSource::Type m_type { RealtimeMediaSource::Type::None };
+ GstElement* m_parent { nullptr };
+ MediaStreamTrackPrivate& m_track;
GRefPtr<GstElement> m_src;
+ bool m_enabled { true };
GstClockTime m_firstBufferPts { GST_CLOCK_TIME_NONE };
bool m_enoughData { false };
bool m_needsDiscont { false };
+ String m_padName;
+ bool m_isObserving { false };
+ RefPtr<AudioTrackPrivateMediaStream> m_audioTrack;
+ RefPtr<VideoTrackPrivateMediaStream> m_videoTrack;
};
struct _WebKitMediaStreamSrcPrivate {
CString uri;
- Optional<InternalSource> audioSrc;
- Optional<InternalSource> videoSrc;
- std::unique_ptr<WebKitMediaStreamTrackObserver> mediaStreamTrackObserver;
+ Vector<std::unique_ptr<InternalSource>> sources;
std::unique_ptr<WebKitMediaStreamObserver> mediaStreamObserver;
RefPtr<MediaStreamPrivate> stream;
Vector<RefPtr<MediaStreamTrackPrivate>> tracks;
GUniquePtr<GstFlowCombiner> flowCombiner;
GRefPtr<GstStreamCollection> streamCollection;
+ Atomic<unsigned> audioPadCounter;
+ Atomic<unsigned> videoPadCounter;
};
enum {
@@ -278,6 +301,17 @@
PROP_LAST
};
+void WebKitMediaStreamObserver::didRemoveTrack(MediaStreamTrackPrivate& track)
+{
+ if (!m_src)
+ return;
+
+ auto* element = WEBKIT_MEDIA_STREAM_SRC_CAST(m_src);
+ element->priv->sources.removeFirstMatching([&](auto& item) {
+ return item->track().id() == track.id();
+ });
+}
+
static GstURIType webkitMediaStreamSrcUriGetType(GType)
{
return GST_URI_SRC;
@@ -291,13 +325,13 @@
static char* webkitMediaStreamSrcUriGetUri(GstURIHandler* handler)
{
- WebKitMediaStreamSrc* self = WEBKIT_MEDIA_STREAM_SRC(handler);
+ WebKitMediaStreamSrc* self = WEBKIT_MEDIA_STREAM_SRC_CAST(handler);
return g_strdup(self->priv->uri.data());
}
static gboolean webkitMediaStreamSrcUriSetUri(GstURIHandler* handler, const char* uri, GError**)
{
- WebKitMediaStreamSrc* self = WEBKIT_MEDIA_STREAM_SRC(handler);
+ WebKitMediaStreamSrc* self = WEBKIT_MEDIA_STREAM_SRC_CAST(handler);
self->priv->uri = CString(uri);
return TRUE;
}
@@ -345,10 +379,9 @@
static void webkitMediaStreamSrcConstructed(GObject* object)
{
GST_CALL_PARENT(G_OBJECT_CLASS, constructed, (object));
- WebKitMediaStreamSrc* self = WEBKIT_MEDIA_STREAM_SRC(object);
+ WebKitMediaStreamSrc* self = WEBKIT_MEDIA_STREAM_SRC_CAST(object);
auto* priv = self->priv;
- priv->mediaStreamTrackObserver = makeUnique<WebKitMediaStreamTrackObserver>(GST_ELEMENT_CAST(self));
priv->mediaStreamObserver = makeUnique<WebKitMediaStreamObserver>(GST_ELEMENT_CAST(self));
priv->flowCombiner = GUniquePtr<GstFlowCombiner>(gst_flow_combiner_new());
@@ -357,37 +390,10 @@
ASSERT(g_object_is_floating(self));
}
-static void webkitMediaStreamSrcRemoveTrackObserver(WebKitMediaStreamSrc* self, MediaStreamTrackPrivate& track)
-{
- auto* priv = self->priv;
- switch (track.type()) {
- case RealtimeMediaSource::Type::Video:
- track.source().removeVideoSampleObserver(*priv->mediaStreamTrackObserver);
- break;
- case RealtimeMediaSource::Type::Audio:
- track.source().removeAudioSampleObserver(*priv->mediaStreamTrackObserver);
- break;
- case RealtimeMediaSource::Type::None:
- ASSERT_NOT_REACHED();
- }
- track.removeObserver(*priv->mediaStreamTrackObserver);
-}
-
-static void stopObservingTracks(WebKitMediaStreamSrc* self)
-{
- GST_OBJECT_LOCK(self);
- auto* priv = self->priv;
- for (auto& track : priv->tracks)
- webkitMediaStreamSrcRemoveTrackObserver(self, *track);
- GST_OBJECT_UNLOCK(self);
-}
-
static void webkitMediaStreamSrcDispose(GObject* object)
{
- WebKitMediaStreamSrc* self = WEBKIT_MEDIA_STREAM_SRC(object);
+ WebKitMediaStreamSrc* self = WEBKIT_MEDIA_STREAM_SRC_CAST(object);
- stopObservingTracks(self);
-
GST_OBJECT_LOCK(self);
auto* priv = self->priv;
if (priv->stream) {
@@ -402,14 +408,24 @@
static GstStateChangeReturn webkitMediaStreamSrcChangeState(GstElement* element, GstStateChange transition)
{
GST_DEBUG_OBJECT(element, "%s", gst_state_change_get_name(transition));
+ WebKitMediaStreamSrc* self = WEBKIT_MEDIA_STREAM_SRC_CAST(element);
- if (transition == GST_STATE_CHANGE_PAUSED_TO_READY)
- stopObservingTracks(WEBKIT_MEDIA_STREAM_SRC(element));
+ if (transition == GST_STATE_CHANGE_PAUSED_TO_READY) {
+ GST_OBJECT_LOCK(self);
+ for (auto& item : self->priv->sources)
+ item->stopObserving();
+ GST_OBJECT_UNLOCK(self);
+ }
GstStateChangeReturn result = GST_ELEMENT_CLASS(webkit_media_stream_src_parent_class)->change_state(element, transition);
- if (transition == GST_STATE_CHANGE_READY_TO_PAUSED)
+ if (transition == GST_STATE_CHANGE_READY_TO_PAUSED) {
+ GST_OBJECT_LOCK(self);
+ for (auto& item : self->priv->sources)
+ item->startObserving();
+ GST_OBJECT_UNLOCK(self);
result = GST_STATE_CHANGE_NO_PREROLL;
+ }
return result;
}
@@ -436,7 +452,7 @@
{
GRefPtr<GstElement> element = adoptGRef(GST_ELEMENT_CAST(gst_object_get_parent(parent)));
GstFlowReturn chainResult = gst_proxy_pad_chain_default(pad, GST_OBJECT_CAST(element.get()), buffer);
- auto* self = WEBKIT_MEDIA_STREAM_SRC(element.get());
+ auto* self = WEBKIT_MEDIA_STREAM_SRC_CAST(element.get());
GstFlowReturn result = gst_flow_combiner_update_pad_flow(self->priv->flowCombiner.get(), pad, chainResult);
if (result == GST_FLOW_FLUSHING)
@@ -445,12 +461,39 @@
return result;
}
-static void webkitMediaStreamSrcAddPad(WebKitMediaStreamSrc* self, GstPad* target, GstStaticPadTemplate* padTemplate, GRefPtr<GstTagList>&& tags, RealtimeMediaSource::Type sourceType)
+static void webkitMediaStreamSrcPostStreamCollection(WebKitMediaStreamSrc* self)
{
+ auto* priv = self->priv;
+
+ GST_OBJECT_LOCK(self);
+ if (priv->stream && (!priv->stream->active() || !priv->stream->hasTracks())) {
+ GST_OBJECT_UNLOCK(self);
+ return;
+ }
+
+ auto upstreamId = priv->stream ? priv->stream->id() : createCanonicalUUIDString();
+ priv->streamCollection = adoptGRef(gst_stream_collection_new(upstreamId.ascii().data()));
+ for (auto& track : priv->tracks) {
+ if (!track->isActive())
+ continue;
+ gst_stream_collection_add_stream(priv->streamCollection.get(), webkitMediaStreamNew(track.get()));
+ }
+
+ GST_OBJECT_UNLOCK(self);
+
+ GST_DEBUG_OBJECT(self, "Posting stream collection");
+ gst_element_post_message(GST_ELEMENT_CAST(self), gst_message_new_stream_collection(GST_OBJECT_CAST(self), priv->streamCollection.get()));
+}
+
+static void webkitMediaStreamSrcAddPad(WebKitMediaStreamSrc* self, GstPad* target, GstStaticPadTemplate* padTemplate, GRefPtr<GstTagList>&& tags, RealtimeMediaSource::Type sourceType, const String& padName)
+{
+ ASSERT(sourceType != RealtimeMediaSource::Type::None);
+ if (sourceType == RealtimeMediaSource::Type::None)
+ return;
+
GST_DEBUG_OBJECT(self, "%s Ghosting %" GST_PTR_FORMAT, gst_object_get_path_string(GST_OBJECT_CAST(self)), target);
- const char* padName = sourceType == RealtimeMediaSource::Type::Audio ? "audio_src" : "video_src";
- auto* ghostPad = webkitGstGhostPadFromStaticTemplate(padTemplate, padName, target);
+ auto* ghostPad = webkitGstGhostPadFromStaticTemplate(padTemplate, padName.ascii().data(), target);
gst_pad_set_active(ghostPad, TRUE);
gst_element_add_pad(GST_ELEMENT_CAST(self), ghostPad);
@@ -461,27 +504,14 @@
gst_pad_push_event(target, gst_event_new_tag(tags.leakRef()));
}
-static void webkitMediaStreamSrcRemovePad(WebKitMediaStreamSrc* self, const char* padName)
-{
- auto pad = adoptGRef(gst_element_get_static_pad(GST_ELEMENT_CAST(self), padName));
- if (!pad)
- return;
-
- auto proxyPad = adoptGRef(GST_PAD_CAST(gst_proxy_pad_get_internal(GST_PROXY_PAD(pad.get()))));
- if (proxyPad)
- gst_flow_combiner_remove_pad(self->priv->flowCombiner.get(), proxyPad.get());
-
- gst_pad_set_active(pad.get(), FALSE);
- gst_element_remove_pad(GST_ELEMENT_CAST(self), pad.get());
-}
-
struct ProbeData {
- ProbeData(GstElement* element, GstStaticPadTemplate* padTemplate, GRefPtr<GstTagList>&& tags, const char* trackId, RealtimeMediaSource::Type sourceType)
+ ProbeData(GstElement* element, GstStaticPadTemplate* padTemplate, GRefPtr<GstTagList>&& tags, const char* trackId, RealtimeMediaSource::Type sourceType, const String& padName)
: element(element)
, padTemplate(padTemplate)
, tags(WTFMove(tags))
, trackId(g_strdup(trackId))
, sourceType(sourceType)
+ , padName(padName)
{
}
@@ -490,12 +520,13 @@
GRefPtr<GstTagList> tags;
GUniquePtr<char> trackId;
RealtimeMediaSource::Type sourceType;
+ String padName;
};
static GstPadProbeReturn webkitMediaStreamSrcPadProbeCb(GstPad* pad, GstPadProbeInfo* info, ProbeData* data)
{
GstEvent* event = GST_PAD_PROBE_INFO_EVENT(info);
- WebKitMediaStreamSrc* self = WEBKIT_MEDIA_STREAM_SRC(data->element.get());
+ WebKitMediaStreamSrc* self = WEBKIT_MEDIA_STREAM_SRC_CAST(data->element.get());
GST_DEBUG_OBJECT(self, "Event %" GST_PTR_FORMAT, event);
switch (GST_EVENT_TYPE(event)) {
@@ -511,9 +542,9 @@
gst_event_set_group_id(streamStart, 1);
gst_pad_push_event(pad, streamStart);
- webkitMediaStreamSrcAddPad(self, pad, data->padTemplate, WTFMove(data->tags), data->sourceType);
+ webkitMediaStreamSrcAddPad(self, pad, data->padTemplate, WTFMove(data->tags), data->sourceType, data->padName);
callOnMainThreadAndWait([element = data->element] {
- webkitMediaStreamSrcPostStreamCollection(WEBKIT_MEDIA_STREAM_SRC(element.get()));
+ webkitMediaStreamSrcPostStreamCollection(WEBKIT_MEDIA_STREAM_SRC_CAST(element.get()));
});
return GST_PAD_PROBE_REMOVE;
}
@@ -524,100 +555,53 @@
return GST_PAD_PROBE_OK;
}
-static void webkitMediaStreamSrcSetupSrc(WebKitMediaStreamSrc* self, MediaStreamTrackPrivate* track, GstElement* element, GstStaticPadTemplate* padTemplate, bool onlyTrack)
+void webkitMediaStreamSrcAddTrack(WebKitMediaStreamSrc* self, MediaStreamTrackPrivate* track, bool onlyTrack)
{
const char* sourceType = "unknown";
- switch (track->source().type()) {
- case RealtimeMediaSource::Type::Audio:
+ unsigned counter = 0;
+ GstStaticPadTemplate* padTemplate = nullptr;
+ if (track->type() == RealtimeMediaSource::Type::Audio) {
+ padTemplate = &audioSrcTemplate;
sourceType = "audio";
- break;
- case RealtimeMediaSource::Type::Video:
+ counter = self->priv->audioPadCounter.exchangeAdd(1);
+ } else if (track->type() == RealtimeMediaSource::Type::Video) {
+ padTemplate = &videoSrcTemplate;
sourceType = "video";
- break;
- case RealtimeMediaSource::Type::None:
- ASSERT_NOT_REACHED();
+ counter = self->priv->videoPadCounter.exchangeAdd(1);
+ } else {
+ GST_WARNING_OBJECT(self, "Unsupported track type: %d", static_cast<int>(track->type()));
+ return;
}
+ StringBuilder padName;
+ padName.append(sourceType);
+ padName.append("_src");
+ padName.append(counter);
+
GST_DEBUG_OBJECT(self, "Setup %s source for track %s, only track: %s", sourceType, track->id().utf8().data(), boolForPrinting(onlyTrack));
+
+ auto source = makeUnique<InternalSource>(GST_ELEMENT_CAST(self), *track, padName.toString());
+ auto* element = source->get();
gst_bin_add(GST_BIN_CAST(self), element);
auto pad = adoptGRef(gst_element_get_static_pad(element, "src"));
auto tags = mediaStreamTrackPrivateGetTags(track);
if (!onlyTrack) {
- auto* data = new ProbeData(GST_ELEMENT_CAST(self), padTemplate, WTFMove(tags), track->id().utf8().data(), track->source().type());
+ auto* data = new ProbeData(GST_ELEMENT_CAST(self), padTemplate, WTFMove(tags), track->id().utf8().data(), track->source().type(), source->padName());
gst_pad_add_probe(pad.get(), GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM, reinterpret_cast<GstPadProbeCallback>(webkitMediaStreamSrcPadProbeCb), data, [](gpointer data) {
delete reinterpret_cast<ProbeData*>(data);
});
} else {
gst_pad_set_active(pad.get(), TRUE);
- webkitMediaStreamSrcAddPad(self, pad.get(), padTemplate, WTFMove(tags), track->source().type());
+ webkitMediaStreamSrcAddPad(self, pad.get(), padTemplate, WTFMove(tags), track->source().type(), source->padName());
}
-
- auto* priv = self->priv;
- track->addObserver(*priv->mediaStreamTrackObserver);
- switch (track->type()) {
- case RealtimeMediaSource::Type::Audio:
- track->source().addAudioSampleObserver(*priv->mediaStreamTrackObserver);
- break;
- case RealtimeMediaSource::Type::Video:
- track->source().addVideoSampleObserver(*priv->mediaStreamTrackObserver);
- break;
- case RealtimeMediaSource::Type::None:
- ASSERT_NOT_REACHED();
- }
-
gst_element_sync_state_with_parent(element);
-}
-static void webkitMediaStreamSrcPostStreamCollection(WebKitMediaStreamSrc* self)
-{
- auto* priv = self->priv;
-
- GST_OBJECT_LOCK(self);
- if (priv->stream && (!priv->stream->active() || !priv->stream->hasTracks())) {
- GST_OBJECT_UNLOCK(self);
- return;
- }
-
- auto upstreamId = priv->stream ? priv->stream->id() : createCanonicalUUIDString();
- priv->streamCollection = adoptGRef(gst_stream_collection_new(upstreamId.ascii().data()));
- for (auto& track : priv->tracks) {
- if (!track->isActive())
- continue;
- gst_stream_collection_add_stream(priv->streamCollection.get(), webkitMediaStreamNew(track.get()));
- }
-
- GST_OBJECT_UNLOCK(self);
-
- GST_DEBUG_OBJECT(self, "Posting stream collection");
- gst_element_post_message(GST_ELEMENT_CAST(self), gst_message_new_stream_collection(GST_OBJECT_CAST(self), priv->streamCollection.get()));
-}
-
-void webkitMediaStreamSrcAddTrack(WebKitMediaStreamSrc* self, MediaStreamTrackPrivate* track, bool onlyTrack)
-{
- auto* priv = self->priv;
- if (track->type() == RealtimeMediaSource::Type::Audio) {
- priv->audioSrc.emplace(track->isCaptureTrack(), track->type());
- webkitMediaStreamSrcSetupSrc(self, track, priv->audioSrc->get(), &audioSrcTemplate, onlyTrack);
- } else if (track->type() == RealtimeMediaSource::Type::Video) {
- priv->videoSrc.emplace(track->isCaptureTrack(), track->type());
- webkitMediaStreamSrcSetupSrc(self, track, priv->videoSrc->get(), &videoSrcTemplate, onlyTrack);
- } else
- GST_INFO_OBJECT(self, "Unsupported track type: %d", static_cast<int>(track->type()));
-
+ source->startObserving();
+ self->priv->sources.append(WTFMove(source));
self->priv->tracks.append(track);
}
-static void webkitMediaStreamSrcRemoveTrackByType(WebKitMediaStreamSrc* self, RealtimeMediaSource::Type trackType)
-{
- if (trackType == RealtimeMediaSource::Type::Audio)
- self->priv->audioSrc.reset();
- else if (trackType == RealtimeMediaSource::Type::Video)
- self->priv->videoSrc.reset();
- else
- GST_INFO_OBJECT(self, "Unsupported track type: %d", static_cast<int>(trackType));
-}
-
void webkitMediaStreamSrcSetStream(WebKitMediaStreamSrc* self, MediaStreamPrivate* stream, bool isVideoPlayer)
{
ASSERT(WEBKIT_IS_MEDIA_STREAM_SRC(self));
@@ -633,25 +617,13 @@
webkitMediaStreamSrcAddTrack(self, track.get(), onlyTrack);
}
webkitMediaStreamSrcPostStreamCollection(self);
- gst_element_no_more_pads(GST_ELEMENT_CAST(self));
}
-static void webkitMediaStreamSrcPushVideoSample(WebKitMediaStreamSrc* self, GstSample* sample)
+static void webkitMediaStreamSrcTrackEnded(WebKitMediaStreamSrc* self, InternalSource& source)
{
- if (self->priv->videoSrc)
- self->priv->videoSrc->pushSample(sample);
-}
-
-static void webkitMediaStreamSrcPushAudioSample(WebKitMediaStreamSrc* self, const GRefPtr<GstSample>& sample)
-{
- if (self->priv->audioSrc)
- self->priv->audioSrc->pushSample(sample.get());
-}
-
-static void webkitMediaStreamSrcTrackEnded(WebKitMediaStreamSrc* self, MediaStreamTrackPrivate& track)
-{
GRefPtr<GstPad> pad;
+ auto& track = source.track();
GST_DEBUG_OBJECT(self, "Track %s ended", track.label().utf8().data());
GST_OBJECT_LOCK(self);
GstElement* element = GST_ELEMENT_CAST(self);
@@ -684,8 +656,38 @@
auto tags = mediaStreamTrackPrivateGetTags(&track);
gst_pad_push_event(pad.get(), gst_event_new_tag(tags.leakRef()));
gst_pad_push_event(pad.get(), gst_event_new_eos());
+
+ GST_STATE_LOCK(GST_ELEMENT_CAST(self));
+ gst_element_set_locked_state(source.get(), true);
+ gst_element_set_state(source.get(), GST_STATE_NULL);
+ gst_bin_remove(GST_BIN_CAST(self), source.get());
+ gst_element_set_locked_state(source.get(), false);
+ GST_STATE_UNLOCK(GST_ELEMENT_CAST(self));
+
+ auto proxyPad = adoptGRef(GST_PAD_CAST(gst_proxy_pad_get_internal(GST_PROXY_PAD(pad.get()))));
+ if (proxyPad)
+ gst_flow_combiner_remove_pad(self->priv->flowCombiner.get(), proxyPad.get());
+
+ gst_pad_set_active(pad.get(), FALSE);
+ gst_element_remove_pad(GST_ELEMENT_CAST(self), pad.get());
}
+void InternalSource::trackEnded(MediaStreamTrackPrivate&)
+{
+ if (m_parent)
+ webkitMediaStreamSrcTrackEnded(WEBKIT_MEDIA_STREAM_SRC_CAST(m_parent), *this);
+
+ stopObserving();
+}
+
+void webkitMediaStreamSrcConfigureAudioTracks(WebKitMediaStreamSrc* self, float volume, bool isMuted, bool isPlaying)
+{
+ for (auto& source : self->priv->sources) {
+ if (source->track().type() == RealtimeMediaSource::Type::Audio)
+ source->configureAudioTrack(volume, isMuted, isPlaying);
+ }
+}
+
GstElement* webkitMediaStreamSrcNew()
{
return GST_ELEMENT_CAST(g_object_new(webkit_media_stream_src_get_type(), nullptr));