Modified: trunk/Source/WebCore/ChangeLog (208942 => 208943)
--- trunk/Source/WebCore/ChangeLog 2016-11-21 09:29:22 UTC (rev 208942)
+++ trunk/Source/WebCore/ChangeLog 2016-11-21 09:39:26 UTC (rev 208943)
@@ -1,3 +1,17 @@
+2016-11-21 Philippe Normand <[email protected]>
+
+ [GStreamer] Add volume and mute support to the WebRTC mediaplayer
+ https://bugs.webkit.org/show_bug.cgi?id=153828
+
+ Reviewed by Darin Adler.
+
+ * platform/graphics/gstreamer/MediaPlayerPrivateGStreamerOwr.cpp:
+ (WebCore::MediaPlayerPrivateGStreamerOwr::setVolume): New implementation setting the OWR source volume property.
+ (WebCore::MediaPlayerPrivateGStreamerOwr::setMuted): New implementation setting the OWR source mute property.
+ (WebCore::MediaPlayerPrivateGStreamerOwr::maybeHandleChangeMutedState): Also set audio OWR source mute state depending on the track enabled state.
+ (WebCore::MediaPlayerPrivateGStreamerOwr::trackEnabledChanged): Chain to maybeHandleChangeMutedState.
+ * platform/graphics/gstreamer/MediaPlayerPrivateGStreamerOwr.h:
+
2016-11-21 Alejandro G. Castro <[email protected]> and Philippe Normand <[email protected]>
[WebRTC][OpenWebRTC] RTP bundling support
Modified: trunk/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamerOwr.cpp (208942 => 208943)
--- trunk/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamerOwr.cpp 2016-11-21 09:29:22 UTC (rev 208942)
+++ trunk/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamerOwr.cpp 2016-11-21 09:39:26 UTC (rev 208943)
@@ -30,7 +30,6 @@
#include "NotImplemented.h"
#include "RealtimeMediaSourceOwr.h"
#include "URL.h"
-#include <gst/audio/streamvolume.h>
#include <owr/owr.h>
#include <owr/owr_gst_audio_renderer.h>
#include <owr/owr_gst_video_renderer.h>
@@ -101,6 +100,32 @@
return m_audioTrack;
}
+void MediaPlayerPrivateGStreamerOwr::setVolume(float volume)
+{
+ if (!m_audioTrack)
+ return;
+
+ auto& realTimeMediaSource = static_cast<RealtimeMediaSourceOwr&>(m_audioTrack->source());
+ auto mediaSource = OWR_MEDIA_SOURCE(realTimeMediaSource.mediaSource());
+
+ GST_DEBUG("Setting volume: %f", volume);
+ g_object_set(mediaSource, "volume", static_cast<gdouble>(volume), nullptr);
+}
+
+void MediaPlayerPrivateGStreamerOwr::setMuted(bool muted)
+{
+ if (!m_audioTrack)
+ return;
+
+ auto& realTimeMediaSource = static_cast<RealtimeMediaSourceOwr&>(m_audioTrack->source());
+ auto mediaSource = OWR_MEDIA_SOURCE(realTimeMediaSource.mediaSource());
+ if (!mediaSource)
+ return;
+
+ GST_DEBUG("Setting mute: %s", muted ? "on":"off");
+ g_object_set(mediaSource, "mute", muted, nullptr);
+}
+
float MediaPlayerPrivateGStreamerOwr::currentTime() const
{
gint64 position = GST_CLOCK_TIME_NONE;
@@ -300,13 +325,13 @@
void MediaPlayerPrivateGStreamerOwr::maybeHandleChangeMutedState(MediaStreamTrackPrivate& track)
{
- auto realTimeMediaSource = reinterpret_cast<RealtimeMediaSourceOwr*>(&track.source());
- auto mediaSource = OWR_MEDIA_SOURCE(realTimeMediaSource->mediaSource());
+ auto& realTimeMediaSource = static_cast<RealtimeMediaSourceOwr&>(track.source());
+ auto mediaSource = OWR_MEDIA_SOURCE(realTimeMediaSource.mediaSource());
- GST_DEBUG("%s track now %s", track.type() == RealtimeMediaSource::Audio ? "audio":"video", realTimeMediaSource->muted() ? "muted":"un-muted");
+ GST_DEBUG("%s track now %s", track.type() == RealtimeMediaSource::Audio ? "audio":"video", realTimeMediaSource.muted() ? "muted":"un-muted");
switch (track.type()) {
case RealtimeMediaSource::Audio:
- if (!realTimeMediaSource->muted()) {
+ if (!realTimeMediaSource.muted()) {
g_object_set(m_audioRenderer.get(), "disabled", false, nullptr);
owr_media_renderer_set_source(OWR_MEDIA_RENDERER(m_audioRenderer.get()), mediaSource);
} else {
@@ -313,9 +338,11 @@
g_object_set(m_audioRenderer.get(), "disabled", true, nullptr);
owr_media_renderer_set_source(OWR_MEDIA_RENDERER(m_audioRenderer.get()), nullptr);
}
+ if (mediaSource)
+ g_object_set(mediaSource, "mute", !track.enabled(), nullptr);
break;
case RealtimeMediaSource::Video:
- if (!realTimeMediaSource->muted()) {
+ if (!realTimeMediaSource.muted()) {
g_object_set(m_videoRenderer.get(), "disabled", false, nullptr);
owr_media_renderer_set_source(OWR_MEDIA_RENDERER(m_videoRenderer.get()), mediaSource);
} else {
@@ -333,9 +360,10 @@
GST_DEBUG("Track settings changed");
}
-void MediaPlayerPrivateGStreamerOwr::trackEnabledChanged(MediaStreamTrackPrivate&)
+void MediaPlayerPrivateGStreamerOwr::trackEnabledChanged(MediaStreamTrackPrivate& track)
{
- GST_DEBUG("Track enabled changed");
+ GST_DEBUG("%s track now %s", track.type() == RealtimeMediaSource::Audio ? "audio":"video", track.enabled() ? "enabled":"disabled");
+ maybeHandleChangeMutedState(track);
}
GstElement* MediaPlayerPrivateGStreamerOwr::createVideoSink()
Modified: trunk/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamerOwr.h (208942 => 208943)
--- trunk/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamerOwr.h 2016-11-21 09:29:22 UTC (rev 208942)
+++ trunk/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamerOwr.h 2016-11-21 09:39:26 UTC (rev 208943)
@@ -40,51 +40,54 @@
static void registerMediaEngine(MediaEngineRegistrar);
- void setSize(const IntSize&) override;
+ void setSize(const IntSize&) final;
private:
- GstElement* createVideoSink() override;
- GstElement* audioSink() const override { return m_audioSink.get(); }
- bool isLiveStream() const override { return true; }
+ GstElement* createVideoSink() final;
+ GstElement* audioSink() const final { return m_audioSink.get(); }
+ bool isLiveStream() const final { return true; }
- String engineDescription() const override { return "OpenWebRTC"; }
+ String engineDescription() const final { return "OpenWebRTC"; }
- void load(const String&) override;
+ void load(const String&) final;
#if ENABLE(MEDIA_SOURCE)
- void load(const String&, MediaSourcePrivateClient*) override;
+ void load(const String&, MediaSourcePrivateClient*) final;
#endif
- void load(MediaStreamPrivate&) override;
- void cancelLoad() override { }
+ void load(MediaStreamPrivate&) final;
+ void cancelLoad() final { }
- void prepareToPlay() override { }
- void play() override;
- void pause() override;
+ void prepareToPlay() final { }
+ void play() final;
+ void pause() final;
- bool hasVideo() const override;
- bool hasAudio() const override;
+ bool hasVideo() const final;
+ bool hasAudio() const final;
- float duration() const override { return 0; }
+ float duration() const final { return 0; }
- float currentTime() const override;
- void seek(float) override { }
- bool seeking() const override { return false; }
+ float currentTime() const final;
+ void seek(float) final { }
+ bool seeking() const final { return false; }
- void setRate(float) override { }
- void setPreservesPitch(bool) override { }
- bool paused() const override { return m_paused; }
+ void setRate(float) final { }
+ void setPreservesPitch(bool) final { }
+ bool paused() const final { return m_paused; }
- bool hasClosedCaptions() const override { return false; }
- void setClosedCaptionsVisible(bool) override { };
+ void setVolume(float) final;
+ void setMuted(bool) final;
- float maxTimeSeekable() const override { return 0; }
- std::unique_ptr<PlatformTimeRanges> buffered() const override { return std::make_unique<PlatformTimeRanges>(); }
- bool didLoadingProgress() const override;
+ bool hasClosedCaptions() const final { return false; }
+ void setClosedCaptionsVisible(bool) final { };
- unsigned long long totalBytes() const override { return 0; }
+ float maxTimeSeekable() const final { return 0; }
+ std::unique_ptr<PlatformTimeRanges> buffered() const final { return std::make_unique<PlatformTimeRanges>(); }
+ bool didLoadingProgress() const final;
- bool canLoadPoster() const override { return false; }
- void setPoster(const String&) override { }
+ unsigned long long totalBytes() const final { return 0; }
+ bool canLoadPoster() const final { return false; }
+ void setPoster(const String&) final { }
+
// MediaStreamTrackPrivate::Observer implementation.
void trackEnded(MediaStreamTrackPrivate&) final;
void trackMutedChanged(MediaStreamTrackPrivate&) final;