Title: [252938] trunk/Source/WebCore
Revision: 252938
Author: ph...@webkit.org
Date: 2019-11-29 03:11:00 -0800 (Fri, 29 Nov 2019)

Log Message

Unreviewed, rolling out r252937.

broke GTK/WPE builds and most likely media track notification
support

Reverted changeset:

"[GStreamer] MediaPlayerPrivateGStreamer style cleanups"
https://bugs.webkit.org/show_bug.cgi?id=204617
https://trac.webkit.org/changeset/252937

Modified Paths

trunk/Source/WebCore/ChangeLog
trunk/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.cpp
trunk/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.h

Diff

Modified: trunk/Source/WebCore/ChangeLog (252937 => 252938)


--- trunk/Source/WebCore/ChangeLog	2019-11-29 10:24:54 UTC (rev 252937)
+++ trunk/Source/WebCore/ChangeLog	2019-11-29 11:11:00 UTC (rev 252938)
@@ -1,3 +1,16 @@
+2019-11-29  Philippe Normand  <pnorm...@igalia.com>
+
+        Unreviewed, rolling out r252937.
+
+        broke GTK/WPE builds and most likely media track notification
+        support
+
+        Reverted changeset:
+
+        "[GStreamer] MediaPlayerPrivateGStreamer style cleanups"
+        https://bugs.webkit.org/show_bug.cgi?id=204617
+        https://trac.webkit.org/changeset/252937
+
 2019-11-29  Charlie Turner  <ctur...@igalia.com>
 
         [GStreamer] MediaPlayerPrivateGStreamer style cleanups

Modified: trunk/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.cpp (252937 => 252938)


--- trunk/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.cpp	2019-11-29 10:24:54 UTC (rev 252937)
+++ trunk/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.cpp	2019-11-29 11:11:00 UTC (rev 252938)
@@ -336,12 +336,9 @@
 };
 #endif
 
-static void initializeDebugCategory()
+void MediaPlayerPrivateGStreamer::initializeDebugCategory()
 {
-    static std::once_flag onceFlag;
-    std::call_once(onceFlag, [] {
-        GST_DEBUG_CATEGORY_INIT(webkit_media_player_debug, "webkitmediaplayer", 0, "WebKit media player");
-    });
+    GST_DEBUG_CATEGORY_INIT(webkit_media_player_debug, "webkitmediaplayer", 0, "WebKit media player");
 }
 
 MediaPlayerPrivateGStreamer::MediaPlayerPrivateGStreamer(MediaPlayer* player)
@@ -500,7 +497,7 @@
     GST_DEBUG_OBJECT(pipeline(), "preload: %s", convertEnumerationToString(m_preload).utf8().data());
     if (m_preload == MediaPlayer::None) {
         GST_INFO_OBJECT(pipeline(), "Delaying load.");
-        m_isDelayingLoad = true;
+        m_delayingLoad = true;
     }
 
     // Reset network and ready states. Those will be set properly once
@@ -509,10 +506,10 @@
     m_player->networkStateChanged();
     m_readyState = MediaPlayer::HaveNothing;
     m_player->readyStateChanged();
-    m_areVolumeAndMuteInitialized = false;
+    m_volumeAndMuteInitialized = false;
     m_hasTaintedOrigin = WTF::nullopt;
 
-    if (!m_isDelayingLoad)
+    if (!m_delayingLoad)
         commitLoad();
 }
 
@@ -544,6 +541,19 @@
 }
 #endif
 
+void MediaPlayerPrivateGStreamer::commitLoad()
+{
+    ASSERT(!m_delayingLoad);
+    GST_DEBUG_OBJECT(pipeline(), "Committing load.");
+
+    // GStreamer needs to have the pipeline set to a paused state to
+    // start providing anything useful.
+    changePipelineState(GST_STATE_PAUSED);
+
+    updateDownloadBufferingFlag();
+    updateStates();
+}
+
 void MediaPlayerPrivateGStreamer::cancelLoad()
 {
     if (m_networkState < MediaPlayer::Loading || m_networkState == MediaPlayer::Loaded)
@@ -557,8 +567,8 @@
 {
     GST_DEBUG_OBJECT(pipeline(), "Prepare to play");
     m_preload = MediaPlayer::Auto;
-    if (m_isDelayingLoad) {
-        m_isDelayingLoad = false;
+    if (m_delayingLoad) {
+        m_delayingLoad = false;
         commitLoad();
     }
 }
@@ -566,13 +576,13 @@
 void MediaPlayerPrivateGStreamer::play()
 {
     if (!m_playbackRate) {
-        m_isPlaybackRatePaused = true;
+        m_playbackRatePause = true;
         return;
     }
 
     if (changePipelineState(GST_STATE_PLAYING)) {
         m_isEndReached = false;
-        m_isDelayingLoad = false;
+        m_delayingLoad = false;
         m_preload = MediaPlayer::Auto;
         updateDownloadBufferingFlag();
         GST_INFO_OBJECT(pipeline(), "Play");
@@ -582,7 +592,7 @@
 
 void MediaPlayerPrivateGStreamer::pause()
 {
-    m_isPlaybackRatePaused = false;
+    m_playbackRatePause = false;
     GstState currentState, pendingState;
     gst_element_get_state(m_pipeline.get(), &currentState, &pendingState, 0);
     if (currentState < GST_STATE_PAUSED && pendingState <= GST_STATE_PAUSED)
@@ -604,7 +614,7 @@
         return true;
     }
 
-    if (m_isPlaybackRatePaused) {
+    if (m_playbackRatePause) {
         GST_DEBUG_OBJECT(pipeline(), "Playback rate is 0, simulating PAUSED state");
         return false;
     }
@@ -623,8 +633,12 @@
 
     if (rate < 0) {
         startTime = MediaTime::zeroTime();
-        // If we are at beginning of media, start from the end to avoid immediate EOS.
-        endTime = position < MediaTime::zeroTime() ? durationMediaTime() : position;
+        // If we are at beginning of media, start from the end to
+        // avoid immediate EOS.
+        if (position < MediaTime::zeroTime())
+            endTime = durationMediaTime();
+        else
+            endTime = position;
     }
 
     if (!rate)
@@ -636,9 +650,12 @@
 
 void MediaPlayerPrivateGStreamer::seek(const MediaTime& mediaTime)
 {
-    if (!m_pipeline || m_didErrorOccur)
+    if (!m_pipeline)
         return;
 
+    if (m_errorOccured)
+        return;
+
     GST_INFO_OBJECT(pipeline(), "[Seek] seek attempt to %s", toString(mediaTime).utf8().data());
 
     // Avoid useless seeking.
@@ -649,7 +666,7 @@
 
     MediaTime time = std::min(mediaTime, durationMediaTime());
 
-    if (m_isLiveStream) {
+    if (isLiveStream()) {
         GST_DEBUG_OBJECT(pipeline(), "[Seek] Live stream seek unhandled");
         return;
     }
@@ -656,9 +673,9 @@
 
     GST_INFO_OBJECT(pipeline(), "[Seek] seeking to %s", toString(time).utf8().data());
 
-    if (m_isSeeking) {
+    if (m_seeking) {
         m_timeOfOverlappingSeek = time;
-        if (m_isSeekPending) {
+        if (m_seekIsPending) {
             m_seekTime = time;
             return;
         }
@@ -671,10 +688,10 @@
         return;
     }
     if (getStateResult == GST_STATE_CHANGE_ASYNC || state < GST_STATE_PAUSED || m_isEndReached) {
-        m_isSeekPending = true;
+        m_seekIsPending = true;
         if (m_isEndReached) {
             GST_DEBUG_OBJECT(pipeline(), "[Seek] reset pipeline");
-            m_shouldResetPipeline = true;
+            m_resetPipeline = true;
             if (!changePipelineState(GST_STATE_PAUSED))
                 loadingFailed(MediaPlayer::Empty);
         }
@@ -686,7 +703,7 @@
         }
     }
 
-    m_isSeeking = true;
+    m_seeking = true;
     m_seekTime = time;
     m_isEndReached = false;
 }
@@ -693,13 +710,13 @@
 
 void MediaPlayerPrivateGStreamer::updatePlaybackRate()
 {
-    if (!m_isChangingRate)
+    if (!m_changingRate)
         return;
 
     GST_INFO_OBJECT(pipeline(), "Set Rate to %f", m_playbackRate);
 
     // Mute the sound if the playback rate is negative or too extreme and audio pitch is not adjusted.
-    bool mute = m_playbackRate <= 0 || (!m_shouldPreservePitch && (m_playbackRate < 0.8 || m_playbackRate > 2));
+    bool mute = m_playbackRate <= 0 || (!m_preservesPitch && (m_playbackRate < 0.8 || m_playbackRate > 2));
 
     GST_INFO_OBJECT(pipeline(), mute ? "Need to mute audio" : "Do not need to mute audio");
 
@@ -711,19 +728,43 @@
         GST_ERROR("Set rate to %f failed", m_playbackRate);
     }
 
-    if (m_isPlaybackRatePaused) {
-        GstState state, pending;
+    if (m_playbackRatePause) {
+        GstState state;
+        GstState pending;
 
         gst_element_get_state(m_pipeline.get(), &state, &pending, 0);
         if (state != GST_STATE_PLAYING && pending != GST_STATE_PLAYING)
             changePipelineState(GST_STATE_PLAYING);
-        m_isPlaybackRatePaused = false;
+        m_playbackRatePause = false;
     }
 
-    m_isChangingRate = false;
+    m_changingRate = false;
     m_player->rateChanged();
 }
 
+MediaTime MediaPlayerPrivateGStreamer::platformDuration() const
+{
+    if (!m_pipeline)
+        return MediaTime::invalidTime();
+
+    GST_TRACE_OBJECT(pipeline(), "errorOccured: %s, pipeline state: %s", boolForPrinting(m_errorOccured), gst_element_state_get_name(GST_STATE(m_pipeline.get())));
+    if (m_errorOccured)
+        return MediaTime::invalidTime();
+
+    // The duration query would fail on a not-prerolled pipeline.
+    if (GST_STATE(m_pipeline.get()) < GST_STATE_PAUSED)
+        return MediaTime::invalidTime();
+
+    int64_t duration = 0;
+    if (!gst_element_query_duration(m_pipeline.get(), GST_FORMAT_TIME, &duration) || !GST_CLOCK_TIME_IS_VALID(duration)) {
+        GST_DEBUG_OBJECT(pipeline(), "Time duration query failed for %s", m_url.string().utf8().data());
+        return MediaTime::positiveInfiniteTime();
+    }
+
+    GST_LOG_OBJECT(pipeline(), "Duration: %" GST_TIME_FORMAT, GST_TIME_ARGS(duration));
+    return MediaTime(duration, GST_SECOND);
+}
+
 MediaTime MediaPlayerPrivateGStreamer::durationMediaTime() const
 {
     GST_TRACE_OBJECT(pipeline(), "Cached duration: %s", m_cachedDuration.toString().utf8().data());
@@ -741,11 +782,11 @@
 
 MediaTime MediaPlayerPrivateGStreamer::currentMediaTime() const
 {
-    if (!m_pipeline || m_didErrorOccur)
+    if (!m_pipeline || m_errorOccured)
         return MediaTime::invalidTime();
 
-    GST_TRACE_OBJECT(pipeline(), "seeking: %s, seekTime: %s", boolForPrinting(m_isSeeking), m_seekTime.toString().utf8().data());
-    if (m_isSeeking)
+    GST_TRACE_OBJECT(pipeline(), "seeking: %s, seekTime: %s", boolForPrinting(m_seeking), m_seekTime.toString().utf8().data());
+    if (m_seeking)
         return m_seekTime;
 
     return playbackPosition();
@@ -753,36 +794,36 @@
 
 void MediaPlayerPrivateGStreamer::setRate(float rate)
 {
-    float rateClamped = clampTo(rate, -20.0, 20.0);
-    if (rateClamped != rate)
-        GST_WARNING("Clamping original rate (%f) to [-20, 20] (%f), higher rates cause crashes", rate, rateClamped);
+    // Higher rate causes crash.
+    rate = clampTo(rate, -20.0, 20.0);
 
     // Avoid useless playback rate update.
-    if (m_playbackRate == rateClamped) {
-        // And make sure that upper layers were notified if rate was set.
+    if (m_playbackRate == rate) {
+        // and make sure that upper layers were notified if rate was set
 
-        if (!m_isChangingRate && m_player->rate() != m_playbackRate)
+        if (!m_changingRate && m_player->rate() != m_playbackRate)
             m_player->rateChanged();
         return;
     }
 
-    if (m_isLiveStream) {
-        // Notify upper layers that we cannot handle passed rate.
-        m_isChangingRate = false;
+    if (isLiveStream()) {
+        // notify upper layers that we cannot handle passed rate.
+        m_changingRate = false;
         m_player->rateChanged();
         return;
     }
 
-    GstState state, pending;
+    GstState state;
+    GstState pending;
 
-    m_playbackRate = rateClamped;
-    m_isChangingRate = true;
+    m_playbackRate = rate;
+    m_changingRate = true;
 
     gst_element_get_state(m_pipeline.get(), &state, &pending, 0);
 
-    if (!rateClamped) {
-        m_isChangingRate = false;
-        m_isPlaybackRatePaused = true;
+    if (!rate) {
+        m_changingRate = false;
+        m_playbackRatePause = true;
         if (state != GST_STATE_PAUSED && pending != GST_STATE_PAUSED)
             changePipelineState(GST_STATE_PAUSED);
         return;
@@ -802,20 +843,20 @@
 
 void MediaPlayerPrivateGStreamer::setPreservesPitch(bool preservesPitch)
 {
-    m_shouldPreservePitch = preservesPitch;
+    m_preservesPitch = preservesPitch;
 }
 
 void MediaPlayerPrivateGStreamer::setPreload(MediaPlayer::Preload preload)
 {
     GST_DEBUG_OBJECT(pipeline(), "Setting preload to %s", convertEnumerationToString(preload).utf8().data());
-    if (preload == MediaPlayer::Auto && m_isLiveStream)
+    if (preload == MediaPlayer::Auto && isLiveStream())
         return;
 
     m_preload = preload;
     updateDownloadBufferingFlag();
 
-    if (m_isDelayingLoad && m_preload != MediaPlayer::None) {
-        m_isDelayingLoad = false;
+    if (m_delayingLoad && m_preload != MediaPlayer::None) {
+        m_delayingLoad = false;
         commitLoad();
     }
 }
@@ -823,7 +864,7 @@
 std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateGStreamer::buffered() const
 {
     auto timeRanges = makeUnique<PlatformTimeRanges>();
-    if (m_didErrorOccur || m_isLiveStream)
+    if (m_errorOccured || isLiveStream())
         return timeRanges;
 
     MediaTime mediaDuration = durationMediaTime();
@@ -835,8 +876,8 @@
     if (!gst_element_query(m_pipeline.get(), query.get()))
         return timeRanges;
 
-    unsigned numBufferingRanges = gst_query_get_n_buffering_ranges(query.get());
-    for (unsigned index = 0; index < numBufferingRanges; index++) {
+    guint numBufferingRanges = gst_query_get_n_buffering_ranges(query.get());
+    for (guint index = 0; index < numBufferingRanges; index++) {
         gint64 rangeStart = 0, rangeStop = 0;
         if (gst_query_parse_nth_buffering_range(query.get(), index, &rangeStart, &rangeStop)) {
             uint64_t startTime = gst_util_uint64_scale_int_round(toGstUnsigned64Time(mediaDuration), rangeStart, GST_FORMAT_PERCENT_MAX);
@@ -845,7 +886,8 @@
         }
     }
 
-    // Fallback to the more general maxTimeLoaded() if no range has been found.
+    // Fallback to the more general maxTimeLoaded() if no range has
+    // been found.
     if (!timeRanges->length()) {
         MediaTime loaded = maxTimeLoaded();
         if (loaded.isValid() && loaded)
@@ -857,16 +899,16 @@
 
 MediaTime MediaPlayerPrivateGStreamer::maxMediaTimeSeekable() const
 {
-    GST_TRACE_OBJECT(pipeline(), "errorOccured: %s, isLiveStream: %s", boolForPrinting(m_didErrorOccur), boolForPrinting(m_isLiveStream));
-    if (m_didErrorOccur)
+    GST_TRACE_OBJECT(pipeline(), "errorOccured: %s, isLiveStream: %s", boolForPrinting(m_errorOccured), boolForPrinting(isLiveStream()));
+    if (m_errorOccured)
         return MediaTime::zeroTime();
 
-    if (m_isLiveStream)
+    if (isLiveStream())
         return MediaTime::zeroTime();
 
     MediaTime duration = durationMediaTime();
     GST_DEBUG_OBJECT(pipeline(), "maxMediaTimeSeekable, duration: %s", toString(duration).utf8().data());
-    // Infinite duration means live stream.
+    // infinite duration means live stream
     if (duration.isPositiveInfinite())
         return MediaTime::zeroTime();
 
@@ -875,7 +917,7 @@
 
 MediaTime MediaPlayerPrivateGStreamer::maxTimeLoaded() const
 {
-    if (m_didErrorOccur)
+    if (m_errorOccured)
         return MediaTime::zeroTime();
 
     MediaTime loaded = m_maxTimeLoaded;
@@ -887,7 +929,7 @@
 
 bool MediaPlayerPrivateGStreamer::didLoadingProgress() const
 {
-    if (m_didErrorOccur || m_loadingStalled)
+    if (m_errorOccured || m_loadingStalled)
         return false;
 
     if (WEBKIT_IS_WEB_SRC(m_source.get())) {
@@ -910,22 +952,29 @@
 
 unsigned long long MediaPlayerPrivateGStreamer::totalBytes() const
 {
-    if (m_didErrorOccur || !m_source || m_isLiveStream)
+    if (m_errorOccured)
         return 0;
 
     if (m_totalBytes)
         return m_totalBytes;
 
+    if (!m_source)
+        return 0;
+
+    if (isLiveStream())
+        return 0;
+
     GstFormat fmt = GST_FORMAT_BYTES;
     gint64 length = 0;
     if (gst_element_query_duration(m_source.get(), fmt, &length)) {
         GST_INFO_OBJECT(pipeline(), "totalBytes %" G_GINT64_FORMAT, length);
         m_totalBytes = static_cast<unsigned long long>(length);
-        m_isLiveStream = !length;
+        m_isStreaming = !length;
         return m_totalBytes;
     }
 
-    // Fall back to querying the source pads manually. See also https://bugzilla.gnome.org/show_bug.cgi?id=638749
+    // Fall back to querying the source pads manually.
+    // See also https://bugzilla.gnome.org/show_bug.cgi?id=638749
     GstIterator* iter = gst_element_iterate_src_pads(m_source.get());
     bool done = false;
     while (!done) {
@@ -955,7 +1004,7 @@
 
     GST_INFO_OBJECT(pipeline(), "totalBytes %" G_GINT64_FORMAT, length);
     m_totalBytes = static_cast<unsigned long long>(length);
-    m_isLiveStream = !length;
+    m_isStreaming = !length;
     return m_totalBytes;
 }
 
@@ -1080,7 +1129,8 @@
 {
     ASSERT(m_pipeline);
 
-    GstState currentState, pending;
+    GstState currentState;
+    GstState pending;
 
     gst_element_get_state(m_pipeline.get(), &currentState, &pending, 0);
     if (currentState == newState || pending == newState) {
@@ -1097,10 +1147,13 @@
     if (currentState != pausedOrPlaying && setStateResult == GST_STATE_CHANGE_FAILURE)
         return false;
 
-    // Create a timer when entering the READY state so that we can free resources if we stay for too long on READY.
-    // Also lets remove the timer if we request a state change for any state other than READY. See also https://bugs.webkit.org/show_bug.cgi?id=117354
+    // Create a timer when entering the READY state so that we can free resources
+    // if we stay for too long on READY.
+    // Also lets remove the timer if we request a state change for any state other than READY.
+    // See also https://bugs.webkit.org/show_bug.cgi?id=117354
     if (newState == GST_STATE_READY && !m_readyTimerHandler.isActive()) {
-        // Max interval in seconds to stay in the READY state on manual state change requests.
+        // Max interval in seconds to stay in the READY state on manual
+        // state change requests.
         static const Seconds readyStateTimerDelay { 1_min };
         m_readyTimerHandler.startOneShot(readyStateTimerDelay);
     } else if (newState != GST_STATE_READY)
@@ -1144,10 +1197,7 @@
     setSyncOnClock(audioSink(), sync);
 }
 
-enum MediaType {
-    Video, Audio, Text
-};
-void MediaPlayerPrivateGStreamer::notifyPlayerOf(MediaType mediaType)
+void MediaPlayerPrivateGStreamer::notifyPlayerOfVideo()
 {
     if (UNLIKELY(!m_pipeline || !m_source))
         return;
@@ -1154,27 +1204,10 @@
 
     ASSERT(m_isLegacyPlaybin || isMediaSource());
 
-    const char *mediaType;
-    const char *sourceType;
-    switch (mediaType) {
-    case MediaType::Video:
-        mediaType = "video";
-        sourceType = "n-video";        
-        break;
-    case MediaType::Audio:
-        mediaType = "audio";
-        sourceType = "n-audio";        
-        break;
-    case MediaType::Text:
-        mediaType = "text";
-        sourceType = "n-text";
-        break;
-    }
-
-    unsigned numTracks = 0;
+    gint numTracks = 0;
     bool useMediaSource = isMediaSource();
     GstElement* element = useMediaSource ? m_source.get() : m_pipeline.get();
-    g_object_get(element, sourceType, &numTracks, nullptr);
+    g_object_get(element, "n-video", &numTracks, nullptr);
 
     GST_INFO_OBJECT(pipeline(), "Media has %d video tracks", numTracks);
 
@@ -1194,7 +1227,7 @@
 
 #if ENABLE(VIDEO_TRACK)
     Vector<String> validVideoStreams;
-    for (unsigned i = 0; i < numTracks; ++i) {
+    for (gint i = 0; i < numTracks; ++i) {
         GRefPtr<GstPad> pad;
         g_signal_emit_by_name(m_pipeline.get(), "get-video-pad", i, &pad.outPtr(), nullptr);
         ASSERT(pad);
@@ -1201,7 +1234,7 @@
 
         String streamId = "V" + String::number(i);
         validVideoStreams.append(streamId);
-        if (i < m_videoTracks.size()) {
+        if (i < static_cast<gint>(m_videoTracks.size())) {
             RefPtr<VideoTrackPrivateGStreamer> existingTrack = m_videoTracks.get(streamId);
             if (existingTrack) {
                 existingTrack->setIndex(i);
@@ -1222,10 +1255,6 @@
     m_player->client().mediaPlayerEngineUpdated(m_player);
 }
 
-void MediaPlayerPrivateGStreamer::notifyPlayerOfVideo()
-{
-}
-
 void MediaPlayerPrivateGStreamer::videoSinkCapsChangedCallback(MediaPlayerPrivateGStreamer* player)
 {
     player->m_notifier->notify(MainThreadNotification::VideoCapsChanged, [player] {
@@ -1253,7 +1282,7 @@
 
     ASSERT(m_isLegacyPlaybin || isMediaSource());
 
-    unsigned numTracks = 0;
+    gint numTracks = 0;
     bool useMediaSource = isMediaSource();
     GstElement* element = useMediaSource ? m_source.get() : m_pipeline.get();
     g_object_get(element, "n-audio", &numTracks, nullptr);
@@ -1272,7 +1301,7 @@
 
 #if ENABLE(VIDEO_TRACK)
     Vector<String> validAudioStreams;
-    for (unsigned i = 0; i < numTracks; ++i) {
+    for (gint i = 0; i < numTracks; ++i) {
         GRefPtr<GstPad> pad;
         g_signal_emit_by_name(m_pipeline.get(), "get-audio-pad", i, &pad.outPtr(), nullptr);
         ASSERT(pad);
@@ -1279,7 +1308,7 @@
 
         String streamId = "A" + String::number(i);
         validAudioStreams.append(streamId);
-        if (i < m_audioTracks.size()) {
+        if (i < static_cast<gint>(m_audioTracks.size())) {
             RefPtr<AudioTrackPrivateGStreamer> existingTrack = m_audioTracks.get(streamId);
             if (existingTrack) {
                 existingTrack->setIndex(i);
@@ -1315,7 +1344,7 @@
 
     ASSERT(m_isLegacyPlaybin || isMediaSource());
 
-    unsigned numTracks = 0;
+    gint numTracks = 0;
     bool useMediaSource = isMediaSource();
     GstElement* element = useMediaSource ? m_source.get() : m_pipeline.get();
     g_object_get(element, "n-text", &numTracks, nullptr);
@@ -1328,7 +1357,7 @@
     }
 
     Vector<String> validTextStreams;
-    for (unsigned i = 0; i < numTracks; ++i) {
+    for (gint i = 0; i < numTracks; ++i) {
         GRefPtr<GstPad> pad;
         g_signal_emit_by_name(m_pipeline.get(), "get-text-pad", i, &pad.outPtr(), nullptr);
         ASSERT(pad);
@@ -1340,7 +1369,7 @@
         String streamId = "T" + String::number(i);
 
         validTextStreams.append(streamId);
-        if (i < m_textTracks.size()) {
+        if (i < static_cast<gint>(m_textTracks.size())) {
             RefPtr<InbandTextTrackPrivateGStreamer> existingTrack = m_textTracks.get(streamId);
             if (existingTrack) {
                 existingTrack->setIndex(i);
@@ -1393,56 +1422,9 @@
 }
 #endif
 
-MediaTime MediaPlayerPrivateGStreamer::platformDuration() const
-{
-    if (!m_pipeline)
-        return MediaTime::invalidTime();
-
-    GST_TRACE_OBJECT(pipeline(), "errorOccured: %s, pipeline state: %s", boolForPrinting(m_didErrorOccur), gst_element_state_get_name(GST_STATE(m_pipeline.get())));
-    if (m_didErrorOccur)
-        return MediaTime::invalidTime();
-
-    // The duration query would fail on a not-prerolled pipeline.
-    if (GST_STATE(m_pipeline.get()) < GST_STATE_PAUSED)
-        return MediaTime::invalidTime();
-
-    int64_t duration = 0;
-    if (!gst_element_query_duration(m_pipeline.get(), GST_FORMAT_TIME, &duration) || !GST_CLOCK_TIME_IS_VALID(duration)) {
-        GST_DEBUG_OBJECT(pipeline(), "Time duration query failed for %s", m_url.string().utf8().data());
-        return MediaTime::positiveInfiniteTime();
-    }
-
-    GST_LOG_OBJECT(pipeline(), "Duration: %" GST_TIME_FORMAT, GST_TIME_ARGS(duration));
-    return MediaTime(duration, GST_SECOND);
-}
-
-bool MediaPlayerPrivateGStreamer::isMuted() const
-{
-    if (!m_volumeElement)
-        return false;
-
-    gboolean isMuted;
-    g_object_get(m_volumeElement.get(), "mute", &isMuted, nullptr);
-    GST_INFO_OBJECT(pipeline(), "Player is muted: %s", boolForPrinting(!!isMuted));
-    return isMuted;
-}
-
-void MediaPlayerPrivateGStreamer::commitLoad()
-{
-    ASSERT(!m_isDelayingLoad);
-    GST_DEBUG_OBJECT(pipeline(), "Committing load.");
-
-    // GStreamer needs to have the pipeline set to a paused state to
-    // start providing anything useful.
-    changePipelineState(GST_STATE_PAUSED);
-
-    updateDownloadBufferingFlag();
-    updateStates();
-}
-
 void MediaPlayerPrivateGStreamer::fillTimerFired()
 {
-    if (m_didErrorOccur) {
+    if (m_errorOccured) {
         GST_DEBUG_OBJECT(pipeline(), "[Buffering] An error occurred, disabling the fill timer");
         m_fillTimer.stop();
         return;
@@ -1484,7 +1466,7 @@
 {
     GST_WARNING("Loading failed, error: %s", convertEnumerationToString(networkError).utf8().data());
 
-    m_didErrorOccur = true;
+    m_errorOccured = true;
     if (forceNotifications || m_networkState != networkError) {
         m_networkState = networkError;
         m_player->networkStateChanged();
@@ -1527,8 +1509,8 @@
 
 MediaTime MediaPlayerPrivateGStreamer::playbackPosition() const
 {
-    GST_TRACE_OBJECT(pipeline(), "isEndReached: %s, seeking: %s, seekTime: %s", boolForPrinting(m_isEndReached), boolForPrinting(m_isSeeking), m_seekTime.toString().utf8().data());
-    if (m_isEndReached && m_isSeeking)
+    GST_TRACE_OBJECT(pipeline(), "isEndReached: %s, seeking: %s, seekTime: %s", boolForPrinting(m_isEndReached), boolForPrinting(m_seeking), m_seekTime.toString().utf8().data());
+    if (m_isEndReached && m_seeking)
         return m_seekTime;
 
     // This constant should remain lower than HTMLMediaElement's maxTimeupdateEventFrequency.
@@ -1548,11 +1530,10 @@
         gst_query_parse_position(query, 0, &position);
     gst_query_unref(query);
 
-    GstClockTime gstreamerPosition = static_cast<GstClockTime>(position);
-    GST_TRACE_OBJECT(pipeline(), "Position %" GST_TIME_FORMAT ", canFallBackToLastFinishedSeekPosition: %s", GST_TIME_ARGS(gstreamerPosition), boolForPrinting(m_canFallBackToLastFinishedSeekPosition));
+    GST_TRACE_OBJECT(pipeline(), "Position %" GST_TIME_FORMAT ", canFallBackToLastFinishedSeekPosition: %s", GST_TIME_ARGS(position), boolForPrinting(m_canFallBackToLastFinishedSeekPosition));
 
     MediaTime playbackPosition = MediaTime::zeroTime();
-
+    GstClockTime gstreamerPosition = static_cast<GstClockTime>(position);
     if (GST_CLOCK_TIME_IS_VALID(gstreamerPosition))
         playbackPosition = MediaTime(gstreamerPosition, GST_SECOND);
     else if (m_canFallBackToLastFinishedSeekPosition)
@@ -1628,7 +1609,6 @@
             selectedStreams.append(m_currentVideoStreamId);
         break;
     case TrackPrivateBaseGStreamer::TrackType::Unknown:
-        FALLTHROUGH;
     default:
         ASSERT_NOT_REACHED();
     }
@@ -1680,7 +1660,7 @@
             GST_WARNING("Unknown track type found for stream %s", streamId.utf8().data());
     }
 
-    if (oldHasVideo != m_hasVideo || oldHasAudio != m_hasAudio)
+    if ((oldHasVideo != m_hasVideo) || (oldHasAudio != m_hasAudio))
         m_player->characteristicChanged();
 
     if (m_hasVideo)
@@ -1740,9 +1720,7 @@
         return false;
 
     const gchar* contextType;
-    if (!gst_message_parse_context_type(message, &contextType))
-        return false;
-
+    gst_message_parse_context_type(message, &contextType);
     GST_DEBUG_OBJECT(pipeline(), "Handling %s need-context message for %s", contextType, GST_MESSAGE_SRC_NAME(message));
 
     if (!g_strcmp0(contextType, WEBKIT_WEB_SRC_PLAYER_CONTEXT_TYPE_NAME)) {
@@ -1793,7 +1771,7 @@
             gst_structure_set(contextStructure, "decryption-system-id", G_TYPE_STRING, preferredKeySystemUuid, nullptr);
             gst_element_set_context(GST_ELEMENT(GST_MESSAGE_SRC(message)), context.get());
         } else
-            GST_WARNING("CDM instance not initialized");
+            GST_WARNING("CDM instance not initializaed");
 
         return true;
     }
@@ -1803,7 +1781,7 @@
     return false;
 }
 
-// Returns the size of the video.
+// Returns the size of the video
 FloatSize MediaPlayerPrivateGStreamer::naturalSize() const
 {
 #if USE(GSTREAMER_HOLEPUNCH)
@@ -1841,8 +1819,10 @@
     if (!caps)
         return FloatSize();
 
-    // TODO: handle possible clean aperture data. See https://bugzilla.gnome.org/show_bug.cgi?id=596571
-    // TODO: handle possible transformation matrix. See https://bugzilla.gnome.org/show_bug.cgi?id=596326
+    // TODO: handle possible clean aperture data. See
+    // https://bugzilla.gnome.org/show_bug.cgi?id=596571
+    // TODO: handle possible transformation matrix. See
+    // https://bugzilla.gnome.org/show_bug.cgi?id=596326
 
     // Get the video PAR and original size, if this fails the
     // video-sink has likely not yet negotiated its caps.
@@ -1854,7 +1834,7 @@
 
 #if USE(TEXTURE_MAPPER_GL)
     // When using accelerated compositing, if the video is tagged as rotated 90 or 270 degrees, swap width and height.
-    if (m_canRenderingBeAccelerated) {
+    if (m_renderingCanBeAccelerated) {
         if (m_videoSourceOrientation.usesWidthAsHeight())
             originalSize = originalSize.transposedSize();
     }
@@ -1873,19 +1853,19 @@
     displayHeight /= displayAspectRatioGCD;
 
     // Apply DAR to original video size. This is the same behavior as in xvimagesink's setcaps function.
-    uint64_t width = 0, height = 0;
+    guint64 width = 0, height = 0;
     if (!(originalSize.height() % displayHeight)) {
         GST_DEBUG_OBJECT(pipeline(), "Keeping video original height");
         width = gst_util_uint64_scale_int(originalSize.height(), displayWidth, displayHeight);
-        height = originalSize.height();
+        height = static_cast<guint64>(originalSize.height());
     } else if (!(originalSize.width() % displayWidth)) {
         GST_DEBUG_OBJECT(pipeline(), "Keeping video original width");
         height = gst_util_uint64_scale_int(originalSize.width(), displayHeight, displayWidth);
-        width = originalSize.width();
+        width = static_cast<guint64>(originalSize.width());
     } else {
         GST_DEBUG_OBJECT(pipeline(), "Approximating while keeping original video height");
         width = gst_util_uint64_scale_int(originalSize.height(), displayWidth, displayHeight);
-        height = originalSize.height();
+        height = static_cast<guint64>(originalSize.height());
     }
 
     GST_DEBUG_OBJECT(pipeline(), "Natural size: %" G_GUINT64_FORMAT "x%" G_GUINT64_FORMAT, width, height);
@@ -1943,15 +1923,35 @@
     return m_readyState;
 }
 
-void MediaPlayerPrivateGStreamer::setMuted(bool shouldMute)
+void MediaPlayerPrivateGStreamer::sizeChanged()
 {
-    if (!m_volumeElement || shouldMute == isMuted())
+    notImplemented();
+}
+
+void MediaPlayerPrivateGStreamer::setMuted(bool mute)
+{
+    if (!m_volumeElement)
         return;
 
-    GST_INFO_OBJECT(pipeline(), "Muted? %s", boolForPrinting(shouldMute));
-    g_object_set(m_volumeElement.get(), "mute", shouldMute, nullptr);
+    bool currentValue = muted();
+    if (currentValue == mute)
+        return;
+
+    GST_INFO_OBJECT(pipeline(), "Set muted to %s", toString(mute).utf8().data());
+    g_object_set(m_volumeElement.get(), "mute", mute, nullptr);
 }
 
+bool MediaPlayerPrivateGStreamer::muted() const
+{
+    if (!m_volumeElement)
+        return false;
+
+    gboolean muted;
+    g_object_get(m_volumeElement.get(), "mute", &muted, nullptr);
+    GST_INFO_OBJECT(pipeline(), "Player is muted: %s", toString(static_cast<bool>(muted)).utf8().data());
+    return muted;
+}
+
 void MediaPlayerPrivateGStreamer::notifyPlayerOfMute()
 {
     if (!m_player || !m_volumeElement)
@@ -1999,7 +1999,7 @@
     GST_LOG_OBJECT(pipeline(), "Message %s received from element %s", GST_MESSAGE_TYPE_NAME(message), GST_MESSAGE_SRC_NAME(message));
     switch (GST_MESSAGE_TYPE(message)) {
     case GST_MESSAGE_ERROR:
-        if (m_shouldResetPipeline || !m_missingPluginCallbacks.isEmpty() || m_didErrorOccur)
+        if (m_resetPipeline || !m_missingPluginCallbacks.isEmpty() || m_errorOccured)
             break;
         gst_message_parse_error(message, &err.outPtr(), &debug.outPtr());
         GST_ERROR("Error %d: %s (url="" err->code, err->message, m_url.string().utf8().data());
@@ -2014,7 +2014,9 @@
             || g_error_matches(err.get(), GST_RESOURCE_ERROR, GST_RESOURCE_ERROR_NOT_FOUND))
             error = MediaPlayer::FormatError;
         else if (g_error_matches(err.get(), GST_STREAM_ERROR, GST_STREAM_ERROR_TYPE_NOT_FOUND)) {
-            // Let the mediaPlayerClient handle the stream error, in this case the HTMLMediaElement will emit a stalled event.
+            // Let the mediaPlayerClient handle the stream error, in
+            // this case the HTMLMediaElement will emit a stalled
+            // event.
             GST_ERROR("Decode error, let the Media element emit a stalled event.");
             m_loadingStalled = true;
             break;
@@ -2027,7 +2029,7 @@
         if (attemptNextLocation)
             issueError = !loadNextLocation();
         if (issueError) {
-            m_didErrorOccur = true;
+            m_errorOccured = true;
             if (m_networkState != error) {
                 m_networkState = error;
                 m_player->networkStateChanged();
@@ -2038,12 +2040,12 @@
         didEnd();
         break;
     case GST_MESSAGE_ASYNC_DONE:
-        if (!messageSourceIsPlaybin || m_isDelayingLoad)
+        if (!messageSourceIsPlaybin || m_delayingLoad)
             break;
         asyncStateChangeDone();
         break;
     case GST_MESSAGE_STATE_CHANGED: {
-        if (!messageSourceIsPlaybin || m_isDelayingLoad)
+        if (!messageSourceIsPlaybin || m_delayingLoad)
             break;
         updateStates();
 
@@ -2174,8 +2176,8 @@
                     }
                 }
                 if (!isRangeRequest) {
-                    m_isLiveStream = !contentLength;
-                    GST_INFO_OBJECT(pipeline(), "%s stream detected", m_isLiveStream ? "Live" : "Non-live");
+                    m_isStreaming = !contentLength;
+                    GST_INFO_OBJECT(pipeline(), "%s stream detected", m_isStreaming ? "Live" : "Non-live");
                     updateDownloadBufferingFlag();
                 }
             }
@@ -2283,8 +2285,8 @@
 {
     GST_DEBUG_OBJECT(pipeline(), "[Buffering] mode: %s, status: %f%%", enumToString(GST_TYPE_BUFFERING_MODE, mode).data(), percentage);
 
-    m_didDownloadFinish = percentage == 100;
-    m_isBuffering = !m_didDownloadFinish;
+    m_downloadFinished = percentage == 100;
+    m_buffering = !m_downloadFinished;
 
     switch (mode) {
     case GST_BUFFERING_STREAM: {
@@ -2291,7 +2293,7 @@
         updateMaxTimeLoaded(percentage);
 
         m_bufferingPercentage = percentage;
-        if (m_didDownloadFinish)
+        if (m_downloadFinished)
             updateStates();
 
         break;
@@ -2301,7 +2303,7 @@
 
         // Media is now fully loaded. It will play even if network connection is
         // cut. Buffering is done, remove the fill source from the main loop.
-        if (m_didDownloadFinish)
+        if (m_downloadFinished)
             m_fillTimer.stop();
 
         updateStates();
@@ -2321,7 +2323,7 @@
     if (section->section_type == GST_MPEGTS_SECTION_PMT) {
         const GstMpegtsPMT* pmt = gst_mpegts_section_get_pmt(section);
         m_metadataTracks.clear();
-        for (unsigned i = 0; i < pmt->streams->len; ++i) {
+        for (guint i = 0; i < pmt->streams->len; ++i) {
             const GstMpegtsPMTStream* stream = static_cast<const GstMpegtsPMTStream*>(g_ptr_array_index(pmt->streams, i));
             if (stream->stream_type == 0x05 || stream->stream_type >= 0x80) {
                 AtomString pid = String::number(stream->pid);
@@ -2340,9 +2342,9 @@
                 // expressed in hexadecimal using uppercase ASCII hex digits.
                 String inbandMetadataTrackDispatchType;
                 appendUnsignedAsHexFixedSize(stream->stream_type, inbandMetadataTrackDispatchType, 2);
-                for (unsigned j = 0; j < stream->descriptors->len; ++j) {
+                for (guint j = 0; j < stream->descriptors->len; ++j) {
                     const GstMpegtsDescriptor* descriptor = static_cast<const GstMpegtsDescriptor*>(g_ptr_array_index(stream->descriptors, j));
-                    for (unsigned k = 0; k < descriptor->length; ++k)
+                    for (guint k = 0; k < descriptor->length; ++k)
                         appendByteAsHex(descriptor->data[k], inbandMetadataTrackDispatchType);
                 }
                 track->setInBandMetadataTrackDispatchType(inbandMetadataTrackDispatchType);
@@ -2392,12 +2394,10 @@
 
     gint64 start = -1, stop = -1;
     gst_toc_entry_get_start_stop_times(entry, &start, &stop);
-
-    uint32_t truncatedGstSecond = static_cast<uint32_t>(GST_SECOND);
     if (start != -1)
-        cue->setStartTime(MediaTime(static_cast<int64_t>(start), truncatedGstSecond));
+        cue->setStartTime(MediaTime(start, GST_SECOND));
     if (stop != -1)
-        cue->setEndTime(MediaTime(static_cast<int64_t>(stop), truncatedGstSecond));
+        cue->setEndTime(MediaTime(stop, GST_SECOND));
 
     GstTagList* tags = gst_toc_entry_get_tags(entry);
     if (tags) {
@@ -2495,15 +2495,15 @@
 
 void MediaPlayerPrivateGStreamer::asyncStateChangeDone()
 {
-    if (!m_pipeline || m_didErrorOccur)
+    if (!m_pipeline || m_errorOccured)
         return;
 
-    if (m_isSeeking) {
-        if (m_isSeekPending)
+    if (m_seeking) {
+        if (m_seekIsPending)
             updateStates();
         else {
             GST_DEBUG_OBJECT(pipeline(), "[Seek] seeked to %s", toString(m_seekTime).utf8().data());
-            m_isSeeking = false;
+            m_seeking = false;
             m_cachedPosition = MediaTime::invalidTime();
             if (m_timeOfOverlappingSeek != m_seekTime && m_timeOfOverlappingSeek.isValid()) {
                 seek(m_timeOfOverlappingSeek);
@@ -2523,12 +2523,16 @@
 
 void MediaPlayerPrivateGStreamer::updateStates()
 {
-    if (!m_pipeline || m_didErrorOccur)
+    if (!m_pipeline)
         return;
 
+    if (m_errorOccured)
+        return;
+
     MediaPlayer::NetworkState oldNetworkState = m_networkState;
     MediaPlayer::ReadyState oldReadyState = m_readyState;
-    GstState pending, state;
+    GstState pending;
+    GstState state;
     bool stateReallyChanged = false;
 
     GstStateChangeReturn getStateResult = gst_element_get_state(m_pipeline.get(), &state, &pending, 250 * GST_NSECOND);
@@ -2548,9 +2552,9 @@
         if (m_isEndReached && m_currentState == GST_STATE_READY)
             break;
 
-        m_shouldResetPipeline = m_currentState <= GST_STATE_READY;
+        m_resetPipeline = m_currentState <= GST_STATE_READY;
 
-        bool didBuffering = m_isBuffering;
+        bool didBuffering = m_buffering;
 
         // Update ready and network states.
         switch (m_currentState) {
@@ -2564,17 +2568,17 @@
             break;
         case GST_STATE_PAUSED:
         case GST_STATE_PLAYING:
-            if (m_isBuffering) {
+            if (m_buffering) {
                 if (m_bufferingPercentage == 100) {
                     GST_DEBUG_OBJECT(pipeline(), "[Buffering] Complete.");
-                    m_isBuffering = false;
+                    m_buffering = false;
                     m_readyState = MediaPlayer::HaveEnoughData;
-                    m_networkState = m_didDownloadFinish ? MediaPlayer::Idle : MediaPlayer::Loading;
+                    m_networkState = m_downloadFinished ? MediaPlayer::Idle : MediaPlayer::Loading;
                 } else {
                     m_readyState = MediaPlayer::HaveCurrentData;
                     m_networkState = MediaPlayer::Loading;
                 }
-            } else if (m_didDownloadFinish) {
+            } else if (m_downloadFinished) {
                 m_readyState = MediaPlayer::HaveEnoughData;
                 m_networkState = MediaPlayer::Loaded;
             } else {
@@ -2590,25 +2594,25 @@
 
         // Sync states where needed.
         if (m_currentState == GST_STATE_PAUSED) {
-            if (!m_areVolumeAndMuteInitialized) {
+            if (!m_volumeAndMuteInitialized) {
                 notifyPlayerOfVolumeChange();
                 notifyPlayerOfMute();
-                m_areVolumeAndMuteInitialized = true;
+                m_volumeAndMuteInitialized = true;
             }
 
-            if (didBuffering && !m_isBuffering && !m_isPaused && m_playbackRate) {
+            if (didBuffering && !m_buffering && !m_paused && m_playbackRate) {
                 GST_DEBUG_OBJECT(pipeline(), "[Buffering] Restarting playback.");
                 changePipelineState(GST_STATE_PLAYING);
             }
         } else if (m_currentState == GST_STATE_PLAYING) {
-            m_isPaused = false;
+            m_paused = false;
 
-            if ((m_isBuffering && m_isLiveStream) || !m_playbackRate) {
+            if ((m_buffering && !isLiveStream()) || !m_playbackRate) {
                 GST_DEBUG_OBJECT(pipeline(), "[Buffering] Pausing stream for buffering.");
                 changePipelineState(GST_STATE_PAUSED);
             }
         } else
-            m_isPaused = true;
+            m_paused = true;
 
         GST_DEBUG_OBJECT(pipeline(), "Old state: %s, new state: %s (requested: %s)", gst_element_state_get_name(m_oldState), gst_element_state_get_name(m_currentState), gst_element_state_get_name(m_requestedState));
         if (m_requestedState == GST_STATE_PAUSED && m_currentState == GST_STATE_PAUSED) {
@@ -2633,13 +2637,13 @@
         break;
     case GST_STATE_CHANGE_FAILURE:
         GST_DEBUG_OBJECT(pipeline(), "Failure: State: %s, pending: %s", gst_element_state_get_name(m_currentState), gst_element_state_get_name(pending));
-        // Change failed.
+        // Change failed
         return;
     case GST_STATE_CHANGE_NO_PREROLL:
         GST_DEBUG_OBJECT(pipeline(), "No preroll: State: %s, pending: %s", gst_element_state_get_name(m_currentState), gst_element_state_get_name(pending));
 
         // Live pipelines go in PAUSED without prerolling.
-        m_isLiveStream = true;
+        m_isStreaming = true;
         updateDownloadBufferingFlag();
 
         if (m_currentState == GST_STATE_READY)
@@ -2646,11 +2650,11 @@
             m_readyState = MediaPlayer::HaveNothing;
         else if (m_currentState == GST_STATE_PAUSED) {
             m_readyState = MediaPlayer::HaveEnoughData;
-            m_isPaused = true;
+            m_paused = true;
         } else if (m_currentState == GST_STATE_PLAYING)
-            m_isPaused = false;
+            m_paused = false;
 
-        if (!m_isPaused && m_playbackRate)
+        if (!m_paused && m_playbackRate)
             changePipelineState(GST_STATE_PLAYING);
 
         m_networkState = MediaPlayer::Loading;
@@ -2676,11 +2680,11 @@
 
     if (getStateResult == GST_STATE_CHANGE_SUCCESS && m_currentState >= GST_STATE_PAUSED) {
         updatePlaybackRate();
-        if (m_isSeekPending) {
+        if (m_seekIsPending) {
             GST_DEBUG_OBJECT(pipeline(), "[Seek] committing pending seek to %s", toString(m_seekTime).utf8().data());
-            m_isSeekPending = false;
-            m_isSeeking = doSeek(m_seekTime, m_player->rate(), static_cast<GstSeekFlags>(GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE));
-            if (!m_isSeeking) {
+            m_seekIsPending = false;
+            m_seeking = doSeek(m_seekTime, m_player->rate(), static_cast<GstSeekFlags>(GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE));
+            if (!m_seeking) {
                 m_cachedPosition = MediaTime::invalidTime();
                 GST_DEBUG_OBJECT(pipeline(), "[Seek] seeking to %s failed", toString(m_seekTime).utf8().data());
             }
@@ -2714,7 +2718,7 @@
         return false;
 
     const GValue* locations = gst_structure_get_value(m_mediaLocations, "locations");
-    const char* newLocation = nullptr;
+    const gchar* newLocation = nullptr;
 
     if (!locations) {
         // Fallback on new-location string.
@@ -2771,7 +2775,7 @@
             m_player->readyStateChanged();
 
             // Reset pipeline state.
-            m_shouldResetPipeline = true;
+            m_resetPipeline = true;
 
             GstState state;
             gst_element_get_state(m_pipeline.get(), &state, nullptr, 0);
@@ -2797,7 +2801,7 @@
     // position is not always reported as 0 for instance.
     m_cachedPosition = MediaTime::invalidTime();
     MediaTime now = currentMediaTime();
-    if (now > MediaTime::zeroTime() && !m_isSeeking) {
+    if (now > MediaTime::zeroTime() && !m_seeking) {
         m_cachedDuration = now;
         m_player->durationChanged();
     }
@@ -2805,9 +2809,9 @@
     m_isEndReached = true;
 
     if (!m_player->client().mediaPlayerIsLooping()) {
-        m_isPaused = true;
+        m_paused = true;
         changePipelineState(GST_STATE_READY);
-        m_didDownloadFinish = false;
+        m_downloadFinished = false;
     }
     timeChanged();
 }
@@ -2860,12 +2864,12 @@
     unsigned flagDownload = getGstPlayFlag("download");
 
     // We don't want to stop downloading if we already started it.
-    if (flags & flagDownload && m_readyState > MediaPlayer::HaveNothing && !m_shouldResetPipeline) {
+    if (flags & flagDownload && m_readyState > MediaPlayer::HaveNothing && !m_resetPipeline) {
         GST_DEBUG_OBJECT(pipeline(), "Download already started, not starting again");
         return;
     }
 
-    bool shouldDownload = !m_isLiveStream && m_preload == MediaPlayer::Auto;
+    bool shouldDownload = !isLiveStream() && m_preload == MediaPlayer::Auto;
     if (shouldDownload) {
         GST_INFO_OBJECT(pipeline(), "Enabling on-disk buffering");
         g_object_set(m_pipeline.get(), "flags", flags | flagDownload, nullptr);
@@ -2879,7 +2883,7 @@
 
 void MediaPlayerPrivateGStreamer::createGSTPlayBin(const URL& url, const String& pipelineName)
 {
-    const char* playbinName = "playbin";
+    const gchar* playbinName = "playbin";
 
     // MSE doesn't support playbin3. Mediastream requires playbin3. Regular
     // playback can use playbin3 on-demand with the WEBKIT_GST_USE_PLAYBIN3
@@ -2902,6 +2906,8 @@
 
     m_isLegacyPlaybin = !g_strcmp0(playbinName, "playbin");
 
+    // gst_element_factory_make() returns a floating reference so
+    // we should not adopt.
     static Atomic<uint32_t> pipelineId;
     setPipeline(gst_element_factory_make(playbinName,
         (pipelineName.isEmpty() ? makeString("media-player-", pipelineId.exchangeAdd(1)) : pipelineName).utf8().data()));
@@ -2967,7 +2973,7 @@
 
     configurePlaySink();
 
-    if (m_shouldPreservePitch) {
+    if (m_preservesPitch) {
         GstElement* scale = gst_element_factory_make("scaletempo", nullptr);
 
         if (!scale)
@@ -2976,7 +2982,7 @@
             g_object_set(m_pipeline.get(), "audio-filter", scale, nullptr);
     }
 
-    if (!m_canRenderingBeAccelerated) {
+    if (!m_renderingCanBeAccelerated) {
         // If not using accelerated compositing, let GStreamer handle
         // the image-orientation tag.
         GstElement* videoFlip = gst_element_factory_make("videoflip", nullptr);
@@ -3001,7 +3007,7 @@
 
 bool MediaPlayerPrivateGStreamer::canSaveMediaData() const
 {
-    if (m_isLiveStream)
+    if (isLiveStream())
         return false;
 
     if (m_url.isLocalFile())
@@ -3021,7 +3027,7 @@
 
 void MediaPlayerPrivateGStreamer::acceleratedRenderingStateChanged()
 {
-    m_canRenderingBeAccelerated = m_player && m_player->client().mediaPlayerAcceleratedCompositingEnabled();
+    m_renderingCanBeAccelerated = m_player && m_player->client().mediaPlayerAcceleratedCompositingEnabled();
 }
 
 #if USE(TEXTURE_MAPPER_GL)
@@ -3069,7 +3075,7 @@
             if (!proxy.isActive())
                 return;
 
-            std::unique_ptr<GstVideoFrameHolder> frameHolder = makeUnique<GstVideoFrameHolder>(m_sample.get(), m_videoDecoderPlatform, m_textureMapperFlags, !m_isUsingFallbackVideoSink);
+            std::unique_ptr<GstVideoFrameHolder> frameHolder = makeUnique<GstVideoFrameHolder>(m_sample.get(), m_videoDecoderPlatform, m_textureMapperFlags, !m_usingFallbackVideoSink);
 
             std::unique_ptr<TextureMapperPlatformLayerBuffer> layerBuffer;
             if (frameHolder->hasMappedTextures()) {
@@ -3111,14 +3117,14 @@
 
 void MediaPlayerPrivateGStreamer::triggerRepaint(GstSample* sample)
 {
-    bool shouldTriggerResize;
+    bool triggerResize;
     {
         auto sampleLocker = holdLock(m_sampleMutex);
-        shouldTriggerResize = !m_sample;
+        triggerResize = !m_sample;
         m_sample = sample;
     }
 
-    if (shouldTriggerResize) {
+    if (triggerResize) {
         GST_DEBUG_OBJECT(pipeline(), "First sample reached the sink, triggering video dimensions update");
         m_notifier->notify(MainThreadNotification::SizeChanged, [this] {
             m_player->sizeChanged();
@@ -3125,9 +3131,9 @@
         });
     }
 
-    if (!m_canRenderingBeAccelerated) {
+    if (!m_renderingCanBeAccelerated) {
         LockHolder locker(m_drawMutex);
-        if (m_isBeingDestroyed)
+        if (m_destroying)
             return;
         m_drawTimer.startOneShot(0_s);
         m_drawCondition.wait(m_drawMutex);
@@ -3135,7 +3141,7 @@
     }
 
 #if USE(TEXTURE_MAPPER_GL)
-    if (m_isUsingFallbackVideoSink) {
+    if (m_usingFallbackVideoSink) {
         LockHolder lock(m_drawMutex);
         auto proxyOperation =
             [this](TextureMapperPlatformLayerProxy& proxy)
@@ -3171,10 +3177,10 @@
     //
     // This function is also used when destroying the player (destroying parameter is true), to release the gstreamer thread from
     // m_drawCondition and to ensure that new triggerRepaint calls won't wait on m_drawCondition.
-    if (!m_canRenderingBeAccelerated) {
+    if (!m_renderingCanBeAccelerated) {
         LockHolder locker(m_drawMutex);
         m_drawTimer.stop();
-        m_isBeingDestroyed = destroying;
+        m_destroying = destroying;
         m_drawCondition.notifyOne();
     }
 }
@@ -3256,9 +3262,9 @@
             m_colorConvertInputCaps = caps;
             m_colorConvertOutputCaps = adoptGRef(gst_caps_copy(caps));
 #if G_BYTE_ORDER == G_LITTLE_ENDIAN
-            const char* formatString = GST_VIDEO_INFO_HAS_ALPHA(&videoInfo) ? "RGBA" : "BGRx";
+            const gchar* formatString = GST_VIDEO_INFO_HAS_ALPHA(&videoInfo) ? "RGBA" : "BGRx";
 #else
-            const char* formatString = GST_VIDEO_INFO_HAS_ALPHA(&videoInfo) ? "RGBA" : "RGBx";
+            const gchar* formatString = GST_VIDEO_INFO_HAS_ALPHA(&videoInfo) ? "RGBA" : "RGBx";
 #endif
             gst_caps_set_simple(m_colorConvertOutputCaps.get(), "format", G_TYPE_STRING, formatString, nullptr);
             if (!gst_gl_color_convert_set_caps(m_colorConvert.get(), caps, m_colorConvertOutputCaps.get()))
@@ -3279,7 +3285,7 @@
     if (!gstImage)
         return;
 
-    context.drawImage(gstImage->image(), rect, gstImage->rect(), { CompositeCopy, m_canRenderingBeAccelerated ? m_videoSourceOrientation : ImageOrientation() });
+    context.drawImage(gstImage->image(), rect, gstImage->rect(), { CompositeCopy, m_renderingCanBeAccelerated ? m_videoSourceOrientation : ImageOrientation() });
 }
 
 #if USE(GSTREAMER_GL)
@@ -3287,7 +3293,7 @@
 {
     UNUSED_PARAM(context);
 
-    if (m_isUsingFallbackVideoSink)
+    if (m_usingFallbackVideoSink)
         return false;
 
     if (premultiplyAlpha)
@@ -3319,7 +3325,7 @@
 NativeImagePtr MediaPlayerPrivateGStreamer::nativeImageForCurrentTime()
 {
 #if USE(CAIRO) && ENABLE(ACCELERATED_2D_CANVAS)
-    if (m_isUsingFallbackVideoSink)
+    if (m_usingFallbackVideoSink)
         return nullptr;
 
     auto sampleLocker = holdLock(m_sampleMutex);
@@ -3400,7 +3406,7 @@
     if (m_readyState == MediaPlayer::HaveNothing)
         return MediaPlayer::Unknown;
 
-    if (m_isLiveStream)
+    if (isLiveStream())
         return MediaPlayer::LiveStream;
 
     return MediaPlayer::Download;
@@ -3478,12 +3484,12 @@
 #endif
 
 #if USE(GSTREAMER_GL)
-    if (m_canRenderingBeAccelerated)
+    if (m_renderingCanBeAccelerated)
         m_videoSink = createVideoSinkGL();
 #endif
 
     if (!m_videoSink) {
-        m_isUsingFallbackVideoSink = true;
+        m_usingFallbackVideoSink = true;
         m_videoSink = webkitVideoSinkNew();
         g_signal_connect_swapped(m_videoSink.get(), "repaint-requested", G_CALLBACK(repaintCallback), this);
         g_signal_connect_swapped(m_videoSink.get(), "repaint-cancelled", G_CALLBACK(repaintCancelledCallback), this);
@@ -3537,7 +3543,7 @@
 
 unsigned MediaPlayerPrivateGStreamer::decodedFrameCount() const
 {
-    uint64_t decodedFrames = 0;
+    guint64 decodedFrames = 0;
     if (m_fpsSink)
         g_object_get(m_fpsSink.get(), "frames-rendered", &decodedFrames, nullptr);
     return static_cast<unsigned>(decodedFrames);
@@ -3545,7 +3551,7 @@
 
 unsigned MediaPlayerPrivateGStreamer::droppedFrameCount() const
 {
-    uint64_t framesDropped = 0;
+    guint64 framesDropped = 0;
     if (m_fpsSink)
         g_object_get(m_fpsSink.get(), "frames-dropped", &framesDropped, nullptr);
     return static_cast<unsigned>(framesDropped);
@@ -3643,8 +3649,8 @@
 
 void MediaPlayerPrivateGStreamer::attemptToDecryptWithLocalInstance()
 {
-    bool wasEventHandled = gst_element_send_event(pipeline(), gst_event_new_custom(GST_EVENT_CUSTOM_DOWNSTREAM_OOB, gst_structure_new_empty("attempt-to-decrypt")));
-    GST_DEBUG("attempting to decrypt, event handled %s", boolForPrinting(wasEventHandled));
+    bool eventHandled = gst_element_send_event(pipeline(), gst_event_new_custom(GST_EVENT_CUSTOM_DOWNSTREAM_OOB, gst_structure_new_empty("attempt-to-decrypt")));
+    GST_DEBUG("attempting to decrypt, event handled %s", boolForPrinting(eventHandled));
 }
 
 void MediaPlayerPrivateGStreamer::handleProtectionEvent(GstEvent* event)
@@ -3663,15 +3669,15 @@
     initializationDataEncountered({eventKeySystemUUID, initData});
 }
 
-void MediaPlayerPrivateGStreamer::setWaitingForKey(bool isWaitingForKey)
+void MediaPlayerPrivateGStreamer::setWaitingForKey(bool waitingForKey)
 {
     // We bail out if values did not change or if we are requested to not wait anymore but there are still waiting decryptors.
-    GST_TRACE("isWaitingForKey %s, m_isWaitingForKey %s", boolForPrinting(isWaitingForKey), boolForPrinting(m_isWaitingForKey));
-    if (isWaitingForKey == m_isWaitingForKey || (!isWaitingForKey && this->waitingForKey()))
+    GST_TRACE("waitingForKey %s, m_waitingForKey %s", boolForPrinting(waitingForKey), boolForPrinting(m_waitingForKey));
+    if (waitingForKey == m_waitingForKey || (!waitingForKey && this->waitingForKey()))
         return;
 
-    m_isWaitingForKey = isWaitingForKey;
-    GST_DEBUG("waiting for key changed %s", boolForPrinting(m_isWaitingForKey));
+    m_waitingForKey = waitingForKey;
+    GST_DEBUG("waiting for key changed %s", boolForPrinting(m_waitingForKey));
     m_player->waitingForKeyChanged();
 }
 

Modified: trunk/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.h (252937 => 252938)


--- trunk/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.h	2019-11-29 10:24:54 UTC (rev 252937)
+++ trunk/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.h	2019-11-29 11:11:00 UTC (rev 252938)
@@ -120,14 +120,17 @@
 {
     WTF_MAKE_FAST_ALLOCATED;
 public:
+    static void initializeDebugCategory();
+
     MediaPlayerPrivateGStreamer(MediaPlayer*);
     virtual ~MediaPlayerPrivateGStreamer();
 
+    static bool isAvailable();
     static void registerMediaEngine(MediaEngineRegistrar);
-    static MediaPlayer::SupportsType extendedSupportsType(const MediaEngineSupportParameters&, MediaPlayer::SupportsType);
 
-    bool hasVideo() const final { return m_hasVideo; }
-    bool hasAudio() const final { return m_hasAudio; }
+    bool hasVideo() const override { return m_hasVideo; }
+    bool hasAudio() const override { return m_hasAudio; }
+
     void load(const String &url) override;
 #if ENABLE(MEDIA_SOURCE)
     void load(const String& url, MediaSourcePrivateClient*) override;
@@ -135,58 +138,82 @@
 #if ENABLE(MEDIA_STREAM)
     void load(MediaStreamPrivate&) override;
 #endif
-    void cancelLoad() final;
-    void prepareToPlay() final;
-    void play() final;
+    void commitLoad();
+    void cancelLoad() override;
+
+    void prepareToPlay() override;
+    void play() override;
     void pause() override;
-    bool paused() const final;
-    bool seeking() const override { return m_isSeeking; }
+    bool paused() const override;
+    bool seeking() const override { return m_seeking; }
     void seek(const MediaTime&) override;
+
+    MediaTime platformDuration() const;
+
     void setRate(float) override;
-    double rate() const final;
-    void setPreservesPitch(bool) final; 
-    void setPreload(MediaPlayer::Preload) final;
+    double rate() const override;
+    void setPreservesPitch(bool) override;
+    void setPreload(MediaPlayer::Preload) override;
+
     FloatSize naturalSize() const final;
-    void setVolume(float) final;
-    float volume() const final;
-    void setMuted(bool) final;
-    MediaPlayer::NetworkState networkState() const final;
-    MediaPlayer::ReadyState readyState() const final;
-    void setVisible(bool) final { }
-    void setSize(const IntSize&) final;
+
+    void setVolume(float) override;
+    float volume() const override;
+
+    void setMuted(bool) override;
+    bool muted() const;
+
+    MediaPlayer::NetworkState networkState() const override;
+    MediaPlayer::ReadyState readyState() const override;
+
+    void setVisible(bool) override { }
+    void setSize(const IntSize&) override;
+    void sizeChanged();
+
     // Prefer MediaTime based methods over float based.
-    float duration() const final { return durationMediaTime().toFloat(); }
-    double durationDouble() const final { return durationMediaTime().toDouble(); }
-    MediaTime durationMediaTime() const;
-    float currentTime() const final { return currentMediaTime().toFloat(); }
-    double currentTimeDouble() const final { return currentMediaTime().toDouble(); }
+
+    float duration() const override { return durationMediaTime().toFloat(); }
+    double durationDouble() const override { return durationMediaTime().toDouble(); }
+    MediaTime durationMediaTime() const override;
+    float currentTime() const override { return currentMediaTime().toFloat(); }
+    double currentTimeDouble() const override { return currentMediaTime().toDouble(); }
     MediaTime currentMediaTime() const override;
     std::unique_ptr<PlatformTimeRanges> buffered() const override;
-    void seek(float time) final { seek(MediaTime::createWithFloat(time)); }
-    void seekDouble(double time) final { seek(MediaTime::createWithDouble(time)); }
-    float maxTimeSeekable() const final { return maxMediaTimeSeekable().toFloat(); }
+    void seek(float time) override { seek(MediaTime::createWithFloat(time)); }
+    void seekDouble(double time) override { seek(MediaTime::createWithDouble(time)); }
+
+    float maxTimeSeekable() const override { return maxMediaTimeSeekable().toFloat(); }
     MediaTime maxMediaTimeSeekable() const override;
-    double minTimeSeekable() const final { return minMediaTimeSeekable().toFloat(); }
-    MediaTime minMediaTimeSeekable() const final { return MediaTime::zeroTime(); }
-    bool didLoadingProgress() const final;
-    unsigned long long totalBytes() const final;
-    bool hasSingleSecurityOrigin() const final;
-    Optional<bool> wouldTaintOrigin(const SecurityOrigin&) const final;
-    void simulateAudioInterruption() final;
+    double minTimeSeekable() const override { return minMediaTimeSeekable().toFloat(); }
+    MediaTime minMediaTimeSeekable() const override { return MediaTime::zeroTime(); }
+
+    bool didLoadingProgress() const override;
+    unsigned long long totalBytes() const override;
+
+    bool hasSingleSecurityOrigin() const override;
+    Optional<bool> wouldTaintOrigin(const SecurityOrigin&) const override;
+
+    void simulateAudioInterruption() override;
+
 #if ENABLE(WEB_AUDIO)
-    AudioSourceProvider* audioSourceProvider() final;
+    AudioSourceProvider* audioSourceProvider() override;
 #endif
-    void paint(GraphicsContext&, const FloatRect&) final;
-    bool supportsFullscreen() const final;
-    MediaPlayer::MovieLoadType movieLoadType() const final;
 
-    unsigned decodedFrameCount() const final;
-    unsigned droppedFrameCount() const final;
-    unsigned audioDecodedByteCount() const final;
-    unsigned videoDecodedByteCount() const final;
+    void paint(GraphicsContext&, const FloatRect&) override;
 
-    void acceleratedRenderingStateChanged() final;
+    bool supportsFullscreen() const override;
 
+    MediaPlayer::MovieLoadType movieLoadType() const override;
+
+    MediaPlayer* mediaPlayer() const { return m_player; }
+
+    unsigned decodedFrameCount() const override;
+    unsigned droppedFrameCount() const override;
+    unsigned audioDecodedByteCount() const override;
+    unsigned videoDecodedByteCount() const override;
+
+    void acceleratedRenderingStateChanged() override;
+
 #if USE(TEXTURE_MAPPER_GL)
     PlatformLayer* platformLayer() const override;
 #if PLATFORM(WIN_CAIRO)
@@ -198,23 +225,29 @@
 #endif
 
 #if ENABLE(ENCRYPTED_MEDIA)
-    void cdmInstanceAttached(CDMInstance&) final;
-    void cdmInstanceDetached(CDMInstance&) final;
+    void cdmInstanceAttached(CDMInstance&) override;
+    void cdmInstanceDetached(CDMInstance&) override;
+    void handleProtectionEvent(GstEvent*);
+    virtual void attemptToDecryptWithLocalInstance();
     void attemptToDecryptWithInstance(CDMInstance&) final;
-    bool waitingForKey() const final;
-
-    void handleProtectionEvent(GstEvent*);
+    void initializationDataEncountered(InitData&&);
+    void setWaitingForKey(bool);
+    bool waitingForKey() const override;
 #endif
 
+    static bool supportsKeySystem(const String& keySystem, const String& mimeType);
+    static MediaPlayer::SupportsType extendedSupportsType(const MediaEngineSupportParameters&, MediaPlayer::SupportsType);
+
 #if USE(GSTREAMER_GL)
     bool copyVideoTextureToPlatformTexture(GraphicsContext3D*, Platform3DObject, GC3Denum, GC3Dint, GC3Denum, GC3Denum, GC3Denum, bool, bool) override;
     NativeImagePtr nativeImageForCurrentTime() override;
 #endif
 
+    void setVideoSourceOrientation(ImageOrientation);
+    GstElement* pipeline() const { return m_pipeline.get(); }
     void enableTrack(TrackPrivateBaseGStreamer::TrackType, unsigned index);
 
     // Append pipeline interface
-    // FIXME: Use the client interface pattern, AppendPipeline does not need the full interface to this class just for these two functions.
     bool handleSyncMessage(GstMessage*);
     void handleMessage(GstMessage*);
 
@@ -237,10 +270,8 @@
         StreamCollectionChanged = 1 << 7
     };
 
-    static bool isAvailable();
-#if ENABLE(ENCRYPTED_MEDIA)
-    static bool supportsKeySystem(const String& keySystem, const String& mimeType);
-#endif
+    virtual bool isLiveStream() const { return m_isStreaming; }
+    MediaTime maxTimeLoaded() const;
 
     virtual void durationChanged();
     virtual void sourceSetup(GstElement*);
@@ -247,6 +278,7 @@
     virtual void configurePlaySink() { }
     virtual bool changePipelineState(GstState);
 
+
 #if USE(GSTREAMER_HOLEPUNCH)
     GstElement* createHolePunchVideoSink();
     void pushNextHolePunchBuffer();
@@ -260,10 +292,10 @@
 #if USE(TEXTURE_MAPPER_GL)
     void pushTextureToCompositor();
 #if USE(NICOSIA)
-    void swapBuffersIfNeeded() final;
+    void swapBuffersIfNeeded() override;
 #else
-    RefPtr<TextureMapperPlatformLayerProxy> proxy() const final;
-    void swapBuffersIfNeeded() final;
+    RefPtr<TextureMapperPlatformLayerProxy> proxy() const override;
+    void swapBuffersIfNeeded() override;
 #endif
 #endif
 
@@ -285,6 +317,9 @@
     static void volumeChangedCallback(MediaPlayerPrivateGStreamer*);
     static void muteChangedCallback(MediaPlayerPrivateGStreamer*);
 
+    // FIXME: Where is this used?
+    void handlePluginInstallerResult(GstInstallPluginsReturn);
+
     void readyTimerFired();
 
     void notifyPlayerOfVideo();
@@ -323,22 +358,22 @@
     mutable MediaTime m_cachedPosition;
     mutable MediaTime m_cachedDuration;
     bool m_canFallBackToLastFinishedSeekPosition { false };
-    bool m_isChangingRate { false };
-    bool m_didDownloadFinish { false };
-    bool m_didErrorOccur { false };
+    bool m_changingRate { false };
+    bool m_downloadFinished { false };
+    bool m_errorOccured { false };
     mutable bool m_isEndReached { false };
-    mutable bool m_isLiveStream { false };
-    bool m_isPaused { true };
+    mutable bool m_isStreaming { false };
+    bool m_paused { true };
     float m_playbackRate { 1 };
     GstState m_currentState;
     GstState m_oldState;
     GstState m_requestedState { GST_STATE_VOID_PENDING };
-    bool m_shouldResetPipeline { false };
-    bool m_isSeeking { false };
-    bool m_isSeekPending { false };
+    bool m_resetPipeline { false };
+    bool m_seeking { false };
+    bool m_seekIsPending { false };
     MediaTime m_seekTime;
     GRefPtr<GstElement> m_source { nullptr };
-    bool m_areVolumeAndMuteInitialized { false };
+    bool m_volumeAndMuteInitialized { false };
 
 #if USE(TEXTURE_MAPPER_GL)
     TextureMapperGL::Flags m_textureMapperFlags;
@@ -356,10 +391,10 @@
     GRefPtr<GstSample> m_sample;
 
     mutable FloatSize m_videoSize;
-    bool m_isUsingFallbackVideoSink { false };
-    bool m_canRenderingBeAccelerated { false };
+    bool m_usingFallbackVideoSink { false };
+    bool m_renderingCanBeAccelerated { false };
 
-    bool m_isBeingDestroyed { false };
+    bool m_destroying { false };
 
 #if USE(GSTREAMER_GL)
     std::unique_ptr<VideoTextureCopierGStreamer> m_videoTextureCopier;
@@ -377,21 +412,17 @@
     Lock m_protectionMutex; // Guards access to m_handledProtectionEvents.
     HashSet<uint32_t> m_handledProtectionEvents;
 
-    bool m_isWaitingForKey { false };
+    bool m_waitingForKey { false };
 #endif
 
     Optional<GstVideoDecoderPlatform> m_videoDecoderPlatform;
 
 private:
-    MediaTime maxTimeLoaded() const;
-    GstElement* pipeline() const { return m_pipeline.get(); }
-    void setVideoSourceOrientation(ImageOrientation);
-    MediaTime platformDuration() const;
-    bool isMuted() const;
-    void commitLoad();
     void fillTimerFired();
+
     void didEnd();
 
+
     GstElement* createVideoSink();
     GstElement* createAudioSink();
     GstElement* audioSink() const;
@@ -444,12 +475,6 @@
     void updateTracks();
     void clearTracks();
 
-#if ENABLE(ENCRYPTED_MEDIA)
-    void attemptToDecryptWithLocalInstance();
-    void initializationDataEncountered(InitData&&);
-    void setWaitingForKey(bool);
-#endif
-
 #if ENABLE(VIDEO_TRACK)
     GRefPtr<GstElement> m_textAppSink;
     GRefPtr<GstPad> m_textAppSinkPad;
@@ -456,7 +481,7 @@
 #endif
     GstStructure* m_mediaLocations { nullptr };
     int m_mediaLocationCurrentIndex { 0 };
-    bool m_isPlaybackRatePaused { false };
+    bool m_playbackRatePause { false };
     MediaTime m_timeOfOverlappingSeek;
     float m_lastPlaybackRate { 1 };
     Timer m_fillTimer;
@@ -463,7 +488,7 @@
     MediaTime m_maxTimeLoaded;
     bool m_loadingStalled { false };
     MediaPlayer::Preload m_preload;
-    bool m_isDelayingLoad { false };
+    bool m_delayingLoad { false };
     mutable MediaTime m_maxTimeLoadedAtLastDidLoadingProgress;
     bool m_hasVideo { false };
     bool m_hasAudio { false };
@@ -478,11 +503,11 @@
     RefPtr<TextureMapperPlatformLayerProxy> m_platformLayerProxy;
 #endif
 #endif
-    bool m_isBuffering { false };
+    bool m_buffering { false };
     int m_bufferingPercentage { 0 };
     mutable unsigned long long m_totalBytes { 0 };
     URL m_url;
-    bool m_shouldPreservePitch { false };
+    bool m_preservesPitch { false };
     mutable Optional<Seconds> m_lastQueryTime;
     bool m_isLegacyPlaybin;
     GRefPtr<GstStreamCollection> m_streamCollection;
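
Much of the header churn above is the reverted swap between final and override on MediaPlayerPrivate methods. The practical difference, sketched with hypothetical types rather than the WebKit classes: final stops further subclasses (such as the MSE player) from overriding a method again, while plain override leaves that door open.

    struct PlayerInterface {
        virtual ~PlayerInterface() = default;
        virtual bool paused() const = 0;
        virtual bool seeking() const = 0;
    };

    struct BasePlayer : PlayerInterface {
        bool paused() const override { return m_paused; }   // subclasses may still override
        bool seeking() const final { return m_seeking; }     // no further overrides allowed
        bool m_paused { true };
        bool m_seeking { false };
    };

    struct MsePlayer : BasePlayer {
        bool paused() const override { return false; }
        // bool seeking() const override;                    // would not compile: seeking() is final
    };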

Modified: trunk/Source/WebCore/platform/graphics/gstreamer/mse/MediaPlayerPrivateGStreamerMSE.cpp (252937 => 252938)


--- trunk/Source/WebCore/platform/graphics/gstreamer/mse/MediaPlayerPrivateGStreamerMSE.cpp	2019-11-29 10:24:54 UTC (rev 252937)
+++ trunk/Source/WebCore/platform/graphics/gstreamer/mse/MediaPlayerPrivateGStreamerMSE.cpp	2019-11-29 11:11:00 UTC (rev 252938)
@@ -132,14 +132,13 @@
 
 void MediaPlayerPrivateGStreamerMSE::pause()
 {
-    // FIXME: Should not need direct access to this member. This override is probably not needed.
-    m_isPaused = true;
+    m_paused = true;
     MediaPlayerPrivateGStreamer::pause();
 }
 
 MediaTime MediaPlayerPrivateGStreamerMSE::durationMediaTime() const
 {
-    if (UNLIKELY(!m_pipeline || m_didErrorOccur))
+    if (UNLIKELY(!m_pipeline || m_errorOccured))
         return MediaTime();
 
     return m_mediaTimeDuration;
@@ -147,7 +146,7 @@
 
 void MediaPlayerPrivateGStreamerMSE::seek(const MediaTime& time)
 {
-    if (UNLIKELY(!m_pipeline || m_didErrorOccur))
+    if (UNLIKELY(!m_pipeline || m_errorOccured))
         return;
 
     GST_INFO("[Seek] seek attempt to %s secs", toString(time).utf8().data());
@@ -155,15 +154,15 @@
     // Avoid useless seeking.
     MediaTime current = currentMediaTime();
     if (time == current) {
-        if (!m_isSeeking)
+        if (!m_seeking)
             timeChanged();
         return;
     }
 
-    if (m_isLiveStream)
+    if (isLiveStream())
         return;
 
-    if (m_isSeeking && m_isSeekPending) {
+    if (m_seeking && m_seekIsPending) {
         m_seekTime = time;
         return;
     }
@@ -180,7 +179,7 @@
     }
 
     m_isEndReached = false;
-    GST_DEBUG("m_isSeeking=%s, m_seekTime=%s", boolForPrinting(m_isSeeking), toString(m_seekTime).utf8().data());
+    GST_DEBUG("m_seeking=%s, m_seekTime=%s", boolForPrinting(m_seeking), toString(m_seekTime).utf8().data());
 }
 
 void MediaPlayerPrivateGStreamerMSE::configurePlaySink()
@@ -233,7 +232,7 @@
     GstSeekFlags seekType = static_cast<GstSeekFlags>(GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE);
 
     // Always move to seeking state to report correct 'currentTime' while pending for actual seek to complete.
-    m_isSeeking = true;
+    m_seeking = true;
 
     // Check if playback pipeline is ready for seek.
     GstState state, newState;
@@ -241,7 +240,7 @@
     if (getStateResult == GST_STATE_CHANGE_FAILURE || getStateResult == GST_STATE_CHANGE_NO_PREROLL) {
         GST_DEBUG("[Seek] cannot seek, current state change is %s", gst_element_state_change_return_get_name(getStateResult));
         webKitMediaSrcSetReadyForSamples(WEBKIT_MEDIA_SRC(m_source.get()), true);
-        m_isSeeking = false;
+        m_seeking = false;
         return false;
     }
     if ((getStateResult == GST_STATE_CHANGE_ASYNC
@@ -263,19 +262,19 @@
 
         GST_DEBUG("[Seek] Delaying the seek: %s", reason.data());
 
-        m_isSeekPending = true;
+        m_seekIsPending = true;
 
         if (m_isEndReached) {
             GST_DEBUG("[Seek] reset pipeline");
-            m_shouldResetPipeline = true;
-            m_isSeeking = false;
+            m_resetPipeline = true;
+            m_seeking = false;
             if (!changePipelineState(GST_STATE_PAUSED))
                 loadingFailed(MediaPlayer::Empty);
             else
-                m_isSeeking = true;
+                m_seeking = true;
         }
 
-        return m_isSeeking;
+        return m_seeking;
     }
 
     // Stop accepting new samples until actual seek is finished.
@@ -301,7 +300,7 @@
         if (setStateResult == GST_STATE_CHANGE_FAILURE) {
             GST_DEBUG("[Seek] Cannot seek, failed to pause playback pipeline.");
             webKitMediaSrcSetReadyForSamples(WEBKIT_MEDIA_SRC(m_source.get()), true);
-            m_isSeeking = false;
+            m_seeking = false;
             return false;
         }
         m_readyState = MediaPlayer::HaveMetadata;
@@ -315,7 +314,7 @@
         m_mediaSource->monitorSourceBuffers();
         ASSERT(m_mseSeekCompleted);
         // Note: seekCompleted will recursively call us.
-        return m_isSeeking;
+        return m_seeking;
     }
 
     GST_DEBUG("We can seek now");
@@ -338,7 +337,7 @@
     m_gstSeekCompleted = false;
     if (!gst_element_seek(m_pipeline.get(), rate, GST_FORMAT_TIME, seekType, GST_SEEK_TYPE_SET, toGstClockTime(startTime), GST_SEEK_TYPE_SET, toGstClockTime(endTime))) {
         webKitMediaSrcSetReadyForSamples(WEBKIT_MEDIA_SRC(m_source.get()), true);
-        m_isSeeking = false;
+        m_seeking = false;
         m_gstSeekCompleted = true;
         GST_DEBUG("doSeek(): gst_element_seek() failed, returning false");
         return false;
@@ -351,7 +350,7 @@
 
 void MediaPlayerPrivateGStreamerMSE::maybeFinishSeek()
 {
-    if (!m_isSeeking || !m_mseSeekCompleted || !m_gstSeekCompleted)
+    if (!m_seeking || !m_mseSeekCompleted || !m_gstSeekCompleted)
         return;
 
     GstState state, newState;
@@ -363,9 +362,9 @@
         return;
     }
 
-    if (m_isSeekPending) {
+    if (m_seekIsPending) {
         GST_DEBUG("[Seek] Committing pending seek to %s", toString(m_seekTime).utf8().data());
-        m_isSeekPending = false;
+        m_seekIsPending = false;
         if (!doSeek()) {
             GST_WARNING("[Seek] Seeking to %s failed", toString(m_seekTime).utf8().data());
             m_cachedPosition = MediaTime::invalidTime();
@@ -376,7 +375,7 @@
     GST_DEBUG("[Seek] Seeked to %s", toString(m_seekTime).utf8().data());
 
     webKitMediaSrcSetReadyForSamples(WEBKIT_MEDIA_SRC(m_source.get()), true);
-    m_isSeeking = false;
+    m_seeking = false;
     m_cachedPosition = MediaTime::invalidTime();
     // The pipeline can still have a pending state. In this case a position query will fail.
     // Right now we can use m_seekTime as a fallback.
@@ -391,7 +390,7 @@
 
 bool MediaPlayerPrivateGStreamerMSE::seeking() const
 {
-    return m_isSeeking;
+    return m_seeking;
 }
 
 // FIXME: MediaPlayerPrivateGStreamer manages the ReadyState on its own. We shouldn't change it manually.
@@ -429,7 +428,7 @@
 
 void MediaPlayerPrivateGStreamerMSE::waitForSeekCompleted()
 {
-    if (!m_isSeeking)
+    if (!m_seeking)
         return;
 
     GST_DEBUG("Waiting for MSE seek completed");
@@ -480,7 +479,7 @@
 
 void MediaPlayerPrivateGStreamerMSE::updateStates()
 {
-    if (UNLIKELY(!m_pipeline || m_didErrorOccur))
+    if (UNLIKELY(!m_pipeline || m_errorOccured))
         return;
 
     MediaPlayer::NetworkState oldNetworkState = m_networkState;
@@ -499,8 +498,8 @@
         if (m_isEndReached && state == GST_STATE_READY)
             break;
 
-        m_shouldResetPipeline = (state <= GST_STATE_READY);
-        if (m_shouldResetPipeline)
+        m_resetPipeline = (state <= GST_STATE_READY);
+        if (m_resetPipeline)
             m_mediaTimeDuration = MediaTime::zeroTime();
 
         // Update ready and network states.
@@ -539,18 +538,18 @@
 
         // Sync states where needed.
         if (state == GST_STATE_PAUSED) {
-            if (!m_areVolumeAndMuteInitialized) {
+            if (!m_volumeAndMuteInitialized) {
                 notifyPlayerOfVolumeChange();
                 notifyPlayerOfMute();
-                m_areVolumeAndMuteInitialized = true;
+                m_volumeAndMuteInitialized = true;
             }
 
-            if (!seeking() && !m_isPaused && m_playbackRate) {
+            if (!seeking() && !m_paused && m_playbackRate) {
                 GST_DEBUG("[Buffering] Restarting playback.");
                 changePipelineState(GST_STATE_PLAYING);
             }
         } else if (state == GST_STATE_PLAYING) {
-            m_isPaused = false;
+            m_paused = false;
 
             if (!m_playbackRate) {
                 GST_DEBUG("[Buffering] Pausing stream for buffering.");
@@ -557,7 +556,7 @@
                 changePipelineState(GST_STATE_PAUSED);
             }
         } else
-            m_isPaused = true;
+            m_paused = true;
 
         if (m_requestedState == GST_STATE_PAUSED && state == GST_STATE_PAUSED) {
             shouldUpdatePlaybackState = true;
@@ -578,7 +577,7 @@
         GST_DEBUG("No preroll: State: %s, pending: %s", gst_element_state_get_name(state), gst_element_state_get_name(pending));
 
         // Live pipelines go in PAUSED without prerolling.
-        m_isLiveStream = true;
+        m_isStreaming = true;
 
         if (state == GST_STATE_READY) {
             m_readyState = MediaPlayer::HaveNothing;
@@ -586,11 +585,11 @@
         } else if (state == GST_STATE_PAUSED) {
             m_readyState = MediaPlayer::HaveEnoughData;
             GST_DEBUG("m_readyState=%s", dumpReadyState(m_readyState));
-            m_isPaused = true;
+            m_paused = true;
         } else if (state == GST_STATE_PLAYING)
-            m_isPaused = false;
+            m_paused = false;
 
-        if (!m_isPaused && m_playbackRate)
+        if (!m_paused && m_playbackRate)
             changePipelineState(GST_STATE_PLAYING);
 
         m_networkState = MediaPlayer::Loading;
@@ -621,10 +620,10 @@
 }
 void MediaPlayerPrivateGStreamerMSE::asyncStateChangeDone()
 {
-    if (UNLIKELY(!m_pipeline || m_didErrorOccur))
+    if (UNLIKELY(!m_pipeline || m_errorOccured))
         return;
 
-    if (m_isSeeking)
+    if (m_seeking)
         maybeFinishSeek();
     else
         updateStates();
@@ -775,7 +774,7 @@
 
 MediaTime MediaPlayerPrivateGStreamerMSE::maxMediaTimeSeekable() const
 {
-    if (UNLIKELY(m_didErrorOccur))
+    if (UNLIKELY(m_errorOccured))
         return MediaTime::zeroTime();
 
     GST_DEBUG("maxMediaTimeSeekable");
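
The doSeek()/maybeFinishSeek() hunks above follow a deferred-seek pattern: if the pipeline cannot take the seek right away, the request is remembered and replayed once the async state change settles. A simplified, self-contained sketch of that pattern (illustrative names, not the actual WebKit code):

    #include <optional>

    class DeferredSeeker {
    public:
        // Returns true while a seek is in flight, whether executed now or deferred.
        bool requestSeek(double seconds, bool pipelineReady)
        {
            m_seeking = true;
            if (!pipelineReady) {
                // Pipeline is still changing state: stash the target and retry later.
                m_pendingSeek = seconds;
                return true;
            }
            return performSeek(seconds);
        }

        // Called once the async state change completes.
        void maybeFinishSeek(bool pipelineReady)
        {
            if (!m_seeking || !pipelineReady)
                return;
            if (m_pendingSeek) {
                double target = *m_pendingSeek;
                m_pendingSeek.reset();
                performSeek(target);
                return;
            }
            m_seeking = false; // Seek fully settled.
        }

    private:
        bool performSeek(double seconds)
        {
            m_position = seconds;
            m_seeking = false;
            return true;
        }

        bool m_seeking { false };
        std::optional<double> m_pendingSeek;
        double m_position { 0 };
    };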

Modified: trunk/Source/WebCore/platform/graphics/gstreamer/mse/MediaPlayerPrivateGStreamerMSE.h (252937 => 252938)


--- trunk/Source/WebCore/platform/graphics/gstreamer/mse/MediaPlayerPrivateGStreamerMSE.h	2019-11-29 10:24:54 UTC (rev 252937)
+++ trunk/Source/WebCore/platform/graphics/gstreamer/mse/MediaPlayerPrivateGStreamerMSE.h	2019-11-29 11:11:00 UTC (rev 252938)
@@ -54,6 +54,7 @@
 
     void updateDownloadBufferingFlag() override { };
 
+    bool isLiveStream() const override { return false; }
     MediaTime currentMediaTime() const override;
 
     void pause() override;

Modified: trunk/Source/WebCore/platform/graphics/gstreamer/mse/MediaSourceClientGStreamerMSE.cpp (252937 => 252938)


--- trunk/Source/WebCore/platform/graphics/gstreamer/mse/MediaSourceClientGStreamerMSE.cpp	2019-11-29 10:24:54 UTC (rev 252937)
+++ trunk/Source/WebCore/platform/graphics/gstreamer/mse/MediaSourceClientGStreamerMSE.cpp	2019-11-29 11:11:00 UTC (rev 252938)
@@ -163,8 +163,7 @@
     ASSERT(WTF::isMainThread());
 
     // This is only for on-the-fly reenqueues after appends. When seeking, the seek will do its own flush.
-    // FIXME: Should not be touching private parts.
-    if (!m_playerPrivate.m_isSeeking)
+    if (!m_playerPrivate.m_seeking)
         m_playerPrivate.m_playbackPipeline->flush(trackId);
 }
 