Modified: trunk/LayoutTests/ChangeLog (223504 => 223505)
--- trunk/LayoutTests/ChangeLog 2017-10-17 09:39:52 UTC (rev 223504)
+++ trunk/LayoutTests/ChangeLog 2017-10-17 09:40:47 UTC (rev 223505)
@@ -1,5 +1,27 @@
2017-10-17 Alicia Boya García <[email protected]>
+ [MSE][GStreamer] Insert parser elements in AppendPipeline when demuxing Opus or Vorbis
+ https://bugs.webkit.org/show_bug.cgi?id=178076
+
+ Reviewed by Xabier Rodriguez-Calvar.
+
+ YouTube does not include durations in the WebM container for files
+ containing Opus audio, so we need to read them from the contained
+ stream. Fortunately, GStreamer has an element to do that: opusparse.
+
+ The same thing happens with Vorbis contained in WebM files from the
+ W3C tests, which should also be fixed by the GStreamer element
+ vorbisparse.
+
+ This patch adds an opusparse or vorbisparse element to the
+ AppendPipeline at the src pad of the demuxer when either is found.
+
+ Tests: updated expectations.
+
+ * platform/gtk/TestExpectations:
+
+2017-10-17 Alicia Boya García <[email protected]>
+
[GStreamer][MSE] Unreviewed microgardening
https://bugs.webkit.org/show_bug.cgi?id=178344
Modified: trunk/Source/WebCore/ChangeLog (223504 => 223505)
--- trunk/Source/WebCore/ChangeLog 2017-10-17 09:39:52 UTC (rev 223504)
+++ trunk/Source/WebCore/ChangeLog 2017-10-17 09:40:47 UTC (rev 223505)
@@ -1,3 +1,34 @@
+2017-10-17 Alicia Boya García <[email protected]>
+
+ [MSE][GStreamer] Insert parser elements in AppendPipeline when demuxing Opus or Vorbis
+ https://bugs.webkit.org/show_bug.cgi?id=178076
+
+ Reviewed by Xabier Rodriguez-Calvar.
+
+ YouTube does not include durations in the WebM container for files
+ containing Opus audio, so we need to read them from the contained
+ stream. Fortunately, GStreamer has an element to do that: opusparse.
+
+ The same thing happens with Vorbis contained in WebM files from the
+ W3C tests, which should also be fixed by the GStreamer element
+ vorbisparse.
+
+ This patch adds an opusparse or vorbisparse element to the
+ AppendPipeline at the src pad of the demuxer when either is found.
+
+ Tests: updated expectations.
+
+ * platform/graphics/gstreamer/mse/AppendPipeline.cpp:
+ (WebCore::AppendPipeline::appsinkNewSample):
+ (WebCore::createOptionalParserForFormat):
+ (WebCore::AppendPipeline::connectDemuxerSrcPadToAppsinkFromAnyThread):
+ (WebCore::AppendPipeline::disconnectDemuxerSrcPadFromAppsinkFromAnyThread):
+ * platform/graphics/gstreamer/mse/AppendPipeline.h:
+ * platform/graphics/gstreamer/mse/GStreamerMediaSample.cpp:
+ (WebCore::GStreamerMediaSample::GStreamerMediaSample):
+ * platform/graphics/gstreamer/mse/PlaybackPipeline.cpp:
+ (WebCore::PlaybackPipeline::attachTrack):
+
2017-10-17 Ms2ger <[email protected]>
Add WebGL2 texImage3D overloads.
Modified: trunk/Source/WebCore/platform/graphics/gstreamer/mse/AppendPipeline.cpp (223504 => 223505)
--- trunk/Source/WebCore/platform/graphics/gstreamer/mse/AppendPipeline.cpp 2017-10-17 09:39:52 UTC (rev 223504)
+++ trunk/Source/WebCore/platform/graphics/gstreamer/mse/AppendPipeline.cpp 2017-10-17 09:40:47 UTC (rev 223505)
@@ -728,7 +728,12 @@
RefPtr<GStreamerMediaSample> mediaSample = WebCore::GStreamerMediaSample::create(sample, m_presentationSize, trackId());
- GST_TRACE("append: trackId=%s PTS=%f presentationSize=%.0fx%.0f", mediaSample->trackID().string().utf8().data(), mediaSample->presentationTime().toFloat(), mediaSample->presentationSize().width(), mediaSample->presentationSize().height());
+ GST_TRACE("append: trackId=%s PTS=%s DTS=%s DUR=%s presentationSize=%.0fx%.0f",
+ mediaSample->trackID().string().utf8().data(),
+ mediaSample->presentationTime().toString().utf8().data(),
+ mediaSample->decodeTime().toString().utf8().data(),
+ mediaSample->duration().toString().utf8().data(),
+ mediaSample->presentationSize().width(), mediaSample->presentationSize().height());
// If we're beyond the duration, ignore this sample and the remaining ones.
MediaTime duration = m_mediaSourceClient->duration();
@@ -944,6 +949,30 @@
return m_flowReturn;
}
+static GRefPtr<GstElement>
+createOptionalParserForFormat(GstPad* demuxerSrcPad)
+{
+ GRefPtr<GstCaps> padCaps = adoptGRef(gst_pad_get_current_caps(demuxerSrcPad));
+ GstStructure* structure = gst_caps_get_structure(padCaps.get(), 0);
+ const char* mediaType = gst_structure_get_name(structure);
+
+ GUniquePtr<char> demuxerPadName(gst_pad_get_name(demuxerSrcPad));
+ GUniquePtr<char> parserName(g_strdup_printf("%s_parser", demuxerPadName.get()));
+
+ if (!g_strcmp0(mediaType, "audio/x-opus")) {
+ GstElement* opusparse = gst_element_factory_make("opusparse", parserName.get());
+ RELEASE_ASSERT(opusparse);
+ return GRefPtr<GstElement>(opusparse);
+ }
+ if (!g_strcmp0(mediaType, "audio/x-vorbis")) {
+ GstElement* vorbisparse = gst_element_factory_make("vorbisparse", parserName.get());
+ RELEASE_ASSERT(vorbisparse);
+ return GRefPtr<GstElement>(vorbisparse);
+ }
+
+ return nullptr;
+}
+
void AppendPipeline::connectDemuxerSrcPadToAppsinkFromAnyThread(GstPad* demuxerSrcPad)
{
if (!m_appsink)
@@ -997,30 +1026,50 @@
if (!parent)
gst_bin_add(GST_BIN(m_pipeline.get()), m_appsink.get());
+ // Current head of the pipeline being built.
+ GRefPtr<GstPad> currentSrcPad = demuxerSrcPad;
+
+ // Some audio files unhelpfully omit the duration of frames in the container. We need to parse
+ // the contained audio streams in order to know the duration of the frames.
+ // This is known to be an issue with YouTube WebM files containing Opus audio as of YTTV2018.
+ m_parser = createOptionalParserForFormat(currentSrcPad.get());
+ if (m_parser) {
+ gst_bin_add(GST_BIN(m_pipeline.get()), m_parser.get());
+ gst_element_sync_state_with_parent(m_parser.get());
+
+ GRefPtr<GstPad> parserSinkPad = adoptGRef(gst_element_get_static_pad(m_parser.get(), "sink"));
+ GRefPtr<GstPad> parserSrcPad = adoptGRef(gst_element_get_static_pad(m_parser.get(), "src"));
+
+ gst_pad_link(currentSrcPad.get(), parserSinkPad.get());
+ currentSrcPad = parserSrcPad;
+ }
+
#if ENABLE(ENCRYPTED_MEDIA)
if (m_decryptor) {
gst_object_ref(m_decryptor.get());
gst_bin_add(GST_BIN(m_pipeline.get()), m_decryptor.get());
+ gst_element_sync_state_with_parent(m_decryptor.get());
GRefPtr<GstPad> decryptorSinkPad = adoptGRef(gst_element_get_static_pad(m_decryptor.get(), "sink"));
- gst_pad_link(demuxerSrcPad, decryptorSinkPad.get());
-
GRefPtr<GstPad> decryptorSrcPad = adoptGRef(gst_element_get_static_pad(m_decryptor.get(), "src"));
- gst_pad_link(decryptorSrcPad.get(), appsinkSinkPad.get());
- gst_element_sync_state_with_parent(m_appsink.get());
- gst_element_sync_state_with_parent(m_decryptor.get());
+ gst_pad_link(currentSrcPad.get(), decryptorSinkPad.get());
+ currentSrcPad = decryptorSrcPad;
+ }
+#endif
- if (m_pendingDecryptionStructure)
- dispatchPendingDecryptionStructure();
- } else {
-#endif
- gst_pad_link(demuxerSrcPad, appsinkSinkPad.get());
- gst_element_sync_state_with_parent(m_appsink.get());
+ gst_pad_link(currentSrcPad.get(), appsinkSinkPad.get());
+
+ gst_element_sync_state_with_parent(m_appsink.get());
+
#if ENABLE(ENCRYPTED_MEDIA)
- }
+ if (m_pendingDecryptionStructure)
+ dispatchPendingDecryptionStructure();
#endif
gst_element_set_state(m_pipeline.get(), GST_STATE_PAUSED);
+ gst_element_sync_state_with_parent(m_appsink.get());
+
+ GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(m_pipeline.get()), GST_DEBUG_GRAPH_SHOW_ALL, "webkit-after-link");
}
}
@@ -1089,31 +1138,27 @@
m_padAddRemoveCondition.notifyOne();
}
-void AppendPipeline::disconnectDemuxerSrcPadFromAppsinkFromAnyThread(GstPad* demuxerSrcPad)
+void AppendPipeline::disconnectDemuxerSrcPadFromAppsinkFromAnyThread(GstPad*)
{
- // Must be done in the thread we were called from (usually streaming thread).
- if (!gst_pad_is_linked(demuxerSrcPad)) {
- gulong probeId = GPOINTER_TO_ULONG(g_object_get_data(G_OBJECT(demuxerSrcPad), "blackHoleProbeId"));
- if (probeId) {
- GST_DEBUG("Disconnecting black hole probe.");
- g_object_set_data(G_OBJECT(demuxerSrcPad), "blackHoleProbeId", nullptr);
- gst_pad_remove_probe(demuxerSrcPad, probeId);
- } else
- GST_WARNING("Not disconnecting demuxer src pad because it wasn't linked");
- return;
- }
+ GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(m_pipeline.get()), GST_DEBUG_GRAPH_SHOW_ALL, "pad-removed-before");
GST_DEBUG("Disconnecting appsink");
#if ENABLE(ENCRYPTED_MEDIA)
if (m_decryptor) {
- gst_element_unlink(m_decryptor.get(), m_appsink.get());
- gst_element_unlink(m_demux.get(), m_decryptor.get());
gst_element_set_state(m_decryptor.get(), GST_STATE_NULL);
gst_bin_remove(GST_BIN(m_pipeline.get()), m_decryptor.get());
- } else
+ m_decryptor = nullptr;
+ }
#endif
- gst_element_unlink(m_demux.get(), m_appsink.get());
+
+ if (m_parser) {
+ gst_element_set_state(m_parser.get(), GST_STATE_NULL);
+ gst_bin_remove(GST_BIN(m_pipeline.get()), m_parser.get());
+ m_parser = nullptr;
+ }
+
+ GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(m_pipeline.get()), GST_DEBUG_GRAPH_SHOW_ALL, "pad-removed-after");
}
#if ENABLE(ENCRYPTED_MEDIA)
Modified: trunk/Source/WebCore/platform/graphics/gstreamer/mse/PlaybackPipeline.cpp (223504 => 223505)
--- trunk/Source/WebCore/platform/graphics/gstreamer/mse/PlaybackPipeline.cpp 2017-10-17 09:39:52 UTC (rev 223504)
+++ trunk/Source/WebCore/platform/graphics/gstreamer/mse/PlaybackPipeline.cpp 2017-10-17 09:40:47 UTC (rev 223505)
@@ -235,7 +235,10 @@
pad = adoptGRef(gst_element_get_static_pad(parser, "src"));
gst_element_add_pad(stream->parser, gst_ghost_pad_new("src", pad.get()));
- } else if (!g_strcmp0(mediaType, "video/x-vp9"))
+ } else if (!g_strcmp0(mediaType, "video/x-vp8")
+ || !g_strcmp0(mediaType, "video/x-vp9")
+ || !g_strcmp0(mediaType, "audio/x-opus")
+ || !g_strcmp0(mediaType, "audio/x-vorbis"))
stream->parser = nullptr;
else {
GST_ERROR_OBJECT(stream->parent, "Unsupported media format: %s", mediaType);