Title: [221430] trunk/Source/WebCore
Revision: 221430
Author: jer.no...@apple.com
Date: 2017-08-31 11:48:45 -0700 (Thu, 31 Aug 2017)

Log Message

MSE-to-Canvas painting can become "stuck" during heavy workloads
https://bugs.webkit.org/show_bug.cgi?id=176170

Reviewed by Eric Carlson.

During heavy workloads, the CMBufferQueue trigger that notifies us when we dip below the
"low-water mark" of decoded (and decoding) frames will not fire. Rather than rely on a trigger
(which fires only when the number of decoded frames changes, not when the number of "frames
being decoded" changes), call maybeBecomeReadyForMoreMediaData() whenever either the number of
frames in the decoded queue or the number of frames being decoded decreases.
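
To illustrate the approach (this is a minimal sketch, not the actual WebCore implementation, which is
in the diff below), here is a self-contained C++ example of the pattern: every path that decreases
either the decoded-queue size or the frames-being-decoded count re-checks readiness directly instead
of waiting on a CMBufferQueue trigger. The class, the readiness test, and the kLowWaterMark value are
simplified stand-ins; only the identifier names mirror WebCoreDecompressionSession.

    // Minimal sketch only: mirrors the pattern, not WebCoreDecompressionSession itself.
    #include <cstddef>
    #include <deque>
    #include <functional>
    #include <utility>

    class DecompressionSessionSketch {
    public:
        // Placeholder threshold; the real low-water mark lives in WebCoreDecompressionSession.h.
        static constexpr std::size_t kLowWaterMark = 15;

        void setNotificationCallback(std::function<void()> callback) { m_notificationCallback = std::move(callback); }

        bool isReadyForMoreMediaData() const
        {
            // Simplified readiness test: decoded frames plus frames still in the decoder.
            return m_decodedFrames.size() + m_framesBeingDecoded < kLowWaterMark;
        }

        void decodeSample(int sample, bool shouldDrop)
        {
            ++m_framesBeingDecoded;
            if (shouldDrop) {
                // Dropped before decoding: the in-flight count just decreased, so re-check readiness.
                --m_framesBeingDecoded;
                maybeBecomeReadyForMoreMediaData();
                return;
            }
            m_decodedFrames.push_back(sample);
            --m_framesBeingDecoded;
            maybeBecomeReadyForMoreMediaData();
        }

        void dequeueDecodedFrame()
        {
            if (m_decodedFrames.empty())
                return;
            m_decodedFrames.pop_front();
            // The decoded queue just shrank, so re-check readiness here as well.
            maybeBecomeReadyForMoreMediaData();
        }

    private:
        void maybeBecomeReadyForMoreMediaData()
        {
            if (isReadyForMoreMediaData() && m_notificationCallback)
                m_notificationCallback();
        }

        std::deque<int> m_decodedFrames;
        std::size_t m_framesBeingDecoded { 0 };
        std::function<void()> m_notificationCallback;
    };

With readiness re-checked at every decrement, the CMBufferQueueInstallTriggerWithIntegerThreshold()
trigger and its token are no longer needed, which is why the patch deletes
maybeBecomeReadyForMoreMediaDataCallback() and m_didBecomeReadyTrigger.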

* platform/graphics/cocoa/WebCoreDecompressionSession.h:
* platform/graphics/cocoa/WebCoreDecompressionSession.mm:
(WebCore::WebCoreDecompressionSession::maybeBecomeReadyForMoreMediaData):
(WebCore::WebCoreDecompressionSession::enqueueSample):
(WebCore::WebCoreDecompressionSession::decodeSample):
(WebCore::WebCoreDecompressionSession::handleDecompressionOutput):
(WebCore::WebCoreDecompressionSession::getFirstVideoFrame):
(WebCore::WebCoreDecompressionSession::automaticDequeue):
(WebCore::WebCoreDecompressionSession::imageForTime):
(WebCore::WebCoreDecompressionSession::maybeBecomeReadyForMoreMediaDataCallback): Deleted.

Modified Paths

trunk/Source/WebCore/ChangeLog
trunk/Source/WebCore/platform/graphics/cocoa/WebCoreDecompressionSession.h
trunk/Source/WebCore/platform/graphics/cocoa/WebCoreDecompressionSession.mm

Diff

Modified: trunk/Source/WebCore/ChangeLog (221429 => 221430)


--- trunk/Source/WebCore/ChangeLog	2017-08-31 18:37:38 UTC (rev 221429)
+++ trunk/Source/WebCore/ChangeLog	2017-08-31 18:48:45 UTC (rev 221430)
@@ -1,3 +1,27 @@
+2017-08-31  Jer Noble  <jer.no...@apple.com>
+
+        MSE-to-Canvas painting can become "stuck" during heavy workloads
+        https://bugs.webkit.org/show_bug.cgi?id=176170
+
+        Reviewed by Eric Carlson.
+
+        During heavy workloads, the trigger from CMBufferQueue notifying us that we have dipped below
+        the "low-water mark" of decoded (and decoding) frames will not fire. Instead of using a trigger
+        (since it will not fire when the number of "frames being decoded" changes, just the number of
+        decoded frames), just call maybeBecomeReadyForMoreMediaData() whenever the number of frames in
+        the decoded queue decreases, or when the number of frames being decoded decreases.
+
+        * platform/graphics/cocoa/WebCoreDecompressionSession.h:
+        * platform/graphics/cocoa/WebCoreDecompressionSession.mm:
+        (WebCore::WebCoreDecompressionSession::maybeBecomeReadyForMoreMediaData):
+        (WebCore::WebCoreDecompressionSession::enqueueSample):
+        (WebCore::WebCoreDecompressionSession::decodeSample):
+        (WebCore::WebCoreDecompressionSession::handleDecompressionOutput):
+        (WebCore::WebCoreDecompressionSession::getFirstVideoFrame):
+        (WebCore::WebCoreDecompressionSession::automaticDequeue):
+        (WebCore::WebCoreDecompressionSession::imageForTime):
+        (WebCore::WebCoreDecompressionSession::maybeBecomeReadyForMoreMediaDataCallback): Deleted.
+
 2017-08-31  Youenn Fablet  <you...@apple.com>
 
         Move consume promise from FetchBody to FetchBodyConsumer

Modified: trunk/Source/WebCore/platform/graphics/cocoa/WebCoreDecompressionSession.h (221429 => 221430)


--- trunk/Source/WebCore/platform/graphics/cocoa/WebCoreDecompressionSession.h	2017-08-31 18:37:38 UTC (rev 221429)
+++ trunk/Source/WebCore/platform/graphics/cocoa/WebCoreDecompressionSession.h	2017-08-31 18:48:45 UTC (rev 221430)
@@ -38,7 +38,6 @@
 
 typedef CFTypeRef CMBufferRef;
 typedef struct opaqueCMBufferQueue *CMBufferQueueRef;
-typedef struct opaqueCMBufferQueueTriggerToken *CMBufferQueueTriggerToken;
 typedef struct opaqueCMSampleBuffer *CMSampleBufferRef;
 typedef struct OpaqueCMTimebase* CMTimebaseRef;
 typedef signed long CMItemCount;
@@ -91,7 +90,6 @@
     static CMTime getPresentationTime(CMBufferRef, void* refcon);
     static CMTime getDuration(CMBufferRef, void* refcon);
     static CFComparisonResult compareBuffers(CMBufferRef buf1, CMBufferRef buf2, void* refcon);
-    static void maybeBecomeReadyForMoreMediaDataCallback(void* refcon, CMBufferQueueTriggerToken);
     void maybeBecomeReadyForMoreMediaData();
 
     static const CMItemCount kMaximumCapacity = 120;
@@ -108,7 +106,6 @@
     OSObjectPtr<dispatch_source_t> m_timerSource;
     std::function<void()> m_notificationCallback;
     std::function<void()> m_hasAvailableFrameCallback;
-    CMBufferQueueTriggerToken m_didBecomeReadyTrigger { nullptr };
 
     bool m_invalidated { false };
     int m_framesBeingDecoded { 0 };

Modified: trunk/Source/WebCore/platform/graphics/cocoa/WebCoreDecompressionSession.mm (221429 => 221430)


--- trunk/Source/WebCore/platform/graphics/cocoa/WebCoreDecompressionSession.mm	2017-08-31 18:37:38 UTC (rev 221429)
+++ trunk/Source/WebCore/platform/graphics/cocoa/WebCoreDecompressionSession.mm	2017-08-31 18:48:45 UTC (rev 221430)
@@ -85,18 +85,13 @@
     }
 }
 
-void WebCoreDecompressionSession::maybeBecomeReadyForMoreMediaDataCallback(void* refcon, CMBufferQueueTriggerToken)
-{
-    WebCoreDecompressionSession* session = static_cast<WebCoreDecompressionSession*>(refcon);
-    session->maybeBecomeReadyForMoreMediaData();
-}
-
 void WebCoreDecompressionSession::maybeBecomeReadyForMoreMediaData()
 {
-    LOG(Media, "WebCoreDecompressionSession::maybeBecomeReadyForMoreMediaData(%p) - isReadyForMoreMediaData(%d), hasCallback(%d)", this, isReadyForMoreMediaData(), !!m_notificationCallback);
     if (!isReadyForMoreMediaData() || !m_notificationCallback)
         return;
 
+    LOG(Media, "WebCoreDecompressionSession::maybeBecomeReadyForMoreMediaData(%p) - isReadyForMoreMediaData(%d), hasCallback(%d)", this, isReadyForMoreMediaData(), !!m_notificationCallback);
+
     if (isMainThread()) {
         m_notificationCallback();
         return;
@@ -148,8 +143,6 @@
 #pragma pack(pop)
         CMBufferQueueCreate(kCFAllocatorDefault, kMaximumCapacity, &callbacks.callbacks, &outQueue);
         m_producerQueue = adoptCF(outQueue);
-
-        CMBufferQueueInstallTriggerWithIntegerThreshold(m_producerQueue.get(), maybeBecomeReadyForMoreMediaDataCallback, this, kCMBufferQueueTrigger_WhenBufferCountBecomesLessThan, kLowWaterMark, &m_didBecomeReadyTrigger);
     }
 
     if (!m_consumerQueue) {
@@ -242,6 +235,8 @@
     if (!shouldDecodeSample(sample, displaying)) {
         ++m_totalVideoFrames;
         ++m_droppedVideoFrames;
+        --m_framesBeingDecoded;
+        maybeBecomeReadyForMoreMediaData();
         return;
     }
 
@@ -264,6 +259,8 @@
     CMVideoFormatDescriptionRef rawImageBufferDescription = nullptr;
     if (status != noErr || noErr != CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault, rawImageBuffer, &rawImageBufferDescription)) {
         ++m_corruptedVideoFrames;
+        --m_framesBeingDecoded;
+        maybeBecomeReadyForMoreMediaData();
         return;
     }
     RetainPtr<CMVideoFormatDescriptionRef> imageBufferDescription = adoptCF(rawImageBufferDescription);
@@ -277,6 +274,8 @@
     CMSampleBufferRef rawImageSampleBuffer = nullptr;
     if (noErr != CMSampleBufferCreateReadyWithImageBuffer(kCFAllocatorDefault, rawImageBuffer, imageBufferDescription.get(), &imageBufferTiming, &rawImageSampleBuffer)) {
         ++m_corruptedVideoFrames;
+        --m_framesBeingDecoded;
+        maybeBecomeReadyForMoreMediaData();
         return;
     }
 
@@ -298,6 +297,9 @@
     RetainPtr<CMSampleBufferRef> currentSample = adoptCF((CMSampleBufferRef)CMBufferQueueDequeueAndRetain(m_producerQueue.get()));
     RetainPtr<CVPixelBufferRef> imageBuffer = (CVPixelBufferRef)CMSampleBufferGetImageBuffer(currentSample.get());
     ASSERT(CFGetTypeID(imageBuffer.get()) == CVPixelBufferGetTypeID());
+
+    maybeBecomeReadyForMoreMediaData();
+
     return imageBuffer;
 }
 
@@ -309,6 +311,9 @@
     auto time = toMediaTime(CMTimebaseGetTime(m_timebase.get()));
     LOG(Media, "WebCoreDecompressionSession::automaticDequeue(%p) - purging all samples before time(%s)", this, toString(time).utf8().data());
 
+    MediaTime nextFireTime = MediaTime::positiveInfiniteTime();
+    bool releasedImageBuffers = false;
+
     while (CMSampleBufferRef firstSample = (CMSampleBufferRef)CMBufferQueueGetHead(m_producerQueue.get())) {
         MediaTime presentationTimestamp = toMediaTime(CMSampleBufferGetPresentationTimeStamp(firstSample));
         MediaTime duration = toMediaTime(CMSampleBufferGetDuration(firstSample));
@@ -315,6 +320,7 @@
         MediaTime presentationEndTimestamp = presentationTimestamp + duration;
         if (time > presentationEndTimestamp) {
             CFRelease(CMBufferQueueDequeueAndRetain(m_producerQueue.get()));
+            releasedImageBuffers = true;
             continue;
         }
 
@@ -323,12 +329,16 @@
         auto end = toMediaTime(CMBufferQueueGetEndPresentationTimeStamp(m_producerQueue.get()));
         LOG(Media, "WebCoreDecompressionSession::automaticDequeue(%p) - queue(%s -> %s)", this, toString(begin).utf8().data(), toString(end).utf8().data());
 #endif
-        CMTimebaseSetTimerDispatchSourceNextFireTime(m_timebase.get(), m_timerSource.get(), toCMTime(presentationEndTimestamp), 0);
-        return;
+
+        nextFireTime = presentationEndTimestamp;
+        break;
     }
 
+    if (releasedImageBuffers)
+        maybeBecomeReadyForMoreMediaData();
+
     LOG(Media, "WebCoreDecompressionSession::automaticDequeue(%p) - queue empty", this, toString(time).utf8().data());
-    CMTimebaseSetTimerDispatchSourceNextFireTime(m_timebase.get(), m_timerSource.get(), kCMTimePositiveInfinity, 0);
+    CMTimebaseSetTimerDispatchSourceNextFireTime(m_timebase.get(), m_timerSource.get(), toCMTime(nextFireTime), 0);
 }
 
 void WebCoreDecompressionSession::enqueueDecodedSample(CMSampleBufferRef sample, bool displaying)
@@ -425,6 +435,8 @@
         return nullptr;
     }
 
+    bool releasedImageBuffers = false;
+
     while (CMSampleBufferRef firstSample = (CMSampleBufferRef)CMBufferQueueGetHead(m_producerQueue.get())) {
         MediaTime presentationTimestamp = toMediaTime(CMSampleBufferGetPresentationTimeStamp(firstSample));
         MediaTime duration = toMediaTime(CMSampleBufferGetDuration(firstSample));
@@ -433,6 +445,7 @@
             return nullptr;
         if (!allowEarlier && presentationEndTimestamp < time) {
             CFRelease(CMBufferQueueDequeueAndRetain(m_producerQueue.get()));
+            releasedImageBuffers = true;
             continue;
         }
 
@@ -443,6 +456,8 @@
         if (m_timebase)
             CMTimebaseSetTimerDispatchSourceToFireImmediately(m_timebase.get(), m_timerSource.get());
 
+        maybeBecomeReadyForMoreMediaData();
+
         LOG(Media, "WebCoreDecompressionSession::imageForTime(%p) - found sample for time(%s) in queue(%s -> %s)", this, toString(time).utf8().data(), toString(startTime).utf8().data(), toString(endTime).utf8().data());
         return imageBuffer;
     }
@@ -450,6 +465,9 @@
     if (m_timebase)
         CMTimebaseSetTimerDispatchSourceToFireImmediately(m_timebase.get(), m_timerSource.get());
 
+    if (releasedImageBuffers)
+        maybeBecomeReadyForMoreMediaData();
+
     LOG(Media, "WebCoreDecompressionSession::imageForTime(%p) - no matching sample for time(%s) in queue(%s -> %s)", this, toString(time).utf8().data(), toString(startTime).utf8().data(), toString(endTime).utf8().data());
     return nullptr;
 }