Diff
Modified: trunk/LayoutTests/ChangeLog (288030 => 288031)
--- trunk/LayoutTests/ChangeLog 2022-01-14 21:47:10 UTC (rev 288030)
+++ trunk/LayoutTests/ChangeLog 2022-01-14 21:58:40 UTC (rev 288031)
@@ -1,3 +1,18 @@
+2022-01-14 Jer Noble <[email protected]>
+
+ [Cocoa] rVFC() isn't called for initial video load
+ https://bugs.webkit.org/show_bug.cgi?id=235006
+
+ Reviewed by Eric Carlson.
+
+ * media/request-video-frame-loadstart-expected.txt: Added.
+ * media/request-video-frame-loadstart.html: Added.
+ * media/request-video-frame-seek-expected.txt: Added.
+ * media/request-video-frame-seek.html: Added.
+ * platform/ios-wk2/TestExpectations:
+ * platform/mac-wk1/TestExpectations:
+ * platform/mac-wk2/TestExpectations:
+
2022-01-14 Kimmo Kinnunen <[email protected]>
gl.texImage2D upload of getUserMedia streams via <video> element fails
Added: trunk/LayoutTests/media/request-video-frame-loadstart-expected.txt (0 => 288031)
--- trunk/LayoutTests/media/request-video-frame-loadstart-expected.txt (rev 0)
+++ trunk/LayoutTests/media/request-video-frame-loadstart-expected.txt 2022-01-14 21:58:40 UTC (rev 288031)
@@ -0,0 +1,6 @@
+
+RUN(video.requestVideoFrameCallback(callback))
+RUN(video.src = findMediaFile("video", "content/test"))
+Promise resolved OK
+END OF TEST
+
Added: trunk/LayoutTests/media/request-video-frame-loadstart.html (0 => 288031)
--- trunk/LayoutTests/media/request-video-frame-loadstart.html (rev 0)
+++ trunk/LayoutTests/media/request-video-frame-loadstart.html 2022-01-14 21:58:40 UTC (rev 288031)
@@ -0,0 +1,27 @@
+<!DOCTYPE html>
+<html>
+<head>
+ <title>request-video-frame-loadstart</title>
+ <script src="media-file.js"></script>
+ <script src="video-test.js"></script>
+ <script>
+ window.addEventListener('load', async event => {
+ findMediaElement();
+
+ let videoFramePromise = new Promise((resolve, reject) => {
+ consoleWrite('RUN(video.requestVideoFrameCallback(callback))');
+ video.requestVideoFrameCallback(resolve);
+ });
+
+ run('video.src = findMediaFile("video", "content/test")');
+
+ await shouldResolve(videoFramePromise);
+
+ endTest();
+ });
+ </script>
+</head>
+<body>
+ <video muted></video>
+</body>
+</html>
\ No newline at end of file
Added: trunk/LayoutTests/media/request-video-frame-seek-expected.txt (0 => 288031)
--- trunk/LayoutTests/media/request-video-frame-seek-expected.txt (rev 0)
+++ trunk/LayoutTests/media/request-video-frame-seek-expected.txt 2022-01-14 21:58:40 UTC (rev 288031)
@@ -0,0 +1,8 @@
+
+RUN(video.src = findMediaFile("video", "content/test"))
+EVENT(canplaythrough)
+RUN(video.requestVideoFrameCallback(callback))
+RUN(video.currentTime = 0.5)
+Promise resolved OK
+END OF TEST
+
Added: trunk/LayoutTests/media/request-video-frame-seek.html (0 => 288031)
--- trunk/LayoutTests/media/request-video-frame-seek.html (rev 0)
+++ trunk/LayoutTests/media/request-video-frame-seek.html 2022-01-14 21:58:40 UTC (rev 288031)
@@ -0,0 +1,31 @@
+<!DOCTYPE html>
+<html>
+<head>
+ <title>request-video-frame-seek</title>
+ <script src="media-file.js"></script>
+ <script src="video-test.js"></script>
+ <script>
+ window.addEventListener('load', async event => {
+ findMediaElement();
+
+ run('video.src = findMediaFile("video", "content/test")');
+
+ await waitFor(video, 'canplaythrough');
+
+ let videoFramePromise = new Promise((resolve, reject) => {
+ consoleWrite('RUN(video.requestVideoFrameCallback(callback))');
+ video.requestVideoFrameCallback(resolve);
+ });
+
+ run('video.currentTime = 0.5');
+
+ await shouldResolve(videoFramePromise);
+
+ endTest();
+ });
+ </script>
+</head>
+<body>
+ <video muted></video>
+</body>
+</html>
\ No newline at end of file
Modified: trunk/LayoutTests/platform/ios-wk2/TestExpectations (288030 => 288031)
--- trunk/LayoutTests/platform/ios-wk2/TestExpectations 2022-01-14 21:47:10 UTC (rev 288030)
+++ trunk/LayoutTests/platform/ios-wk2/TestExpectations 2022-01-14 21:58:40 UTC (rev 288031)
@@ -145,6 +145,7 @@
# Timing out tests until we add XR session.
imported/w3c/web-platform-tests/video-rvfc/request-video-frame-callback-before-xr-session.https.html [ Skip ]
imported/w3c/web-platform-tests/video-rvfc/request-video-frame-callback-during-xr-session.https.html [ Skip ]
+webkit.org/b/235072 imported/w3c/web-platform-tests/video-rvfc/request-video-frame-callback-repeating.html [ Skip ]
#//////////////////////////////////////////////////////////////////////////////////////////
# End platform-specific directories.
Modified: trunk/LayoutTests/platform/mac-wk1/TestExpectations (288030 => 288031)
--- trunk/LayoutTests/platform/mac-wk1/TestExpectations 2022-01-14 21:47:10 UTC (rev 288030)
+++ trunk/LayoutTests/platform/mac-wk1/TestExpectations 2022-01-14 21:58:40 UTC (rev 288031)
@@ -38,6 +38,8 @@
imported/w3c/web-platform-tests/video-rvfc/request-video-frame-callback-before-xr-session.https.html [ Skip ]
imported/w3c/web-platform-tests/video-rvfc/request-video-frame-callback-during-xr-session.https.html [ Skip ]
+webkit.org/b/235072 imported/w3c/web-platform-tests/video-rvfc/request-video-frame-callback-repeating.html [ Skip ]
+
#//////////////////////////////////////////////////////////////////////////////////////////
# End platform-specific directories.
#//////////////////////////////////////////////////////////////////////////////////////////
Modified: trunk/LayoutTests/platform/mac-wk2/TestExpectations (288030 => 288031)
--- trunk/LayoutTests/platform/mac-wk2/TestExpectations 2022-01-14 21:47:10 UTC (rev 288030)
+++ trunk/LayoutTests/platform/mac-wk2/TestExpectations 2022-01-14 21:58:40 UTC (rev 288031)
@@ -124,6 +124,8 @@
imported/w3c/web-platform-tests/video-rvfc/request-video-frame-callback-before-xr-session.https.html [ Skip ]
imported/w3c/web-platform-tests/video-rvfc/request-video-frame-callback-during-xr-session.https.html [ Skip ]
+webkit.org/b/235072 imported/w3c/web-platform-tests/video-rvfc/request-video-frame-callback-repeating.html [ Skip ]
+
#//////////////////////////////////////////////////////////////////////////////////////////
# End platform-specific directories.
#//////////////////////////////////////////////////////////////////////////////////////////
Modified: trunk/Source/WebCore/ChangeLog (288030 => 288031)
--- trunk/Source/WebCore/ChangeLog 2022-01-14 21:47:10 UTC (rev 288030)
+++ trunk/Source/WebCore/ChangeLog 2022-01-14 21:58:40 UTC (rev 288031)
@@ -1,3 +1,75 @@
+2022-01-14 Jer Noble <[email protected]>
+
+ [Cocoa] rVFC() isn't called for initial video load
+ https://bugs.webkit.org/show_bug.cgi?id=235006
+
+ Reviewed by Eric Carlson.
+
+ Tests: media/request-video-frame-loadstart.html
+ media/request-video-frame-seek.html
+
+ Add a new utility class, QueuedVideoOutput, which will pull pixel buffers out of an AVPlayerItemVideoOutput
+ pre-emptively. Once those pixel buffers are enqueued locally, their associated timing information can be used to
+ fire a callback for that buffer's display time.
+
+ Previously, paints were blocked from pulling new pixel buffers from the video output. With the QueuedVideoOutput
+ class, this is no longer necessary. The QueuedVideoOutput will notify its client when the image for currentTime
+ changes, and other clients can freely ask for the currentTime's image.
+
+ To curb runaway memory growth, frames are purged as soon as a new image for the current time is available, and when
+ the video output signals that its own queues were purged.
+
+ * WebCore.xcodeproj/project.pbxproj:
+ * html/HTMLVideoElement.cpp:
+ (WebCore::HTMLVideoElement::cancelVideoFrameCallback):
+ (WebCore::HTMLVideoElement::serviceRequestVideoFrameCallbacks):
+ * html/HTMLVideoElement.h:
+ * platform/graphics/avfoundation/objc/MediaPlayerPrivateAVFoundationObjC.h:
+ * platform/graphics/avfoundation/objc/MediaPlayerPrivateAVFoundationObjC.mm:
+ (WebCore::MediaPlayerPrivateAVFoundationObjC::~MediaPlayerPrivateAVFoundationObjC):
+ (WebCore::MediaPlayerPrivateAVFoundationObjC::hasAvailableVideoFrame const):
+ (WebCore::MediaPlayerPrivateAVFoundationObjC::updateVideoFullscreenInlineImage):
+ (WebCore::MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenLayer):
+ (WebCore::MediaPlayerPrivateAVFoundationObjC::startVideoFrameMetadataGathering):
+ (WebCore::MediaPlayerPrivateAVFoundationObjC::checkNewVideoFrameMetadata):
+ (WebCore::MediaPlayerPrivateAVFoundationObjC::updateVideoTracks):
+ (WebCore::MediaPlayerPrivateAVFoundationObjC::createVideoOutput):
+ (WebCore::MediaPlayerPrivateAVFoundationObjC::destroyVideoOutput):
+ (WebCore::MediaPlayerPrivateAVFoundationObjC::updateLastPixelBuffer):
+ (WebCore::MediaPlayerPrivateAVFoundationObjC::videoOutputHasAvailableFrame):
+ (WebCore::MediaPlayerPrivateAVFoundationObjC::updateLastImage):
+ (WebCore::MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput):
+ (WebCore::MediaPlayerPrivateAVFoundationObjC::pixelBufferForCurrentTime):
+ (WebCore::MediaPlayerPrivateAVFoundationObjC::nativeImageForCurrentTime):
+ (WebCore::MediaPlayerPrivateAVFoundationObjC::colorSpace):
+ (WebCore::MediaPlayerPrivateAVFoundationObjC::outputMediaDataWillChange):
+ (WebCore::globalPullDelegateQueue): Deleted.
+ (WebCore::MediaPlayerPrivateAVFoundationObjC::waitForVideoOutputMediaDataWillChange): Deleted.
+ (-[WebCoreAVFPullDelegate initWithPlayer:]): Deleted.
+ (-[WebCoreAVFPullDelegate outputMediaDataWillChange:]): Deleted.
+ (-[WebCoreAVFPullDelegate outputSequenceWasFlushed:]): Deleted.
+ * platform/graphics/avfoundation/objc/QueuedVideoOutput.h: Added.
+ * platform/graphics/avfoundation/objc/QueuedVideoOutput.mm: Added.
+ (-[WebQueuedVideoOutputDelegate initWithParent:]):
+ (-[WebQueuedVideoOutputDelegate outputMediaDataWillChange:]):
+ (-[WebQueuedVideoOutputDelegate outputSequenceWasFlushed:]):
+ (-[WebQueuedVideoOutputDelegate observeValueForKeyPath:ofObject:change:context:]):
+ (WebCore::globalOutputDelegateQueue):
+ (WebCore::QueuedVideoOutput::create):
+ (WebCore::QueuedVideoOutput::QueuedVideoOutput):
+ (WebCore::QueuedVideoOutput::~QueuedVideoOutput):
+ (WebCore::QueuedVideoOutput::invalidate):
+ (WebCore::decltype):
+ (WebCore::QueuedVideoOutput::hasImageForTime const):
+ (WebCore::QueuedVideoOutput::takeVideoFrameEntryForTime):
+ (WebCore::QueuedVideoOutput::addCurrentImageChangedObserver):
+ (WebCore::QueuedVideoOutput::configureNextImageObserver):
+ (WebCore::QueuedVideoOutput::imageForCurrentTimeChanged):
+ (WebCore::QueuedVideoOutput::addVideoFrameEntries):
+ (WebCore::QueuedVideoOutput::purgeVideoFrameEntries):
+ (WebCore::QueuedVideoOutput::purgeImagesBeforeTime):
+ (WebCore::QueuedVideoOutput::rateChanged):
+
2022-01-14 Tyler Wilcock <[email protected]>
AX: AXIsolatedObject::initializeAttributeData should compute AXAncestorFlags if they are unexpectedly uninitialized
Modified: trunk/Source/WebCore/PAL/ChangeLog (288030 => 288031)
--- trunk/Source/WebCore/PAL/ChangeLog 2022-01-14 21:47:10 UTC (rev 288030)
+++ trunk/Source/WebCore/PAL/ChangeLog 2022-01-14 21:58:40 UTC (rev 288031)
@@ -1,3 +1,12 @@
+2022-01-14 Jer Noble <[email protected]>
+
+ [Cocoa] rVFC() isn't called for initial video load
+ https://bugs.webkit.org/show_bug.cgi?id=235006
+
+ Reviewed by Eric Carlson.
+
+ * pal/spi/cocoa/AVFoundationSPI.h:
+
2022-01-13 Elliott Williams <[email protected]>
[XCBuild] Add "product dependencies" which influence workspace build order
Modified: trunk/Source/WebCore/PAL/pal/spi/cocoa/AVFoundationSPI.h (288030 => 288031)
--- trunk/Source/WebCore/PAL/pal/spi/cocoa/AVFoundationSPI.h 2022-01-14 21:47:10 UTC (rev 288030)
+++ trunk/Source/WebCore/PAL/pal/spi/cocoa/AVFoundationSPI.h 2022-01-14 21:58:40 UTC (rev 288031)
@@ -40,9 +40,10 @@
#import <AVFoundation/AVMediaSelectionGroup_Private.h>
#import <AVFoundation/AVOutputContext_Private.h>
#import <AVFoundation/AVOutputDevice.h>
-#import <AVFoundation/AVPlayer_Private.h>
+#import <AVFoundation/AVPlayerItemOutput_Private.h>
#import <AVFoundation/AVPlayerItem_Private.h>
#import <AVFoundation/AVPlayerLayer_Private.h>
+#import <AVFoundation/AVPlayer_Private.h>
#if ENABLE(MEDIA_SOURCE)
#if PLATFORM(IOS_FAMILY_SIMULATOR)
@@ -66,6 +67,7 @@
#import <AVFoundation/AVCaptureSession.h>
#import <AVFoundation/AVPlayer.h>
#import <AVFoundation/AVPlayerItem.h>
+#import <AVFoundation/AVPlayerItemOutput.h>
#if HAVE(AVFOUNDATION_INTERSTITIAL_EVENTS)
#import <AVFoundation/AVPlayerInterstitialEventController.h>
@@ -92,6 +94,11 @@
@end
#endif
+@interface AVPlayerItemVideoOutput (AVPlayerItemVideoOutputEarliestTime)
+@property (nonatomic, readonly) CMTime earliestAvailablePixelBufferItemTime;
+- (void)requestNotificationOfMediaDataChangeAsSoonAsPossible;
+@end
+
#if ENABLE(WIRELESS_PLAYBACK_TARGET) || PLATFORM(IOS_FAMILY)
NS_ASSUME_NONNULL_BEGIN
Modified: trunk/Source/WebCore/WebCore.xcodeproj/project.pbxproj (288030 => 288031)
--- trunk/Source/WebCore/WebCore.xcodeproj/project.pbxproj 2022-01-14 21:47:10 UTC (rev 288030)
+++ trunk/Source/WebCore/WebCore.xcodeproj/project.pbxproj 2022-01-14 21:58:40 UTC (rev 288031)
@@ -4449,6 +4449,8 @@
CD1F9B4D270D03A900617EB6 /* ScrollExtents.h in Headers */ = {isa = PBXBuildFile; fileRef = CD1F9B4C270D03A900617EB6 /* ScrollExtents.h */; settings = {ATTRIBUTES = (Private, ); }; };
CD1F9B7D270E667800617EB6 /* HTMLAnchorElementInlines.h in Headers */ = {isa = PBXBuildFile; fileRef = CD1F9B7C270E667800617EB6 /* HTMLAnchorElementInlines.h */; settings = {ATTRIBUTES = (Private, ); }; };
CD1F9B80270E671A00617EB6 /* SVGElementInlines.h in Headers */ = {isa = PBXBuildFile; fileRef = CD1F9B7F270E671A00617EB6 /* SVGElementInlines.h */; };
+ CD20ED3C27878FFB0038BE44 /* QueuedVideoOutput.h in Headers */ = {isa = PBXBuildFile; fileRef = CD20ED3A27878FFB0038BE44 /* QueuedVideoOutput.h */; };
+ CD20ED3D27878FFB0038BE44 /* QueuedVideoOutput.mm in Sources */ = {isa = PBXBuildFile; fileRef = CD20ED3B27878FFB0038BE44 /* QueuedVideoOutput.mm */; };
CD225C0B1C46FBF400140761 /* WebCoreNSURLSession.mm in Sources */ = {isa = PBXBuildFile; fileRef = CD225C091C46FBF400140761 /* WebCoreNSURLSession.mm */; };
CD225C0C1C46FBF400140761 /* WebCoreNSURLSession.h in Headers */ = {isa = PBXBuildFile; fileRef = CD225C0A1C46FBF400140761 /* WebCoreNSURLSession.h */; settings = {ATTRIBUTES = (Private, ); }; };
CD27AE5022A9868700947FF9 /* ImageRotationSessionVT.h in Headers */ = {isa = PBXBuildFile; fileRef = CD27AE4E22A9868700947FF9 /* ImageRotationSessionVT.h */; settings = {ATTRIBUTES = (Private, ); }; };
@@ -15884,6 +15886,8 @@
CD1F9B70270DFA7F00617EB6 /* MediaKeyMessageEventInit.idl */ = {isa = PBXFileReference; lastKnownFileType = text; path = MediaKeyMessageEventInit.idl; sourceTree = "<group>"; };
CD1F9B7C270E667800617EB6 /* HTMLAnchorElementInlines.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = HTMLAnchorElementInlines.h; sourceTree = "<group>"; };
CD1F9B7F270E671A00617EB6 /* SVGElementInlines.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = SVGElementInlines.h; sourceTree = "<group>"; };
+ CD20ED3A27878FFB0038BE44 /* QueuedVideoOutput.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = QueuedVideoOutput.h; sourceTree = "<group>"; };
+ CD20ED3B27878FFB0038BE44 /* QueuedVideoOutput.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = QueuedVideoOutput.mm; sourceTree = "<group>"; };
CD225C091C46FBF400140761 /* WebCoreNSURLSession.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; path = WebCoreNSURLSession.mm; sourceTree = "<group>"; };
CD225C0A1C46FBF400140761 /* WebCoreNSURLSession.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = WebCoreNSURLSession.h; sourceTree = "<group>"; };
CD27AE4E22A9868700947FF9 /* ImageRotationSessionVT.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = ImageRotationSessionVT.h; sourceTree = "<group>"; };
@@ -30641,6 +30645,8 @@
CDC8B5A5180474F70016E685 /* MediaSourcePrivateAVFObjC.h */,
CDC8B5A4180474F70016E685 /* MediaSourcePrivateAVFObjC.mm */,
7A29F57118C69514004D0F81 /* OutOfBandTextTrackPrivateAVF.h */,
+ CD20ED3A27878FFB0038BE44 /* QueuedVideoOutput.h */,
+ CD20ED3B27878FFB0038BE44 /* QueuedVideoOutput.mm */,
CDF994FA24C12A6A002EA062 /* SourceBufferParserAVFObjC.h */,
CDF994FB24C12A6A002EA062 /* SourceBufferParserAVFObjC.mm */,
CDC8B5A918047FF10016E685 /* SourceBufferPrivateAVFObjC.h */,
@@ -36602,6 +36608,7 @@
EB0FB70D270D0B2E00F7810D /* PushSubscriptionOptionsInit.h in Headers */,
550A0BCA085F6039007353D6 /* QualifiedName.h in Headers */,
83C1F5941EDF69D300410D27 /* QualifiedNameCache.h in Headers */,
+ CD20ED3C27878FFB0038BE44 /* QueuedVideoOutput.h in Headers */,
A15E31F41E0CB0B5004B371C /* QuickLook.h in Headers */,
9BAEE92C22388A7D004157A9 /* Quirks.h in Headers */,
379E371713736A6600B9E919 /* QuotedPrintable.h in Headers */,
@@ -38585,6 +38592,7 @@
AA12DF491743DF83004DAFDF /* PlatformSpeechSynthesizerCocoa.mm in Sources */,
CDA29A301CBF74D400901CCF /* PlaybackSessionInterfaceAVKit.mm in Sources */,
CDA29A161CBDA56C00901CCF /* PlaybackSessionInterfaceMac.mm in Sources */,
+ CD20ED3D27878FFB0038BE44 /* QueuedVideoOutput.mm in Sources */,
419242492127B93E00634FCF /* RealtimeOutgoingVideoSourceCocoa.mm in Sources */,
071C004B270B864900D027C7 /* ReplayKitCaptureSource.mm in Sources */,
5C2B1AEC22397EBC00B91CF7 /* ResourceResponseCocoa.mm in Sources */,
Modified: trunk/Source/WebCore/html/HTMLVideoElement.cpp (288030 => 288031)
--- trunk/Source/WebCore/html/HTMLVideoElement.cpp 2022-01-14 21:47:10 UTC (rev 288030)
+++ trunk/Source/WebCore/html/HTMLVideoElement.cpp 2022-01-14 21:58:40 UTC (rev 288031)
@@ -603,13 +603,16 @@
void HTMLVideoElement::cancelVideoFrameCallback(unsigned identifier)
{
- auto index = m_videoFrameRequests.findMatching([identifier](auto& request) { return request->identifier == identifier; });
+ // Search first the requests currently being serviced, and mark them as cancelled if found.
+ auto index = m_servicedVideoFrameRequests.findMatching([identifier](auto& request) { return request->identifier == identifier; });
+ if (index != notFound) {
+ m_servicedVideoFrameRequests[index]->cancelled = true;
+ return;
+ }
+
+ index = m_videoFrameRequests.findMatching([identifier](auto& request) { return request->identifier == identifier; });
if (index == notFound)
return;
- if (m_isRunningVideoFrameRequests) {
- m_videoFrameRequests[index]->cancelled = true;
- return;
- }
m_videoFrameRequests.remove(index);
if (m_videoFrameRequests.isEmpty() && player())
@@ -631,6 +634,12 @@
if (!player())
return;
+ // If the requestVideoFrameCallback is called before the readyState >= HaveCurrentData,
+ // calls to createImageBitmap() with this element will result in a failed promise. Delay
+ // notifying the callback until we reach the HaveCurrentData state.
+ if (readyState() < HAVE_CURRENT_DATA)
+ return;
+
auto videoFrameMetadata = player()->videoFrameMetadata();
if (!videoFrameMetadata || !document().domWindow())
return;
@@ -639,21 +648,15 @@
Ref protectedThis { *this };
- // We store the size before calling callbacks as we do not want to call newly added callbacks.
- auto callbackCount = m_videoFrameRequests.size();
-
- m_isRunningVideoFrameRequests = true;
- for (size_t index = 0; index < callbackCount; ++index) {
- auto& request = m_videoFrameRequests[index];
+ m_videoFrameRequests.swap(m_servicedVideoFrameRequests);
+ for (auto& request : m_servicedVideoFrameRequests) {
if (!request->cancelled) {
request->callback->handleEvent(std::round(now.milliseconds()), *videoFrameMetadata);
request->cancelled = true;
}
}
- m_isRunningVideoFrameRequests = false;
+ m_servicedVideoFrameRequests.clear();
- m_videoFrameRequests.removeAllMatching([](auto& callback) { return callback->cancelled; });
-
if (m_videoFrameRequests.isEmpty() && player())
player()->stopVideoFrameMetadataGathering();
}
Modified: trunk/Source/WebCore/html/HTMLVideoElement.h (288030 => 288031)
--- trunk/Source/WebCore/html/HTMLVideoElement.h 2022-01-14 21:47:10 UTC (rev 288030)
+++ trunk/Source/WebCore/html/HTMLVideoElement.h 2022-01-14 21:58:40 UTC (rev 288031)
@@ -172,8 +172,8 @@
bool cancelled { false };
};
Vector<UniqueRef<VideoFrameRequest>> m_videoFrameRequests;
+ Vector<UniqueRef<VideoFrameRequest>> m_servicedVideoFrameRequests;
unsigned m_nextVideoFrameRequestIndex { 0 };
- bool m_isRunningVideoFrameRequests { false };
};
} // namespace WebCore
Modified: trunk/Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateAVFoundationObjC.h (288030 => 288031)
--- trunk/Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateAVFoundationObjC.h 2022-01-14 21:47:10 UTC (rev 288030)
+++ trunk/Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateAVFoundationObjC.h 2022-01-14 21:58:40 UTC (rev 288031)
@@ -31,6 +31,7 @@
#include <CoreMedia/CMTime.h>
#include <wtf/Function.h>
#include <wtf/HashMap.h>
+#include <wtf/Observer.h>
OBJC_CLASS AVAssetImageGenerator;
OBJC_CLASS AVAssetTrack;
@@ -67,6 +68,7 @@
class MediaPlaybackTarget;
class MediaSelectionGroupAVFObjC;
class PixelBufferConformerCV;
+class QueuedVideoOutput;
class FragmentedSharedBuffer;
class VideoLayerManagerObjC;
class VideoTrackPrivateAVFObjC;
@@ -347,7 +349,7 @@
void startVideoFrameMetadataGathering() final;
void stopVideoFrameMetadataGathering() final;
std::optional<VideoFrameMetadata> videoFrameMetadata() final { return std::exchange(m_videoFrameMetadata, { }); }
- void checkNewVideoFrameMetadata(CMTime);
+ void checkNewVideoFrameMetadata();
RetainPtr<AVURLAsset> m_avAsset;
RetainPtr<AVPlayer> m_avPlayer;
@@ -367,7 +369,7 @@
#endif
RetainPtr<AVAssetImageGenerator> m_imageGenerator;
- RetainPtr<AVPlayerItemVideoOutput> m_videoOutput;
+ RefPtr<QueuedVideoOutput> m_videoOutput;
RetainPtr<WebCoreAVFPullDelegate> m_videoOutputDelegate;
RetainPtr<CVPixelBufferRef> m_lastPixelBuffer;
RefPtr<NativeImage> m_lastImage;
@@ -464,6 +466,8 @@
std::optional<VideoFrameMetadata> m_videoFrameMetadata;
mutable std::optional<NSTimeInterval> m_cachedSeekableTimeRangesLastModifiedTime;
mutable std::optional<NSTimeInterval> m_cachedLiveUpdateInterval;
+ std::unique_ptr<Observer<void()>> m_currentImageChangedObserver;
+ std::unique_ptr<Observer<void()>> m_waitForVideoOutputMediaDataWillChangeObserver;
};
}
Modified: trunk/Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateAVFoundationObjC.mm (288030 => 288031)
--- trunk/Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateAVFoundationObjC.mm 2022-01-14 21:47:10 UTC (rev 288030)
+++ trunk/Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateAVFoundationObjC.mm 2022-01-14 21:58:40 UTC (rev 288031)
@@ -58,6 +58,7 @@
#import "PlatformScreen.h"
#import "PlatformTextTrack.h"
#import "PlatformTimeRanges.h"
+#import "QueuedVideoOutput.h"
#import "RuntimeApplicationChecks.h"
#import "ScriptDisallowedScope.h"
#import "SecurityOrigin.h"
@@ -205,16 +206,6 @@
- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest;
@end
-@interface WebCoreAVFPullDelegate : NSObject<AVPlayerItemOutputPullDelegate> {
- BinarySemaphore m_semaphore;
-}
-- (id)initWithPlayer:(WeakPtr<MediaPlayerPrivateAVFoundationObjC>&&)player;
-- (void)outputMediaDataWillChange:(AVPlayerItemOutput *)sender;
-- (void)outputSequenceWasFlushed:(AVPlayerItemOutput *)output;
-
-@property (nonatomic, readonly) BinarySemaphore& semaphore;
-@end
-
namespace WebCore {
static String convertEnumerationToString(AVPlayerTimeControlStatus enumerationValue)
{
@@ -261,16 +252,6 @@
return globalQueue;
}
-static dispatch_queue_t globalPullDelegateQueue()
-{
- static dispatch_queue_t globalQueue;
- static dispatch_once_t onceToken;
- dispatch_once(&onceToken, ^{
- globalQueue = dispatch_queue_create("WebCoreAVFPullDelegate queue", DISPATCH_QUEUE_SERIAL);
- });
- return globalQueue;
-}
-
static void registerFormatReaderIfNecessary()
{
#if ENABLE(WEBM_FORMAT_READER)
@@ -469,7 +450,8 @@
for (auto& pair : m_resourceLoaderMap)
pair.value->invalidate();
- [m_videoOutput setDelegate:nil queue:0];
+ if (m_videoOutput)
+ m_videoOutput->invalidate();
if (m_videoLayer)
destroyVideoLayer();
@@ -702,7 +684,7 @@
if (currentRenderingMode() == MediaRenderingMode::MediaRenderingToLayer)
return m_cachedIsReadyForDisplay;
- if (m_videoOutput && (m_lastPixelBuffer || [m_videoOutput hasNewPixelBufferForItemTime:[m_avPlayerItem currentTime]]))
+ if (m_videoOutput && (m_lastPixelBuffer || m_videoOutput->hasImageForTime(currentMediaTime())))
return true;
return m_videoFrameHasDrawn;
@@ -1444,34 +1426,30 @@
void MediaPlayerPrivateAVFoundationObjC::startVideoFrameMetadataGathering()
{
- ASSERT(!m_videoFrameMetadataGatheringObserver || m_avPlayer);
+ // requestVideoFrameCallback() cares about the /next/ available frame. Pull the current frame from
+ // the QueuedVideoOutput so paints of the current frame succeed;
+ updateLastPixelBuffer();
+
+ m_currentImageChangedObserver = WTF::makeUnique<Observer<void()>>([this] {
+ m_currentImageChangedObserver = nullptr;
+ checkNewVideoFrameMetadata();
+ });
+
+ if (m_videoOutput)
+ m_videoOutput->addCurrentImageChangedObserver(*m_currentImageChangedObserver);
+
m_isGatheringVideoFrameMetadata = true;
-
- // FIXME: We should use a CADisplayLink to get updates on rendering, for now we emulate with addPeriodicTimeObserverForInterval.
- m_videoFrameMetadataGatheringObserver = [m_avPlayer addPeriodicTimeObserverForInterval:PAL::CMTimeMake(1, 60) queue:dispatch_get_main_queue() usingBlock:[weakThis = WeakPtr { *this }](CMTime currentTime) {
- ensureOnMainThread([weakThis, currentTime] {
- if (weakThis)
- weakThis->checkNewVideoFrameMetadata(currentTime);
- });
- }];
}
-void MediaPlayerPrivateAVFoundationObjC::checkNewVideoFrameMetadata(CMTime currentTime)
+void MediaPlayerPrivateAVFoundationObjC::checkNewVideoFrameMetadata()
{
- if (!updateLastPixelBuffer())
+ if (!m_isGatheringVideoFrameMetadata)
return;
- VideoFrameMetadata metadata;
- metadata.width = m_cachedPresentationSize.width();
- metadata.height = m_cachedPresentationSize.height();
- metadata.presentedFrames = ++m_sampleCount;
- metadata.mediaTime = PAL::CMTimeGetSeconds(currentTime);
- // FIXME: presentationTime and expectedDisplayTime might not always have the same value, we should try getting more precise values.
- metadata.presentationTime = MonotonicTime::now().secondsSinceEpoch().seconds();
- metadata.expectedDisplayTime = metadata.presentationTime;
+ if (!updateLastPixelBuffer() && !m_videoFrameMetadata)
+ return;
- m_videoFrameMetadata = metadata;
- player()->onNewVideoFrameMetadata(WTFMove(metadata), m_lastPixelBuffer.get());
+ player()->onNewVideoFrameMetadata(WTFMove(*m_videoFrameMetadata), m_lastPixelBuffer.get());
}
void MediaPlayerPrivateAVFoundationObjC::stopVideoFrameMetadataGathering()
@@ -2517,18 +2495,18 @@
if (!m_avPlayerItem || m_videoOutput)
return;
- m_videoOutput = adoptNS([PAL::allocAVPlayerItemVideoOutputInstance() initWithPixelBufferAttributes:nil]);
+ m_videoOutput = QueuedVideoOutput::create(m_avPlayerItem.get(), m_avPlayer.get());
ASSERT(m_videoOutput);
if (!m_videoOutput) {
ERROR_LOG(LOGIDENTIFIER, "-[AVPlayerItemVideoOutput initWithPixelBufferAttributes:] failed!");
return;
}
+ if (m_currentImageChangedObserver)
+ m_videoOutput->addCurrentImageChangedObserver(*m_currentImageChangedObserver);
- m_videoOutputDelegate = adoptNS([[WebCoreAVFPullDelegate alloc] initWithPlayer:*this]);
- [m_videoOutput setDelegate:m_videoOutputDelegate.get() queue:globalPullDelegateQueue()];
+ if (m_waitForVideoOutputMediaDataWillChangeObserver)
+ m_videoOutput->addCurrentImageChangedObserver(*m_waitForVideoOutputMediaDataWillChangeObserver);
- [m_avPlayerItem addOutput:m_videoOutput.get()];
-
setNeedsRenderingModeChanged();
}
@@ -2537,19 +2515,19 @@
if (!m_videoOutput)
return;
- if (m_avPlayerItem)
- [m_avPlayerItem removeOutput:m_videoOutput.get()];
-
INFO_LOG(LOGIDENTIFIER);
- m_videoOutput = 0;
+ m_videoOutput->invalidate();
+ m_videoOutput = nullptr;
+ m_videoFrameMetadata = { };
+
setNeedsRenderingModeChanged();
}
bool MediaPlayerPrivateAVFoundationObjC::updateLastPixelBuffer()
{
- if (!m_avPlayerItem || readyState() < MediaPlayer::ReadyState::HaveCurrentData)
+ if (!m_avPlayerItem)
return false;
m_haveBeenAskedToPaint = true;
@@ -2558,13 +2536,26 @@
createVideoOutput();
ASSERT(m_videoOutput);
- CMTime currentTime = [m_avPlayerItem currentTime];
+ auto currentTime = currentMediaTime();
- if (![m_videoOutput hasNewPixelBufferForItemTime:currentTime])
+ if (!m_videoOutput->hasImageForTime(currentTime))
return false;
- m_lastPixelBuffer = adoptCF([m_videoOutput copyPixelBufferForItemTime:currentTime itemTimeForDisplay:nil]);
+ auto entry = m_videoOutput->takeVideoFrameEntryForTime(currentTime);
+ m_lastPixelBuffer = WTFMove(entry.pixelBuffer);
+ if (m_isGatheringVideoFrameMetadata) {
+ auto presentationTime = MonotonicTime::now().secondsSinceEpoch().seconds() - (currentTime - entry.displayTime).toDouble();
+ m_videoFrameMetadata = {
+ .width = static_cast<unsigned>(CVPixelBufferGetWidth(m_lastPixelBuffer.get())),
+ .height = static_cast<unsigned>(CVPixelBufferGetHeight(m_lastPixelBuffer.get())),
+ .presentedFrames = static_cast<unsigned>(++m_sampleCount),
+ .mediaTime = entry.displayTime.toDouble(),
+ .presentationTime = presentationTime,
+ .expectedDisplayTime = presentationTime,
+ };
+ }
+
if (m_imageRotationSession)
m_lastPixelBuffer = m_imageRotationSession->rotate(m_lastPixelBuffer.get());
@@ -2583,7 +2574,7 @@
if (!m_videoOutput)
createVideoOutput();
- return [m_videoOutput hasNewPixelBufferForItemTime:[m_avPlayerItem currentTime]];
+ return m_videoOutput->hasImageForTime(PAL::toMediaTime([m_avPlayerItem currentTime]));
}
void MediaPlayerPrivateAVFoundationObjC::updateLastImage(UpdateType type)
@@ -2601,7 +2592,7 @@
// Calls to copyPixelBufferForItemTime:itemTimeForDisplay: may return nil if the pixel buffer
// for the requested time has already been retrieved. In this case, the last valid image (if any)
// should be displayed.
- if ((m_isGatheringVideoFrameMetadata || !updateLastPixelBuffer()) && (m_lastImage || !m_lastPixelBuffer))
+ if (!updateLastPixelBuffer() && (m_lastImage || !m_lastPixelBuffer))
return;
if (!m_pixelBufferConformer) {
@@ -2636,8 +2627,7 @@
RetainPtr<CVPixelBufferRef> MediaPlayerPrivateAVFoundationObjC::pixelBufferForCurrentTime()
{
- if (!m_isGatheringVideoFrameMetadata)
- updateLastPixelBuffer();
+ updateLastPixelBuffer();
return m_lastPixelBuffer;
}
@@ -2668,7 +2658,11 @@
std::optional<RunLoop::Timer<MediaPlayerPrivateAVFoundationObjC>> timeoutTimer;
if (!m_runLoopNestingLevel) {
- [m_videoOutput requestNotificationOfMediaDataChangeWithAdvanceInterval:0];
+ m_waitForVideoOutputMediaDataWillChangeObserver = WTF::makeUnique<Observer<void()>>([this, logIdentifier = LOGIDENTIFIER] () mutable {
+ if (m_runLoopNestingLevel)
+ RunLoop::main().stop();
+ });
+ m_videoOutput->addCurrentImageChangedObserver(*m_waitForVideoOutputMediaDataWillChangeObserver);
timeoutTimer.emplace(RunLoop::main(), [&] {
RunLoop::main().stop();
@@ -2695,15 +2689,7 @@
void MediaPlayerPrivateAVFoundationObjC::outputMediaDataWillChange()
{
- if (m_runLoopNestingLevel) {
- if (RunLoop::isMain())
- RunLoop::main().stop();
- else {
- RunLoop::main().dispatch([] {
- RunLoop::main().stop();
- });
- }
- }
+ checkNewVideoFrameMetadata();
}
#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
@@ -4088,37 +4074,4 @@
@end
-@implementation WebCoreAVFPullDelegate {
- WeakPtr<WebCore::MediaPlayerPrivateAVFoundationObjC> _player;
-}
-
-@synthesize semaphore = m_semaphore;
-
-- (id)initWithPlayer:(WeakPtr<MediaPlayerPrivateAVFoundationObjC>&&)player
-{
- self = [super init];
- if (!self)
- return nil;
- _player = WTFMove(player);
- return self;
-}
-
-- (void)outputMediaDataWillChange:(AVPlayerItemVideoOutput *)output
-{
- UNUSED_PARAM(output);
- m_semaphore.signal();
- callOnMainThread([self, strongSelf = RetainPtr { self }] {
- if (_player)
- _player->outputMediaDataWillChange();
- });
-}
-
-- (void)outputSequenceWasFlushed:(AVPlayerItemVideoOutput *)output
-{
- UNUSED_PARAM(output);
- // No-op.
-}
-
-@end
-
#endif
Added: trunk/Source/WebCore/platform/graphics/avfoundation/objc/QueuedVideoOutput.h (0 => 288031)
--- trunk/Source/WebCore/platform/graphics/avfoundation/objc/QueuedVideoOutput.h (rev 0)
+++ trunk/Source/WebCore/platform/graphics/avfoundation/objc/QueuedVideoOutput.h 2022-01-14 21:58:40 UTC (rev 288031)
@@ -0,0 +1,97 @@
+/*
+ * Copyright (C) 2022 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
+ * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#pragma once
+
+#if ENABLE(VIDEO)
+
+#include <map>
+#include <wtf/Deque.h>
+#include <wtf/MediaTime.h>
+#include <wtf/Observer.h>
+#include <wtf/RefCounted.h>
+#include <wtf/RetainPtr.h>
+#include <wtf/WeakHashSet.h>
+
+OBJC_CLASS AVPlayer;
+OBJC_CLASS AVPlayerItem;
+OBJC_CLASS AVPlayerItemVideoOutput;
+OBJC_CLASS WebQueuedVideoOutputDelegate;
+
+typedef struct __CVBuffer *CVPixelBufferRef;
+
+namespace WebCore {
+
// QueuedVideoOutput wraps an AVPlayerItemVideoOutput, maintaining a main-thread
// queue of decoded pixel buffers keyed by display time, and notifies one-shot
// observers when the image for the player's current time changes.
class QueuedVideoOutput
    : public RefCounted<QueuedVideoOutput>
    , public CanMakeWeakPtr<QueuedVideoOutput> {
    WTF_MAKE_FAST_ALLOCATED;
public:
    static Ref<QueuedVideoOutput> create(AVPlayerItem*, AVPlayer*);
    ~QueuedVideoOutput();

    // Detaches from the player and item and removes all observers; called by the
    // destructor, and safe to call explicitly beforehand.
    void invalidate();
    bool hasImageForTime(const MediaTime&) const;

    // A decoded frame paired with the earliest time at which it should be displayed.
    // Frames have no duration; each is valid until a later frame's displayTime.
    struct VideoFrameEntry {
        RetainPtr<CVPixelBufferRef> pixelBuffer;
        MediaTime displayTime;
    };
    // Returns the frame displayable at the given time ({ nullptr, invalidTime() }
    // when none), and purges all earlier frames from the queue.
    VideoFrameEntry takeVideoFrameEntryForTime(const MediaTime&);

    void addVideoFrameEntries(Vector<VideoFrameEntry>&&);
    void purgeVideoFrameEntries();

    // Observers fire once — the next time the current image changes — then are dropped.
    using CurrentImageChangedObserver = Observer<void()>;
    void addCurrentImageChangedObserver(const CurrentImageChangedObserver&);

    // Ordered by display time; std::map supports the upper_bound() lookups used
    // to find the frame valid at a given time.
    using ImageMap = std::map<MediaTime, RetainPtr<CVPixelBufferRef>>;

    void rateChanged(float);

private:
    QueuedVideoOutput(AVPlayerItem*, AVPlayer*);

    void purgeImagesBeforeTime(const MediaTime&);
    void configureNextImageTimeObserver();
    void cancelNextImageTimeObserver();
    void nextImageTimeReached();

    RetainPtr<AVPlayerItem> m_playerItem;
    RetainPtr<AVPlayer> m_player;
    RetainPtr<WebQueuedVideoOutputDelegate> m_delegate;
    RetainPtr<AVPlayerItemVideoOutput> m_videoOutput;
    // Periodic (1/60 s) time observer token; see the constructor.
    RetainPtr<id> m_videoTimebaseObserver;
    // Boundary time observer token armed at the next frame's display time.
    RetainPtr<id> m_nextImageTimebaseObserver;

    ImageMap m_videoFrames;
    WeakHashSet<CurrentImageChangedObserver> m_currentImageChangedObservers;

    // Tracks the player's KVO-observed "rate"; true when rate is zero.
    bool m_paused { true };
};
+
+}
+
+#endif
Added: trunk/Source/WebCore/platform/graphics/avfoundation/objc/QueuedVideoOutput.mm (0 => 288031)
--- trunk/Source/WebCore/platform/graphics/avfoundation/objc/QueuedVideoOutput.mm (rev 0)
+++ trunk/Source/WebCore/platform/graphics/avfoundation/objc/QueuedVideoOutput.mm 2022-01-14 21:58:40 UTC (rev 288031)
@@ -0,0 +1,308 @@
+/*
+ * Copyright (C) 2022 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
+ * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "config.h"
+#include "QueuedVideoOutput.h"
+
+#if ENABLE(VIDEO)
+
+#include <AVFoundation/AVPlayerItemOutput.h>
+#include <pal/avfoundation/MediaTimeAVFoundation.h>
+#include <pal/spi/cocoa/AVFoundationSPI.h>
+#include <wtf/text/StringConcatenateNumbers.h>
+
+#include <pal/cf/CoreMediaSoftLink.h>
+#include <pal/cocoa/AVFoundationSoftLink.h>
+
// Receives AVPlayerItemVideoOutput pull-delegate callbacks and "rate" KVO
// notifications on a background queue and forwards them to the owning
// QueuedVideoOutput on the main run loop. Holds its parent weakly.
@interface WebQueuedVideoOutputDelegate : NSObject<AVPlayerItemOutputPullDelegate> {
    WeakPtr<WebCore::QueuedVideoOutput> _parent;
}
- (id)initWithParent:(WebCore::QueuedVideoOutput*)parent;
- (void)outputMediaDataWillChange:(AVPlayerItemOutput *)output;
- (void)outputSequenceWasFlushed:(AVPlayerItemOutput *)output;
// Typed (NSString *)keyPath to match NSObject's KVO override signature; the
// original declaration left the parameter untyped (implicitly id).
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void*)context;
@end
+
@implementation WebQueuedVideoOutputDelegate
- (id)initWithParent:(WebCore::QueuedVideoOutput*)parent
{
    self = [super init];
    if (!self)
        return nil;

    _parent = parent;
    return self;
}

// Called on the delegate queue when new pixel buffers may be available. Drains
// every available buffer from the output, then hands the batch to the parent on
// the main run loop.
- (void)outputMediaDataWillChange:(AVPlayerItemOutput *)output
{
    ASSERT([output isKindOfClass:PAL::getAVPlayerItemVideoOutputClass()]);
    auto* videoOutput = (AVPlayerItemVideoOutput*)output;

    Vector<WebCore::QueuedVideoOutput::VideoFrameEntry> videoFrameEntries;
    do {
        // Invalid time means the output has no further buffers queued.
        CMTime earliestTime = [videoOutput earliestAvailablePixelBufferItemTime];
        if (CMTIME_IS_INVALID(earliestTime))
            break;

        auto pixelBuffer = adoptCF([videoOutput copyPixelBufferForItemTime:earliestTime itemTimeForDisplay:nil]);
        if (!pixelBuffer)
            break;

        videoFrameEntries.append({ WTFMove(pixelBuffer), PAL::toMediaTime(earliestTime) });
    } while (true);

    if (videoFrameEntries.isEmpty())
        return;

    callOnMainRunLoop([videoFrameEntries = WTFMove(videoFrameEntries), parent = _parent] () mutable {
        if (parent)
            parent->addVideoFrameEntries(WTFMove(videoFrameEntries));
    });
}

// Called on the delegate queue after the output's sequence is flushed (e.g. a
// seek); previously queued frames are stale, so have the parent purge them.
- (void)outputSequenceWasFlushed:(AVPlayerItemOutput *)output
{
    ASSERT([output isKindOfClass:PAL::getAVPlayerItemVideoOutputClass()]);
    auto* videoOutput = (AVPlayerItemVideoOutput*)output;
    [videoOutput requestNotificationOfMediaDataChangeAsSoonAsPossible];

    callOnMainRunLoop([parent = _parent] {
        if (parent)
            parent->purgeVideoFrameEntries();
    });
}

// KVO callback; this delegate is registered only for the player's "rate" key.
// Typed (NSString *)keyPath to match NSObject's override signature; the original
// left it untyped (implicitly id).
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void*)context
{
    if (![keyPath isEqualToString:@"rate"])
        return;

    auto rateValue = (NSNumber*)[change valueForKey:NSKeyValueChangeNewKey];
    ASSERT([rateValue isKindOfClass:NSNumber.class]);
    auto rate = rateValue.floatValue;

    ensureOnMainRunLoop([parent = _parent, rate] {
        if (parent)
            parent->rateChanged(rate);
    });
}
@end
+
+namespace WebCore {
+
// Shared serial queue on which all WebQueuedVideoOutputDelegate callbacks and
// player time observers are scheduled. Created once, lazily.
static dispatch_queue_t globalOutputDelegateQueue()
{
    static dispatch_once_t onceToken;
    static dispatch_queue_t queue;
    dispatch_once(&onceToken, ^{
        queue = dispatch_queue_create("WebQueuedVideoOutputDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return queue;
}
+
// Factory method; the constructor is private.
Ref<QueuedVideoOutput> QueuedVideoOutput::create(AVPlayerItem* item, AVPlayer* player)
{
    auto* output = new QueuedVideoOutput(item, player);
    return adoptRef(*output);
}
+
// Creates an AVPlayerItemVideoOutput, attaches it to the item, and starts
// observing the player's rate (KVO) and timeline (periodic time observer).
QueuedVideoOutput::QueuedVideoOutput(AVPlayerItem* item, AVPlayer* player)
    : m_playerItem(item)
    , m_player(player)
    , m_delegate(adoptNS([[WebQueuedVideoOutputDelegate alloc] initWithParent:this]))
{
    // nil pixel-buffer attributes: accept frames in whatever format the output produces.
    m_videoOutput = adoptNS([PAL::allocAVPlayerItemVideoOutputInstance() initWithPixelBufferAttributes:nil]);
    [m_videoOutput setDelegate:m_delegate.get() queue:globalOutputDelegateQueue()];
    [m_videoOutput requestNotificationOfMediaDataChangeAsSoonAsPossible];

    [m_playerItem addOutput:m_videoOutput.get()];

    // NSKeyValueObservingOptionInitial delivers the current rate immediately,
    // seeding m_paused via rateChanged().
    [m_player addObserver:m_delegate.get() forKeyPath:@"rate" options:(NSKeyValueObservingOptionNew | NSKeyValueObservingOptionInitial) context:nil];

    // Fires at 1/60 s intervals on the delegate queue. The delegate and output are
    // captured strongly so the block stays valid even if `this` is destroyed first;
    // `this` itself is captured weakly.
    m_videoTimebaseObserver = [m_player addPeriodicTimeObserverForInterval:PAL::CMTimeMake(1, 60) queue:globalOutputDelegateQueue() usingBlock:[weakThis = WeakPtr { *this }, protectedDelegate = m_delegate, protectedOutput = m_videoOutput](CMTime currentTime) mutable {

        // Periodically check for new available pixel buffers.
        [protectedDelegate outputMediaDataWillChange:protectedOutput.get()];

        // And purge the back buffer of past frames.
        callOnMainRunLoop([weakThis = weakThis, time = PAL::toMediaTime(currentTime)] {
            if (weakThis)
                weakThis->purgeImagesBeforeTime(time);
        });
    }];
}
+
// Tears down all observers and output attachments; invalidate() is idempotent,
// so an earlier explicit call is harmless.
QueuedVideoOutput::~QueuedVideoOutput()
{
    invalidate();
}
+
// Detaches from the player and item. Ordering matters: the time observers are
// removed before m_player is cleared, and the output is removed from the item
// before its delegate is cleared. Each step is guarded, making repeated calls
// no-ops.
void QueuedVideoOutput::invalidate()
{
    if (m_videoTimebaseObserver) {
        [m_player removeTimeObserver:m_videoTimebaseObserver.get()];
        m_videoTimebaseObserver = nil;
    }

    cancelNextImageTimeObserver();

    if (m_videoOutput) {
        [m_playerItem removeOutput:m_videoOutput.get()];
        [m_videoOutput setDelegate:nil queue:nil];
        m_videoOutput = nil;
    }

    if (m_player) {
        [m_player removeObserver:m_delegate.get() forKeyPath:@"rate"];
        m_player = nil;
    }

    m_playerItem = nil;
}
+
// Outputs of AVPlayerItemVideoOutput have a display time, but not a duration; they are valid
// until the arrival of a later frame. Find this frame efficiently using map::upper_bound to
// locate the first frame strictly after `time`, then walk backwards by one to the frame
// valid at the target time. Returns map.end() when the map is empty or every frame starts
// after `time`.
//
// Generalized over the map's key type (was hard-coded to MediaTime) so any ordered map can
// be searched; ImageMap callers are unaffected. Also uses std::declval for the iterator
// default so the map type need not be default-constructible.
template <typename MaybeConstMap, typename MaybeConstIter = decltype(std::declval<MaybeConstMap&>().begin())>
MaybeConstIter findImageForTime(MaybeConstMap& map, const typename MaybeConstMap::key_type& time)
{
    if (map.empty())
        return map.end();

    auto iter = map.upper_bound(time);
    if (iter == map.begin())
        return map.end();

    return --iter;
}
+
// True when a queued frame is displayable at the given time.
bool QueuedVideoOutput::hasImageForTime(const MediaTime& time) const
{
    auto iter = findImageForTime(m_videoFrames, time);
    return iter != m_videoFrames.end();
}
+
// Removes and returns the frame displayable at `time` along with its display
// time, or { nullptr, invalidTime() } when no queued frame is valid then.
auto QueuedVideoOutput::takeVideoFrameEntryForTime(const MediaTime& time) -> VideoFrameEntry
{
    auto iter = findImageForTime(m_videoFrames, time);
    if (iter == m_videoFrames.end())
        return { nullptr, MediaTime::invalidTime() };

    VideoFrameEntry entry = { WTFMove(iter->second), iter->first };

    // Purge all frames before `time`, so that repeated calls with the same time don't return
    // successively earlier images.
    m_videoFrames.erase(m_videoFrames.begin(), ++iter);

    return entry;
}
+
// Registers a one-shot observer to fire when the image for the player's current
// time next changes.
void QueuedVideoOutput::addCurrentImageChangedObserver(const CurrentImageChangedObserver& observer)
{
    m_currentImageChangedObservers.add(observer);

    // Ask the output for more frames, and arm a boundary observer for the time
    // at which the current image will next change.
    [m_videoOutput requestNotificationOfMediaDataChangeAsSoonAsPossible];
    configureNextImageTimeObserver();
}
+
// Installs a boundary time observer firing when playback reaches the display
// time of the frame after the one currently valid — i.e. the moment the current
// image changes. No-op when an observer is already armed, or when no later frame
// is queued yet (addVideoFrameEntries() retries as frames arrive).
void QueuedVideoOutput::configureNextImageTimeObserver()
{
    if (m_nextImageTimebaseObserver)
        return;

    auto currentTime = PAL::toMediaTime([m_player currentTime]);
    auto iter = findImageForTime(m_videoFrames, currentTime);
    if (iter == m_videoFrames.end() || ++iter == m_videoFrames.end())
        return;

    auto nextImageTime = iter->first;

    // FIX: observe nextImageTime, not currentTime. The current image changes at
    // the *next* frame's display time; a boundary observer at currentTime fires
    // immediately (or never), and nextImageTime was computed but unused.
    m_nextImageTimebaseObserver = [m_player addBoundaryTimeObserverForTimes:@[[NSValue valueWithCMTime:PAL::toCMTime(nextImageTime)]] queue:globalOutputDelegateQueue() usingBlock:[weakThis = WeakPtr { *this }, protectedDelegate = m_delegate, protectedOutput = m_videoOutput] () mutable {
        callOnMainRunLoop([weakThis = WTFMove(weakThis)] {
            if (weakThis)
                weakThis->nextImageTimeReached();
        });
    }];
}
+
// Removes any pending boundary time observer installed by
// configureNextImageTimeObserver().
void QueuedVideoOutput::cancelNextImageTimeObserver()
{
    if (m_nextImageTimebaseObserver) {
        [m_player removeTimeObserver:m_nextImageTimebaseObserver.get()];
        m_nextImageTimebaseObserver = nil;
    }
}
+
// The image for the current time has changed: cancel the pending boundary
// observer first so no stale callback can fire, then notify and drop every
// registered one-shot observer.
void QueuedVideoOutput::nextImageTimeReached()
{
    cancelNextImageTimeObserver();

    auto notifiedObservers = std::exchange(m_currentImageChangedObservers, { });
    notifiedObservers.forEach([](auto& observer) {
        observer();
    });
}
+
// Main-thread sink for frames drained from the output by the delegate. Inserts
// each frame into the display-time-ordered map, then decides whether registered
// current-image observers should fire now or be armed for a future display time.
void QueuedVideoOutput::addVideoFrameEntries(Vector<VideoFrameEntry>&& videoFrameEntries)
{
    bool needsImageForCurrentTimeChanged = false;
    bool hasCurrentImageChangedObservers = m_currentImageChangedObservers.computeSize();
    // Only query the player's current time when someone is actually observing.
    MediaTime currentTime = hasCurrentImageChangedObservers ? PAL::toMediaTime([m_player currentTime]) : MediaTime::invalidTime();

    for (auto& entry : videoFrameEntries) {
        // A frame whose display time has already been reached changes the image
        // for the current time immediately.
        if (hasCurrentImageChangedObservers && entry.displayTime <= currentTime)
            needsImageForCurrentTimeChanged = true;
        m_videoFrames.emplace(entry.displayTime, WTFMove(entry.pixelBuffer));
    }

    if (needsImageForCurrentTimeChanged)
        nextImageTimeReached();
    else if (hasCurrentImageChangedObservers)
        configureNextImageTimeObserver();

    // NOTE(review): presumably re-arms the output while paused because the
    // periodic time observer does not pull frames then — confirm.
    if (m_paused)
        [m_videoOutput requestNotificationOfMediaDataChangeAsSoonAsPossible];
}
+
// Drops every queued frame (used after the output sequence is flushed, e.g. on
// seek) along with any boundary observer armed against the old timeline.
void QueuedVideoOutput::purgeVideoFrameEntries()
{
    cancelNextImageTimeObserver();
    m_videoFrames.clear();
}
+
// Erases frames that can no longer be displayed: everything strictly before the
// frame valid at `time`, which itself is kept.
void QueuedVideoOutput::purgeImagesBeforeTime(const MediaTime& time)
{
    auto firstStillNeeded = findImageForTime(m_videoFrames, time);
    m_videoFrames.erase(m_videoFrames.begin(), firstStillNeeded);
}
+
// Main-thread continuation of the "rate" KVO notification; a rate of zero means
// paused. When pausing, re-arm the output's data-ready notification.
void QueuedVideoOutput::rateChanged(float rate)
{
    m_paused = !rate;
    if (!m_paused)
        return;

    [m_videoOutput requestNotificationOfMediaDataChangeAsSoonAsPossible];
}
+
+}
+
+#endif
+