Title: [294058] trunk/Source
Revision: 294058
Author: [email protected]
Date: 2022-05-11 10:44:40 -0700 (Wed, 11 May 2022)

Log Message

MediaPlayerPrivateRemote::nativeImageForCurrentTime should not use IOSurfaces
https://bugs.webkit.org/show_bug.cgi?id=240226
<rdar://problem/92969905>

Reviewed by Eric Carlson.

Source/WebCore:

Add a way for MediaPlayer to pass the VideoFrame associated with the given frame metadata (a sketch of the updated callback follows the file list below).

Covered by existing tests, in particular:
- LayoutTests/media/video-canvas-createPattern.html
- LayoutTests/media/video-canvas-drawing.html
- LayoutTests/media/video-canvas-drawing-output.html

* platform/graphics/MediaPlayer.cpp:
* platform/graphics/MediaPlayer.h:
* platform/graphics/avfoundation/objc/MediaPlayerPrivateAVFoundationObjC.mm:
* platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaSourceAVFObjC.mm:
* platform/mediastream/libwebrtc/VideoFrameLibWebRTC.h:
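
A rough sketch of the new callback shape, for illustration only: MediaPlayerPrivateExample and
checkNewVideoFrameMetadata() are hypothetical stand-ins for the AVFoundation players touched above,
which already track m_lastPixelBuffer and m_videoFrameMetadata.

    void MediaPlayerPrivateExample::checkNewVideoFrameMetadata()
    {
        // Nothing to report unless a new pixel buffer or pending metadata exists.
        if (!updateLastPixelBuffer() && !m_videoFrameMetadata)
            return;

        // The client now receives the CVPixelBuffer backing the metadata, so the
        // WebProcess can build a VideoFrame later without going through an IOSurface.
        player()->onNewVideoFrameMetadata(WTFMove(*m_videoFrameMetadata), m_lastPixelBuffer.get());
    }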

Source/WebKit:

On the GPUProcess side, when notified of a new frame, RemoteMediaPlayerProxy::mediaPlayerOnNewVideoFrameMetadata
creates a remote video frame and sends it to MediaPlayerPrivateRemote::pushVideoFrameMetadata on the WebProcess side.
When asked to create a NativeImage, MediaPlayerPrivateRemote can convert that remote frame to a NativeImage
through its videoFrameObjectHeapProxy.
If no remote video frame is available, it fetches one through videoFrameForCurrentTime and converts it the same way.
That fallback path costs two IPCs; we should probably remove these two IPCs and use VideoFrame as much as we can in follow-up patches.
A simplified sketch of the WebProcess side follows the file list below.

* GPUProcess/media/RemoteMediaPlayerProxy.h:
* GPUProcess/media/RemoteVideoFrameObjectHeap.cpp:
* GPUProcess/media/RemoteVideoFrameObjectHeap.h:
* GPUProcess/media/RemoteVideoFrameObjectHeap.messages.in:
* GPUProcess/media/cocoa/RemoteMediaPlayerProxyCocoa.mm:
* WebProcess/GPU/media/MediaPlayerPrivateRemote.cpp:
* WebProcess/GPU/media/MediaPlayerPrivateRemote.h:
* WebProcess/GPU/media/MediaPlayerPrivateRemote.messages.in:
* WebProcess/GPU/media/cocoa/MediaPlayerPrivateRemoteCocoa.mm:
* WebProcess/GPU/webrtc/RemoteVideoFrameObjectHeapProxyProcessor.cpp:
* WebProcess/GPU/webrtc/SharedVideoFrame.cpp:
* WebProcess/GPU/webrtc/SharedVideoFrame.h:
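
The WebProcess half of that flow, simplified from the MediaPlayerPrivateRemoteCocoa.mm hunk further down
(member and helper names are the ones introduced by this patch):

    RefPtr<NativeImage> MediaPlayerPrivateRemote::nativeImageForCurrentTime()
    {
        if (readyState() < MediaPlayer::ReadyState::HaveCurrentData)
            return { };

        // Prefer the remote frame pushed along with the last frame metadata; otherwise fall
        // back to a synchronous videoFrameForCurrentTime() request (the first extra IPC).
        auto videoFrame = m_videoFrameGatheredWithVideoFrameMetadata ? RefPtr<WebCore::VideoFrame>(m_videoFrameGatheredWithVideoFrameMetadata) : videoFrameForCurrentTime();
        if (!videoFrame)
            return nullptr;

        // Conversion to a NativeImage happens in the GPUProcess via the video frame
        // object heap (the second IPC on the fallback path).
        return WebProcess::singleton().ensureGPUProcessConnection().videoFrameObjectHeapProxy().getNativeImage(*videoFrame);
    }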

Diff

Modified: trunk/Source/WebCore/ChangeLog (294057 => 294058)


--- trunk/Source/WebCore/ChangeLog	2022-05-11 17:20:36 UTC (rev 294057)
+++ trunk/Source/WebCore/ChangeLog	2022-05-11 17:44:40 UTC (rev 294058)
@@ -1,5 +1,26 @@
 2022-05-11  Youenn Fablet  <[email protected]>
 
+        MediaPlayerPrivateRemote::nativeImageForCurrentTime should not use IOSurfaces
+        https://bugs.webkit.org/show_bug.cgi?id=240226
+        <rdar://problem/92969905>
+
+        Reviewed by Eric Carlson.
+
+        Add a way for MediaPlayer to pass the VideoFrame related to the given frame metadata.
+
+        Covered by existing tests, in particular:
+        - LayoutTests/media/video-canvas-createPattern.html
+        - LayoutTests/media/video-canvas-drawing.html
+        - LayoutTests/media/video-canvas-drawing-output.html
+
+        * platform/graphics/MediaPlayer.cpp:
+        * platform/graphics/MediaPlayer.h:
+        * platform/graphics/avfoundation/objc/MediaPlayerPrivateAVFoundationObjC.mm:
+        * platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaSourceAVFObjC.mm:
+        * platform/mediastream/libwebrtc/VideoFrameLibWebRTC.h:
+
+2022-05-11  Youenn Fablet  <[email protected]>
+
         Introduce a canvas-drawImage specific method to get a NativeImage from a video element
         https://bugs.webkit.org/show_bug.cgi?id=240275
 

Modified: trunk/Source/WebCore/platform/graphics/MediaPlayer.cpp (294057 => 294058)


--- trunk/Source/WebCore/platform/graphics/MediaPlayer.cpp	2022-05-11 17:20:36 UTC (rev 294057)
+++ trunk/Source/WebCore/platform/graphics/MediaPlayer.cpp	2022-05-11 17:44:40 UTC (rev 294058)
@@ -1785,9 +1785,9 @@
 }
 
 #if PLATFORM(COCOA)
-void MediaPlayer::onNewVideoFrameMetadata(VideoFrameMetadata&& metadata)
+void MediaPlayer::onNewVideoFrameMetadata(VideoFrameMetadata&& metadata, RetainPtr<CVPixelBufferRef>&& buffer)
 {
-    client().mediaPlayerOnNewVideoFrameMetadata(WTFMove(metadata));
+    client().mediaPlayerOnNewVideoFrameMetadata(WTFMove(metadata), WTFMove(buffer));
 }
 #endif
 

Modified: trunk/Source/WebCore/platform/graphics/MediaPlayer.h (294057 => 294058)


--- trunk/Source/WebCore/platform/graphics/MediaPlayer.h	2022-05-11 17:20:36 UTC (rev 294057)
+++ trunk/Source/WebCore/platform/graphics/MediaPlayer.h	2022-05-11 17:44:40 UTC (rev 294058)
@@ -288,7 +288,7 @@
     virtual void mediaPlayerQueueTaskOnEventLoop(Function<void()>&& task) { callOnMainThread(WTFMove(task)); }
 
 #if PLATFORM(COCOA)
-    virtual void mediaPlayerOnNewVideoFrameMetadata(VideoFrameMetadata&&) { }
+    virtual void mediaPlayerOnNewVideoFrameMetadata(VideoFrameMetadata&&, RetainPtr<CVPixelBufferRef>&&) { }
 #endif
 
     virtual bool mediaPlayerPrefersSandboxedParsing() const { return false; }
@@ -591,7 +591,7 @@
     void removeVideoTrack(VideoTrackPrivate&);
 
 #if PLATFORM(COCOA)
-    void onNewVideoFrameMetadata(VideoFrameMetadata&&);
+    void onNewVideoFrameMetadata(VideoFrameMetadata&&, RetainPtr<CVPixelBufferRef>&&);
 #endif
 
     bool requiresTextTrackRepresentation() const;

Modified: trunk/Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateAVFoundationObjC.mm (294057 => 294058)


--- trunk/Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateAVFoundationObjC.mm	2022-05-11 17:20:36 UTC (rev 294057)
+++ trunk/Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateAVFoundationObjC.mm	2022-05-11 17:44:40 UTC (rev 294058)
@@ -1473,7 +1473,7 @@
     if (!updateLastPixelBuffer() && !m_videoFrameMetadata)
         return;
 
-    player()->onNewVideoFrameMetadata(WTFMove(*m_videoFrameMetadata));
+    player()->onNewVideoFrameMetadata(WTFMove(*m_videoFrameMetadata), m_lastPixelBuffer.get());
 }
 
 void MediaPlayerPrivateAVFoundationObjC::stopVideoFrameMetadataGathering()

Modified: trunk/Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaSourceAVFObjC.mm (294057 => 294058)


--- trunk/Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaSourceAVFObjC.mm	2022-05-11 17:20:36 UTC (rev 294057)
+++ trunk/Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaSourceAVFObjC.mm	2022-05-11 17:44:40 UTC (rev 294058)
@@ -1429,7 +1429,7 @@
     metadata.presentationTime = PAL::CMTimeGetSeconds(currentTime);
 
     m_videoFrameMetadata = metadata;
-    m_player->onNewVideoFrameMetadata(WTFMove(metadata));
+    m_player->onNewVideoFrameMetadata(WTFMove(metadata), m_lastPixelBuffer.get());
 }
 
 void MediaPlayerPrivateMediaSourceAVFObjC::stopVideoFrameMetadataGathering()

Modified: trunk/Source/WebCore/platform/mediastream/libwebrtc/VideoFrameLibWebRTC.h (294057 => 294058)


--- trunk/Source/WebCore/platform/mediastream/libwebrtc/VideoFrameLibWebRTC.h	2022-05-11 17:20:36 UTC (rev 294057)
+++ trunk/Source/WebCore/platform/mediastream/libwebrtc/VideoFrameLibWebRTC.h	2022-05-11 17:44:40 UTC (rev 294058)
@@ -44,7 +44,7 @@
     using ConversionCallback = Function<RetainPtr<CVPixelBufferRef>(webrtc::VideoFrameBuffer&)>;
     static Ref<VideoFrameLibWebRTC> create(MediaTime, bool isMirrored, Rotation, rtc::scoped_refptr<webrtc::VideoFrameBuffer>&&, ConversionCallback&&);
 
-    rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer() { return m_buffer; }
+    rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer() const { return m_buffer; }
 
 private:
     VideoFrameLibWebRTC(MediaTime, bool isMirrored, Rotation, rtc::scoped_refptr<webrtc::VideoFrameBuffer>&&, ConversionCallback&&);

Modified: trunk/Source/WebKit/ChangeLog (294057 => 294058)


--- trunk/Source/WebKit/ChangeLog	2022-05-11 17:20:36 UTC (rev 294057)
+++ trunk/Source/WebKit/ChangeLog	2022-05-11 17:44:40 UTC (rev 294058)
@@ -1,5 +1,34 @@
 2022-05-11  Youenn Fablet  <[email protected]>
 
+        MediaPlayerPrivateRemote::nativeImageForCurrentTime should not use IOSurfaces
+        https://bugs.webkit.org/show_bug.cgi?id=240226
+        <rdar://problem/92969905>
+
+        Reviewed by Eric Carlson.
+
+        On GPUProcess side, when receiving notification of a new frame, create a remote frame
+        in RemoteMediaPlayerProxy::mediaPlayerOnNewVideoFrameMetadata
+        and send it to MediaPlayerPrivateRemote::pushVideoFrameMetadata on WebProcess side.
+        When being asked to create a NativeImage, MediaPlayerPrivateRemote can use that remote image
+        to get a NativeImage using its videoFrameObjectHeapProxy.
+        If the remote video frame is not available, get it through videoFrameForCurrentTime and convert it in the same way.
+        This creates in that case a double IPC. We should probably remove these two IPCs and use VideoFrame as much as we can in follow-up patches.
+
+        * GPUProcess/media/RemoteMediaPlayerProxy.h:
+        * GPUProcess/media/RemoteVideoFrameObjectHeap.cpp:
+        * GPUProcess/media/RemoteVideoFrameObjectHeap.h:
+        * GPUProcess/media/RemoteVideoFrameObjectHeap.messages.in:
+        * GPUProcess/media/cocoa/RemoteMediaPlayerProxyCocoa.mm:
+        * WebProcess/GPU/media/MediaPlayerPrivateRemote.cpp:
+        * WebProcess/GPU/media/MediaPlayerPrivateRemote.h:
+        * WebProcess/GPU/media/MediaPlayerPrivateRemote.messages.in:
+        * WebProcess/GPU/media/cocoa/MediaPlayerPrivateRemoteCocoa.mm:
+        * WebProcess/GPU/webrtc/RemoteVideoFrameObjectHeapProxyProcessor.cpp:
+        * WebProcess/GPU/webrtc/SharedVideoFrame.cpp:
+        * WebProcess/GPU/webrtc/SharedVideoFrame.h:
+
+2022-05-11  Youenn Fablet  <[email protected]>
+
         Introduce a canvas-drawImage specific method to get a NativeImage from a video element
         https://bugs.webkit.org/show_bug.cgi?id=240275
 

Modified: trunk/Source/WebKit/GPUProcess/media/RemoteMediaPlayerProxy.h (294057 => 294058)


--- trunk/Source/WebKit/GPUProcess/media/RemoteMediaPlayerProxy.h	2022-05-11 17:20:36 UTC (rev 294057)
+++ trunk/Source/WebKit/GPUProcess/media/RemoteMediaPlayerProxy.h	2022-05-11 17:44:40 UTC (rev 294058)
@@ -329,7 +329,7 @@
     void startVideoFrameMetadataGathering();
     void stopVideoFrameMetadataGathering();
 #if PLATFORM(COCOA)
-    void mediaPlayerOnNewVideoFrameMetadata(WebCore::VideoFrameMetadata&&);
+    void mediaPlayerOnNewVideoFrameMetadata(WebCore::VideoFrameMetadata&&, RetainPtr<CVPixelBufferRef>&&);
 #endif
 
     void playerContentBoxRectChanged(const WebCore::LayoutRect&);

Modified: trunk/Source/WebKit/GPUProcess/media/RemoteVideoFrameObjectHeap.cpp (294057 => 294058)


--- trunk/Source/WebKit/GPUProcess/media/RemoteVideoFrameObjectHeap.cpp	2022-05-11 17:20:36 UTC (rev 294057)
+++ trunk/Source/WebKit/GPUProcess/media/RemoteVideoFrameObjectHeap.cpp	2022-05-11 17:44:40 UTC (rev 294058)
@@ -35,6 +35,7 @@
 #include <wtf/WorkQueue.h>
 
 #if PLATFORM(COCOA)
+#include <WebCore/ColorSpaceCG.h>
 #include <WebCore/PixelBufferConformerCV.h>
 #include <WebCore/VideoFrameCV.h>
 #include <pal/cf/CoreMediaSoftLink.h>
@@ -137,20 +138,29 @@
     completionHandler(WTFMove(pixelBuffer));
 }
 
-void RemoteVideoFrameObjectHeap::convertBuffer(SharedVideoFrame::Buffer&& buffer, CompletionHandler<void()>&& callback)
+void RemoteVideoFrameObjectHeap::convertFrameBuffer(SharedVideoFrame&& sharedVideoFrame, CompletionHandler<void(WebCore::DestinationColorSpace)>&& callback)
 {
-    auto scope = makeScopeExit([&callback] { callback(); });
+    DestinationColorSpace destinationColorSpace { DestinationColorSpace::SRGB().platformColorSpace() };
+    auto scope = makeScopeExit([&callback, &destinationColorSpace] { callback(destinationColorSpace); });
 
-    auto pixelBuffer = m_sharedVideoFrameReader.readBuffer(WTFMove(buffer));
-    if (!pixelBuffer) {
+    RefPtr<VideoFrame> frame;
+    if (std::holds_alternative<RemoteVideoFrameReadReference>(sharedVideoFrame.buffer))
+        frame = get(WTFMove(std::get<RemoteVideoFrameReadReference>(sharedVideoFrame.buffer)));
+    else
+        frame = m_sharedVideoFrameReader.read(WTFMove(sharedVideoFrame));
+
+    if (!frame) {
         m_connection->send(Messages::RemoteVideoFrameObjectHeapProxyProcessor::NewConvertedVideoFrameBuffer { { } }, 0);
         return;
     }
 
+    RetainPtr<CVPixelBufferRef> buffer = frame->pixelBuffer();
+    destinationColorSpace = DestinationColorSpace(createCGColorSpaceForCVPixelBuffer(buffer.get()));
+
     createPixelConformerIfNeeded();
-    auto convertedBuffer = m_pixelBufferConformer->convert(pixelBuffer.get());
+    auto convertedBuffer = m_pixelBufferConformer->convert(buffer.get());
     if (!convertedBuffer) {
-        RELEASE_LOG_ERROR(WebRTC, "RemoteVideoFrameObjectHeap::convertBuffer conformer failed");
+        RELEASE_LOG_ERROR(WebRTC, "RemoteVideoFrameObjectHeap::convertFrameBuffer conformer failed");
         m_connection->send(Messages::RemoteVideoFrameObjectHeapProxyProcessor::NewConvertedVideoFrameBuffer { { } }, 0);
         return;
     }

Modified: trunk/Source/WebKit/GPUProcess/media/RemoteVideoFrameObjectHeap.h (294057 => 294058)


--- trunk/Source/WebKit/GPUProcess/media/RemoteVideoFrameObjectHeap.h	2022-05-11 17:20:36 UTC (rev 294057)
+++ trunk/Source/WebKit/GPUProcess/media/RemoteVideoFrameObjectHeap.h	2022-05-11 17:44:40 UTC (rev 294058)
@@ -29,6 +29,7 @@
 #include "Connection.h"
 #include "RemoteVideoFrameProxy.h"
 #include "ThreadSafeObjectHeap.h"
+#include <WebCore/DestinationColorSpace.h>
 #include <WebCore/VideoFrame.h>
 
 #if PLATFORM(COCOA)
@@ -65,7 +66,7 @@
 #if PLATFORM(COCOA)
     void getVideoFrameBuffer(RemoteVideoFrameReadReference&&, bool canSendIOSurface);
     void pixelBuffer(RemoteVideoFrameReadReference&&, CompletionHandler<void(RetainPtr<CVPixelBufferRef>)>&&);
-    void convertBuffer(SharedVideoFrame::Buffer&&, CompletionHandler<void()>&&);
+    void convertFrameBuffer(SharedVideoFrame&&, CompletionHandler<void(WebCore::DestinationColorSpace)>&&);
     void setSharedVideoFrameSemaphore(IPC::Semaphore&&);
     void setSharedVideoFrameMemory(const SharedMemory::IPCHandle&);
 #endif

Modified: trunk/Source/WebKit/GPUProcess/media/RemoteVideoFrameObjectHeap.messages.in (294057 => 294058)


--- trunk/Source/WebKit/GPUProcess/media/RemoteVideoFrameObjectHeap.messages.in	2022-05-11 17:20:36 UTC (rev 294057)
+++ trunk/Source/WebKit/GPUProcess/media/RemoteVideoFrameObjectHeap.messages.in	2022-05-11 17:44:40 UTC (rev 294058)
@@ -28,7 +28,7 @@
     PixelBuffer(WebKit::RemoteVideoFrameReadReference read) -> (RetainPtr<CVPixelBufferRef> result) Synchronous
     SetSharedVideoFrameSemaphore(IPC::Semaphore semaphore)
     SetSharedVideoFrameMemory(WebKit::SharedMemory::IPCHandle storageHandle)
-    ConvertBuffer(struct WebKit::SharedVideoFrame::Buffer buffer) -> () Synchronous
+    ConvertFrameBuffer(struct WebKit::SharedVideoFrame frame) -> (WebCore::DestinationColorSpace space) Synchronous
 #endif
     ReleaseVideoFrame(WebKit::RemoteVideoFrameWriteReference write)
 }

Modified: trunk/Source/WebKit/GPUProcess/media/cocoa/RemoteMediaPlayerProxyCocoa.mm (294057 => 294058)


--- trunk/Source/WebKit/GPUProcess/media/cocoa/RemoteMediaPlayerProxyCocoa.mm	2022-05-11 17:20:36 UTC (rev 294057)
+++ trunk/Source/WebKit/GPUProcess/media/cocoa/RemoteMediaPlayerProxyCocoa.mm	2022-05-11 17:44:40 UTC (rev 294058)
@@ -34,6 +34,7 @@
 #import <QuartzCore/QuartzCore.h>
 #import <WebCore/FloatSize.h>
 #import <WebCore/IOSurface.h>
+#import <WebCore/VideoFrameCV.h>
 #import <wtf/MachSendRight.h>
 
 namespace WebKit {
@@ -89,9 +90,10 @@
     setVideoInlineSizeIfPossible(size);
 }
 
-void RemoteMediaPlayerProxy::mediaPlayerOnNewVideoFrameMetadata(VideoFrameMetadata&& metadata)
+void RemoteMediaPlayerProxy::mediaPlayerOnNewVideoFrameMetadata(VideoFrameMetadata&& metadata, RetainPtr<CVPixelBufferRef>&& buffer)
 {
-    m_webProcessConnection->send(Messages::MediaPlayerPrivateRemote::PushVideoFrameMetadata(metadata), m_id);
+    auto properties = m_videoFrameObjectHeap->add(WebCore::VideoFrameCV::create({ }, false, VideoFrame::Rotation::None, WTFMove(buffer)));
+    m_webProcessConnection->send(Messages::MediaPlayerPrivateRemote::PushVideoFrameMetadata(metadata, properties), m_id);
 }
 
 void RemoteMediaPlayerProxy::nativeImageForCurrentTime(CompletionHandler<void(std::optional<WTF::MachSendRight>&&, WebCore::DestinationColorSpace)>&& completionHandler)

Modified: trunk/Source/WebKit/WebProcess/GPU/media/MediaPlayerPrivateRemote.cpp (294057 => 294058)


--- trunk/Source/WebKit/WebProcess/GPU/media/MediaPlayerPrivateRemote.cpp	2022-05-11 17:20:36 UTC (rev 294057)
+++ trunk/Source/WebKit/WebProcess/GPU/media/MediaPlayerPrivateRemote.cpp	2022-05-11 17:44:40 UTC (rev 294058)
@@ -1431,6 +1431,9 @@
 void MediaPlayerPrivateRemote::stopVideoFrameMetadataGathering()
 {
     m_isGatheringVideoFrameMetadata = false;
+#if PLATFORM(COCOA)
+    m_videoFrameGatheredWithVideoFrameMetadata = nullptr;
+#endif
     connection().send(Messages::RemoteMediaPlayerProxy::StopVideoFrameMetadataGathering(), m_id);
 }
 

Modified: trunk/Source/WebKit/WebProcess/GPU/media/MediaPlayerPrivateRemote.h (294057 => 294058)


--- trunk/Source/WebKit/WebProcess/GPU/media/MediaPlayerPrivateRemote.h	2022-05-11 17:20:36 UTC (rev 294057)
+++ trunk/Source/WebKit/WebProcess/GPU/media/MediaPlayerPrivateRemote.h	2022-05-11 17:44:40 UTC (rev 294058)
@@ -417,7 +417,7 @@
     void playerContentBoxRectChanged(const WebCore::LayoutRect&) final;
 
 #if PLATFORM(COCOA)
-    void pushVideoFrameMetadata(WebCore::VideoFrameMetadata&&);
+    void pushVideoFrameMetadata(WebCore::VideoFrameMetadata&&, RemoteVideoFrameProxy::Properties&&);
 #endif
     RemoteVideoFrameObjectHeapProxy& videoFrameObjectHeapProxy() const { return m_manager.gpuProcessConnection().videoFrameObjectHeapProxy(); }
 
@@ -470,6 +470,9 @@
     std::optional<bool> m_shouldMaintainAspectRatio;
     std::optional<bool> m_pageIsVisible;
     RefPtr<RemoteVideoFrameProxy> m_videoFrameForCurrentTime;
+#if PLATFORM(COCOA)
+    RefPtr<RemoteVideoFrameProxy> m_videoFrameGatheredWithVideoFrameMetadata;
+#endif
     std::optional<WebCore::VideoFrameMetadata> m_videoFrameMetadata;
     bool m_isGatheringVideoFrameMetadata { false };
 };

Modified: trunk/Source/WebKit/WebProcess/GPU/media/MediaPlayerPrivateRemote.messages.in (294057 => 294058)


--- trunk/Source/WebKit/WebProcess/GPU/media/MediaPlayerPrivateRemote.messages.in	2022-05-11 17:20:36 UTC (rev 294057)
+++ trunk/Source/WebKit/WebProcess/GPU/media/MediaPlayerPrivateRemote.messages.in	2022-05-11 17:44:40 UTC (rev 294058)
@@ -93,7 +93,7 @@
 #endif
 
 #if PLATFORM(COCOA)
-    PushVideoFrameMetadata(struct WebCore::VideoFrameMetadata metadata);
+    PushVideoFrameMetadata(struct WebCore::VideoFrameMetadata metadata, WebKit::RemoteVideoFrameProxy::Properties frameProperties);
 #endif
 }
 

Modified: trunk/Source/WebKit/WebProcess/GPU/media/cocoa/MediaPlayerPrivateRemoteCocoa.mm (294057 => 294058)


--- trunk/Source/WebKit/WebProcess/GPU/media/cocoa/MediaPlayerPrivateRemoteCocoa.mm	2022-05-11 17:20:36 UTC (rev 294057)
+++ trunk/Source/WebKit/WebProcess/GPU/media/cocoa/MediaPlayerPrivateRemoteCocoa.mm	2022-05-11 17:44:40 UTC (rev 294058)
@@ -32,8 +32,6 @@
 #import "RemoteMediaPlayerProxyMessages.h"
 #import "WebCoreArgumentCoders.h"
 #import <WebCore/ColorSpaceCG.h>
-#import <WebCore/IOSurface.h>
-#import <WebCore/PixelBufferConformerCV.h>
 #import <pal/spi/cocoa/QuartzCoreSPI.h>
 #import <wtf/MachSendRight.h>
 
@@ -49,11 +47,13 @@
 }
 #endif
 
-void MediaPlayerPrivateRemote::pushVideoFrameMetadata(WebCore::VideoFrameMetadata&& videoFrameMetadata)
+void MediaPlayerPrivateRemote::pushVideoFrameMetadata(WebCore::VideoFrameMetadata&& videoFrameMetadata, RemoteVideoFrameProxy::Properties&& properties)
 {
+    auto videoFrame = RemoteVideoFrameProxy::create(connection(), videoFrameObjectHeapProxy(), WTFMove(properties));
     if (!m_isGatheringVideoFrameMetadata)
         return;
     m_videoFrameMetadata = WTFMove(videoFrameMetadata);
+    m_videoFrameGatheredWithVideoFrameMetadata = WTFMove(videoFrame);
 }
 
 RefPtr<NativeImage> MediaPlayerPrivateRemote::nativeImageForCurrentTime()
@@ -61,23 +61,11 @@
     if (readyState() < MediaPlayer::ReadyState::HaveCurrentData)
         return { };
 
-    std::optional<MachSendRight> sendRight;
-    auto colorSpace = DestinationColorSpace::SRGB();
-    if (!connection().sendSync(Messages::RemoteMediaPlayerProxy::NativeImageForCurrentTime(), Messages::RemoteMediaPlayerProxy::NativeImageForCurrentTime::Reply(sendRight, colorSpace), m_id))
+    auto videoFrame = m_videoFrameGatheredWithVideoFrameMetadata ? RefPtr<WebCore::VideoFrame>(m_videoFrameGatheredWithVideoFrameMetadata) : videoFrameForCurrentTime();
+    if (!videoFrame)
         return nullptr;
 
-    if (!sendRight)
-        return nullptr;
-
-    auto surface = WebCore::IOSurface::createFromSendRight(WTFMove(*sendRight), colorSpace);
-    if (!surface)
-        return nullptr;
-
-    auto platformImage = WebCore::IOSurface::sinkIntoImage(WTFMove(surface));
-    if (!platformImage)
-        return nullptr;
-
-    return NativeImage::create(WTFMove(platformImage));
+    return WebProcess::singleton().ensureGPUProcessConnection().videoFrameObjectHeapProxy().getNativeImage(*videoFrame);
 }
 
 WebCore::DestinationColorSpace MediaPlayerPrivateRemote::colorSpace()

Modified: trunk/Source/WebKit/WebProcess/GPU/webrtc/RemoteVideoFrameObjectHeapProxyProcessor.cpp (294057 => 294058)


--- trunk/Source/WebKit/WebProcess/GPU/webrtc/RemoteVideoFrameObjectHeapProxyProcessor.cpp	2022-05-11 17:20:36 UTC (rev 294057)
+++ trunk/Source/WebKit/WebProcess/GPU/webrtc/RemoteVideoFrameObjectHeapProxyProcessor.cpp	2022-05-11 17:44:40 UTC (rev 294058)
@@ -137,15 +137,14 @@
     if (m_sharedVideoFrameWriter.isDisabled())
         m_sharedVideoFrameWriter = { };
 
-    auto nativePixelBuffer = videoFrame.pixelBuffer();
-    auto colorSpace = createCGColorSpaceForCVPixelBuffer(nativePixelBuffer);
-    auto buffer = m_sharedVideoFrameWriter.writeBuffer(nativePixelBuffer,
+    auto frame = m_sharedVideoFrameWriter.write(videoFrame,
         [&](auto& semaphore) { connection.send(Messages::RemoteVideoFrameObjectHeap::SetSharedVideoFrameSemaphore { semaphore }, 0); },
         [&](auto& handle) { connection.send(Messages::RemoteVideoFrameObjectHeap::SetSharedVideoFrameMemory { handle }, 0); });
-    if (!buffer)
+    if (!frame)
         return nullptr;
 
-    auto result = connection.sendSync(Messages::RemoteVideoFrameObjectHeap::ConvertBuffer { *buffer }, Messages::RemoteVideoFrameObjectHeap::ConvertBuffer::Reply { }, 0, GPUProcessConnection::defaultTimeout);
+    DestinationColorSpace destinationColorSpace { DestinationColorSpace::SRGB().platformColorSpace() };
+    auto result = connection.sendSync(Messages::RemoteVideoFrameObjectHeap::ConvertFrameBuffer { *frame }, Messages::RemoteVideoFrameObjectHeap::ConvertFrameBuffer::Reply { destinationColorSpace }, 0, GPUProcessConnection::defaultTimeout);
     if (!result) {
         m_sharedVideoFrameWriter.disable();
         return nullptr;
@@ -154,7 +153,7 @@
     m_conversionSemaphore.wait();
 
     auto pixelBuffer = WTFMove(m_convertedBuffer);
-    return pixelBuffer ? NativeImage::create(PixelBufferConformerCV::imageFrom32BGRAPixelBuffer(WTFMove(pixelBuffer), colorSpace.get())) : nullptr;
+    return pixelBuffer ? NativeImage::create(PixelBufferConformerCV::imageFrom32BGRAPixelBuffer(WTFMove(pixelBuffer), destinationColorSpace.platformColorSpace())) : nullptr;
 }
 
 }

Modified: trunk/Source/WebKit/WebProcess/GPU/webrtc/SharedVideoFrame.cpp (294057 => 294058)


--- trunk/Source/WebKit/WebProcess/GPU/webrtc/SharedVideoFrame.cpp	2022-05-11 17:20:36 UTC (rev 294057)
+++ trunk/Source/WebKit/WebProcess/GPU/webrtc/SharedVideoFrame.cpp	2022-05-11 17:44:40 UTC (rev 294058)
@@ -99,7 +99,7 @@
     return true;
 }
 
-std::optional<SharedVideoFrame> SharedVideoFrameWriter::write(VideoFrame& frame, const Function<void(IPC::Semaphore&)>& newSemaphoreCallback, const Function<void(const SharedMemory::IPCHandle&)>& newMemoryCallback)
+std::optional<SharedVideoFrame> SharedVideoFrameWriter::write(const VideoFrame& frame, const Function<void(IPC::Semaphore&)>& newSemaphoreCallback, const Function<void(const SharedMemory::IPCHandle&)>& newMemoryCallback)
 {
     auto buffer = writeBuffer(frame, newSemaphoreCallback, newMemoryCallback);
     if (!buffer)
@@ -107,7 +107,7 @@
     return SharedVideoFrame { frame.presentationTime(), frame.isMirrored(), frame.rotation(), WTFMove(*buffer) };
 }
 
-std::optional<SharedVideoFrame::Buffer> SharedVideoFrameWriter::writeBuffer(VideoFrame& frame, const Function<void(IPC::Semaphore&)>& newSemaphoreCallback, const Function<void(const SharedMemory::IPCHandle&)>& newMemoryCallback)
+std::optional<SharedVideoFrame::Buffer> SharedVideoFrameWriter::writeBuffer(const VideoFrame& frame, const Function<void(IPC::Semaphore&)>& newSemaphoreCallback, const Function<void(const SharedMemory::IPCHandle&)>& newMemoryCallback)
 {
     if (is<RemoteVideoFrameProxy>(frame))
         return downcast<RemoteVideoFrameProxy>(frame).newReadReference();

Modified: trunk/Source/WebKit/WebProcess/GPU/webrtc/SharedVideoFrame.h (294057 => 294058)


--- trunk/Source/WebKit/WebProcess/GPU/webrtc/SharedVideoFrame.h	2022-05-11 17:20:36 UTC (rev 294057)
+++ trunk/Source/WebKit/WebProcess/GPU/webrtc/SharedVideoFrame.h	2022-05-11 17:44:40 UTC (rev 294058)
@@ -68,7 +68,7 @@
 public:
     SharedVideoFrameWriter();
 
-    std::optional<SharedVideoFrame> write(WebCore::VideoFrame&, const Function<void(IPC::Semaphore&)>&, const Function<void(const SharedMemory::IPCHandle&)>&);
+    std::optional<SharedVideoFrame> write(const WebCore::VideoFrame&, const Function<void(IPC::Semaphore&)>&, const Function<void(const SharedMemory::IPCHandle&)>&);
     std::optional<SharedVideoFrame::Buffer> writeBuffer(CVPixelBufferRef, const Function<void(IPC::Semaphore&)>&, const Function<void(const SharedMemory::IPCHandle&)>&, bool canSendIOSurface = true);
 #if USE(LIBWEBRTC)
     std::optional<SharedVideoFrame::Buffer> writeBuffer(const webrtc::VideoFrame&, const Function<void(IPC::Semaphore&)>&, const Function<void(const SharedMemory::IPCHandle&)>&);
@@ -81,7 +81,7 @@
     bool allocateStorage(size_t, const Function<void(const SharedMemory::IPCHandle&)>&);
     bool prepareWriting(const WebCore::SharedVideoFrameInfo&, const Function<void(IPC::Semaphore&)>&, const Function<void(const SharedMemory::IPCHandle&)>&);
 
-    std::optional<SharedVideoFrame::Buffer> writeBuffer(WebCore::VideoFrame&, const Function<void(IPC::Semaphore&)>&, const Function<void(const SharedMemory::IPCHandle&)>&);
+    std::optional<SharedVideoFrame::Buffer> writeBuffer(const WebCore::VideoFrame&, const Function<void(IPC::Semaphore&)>&, const Function<void(const SharedMemory::IPCHandle&)>&);
 #if USE(LIBWEBRTC)
     std::optional<SharedVideoFrame::Buffer> writeBuffer(webrtc::VideoFrameBuffer&, const Function<void(IPC::Semaphore&)>&, const Function<void(const SharedMemory::IPCHandle&)>&);
 #endif