Title: [290501] trunk/Source
Revision
290501
Author
[email protected]
Date
2022-02-25 00:51:49 -0800 (Fri, 25 Feb 2022)

Log Message

Optimize black frame sending in MediaRecorderPrivate
https://bugs.webkit.org/show_bug.cgi?id=237027

Reviewed by Kimmo Kinnunen.

Source/WebCore:

Allow generating black frames as IOSurfaces.
Covered by existing tests.

* platform/graphics/cv/CVUtilities.h:
* platform/graphics/cv/CVUtilities.mm:

Source/WebKit:

Add support for sending black frames through SharedVideoFrame.
In that case, we only send width and height and we reconstruct a black frame on the receiver side.
Make use of SharedVideoFrame in RemoteMediaRecorder.
Set ownership in SharedVideoFrame to the corresponding WebProcess.

Covered by existing tests, in particular http/wpt/mediarecorder/mute-tracks.html.

* GPUProcess/webrtc/LibWebRTCCodecsProxy.h:
* GPUProcess/webrtc/LibWebRTCCodecsProxy.mm:
* GPUProcess/webrtc/RemoteMediaRecorder.cpp:
* GPUProcess/webrtc/RemoteMediaRecorder.h:
* GPUProcess/webrtc/RemoteMediaRecorder.messages.in:
* GPUProcess/webrtc/RemoteSampleBufferDisplayLayer.cpp:
* WebProcess/GPU/webrtc/MediaRecorderPrivate.cpp:
* WebProcess/GPU/webrtc/MediaRecorderPrivate.h:
* WebProcess/GPU/webrtc/SharedVideoFrame.cpp:
* WebProcess/GPU/webrtc/SharedVideoFrame.h:

Modified Paths

Diff

Modified: trunk/Source/WebCore/ChangeLog (290500 => 290501)


--- trunk/Source/WebCore/ChangeLog	2022-02-25 08:26:02 UTC (rev 290500)
+++ trunk/Source/WebCore/ChangeLog	2022-02-25 08:51:49 UTC (rev 290501)
@@ -1,3 +1,16 @@
+2022-02-25  Youenn Fablet  <[email protected]>
+
+        Optimize black frame sending in MediaRecorderPrivate
+        https://bugs.webkit.org/show_bug.cgi?id=237027
+
+        Reviewed by Kimmo Kinnunen.
+
+        Allow generating black frames as IOSurfaces.
+        Covered by existing tests.
+
+        * platform/graphics/cv/CVUtilities.h:
+        * platform/graphics/cv/CVUtilities.mm:
+
 2022-02-24  Matt Woodrow  <[email protected]>
 
         Simplify grid RTL handling

Modified: trunk/Source/WebCore/platform/graphics/cv/CVUtilities.h (290500 => 290501)


--- trunk/Source/WebCore/platform/graphics/cv/CVUtilities.h	2022-02-25 08:26:02 UTC (rev 290500)
+++ trunk/Source/WebCore/platform/graphics/cv/CVUtilities.h	2022-02-25 08:51:49 UTC (rev 290501)
@@ -54,6 +54,6 @@
 // Should be called with non-empty ProcessIdentity.
 WEBCORE_EXPORT void setOwnershipIdentityForCVPixelBuffer(CVPixelBufferRef, const ProcessIdentity&);
 
-WEBCORE_EXPORT RetainPtr<CVPixelBufferRef> createBlackPixelBuffer(size_t width, size_t height);
+WEBCORE_EXPORT RetainPtr<CVPixelBufferRef> createBlackPixelBuffer(size_t width, size_t height, bool shouldUseIOSurface = false);
 
 }

Modified: trunk/Source/WebCore/platform/graphics/cv/CVUtilities.mm (290500 => 290501)


--- trunk/Source/WebCore/platform/graphics/cv/CVUtilities.mm	2022-02-25 08:26:02 UTC (rev 290500)
+++ trunk/Source/WebCore/platform/graphics/cv/CVUtilities.mm	2022-02-25 08:51:49 UTC (rev 290501)
@@ -163,13 +163,15 @@
     IOSurface::setOwnershipIdentity(surface, owner);
 }
 
-RetainPtr<CVPixelBufferRef> createBlackPixelBuffer(size_t width, size_t height)
+RetainPtr<CVPixelBufferRef> createBlackPixelBuffer(size_t width, size_t height, bool shouldUseIOSurface)
 {
     OSType format = preferedPixelBufferFormat();
     ASSERT(format == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange || format == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange);
 
+    NSDictionary *pixelAttributes = @{ (__bridge NSString *)kCVPixelBufferIOSurfacePropertiesKey : @{ } };
+
     CVPixelBufferRef pixelBuffer = nullptr;
-    auto status = CVPixelBufferCreate(kCFAllocatorDefault, width, height, format, nullptr, &pixelBuffer);
+    auto status = CVPixelBufferCreate(kCFAllocatorDefault, width, height, format, shouldUseIOSurface ? (__bridge CFDictionaryRef)pixelAttributes : nullptr, &pixelBuffer);
     ASSERT_UNUSED(status, status == noErr);
 
     status = CVPixelBufferLockBaseAddress(pixelBuffer, 0);

Modified: trunk/Source/WebKit/ChangeLog (290500 => 290501)


--- trunk/Source/WebKit/ChangeLog	2022-02-25 08:26:02 UTC (rev 290500)
+++ trunk/Source/WebKit/ChangeLog	2022-02-25 08:51:49 UTC (rev 290501)
@@ -1,5 +1,30 @@
 2022-02-25  Youenn Fablet  <[email protected]>
 
+        Optimize black frame sending in MediaRecorderPrivate
+        https://bugs.webkit.org/show_bug.cgi?id=237027
+
+        Reviewed by Kimmo Kinnunen.
+
+        Add support for sending black frames through SharedVideoFrame.
+        In that case, we only send width and height and we reconstruct a black frame on the receiver side.
+        Make use of SharedVideoFrame in RemoteMediaRecorder.
+        Set ownership in SharedVideoFrame to the corresponding WebProcess.
+
+        Covered by existing tests, in particular http/wpt/mediarecorder/mute-tracks.html.
+
+        * GPUProcess/webrtc/LibWebRTCCodecsProxy.h:
+        * GPUProcess/webrtc/LibWebRTCCodecsProxy.mm:
+        * GPUProcess/webrtc/RemoteMediaRecorder.cpp:
+        * GPUProcess/webrtc/RemoteMediaRecorder.h:
+        * GPUProcess/webrtc/RemoteMediaRecorder.messages.in:
+        * GPUProcess/webrtc/RemoteSampleBufferDisplayLayer.cpp:
+        * WebProcess/GPU/webrtc/MediaRecorderPrivate.cpp:
+        * WebProcess/GPU/webrtc/MediaRecorderPrivate.h:
+        * WebProcess/GPU/webrtc/SharedVideoFrame.cpp:
+        * WebProcess/GPU/webrtc/SharedVideoFrame.h:
+
+2022-02-25  Youenn Fablet  <[email protected]>
+
         Remove dead code in GPUProcessConnection::dispatchMessage
         https://bugs.webkit.org/show_bug.cgi?id=237135
 

Modified: trunk/Source/WebKit/GPUProcess/webrtc/LibWebRTCCodecsProxy.h (290500 => 290501)


--- trunk/Source/WebKit/GPUProcess/webrtc/LibWebRTCCodecsProxy.h	2022-02-25 08:26:02 UTC (rev 290500)
+++ trunk/Source/WebKit/GPUProcess/webrtc/LibWebRTCCodecsProxy.h	2022-02-25 08:51:49 UTC (rev 290501)
@@ -34,6 +34,7 @@
 #include "RemoteVideoFrameIdentifier.h"
 #include "SharedMemory.h"
 #include "SharedVideoFrame.h"
+#include <WebCore/ProcessIdentity.h>
 #include <wtf/Lock.h>
 
 namespace IPC {
@@ -104,6 +105,7 @@
     HashMap<RTCEncoderIdentifier, Encoder> m_encoders WTF_GUARDED_BY_LOCK(m_lock); // Only modified on the libWebRTCCodecsQueue but may get accessed from the main thread.
     Ref<WorkQueue> m_queue;
     Ref<RemoteVideoFrameObjectHeap> m_videoFrameObjectHeap;
+    WebCore::ProcessIdentity m_resourceOwner;
 };
 
 }

Modified: trunk/Source/WebKit/GPUProcess/webrtc/LibWebRTCCodecsProxy.mm (290500 => 290501)


--- trunk/Source/WebKit/GPUProcess/webrtc/LibWebRTCCodecsProxy.mm	2022-02-25 08:26:02 UTC (rev 290500)
+++ trunk/Source/WebKit/GPUProcess/webrtc/LibWebRTCCodecsProxy.mm	2022-02-25 08:51:49 UTC (rev 290501)
@@ -53,6 +53,7 @@
     : m_gpuConnectionToWebProcess(connection)
     , m_queue(connection.gpuProcess().libWebRTCCodecsQueue())
     , m_videoFrameObjectHeap(connection.videoFrameObjectHeap())
+    , m_resourceOwner(connection.webProcessIdentity())
 {
     m_gpuConnectionToWebProcess.connection().addThreadMessageReceiver(Messages::LibWebRTCCodecsProxy::messageReceiverName(), this);
 }
@@ -281,7 +282,7 @@
         return;
 
     if (!encoder->frameReader)
-        encoder->frameReader = makeUnique<SharedVideoFrameReader>(Ref { m_videoFrameObjectHeap });
+        encoder->frameReader = makeUnique<SharedVideoFrameReader>(Ref { m_videoFrameObjectHeap }, m_resourceOwner);
     encoder->frameReader->setSemaphore(WTFMove(semaphore));
 }
 
@@ -295,7 +296,7 @@
         return;
 
     if (!encoder->frameReader)
-        encoder->frameReader = makeUnique<SharedVideoFrameReader>(Ref { m_videoFrameObjectHeap });
+        encoder->frameReader = makeUnique<SharedVideoFrameReader>(Ref { m_videoFrameObjectHeap }, m_resourceOwner);
     encoder->frameReader->setSharedMemory(ipcHandle);
 }
 

Modified: trunk/Source/WebKit/GPUProcess/webrtc/RemoteMediaRecorder.cpp (290500 => 290501)


--- trunk/Source/WebKit/GPUProcess/webrtc/RemoteMediaRecorder.cpp	2022-02-25 08:26:02 UTC (rev 290500)
+++ trunk/Source/WebKit/GPUProcess/webrtc/RemoteMediaRecorder.cpp	2022-02-25 08:51:49 UTC (rev 290501)
@@ -33,7 +33,6 @@
 #include "RemoteVideoFrameObjectHeap.h"
 #include "SharedRingBufferStorage.h"
 #include <WebCore/CARingBuffer.h>
-#include <WebCore/ImageTransferSessionVT.h>
 #include <WebCore/MediaSampleAVFObjC.h>
 #include <WebCore/RemoteVideoSample.h>
 #include <WebCore/WebAudioBufferList.h>
@@ -57,8 +56,7 @@
     : m_gpuConnectionToWebProcess(gpuConnectionToWebProcess)
     , m_identifier(identifier)
     , m_writer(WTFMove(writer))
-    , m_sharedVideoFrameReader(Ref { gpuConnectionToWebProcess.videoFrameObjectHeap() })
-    , m_videoFrameObjectHeap(gpuConnectionToWebProcess.videoFrameObjectHeap())
+    , m_sharedVideoFrameReader(Ref { gpuConnectionToWebProcess.videoFrameObjectHeap() }, { gpuConnectionToWebProcess.webProcessIdentity() })
 {
     if (recordAudio)
         m_ringBuffer = makeUnique<CARingBuffer>();
@@ -90,37 +88,10 @@
     m_writer->appendAudioSampleBuffer(*m_audioBufferList, m_description, time, numberOfFrames);
 }
 
-void RemoteMediaRecorder::videoSampleAvailable(WebCore::RemoteVideoSample&& remoteSample, std::optional<RemoteVideoFrameReadReference> sampleReference)
+void RemoteMediaRecorder::videoSampleAvailable(SharedVideoFrame&& sharedVideoFrame)
 {
-    RefPtr<MediaSample> sample;
-    if (sampleReference) {
-        sample = m_videoFrameObjectHeap->retire(WTFMove(*sampleReference), mediaRecorderDefaultTimeout);
-        if (!sample) {
-            // In case of GPUProcess crash, we might enqueue previous GPUProcess samples, ignore them.
-            return;
-        }
-    } else if (!remoteSample.surface()) {
-        auto pixelBuffer = m_sharedVideoFrameReader.read();
-        if (!pixelBuffer)
-            return;
-
-        sample = MediaSampleAVFObjC::createImageSample(WTFMove(pixelBuffer), remoteSample.rotation(), remoteSample.mirrored(), remoteSample.time());
-    } else {
-        if (!m_imageTransferSession || m_imageTransferSession->pixelFormat() != remoteSample.videoFormat())
-            m_imageTransferSession = ImageTransferSessionVT::create(remoteSample.videoFormat());
-
-        if (!m_imageTransferSession) {
-            ASSERT_NOT_REACHED();
-            return;
-        }
-
-        sample = m_imageTransferSession->createMediaSample(remoteSample);
-        if (!sample) {
-            ASSERT_NOT_REACHED();
-            return;
-        }
-    }
-    m_writer->appendVideoSampleBuffer(*sample);
+    if (auto sample = m_sharedVideoFrameReader.read(WTFMove(sharedVideoFrame)))
+        m_writer->appendVideoSampleBuffer(*sample);
 }
 
 void RemoteMediaRecorder::fetchData(CompletionHandler<void(IPC::DataReference&&, double)>&& completionHandler)

Modified: trunk/Source/WebKit/GPUProcess/webrtc/RemoteMediaRecorder.h (290500 => 290501)


--- trunk/Source/WebKit/GPUProcess/webrtc/RemoteMediaRecorder.h	2022-02-25 08:26:02 UTC (rev 290500)
+++ trunk/Source/WebKit/GPUProcess/webrtc/RemoteMediaRecorder.h	2022-02-25 08:51:49 UTC (rev 290501)
@@ -44,7 +44,6 @@
 
 namespace WebCore {
 class CARingBuffer;
-class ImageTransferSessionVT;
 class RemoteVideoSample;
 class WebAudioBufferList;
 struct MediaRecorderPrivateOptions;
@@ -53,7 +52,6 @@
 namespace WebKit {
 
 class GPUConnectionToWebProcess;
-class RemoteVideoFrameObjectHeap;
 class SharedRingBufferStorage;
 
 class RemoteMediaRecorder : private IPC::MessageReceiver {
@@ -74,7 +72,7 @@
     // IPC::MessageReceiver
     void audioSamplesStorageChanged(const SharedMemory::IPCHandle&, const WebCore::CAAudioStreamDescription&, uint64_t numberOfFrames);
     void audioSamplesAvailable(MediaTime, uint64_t numberOfFrames);
-    void videoSampleAvailable(WebCore::RemoteVideoSample&&, std::optional<RemoteVideoFrameReadReference>);
+    void videoSampleAvailable(SharedVideoFrame&&);
     void fetchData(CompletionHandler<void(IPC::DataReference&&, double)>&&);
     void stopRecording(CompletionHandler<void()>&&);
     void pause(CompletionHandler<void()>&&);
@@ -89,10 +87,8 @@
     WebCore::CAAudioStreamDescription m_description;
     std::unique_ptr<WebCore::CARingBuffer> m_ringBuffer;
     std::unique_ptr<WebCore::WebAudioBufferList> m_audioBufferList;
-    std::unique_ptr<WebCore::ImageTransferSessionVT> m_imageTransferSession;
 
     SharedVideoFrameReader m_sharedVideoFrameReader;
-    Ref<RemoteVideoFrameObjectHeap> m_videoFrameObjectHeap;
 };
 
 }

Modified: trunk/Source/WebKit/GPUProcess/webrtc/RemoteMediaRecorder.messages.in (290500 => 290501)


--- trunk/Source/WebKit/GPUProcess/webrtc/RemoteMediaRecorder.messages.in	2022-02-25 08:26:02 UTC (rev 290500)
+++ trunk/Source/WebKit/GPUProcess/webrtc/RemoteMediaRecorder.messages.in	2022-02-25 08:51:49 UTC (rev 290501)
@@ -26,7 +26,7 @@
 messages -> RemoteMediaRecorder NotRefCounted {
     AudioSamplesStorageChanged(WebKit::SharedMemory::IPCHandle storageHandle, WebCore::CAAudioStreamDescription description, uint64_t numberOfFrames)
     AudioSamplesAvailable(MediaTime time, uint64_t numberOfFrames)
-    VideoSampleAvailable(WebCore::RemoteVideoSample sample, std::optional<WebKit::RemoteVideoFrameReadReference> remoteVideoFrameReadReference)
+    VideoSampleAvailable(struct WebKit::SharedVideoFrame frame)
     FetchData() -> (IPC::DataReference buffer, double timeCode) Async
     StopRecording() -> () Async
     Pause() -> () Async

Modified: trunk/Source/WebKit/GPUProcess/webrtc/RemoteSampleBufferDisplayLayer.cpp (290500 => 290501)


--- trunk/Source/WebKit/GPUProcess/webrtc/RemoteSampleBufferDisplayLayer.cpp	2022-02-25 08:26:02 UTC (rev 290500)
+++ trunk/Source/WebKit/GPUProcess/webrtc/RemoteSampleBufferDisplayLayer.cpp	2022-02-25 08:51:49 UTC (rev 290501)
@@ -52,7 +52,7 @@
     , m_identifier(identifier)
     , m_connection(WTFMove(connection))
     , m_sampleBufferDisplayLayer(LocalSampleBufferDisplayLayer::create(*this))
-    , m_sharedVideoFrameReader(&m_gpuConnection.videoFrameObjectHeap())
+    , m_sharedVideoFrameReader(Ref { m_gpuConnection.videoFrameObjectHeap() }, m_gpuConnection.webProcessIdentity())
 {
     ASSERT(m_sampleBufferDisplayLayer);
 }
@@ -122,7 +122,6 @@
 void RemoteSampleBufferDisplayLayer::enqueue(SharedVideoFrame&& frame)
 {
     auto sample = m_sharedVideoFrameReader.read(WTFMove(frame));
-    ASSERT(sample);
     if (!sample)
         return;
 

Modified: trunk/Source/WebKit/WebProcess/GPU/webrtc/MediaRecorderPrivate.cpp (290500 => 290501)


--- trunk/Source/WebKit/WebProcess/GPU/webrtc/MediaRecorderPrivate.cpp	2022-02-25 08:26:02 UTC (rev 290500)
+++ trunk/Source/WebKit/WebProcess/GPU/webrtc/MediaRecorderPrivate.cpp	2022-02-25 08:51:49 UTC (rev 290501)
@@ -96,46 +96,22 @@
 void MediaRecorderPrivate::videoSampleAvailable(MediaSample& sample, VideoSampleMetadata)
 {
     if (shouldMuteVideo()) {
-        // FIXME: We could optimize sending black frames by only sending width/height.
-        if (!m_blackFrame) {
+        if (!m_blackFrameSize) {
             auto size = sample.presentationSize();
-            m_blackFrame = createBlackPixelBuffer(static_cast<size_t>(size.width()), static_cast<size_t>(size.height()));
+            m_blackFrameSize = WebCore::IntSize { static_cast<int>(size.width()), static_cast<int>(size.height()) };
         }
-        auto remoteSample = RemoteVideoSample::create(m_blackFrame.get(), sample.presentationTime(), sample.videoRotation(), RemoteVideoSample::ShouldCheckForIOSurface::No);
-        if (!copySharedVideoFrame(remoteSample->imageBuffer()))
-            return;
-        m_connection->send(Messages::RemoteMediaRecorder::VideoSampleAvailable { WTFMove(*remoteSample), { } }, m_identifier);
+        SharedVideoFrame sharedVideoFrame { sample.presentationTime(), sample.videoMirrored(), sample.videoRotation(), *m_blackFrameSize };
+        m_connection->send(Messages::RemoteMediaRecorder::VideoSampleAvailable { sharedVideoFrame }, m_identifier);
         return;
     }
 
-    m_blackFrame = nullptr;
-
-    std::optional<RemoteVideoFrameReadReference> remoteVideoFrameReadReference;
-    std::unique_ptr<RemoteVideoSample> remoteSample;
-    if (is<RemoteVideoFrameProxy>(sample)) {
-        remoteVideoFrameReadReference = downcast<RemoteVideoFrameProxy>(sample).read();
-        remoteSample = RemoteVideoSample::create(nullptr, sample.presentationTime(), sample.videoRotation(), RemoteVideoSample::ShouldCheckForIOSurface::No);
-    } else {
-        remoteSample = RemoteVideoSample::create(sample, RemoteVideoSample::ShouldCheckForIOSurface::No);
-        if (!remoteSample->surface()) {
-            // buffer is not IOSurface, we need to copy to shared video frame.
-            if (!copySharedVideoFrame(remoteSample->imageBuffer()))
-                return;
-        }
-    }
-
-    m_connection->send(Messages::RemoteMediaRecorder::VideoSampleAvailable { WTFMove(*remoteSample), remoteVideoFrameReadReference }, m_identifier);
-}
-
-
-bool MediaRecorderPrivate::copySharedVideoFrame(CVPixelBufferRef pixelBuffer)
-{
-    if (!pixelBuffer)
-        return false;
-    return m_sharedVideoFrameWriter.write(pixelBuffer,
+    m_blackFrameSize = { };
+    auto sharedVideoFrame = m_sharedVideoFrameWriter.write(sample,
         [this](auto& semaphore) { m_connection->send(Messages::RemoteMediaRecorder::SetSharedVideoFrameSemaphore { semaphore }, m_identifier); },
         [this](auto& handle) { m_connection->send(Messages::RemoteMediaRecorder::SetSharedVideoFrameMemory { handle }, m_identifier); }
     );
+    if (sharedVideoFrame)
+        m_connection->send(Messages::RemoteMediaRecorder::VideoSampleAvailable { WTFMove(*sharedVideoFrame) }, m_identifier);
 }
 
 void MediaRecorderPrivate::audioSamplesAvailable(const MediaTime& time, const PlatformAudioData& audioData, const AudioStreamDescription& description, size_t numberOfFrames)

Modified: trunk/Source/WebKit/WebProcess/GPU/webrtc/MediaRecorderPrivate.h (290500 => 290501)


--- trunk/Source/WebKit/WebProcess/GPU/webrtc/MediaRecorderPrivate.h	2022-02-25 08:26:02 UTC (rev 290500)
+++ trunk/Source/WebKit/WebProcess/GPU/webrtc/MediaRecorderPrivate.h	2022-02-25 08:51:49 UTC (rev 290501)
@@ -69,13 +69,11 @@
     void gpuProcessConnectionDidClose(GPUProcessConnection&) final;
 
     void storageChanged(SharedMemory*, const WebCore::CAAudioStreamDescription& format, size_t frameCount);
-    bool copySharedVideoFrame(CVPixelBufferRef);
 
     MediaRecorderIdentifier m_identifier;
     Ref<WebCore::MediaStreamPrivate> m_stream;
     Ref<IPC::Connection> m_connection;
 
-    RetainPtr<CVPixelBufferRef> m_blackFrame;
     std::unique_ptr<WebCore::CARingBuffer> m_ringBuffer;
     WebCore::CAAudioStreamDescription m_description { };
     std::unique_ptr<WebCore::WebAudioBufferList> m_silenceAudioBuffer;
@@ -83,6 +81,7 @@
     WebCore::MediaRecorderPrivateOptions m_options;
     bool m_hasVideo { false };
     bool m_isStopped { false };
+    std::optional<WebCore::IntSize> m_blackFrameSize;
 
     SharedVideoFrameWriter m_sharedVideoFrameWriter;
 };

Modified: trunk/Source/WebKit/WebProcess/GPU/webrtc/SharedVideoFrame.cpp (290500 => 290501)


--- trunk/Source/WebKit/WebProcess/GPU/webrtc/SharedVideoFrame.cpp	2022-02-25 08:26:02 UTC (rev 290500)
+++ trunk/Source/WebKit/WebProcess/GPU/webrtc/SharedVideoFrame.cpp	2022-02-25 08:51:49 UTC (rev 290501)
@@ -134,15 +134,14 @@
     m_semaphore->signal();
 }
 
-SharedVideoFrameReader::SharedVideoFrameReader(RefPtr<RemoteVideoFrameObjectHeap>&& objectHeap, UseIOSurfaceBufferPool useIOSurfaceBufferPool)
+SharedVideoFrameReader::SharedVideoFrameReader(RefPtr<RemoteVideoFrameObjectHeap>&& objectHeap, const ProcessIdentity& resourceOwner, UseIOSurfaceBufferPool useIOSurfaceBufferPool)
     : m_objectHeap(WTFMove(objectHeap))
+    , m_resourceOwner(resourceOwner)
     , m_useIOSurfaceBufferPool(useIOSurfaceBufferPool)
 {
 }
 
-SharedVideoFrameReader::SharedVideoFrameReader()
-{
-}
+SharedVideoFrameReader::SharedVideoFrameReader() = default;
 
 RetainPtr<CVPixelBufferRef> SharedVideoFrameReader::read()
 {
@@ -164,7 +163,10 @@
     if (m_storage->size() < info->storageSize())
         return { };
 
-    return info->createPixelBufferFromMemory(data + SharedVideoFrameInfoEncodingLength, pixelBufferPool(*info));
+    auto result = info->createPixelBufferFromMemory(data + SharedVideoFrameInfoEncodingLength, pixelBufferPool(*info));
+    if (result && m_resourceOwner && m_useIOSurfaceBufferPool == UseIOSurfaceBufferPool::Yes)
+        setOwnershipIdentityForCVPixelBuffer(result.get(), m_resourceOwner);
+    return result;
 }
 
 RefPtr<MediaSample> SharedVideoFrameReader::read(SharedVideoFrame&& sharedVideoFrame)
@@ -176,9 +178,9 @@
             return nullptr;
 
         auto sample = m_objectHeap->retire(WTFMove(reference), 0_s);
-        ASSERT(sample && sample->pixelBuffer());
         if (!sample)
             return nullptr;
+        ASSERT(sample->pixelBuffer());
         return sample->pixelBuffer();
     } , [](MachSendRight&& sendRight) -> RetainPtr<CVPixelBufferRef> {
         auto surface = WebCore::IOSurface::createFromSendRight(WTFMove(sendRight), DestinationColorSpace::SRGB());
@@ -187,6 +189,14 @@
         return WebCore::createCVPixelBuffer(surface->surface()).value_or(nullptr);
     }, [this](std::nullptr_t representation) -> RetainPtr<CVPixelBufferRef> {
         return read();
+    }, [this](IntSize size) -> RetainPtr<CVPixelBufferRef> {
+        if (m_blackFrameSize != size) {
+            m_blackFrameSize = size;
+            m_blackFrame = WebCore::createBlackPixelBuffer(m_blackFrameSize.width(), m_blackFrameSize.height(), m_useIOSurfaceBufferPool == UseIOSurfaceBufferPool::Yes);
+            if (m_resourceOwner && m_useIOSurfaceBufferPool == UseIOSurfaceBufferPool::Yes)
+                setOwnershipIdentityForCVPixelBuffer(m_blackFrame.get(), m_resourceOwner);
+        }
+        return m_blackFrame.get();
     });
 
     if (!pixelBuffer)

Modified: trunk/Source/WebKit/WebProcess/GPU/webrtc/SharedVideoFrame.h (290500 => 290501)


--- trunk/Source/WebKit/WebProcess/GPU/webrtc/SharedVideoFrame.h	2022-02-25 08:26:02 UTC (rev 290500)
+++ trunk/Source/WebKit/WebProcess/GPU/webrtc/SharedVideoFrame.h	2022-02-25 08:51:49 UTC (rev 290501)
@@ -31,6 +31,7 @@
 #include "RemoteVideoFrameIdentifier.h"
 #include "SharedMemory.h"
 #include <WebCore/MediaSample.h>
+#include <WebCore/ProcessIdentity.h>
 #include <wtf/RefPtr.h>
 #include <wtf/RetainPtr.h>
 #include <wtf/UniqueRef.h>
@@ -54,7 +55,7 @@
     MediaTime time;
     bool mirrored { false };
     WebCore::MediaSample::VideoRotation rotation { WebCore::MediaSample::VideoRotation::None };
-    std::variant<std::nullptr_t, RemoteVideoFrameReadReference, MachSendRight> buffer;
+    std::variant<std::nullptr_t, RemoteVideoFrameReadReference, MachSendRight, WebCore::IntSize> buffer;
 
     template<class Encoder> void encode(Encoder&) const;
     template<class Decoder> static std::optional<SharedVideoFrame> decode(Decoder&);
@@ -87,7 +88,7 @@
     WTF_MAKE_FAST_ALLOCATED;
 public:
     enum class UseIOSurfaceBufferPool { No, Yes };
-    explicit SharedVideoFrameReader(RefPtr<RemoteVideoFrameObjectHeap>&&, UseIOSurfaceBufferPool = UseIOSurfaceBufferPool::Yes);
+    explicit SharedVideoFrameReader(RefPtr<RemoteVideoFrameObjectHeap>&&, const WebCore::ProcessIdentity& = { }, UseIOSurfaceBufferPool = UseIOSurfaceBufferPool::Yes);
     SharedVideoFrameReader();
 
     void setSemaphore(IPC::Semaphore&& semaphore) { m_semaphore = WTFMove(semaphore); }
@@ -100,7 +101,8 @@
     CVPixelBufferPoolRef pixelBufferPool(const WebCore::SharedVideoFrameInfo&);
 
     RefPtr<RemoteVideoFrameObjectHeap> m_objectHeap;
-    UseIOSurfaceBufferPool m_useIOSurfaceBufferPool { UseIOSurfaceBufferPool::Yes };
+    WebCore::ProcessIdentity m_resourceOwner;
+    UseIOSurfaceBufferPool m_useIOSurfaceBufferPool { UseIOSurfaceBufferPool::No };
     IPC::Semaphore m_semaphore;
     RefPtr<SharedMemory> m_storage;
 
@@ -108,6 +110,8 @@
     OSType m_bufferPoolType { 0 };
     uint32_t m_bufferPoolWidth { 0 };
     uint32_t m_bufferPoolHeight { 0 };
+    WebCore::IntSize m_blackFrameSize;
+    RetainPtr<CVPixelBufferRef> m_blackFrame;
 };
 
 template<class Encoder> void SharedVideoFrame::encode(Encoder& encoder) const
_______________________________________________
webkit-changes mailing list
[email protected]
https://lists.webkit.org/mailman/listinfo/webkit-changes

Reply via email to