Diff
Modified: trunk/Source/WebKit/ChangeLog (290357 => 290358)
--- trunk/Source/WebKit/ChangeLog 2022-02-23 08:50:18 UTC (rev 290357)
+++ trunk/Source/WebKit/ChangeLog 2022-02-23 09:12:02 UTC (rev 290358)
@@ -1,5 +1,36 @@
2022-02-23 Youenn Fablet <[email protected]>
+ RemoteSampleBufferDisplayLayer::enqueueSample should not change media samples owned by its object heap
+ https://bugs.webkit.org/show_bug.cgi?id=237025
+
+ Reviewed by Darin Adler.
+
+ From past bugs, it is risky to change CMSampleBuffer dictionaries if they can be used on various threads (encoder, display layer...).
+ Instead, we create a new MediaSample wrapping the same CVPixelBufferRef, on which we can safely apply setAsDisplayImmediately.
+
+ We introduce SharedVideoFrame as a replacement to RemoteVideoSample.
+ We beef up SharedVideoFrameWriter and SharedVideoFrameReader to handle any MediaSample,
+ with specific handling for remote frames, IOSurface frames or other in memory frames.
+ We make use of SharedVideoFrame in SampleBufferDisplayLayer, which allows us to unify enqueueSample and enqueueSampleCV into one IPC message,
+ which is easier to maintain.
+
+ We update call sites that create SharedVideoFrameReader to either take nullptr or an object heap reference when they will need it in the future.
+
+ Covered by existing tests.
+
+ * GPUProcess/webrtc/LibWebRTCCodecsProxy.mm:
+ * GPUProcess/webrtc/RemoteMediaRecorder.cpp:
+ * GPUProcess/webrtc/RemoteSampleBufferDisplayLayer.cpp:
+ * GPUProcess/webrtc/RemoteSampleBufferDisplayLayer.h:
+ * GPUProcess/webrtc/RemoteSampleBufferDisplayLayer.messages.in:
+ * WebProcess/GPU/webrtc/RemoteVideoFrameObjectHeapProxyProcessor.cpp:
+ * WebProcess/GPU/webrtc/SampleBufferDisplayLayer.cpp:
+ * WebProcess/GPU/webrtc/SampleBufferDisplayLayer.h:
+ * WebProcess/GPU/webrtc/SharedVideoFrame.cpp:
+ * WebProcess/GPU/webrtc/SharedVideoFrame.h:
+
+2022-02-23 Youenn Fablet <[email protected]>
+
Optimize RemoteVideoFrame handling in WebProcess WebRTC pipeline
https://bugs.webkit.org/show_bug.cgi?id=236970
Modified: trunk/Source/WebKit/GPUProcess/webrtc/LibWebRTCCodecsProxy.mm (290357 => 290358)
--- trunk/Source/WebKit/GPUProcess/webrtc/LibWebRTCCodecsProxy.mm 2022-02-23 08:50:18 UTC (rev 290357)
+++ trunk/Source/WebKit/GPUProcess/webrtc/LibWebRTCCodecsProxy.mm 2022-02-23 09:12:02 UTC (rev 290358)
@@ -285,7 +285,7 @@
return;
if (!encoder->frameReader)
- encoder->frameReader = makeUnique<SharedVideoFrameReader>();
+ encoder->frameReader = makeUnique<SharedVideoFrameReader>(Ref { m_videoFrameObjectHeap });
encoder->frameReader->setSemaphore(WTFMove(semaphore));
}
@@ -299,7 +299,7 @@
return;
if (!encoder->frameReader)
- encoder->frameReader = makeUnique<SharedVideoFrameReader>();
+ encoder->frameReader = makeUnique<SharedVideoFrameReader>(Ref { m_videoFrameObjectHeap });
encoder->frameReader->setSharedMemory(ipcHandle);
}
Modified: trunk/Source/WebKit/GPUProcess/webrtc/RemoteMediaRecorder.cpp (290357 => 290358)
--- trunk/Source/WebKit/GPUProcess/webrtc/RemoteMediaRecorder.cpp 2022-02-23 08:50:18 UTC (rev 290357)
+++ trunk/Source/WebKit/GPUProcess/webrtc/RemoteMediaRecorder.cpp 2022-02-23 09:12:02 UTC (rev 290358)
@@ -57,6 +57,7 @@
: m_gpuConnectionToWebProcess(gpuConnectionToWebProcess)
, m_identifier(identifier)
, m_writer(WTFMove(writer))
+ , m_sharedVideoFrameReader(Ref { gpuConnectionToWebProcess.videoFrameObjectHeap() })
, m_videoFrameObjectHeap(gpuConnectionToWebProcess.videoFrameObjectHeap())
{
if (recordAudio)
Modified: trunk/Source/WebKit/GPUProcess/webrtc/RemoteSampleBufferDisplayLayer.cpp (290357 => 290358)
--- trunk/Source/WebKit/GPUProcess/webrtc/RemoteSampleBufferDisplayLayer.cpp 2022-02-23 08:50:18 UTC (rev 290357)
+++ trunk/Source/WebKit/GPUProcess/webrtc/RemoteSampleBufferDisplayLayer.cpp 2022-02-23 09:12:02 UTC (rev 290358)
@@ -52,7 +52,7 @@
, m_identifier(identifier)
, m_connection(WTFMove(connection))
, m_sampleBufferDisplayLayer(LocalSampleBufferDisplayLayer::create(*this))
- , m_videoFrameObjectHeap(m_gpuConnection.videoFrameObjectHeap())
+ , m_sharedVideoFrameReader(&m_gpuConnection.videoFrameObjectHeap())
{
ASSERT(m_sampleBufferDisplayLayer);
}
@@ -119,42 +119,9 @@
m_sampleBufferDisplayLayer->pause();
}
-void RemoteSampleBufferDisplayLayer::enqueueSample(RemoteVideoFrameReadReference&& sample)
+void RemoteSampleBufferDisplayLayer::enqueue(SharedVideoFrame&& frame)
{
- auto mediaSample = m_videoFrameObjectHeap->retire(WTFMove(sample), defaultTimeout);
- if (!mediaSample) {
- // In case of GPUProcess crash, we might enqueue previous GPUProcess samples, ignore them.
- return;
- }
- ASSERT(is<MediaSampleAVFObjC>(mediaSample));
- if (!is<MediaSampleAVFObjC>(mediaSample))
- return;
-
- auto& avfMediaSample = downcast<MediaSampleAVFObjC>(*mediaSample);
- MediaSampleAVFObjC::setAsDisplayImmediately(avfMediaSample);
- m_sampleBufferDisplayLayer->enqueueSample(avfMediaSample);
-}
-
-void RemoteSampleBufferDisplayLayer::enqueueSampleCV(WebCore::RemoteVideoSample&& remoteSample)
-{
- RefPtr<MediaSample> sample;
- if (!remoteSample.surface()) {
- auto pixelBuffer = m_sharedVideoFrameReader.read();
- if (!pixelBuffer)
- return;
-
- sample = MediaSampleAVFObjC::createImageSample(WTFMove(pixelBuffer), remoteSample.rotation(), remoteSample.mirrored(), remoteSample.time());
- } else {
- if (!m_imageTransferSession || m_imageTransferSession->pixelFormat() != remoteSample.videoFormat())
- m_imageTransferSession = ImageTransferSessionVT::create(remoteSample.videoFormat());
-
- ASSERT(m_imageTransferSession);
- if (!m_imageTransferSession)
- return;
-
- sample = m_imageTransferSession->createMediaSample(remoteSample);
- }
-
+ auto sample = m_sharedVideoFrameReader.read(WTFMove(frame));
ASSERT(sample);
if (!sample)
return;
Modified: trunk/Source/WebKit/GPUProcess/webrtc/RemoteSampleBufferDisplayLayer.h (290357 => 290358)
--- trunk/Source/WebKit/GPUProcess/webrtc/RemoteSampleBufferDisplayLayer.h 2022-02-23 08:50:18 UTC (rev 290357)
+++ trunk/Source/WebKit/GPUProcess/webrtc/RemoteSampleBufferDisplayLayer.h 2022-02-23 09:12:02 UTC (rev 290358)
@@ -45,7 +45,6 @@
namespace WebKit {
class GPUConnectionToWebProcess;
-class RemoteVideoFrameObjectHeap;
class RemoteSampleBufferDisplayLayer : public WebCore::SampleBufferDisplayLayer::Client, public IPC::MessageReceiver, private IPC::MessageSender {
WTF_MAKE_FAST_ALLOCATED;
@@ -77,8 +76,7 @@
void flushAndRemoveImage();
void play();
void pause();
- void enqueueSample(RemoteVideoFrameReadReference&&);
- void enqueueSampleCV(WebCore::RemoteVideoSample&&);
+ void enqueue(SharedVideoFrame&&);
void clearEnqueuedSamples();
void setSharedVideoFrameSemaphore(IPC::Semaphore&&);
void setSharedVideoFrameMemory(const SharedMemory::IPCHandle&);
@@ -97,7 +95,6 @@
std::unique_ptr<WebCore::LocalSampleBufferDisplayLayer> m_sampleBufferDisplayLayer;
std::unique_ptr<LayerHostingContext> m_layerHostingContext;
SharedVideoFrameReader m_sharedVideoFrameReader;
- Ref<RemoteVideoFrameObjectHeap> m_videoFrameObjectHeap;
ThreadAssertion m_consumeThread NO_UNIQUE_ADDRESS;
};
Modified: trunk/Source/WebKit/GPUProcess/webrtc/RemoteSampleBufferDisplayLayer.messages.in (290357 => 290358)
--- trunk/Source/WebKit/GPUProcess/webrtc/RemoteSampleBufferDisplayLayer.messages.in 2022-02-23 08:50:18 UTC (rev 290357)
+++ trunk/Source/WebKit/GPUProcess/webrtc/RemoteSampleBufferDisplayLayer.messages.in 2022-02-23 09:12:02 UTC (rev 290358)
@@ -32,8 +32,7 @@
UpdateBoundsAndPosition(CGRect bounds, WebCore::MediaSample::VideoRotation rotation)
Flush()
FlushAndRemoveImage()
- EnqueueSample(WebKit::RemoteVideoFrameReadReference sample)
- EnqueueSampleCV(WebCore::RemoteVideoSample sample)
+ Enqueue(struct WebKit::SharedVideoFrame frame)
ClearEnqueuedSamples()
Play()
Pause()
Modified: trunk/Source/WebKit/WebProcess/GPU/webrtc/RemoteVideoFrameObjectHeapProxyProcessor.cpp (290357 => 290358)
--- trunk/Source/WebKit/WebProcess/GPU/webrtc/RemoteVideoFrameObjectHeapProxyProcessor.cpp 2022-02-23 08:50:18 UTC (rev 290357)
+++ trunk/Source/WebKit/WebProcess/GPU/webrtc/RemoteVideoFrameObjectHeapProxyProcessor.cpp 2022-02-23 09:12:02 UTC (rev 290358)
@@ -28,6 +28,7 @@
#if PLATFORM(COCOA) && ENABLE(GPU_PROCESS) && ENABLE(VIDEO)
+#include "RemoteVideoFrameObjectHeap.h"
#include "RemoteVideoFrameObjectHeapMessages.h"
#include "RemoteVideoFrameObjectHeapProxyProcessorMessages.h"
#include "RemoteVideoFrameProxy.h"
@@ -42,6 +43,7 @@
RemoteVideoFrameObjectHeapProxyProcessor::RemoteVideoFrameObjectHeapProxyProcessor(GPUProcessConnection& connection)
: m_connectionID(connection.connection().uniqueID())
, m_queue(WorkQueue::create("RemoteVideoFrameObjectHeapProxy", WorkQueue::QOS::UserInteractive))
+ , m_sharedVideoFrameReader(nullptr)
{
connection.addClient(*this);
connection.connection().addWorkQueueMessageReceiver(Messages::RemoteVideoFrameObjectHeapProxyProcessor::messageReceiverName(), m_queue, this);
Modified: trunk/Source/WebKit/WebProcess/GPU/webrtc/SampleBufferDisplayLayer.cpp (290357 => 290358)
--- trunk/Source/WebKit/WebProcess/GPU/webrtc/SampleBufferDisplayLayer.cpp 2022-02-23 08:50:18 UTC (rev 290357)
+++ trunk/Source/WebKit/WebProcess/GPU/webrtc/SampleBufferDisplayLayer.cpp 2022-02-23 09:12:02 UTC (rev 290358)
@@ -128,35 +128,19 @@
m_connection->send(Messages::RemoteSampleBufferDisplayLayer::Pause { }, m_identifier);
}
-bool SampleBufferDisplayLayer::copySharedVideoFrame(CVPixelBufferRef pixelBuffer)
-{
- if (!pixelBuffer)
- return false;
- return m_sharedVideoFrameWriter.write(pixelBuffer,
- [this](auto& semaphore) { m_connection->send(Messages::RemoteSampleBufferDisplayLayer::SetSharedVideoFrameSemaphore { semaphore }, m_identifier); },
- [this](auto& handle) { m_connection->send(Messages::RemoteSampleBufferDisplayLayer::SetSharedVideoFrameMemory { handle }, m_identifier); }
- );
-}
-
void SampleBufferDisplayLayer::enqueueSample(MediaSample& sample)
{
if (m_paused)
return;
- if (is<RemoteVideoFrameProxy>(sample)) {
- auto& remoteSample = downcast<RemoteVideoFrameProxy>(sample);
- m_connection->send(Messages::RemoteSampleBufferDisplayLayer::EnqueueSample { remoteSample.read() }, m_identifier);
+ auto sharedVideoFrame = m_sharedVideoFrameWriter.write(sample,
+ [this](auto& semaphore) { m_connection->send(Messages::RemoteSampleBufferDisplayLayer::SetSharedVideoFrameSemaphore { semaphore }, m_identifier); },
+ [this](auto& handle) { m_connection->send(Messages::RemoteSampleBufferDisplayLayer::SetSharedVideoFrameMemory { handle }, m_identifier); }
+ );
+ if (!sharedVideoFrame)
return;
- }
- auto remoteSample = RemoteVideoSample::create(sample, RemoteVideoSample::ShouldCheckForIOSurface::No);
- if (!remoteSample->surface()) {
- // buffer is not IOSurface, we need to copy to shared video frame.
- if (!copySharedVideoFrame(remoteSample->imageBuffer()))
- return;
- }
-
- m_connection->send(Messages::RemoteSampleBufferDisplayLayer::EnqueueSampleCV { *remoteSample }, m_identifier);
+ m_connection->send(Messages::RemoteSampleBufferDisplayLayer::Enqueue { *sharedVideoFrame }, m_identifier);
}
void SampleBufferDisplayLayer::clearEnqueuedSamples()
Modified: trunk/Source/WebKit/WebProcess/GPU/webrtc/SampleBufferDisplayLayer.h (290357 => 290358)
--- trunk/Source/WebKit/WebProcess/GPU/webrtc/SampleBufferDisplayLayer.h 2022-02-23 08:50:18 UTC (rev 290357)
+++ trunk/Source/WebKit/WebProcess/GPU/webrtc/SampleBufferDisplayLayer.h 2022-02-23 09:12:02 UTC (rev 290358)
@@ -75,7 +75,6 @@
void gpuProcessConnectionDidClose(GPUProcessConnection&) final;
void setDidFail(bool);
- bool copySharedVideoFrame(CVPixelBufferRef);
GPUProcessConnection* m_gpuProcessConnection;
WeakPtr<SampleBufferDisplayLayerManager> m_manager;
Modified: trunk/Source/WebKit/WebProcess/GPU/webrtc/SharedVideoFrame.cpp (290357 => 290358)
--- trunk/Source/WebKit/WebProcess/GPU/webrtc/SharedVideoFrame.cpp 2022-02-23 08:50:18 UTC (rev 290357)
+++ trunk/Source/WebKit/WebProcess/GPU/webrtc/SharedVideoFrame.cpp 2022-02-23 09:12:02 UTC (rev 290358)
@@ -29,9 +29,17 @@
#if ENABLE(GPU_PROCESS) && PLATFORM(COCOA) && ENABLE(VIDEO)
#include "Logging.h"
+#include "RemoteVideoFrameObjectHeap.h"
+#include "RemoteVideoFrameProxy.h"
+#include <WebCore/CVUtilities.h>
+#include <WebCore/IOSurface.h>
+#include <WebCore/MediaSampleAVFObjC.h>
#include <WebCore/SharedVideoFrameInfo.h>
#include <wtf/Scope.h>
+#include <pal/cf/CoreMediaSoftLink.h>
+#include <WebCore/CoreVideoSoftLink.h>
+
namespace WebKit {
using namespace WebCore;
@@ -80,6 +88,26 @@
return true;
}
+std::optional<SharedVideoFrame> SharedVideoFrameWriter::write(MediaSample& frame, const Function<void(IPC::Semaphore&)>& newSemaphoreCallback, const Function<void(const SharedMemory::IPCHandle&)>& newMemoryCallback)
+{
+ SharedVideoFrame sharedVideoFrame { frame.presentationTime(), frame.videoMirrored(), frame.videoRotation(), nullptr };
+ if (is<RemoteVideoFrameProxy>(frame)) {
+ sharedVideoFrame.buffer = downcast<RemoteVideoFrameProxy>(frame).read();
+ return sharedVideoFrame;
+ }
+ if (is<MediaSampleAVFObjC>(frame)) {
+ auto pixelBuffer = downcast<MediaSampleAVFObjC>(frame).pixelBuffer();
+ IOSurfaceRef surface = pixelBuffer ? CVPixelBufferGetIOSurface(pixelBuffer) : nullptr;
+ if (surface) {
+ sharedVideoFrame.buffer = MachSendRight::adopt(IOSurfaceCreateMachPort(surface));
+ return sharedVideoFrame;
+ }
+ }
+ if (!write(frame.pixelBuffer(), newSemaphoreCallback, newMemoryCallback))
+ return { };
+ return sharedVideoFrame;
+}
+
bool SharedVideoFrameWriter::write(CVPixelBufferRef pixelBuffer, const Function<void(IPC::Semaphore&)>& newSemaphoreCallback, const Function<void(const SharedMemory::IPCHandle&)>& newMemoryCallback)
{
auto info = SharedVideoFrameInfo::fromCVPixelBuffer(pixelBuffer);
@@ -94,7 +122,7 @@
{
auto info = SharedVideoFrameInfo::fromVideoFrame(frame);
if (!prepareWriting(info, newSemaphoreCallback, newMemoryCallback))
- return false;
+ return { };
return info.writeVideoFrame(frame, static_cast<uint8_t*>(m_storage->data()));
}
@@ -106,6 +134,12 @@
m_semaphore->signal();
}
+SharedVideoFrameReader::SharedVideoFrameReader(RefPtr<RemoteVideoFrameObjectHeap>&& objectHeap, UseIOSurfaceBufferPool useIOSurfaceBufferPool)
+ : m_objectHeap(WTFMove(objectHeap))
+ , m_useIOSurfaceBufferPool(useIOSurfaceBufferPool)
+{
+}
+
RetainPtr<CVPixelBufferRef> SharedVideoFrameReader::read()
{
if (!m_storage)
@@ -129,6 +163,34 @@
return info->createPixelBufferFromMemory(data + SharedVideoFrameInfoEncodingLength, pixelBufferPool(*info));
}
+RefPtr<MediaSample> SharedVideoFrameReader::read(SharedVideoFrame&& sharedVideoFrame)
+{
+ auto pixelBuffer = switchOn(WTFMove(sharedVideoFrame.buffer),
+ [this](RemoteVideoFrameReadReference&& reference) -> RetainPtr<CVPixelBufferRef> {
+ ASSERT(m_objectHeap);
+ if (!m_objectHeap)
+ return nullptr;
+
+ auto sample = m_objectHeap->retire(WTFMove(reference), 0_s);
+ ASSERT(sample && sample->pixelBuffer());
+ if (!sample)
+ return nullptr;
+ return sample->pixelBuffer();
+ } , [](MachSendRight&& sendRight) -> RetainPtr<CVPixelBufferRef> {
+ auto surface = WebCore::IOSurface::createFromSendRight(WTFMove(sendRight), DestinationColorSpace::SRGB());
+ if (!surface)
+ return nullptr;
+ return WebCore::createCVPixelBuffer(surface->surface()).value_or(nullptr);
+ }, [this](std::nullptr_t representation) -> RetainPtr<CVPixelBufferRef> {
+ return read();
+ });
+
+ if (!pixelBuffer)
+ return nullptr;
+
+ return MediaSampleAVFObjC::createImageSample(WTFMove(pixelBuffer), sharedVideoFrame.rotation, sharedVideoFrame.mirrored, sharedVideoFrame.time);
+}
+
CVPixelBufferPoolRef SharedVideoFrameReader::pixelBufferPool(const SharedVideoFrameInfo& info)
{
if (m_useIOSurfaceBufferPool == UseIOSurfaceBufferPool::No)
Modified: trunk/Source/WebKit/WebProcess/GPU/webrtc/SharedVideoFrame.h (290357 => 290358)
--- trunk/Source/WebKit/WebProcess/GPU/webrtc/SharedVideoFrame.h 2022-02-23 08:50:18 UTC (rev 290357)
+++ trunk/Source/WebKit/WebProcess/GPU/webrtc/SharedVideoFrame.h 2022-02-23 09:12:02 UTC (rev 290358)
@@ -28,7 +28,9 @@
#if ENABLE(GPU_PROCESS) && PLATFORM(COCOA) && ENABLE(VIDEO)
#include "IPCSemaphore.h"
+#include "RemoteVideoFrameIdentifier.h"
#include "SharedMemory.h"
+#include <WebCore/MediaSample.h>
#include <wtf/RefPtr.h>
#include <wtf/RetainPtr.h>
#include <wtf/UniqueRef.h>
@@ -46,11 +48,24 @@
namespace WebKit {
+class RemoteVideoFrameObjectHeap;
+
+struct SharedVideoFrame {
+ MediaTime time;
+ bool mirrored { false };
+ WebCore::MediaSample::VideoRotation rotation { WebCore::MediaSample::VideoRotation::None };
+ std::variant<std::nullptr_t, RemoteVideoFrameReadReference, MachSendRight> buffer;
+
+ template<class Encoder> void encode(Encoder&) const;
+ template<class Decoder> static std::optional<SharedVideoFrame> decode(Decoder&);
+};
+
class SharedVideoFrameWriter {
WTF_MAKE_FAST_ALLOCATED;
public:
SharedVideoFrameWriter();
+ std::optional<SharedVideoFrame> write(WebCore::MediaSample&, const Function<void(IPC::Semaphore&)>&, const Function<void(const SharedMemory::IPCHandle&)>&);
bool write(CVPixelBufferRef, const Function<void(IPC::Semaphore&)>&, const Function<void(const SharedMemory::IPCHandle&)>&);
#if USE(LIBWEBRTC)
bool write(const webrtc::VideoFrame&, const Function<void(IPC::Semaphore&)>&, const Function<void(const SharedMemory::IPCHandle&)>&);
@@ -72,16 +87,18 @@
WTF_MAKE_FAST_ALLOCATED;
public:
enum class UseIOSurfaceBufferPool { No, Yes };
- explicit SharedVideoFrameReader(UseIOSurfaceBufferPool = UseIOSurfaceBufferPool::Yes);
+ explicit SharedVideoFrameReader(RefPtr<RemoteVideoFrameObjectHeap>&&, UseIOSurfaceBufferPool = UseIOSurfaceBufferPool::Yes);
void setSemaphore(IPC::Semaphore&& semaphore) { m_semaphore = WTFMove(semaphore); }
bool setSharedMemory(const SharedMemory::IPCHandle&);
RetainPtr<CVPixelBufferRef> read();
+ RefPtr<WebCore::MediaSample> read(SharedVideoFrame&&);
private:
CVPixelBufferPoolRef pixelBufferPool(const WebCore::SharedVideoFrameInfo&);
+ RefPtr<RemoteVideoFrameObjectHeap> m_objectHeap;
UseIOSurfaceBufferPool m_useIOSurfaceBufferPool;
IPC::Semaphore m_semaphore;
RefPtr<SharedMemory> m_storage;
@@ -92,11 +109,58 @@
uint32_t m_bufferPoolHeight { 0 };
};
-inline SharedVideoFrameReader::SharedVideoFrameReader(UseIOSurfaceBufferPool useIOSurfaceBufferPool)
- : m_useIOSurfaceBufferPool(useIOSurfaceBufferPool)
+template<class Encoder> void SharedVideoFrame::encode(Encoder& encoder) const
{
+ encoder << time;
+ encoder << mirrored;
+ encoder << rotation;
+
+ switchOn(buffer,
+ [&](std::nullptr_t representation) {
+ encoder << (uint8_t)0;
+ }, [&](const RemoteVideoFrameReadReference& reference) {
+ encoder << (uint8_t)1;
+ encoder << reference;
+ } , [&](const MachSendRight& sendRight) {
+ encoder << (uint8_t)2;
+ encoder << sendRight;
+ });
}
+template<class Decoder> std::optional<SharedVideoFrame> SharedVideoFrame::decode(Decoder& decoder)
+{
+ SharedVideoFrame frame;
+ if (!decoder.decode(frame.time))
+ return { };
+
+ if (!decoder.decode(frame.mirrored))
+ return { };
+
+ if (!decoder.decode(frame.rotation))
+ return { };
+
+ uint8_t bufferType;
+ if (!decoder.decode(bufferType))
+ return { };
+
+ if (bufferType > 2)
+ return { };
+
+ if (bufferType == 1) {
+ std::optional<RemoteVideoFrameReadReference> reference;
+ decoder >> reference;
+ if (!reference)
+ return { };
+ frame.buffer = WTFMove(*reference);
+ } else if (bufferType == 2) {
+ MachSendRight sendRight;
+ if (!decoder.decode(sendRight))
+ return { };
+ frame.buffer = WTFMove(sendRight);
+ }
+ return frame;
}
+}
+
#endif