Diff
Modified: trunk/Source/WebKit/ChangeLog (289049 => 289050)
--- trunk/Source/WebKit/ChangeLog 2022-02-03 14:07:43 UTC (rev 289049)
+++ trunk/Source/WebKit/ChangeLog 2022-02-03 14:31:17 UTC (rev 289050)
@@ -1,5 +1,29 @@
2022-02-03 Youenn Fablet <[email protected]>
+ MediaRecorderPrivate should not need to create IOSurfaces
+ https://bugs.webkit.org/show_bug.cgi?id=235953
+
+ Reviewed by Eric Carlson.
+
+ Make use of SharedVideoFrameReader and SharedVideoFrameWriter between MediaRecorderPrivate and RemoteMediaRecorder.
+ Covered by existing MediaRecorder tests.
+
+ * GPUProcess/webrtc/RemoteMediaRecorder.cpp:
+ (WebKit::RemoteMediaRecorder::videoSampleAvailable):
+ (WebKit::RemoteMediaRecorder::setSharedVideoFrameSemaphore):
+ (WebKit::RemoteMediaRecorder::setSharedVideoFrameMemory):
+ * GPUProcess/webrtc/RemoteMediaRecorder.h:
+ * GPUProcess/webrtc/RemoteMediaRecorder.messages.in:
+ * SourcesCocoa.txt:
+ * WebKit.xcodeproj/project.pbxproj:
+ * WebProcess/GPU/webrtc/MediaRecorderPrivate.cpp:
+ (WebKit::MediaRecorderPrivate::videoSampleAvailable):
+ (WebKit::MediaRecorderPrivate::copySharedVideoFrame):
+ * WebProcess/GPU/webrtc/MediaRecorderPrivate.h:
+ * WebProcess/GPU/webrtc/MediaRecorderPrivate.mm: Removed.
+
+2022-02-03 Youenn Fablet <[email protected]>
+
SampleBufferDisplayLayer should not need to create IOSurfaces
https://bugs.webkit.org/show_bug.cgi?id=235954
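The handoff described in the new ChangeLog entry above (bug 235953) replaces per-frame IOSurface creation with a single shared-memory frame guarded by a semaphore: the web process copies the pixels in and signals, the GPU process waits and reads them back out. A minimal, self-contained model of that pattern in standard C++ follows; it is an illustration only, not the WebKit SharedVideoFrameReader/SharedVideoFrameWriter implementation, which lives in SharedVideoFrame.h and is not part of this diff.

    #include <cstddef>
    #include <cstring>
    #include <semaphore>
    #include <vector>

    // Stand-ins for the cross-process primitives: the vector plays the role of the
    // SharedMemory region, the binary semaphore the role of IPC::Semaphore.
    struct SharedFrame {
        std::vector<std::byte> storage;
        std::binary_semaphore frameReady { 0 };
    };

    // Producer side (web process role): grow the buffer if needed, copy the frame
    // bytes, then signal the consumer. In WebKit, growing the buffer is the point
    // where a new SharedMemory handle would have to be sent to the GPU process.
    void writeFrame(SharedFrame& shared, const std::byte* pixels, size_t byteLength)
    {
        if (shared.storage.size() < byteLength)
            shared.storage.resize(byteLength);
        std::memcpy(shared.storage.data(), pixels, byteLength);
        shared.frameReady.release();
    }

    // Consumer side (GPU process role): wait for the signal, then copy the frame
    // back out of the shared buffer.
    std::vector<std::byte> readFrame(SharedFrame& shared, size_t byteLength)
    {
        shared.frameReady.acquire();
        return { shared.storage.begin(), shared.storage.begin() + static_cast<std::ptrdiff_t>(byteLength) };
    }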
Modified: trunk/Source/WebKit/GPUProcess/webrtc/RemoteMediaRecorder.cpp (289049 => 289050)
--- trunk/Source/WebKit/GPUProcess/webrtc/RemoteMediaRecorder.cpp 2022-02-03 14:07:43 UTC (rev 289049)
+++ trunk/Source/WebKit/GPUProcess/webrtc/RemoteMediaRecorder.cpp 2022-02-03 14:31:17 UTC (rev 289050)
@@ -33,6 +33,7 @@
#include "SharedRingBufferStorage.h"
#include <WebCore/CARingBuffer.h>
#include <WebCore/ImageTransferSessionVT.h>
+#include <WebCore/MediaSampleAVFObjC.h>
#include <WebCore/RemoteVideoSample.h>
#include <WebCore/WebAudioBufferList.h>
#include <wtf/CompletionHandler.h>
@@ -87,21 +88,30 @@
void RemoteMediaRecorder::videoSampleAvailable(WebCore::RemoteVideoSample&& remoteSample)
{
- if (!m_imageTransferSession || m_imageTransferSession->pixelFormat() != remoteSample.videoFormat())
- m_imageTransferSession = ImageTransferSessionVT::create(remoteSample.videoFormat());
+ RefPtr<MediaSample> sample;
+ if (!remoteSample.surface()) {
+ auto pixelBuffer = m_sharedVideoFrameReader.read();
+ if (!pixelBuffer)
+ return;
- if (!m_imageTransferSession) {
- ASSERT_NOT_REACHED();
- return;
- }
+ sample = MediaSampleAVFObjC::createImageSample(WTFMove(pixelBuffer), remoteSample.rotation(), remoteSample.mirrored());
+ sample->setTimestamps(remoteSample.time(), MediaTime { });
+ } else {
+ if (!m_imageTransferSession || m_imageTransferSession->pixelFormat() != remoteSample.videoFormat())
+ m_imageTransferSession = ImageTransferSessionVT::create(remoteSample.videoFormat());
- auto sampleBuffer = m_imageTransferSession->createMediaSample(remoteSample);
- if (!sampleBuffer) {
- ASSERT_NOT_REACHED();
- return;
+ if (!m_imageTransferSession) {
+ ASSERT_NOT_REACHED();
+ return;
+ }
+
+ sample = m_imageTransferSession->createMediaSample(remoteSample);
+ if (!sample) {
+ ASSERT_NOT_REACHED();
+ return;
+ }
}
-
- m_writer->appendVideoSampleBuffer(*sampleBuffer);
+ m_writer->appendVideoSampleBuffer(*sample);
}
void RemoteMediaRecorder::fetchData(CompletionHandler<void(IPC::DataReference&&, double)>&& completionHandler)
@@ -131,8 +141,22 @@
completionHandler();
}
+void RemoteMediaRecorder::setSharedVideoFrameSemaphore(IPC::Semaphore&& semaphore)
+{
+ m_sharedVideoFrameReader.setSemaphore(WTFMove(semaphore));
+}
+
+void RemoteMediaRecorder::setSharedVideoFrameMemory(const SharedMemory::IPCHandle& ipcHandle)
+{
+ auto memory = SharedMemory::map(ipcHandle.handle, SharedMemory::Protection::ReadOnly);
+ if (!memory)
+ return;
+
+ m_sharedVideoFrameReader.setSharedMemory(memory.releaseNonNull());
+}
+
}
#undef MESSAGE_CHECK
#endif // PLATFORM(COCOA) && ENABLE(GPU_PROCESS) && ENABLE(MEDIA_STREAM)
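RemoteMediaRecorder only consumes the shared frame; the reader it relies on is declared in SharedVideoFrame.h, which is not part of this diff. From the call sites above, its interface is roughly the following (a hypothetical sketch inferred from usage, not the actual header):

    // Hypothetical interface inferred from the calls above (setSemaphore(),
    // setSharedMemory(), read()); the real declaration may differ.
    class SharedVideoFrameReader {
    public:
        void setSemaphore(IPC::Semaphore&&);        // installed by SetSharedVideoFrameSemaphore
        void setSharedMemory(Ref<SharedMemory>&&);  // installed by SetSharedVideoFrameMemory
        RetainPtr<CVPixelBufferRef> read();         // returns null when no frame can be read
    };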
Modified: trunk/Source/WebKit/GPUProcess/webrtc/RemoteMediaRecorder.h (289049 => 289050)
--- trunk/Source/WebKit/GPUProcess/webrtc/RemoteMediaRecorder.h 2022-02-03 14:07:43 UTC (rev 289049)
+++ trunk/Source/WebKit/GPUProcess/webrtc/RemoteMediaRecorder.h 2022-02-03 14:31:17 UTC (rev 289050)
@@ -31,6 +31,7 @@
#include "MediaRecorderIdentifier.h"
#include "MessageReceiver.h"
#include "SharedMemory.h"
+#include "SharedVideoFrame.h"
#include <WebCore/CAAudioStreamDescription.h>
#include <WebCore/MediaRecorderPrivateWriterCocoa.h>
#include <wtf/MediaTime.h>
@@ -76,6 +77,8 @@
void stopRecording(CompletionHandler<void()>&&);
void pause(CompletionHandler<void()>&&);
void resume(CompletionHandler<void()>&&);
+ void setSharedVideoFrameSemaphore(IPC::Semaphore&&);
+ void setSharedVideoFrameMemory(const SharedMemory::IPCHandle&);
GPUConnectionToWebProcess& m_gpuConnectionToWebProcess;
MediaRecorderIdentifier m_identifier;
@@ -85,6 +88,8 @@
std::unique_ptr<WebCore::CARingBuffer> m_ringBuffer;
std::unique_ptr<WebCore::WebAudioBufferList> m_audioBufferList;
std::unique_ptr<WebCore::ImageTransferSessionVT> m_imageTransferSession;
+
+ SharedVideoFrameReader m_sharedVideoFrameReader;
};
}
Modified: trunk/Source/WebKit/GPUProcess/webrtc/RemoteMediaRecorder.messages.in (289049 => 289050)
--- trunk/Source/WebKit/GPUProcess/webrtc/RemoteMediaRecorder.messages.in 2022-02-03 14:07:43 UTC (rev 289049)
+++ trunk/Source/WebKit/GPUProcess/webrtc/RemoteMediaRecorder.messages.in 2022-02-03 14:31:17 UTC (rev 289050)
@@ -31,6 +31,8 @@
StopRecording() -> () Async
Pause() -> () Async
Resume() -> () Async
+ SetSharedVideoFrameSemaphore(IPC::Semaphore semaphore)
+ SetSharedVideoFrameMemory(WebKit::SharedMemory::IPCHandle storageHandle)
}
#endif
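The two new messages are one-way (they declare no reply), unlike the messages above that complete with an asynchronous reply. The web process sends them from the callbacks it passes to SharedVideoFrameWriter::write(), presumably whenever a new semaphore or shared-memory region needs to be handed over; the matching senders appear in MediaRecorderPrivate.cpp further down in this diff:

    // Web-process senders for the new messages (see MediaRecorderPrivate::copySharedVideoFrame below).
    m_connection->send(Messages::RemoteMediaRecorder::SetSharedVideoFrameSemaphore { semaphore }, m_identifier);
    m_connection->send(Messages::RemoteMediaRecorder::SetSharedVideoFrameMemory { handle }, m_identifier);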
Modified: trunk/Source/WebKit/SourcesCocoa.txt (289049 => 289050)
--- trunk/Source/WebKit/SourcesCocoa.txt 2022-02-03 14:07:43 UTC (rev 289049)
+++ trunk/Source/WebKit/SourcesCocoa.txt 2022-02-03 14:31:17 UTC (rev 289050)
@@ -632,7 +632,6 @@
WebProcess/GPU/media/ios/RemoteMediaSessionHelper.cpp
WebProcess/GPU/webrtc/AudioMediaStreamTrackRendererInternalUnitManager.cpp
WebProcess/GPU/webrtc/LibWebRTCCodecs.mm
-WebProcess/GPU/webrtc/MediaRecorderPrivate.mm
WebProcess/InjectedBundle/API/c/WKBundlePageBanner.cpp
Modified: trunk/Source/WebKit/WebProcess/GPU/webrtc/MediaRecorderPrivate.cpp (289049 => 289050)
--- trunk/Source/WebKit/WebProcess/GPU/webrtc/MediaRecorderPrivate.cpp 2022-02-03 14:07:43 UTC (rev 289049)
+++ trunk/Source/WebKit/WebProcess/GPU/webrtc/MediaRecorderPrivate.cpp 2022-02-03 14:31:17 UTC (rev 289050)
@@ -96,26 +96,34 @@
if (!m_blackFrame) {
auto blackFrameDescription = CMSampleBufferGetFormatDescription(sample.platformSample().sample.cmSampleBuffer);
auto dimensions = CMVideoFormatDescriptionGetDimensions(blackFrameDescription);
- auto blackFrame = createBlackPixelBuffer(dimensions.width, dimensions.height);
- // FIXME: We convert to get an IOSurface. We could optimize this.
- m_blackFrame = convertToBGRA(blackFrame.get());
+ m_blackFrame = createBlackPixelBuffer(dimensions.width, dimensions.height);
}
- remoteSample = RemoteVideoSample::create(m_blackFrame.get(), sample.presentationTime(), sample.videoRotation());
+ remoteSample = RemoteVideoSample::create(m_blackFrame.get(), sample.presentationTime(), sample.videoRotation(), RemoteVideoSample::ShouldCheckForIOSurface::No);
} else {
m_blackFrame = nullptr;
- remoteSample = RemoteVideoSample::create(sample);
- if (!remoteSample) {
- // FIXME: Optimize this code path.
- auto pixelBuffer = static_cast<CVPixelBufferRef>(CMSampleBufferGetImageBuffer(sample.platformSample().sample.cmSampleBuffer));
- auto newPixelBuffer = convertToBGRA(pixelBuffer);
- remoteSample = RemoteVideoSample::create(newPixelBuffer.get(), sample.presentationTime(), sample.videoRotation());
- }
+ remoteSample = RemoteVideoSample::create(sample, RemoteVideoSample::ShouldCheckForIOSurface::No);
}
- if (remoteSample)
- m_connection->send(Messages::RemoteMediaRecorder::VideoSampleAvailable { WTFMove(*remoteSample) }, m_identifier);
+ if (!remoteSample->surface()) {
+ // The buffer is not IOSurface-backed, so copy it into the shared video frame memory.
+ if (!copySharedVideoFrame(remoteSample->imageBuffer()))
+ return;
+ }
+
+ m_connection->send(Messages::RemoteMediaRecorder::VideoSampleAvailable { WTFMove(*remoteSample) }, m_identifier);
}
+
+bool MediaRecorderPrivate::copySharedVideoFrame(CVPixelBufferRef pixelBuffer)
+{
+ if (!pixelBuffer)
+ return false;
+ return m_sharedVideoFrameWriter.write(pixelBuffer,
+ [this](auto& semaphore) { m_connection->send(Messages::RemoteMediaRecorder::SetSharedVideoFrameSemaphore { semaphore }, m_identifier); },
+ [this](auto& handle) { m_connection->send(Messages::RemoteMediaRecorder::SetSharedVideoFrameMemory { handle }, m_identifier); }
+ );
+}
+
void MediaRecorderPrivate::audioSamplesAvailable(const MediaTime& time, const PlatformAudioData& audioData, const AudioStreamDescription& description, size_t numberOfFrames)
{
// Heap allocations are forbidden on the audio thread for performance reasons so we need to
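The writer counterpart is likewise declared in SharedVideoFrame.h rather than in this diff. Judging only from the call in copySharedVideoFrame() above, its shape is roughly as follows (a hypothetical sketch; the callback parameter types are assumptions, since the lambdas above take auto&):

    // Hypothetical interface inferred from the call above; not the actual header.
    class SharedVideoFrameWriter {
    public:
        // Copies the pixel buffer into the shared-memory frame and signals the
        // reader in the GPU process. The callbacks are invoked when a freshly
        // created semaphore or a (re)allocated shared-memory handle must first be
        // sent to the other side. Returns false if the copy could not be done.
        bool write(CVPixelBufferRef,
            const Function<void(IPC::Semaphore&)>& newSemaphoreCallback,
            const Function<void(SharedMemory::IPCHandle&)>& newMemoryCallback);
    };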
Modified: trunk/Source/WebKit/WebProcess/GPU/webrtc/MediaRecorderPrivate.h (289049 => 289050)
--- trunk/Source/WebKit/WebProcess/GPU/webrtc/MediaRecorderPrivate.h 2022-02-03 14:07:43 UTC (rev 289049)
+++ trunk/Source/WebKit/WebProcess/GPU/webrtc/MediaRecorderPrivate.h 2022-02-03 14:31:17 UTC (rev 289050)
@@ -29,9 +29,9 @@
#include "MediaRecorderIdentifier.h"
#include "SharedRingBufferStorage.h"
+#include "SharedVideoFrame.h"
#include <WebCore/MediaRecorderPrivate.h>
-#include <WebCore/PixelBufferConformerCV.h>
#include <wtf/MediaTime.h>
#include <wtf/WeakPtr.h>
@@ -66,7 +66,7 @@
void resumeRecording(CompletionHandler<void()>&&) final;
void storageChanged(SharedMemory*, const WebCore::CAAudioStreamDescription& format, size_t frameCount);
- RetainPtr<CVPixelBufferRef> convertToBGRA(CVPixelBufferRef);
+ bool copySharedVideoFrame(CVPixelBufferRef);
MediaRecorderIdentifier m_identifier;
Ref<WebCore::MediaStreamPrivate> m_stream;
@@ -81,7 +81,7 @@
bool m_hasVideo { false };
bool m_isStopped { false };
- std::unique_ptr<WebCore::PixelBufferConformerCV> m_pixelBufferConformer;
+ SharedVideoFrameWriter m_sharedVideoFrameWriter;
};
}
Deleted: trunk/Source/WebKit/WebProcess/GPU/webrtc/MediaRecorderPrivate.mm (289049 => 289050)
--- trunk/Source/WebKit/WebProcess/GPU/webrtc/MediaRecorderPrivate.mm 2022-02-03 14:07:43 UTC (rev 289049)
+++ trunk/Source/WebKit/WebProcess/GPU/webrtc/MediaRecorderPrivate.mm 2022-02-03 14:31:17 UTC (rev 289050)
@@ -1,45 +0,0 @@
-/*
- * Copyright (C) 2020 Apple Inc. All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions
- * are met:
- * 1. Redistributions of source code must retain the above copyright
- * notice, this list of conditions and the following disclaimer.
- * 2. Redistributions in binary form must reproduce the above copyright
- * notice, this list of conditions and the following disclaimer in the
- * documentation and/or other materials provided with the distribution.
- *
- * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
- * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
- * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
- * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
- * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
- * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
- * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
- * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
- * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
- * THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-#include "config.h"
-#include "MediaRecorderPrivate.h"
-
-#if PLATFORM(COCOA) && ENABLE(GPU_PROCESS) && ENABLE(MEDIA_STREAM)
-
-#include <WebCore/CoreVideoSoftLink.h>
-
-namespace WebKit {
-using namespace WebCore;
-
-RetainPtr<CVPixelBufferRef> MediaRecorderPrivate::convertToBGRA(CVPixelBufferRef pixelBuffer)
-{
- if (!m_pixelBufferConformer)
- m_pixelBufferConformer = makeUnique<PixelBufferConformerCV>((__bridge CFDictionaryRef)@{ (__bridge NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA) });
- return m_pixelBufferConformer->convert(pixelBuffer);
-}
-
-}
-
-#endif