Diff
Modified: trunk/Source/WebCore/ChangeLog (291549 => 291550)
--- trunk/Source/WebCore/ChangeLog 2022-03-21 02:11:50 UTC (rev 291549)
+++ trunk/Source/WebCore/ChangeLog 2022-03-21 07:14:14 UTC (rev 291550)
@@ -1,3 +1,29 @@
+2022-03-21 Youenn Fablet <you...@apple.com>
+
+ Remove use of MediaSampleAVFObjC from WebRTC pipelines
+ https://bugs.webkit.org/show_bug.cgi?id=237706
+ <rdar://problem/90425391>
+
+ Reviewed by Eric Carlson.
+
+ Replace MediaSampleAVFObjC by VideoFrameCV when handling CVPixelBuffers.
+
+ Covered by existing tests.
+
+ * platform/graphics/avfoundation/objc/MediaSampleAVFObjC.h:
+ * platform/graphics/avfoundation/objc/MediaSampleAVFObjC.mm:
+ * platform/graphics/cv/ImageRotationSessionVT.mm:
+ * platform/graphics/cv/ImageTransferSessionVT.h:
+ * platform/graphics/cv/ImageTransferSessionVT.mm:
+ * platform/graphics/cv/VideoFrameCV.h:
+ * platform/graphics/cv/VideoFrameCV.mm:
+ * platform/mediastream/RealtimeVideoSource.cpp:
+ * platform/mediastream/RealtimeVideoSource.h:
+ * platform/mediastream/mac/AVVideoCaptureSource.mm:
+ * platform/mediastream/mac/MockRealtimeVideoSourceMac.mm:
+ * platform/mediastream/mac/RealtimeIncomingVideoSourceCocoa.h:
+ * platform/mediastream/mac/RealtimeIncomingVideoSourceCocoa.mm:
+
2022-03-20 Diego Pino Garcia <dp...@igalia.com>
[WPE] Unreviewed, fix non-unified build after r291474 and r291508
Modified: trunk/Source/WebCore/platform/graphics/avfoundation/objc/MediaSampleAVFObjC.h (291549 => 291550)
--- trunk/Source/WebCore/platform/graphics/avfoundation/objc/MediaSampleAVFObjC.h 2022-03-21 02:11:50 UTC (rev 291549)
+++ trunk/Source/WebCore/platform/graphics/avfoundation/objc/MediaSampleAVFObjC.h 2022-03-21 07:14:14 UTC (rev 291550)
@@ -44,12 +44,7 @@
static Ref<MediaSampleAVFObjC> create(CMSampleBufferRef sample, uint64_t trackID) { return adoptRef(*new MediaSampleAVFObjC(sample, trackID)); }
static Ref<MediaSampleAVFObjC> create(CMSampleBufferRef sample, AtomString trackID) { return adoptRef(*new MediaSampleAVFObjC(sample, trackID)); }
static Ref<MediaSampleAVFObjC> create(CMSampleBufferRef sample, VideoRotation rotation = VideoRotation::None, bool mirrored = false) { return adoptRef(*new MediaSampleAVFObjC(sample, rotation, mirrored)); }
- static RefPtr<MediaSampleAVFObjC> createFromPixelBuffer(PixelBuffer&&);
- WEBCORE_EXPORT static RefPtr<MediaSampleAVFObjC> createFromPixelBuffer(RetainPtr<CVPixelBufferRef>&&, VideoRotation, bool mirrored, MediaTime presentationTime = { }, MediaTime decodingTime = { });
- WEBCORE_EXPORT static void setAsDisplayImmediately(MediaSample&);
- static RetainPtr<CMSampleBufferRef> cloneSampleBufferAndSetAsDisplayImmediately(CMSampleBufferRef);
-
WEBCORE_EXPORT RefPtr<JSC::Uint8ClampedArray> getRGBAImageData() const override;
MediaTime presentationTime() const override;
Modified: trunk/Source/WebCore/platform/graphics/avfoundation/objc/MediaSampleAVFObjC.mm (291549 => 291550)
--- trunk/Source/WebCore/platform/graphics/avfoundation/objc/MediaSampleAVFObjC.mm 2022-03-21 02:11:50 UTC (rev 291549)
+++ trunk/Source/WebCore/platform/graphics/avfoundation/objc/MediaSampleAVFObjC.mm 2022-03-21 07:14:14 UTC (rev 291550)
@@ -75,58 +75,6 @@
MediaSampleAVFObjC::~MediaSampleAVFObjC() = default;
-RefPtr<MediaSampleAVFObjC> MediaSampleAVFObjC::createFromPixelBuffer(PixelBuffer&& pixelBuffer)
-{
- auto size = pixelBuffer.size();
- auto width = size.width();
- auto height = size.height();
-
- auto data = pixelBuffer.takeData();
- auto dataBaseAddress = data->data();
- auto leakedData = &data.leakRef();
-
- auto derefBuffer = [] (void* context, const void*) {
- static_cast<JSC::Uint8ClampedArray*>(context)->deref();
- };
-
- CVPixelBufferRef cvPixelBufferRaw = nullptr;
- auto status = CVPixelBufferCreateWithBytes(kCFAllocatorDefault, width, height, kCVPixelFormatType_32BGRA, dataBaseAddress, width * 4, derefBuffer, leakedData, nullptr, &cvPixelBufferRaw);
-
- auto cvPixelBuffer = adoptCF(cvPixelBufferRaw);
- if (!cvPixelBuffer) {
- derefBuffer(leakedData, nullptr);
- return nullptr;
- }
- ASSERT_UNUSED(status, !status);
- return createFromPixelBuffer(WTFMove(cvPixelBuffer), VideoRotation::None, false);
-}
-
-RefPtr<MediaSampleAVFObjC> MediaSampleAVFObjC::createFromPixelBuffer(RetainPtr<CVPixelBufferRef>&& pixelBuffer, VideoRotation rotation, bool mirrored, MediaTime presentationTime, MediaTime decodingTime)
-{
- CMVideoFormatDescriptionRef formatDescriptionRaw = nullptr;
- auto status = PAL::CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer.get(), &formatDescriptionRaw);
- if (status || !formatDescriptionRaw) {
- ASSERT_NOT_REACHED();
- return nullptr;
- }
- auto formatDescription = adoptCF(formatDescriptionRaw);
-
- CMSampleTimingInfo sampleTimingInformation = { PAL::kCMTimeInvalid, PAL::toCMTime(presentationTime), PAL::toCMTime(decodingTime) };
- CMSampleBufferRef sampleBufferRaw = nullptr;
- status = PAL::CMSampleBufferCreateReadyWithImageBuffer(kCFAllocatorDefault, pixelBuffer.get(), formatDescription.get(), &sampleTimingInformation, &sampleBufferRaw);
- if (status || !sampleBufferRaw) {
- ASSERT_NOT_REACHED();
- return nullptr;
- }
- auto sampleBuffer = adoptCF(sampleBufferRaw);
- CFArrayRef attachmentsArray = PAL::CMSampleBufferGetSampleAttachmentsArray(sampleBuffer.get(), true);
- for (CFIndex i = 0, count = CFArrayGetCount(attachmentsArray); i < count; ++i) {
- CFMutableDictionaryRef attachments = checked_cf_cast<CFMutableDictionaryRef>(CFArrayGetValueAtIndex(attachmentsArray, i));
- CFDictionarySetValue(attachments, PAL::kCMSampleAttachmentKey_DisplayImmediately, kCFBooleanTrue);
- }
- return create(sampleBuffer.get(), rotation, mirrored);
-}
-
MediaTime MediaSampleAVFObjC::presentationTime() const
{
auto timeStamp = PAL::CMSampleBufferGetOutputPresentationTimeStamp(m_sample.get());
@@ -397,11 +345,6 @@
}
}
-void MediaSampleAVFObjC::setAsDisplayImmediately(MediaSample& sample)
-{
- setSampleBufferAsDisplayImmediately(sample.platformSample().sample.cmSampleBuffer);
-}
-
bool MediaSampleAVFObjC::isHomogeneous() const
{
CFArrayRef attachmentsArray = PAL::CMSampleBufferGetSampleAttachmentsArray(m_sample.get(), true);
@@ -472,39 +415,6 @@
return samples;
}
-RetainPtr<CMSampleBufferRef> MediaSampleAVFObjC::cloneSampleBufferAndSetAsDisplayImmediately(CMSampleBufferRef sample)
-{
- auto pixelBuffer = static_cast<CVImageBufferRef>(PAL::CMSampleBufferGetImageBuffer(sample));
- if (!pixelBuffer)
- return nullptr;
-
- CMVideoFormatDescriptionRef formatDescription = nullptr;
- auto status = PAL::CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, &formatDescription);
- if (status)
- return nullptr;
- auto retainedFormatDescription = adoptCF(formatDescription);
-
- CMItemCount itemCount = 0;
- status = PAL::CMSampleBufferGetSampleTimingInfoArray(sample, 0, nullptr, &itemCount);
- if (status)
- return nullptr;
-
- Vector<CMSampleTimingInfo> timingInfoArray;
- timingInfoArray.grow(itemCount);
- status = PAL::CMSampleBufferGetSampleTimingInfoArray(sample, itemCount, timingInfoArray.data(), nullptr);
- if (status)
- return nullptr;
-
- CMSampleBufferRef newSampleBuffer;
- status = PAL::CMSampleBufferCreateReadyWithImageBuffer(kCFAllocatorDefault, pixelBuffer, formatDescription, timingInfoArray.data(), &newSampleBuffer);
- if (status)
- return nullptr;
-
- setSampleBufferAsDisplayImmediately(newSampleBuffer);
-
- return adoptCF(newSampleBuffer);
-}
-
CVPixelBufferRef MediaSampleAVFObjC::pixelBuffer() const
{
return static_cast<CVPixelBufferRef>(PAL::CMSampleBufferGetImageBuffer(m_sample.get()));
Modified: trunk/Source/WebCore/platform/graphics/cv/ImageRotationSessionVT.mm (291549 => 291550)
--- trunk/Source/WebCore/platform/graphics/cv/ImageRotationSessionVT.mm 2022-03-21 02:11:50 UTC (rev 291549)
+++ trunk/Source/WebCore/platform/graphics/cv/ImageRotationSessionVT.mm 2022-03-21 07:14:14 UTC (rev 291550)
@@ -127,9 +127,9 @@
return result;
}
-RetainPtr<CVPixelBufferRef> ImageRotationSessionVT::rotate(MediaSample& sample, const RotationProperties& rotation, IsCGImageCompatible cgImageCompatible)
+RetainPtr<CVPixelBufferRef> ImageRotationSessionVT::rotate(MediaSample& videoFrame, const RotationProperties& rotation, IsCGImageCompatible cgImageCompatible)
{
- auto pixelBuffer = static_cast<CVPixelBufferRef>(PAL::CMSampleBufferGetImageBuffer(sample.platformSample().sample.cmSampleBuffer));
+ auto pixelBuffer = videoFrame.pixelBuffer();
ASSERT(pixelBuffer);
if (!pixelBuffer)
return nullptr;
Modified: trunk/Source/WebCore/platform/graphics/cv/ImageTransferSessionVT.h (291549 => 291550)
--- trunk/Source/WebCore/platform/graphics/cv/ImageTransferSessionVT.h 2022-03-21 02:11:50 UTC (rev 291549)
+++ trunk/Source/WebCore/platform/graphics/cv/ImageTransferSessionVT.h 2022-03-21 07:14:14 UTC (rev 291550)
@@ -26,7 +26,7 @@
#pragma once
#include "IntSize.h"
-#include "MediaSample.h"
+#include "VideoFrame.h"
#include <wtf/RetainPtr.h>
typedef struct CGImage *CGImageRef;
@@ -45,7 +45,7 @@
return std::unique_ptr<ImageTransferSessionVT>(new ImageTransferSessionVT(pixelFormat, shouldUseIOSurface));
}
- RefPtr<MediaSample> convertMediaSample(MediaSample&, const IntSize&);
+ RefPtr<MediaSample> convertVideoFrame(MediaSample&, const IntSize&);
RefPtr<MediaSample> createMediaSample(CGImageRef, const MediaTime&, const IntSize&, MediaSample::VideoRotation = MediaSample::VideoRotation::None, bool mirrored = false);
RefPtr<MediaSample> createMediaSample(CMSampleBufferRef, const MediaTime&, const IntSize&, MediaSample::VideoRotation = MediaSample::VideoRotation::None, bool mirrored = false);
Modified: trunk/Source/WebCore/platform/graphics/cv/ImageTransferSessionVT.mm (291549 => 291550)
--- trunk/Source/WebCore/platform/graphics/cv/ImageTransferSessionVT.mm 2022-03-21 02:11:50 UTC (rev 291549)
+++ trunk/Source/WebCore/platform/graphics/cv/ImageTransferSessionVT.mm 2022-03-21 07:14:14 UTC (rev 291550)
@@ -29,9 +29,10 @@
#import "CVUtilities.h"
#import "GraphicsContextCG.h"
#import "Logging.h"
-#import "MediaSampleAVFObjC.h"
+#import "VideoFrameCV.h"
#import <CoreMedia/CMFormatDescription.h>
#import <CoreMedia/CMSampleBuffer.h>
+#import <pal/avfoundation/MediaTimeAVFoundation.h>
#if !PLATFORM(MACCATALYST)
#import <pal/spi/cocoa/IOSurfaceSPI.h>
@@ -254,18 +255,16 @@
}
#endif
-RefPtr<MediaSample> ImageTransferSessionVT::convertMediaSample(MediaSample& sample, const IntSize& size)
+RefPtr<MediaSample> ImageTransferSessionVT::convertVideoFrame(MediaSample& videoFrame, const IntSize& size)
{
- ASSERT(sample.platformSample().type == PlatformSample::CMSampleBufferType);
+ if (size == expandedIntSize(videoFrame.presentationSize()))
+ return &videoFrame;
- if (size == expandedIntSize(sample.presentationSize()))
- return &sample;
-
- auto resizedBuffer = convertCMSampleBuffer(sample.platformSample().sample.cmSampleBuffer, size);
+ auto resizedBuffer = convertPixelBuffer(videoFrame.pixelBuffer(), size);
if (!resizedBuffer)
return nullptr;
- return MediaSampleAVFObjC::create(resizedBuffer.get(), sample.videoRotation(), sample.videoMirrored());
+ return VideoFrameCV::create(videoFrame.presentationTime(), videoFrame.videoMirrored(), videoFrame.videoRotation(), WTFMove(resizedBuffer));
}
#if !PLATFORM(MACCATALYST)
@@ -275,7 +274,7 @@
if (!sampleBuffer)
return nullptr;
- return MediaSampleAVFObjC::create(sampleBuffer.get(), rotation, mirrored);
+ return VideoFrameCV::create(sampleBuffer.get(), mirrored, rotation);
}
#endif
@@ -285,7 +284,7 @@
if (!sampleBuffer)
return nullptr;
- return MediaSampleAVFObjC::create(sampleBuffer.get(), rotation, mirrored);
+ return VideoFrameCV::create(sampleBuffer.get(), mirrored, rotation);
}
RefPtr<MediaSample> ImageTransferSessionVT::createMediaSample(CMSampleBufferRef buffer, const MediaTime& sampleTime, const IntSize& size, MediaSample::VideoRotation rotation, bool mirrored)
@@ -294,7 +293,7 @@
if (!sampleBuffer)
return nullptr;
- return MediaSampleAVFObjC::create(sampleBuffer.get(), rotation, mirrored);
+ return VideoFrameCV::create(sampleBuffer.get(), mirrored, rotation);
}
} // namespace WebCore
Modified: trunk/Source/WebCore/platform/graphics/cv/VideoFrameCV.h (291549 => 291550)
--- trunk/Source/WebCore/platform/graphics/cv/VideoFrameCV.h 2022-03-21 02:11:50 UTC (rev 291549)
+++ trunk/Source/WebCore/platform/graphics/cv/VideoFrameCV.h 2022-03-21 07:14:14 UTC (rev 291550)
@@ -40,6 +40,7 @@
class VideoFrameCV : public VideoFrame {
public:
WEBCORE_EXPORT static Ref<VideoFrameCV> create(MediaTime presentationTime, bool isMirrored, VideoRotation, RetainPtr<CVPixelBufferRef>&&);
+ WEBCORE_EXPORT static Ref<VideoFrameCV> create(CMSampleBufferRef, bool isMirrored, VideoRotation);
static RefPtr<VideoFrameCV> createFromPixelBuffer(PixelBuffer&&);
WEBCORE_EXPORT ~VideoFrameCV();
Modified: trunk/Source/WebCore/platform/graphics/cv/VideoFrameCV.mm (291549 => 291550)
--- trunk/Source/WebCore/platform/graphics/cv/VideoFrameCV.mm 2022-03-21 02:11:50 UTC (rev 291549)
+++ trunk/Source/WebCore/platform/graphics/cv/VideoFrameCV.mm 2022-03-21 07:14:14 UTC (rev 291550)
@@ -28,11 +28,22 @@
#if ENABLE(VIDEO) && USE(AVFOUNDATION)
#include "CVUtilities.h"
+#include "PixelBuffer.h"
#include "ProcessIdentity.h"
#include "CoreVideoSoftLink.h"
namespace WebCore {
+Ref<VideoFrameCV> VideoFrameCV::create(CMSampleBufferRef sampleBuffer, bool isMirrored, VideoRotation rotation)
+{
+ auto pixelBuffer = static_cast<CVPixelBufferRef>(PAL::CMSampleBufferGetImageBuffer(sampleBuffer));
+ auto timeStamp = PAL::CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer);
+ if (CMTIME_IS_INVALID(timeStamp))
+ timeStamp = PAL::CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
+
+ return VideoFrameCV::create(PAL::toMediaTime(timeStamp), isMirrored, rotation, pixelBuffer);
+}
+
Ref<VideoFrameCV> VideoFrameCV::create(MediaTime presentationTime, bool isMirrored, VideoRotation rotation, RetainPtr<CVPixelBufferRef>&& pixelBuffer)
{
ASSERT(pixelBuffer);
Modified: trunk/Source/WebCore/platform/mediastream/RealtimeVideoSource.cpp (291549 => 291550)
--- trunk/Source/WebCore/platform/mediastream/RealtimeVideoSource.cpp 2022-03-21 02:11:50 UTC (rev 291549)
+++ trunk/Source/WebCore/platform/mediastream/RealtimeVideoSource.cpp 2022-03-21 07:14:14 UTC (rev 291550)
@@ -30,7 +30,7 @@
#if PLATFORM(COCOA)
#include "ImageTransferSessionVT.h"
-#include "MediaSampleAVFObjC.h"
+#include "VideoFrameCV.h"
#endif
namespace WebCore {
@@ -172,31 +172,23 @@
}
#if PLATFORM(COCOA)
-RefPtr<MediaSample> RealtimeVideoSource::adaptVideoSample(MediaSample& sample)
+RefPtr<MediaSample> RealtimeVideoSource::adaptVideoFrame(MediaSample& videoFrame)
{
- if (sample.platformSample().type != PlatformSample::CMSampleBufferType) {
- // FIXME: Support more efficiently downsampling of remote video frames by downsampling in GPUProcess.
- auto newSample = MediaSampleAVFObjC::createFromPixelBuffer(sample.pixelBuffer(), sample.videoRotation(), sample.videoMirrored(), sample.presentationTime(), { });
- if (!newSample)
- return nullptr;
- return adaptVideoSample(*newSample);
- }
- ASSERT(sample.platformSample().type == PlatformSample::CMSampleBufferType);
- if (!m_imageTransferSession || m_imageTransferSession->pixelFormat() != sample.videoPixelFormat())
- m_imageTransferSession = ImageTransferSessionVT::create(sample.videoPixelFormat(), m_shouldUseIOSurface);
+ if (!m_imageTransferSession || m_imageTransferSession->pixelFormat() != videoFrame.videoPixelFormat())
+ m_imageTransferSession = ImageTransferSessionVT::create(videoFrame.videoPixelFormat(), m_shouldUseIOSurface);
ASSERT(m_imageTransferSession);
if (!m_imageTransferSession)
return nullptr;
- auto mediaSample = m_imageTransferSession->convertMediaSample(sample, size());
- ASSERT(mediaSample);
+ auto newVideoFrame = m_imageTransferSession->convertVideoFrame(videoFrame, size());
+ ASSERT(newVideoFrame);
- return mediaSample;
+ return newVideoFrame;
}
#endif
-void RealtimeVideoSource::videoSampleAvailable(MediaSample& sample, VideoFrameTimeMetadata metadata)
+void RealtimeVideoSource::videoSampleAvailable(MediaSample& videoFrame, VideoFrameTimeMetadata metadata)
{
if (m_frameDecimation > 1 && ++m_frameDecimationCounter % m_frameDecimation)
return;
@@ -208,15 +200,15 @@
#if PLATFORM(COCOA)
auto size = this->size();
- if (!size.isEmpty() && size != expandedIntSize(sample.presentationSize())) {
- if (auto mediaSample = adaptVideoSample(sample)) {
- RealtimeMediaSource::videoSampleAvailable(*mediaSample, metadata);
+ if (!size.isEmpty() && size != expandedIntSize(videoFrame.presentationSize())) {
+ if (auto newVideoFrame = adaptVideoFrame(videoFrame)) {
+ RealtimeMediaSource::videoSampleAvailable(*newVideoFrame, metadata);
return;
}
}
#endif
- RealtimeMediaSource::videoSampleAvailable(sample, metadata);
+ RealtimeMediaSource::videoSampleAvailable(videoFrame, metadata);
}
Ref<RealtimeMediaSource> RealtimeVideoSource::clone()
Modified: trunk/Source/WebCore/platform/mediastream/RealtimeVideoSource.h (291549 => 291550)
--- trunk/Source/WebCore/platform/mediastream/RealtimeVideoSource.h 2022-03-21 02:11:50 UTC (rev 291549)
+++ trunk/Source/WebCore/platform/mediastream/RealtimeVideoSource.h 2022-03-21 07:14:14 UTC (rev 291550)
@@ -75,7 +75,7 @@
void videoSampleAvailable(MediaSample&, VideoFrameTimeMetadata) final;
#if PLATFORM(COCOA)
- RefPtr<MediaSample> adaptVideoSample(MediaSample&);
+ RefPtr<MediaSample> adaptVideoFrame(MediaSample&);
#endif
#if !RELEASE_LOG_DISABLED
Modified: trunk/Source/WebCore/platform/mediastream/mac/AVVideoCaptureSource.mm (291549 => 291550)
--- trunk/Source/WebCore/platform/mediastream/mac/AVVideoCaptureSource.mm 2022-03-21 02:11:50 UTC (rev 291549)
+++ trunk/Source/WebCore/platform/mediastream/mac/AVVideoCaptureSource.mm 2022-03-21 07:14:14 UTC (rev 291550)
@@ -33,12 +33,12 @@
#import "IntRect.h"
#import "Logging.h"
#import "MediaConstraints.h"
-#import "MediaSampleAVFObjC.h"
#import "PlatformLayer.h"
#import "RealtimeMediaSourceCenter.h"
#import "RealtimeMediaSourceSettings.h"
#import "RealtimeVideoSource.h"
#import "RealtimeVideoUtilities.h"
+#import "VideoFrameCV.h"
#import <AVFoundation/AVCaptureDevice.h>
#import <AVFoundation/AVCaptureInput.h>
#import <AVFoundation/AVCaptureOutput.h>
@@ -45,6 +45,7 @@
#import <AVFoundation/AVCaptureSession.h>
#import <AVFoundation/AVError.h>
#import <objc/runtime.h>
+#import <pal/avfoundation/MediaTimeAVFoundation.h>
#import <pal/spi/cocoa/AVFoundationSPI.h>
#import "CoreVideoSoftLink.h"
@@ -560,12 +561,12 @@
if (++m_framesCount <= framesToDropWhenStarting)
return;
- auto sample = MediaSampleAVFObjC::create(sampleBuffer, m_sampleRotation, [captureConnection isVideoMirrored]);
- m_buffer = &sample.get();
- setIntrinsicSize(expandedIntSize(sample->presentationSize()));
+ auto videoFrame = VideoFrameCV::create(sampleBuffer, [captureConnection isVideoMirrored], m_sampleRotation);
+ m_buffer = &videoFrame.get();
+ setIntrinsicSize(expandedIntSize(videoFrame->presentationSize()));
VideoFrameTimeMetadata metadata;
metadata.captureTime = MonotonicTime::now().secondsSinceEpoch();
- dispatchMediaSampleToObservers(WTFMove(sample), metadata);
+ dispatchMediaSampleToObservers(WTFMove(videoFrame), metadata);
}
void AVVideoCaptureSource::captureSessionIsRunningDidChange(bool state)
Modified: trunk/Source/WebCore/platform/mediastream/mac/MockRealtimeVideoSourceMac.mm (291549 => 291550)
--- trunk/Source/WebCore/platform/mediastream/mac/MockRealtimeVideoSourceMac.mm 2022-03-21 02:11:50 UTC (rev 291549)
+++ trunk/Source/WebCore/platform/mediastream/mac/MockRealtimeVideoSourceMac.mm 2022-03-21 07:14:14 UTC (rev 291550)
@@ -36,7 +36,6 @@
#import "ImageBuffer.h"
#import "ImageTransferSessionVT.h"
#import "MediaConstraints.h"
-#import "MediaSampleAVFObjC.h"
#import "MockRealtimeMediaSourceCenter.h"
#import "NotImplemented.h"
#import "PlatformLayer.h"
Modified: trunk/Source/WebCore/platform/mediastream/mac/RealtimeIncomingVideoSourceCocoa.h (291549 => 291550)
--- trunk/Source/WebCore/platform/mediastream/mac/RealtimeIncomingVideoSourceCocoa.h 2022-03-21 02:11:50 UTC (rev 291549)
+++ trunk/Source/WebCore/platform/mediastream/mac/RealtimeIncomingVideoSourceCocoa.h 2022-03-21 07:14:14 UTC (rev 291550)
@@ -29,10 +29,9 @@
#if USE(LIBWEBRTC)
-#include "MediaSample.h"
#include "RealtimeIncomingVideoSource.h"
+#include "VideoFrame.h"
-using CMSampleBufferRef = struct opaqueCMSampleBuffer*;
using CVPixelBufferPoolRef = struct __CVPixelBufferPool*;
using CVPixelBufferRef = struct __CVBuffer*;
@@ -50,8 +49,8 @@
RealtimeIncomingVideoSourceCocoa(rtc::scoped_refptr<webrtc::VideoTrackInterface>&&, String&&);
RetainPtr<CVPixelBufferRef> pixelBufferFromVideoFrame(const webrtc::VideoFrame&);
CVPixelBufferPoolRef pixelBufferPool(size_t width, size_t height, webrtc::BufferType);
- RefPtr<MediaSample> toVideoFrame(const webrtc::VideoFrame&, MediaSample::VideoRotation);
- RefPtr<MediaSample> createMediaSampleFromCVPixelBuffer(CVPixelBufferRef, MediaSample::VideoRotation, int64_t);
+ RefPtr<VideoFrame> toVideoFrame(const webrtc::VideoFrame&, MediaSample::VideoRotation);
+ Ref<VideoFrame> createVideoSampleFromCVPixelBuffer(CVPixelBufferRef, MediaSample::VideoRotation, int64_t);
// rtc::VideoSinkInterface
void OnFrame(const webrtc::VideoFrame&) final;
Modified: trunk/Source/WebCore/platform/mediastream/mac/RealtimeIncomingVideoSourceCocoa.mm (291549 => 291550)
--- trunk/Source/WebCore/platform/mediastream/mac/RealtimeIncomingVideoSourceCocoa.mm 2022-03-21 02:11:50 UTC (rev 291549)
+++ trunk/Source/WebCore/platform/mediastream/mac/RealtimeIncomingVideoSourceCocoa.mm 2022-03-21 07:14:14 UTC (rev 291550)
@@ -32,7 +32,7 @@
#import "CVUtilities.h"
#import "Logging.h"
-#import "MediaSampleAVFObjC.h"
+#import "VideoFrameCV.h"
#import "VideoFrameLibWebRTC.h"
#import <wtf/cf/TypeCastsCF.h>
@@ -91,33 +91,12 @@
return m_pixelBufferPool.get();
}
-RefPtr<MediaSample> RealtimeIncomingVideoSourceCocoa::createMediaSampleFromCVPixelBuffer(CVPixelBufferRef pixelBuffer, MediaSample::VideoRotation rotation, int64_t timeStamp)
+Ref<VideoFrame> RealtimeIncomingVideoSourceCocoa::createVideoSampleFromCVPixelBuffer(CVPixelBufferRef pixelBuffer, MediaSample::VideoRotation rotation, int64_t timeStamp)
{
- CMSampleTimingInfo timingInfo;
- timingInfo.presentationTimeStamp = PAL::CMTimeMake(timeStamp, 1000000);
- timingInfo.decodeTimeStamp = PAL::kCMTimeInvalid;
- timingInfo.duration = PAL::kCMTimeInvalid;
-
- CMVideoFormatDescriptionRef formatDescription;
- OSStatus ostatus = PAL::CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault, (CVImageBufferRef)pixelBuffer, &formatDescription);
- if (ostatus != noErr) {
- ERROR_LOG_IF(loggerPtr(), LOGIDENTIFIER, "Failed to initialize CMVideoFormatDescription with error ", static_cast<int>(ostatus));
- return nullptr;
- }
-
- CMSampleBufferRef sampleBuffer;
- ostatus = PAL::CMSampleBufferCreateReadyWithImageBuffer(kCFAllocatorDefault, (CVImageBufferRef)pixelBuffer, formatDescription, &timingInfo, &sampleBuffer);
- CFRelease(formatDescription);
- if (ostatus != noErr) {
- ERROR_LOG_IF(loggerPtr(), LOGIDENTIFIER, "Failed to create the sample buffer with error ", static_cast<int>(ostatus));
- return nullptr;
- }
-
- auto sample = adoptCF(sampleBuffer);
- return MediaSampleAVFObjC::create(sample.get(), rotation);
+ return VideoFrameCV::create(MediaTime(timeStamp, 1000000), false, rotation, pixelBuffer);
}
-RefPtr<MediaSample> RealtimeIncomingVideoSourceCocoa::toVideoFrame(const webrtc::VideoFrame& frame, MediaSample::VideoRotation rotation)
+RefPtr<VideoFrame> RealtimeIncomingVideoSourceCocoa::toVideoFrame(const webrtc::VideoFrame& frame, MediaSample::VideoRotation rotation)
{
if (muted()) {
if (!m_blackFrame || m_blackFrameWidth != frame.width() || m_blackFrameHeight != frame.height()) {
@@ -125,7 +104,7 @@
m_blackFrameHeight = frame.height();
m_blackFrame = createBlackPixelBuffer(m_blackFrameWidth, m_blackFrameHeight);
}
- return createMediaSampleFromCVPixelBuffer(m_blackFrame.get(), rotation, frame.timestamp_us());
+ return createVideoSampleFromCVPixelBuffer(m_blackFrame.get(), rotation, frame.timestamp_us());
}
if (auto* provider = videoFrameBufferProvider(frame)) {
@@ -137,7 +116,7 @@
// If we already have a CVPixelBufferRef, use it directly.
if (auto pixelBuffer = webrtc::pixelBufferFromFrame(frame))
- return createMediaSampleFromCVPixelBuffer(pixelBuffer, rotation, frame.timestamp_us());
+ return createVideoSampleFromCVPixelBuffer(pixelBuffer, rotation, frame.timestamp_us());
// In case of in memory libwebrtc samples, we have non interleaved YUV data, let's lazily create CVPixelBuffers if needed.
return VideoFrameLibWebRTC::create(MediaTime(frame.timestamp_us(), 1000000), false, rotation, frame.video_frame_buffer(), [protectedThis = Ref { *this }, this](auto& buffer) {
Modified: trunk/Source/WebKit/ChangeLog (291549 => 291550)
--- trunk/Source/WebKit/ChangeLog 2022-03-21 02:11:50 UTC (rev 291549)
+++ trunk/Source/WebKit/ChangeLog 2022-03-21 07:14:14 UTC (rev 291550)
@@ -1,3 +1,16 @@
+2022-03-21 Youenn Fablet <you...@apple.com>
+
+ Remove use of MediaSampleAVFObjC from WebRTC pipelines
+ https://bugs.webkit.org/show_bug.cgi?id=237706
+ <rdar://problem/90425391>
+
+ Reviewed by Eric Carlson.
+
+ * GPUProcess/webrtc/LibWebRTCCodecsProxy.mm:
+ * GPUProcess/webrtc/RemoteMediaRecorder.cpp:
+ * GPUProcess/webrtc/RemoteSampleBufferDisplayLayer.cpp:
+ * UIProcess/Cocoa/UserMediaCaptureManagerProxy.cpp:
+
2022-03-19 Tim Horton <timothy_hor...@apple.com>
Fix the build (hopefully)
Modified: trunk/Source/WebKit/GPUProcess/webrtc/LibWebRTCCodecsProxy.mm (291549 => 291550)
--- trunk/Source/WebKit/GPUProcess/webrtc/LibWebRTCCodecsProxy.mm 2022-03-21 02:11:50 UTC (rev 291549)
+++ trunk/Source/WebKit/GPUProcess/webrtc/LibWebRTCCodecsProxy.mm 2022-03-21 07:14:14 UTC (rev 291550)
@@ -39,7 +39,7 @@
#import "WebCoreArgumentCoders.h"
#import <WebCore/CVUtilities.h>
#import <WebCore/LibWebRTCProvider.h>
-#import <WebCore/MediaSampleAVFObjC.h>
+#import <WebCore/VideoFrameCV.h>
#import <webrtc/sdk/WebKit/WebKitDecoder.h>
#import <webrtc/sdk/WebKit/WebKitEncoder.h>
#import <wtf/BlockPtr.h>
@@ -97,13 +97,11 @@
if (useRemoteFrames)
videoFrameObjectHeap = m_videoFrameObjectHeap.ptr();
return [identifier, connection = m_connection, resourceOwner = m_resourceOwner, videoFrameObjectHeap = WTFMove(videoFrameObjectHeap)] (CVPixelBufferRef pixelBuffer, uint32_t timeStampNs, uint32_t timeStamp) mutable {
- auto sample = WebCore::MediaSampleAVFObjC::createFromPixelBuffer(pixelBuffer, WebCore::MediaSample::VideoRotation::None, false, MediaTime(timeStampNs, 1), { });
- if (!sample)
- return;
+ auto videoFrame = WebCore::VideoFrameCV::create(MediaTime(timeStampNs, 1), false, WebCore::MediaSample::VideoRotation::None, pixelBuffer);
if (resourceOwner)
- sample->setOwnershipIdentity(resourceOwner);
+ videoFrame->setOwnershipIdentity(resourceOwner);
if (videoFrameObjectHeap) {
- auto properties = videoFrameObjectHeap->add(sample.releaseNonNull());
+ auto properties = videoFrameObjectHeap->add(WTFMove(videoFrame));
connection->send(Messages::LibWebRTCCodecs::CompletedDecoding { identifier, timeStamp, timeStampNs, WTFMove(properties) }, 0);
return;
}
Modified: trunk/Source/WebKit/GPUProcess/webrtc/RemoteMediaRecorder.cpp (291549 => 291550)
--- trunk/Source/WebKit/GPUProcess/webrtc/RemoteMediaRecorder.cpp 2022-03-21 02:11:50 UTC (rev 291549)
+++ trunk/Source/WebKit/GPUProcess/webrtc/RemoteMediaRecorder.cpp 2022-03-21 07:14:14 UTC (rev 291550)
@@ -33,7 +33,6 @@
#include "RemoteVideoFrameObjectHeap.h"
#include "SharedRingBufferStorage.h"
#include <WebCore/CARingBuffer.h>
-#include <WebCore/MediaSampleAVFObjC.h>
#include <WebCore/WebAudioBufferList.h>
#include <wtf/CompletionHandler.h>
Modified: trunk/Source/WebKit/GPUProcess/webrtc/RemoteSampleBufferDisplayLayer.cpp (291549 => 291550)
--- trunk/Source/WebKit/GPUProcess/webrtc/RemoteSampleBufferDisplayLayer.cpp 2022-03-21 02:11:50 UTC (rev 291549)
+++ trunk/Source/WebKit/GPUProcess/webrtc/RemoteSampleBufferDisplayLayer.cpp 2022-03-21 07:14:14 UTC (rev 291550)
@@ -33,7 +33,6 @@
#include "SampleBufferDisplayLayerMessages.h"
#include <WebCore/ImageTransferSessionVT.h>
#include <WebCore/LocalSampleBufferDisplayLayer.h>
-#include <WebCore/MediaSampleAVFObjC.h>
namespace WebKit {
Modified: trunk/Source/WebKit/UIProcess/Cocoa/UserMediaCaptureManagerProxy.cpp (291549 => 291550)
--- trunk/Source/WebKit/UIProcess/Cocoa/UserMediaCaptureManagerProxy.cpp 2022-03-21 02:11:50 UTC (rev 291549)
+++ trunk/Source/WebKit/UIProcess/Cocoa/UserMediaCaptureManagerProxy.cpp 2022-03-21 07:14:14 UTC (rev 291550)
@@ -41,9 +41,9 @@
#include <WebCore/CARingBuffer.h>
#include <WebCore/ImageRotationSessionVT.h>
#include <WebCore/MediaConstraints.h>
-#include <WebCore/MediaSampleAVFObjC.h>
#include <WebCore/RealtimeMediaSourceCenter.h>
#include <WebCore/RealtimeVideoSource.h>
+#include <WebCore/VideoFrameCV.h>
#include <WebCore/WebAudioBufferList.h>
#include <wtf/UniqueRef.h>
@@ -182,7 +182,7 @@
{
if (m_shouldApplyRotation && sample.videoRotation() != MediaSample::VideoRotation::None) {
auto pixelBuffer = rotatePixelBuffer(sample);
- return MediaSampleAVFObjC::createFromPixelBuffer(WTFMove(pixelBuffer), MediaSample::VideoRotation::None, sample.videoMirrored(), sample.presentationTime(), sample.decodeTime());
+ return VideoFrameCV::create(sample.presentationTime(), sample.videoMirrored(), MediaSample::VideoRotation::None, WTFMove(pixelBuffer));
}
return &sample;
}