Revision: 262289
Author: [email protected]
Date: 2020-05-29 05:15:05 -0700 (Fri, 29 May 2020)

Log Message:
MediaPlayerPrivateMediaStreamAVFObjC should enqueue samples in a background thread
https://bugs.webkit.org/show_bug.cgi?id=212073
Reviewed by Eric Carlson.
No longer hop to the main thread to render video samples.
Instead, enqueue to the display layer on the background thread, but still hop to the main thread for two things (sketched below):
- updating various player states;
- keeping a ref to the video sample if canvas rendering is needed.
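
Condensed from the MediaPlayerPrivateMediaStreamAVFObjC diff below, the hop looks like this (the main-thread state updates themselves are elided):

    void MediaPlayerPrivateMediaStreamAVFObjC::videoSampleAvailable(MediaSample& sample)
    {
        // Called on the capture thread: push state updates to the main thread,
        // then enqueue to the display layer directly from this thread.
        processNewVideoSample(sample, sample.videoRotation() != m_videoRotation || sample.videoMirrored() != m_videoMirrored);
        enqueueVideoSample(sample);
    }

    void MediaPlayerPrivateMediaStreamAVFObjC::processNewVideoSample(MediaSample& sample, bool hasChangedOrientation)
    {
        if (!isMainThread()) {
            // makeRef keeps the sample alive across the hop so the canvas
            // painter can hold on to it on the main thread.
            callOnMainThread([weakThis = makeWeakPtr(this), sample = makeRef(sample), hasChangedOrientation]() mutable {
                if (weakThis)
                    weakThis->processNewVideoSample(sample.get(), hasChangedOrientation);
            });
            return;
        }
        // Main thread from here on: update m_videoTransform, m_imagePainter, etc.
    }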
Most display layer operations (creation, flushing, etc.) stay on the main thread.
Deletion of the display layer and access from the background thread are covered by a lock.
The m_canEnqueueDisplayLayer boolean ensures we do not enqueue too early, before the display layer is fully initialized.
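
A condensed sketch of that enqueue path, taken from the diff below (the transform and bounds updates between the guards and the enqueue are elided):

    void MediaPlayerPrivateMediaStreamAVFObjC::enqueueVideoSample(MediaSample& sample)
    {
        // Written on the main thread (ensureLayers/destroyLayers), read here on
        // the sample thread; stays false until the layer is fully set up.
        if (!m_canEnqueueDisplayLayer)
            return;

        // destroyLayers() holds this lock while nulling the layer; tryHoldLock
        // means a contended sample is dropped rather than blocking capture.
        auto locker = tryHoldLock(m_sampleBufferDisplayLayerLock);
        if (!locker)
            return;

        if (!m_sampleBufferDisplayLayer || m_sampleBufferDisplayLayer->didFail())
            return;

        m_sampleBufferDisplayLayer->enqueueSample(sample);
    }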
LocalSampleBufferDisplayLayer needs to handle the fact that enqueuing may happen on a background thread.
Instead of introducing a lock, we introduce a work queue and hop to it whenever we enqueue samples or mutate the pending-sample queue.
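
The queue hop, as it appears in the LocalSampleBufferDisplayLayer diff below:

    void LocalSampleBufferDisplayLayer::enqueueSample(MediaSample& sample)
    {
        // All reads and writes of m_pendingVideoSampleQueue happen on
        // m_processingQueue, so no lock is needed.
        m_processingQueue->dispatch([this, sample = makeRef(sample)] {
            if (![m_sampleBufferDisplayLayer isReadyForMoreMediaData]) {
                addSampleToPendingQueue(sample);
                requestNotificationWhenReadyForVideoData();
                return;
            }
            enqueueSampleBuffer(sample);
        });
    }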
Covered by existing tests and manual testing.
* platform/graphics/avfoundation/objc/LocalSampleBufferDisplayLayer.h:
* platform/graphics/avfoundation/objc/LocalSampleBufferDisplayLayer.mm:
(-[WebAVSampleBufferStatusChangeListener observeValueForKeyPath:ofObject:change:context:]):
(WebCore::LocalSampleBufferDisplayLayer::enqueueSample):
(WebCore::LocalSampleBufferDisplayLayer::enqueueSampleBuffer):
(WebCore::LocalSampleBufferDisplayLayer::requestNotificationWhenReadyForVideoData):
* platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaStreamAVFObjC.h:
* platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaStreamAVFObjC.mm:
(WebCore::videoTransformationMatrix):
(WebCore::MediaPlayerPrivateMediaStreamAVFObjC::processNewVideoSampleAvailable):
(WebCore::MediaPlayerPrivateMediaStreamAVFObjC::videoSampleAvailable):
(WebCore::MediaPlayerPrivateMediaStreamAVFObjC::applicationDidBecomeActive):
(WebCore::MediaPlayerPrivateMediaStreamAVFObjC::flushRenderers):
(WebCore::MediaPlayerPrivateMediaStreamAVFObjC::ensureLayers):
(WebCore::MediaPlayerPrivateMediaStreamAVFObjC::destroyLayers):
(WebCore::MediaPlayerPrivateMediaStreamAVFObjC::updateRenderingMode):
(WebCore::MediaPlayerPrivateMediaStreamAVFObjC::checkSelectedVideoTrack):
(WebCore::MediaPlayerPrivateMediaStreamAVFObjC::paintCurrentFrameInContext):
(WebCore::MediaPlayerPrivateMediaStreamAVFObjC::setBufferingPolicy):
(WebCore::MediaPlayerPrivateMediaStreamAVFObjC::rootLayerBoundsDidChange):
(WebCore::MediaPlayerPrivateMediaStreamAVFObjC::videoTransformationMatrix): Deleted.
(WebCore::MediaPlayerPrivateMediaStreamAVFObjC::enqueueCorrectedVideoSample): Deleted.
(WebCore::MediaPlayerPrivateMediaStreamAVFObjC::updateDisplayLayer): Deleted.
Modified Paths:
- trunk/Source/WebCore/ChangeLog
- trunk/Source/WebCore/platform/graphics/avfoundation/objc/LocalSampleBufferDisplayLayer.h
- trunk/Source/WebCore/platform/graphics/avfoundation/objc/LocalSampleBufferDisplayLayer.mm
- trunk/Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaStreamAVFObjC.h
- trunk/Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaStreamAVFObjC.mm

Diff:
Modified: trunk/Source/WebCore/ChangeLog (262288 => 262289)
--- trunk/Source/WebCore/ChangeLog 2020-05-29 11:41:02 UTC (rev 262288)
+++ trunk/Source/WebCore/ChangeLog 2020-05-29 12:15:05 UTC (rev 262289)
@@ -1,3 +1,48 @@
+2020-05-29 Youenn Fablet <[email protected]>
+
+ MediaPlayerPrivateMediaStreamAVFObjC should enqueue samples in a background thread
+ https://bugs.webkit.org/show_bug.cgi?id=212073
+
+ Reviewed by Eric Carlson.
+
+ No longer hop to the main thread to render video samples.
+ Instead, enqueue to the display layer on the background thread, but still hop to the main thread for two things:
+ - updating various player states;
+ - keeping a ref to the video sample if canvas rendering is needed.
+
+ Most display layer operations (creation, flushing, etc.) stay on the main thread.
+ Deletion of the display layer and access from the background thread are covered by a lock.
+ The m_canEnqueueDisplayLayer boolean ensures we do not enqueue too early, before the display layer is fully initialized.
+
+ LocalSampleBufferDisplayLayer needs to handle the fact that enqueuing may happen on a background thread.
+ Instead of introducing a lock, we introduce a work queue and hop to it whenever we enqueue samples or mutate the pending-sample queue.
+
+ Covered by existing tests and manual testing.
+
+ * platform/graphics/avfoundation/objc/LocalSampleBufferDisplayLayer.h:
+ * platform/graphics/avfoundation/objc/LocalSampleBufferDisplayLayer.mm:
+ (-[WebAVSampleBufferStatusChangeListener observeValueForKeyPath:ofObject:change:context:]):
+ (WebCore::LocalSampleBufferDisplayLayer::enqueueSample):
+ (WebCore::LocalSampleBufferDisplayLayer::enqueueSampleBuffer):
+ (WebCore::LocalSampleBufferDisplayLayer::requestNotificationWhenReadyForVideoData):
+ * platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaStreamAVFObjC.h:
+ * platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaStreamAVFObjC.mm:
+ (WebCore::videoTransformationMatrix):
+ (WebCore::MediaPlayerPrivateMediaStreamAVFObjC::processNewVideoSampleAvailable):
+ (WebCore::MediaPlayerPrivateMediaStreamAVFObjC::videoSampleAvailable):
+ (WebCore::MediaPlayerPrivateMediaStreamAVFObjC::applicationDidBecomeActive):
+ (WebCore::MediaPlayerPrivateMediaStreamAVFObjC::flushRenderers):
+ (WebCore::MediaPlayerPrivateMediaStreamAVFObjC::ensureLayers):
+ (WebCore::MediaPlayerPrivateMediaStreamAVFObjC::destroyLayers):
+ (WebCore::MediaPlayerPrivateMediaStreamAVFObjC::updateRenderingMode):
+ (WebCore::MediaPlayerPrivateMediaStreamAVFObjC::checkSelectedVideoTrack):
+ (WebCore::MediaPlayerPrivateMediaStreamAVFObjC::paintCurrentFrameInContext):
+ (WebCore::MediaPlayerPrivateMediaStreamAVFObjC::setBufferingPolicy):
+ (WebCore::MediaPlayerPrivateMediaStreamAVFObjC::rootLayerBoundsDidChange):
+ (WebCore::MediaPlayerPrivateMediaStreamAVFObjC::videoTransformationMatrix): Deleted.
+ (WebCore::MediaPlayerPrivateMediaStreamAVFObjC::enqueueCorrectedVideoSample): Deleted.
+ (WebCore::MediaPlayerPrivateMediaStreamAVFObjC::updateDisplayLayer): Deleted.
+
2020-05-29 Carlos Garcia Campos <[email protected]>
[GTK4] Implement script dialogs
Modified: trunk/Source/WebCore/platform/graphics/avfoundation/objc/LocalSampleBufferDisplayLayer.h (262288 => 262289)
--- trunk/Source/WebCore/platform/graphics/avfoundation/objc/LocalSampleBufferDisplayLayer.h 2020-05-29 11:41:02 UTC (rev 262288)
+++ trunk/Source/WebCore/platform/graphics/avfoundation/objc/LocalSampleBufferDisplayLayer.h 2020-05-29 12:15:05 UTC (rev 262289)
@@ -31,6 +31,7 @@
#include <wtf/Deque.h>
#include <wtf/Forward.h>
#include <wtf/RetainPtr.h>
+#include <wtf/WorkQueue.h>
OBJC_CLASS AVSampleBufferDisplayLayer;
OBJC_CLASS WebAVSampleBufferStatusChangeListener;
@@ -37,7 +38,7 @@
namespace WebCore {
-class WEBCORE_EXPORT LocalSampleBufferDisplayLayer final : public SampleBufferDisplayLayer, public CanMakeWeakPtr<LocalSampleBufferDisplayLayer> {
+class WEBCORE_EXPORT LocalSampleBufferDisplayLayer final : public SampleBufferDisplayLayer, public CanMakeWeakPtr<LocalSampleBufferDisplayLayer, WeakPtrFactoryInitialization::Eager> {
WTF_MAKE_FAST_ALLOCATED;
public:
static std::unique_ptr<LocalSampleBufferDisplayLayer> create(Client&);
@@ -78,6 +79,7 @@
void removeOldSamplesFromPendingQueue();
void addSampleToPendingQueue(MediaSample&);
void requestNotificationWhenReadyForVideoData();
+ void enqueueSampleBuffer(MediaSample&);
private:
RetainPtr<WebAVSampleBufferStatusChangeListener> m_statusChangeListener;
@@ -84,7 +86,10 @@
RetainPtr<AVSampleBufferDisplayLayer> m_sampleBufferDisplayLayer;
RetainPtr<PlatformLayer> m_rootLayer;
RenderPolicy m_renderPolicy { RenderPolicy::TimingInfo };
+
+ RefPtr<WorkQueue> m_processingQueue;
+ // Only accessed through m_processingQueue or if m_processingQueue is null.
using PendingSampleQueue = Deque<Ref<MediaSample>>;
PendingSampleQueue m_pendingVideoSampleQueue;
};
Modified: trunk/Source/WebCore/platform/graphics/avfoundation/objc/LocalSampleBufferDisplayLayer.mm (262288 => 262289)
--- trunk/Source/WebCore/platform/graphics/avfoundation/objc/LocalSampleBufferDisplayLayer.mm 2020-05-29 11:41:02 UTC (rev 262288)
+++ trunk/Source/WebCore/platform/graphics/avfoundation/objc/LocalSampleBufferDisplayLayer.mm 2020-05-29 12:15:05 UTC (rev 262289)
@@ -105,33 +105,24 @@
UNUSED_PARAM(context);
UNUSED_PARAM(keyPath);
UNUSED_PARAM(change);
- ASSERT(_parent);
- if (!_parent)
+ if (![object isKindOfClass:PAL::getAVSampleBufferDisplayLayerClass()])
return;
- if ([object isKindOfClass:PAL::getAVSampleBufferDisplayLayerClass()]) {
- ASSERT(object == _parent->displayLayer());
-
- if ([keyPath isEqualToString:@"status"]) {
- callOnMainThread([protectedSelf = RetainPtr<WebAVSampleBufferStatusChangeListener>(self)] {
- if (!protectedSelf->_parent)
- return;
-
+ if ([keyPath isEqualToString:@"status"]) {
+ callOnMainThread([protectedSelf = RetainPtr<WebAVSampleBufferStatusChangeListener>(self)] {
+ if (protectedSelf->_parent)
protectedSelf->_parent->layerStatusDidChange();
- });
- return;
- }
+ });
+ return;
+ }
- if ([keyPath isEqualToString:@"error"]) {
- callOnMainThread([protectedSelf = RetainPtr<WebAVSampleBufferStatusChangeListener>(self)] {
- if (!protectedSelf->_parent)
- return;
-
+ if ([keyPath isEqualToString:@"error"]) {
+ callOnMainThread([protectedSelf = RetainPtr<WebAVSampleBufferStatusChangeListener>(self)] {
+ if (protectedSelf->_parent)
protectedSelf->_parent->layerErrorDidChange();
- });
- return;
- }
+ });
+ return;
}
}
@end
@@ -160,6 +151,7 @@
: SampleBufferDisplayLayer(client)
, m_statusChangeListener(adoptNS([[WebAVSampleBufferStatusChangeListener alloc] initWithParent:this]))
, m_sampleBufferDisplayLayer(WTFMove(sampleBufferDisplayLayer))
+ , m_processingQueue(WorkQueue::create("LocalSampleBufferDisplayLayer queue"))
{
}
@@ -191,6 +183,8 @@
LocalSampleBufferDisplayLayer::~LocalSampleBufferDisplayLayer()
{
+ m_processingQueue = nullptr;
+
[m_statusChangeListener stop];
m_pendingVideoSampleQueue.clear();
@@ -281,12 +275,16 @@
void LocalSampleBufferDisplayLayer::flush()
{
- [m_sampleBufferDisplayLayer flush];
+ m_processingQueue->dispatch([this] {
+ [m_sampleBufferDisplayLayer flush];
+ });
}
void LocalSampleBufferDisplayLayer::flushAndRemoveImage()
{
- [m_sampleBufferDisplayLayer flushAndRemoveImage];
+ m_processingQueue->dispatch([this] {
+ [m_sampleBufferDisplayLayer flushAndRemoveImage];
+ });
}
static const double rendererLatency = 0.02;
@@ -293,12 +291,20 @@
void LocalSampleBufferDisplayLayer::enqueueSample(MediaSample& sample)
{
- if (![m_sampleBufferDisplayLayer isReadyForMoreMediaData]) {
- addSampleToPendingQueue(sample);
- requestNotificationWhenReadyForVideoData();
- return;
- }
+ m_processingQueue->dispatch([this, sample = makeRef(sample)] {
+ if (![m_sampleBufferDisplayLayer isReadyForMoreMediaData]) {
+ addSampleToPendingQueue(sample);
+ requestNotificationWhenReadyForVideoData();
+ return;
+ }
+ enqueueSampleBuffer(sample);
+ });
+}
+void LocalSampleBufferDisplayLayer::enqueueSampleBuffer(MediaSample& sample)
+{
+ ASSERT(!isMainThread());
+
auto sampleToEnqueue = sample.platformSample().sample.cmSampleBuffer;
auto now = MediaTime::createWithDouble(MonotonicTime::now().secondsSinceEpoch().value() + rendererLatency);
@@ -315,6 +321,8 @@
void LocalSampleBufferDisplayLayer::removeOldSamplesFromPendingQueue()
{
+ ASSERT(!isMainThread());
+
if (m_pendingVideoSampleQueue.isEmpty())
return;
@@ -334,6 +342,8 @@
void LocalSampleBufferDisplayLayer::addSampleToPendingQueue(MediaSample& sample)
{
+ ASSERT(!isMainThread());
+
removeOldSamplesFromPendingQueue();
m_pendingVideoSampleQueue.append(sample);
}
@@ -340,13 +350,15 @@
void LocalSampleBufferDisplayLayer::clearEnqueuedSamples()
{
- m_pendingVideoSampleQueue.clear();
+ m_processingQueue->dispatch([this] {
+ m_pendingVideoSampleQueue.clear();
+ });
}
void LocalSampleBufferDisplayLayer::requestNotificationWhenReadyForVideoData()
{
auto weakThis = makeWeakPtr(*this);
- [m_sampleBufferDisplayLayer requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^{
+ [m_sampleBufferDisplayLayer requestMediaDataWhenReadyOnQueue:m_processingQueue->dispatchQueue() usingBlock:^{
if (!weakThis)
return;
@@ -358,8 +370,7 @@
return;
}
- auto sample = m_pendingVideoSampleQueue.takeFirst();
- enqueueSample(sample.get());
+ enqueueSampleBuffer(m_pendingVideoSampleQueue.takeFirst().get());
}
}];
}
Modified: trunk/Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaStreamAVFObjC.h (262288 => 262289)
--- trunk/Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaStreamAVFObjC.h 2020-05-29 11:41:02 UTC (rev 262288)
+++ trunk/Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaStreamAVFObjC.h 2020-05-29 12:15:05 UTC (rev 262289)
@@ -137,6 +137,7 @@
void flushRenderers();
+ void processNewVideoSample(MediaSample&, bool hasChangedOrientation);
void enqueueVideoSample(MediaSample&);
void requestNotificationWhenReadyForVideoData();
@@ -212,8 +213,6 @@
AudioSourceProvider* audioSourceProvider() final;
- CGAffineTransform videoTransformationMatrix(MediaSample&, bool forceUpdate = false);
-
void applicationDidBecomeActive() final;
bool hideRootLayer() const { return (!activeVideoTrack() || m_waitingForFirstImage) && m_displayMode != PaintItBlack; }
@@ -246,9 +245,17 @@
float m_volume { 1 };
DisplayMode m_displayMode { None };
PlaybackState m_playbackState { PlaybackState::None };
+ Optional<CGAffineTransform> m_videoTransform;
+
+ // Used on both main thread and sample thread.
+ std::unique_ptr<SampleBufferDisplayLayer> m_sampleBufferDisplayLayer;
+ Lock m_sampleBufferDisplayLayerLock;
+ bool m_shouldUpdateDisplayLayer { true };
+ // Written on main thread, read on sample thread.
+ bool m_canEnqueueDisplayLayer { false };
+ // Used on sample thread.
MediaSample::VideoRotation m_videoRotation { MediaSample::VideoRotation::None };
- CGAffineTransform m_videoTransform;
- std::unique_ptr<SampleBufferDisplayLayer> m_sampleBufferDisplayLayer;
+ bool m_videoMirrored { false };
Ref<const Logger> m_logger;
const void* m_logIdentifier;
@@ -259,13 +266,11 @@
RetainPtr<WebRootSampleBufferBoundsChangeListener> m_boundsChangeListener;
- bool m_videoMirrored { false };
bool m_playing { false };
bool m_muted { false };
bool m_ended { false };
bool m_hasEverEnqueuedVideoFrame { false };
bool m_pendingSelectedTrackCheck { false };
- bool m_transformIsValid { false };
bool m_visible { false };
bool m_haveSeenMetadata { false };
bool m_waitingForFirstImage { false };
Modified: trunk/Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaStreamAVFObjC.mm (262288 => 262289)
--- trunk/Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaStreamAVFObjC.mm 2020-05-29 11:41:02 UTC (rev 262288)
+++ trunk/Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaStreamAVFObjC.mm 2020-05-29 12:15:05 UTC (rev 262289)
@@ -41,6 +41,7 @@
#import <pal/avfoundation/MediaTimeAVFoundation.h>
#import <pal/spi/cocoa/AVFoundationSPI.h>
#import <pal/system/Clock.h>
+#import <wtf/Lock.h>
#import <wtf/MainThread.h>
#import <wtf/NeverDestroyed.h>
@@ -139,7 +140,7 @@
, m_videoLayerManager(makeUnique<VideoLayerManagerObjC>(m_logger, m_logIdentifier))
{
INFO_LOG(LOGIDENTIFIER);
- // MediaPlayerPrivateMediaStreamAVFObjC::videoSampleAvailable expects a weak pointer to be created in the constructor.
+ // MediaPlayerPrivateMediaStreamAVFObjC::processNewVideoSample expects a weak pointer to be created in the constructor.
m_boundsChangeListener = adoptNS([[WebRootSampleBufferBoundsChangeListener alloc] initWithCallback:[this, weakThis = makeWeakPtr(this)] {
if (!weakThis)
return;
@@ -227,11 +228,8 @@
#pragma mark -
#pragma mark AVSampleBuffer Methods
-CGAffineTransform MediaPlayerPrivateMediaStreamAVFObjC::videoTransformationMatrix(MediaSample& sample, bool forceUpdate)
+static inline CGAffineTransform videoTransformationMatrix(MediaSample& sample)
{
- if (!forceUpdate && m_transformIsValid)
- return m_videoTransform;
-
CMSampleBufferRef sampleBuffer = sample.platformSample().sample.cmSampleBuffer;
CVPixelBufferRef pixelBuffer = static_cast<CVPixelBufferRef>(CMSampleBufferGetImageBuffer(sampleBuffer));
size_t width = CVPixelBufferGetWidth(pixelBuffer);
@@ -239,19 +237,28 @@
if (!width || !height)
return CGAffineTransformIdentity;
- ASSERT(m_videoRotation >= MediaSample::VideoRotation::None);
- ASSERT(m_videoRotation <= MediaSample::VideoRotation::Left);
-
- m_videoTransform = CGAffineTransformMakeRotation(static_cast<int>(m_videoRotation) * M_PI / 180);
+ auto videoTransform = CGAffineTransformMakeRotation(static_cast<int>(sample.videoRotation()) * M_PI / 180);
if (sample.videoMirrored())
- m_videoTransform = CGAffineTransformScale(m_videoTransform, -1, 1);
+ videoTransform = CGAffineTransformScale(videoTransform, -1, 1);
- m_transformIsValid = true;
- return m_videoTransform;
+ return videoTransform;
}
+void MediaPlayerPrivateMediaStreamAVFObjC::videoSampleAvailable(MediaSample& sample)
+{
+ processNewVideoSample(sample, sample.videoRotation() != m_videoRotation || sample.videoMirrored() != m_videoMirrored);
+ enqueueVideoSample(sample);
+}
+
void MediaPlayerPrivateMediaStreamAVFObjC::enqueueVideoSample(MediaSample& sample)
{
+ if (!m_canEnqueueDisplayLayer)
+ return;
+
+ auto locker = tryHoldLock(m_sampleBufferDisplayLayerLock);
+ if (!locker)
+ return;
+
if (!m_sampleBufferDisplayLayer || m_sampleBufferDisplayLayer->didFail())
return;
@@ -258,18 +265,23 @@
if (sample.videoRotation() != m_videoRotation || sample.videoMirrored() != m_videoMirrored) {
m_videoRotation = sample.videoRotation();
m_videoMirrored = sample.videoMirrored();
- m_sampleBufferDisplayLayer->updateAffineTransform(videoTransformationMatrix(sample, true));
- updateDisplayLayer();
+ m_sampleBufferDisplayLayer->updateAffineTransform(videoTransformationMatrix(sample));
+ m_shouldUpdateDisplayLayer = true;
}
+ if (m_shouldUpdateDisplayLayer) {
+ m_sampleBufferDisplayLayer->updateBoundsAndPosition(m_sampleBufferDisplayLayer->rootLayer().bounds, m_videoRotation);
+ m_shouldUpdateDisplayLayer = false;
+ }
+
m_sampleBufferDisplayLayer->enqueueSample(sample);
}
-void MediaPlayerPrivateMediaStreamAVFObjC::videoSampleAvailable(MediaSample& sample)
+void MediaPlayerPrivateMediaStreamAVFObjC::processNewVideoSample(MediaSample& sample, bool hasChangedOrientation)
{
if (!isMainThread()) {
- callOnMainThread([weakThis = makeWeakPtr(this), sample = makeRef(sample)]() mutable {
+ callOnMainThread([weakThis = makeWeakPtr(this), sample = makeRef(sample), hasChangedOrientation]() mutable {
if (weakThis)
- weakThis->videoSampleAvailable(sample.get());
+ weakThis->processNewVideoSample(sample.get(), hasChangedOrientation);
});
return;
}
@@ -277,6 +289,9 @@
if (!m_activeVideoTrack)
return;
+ if (hasChangedOrientation)
+ m_videoTransform = { };
+
if (!m_imagePainter.mediaSample || m_displayMode != PausedImage) {
m_imagePainter.mediaSample = &sample;
m_imagePainter.cgImage = nullptr;
@@ -287,8 +302,6 @@
if (m_displayMode != LivePreview && !m_waitingForFirstImage)
return;
- enqueueVideoSample(sample);
-
if (!m_hasEverEnqueuedVideoFrame) {
m_hasEverEnqueuedVideoFrame = true;
m_player->firstVideoFrameAvailable();
@@ -335,6 +348,7 @@
if (!activeVideoTrack || !activeVideoTrack->enabled())
return;
+ m_canEnqueueDisplayLayer = false;
m_sampleBufferDisplayLayer = SampleBufferDisplayLayer::create(*this);
ERROR_LOG_IF(!m_sampleBufferDisplayLayer, LOGIDENTIFIER, "Creating the SampleBufferDisplayLayer failed.");
if (!m_sampleBufferDisplayLayer)
@@ -351,16 +365,21 @@
return;
}
updateRenderingMode();
- updateDisplayLayer();
+ m_shouldUpdateDisplayLayer = true;
m_videoLayerManager->setVideoLayer(m_sampleBufferDisplayLayer->rootLayer(), size);
[m_boundsChangeListener begin:m_sampleBufferDisplayLayer->rootLayer()];
+
+ m_canEnqueueDisplayLayer = true;
});
}
void MediaPlayerPrivateMediaStreamAVFObjC::destroyLayers()
{
+ m_canEnqueueDisplayLayer = false;
+
+ auto locker = holdLock(m_sampleBufferDisplayLayerLock);
if (m_sampleBufferDisplayLayer)
m_sampleBufferDisplayLayer = nullptr;
@@ -661,7 +680,7 @@
return;
scheduleDeferredTask([this] {
- m_transformIsValid = false;
+ m_videoTransform = { };
if (m_player)
m_player->renderingModeChanged();
});
@@ -944,7 +963,9 @@
auto image = m_imagePainter.cgImage.get();
FloatRect imageRect(0, 0, CGImageGetWidth(image), CGImageGetHeight(image));
- AffineTransform videoTransform = videoTransformationMatrix(*m_imagePainter.mediaSample);
+ if (!m_videoTransform)
+ m_videoTransform = videoTransformationMatrix(*m_imagePainter.mediaSample);
+ AffineTransform videoTransform = *m_videoTransform;
FloatRect transformedDestRect = videoTransform.inverse().valueOr(AffineTransform()).mapRect(destRect);
context.concatCTM(videoTransform);
context.drawNativeImage(image, imageRect.size(), transformedDestRect, imageRect);
@@ -1010,17 +1031,9 @@
pixelBufferConformer = nullptr;
}
-void MediaPlayerPrivateMediaStreamAVFObjC::updateDisplayLayer()
-{
- if (!m_sampleBufferDisplayLayer)
- return;
-
- m_sampleBufferDisplayLayer->updateBoundsAndPosition(m_sampleBufferDisplayLayer->rootLayer().bounds, m_videoRotation);
-}
-
void MediaPlayerPrivateMediaStreamAVFObjC::rootLayerBoundsDidChange()
{
- updateDisplayLayer();
+ m_shouldUpdateDisplayLayer = true;
}
WTFLogChannel& MediaPlayerPrivateMediaStreamAVFObjC::logChannel() const