Diff
Modified: trunk/LayoutTests/ChangeLog (203422 => 203423)
--- trunk/LayoutTests/ChangeLog 2016-07-19 21:55:51 UTC (rev 203422)
+++ trunk/LayoutTests/ChangeLog 2016-07-19 22:35:51 UTC (rev 203423)
@@ -1,3 +1,17 @@
+2016-07-19 George Ruan <[email protected]>
+
+ HTMLVideoElement frames do not update on iOS when src is a MediaStream blob
+ https://bugs.webkit.org/show_bug.cgi?id=159833
+ <rdar://problem/27379487>
+
+ Reviewed by Eric Carlson.
+
+ * fast/mediastream/MediaStream-video-element-displays-buffer-expected.txt: Added.
+ * fast/mediastream/MediaStream-video-element-displays-buffer.html: Added. Checks that
+ a video element with a mediastream source displays frames that are neither black nor transparent.
+ * fast/mediastream/resources/getUserMedia-helper.js:
+ (setupVideoElementWithStream): Sets up video element with global variable mediastream.
+
2016-07-19 Ryan Haddad <[email protected]>
Land test expectations for rdar://problem/27356144.
Added: trunk/LayoutTests/fast/mediastream/MediaStream-video-element-displays-buffer-expected.txt (0 => 203423)
--- trunk/LayoutTests/fast/mediastream/MediaStream-video-element-displays-buffer-expected.txt (rev 0)
+++ trunk/LayoutTests/fast/mediastream/MediaStream-video-element-displays-buffer-expected.txt 2016-07-19 22:35:51 UTC (rev 203423)
@@ -0,0 +1,17 @@
+Tests that the stream displays captured buffers to the video element.
+
+On success, you will see a series of "PASS" messages, followed by "TEST COMPLETE".
+
+
+PASS mediaDevices.getUserMedia generated a stream successfully.
+video.src = window.URL.createObjectURL(mediaStream)
+video.play()
+
+ === checking pixels ===
+PASS isPixelTransparent(buffer) is true
+PASS isPixelTransparent(buffer) is false
+PASS isPixelBlack(buffer) is false
+PASS successfullyParsed is true
+
+TEST COMPLETE
+
Added: trunk/LayoutTests/fast/mediastream/MediaStream-video-element-displays-buffer.html (0 => 203423)
--- trunk/LayoutTests/fast/mediastream/MediaStream-video-element-displays-buffer.html (rev 0)
+++ trunk/LayoutTests/fast/mediastream/MediaStream-video-element-displays-buffer.html 2016-07-19 22:35:51 UTC (rev 203423)
@@ -0,0 +1,71 @@
+<!DOCTYPE html>
+<html>
+<head>
+    <script src="../../resources/js-test-pre.js"></script>
+    <script src="resources/getUserMedia-helper.js"></script>
+</head>
+<body onload="start()">
+<p id="description"></p>
+<div id="console"></div>
+<video controls width="680" height="360"></video>
+<canvas width="680" height="360"></canvas>
+<script>
+ let mediaStream;
+ let video;
+ let canvas;
+ let context;
+
+ let buffer;
+
+ function isPixelTransparent(pixel)
+ {
+ return pixel[0] === 0 && pixel[1] === 0 && pixel[2] === 0 && pixel[3] === 0;
+ }
+
+ function isPixelBlack(pixel)
+ {
+ return pixel[0] === 255 && pixel[1] === 255 && pixel[2] === 255 && pixel[3] === 255;
+ }
+
+ function verifyFramesBeingDisplayed()
+ {
+ debug('<br> === checking pixels ===');
+
+ context.clearRect(0, 0, canvas.width, canvas.height);
+
+ buffer = context.getImageData(0, 0, 1, 1).data;
+ shouldBeTrue('isPixelTransparent(buffer)');
+
+ context.drawImage(video, 0, 0, canvas.width, canvas.height);
+
+ buffer = context.getImageData(0, 0, 1, 1).data;
+ shouldBeFalse('isPixelTransparent(buffer)');
+ shouldBeFalse('isPixelBlack(buffer)');
+
+ finishJSTest();
+ }
+
+ function canplay()
+ {
+ evalAndLog('video.play()');
+ }
+
+ function start()
+ {
+ description("Tests that the stream displays captured buffers to the video element.");
+
+ canvas = document.querySelector('canvas');
+ context = canvas.getContext('2d');
+
+ video = document.querySelector('video');
+ video.addEventListener('canplay', canplay, false);
+ video.addEventListener('canplaythrough', verifyFramesBeingDisplayed, false);
+
+ getUserMedia("allow", {video:true}, setupVideoElementWithStream);
+ }
+
+ window.jsTestIsAsync = true;
+</script>
+<script src="../../resources/js-test-post.js"></script>
+</body>
+</html>
\ No newline at end of file
Modified: trunk/LayoutTests/fast/mediastream/resources/getUserMedia-helper.js (203422 => 203423)
--- trunk/LayoutTests/fast/mediastream/resources/getUserMedia-helper.js 2016-07-19 21:55:51 UTC (rev 203422)
+++ trunk/LayoutTests/fast/mediastream/resources/getUserMedia-helper.js 2016-07-19 22:35:51 UTC (rev 203423)
@@ -24,3 +24,10 @@
testFailed('getUserMedia failed:' + e);
finishJSTest();
}
+
+function setupVideoElementWithStream(stream)
+{
+ mediaStream = stream;
+ testPassed('mediaDevices.getUserMedia generated a stream successfully.');
+    evalAndLog('video.src = window.URL.createObjectURL(mediaStream)');
+}
\ No newline at end of file
Modified: trunk/Source/WebCore/ChangeLog (203422 => 203423)
--- trunk/Source/WebCore/ChangeLog 2016-07-19 21:55:51 UTC (rev 203422)
+++ trunk/Source/WebCore/ChangeLog 2016-07-19 22:35:51 UTC (rev 203423)
@@ -1,3 +1,55 @@
+2016-07-19 George Ruan <[email protected]>
+
+ HTMLVideoElement frames do not update on iOS when src is a MediaStream blob
+ https://bugs.webkit.org/show_bug.cgi?id=159833
+ <rdar://problem/27379487>
+
+ Reviewed by Eric Carlson.
+
+ Test: fast/mediastream/MediaStream-video-element-displays-buffer.html
+
+ * WebCore.xcodeproj/project.pbxproj:
+ * platform/graphics/avfoundation/MediaSampleAVFObjC.h: Change create to return a Ref<T> instead
+ of RefPtr<T>
+ * platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaStreamAVFObjC.h: Make observer of
+ MediaStreamTrackPrivate and make MediaPlayer use an AVSampleBufferDisplayLayer instead of CALayer.
+ * platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaStreamAVFObjC.mm: Ditto.
+ (WebCore::MediaPlayerPrivateMediaStreamAVFObjC::~MediaPlayerPrivateMediaStreamAVFObjC): Clean up
+ observers and AVSampleBufferDisplayLayer
+ (WebCore::MediaPlayerPrivateMediaStreamAVFObjC::isAvailable): Ensures AVSampleBufferDisplayLayer
+ is available.
+ (WebCore::MediaPlayerPrivateMediaStreamAVFObjC::enqueueAudioSampleBufferFromTrack): Placeholder.
+ (WebCore::MediaPlayerPrivateMediaStreamAVFObjC::enqueueVideoSampleBufferFromTrack): Responsible
+ for enqueuing sample buffers to the active video track.
+ (WebCore::MediaPlayerPrivateMediaStreamAVFObjC::ensureLayer): Ensures that an AVSampleBufferDisplayLayer
+ exists.
+ (WebCore::MediaPlayerPrivateMediaStreamAVFObjC::destroyLayer): Destroys the AVSampleBufferDisplayLayer.
+ (WebCore::MediaPlayerPrivateMediaStreamAVFObjC::platformLayer): Replace CALayer with AVSampleBufferDisplayLayer.
+ (WebCore::MediaPlayerPrivateMediaStreamAVFObjC::currentDisplayMode): Ditto.
+ (WebCore::MediaPlayerPrivateMediaStreamAVFObjC::sampleBufferUpdated): Called from MediaStreamTrackPrivate when a
+ new SampleBuffer is available.
+ (WebCore::updateTracksOfType): Manage adding and removing self as observer from tracks.
+ (WebCore::MediaPlayerPrivateMediaStreamAVFObjC::updateTracks): Replace CALayer with AVSampleBufferDisplayLayer
+ (WebCore::MediaPlayerPrivateMediaStreamAVFObjC::acceleratedRenderingStateChanged): Copied from
+ MediaPlayerPrivateMediaSourceAVFObjC.mm
+ (WebCore::MediaPlayerPrivateMediaStreamAVFObjC::load): Deleted CALayer.
+ (WebCore::MediaPlayerPrivateMediaStreamAVFObjC::updateDisplayMode): Deleted process of updating CALayer.
+ (WebCore::MediaPlayerPrivateMediaStreamAVFObjC::updateIntrinsicSize): Deleted CALayer.
+ (WebCore::MediaPlayerPrivateMediaStreamAVFObjC::createPreviewLayers): Deleted.
+ * platform/mediastream/MediaStreamPrivate.cpp:
+ (WebCore::MediaStreamPrivate::updateActiveVideoTrack): Remove redundant check.
+ * platform/mediastream/MediaStreamTrackPrivate.cpp:
+ (WebCore::MediaStreamTrackPrivate::sourceHasMoreMediaData): Called from RealtimeMediaSource when a new SampleBuffer
+ is available.
+ * platform/mediastream/MediaStreamTrackPrivate.h:
+ (WebCore::MediaStreamTrackPrivate::Observer::sampleBufferUpdated): Relays to MediaPlayerPrivateMediaStream that
+ a new SampleBuffer is available to enqueue to the AVSampleBufferDisplayLayer.
+ * platform/mediastream/RealtimeMediaSource.cpp:
+ (WebCore::RealtimeMediaSource::mediaDataUpdated): Relays to all observers that a new SampleBuffer is available.
+ * platform/mediastream/RealtimeMediaSource.h:
+ * platform/mediastream/mac/AVVideoCaptureSource.mm:
+ (WebCore::AVVideoCaptureSource::processNewFrame): Calls mediaDataUpdated when a new SampleBuffer is captured.
+
2016-07-19 Anders Carlsson <[email protected]>
Get rid of a #define private public hack in WebCore
Modified: trunk/Source/WebCore/WebCore.xcodeproj/project.pbxproj (203422 => 203423)
--- trunk/Source/WebCore/WebCore.xcodeproj/project.pbxproj 2016-07-19 21:55:51 UTC (rev 203422)
+++ trunk/Source/WebCore/WebCore.xcodeproj/project.pbxproj 2016-07-19 22:35:51 UTC (rev 203423)
@@ -985,6 +985,7 @@
1AFE119A0CBFFCC4003017FA /* JSSQLResultSetRowList.h in Headers */ = {isa = PBXBuildFile; fileRef = 1AFE11980CBFFCC4003017FA /* JSSQLResultSetRowList.h */; };
1B124D8D1D380B7000ECDFB0 /* MediaSampleAVFObjC.h in Headers */ = {isa = PBXBuildFile; fileRef = 1B124D8C1D380B7000ECDFB0 /* MediaSampleAVFObjC.h */; };
1B124D8F1D380BB600ECDFB0 /* MediaSampleAVFObjC.mm in Sources */ = {isa = PBXBuildFile; fileRef = 1B124D8E1D380BB600ECDFB0 /* MediaSampleAVFObjC.mm */; };
+ 1BF9DB3C1D3973AD0026AEB7 /* MediaSample.h in Headers */ = {isa = PBXBuildFile; fileRef = CD641EC7181ED60100EE4C41 /* MediaSample.h */; settings = {ATTRIBUTES = (Private, ); }; };
1C010700192594DF008A4201 /* InlineTextBoxStyle.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 1C0106FE192594DF008A4201 /* InlineTextBoxStyle.cpp */; };
1C010701192594DF008A4201 /* InlineTextBoxStyle.h in Headers */ = {isa = PBXBuildFile; fileRef = 1C0106FF192594DF008A4201 /* InlineTextBoxStyle.h */; };
1C0939EA1A13E12900B788E5 /* CachedSVGFont.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 1C0939E81A13E12900B788E5 /* CachedSVGFont.cpp */; };
@@ -27034,6 +27035,7 @@
75793EC90D0CE72D007FC0AC /* JSMessageEvent.h in Headers */,
E1ADEDDA0E76BD93004A1A5E /* JSMessagePort.h in Headers */,
41F584C7104652CB009CAA64 /* JSMessagePortCustom.h in Headers */,
+ 1BF9DB3C1D3973AD0026AEB7 /* MediaSample.h in Headers */,
2D6F3E951C1F85550061DBD4 /* JSMockPageOverlay.h in Headers */,
E38838991BAD145F00D62EE3 /* JSModuleLoader.h in Headers */,
A86629D109DA2B48009633A5 /* JSMouseEvent.h in Headers */,
Modified: trunk/Source/WebCore/platform/graphics/avfoundation/MediaSampleAVFObjC.h (203422 => 203423)
--- trunk/Source/WebCore/platform/graphics/avfoundation/MediaSampleAVFObjC.h 2016-07-19 21:55:51 UTC (rev 203422)
+++ trunk/Source/WebCore/platform/graphics/avfoundation/MediaSampleAVFObjC.h 2016-07-19 22:35:51 UTC (rev 203423)
@@ -32,8 +32,8 @@
class MediaSampleAVFObjC final : public MediaSample {
public:
- static RefPtr<MediaSampleAVFObjC> create(CMSampleBufferRef sample, int trackID) { return adoptRef(new MediaSampleAVFObjC(sample, trackID)); }
- static RefPtr<MediaSampleAVFObjC> create(CMSampleBufferRef sample) { return adoptRef(new MediaSampleAVFObjC(sample)); }
+ static Ref<MediaSampleAVFObjC> create(CMSampleBufferRef sample, int trackID) { return adoptRef(*new MediaSampleAVFObjC(sample, trackID)); }
+ static Ref<MediaSampleAVFObjC> create(CMSampleBufferRef sample) { return adoptRef(*new MediaSampleAVFObjC(sample)); }
private:
MediaSampleAVFObjC(CMSampleBufferRef sample)
Modified: trunk/Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaStreamAVFObjC.h (203422 => 203423)
--- trunk/Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaStreamAVFObjC.h 2016-07-19 21:55:51 UTC (rev 203422)
+++ trunk/Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaStreamAVFObjC.h 2016-07-19 22:35:51 UTC (rev 203423)
@@ -29,6 +29,7 @@
#if ENABLE(MEDIA_STREAM) && USE(AVFOUNDATION)
#include "MediaPlayerPrivate.h"
+#include "MediaSample.h"
#include "MediaStreamPrivate.h"
#include <wtf/Function.h>
#include <wtf/MediaTime.h>
@@ -36,6 +37,7 @@
#include <wtf/WeakPtr.h>
OBJC_CLASS AVSampleBufferAudioRenderer;
+OBJC_CLASS AVSampleBufferDisplayLayer;
OBJC_CLASS AVStreamSession;
typedef struct opaqueCMSampleBuffer *CMSampleBufferRef;
@@ -51,7 +53,7 @@
class VideoFullscreenLayerManager;
#endif
-class MediaPlayerPrivateMediaStreamAVFObjC : public MediaPlayerPrivateInterface, public MediaStreamPrivate::Observer {
+class MediaPlayerPrivateMediaStreamAVFObjC : public MediaPlayerPrivateInterface, public MediaStreamPrivate::Observer, public MediaStreamTrackPrivate::Observer {
public:
explicit MediaPlayerPrivateMediaStreamAVFObjC(MediaPlayer*);
virtual ~MediaPlayerPrivateMediaStreamAVFObjC();
@@ -70,6 +72,9 @@
WeakPtr<MediaPlayerPrivateMediaStreamAVFObjC> createWeakPtr() { return m_weakPtrFactory.createWeakPtr(); }
+ void ensureLayer();
+ void destroyLayer();
+
private:
// MediaPlayerPrivateInterface
@@ -117,10 +122,14 @@
void setSize(const IntSize&) override { /* No-op */ }
+ void enqueueAudioSampleBufferFromTrack(MediaStreamTrackPrivate&, PlatformSample);
+ void enqueueVideoSampleBufferFromTrack(MediaStreamTrackPrivate&, PlatformSample);
+
void paint(GraphicsContext&, const FloatRect&) override;
void paintCurrentFrameInContext(GraphicsContext&, const FloatRect&) override;
bool metaDataAvailable() const { return m_mediaStreamPrivate && m_readyState >= MediaPlayer::HaveMetadata; }
+ void acceleratedRenderingStateChanged() override;
bool supportsAcceleratedRendering() const override { return true; }
bool hasSingleSecurityOrigin() const override { return true; }
@@ -139,7 +148,6 @@
void updateReadyState();
void updateIntrinsicSize(const FloatSize&);
- void createPreviewLayers();
void updateTracks();
void renderingModeChanged();
@@ -160,6 +168,13 @@
void didAddTrack(MediaStreamTrackPrivate&) override;
void didRemoveTrack(MediaStreamTrackPrivate&) override;
+ // MediaStreamPrivateTrack::Observer
+ void trackEnded(MediaStreamTrackPrivate&) override { };
+ void trackMutedChanged(MediaStreamTrackPrivate&) override { };
+ void trackSettingsChanged(MediaStreamTrackPrivate&) override { };
+ void trackEnabledChanged(MediaStreamTrackPrivate&) override { };
+ void sampleBufferUpdated(MediaStreamTrackPrivate&, MediaSample&) override;
+
#if PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE)
void setVideoFullscreenLayer(PlatformLayer*, std::function<void()> completionHandler) override;
void setVideoFullscreenFrame(FloatRect) override;
@@ -168,8 +183,7 @@
MediaPlayer* m_player { nullptr };
WeakPtrFactory<MediaPlayerPrivateMediaStreamAVFObjC> m_weakPtrFactory;
RefPtr<MediaStreamPrivate> m_mediaStreamPrivate;
- mutable RetainPtr<CALayer> m_previewLayer;
- mutable RetainPtr<PlatformLayer> m_videoBackgroundLayer;
+ RetainPtr<AVSampleBufferDisplayLayer> m_sampleBufferDisplayLayer;
RetainPtr<CGImageRef> m_pausedImage;
std::unique_ptr<Clock> m_clock;
@@ -185,6 +199,7 @@
bool m_muted { false };
bool m_haveEverPlayed { false };
bool m_ended { false };
+ bool m_hasEverEnqueuedVideoFrame { false };
#if PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE)
std::unique_ptr<VideoFullscreenLayerManager> m_videoFullscreenLayerManager;
Modified: trunk/Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaStreamAVFObjC.mm (203422 => 203423)
--- trunk/Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaStreamAVFObjC.mm 2016-07-19 21:55:51 UTC (rev 203422)
+++ trunk/Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaStreamAVFObjC.mm 2016-07-19 22:35:51 UTC (rev 203423)
@@ -36,6 +36,7 @@
#import "Logging.h"
#import "MediaStreamPrivate.h"
#import "VideoTrackPrivateMediaStream.h"
+#import <AVFoundation/AVSampleBufferDisplayLayer.h>
#import <QuartzCore/CALayer.h>
#import <QuartzCore/CATransaction.h>
#import <objc_runtime.h>
@@ -52,6 +53,8 @@
SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)
+SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVSampleBufferDisplayLayer)
+
namespace WebCore {
#pragma mark -
@@ -71,8 +74,17 @@
MediaPlayerPrivateMediaStreamAVFObjC::~MediaPlayerPrivateMediaStreamAVFObjC()
{
LOG(Media, "MediaPlayerPrivateMediaStreamAVFObjC::~MediaPlayerPrivateMediaStreamAVFObjC(%p)", this);
- if (m_mediaStreamPrivate)
+ if (m_mediaStreamPrivate) {
m_mediaStreamPrivate->removeObserver(*this);
+
+ for (auto& track : m_mediaStreamPrivate->tracks())
+ track->removeObserver(*this);
+ }
+
+ m_audioTrackMap.clear();
+ m_videoTrackMap.clear();
+
+ destroyLayer();
}
#pragma mark -
@@ -87,7 +99,7 @@
bool MediaPlayerPrivateMediaStreamAVFObjC::isAvailable()
{
- return AVFoundationLibrary() && isCoreMediaFrameworkAvailable();
+ return AVFoundationLibrary() && isCoreMediaFrameworkAvailable() && getAVSampleBufferDisplayLayerClass();
}
void MediaPlayerPrivateMediaStreamAVFObjC::getSupportedTypes(HashSet<String, ASCIICaseInsensitiveHash>& types)
@@ -105,6 +117,60 @@
}
#pragma mark -
+#pragma mark AVSampleBuffer Methods
+
+void MediaPlayerPrivateMediaStreamAVFObjC::enqueueAudioSampleBufferFromTrack(MediaStreamTrackPrivate&, PlatformSample)
+{
+ // FIXME: https://bugs.webkit.org/show_bug.cgi?id=159836
+}
+
+void MediaPlayerPrivateMediaStreamAVFObjC::enqueueVideoSampleBufferFromTrack(MediaStreamTrackPrivate& track, PlatformSample platformSample)
+{
+ if (&track != m_mediaStreamPrivate->activeVideoTrack())
+ return;
+
+ if (m_displayMode == LivePreview && [m_sampleBufferDisplayLayer isReadyForMoreMediaData]) {
+ [m_sampleBufferDisplayLayer enqueueSampleBuffer:platformSample.sample.cmSampleBuffer];
+
+ if (!m_hasEverEnqueuedVideoFrame) {
+ m_hasEverEnqueuedVideoFrame = true;
+ m_player->firstVideoFrameAvailable();
+ }
+ }
+}
+
+void MediaPlayerPrivateMediaStreamAVFObjC::ensureLayer()
+{
+ if (m_sampleBufferDisplayLayer)
+ return;
+
+ m_sampleBufferDisplayLayer = adoptNS([allocAVSampleBufferDisplayLayerInstance() init]);
+#ifndef NDEBUG
+ [m_sampleBufferDisplayLayer setName:@"MediaPlayerPrivateMediaStreamAVFObjC AVSampleBufferDisplayLayer"];
+#endif
+
+ renderingModeChanged();
+
+#if PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE)
+ m_videoFullscreenLayerManager->setVideoLayer(m_sampleBufferDisplayLayer.get(), snappedIntRect(m_player->client().mediaPlayerContentBoxRect()).size());
+#endif
+}
+
+void MediaPlayerPrivateMediaStreamAVFObjC::destroyLayer()
+{
+ if (!m_sampleBufferDisplayLayer)
+ return;
+
+ [m_sampleBufferDisplayLayer flush];
+ m_sampleBufferDisplayLayer = nullptr;
+ renderingModeChanged();
+
+#if PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE)
+ m_videoFullscreenLayerManager->didDestroyVideoLayer();
+#endif
+}
+
+#pragma mark -
#pragma mark MediaPlayerPrivateInterface Overrides
void MediaPlayerPrivateMediaStreamAVFObjC::load(const String&)
@@ -129,7 +195,6 @@
{
LOG(Media, "MediaPlayerPrivateMediaStreamAVFObjC::load(%p)", this);
- m_previewLayer = nullptr;
m_intrinsicSize = FloatSize();
m_mediaStreamPrivate = &stream;
@@ -157,19 +222,19 @@
PlatformLayer* MediaPlayerPrivateMediaStreamAVFObjC::platformLayer() const
{
- if (!m_videoBackgroundLayer || m_displayMode == None)
+ if (!m_sampleBufferDisplayLayer || m_displayMode == None)
return nullptr;
#if PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE)
return m_videoFullscreenLayerManager->videoInlineLayer();
#else
- return m_videoBackgroundLayer.get();
+ return m_sampleBufferDisplayLayer.get();
#endif
}
MediaPlayerPrivateMediaStreamAVFObjC::DisplayMode MediaPlayerPrivateMediaStreamAVFObjC::currentDisplayMode() const
{
- if (m_ended || m_intrinsicSize.isEmpty() || !metaDataAvailable() || !m_videoBackgroundLayer)
+ if (m_ended || m_intrinsicSize.isEmpty() || !metaDataAvailable() || !m_sampleBufferDisplayLayer)
return None;
if (m_mediaStreamPrivate->activeVideoTrack() && !m_mediaStreamPrivate->activeVideoTrack()->enabled())
@@ -194,51 +259,6 @@
if (m_displayMode == None)
return;
-
- [CATransaction begin];
- [CATransaction setAnimationDuration:0];
- [CATransaction setDisableActions:YES];
-
- do {
- if (m_displayMode < LivePreview) {
-
- if (m_displayMode == PausedImage) {
- if (m_videoBackgroundLayer.get().contents)
- break;
-
- RefPtr<Image> image = m_mediaStreamPrivate->currentFrameImage();
- if (!image) {
- m_displayMode = PaintItBlack;
- continue;
- }
-
- m_pausedImage = image->getCGImageRef();
- if (!m_pausedImage) {
- m_displayMode = PaintItBlack;
- continue;
- }
-
- m_videoBackgroundLayer.get().contents = (id)m_pausedImage.get();
- m_videoBackgroundLayer.get().backgroundColor = nil;
- } else {
- m_videoBackgroundLayer.get().contents = nil;
- m_videoBackgroundLayer.get().backgroundColor = cachedCGColor(Color::black);
- m_pausedImage = nullptr;
- }
-
- m_previewLayer.get().hidden = true;
-
- } else {
-
- m_previewLayer.get().hidden = false;
- m_videoBackgroundLayer.get().contents = nil;
- m_pausedImage = nullptr;
- }
-
- break;
- } while (1);
-
- [CATransaction commit];
}
void MediaPlayerPrivateMediaStreamAVFObjC::play()
@@ -387,46 +407,8 @@
return;
m_intrinsicSize = size;
-
- if (m_videoBackgroundLayer || !m_player || !m_player->client().mediaPlayerRenderingCanBeAccelerated(m_player))
- return;
-
- if (!m_mediaStreamPrivate || !m_mediaStreamPrivate->platformLayer())
- return;
-
- createPreviewLayers();
}
-void MediaPlayerPrivateMediaStreamAVFObjC::createPreviewLayers()
-{
- if (!m_videoBackgroundLayer) {
- m_videoBackgroundLayer = adoptNS([[CALayer alloc] init]);
- m_videoBackgroundLayer.get().name = @"MediaPlayerPrivateMediaStreamAVFObjC preview background layer";
-
-#if PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE)
- m_videoFullscreenLayerManager->setVideoLayer(m_videoBackgroundLayer.get(), snappedIntRect(m_player->client().mediaPlayerContentBoxRect()).size());
-#endif
- }
-
- if (!m_previewLayer) {
- m_previewLayer = m_mediaStreamPrivate->platformLayer();
- if (m_previewLayer) {
- m_previewLayer.get().contentsGravity = kCAGravityResizeAspect;
- m_previewLayer.get().anchorPoint = CGPointZero;
- if (!m_playing)
- m_previewLayer.get().hidden = true;
-
- [m_videoBackgroundLayer addSublayer:m_previewLayer.get()];
-#if PLATFORM(MAC)
- [m_previewLayer setFrame:[m_videoBackgroundLayer bounds]];
- [m_previewLayer setAutoresizingMask:(kCALayerWidthSizable | kCALayerHeightSizable)];
-#endif
- }
- }
-
- renderingModeChanged();
-}
-
void MediaPlayerPrivateMediaStreamAVFObjC::renderingModeChanged()
{
updateDisplayMode();
@@ -473,6 +455,26 @@
updateTracks();
}
+void MediaPlayerPrivateMediaStreamAVFObjC::sampleBufferUpdated(MediaStreamTrackPrivate& track, MediaSample& mediaSample)
+{
+ ASSERT(track.id() == mediaSample.trackID());
+ ASSERT(mediaSample.platformSample().type == PlatformSample::CMSampleBufferType);
+ ASSERT(m_mediaStreamPrivate);
+
+
+ switch (track.type()) {
+ case RealtimeMediaSource::None:
+ // Do nothing.
+ break;
+ case RealtimeMediaSource::Audio:
+ // FIXME: https://bugs.webkit.org/show_bug.cgi?id=159836
+ break;
+ case RealtimeMediaSource::Video:
+ enqueueVideoSampleBufferFromTrack(track, mediaSample.platformSample());
+ break;
+ }
+}
+
#if PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE)
void MediaPlayerPrivateMediaStreamAVFObjC::setVideoFullscreenLayer(PlatformLayer *videoFullscreenLayer, std::function<void()> completionHandler)
{
@@ -486,7 +488,7 @@
#endif
template <typename RefT, typename PassRefT>
-void updateTracksOfType(HashMap<String, RefT>& trackMap, RealtimeMediaSource::Type trackType, MediaStreamTrackPrivateVector& currentTracks, RefT (*itemFactory)(MediaStreamTrackPrivate&), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT), std::function<void(RefT, int)> configureCallback)
+void updateTracksOfType(HashMap<String, RefT>& trackMap, RealtimeMediaSource::Type trackType, MediaStreamTrackPrivateVector& currentTracks, RefT (*itemFactory)(MediaStreamTrackPrivate&), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT), std::function<void(RefT, int)> configureCallback, MediaPlayerPrivateMediaStreamAVFObjC* trackObserver)
{
Vector<RefT> removedTracks;
Vector<RefT> addedTracks;
@@ -519,17 +521,20 @@
for (const auto& track : trackMap.values())
configureCallback(track, index++);
- for (auto& track : removedTracks)
+ for (auto& track : removedTracks) {
(player->*removedFunction)(track);
+ track->streamTrack()->removeObserver(*trackObserver);
+ }
- for (auto& track : addedTracks)
+ for (auto& track : addedTracks) {
(player->*addedFunction)(track);
+ track->streamTrack()->addObserver(*trackObserver);
+ }
}
void MediaPlayerPrivateMediaStreamAVFObjC::updateTracks()
{
MediaStreamTrackPrivateVector currentTracks = m_mediaStreamPrivate->tracks();
- bool selectedVideoTrackChanged = false;
std::function<void(RefPtr<AudioTrackPrivateMediaStream>, int)> enableAudioTrack = [this](auto track, int index)
{
@@ -536,24 +541,17 @@
track->setTrackIndex(index);
track->setEnabled(track->streamTrack()->enabled() && !track->streamTrack()->muted());
};
- updateTracksOfType(m_audioTrackMap, RealtimeMediaSource::Audio, currentTracks, &AudioTrackPrivateMediaStream::create, m_player, &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack, enableAudioTrack);
+ updateTracksOfType(m_audioTrackMap, RealtimeMediaSource::Audio, currentTracks, &AudioTrackPrivateMediaStream::create, m_player, &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack, enableAudioTrack, this);
- std::function<void(RefPtr<VideoTrackPrivateMediaStream>, int)> enableVideoTrack = [this, &selectedVideoTrackChanged](auto track, int index)
+ std::function<void(RefPtr<VideoTrackPrivateMediaStream>, int)> enableVideoTrack = [this](auto track, int index)
{
- bool wasSelected = track->selected();
track->setTrackIndex(index);
track->setSelected(track->streamTrack() == m_mediaStreamPrivate->activeVideoTrack());
- if (wasSelected != track->selected())
- selectedVideoTrackChanged = true;
+
+ if (track->selected())
+ ensureLayer();
};
- updateTracksOfType(m_videoTrackMap, RealtimeMediaSource::Video, currentTracks, &VideoTrackPrivateMediaStream::create, m_player, &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack, enableVideoTrack);
-
- if (selectedVideoTrackChanged) {
- if (m_previewLayer)
- m_previewLayer = nullptr;
-
- createPreviewLayers();
- }
+ updateTracksOfType(m_videoTrackMap, RealtimeMediaSource::Video, currentTracks, &VideoTrackPrivateMediaStream::create, m_player, &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack, enableVideoTrack, this);
}
std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateMediaStreamAVFObjC::seekable() const
@@ -593,6 +591,14 @@
}
}
+void MediaPlayerPrivateMediaStreamAVFObjC::acceleratedRenderingStateChanged()
+{
+ if (m_player->client().mediaPlayerRenderingCanBeAccelerated(m_player))
+ ensureLayer();
+ else
+ destroyLayer();
+}
+
String MediaPlayerPrivateMediaStreamAVFObjC::engineDescription() const
{
static NeverDestroyed<String> description(ASCIILiteral("AVFoundation MediaStream Engine"));
Modified: trunk/Source/WebCore/platform/mediastream/MediaStreamPrivate.cpp (203422 => 203423)
--- trunk/Source/WebCore/platform/mediastream/MediaStreamPrivate.cpp 2016-07-19 21:55:51 UTC (rev 203422)
+++ trunk/Source/WebCore/platform/mediastream/MediaStreamPrivate.cpp 2016-07-19 22:35:51 UTC (rev 203423)
@@ -257,7 +257,7 @@
{
m_activeVideoTrack = nullptr;
for (auto& track : m_trackSet.values()) {
- if (!track->ended() && track->type() == RealtimeMediaSource::Type::Video && !track->ended()) {
+ if (!track->ended() && track->type() == RealtimeMediaSource::Type::Video) {
m_activeVideoTrack = track.get();
break;
}
Modified: trunk/Source/WebCore/platform/mediastream/MediaStreamTrackPrivate.cpp (203422 => 203423)
--- trunk/Source/WebCore/platform/mediastream/MediaStreamTrackPrivate.cpp 2016-07-19 21:55:51 UTC (rev 203422)
+++ trunk/Source/WebCore/platform/mediastream/MediaStreamTrackPrivate.cpp 2016-07-19 22:35:51 UTC (rev 203423)
@@ -213,6 +213,13 @@
return !m_isEnded;
}
+void MediaStreamTrackPrivate::sourceHasMoreMediaData(MediaSample& mediaSample)
+{
+ mediaSample.setTrackID(id());
+ for (auto& observer : m_observers)
+ observer->sampleBufferUpdated(*this, mediaSample);
+}
+
} // namespace WebCore
#endif // ENABLE(MEDIA_STREAM)
Modified: trunk/Source/WebCore/platform/mediastream/MediaStreamTrackPrivate.h (203422 => 203423)
--- trunk/Source/WebCore/platform/mediastream/MediaStreamTrackPrivate.h 2016-07-19 21:55:51 UTC (rev 203422)
+++ trunk/Source/WebCore/platform/mediastream/MediaStreamTrackPrivate.h 2016-07-19 22:35:51 UTC (rev 203423)
@@ -37,6 +37,7 @@
class AudioSourceProvider;
class GraphicsContext;
+class MediaSample;
class MediaSourceSettings;
class RealtimeMediaSourceCapabilities;
@@ -50,6 +51,7 @@
virtual void trackMutedChanged(MediaStreamTrackPrivate&) = 0;
virtual void trackSettingsChanged(MediaStreamTrackPrivate&) = 0;
virtual void trackEnabledChanged(MediaStreamTrackPrivate&) = 0;
+ virtual void sampleBufferUpdated(MediaStreamTrackPrivate&, MediaSample&) { };
};
static RefPtr<MediaStreamTrackPrivate> create(RefPtr<RealtimeMediaSource>&&);
@@ -104,6 +106,7 @@
void sourceMutedChanged() final;
void sourceSettingsChanged() final;
bool preventSourceFromStopping() final;
+ void sourceHasMoreMediaData(MediaSample&) final;
Vector<Observer*> m_observers;
RefPtr<RealtimeMediaSource> m_source;
Modified: trunk/Source/WebCore/platform/mediastream/RealtimeMediaSource.cpp (203422 => 203423)
--- trunk/Source/WebCore/platform/mediastream/RealtimeMediaSource.cpp 2016-07-19 21:55:51 UTC (rev 203422)
+++ trunk/Source/WebCore/platform/mediastream/RealtimeMediaSource.cpp 2016-07-19 22:35:51 UTC (rev 203423)
@@ -96,6 +96,12 @@
observer->sourceSettingsChanged();
}
+void RealtimeMediaSource::mediaDataUpdated(MediaSample& mediaSample)
+{
+ for (auto& observer : m_observers)
+ observer->sourceHasMoreMediaData(mediaSample);
+}
+
bool RealtimeMediaSource::readonly() const
{
return m_readonly;
Modified: trunk/Source/WebCore/platform/mediastream/RealtimeMediaSource.h (203422 => 203423)
--- trunk/Source/WebCore/platform/mediastream/RealtimeMediaSource.h 2016-07-19 21:55:51 UTC (rev 203422)
+++ trunk/Source/WebCore/platform/mediastream/RealtimeMediaSource.h 2016-07-19 22:35:51 UTC (rev 203423)
@@ -39,6 +39,7 @@
#include "AudioSourceProvider.h"
#include "Image.h"
#include "MediaConstraints.h"
+#include "MediaSample.h"
#include "PlatformLayer.h"
#include "RealtimeMediaSourceCapabilities.h"
#include <wtf/RefCounted.h>
@@ -66,6 +67,9 @@
// Observer state queries.
virtual bool preventSourceFromStopping() = 0;
+
+ // Media data changes.
+ virtual void sourceHasMoreMediaData(MediaSample&) = 0;
};
virtual ~RealtimeMediaSource() { }
@@ -87,6 +91,7 @@
virtual RefPtr<RealtimeMediaSourceCapabilities> capabilities() = 0;
virtual const RealtimeMediaSourceSettings& settings() = 0;
void settingsDidChanged();
+ void mediaDataUpdated(MediaSample&);
bool stopped() const { return m_stopped; }
Modified: trunk/Source/WebCore/platform/mediastream/mac/AVVideoCaptureSource.mm (203422 => 203423)
--- trunk/Source/WebCore/platform/mediastream/mac/AVVideoCaptureSource.mm 2016-07-19 21:55:51 UTC (rev 203422)
+++ trunk/Source/WebCore/platform/mediastream/mac/AVVideoCaptureSource.mm 2016-07-19 22:35:51 UTC (rev 203423)
@@ -34,6 +34,7 @@
#import "IntRect.h"
#import "Logging.h"
#import "MediaConstraints.h"
+#import "MediaSampleAVFObjC.h"
#import "NotImplemented.h"
#import "PlatformLayer.h"
#import "RealtimeMediaSourceCenter.h"
@@ -298,7 +299,9 @@
}
if (settingsChanged)
- this->settingsDidChanged();
+ settingsDidChanged();
+
+ mediaDataUpdated(MediaSampleAVFObjC::create(sampleBuffer.get()));
}
void AVVideoCaptureSource::captureOutputDidOutputSampleBufferFromConnection(AVCaptureOutputType*, CMSampleBufferRef sampleBuffer, AVCaptureConnectionType*)