Diff
Modified: trunk/LayoutTests/ChangeLog (216053 => 216054)
--- trunk/LayoutTests/ChangeLog 2017-05-02 01:28:05 UTC (rev 216053)
+++ trunk/LayoutTests/ChangeLog 2017-05-02 01:33:37 UTC (rev 216054)
@@ -1,3 +1,12 @@
+2017-05-01 Youenn Fablet <[email protected]>
+
+ Ensure RealtimeOutgoingVideoSource sends a black frame when its related source is muted
+ https://bugs.webkit.org/show_bug.cgi?id=171497
+
+ Reviewed by Eric Carlson.
+
+ * webrtc/video-mute.html: Updating the test to make debugging clearer.
+
2017-05-01 Matt Lewis <[email protected]>
Marked LayoutTest/http/tests/preload/not_delaying_window_onload_before_discovery.html as flaky failure.
Modified: trunk/LayoutTests/webrtc/video-mute-expected.txt (216053 => 216054)
--- trunk/LayoutTests/webrtc/video-mute-expected.txt 2017-05-02 01:28:05 UTC (rev 216053)
+++ trunk/LayoutTests/webrtc/video-mute-expected.txt 2017-05-02 01:33:37 UTC (rev 216054)
@@ -1,4 +1,9 @@
+Video should be running, go to black and running.
+Following, should be a snapshot of the video, a black frame and a snapshot of the video.
+
+PASS Setting video exchange
+PASS Track is enabled, video should not be black
+PASS Track is disabled, video should be black
+PASS Track is enabled, video should not be black
-PASS Outgoing muted/unmuted video track
-
Modified: trunk/LayoutTests/webrtc/video-mute.html (216053 => 216054)
--- trunk/LayoutTests/webrtc/video-mute.html 2017-05-02 01:28:05 UTC (rev 216053)
+++ trunk/LayoutTests/webrtc/video-mute.html 2017-05-02 01:33:37 UTC (rev 216054)
@@ -7,15 +7,17 @@
<script src="../resources/testharness.js"></script>
</head>
<body>
- <video id="video" autoplay width="640" height="480"></video>
- <canvas id="canvas" width="640" height="480"></canvas>
+ <div>Video should be running, go to black and running.</div>
+ <div>Following, should be a snapshot of the video, a black frame and a snapshot of the video.</div>
+ <video id="video" autoplay playsInline width="320" height="240"></video>
+ <canvas id="canvas1" width="320" height="240"></canvas>
+ <canvas id="canvas2" width="320" height="240"></canvas>
+ <canvas id="canvas3" width="320" height="240"></canvas>
<script src ="routines.js"></script>
<script>
-video = document.getElementById("video");
-canvas = document.getElementById("canvas");
-
-function isVideoBlack()
+function isVideoBlack(id)
{
+ var canvas = document.getElementById(id);
canvas.width = video.videoWidth;
canvas.height = video.videoHeight;
canvas.getContext('2d').drawImage(video, 0, 0, canvas.width, canvas.height);
@@ -30,19 +32,21 @@
return true;
}
-function pollVideoBlackCheck(expected, resolve)
+function pollVideoBlackCheck(expected, id, resolve)
{
- if (isVideoBlack() === expected)
- resolve();
+ if (isVideoBlack(id) === expected) {
+ resolve();
+ return;
+ }
- setTimeout(() => pollVideoBlackCheck(expected, resolve), 50);
+ setTimeout(() => pollVideoBlackCheck(expected, id, resolve), 50);
}
-function checkVideoBlack(expected, message)
+function checkVideoBlack(expected, id)
{
return new Promise((resolve, reject) => {
- pollVideoBlackCheck(expected, resolve);
- setTimeout(() => reject(message), 5000);
+ pollVideoBlackCheck(expected, id, resolve);
+ setTimeout(reject, 5000);
});
}
@@ -51,7 +55,7 @@
if (window.testRunner)
testRunner.setUserMediaPermission(true);
- return navigator.mediaDevices.getUserMedia({ video: {width: 640, height: 480}}).then((localStream) => {
+ return navigator.mediaDevices.getUserMedia({video: {width: 320, height: 240, facingMode: "environment"}}).then((localStream) => {
return new Promise((resolve, reject) => {
track = localStream.getVideoTracks()[0];
@@ -65,16 +69,22 @@
}).then((remoteStream) => {
video.srcObject = remoteStream;
return video.play();
- }).then(() => {
- return checkVideoBlack(false, "track is enabled, video should not be black");
- }).then(() => {
- track.enabled = false;
- return checkVideoBlack(true, "track is disabled, video should be black");
- }).then(() => {
- track.enabled = true;
- return checkVideoBlack(false, "track is reenabled, video should not be black");
});
-}, "Outgoing muted/unmuted video track");
+}, "Setting video exchange");
+
+promise_test((test) => {
+ return checkVideoBlack(false, "canvas1");
+}, "Track is enabled, video should not be black");
+
+promise_test((test) => {
+ track.enabled = false;
+ return checkVideoBlack(true, "canvas2");
+}, "Track is disabled, video should be black");
+
+promise_test((test) => {
+ track.enabled = true;
+ return checkVideoBlack(false, "canvas3");
+}, "Track is enabled, video should not be black");
</script>
</body>
</html>
Modified: trunk/Source/WebCore/ChangeLog (216053 => 216054)
--- trunk/Source/WebCore/ChangeLog 2017-05-02 01:28:05 UTC (rev 216053)
+++ trunk/Source/WebCore/ChangeLog 2017-05-02 01:33:37 UTC (rev 216054)
@@ -1,3 +1,30 @@
+2017-05-01 Youenn Fablet <[email protected]>
+
+ Ensure RealtimeOutgoingVideoSource sends a black frame when its related source is muted
+ https://bugs.webkit.org/show_bug.cgi?id=171497
+
+ Reviewed by Eric Carlson.
+
+ Covered by updated test.
+
+ When the track is being muted or disabled, send a black frame explicitly.
+ VideoToolBox sometimes does not output a frame until it receives the other.
+ That is why we end up sending two frames, the second asynchronously so that libwebrtc will not skip it.
+ Also storing the rotation so that we keep the same rotation for black frames.
+ Storing width and height for consistency as well.
+
+ * platform/mediastream/mac/RealtimeOutgoingVideoSource.cpp:
+ (WebCore::RealtimeOutgoingVideoSource::RealtimeOutgoingVideoSource):
+ (WebCore::RealtimeOutgoingVideoSource::setSource):
+ (WebCore::RealtimeOutgoingVideoSource::stop):
+ (WebCore::RealtimeOutgoingVideoSource::sourceMutedChanged):
+ (WebCore::RealtimeOutgoingVideoSource::sourceEnabledChanged):
+ (WebCore::RealtimeOutgoingVideoSource::setSizeFromSource):
+ (WebCore::RealtimeOutgoingVideoSource::sendBlackFrame):
+ (WebCore::RealtimeOutgoingVideoSource::sendFrame):
+ (WebCore::RealtimeOutgoingVideoSource::videoSampleAvailable):
+ * platform/mediastream/mac/RealtimeOutgoingVideoSource.h:
+
2017-05-01 Joseph Pecoraro <[email protected]>
Simplify Resource Timing handling of cached resource
Modified: trunk/Source/WebCore/platform/mediastream/mac/RealtimeOutgoingVideoSource.cpp (216053 => 216054)
--- trunk/Source/WebCore/platform/mediastream/mac/RealtimeOutgoingVideoSource.cpp 2017-05-02 01:28:05 UTC (rev 216053)
+++ trunk/Source/WebCore/platform/mediastream/mac/RealtimeOutgoingVideoSource.cpp 2017-05-02 01:33:37 UTC (rev 216054)
@@ -34,6 +34,7 @@
#include <webrtc/common_video/include/corevideo_frame_buffer.h>
#include <webrtc/common_video/libyuv/include/webrtc_libyuv.h>
#include <webrtc/media/base/videoframe.h>
+#include <wtf/MainThread.h>
#include "CoreMediaSoftLink.h"
#include "CoreVideoSoftLink.h"
@@ -42,8 +43,10 @@
RealtimeOutgoingVideoSource::RealtimeOutgoingVideoSource(Ref<RealtimeMediaSource>&& videoSource)
: m_videoSource(WTFMove(videoSource))
+ , m_blackFrameTimer(*this, &RealtimeOutgoingVideoSource::sendOneBlackFrame)
{
m_videoSource->addObserver(*this);
+ setSizeFromSource();
}
bool RealtimeOutgoingVideoSource::setSource(Ref<RealtimeMediaSource>&& newSource)
@@ -59,6 +62,7 @@
m_videoSource->removeObserver(*this);
m_videoSource = WTFMove(newSource);
m_videoSource->addObserver(*this);
+ setSizeFromSource();
return true;
}
@@ -65,18 +69,35 @@
void RealtimeOutgoingVideoSource::stop()
{
m_videoSource->removeObserver(*this);
+ m_blackFrameTimer.stop();
+ m_isStopped = true;
}
void RealtimeOutgoingVideoSource::sourceMutedChanged()
{
+ ASSERT(m_muted != m_videoSource->muted());
m_muted = m_videoSource->muted();
+
+ if (m_muted && m_sinks.size() && m_enabled)
+ sendBlackFrame();
}
void RealtimeOutgoingVideoSource::sourceEnabledChanged()
{
+ ASSERT(m_enabled != m_videoSource->enabled());
m_enabled = m_videoSource->enabled();
+
+ if (!m_enabled && m_sinks.size() && !m_muted)
+ sendBlackFrame();
}
+void RealtimeOutgoingVideoSource::setSizeFromSource()
+{
+ const auto& settings = m_videoSource->settings();
+ m_width = settings.width();
+ m_height = settings.height();
+}
+
bool RealtimeOutgoingVideoSource::GetStats(Stats*)
{
return false;
@@ -94,9 +115,26 @@
m_sinks.removeFirst(sink);
}
-void RealtimeOutgoingVideoSource::sendFrame(rtc::scoped_refptr<webrtc::VideoFrameBuffer>&& buffer, webrtc::VideoRotation rotation)
+void RealtimeOutgoingVideoSource::sendBlackFrame()
{
- webrtc::VideoFrame frame(buffer, 0, 0, rotation);
+ if (!m_blackFrame) {
+ auto frame = m_bufferPool.CreateBuffer(m_width, m_height);
+ frame->SetToBlack();
+ m_blackFrame = WTFMove(frame);
+ }
+ sendOneBlackFrame();
+ // FIXME: We should not need to send two black frames but VTB requires that so we are sure a black frame is sent over the wire.
+ m_blackFrameTimer.startOneShot(0_s);
+}
+
+void RealtimeOutgoingVideoSource::sendOneBlackFrame()
+{
+ sendFrame(rtc::scoped_refptr<webrtc::VideoFrameBuffer>(m_blackFrame));
+}
+
+void RealtimeOutgoingVideoSource::sendFrame(rtc::scoped_refptr<webrtc::VideoFrameBuffer>&& buffer)
+{
+ webrtc::VideoFrame frame(buffer, 0, 0, m_currentRotation);
for (auto* sink : m_sinks)
sink->OnFrame(frame);
}
@@ -106,38 +144,30 @@
if (!m_sinks.size())
return;
- // FIXME: Shouldn't we use RealtimeMediaSource::size()
- const auto& settings = m_videoSource->settings();
-
- if (m_muted || !m_enabled) {
- auto blackBuffer = m_bufferPool.CreateBuffer(settings.width(), settings.height());
- blackBuffer->SetToBlack();
- sendFrame(WTFMove(blackBuffer), webrtc::kVideoRotation_0);
+ if (m_muted || !m_enabled)
return;
- }
- ASSERT(sample.platformSample().type == PlatformSample::CMSampleBufferType);
- auto pixelBuffer = static_cast<CVPixelBufferRef>(CMSampleBufferGetImageBuffer(sample.platformSample().sample.cmSampleBuffer));
- auto pixelFormatType = CVPixelBufferGetPixelFormatType(pixelBuffer);
-
- webrtc::VideoRotation rotation;
switch (sample.videoRotation()) {
case MediaSample::VideoRotation::None:
- rotation = webrtc::kVideoRotation_0;
+ m_currentRotation = webrtc::kVideoRotation_0;
break;
case MediaSample::VideoRotation::UpsideDown:
- rotation = webrtc::kVideoRotation_180;
+ m_currentRotation = webrtc::kVideoRotation_180;
break;
case MediaSample::VideoRotation::Right:
- rotation = webrtc::kVideoRotation_90;
+ m_currentRotation = webrtc::kVideoRotation_90;
break;
case MediaSample::VideoRotation::Left:
- rotation = webrtc::kVideoRotation_270;
+ m_currentRotation = webrtc::kVideoRotation_270;
break;
}
+ ASSERT(sample.platformSample().type == PlatformSample::CMSampleBufferType);
+ auto pixelBuffer = static_cast<CVPixelBufferRef>(CMSampleBufferGetImageBuffer(sample.platformSample().sample.cmSampleBuffer));
+ auto pixelFormatType = CVPixelBufferGetPixelFormatType(pixelBuffer);
+
if (pixelFormatType == kCVPixelFormatType_420YpCbCr8Planar || pixelFormatType == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) {
- sendFrame(new rtc::RefCountedObject<webrtc::CoreVideoFrameBuffer>(pixelBuffer), rotation);
+ sendFrame(new rtc::RefCountedObject<webrtc::CoreVideoFrameBuffer>(pixelBuffer));
return;
}
@@ -144,16 +174,15 @@
CVPixelBufferLockBaseAddress(pixelBuffer, 0);
auto* source = reinterpret_cast<uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0));
- auto newBuffer = m_bufferPool.CreateBuffer(settings.width(), settings.height());
+ auto newBuffer = m_bufferPool.CreateBuffer(m_width, m_height);
if (pixelFormatType == kCVPixelFormatType_32BGRA)
- webrtc::ConvertToI420(webrtc::kARGB, source, 0, 0, settings.width(), settings.height(), 0, webrtc::kVideoRotation_0, newBuffer);
+ webrtc::ConvertToI420(webrtc::kARGB, source, 0, 0, m_width, m_height, 0, webrtc::kVideoRotation_0, newBuffer);
else {
- // FIXME: Mock source conversion works with kBGRA while regular camera works with kARGB
ASSERT(pixelFormatType == kCVPixelFormatType_32ARGB);
- webrtc::ConvertToI420(webrtc::kBGRA, source, 0, 0, settings.width(), settings.height(), 0, webrtc::kVideoRotation_0, newBuffer);
+ webrtc::ConvertToI420(webrtc::kBGRA, source, 0, 0, m_width, m_height, 0, webrtc::kVideoRotation_0, newBuffer);
}
CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
- sendFrame(WTFMove(newBuffer), rotation);
+ sendFrame(WTFMove(newBuffer));
}
} // namespace WebCore
Modified: trunk/Source/WebCore/platform/mediastream/mac/RealtimeOutgoingVideoSource.h (216053 => 216054)
--- trunk/Source/WebCore/platform/mediastream/mac/RealtimeOutgoingVideoSource.h 2017-05-02 01:28:05 UTC (rev 216053)
+++ trunk/Source/WebCore/platform/mediastream/mac/RealtimeOutgoingVideoSource.h 2017-05-02 01:33:37 UTC (rev 216054)
@@ -32,6 +32,7 @@
#include "LibWebRTCMacros.h"
#include "RealtimeMediaSource.h"
+#include "Timer.h"
#include <webrtc/api/mediastreaminterface.h>
#include <webrtc/base/optional.h>
#include <webrtc/common_video/include/i420_buffer_pool.h>
@@ -56,7 +57,10 @@
private:
RealtimeOutgoingVideoSource(Ref<RealtimeMediaSource>&&);
- void sendFrame(rtc::scoped_refptr<webrtc::VideoFrameBuffer>&&, webrtc::VideoRotation);
+ void sendFrame(rtc::scoped_refptr<webrtc::VideoFrameBuffer>&&);
+ void sendBlackFrame();
+ void sendOneBlackFrame();
+ void setSizeFromSource();
// Notifier API
void RegisterObserver(webrtc::ObserverInterface*) final { }
@@ -78,6 +82,7 @@
// RealtimeMediaSource::Observer API
void sourceMutedChanged() final;
void sourceEnabledChanged() final;
+ void sourceSettingsChanged() final { setSizeFromSource(); }
void videoSampleAvailable(MediaSample&) final;
Vector<rtc::VideoSinkInterface<webrtc::VideoFrame>*> m_sinks;
@@ -86,6 +91,12 @@
bool m_enabled { true };
bool m_muted { false };
std::optional<RealtimeMediaSourceSettings> m_initialSettings;
+ webrtc::VideoRotation m_currentRotation { webrtc::kVideoRotation_0 };
+ uint32_t m_width { 0 };
+ uint32_t m_height { 0 };
+ bool m_isStopped { false };
+ Timer m_blackFrameTimer;
+ rtc::scoped_refptr<webrtc::VideoFrameBuffer> m_blackFrame;
};
} // namespace WebCore