Diff
Modified: trunk/LayoutTests/ChangeLog (213982 => 213983)
--- trunk/LayoutTests/ChangeLog 2017-03-15 16:35:18 UTC (rev 213982)
+++ trunk/LayoutTests/ChangeLog 2017-03-15 16:38:10 UTC (rev 213983)
@@ -1,5 +1,23 @@
2017-03-15 Youenn Fablet <you...@apple.com>
+ run-webkit-tests is always creating mock libwebrtc tracks
+ https://bugs.webkit.org/show_bug.cgi?id=169658
+
+ Reviewed by Alex Christensen.
+
+ * TestExpectations:
+ * webrtc/audio-peer-connection-webaudio.html:
+ * webrtc/peer-connection-audio-mute-expected.txt: Added.
+ * webrtc/peer-connection-audio-mute.html: Added.
+ * webrtc/routines.js:
+ (analyseAudio):
+ * webrtc/video-expected.txt:
+ * webrtc/video-mute-expected.txt: Added.
+ * webrtc/video-mute.html: Added.
+ * webrtc/video.html:
+
+2017-03-15 Youenn Fablet <you...@apple.com>
+
Preventive clean-up: ensure RTCPeerConnection stays valid when calling postTask
https://bugs.webkit.org/show_bug.cgi?id=169661
Modified: trunk/LayoutTests/TestExpectations (213982 => 213983)
--- trunk/LayoutTests/TestExpectations 2017-03-15 16:35:18 UTC (rev 213982)
+++ trunk/LayoutTests/TestExpectations 2017-03-15 16:38:10 UTC (rev 213983)
@@ -708,7 +708,7 @@
media/session [ Skip ]
# WebRTC backend not enabled by default on Mac/iOS release bots.
-# GTK enables some of this tests on their TestExpectations file.
+# GTK enables some of these tests on their TestExpectations file.
[ Release ] webrtc [ Skip ]
[ Debug ] webrtc/audio-peer-connection-webaudio.html [ Failure ]
Modified: trunk/LayoutTests/TestExpectations.orig (213982 => 213983)
--- trunk/LayoutTests/TestExpectations.orig 2017-03-15 16:35:18 UTC (rev 213982)
+++ trunk/LayoutTests/TestExpectations.orig 2017-03-15 16:38:10 UTC (rev 213983)
@@ -733,6 +733,18 @@
imported/w3c/web-platform-tests/XMLHttpRequest/open-url-redirected-worker-origin.htm [ Skip ]
imported/w3c/web-platform-tests/html/webappapis/system-state-and-capabilities/the-navigator-object/NavigatorID.html [ Skip ]
imported/w3c/web-platform-tests/html/webappapis/system-state-and-capabilities/the-navigator-object/NavigatorID.worker.html [ Skip ]
+imported/w3c/web-platform-tests/XMLHttpRequest/anonymous-mode-unsupported.htm [ Failure ]
+imported/w3c/web-platform-tests/XMLHttpRequest/open-after-setrequestheader.htm [ Failure ]
+imported/w3c/web-platform-tests/XMLHttpRequest/open-referer.htm [ Failure ]
+imported/w3c/web-platform-tests/XMLHttpRequest/send-accept-language.htm [ Failure ]
+imported/w3c/web-platform-tests/XMLHttpRequest/setrequestheader-allow-empty-value.htm [ Failure ]
+imported/w3c/web-platform-tests/XMLHttpRequest/setrequestheader-allow-whitespace-in-value.htm [ Failure ]
+imported/w3c/web-platform-tests/XMLHttpRequest/setrequestheader-case-insensitive.htm [ Failure ]
+imported/w3c/web-platform-tests/XMLHttpRequest/setrequestheader-header-allowed.htm [ Failure ]
+imported/w3c/web-platform-tests/XMLHttpRequest/setrequestheader-header-forbidden.htm [ Failure ]
+imported/w3c/web-platform-tests/XMLHttpRequest/setrequestheader-open-setrequestheader.htm [ Failure ]
+imported/w3c/web-platform-tests/html/dom/interfaces.worker.html [ Failure ]
+imported/w3c/web-platform-tests/html/webappapis/scripting/events/event-handler-attributes-body-window.html [ Failure ]
# Only iOS WK1 has testRunner.setPagePaused.
fast/dom/timer-fire-after-page-pause.html [ Skip ]
Modified: trunk/LayoutTests/webrtc/audio-peer-connection-webaudio.html (213982 => 213983)
--- trunk/LayoutTests/webrtc/audio-peer-connection-webaudio.html 2017-03-15 16:35:18 UTC (rev 213982)
+++ trunk/LayoutTests/webrtc/audio-peer-connection-webaudio.html 2017-03-15 16:38:10 UTC (rev 213983)
@@ -7,75 +7,28 @@
<script src=""
<script src =""
<script>
- var test = async_test(() => {
+ promise_test((test) => {
if (window.testRunner)
testRunner.setUserMediaPermission(true);
- var heardHum = false;
- var heardBop = false;
- var heardBip = false;
-
- navigator.mediaDevices.getUserMedia({audio: true}).then((stream) => {
+ return navigator.mediaDevices.getUserMedia({audio: true}).then((stream) => {
if (window.internals)
internals.useMockRTCPeerConnectionFactory("TwoRealPeerConnections");
-
- createConnections((firstConnection) => {
- firstConnection.addStream(stream);
- }, (secondConnection) => {
- secondConnection.onaddstream = (streamEvent) => {
- var context = new webkitAudioContext();
- var sourceNode = context.createMediaStreamSource(streamEvent.stream);
- var analyser = context.createAnalyser();
- var gain = context.createGain();
-
- analyser.fftSize = 2048;
- analyser.smoothingTimeConstant = 0;
- analyser.minDecibels = -100;
- analyser.maxDecibels = 0;
- gain.gain.value = 0;
-
- sourceNode.connect(analyser);
- analyser.connect(gain);
- gain.connect(context.destination);
-
- function analyse() {
- var freqDomain = new Uint8Array(analyser.frequencyBinCount);
- analyser.getByteFrequencyData(freqDomain);
-
- var hasFrequency = expectedFrequency => {
- var bin = Math.floor(expectedFrequency * analyser.fftSize / context.sampleRate);
- return bin < freqDomain.length && freqDomain[bin] >= 150;
- };
-
- if (!heardHum)
- heardHum = hasFrequency(150);
-
- if (!heardBip)
- heardBip = hasFrequency(1500);
-
- if (!heardBop)
- heardBop = hasFrequency(500);
-
- if (heardHum && heardBip && heardBop)
- done();
- };
-
- var done = test.step_func_done(() => {
- clearTimeout(timeout);
- clearInterval(interval);
-
- assert_true(heardHum, "heard hum");
- assert_true(heardBip, "heard bip");
- assert_true(heardBop, "heard bop");
- test.done();
- });
-
- var timeout = setTimeout(done, 3000);
- var interval = setInterval(analyse, 1000 / 30);
- analyse();
- }
+ return new Promise((resolve, reject) => {
+ createConnections((firstConnection) => {
+ firstConnection.addStream(stream);
+ }, (secondConnection) => {
+ secondConnection.onaddstream = (streamEvent) => { resolve(streamEvent.stream); };
+ });
+ setTimeout(() => reject("Test timed out"), 5000);
+ }).then((stream) => {
+ return analyseAudio(stream, 1000);
+ }).then((results) => {
+ assert_true(results.heardHum, "heard hum");
+ assert_true(results.heardBip, "heard bip");
+ assert_true(results.heardBop, "heard bop");
});
- });
+ });
}, "Basic audio playback through a peer connection");
</script>
</head>
Added: trunk/LayoutTests/webrtc/peer-connection-audio-mute-expected.txt (0 => 213983)
--- trunk/LayoutTests/webrtc/peer-connection-audio-mute-expected.txt (rev 0)
+++ trunk/LayoutTests/webrtc/peer-connection-audio-mute-expected.txt 2017-03-15 16:38:10 UTC (rev 213983)
@@ -0,0 +1,3 @@
+
+FAIL Muting and unmuting an audio track assert_true: heard hum expected true got false
+
Added: trunk/LayoutTests/webrtc/peer-connection-audio-mute.html (0 => 213983)
--- trunk/LayoutTests/webrtc/peer-connection-audio-mute.html (rev 0)
+++ trunk/LayoutTests/webrtc/peer-connection-audio-mute.html 2017-03-15 16:38:10 UTC (rev 213983)
@@ -0,0 +1,65 @@
+<!DOCTYPE html>
+<html>
+<head>
+ <meta charset="utf-8">
+ <title>Testing local audio capture playback causes "playing" event to fire</title>
+ <script src=""
+ <script src=""
+</head>
+<body>
+ <script src =""
+ <script>
+ promise_test((test) => {
+ if (window.testRunner)
+ testRunner.setUserMediaPermission(true);
+
+ return navigator.mediaDevices.getUserMedia({audio: true}).then((stream) => {
+ if (window.internals)
+ internals.useMockRTCPeerConnectionFactory("TwoRealPeerConnections");
+
+ var stream;
+ return new Promise((resolve, reject) => {
+ createConnections((firstConnection) => {
+ firstConnection.addStream(stream);
+ }, (secondConnection) => {
+ secondConnection.onaddstream = (streamEvent) => {
+ stream = streamEvent.stream;
+ resolve();
+ };
+ });
+ }).then(() => {
+ return waitFor(500);
+ }).then(() => {
+ return analyseAudio(stream, 500).then((results) => {
+ assert_true(results.heardHum, "heard hum");
+ assert_true(results.heardBip, "heard bip");
+ assert_true(results.heardBop, "heard bop");
+ });
+ }).then(() => {
+ stream.getAudioTracks().forEach((track) => {
+ track.enabled = false;
+ });
+ return waitFor(500);
+ }).then(() => {
+ return analyseAudio(stream, 500).then((results) => {
+ assert_false(results.heardHum, "heard hum");
+ assert_false(results.heardBip, "heard bip");
+ assert_false(results.heardBop, "heard bop");
+ });
+ }).then(() => {
+ stream.getAudioTracks().forEach((track) => {
+ track.enabled = true;
+ });
+ return waitFor(500);
+ }).then(() => {
+ return analyseAudio(stream, 500).then((results) => {
+ assert_true(results.heardHum, "heard hum");
+ assert_true(results.heardBip, "heard bip");
+ assert_true(results.heardBop, "heard bop");
+ });
+ });
+ });
+ }, "Muting and unmuting an audio track");
+ </script>
+</body>
+</html>
Modified: trunk/LayoutTests/webrtc/routines.js (213982 => 213983)
--- trunk/LayoutTests/webrtc/routines.js 2017-03-15 16:35:18 UTC (rev 213982)
+++ trunk/LayoutTests/webrtc/routines.js 2017-03-15 16:38:10 UTC (rev 213983)
@@ -71,3 +71,62 @@
{
assert_unreached();
}
+
+function analyseAudio(stream, duration)
+{
+ return new Promise((resolve, reject) => {
+ var context = new webkitAudioContext();
+ var sourceNode = context.createMediaStreamSource(stream);
+ var analyser = context.createAnalyser();
+ var gain = context.createGain();
+
+ var results = { heardHum: false, heardBip: false, heardBop: false };
+
+ analyser.fftSize = 2048;
+ analyser.smoothingTimeConstant = 0;
+ analyser.minDecibels = -100;
+ analyser.maxDecibels = 0;
+ gain.gain.value = 0;
+
+ sourceNode.connect(analyser);
+ analyser.connect(gain);
+ gain.connect(context.destination);
+
+ function analyse() {
+ var freqDomain = new Uint8Array(analyser.frequencyBinCount);
+ analyser.getByteFrequencyData(freqDomain);
+
+ var hasFrequency = expectedFrequency => {
+ var bin = Math.floor(expectedFrequency * analyser.fftSize / context.sampleRate);
+ return bin < freqDomain.length && freqDomain[bin] >= 150;
+ };
+
+ if (!results.heardHum)
+ results.heardHum = hasFrequency(150);
+
+ if (!results.heardBip)
+ results.heardBip = hasFrequency(1500);
+
+ if (!results.heardBop)
+ results.heardBop = hasFrequency(500);
+
+ if (results.heardHum && results.heardBip && results.heardBop)
+ done();
+ };
+
+ function done() {
+ clearTimeout(timeout);
+ clearInterval(interval);
+ resolve(results);
+ }
+
+ var timeout = setTimeout(done, 3 * duration);
+ var interval = setInterval(analyse, duration / 30);
+ analyse();
+ });
+}
+
+function waitFor(duration)
+{
+ return new Promise((resolve) => setTimeout(resolve, duration));
+}
Modified: trunk/LayoutTests/webrtc/video-expected.txt (213982 => 213983)
--- trunk/LayoutTests/webrtc/video-expected.txt 2017-03-15 16:35:18 UTC (rev 213982)
+++ trunk/LayoutTests/webrtc/video-expected.txt 2017-03-15 16:38:10 UTC (rev 213983)
@@ -1,4 +1,4 @@
-FAIL Basic video exchange assert_true: expected true got false
+PASS Basic video exchange
Added: trunk/LayoutTests/webrtc/video-mute-expected.txt (0 => 213983)
--- trunk/LayoutTests/webrtc/video-mute-expected.txt (rev 0)
+++ trunk/LayoutTests/webrtc/video-mute-expected.txt 2017-03-15 16:38:10 UTC (rev 213983)
@@ -0,0 +1,4 @@
+
+
+PASS Video muted/unmuted track
+
Added: trunk/LayoutTests/webrtc/video-mute.html (0 => 213983)
--- trunk/LayoutTests/webrtc/video-mute.html (rev 0)
+++ trunk/LayoutTests/webrtc/video-mute.html 2017-03-15 16:38:10 UTC (rev 213983)
@@ -0,0 +1,69 @@
+<!doctype html>
+<html>
+ <head>
+ <meta charset="utf-8">
+ <title>Testing basic video exchange from offerer to receiver</title>
+ <script src=""
+ <script src=""
+ </head>
+ <body>
+ <video id="video" autoplay=""></video>
+ <canvas id="canvas" width="640" height="480"></canvas>
+ <script src =""
+ <script>
+video = document.getElementById("video");
+canvas = document.getElementById("canvas");
+// FIXME: We should use tracks
+
+function isVideoBlack()
+{
+ canvas.width = video.videoWidth;
+ canvas.height = video.videoHeight;
+ canvas.getContext('2d').drawImage(video, 0, 0, canvas.width, canvas.height);
+
+ imageData = canvas.getContext('2d').getImageData(10, 325, 250, 1);
+ data = imageData.data;
+ for (var cptr = 0; cptr < canvas.width * canvas.height; ++cptr) {
+ if (data[4 * cptr] || data[4 * cptr + 1] || data[4 * cptr + 2])
+ return false;
+ }
+ return true;
+}
+
+var track;
+promise_test((test) => {
+ if (window.testRunner)
+ testRunner.setUserMediaPermission(true);
+
+ return navigator.mediaDevices.getUserMedia({ video: true}).then((stream) => {
+ return new Promise((resolve, reject) => {
+ if (window.internals)
+ internals.useMockRTCPeerConnectionFactory("TwoRealPeerConnections");
+
+ createConnections((firstConnection) => {
+ firstConnection.addStream(stream);
+ }, (secondConnection) => {
+ secondConnection.onaddstream = (streamEvent) => { resolve(streamEvent.stream); };
+ });
+ setTimeout(() => reject("Test timed out"), 5000);
+ });
+ }).then((stream) => {
+ video.srcObject = stream;
+ track = stream.getVideoTracks()[0];
+ return video.play();
+ }).then(() => {
+ assert_false(isVideoBlack());
+ }).then(() => {
+ track.enabled = false;
+ return waitFor(500);
+ }).then(() => {
+ assert_true(isVideoBlack());
+ track.enabled = true;
+ return waitFor(500);
+ }).then(() => {
+ assert_false(isVideoBlack());
+ });
+}, "Video muted/unmuted track");
+ </script>
+ </body>
+</html>
Modified: trunk/LayoutTests/webrtc/video.html (213982 => 213983)
--- trunk/LayoutTests/webrtc/video.html 2017-03-15 16:35:18 UTC (rev 213982)
+++ trunk/LayoutTests/webrtc/video.html 2017-03-15 16:38:10 UTC (rev 213983)
@@ -11,12 +11,6 @@
<canvas id="canvas" width="640" height="480"></canvas>
<script src =""
<script>
-if (window.internals)
- internals.useMockRTCPeerConnectionFactory("TwoRealPeerConnections");
-
-if (window.testRunner)
- testRunner.setUserMediaPermission(true);
-
video = document.getElementById("video");
canvas = document.getElementById("canvas");
// FIXME: We should use tracks
@@ -23,56 +17,50 @@
function testImage()
{
- try {
- canvas.width = video.videoWidth;
- canvas.height = video.videoHeight;
- canvas.getContext('2d').drawImage(video, 0, 0, canvas.width, canvas.height);
+ canvas.width = video.videoWidth;
+ canvas.height = video.videoHeight;
+ canvas.getContext('2d').drawImage(video, 0, 0, canvas.width, canvas.height);
- imageData = canvas.getContext('2d').getImageData(10, 325, 250, 1);
- data = imageData.data;
+ imageData = canvas.getContext('2d').getImageData(10, 325, 250, 1);
+ data = imageData.data;
- var index = 20;
- assert_true(data[index] < 100);
- assert_true(data[index + 1] < 100);
- assert_true(data[index + 2] < 100);
+ var index = 20;
+ assert_true(data[index] < 100);
+ assert_true(data[index + 1] < 100);
+ assert_true(data[index + 2] < 100);
- index = 80;
- assert_true(data[index] > 200);
- assert_true(data[index + 1] > 200);
- assert_true(data[index + 2] > 200);
+ index = 80;
+ assert_true(data[index] > 200);
+ assert_true(data[index + 1] > 200);
+ assert_true(data[index + 2] > 200);
- index += 80;
- assert_true(data[index] > 200);
- assert_true(data[index + 1] > 200);
- assert_true(data[index + 2] < 100);
-
- finishTest();
- } catch(e) {
- errorTest(e);
- }
+ index += 80;
+ assert_true(data[index] > 200);
+ assert_true(data[index + 1] > 200);
+ assert_true(data[index + 2] < 100);
}
-function testStream(stream)
-{
- video.srcObject = stream;
- // Video may play with black frames
- video.onplay = setTimeout(() => {
- testImage();
- }, 1000);
-}
+promise_test((test) => {
+ if (window.testRunner)
+ testRunner.setUserMediaPermission(true);
-var finishTest, errorTest;
-promise_test((test) => {
return navigator.mediaDevices.getUserMedia({ video: true}).then((stream) => {
return new Promise((resolve, reject) => {
- finishTest = resolve;
- errorTest = reject;
+ if (window.internals)
+ internals.useMockRTCPeerConnectionFactory("TwoRealPeerConnections");
+
createConnections((firstConnection) => {
firstConnection.addStream(stream);
}, (secondConnection) => {
- secondConnection.onaddstream = (streamEvent) => { testStream(streamEvent.stream); };
+ secondConnection.onaddstream = (streamEvent) => { resolve(streamEvent.stream); };
});
+ setTimeout(() => reject("Test timed out"), 5000);
});
+ }).then((stream) => {
+ video.srcObject = stream;
+ return video.play();
+ }).then(() => {
+ testImage();
});
}, "Basic video exchange");
</script>
Modified: trunk/Source/WebCore/ChangeLog (213982 => 213983)
--- trunk/Source/WebCore/ChangeLog 2017-03-15 16:35:18 UTC (rev 213982)
+++ trunk/Source/WebCore/ChangeLog 2017-03-15 16:38:10 UTC (rev 213983)
@@ -1,5 +1,22 @@
2017-03-15 Youenn Fablet <you...@apple.com>
+ run-webkit-tests is always creating mock libwebrtc tracks
+ https://bugs.webkit.org/show_bug.cgi?id=169658
+
+ Reviewed by Alex Christensen.
+
+ Tests: webrtc/peer-connection-audio-mute.html
+ webrtc/video-mute.html
+
+ Creating real libwebrtc av tracks in case of RealTwoPeerConnections mock factory.
+
+ * testing/MockLibWebRTCPeerConnection.cpp:
+ (WebCore::MockLibWebRTCPeerConnectionFactory::CreateVideoTrack):
+ (WebCore::MockLibWebRTCPeerConnectionFactory::CreateAudioTrack):
+ * testing/MockLibWebRTCPeerConnection.h:
+
+2017-03-15 Youenn Fablet <you...@apple.com>
+
Preventive clean-up: ensure RTCPeerConnection stays valid when calling postTask
https://bugs.webkit.org/show_bug.cgi?id=169661
Modified: trunk/Source/WebCore/testing/MockLibWebRTCPeerConnection.cpp (213982 => 213983)
--- trunk/Source/WebCore/testing/MockLibWebRTCPeerConnection.cpp 2017-03-15 16:35:18 UTC (rev 213982)
+++ trunk/Source/WebCore/testing/MockLibWebRTCPeerConnection.cpp 2017-03-15 16:38:10 UTC (rev 213983)
@@ -189,6 +189,20 @@
return new rtc::RefCountedObject<MockLibWebRTCPeerConnection>(*observer);
}
+rtc::scoped_refptr<webrtc::VideoTrackInterface> MockLibWebRTCPeerConnectionFactory::CreateVideoTrack(const std::string& id, webrtc::VideoTrackSourceInterface* source)
+{
+ if (m_testCase == "TwoRealPeerConnections")
+ return realPeerConnectionFactory()->CreateVideoTrack(id, source);
+ return new rtc::RefCountedObject<MockLibWebRTCVideoTrack>(id, source);
+}
+
+rtc::scoped_refptr<webrtc::AudioTrackInterface> MockLibWebRTCPeerConnectionFactory::CreateAudioTrack(const std::string& id, webrtc::AudioSourceInterface* source)
+{
+ if (m_testCase == "TwoRealPeerConnections")
+ return realPeerConnectionFactory()->CreateAudioTrack(id, source);
+ return new rtc::RefCountedObject<MockLibWebRTCAudioTrack>(id, source);
+}
+
rtc::scoped_refptr<webrtc::MediaStreamInterface> MockLibWebRTCPeerConnectionFactory::CreateLocalMediaStream(const std::string& label)
{
return new rtc::RefCountedObject<webrtc::MediaStream>(label);
Modified: trunk/Source/WebCore/testing/MockLibWebRTCPeerConnection.h (213982 => 213983)
--- trunk/Source/WebCore/testing/MockLibWebRTCPeerConnection.h 2017-03-15 16:35:18 UTC (rev 213982)
+++ trunk/Source/WebCore/testing/MockLibWebRTCPeerConnection.h 2017-03-15 16:38:10 UTC (rev 213983)
@@ -244,8 +244,9 @@
rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> CreateVideoSource(cricket::VideoCapturer*) final { return nullptr; }
rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> CreateVideoSource(cricket::VideoCapturer*, const webrtc::MediaConstraintsInterface*) final { return nullptr; }
- rtc::scoped_refptr<webrtc::VideoTrackInterface> CreateVideoTrack(const std::string& id, webrtc::VideoTrackSourceInterface* source) final { return new rtc::RefCountedObject<MockLibWebRTCVideoTrack>(id, source); }
- rtc::scoped_refptr<webrtc::AudioTrackInterface> CreateAudioTrack(const std::string& id, webrtc::AudioSourceInterface* source) final { return new rtc::RefCountedObject<MockLibWebRTCAudioTrack>(id, source); }
+ rtc::scoped_refptr<webrtc::VideoTrackInterface> CreateVideoTrack(const std::string&, webrtc::VideoTrackSourceInterface*) final;
+ rtc::scoped_refptr<webrtc::AudioTrackInterface> CreateAudioTrack(const std::string&, webrtc::AudioSourceInterface*) final;
+
bool StartAecDump(rtc::PlatformFile, int64_t) final { return false; }
void StopAecDump() final { }