Diff
Modified: trunk/Source/WebKit/ChangeLog (271204 => 271205)
--- trunk/Source/WebKit/ChangeLog 2021-01-06 19:19:26 UTC (rev 271204)
+++ trunk/Source/WebKit/ChangeLog 2021-01-06 19:22:03 UTC (rev 271205)
@@ -1,3 +1,29 @@
+2021-01-06 Sihui Liu <[email protected]>
+
+ Stop speech recognition if page becomes invisible
+ https://bugs.webkit.org/show_bug.cgi?id=220073
+ <rdar://problem/72710704>
+
+ Reviewed by Youenn Fablet.
+
+ We should not allow speech recognition on a page that is invisible to the user, because the page may receive
+ content of captured audio without the user's notice. To do this, we should:
+ 1. deny speech recognition requests on an invisible page
+ 2. abort ongoing speech recognition if the page becomes invisible
+
+ API test: WebKit2.SpeechRecognitionPageBecomesInvisible
+
+ * UIProcess/SpeechRecognitionPermissionManager.cpp:
+ (WebKit::SpeechRecognitionPermissionManager::continueProcessingRequest):
+ * UIProcess/SpeechRecognitionServer.cpp:
+ (WebKit::SpeechRecognitionServer::abortForPageIsBecomingInvisible):
+ * UIProcess/SpeechRecognitionServer.h:
+ * UIProcess/WebPageProxy.cpp:
+ (WebKit::WebPageProxy::dispatchActivityStateChange):
+ * UIProcess/WebProcessProxy.cpp:
+ (WebKit::WebProcessProxy::pageIsBecomingInvisible):
+ * UIProcess/WebProcessProxy.h:
+
2021-01-06 Alex Christensen <[email protected]>
Use sendWithAsyncReply instead of iOS-specific GenericCallbacks
Modified: trunk/Source/WebKit/UIProcess/SpeechRecognitionPermissionManager.cpp (271204 => 271205)
--- trunk/Source/WebKit/UIProcess/SpeechRecognitionPermissionManager.cpp 2021-01-06 19:19:26 UTC (rev 271204)
+++ trunk/Source/WebKit/UIProcess/SpeechRecognitionPermissionManager.cpp 2021-01-06 19:22:03 UTC (rev 271205)
@@ -161,6 +161,11 @@
}
ASSERT(m_userPermissionCheck == CheckResult::Granted);
+ if (!m_page.isViewVisible()) {
+ completeCurrentRequest(SpeechRecognitionPermissionDecision::Deny);
+ return;
+ }
+
completeCurrentRequest(SpeechRecognitionPermissionDecision::Grant);
}
Modified: trunk/Source/WebKit/UIProcess/SpeechRecognitionServer.cpp (271204 => 271205)
--- trunk/Source/WebKit/UIProcess/SpeechRecognitionServer.cpp 2021-01-06 19:19:26 UTC (rev 271204)
+++ trunk/Source/WebKit/UIProcess/SpeechRecognitionServer.cpp 2021-01-06 19:22:03 UTC (rev 271205)
@@ -168,6 +168,20 @@
}
}
+void SpeechRecognitionServer::abortForPageIsBecomingInvisible()
+{
+ if (!m_recognizer)
+ return;
+
+ auto currentClientIdentifier = m_recognizer->currentClientIdentifier();
+ if (!currentClientIdentifier)
+ return;
+
+ auto error = WebCore::SpeechRecognitionError { WebCore::SpeechRecognitionErrorType::Aborted, "Page is no longer visible"_s };
+ sendUpdate(*currentClientIdentifier, WebCore::SpeechRecognitionUpdateType::Error, error);
+ m_recognizer->reset();
+}
+
void SpeechRecognitionServer::sendUpdate(WebCore::SpeechRecognitionConnectionClientIdentifier clientIdentifier, WebCore::SpeechRecognitionUpdateType type, Optional<WebCore::SpeechRecognitionError> error, Optional<Vector<WebCore::SpeechRecognitionResultData>> result)
{
auto update = WebCore::SpeechRecognitionUpdate::create(clientIdentifier, type);
Modified: trunk/Source/WebKit/UIProcess/SpeechRecognitionServer.h (271204 => 271205)
--- trunk/Source/WebKit/UIProcess/SpeechRecognitionServer.h 2021-01-06 19:19:26 UTC (rev 271204)
+++ trunk/Source/WebKit/UIProcess/SpeechRecognitionServer.h 2021-01-06 19:22:03 UTC (rev 271205)
@@ -64,6 +64,7 @@
void abort(WebCore::SpeechRecognitionConnectionClientIdentifier);
void invalidate(WebCore::SpeechRecognitionConnectionClientIdentifier);
void mute();
+ void abortForPageIsBecomingInvisible();
private:
void requestPermissionForRequest(WebCore::SpeechRecognitionRequest&);
Modified: trunk/Source/WebKit/UIProcess/WebPageProxy.cpp (271204 => 271205)
--- trunk/Source/WebKit/UIProcess/WebPageProxy.cpp 2021-01-06 19:19:26 UTC (rev 271204)
+++ trunk/Source/WebKit/UIProcess/WebPageProxy.cpp 2021-01-06 19:22:03 UTC (rev 271205)
@@ -1995,8 +1995,12 @@
if ((changed & ActivityState::WindowIsActive) && isViewWindowActive())
updateCurrentModifierState();
- if ((m_potentiallyChangedActivityStateFlags & ActivityState::IsVisible) && isViewVisible())
- viewIsBecomingVisible();
+ if ((m_potentiallyChangedActivityStateFlags & ActivityState::IsVisible)) {
+ if (isViewVisible())
+ viewIsBecomingVisible();
+ else
+ m_process->pageIsBecomingInvisible(m_webPageID);
+ }
bool isNowInWindow = (changed & ActivityState::IsInWindow) && isInWindow();
// We always want to wait for the Web process to reply if we've been in-window before and are coming back in-window.
Modified: trunk/Source/WebKit/UIProcess/WebProcessProxy.cpp (271204 => 271205)
--- trunk/Source/WebKit/UIProcess/WebProcessProxy.cpp 2021-01-06 19:19:26 UTC (rev 271204)
+++ trunk/Source/WebKit/UIProcess/WebProcessProxy.cpp 2021-01-06 19:22:03 UTC (rev 271205)
@@ -1788,6 +1788,12 @@
speechRecognitionServer->mute();
}
+void WebProcessProxy::pageIsBecomingInvisible(WebCore::PageIdentifier identifier)
+{
+ if (auto server = m_speechRecognitionServerMap.get(identifier))
+ server->abortForPageIsBecomingInvisible();
+}
+
#if PLATFORM(WATCHOS)
void WebProcessProxy::startBackgroundActivityForFullscreenInput()
Modified: trunk/Source/WebKit/UIProcess/WebProcessProxy.h (271204 => 271205)
--- trunk/Source/WebKit/UIProcess/WebProcessProxy.h 2021-01-06 19:19:26 UTC (rev 271204)
+++ trunk/Source/WebKit/UIProcess/WebProcessProxy.h 2021-01-06 19:22:03 UTC (rev 271205)
@@ -405,6 +405,7 @@
SpeechRecognitionRemoteRealtimeMediaSourceManager& ensureSpeechRecognitionRemoteRealtimeMediaSourceManager();
#endif
void pageMutedStateChanged(WebCore::PageIdentifier, WebCore::MediaProducer::MutedStateFlags);
+ void pageIsBecomingInvisible(WebCore::PageIdentifier);
protected:
WebProcessProxy(WebProcessPool&, WebsiteDataStore*, IsPrewarmed);
Modified: trunk/Tools/ChangeLog (271204 => 271205)
--- trunk/Tools/ChangeLog 2021-01-06 19:19:26 UTC (rev 271204)
+++ trunk/Tools/ChangeLog 2021-01-06 19:22:03 UTC (rev 271205)
@@ -1,3 +1,17 @@
+2021-01-06 Sihui Liu <[email protected]>
+
+ Stop speech recognition if page becomes invisible
+ https://bugs.webkit.org/show_bug.cgi?id=220073
+ <rdar://problem/72710704>
+
+ Reviewed by Youenn Fablet.
+
+ * TestWebKitAPI/Tests/WebKitCocoa/SpeechRecognition.mm: updated
+ WebKit2.SpeechRecognitionErrorWhenStartingAudioCaptureOnDifferentPage to match the new behavior.
+ (TestWebKitAPI::TEST):
+ * TestWebKitAPI/cocoa/TestWKWebView.mm:
+ (-[TestWKWebView _setUpTestWindow:]): set the origin of the host window to the origin of the TestWKWebView's frame.
+
2021-01-06 Jonathan Bedard <[email protected]>
[webkitscmpy] Add command to canonicalize unpushed commits (Follow-up fix)
Modified: trunk/Tools/TestWebKitAPI/Tests/WebKitCocoa/SpeechRecognition.mm (271204 => 271205)
--- trunk/Tools/TestWebKitAPI/Tests/WebKitCocoa/SpeechRecognition.mm 2021-01-06 19:19:26 UTC (rev 271204)
+++ trunk/Tools/TestWebKitAPI/Tests/WebKitCocoa/SpeechRecognition.mm 2021-01-06 19:22:03 UTC (rev 271205)
@@ -128,9 +128,9 @@
preferences._speechRecognitionEnabled = YES;
preferences._mediaCaptureRequiresSecureConnection = NO;
auto delegate = adoptNS([[SpeechRecognitionPermissionUIDelegate alloc] init]);
- auto firstWebView = adoptNS([[TestWKWebView alloc] initWithFrame:CGRectMake(0, 0, 800, 600) configuration:configuration.get()]);
+ auto firstWebView = adoptNS([[TestWKWebView alloc] initWithFrame:CGRectMake(0, 0, 100, 100) configuration:configuration.get()]);
[firstWebView setUIDelegate:delegate.get()];
- auto secondWebView = adoptNS([[TestWKWebView alloc] initWithFrame:CGRectMake(0, 0, 800, 600) configuration:configuration.get()]);
+ auto secondWebView = adoptNS([[TestWKWebView alloc] initWithFrame:CGRectMake(100, 0, 100, 100) configuration:configuration.get()]);
[secondWebView setUIDelegate:delegate.get()];
// First page starts recognition successfully.
@@ -167,4 +167,46 @@
EXPECT_WK_STREQ(@"Recorder Mute", [lastScriptMessage body]);
}
+// FIXME: enable this test on iOS when https://webkit.org/b/175204 is fixed.
+#if PLATFORM(MAC)
+
+TEST(WebKit2, SpeechRecognitionPageBecomesInvisible)
+{
+ auto configuration = adoptNS([[WKWebViewConfiguration alloc] init]);
+ auto handler = adoptNS([[SpeechRecognitionMessageHandler alloc] init]);
+ [[configuration userContentController] addScriptMessageHandler:handler.get() name:@"testHandler"];
+ auto preferences = [configuration preferences];
+ preferences._mockCaptureDevicesEnabled = YES;
+ preferences._speechRecognitionEnabled = YES;
+ auto webView = adoptNS([[TestWKWebView alloc] initWithFrame:CGRectMake(0, 0, 800, 600) configuration:configuration.get()]);
+ auto delegate = adoptNS([[SpeechRecognitionPermissionUIDelegate alloc] init]);
+ [webView setUIDelegate:delegate.get()];
+
+ // Page is visible.
+ shouldGrantPermissionRequest = true;
+ receivedScriptMessage = false;
+ [webView synchronouslyLoadTestPageNamed:@"speechrecognition-basic"];
+ [webView stringByEvaluatingJavaScript:@"start()"];
+ TestWebKitAPI::Util::run(&receivedScriptMessage);
+ EXPECT_WK_STREQ(@"Start", [lastScriptMessage body]);
+
+ // Hide page.
+ receivedScriptMessage = false;
+#if PLATFORM(MAC)
+ [webView.get().window setIsVisible:NO];
+#else
+ webView.get().window.hidden = YES;
+#endif
+ TestWebKitAPI::Util::run(&receivedScriptMessage);
+ EXPECT_WK_STREQ(@"Error: aborted - Page is no longer visible", [lastScriptMessage body]);
+
+ // Page is invisible.
+ receivedScriptMessage = false;
+ [webView evaluateJavaScript:@"start()" completionHandler:nil];
+ TestWebKitAPI::Util::run(&receivedScriptMessage);
+ EXPECT_WK_STREQ(@"Error: not-allowed - Permission check failed", [lastScriptMessage body]);
+}
+
+#endif
+
} // namespace TestWebKitAPI
Modified: trunk/Tools/TestWebKitAPI/cocoa/TestWKWebView.mm (271204 => 271205)
--- trunk/Tools/TestWebKitAPI/cocoa/TestWKWebView.mm 2021-01-06 19:19:26 UTC (rev 271204)
+++ trunk/Tools/TestWebKitAPI/cocoa/TestWKWebView.mm 2021-01-06 19:22:03 UTC (rev 271205)
@@ -462,7 +462,7 @@
{
#if PLATFORM(MAC)
_hostWindow = adoptNS([[TestWKWebViewHostWindow alloc] initWithWebView:self contentRect:frame styleMask:(NSWindowStyleMaskBorderless | NSWindowStyleMaskMiniaturizable) backing:NSBackingStoreBuffered defer:NO]);
- [_hostWindow setFrameOrigin:NSMakePoint(0, 0)];
+ [_hostWindow setFrameOrigin:frame.origin];
[_hostWindow setIsVisible:YES];
[_hostWindow contentView].wantsLayer = YES;
[[_hostWindow contentView] addSubview:self];