Revision: 271169
Author: [email protected]
Date: 2021-01-05 12:24:51 -0800 (Tue, 05 Jan 2021)
Log Message
Stop speech recognition if page becomes invisible
https://bugs.webkit.org/show_bug.cgi?id=220073
<rdar://problem/72710704>
Patch by Sihui Liu <[email protected]> on 2021-01-05
Reviewed by Youenn Fablet.
Source/WebKit:
We should not allow speech recognition on a page that is invisible to the user, because the page may capture audio
content without the user's knowledge. To do this, we should:
1. deny speech recognition requests on an invisible page
2. abort ongoing speech recognition if the page becomes invisible
API test: WebKit2.SpeechRecognitionPageBecomesInvisible
* UIProcess/SpeechRecognitionPermissionManager.cpp:
(WebKit::SpeechRecognitionPermissionManager::continueProcessingRequest):
* UIProcess/SpeechRecognitionServer.cpp:
(WebKit::SpeechRecognitionServer::abortForPageIsBecomingInvisible):
* UIProcess/SpeechRecognitionServer.h:
* UIProcess/WebPageProxy.cpp:
(WebKit::WebPageProxy::dispatchActivityStateChange):
* UIProcess/WebProcessProxy.cpp:
(WebKit::WebProcessProxy::pageIsBecomingInvisible):
* UIProcess/WebProcessProxy.h:
Tools:
* TestWebKitAPI/Tests/WebKitCocoa/SpeechRecognition.mm:
(TestWebKitAPI::TEST):
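For readers unfamiliar with the flow, the following is a minimal standalone sketch of the two-step policy described in the log message. It is not WebKit code; names such as PageVisibilityGate and RecognitionSession are hypothetical and exist only to illustrate the idea.

// Standalone sketch (not WebKit code). PageVisibilityGate and RecognitionSession
// are hypothetical names used only to illustrate the visibility policy.
#include <functional>
#include <iostream>
#include <string>

enum class Decision { Grant, Deny };

struct RecognitionSession {
    bool active { false };
    std::function<void(const std::string&)> onError; // delivers the "aborted" error to the client
};

class PageVisibilityGate {
public:
    // Step 1: a recognition request made while the page is invisible is denied.
    static Decision decideRequest(bool pageIsVisible)
    {
        return pageIsVisible ? Decision::Grant : Decision::Deny;
    }

    // Step 2: when the page transitions to invisible, an active session is
    // aborted and the client is told why.
    static void pageVisibilityChanged(bool pageIsVisible, RecognitionSession& session)
    {
        if (pageIsVisible || !session.active)
            return;
        session.active = false;
        if (session.onError)
            session.onError("aborted: Page is no longer visible");
    }
};

int main()
{
    RecognitionSession session;
    session.onError = [](const std::string& message) { std::cout << message << "\n"; };

    // Request granted while visible; session starts.
    if (PageVisibilityGate::decideRequest(true) == Decision::Grant)
        session.active = true;

    // Page is hidden: the ongoing session is aborted with an error.
    PageVisibilityGate::pageVisibilityChanged(false, session);

    // A new request while still invisible is denied.
    std::cout << (PageVisibilityGate::decideRequest(false) == Decision::Deny ? "request denied\n" : "request granted\n");
    return 0;
}

In the actual patch, step 1 corresponds to the check in SpeechRecognitionPermissionManager::continueProcessingRequest, and step 2 to SpeechRecognitionServer::abortForPageIsBecomingInvisible, reached via WebPageProxy::dispatchActivityStateChange and WebProcessProxy::pageIsBecomingInvisible.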
Diff
Modified: trunk/Source/WebKit/ChangeLog (271168 => 271169)
--- trunk/Source/WebKit/ChangeLog 2021-01-05 20:13:38 UTC (rev 271168)
+++ trunk/Source/WebKit/ChangeLog 2021-01-05 20:24:51 UTC (rev 271169)
@@ -1,3 +1,29 @@
+2021-01-05 Sihui Liu <[email protected]>
+
+ Stop speech recognition if page becomes invisible
+ https://bugs.webkit.org/show_bug.cgi?id=220073
+ <rdar://problem/72710704>
+
+ Reviewed by Youenn Fablet.
+
+ We should not allow speech recognition on a page that is invisible to the user, because the page may capture
+ audio content without the user's knowledge. To do this, we should:
+ 1. deny speech recognition requests on an invisible page
+ 2. abort ongoing speech recognition if the page becomes invisible
+
+ API test: WebKit2.SpeechRecognitionPageBecomesInvisible
+
+ * UIProcess/SpeechRecognitionPermissionManager.cpp:
+ (WebKit::SpeechRecognitionPermissionManager::continueProcessingRequest):
+ * UIProcess/SpeechRecognitionServer.cpp:
+ (WebKit::SpeechRecognitionServer::abortForPageIsBecomingInvisible):
+ * UIProcess/SpeechRecognitionServer.h:
+ * UIProcess/WebPageProxy.cpp:
+ (WebKit::WebPageProxy::dispatchActivityStateChange):
+ * UIProcess/WebProcessProxy.cpp:
+ (WebKit::WebProcessProxy::pageIsBecomingInvisible):
+ * UIProcess/WebProcessProxy.h:
+
2021-01-05 Alex Christensen <[email protected]>
Use sendWithAsyncReply instead of ValidateCommandCallback
Modified: trunk/Source/WebKit/UIProcess/SpeechRecognitionPermissionManager.cpp (271168 => 271169)
--- trunk/Source/WebKit/UIProcess/SpeechRecognitionPermissionManager.cpp 2021-01-05 20:13:38 UTC (rev 271168)
+++ trunk/Source/WebKit/UIProcess/SpeechRecognitionPermissionManager.cpp 2021-01-05 20:24:51 UTC (rev 271169)
@@ -161,6 +161,11 @@
    }
    ASSERT(m_userPermissionCheck == CheckResult::Granted);
+    if (!m_page.isViewVisible()) {
+        completeCurrentRequest(SpeechRecognitionPermissionDecision::Deny);
+        return;
+    }
+
    completeCurrentRequest(SpeechRecognitionPermissionDecision::Grant);
}
Modified: trunk/Source/WebKit/UIProcess/SpeechRecognitionServer.cpp (271168 => 271169)
--- trunk/Source/WebKit/UIProcess/SpeechRecognitionServer.cpp 2021-01-05 20:13:38 UTC (rev 271168)
+++ trunk/Source/WebKit/UIProcess/SpeechRecognitionServer.cpp 2021-01-05 20:24:51 UTC (rev 271169)
@@ -168,6 +168,20 @@
    }
}
+void SpeechRecognitionServer::abortForPageIsBecomingInvisible()
+{
+    if (!m_recognizer)
+        return;
+
+    auto currentClientIdentifier = m_recognizer->currentClientIdentifier();
+    if (!currentClientIdentifier)
+        return;
+
+    auto error = WebCore::SpeechRecognitionError { WebCore::SpeechRecognitionErrorType::Aborted, "Page is no longer visible"_s };
+    sendUpdate(*currentClientIdentifier, WebCore::SpeechRecognitionUpdateType::Error, error);
+    m_recognizer->reset();
+}
+
void SpeechRecognitionServer::sendUpdate(WebCore::SpeechRecognitionConnectionClientIdentifier clientIdentifier, WebCore::SpeechRecognitionUpdateType type, Optional<WebCore::SpeechRecognitionError> error, Optional<Vector<WebCore::SpeechRecognitionResultData>> result)
{
    auto update = WebCore::SpeechRecognitionUpdate::create(clientIdentifier, type);
Modified: trunk/Source/WebKit/UIProcess/SpeechRecognitionServer.h (271168 => 271169)
--- trunk/Source/WebKit/UIProcess/SpeechRecognitionServer.h 2021-01-05 20:13:38 UTC (rev 271168)
+++ trunk/Source/WebKit/UIProcess/SpeechRecognitionServer.h 2021-01-05 20:24:51 UTC (rev 271169)
@@ -64,6 +64,7 @@
    void abort(WebCore::SpeechRecognitionConnectionClientIdentifier);
    void invalidate(WebCore::SpeechRecognitionConnectionClientIdentifier);
    void mute();
+    void abortForPageIsBecomingInvisible();
private:
    void requestPermissionForRequest(WebCore::SpeechRecognitionRequest&);
Modified: trunk/Source/WebKit/UIProcess/WebPageProxy.cpp (271168 => 271169)
--- trunk/Source/WebKit/UIProcess/WebPageProxy.cpp 2021-01-05 20:13:38 UTC (rev 271168)
+++ trunk/Source/WebKit/UIProcess/WebPageProxy.cpp 2021-01-05 20:24:51 UTC (rev 271169)
@@ -1995,8 +1995,12 @@
    if ((changed & ActivityState::WindowIsActive) && isViewWindowActive())
        updateCurrentModifierState();
-    if ((m_potentiallyChangedActivityStateFlags & ActivityState::IsVisible) && isViewVisible())
-        viewIsBecomingVisible();
+    if ((m_potentiallyChangedActivityStateFlags & ActivityState::IsVisible)) {
+        if (isViewVisible())
+            viewIsBecomingVisible();
+        else
+            m_process->pageIsBecomingInvisible(m_webPageID);
+    }
    bool isNowInWindow = (changed & ActivityState::IsInWindow) && isInWindow();
    // We always want to wait for the Web process to reply if we've been in-window before and are coming back in-window.
Modified: trunk/Source/WebKit/UIProcess/WebProcessProxy.cpp (271168 => 271169)
--- trunk/Source/WebKit/UIProcess/WebProcessProxy.cpp 2021-01-05 20:13:38 UTC (rev 271168)
+++ trunk/Source/WebKit/UIProcess/WebProcessProxy.cpp 2021-01-05 20:24:51 UTC (rev 271169)
@@ -1788,6 +1788,12 @@
        speechRecognitionServer->mute();
}
+void WebProcessProxy::pageIsBecomingInvisible(WebCore::PageIdentifier identifier)
+{
+    if (auto server = m_speechRecognitionServerMap.get(identifier))
+        server->abortForPageIsBecomingInvisible();
+}
+
#if PLATFORM(WATCHOS)
void WebProcessProxy::startBackgroundActivityForFullscreenInput()
Modified: trunk/Source/WebKit/UIProcess/WebProcessProxy.h (271168 => 271169)
--- trunk/Source/WebKit/UIProcess/WebProcessProxy.h 2021-01-05 20:13:38 UTC (rev 271168)
+++ trunk/Source/WebKit/UIProcess/WebProcessProxy.h 2021-01-05 20:24:51 UTC (rev 271169)
@@ -405,6 +405,7 @@
    SpeechRecognitionRemoteRealtimeMediaSourceManager& ensureSpeechRecognitionRemoteRealtimeMediaSourceManager();
#endif
    void pageMutedStateChanged(WebCore::PageIdentifier, WebCore::MediaProducer::MutedStateFlags);
+    void pageIsBecomingInvisible(WebCore::PageIdentifier);
protected:
    WebProcessProxy(WebProcessPool&, WebsiteDataStore*, IsPrewarmed);
Modified: trunk/Tools/ChangeLog (271168 => 271169)
--- trunk/Tools/ChangeLog 2021-01-05 20:13:38 UTC (rev 271168)
+++ trunk/Tools/ChangeLog 2021-01-05 20:24:51 UTC (rev 271169)
@@ -1,3 +1,14 @@
+2021-01-05 Sihui Liu <[email protected]>
+
+ Stop speech recognition if page becomes invisible
+ https://bugs.webkit.org/show_bug.cgi?id=220073
+ <rdar://problem/72710704>
+
+ Reviewed by Youenn Fablet.
+
+ * TestWebKitAPI/Tests/WebKitCocoa/SpeechRecognition.mm:
+ (TestWebKitAPI::TEST):
+
2021-01-05 Alexey Proskuryakov <[email protected]>
Revert part of https://trac.webkit.org/r271158
Modified: trunk/Tools/TestWebKitAPI/Tests/WebKitCocoa/SpeechRecognition.mm (271168 => 271169)
--- trunk/Tools/TestWebKitAPI/Tests/WebKitCocoa/SpeechRecognition.mm 2021-01-05 20:13:38 UTC (rev 271168)
+++ trunk/Tools/TestWebKitAPI/Tests/WebKitCocoa/SpeechRecognition.mm 2021-01-05 20:24:51 UTC (rev 271169)
@@ -167,4 +167,46 @@
EXPECT_WK_STREQ(@"Recorder Mute", [lastScriptMessage body]);
}
+// FIXME: enable this test on iOS when https://webkit.org/b/175204 is fixed.
+#if PLATFORM(MAC)
+
+TEST(WebKit2, SpeechRecognitionPageBecomesInvisible)
+{
+ auto configuration = adoptNS([[WKWebViewConfiguration alloc] init]);
+ auto handler = adoptNS([[SpeechRecognitionMessageHandler alloc] init]);
+ [[configuration userContentController] addScriptMessageHandler:handler.get() name:@"testHandler"];
+ auto preferences = [configuration preferences];
+ preferences._mockCaptureDevicesEnabled = YES;
+ preferences._speechRecognitionEnabled = YES;
+ auto webView = adoptNS([[TestWKWebView alloc] initWithFrame:CGRectMake(0, 0, 800, 600) configuration:configuration.get()]);
+ auto delegate = adoptNS([[SpeechRecognitionPermissionUIDelegate alloc] init]);
+ [webView setUIDelegate:delegate.get()];
+
+ // Page is visible.
+ shouldGrantPermissionRequest = true;
+ receivedScriptMessage = false;
+ [webView synchronouslyLoadTestPageNamed:@"speechrecognition-basic"];
+ [webView stringByEvaluatingJavaScript:@"start()"];
+ TestWebKitAPI::Util::run(&receivedScriptMessage);
+ EXPECT_WK_STREQ(@"Start", [lastScriptMessage body]);
+
+ // Hide page.
+ receivedScriptMessage = false;
+#if PLATFORM(MAC)
+ [webView.get().window setIsVisible:NO];
+#else
+ webView.get().window.hidden = YES;
+#endif
+ TestWebKitAPI::Util::run(&receivedScriptMessage);
+ EXPECT_WK_STREQ(@"Error: aborted - Page is no longer visible", [lastScriptMessage body]);
+
+ // Page is invisible.
+ receivedScriptMessage = false;
+ [webView evaluateJavaScript:@"start()" completionHandler:nil];
+ TestWebKitAPI::Util::run(&receivedScriptMessage);
+ EXPECT_WK_STREQ(@"Error: not-allowed - Permission check failed", [lastScriptMessage body]);
+}
+
+#endif
+
} // namespace TestWebKitAPI