Modified: trunk/Source/WebKit/WebProcess/GPU/media/RemoteAudioDestinationProxy.cpp (276188 => 276189)
--- trunk/Source/WebKit/WebProcess/GPU/media/RemoteAudioDestinationProxy.cpp 2021-04-17 01:24:04 UTC (rev 276188)
+++ trunk/Source/WebKit/WebProcess/GPU/media/RemoteAudioDestinationProxy.cpp 2021-04-17 01:30:19 UTC (rev 276189)
@@ -69,7 +69,6 @@
, m_inputDeviceId(inputDeviceId)
, m_numberOfInputChannels(numberOfInputChannels)
{
- connectToGPUProcess();
}
void RemoteAudioDestinationProxy::startRenderingThread()
@@ -99,47 +98,48 @@
m_renderThread = nullptr;
}
-void RemoteAudioDestinationProxy::connectToGPUProcess()
+GPUProcessConnection& RemoteAudioDestinationProxy::ensureGPUProcessConnection()
{
- auto& connection = WebProcess::singleton().ensureGPUProcessConnection();
- connection.addClient(*this);
- auto didSucceed = connection.connection().sendSync(
- Messages::RemoteAudioDestinationManager::CreateAudioDestination(m_inputDeviceId, m_numberOfInputChannels, numberOfOutputChannels(), sampleRate(), hardwareSampleRate(), m_renderSemaphore), Messages::RemoteAudioDestinationManager::CreateAudioDestination::Reply(m_destinationID), 0);
+ if (!m_gpuProcessConnection) {
+ m_gpuProcessConnection = makeWeakPtr(WebProcess::singleton().ensureGPUProcessConnection());
+ m_gpuProcessConnection->addClient(*this);
- if (!didSucceed) {
- // The GPUProcess likely crashed during this synchronous IPC. gpuProcessConnectionDidClose() will get called to reconnect to the GPUProcess.
- RELEASE_LOG_ERROR(Media, "RemoteAudioDestinationProxy::connectToGPUProcess: Failed to send RemoteAudioDestinationManager::CreateAudioDestination() IPC (GPU process likely crashed)");
- return;
- }
-
-
#if PLATFORM(COCOA)
- m_currentFrame = 0;
- AudioStreamBasicDescription streamFormat;
- getAudioStreamBasicDescription(streamFormat);
- m_ringBuffer->allocate(streamFormat, m_numberOfFrames);
- m_audioBufferList = makeUnique<WebCore::WebAudioBufferList>(streamFormat);
- m_audioBufferList->setSampleCount(WebCore::AudioUtilities::renderQuantumSize);
+ m_currentFrame = 0;
+ AudioStreamBasicDescription streamFormat;
+ getAudioStreamBasicDescription(streamFormat);
+ m_ringBuffer->allocate(streamFormat, m_numberOfFrames);
+ m_audioBufferList = makeUnique<WebCore::WebAudioBufferList>(streamFormat);
+ m_audioBufferList->setSampleCount(WebCore::AudioUtilities::renderQuantumSize);
#endif
- startRenderingThread();
+ startRenderingThread();
+ }
+ return *m_gpuProcessConnection;
}
+RemoteAudioDestinationIdentifier RemoteAudioDestinationProxy::destinationID()
+{
+ if (!m_destinationID)
+ ensureGPUProcessConnection().connection().sendSync(Messages::RemoteAudioDestinationManager::CreateAudioDestination(m_inputDeviceId, m_numberOfInputChannels, numberOfOutputChannels(), sampleRate(), hardwareSampleRate(), m_renderSemaphore), Messages::RemoteAudioDestinationManager::CreateAudioDestination::Reply(m_destinationID), 0);
+ return m_destinationID;
+}
+
RemoteAudioDestinationProxy::~RemoteAudioDestinationProxy()
{
- auto& connection = WebProcess::singleton().ensureGPUProcessConnection();
+ if (m_gpuProcessConnection && m_destinationID) {
+ m_gpuProcessConnection->connection().sendWithAsyncReply(
+ Messages::RemoteAudioDestinationManager::DeleteAudioDestination(m_destinationID), [] {
+ // Can't remove this from proxyMap() here because the object would have been already deleted.
+ });
+ }
- connection.connection().sendWithAsyncReply(
- Messages::RemoteAudioDestinationManager::DeleteAudioDestination(m_destinationID), [] {
- // Can't remove this from proxyMap() here because the object would have been already deleted.
- });
-
stopRenderingThread();
}
void RemoteAudioDestinationProxy::startRendering(CompletionHandler<void(bool)>&& completionHandler)
{
- WebProcess::singleton().ensureGPUProcessConnection().connection().sendWithAsyncReply(Messages::RemoteAudioDestinationManager::StartAudioDestination(m_destinationID), [this, protectedThis = makeRef(*this), completionHandler = WTFMove(completionHandler)](bool isPlaying) mutable {
+ ensureGPUProcessConnection().connection().sendWithAsyncReply(Messages::RemoteAudioDestinationManager::StartAudioDestination(destinationID()), [this, protectedThis = makeRef(*this), completionHandler = WTFMove(completionHandler)](bool isPlaying) mutable {
setIsPlaying(isPlaying);
completionHandler(isPlaying);
});
@@ -147,7 +147,7 @@
void RemoteAudioDestinationProxy::stopRendering(CompletionHandler<void(bool)>&& completionHandler)
{
- WebProcess::singleton().ensureGPUProcessConnection().connection().sendWithAsyncReply(Messages::RemoteAudioDestinationManager::StopAudioDestination(m_destinationID), [this, protectedThis = makeRef(*this), completionHandler = WTFMove(completionHandler)](bool isPlaying) mutable {
+ ensureGPUProcessConnection().connection().sendWithAsyncReply(Messages::RemoteAudioDestinationManager::StopAudioDestination(destinationID()), [this, protectedThis = makeRef(*this), completionHandler = WTFMove(completionHandler)](bool isPlaying) mutable {
setIsPlaying(isPlaying);
completionHandler(!isPlaying);
});
@@ -167,6 +167,9 @@
#if PLATFORM(COCOA)
void RemoteAudioDestinationProxy::storageChanged(SharedMemory* storage, const WebCore::CAAudioStreamDescription& format, size_t frameCount)
{
+ if (!m_gpuProcessConnection)
+ return;
+
SharedMemory::Handle handle;
if (storage)
storage->createHandle(handle, SharedMemory::Protection::ReadOnly);
@@ -178,7 +181,7 @@
uint64_t dataSize = 0;
#endif
- WebProcess::singleton().ensureGPUProcessConnection().connection().send(Messages::RemoteAudioDestinationManager::AudioSamplesStorageChanged { m_destinationID, SharedMemory::IPCHandle { WTFMove(handle), dataSize }, format, frameCount }, 0);
+ m_gpuProcessConnection->connection().send(Messages::RemoteAudioDestinationManager::AudioSamplesStorageChanged { destinationID(), SharedMemory::IPCHandle { WTFMove(handle), dataSize }, format, frameCount }, 0);
}
#endif
@@ -187,9 +190,9 @@
oldConnection.removeClient(*this);
stopRenderingThread();
+ m_gpuProcessConnection = nullptr;
+ m_destinationID = { };
- connectToGPUProcess();
-
if (isPlaying())
startRendering([](bool) { });
}
Modified: trunk/Source/WebKit/WebProcess/GPU/media/RemoteAudioDestinationProxy.h (276188 => 276189)
--- trunk/Source/WebKit/WebProcess/GPU/media/RemoteAudioDestinationProxy.h 2021-04-17 01:24:04 UTC (rev 276188)
+++ trunk/Source/WebKit/WebProcess/GPU/media/RemoteAudioDestinationProxy.h 2021-04-17 01:30:19 UTC (rev 276189)
@@ -79,7 +79,8 @@
void stopRenderingThread();
void renderQuantum();
- void connectToGPUProcess();
+ RemoteAudioDestinationIdentifier destinationID();
+ GPUProcessConnection& ensureGPUProcessConnection();
// GPUProcessConnection::Client.
void gpuProcessConnectionDidClose(GPUProcessConnection&) final;
@@ -95,8 +96,9 @@
void storageChanged(SharedMemory*, const WebCore::CAAudioStreamDescription& format, size_t frameCount);
#endif
- RemoteAudioDestinationIdentifier m_destinationID;
+ RemoteAudioDestinationIdentifier m_destinationID; // Call destinationID() getter to make sure the destinationID is valid.
+ WeakPtr<GPUProcessConnection> m_gpuProcessConnection;
#if PLATFORM(COCOA)
uint64_t m_numberOfFrames { 0 };
std::unique_ptr<WebCore::CARingBuffer> m_ringBuffer;
Modified: trunk/Tools/TestWebKitAPI/Tests/WebKitCocoa/GPUProcess.mm (276188 => 276189)
--- trunk/Tools/TestWebKitAPI/Tests/WebKitCocoa/GPUProcess.mm 2021-04-17 01:24:04 UTC (rev 276188)
+++ trunk/Tools/TestWebKitAPI/Tests/WebKitCocoa/GPUProcess.mm 2021-04-17 01:30:19 UTC (rev 276189)
@@ -579,3 +579,55 @@
TestWebKitAPI::Util::run(&done);
});
}
+
+TEST(GPUProcess, ExitsUnderMemoryPressureWebAudioNonRenderingAudioContext)
+{
+    auto configuration = adoptNS([[WKWebViewConfiguration alloc] init]);
+    WKPreferencesSetBoolValueForKeyForTesting((__bridge WKPreferencesRef)[configuration preferences], true, WKStringCreateWithUTF8CString("UseGPUProcessForMediaEnabled"));
+    WKPreferencesSetBoolValueForKeyForTesting((__bridge WKPreferencesRef)[configuration preferences], true, WKStringCreateWithUTF8CString("CaptureVideoInGPUProcessEnabled"));
+    WKPreferencesSetBoolValueForKeyForTesting((__bridge WKPreferencesRef)[configuration preferences], true, WKStringCreateWithUTF8CString("UseGPUProcessForCanvasRenderingEnabled"));
+    WKPreferencesSetBoolValueForKeyForTesting((__bridge WKPreferencesRef)[configuration preferences], false, WKStringCreateWithUTF8CString("UseGPUProcessForDOMRenderingEnabled"));
+
+    auto webView = adoptNS([[TestWKWebView alloc] initWithFrame:CGRectMake(0, 0, 400, 400) configuration:configuration.get()]);
+    [webView synchronouslyLoadTestPageNamed:@"audio-context-playing"];
+
+    // evaluateJavaScript gives us the user gesture we need to reliably start audio playback on all platforms.
+    __block bool done = false;
+    [webView evaluateJavaScript:@"startPlaying()" completionHandler:^(id result, NSError *error) {
+        EXPECT_TRUE(!error);
+        done = true;
+    }];
+    TestWebKitAPI::Util::run(&done);
+
+    // A GPUProcess should get launched.
+    while (![configuration.get().processPool _gpuProcessIdentifier])
+        TestWebKitAPI::Util::sleep(0.1);
+    auto gpuProcessPID = [configuration.get().processPool _gpuProcessIdentifier];
+
+    // Simulate memory pressure (notifyutil -p org.WebKit.lowMemory).
+    notify_post("org.WebKit.lowMemory");
+
+    // Make sure the GPUProcess does not exit since it is still needed.
+    TestWebKitAPI::Util::sleep(0.5);
+    EXPECT_EQ(gpuProcessPID, [configuration.get().processPool _gpuProcessIdentifier]);
+
+    // Suspend audio rendering and wait for the script to finish, so the completion
+    // handler cannot run after |done| goes out of scope when this test returns.
+    done = false;
+    [webView evaluateJavaScript:@"context.suspend() && true" completionHandler:^(id result, NSError *error) {
+        EXPECT_TRUE(!error);
+        done = true;
+    }];
+    TestWebKitAPI::Util::run(&done);
+
+    // The GPUProcess should exit on memory pressure.
+    do {
+        // Simulate memory pressure (notifyutil -p org.WebKit.lowMemory).
+        notify_post("org.WebKit.lowMemory");
+        TestWebKitAPI::Util::sleep(0.1);
+    } while ([configuration.get().processPool _gpuProcessIdentifier]);
+
+    // The GPUProcess should not relaunch.
+    TestWebKitAPI::Util::sleep(0.5);
+    EXPECT_EQ(0, [configuration.get().processPool _gpuProcessIdentifier]);
+}