Modified: trunk/Source/JavaScriptCore/heap/Heap.cpp (284667 => 284668)
--- trunk/Source/JavaScriptCore/heap/Heap.cpp 2021-10-22 05:05:42 UTC (rev 284667)
+++ trunk/Source/JavaScriptCore/heap/Heap.cpp 2021-10-22 07:19:52 UTC (rev 284668)
@@ -291,8 +291,7 @@
, m_handleSet(vm)
, m_codeBlocks(makeUnique<CodeBlockSet>())
, m_jitStubRoutines(makeUnique<JITStubRoutineSet>())
- , m_vm(vm)
- // We seed with 10ms so that GCActivityCallback::didAllocate doesn't continuously
+ // We seed with 10ms so that GCActivityCallback::didAllocate doesn't continuously
// schedule the timer if we've never done a collection.
, m_fullActivityCallback(GCActivityCallback::tryCreateFullTimer(this))
, m_edenActivityCallback(GCActivityCallback::tryCreateEdenTimer(this))
@@ -395,7 +394,7 @@
m_isShuttingDown = true;
- RELEASE_ASSERT(!m_vm.entryScope);
+ RELEASE_ASSERT(!vm().entryScope);
RELEASE_ASSERT(m_mutatorState == MutatorState::Running);
if (m_collectContinuouslyThread) {
@@ -483,13 +482,13 @@
// and use a temp Vector for the actual releasing.
if (!m_delayedReleaseRecursionCount++) {
while (!m_delayedReleaseObjects.isEmpty()) {
- ASSERT(m_vm.currentThreadIsHoldingAPILock());
+ ASSERT(vm().currentThreadIsHoldingAPILock());
auto objectsToRelease = WTFMove(m_delayedReleaseObjects);
{
// We need to drop locks before calling out to arbitrary code.
- JSLock::DropAllLocks dropAllLocks(m_vm);
+ JSLock::DropAllLocks dropAllLocks(vm());
#if USE(FOUNDATION)
void* context = objc_autoreleasePoolPush();
@@ -556,7 +555,7 @@
void Heap::protect(JSValue k)
{
ASSERT(k);
- ASSERT(m_vm.currentThreadIsHoldingAPILock());
+ ASSERT(vm().currentThreadIsHoldingAPILock());
if (!k.isCell())
return;
@@ -567,7 +566,7 @@
bool Heap::unprotect(JSValue k)
{
ASSERT(k);
- ASSERT(m_vm.currentThreadIsHoldingAPILock());
+ ASSERT(vm().currentThreadIsHoldingAPILock());
if (!k.isCell())
return false;
@@ -594,32 +593,33 @@
void Heap::finalizeUnconditionalFinalizers()
{
- vm().builtinExecutables()->finalizeUnconditionally();
- finalizeMarkedUnconditionalFinalizers<FunctionExecutable>(vm().functionExecutableSpace.space);
- finalizeMarkedUnconditionalFinalizers<SymbolTable>(vm().symbolTableSpace);
- finalizeMarkedUnconditionalFinalizers<ExecutableToCodeBlockEdge>(vm().executableToCodeBlockEdgesWithFinalizers); // We run this before CodeBlock's unconditional finalizer since CodeBlock looks at the owner executable's installed CodeBlock in its finalizeUnconditionally.
- vm().forEachCodeBlockSpace(
+ VM& vm = this->vm();
+ vm.builtinExecutables()->finalizeUnconditionally();
+ finalizeMarkedUnconditionalFinalizers<FunctionExecutable>(vm.functionExecutableSpace.space);
+ finalizeMarkedUnconditionalFinalizers<SymbolTable>(vm.symbolTableSpace);
+ finalizeMarkedUnconditionalFinalizers<ExecutableToCodeBlockEdge>(vm.executableToCodeBlockEdgesWithFinalizers); // We run this before CodeBlock's unconditional finalizer since CodeBlock looks at the owner executable's installed CodeBlock in its finalizeUnconditionally.
+ vm.forEachCodeBlockSpace(
[&] (auto& space) {
this->finalizeMarkedUnconditionalFinalizers<CodeBlock>(space.set);
});
- finalizeMarkedUnconditionalFinalizers<StructureRareData>(vm().structureRareDataSpace);
- finalizeMarkedUnconditionalFinalizers<UnlinkedFunctionExecutable>(vm().unlinkedFunctionExecutableSpace.set);
- if (vm().m_weakSetSpace)
- finalizeMarkedUnconditionalFinalizers<JSWeakSet>(*vm().m_weakSetSpace);
- if (vm().m_weakMapSpace)
- finalizeMarkedUnconditionalFinalizers<JSWeakMap>(*vm().m_weakMapSpace);
- if (vm().m_weakObjectRefSpace)
- finalizeMarkedUnconditionalFinalizers<JSWeakObjectRef>(*vm().m_weakObjectRefSpace);
- if (vm().m_errorInstanceSpace)
- finalizeMarkedUnconditionalFinalizers<ErrorInstance>(*vm().m_errorInstanceSpace);
+ finalizeMarkedUnconditionalFinalizers<StructureRareData>(vm.structureRareDataSpace);
+ finalizeMarkedUnconditionalFinalizers<UnlinkedFunctionExecutable>(vm.unlinkedFunctionExecutableSpace.set);
+ if (vm.m_weakSetSpace)
+ finalizeMarkedUnconditionalFinalizers<JSWeakSet>(*vm.m_weakSetSpace);
+ if (vm.m_weakMapSpace)
+ finalizeMarkedUnconditionalFinalizers<JSWeakMap>(*vm.m_weakMapSpace);
+ if (vm.m_weakObjectRefSpace)
+ finalizeMarkedUnconditionalFinalizers<JSWeakObjectRef>(*vm.m_weakObjectRefSpace);
+ if (vm.m_errorInstanceSpace)
+ finalizeMarkedUnconditionalFinalizers<ErrorInstance>(*vm.m_errorInstanceSpace);
// FinalizationRegistries currently rely on serial finalization because they can post tasks to the deferredWorkTimer, which normally expects tasks to only be posted by the API lock holder.
- if (vm().m_finalizationRegistrySpace)
- finalizeMarkedUnconditionalFinalizers<JSFinalizationRegistry>(*vm().m_finalizationRegistrySpace);
+ if (vm.m_finalizationRegistrySpace)
+ finalizeMarkedUnconditionalFinalizers<JSFinalizationRegistry>(*vm.m_finalizationRegistrySpace);
#if ENABLE(WEBASSEMBLY)
- if (vm().m_webAssemblyCodeBlockSpace)
- finalizeMarkedUnconditionalFinalizers<JSWebAssemblyCodeBlock>(*vm().m_webAssemblyCodeBlockSpace);
+ if (vm.m_webAssemblyCodeBlockSpace)
+ finalizeMarkedUnconditionalFinalizers<JSWebAssemblyCodeBlock>(*vm.m_webAssemblyCodeBlockSpace);
#endif
}
@@ -638,7 +638,7 @@
if (!Options::useJIT())
return;
#if ENABLE(JIT)
- JITWorklist::ensureGlobalWorklist().completeAllPlansForVM(m_vm);
+ JITWorklist::ensureGlobalWorklist().completeAllPlansForVM(vm());
#endif // ENABLE(JIT)
}
@@ -648,7 +648,7 @@
m_codeBlocks->iterateCurrentlyExecuting(func);
#if ENABLE(JIT)
if (Options::useJIT())
- JITWorklist::ensureGlobalWorklist().iterateCodeBlocksForGC(visitor, m_vm, func);
+ JITWorklist::ensureGlobalWorklist().iterateCodeBlocksForGC(visitor, vm(), func);
#else
UNUSED_PARAM(visitor);
#endif // ENABLE(JIT)
@@ -700,7 +700,7 @@
void Heap::gatherJSStackRoots(ConservativeRoots& roots)
{
#if ENABLE(C_LOOP)
- m_vm.interpreter->cloopStack().gatherConservativeRoots(roots, *m_jitStubRoutines, *m_codeBlocks);
+ vm().interpreter->cloopStack().gatherConservativeRoots(roots, *m_jitStubRoutines, *m_codeBlocks);
#else
UNUSED_PARAM(roots);
#endif
@@ -711,8 +711,9 @@
#if ENABLE(DFG_JIT)
if (!Options::useJIT())
return;
- m_vm.gatherScratchBufferRoots(roots);
- m_vm.scanSideState(roots);
+ VM& vm = this->vm();
+ vm.gatherScratchBufferRoots(roots);
+ vm.scanSideState(roots);
#else
UNUSED_PARAM(roots);
#endif
@@ -731,7 +732,7 @@
if (!Options::useJIT())
return;
#if ENABLE(JIT)
- JITWorklist::ensureGlobalWorklist().removeDeadPlans(m_vm);
+ JITWorklist::ensureGlobalWorklist().removeDeadPlans(vm());
#endif // ENABLE(JIT)
}
@@ -759,7 +760,7 @@
{
if (auto* analyzer = heapProfiler.activeHeapAnalyzer()) {
HeapIterationScope heapIterationScope(*this);
- GatherExtraHeapData functor(m_vm, *analyzer);
+ GatherExtraHeapData functor(vm(), *analyzer);
m_objectSpace.forEachLiveCell(heapIterationScope, functor);
}
}
@@ -910,7 +911,7 @@
if (m_collectionScope && effort == DeleteAllCodeIfNotCollecting)
return;
- VM& vm = m_vm;
+ VM& vm = this->vm();
PreventCollectionScope preventCollectionScope(*this);
    // If JavaScript is running, it's not safe to delete all JavaScript code, since
@@ -955,7 +956,7 @@
if (m_collectionScope && effort == DeleteAllCodeIfNotCollecting)
return;
- VM& vm = m_vm;
+ VM& vm = this->vm();
PreventCollectionScope preventCollectionScope(*this);
RELEASE_ASSERT(!m_collectionScope);
@@ -1614,7 +1615,7 @@
return false;
}
} else {
- sanitizeStackForVM(m_vm);
+ sanitizeStackForVM(vm());
handleNeedFinalize();
}
stopThePeriphery(conn);
@@ -1829,7 +1830,7 @@
case RunCurrentPhaseResult::Continue:
break;
case RunCurrentPhaseResult::NeedCurrentThreadState:
- sanitizeStackForVM(m_vm);
+ sanitizeStackForVM(vm());
auto lambda = [&] (CurrentThreadState& state) {
for (;;) {
RunCurrentPhaseResult result = runCurrentPhase(GCConductor::Mutator, &state);
@@ -1973,7 +1974,7 @@
if (false)
dataLog("Relinquished the conn.\n");
- sanitizeStackForVM(m_vm);
+ sanitizeStackForVM(vm());
Locker locker { *m_threadLock };
if (!m_requests.isEmpty())
@@ -2204,7 +2205,7 @@
void Heap::deleteSourceProviderCaches()
{
if (m_lastCollectionScope && m_lastCollectionScope.value() == CollectionScope::Full)
- m_vm.clearSourceProviderCaches();
+ vm().clearSourceProviderCaches();
}
void Heap::notifyIncrementalSweeper()
@@ -2323,7 +2324,7 @@
ASSERT(externalMemorySize() <= extraMemorySize());
#endif
- if (HeapProfiler* heapProfiler = m_vm.heapProfiler()) {
+ if (HeapProfiler* heapProfiler = vm().heapProfiler()) {
gatherExtraHeapData(*heapProfiler);
removeDeadHeapSnapshotNodes(*heapProfiler);
}
@@ -2381,7 +2382,7 @@
bool Heap::isValidAllocation(size_t)
{
- if (!isValidThreadState(m_vm))
+ if (!isValidThreadState(vm()))
return false;
if (isCurrentThreadBusy())
@@ -2749,14 +2750,15 @@
m_constraintSet->add(
"Msr", "Misc Small Roots",
MAKE_MARKING_CONSTRAINT_EXECUTOR_PAIR(([this] (auto& visitor) {
+ VM& vm = this->vm();
if constexpr (objcAPIEnabled) {
SetRootMarkReasonScope rootScope(visitor, RootMarkReason::ExternalRememberedSet);
- scanExternalRememberedSet(m_vm, visitor);
+ scanExternalRememberedSet(vm, visitor);
}
- if (m_vm.smallStrings.needsToBeVisited(*m_collectionScope)) {
+ if (vm.smallStrings.needsToBeVisited(*m_collectionScope)) {
SetRootMarkReasonScope rootScope(visitor, RootMarkReason::StrongReferences);
- m_vm.smallStrings.visitStrongReferences(visitor);
+ vm.smallStrings.visitStrongReferences(visitor);
}
{
@@ -2779,14 +2781,14 @@
{
SetRootMarkReasonScope rootScope(visitor, RootMarkReason::VMExceptions);
- visitor.appendUnbarriered(m_vm.exception());
- visitor.appendUnbarriered(m_vm.lastException());
+ visitor.appendUnbarriered(vm.exception());
+ visitor.appendUnbarriered(vm.lastException());
// We're going to m_terminationException directly instead of going through
// the exception() getter because we want to assert in the getter that the
// TerminationException has been reified. Here, we don't care if it is
// reified or not.
- visitor.appendUnbarriered(m_vm.m_terminationException);
+ visitor.appendUnbarriered(vm.m_terminationException);
}
})),
ConstraintVolatility::GreyedByExecution);
@@ -2804,13 +2806,14 @@
MAKE_MARKING_CONSTRAINT_EXECUTOR_PAIR(([this] (auto& visitor) {
SetRootMarkReasonScope rootScope(visitor, RootMarkReason::Debugger);
+ VM& vm = this->vm();
if constexpr (samplingProfilerSupported)
- visitSamplingProfiler(m_vm, visitor);
+ visitSamplingProfiler(vm, visitor);
- if (m_vm.typeProfiler())
- m_vm.typeProfilerLog()->visit(visitor);
+ if (vm.typeProfiler())
+ vm.typeProfilerLog()->visit(visitor);
- if (auto* shadowChicken = m_vm.shadowChicken())
+ if (auto* shadowChicken = vm.shadowChicken())
shadowChicken->visitChildren(visitor);
})),
ConstraintVolatility::GreyedByExecution);
@@ -2868,7 +2871,7 @@
// FIXME: This is almost certainly unnecessary.
// https://bugs.webkit.org/show_bug.cgi?id=166829
JITWorklist::ensureGlobalWorklist().iterateCodeBlocksForGC(visitor,
- m_vm,
+ vm(),
[&] (CodeBlock* codeBlock) {
visitor.appendUnbarriered(codeBlock);
});