Diff
Modified: trunk/Source/JavaScriptCore/ChangeLog (205849 => 205850)
--- trunk/Source/JavaScriptCore/ChangeLog 2016-09-13 04:32:06 UTC (rev 205849)
+++ trunk/Source/JavaScriptCore/ChangeLog 2016-09-13 04:33:19 UTC (rev 205850)
@@ -1,3 +1,57 @@
+2016-09-12 Filip Pizlo <[email protected]>
+
+ MarkedBlock should be able to use flipIfNecessary() as the "I'm not empty" trigger
+ https://bugs.webkit.org/show_bug.cgi?id=161869
+
+ Reviewed by Saam Barati.
+
+ In bug 161581, I'm going to use flipIfNecessary() during marking to trigger the "I'm not
+ empty" hook, which will set a bit in the markingNotEmpty bitvector.
+
+ For this to work, we need to ensure that nobody else uses flipIfNecessary() during marking.
+ If anyone else does it but they aren't marking new objects, then this prevents
+ flipIfNecessary() from triggering when the first object is marked, which means we won't
+ always detect when a block became non-empty.
+
+ I addressed this by adding an isMarking flag, and asserting in flipIfNecessary() that the flag
+ isn't set. flipIfNecessaryDuringMarking() is used only on the marking path, so that code
+ knows that it can trigger something like noteMarked(). The only places that were using
+ flipIfNecessary() should have been using needsFlip() anyway.
+
+ * heap/CellContainer.h:
+ * heap/CellContainerInlines.h:
+ (JSC::CellContainer::needsFlip):
+ * heap/Heap.cpp:
+ (JSC::Heap::markRoots):
+ (JSC::Heap::beginMarking):
+ (JSC::Heap::endMarking):
+ (JSC::Heap::clearLivenessData): Deleted.
+ (JSC::Heap::converge): Deleted.
+ (JSC::Heap::resetVisitors): Deleted.
+ * heap/Heap.h:
+ * heap/HeapInlines.h:
+ (JSC::Heap::testAndSetMarked):
+ * heap/LargeAllocation.h:
+ (JSC::LargeAllocation::flipIfNecessaryDuringMarking):
+ (JSC::LargeAllocation::flipIfNecessaryConcurrently): Deleted.
+ * heap/MarkedBlock.cpp:
+ (JSC::MarkedBlock::flipIfNecessarySlow):
+ (JSC::MarkedBlock::flipIfNecessaryDuringMarkingSlow):
+ (JSC::MarkedBlock::flipIfNecessaryConcurrentlySlow): Deleted.
+ * heap/MarkedBlock.h:
+ (JSC::MarkedBlock::flipIfNecessaryDuringMarking):
+ (JSC::MarkedBlock::Handle::flipIfNecessaryDuringMarking):
+ (JSC::MarkedBlock::flipIfNecessaryConcurrently): Deleted.
+ (JSC::MarkedBlock::Handle::flipIfNecessaryConcurrently): Deleted.
+ * heap/MarkedSpace.h:
+ (JSC::MarkedSpace::isMarking):
+ (JSC::MarkedSpace::setIsMarking):
+ (JSC::MarkedSpace::largeAllocationsForThisCollectionSize): Deleted.
+ * heap/SlotVisitor.cpp:
+ (JSC::SlotVisitor::setMarkedAndAppendToMarkStack):
+ * heap/WeakBlock.cpp:
+ (JSC::WeakBlock::visit):
+
2016-09-12 Saam Barati <[email protected]>
HashMapImpl should take into account m_deleteCount in its load factor and it should be able to rehash the table to be smaller
Modified: trunk/Source/JavaScriptCore/heap/CellContainer.h (205849 => 205850)
--- trunk/Source/JavaScriptCore/heap/CellContainer.h 2016-09-13 04:32:06 UTC (rev 205849)
+++ trunk/Source/JavaScriptCore/heap/CellContainer.h 2016-09-13 04:33:19 UTC (rev 205850)
@@ -75,6 +75,7 @@
void flipIfNecessary(HeapVersion);
void flipIfNecessary();
+ bool needsFlip() const;
bool isMarked() const;
bool isMarked(HeapCell*) const;
Modified: trunk/Source/JavaScriptCore/heap/CellContainerInlines.h (205849 => 205850)
--- trunk/Source/JavaScriptCore/heap/CellContainerInlines.h 2016-09-13 04:32:06 UTC (rev 205849)
+++ trunk/Source/JavaScriptCore/heap/CellContainerInlines.h 2016-09-13 04:33:19 UTC (rev 205850)
@@ -85,5 +85,12 @@
markedBlock().flipIfNecessary();
}
+inline bool CellContainer::needsFlip() const
+{
+ if (isLargeAllocation())
+ return false;
+ return markedBlock().needsFlip();
+}
+
} // namespace JSC
Modified: trunk/Source/JavaScriptCore/heap/Heap.cpp (205849 => 205850)
--- trunk/Source/JavaScriptCore/heap/Heap.cpp 2016-09-13 04:32:06 UTC (rev 205849)
+++ trunk/Source/JavaScriptCore/heap/Heap.cpp 2016-09-13 04:33:19 UTC (rev 205850)
@@ -419,7 +419,7 @@
m_slotVisitor.clearMarkStack();
}
- clearLivenessData();
+ beginMarking();
m_parallelMarkersShouldExit = false;
@@ -471,7 +471,7 @@
visitSamplingProfiler();
visitShadowChicken();
traceCodeBlocksAndJITStubRoutines();
- converge();
+ m_slotVisitor.drainFromShared(SlotVisitor::MasterDrain);
}
TimingScope postConvergenceTimingScope(*this, "Heap::markRoots after convergence");
@@ -487,7 +487,7 @@
}
m_helperClient.finish();
updateObjectCounts(gcStartTime);
- resetVisitors();
+ endMarking();
}
void Heap::gatherStackRoots(ConservativeRoots& roots, void* stackOrigin, void* stackTop, MachineThreads::RegisterState& calleeSavedRegisters)
@@ -514,9 +514,9 @@
#endif
}
-void Heap::clearLivenessData()
+void Heap::beginMarking()
{
- TimingScope timingScope(*this, "Heap::clearLivenessData");
+ TimingScope timingScope(*this, "Heap::beginMarking");
if (m_operationInProgress == FullCollection)
m_codeBlocks->clearMarksForFullCollection();
@@ -529,6 +529,8 @@
TimingScope clearMarksTimingScope(*this, "m_objectSpace.clearMarks");
m_objectSpace.flip();
}
+
+ m_objectSpace.setIsMarking(true);
}
void Heap::visitExternalRememberedSet()
@@ -727,11 +729,6 @@
m_slotVisitor.donateAndDrain();
}
-void Heap::converge()
-{
- m_slotVisitor.drainFromShared(SlotVisitor::MasterDrain);
-}
-
void Heap::visitWeakHandles(HeapRootVisitor& visitor)
{
TimingScope timingScope(*this, "Heap::visitWeakHandles");
@@ -772,7 +769,7 @@
m_totalBytesVisited += m_totalBytesVisitedThisCycle;
}
-void Heap::resetVisitors()
+void Heap::endMarking()
{
m_slotVisitor.reset();
@@ -781,6 +778,8 @@
ASSERT(m_sharedMarkStack.isEmpty());
m_weakReferenceHarvesters.removeAll();
+
+ m_objectSpace.setIsMarking(false);
}
size_t Heap::objectCount()
Modified: trunk/Source/JavaScriptCore/heap/Heap.h (205849 => 205850)
--- trunk/Source/JavaScriptCore/heap/Heap.h 2016-09-13 04:32:06 UTC (rev 205849)
+++ trunk/Source/JavaScriptCore/heap/Heap.h 2016-09-13 04:33:19 UTC (rev 205850)
@@ -308,7 +308,7 @@
void gatherStackRoots(ConservativeRoots&, void* stackOrigin, void* stackTop, MachineThreads::RegisterState&);
void gatherJSStackRoots(ConservativeRoots&);
void gatherScratchBufferRoots(ConservativeRoots&);
- void clearLivenessData();
+ void beginMarking();
void visitExternalRememberedSet();
void visitSmallStrings();
void visitConservativeRoots(ConservativeRoots&);
@@ -322,10 +322,9 @@
void visitSamplingProfiler();
void visitShadowChicken();
void traceCodeBlocksAndJITStubRoutines();
- void converge();
void visitWeakHandles(HeapRootVisitor&);
void updateObjectCounts(double gcStartTime);
- void resetVisitors();
+ void endMarking();
void reapWeakHandles();
void pruneStaleEntriesFromWeakGCMaps();
Modified: trunk/Source/JavaScriptCore/heap/HeapInlines.h (205849 => 205850)
--- trunk/Source/JavaScriptCore/heap/HeapInlines.h 2016-09-13 04:32:06 UTC (rev 205849)
+++ trunk/Source/JavaScriptCore/heap/HeapInlines.h 2016-09-13 04:33:19 UTC (rev 205850)
@@ -115,7 +115,7 @@
if (cell->isLargeAllocation())
return cell->largeAllocation().testAndSetMarked();
MarkedBlock& block = cell->markedBlock();
- block.flipIfNecessaryConcurrently(version);
+ block.flipIfNecessaryDuringMarking(version);
return block.testAndSetMarked(cell);
}
Modified: trunk/Source/JavaScriptCore/heap/LargeAllocation.h (205849 => 205850)
--- trunk/Source/JavaScriptCore/heap/LargeAllocation.h 2016-09-13 04:32:06 UTC (rev 205849)
+++ trunk/Source/JavaScriptCore/heap/LargeAllocation.h 2016-09-13 04:33:19 UTC (rev 205850)
@@ -71,6 +71,8 @@
bool isNewlyAllocated() const { return m_isNewlyAllocated; }
ALWAYS_INLINE bool isMarked() { return m_isMarked.load(std::memory_order_relaxed); }
bool isMarkedOrNewlyAllocated() { return isMarked() || isNewlyAllocated(); }
+ bool isMarkedOrNewlyAllocated(HeapCell*) { return isMarkedOrNewlyAllocated(); }
+ bool isMarkedOrNewlyAllocatedDuringWeakVisiting(HeapVersion, HeapCell*) { return isMarkedOrNewlyAllocated(); }
bool isLive() { return isMarkedOrNewlyAllocated(); }
bool hasValidCell() const { return m_hasValidCell; }
@@ -105,7 +107,7 @@
const AllocatorAttributes& attributes() const { return m_attributes; }
void flipIfNecessary(uint64_t) { }
- void flipIfNecessaryConcurrently(uint64_t) { }
+ void flipIfNecessaryDuringMarking(uint64_t) { }
ALWAYS_INLINE bool testAndSetMarked()
{
Modified: trunk/Source/JavaScriptCore/heap/MarkedBlock.cpp (205849 => 205850)
--- trunk/Source/JavaScriptCore/heap/MarkedBlock.cpp 2016-09-13 04:32:06 UTC (rev 205849)
+++ trunk/Source/JavaScriptCore/heap/MarkedBlock.cpp 2016-09-13 04:33:19 UTC (rev 205850)
@@ -358,11 +358,13 @@
void MarkedBlock::flipIfNecessarySlow()
{
ASSERT(needsFlip());
+ ASSERT(!vm()->heap.objectSpace().isMarking());
clearMarks();
}
-void MarkedBlock::flipIfNecessaryConcurrentlySlow()
+void MarkedBlock::flipIfNecessaryDuringMarkingSlow()
{
+ ASSERT(vm()->heap.objectSpace().isMarking());
LockHolder locker(m_lock);
if (needsFlip())
clearMarks();
Modified: trunk/Source/JavaScriptCore/heap/MarkedBlock.h (205849 => 205850)
--- trunk/Source/JavaScriptCore/heap/MarkedBlock.h 2016-09-13 04:32:06 UTC (rev 205849)
+++ trunk/Source/JavaScriptCore/heap/MarkedBlock.h 2016-09-13 04:33:19 UTC (rev 205850)
@@ -186,7 +186,7 @@
bool needsFlip();
- void flipIfNecessaryConcurrently(HeapVersion);
+ void flipIfNecessaryDuringMarking(HeapVersion);
void flipIfNecessary(HeapVersion);
void flipIfNecessary();
@@ -251,6 +251,7 @@
bool testAndSetMarked(const void*);
bool isMarkedOrNewlyAllocated(const HeapCell*);
+ bool isMarkedOrNewlyAllocatedDuringWeakVisiting(HeapVersion, const HeapCell*);
bool isAtom(const void*);
void setMarked(const void*);
@@ -267,7 +268,7 @@
bool needsFlip(HeapVersion);
bool needsFlip();
- void flipIfNecessaryConcurrently(HeapVersion);
+ void flipIfNecessaryDuringMarking(HeapVersion);
void flipIfNecessary(HeapVersion);
void flipIfNecessary();
@@ -283,7 +284,7 @@
MarkedBlock(VM&, Handle&);
Atom* atoms();
- void flipIfNecessaryConcurrentlySlow();
+ void flipIfNecessaryDuringMarkingSlow();
void flipIfNecessarySlow();
void clearMarks();
void clearHasAnyMarked();
@@ -474,10 +475,10 @@
flipIfNecessarySlow();
}
-inline void MarkedBlock::flipIfNecessaryConcurrently(HeapVersion heapVersion)
+inline void MarkedBlock::flipIfNecessaryDuringMarking(HeapVersion heapVersion)
{
if (UNLIKELY(needsFlip(heapVersion)))
- flipIfNecessaryConcurrentlySlow();
+ flipIfNecessaryDuringMarkingSlow();
WTF::loadLoadFence();
}
@@ -486,9 +487,9 @@
block().flipIfNecessary(heapVersion);
}
-inline void MarkedBlock::Handle::flipIfNecessaryConcurrently(HeapVersion heapVersion)
+inline void MarkedBlock::Handle::flipIfNecessaryDuringMarking(HeapVersion heapVersion)
{
- block().flipIfNecessaryConcurrently(heapVersion);
+ block().flipIfNecessaryDuringMarking(heapVersion);
}
inline void MarkedBlock::Handle::flipForEdenCollection()
@@ -561,6 +562,13 @@
return isMarked(cell) || (m_handle.m_newlyAllocated && m_handle.isNewlyAllocated(cell));
}
+inline bool MarkedBlock::isMarkedOrNewlyAllocatedDuringWeakVisiting(HeapVersion heapVersion, const HeapCell* cell)
+{
+ if (needsFlip(heapVersion))
+ return false;
+ return isMarkedOrNewlyAllocated(cell);
+}
+
inline bool MarkedBlock::Handle::isLive(const HeapCell* cell)
{
assertFlipped();
Modified: trunk/Source/JavaScriptCore/heap/MarkedSpace.h (205849 => 205850)
--- trunk/Source/JavaScriptCore/heap/MarkedSpace.h 2016-09-13 04:32:06 UTC (rev 205849)
+++ trunk/Source/JavaScriptCore/heap/MarkedSpace.h 2016-09-13 04:33:19 UTC (rev 205850)
@@ -168,6 +168,13 @@
LargeAllocation** largeAllocationsForThisCollectionBegin() const { return m_largeAllocationsForThisCollectionBegin; }
LargeAllocation** largeAllocationsForThisCollectionEnd() const { return m_largeAllocationsForThisCollectionEnd; }
unsigned largeAllocationsForThisCollectionSize() const { return m_largeAllocationsForThisCollectionSize; }
+
+ // When this is true it means that we have flipped but the mark bits haven't converged yet.
+ bool isMarking() const { return m_isMarking; }
+
+ // FIXME: After https://bugs.webkit.org/show_bug.cgi?id=161581, MarkedSpace will control this
+ // flag directly.
+ void setIsMarking(bool value) { m_isMarking = value; }
private:
friend class LLIntOffsetsExtractor;
@@ -196,6 +203,7 @@
HeapVersion m_version { initialVersion };
size_t m_capacity;
bool m_isIterating;
+ bool m_isMarking { false };
MarkedBlockSet m_blocks;
Vector<MarkedBlock::Handle*> m_blocksWithNewObjects;
Vector<LargeAllocation*> m_largeAllocations;
Modified: trunk/Source/JavaScriptCore/heap/SlotVisitor.cpp (205849 => 205850)
--- trunk/Source/JavaScriptCore/heap/SlotVisitor.cpp 2016-09-13 04:32:06 UTC (rev 205849)
+++ trunk/Source/JavaScriptCore/heap/SlotVisitor.cpp 2016-09-13 04:33:19 UTC (rev 205850)
@@ -198,7 +198,7 @@
template<typename ContainerType>
ALWAYS_INLINE void SlotVisitor::setMarkedAndAppendToMarkStack(ContainerType& container, JSCell* cell)
{
- container.flipIfNecessaryConcurrently(m_version);
+ container.flipIfNecessaryDuringMarking(m_version);
if (container.testAndSetMarked(cell))
return;
Modified: trunk/Source/JavaScriptCore/heap/SlotVisitor.h (205849 => 205850)
--- trunk/Source/JavaScriptCore/heap/SlotVisitor.h 2016-09-13 04:32:06 UTC (rev 205849)
+++ trunk/Source/JavaScriptCore/heap/SlotVisitor.h 2016-09-13 04:33:19 UTC (rev 205850)
@@ -123,6 +123,8 @@
void dump(PrintStream&) const;
bool isBuildingHeapSnapshot() const { return !!m_heapSnapshotBuilder; }
+
+ HeapVersion version() const { return m_version; }
private:
friend class ParallelModeEnabler;
Modified: trunk/Source/JavaScriptCore/heap/WeakBlock.cpp (205849 => 205850)
--- trunk/Source/JavaScriptCore/heap/WeakBlock.cpp 2016-09-13 04:32:06 UTC (rev 205849)
+++ trunk/Source/JavaScriptCore/heap/WeakBlock.cpp 2016-09-13 04:33:19 UTC (rev 205850)
@@ -96,22 +96,13 @@
ASSERT(!m_sweepResult.isNull());
}
-void WeakBlock::visit(HeapRootVisitor& heapRootVisitor)
+template<typename ContainerType>
+void WeakBlock::specializedVisit(ContainerType& container, HeapRootVisitor& heapRootVisitor)
{
- // If a block is completely empty, a visit won't have any effect.
- if (isEmpty())
- return;
-
- // If this WeakBlock doesn't belong to a CellContainer, we won't even be here.
- ASSERT(m_container);
+ SlotVisitor& visitor = heapRootVisitor.visitor();
- m_container.flipIfNecessary();
+ HeapVersion version = visitor.version();
- // We only visit after marking.
- ASSERT(m_container.isMarked());
-
- SlotVisitor& visitor = heapRootVisitor.visitor();
-
for (size_t i = 0; i < weakImplCount(); ++i) {
WeakImpl* weakImpl = &weakImpls()[i];
if (weakImpl->state() != WeakImpl::Live)
@@ -122,7 +113,7 @@
continue;
const JSValue& jsValue = weakImpl->jsValue();
- if (m_container.isMarkedOrNewlyAllocated(jsValue.asCell()))
+ if (container.isMarkedOrNewlyAllocatedDuringWeakVisiting(version, jsValue.asCell()))
continue;
if (!weakHandleOwner->isReachableFromOpaqueRoots(Handle<Unknown>::wrapSlot(&const_cast<JSValue&>(jsValue)), weakImpl->context(), visitor))
@@ -132,6 +123,21 @@
}
}
+void WeakBlock::visit(HeapRootVisitor& heapRootVisitor)
+{
+ // If a block is completely empty, a visit won't have any effect.
+ if (isEmpty())
+ return;
+
+ // If this WeakBlock doesn't belong to a CellContainer, we won't even be here.
+ ASSERT(m_container);
+
+ if (m_container.isLargeAllocation())
+ specializedVisit(m_container.largeAllocation(), heapRootVisitor);
+ else
+ specializedVisit(m_container.markedBlock(), heapRootVisitor);
+}
+
void WeakBlock::reap()
{
// If a block is completely empty, a reaping won't have any effect.
Modified: trunk/Source/JavaScriptCore/heap/WeakBlock.h (205849 => 205850)
--- trunk/Source/JavaScriptCore/heap/WeakBlock.h 2016-09-13 04:32:06 UTC (rev 205849)
+++ trunk/Source/JavaScriptCore/heap/WeakBlock.h 2016-09-13 04:33:19 UTC (rev 205850)
@@ -72,6 +72,9 @@
private:
static FreeCell* asFreeCell(WeakImpl*);
+
+ template<typename ContainerType>
+ void specializedVisit(ContainerType&, HeapRootVisitor&);
explicit WeakBlock(CellContainer);
void finalize(WeakImpl*);