Title: [240216] trunk/Source/JavaScriptCore
Revision
240216
Author
[email protected]
Date
2019-01-20 12:39:33 -0800 (Sun, 20 Jan 2019)

Log Message

[JSC] Shrink data structure size in JSC/heap
https://bugs.webkit.org/show_bug.cgi?id=193612

Reviewed by Saam Barati.

This patch reduces the size of data structures in JSC/heap. Basically, we reorder the members to remove paddings.

For Subspace, we drop CellAttributes `m_attributes`. Instead, we use `heapCellType->attributes()`. And we use
FreeList::cellSize() instead of holding m_cellSize in LocalAllocator.

This change reduces the size of JSC::VM too since it includes JSC::Heap. The size of VM shrinks from 78208 to 76696 bytes.

* heap/BlockDirectory.cpp:
* heap/BlockDirectory.h:
* heap/CollectionScope.h:
* heap/CompleteSubspace.cpp:
(JSC::CompleteSubspace::allocatorForSlow):
* heap/FreeList.h:
(JSC::FreeList::offsetOfCellSize):
(JSC::FreeList::cellSize const):
* heap/Heap.cpp:
(JSC::Heap::Heap):
(JSC::Heap::updateObjectCounts):
(JSC::Heap::addToRememberedSet):
(JSC::Heap::runBeginPhase):
(JSC::Heap::willStartCollection):
(JSC::Heap::pruneStaleEntriesFromWeakGCMaps):
(JSC::Heap::deleteSourceProviderCaches):
(JSC::Heap::notifyIncrementalSweeper):
(JSC::Heap::updateAllocationLimits):
* heap/Heap.h:
* heap/IsoAlignedMemoryAllocator.h:
* heap/LargeAllocation.cpp:
* heap/LocalAllocator.cpp:
(JSC::LocalAllocator::LocalAllocator):
* heap/LocalAllocator.h:
(JSC::LocalAllocator::cellSize const):
(JSC::LocalAllocator::offsetOfCellSize):
* heap/MarkedSpace.cpp:
(JSC::MarkedSpace::MarkedSpace):
* heap/MarkedSpace.h:
* heap/MarkingConstraint.h:
* heap/Subspace.cpp:
(JSC::Subspace::initialize):
* heap/Subspace.h:
(JSC::Subspace::attributes const): Deleted.
* heap/SubspaceInlines.h:
(JSC::Subspace::forEachMarkedCell):
(JSC::Subspace::forEachMarkedCellInParallel):
(JSC::Subspace::forEachLiveCell):
(JSC::Subspace::attributes const):

Modified Paths

Diff

Modified: trunk/Source/JavaScriptCore/ChangeLog (240215 => 240216)


--- trunk/Source/JavaScriptCore/ChangeLog	2019-01-20 20:37:30 UTC (rev 240215)
+++ trunk/Source/JavaScriptCore/ChangeLog	2019-01-20 20:39:33 UTC (rev 240216)
@@ -1,3 +1,57 @@
+2019-01-20  Yusuke Suzuki  <[email protected]>
+
+        [JSC] Shrink data structure size in JSC/heap
+        https://bugs.webkit.org/show_bug.cgi?id=193612
+
+        Reviewed by Saam Barati.
+
+        This patch reduces the size of data structures in JSC/heap. Basically, we reorder the members to remove paddings.
+
+        For Subspace, we drop CellAttributes `m_attributes`. Instead, we use `heapCellType->attributes()`. And we use
+        FreeList::cellSize() instead of holding m_cellSize in LocalAllocator.
+
+        This change reduces the size of JSC::VM too since it includes JSC::Heap. The size of VM becomes from 78208 to 76696.
+
+        * heap/BlockDirectory.cpp:
+        * heap/BlockDirectory.h:
+        * heap/CollectionScope.h:
+        * heap/CompleteSubspace.cpp:
+        (JSC::CompleteSubspace::allocatorForSlow):
+        * heap/FreeList.h:
+        (JSC::FreeList::offsetOfCellSize):
+        (JSC::FreeList::cellSize const):
+        * heap/Heap.cpp:
+        (JSC::Heap::Heap):
+        (JSC::Heap::updateObjectCounts):
+        (JSC::Heap::addToRememberedSet):
+        (JSC::Heap::runBeginPhase):
+        (JSC::Heap::willStartCollection):
+        (JSC::Heap::pruneStaleEntriesFromWeakGCMaps):
+        (JSC::Heap::deleteSourceProviderCaches):
+        (JSC::Heap::notifyIncrementalSweeper):
+        (JSC::Heap::updateAllocationLimits):
+        * heap/Heap.h:
+        * heap/IsoAlignedMemoryAllocator.h:
+        * heap/LargeAllocation.cpp:
+        * heap/LocalAllocator.cpp:
+        (JSC::LocalAllocator::LocalAllocator):
+        * heap/LocalAllocator.h:
+        (JSC::LocalAllocator::cellSize const):
+        (JSC::LocalAllocator::offsetOfCellSize):
+        * heap/MarkedSpace.cpp:
+        (JSC::MarkedSpace::MarkedSpace):
+        * heap/MarkedSpace.h:
+        * heap/MarkingConstraint.h:
+        * heap/Subspace.cpp:
+        (JSC::Subspace::initialize):
+        * heap/Subspace.h:
+        (JSC::Subspace::attributes const): Deleted.
+        * heap/SubspaceInlines.h:
+        (JSC::Subspace::forEachMarkedCell):
+        (JSC::Subspace::forEachMarkedCellInParallel):
+        (JSC::Subspace::forEachLiveCell):
+        (JSC::Subspace::attributes const):
+
 2019-01-20  Tadeu Zagallo  <[email protected]>
 
         Cache bytecode to disk

Modified: trunk/Source/JavaScriptCore/heap/BlockDirectory.cpp (240215 => 240216)


--- trunk/Source/JavaScriptCore/heap/BlockDirectory.cpp	2019-01-20 20:37:30 UTC (rev 240215)
+++ trunk/Source/JavaScriptCore/heap/BlockDirectory.cpp	2019-01-20 20:39:33 UTC (rev 240216)
@@ -32,6 +32,7 @@
 #include "IncrementalSweeper.h"
 #include "JSCInlines.h"
 #include "MarkedBlockInlines.h"
+#include "SubspaceInlines.h"
 #include "SuperSampler.h"
 #include "VM.h"
 

Modified: trunk/Source/JavaScriptCore/heap/BlockDirectory.h (240215 => 240216)


--- trunk/Source/JavaScriptCore/heap/BlockDirectory.h	2019-01-20 20:37:30 UTC (rev 240215)
+++ trunk/Source/JavaScriptCore/heap/BlockDirectory.h	2019-01-20 20:39:33 UTC (rev 240216)
@@ -173,11 +173,15 @@
 
     // Mutator uses this to guard resizing the bitvectors. Those things in the GC that may run
     // concurrently to the mutator must lock this when accessing the bitvectors.
-    Lock m_bitvectorLock;
 #define BLOCK_DIRECTORY_BIT_DECLARATION(lowerBitName, capitalBitName) \
     FastBitVector m_ ## lowerBitName;
     FOR_EACH_BLOCK_DIRECTORY_BIT(BLOCK_DIRECTORY_BIT_DECLARATION)
 #undef BLOCK_DIRECTORY_BIT_DECLARATION
+    Lock m_bitvectorLock;
+    Lock m_localAllocatorsLock;
+    CellAttributes m_attributes;
+
+    unsigned m_cellSize;
     
     // After you do something to a block based on one of these cursors, you clear the bit in the
     // corresponding bitvector and leave the cursor where it was.
@@ -184,8 +188,6 @@
     size_t m_emptyCursor { 0 };
     size_t m_unsweptCursor { 0 }; // Points to the next block that is a candidate for incremental sweeping.
     
-    unsigned m_cellSize;
-    CellAttributes m_attributes;
     // FIXME: All of these should probably be references.
     // https://bugs.webkit.org/show_bug.cgi?id=166988
     Heap* m_heap { nullptr };
@@ -194,7 +196,6 @@
     BlockDirectory* m_nextDirectoryInSubspace { nullptr };
     BlockDirectory* m_nextDirectoryInAlignedMemoryAllocator { nullptr };
     
-    Lock m_localAllocatorsLock;
     SentinelLinkedList<LocalAllocator, BasicRawSentinelNode<LocalAllocator>> m_localAllocators;
 };
 

Modified: trunk/Source/JavaScriptCore/heap/CollectionScope.h (240215 => 240216)


--- trunk/Source/JavaScriptCore/heap/CollectionScope.h	2019-01-20 20:37:30 UTC (rev 240215)
+++ trunk/Source/JavaScriptCore/heap/CollectionScope.h	2019-01-20 20:39:33 UTC (rev 240216)
@@ -27,7 +27,7 @@
 
 namespace JSC {
 
-enum class CollectionScope { Eden, Full };
+enum class CollectionScope : uint8_t { Eden, Full };
 
 const char* collectionScopeName(CollectionScope);
 

Modified: trunk/Source/JavaScriptCore/heap/CompleteSubspace.cpp (240215 => 240216)


--- trunk/Source/JavaScriptCore/heap/CompleteSubspace.cpp	2019-01-20 20:37:30 UTC (rev 240215)
+++ trunk/Source/JavaScriptCore/heap/CompleteSubspace.cpp	2019-01-20 20:39:33 UTC (rev 240216)
@@ -77,7 +77,7 @@
         return allocator;
 
     if (false)
-        dataLog("Creating BlockDirectory/LocalAllocator for ", m_name, ", ", m_attributes, ", ", sizeClass, ".\n");
+        dataLog("Creating BlockDirectory/LocalAllocator for ", m_name, ", ", attributes(), ", ", sizeClass, ".\n");
     
     std::unique_ptr<BlockDirectory> uniqueDirectory =
         std::make_unique<BlockDirectory>(m_space.heap(), sizeClass);

Modified: trunk/Source/JavaScriptCore/heap/FreeList.h (240215 => 240216)


--- trunk/Source/JavaScriptCore/heap/FreeList.h	2019-01-20 20:37:30 UTC (rev 240215)
+++ trunk/Source/JavaScriptCore/heap/FreeList.h	2019-01-20 20:39:33 UTC (rev 240216)
@@ -84,8 +84,11 @@
     static ptrdiff_t offsetOfPayloadEnd() { return OBJECT_OFFSETOF(FreeList, m_payloadEnd); }
     static ptrdiff_t offsetOfRemaining() { return OBJECT_OFFSETOF(FreeList, m_remaining); }
     static ptrdiff_t offsetOfOriginalSize() { return OBJECT_OFFSETOF(FreeList, m_originalSize); }
+    static ptrdiff_t offsetOfCellSize() { return OBJECT_OFFSETOF(FreeList, m_cellSize); }
     
     void dump(PrintStream&) const;
+
+    unsigned cellSize() const { return m_cellSize; }
     
 private:
     FreeCell* head() const { return FreeCell::descramble(m_scrambledHead, m_secret); }

Modified: trunk/Source/JavaScriptCore/heap/Heap.cpp (240215 => 240216)


--- trunk/Source/JavaScriptCore/heap/Heap.cpp	2019-01-20 20:37:30 UTC (rev 240215)
+++ trunk/Source/JavaScriptCore/heap/Heap.cpp	2019-01-20 20:39:33 UTC (rev 240216)
@@ -274,20 +274,9 @@
     : m_heapType(heapType)
     , m_ramSize(Options::forceRAMSize() ? Options::forceRAMSize() : ramSize())
     , m_minBytesPerCycle(minHeapSize(m_heapType, m_ramSize))
-    , m_sizeAfterLastCollect(0)
-    , m_sizeAfterLastFullCollect(0)
-    , m_sizeBeforeLastFullCollect(0)
-    , m_sizeAfterLastEdenCollect(0)
-    , m_sizeBeforeLastEdenCollect(0)
-    , m_bytesAllocatedThisCycle(0)
-    , m_bytesAbandonedSinceLastFullCollect(0)
     , m_maxEdenSize(m_minBytesPerCycle)
     , m_maxHeapSize(m_minBytesPerCycle)
-    , m_shouldDoFullCollection(false)
-    , m_totalBytesVisited(0)
     , m_objectSpace(this)
-    , m_extraMemorySize(0)
-    , m_deprecatedExtraMemorySize(0)
     , m_machineThreads(std::make_unique<MachineThreads>())
     , m_collectorSlotVisitor(std::make_unique<SlotVisitor>(*this, "C"))
     , m_mutatorSlotVisitor(std::make_unique<SlotVisitor>(*this, "M"))
@@ -297,20 +286,13 @@
     , m_handleSet(vm)
     , m_codeBlocks(std::make_unique<CodeBlockSet>())
     , m_jitStubRoutines(std::make_unique<JITStubRoutineSet>())
-    , m_isSafeToCollect(false)
     , m_vm(vm)
     // We seed with 10ms so that GCActivityCallback::didAllocate doesn't continuously 
     // schedule the timer if we've never done a collection.
-    , m_lastFullGCLength(0.01)
-    , m_lastEdenGCLength(0.01)
     , m_fullActivityCallback(GCActivityCallback::tryCreateFullTimer(this))
     , m_edenActivityCallback(GCActivityCallback::tryCreateEdenTimer(this))
     , m_sweeper(adoptRef(*new IncrementalSweeper(this)))
     , m_stopIfNecessaryTimer(adoptRef(*new StopIfNecessaryTimer(vm)))
-    , m_deferralDepth(0)
-#if USE(FOUNDATION)
-    , m_delayedReleaseRecursionCount(0)
-#endif
     , m_sharedCollectorMarkStack(std::make_unique<MarkStackArray>())
     , m_sharedMutatorMarkStack(std::make_unique<MarkStackArray>())
     , m_helperClient(&heapHelperPool())
@@ -767,7 +749,7 @@
 
 void Heap::updateObjectCounts()
 {
-    if (m_collectionScope == CollectionScope::Full)
+    if (m_collectionScope && m_collectionScope.value() == CollectionScope::Full)
         m_totalBytesVisited = 0;
 
     m_totalBytesVisitedThisCycle = bytesVisited();
@@ -963,7 +945,7 @@
             // path. So, we don't have to remember this object. We could return here. But we go
             // further and attempt to re-white the object.
             
-            RELEASE_ASSERT(m_collectionScope == CollectionScope::Full);
+            RELEASE_ASSERT(m_collectionScope && m_collectionScope.value() == CollectionScope::Full);
             
             if (cell->atomicCompareExchangeCellStateStrong(CellState::PossiblyBlack, CellState::DefinitelyWhite) == CellState::PossiblyBlack) {
                 // Now we protect against this race:
@@ -1234,7 +1216,7 @@
         
     prepareForMarking();
         
-    if (m_collectionScope == CollectionScope::Full) {
+    if (m_collectionScope && m_collectionScope.value() == CollectionScope::Full) {
         m_opaqueRoots.clear();
         m_collectorSlotVisitor->clearMarkStacks();
         m_mutatorMarkStack->clear();
@@ -2127,7 +2109,7 @@
         if (false)
             dataLog("Eden collection!\n");
     }
-    if (m_collectionScope == CollectionScope::Full) {
+    if (m_collectionScope && m_collectionScope.value() == CollectionScope::Full) {
         m_sizeBeforeLastFullCollect = m_sizeAfterLastCollect + m_bytesAllocatedThisCycle;
         m_extraMemorySize = 0;
         m_deprecatedExtraMemorySize = 0;
@@ -2138,7 +2120,7 @@
         if (m_fullActivityCallback)
             m_fullActivityCallback->willCollect();
     } else {
-        ASSERT(m_collectionScope == CollectionScope::Eden);
+        ASSERT(m_collectionScope && m_collectionScope.value() == CollectionScope::Eden);
         m_sizeBeforeLastEdenCollect = m_sizeAfterLastCollect + m_bytesAllocatedThisCycle;
     }
 
@@ -2161,7 +2143,7 @@
 
 void Heap::pruneStaleEntriesFromWeakGCMaps()
 {
-    if (m_collectionScope != CollectionScope::Full)
+    if (!m_collectionScope || m_collectionScope.value() != CollectionScope::Full)
         return;
     for (WeakGCMapBase* weakGCMap : m_weakGCMaps)
         weakGCMap->pruneStaleEntries();
@@ -2180,13 +2162,13 @@
 
 void Heap::deleteSourceProviderCaches()
 {
-    if (*m_lastCollectionScope == CollectionScope::Full)
+    if (m_lastCollectionScope && m_lastCollectionScope.value() == CollectionScope::Full)
         m_vm->clearSourceProviderCaches();
 }
 
 void Heap::notifyIncrementalSweeper()
 {
-    if (m_collectionScope == CollectionScope::Full) {
+    if (m_collectionScope && m_collectionScope.value() == CollectionScope::Full) {
         if (!m_logicallyEmptyWeakBlocks.isEmpty())
             m_indexOfNextLogicallyEmptyWeakBlockToSweep = 0;
     }
@@ -2231,7 +2213,7 @@
     if (verbose)
         dataLog("extraMemorySize() = ", extraMemorySize(), ", currentHeapSize = ", currentHeapSize, "\n");
     
-    if (m_collectionScope == CollectionScope::Full) {
+    if (m_collectionScope && m_collectionScope.value() == CollectionScope::Full) {
         // To avoid pathological GC churn in very small and very large heaps, we set
         // the new allocation limit based on the current size of the heap, with a
         // fixed minimum.

Modified: trunk/Source/JavaScriptCore/heap/Heap.h (240215 => 240216)


--- trunk/Source/JavaScriptCore/heap/Heap.h	2019-01-20 20:37:30 UTC (rev 240215)
+++ trunk/Source/JavaScriptCore/heap/Heap.h	2019-01-20 20:39:33 UTC (rev 240216)
@@ -44,6 +44,7 @@
 #include <wtf/Deque.h>
 #include <wtf/HashCountedSet.h>
 #include <wtf/HashSet.h>
+#include <wtf/Markable.h>
 #include <wtf/ParallelHelperPool.h>
 #include <wtf/Threading.h>
 
@@ -558,32 +559,34 @@
     static bool shouldSweepSynchronously();
     
     const HeapType m_heapType;
+    MutatorState m_mutatorState { MutatorState::Running };
     const size_t m_ramSize;
     const size_t m_minBytesPerCycle;
-    size_t m_sizeAfterLastCollect;
-    size_t m_sizeAfterLastFullCollect;
-    size_t m_sizeBeforeLastFullCollect;
-    size_t m_sizeAfterLastEdenCollect;
-    size_t m_sizeBeforeLastEdenCollect;
+    size_t m_sizeAfterLastCollect { 0 };
+    size_t m_sizeAfterLastFullCollect { 0 };
+    size_t m_sizeBeforeLastFullCollect { 0 };
+    size_t m_sizeAfterLastEdenCollect { 0 };
+    size_t m_sizeBeforeLastEdenCollect { 0 };
 
-    size_t m_bytesAllocatedThisCycle;
-    size_t m_bytesAbandonedSinceLastFullCollect;
+    size_t m_bytesAllocatedThisCycle { 0 };
+    size_t m_bytesAbandonedSinceLastFullCollect { 0 };
     size_t m_maxEdenSize;
     size_t m_maxEdenSizeWhenCritical;
     size_t m_maxHeapSize;
-    bool m_shouldDoFullCollection;
-    size_t m_totalBytesVisited;
-    size_t m_totalBytesVisitedThisCycle;
+    size_t m_totalBytesVisited { 0 };
+    size_t m_totalBytesVisitedThisCycle { 0 };
     double m_incrementBalance { 0 };
     
-    Optional<CollectionScope> m_collectionScope;
-    Optional<CollectionScope> m_lastCollectionScope;
-    MutatorState m_mutatorState { MutatorState::Running };
+    bool m_shouldDoFullCollection { false };
+    Markable<CollectionScope, EnumMarkableTraits<CollectionScope>> m_collectionScope;
+    Markable<CollectionScope, EnumMarkableTraits<CollectionScope>> m_lastCollectionScope;
+    Lock m_raceMarkStackLock;
+
     StructureIDTable m_structureIDTable;
     MarkedSpace m_objectSpace;
     GCIncomingRefCountedSet<ArrayBuffer> m_arrayBuffers;
-    size_t m_extraMemorySize;
-    size_t m_deprecatedExtraMemorySize;
+    size_t m_extraMemorySize { 0 };
+    size_t m_deprecatedExtraMemorySize { 0 };
 
     HashSet<const JSCell*> m_copyingRememberedSet;
 
@@ -595,10 +598,7 @@
     std::unique_ptr<SlotVisitor> m_collectorSlotVisitor;
     std::unique_ptr<SlotVisitor> m_mutatorSlotVisitor;
     std::unique_ptr<MarkStackArray> m_mutatorMarkStack;
-
-    Lock m_raceMarkStackLock;
     std::unique_ptr<MarkStackArray> m_raceMarkStack;
-
     std::unique_ptr<MarkingConstraintSet> m_constraintSet;
 
     // We pool the slot visitors used by parallel marking threads. It's useful to be able to
@@ -607,7 +607,6 @@
     // them at the end.
     Vector<std::unique_ptr<SlotVisitor>> m_parallelSlotVisitors;
     Vector<SlotVisitor*> m_availableParallelSlotVisitors;
-    Lock m_parallelSlotVisitorLock;
     
     HandleSet m_handleSet;
     std::unique_ptr<CodeBlockSet> m_codeBlocks;
@@ -614,15 +613,16 @@
     std::unique_ptr<JITStubRoutineSet> m_jitStubRoutines;
     FinalizerOwner m_finalizerOwner;
     
-    bool m_isSafeToCollect;
+    Lock m_parallelSlotVisitorLock;
+    bool m_isSafeToCollect { false };
     bool m_isShuttingDown { false };
+    bool m_mutatorShouldBeFenced { Options::forceFencedBarrier() };
 
-    bool m_mutatorShouldBeFenced { Options::forceFencedBarrier() };
     unsigned m_barrierThreshold { Options::forceFencedBarrier() ? tautologicalThreshold : blackThreshold };
 
     VM* m_vm;
-    Seconds m_lastFullGCLength;
-    Seconds m_lastEdenGCLength;
+    Seconds m_lastFullGCLength { 10_ms };
+    Seconds m_lastEdenGCLength { 10_ms };
 
     Vector<WeakBlock*> m_logicallyEmptyWeakBlocks;
     size_t m_indexOfNextLogicallyEmptyWeakBlockToSweep { WTF::notFound };
@@ -636,34 +636,26 @@
     
     Vector<HeapFinalizerCallback> m_heapFinalizerCallbacks;
     
-    unsigned m_deferralDepth;
-    bool m_didDeferGCWork { false };
-
     std::unique_ptr<HeapVerifier> m_verifier;
 
 #if USE(FOUNDATION)
     Vector<RetainPtr<CFTypeRef>> m_delayedReleaseObjects;
-    unsigned m_delayedReleaseRecursionCount;
+    unsigned m_delayedReleaseRecursionCount { 0 };
 #endif
 #if USE(GLIB)
     Vector<std::unique_ptr<JSCGLibWrapperObject>> m_delayedReleaseObjects;
     unsigned m_delayedReleaseRecursionCount { 0 };
 #endif
+    unsigned m_deferralDepth { 0 };
 
     HashSet<WeakGCMapBase*> m_weakGCMaps;
     
-    Lock m_visitRaceLock;
-
-    Lock m_markingMutex;
-    Condition m_markingConditionVariable;
     std::unique_ptr<MarkStackArray> m_sharedCollectorMarkStack;
     std::unique_ptr<MarkStackArray> m_sharedMutatorMarkStack;
     unsigned m_numberOfActiveParallelMarkers { 0 };
     unsigned m_numberOfWaitingParallelMarkers { 0 };
-    bool m_parallelMarkersShouldExit { false };
 
     ConcurrentPtrHashSet m_opaqueRoots;
-
     static const size_t s_blockFragmentLength = 32;
 
     ParallelHelperClient m_helperClient;
@@ -684,6 +676,10 @@
     static const unsigned mutatorWaitingBit = 1u << 5u; // Allows the mutator to use this as a condition variable.
     Atomic<unsigned> m_worldState;
     bool m_worldIsStopped { false };
+    Lock m_visitRaceLock;
+    Lock m_markingMutex;
+    Condition m_markingConditionVariable;
+
     MonotonicTime m_beforeGC;
     MonotonicTime m_afterGC;
     MonotonicTime m_stopTime;
@@ -692,6 +688,7 @@
     GCRequest m_currentRequest;
     Ticket m_lastServedTicket { 0 };
     Ticket m_lastGrantedTicket { 0 };
+
     CollectorPhase m_lastPhase { CollectorPhase::NotRunning };
     CollectorPhase m_currentPhase { CollectorPhase::NotRunning };
     CollectorPhase m_nextPhase { CollectorPhase::NotRunning };
@@ -698,6 +695,9 @@
     bool m_threadShouldStop { false };
     bool m_threadIsStopping { false };
     bool m_mutatorDidRun { true };
+    bool m_didDeferGCWork { false };
+    bool m_shouldStopCollectingContinuously { false };
+
     uint64_t m_mutatorExecutionVersion { 0 };
     uint64_t m_phaseVersion { 0 };
     Box<Lock> m_threadLock;
@@ -704,14 +704,6 @@
     Ref<AutomaticThreadCondition> m_threadCondition; // The mutator must not wait on this. It would cause a deadlock.
     RefPtr<AutomaticThread> m_thread;
 
-#if PLATFORM(IOS_FAMILY)
-    unsigned m_precentAvailableMemoryCachedCallCount;
-    bool m_overCriticalMemoryThreshold;
-#endif
-
-    Lock m_collectContinuouslyLock;
-    Condition m_collectContinuouslyCondition;
-    bool m_shouldStopCollectingContinuously { false };
     RefPtr<WTF::Thread> m_collectContinuouslyThread { nullptr };
     
     MonotonicTime m_lastGCStartTime;
@@ -723,6 +715,15 @@
     
     CurrentThreadState* m_currentThreadState { nullptr };
     WTF::Thread* m_currentThread { nullptr }; // It's OK if this becomes a dangling pointer.
+
+#if PLATFORM(IOS_FAMILY)
+    unsigned m_precentAvailableMemoryCachedCallCount;
+    bool m_overCriticalMemoryThreshold;
+#endif
+
+    bool m_parallelMarkersShouldExit { false };
+    Lock m_collectContinuouslyLock;
+    Condition m_collectContinuouslyCondition;
 };
 
 } // namespace JSC

Modified: trunk/Source/JavaScriptCore/heap/IsoAlignedMemoryAllocator.h (240215 => 240216)


--- trunk/Source/JavaScriptCore/heap/IsoAlignedMemoryAllocator.h	2019-01-20 20:37:30 UTC (rev 240215)
+++ trunk/Source/JavaScriptCore/heap/IsoAlignedMemoryAllocator.h	2019-01-20 20:39:33 UTC (rev 240216)
@@ -40,11 +40,11 @@
     void dump(PrintStream&) const override;
 
 private:
-    Lock m_lock;
     Vector<void*> m_blocks;
     HashMap<void*, unsigned> m_blockIndices;
     FastBitVector m_committed;
     unsigned m_firstUncommitted { 0 };
+    Lock m_lock;
 };
 
 } // namespace JSC

Modified: trunk/Source/JavaScriptCore/heap/LargeAllocation.cpp (240215 => 240216)


--- trunk/Source/JavaScriptCore/heap/LargeAllocation.cpp	2019-01-20 20:37:30 UTC (rev 240215)
+++ trunk/Source/JavaScriptCore/heap/LargeAllocation.cpp	2019-01-20 20:39:33 UTC (rev 240216)
@@ -30,6 +30,7 @@
 #include "Heap.h"
 #include "JSCInlines.h"
 #include "Operations.h"
+#include "SubspaceInlines.h"
 
 namespace JSC {
 

Modified: trunk/Source/JavaScriptCore/heap/LocalAllocator.cpp (240215 => 240216)


--- trunk/Source/JavaScriptCore/heap/LocalAllocator.cpp	2019-01-20 20:37:30 UTC (rev 240215)
+++ trunk/Source/JavaScriptCore/heap/LocalAllocator.cpp	2019-01-20 20:39:33 UTC (rev 240216)
@@ -34,8 +34,7 @@
 
 LocalAllocator::LocalAllocator(BlockDirectory* directory)
     : m_directory(directory)
-    , m_cellSize(directory->m_cellSize)
-    , m_freeList(m_cellSize)
+    , m_freeList(directory->m_cellSize)
 {
     auto locker = holdLock(directory->m_localAllocatorsLock);
     directory->m_localAllocators.append(this);

Modified: trunk/Source/JavaScriptCore/heap/LocalAllocator.h (240215 => 240216)


--- trunk/Source/JavaScriptCore/heap/LocalAllocator.h	2019-01-20 20:37:30 UTC (rev 240215)
+++ trunk/Source/JavaScriptCore/heap/LocalAllocator.h	2019-01-20 20:39:33 UTC (rev 240216)
@@ -43,7 +43,7 @@
     
     void* allocate(GCDeferralContext*, AllocationFailureMode);
     
-    unsigned cellSize() const { return m_cellSize; }
+    unsigned cellSize() const { return m_freeList.cellSize(); }
 
     void stopAllocating();
     void prepareForAllocation();
@@ -67,8 +67,8 @@
     ALWAYS_INLINE void doTestCollectionsIfNeeded(GCDeferralContext*);
 
     BlockDirectory* m_directory;
-    unsigned m_cellSize;
     FreeList m_freeList;
+
     MarkedBlock::Handle* m_currentBlock { nullptr };
     MarkedBlock::Handle* m_lastActiveBlock { nullptr };
     
@@ -84,7 +84,7 @@
 
 inline ptrdiff_t LocalAllocator::offsetOfCellSize()
 {
-    return OBJECT_OFFSETOF(LocalAllocator, m_cellSize);
+    return OBJECT_OFFSETOF(LocalAllocator, m_freeList) + FreeList::offsetOfCellSize();
 }
 
 } // namespace JSC

Modified: trunk/Source/JavaScriptCore/heap/MarkedSpace.cpp (240215 => 240216)


--- trunk/Source/JavaScriptCore/heap/MarkedSpace.cpp	2019-01-20 20:37:30 UTC (rev 240215)
+++ trunk/Source/JavaScriptCore/heap/MarkedSpace.cpp	2019-01-20 20:39:33 UTC (rev 240216)
@@ -197,8 +197,6 @@
 
 MarkedSpace::MarkedSpace(Heap* heap)
     : m_heap(heap)
-    , m_capacity(0)
-    , m_isIterating(false)
 {
     initializeSizeClassForStepSize();
 }

Modified: trunk/Source/JavaScriptCore/heap/MarkedSpace.h (240215 => 240216)


--- trunk/Source/JavaScriptCore/heap/MarkedSpace.h	2019-01-20 20:37:30 UTC (rev 240215)
+++ trunk/Source/JavaScriptCore/heap/MarkedSpace.h	2019-01-20 20:39:33 UTC (rev 240216)
@@ -201,22 +201,22 @@
     unsigned m_largeAllocationsNurseryOffset { 0 };
     unsigned m_largeAllocationsOffsetForThisCollection { 0 };
     unsigned m_largeAllocationsNurseryOffsetForSweep { 0 };
+    unsigned m_largeAllocationsForThisCollectionSize { 0 };
     LargeAllocation** m_largeAllocationsForThisCollectionBegin { nullptr };
     LargeAllocation** m_largeAllocationsForThisCollectionEnd { nullptr };
-    unsigned m_largeAllocationsForThisCollectionSize { 0 };
 
     Heap* m_heap;
+    size_t m_capacity { 0 };
     HeapVersion m_markingVersion { initialVersion };
     HeapVersion m_newlyAllocatedVersion { initialVersion };
-    size_t m_capacity;
-    bool m_isIterating;
+    bool m_isIterating { false };
     bool m_isMarking { false };
+    Lock m_directoryLock;
     MarkedBlockSet m_blocks;
     
     SentinelLinkedList<WeakSet, BasicRawSentinelNode<WeakSet>> m_activeWeakSets;
     SentinelLinkedList<WeakSet, BasicRawSentinelNode<WeakSet>> m_newActiveWeakSets;
 
-    Lock m_directoryLock;
     SinglyLinkedListWithTail<BlockDirectory> m_directories;
 
     friend class HeapVerifier;

Modified: trunk/Source/JavaScriptCore/heap/MarkingConstraint.h (240215 => 240216)


--- trunk/Source/JavaScriptCore/heap/MarkingConstraint.h	2019-01-20 20:37:30 UTC (rev 240215)
+++ trunk/Source/JavaScriptCore/heap/MarkingConstraint.h	2019-01-20 20:39:33 UTC (rev 240216)
@@ -82,13 +82,13 @@
 private:
     friend class MarkingConstraintSet; // So it can set m_index.
     
-    unsigned m_index { UINT_MAX };
     CString m_abbreviatedName;
     CString m_name;
+    size_t m_lastVisitCount { 0 };
+    unsigned m_index { UINT_MAX };
     ConstraintVolatility m_volatility;
     ConstraintConcurrency m_concurrency;
     ConstraintParallelism m_parallelism;
-    size_t m_lastVisitCount { 0 };
     Lock m_lock;
 };
 

Modified: trunk/Source/JavaScriptCore/heap/Subspace.cpp (240215 => 240216)


--- trunk/Source/JavaScriptCore/heap/Subspace.cpp	2019-01-20 20:37:30 UTC (rev 240215)
+++ trunk/Source/JavaScriptCore/heap/Subspace.cpp	2019-01-20 20:39:33 UTC (rev 240216)
@@ -45,7 +45,6 @@
 
 void Subspace::initialize(HeapCellType* heapCellType, AlignedMemoryAllocator* alignedMemoryAllocator)
 {
-    m_attributes = heapCellType->attributes();
     m_heapCellType = heapCellType;
     m_alignedMemoryAllocator = alignedMemoryAllocator;
     m_directoryForEmptyAllocation = m_alignedMemoryAllocator->firstDirectory();

Modified: trunk/Source/JavaScriptCore/heap/Subspace.h (240215 => 240216)


--- trunk/Source/JavaScriptCore/heap/Subspace.h	2019-01-20 20:37:30 UTC (rev 240215)
+++ trunk/Source/JavaScriptCore/heap/Subspace.h	2019-01-20 20:39:33 UTC (rev 240216)
@@ -50,7 +50,7 @@
     const char* name() const { return m_name.data(); }
     MarkedSpace& space() const { return m_space; }
     
-    const CellAttributes& attributes() const { return m_attributes; }
+    const CellAttributes& attributes() const;
     HeapCellType* heapCellType() const { return m_heapCellType; }
     AlignedMemoryAllocator* alignedMemoryAllocator() const { return m_alignedMemoryAllocator; }
     
@@ -106,9 +106,6 @@
     
     MarkedSpace& m_space;
     
-    CString m_name;
-    CellAttributes m_attributes;
-
     HeapCellType* m_heapCellType { nullptr };
     AlignedMemoryAllocator* m_alignedMemoryAllocator { nullptr };
     
@@ -116,6 +113,8 @@
     BlockDirectory* m_directoryForEmptyAllocation { nullptr }; // Uses the MarkedSpace linked list of blocks.
     SentinelLinkedList<LargeAllocation, BasicRawSentinelNode<LargeAllocation>> m_largeAllocations;
     Subspace* m_nextSubspaceInAlignedMemoryAllocator { nullptr };
+
+    CString m_name;
 };
 
 } // namespace JSC

Modified: trunk/Source/JavaScriptCore/heap/SubspaceInlines.h (240215 => 240216)


--- trunk/Source/JavaScriptCore/heap/SubspaceInlines.h	2019-01-20 20:37:30 UTC (rev 240215)
+++ trunk/Source/JavaScriptCore/heap/SubspaceInlines.h	2019-01-20 20:39:33 UTC (rev 240216)
@@ -26,6 +26,7 @@
 #pragma once
 
 #include "BlockDirectoryInlines.h"
+#include "HeapCellType.h"
 #include "JSCast.h"
 #include "MarkedBlock.h"
 #include "MarkedSpace.h"
@@ -76,10 +77,11 @@
                     return IterationStatus::Continue;
                 });
         });
+    CellAttributes attributes = this->attributes();
     forEachLargeAllocation(
         [&] (LargeAllocation* allocation) {
             if (allocation->isMarked())
-                func(allocation->cell(), m_attributes.cellKind);
+                func(allocation->cell(), attributes.cellKind);
         });
 }
 
@@ -112,10 +114,11 @@
                 m_needToVisitLargeAllocations = false;
             }
             
+            CellAttributes attributes = m_subspace.attributes();
             m_subspace.forEachLargeAllocation(
                 [&] (LargeAllocation* allocation) {
                     if (allocation->isMarked())
-                        m_func(visitor, allocation->cell(), m_subspace.m_attributes.cellKind);
+                        m_func(visitor, allocation->cell(), attributes.cellKind);
                 });
         }
         
@@ -141,12 +144,18 @@
                     return IterationStatus::Continue;
                 });
         });
+    CellAttributes attributes = this->attributes();
     forEachLargeAllocation(
         [&] (LargeAllocation* allocation) {
             if (allocation->isLive())
-                func(allocation->cell(), m_attributes.cellKind);
+                func(allocation->cell(), attributes.cellKind);
         });
 }
 
+inline const CellAttributes& Subspace::attributes() const
+{
+    return m_heapCellType->attributes();
+}
+
 } // namespace JSC
 
_______________________________________________
webkit-changes mailing list
[email protected]
https://lists.webkit.org/mailman/listinfo/webkit-changes

Reply via email to