Title: [262786] trunk/Source/_javascript_Core
Revision
262786
Author
[email protected]
Date
2020-06-09 02:20:44 -0700 (Tue, 09 Jun 2020)

Log Message

[JSC] Shrink __DATA,(__data,__bss,__common) more
https://bugs.webkit.org/show_bug.cgi?id=212863

Reviewed by Sam Weinig.

1. Use `unsigned` instead of `size_t` in GC size-class array. We know that this number never exceeds largeCutoff,
   which must be much smaller than UINT32_MAX.
2. Add missing const to various variables to put them in __DATA,__const instead of __DATA,__data etc.

* heap/MarkedSpace.cpp:
(JSC::MarkedSpace::initializeSizeClassForStepSize):
* heap/MarkedSpace.h:
* heap/VisitRaceKey.cpp:
* heap/VisitRaceKey.h:
* inspector/agents/InspectorDebuggerAgent.cpp:
* inspector/agents/InspectorDebuggerAgent.h:
* runtime/PropertyDescriptor.cpp:
* runtime/PropertyDescriptor.h:

Modified Paths

Diff

Modified: trunk/Source/_javascript_Core/ChangeLog (262785 => 262786)


--- trunk/Source/_javascript_Core/ChangeLog	2020-06-09 09:13:43 UTC (rev 262785)
+++ trunk/Source/_javascript_Core/ChangeLog	2020-06-09 09:20:44 UTC (rev 262786)
@@ -1,3 +1,24 @@
+2020-06-09  Yusuke Suzuki  <[email protected]>
+
+        [JSC] Shrink __DATA,(__data,__bss,__common) more
+        https://bugs.webkit.org/show_bug.cgi?id=212863
+
+        Reviewed by Sam Weinig.
+
+        1. Use `unsigned` instead of `size_t` in GC size-class array. We know that this number never exceeds largeCutoff,
+           which must be much smaller than UINT32_MAX.
+        2. Add missing const to various variables to put them in __DATA,__const instead of __DATA,__data etc.
+
+        * heap/MarkedSpace.cpp:
+        (JSC::MarkedSpace::initializeSizeClassForStepSize):
+        * heap/MarkedSpace.h:
+        * heap/VisitRaceKey.cpp:
+        * heap/VisitRaceKey.h:
+        * inspector/agents/InspectorDebuggerAgent.cpp:
+        * inspector/agents/InspectorDebuggerAgent.h:
+        * runtime/PropertyDescriptor.cpp:
+        * runtime/PropertyDescriptor.h:
+
 2020-06-08  Keith Miller  <[email protected]>
 
         Removed unneeded POINTER_WIDTH macro from b3

Modified: trunk/Source/_javascript_Core/heap/MarkedSpace.cpp (262785 => 262786)


--- trunk/Source/_javascript_Core/heap/MarkedSpace.cpp	2020-06-09 09:13:43 UTC (rev 262785)
+++ trunk/Source/_javascript_Core/heap/MarkedSpace.cpp	2020-06-09 09:20:44 UTC (rev 262786)
@@ -30,118 +30,113 @@
 
 namespace JSC {
 
-std::array<size_t, MarkedSpace::numSizeClasses> MarkedSpace::s_sizeClassForSizeStep;
+std::array<unsigned, MarkedSpace::numSizeClasses> MarkedSpace::s_sizeClassForSizeStep;
 
 namespace {
 
-const Vector<size_t>& sizeClasses()
+static Vector<size_t> sizeClasses()
 {
-    static Vector<size_t>* result;
-    static std::once_flag once;
-    std::call_once(
-        once,
-        [] {
-            result = new Vector<size_t>();
-            
-            if (UNLIKELY(Options::dumpSizeClasses())) {
-                dataLog("Block size: ", MarkedBlock::blockSize, "\n");
-                dataLog("Footer size: ", sizeof(MarkedBlock::Footer), "\n");
-            }
-            
-            auto add = [&] (size_t sizeClass) {
-                sizeClass = WTF::roundUpToMultipleOf<MarkedBlock::atomSize>(sizeClass);
-                dataLogLnIf(Options::dumpSizeClasses(), "Adding JSC MarkedSpace size class: ", sizeClass);
-                // Perform some validation as we go.
-                RELEASE_ASSERT(!(sizeClass % MarkedSpace::sizeStep));
-                if (result->isEmpty())
-                    RELEASE_ASSERT(sizeClass == MarkedSpace::sizeStep);
-                result->append(sizeClass);
-            };
-            
-            // This is a definition of the size classes in our GC. It must define all of the
-            // size classes from sizeStep up to largeCutoff.
+    Vector<size_t> result;
+
+    if (UNLIKELY(Options::dumpSizeClasses())) {
+        dataLog("Block size: ", MarkedBlock::blockSize, "\n");
+        dataLog("Footer size: ", sizeof(MarkedBlock::Footer), "\n");
+    }
     
-            // Have very precise size classes for the small stuff. This is a loop to make it easy to reduce
-            // atomSize.
-            for (size_t size = MarkedSpace::sizeStep; size < MarkedSpace::preciseCutoff; size += MarkedSpace::sizeStep)
-                add(size);
-            
-            // We want to make sure that the remaining size classes minimize internal fragmentation (i.e.
-            // the wasted space at the tail end of a MarkedBlock) while proceeding roughly in an exponential
-            // way starting at just above the precise size classes to four cells per block.
-            
-            dataLogLnIf(Options::dumpSizeClasses(), "    Marked block payload size: ", static_cast<size_t>(MarkedSpace::blockPayload));
-            
-            for (unsigned i = 0; ; ++i) {
-                double approximateSize = MarkedSpace::preciseCutoff * pow(Options::sizeClassProgression(), i);
-                dataLogLnIf(Options::dumpSizeClasses(), "    Next size class as a double: ", approximateSize);
+    auto add = [&] (size_t sizeClass) {
+        sizeClass = WTF::roundUpToMultipleOf<MarkedBlock::atomSize>(sizeClass);
+        dataLogLnIf(Options::dumpSizeClasses(), "Adding JSC MarkedSpace size class: ", sizeClass);
+        // Perform some validation as we go.
+        RELEASE_ASSERT(!(sizeClass % MarkedSpace::sizeStep));
+        if (result.isEmpty())
+            RELEASE_ASSERT(sizeClass == MarkedSpace::sizeStep);
+        result.append(sizeClass);
+    };
+    
+    // This is a definition of the size classes in our GC. It must define all of the
+    // size classes from sizeStep up to largeCutoff.
+
+    // Have very precise size classes for the small stuff. This is a loop to make it easy to reduce
+    // atomSize.
+    for (size_t size = MarkedSpace::sizeStep; size < MarkedSpace::preciseCutoff; size += MarkedSpace::sizeStep)
+        add(size);
+    
+    // We want to make sure that the remaining size classes minimize internal fragmentation (i.e.
+    // the wasted space at the tail end of a MarkedBlock) while proceeding roughly in an exponential
+    // way starting at just above the precise size classes to four cells per block.
+    
+    dataLogLnIf(Options::dumpSizeClasses(), "    Marked block payload size: ", static_cast<size_t>(MarkedSpace::blockPayload));
+    
+    for (unsigned i = 0; ; ++i) {
+        double approximateSize = MarkedSpace::preciseCutoff * pow(Options::sizeClassProgression(), i);
+        dataLogLnIf(Options::dumpSizeClasses(), "    Next size class as a double: ", approximateSize);
+
+        size_t approximateSizeInBytes = static_cast<size_t>(approximateSize);
+        dataLogLnIf(Options::dumpSizeClasses(), "    Next size class as bytes: ", approximateSizeInBytes);
+
+        // Make sure that the computer did the math correctly.
+        RELEASE_ASSERT(approximateSizeInBytes >= MarkedSpace::preciseCutoff);
         
-                size_t approximateSizeInBytes = static_cast<size_t>(approximateSize);
-                dataLogLnIf(Options::dumpSizeClasses(), "    Next size class as bytes: ", approximateSizeInBytes);
+        if (approximateSizeInBytes > MarkedSpace::largeCutoff)
+            break;
         
-                // Make sure that the computer did the math correctly.
-                RELEASE_ASSERT(approximateSizeInBytes >= MarkedSpace::preciseCutoff);
-                
-                if (approximateSizeInBytes > MarkedSpace::largeCutoff)
-                    break;
-                
-                size_t sizeClass =
-                    WTF::roundUpToMultipleOf<MarkedSpace::sizeStep>(approximateSizeInBytes);
-                dataLogLnIf(Options::dumpSizeClasses(), "    Size class: ", sizeClass);
-                
-                // Optimize the size class so that there isn't any slop at the end of the block's
-                // payload.
-                unsigned cellsPerBlock = MarkedSpace::blockPayload / sizeClass;
-                size_t possiblyBetterSizeClass = (MarkedSpace::blockPayload / cellsPerBlock) & ~(MarkedSpace::sizeStep - 1);
-                dataLogLnIf(Options::dumpSizeClasses(), "    Possibly better size class: ", possiblyBetterSizeClass);
+        size_t sizeClass =
+            WTF::roundUpToMultipleOf<MarkedSpace::sizeStep>(approximateSizeInBytes);
+        dataLogLnIf(Options::dumpSizeClasses(), "    Size class: ", sizeClass);
+        
+        // Optimize the size class so that there isn't any slop at the end of the block's
+        // payload.
+        unsigned cellsPerBlock = MarkedSpace::blockPayload / sizeClass;
+        size_t possiblyBetterSizeClass = (MarkedSpace::blockPayload / cellsPerBlock) & ~(MarkedSpace::sizeStep - 1);
+        dataLogLnIf(Options::dumpSizeClasses(), "    Possibly better size class: ", possiblyBetterSizeClass);
 
-                // The size class we just came up with is better than the other one if it reduces
-                // total wastage assuming we only allocate cells of that size.
-                size_t originalWastage = MarkedSpace::blockPayload - cellsPerBlock * sizeClass;
-                size_t newWastage = (possiblyBetterSizeClass - sizeClass) * cellsPerBlock;
-                dataLogLnIf(Options::dumpSizeClasses(), "    Original wastage: ", originalWastage, ", new wastage: ", newWastage);
-                
-                size_t betterSizeClass;
-                if (newWastage > originalWastage)
-                    betterSizeClass = sizeClass;
-                else
-                    betterSizeClass = possiblyBetterSizeClass;
-                
-                dataLogLnIf(Options::dumpSizeClasses(), "    Choosing size class: ", betterSizeClass);
-                
-                if (betterSizeClass == result->last()) {
-                    // Defense for when expStep is small.
-                    continue;
-                }
-                
-                // This is usually how we get out of the loop.
-                if (betterSizeClass > MarkedSpace::largeCutoff
-                    || betterSizeClass > Options::preciseAllocationCutoff())
-                    break;
-                
-                add(betterSizeClass);
-            }
+        // The size class we just came up with is better than the other one if it reduces
+        // total wastage assuming we only allocate cells of that size.
+        size_t originalWastage = MarkedSpace::blockPayload - cellsPerBlock * sizeClass;
+        size_t newWastage = (possiblyBetterSizeClass - sizeClass) * cellsPerBlock;
+        dataLogLnIf(Options::dumpSizeClasses(), "    Original wastage: ", originalWastage, ", new wastage: ", newWastage);
+        
+        size_t betterSizeClass;
+        if (newWastage > originalWastage)
+            betterSizeClass = sizeClass;
+        else
+            betterSizeClass = possiblyBetterSizeClass;
+        
+        dataLogLnIf(Options::dumpSizeClasses(), "    Choosing size class: ", betterSizeClass);
+        
+        if (betterSizeClass == result.last()) {
+            // Defense for when expStep is small.
+            continue;
+        }
+        
+        // This is usually how we get out of the loop.
+        if (betterSizeClass > MarkedSpace::largeCutoff
+            || betterSizeClass > Options::preciseAllocationCutoff())
+            break;
+        
+        add(betterSizeClass);
+    }
 
-            // Manually inject size classes for objects we know will be allocated in high volume.
-            // FIXME: All of these things should have IsoSubspaces.
-            // https://bugs.webkit.org/show_bug.cgi?id=179876
-            add(256);
+    // Manually inject size classes for objects we know will be allocated in high volume.
+    // FIXME: All of these things should have IsoSubspaces.
+    // https://bugs.webkit.org/show_bug.cgi?id=179876
+    add(256);
 
-            {
-                // Sort and deduplicate.
-                std::sort(result->begin(), result->end());
-                auto it = std::unique(result->begin(), result->end());
-                result->shrinkCapacity(it - result->begin());
-            }
+    {
+        // Sort and deduplicate.
+        std::sort(result.begin(), result.end());
+        auto it = std::unique(result.begin(), result.end());
+        result.shrinkCapacity(it - result.begin());
+    }
 
-            dataLogLnIf(Options::dumpSizeClasses(), "JSC Heap MarkedSpace size class dump: ", listDump(*result));
+    dataLogLnIf(Options::dumpSizeClasses(), "JSC Heap MarkedSpace size class dump: ", listDump(result));
 
-            // We have an optimiation in MarkedSpace::optimalSizeFor() that assumes things about
-            // the size class table. This checks our results against that function's assumptions.
-            for (size_t size = MarkedSpace::sizeStep, i = 0; size <= MarkedSpace::preciseCutoff; size += MarkedSpace::sizeStep, i++)
-                RELEASE_ASSERT(result->at(i) == size);
-        });
-    return *result;
+    // We have an optimization in MarkedSpace::optimalSizeFor() that assumes things about
+    // the size class table. This checks our results against that function's assumptions.
+    for (size_t size = MarkedSpace::sizeStep, i = 0; size <= MarkedSpace::preciseCutoff; size += MarkedSpace::sizeStep, i++)
+        RELEASE_ASSERT(result.at(i) == size);
+
+    return result;
 }
 
 template<typename TableType, typename SizeClassCons, typename DefaultCons>
@@ -171,9 +166,11 @@
             buildSizeClassTable(
                 s_sizeClassForSizeStep,
                 [&] (size_t sizeClass) -> size_t {
+                    RELEASE_ASSERT(sizeClass <= UINT32_MAX);
                     return sizeClass;
                 },
                 [&] (size_t sizeClass) -> size_t {
+                    RELEASE_ASSERT(sizeClass <= UINT32_MAX);
                     return sizeClass;
                 });
         });

Modified: trunk/Source/_javascript_Core/heap/MarkedSpace.h (262785 => 262786)


--- trunk/Source/_javascript_Core/heap/MarkedSpace.h	2020-06-09 09:13:43 UTC (rev 262785)
+++ trunk/Source/_javascript_Core/heap/MarkedSpace.h	2020-06-09 09:20:44 UTC (rev 262786)
@@ -65,6 +65,7 @@
     // ensures that we only use the size class approach if it means being able to pack two things
     // into one block.
     static constexpr size_t largeCutoff = (blockPayload / 2) & ~(sizeStep - 1);
+    static_assert(largeCutoff <= UINT32_MAX);
 
     // We have an extra size class for size zero.
     static constexpr size_t numSizeClasses = largeCutoff / sizeStep + 1;
@@ -175,7 +176,7 @@
     
     void dumpBits(PrintStream& = WTF::dataFile());
     
-    JS_EXPORT_PRIVATE static std::array<size_t, numSizeClasses> s_sizeClassForSizeStep;
+    JS_EXPORT_PRIVATE static std::array<unsigned, numSizeClasses> s_sizeClassForSizeStep;
     
 private:
     friend class CompleteSubspace;

Modified: trunk/Source/_javascript_Core/heap/VisitRaceKey.cpp (262785 => 262786)


--- trunk/Source/_javascript_Core/heap/VisitRaceKey.cpp	2020-06-09 09:13:43 UTC (rev 262785)
+++ trunk/Source/_javascript_Core/heap/VisitRaceKey.cpp	2020-06-09 09:20:44 UTC (rev 262786)
@@ -28,7 +28,7 @@
 
 namespace JSC {
 
-const char* VisitRaceKey::m_deletedValueRaceName = "deleted value";
+const char* const VisitRaceKey::m_deletedValueRaceName = "deleted value";
 
 void VisitRaceKey::dump(PrintStream& out) const
 {

Modified: trunk/Source/_javascript_Core/heap/VisitRaceKey.h (262785 => 262786)


--- trunk/Source/_javascript_Core/heap/VisitRaceKey.h	2020-06-09 09:13:43 UTC (rev 262785)
+++ trunk/Source/_javascript_Core/heap/VisitRaceKey.h	2020-06-09 09:20:44 UTC (rev 262786)
@@ -79,7 +79,7 @@
     }
 
 private:
-    static const char* m_deletedValueRaceName;
+    static const char* const m_deletedValueRaceName;
     
     JSCell* m_cell { nullptr };
     const char* m_raceName { nullptr };

Modified: trunk/Source/_javascript_Core/inspector/agents/InspectorDebuggerAgent.cpp (262785 => 262786)


--- trunk/Source/_javascript_Core/inspector/agents/InspectorDebuggerAgent.cpp	2020-06-09 09:13:43 UTC (rev 262785)
+++ trunk/Source/_javascript_Core/inspector/agents/InspectorDebuggerAgent.cpp	2020-06-09 09:20:44 UTC (rev 262786)
@@ -44,7 +44,7 @@
 
 namespace Inspector {
 
-const char* InspectorDebuggerAgent::backtraceObjectGroup = "backtrace";
+const char* const InspectorDebuggerAgent::backtraceObjectGroup = "backtrace";
 
 // Objects created and retained by evaluating breakpoint actions are put into object groups
 // according to the breakpoint action identifier assigned by the frontend. A breakpoint may

Modified: trunk/Source/_javascript_Core/inspector/agents/InspectorDebuggerAgent.h (262785 => 262786)


--- trunk/Source/_javascript_Core/inspector/agents/InspectorDebuggerAgent.h	2020-06-09 09:13:43 UTC (rev 262785)
+++ trunk/Source/_javascript_Core/inspector/agents/InspectorDebuggerAgent.h	2020-06-09 09:20:44 UTC (rev 262786)
@@ -55,7 +55,7 @@
 public:
     ~InspectorDebuggerAgent() override;
 
-    static const char* backtraceObjectGroup;
+    static const char* const backtraceObjectGroup;
 
     // InspectorAgentBase
     void didCreateFrontendAndBackend(FrontendRouter*, BackendDispatcher*) final;

Modified: trunk/Source/_javascript_Core/runtime/PropertyDescriptor.cpp (262785 => 262786)


--- trunk/Source/_javascript_Core/runtime/PropertyDescriptor.cpp	2020-06-09 09:13:43 UTC (rev 262785)
+++ trunk/Source/_javascript_Core/runtime/PropertyDescriptor.cpp	2020-06-09 09:20:44 UTC (rev 262786)
@@ -31,7 +31,6 @@
 #include "JSCJSValueInlines.h"
 
 namespace JSC {
-unsigned PropertyDescriptor::defaultAttributes = PropertyAttribute::DontDelete | PropertyAttribute::DontEnum | PropertyAttribute::ReadOnly;
 
 bool PropertyDescriptor::writable() const
 {

Modified: trunk/Source/_javascript_Core/runtime/PropertyDescriptor.h (262785 => 262786)


--- trunk/Source/_javascript_Core/runtime/PropertyDescriptor.h	2020-06-09 09:13:43 UTC (rev 262785)
+++ trunk/Source/_javascript_Core/runtime/PropertyDescriptor.h	2020-06-09 09:20:44 UTC (rev 262786)
@@ -27,6 +27,7 @@
 
 #include "DefinePropertyAttributes.h"
 #include "JSCJSValue.h"
+#include "PropertySlot.h"
 
 namespace JSC {
 
@@ -82,7 +83,8 @@
     unsigned attributesOverridingCurrent(const PropertyDescriptor& current) const;
 
 private:
-    JS_EXPORT_PRIVATE static unsigned defaultAttributes;
+    static constexpr unsigned defaultAttributes = PropertyAttribute::DontDelete | PropertyAttribute::DontEnum | PropertyAttribute::ReadOnly;
+
     bool operator==(const PropertyDescriptor&) { return false; }
     enum { WritablePresent = 1, EnumerablePresent = 2, ConfigurablePresent = 4};
     // May be a getter/setter
_______________________________________________
webkit-changes mailing list
[email protected]
https://lists.webkit.org/mailman/listinfo/webkit-changes

Reply via email to