Title: [255987] trunk/Source/JavaScriptCore
Revision: 255987
Author: [email protected]
Date: 2020-02-06 15:25:01 -0800 (Thu, 06 Feb 2020)

Log Message

[JSC] CodeBlock::shrinkToFit should shrink m_constantRegisters and m_constantsSourceCodeRepresentation in 64bit architectures
https://bugs.webkit.org/show_bug.cgi?id=207356

Reviewed by Mark Lam.

Only the 32-bit baseline JIT embeds the addresses of m_constantRegisters entries directly into generated code; 64-bit architectures never rely on those addresses.
This patch changes CodeBlock::shrinkToFit so that, on 64-bit architectures, m_constantRegisters and m_constantsSourceCodeRepresentation
are shrunk regardless of whether the mode is EarlyShrink or LateShrink. We also move the DFG/FTL LateShrink call to after the DFGCommon reallyAdd
calls, since they can add more constant registers.
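
A condensed sketch of the resulting CodeBlock::shrinkToFit() shape described above (an illustration of the intent only; the authoritative change is in the CodeBlock.cpp hunk below):

    void CodeBlock::shrinkToFit(const ConcurrentJSLocker&, ShrinkMode shrinkMode)
    {
    #if USE(JSVALUE32_64)
        // The 32-bit baseline JIT bakes the addresses of m_constantRegisters
        // entries into machine code, so the vector may only be shrunk before
        // that code exists (EarlyShrink).
        if (shrinkMode == ShrinkMode::EarlyShrink)
            m_constantRegisters.shrinkToFit();
    #else
        // 64-bit code generation never captures these addresses, so shrinking
        // is safe for both EarlyShrink and LateShrink.
        m_constantRegisters.shrinkToFit();
    #endif
        m_constantsSourceCodeRepresentation.shrinkToFit();
        // ... remaining EarlyShrink-only work (jump tables, etc.) is unchanged.
    }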

* bytecode/CodeBlock.cpp:
(JSC::CodeBlock::shrinkToFit):
* bytecode/CodeBlock.h:
* dfg/DFGJITCompiler.cpp:
(JSC::DFG::JITCompiler::compile):
(JSC::DFG::JITCompiler::compileFunction):
* dfg/DFGJITFinalizer.cpp:
(JSC::DFG::JITFinalizer::finalizeCommon):
* dfg/DFGPlan.cpp:
(JSC::DFG::Plan::compileInThreadImpl):
(JSC::DFG::Plan::finalizeWithoutNotifyingCallback):
* jit/JIT.cpp:
(JSC::JIT::link):
* jit/JIT.h:
* jit/JITInlines.h:
(JSC::JIT::emitLoadDouble):
(JSC::JIT::emitLoadInt32ToDouble): Deleted.

Modified Paths

trunk/Source/JavaScriptCore/ChangeLog
trunk/Source/JavaScriptCore/bytecode/CodeBlock.cpp
trunk/Source/JavaScriptCore/bytecode/CodeBlock.h
trunk/Source/JavaScriptCore/dfg/DFGJITCompiler.cpp
trunk/Source/JavaScriptCore/dfg/DFGJITFinalizer.cpp
trunk/Source/JavaScriptCore/dfg/DFGPlan.cpp
trunk/Source/JavaScriptCore/jit/JIT.cpp
trunk/Source/JavaScriptCore/jit/JIT.h
trunk/Source/JavaScriptCore/jit/JITInlines.h

Diff

Modified: trunk/Source/JavaScriptCore/ChangeLog (255986 => 255987)


--- trunk/Source/JavaScriptCore/ChangeLog	2020-02-06 23:17:11 UTC (rev 255986)
+++ trunk/Source/JavaScriptCore/ChangeLog	2020-02-06 23:25:01 UTC (rev 255987)
@@ -1,3 +1,33 @@
+2020-02-06  Yusuke Suzuki  <[email protected]>
+
+        [JSC] CodeBlock::shrinkToFit should shrink m_constantRegisters and m_constantsSourceCodeRepresentation in 64bit architectures
+        https://bugs.webkit.org/show_bug.cgi?id=207356
+
+        Reviewed by Mark Lam.
+
+        Only the 32-bit baseline JIT embeds the addresses of m_constantRegisters entries directly into generated code; 64-bit architectures never rely on those addresses.
+        This patch changes CodeBlock::shrinkToFit so that, on 64-bit architectures, m_constantRegisters and m_constantsSourceCodeRepresentation
+        are shrunk regardless of whether the mode is EarlyShrink or LateShrink. We also move the DFG/FTL LateShrink call to after the DFGCommon reallyAdd
+        calls, since they can add more constant registers.
+
+        * bytecode/CodeBlock.cpp:
+        (JSC::CodeBlock::shrinkToFit):
+        * bytecode/CodeBlock.h:
+        * dfg/DFGJITCompiler.cpp:
+        (JSC::DFG::JITCompiler::compile):
+        (JSC::DFG::JITCompiler::compileFunction):
+        * dfg/DFGJITFinalizer.cpp:
+        (JSC::DFG::JITFinalizer::finalizeCommon):
+        * dfg/DFGPlan.cpp:
+        (JSC::DFG::Plan::compileInThreadImpl):
+        (JSC::DFG::Plan::finalizeWithoutNotifyingCallback):
+        * jit/JIT.cpp:
+        (JSC::JIT::link):
+        * jit/JIT.h:
+        * jit/JITInlines.h:
+        (JSC::JIT::emitLoadDouble):
+        (JSC::JIT::emitLoadInt32ToDouble): Deleted.
+
 2020-02-05  Don Olmstead  <[email protected]>
 
         [PlayStation] Build a shared JavaScriptCore

Modified: trunk/Source/JavaScriptCore/bytecode/CodeBlock.cpp (255986 => 255987)


--- trunk/Source/JavaScriptCore/bytecode/CodeBlock.cpp	2020-02-06 23:17:11 UTC (rev 255986)
+++ trunk/Source/JavaScriptCore/bytecode/CodeBlock.cpp	2020-02-06 23:25:01 UTC (rev 255987)
@@ -1953,14 +1953,20 @@
     return false;
 }
 
-void CodeBlock::shrinkToFit(ShrinkMode shrinkMode)
+void CodeBlock::shrinkToFit(const ConcurrentJSLocker&, ShrinkMode shrinkMode)
 {
     ConcurrentJSLocker locker(m_lock);
 
-    if (shrinkMode == EarlyShrink) {
+#if USE(JSVALUE32_64)
+    // Only 32bit Baseline JIT is touching m_constantRegisters address directly.
+    if (shrinkMode == ShrinkMode::EarlyShrink)
         m_constantRegisters.shrinkToFit();
-        m_constantsSourceCodeRepresentation.shrinkToFit();
-        
+#else
+    m_constantRegisters.shrinkToFit();
+#endif
+    m_constantsSourceCodeRepresentation.shrinkToFit();
+
+    if (shrinkMode == ShrinkMode::EarlyShrink) {
         if (m_rareData) {
             m_rareData->m_switchJumpTables.shrinkToFit();
             m_rareData->m_stringSwitchJumpTables.shrinkToFit();

Modified: trunk/Source/JavaScriptCore/bytecode/CodeBlock.h (255986 => 255987)


--- trunk/Source/JavaScriptCore/bytecode/CodeBlock.h	2020-02-06 23:17:11 UTC (rev 255986)
+++ trunk/Source/JavaScriptCore/bytecode/CodeBlock.h	2020-02-06 23:25:01 UTC (rev 255987)
@@ -635,7 +635,7 @@
 
     DirectEvalCodeCache& directEvalCodeCache() { createRareDataIfNecessary(); return m_rareData->m_directEvalCodeCache; }
 
-    enum ShrinkMode {
+    enum class ShrinkMode {
         // Shrink prior to generating machine code that may point directly into vectors.
         EarlyShrink,
 
@@ -642,9 +642,9 @@
         // Shrink after generating machine code, and after possibly creating new vectors
         // and appending to others. At this time it is not safe to shrink certain vectors
         // because we would have generated machine code that references them directly.
-        LateShrink
+        LateShrink,
     };
-    void shrinkToFit(ShrinkMode);
+    void shrinkToFit(const ConcurrentJSLocker&, ShrinkMode);
 
     // Functions for controlling when JITting kicks in, in a mixed mode
     // execution world.

Modified: trunk/Source/JavaScriptCore/dfg/DFGJITCompiler.cpp (255986 => 255987)


--- trunk/Source/JavaScriptCore/dfg/DFGJITCompiler.cpp	2020-02-06 23:17:11 UTC (rev 255986)
+++ trunk/Source/JavaScriptCore/dfg/DFGJITCompiler.cpp	2020-02-06 23:25:01 UTC (rev 255987)
@@ -392,8 +392,6 @@
     link(*linkBuffer);
     m_speculative->linkOSREntries(*linkBuffer);
 
-    codeBlock()->shrinkToFit(CodeBlock::LateShrink);
-
     disassemble(*linkBuffer);
 
     m_graph.m_plan.setFinalizer(makeUnique<JITFinalizer>(
@@ -493,8 +491,6 @@
     link(*linkBuffer);
     m_speculative->linkOSREntries(*linkBuffer);
     
-    codeBlock()->shrinkToFit(CodeBlock::LateShrink);
-
     if (requiresArityFixup)
         linkBuffer->link(callArityFixup, FunctionPtr<JITThunkPtrTag>(vm().getCTIStub(arityFixupGenerator).code()));
 

Modified: trunk/Source/JavaScriptCore/dfg/DFGJITFinalizer.cpp (255986 => 255987)


--- trunk/Source/JavaScriptCore/dfg/DFGJITFinalizer.cpp	2020-02-06 23:17:11 UTC (rev 255986)
+++ trunk/Source/JavaScriptCore/dfg/DFGJITFinalizer.cpp	2020-02-06 23:25:01 UTC (rev 255987)
@@ -83,13 +83,6 @@
 {
     CodeBlock* codeBlock = m_plan.codeBlock();
 
-    // Some JIT finalizers may have added more constants. Shrink-to-fit those things now.
-    {
-        ConcurrentJSLocker locker(codeBlock->m_lock);
-        codeBlock->constants().shrinkToFit();
-        codeBlock->constantsSourceCodeRepresentation().shrinkToFit();
-    }
-
 #if ENABLE(FTL_JIT)
     m_jitCode->optimizeAfterWarmUp(codeBlock);
 #endif // ENABLE(FTL_JIT)

Modified: trunk/Source/JavaScriptCore/dfg/DFGPlan.cpp (255986 => 255987)


--- trunk/Source/JavaScriptCore/dfg/DFGPlan.cpp	2020-02-06 23:17:11 UTC (rev 255986)
+++ trunk/Source/JavaScriptCore/dfg/DFGPlan.cpp	2020-02-06 23:25:01 UTC (rev 255987)
@@ -279,7 +279,10 @@
     // in the CodeBlock. This is a good time to perform an early shrink, which is more
     // powerful than a late one. It's safe to do so because we haven't generated any code
     // that references any of the tables directly, yet.
-    m_codeBlock->shrinkToFit(CodeBlock::EarlyShrink);
+    {
+        ConcurrentJSLocker locker(m_codeBlock->m_lock);
+        m_codeBlock->shrinkToFit(locker, CodeBlock::ShrinkMode::EarlyShrink);
+    }
 
     if (validationEnabled())
         validate(dfg);
@@ -617,6 +620,7 @@
         {
             ConcurrentJSLocker locker(m_codeBlock->m_lock);
             m_codeBlock->jitCode()->shrinkToFit(locker);
+            m_codeBlock->shrinkToFit(locker, CodeBlock::ShrinkMode::LateShrink);
         }
 
         if (validationEnabled()) {

Modified: trunk/Source/JavaScriptCore/jit/JIT.cpp (255986 => 255987)


--- trunk/Source/JavaScriptCore/jit/JIT.cpp	2020-02-06 23:17:11 UTC (rev 255986)
+++ trunk/Source/JavaScriptCore/jit/JIT.cpp	2020-02-06 23:25:01 UTC (rev 255987)
@@ -943,7 +943,10 @@
         static_cast<double>(result.size()) /
         static_cast<double>(m_codeBlock->instructionsSize()));
 
-    m_codeBlock->shrinkToFit(CodeBlock::LateShrink);
+    {
+        ConcurrentJSLocker locker(m_codeBlock->m_lock);
+        m_codeBlock->shrinkToFit(locker, CodeBlock::ShrinkMode::LateShrink);
+    }
     m_codeBlock->setJITCode(
         adoptRef(*new DirectJITCode(result, withArityCheck, JITType::BaselineJIT)));
 

Modified: trunk/Source/JavaScriptCore/jit/JIT.h (255986 => 255987)


--- trunk/Source/JavaScriptCore/jit/JIT.h	2020-02-06 23:17:11 UTC (rev 255986)
+++ trunk/Source/JavaScriptCore/jit/JIT.h	2020-02-06 23:25:01 UTC (rev 255987)
@@ -337,9 +337,6 @@
         void compileOpEqJumpSlow(Vector<SlowCaseEntry>::iterator&, CompileOpEqType, int jumpTarget);
         bool isOperandConstantDouble(VirtualRegister);
         
-        void emitLoadDouble(VirtualRegister, FPRegisterID value);
-        void emitLoadInt32ToDouble(VirtualRegister, FPRegisterID value);
-
         enum WriteBarrierMode { UnconditionalWriteBarrier, ShouldFilterBase, ShouldFilterValue, ShouldFilterBaseAndValue };
         // value register in write barrier is used before any scratch registers
         // so may safely be the same as either of the scratch registers.
@@ -417,6 +414,7 @@
 #if USE(JSVALUE32_64)
         bool getOperandConstantInt(VirtualRegister op1, VirtualRegister op2, VirtualRegister& op, int32_t& constant);
 
+        void emitLoadDouble(VirtualRegister, FPRegisterID value);
         void emitLoadTag(VirtualRegister, RegisterID tag);
         void emitLoadPayload(VirtualRegister, RegisterID payload);
 

Modified: trunk/Source/JavaScriptCore/jit/JITInlines.h (255986 => 255987)


--- trunk/Source/JavaScriptCore/jit/JITInlines.h	2020-02-06 23:17:11 UTC (rev 255986)
+++ trunk/Source/JavaScriptCore/jit/JITInlines.h	2020-02-06 23:25:01 UTC (rev 255987)
@@ -377,6 +377,15 @@
 
 #if USE(JSVALUE32_64)
 
+inline void JIT::emitLoadDouble(VirtualRegister reg, FPRegisterID value)
+{
+    if (reg.isConstant()) {
+        WriteBarrier<Unknown>& inConstantPool = m_codeBlock->constantRegister(reg);
+        loadDouble(TrustedImmPtr(&inConstantPool), value);
+    } else
+        loadDouble(addressFor(reg), value);
+}
+
 inline void JIT::emitLoadTag(VirtualRegister reg, RegisterID tag)
 {
     if (reg.isConstant()) {
@@ -440,25 +449,6 @@
     emitLoad(reg1, tag1, payload1);
 }
 
-inline void JIT::emitLoadDouble(VirtualRegister reg, FPRegisterID value)
-{
-    if (reg.isConstant()) {
-        WriteBarrier<Unknown>& inConstantPool = m_codeBlock->constantRegister(reg);
-        loadDouble(TrustedImmPtr(&inConstantPool), value);
-    } else
-        loadDouble(addressFor(reg), value);
-}
-
-inline void JIT::emitLoadInt32ToDouble(VirtualRegister reg, FPRegisterID value)
-{
-    if (reg.isConstant()) {
-        WriteBarrier<Unknown>& inConstantPool = m_codeBlock->constantRegister(reg);
-        char* bytePointer = reinterpret_cast<char*>(&inConstantPool);
-        convertInt32ToDouble(AbsoluteAddress(bytePointer + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), value);
-    } else
-        convertInt32ToDouble(payloadFor(reg), value);
-}
-
 inline void JIT::emitStore(VirtualRegister reg, RegisterID tag, RegisterID payload, RegisterID base)
 {
     store32(payload, payloadFor(reg, base));
@@ -614,24 +604,6 @@
         emitJumpSlowCaseIfNotJSCell(reg);
 }
 
-inline void JIT::emitLoadDouble(VirtualRegister reg, FPRegisterID value)
-{
-    if (reg.isConstant()) {
-        WriteBarrier<Unknown>& inConstantPool = m_codeBlock->constantRegister(reg);
-        loadDouble(TrustedImmPtr(&inConstantPool), value);
-    } else
-        loadDouble(addressFor(reg), value);
-}
-
-inline void JIT::emitLoadInt32ToDouble(VirtualRegister reg, FPRegisterID value)
-{
-    if (reg.isConstant()) {
-        ASSERT(isOperandConstantInt(reg));
-        convertInt32ToDouble(Imm32(getConstantOperand(reg).asInt32()), value);
-    } else
-        convertInt32ToDouble(addressFor(reg), value);
-}
-
 ALWAYS_INLINE JIT::PatchableJump JIT::emitPatchableJumpIfNotInt(RegisterID reg)
 {
     return patchableBranch64(Below, reg, numberTagRegister);
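
As background on why the 32-bit path must not shrink m_constantRegisters after code generation: emitLoadDouble() above embeds an absolute pointer to the constant-pool slot via TrustedImmPtr(&inConstantPool), and shrinking the backing vector can reallocate it, leaving that pointer dangling. A minimal standalone C++ sketch of the hazard (not WebKit code; std::vector stands in for WTF::Vector here):

    #include <vector>

    int main()
    {
        std::vector<double> constants;
        constants.reserve(64);               // over-allocated, like a freshly built constant pool
        constants.push_back(3.14);

        const double* baked = &constants[0]; // stands in for an address baked into machine code

        constants.shrink_to_fit();           // may reallocate and move the elements

        // 'baked' may now dangle; using it would be undefined behavior. This is
        // why the 32-bit build shrinks m_constantRegisters only during EarlyShrink,
        // before any generated code has captured element addresses.
        (void)baked;
        return 0;
    }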