Diff
Modified: trunk/Source/_javascript_Core/ChangeLog (255540 => 255541)
--- trunk/Source/_javascript_Core/ChangeLog 2020-02-01 03:36:42 UTC (rev 255540)
+++ trunk/Source/_javascript_Core/ChangeLog 2020-02-01 05:22:47 UTC (rev 255541)
@@ -1,5 +1,35 @@
2020-01-31 Yusuke Suzuki <[email protected]>
+ [JSC] ShrinkToFit some vectors kept by JIT data structures
+ https://bugs.webkit.org/show_bug.cgi?id=207085
+
+ Reviewed by Mark Lam.
+
+ 1. We are allocating RareCaseProfile by using SegmentedVector since JIT code directly accesses RareCaseProfile*. But when creating RareCaseProfiles, we can know
+ how many RareCaseProfiles we should create: a RareCaseProfile is created per slow path of Baseline JIT bytecode. Since we already scan the bytecode for the main paths,
+ we can count them and use this number when creating the RareCaseProfiles.
+ 2. Vectors held by PolymorphicAccess and PolymorphicCallStubRoutine should be kept small by calling shrinkToFit.
+
+ * bytecode/CodeBlock.cpp:
+ (JSC::CodeBlock::setRareCaseProfiles):
+ (JSC::CodeBlock::shrinkToFit):
+ (JSC::CodeBlock::addRareCaseProfile): Deleted.
+ * bytecode/CodeBlock.h:
+ * bytecode/PolyProtoAccessChain.cpp:
+ (JSC::PolyProtoAccessChain::create):
+ * bytecode/PolymorphicAccess.cpp:
+ (JSC::PolymorphicAccess::regenerate):
+ * bytecode/ValueProfile.h:
+ (JSC::RareCaseProfile::RareCaseProfile):
+ * jit/JIT.cpp:
+ (JSC::JIT::privateCompileMainPass):
+ (JSC::JIT::privateCompileSlowCases):
+ * jit/JIT.h:
+ * jit/PolymorphicCallStubRoutine.cpp:
+ (JSC::PolymorphicCallStubRoutine::PolymorphicCallStubRoutine):
+
+2020-01-31 Yusuke Suzuki <[email protected]>
+
[JSC] DFG::CommonData::shrinkToFit called before DFG::Plan::reallyAdd is called
https://bugs.webkit.org/show_bug.cgi?id=207083
Modified: trunk/Source/_javascript_Core/bytecode/CodeBlock.cpp (255540 => 255541)
--- trunk/Source/_javascript_Core/bytecode/CodeBlock.cpp 2020-02-01 03:36:42 UTC (rev 255540)
+++ trunk/Source/_javascript_Core/bytecode/CodeBlock.cpp 2020-02-01 05:22:47 UTC (rev 255541)
@@ -1577,12 +1577,10 @@
return nullptr;
}
-RareCaseProfile* CodeBlock::addRareCaseProfile(BytecodeIndex bytecodeIndex)
+void CodeBlock::setRareCaseProfiles(RefCountedArray<RareCaseProfile>&& rareCaseProfiles)
{
ConcurrentJSLocker locker(m_lock);
- auto& jitData = ensureJITData(locker);
- jitData.m_rareCaseProfiles.append(RareCaseProfile(bytecodeIndex));
- return &jitData.m_rareCaseProfiles.last();
+ ensureJITData(locker).m_rareCaseProfiles = WTFMove(rareCaseProfiles);
}
RareCaseProfile* CodeBlock::rareCaseProfileForBytecodeIndex(const ConcurrentJSLocker&, BytecodeIndex bytecodeIndex)
@@ -1956,11 +1954,6 @@
{
ConcurrentJSLocker locker(m_lock);
-#if ENABLE(JIT)
- if (auto* jitData = m_jitData.get())
- jitData->m_rareCaseProfiles.shrinkToFit();
-#endif
-
if (shrinkMode == EarlyShrink) {
m_constantRegisters.shrinkToFit();
m_constantsSourceCodeRepresentation.shrinkToFit();
Modified: trunk/Source/_javascript_Core/bytecode/CodeBlock.h (255540 => 255541)
--- trunk/Source/_javascript_Core/bytecode/CodeBlock.h 2020-02-01 03:36:42 UTC (rev 255540)
+++ trunk/Source/_javascript_Core/bytecode/CodeBlock.h 2020-02-01 05:22:47 UTC (rev 255541)
@@ -279,7 +279,7 @@
Bag<CallLinkInfo> m_callLinkInfos;
SentinelLinkedList<CallLinkInfo, PackedRawSentinelNode<CallLinkInfo>> m_incomingCalls;
SentinelLinkedList<PolymorphicCallNode, PackedRawSentinelNode<PolymorphicCallNode>> m_incomingPolymorphicCalls;
- SegmentedVector<RareCaseProfile, 8> m_rareCaseProfiles;
+ RefCountedArray<RareCaseProfile> m_rareCaseProfiles;
std::unique_ptr<PCToCodeOriginMap> m_pcToCodeOriginMap;
std::unique_ptr<RegisterAtOffsetList> m_calleeSaveRegisters;
JITCodeMap m_jitCodeMap;
@@ -342,7 +342,7 @@
void setCalleeSaveRegisters(RegisterSet);
void setCalleeSaveRegisters(std::unique_ptr<RegisterAtOffsetList>);
- RareCaseProfile* addRareCaseProfile(BytecodeIndex);
+ void setRareCaseProfiles(RefCountedArray<RareCaseProfile>&&);
RareCaseProfile* rareCaseProfileForBytecodeIndex(const ConcurrentJSLocker&, BytecodeIndex);
unsigned rareCaseProfileCountForBytecodeIndex(const ConcurrentJSLocker&, BytecodeIndex);
Modified: trunk/Source/_javascript_Core/bytecode/PolyProtoAccessChain.cpp (255540 => 255541)
--- trunk/Source/_javascript_Core/bytecode/PolyProtoAccessChain.cpp 2020-02-01 03:36:42 UTC (rev 255540)
+++ trunk/Source/_javascript_Core/bytecode/PolyProtoAccessChain.cpp 2020-02-01 05:22:47 UTC (rev 255541)
@@ -79,6 +79,7 @@
if (!found && !!target)
return nullptr;
+ result->m_chain.shrinkToFit();
return result;
}
Modified: trunk/Source/_javascript_Core/bytecode/PolymorphicAccess.cpp (255540 => 255541)
--- trunk/Source/_javascript_Core/bytecode/PolymorphicAccess.cpp 2020-02-01 03:36:42 UTC (rev 255540)
+++ trunk/Source/_javascript_Core/bytecode/PolymorphicAccess.cpp 2020-02-01 05:22:47 UTC (rev 255541)
@@ -729,12 +729,15 @@
m_stubRoutine = createJITStubRoutine(code, vm, codeBlock, doesCalls, cellsToMark, WTFMove(state.m_callLinkInfos), codeBlockThatOwnsExceptionHandlers, callSiteIndexForExceptionHandling);
m_watchpoints = WTFMove(state.watchpoints);
- if (!state.weakReferences.isEmpty())
+ if (!state.weakReferences.isEmpty()) {
+ state.weakReferences.shrinkToFit();
m_weakReferences = makeUnique<Vector<WriteBarrier<JSCell>>>(WTFMove(state.weakReferences));
+ }
if (PolymorphicAccessInternal::verbose)
dataLog("Returning: ", code.code(), "\n");
m_list = WTFMove(cases);
+ m_list.shrinkToFit();
AccessGenerationResult::Kind resultKind;
if (m_list.size() >= Options::maxAccessVariantListSize() || generatedFinalCode)
Modified: trunk/Source/_javascript_Core/bytecode/ValueProfile.h (255540 => 255541)
--- trunk/Source/_javascript_Core/bytecode/ValueProfile.h 2020-02-01 03:36:42 UTC (rev 255540)
+++ trunk/Source/_javascript_Core/bytecode/ValueProfile.h 2020-02-01 05:22:47 UTC (rev 255541)
@@ -163,12 +163,12 @@
struct RareCaseProfile {
RareCaseProfile(BytecodeIndex bytecodeIndex)
: m_bytecodeIndex(bytecodeIndex)
- , m_counter(0)
{
}
+ RareCaseProfile() = default;
- BytecodeIndex m_bytecodeIndex;
- uint32_t m_counter;
+ BytecodeIndex m_bytecodeIndex { };
+ uint32_t m_counter { 0 };
};
inline BytecodeIndex getRareCaseProfileBytecodeIndex(RareCaseProfile* rareCaseProfile)
Modified: trunk/Source/_javascript_Core/jit/JIT.cpp (255540 => 255541)
--- trunk/Source/_javascript_Core/jit/JIT.cpp 2020-02-01 03:36:42 UTC (rev 255540)
+++ trunk/Source/_javascript_Core/jit/JIT.cpp 2020-02-01 05:22:47 UTC (rev 255541)
@@ -139,6 +139,11 @@
m_bytecodeIndex = BytecodeIndex(m_bytecodeIndex.offset() + currentInstruction->size()); \
break;
+#define NEXT_OPCODE_IN_MAIN(name) \
+ if (previousSlowCasesSize != m_slowCases.size()) \
+ ++m_bytecodeCountHavingSlowCase; \
+ NEXT_OPCODE(name)
+
#define DEFINE_SLOW_OP(name) \
case op_##name: { \
if (m_bytecodeIndex >= startBytecodeIndex) { \
@@ -145,7 +150,7 @@
JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_##name); \
slowPathCall.call(); \
} \
- NEXT_OPCODE(op_##name); \
+ NEXT_OPCODE_IN_MAIN(op_##name); \
}
#define DEFINE_OP(name) \
@@ -153,7 +158,7 @@
if (m_bytecodeIndex >= startBytecodeIndex) { \
emit_##name(currentInstruction); \
} \
- NEXT_OPCODE(name); \
+ NEXT_OPCODE_IN_MAIN(name); \
}
#define DEFINE_SLOWCASE_OP(name) \
@@ -232,7 +237,9 @@
}
}
+ m_bytecodeCountHavingSlowCase = 0;
for (m_bytecodeIndex = BytecodeIndex(0); m_bytecodeIndex.offset() < instructionCount; ) {
+ unsigned previousSlowCasesSize = m_slowCases.size();
if (m_bytecodeIndex == startBytecodeIndex && startBytecodeIndex.offset() > 0) {
// We've proven all bytecode instructions up until here are unreachable.
// Let's ensure that by crashing if it's ever hit.
@@ -496,7 +503,12 @@
m_instanceOfIndex = 0;
m_byValInstructionIndex = 0;
m_callLinkInfoIndex = 0;
+
+ RefCountedArray<RareCaseProfile> rareCaseProfiles;
+ if (shouldEmitProfiling())
+ rareCaseProfiles = RefCountedArray<RareCaseProfile>(m_bytecodeCountHavingSlowCase);
+ unsigned bytecodeCountHavingSlowCase = 0;
for (Vector<SlowCaseEntry>::iterator iter = m_slowCases.begin(); iter != m_slowCases.end();) {
m_bytecodeIndex = iter->to;
@@ -506,9 +518,9 @@
const Instruction* currentInstruction = m_codeBlock->instructions().at(m_bytecodeIndex).ptr();
- RareCaseProfile* rareCaseProfile = 0;
+ RareCaseProfile* rareCaseProfile = nullptr;
if (shouldEmitProfiling())
- rareCaseProfile = m_codeBlock->addRareCaseProfile(m_bytecodeIndex);
+ rareCaseProfile = &rareCaseProfiles.at(bytecodeCountHavingSlowCase);
if (JITInternal::verbose)
dataLogLn("Old JIT emitting slow code for ", m_bytecodeIndex, " at offset ", (long)debugOffset());
@@ -617,8 +629,10 @@
add32(TrustedImm32(1), AbsoluteAddress(&rareCaseProfile->m_counter));
emitJumpSlowToHot(jump(), 0);
+ ++bytecodeCountHavingSlowCase;
}
+ RELEASE_ASSERT(bytecodeCountHavingSlowCase == m_bytecodeCountHavingSlowCase);
RELEASE_ASSERT(m_getByIdIndex == m_getByIds.size());
RELEASE_ASSERT(m_getByIdWithThisIndex == m_getByIdsWithThis.size());
RELEASE_ASSERT(m_putByIdIndex == m_putByIds.size());
@@ -626,6 +640,9 @@
RELEASE_ASSERT(m_instanceOfIndex == m_instanceOfs.size());
RELEASE_ASSERT(m_callLinkInfoIndex == m_callCompilationInfo.size());
+ if (shouldEmitProfiling())
+ m_codeBlock->setRareCaseProfiles(WTFMove(rareCaseProfiles));
+
#ifndef NDEBUG
// Reset this, in order to guard its use with ASSERTs.
m_bytecodeIndex = BytecodeIndex();
Modified: trunk/Source/_javascript_Core/jit/JIT.h (255540 => 255541)
--- trunk/Source/_javascript_Core/jit/JIT.h 2020-02-01 03:36:42 UTC (rev 255540)
+++ trunk/Source/_javascript_Core/jit/JIT.h 2020-02-01 05:22:47 UTC (rev 255541)
@@ -939,6 +939,7 @@
unsigned m_instanceOfIndex { UINT_MAX };
unsigned m_byValInstructionIndex { UINT_MAX };
unsigned m_callLinkInfoIndex { UINT_MAX };
+ unsigned m_bytecodeCountHavingSlowCase { 0 };
Label m_arityCheck;
std::unique_ptr<LinkBuffer> m_linkBuffer;
Modified: trunk/Source/_javascript_Core/jit/PolymorphicCallStubRoutine.cpp (255540 => 255541)
--- trunk/Source/_javascript_Core/jit/PolymorphicCallStubRoutine.cpp 2020-02-01 03:36:42 UTC (rev 255540)
+++ trunk/Source/_javascript_Core/jit/PolymorphicCallStubRoutine.cpp 2020-02-01 05:22:47 UTC (rev 255541)
@@ -68,16 +68,17 @@
CallLinkInfo& info, const Vector<PolymorphicCallCase>& cases,
UniqueArray<uint32_t>&& fastCounts)
: GCAwareJITStubRoutine(codeRef, vm)
+ , m_variants(cases.size())
, m_fastCounts(WTFMove(fastCounts))
{
- for (PolymorphicCallCase callCase : cases) {
- m_variants.append(WriteBarrier<JSCell>(vm, owner, callCase.variant().rawCalleeCell()));
+ for (unsigned index = 0; index < cases.size(); ++index) {
+ const PolymorphicCallCase& callCase = cases[index];
+ m_variants[index].set(vm, owner, callCase.variant().rawCalleeCell());
if (shouldDumpDisassemblyFor(callerFrame->codeBlock()))
dataLog("Linking polymorphic call in ", FullCodeOrigin(callerFrame->codeBlock(), callerFrame->codeOrigin()), " to ", callCase.variant(), ", codeBlock = ", pointerDump(callCase.codeBlock()), "\n");
if (CodeBlock* codeBlock = callCase.codeBlock())
codeBlock->linkIncomingPolymorphicCall(callerFrame, m_callNodes.add(&info));
}
- m_variants.shrinkToFit();
WTF::storeStoreFence();
}