Title: [295614] trunk
Revision: 295614
Author: ysuz...@apple.com
Date: 2022-06-16 16:08:33 -0700 (Thu, 16 Jun 2022)

Log Message

[JSC] Always create StructureStubInfo for op_get_by_val
https://bugs.webkit.org/show_bug.cgi?id=241669
rdar://75146284

Reviewed by Saam Barati and Mark Lam.

DFG OSR exit requires a StructureStubInfo for getter / setter calls. However, the baseline JIT does not create a
StructureStubInfo for a very generic op_get_by_val, so OSR exit can crash because of the missing StructureStubInfo.
Consider the following edge case.

1. The baseline JIT detects that an op_get_by_val is very generic, so it does not create a StructureStubInfo.
2. The function is inlined by the DFG, and the DFG emits an IC for this GetByVal.
3. The DFG code from (2) collects information in its DFG-level IC, and in this inlined call path the access happens not to be so generic.
4. Later, due to an unrelated OSR exit or similar, we recompile the DFG code for this function with the same inlining as in (2).
5. The DFG sees that the DFG-level IC has more specialized information, so it inlines the getter call for this op_get_by_val.
6. Inside this getter, we perform an OSR exit.
7. We look up the baseline code block and find that there is no StructureStubInfo!

Now we always create a StructureStubInfo. In the very generic op_get_by_val case, we create it with tookSlowPath = true
and emit an empty inline path to record doneLocation, so OSR exit has a location to jump back to.
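
The added test, JSTests/stress/get-by-val-generic-structurestubinfo.js, exercises this shape. The following is a rough,
condensed sketch of its ingredients, not the test itself: it assumes the jsc shell helpers noInline, $vm.ftlTrue, and
OSRExit that the test relies on, plus --getByValICMaxNumberOfIdentifiers=2; loop counts and property names are illustrative.

    function foo(o, p) { return o[p]; }
    noInline(foo);

    // Step 1: with --getByValICMaxNumberOfIdentifiers=2, seeing many different
    // identifiers at this single get_by_val site makes the baseline JIT treat it
    // as very generic (before this change: no StructureStubInfo).
    let poly = { a: 1, b: 2, c: 3, d: 4, e: 5 };
    for (let i = 0; i < 1000000; ++i) {
        for (let key of ["a", "b", "c", "d", "e", "missing"])
            foo(poly, key);
    }

    // Steps 2-7: a monomorphic use of the same site. Once the DFG inlines foo and
    // then the getter, the OSR exit taken inside the getter must find a baseline
    // StructureStubInfo at the get_by_val. (The real test isolates this half in a
    // separate global via runString.)
    let mono = { get x() { if ($vm.ftlTrue()) OSRExit(); return 42; } };
    for (let i = 0; i < 1000000; ++i)
        foo(mono, "x");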

We also clean up the StructureStubInfo code:

1. Rename "start" to startLocation, and stop recording it in the DataIC case since it is not needed there.
2. Rename inlineSize to inlineCodeSize.
3. Add assertions to ensure that this path is not used in the DataIC case.
4. Record the opcode value in the crashing RELEASE_ASSERT so that we get more information if this change does not fix the issue.

* Source/JavaScriptCore/bytecode/InlineAccess.cpp:
(JSC::linkCodeInline):
(JSC::InlineAccess::generateArrayLength):
(JSC::InlineAccess::generateStringLength):
(JSC::InlineAccess::rewireStubAsJumpInAccessNotUsingInlineAccess):
(JSC::InlineAccess::rewireStubAsJumpInAccess):
(JSC::InlineAccess::resetStubAsJumpInAccess):
* Source/JavaScriptCore/bytecode/StructureStubInfo.cpp:
(JSC::StructureStubInfo::initializeFromUnlinkedStructureStubInfo):
(JSC::StructureStubInfo::initializeFromDFGUnlinkedStructureStubInfo):
* Source/JavaScriptCore/bytecode/StructureStubInfo.h:
(JSC::StructureStubInfo::inlineCodeSize const):
(JSC::StructureStubInfo::inlineSize const): Deleted.
* Source/JavaScriptCore/dfg/DFGInlineCacheWrapperInlines.h:
(JSC::DFG::InlineCacheWrapper<GeneratorType>::finalize):
* Source/JavaScriptCore/dfg/DFGJITCode.h:
* Source/JavaScriptCore/dfg/DFGOSRExitCompilerCommon.cpp:
(JSC::DFG::callerReturnPC):
* Source/JavaScriptCore/jit/JIT.cpp:
(JSC::JIT::link):
* Source/JavaScriptCore/jit/JITInlineCacheGenerator.cpp:
(JSC::JITInlineCacheGenerator::finalize):
(JSC::JITGetByValGenerator::generateEmptyPath):
* Source/JavaScriptCore/jit/JITInlineCacheGenerator.h:
* Source/JavaScriptCore/jit/JITPropertyAccess.cpp:
(JSC::JIT::emit_op_get_by_val):
* JSTests/stress/get-by-val-generic-structurestubinfo.js: Added.
(let.program):
(runMono.let.o.get x):
(runMono):
(runPoly):

Canonical link: https://commits.webkit.org/251619@main

Modified Paths

trunk/Source/JavaScriptCore/bytecode/InlineAccess.cpp
trunk/Source/JavaScriptCore/bytecode/StructureStubInfo.cpp
trunk/Source/JavaScriptCore/bytecode/StructureStubInfo.h
trunk/Source/JavaScriptCore/dfg/DFGInlineCacheWrapperInlines.h
trunk/Source/JavaScriptCore/dfg/DFGJITCode.h
trunk/Source/JavaScriptCore/dfg/DFGOSRExitCompilerCommon.cpp
trunk/Source/JavaScriptCore/jit/JIT.cpp
trunk/Source/JavaScriptCore/jit/JITInlineCacheGenerator.cpp
trunk/Source/JavaScriptCore/jit/JITInlineCacheGenerator.h
trunk/Source/JavaScriptCore/jit/JITPropertyAccess.cpp

Added Paths

trunk/JSTests/stress/get-by-val-generic-structurestubinfo.js

Diff

Added: trunk/JSTests/stress/get-by-val-generic-structurestubinfo.js (0 => 295614)


--- trunk/JSTests/stress/get-by-val-generic-structurestubinfo.js	                        (rev 0)
+++ trunk/JSTests/stress/get-by-val-generic-structurestubinfo.js	2022-06-16 23:08:33 UTC (rev 295614)
@@ -0,0 +1,55 @@
+//@ requireOptions("--getByValICMaxNumberOfIdentifiers=2")
+
+let program = `
+    function shouldBe(actual, expected) {
+        if (actual !== expected)
+            throw new Error('bad value: ' + actual);
+    }
+    noInline(shouldBe);
+
+    function foo(o, p) {
+        return o[p];
+    }
+    noInline(foo);
+
+    function runMono() {
+        let o = {
+            get x() {
+                if ($vm.ftlTrue()) OSRExit();
+                return 42;
+            }
+        };
+        for (let i = 0; i < 1000000; ++i) {
+            shouldBe(foo(o, "x"), 42);
+        }
+    }
+
+    function runPoly() {
+        let o = {
+            a: 1,
+            b: 2,
+            c: 4,
+            d: 4,
+            e: 4,
+            f: 4,
+            g: 4,
+        };
+        for (let i = 0; i < 1000000; ++i) {
+            foo(o, "a");
+            foo(o, "b");
+            foo(o, "c");
+            foo(o, "d");
+            foo(o, "e");
+            foo(o, "f");
+            foo(o, "g");
+            foo(o, "h");
+            foo(o, "i");
+        }
+    }
+`;
+
+let g1 = runString(program);
+g1.runPoly();
+
+let g2 = runString(program);
+g2.runMono();

Modified: trunk/Source/JavaScriptCore/bytecode/InlineAccess.cpp (295613 => 295614)


--- trunk/Source/JavaScriptCore/bytecode/InlineAccess.cpp	2022-06-16 21:48:09 UTC (rev 295613)
+++ trunk/Source/JavaScriptCore/bytecode/InlineAccess.cpp	2022-06-16 23:08:33 UTC (rev 295614)
@@ -152,9 +152,9 @@
 template <typename Function>
 ALWAYS_INLINE static bool linkCodeInline(const char* name, CCallHelpers& jit, StructureStubInfo& stubInfo, const Function& function)
 {
-    if (jit.m_assembler.buffer().codeSize() <= stubInfo.inlineSize()) {
+    if (jit.m_assembler.buffer().codeSize() <= stubInfo.inlineCodeSize()) {
         bool needsBranchCompaction = true;
-        LinkBuffer linkBuffer(jit, stubInfo.start, stubInfo.inlineSize(), LinkBuffer::Profile::InlineCache, JITCompilationMustSucceed, needsBranchCompaction);
+        LinkBuffer linkBuffer(jit, stubInfo.startLocation, stubInfo.inlineCodeSize(), LinkBuffer::Profile::InlineCache, JITCompilationMustSucceed, needsBranchCompaction);
         ASSERT(linkBuffer.isValid());
         function(linkBuffer);
         FINALIZE_CODE(linkBuffer, NoPtrTag, "InlineAccessType: '%s'", name);
@@ -169,7 +169,7 @@
     constexpr bool failIfCantInline = false;
     if (failIfCantInline) {
         dataLog("Failure for: ", name, "\n");
-        dataLog("real size: ", jit.m_assembler.buffer().codeSize(), " inline size:", stubInfo.inlineSize(), "\n");
+        dataLog("real size: ", jit.m_assembler.buffer().codeSize(), " inline size:", stubInfo.inlineCodeSize(), "\n");
         CRASH();
     }
 
@@ -310,6 +310,7 @@
 
 bool InlineAccess::generateArrayLength(CodeBlock* codeBlock, StructureStubInfo& stubInfo, JSArray* array)
 {
+    ASSERT_UNUSED(codeBlock, !codeBlock->useDataIC());
     ASSERT_UNUSED(codeBlock, isCacheableArrayLength(codeBlock, stubInfo, array));
 
     if (!stubInfo.hasConstantIdentifier)
@@ -348,6 +349,7 @@
 
 bool InlineAccess::generateStringLength(CodeBlock* codeBlock, StructureStubInfo& stubInfo)
 {
+    ASSERT_UNUSED(codeBlock, !codeBlock->useDataIC());
     ASSERT_UNUSED(codeBlock, isCacheableStringLength(codeBlock, stubInfo));
 
     if (!stubInfo.hasConstantIdentifier)
@@ -416,7 +418,7 @@
         return;
     }
 
-    CCallHelpers::emitJITCodeOver(stubInfo.start.retagged<JSInternalPtrTag>(), scopedLambda<void(CCallHelpers&)>([&](CCallHelpers& jit) {
+    CCallHelpers::emitJITCodeOver(stubInfo.startLocation.retagged<JSInternalPtrTag>(), scopedLambda<void(CCallHelpers&)>([&](CCallHelpers& jit) {
         // We don't need a nop sled here because nobody should be jumping into the middle of an IC.
         auto jump = jit.jump();
         jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
@@ -433,7 +435,7 @@
         return;
     }
 
-    CCallHelpers::emitJITCodeOver(stubInfo.start.retagged<JSInternalPtrTag>(), scopedLambda<void(CCallHelpers&)>([&](CCallHelpers& jit) {
+    CCallHelpers::emitJITCodeOver(stubInfo.startLocation.retagged<JSInternalPtrTag>(), scopedLambda<void(CCallHelpers&)>([&](CCallHelpers& jit) {
         // We don't need a nop sled here because nobody should be jumping into the middle of an IC.
         auto jump = jit.jump();
         jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
@@ -450,7 +452,7 @@
         return;
     }
 
-    CCallHelpers::emitJITCodeOver(stubInfo.start.retagged<JSInternalPtrTag>(), scopedLambda<void(CCallHelpers&)>([&](CCallHelpers& jit) {
+    CCallHelpers::emitJITCodeOver(stubInfo.startLocation.retagged<JSInternalPtrTag>(), scopedLambda<void(CCallHelpers&)>([&](CCallHelpers& jit) {
         // We don't need a nop sled here because nobody should be jumping into the middle of an IC.
         auto jump = jit.jump();
         auto slowPathStartLocation = stubInfo.slowPathStartLocation;

Modified: trunk/Source/JavaScriptCore/bytecode/StructureStubInfo.cpp (295613 => 295614)


--- trunk/Source/JavaScriptCore/bytecode/StructureStubInfo.cpp	2022-06-16 21:48:09 UTC (rev 295613)
+++ trunk/Source/JavaScriptCore/bytecode/StructureStubInfo.cpp	2022-06-16 23:08:33 UTC (rev 295614)
@@ -487,7 +487,6 @@
 void StructureStubInfo::initializeFromUnlinkedStructureStubInfo(const BaselineUnlinkedStructureStubInfo& unlinkedStubInfo)
 {
     accessType = unlinkedStubInfo.accessType;
-    start = unlinkedStubInfo.start;
     doneLocation = unlinkedStubInfo.doneLocation;
     slowPathStartLocation = unlinkedStubInfo.slowPathStartLocation;
     callSiteIndex = CallSiteIndex(BytecodeIndex(unlinkedStubInfo.bytecodeIndex.offset()));
@@ -494,6 +493,7 @@
     codeOrigin = CodeOrigin(unlinkedStubInfo.bytecodeIndex);
     m_codePtr = slowPathStartLocation;
     propertyIsInt32 = unlinkedStubInfo.propertyIsInt32;
+    tookSlowPath = unlinkedStubInfo.tookSlowPath;
     useDataIC = true;
 
     usedRegisters = RegisterSet::stubUnavailableRegisters();
@@ -654,7 +654,6 @@
 void StructureStubInfo::initializeFromDFGUnlinkedStructureStubInfo(const DFG::UnlinkedStructureStubInfo& unlinkedStubInfo)
 {
     accessType = unlinkedStubInfo.accessType;
-    start = unlinkedStubInfo.start;
     doneLocation = unlinkedStubInfo.doneLocation;
     slowPathStartLocation = unlinkedStubInfo.slowPathStartLocation;
     callSiteIndex = unlinkedStubInfo.callSiteIndex;
@@ -666,6 +665,7 @@
     propertyIsString = unlinkedStubInfo.propertyIsString;
     prototypeIsKnownObject = unlinkedStubInfo.prototypeIsKnownObject;
     hasConstantIdentifier = unlinkedStubInfo.hasConstantIdentifier;
+    tookSlowPath = unlinkedStubInfo.tookSlowPath;
     useDataIC = true;
 
     usedRegisters = unlinkedStubInfo.usedRegisters;

Modified: trunk/Source/JavaScriptCore/bytecode/StructureStubInfo.h (295613 => 295614)


--- trunk/Source/JavaScriptCore/bytecode/StructureStubInfo.h	2022-06-16 21:48:09 UTC (rev 295613)
+++ trunk/Source/JavaScriptCore/bytecode/StructureStubInfo.h	2022-06-16 23:08:33 UTC (rev 295614)
@@ -153,9 +153,11 @@
 
     bool containsPC(void* pc) const;
 
-    uint32_t inlineSize() const
+    uint32_t inlineCodeSize() const
     {
-        int32_t inlineSize = MacroAssembler::differenceBetweenCodePtr(start, doneLocation);
+        if (useDataIC)
+            return 0;
+        int32_t inlineSize = MacroAssembler::differenceBetweenCodePtr(startLocation, doneLocation);
         ASSERT(inlineSize >= 0);
         return inlineSize;
     }
@@ -382,7 +384,9 @@
     // That's not so bad - we'll get rid of the redundant ones once we regenerate.
     HashSet<BufferedStructure, BufferedStructure::Hash, BufferedStructure::KeyTraits> m_bufferedStructures WTF_GUARDED_BY_LOCK(m_bufferedStructuresLock);
 public:
-    CodeLocationLabel<JITStubRoutinePtrTag> start; // This is either the start of the inline IC for *byId caches. or the location of patchable jump for 'instanceof' caches.
+    // This is either the start of the inline IC for *byId caches. or the location of patchable jump for 'instanceof' caches.
+    // If useDataIC is true, then it is nullptr.
+    CodeLocationLabel<JITStubRoutinePtrTag> startLocation;
     CodeLocationLabel<JSInternalPtrTag> doneLocation;
     CodeLocationLabel<JITStubRoutinePtrTag> slowPathStartLocation;
 
@@ -480,11 +484,11 @@
     PutKind putKind { PutKind::Direct };
     PrivateFieldPutKind privateFieldPutKind { PrivateFieldPutKind::none() };
     ECMAMode ecmaMode { ECMAMode::sloppy() };
-    bool propertyIsInt32 { false };
-    bool propertyIsString { false };
-    bool propertyIsSymbol { false };
-    bool prototypeIsKnownObject { false };
-    CodeLocationLabel<JITStubRoutinePtrTag> start; // This is either the start of the inline IC for *byId caches. or the location of patchable jump for 'instanceof' caches.
+    bool propertyIsInt32 : 1 { false };
+    bool propertyIsString : 1 { false };
+    bool propertyIsSymbol : 1 { false };
+    bool prototypeIsKnownObject : 1 { false };
+    bool tookSlowPath : 1 { false };
     CodeLocationLabel<JSInternalPtrTag> doneLocation;
     CodeLocationLabel<JITStubRoutinePtrTag> slowPathStartLocation;
 };

Modified: trunk/Source/JavaScriptCore/dfg/DFGInlineCacheWrapperInlines.h (295613 => 295614)


--- trunk/Source/JavaScriptCore/dfg/DFGInlineCacheWrapperInlines.h	2022-06-16 21:48:09 UTC (rev 295613)
+++ trunk/Source/JavaScriptCore/dfg/DFGInlineCacheWrapperInlines.h	2022-06-16 23:08:33 UTC (rev 295614)
@@ -37,7 +37,6 @@
 {
     m_generator.reportSlowPathCall(m_slowPath->label(), m_slowPath->call());
     if (m_generator.m_unlinkedStubInfo) {
-        m_generator.m_unlinkedStubInfo->start = fastPath.locationOf<JITStubRoutinePtrTag>(m_generator.m_start);
         m_generator.m_unlinkedStubInfo->doneLocation = fastPath.locationOf<JSInternalPtrTag>(m_generator.m_done);
         m_generator.m_unlinkedStubInfo->slowPathStartLocation = fastPath.locationOf<JITStubRoutinePtrTag>(m_generator.m_slowPathBegin);
     } else

Modified: trunk/Source/JavaScriptCore/dfg/DFGJITCode.h (295613 => 295614)


--- trunk/Source/JavaScriptCore/dfg/DFGJITCode.h	2022-06-16 21:48:09 UTC (rev 295613)
+++ trunk/Source/JavaScriptCore/dfg/DFGJITCode.h	2022-06-16 23:08:33 UTC (rev 295614)
@@ -53,8 +53,8 @@
 
 struct UnlinkedStructureStubInfo : JSC::UnlinkedStructureStubInfo {
     CodeOrigin codeOrigin;
+    RegisterSet usedRegisters;
     CallSiteIndex callSiteIndex;
-    RegisterSet usedRegisters;
     GPRReg m_baseGPR { InvalidGPRReg };
     GPRReg m_valueGPR { InvalidGPRReg };
     GPRReg m_extraGPR { InvalidGPRReg };

Modified: trunk/Source/JavaScriptCore/dfg/DFGOSRExitCompilerCommon.cpp (295613 => 295614)


--- trunk/Source/JavaScriptCore/dfg/DFGOSRExitCompilerCommon.cpp	2022-06-16 21:48:09 UTC (rev 295613)
+++ trunk/Source/JavaScriptCore/dfg/DFGOSRExitCompilerCommon.cpp	2022-06-16 23:08:33 UTC (rev 295614)
@@ -214,7 +214,7 @@
         case InlineCallFrame::GetterCall:
         case InlineCallFrame::SetterCall: {
             StructureStubInfo* stubInfo = baselineCodeBlockForCaller->findStubInfo(CodeOrigin(callBytecodeIndex));
-            RELEASE_ASSERT(stubInfo);
+            RELEASE_ASSERT(stubInfo, callInstruction.opcodeID());
             jumpTarget = stubInfo->doneLocation.retagged<JSEntryPtrTag>();
             break;
         }

Modified: trunk/Source/JavaScriptCore/jit/JIT.cpp (295613 => 295614)


--- trunk/Source/JavaScriptCore/jit/JIT.cpp	2022-06-16 21:48:09 UTC (rev 295613)
+++ trunk/Source/JavaScriptCore/jit/JIT.cpp	2022-06-16 23:08:33 UTC (rev 295614)
@@ -946,7 +946,6 @@
 
     auto finalizeICs = [&] (auto& generators) {
         for (auto& gen : generators) {
-            gen.m_unlinkedStubInfo->start = patchBuffer.locationOf<JITStubRoutinePtrTag>(gen.m_start);
             gen.m_unlinkedStubInfo->doneLocation = patchBuffer.locationOf<JSInternalPtrTag>(gen.m_done);
             gen.m_unlinkedStubInfo->slowPathStartLocation = patchBuffer.locationOf<JITStubRoutinePtrTag>(gen.m_slowPathBegin);
         }

Modified: trunk/Source/JavaScriptCore/jit/JITInlineCacheGenerator.cpp (295613 => 295614)


--- trunk/Source/JavaScriptCore/jit/JITInlineCacheGenerator.cpp	2022-06-16 21:48:09 UTC (rev 295613)
+++ trunk/Source/JavaScriptCore/jit/JITInlineCacheGenerator.cpp	2022-06-16 23:08:33 UTC (rev 295614)
@@ -63,7 +63,7 @@
     LinkBuffer& fastPath, LinkBuffer& slowPath, CodeLocationLabel<JITStubRoutinePtrTag> start)
 {
     ASSERT(m_stubInfo);
-    m_stubInfo->start = start;
+    m_stubInfo->startLocation = start;
     m_stubInfo->doneLocation = fastPath.locationOf<JSInternalPtrTag>(m_done);
 
     if (!m_stubInfo->useDataIC)
@@ -534,6 +534,12 @@
     m_done = jit.label();
 }
 
+void JITGetByValGenerator::generateEmptyPath(CCallHelpers& jit)
+{
+    m_start = jit.label();
+    m_done = jit.label();
+}
+
 void JITGetByValGenerator::finalize(LinkBuffer& fastPath, LinkBuffer& slowPath)
 {
     ASSERT(m_stubInfo);

Modified: trunk/Source/JavaScriptCore/jit/JITInlineCacheGenerator.h (295613 => 295614)


--- trunk/Source/JavaScriptCore/jit/JITInlineCacheGenerator.h	2022-06-16 21:48:09 UTC (rev 295613)
+++ trunk/Source/JavaScriptCore/jit/JITInlineCacheGenerator.h	2022-06-16 23:08:33 UTC (rev 295614)
@@ -558,6 +558,8 @@
     
     void generateFastPath(CCallHelpers&);
 
+    void generateEmptyPath(CCallHelpers&);
+
     template<typename StubInfo>
     static void setUpStubInfo(StubInfo& stubInfo,
         AccessType accessType, CodeOrigin codeOrigin, CallSiteIndex callSiteIndex, const RegisterSet& usedRegisters,

Modified: trunk/Source/JavaScriptCore/jit/JITPropertyAccess.cpp (295613 => 295614)


--- trunk/Source/JavaScriptCore/jit/JITPropertyAccess.cpp	2022-06-16 21:48:09 UTC (rev 295613)
+++ trunk/Source/JavaScriptCore/jit/JITPropertyAccess.cpp	2022-06-16 23:08:33 UTC (rev 295614)
@@ -62,41 +62,44 @@
     emitGetVirtualRegister(base, baseJSR);
     emitGetVirtualRegister(property, propertyJSR);
 
+    auto [ stubInfo, stubInfoIndex ] = addUnlinkedStructureStubInfo();
+    JITGetByValGenerator gen(
+        nullptr, stubInfo, JITType::BaselineJIT, CodeOrigin(m_bytecodeIndex), CallSiteIndex(m_bytecodeIndex), AccessType::GetByVal, RegisterSet::stubUnavailableRegisters(),
+        baseJSR, propertyJSR, resultJSR, stubInfoGPR);
+    if (isOperandConstantInt(property))
+        stubInfo->propertyIsInt32 = true;
+    gen.m_unlinkedStubInfoConstantIndex = stubInfoIndex;
+
     if (bytecode.metadata(m_profiledCodeBlock).m_seenIdentifiers.count() > Options::getByValICMaxNumberOfIdentifiers()) {
+        stubInfo->tookSlowPath = true;
+
         auto notCell = branchIfNotCell(baseJSR);
         emitArrayProfilingSiteWithCell(bytecode, baseJSR.payloadGPR(), scratchGPR);
         notCell.link(this);
         loadGlobalObject(scratchGPR);
-        callOperationWithProfile(bytecode, operationGetByVal, dst, scratchGPR, baseJSR, propertyJSR);
+        callOperationWithResult(operationGetByVal, resultJSR, scratchGPR, baseJSR, propertyJSR);
+
+        gen.generateEmptyPath(*this);
     } else {
         emitJumpSlowCaseIfNotJSCell(baseJSR, base);
         emitArrayProfilingSiteWithCell(bytecode, baseJSR.payloadGPR(), scratchGPR);
 
-        auto [ stubInfo, stubInfoIndex ] = addUnlinkedStructureStubInfo();
-        JITGetByValGenerator gen(
-            nullptr, stubInfo, JITType::BaselineJIT, CodeOrigin(m_bytecodeIndex), CallSiteIndex(m_bytecodeIndex), AccessType::GetByVal, RegisterSet::stubUnavailableRegisters(),
-            baseJSR, propertyJSR, resultJSR, stubInfoGPR);
-        if (isOperandConstantInt(property))
-            stubInfo->propertyIsInt32 = true;
-        gen.m_unlinkedStubInfoConstantIndex = stubInfoIndex;
-
         gen.generateBaselineDataICFastPath(*this, stubInfoIndex, stubInfoGPR);
-        resetSP(); // We might OSR exit here, so we need to conservatively reset SP
+    }
 
-        addSlowCase();
-        m_getByVals.append(gen);
+    addSlowCase();
+    m_getByVals.append(gen);
 
-        setFastPathResumePoint();
-        emitValueProfilingSite(bytecode, resultJSR);
-        emitPutVirtualRegister(dst, resultJSR);
-    }
+    resetSP(); // We might OSR exit here, so we need to conservatively reset SP
+    setFastPathResumePoint();
+    emitValueProfilingSite(bytecode, resultJSR);
+    emitPutVirtualRegister(dst, resultJSR);
 }
 
 template<typename OpcodeType>
 void JIT::generateGetByValSlowCase(const OpcodeType& bytecode, Vector<SlowCaseEntry>::iterator& iter)
 {
-    if (!hasAnySlowCases(iter))
-        return;
+    ASSERT(hasAnySlowCases(iter));
 
     uint32_t bytecodeOffset = m_bytecodeIndex.offset();
     ASSERT(BytecodeIndex(bytecodeOffset) == m_bytecodeIndex);
@@ -109,11 +112,13 @@
     Label coldPathBegin = label();
     linkAllSlowCases(iter);
 
-    move(TrustedImm32(bytecodeOffset), bytecodeOffsetGPR);
-    loadConstant(gen.m_unlinkedStubInfoConstantIndex, stubInfoGPR);
-    materializePointerIntoMetadata(bytecode, OpcodeType::Metadata::offsetOfArrayProfile(), profileGPR);
+    if (!gen.m_unlinkedStubInfo->tookSlowPath) {
+        move(TrustedImm32(bytecodeOffset), bytecodeOffsetGPR);
+        loadConstant(gen.m_unlinkedStubInfoConstantIndex, stubInfoGPR);
+        materializePointerIntoMetadata(bytecode, OpcodeType::Metadata::offsetOfArrayProfile(), profileGPR);
 
-    emitNakedNearCall(vm().getCTIStub(slow_op_get_by_val_callSlowOperationThenCheckExceptionGenerator).retaggedCode<NoPtrTag>());
+        emitNakedNearCall(vm().getCTIStub(slow_op_get_by_val_callSlowOperationThenCheckExceptionGenerator).retaggedCode<NoPtrTag>());
+    }
 
     gen.reportSlowPathCall(coldPathBegin, Call());
 }