Title: [254934] trunk/Source/JavaScriptCore
Revision
254934
Author
[email protected]
Date
2020-01-22 12:31:33 -0800 (Wed, 22 Jan 2020)

Log Message

[JSC] Attempt to fix BytecodeIndex handling in 32bit
https://bugs.webkit.org/show_bug.cgi?id=206577

Reviewed by Keith Miller.

This patch mechanically enumerates the places that use Instruction* as a BytecodeIndex and fixes them,
since 32-bit now also uses an offset as the BytecodeIndex. This patch also fixes several
places where the LLInt PB is not handled correctly on 32-bit, now that we use the PB register
on 32-bit as well.

* bytecode/CodeBlock.cpp:
(JSC::CodeBlock::bytecodeIndexFromCallSiteIndex):
* dfg/DFGOSRExitCompilerCommon.cpp:
(JSC::DFG::reifyInlinedCallFrames):
(JSC::DFG::adjustAndJumpToTarget):
* jit/JITCall32_64.cpp:
(JSC::JIT::compileOpCall):
* jit/JITInlines.h:
(JSC::JIT::updateTopCallFrame):
* jit/JITOpcodes32_64.cpp:
(JSC::JIT::emit_op_log_shadow_chicken_tail):
* jit/JITPropertyAccess32_64.cpp:
(JSC::JIT::emit_op_get_by_val):
(JSC::JIT::emitPutByValWithCachedId):
(JSC::JIT::emit_op_try_get_by_id):
(JSC::JIT::emit_op_get_by_id_direct):
(JSC::JIT::emit_op_get_by_id):
(JSC::JIT::emit_op_get_by_id_with_this):
(JSC::JIT::emit_op_put_by_id):
(JSC::JIT::emit_op_in_by_id):
* llint/LLIntData.cpp:
(JSC::LLInt::Data::performAssertions):
* llint/LowLevelInterpreter.cpp:
(JSC::CLoop::execute):
* runtime/SamplingProfiler.cpp:
(JSC::tryGetBytecodeIndex):
(JSC::SamplingProfiler::processUnverifiedStackTraces):

Modified Paths

Diff

Modified: trunk/Source/JavaScriptCore/ChangeLog (254933 => 254934)


--- trunk/Source/JavaScriptCore/ChangeLog	2020-01-22 19:45:59 UTC (rev 254933)
+++ trunk/Source/JavaScriptCore/ChangeLog	2020-01-22 20:31:33 UTC (rev 254934)
@@ -1,3 +1,43 @@
+2020-01-22  Yusuke Suzuki  <[email protected]>
+
+        [JSC] Attempt to fix BytecodeIndex handling in 32bit
+        https://bugs.webkit.org/show_bug.cgi?id=206577
+
+        Reviewed by Keith Miller.
+
+        This patch mechanically lists up places using Instruction* as BytecodeIndex and fixes it,
+        since 32bit also starts using an offset as BytecodeIndex. This patch also fixes several
+        places where LLInt PB is not handled correctly in 32bit after we start using PB register
+        even in 32bit.
+
+        * bytecode/CodeBlock.cpp:
+        (JSC::CodeBlock::bytecodeIndexFromCallSiteIndex):
+        * dfg/DFGOSRExitCompilerCommon.cpp:
+        (JSC::DFG::reifyInlinedCallFrames):
+        (JSC::DFG::adjustAndJumpToTarget):
+        * jit/JITCall32_64.cpp:
+        (JSC::JIT::compileOpCall):
+        * jit/JITInlines.h:
+        (JSC::JIT::updateTopCallFrame):
+        * jit/JITOpcodes32_64.cpp:
+        (JSC::JIT::emit_op_log_shadow_chicken_tail):
+        * jit/JITPropertyAccess32_64.cpp:
+        (JSC::JIT::emit_op_get_by_val):
+        (JSC::JIT::emitPutByValWithCachedId):
+        (JSC::JIT::emit_op_try_get_by_id):
+        (JSC::JIT::emit_op_get_by_id_direct):
+        (JSC::JIT::emit_op_get_by_id):
+        (JSC::JIT::emit_op_get_by_id_with_this):
+        (JSC::JIT::emit_op_put_by_id):
+        (JSC::JIT::emit_op_in_by_id):
+        * llint/LLIntData.cpp:
+        (JSC::LLInt::Data::performAssertions):
+        * llint/LowLevelInterpreter.cpp:
+        (JSC::CLoop::execute):
+        * runtime/SamplingProfiler.cpp:
+        (JSC::tryGetBytecodeIndex):
+        (JSC::SamplingProfiler::processUnverifiedStackTraces):
+
 2020-01-22  Saam Barati  <[email protected]>
 
         Throw away baseline code if there is an optimized replacement

Modified: trunk/Source/JavaScriptCore/bytecode/CodeBlock.cpp (254933 => 254934)


--- trunk/Source/JavaScriptCore/bytecode/CodeBlock.cpp	2020-01-22 19:45:59 UTC (rev 254933)
+++ trunk/Source/JavaScriptCore/bytecode/CodeBlock.cpp	2020-01-22 20:31:33 UTC (rev 254934)
@@ -3305,14 +3305,9 @@
 {
     Optional<BytecodeIndex> bytecodeIndex;
     JITType jitType = this->jitType();
-    if (jitType == JITType::InterpreterThunk || jitType == JITType::BaselineJIT) {
-#if USE(JSVALUE64)
+    if (jitType == JITType::InterpreterThunk || jitType == JITType::BaselineJIT)
         bytecodeIndex = callSiteIndex.bytecodeIndex();
-#else
-        Instruction* instruction = bitwise_cast<Instruction*>(callSiteIndex.bits());
-        bytecodeIndex = this->bytecodeIndex(instruction);
-#endif
-    } else if (jitType == JITType::DFGJIT || jitType == JITType::FTLJIT) {
+    else if (jitType == JITType::DFGJIT || jitType == JITType::FTLJIT) {
 #if ENABLE(DFG_JIT)
         RELEASE_ASSERT(canGetCodeOrigin(callSiteIndex));
         CodeOrigin origin = codeOrigin(callSiteIndex);

Modified: trunk/Source/JavaScriptCore/dfg/DFGOSRExitCompilerCommon.cpp (254933 => 254934)


--- trunk/Source/JavaScriptCore/dfg/DFGOSRExitCompilerCommon.cpp	2020-01-22 19:45:59 UTC (rev 254933)
+++ trunk/Source/JavaScriptCore/dfg/DFGOSRExitCompilerCommon.cpp	2020-01-22 20:31:33 UTC (rev 254934)
@@ -307,24 +307,18 @@
         if (callerIsLLInt) {
             CodeBlock* baselineCodeBlockForCaller = jit.baselineCodeBlockFor(*trueCaller);
             jit.storePtr(CCallHelpers::TrustedImmPtr(baselineCodeBlockForCaller->metadataTable()), calleeSaveSlot(inlineCallFrame, baselineCodeBlock, LLInt::Registers::metadataTableGPR));
-#if USE(JSVALUE64)
             jit.storePtr(CCallHelpers::TrustedImmPtr(baselineCodeBlockForCaller->instructionsRawPointer()), calleeSaveSlot(inlineCallFrame, baselineCodeBlock, LLInt::Registers::pbGPR));
-#endif
         }
 
         if (!inlineCallFrame->isVarargs())
             jit.store32(AssemblyHelpers::TrustedImm32(inlineCallFrame->argumentCountIncludingThis), AssemblyHelpers::payloadFor(VirtualRegister(inlineCallFrame->stackOffset + CallFrameSlot::argumentCountIncludingThis)));
-#if USE(JSVALUE64)
         jit.storePtr(callerFrameGPR, AssemblyHelpers::addressForByteOffset(inlineCallFrame->callerFrameOffset()));
         uint32_t locationBits = CallSiteIndex(baselineCodeBlock->bytecodeIndexForExit(codeOrigin->bytecodeIndex())).bits();
         jit.store32(AssemblyHelpers::TrustedImm32(locationBits), AssemblyHelpers::tagFor(VirtualRegister(inlineCallFrame->stackOffset + CallFrameSlot::argumentCountIncludingThis)));
+#if USE(JSVALUE64)
         if (!inlineCallFrame->isClosureCall)
             jit.store64(AssemblyHelpers::TrustedImm64(JSValue::encode(JSValue(inlineCallFrame->calleeConstant()))), AssemblyHelpers::addressFor(VirtualRegister(inlineCallFrame->stackOffset + CallFrameSlot::callee)));
 #else // USE(JSVALUE64) // so this is the 32-bit part
-        jit.storePtr(callerFrameGPR, AssemblyHelpers::addressForByteOffset(inlineCallFrame->callerFrameOffset()));
-        const Instruction* instruction = baselineCodeBlock->instructions().at(codeOrigin->bytecodeIndex()).ptr();
-        uint32_t locationBits = CallSiteIndex().bits();
-        jit.store32(AssemblyHelpers::TrustedImm32(locationBits), AssemblyHelpers::tagFor(VirtualRegister(inlineCallFrame->stackOffset + CallFrameSlot::argumentCountIncludingThis)));
         jit.store32(AssemblyHelpers::TrustedImm32(JSValue::CellTag), AssemblyHelpers::tagFor(VirtualRegister(inlineCallFrame->stackOffset + CallFrameSlot::callee)));
         if (!inlineCallFrame->isClosureCall)
             jit.storePtr(AssemblyHelpers::TrustedImmPtr(inlineCallFrame->calleeConstant()), AssemblyHelpers::payloadFor(VirtualRegister(inlineCallFrame->stackOffset + CallFrameSlot::callee)));
@@ -333,13 +327,7 @@
 
     // Don't need to set the toplevel code origin if we only did inline tail calls
     if (codeOrigin) {
-#if USE(JSVALUE64)
         uint32_t locationBits = CallSiteIndex(BytecodeIndex(codeOrigin->bytecodeIndex().offset())).bits();
-#else
-        auto bytecodeIndex = jit.baselineCodeBlock()->bytecodeIndexForExit(codeOrigin->bytecodeIndex());
-        const Instruction* instruction = jit.baselineCodeBlock()->instructions(bytecodeIndex).at().ptr();
-        uint32_t locationBits = CallSiteIndex(bitwise_cast<uint32_t>(instruction)).bits();
-#endif
         jit.store32(AssemblyHelpers::TrustedImm32(locationBits), AssemblyHelpers::tagFor(CallFrameSlot::argumentCountIncludingThis));
     }
 }
@@ -407,12 +395,8 @@
         }
 
         jit.move(CCallHelpers::TrustedImmPtr(codeBlockForExit->metadataTable()), LLInt::Registers::metadataTableGPR);
-#if USE(JSVALUE64)
         jit.move(CCallHelpers::TrustedImmPtr(codeBlockForExit->instructionsRawPointer()), LLInt::Registers::pbGPR);
         jit.move(CCallHelpers::TrustedImm32(bytecodeIndex.offset()), LLInt::Registers::pcGPR);
-#else
-        jit.move(CCallHelpers::TrustedImmPtr(&currentInstruction), LLInt::Registers::pcGPR);
-#endif
         jumpTarget = destination.retagged<OSRExitPtrTag>().executableAddress();
     } else {
         codeBlockForExit->m_hasLinkedOSRExit = true;

Modified: trunk/Source/JavaScriptCore/jit/JITCall32_64.cpp (254933 => 254934)


--- trunk/Source/JavaScriptCore/jit/JITCall32_64.cpp	2020-01-22 19:45:59 UTC (rev 254933)
+++ trunk/Source/JavaScriptCore/jit/JITCall32_64.cpp	2020-01-22 20:31:33 UTC (rev 254934)
@@ -288,7 +288,8 @@
     compileSetupFrame(bytecode, info);
     // SP holds newCallFrame + sizeof(CallerFrameAndPC), with ArgumentCount initialized.
     
-    uint32_t locationBits = CallSiteIndex(BytecodeIndex(bitwise_cast<uint32_t>(instruction))).bits();
+    auto bytecodeIndex = m_codeBlock->bytecodeIndex(instruction);
+    uint32_t locationBits = CallSiteIndex(bytecodeIndex).bits();
     store32(TrustedImm32(locationBits), tagFor(CallFrameSlot::argumentCountIncludingThis));
     emitLoad(callee, regT1, regT0); // regT1, regT0 holds callee.
 

Modified: trunk/Source/JavaScriptCore/jit/JITInlines.h (254933 => 254934)


--- trunk/Source/JavaScriptCore/jit/JITInlines.h	2020-01-22 19:45:59 UTC (rev 254933)
+++ trunk/Source/JavaScriptCore/jit/JITInlines.h	2020-01-22 20:31:33 UTC (rev 254934)
@@ -104,12 +104,7 @@
 
 ALWAYS_INLINE void JIT::updateTopCallFrame()
 {
-#if USE(JSVALUE32_64)
-    const Instruction* instruction = m_codeBlock->instructions().at(m_bytecodeIndex.offset()).ptr();
-    uint32_t locationBits = CallSiteIndex(BytecodeIndex(bitwise_cast<uint32_t>(instruction))).bits();
-#else
     uint32_t locationBits = CallSiteIndex(m_bytecodeIndex).bits();
-#endif
     store32(TrustedImm32(locationBits), tagFor(CallFrameSlot::argumentCountIncludingThis));
     
     // FIXME: It's not clear that this is needed. JITOperations tend to update the top call frame on

Modified: trunk/Source/JavaScriptCore/jit/JITOpcodes32_64.cpp (254933 => 254934)


--- trunk/Source/JavaScriptCore/jit/JITOpcodes32_64.cpp	2020-01-22 19:45:59 UTC (rev 254933)
+++ trunk/Source/JavaScriptCore/jit/JITOpcodes32_64.cpp	2020-01-22 20:31:33 UTC (rev 254934)
@@ -1426,7 +1426,7 @@
     emitLoadTag(bytecode.m_thisValue.offset(), regT1);
     JSValueRegs thisRegs(regT1, regT2);
     emitLoadPayload(bytecode.m_scope.offset(), regT3);
-    logShadowChickenTailPacket(shadowPacketReg, thisRegs, regT3, m_codeBlock, CallSiteIndex(BytecodeIndex(bitwise_cast<uint32_t>(currentInstruction))));
+    logShadowChickenTailPacket(shadowPacketReg, thisRegs, regT3, m_codeBlock, CallSiteIndex(m_bytecodeIndex));
 }
 
 } // namespace JSC

Modified: trunk/Source/JavaScriptCore/jit/JITPropertyAccess32_64.cpp (254933 => 254934)


--- trunk/Source/JavaScriptCore/jit/JITPropertyAccess32_64.cpp	2020-01-22 19:45:59 UTC (rev 254933)
+++ trunk/Source/JavaScriptCore/jit/JITPropertyAccess32_64.cpp	2020-01-22 20:31:33 UTC (rev 254934)
@@ -157,7 +157,7 @@
         emitArrayProfilingSiteWithCell(regT0, regT4, profile);
 
         JITGetByValGenerator gen(
-            m_codeBlock, CodeOrigin(m_bytecodeIndex), CallSiteIndex(BytecodeIndex(bitwise_cast<uint32_t>(currentInstruction))), RegisterSet::stubUnavailableRegisters(),
+            m_codeBlock, CodeOrigin(m_bytecodeIndex), CallSiteIndex(m_bytecodeIndex), RegisterSet::stubUnavailableRegisters(),
             JSValueRegs::payloadOnly(regT0), JSValueRegs(regT3, regT2), JSValueRegs(regT1, regT0));
         if (isOperandConstantInt(property))
             gen.stubInfo()->propertyIsInt32 = true;
@@ -365,7 +365,7 @@
 
     const Instruction* currentInstruction = m_codeBlock->instructions().at(byValInfo->bytecodeIndex).ptr();
     JITPutByIdGenerator gen(
-        m_codeBlock, CodeOrigin(m_bytecodeIndex), CallSiteIndex(BytecodeIndex(bitwise_cast<uint32_t>(currentInstruction))), RegisterSet::stubUnavailableRegisters(),
+        m_codeBlock, CodeOrigin(m_bytecodeIndex), CallSiteIndex(m_bytecodeIndex), RegisterSet::stubUnavailableRegisters(),
         JSValueRegs::payloadOnly(regT0), JSValueRegs(regT3, regT2), regT1, m_codeBlock->ecmaMode(), putKind);
     gen.generateFastPath(*this);
     doneCases.append(jump());
@@ -429,7 +429,7 @@
     emitJumpSlowCaseIfNotJSCell(base, regT1);
 
     JITGetByIdGenerator gen(
-        m_codeBlock, CodeOrigin(m_bytecodeIndex), CallSiteIndex(BytecodeIndex(bitwise_cast<uint32_t>(currentInstruction))), RegisterSet::stubUnavailableRegisters(),
+        m_codeBlock, CodeOrigin(m_bytecodeIndex), CallSiteIndex(m_bytecodeIndex), RegisterSet::stubUnavailableRegisters(),
         ident->impl(), JSValueRegs::payloadOnly(regT0), JSValueRegs(regT1, regT0), AccessType::TryGetById);
     gen.generateFastPath(*this);
     addSlowCase(gen.slowPathJump());
@@ -468,7 +468,7 @@
     emitJumpSlowCaseIfNotJSCell(base, regT1);
 
     JITGetByIdGenerator gen(
-        m_codeBlock, CodeOrigin(m_bytecodeIndex), CallSiteIndex(BytecodeIndex(bitwise_cast<uint32_t>(currentInstruction))), RegisterSet::stubUnavailableRegisters(),
+        m_codeBlock, CodeOrigin(m_bytecodeIndex), CallSiteIndex(m_bytecodeIndex), RegisterSet::stubUnavailableRegisters(),
         ident->impl(), JSValueRegs::payloadOnly(regT0), JSValueRegs(regT1, regT0), AccessType::GetByIdDirect);
     gen.generateFastPath(*this);
     addSlowCase(gen.slowPathJump());
@@ -514,7 +514,7 @@
     }
 
     JITGetByIdGenerator gen(
-        m_codeBlock, CodeOrigin(m_bytecodeIndex), CallSiteIndex(BytecodeIndex(bitwise_cast<uint32_t>(currentInstruction))), RegisterSet::stubUnavailableRegisters(),
+        m_codeBlock, CodeOrigin(m_bytecodeIndex), CallSiteIndex(m_bytecodeIndex), RegisterSet::stubUnavailableRegisters(),
         ident->impl(), JSValueRegs::payloadOnly(regT0), JSValueRegs(regT1, regT0), AccessType::GetById);
     gen.generateFastPath(*this);
     addSlowCase(gen.slowPathJump());
@@ -555,7 +555,7 @@
     emitJumpSlowCaseIfNotJSCell(thisVReg, regT4);
 
     JITGetByIdWithThisGenerator gen(
-        m_codeBlock, CodeOrigin(m_bytecodeIndex), CallSiteIndex(BytecodeIndex(bitwise_cast<uint32_t>(currentInstruction))), RegisterSet::stubUnavailableRegisters(),
+        m_codeBlock, CodeOrigin(m_bytecodeIndex), CallSiteIndex(m_bytecodeIndex), RegisterSet::stubUnavailableRegisters(),
         ident->impl(), JSValueRegs(regT1, regT0), JSValueRegs::payloadOnly(regT0), JSValueRegs(regT4, regT3));
     gen.generateFastPath(*this);
     addSlowCase(gen.slowPathJump());
@@ -598,7 +598,7 @@
     emitJumpSlowCaseIfNotJSCell(base, regT1);
 
     JITPutByIdGenerator gen(
-        m_codeBlock, CodeOrigin(m_bytecodeIndex), CallSiteIndex(BytecodeIndex(bitwise_cast<uint32_t>(currentInstruction))), RegisterSet::stubUnavailableRegisters(),
+        m_codeBlock, CodeOrigin(m_bytecodeIndex), CallSiteIndex(m_bytecodeIndex), RegisterSet::stubUnavailableRegisters(),
         JSValueRegs::payloadOnly(regT0), JSValueRegs(regT3, regT2),
         regT1, m_codeBlock->ecmaMode(), direct ? Direct : NotDirect);
     
@@ -642,7 +642,7 @@
     emitJumpSlowCaseIfNotJSCell(base, regT1);
 
     JITInByIdGenerator gen(
-        m_codeBlock, CodeOrigin(m_bytecodeIndex), CallSiteIndex(BytecodeIndex(bitwise_cast<uint32_t>(currentInstruction))), RegisterSet::stubUnavailableRegisters(),
+        m_codeBlock, CodeOrigin(m_bytecodeIndex), CallSiteIndex(m_bytecodeIndex), RegisterSet::stubUnavailableRegisters(),
         ident->impl(), JSValueRegs::payloadOnly(regT0), JSValueRegs(regT1, regT0));
     gen.generateFastPath(*this);
     addSlowCase(gen.slowPathJump());

Modified: trunk/Source/JavaScriptCore/llint/LowLevelInterpreter.asm (254933 => 254934)


--- trunk/Source/JavaScriptCore/llint/LowLevelInterpreter.asm	2020-01-22 19:45:59 UTC (rev 254933)
+++ trunk/Source/JavaScriptCore/llint/LowLevelInterpreter.asm	2020-01-22 20:31:33 UTC (rev 254934)
@@ -793,6 +793,7 @@
         storep metadataTable, -PtrSize[cfr]
     elsif ARMv7 or MIPS
         storep metadataTable, -4[cfr]
+        storep PB, -8[cfr]
     elsif ARM64 or ARM64E
         emit "stp x27, x28, [x29, #-16]"
         emit "stp x25, x26, [x29, #-32]"
@@ -816,6 +817,7 @@
         loadp -PtrSize[cfr], metadataTable
     elsif ARMv7 or MIPS
         loadp -4[cfr], metadataTable
+        loadp -8[cfr], PB
     elsif ARM64 or ARM64E
         emit "ldp x25, x26, [x29, #-32]"
         emit "ldp x27, x28, [x29, #-16]"

Modified: trunk/Source/JavaScriptCore/llint/LowLevelInterpreter.cpp (254933 => 254934)


--- trunk/Source/JavaScriptCore/llint/LowLevelInterpreter.cpp	2020-01-22 19:45:59 UTC (rev 254933)
+++ trunk/Source/JavaScriptCore/llint/LowLevelInterpreter.cpp	2020-01-22 20:31:33 UTC (rev 254934)
@@ -383,18 +383,6 @@
 #define RECORD_OPCODE_STATS(__opcode)
 #endif
 
-#if USE(JSVALUE32_64)
-#define FETCH_OPCODE() *pc.i8p
-#else // USE(JSVALUE64)
-#define FETCH_OPCODE() *bitwise_cast<OpcodeID*>(pcBase.i8p + pc.i)
-#endif // USE(JSVALUE64)
-
-#define NEXT_INSTRUCTION() \
-    do {                         \
-        opcode = FETCH_OPCODE(); \
-        DISPATCH_OPCODE();       \
-    } while (false)
-
 #if ENABLE(COMPUTED_GOTO_OPCODES)
 
     //========================================================================
@@ -485,7 +473,6 @@
     #undef LLINT_OPCODE_ENTRY
 #endif
 
-    #undef NEXT_INSTRUCTION
     #undef DEFINE_OPCODE
     #undef CHECK_FOR_TIMEOUT
     #undef CAST

Modified: trunk/Source/JavaScriptCore/runtime/SamplingProfiler.cpp (254933 => 254934)


--- trunk/Source/JavaScriptCore/runtime/SamplingProfiler.cpp	2020-01-22 19:45:59 UTC (rev 254933)
+++ trunk/Source/JavaScriptCore/runtime/SamplingProfiler.cpp	2020-01-22 20:31:33 UTC (rev 254934)
@@ -455,18 +455,10 @@
     RELEASE_ASSERT(!codeBlock->hasCodeOrigins());
 #endif
 
-#if USE(JSVALUE64)
     unsigned bytecodeOffset = llintPC;
     if (bytecodeOffset < codeBlock->instructionsSize())
         return BytecodeIndex(bytecodeOffset);
     return BytecodeIndex();
-#else
-    Instruction* instruction = bitwise_cast<Instruction*>(llintPC);
-
-    if (codeBlock->instructions().contains(instruction))
-        return BytecodeIndex(codeBlock->bytecodeOffset(instruction));
-    return BytecodeIndex();
-#endif
 }
 
 void SamplingProfiler::processUnverifiedStackTraces(const AbstractLocker&)
@@ -613,12 +605,7 @@
                 // by ignoring it.
                 BytecodeIndex bytecodeIndex = BytecodeIndex(0);
                 if (topCodeBlock->jitType() == JITType::InterpreterThunk || topCodeBlock->jitType() == JITType::BaselineJIT) {
-                    unsigned bits;
-#if USE(JSVALUE64)
-                    bits = static_cast<unsigned>(bitwise_cast<uintptr_t>(unprocessedStackTrace.llintPC));
-#else
-                    bits = bitwise_cast<unsigned>(unprocessedStackTrace.llintPC);
-#endif
+                    unsigned bits = static_cast<unsigned>(bitwise_cast<uintptr_t>(unprocessedStackTrace.llintPC));
                     bytecodeIndex = tryGetBytecodeIndex(bits, topCodeBlock);
 
                     UNUSED_PARAM(bytecodeIndex); // FIXME: do something with this info for the web inspector: https://bugs.webkit.org/show_bug.cgi?id=153455
_______________________________________________
webkit-changes mailing list
[email protected]
https://lists.webkit.org/mailman/listinfo/webkit-changes

Reply via email to