Modified: trunk/Source/_javascript_Core/ftl/FTLLowerDFGToB3.cpp (278586 => 278587)
--- trunk/Source/_javascript_Core/ftl/FTLLowerDFGToB3.cpp 2021-06-08 01:28:30 UTC (rev 278586)
+++ trunk/Source/_javascript_Core/ftl/FTLLowerDFGToB3.cpp 2021-06-08 01:29:31 UTC (rev 278587)
@@ -2384,7 +2384,6 @@
patchpoint->numGPScratchRegisters = 1;
patchpoint->clobber(RegisterSet::macroScratchRegisters());
State* state = &m_ftlState;
- CodeOrigin semanticNodeOrigin = node->origin.semantic;
patchpoint->setGenerator(
[=] (CCallHelpers& jit, const StackmapGenerationParams& params) {
AllowMacroScratchRegisterUsage allowScratch(jit);
@@ -2415,12 +2414,12 @@
#endif
if (mathICGenerationState->shouldSlowPathRepatch) {
- SlowPathCall call = callOperation(*state, params.unavailableRegisters(), jit, semanticNodeOrigin, exceptions.get(),
- repatchingFunction, params[0].gpr(), jit.codeBlock()->globalObjectFor(semanticNodeOrigin), params[1].gpr(), CCallHelpers::TrustedImmPtr(mathIC));
+ SlowPathCall call = callOperation(*state, params.unavailableRegisters(), jit, node->origin.semantic, exceptions.get(),
+ repatchingFunction, params[0].gpr(), jit.codeBlock()->globalObjectFor(node->origin.semantic), params[1].gpr(), CCallHelpers::TrustedImmPtr(mathIC));
mathICGenerationState->slowPathCall = call.call();
} else {
- SlowPathCall call = callOperation(*state, params.unavailableRegisters(), jit, semanticNodeOrigin,
- exceptions.get(), nonRepatchingFunction, params[0].gpr(), jit.codeBlock()->globalObjectFor(semanticNodeOrigin), params[1].gpr());
+ SlowPathCall call = callOperation(*state, params.unavailableRegisters(), jit, node->origin.semantic,
+ exceptions.get(), nonRepatchingFunction, params[0].gpr(), jit.codeBlock()->globalObjectFor(node->origin.semantic), params[1].gpr());
mathICGenerationState->slowPathCall = call.call();
}
jit.jump().linkTo(done, &jit);
@@ -2439,8 +2438,8 @@
});
} else {
callOperation(
- *state, params.unavailableRegisters(), jit, semanticNodeOrigin, exceptions.get(),
- nonRepatchingFunction, params[0].gpr(), jit.codeBlock()->globalObjectFor(semanticNodeOrigin), params[1].gpr());
+ *state, params.unavailableRegisters(), jit, node->origin.semantic, exceptions.get(),
+ nonRepatchingFunction, params[0].gpr(), jit.codeBlock()->globalObjectFor(node->origin.semantic), params[1].gpr());
}
#if ENABLE(MATH_IC_STATS)
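Both hunks above drop the hoisted `semanticNodeOrigin` copy and go back to reading `node->origin.semantic` inside the `[=]` generator lambda. The lambda captures the `Node*` by value, so only the pointer is copied and the field read is deferred until the generator runs; the hoisted variant snapshotted the `CodeOrigin` at capture time instead. A minimal standalone sketch of that distinction (illustrative only, not WebKit code):

```cpp
#include <iostream>

struct Node { int origin; };

int main()
{
    Node node { 1 };
    int originCopy = node.origin;                        // hoisted snapshot
    auto byValue   = [=] { return originCopy; };         // copies the int now
    auto byPointer = [p = &node] { return p->origin; };  // copies only the pointer

    node.origin = 2;
    std::cout << byValue() << ' ' << byPointer() << '\n'; // prints "1 2"
}
```

Either form is correct only if the pointee outlives the deferred call, which is why the choice between them matters for generators that run late.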
@@ -2492,7 +2491,6 @@
patchpoint->numFPScratchRegisters = 2;
patchpoint->clobber(RegisterSet::macroScratchRegisters());
State* state = &m_ftlState;
- CodeOrigin semanticNodeOrigin = node->origin.semantic;
patchpoint->setGenerator(
[=] (CCallHelpers& jit, const StackmapGenerationParams& params) {
AllowMacroScratchRegisterUsage allowScratch(jit);
@@ -2526,12 +2524,12 @@
#endif
if (mathICGenerationState->shouldSlowPathRepatch) {
- SlowPathCall call = callOperation(*state, params.unavailableRegisters(), jit, semanticNodeOrigin, exceptions.get(),
- repatchingFunction, params[0].gpr(), jit.codeBlock()->globalObjectFor(semanticNodeOrigin), params[1].gpr(), params[2].gpr(), CCallHelpers::TrustedImmPtr(mathIC));
+ SlowPathCall call = callOperation(*state, params.unavailableRegisters(), jit, node->origin.semantic, exceptions.get(),
+ repatchingFunction, params[0].gpr(), jit.codeBlock()->globalObjectFor(node->origin.semantic), params[1].gpr(), params[2].gpr(), CCallHelpers::TrustedImmPtr(mathIC));
mathICGenerationState->slowPathCall = call.call();
} else {
- SlowPathCall call = callOperation(*state, params.unavailableRegisters(), jit, semanticNodeOrigin,
- exceptions.get(), nonRepatchingFunction, params[0].gpr(), jit.codeBlock()->globalObjectFor(semanticNodeOrigin), params[1].gpr(), params[2].gpr());
+ SlowPathCall call = callOperation(*state, params.unavailableRegisters(), jit, node->origin.semantic,
+ exceptions.get(), nonRepatchingFunction, params[0].gpr(), jit.codeBlock()->globalObjectFor(node->origin.semantic), params[1].gpr(), params[2].gpr());
mathICGenerationState->slowPathCall = call.call();
}
jit.jump().linkTo(done, &jit);
@@ -2550,8 +2548,8 @@
});
} else {
callOperation(
- *state, params.unavailableRegisters(), jit, semanticNodeOrigin, exceptions.get(),
- nonRepatchingFunction, params[0].gpr(), jit.codeBlock()->globalObjectFor(semanticNodeOrigin), params[1].gpr(), params[2].gpr());
+ *state, params.unavailableRegisters(), jit, node->origin.semantic, exceptions.get(),
+ nonRepatchingFunction, params[0].gpr(), jit.codeBlock()->globalObjectFor(node->origin.semantic), params[1].gpr(), params[2].gpr());
}
#if ENABLE(MATH_IC_STATS)
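The `shouldSlowPathRepatch` branches above choose between a `repatchingFunction` and a `nonRepatchingFunction` for the math IC slow path; the repatching flavor may rewrite the call site so later executions bypass the generic code. A loose, self-contained analogy using a plain function pointer as the "call site" (my construction, not the JIT's actual mechanism):

```cpp
#include <cstdio>

using BinaryOp = long (*)(long, long);

static long addSlow(long a, long b);                   // forward declaration
static long addFast(long a, long b) { return a + b; }  // specialized fast path

static BinaryOp addCallSite = addSlow;                 // "IC" starts generic

static long addSlow(long a, long b)
{
    addCallSite = addFast; // repatch the call site so later calls go fast
    return a + b;
}

int main()
{
    std::printf("%ld\n", addCallSite(1, 2)); // slow path runs once, repatches
    std::printf("%ld\n", addCallSite(3, 4)); // subsequent calls hit the fast path
}
```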
@@ -3995,11 +3993,10 @@
State* state = &m_ftlState;
bool baseIsCell = abstractValue(node->child1()).isType(SpecCell);
- CodeOrigin nodeSemanticOrigin = node->origin.semantic;
patchpoint->setGenerator([=] (CCallHelpers& jit, const StackmapGenerationParams& params) {
AllowMacroScratchRegisterUsage allowScratch(jit);
- CallSiteIndex callSiteIndex = state->jitCode->common.codeOrigins->addUniqueCallSiteIndex(nodeSemanticOrigin);
+ CallSiteIndex callSiteIndex = state->jitCode->common.codeOrigins->addUniqueCallSiteIndex(node->origin.semantic);
// This is the direct exit target for operation calls.
Box<CCallHelpers::JumpList> exceptions = exceptionHandle->scheduleExitCreation(params)->jumps(jit);
@@ -4014,7 +4011,7 @@
GPRReg propertyGPR = params[2].gpr();
auto generator = Box<JITGetByValGenerator>::create(
- jit.codeBlock(), nodeSemanticOrigin, callSiteIndex, AccessType::GetPrivateName,
+ jit.codeBlock(), node->origin.semantic, callSiteIndex, AccessType::GetPrivateName,
params.unavailableRegisters(), JSValueRegs(baseGPR), JSValueRegs(propertyGPR), JSValueRegs(resultGPR));
CCallHelpers::Jump notCell;
@@ -4032,9 +4029,9 @@
generator->slowPathJump().link(&jit);
CCallHelpers::Label slowPathBegin = jit.label();
CCallHelpers::Call slowPathCall = callOperation(
- *state, params.unavailableRegisters(), jit, nodeSemanticOrigin,
+ *state, params.unavailableRegisters(), jit, node->origin.semantic,
exceptions.get(), operationGetPrivateNameOptimize, resultGPR,
- jit.codeBlock()->globalObjectFor(nodeSemanticOrigin),
+ jit.codeBlock()->globalObjectFor(node->origin.semantic),
CCallHelpers::TrustedImmPtr(generator->stubInfo()), baseGPR, propertyGPR).call();
jit.jump().linkTo(done, &jit);
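The generator above is allocated through `Box<JITGetByValGenerator>::create(...)` and captured by the `[=]` lambda. `Box` is WTF's ref-counted heap cell, so copying the handle keeps one shared generator alive across the fast-path and slow-path closures. A rough standard-library analogue of that ownership pattern (using `std::shared_ptr` as a stand-in for `Box`):

```cpp
#include <iostream>
#include <memory>

struct Generator { int hits = 0; };

int main()
{
    // Box<Generator>::create(...) behaves roughly like make_shared here:
    // copying the handle into each lambda shares one ref-counted object.
    auto generator = std::make_shared<Generator>();

    auto fastPath = [generator] { generator->hits += 1; };
    auto slowPath = [generator] { generator->hits += 10; };

    fastPath();
    slowPath();
    std::cout << generator->hits << '\n'; // 11: both paths mutated one object
}
```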
@@ -4128,11 +4125,10 @@
State* state = &m_ftlState;
bool baseIsCell = abstractValue(m_node->child1()).isType(SpecCell);
- CodeOrigin nodeSemanticOrigin = node->origin.semantic;
patchpoint->setGenerator([=] (CCallHelpers& jit, const StackmapGenerationParams& params) {
AllowMacroScratchRegisterUsage allowScratch(jit);
- CallSiteIndex callSiteIndex = state->jitCode->common.codeOrigins->addUniqueCallSiteIndex(nodeSemanticOrigin);
+ CallSiteIndex callSiteIndex = state->jitCode->common.codeOrigins->addUniqueCallSiteIndex(node->origin.semantic);
// This is the direct exit target for operation calls.
Box<CCallHelpers::JumpList> exceptions = exceptionHandle->scheduleExitCreation(params)->jumps(jit);
@@ -4146,7 +4142,7 @@
GPRReg brandGPR = params[1].gpr();
auto generator = Box<JITPrivateBrandAccessGenerator>::create(
- jit.codeBlock(), nodeSemanticOrigin, callSiteIndex, accessType,
+ jit.codeBlock(), node->origin.semantic, callSiteIndex, accessType,
params.unavailableRegisters(), JSValueRegs(baseGPR), JSValueRegs(brandGPR));
CCallHelpers::Jump notCell;
@@ -4176,9 +4172,9 @@
generator->slowPathJump().link(&jit);
CCallHelpers::Label slowPathBegin = jit.label();
CCallHelpers::Call slowPathCall = callOperation(
- *state, params.unavailableRegisters(), jit, nodeSemanticOrigin,
+ *state, params.unavailableRegisters(), jit, node->origin.semantic,
exceptions.get(), appropriatePrivateAccessFunction(accessType), InvalidGPRReg,
- jit.codeBlock()->globalObjectFor(nodeSemanticOrigin),
+ jit.codeBlock()->globalObjectFor(node->origin.semantic),
CCallHelpers::TrustedImmPtr(generator->stubInfo()), baseGPR, brandGPR).call();
jit.jump().linkTo(done, &jit);
@@ -4538,14 +4534,13 @@
preparePatchpointForExceptions(patchpoint);
State* state = &m_ftlState;
-
- CodeOrigin nodeSemanticOrigin = node->origin.semantic;
+
patchpoint->setGenerator(
[=] (CCallHelpers& jit, const StackmapGenerationParams& params) {
AllowMacroScratchRegisterUsage allowScratch(jit);
CallSiteIndex callSiteIndex =
- state->jitCode->common.codeOrigins->addUniqueCallSiteIndex(nodeSemanticOrigin);
+ state->jitCode->common.codeOrigins->addUniqueCallSiteIndex(node->origin.semantic);
Box<CCallHelpers::JumpList> exceptions =
exceptionHandle->scheduleExitCreation(params)->jumps(jit);
@@ -4554,7 +4549,7 @@
exceptionHandle->scheduleExitCreationForUnwind(params, callSiteIndex);
auto generator = Box<JITPutByIdGenerator>::create(
- jit.codeBlock(), nodeSemanticOrigin, callSiteIndex,
+ jit.codeBlock(), node->origin.semantic, callSiteIndex,
params.unavailableRegisters(), identifier, JSValueRegs(params[0].gpr()),
JSValueRegs(params[1].gpr()), GPRInfo::patchpointScratchRegister, ecmaMode,
putKind);
@@ -4569,9 +4564,9 @@
generator->slowPathJump().link(&jit);
CCallHelpers::Label slowPathBegin = jit.label();
CCallHelpers::Call slowPathCall = callOperation(
- *state, params.unavailableRegisters(), jit, nodeSemanticOrigin,
+ *state, params.unavailableRegisters(), jit, node->origin.semantic,
exceptions.get(), generator->slowPathFunction(), InvalidGPRReg,
- jit.codeBlock()->globalObjectFor(nodeSemanticOrigin),
+ jit.codeBlock()->globalObjectFor(node->origin.semantic),
CCallHelpers::TrustedImmPtr(generator->stubInfo()), params[1].gpr(),
params[0].gpr(), identifier.rawBits()).call();
jit.jump().linkTo(done, &jit);
@@ -5258,11 +5253,10 @@
RefPtr<PatchpointExceptionHandle> exceptionHandle = preparePatchpointForExceptions(patchpoint);
State* state = &m_ftlState;
- CodeOrigin nodeSemanticOrigin = node->origin.semantic;
patchpoint->setGenerator([=] (CCallHelpers& jit, const StackmapGenerationParams& params) {
AllowMacroScratchRegisterUsage allowScratch(jit);
- CallSiteIndex callSiteIndex = state->jitCode->common.codeOrigins->addUniqueCallSiteIndex(nodeSemanticOrigin);
+ CallSiteIndex callSiteIndex = state->jitCode->common.codeOrigins->addUniqueCallSiteIndex(node->origin.semantic);
// This is the direct exit target for operation calls.
Box<CCallHelpers::JumpList> exceptions = exceptionHandle->scheduleExitCreation(params)->jumps(jit);
@@ -5277,7 +5271,7 @@
GPRReg propertyGPR = params[2].gpr();
auto generator = Box<JITGetByValGenerator>::create(
- jit.codeBlock(), nodeSemanticOrigin, callSiteIndex, AccessType::GetByVal,
+ jit.codeBlock(), node->origin.semantic, callSiteIndex, AccessType::GetByVal,
params.unavailableRegisters(), JSValueRegs(baseGPR), JSValueRegs(propertyGPR), JSValueRegs(resultGPR));
generator->stubInfo()->propertyIsString = propertyIsString;
@@ -5299,9 +5293,9 @@
generator->slowPathJump().link(&jit);
CCallHelpers::Label slowPathBegin = jit.label();
CCallHelpers::Call slowPathCall = callOperation(
- *state, params.unavailableRegisters(), jit, nodeSemanticOrigin,
+ *state, params.unavailableRegisters(), jit, node->origin.semantic,
exceptions.get(), operationGetByValOptimize, resultGPR,
- jit.codeBlock()->globalObjectFor(nodeSemanticOrigin),
+ jit.codeBlock()->globalObjectFor(node->origin.semantic),
CCallHelpers::TrustedImmPtr(generator->stubInfo()), CCallHelpers::TrustedImmPtr(nullptr), baseGPR, propertyGPR).call();
jit.jump().linkTo(done, &jit);
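Each of these generators first reserves a call-site index via `addUniqueCallSiteIndex(node->origin.semantic)`, tying the generated call back to its semantic origin for unwinding. A toy model of what a unique-index allocator over origins might look like (an assumption about the shape, not JSC's real `CodeOrigins` type):

```cpp
#include <cstdio>
#include <vector>

struct CodeOrigin { unsigned bytecodeIndex; };

// Hand out a fresh index per request and remember its origin, so an index
// found during unwinding can be mapped back to where the call came from.
struct CodeOrigins {
    std::vector<CodeOrigin> table;
    unsigned addUniqueCallSiteIndex(CodeOrigin origin)
    {
        table.push_back(origin);
        return static_cast<unsigned>(table.size() - 1);
    }
};

int main()
{
    CodeOrigins origins;
    unsigned a = origins.addUniqueCallSiteIndex({ 10 });
    unsigned b = origins.addUniqueCallSiteIndex({ 10 }); // unique even for the same origin
    std::printf("%u %u\n", a, b); // 0 1
}
```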
@@ -5859,16 +5853,12 @@
State* state = &m_ftlState;
Node* node = m_node;
- CodeOrigin nodeSemanticOrigin = node->origin.semantic;
- auto child1UseKind = node->child1().useKind();
- auto child2UseKind = node->child2().useKind();
- auto ecmaMode = node->ecmaMode().value();
patchpoint->setGenerator(
[=] (CCallHelpers& jit, const StackmapGenerationParams& params) {
AllowMacroScratchRegisterUsage allowScratch(jit);
CallSiteIndex callSiteIndex =
- state->jitCode->common.codeOrigins->addUniqueCallSiteIndex(nodeSemanticOrigin);
+ state->jitCode->common.codeOrigins->addUniqueCallSiteIndex(node->origin.semantic);
Box<CCallHelpers::JumpList> exceptions =
exceptionHandle->scheduleExitCreation(params)->jumps(jit);
@@ -5880,7 +5870,7 @@
ASSERT(base.gpr() != params.gpScratch(0));
ASSERT(returnGPR != params.gpScratch(0));
- if (child1UseKind)
+ if (node->child1().useKind() == UntypedUse)
slowCases.append(jit.branchIfNotCell(base));
constexpr auto optimizationFunction = [&] () {
@@ -5895,7 +5885,7 @@
return CCallHelpers::TrustedImmPtr(subscriptValue.rawBits());
else {
ASSERT(params.gpScratch(0) != params[2].gpr());
- if (child2UseKind == UntypedUse)
+ if (node->child2().useKind() == UntypedUse)
slowCases.append(jit.branchIfNotCell(JSValueRegs(params[2].gpr())));
return JSValueRegs(params[2].gpr());
}
@@ -5904,12 +5894,12 @@
const auto generator = [&] {
if constexpr (kind == DelByKind::Normal) {
return Box<JITDelByIdGenerator>::create(
- jit.codeBlock(), nodeSemanticOrigin, callSiteIndex,
+ jit.codeBlock(), node->origin.semantic, callSiteIndex,
params.unavailableRegisters(), subscriptValue, base,
JSValueRegs(returnGPR), params.gpScratch(0));
} else {
return Box<JITDelByValGenerator>::create(
- jit.codeBlock(), nodeSemanticOrigin, callSiteIndex,
+ jit.codeBlock(), node->origin.semantic, callSiteIndex,
params.unavailableRegisters(), base,
subscript, JSValueRegs(returnGPR), params.gpScratch(0));
}
@@ -5926,11 +5916,11 @@
slowCases.link(&jit);
CCallHelpers::Label slowPathBegin = jit.label();
CCallHelpers::Call slowPathCall = callOperation(
- *state, params.unavailableRegisters(), jit, nodeSemanticOrigin,
+ *state, params.unavailableRegisters(), jit, node->origin.semantic,
exceptions.get(), optimizationFunction, returnGPR,
- jit.codeBlock()->globalObjectFor(nodeSemanticOrigin),
+ jit.codeBlock()->globalObjectFor(node->origin.semantic),
CCallHelpers::TrustedImmPtr(generator->stubInfo()), base,
- subscript, CCallHelpers::TrustedImm32(ecmaMode)).call();
+ subscript, CCallHelpers::TrustedImm32(node->ecmaMode().value())).call();
jit.jump().linkTo(done, &jit);
generator->reportSlowPathCall(slowPathBegin, slowPathCall);
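Earlier in this hunk, `const auto generator = [&] { if constexpr (kind == DelByKind::Normal) ... }` builds either a `Box<JITDelByIdGenerator>` or a `Box<JITDelByValGenerator>` from an immediately-invoked lambda, letting the two `constexpr` branches return different concrete types. The idiom in miniature (hypothetical types, same shape):

```cpp
#include <iostream>
#include <string>

enum class DelByKind { Normal, ByVal };

template<DelByKind kind>
void compileDelBy()
{
    // Immediately-invoked lambda: each `if constexpr` branch may return a
    // different concrete type, and `generator` deduces whichever was taken.
    const auto generator = [&] {
        if constexpr (kind == DelByKind::Normal)
            return std::string("by-id generator");
        else
            return 42; // stand-in for a differently-typed generator
    }();
    std::cout << generator << '\n';
}

int main()
{
    compileDelBy<DelByKind::Normal>();
    compileDelBy<DelByKind::ByVal>();
}
```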
@@ -9786,8 +9776,6 @@
CodeOrigin codeOrigin = codeOriginDescriptionOfCallSite();
State* state = &m_ftlState;
VM* vm = &this->vm();
- CodeOrigin nodeSemanticOrigin = node->origin.semantic;
- auto nodeOp = node->op();
patchpoint->setGenerator(
[=] (CCallHelpers& jit, const StackmapGenerationParams& params) {
AllowMacroScratchRegisterUsage allowScratch(jit);
@@ -9799,9 +9787,9 @@
CCallHelpers::TrustedImm32(callSiteIndex.bits()),
CCallHelpers::tagFor(VirtualRegister(CallFrameSlot::argumentCountIncludingThis)));
- CallLinkInfo* callLinkInfo = jit.codeBlock()->addCallLinkInfo(nodeSemanticOrigin);
+ CallLinkInfo* callLinkInfo = jit.codeBlock()->addCallLinkInfo(node->origin.semantic);
callLinkInfo->setUpCall(
- nodeOp == Construct ? CallLinkInfo::Construct : CallLinkInfo::Call, GPRInfo::regT0);
+ node->op() == Construct ? CallLinkInfo::Construct : CallLinkInfo::Call, GPRInfo::regT0);
auto slowPath = callLinkInfo->emitFastPath(jit, GPRInfo::regT0, InvalidGPRReg, CallLinkInfo::UseDataIC::No);
CCallHelpers::Jump done = jit.jump();
@@ -9808,7 +9796,7 @@
slowPath.link(&jit);
auto slowPathStart = jit.label();
- jit.move(CCallHelpers::TrustedImmPtr(jit.codeBlock()->globalObjectFor(nodeSemanticOrigin)), GPRInfo::regT3);
+ jit.move(CCallHelpers::TrustedImmPtr(jit.codeBlock()->globalObjectFor(node->origin.semantic)), GPRInfo::regT3);
callLinkInfo->emitSlowPath(*vm, jit);
done.link(&jit);
@@ -9898,7 +9886,6 @@
}
CodeOrigin codeOrigin = codeOriginDescriptionOfCallSite();
- CodeOrigin semanticNodeOrigin = node->origin.semantic;
State* state = &m_ftlState;
patchpoint->setGenerator(
[=] (CCallHelpers& jit, const StackmapGenerationParams& params) {
@@ -9932,7 +9919,7 @@
shuffleData.numPassedArgs = numPassedArgs;
shuffleData.setupCalleeSaveRegisters(jit.codeBlock());
- CallLinkInfo* callLinkInfo = jit.codeBlock()->addCallLinkInfo(semanticNodeOrigin);
+ CallLinkInfo* callLinkInfo = jit.codeBlock()->addCallLinkInfo(node->origin.semantic);
callLinkInfo->setUpCall(CallLinkInfo::DirectTailCall, InvalidGPRReg);
CCallHelpers::Label mainPath = jit.label();
@@ -9949,7 +9936,7 @@
CCallHelpers::Label slowPath = jit.label();
callOperation(
*state, toSave, jit,
- semanticNodeOrigin, exceptions.get(), operationLinkDirectCall,
+ node->origin.semantic, exceptions.get(), operationLinkDirectCall,
InvalidGPRReg, CCallHelpers::TrustedImmPtr(callLinkInfo), calleeGPR).call();
jit.jump().linkTo(mainPath, &jit);
callLinkInfo->setExecutableDuringCompilation(executable);
@@ -9964,7 +9951,7 @@
return;
}
- CallLinkInfo* callLinkInfo = jit.codeBlock()->addCallLinkInfo(semanticNodeOrigin);
+ CallLinkInfo* callLinkInfo = jit.codeBlock()->addCallLinkInfo(node->origin.semantic);
callLinkInfo->setUpCall(
isConstruct ? CallLinkInfo::DirectConstruct : CallLinkInfo::DirectCall, InvalidGPRReg);
@@ -9991,7 +9978,7 @@
callOperation(
*state, params.unavailableRegisters(), jit,
- semanticNodeOrigin, exceptions.get(), operationLinkDirectCall,
+ node->origin.semantic, exceptions.get(), operationLinkDirectCall,
InvalidGPRReg, CCallHelpers::TrustedImmPtr(callLinkInfo),
calleeGPR).call();
jit.jump().linkTo(mainPath, &jit);
@@ -10067,7 +10054,6 @@
CodeOrigin codeOrigin = codeOriginDescriptionOfCallSite();
State* state = &m_ftlState;
VM* vm = &this->vm();
- CodeOrigin semanticNodeOrigin = node->origin.semantic;
patchpoint->setGenerator(
[=] (CCallHelpers& jit, const StackmapGenerationParams& params) {
AllowMacroScratchRegisterUsage allowScratch(jit);
@@ -10105,7 +10091,7 @@
slowPathShuffler.setCalleeJSValueRegs(JSValueRegs(GPRInfo::regT0));
slowPathShuffler.prepareForSlowPath();
- jit.move(CCallHelpers::TrustedImmPtr(jit.codeBlock()->globalObjectFor(semanticNodeOrigin)), GPRInfo::regT3);
+ jit.move(CCallHelpers::TrustedImmPtr(jit.codeBlock()->globalObjectFor(node->origin.semantic)), GPRInfo::regT3);
callLinkInfo->emitSlowPath(*vm, jit);
auto doneLocation = jit.label();
@@ -10119,60 +10105,7 @@
});
});
}
-
- struct CapturedForPhantomNewArrayWithSpreadCase {
- unsigned parameterOffset;
- };
- struct CapturedForPhantomNewArrayBufferCase {
- int64_t value;
- int32_t currentStoreOffset;
- };
- struct CapturedForPhantomNewArrayBufferEnd {
- unsigned arrayLength;
- };
- struct CapturedForPhantomCreateRest {
- InlineCallFrame* inlineCallFrame;
- unsigned numberOfArgumentsToSkip;
- unsigned parameterOffset;
- };
- struct VarargsSpreadArgumentToEmit {
- enum Type {
- PhantomNewArrayWithSpreadCase,
- PhantomNewArrayBufferCase,
- PhantomNewArrayBufferEnd,
- PhantomCreateRest
- } m_type;
- union {
- CapturedForPhantomNewArrayWithSpreadCase m_phantomNewArrayWithSpreadCase;
- CapturedForPhantomNewArrayBufferCase m_phantomNewArrayBufferCase;
- CapturedForPhantomNewArrayBufferEnd m_phantomNewArrayBufferEnd;
- CapturedForPhantomCreateRest m_phantomCreateRest;
- };
-
- VarargsSpreadArgumentToEmit(VarargsSpreadArgumentToEmit::Type t, unsigned arg)
- : m_type(t)
- {
- if (m_type == PhantomNewArrayWithSpreadCase)
- m_phantomNewArrayWithSpreadCase = { arg };
- else {
- ASSERT(t == PhantomNewArrayBufferEnd);
- m_phantomNewArrayBufferEnd = { arg };
- }
- }
- VarargsSpreadArgumentToEmit(VarargsSpreadArgumentToEmit::Type t, int64_t value, int32_t currentStoreOffset)
- : m_type(t)
- , m_phantomNewArrayBufferCase({ value, currentStoreOffset })
- {
- ASSERT(t == PhantomNewArrayBufferCase);
- }
- VarargsSpreadArgumentToEmit(VarargsSpreadArgumentToEmit::Type t, InlineCallFrame* inlineCallFrame, unsigned numberOfArgumentsToSkip, unsigned parameterOffset)
- : m_type(t)
- , m_phantomCreateRest({ inlineCallFrame, numberOfArgumentsToSkip, parameterOffset })
- {
- ASSERT(t == PhantomCreateRest);
- }
- };
-
+
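The deleted `VarargsSpreadArgumentToEmit` is a hand-rolled tagged union: an enum discriminant, an anonymous union of per-case payload structs, and one constructor per case that asserts the tag. The same shape in miniature using `std::variant`, which supplies the tag checking for free (a sketch of the idiom, not a drop-in replacement):

```cpp
#include <cstdint>
#include <iostream>
#include <type_traits>
#include <variant>

struct SpreadCase { unsigned parameterOffset; };
struct BufferCase { int64_t value; int32_t storeOffset; };

using ArgumentToEmit = std::variant<SpreadCase, BufferCase>;

int main()
{
    ArgumentToEmit arg = BufferCase { 42, -8 };
    std::visit([](const auto& payload) {
        using T = std::decay_t<decltype(payload)>;
        if constexpr (std::is_same_v<T, SpreadCase>)
            std::cout << "spread at parameter " << payload.parameterOffset << '\n';
        else
            std::cout << "buffer value " << payload.value << '\n';
    }, arg);
}
```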
void compileCallOrConstructVarargsSpread()
{
Node* node = m_node;
@@ -10187,20 +10120,13 @@
Vector<LValue, 2> spreadLengths;
Vector<LValue, 8> patchpointArguments;
HashMap<InlineCallFrame*, LValue, WTF::DefaultHash<InlineCallFrame*>, WTF::NullableHashTraits<InlineCallFrame*>> cachedSpreadLengths;
- // Because the patchpoint generator runs late in Air, the dfg graph will be long gone.
- // So we must load everything relevant right now, and make sure that they are captured by value by the lambda that acts as the generator
- // One particularly tricky point is that the generator would like to walk over the tree rooted at this node, exploring through PhantomNewArrayWithSpread and PhantomNewArrayBuffer, emitting code along the way.
- // Instead, we do that walk here, and record just enough information in the following vector to emit the right code at the end of Air.
- Vector<VarargsSpreadArgumentToEmit> argumentsToEmitFromRightToLeft;
- int storeOffset = CallFrame::thisArgumentOffset() * static_cast<int>(sizeof(Register));
- unsigned paramsOffset = 4;
- unsigned index = 0;
auto pushAndCountArgumentsFromRightToLeft = recursableLambda([&](auto self, Node* target) -> void {
- switch (target->op()) {
- case PhantomSpread:
+ if (target->op() == PhantomSpread) {
self(target->child1().node());
return;
- case PhantomNewArrayWithSpread: {
+ }
+
+ if (target->op() == PhantomNewArrayWithSpread) {
BitVector* bitVector = target->bitVector();
for (unsigned i = target->numChildren(); i--; ) {
if (bitVector->get(i))
@@ -10209,45 +10135,27 @@
++staticArgumentCount;
LValue argument = this->lowJSValue(m_graph.varArgChild(target, i));
patchpointArguments.append(argument);
- argumentsToEmitFromRightToLeft.append({ VarargsSpreadArgumentToEmit::Type::PhantomNewArrayWithSpreadCase, paramsOffset + (index++)});
}
}
return;
}
- case PhantomNewArrayBuffer: {
- auto* array = target->castOperand<JSImmutableButterfly*>();
- unsigned arrayLength = array->length();
- staticArgumentCount += arrayLength;
- Checked<int32_t> offsetCount { 1 };
- for (unsigned i = arrayLength; i--; ++offsetCount) {
- Checked<int32_t> currentStoreOffset { storeOffset };
- currentStoreOffset -= (offsetCount * static_cast<int32_t>(sizeof(Register)));
- // Because varargs values are drained as JSValue, we should not generate value
- // in Double form even if PhantomNewArrayBuffer's indexingType is ArrayWithDouble.
- int64_t value = JSValue::encode(array->get(i));
- argumentsToEmitFromRightToLeft.append({ VarargsSpreadArgumentToEmit::Type::PhantomNewArrayBufferCase, value, currentStoreOffset.value() });
- }
- argumentsToEmitFromRightToLeft.append({ VarargsSpreadArgumentToEmit::Type::PhantomNewArrayBufferEnd, arrayLength });
+
+ if (target->op() == PhantomNewArrayBuffer) {
+ staticArgumentCount += target->castOperand<JSImmutableButterfly*>()->length();
return;
}
- case PhantomCreateRest: {
- InlineCallFrame* inlineCallFrame = target->origin.semantic.inlineCallFrame();
- unsigned numberOfArgumentsToSkip = target->numberOfArgumentsToSkip();
- unsigned parameterOffset = paramsOffset + (index++);
- LValue length = cachedSpreadLengths.ensure(inlineCallFrame, [&] () {
- return m_out.zeroExtPtr(this->getSpreadLengthFromInlineCallFrame(inlineCallFrame, numberOfArgumentsToSkip));
- }).iterator->value;
- patchpointArguments.append(length);
- spreadLengths.append(length);
- argumentsToEmitFromRightToLeft.append({ VarargsSpreadArgumentToEmit::Type::PhantomCreateRest, inlineCallFrame, numberOfArgumentsToSkip, parameterOffset });
- return;
- }
- default:
- RELEASE_ASSERT_NOT_REACHED();
- }
+
+ RELEASE_ASSERT(target->op() == PhantomCreateRest);
+ InlineCallFrame* inlineCallFrame = target->origin.semantic.inlineCallFrame();
+ unsigned numberOfArgumentsToSkip = target->numberOfArgumentsToSkip();
+ LValue length = cachedSpreadLengths.ensure(inlineCallFrame, [&] () {
+ return m_out.zeroExtPtr(this->getSpreadLengthFromInlineCallFrame(inlineCallFrame, numberOfArgumentsToSkip));
+ }).iterator->value;
+ patchpointArguments.append(length);
+ spreadLengths.append(length);
});
+
pushAndCountArgumentsFromRightToLeft(arguments);
-
LValue argumentCountIncludingThis = m_out.constIntPtr(staticArgumentCount + 1);
for (LValue length : spreadLengths)
argumentCountIncludingThis = m_out.add(length, argumentCountIncludingThis);
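`recursableLambda` (a WTF helper) lets a lambda call itself through an explicit `self` parameter; both this counting walk and the later emission walk use it to recurse through `PhantomSpread` children. A hypothetical reimplementation of the trick, enough to show how `self(...)` re-enters the body:

```cpp
#include <iostream>
#include <utility>

template<typename Functor>
struct RecursableLambda {
    Functor functor;
    template<typename... Args>
    decltype(auto) operator()(Args&&... args)
    {
        // Pass ourselves as the first argument so the body can recurse.
        return functor(*this, std::forward<Args>(args)...);
    }
};

template<typename Functor>
RecursableLambda<Functor> recursableLambda(Functor functor) { return { std::move(functor) }; }

int main()
{
    auto factorial = recursableLambda([](auto self, int n) -> int {
        return n <= 1 ? 1 : n * self(n - 1);
    });
    std::cout << factorial(5) << '\n'; // 120
}
```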
@@ -10275,14 +10183,12 @@
WTF::roundUpToMultipleOf(stackAlignmentBytes(), 5 * sizeof(EncodedJSValue));
m_proc.requestCallArgAreaSizeInBytes(minimumJSCallAreaSize);
-
+
CodeOrigin codeOrigin = codeOriginDescriptionOfCallSite();
State* state = &m_ftlState;
VM* vm = &this->vm();
- CodeOrigin semanticNodeOrigin = node->origin.semantic;
- auto nodeOp = node->op();
patchpoint->setGenerator(
- [=, argumentsToEmit = WTFMove(argumentsToEmitFromRightToLeft)] (CCallHelpers& jit, const StackmapGenerationParams& params) {
+ [=] (CCallHelpers& jit, const StackmapGenerationParams& params) {
AllowMacroScratchRegisterUsage allowScratch(jit);
CallSiteIndex callSiteIndex =
state->jitCode->common.codeOrigins->addUniqueCallSiteIndex(codeOrigin);
@@ -10296,7 +10202,7 @@
CCallHelpers::TrustedImm32(callSiteIndex.bits()),
CCallHelpers::tagFor(VirtualRegister(CallFrameSlot::argumentCountIncludingThis)));
- CallLinkInfo* callLinkInfo = jit.codeBlock()->addCallLinkInfo(semanticNodeOrigin);
+ CallLinkInfo* callLinkInfo = jit.codeBlock()->addCallLinkInfo(node->origin.semantic);
RegisterSet usedRegisters = RegisterSet::allRegisters();
usedRegisters.exclude(RegisterSet::volatileRegistersForJSCall());
@@ -10360,54 +10266,74 @@
jit.store32(scratchGPR2, CCallHelpers::Address(scratchGPR1, CallFrameSlot::argumentCountIncludingThis * static_cast<int>(sizeof(Register)) + PayloadOffset));
- for (const auto& argumentToEmit : argumentsToEmit) {
- switch (argumentToEmit.m_type) {
- case VarargsSpreadArgumentToEmit::PhantomNewArrayWithSpreadCase: {
- unsigned parameterOffset = argumentToEmit.m_phantomNewArrayWithSpreadCase.parameterOffset;
- jit.subPtr(CCallHelpers::TrustedImmPtr(static_cast<size_t>(1)), scratchGPR2);
- getValueFromRep(params[parameterOffset], scratchGPR3);
- jit.store64(scratchGPR3, CCallHelpers::BaseIndex(scratchGPR1, scratchGPR2, CCallHelpers::TimesEight, storeOffset));
- continue;
+ int storeOffset = CallFrame::thisArgumentOffset() * static_cast<int>(sizeof(Register));
+
+ unsigned paramsOffset = 4;
+ unsigned index = 0;
+ auto emitArgumentsFromRightToLeft = recursableLambda([&](auto self, Node* target) -> void {
+ if (target->op() == PhantomSpread) {
+ self(target->child1().node());
+ return;
}
- case VarargsSpreadArgumentToEmit::PhantomNewArrayBufferCase: {
- int64_t value = argumentToEmit.m_phantomNewArrayBufferCase.value;
- int32_t currentStoreOffset = argumentToEmit.m_phantomNewArrayBufferCase.currentStoreOffset;
- jit.move(CCallHelpers::TrustedImm64(value), scratchGPR3);
- jit.store64(scratchGPR3, CCallHelpers::BaseIndex(scratchGPR1, scratchGPR2, CCallHelpers::TimesEight, currentStoreOffset));
- continue;
+
+ if (target->op() == PhantomNewArrayWithSpread) {
+ BitVector* bitVector = target->bitVector();
+ for (unsigned i = target->numChildren(); i--; ) {
+ if (bitVector->get(i))
+ self(state->graph.varArgChild(target, i).node());
+ else {
+ jit.subPtr(CCallHelpers::TrustedImmPtr(static_cast<size_t>(1)), scratchGPR2);
+ getValueFromRep(params[paramsOffset + (index++)], scratchGPR3);
+ jit.store64(scratchGPR3,
+ CCallHelpers::BaseIndex(scratchGPR1, scratchGPR2, CCallHelpers::TimesEight, storeOffset));
+ }
+ }
+ return;
}
- case VarargsSpreadArgumentToEmit::PhantomNewArrayBufferEnd: {
- size_t arrayLength = static_cast<size_t>(argumentToEmit.m_phantomNewArrayBufferEnd.arrayLength);
- jit.subPtr(CCallHelpers::TrustedImmPtr(arrayLength), scratchGPR2);
- continue;
+
+ if (target->op() == PhantomNewArrayBuffer) {
+ auto* array = target->castOperand<JSImmutableButterfly*>();
+ Checked<int32_t> offsetCount { 1 };
+ for (unsigned i = array->length(); i--; ++offsetCount) {
+ // Because varargs values are drained as JSValue, we should not generate value
+ // in Double form even if PhantomNewArrayBuffer's indexingType is ArrayWithDouble.
+ int64_t value = JSValue::encode(array->get(i));
+ jit.move(CCallHelpers::TrustedImm64(value), scratchGPR3);
+ Checked<int32_t> currentStoreOffset { storeOffset };
+ currentStoreOffset -= (offsetCount * static_cast<int32_t>(sizeof(Register)));
+ jit.store64(scratchGPR3,
+ CCallHelpers::BaseIndex(scratchGPR1, scratchGPR2, CCallHelpers::TimesEight, currentStoreOffset));
+ }
+ jit.subPtr(CCallHelpers::TrustedImmPtr(static_cast<size_t>(array->length())), scratchGPR2);
+ return;
}
- case VarargsSpreadArgumentToEmit::PhantomCreateRest: {
- InlineCallFrame* inlineCallFrame = argumentToEmit.m_phantomCreateRest.inlineCallFrame;
- unsigned numberOfArgumentsToSkip = argumentToEmit.m_phantomCreateRest.numberOfArgumentsToSkip;
- unsigned parameterOffset = argumentToEmit.m_phantomCreateRest.parameterOffset;
- B3::ValueRep numArgumentsToCopy = params[parameterOffset];
- getValueFromRep(numArgumentsToCopy, scratchGPR3);
- int loadOffset = (AssemblyHelpers::argumentsStart(inlineCallFrame).offset() + numberOfArgumentsToSkip) * static_cast<int>(sizeof(Register));
+ RELEASE_ASSERT(target->op() == PhantomCreateRest);
+ InlineCallFrame* inlineCallFrame = target->origin.semantic.inlineCallFrame();
- auto done = jit.branchTestPtr(MacroAssembler::Zero, scratchGPR3);
- auto loopStart = jit.label();
- jit.subPtr(CCallHelpers::TrustedImmPtr(static_cast<size_t>(1)), scratchGPR3);
- jit.subPtr(CCallHelpers::TrustedImmPtr(static_cast<size_t>(1)), scratchGPR2);
- jit.load64(CCallHelpers::BaseIndex(GPRInfo::callFrameRegister, scratchGPR3, CCallHelpers::TimesEight, loadOffset), scratchGPR4);
- jit.store64(scratchGPR4,
- CCallHelpers::BaseIndex(scratchGPR1, scratchGPR2, CCallHelpers::TimesEight, storeOffset));
- jit.branchTestPtr(CCallHelpers::NonZero, scratchGPR3).linkTo(loopStart, &jit);
- done.link(&jit);
- }
- }
- }
+ unsigned numberOfArgumentsToSkip = target->numberOfArgumentsToSkip();
+
+ B3::ValueRep numArgumentsToCopy = params[paramsOffset + (index++)];
+ getValueFromRep(numArgumentsToCopy, scratchGPR3);
+ int loadOffset = (AssemblyHelpers::argumentsStart(inlineCallFrame).offset() + numberOfArgumentsToSkip) * static_cast<int>(sizeof(Register));
+
+ auto done = jit.branchTestPtr(MacroAssembler::Zero, scratchGPR3);
+ auto loopStart = jit.label();
+ jit.subPtr(CCallHelpers::TrustedImmPtr(static_cast<size_t>(1)), scratchGPR3);
+ jit.subPtr(CCallHelpers::TrustedImmPtr(static_cast<size_t>(1)), scratchGPR2);
+ jit.load64(CCallHelpers::BaseIndex(GPRInfo::callFrameRegister, scratchGPR3, CCallHelpers::TimesEight, loadOffset), scratchGPR4);
+ jit.store64(scratchGPR4,
+ CCallHelpers::BaseIndex(scratchGPR1, scratchGPR2, CCallHelpers::TimesEight, storeOffset));
+ jit.branchTestPtr(CCallHelpers::NonZero, scratchGPR3).linkTo(loopStart, &jit);
+ done.link(&jit);
+ });
+ emitArgumentsFromRightToLeft(arguments);
}
{
CCallHelpers::Jump dontThrow = jit.jump();
slowCase.link(&jit);
- jit.setupArguments<decltype(operationThrowStackOverflowForVarargs)>(jit.codeBlock()->globalObjectFor(semanticNodeOrigin));
+ jit.setupArguments<decltype(operationThrowStackOverflowForVarargs)>(jit.codeBlock()->globalObjectFor(node->origin.semantic));
jit.prepareCallOperation(jit.vm());
callWithExceptionCheck(operationThrowStackOverflowForVarargs);
jit.abortWithReason(DFGVarargsThrowingPathDidNotThrow);
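The restored `PhantomNewArrayBuffer` path computes its store offsets through `Checked<int32_t>`, WTF's overflow-trapping integer wrapper, rather than letting the offset arithmetic silently wrap. A minimal flavor of that guard, assuming the GCC/Clang overflow builtins (my sketch, not WTF's `Checked`):

```cpp
#include <cstdint>
#include <cstdio>
#include <cstdlib>

// Sketch of a Checked<int32_t> subtraction: abort on overflow, never wrap.
static int32_t checkedSub(int32_t a, int32_t b)
{
    int32_t result;
    if (__builtin_sub_overflow(a, b, &result)) {
        std::fprintf(stderr, "arithmetic overflow\n");
        std::abort();
    }
    return result;
}

int main()
{
    int32_t storeOffset = -16;
    int32_t offsetCount = 3;
    // Mirrors: currentStoreOffset -= offsetCount * sizeof(Register), Register being 8 bytes.
    std::printf("%d\n", checkedSub(storeOffset, offsetCount * 8)); // -40
}
```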
@@ -10421,9 +10347,9 @@
jit.store64(scratchGPR3, CCallHelpers::calleeArgumentSlot(0));
CallLinkInfo::CallType callType;
- if (nodeOp == ConstructVarargs || nodeOp == ConstructForwardVarargs)
+ if (node->op() == ConstructVarargs || node->op() == ConstructForwardVarargs)
callType = CallLinkInfo::ConstructVarargs;
- else if (nodeOp == TailCallVarargs || nodeOp == TailCallForwardVarargs)
+ else if (node->op() == TailCallVarargs || node->op() == TailCallForwardVarargs)
callType = CallLinkInfo::TailCallVarargs;
else
callType = CallLinkInfo::CallVarargs;
@@ -10451,7 +10377,7 @@
if (isTailCall)
jit.emitRestoreCalleeSaves();
- jit.move(CCallHelpers::TrustedImmPtr(jit.codeBlock()->globalObjectFor(semanticNodeOrigin)), GPRInfo::regT3);
+ jit.move(CCallHelpers::TrustedImmPtr(jit.codeBlock()->globalObjectFor(node->origin.semantic)), GPRInfo::regT3);
callLinkInfo->emitSlowPath(*vm, jit);
if (isTailCall)
@@ -10557,14 +10483,6 @@
CodeOrigin codeOrigin = codeOriginDescriptionOfCallSite();
State* state = &m_ftlState;
VM* vm = &this->vm();
- CodeOrigin semanticNodeOrigin = node->origin.semantic;
- InlineCallFrame* inlineCallFrame;
- if (node->child3())
- inlineCallFrame = node->child3()->origin.semantic.inlineCallFrame();
- else
- inlineCallFrame = semanticNodeOrigin.inlineCallFrame();
- CallVarargsData* data = ""
- auto nodeOp = node->op();
patchpoint->setGenerator(
[=] (CCallHelpers& jit, const StackmapGenerationParams& params) {
AllowMacroScratchRegisterUsage allowScratch(jit);
@@ -10580,7 +10498,8 @@
CCallHelpers::TrustedImm32(callSiteIndex.bits()),
CCallHelpers::tagFor(VirtualRegister(CallFrameSlot::argumentCountIncludingThis)));
- CallLinkInfo* callLinkInfo = jit.codeBlock()->addCallLinkInfo(semanticNodeOrigin);
+ CallLinkInfo* callLinkInfo = jit.codeBlock()->addCallLinkInfo(node->origin.semantic);
+ CallVarargsData* data = ""
unsigned argIndex = 1;
GPRReg calleeGPR = params[argIndex++].gpr();
@@ -10664,6 +10583,11 @@
jit.move(CCallHelpers::TrustedImm32(originalStackHeight / sizeof(EncodedJSValue)), scratchGPR2);
CCallHelpers::JumpList slowCase;
+ InlineCallFrame* inlineCallFrame;
+ if (node->child3())
+ inlineCallFrame = node->child3()->origin.semantic.inlineCallFrame();
+ else
+ inlineCallFrame = node->origin.semantic.inlineCallFrame();
// emitSetupVarargsFrameFastCase modifies the stack pointer if it succeeds.
emitSetupVarargsFrameFastCase(*vm, jit, scratchGPR2, scratchGPR1, scratchGPR2, scratchGPR3, inlineCallFrame, data->firstVarArgOffset, slowCase);
@@ -10670,7 +10594,7 @@
CCallHelpers::Jump done = jit.jump();
slowCase.link(&jit);
- jit.setupArguments<decltype(operationThrowStackOverflowForVarargs)>(jit.codeBlock()->globalObjectFor(semanticNodeOrigin));
+ jit.setupArguments<decltype(operationThrowStackOverflowForVarargs)>(jit.codeBlock()->globalObjectFor(node->origin.semantic));
jit.prepareCallOperation(jit.vm());
callWithExceptionCheck(bitwise_cast<void(*)()>(operationThrowStackOverflowForVarargs));
jit.abortWithReason(DFGVarargsThrowingPathDidNotThrow);
@@ -10678,7 +10602,7 @@
done.link(&jit);
} else {
jit.move(CCallHelpers::TrustedImm32(originalStackHeight / sizeof(EncodedJSValue)), scratchGPR1);
- jit.setupArguments<decltype(operationSizeFrameForVarargs)>(jit.codeBlock()->globalObjectFor(semanticNodeOrigin), argumentsGPR, scratchGPR1, CCallHelpers::TrustedImm32(data->firstVarArgOffset));
+ jit.setupArguments<decltype(operationSizeFrameForVarargs)>(jit.codeBlock()->globalObjectFor(node->origin.semantic), argumentsGPR, scratchGPR1, CCallHelpers::TrustedImm32(data->firstVarArgOffset));
jit.prepareCallOperation(jit.vm());
callWithExceptionCheck(bitwise_cast<void(*)()>(operationSizeFrameForVarargs));
@@ -10687,7 +10611,7 @@
argumentsLateRep.emitRestore(jit, argumentsGPR);
emitSetVarargsFrame(jit, scratchGPR1, false, scratchGPR2, scratchGPR2);
jit.addPtr(CCallHelpers::TrustedImm32(-minimumJSCallAreaSize), scratchGPR2, CCallHelpers::stackPointerRegister);
- jit.setupArguments<decltype(operationSetupVarargsFrame)>(jit.codeBlock()->globalObjectFor(semanticNodeOrigin), scratchGPR2, argumentsGPR, CCallHelpers::TrustedImm32(data->firstVarArgOffset), scratchGPR1);
+ jit.setupArguments<decltype(operationSetupVarargsFrame)>(jit.codeBlock()->globalObjectFor(node->origin.semantic), scratchGPR2, argumentsGPR, CCallHelpers::TrustedImm32(data->firstVarArgOffset), scratchGPR1);
jit.prepareCallOperation(jit.vm());
callWithExceptionCheck(bitwise_cast<void(*)()>(operationSetupVarargsFrame));
@@ -10704,9 +10628,9 @@
jit.store64(thisGPR, CCallHelpers::calleeArgumentSlot(0));
CallLinkInfo::CallType callType;
- if (nodeOp == ConstructVarargs || nodeOp == ConstructForwardVarargs)
+ if (node->op() == ConstructVarargs || node->op() == ConstructForwardVarargs)
callType = CallLinkInfo::ConstructVarargs;
- else if (nodeOp == TailCallVarargs || nodeOp == TailCallForwardVarargs)
+ else if (node->op() == TailCallVarargs || node->op() == TailCallForwardVarargs)
callType = CallLinkInfo::TailCallVarargs;
else
callType = CallLinkInfo::CallVarargs;
@@ -10732,7 +10656,7 @@
if (isTailCall)
jit.emitRestoreCalleeSaves();
- jit.move(CCallHelpers::TrustedImmPtr(jit.codeBlock()->globalObjectFor(semanticNodeOrigin)), GPRInfo::regT3);
+ jit.move(CCallHelpers::TrustedImmPtr(jit.codeBlock()->globalObjectFor(node->origin.semantic)), GPRInfo::regT3);
callLinkInfo->emitSlowPath(*vm, jit);
if (isTailCall)
@@ -10806,8 +10730,6 @@
CodeOrigin codeOrigin = codeOriginDescriptionOfCallSite();
State* state = &m_ftlState;
VM& vm = this->vm();
- CodeOrigin semanticNodeOrigin = node->origin.semantic;
- auto ecmaMode = node->ecmaMode().value();
JSGlobalObject* globalObject = m_graph.globalObjectFor(m_origin.semantic);
patchpoint->setGenerator(
[=, &vm] (CCallHelpers& jit, const StackmapGenerationParams& params) {
@@ -10822,7 +10744,7 @@
CCallHelpers::TrustedImm32(callSiteIndex.bits()),
CCallHelpers::tagFor(VirtualRegister(CallFrameSlot::argumentCountIncludingThis)));
- CallLinkInfo* callLinkInfo = jit.codeBlock()->addCallLinkInfo(semanticNodeOrigin);
+ CallLinkInfo* callLinkInfo = jit.codeBlock()->addCallLinkInfo(node->origin.semantic);
callLinkInfo->setUpCall(CallLinkInfo::Call, GPRInfo::regT0);
jit.addPtr(CCallHelpers::TrustedImm32(-static_cast<ptrdiff_t>(sizeof(CallerFrameAndPC))), CCallHelpers::stackPointerRegister, GPRInfo::regT1);
@@ -10834,7 +10756,7 @@
unsigned requiredBytes = sizeof(CallerFrameAndPC) + sizeof(CallFrame*) * 2;
requiredBytes = WTF::roundUpToMultipleOf(stackAlignmentBytes(), requiredBytes);
jit.subPtr(CCallHelpers::TrustedImm32(requiredBytes), CCallHelpers::stackPointerRegister);
- jit.move(CCallHelpers::TrustedImm32(ecmaMode), GPRInfo::regT2);
+ jit.move(CCallHelpers::TrustedImm32(node->ecmaMode().value()), GPRInfo::regT2);
jit.setupArguments<decltype(operationCallEval)>(globalObject, GPRInfo::regT1, GPRInfo::regT2);
jit.prepareCallOperation(vm);
jit.move(CCallHelpers::TrustedImmPtr(tagCFunction<OperationPtrTag>(operationCallEval)), GPRInfo::nonPreservedNonArgumentGPR0);
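The eval path above rounds its reserved stack bytes up to `stackAlignmentBytes()` before adjusting the stack pointer. The rounding helper itself is the usual round-up-to-a-multiple computation, sketched here (assuming nothing beyond integer arithmetic; `WTF::roundUpToMultipleOf` is the real helper):

```cpp
#include <cstddef>
#include <cstdio>

// Round n up to the next multiple of `alignment`.
constexpr size_t roundUpToMultipleOf(size_t alignment, size_t n)
{
    return ((n + alignment - 1) / alignment) * alignment;
}

int main()
{
    // e.g. 16-byte stack alignment, as in typical x86_64 / ARM64 calling conventions.
    std::printf("%zu\n", roundUpToMultipleOf(16, 40)); // 48
}
```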
@@ -12383,12 +12305,12 @@
RefPtr<PatchpointExceptionHandle> exceptionHandle = preparePatchpointForExceptions(patchpoint);
State* state = &m_ftlState;
- CodeOrigin semanticNodeOrigin = m_node->origin.semantic;
+ Node* node = m_node;
patchpoint->setGenerator(
[=] (CCallHelpers& jit, const StackmapGenerationParams& params) {
AllowMacroScratchRegisterUsage allowScratch(jit);
- CallSiteIndex callSiteIndex = state->jitCode->common.codeOrigins->addUniqueCallSiteIndex(semanticNodeOrigin);
+ CallSiteIndex callSiteIndex = state->jitCode->common.codeOrigins->addUniqueCallSiteIndex(node->origin.semantic);
// This is the direct exit target for operation calls.
Box<CCallHelpers::JumpList> exceptions = exceptionHandle->scheduleExitCreation(params)->jumps(jit);
@@ -12406,12 +12328,12 @@
const auto generator = [&] {
if constexpr (kind == InByKind::Normal) {
return Box<JITInByIdGenerator>::create(
- jit.codeBlock(), semanticNodeOrigin, callSiteIndex,
+ jit.codeBlock(), node->origin.semantic, callSiteIndex,
params.unavailableRegisters(), subscriptValue, base,
JSValueRegs(returnGPR));
} else {
return Box<JITInByValGenerator>::create(
- jit.codeBlock(), semanticNodeOrigin, callSiteIndex,
+ jit.codeBlock(), node->origin.semantic, callSiteIndex,
params.unavailableRegisters(), base, subscript,
JSValueRegs(returnGPR));
}
@@ -12429,16 +12351,16 @@
CCallHelpers::Call slowPathCall;
if constexpr (kind == InByKind::Normal) {
slowPathCall = callOperation(
- *state, params.unavailableRegisters(), jit, semanticNodeOrigin,
+ *state, params.unavailableRegisters(), jit, node->origin.semantic,
exceptions.get(), operationInByIdOptimize, returnGPR,
- jit.codeBlock()->globalObjectFor(semanticNodeOrigin),
+ jit.codeBlock()->globalObjectFor(node->origin.semantic),
CCallHelpers::TrustedImmPtr(generator->stubInfo()),
base, subscript).call();
} else {
slowPathCall = callOperation(
- *state, params.unavailableRegisters(), jit, semanticNodeOrigin,
+ *state, params.unavailableRegisters(), jit, node->origin.semantic,
exceptions.get(), operationInByValOptimize, returnGPR,
- jit.codeBlock()->globalObjectFor(semanticNodeOrigin),
+ jit.codeBlock()->globalObjectFor(node->origin.semantic),
CCallHelpers::TrustedImmPtr(generator->stubInfo()),
CCallHelpers::TrustedImmPtr(nullptr), base, subscript).call();
}
@@ -12665,7 +12587,6 @@
RefPtr<PatchpointExceptionHandle> exceptionHandle =
preparePatchpointForExceptions(patchpoint);
- CodeOrigin semanticNodeOrigin = node->origin.semantic;
patchpoint->setGenerator(
[=] (CCallHelpers& jit, const StackmapGenerationParams& params) {
AllowMacroScratchRegisterUsage allowScratch(jit);
@@ -12689,7 +12610,7 @@
slowCases.append(jit.branchIfNotCell(prototypeGPR));
CallSiteIndex callSiteIndex =
- state->jitCode->common.codeOrigins->addUniqueCallSiteIndex(semanticNodeOrigin);
+ state->jitCode->common.codeOrigins->addUniqueCallSiteIndex(node->origin.semantic);
// This is the direct exit target for operation calls.
Box<CCallHelpers::JumpList> exceptions =
@@ -12696,7 +12617,7 @@
exceptionHandle->scheduleExitCreation(params)->jumps(jit);
auto generator = Box<JITInstanceOfGenerator>::create(
- jit.codeBlock(), semanticNodeOrigin, callSiteIndex,
+ jit.codeBlock(), node->origin.semantic, callSiteIndex,
params.unavailableRegisters(), resultGPR, valueGPR, prototypeGPR, scratchGPR,
scratch2GPR, prototypeIsObject);
generator->generateFastPath(jit);
@@ -12711,9 +12632,9 @@
slowCases.link(&jit);
CCallHelpers::Label slowPathBegin = jit.label();
CCallHelpers::Call slowPathCall = callOperation(
- *state, params.unavailableRegisters(), jit, semanticNodeOrigin,
+ *state, params.unavailableRegisters(), jit, node->origin.semantic,
exceptions.get(), optimizationFunction, resultGPR,
- jit.codeBlock()->globalObjectFor(semanticNodeOrigin),
+ jit.codeBlock()->globalObjectFor(node->origin.semantic),
CCallHelpers::TrustedImmPtr(generator->stubInfo()), valueGPR,
prototypeGPR).call();
jit.jump().linkTo(done, &jit);
@@ -14171,13 +14092,12 @@
preparePatchpointForExceptions(patchpoint);
State* state = &m_ftlState;
- CodeOrigin semanticNodeOrigin = node->origin.semantic;
patchpoint->setGenerator(
[=] (CCallHelpers& jit, const StackmapGenerationParams& params) {
AllowMacroScratchRegisterUsage allowScratch(jit);
CallSiteIndex callSiteIndex =
- state->jitCode->common.codeOrigins->addUniqueCallSiteIndex(semanticNodeOrigin);
+ state->jitCode->common.codeOrigins->addUniqueCallSiteIndex(node->origin.semantic);
// This is the direct exit target for operation calls.
Box<CCallHelpers::JumpList> exceptions =
@@ -14189,7 +14109,7 @@
exceptionHandle->scheduleExitCreationForUnwind(params, callSiteIndex);
auto generator = Box<JITGetByIdGenerator>::create(
- jit.codeBlock(), semanticNodeOrigin, callSiteIndex,
+ jit.codeBlock(), node->origin.semantic, callSiteIndex,
params.unavailableRegisters(), identifier, JSValueRegs(params[1].gpr()),
JSValueRegs(params[0].gpr()), type);
@@ -14205,9 +14125,9 @@
generator->slowPathJump().link(&jit);
CCallHelpers::Label slowPathBegin = jit.label();
CCallHelpers::Call slowPathCall = callOperation(
- *state, params.unavailableRegisters(), jit, semanticNodeOrigin,
+ *state, params.unavailableRegisters(), jit, node->origin.semantic,
exceptions.get(), optimizationFunction, params[0].gpr(),
- jit.codeBlock()->globalObjectFor(semanticNodeOrigin),
+ jit.codeBlock()->globalObjectFor(node->origin.semantic),
CCallHelpers::TrustedImmPtr(generator->stubInfo()), params[1].gpr(),
CCallHelpers::TrustedImmPtr(identifier.rawBits())).call();
jit.jump().linkTo(done, &jit);
@@ -14241,13 +14161,12 @@
preparePatchpointForExceptions(patchpoint);
State* state = &m_ftlState;
- CodeOrigin semanticNodeOrigin = node->origin.semantic;
patchpoint->setGenerator(
[=] (CCallHelpers& jit, const StackmapGenerationParams& params) {
AllowMacroScratchRegisterUsage allowScratch(jit);
CallSiteIndex callSiteIndex =
- state->jitCode->common.codeOrigins->addUniqueCallSiteIndex(semanticNodeOrigin);
+ state->jitCode->common.codeOrigins->addUniqueCallSiteIndex(node->origin.semantic);
// This is the direct exit target for operation calls.
Box<CCallHelpers::JumpList> exceptions =
@@ -14259,7 +14178,7 @@
exceptionHandle->scheduleExitCreationForUnwind(params, callSiteIndex);
auto generator = Box<JITGetByIdWithThisGenerator>::create(
- jit.codeBlock(), semanticNodeOrigin, callSiteIndex,
+ jit.codeBlock(), node->origin.semantic, callSiteIndex,
params.unavailableRegisters(), identifier, JSValueRegs(params[0].gpr()),
JSValueRegs(params[1].gpr()), JSValueRegs(params[2].gpr()));
@@ -14275,9 +14194,9 @@
generator->slowPathJump().link(&jit);
CCallHelpers::Label slowPathBegin = jit.label();
CCallHelpers::Call slowPathCall = callOperation(
- *state, params.unavailableRegisters(), jit, semanticNodeOrigin,
+ *state, params.unavailableRegisters(), jit, node->origin.semantic,
exceptions.get(), optimizationFunction, params[0].gpr(),
- jit.codeBlock()->globalObjectFor(semanticNodeOrigin),
+ jit.codeBlock()->globalObjectFor(node->origin.semantic),
CCallHelpers::TrustedImmPtr(generator->stubInfo()), params[1].gpr(),
params[2].gpr(), CCallHelpers::TrustedImmPtr(identifier.rawBits())).call();
jit.jump().linkTo(done, &jit);
@@ -14672,7 +14591,6 @@
State* state = &m_ftlState;
Node* node = m_node;
NodeType op = m_node->op();
- CodeOrigin semanticNodeOrigin = node->origin.semantic;
JSValue child1Constant = m_state.forNode(m_node->child1()).value();
auto nodeIndex = m_nodeIndexInGraph;
@@ -14694,7 +14612,7 @@
RefPtr<OSRExitHandle> handle = exitDescriptor->emitOSRExitLater(*state, BadType, origin, params, nodeIndex, osrExitArgumentOffset);
- SnippetParams domJITParams(*state, params, semanticNodeOrigin, nullptr, WTFMove(regs), WTFMove(gpScratch), WTFMove(fpScratch));
+ SnippetParams domJITParams(*state, params, node, nullptr, WTFMove(regs), WTFMove(gpScratch), WTFMove(fpScratch));
CCallHelpers::JumpList failureCases = domJIT->generator()->run(jit, domJITParams);
CCallHelpers::JumpList notJSCastFailureCases;
if (op == CheckNotJSCast) {
@@ -14814,7 +14732,6 @@
State* state = &m_ftlState;
Node* node = m_node;
- CodeOrigin semanticNodeOrigin = node->origin.semantic;
patchpoint->setGenerator(
[=] (CCallHelpers& jit, const StackmapGenerationParams& params) {
AllowMacroScratchRegisterUsage allowScratch(jit);
@@ -14836,7 +14753,7 @@
Box<CCallHelpers::JumpList> exceptions = exceptionHandle->scheduleExitCreation(params)->jumps(jit);
- SnippetParams domJITParams(*state, params, semanticNodeOrigin, exceptions, WTFMove(regs), WTFMove(gpScratch), WTFMove(fpScratch));
+ SnippetParams domJITParams(*state, params, node, exceptions, WTFMove(regs), WTFMove(gpScratch), WTFMove(fpScratch));
domJIT->generator()->run(jit, domJITParams);
});
patchpoint->effects = Effects::forCall();
@@ -15593,7 +15510,6 @@
patchpoint->clobber(RegisterSet::macroScratchRegisters());
patchpoint->resultConstraints = { ValueRep::SomeEarlyRegister };
State* state = &m_ftlState;
- CodeOrigin semanticNodeOrigin = node->origin.semantic;
patchpoint->setGenerator(
[=] (CCallHelpers& jit, const StackmapGenerationParams& params) {
AllowMacroScratchRegisterUsage allowScratch(jit);
@@ -15619,16 +15535,16 @@
generator->slowPathJumpList().link(&jit);
callOperation(
- *state, params.unavailableRegisters(), jit, semanticNodeOrigin,
+ *state, params.unavailableRegisters(), jit, node->origin.semantic,
exceptions.get(), slowPathFunction, params[0].gpr(),
- jit.codeBlock()->globalObjectFor(semanticNodeOrigin),
+ jit.codeBlock()->globalObjectFor(node->origin.semantic),
params[1].gpr(), params[2].gpr());
jit.jump().linkTo(done, &jit);
});
} else {
callOperation(
- *state, params.unavailableRegisters(), jit, semanticNodeOrigin,
- exceptions.get(), slowPathFunction, params[0].gpr(), jit.codeBlock()->globalObjectFor(semanticNodeOrigin), params[1].gpr(),
+ *state, params.unavailableRegisters(), jit, node->origin.semantic,
+ exceptions.get(), slowPathFunction, params[0].gpr(), jit.codeBlock()->globalObjectFor(node->origin.semantic), params[1].gpr(),
params[2].gpr());
}
});
@@ -15661,7 +15577,6 @@
patchpoint->clobber(RegisterSet::macroScratchRegisters());
patchpoint->resultConstraints = { ValueRep::SomeEarlyRegister };
State* state = &m_ftlState;
- CodeOrigin semanticNodeOrigin = node->origin.semantic;
patchpoint->setGenerator(
[=] (CCallHelpers& jit, const StackmapGenerationParams& params) {
AllowMacroScratchRegisterUsage allowScratch(jit);
@@ -15683,9 +15598,9 @@
generator->slowPathJumpList().link(&jit);
callOperation(
- *state, params.unavailableRegisters(), jit, semanticNodeOrigin,
+ *state, params.unavailableRegisters(), jit, node->origin.semantic,
exceptions.get(), slowPathFunction, params[0].gpr(),
- jit.codeBlock()->globalObjectFor(semanticNodeOrigin),
+ jit.codeBlock()->globalObjectFor(node->origin.semantic),
params[1].gpr(), params[2].gpr());
jit.jump().linkTo(done, &jit);
});
@@ -15719,7 +15634,6 @@
patchpoint->clobber(RegisterSet::macroScratchRegisters());
patchpoint->resultConstraints = { ValueRep::SomeEarlyRegister };
State* state = &m_ftlState;
- CodeOrigin semanticNodeOrigin = node->origin.semantic;
patchpoint->setGenerator(
[=] (CCallHelpers& jit, const StackmapGenerationParams& params) {
AllowMacroScratchRegisterUsage allowScratch(jit);
@@ -15747,9 +15661,9 @@
? operationValueBitRShift : operationValueBitURShift;
callOperation(
- *state, params.unavailableRegisters(), jit, semanticNodeOrigin,
+ *state, params.unavailableRegisters(), jit, node->origin.semantic,
exceptions.get(), slowPathFunction, params[0].gpr(),
- jit.codeBlock()->globalObjectFor(semanticNodeOrigin),
+ jit.codeBlock()->globalObjectFor(node->origin.semantic),
params[1].gpr(), params[2].gpr());
jit.jump().linkTo(done, &jit);
});
@@ -20092,7 +20006,6 @@
BlockIndex blockIndex = block->index;
unsigned nodeIndex = node ? node->index() : UINT_MAX;
#if !ASSERT_ENABLED
- auto nodeOp = node ? node->op() : LastNodeType;
m_out.patchpoint(Void)->setGenerator(
[=] (CCallHelpers& jit, const StackmapGenerationParams&) {
AllowMacroScratchRegisterUsage allowScratch(jit);
@@ -20100,7 +20013,7 @@
jit.move(CCallHelpers::TrustedImm32(blockIndex), GPRInfo::regT0);
jit.move(CCallHelpers::TrustedImm32(nodeIndex), GPRInfo::regT1);
if (node)
- jit.move(CCallHelpers::TrustedImm32(nodeOp), GPRInfo::regT2);
+ jit.move(CCallHelpers::TrustedImm32(node->op()), GPRInfo::regT2);
jit.abortWithReason(FTLCrash);
});
#else // ASSERT_ENABLED