Diff
Modified: branches/jsCStack/Source/JavaScriptCore/ChangeLog (160266 => 160267)
--- branches/jsCStack/Source/JavaScriptCore/ChangeLog 2013-12-07 02:55:23 UTC (rev 160266)
+++ branches/jsCStack/Source/JavaScriptCore/ChangeLog 2013-12-07 05:46:22 UTC (rev 160267)
@@ -1,5 +1,56 @@
2013-12-06 Michael Saboff <[email protected]>
+ CStack Branch: Enable basic JavaScript functionality in LLInt
+ https://bugs.webkit.org/show_bug.cgi?id=125378
+
+ Reviewed by Filip Pizlo.
+
+ This provides basic LLInt-only functionality for X86_64. It runs simple scripts.
+ There are several places where the code is tagged with "&&&& FIXME: ..." comments
+ as placeholders where more work needs to be done.
+
+ Added X86-compliant prologues / epilogues at the head and tail of each function.
+ Changed LLInt calls to leave the caller framePointer in the callFrame register and
+ pass the callee framePointer (SP - 16) in SP so that the callee's receiving prologue
+ will store the ReturnPC and CallerFrame at the right location in the call
+ frame header. Created a stack pointer sanity check macro (checkStackPointerAlignment)
+ in the LLInt that will cause a breakpoint on failure.
+
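
For reference, the convention described above can be modeled in a few lines of C++. This is only an illustration, under the assumption (used throughout this branch) that CallerFrame and ReturnPC occupy the first two slots of the call frame header; the struct and helper names below are hypothetical, not actual JSC declarations.

    #include <cstddef>
    #include <cstdint>

    // Illustrative model of the two header slots the caller/callee handshake
    // fills in on X86_64.
    struct FrameHeaderModel {
        void* callerFrame; // calleeFramePtr + 0: stored by the callee's "push cfr"
        void* returnPC;    // calleeFramePtr + 8: pushed implicitly by "call"
    };
    static_assert(offsetof(FrameHeaderModel, callerFrame) == 0, "CallerFrame slot");
    static_assert(offsetof(FrameHeaderModel, returnPC) == 8, "ReturnPC slot");

    // Caller side: SP is parked just above those two slots before the call, so
    // the call instruction and the callee's prologue ("push cfr; move sp, cfr")
    // land ReturnPC and CallerFrame exactly where the header expects them.
    inline uintptr_t stackPointerForJSCall(uintptr_t calleeFramePtr)
    {
        return calleeFramePtr + sizeof(FrameHeaderModel); // calleeFramePtr + 16
    }
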
+ * dfg/DFGJITCompiler.cpp:
+ (JSC::DFG::JITCompiler::compileEntry):
+ * dfg/DFGSpeculativeJIT64.cpp:
+ (JSC::DFG::SpeculativeJIT::compile):
+ * ftl/FTLLink.cpp:
+ (JSC::FTL::compileEntry):
+ (JSC::FTL::link):
+ * ftl/FTLThunks.cpp:
+ (JSC::FTL::osrExitGenerationThunkGenerator):
+ (JSC::FTL::slowPathCallThunkGenerator):
+ * interpreter/Interpreter.cpp:
+ (JSC::unwindCallFrame):
+ * interpreter/ProtoCallFrame.cpp:
+ (JSC::ProtoCallFrame::init):
+ * jit/AssemblyHelpers.h:
+ (JSC::AssemblyHelpers::emitFunctionPrologue):
+ (JSC::AssemblyHelpers::emitFunctionEpilogue):
+ * jit/JIT.cpp:
+ (JSC::JIT::privateCompile):
+ * jit/JITCall.cpp:
+ (JSC::JIT::privateCompileClosureCall):
+ * jit/JITOpcodes.cpp:
+ (JSC::JIT::emit_op_end):
+ (JSC::JIT::emit_op_ret):
+ (JSC::JIT::emit_op_ret_object_or_this):
+ * jit/Repatch.cpp:
+ (JSC::linkClosureCall):
+ * jit/ThunkGenerators.cpp:
+ (JSC::slowPathFor):
+ (JSC::nativeForGenerator):
+ * llint/LowLevelInterpreter.asm:
+ * llint/LowLevelInterpreter64.asm:
+
+2013-12-06 Michael Saboff <[email protected]>
+
Merged from trunk r160244: <http://trac.webkit.org/changeset/160244>
Split sizing of VarArgs frames from loading arguments for the frame
Modified: branches/jsCStack/Source/JavaScriptCore/dfg/DFGJITCompiler.cpp (160266 => 160267)
--- branches/jsCStack/Source/JavaScriptCore/dfg/DFGJITCompiler.cpp 2013-12-07 02:55:23 UTC (rev 160266)
+++ branches/jsCStack/Source/JavaScriptCore/dfg/DFGJITCompiler.cpp 2013-12-07 05:46:22 UTC (rev 160267)
@@ -98,8 +98,7 @@
// We'll need to convert the remaining cti_ style calls (specifically the stack
// check) which will be dependent on stack layout. (We'd need to account for this in
// both normal return code and when jumping to an exception handler).
- preserveReturnAddressAfterCall(GPRInfo::regT2);
- emitPutReturnPCToCallFrameHeader(GPRInfo::regT2);
+ emitFunctionPrologue();
emitPutImmediateToCallFrameHeader(m_codeBlock, JSStack::CodeBlock);
}
Modified: branches/jsCStack/Source/JavaScriptCore/dfg/DFGSpeculativeJIT64.cpp (160266 => 160267)
--- branches/jsCStack/Source/JavaScriptCore/dfg/DFGSpeculativeJIT64.cpp 2013-12-07 02:55:23 UTC (rev 160266)
+++ branches/jsCStack/Source/JavaScriptCore/dfg/DFGSpeculativeJIT64.cpp 2013-12-07 05:46:22 UTC (rev 160267)
@@ -3423,12 +3423,7 @@
JSValueOperand op1(this, node->child1());
m_jit.move(op1.gpr(), GPRInfo::returnValueGPR);
- // Grab the return address.
- m_jit.emitGetReturnPCFromCallFrameHeaderPtr(GPRInfo::regT1);
- // Restore our caller's "r".
- m_jit.emitGetCallerFrameFromCallFrameHeaderPtr(GPRInfo::callFrameRegister);
- // Return.
- m_jit.restoreReturnAddressBeforeReturn(GPRInfo::regT1);
+ m_jit.emitFunctionEpilogue();
m_jit.ret();
noResult(node);
Modified: branches/jsCStack/Source/JavaScriptCore/ftl/FTLLink.cpp (160266 => 160267)
--- branches/jsCStack/Source/JavaScriptCore/ftl/FTLLink.cpp 2013-12-07 02:55:23 UTC (rev 160266)
+++ branches/jsCStack/Source/JavaScriptCore/ftl/FTLLink.cpp 2013-12-07 05:46:22 UTC (rev 160267)
@@ -45,8 +45,7 @@
static void compileEntry(CCallHelpers& jit)
{
- jit.preserveReturnAddressAfterCall(GPRInfo::regT2);
- jit.emitPutReturnPCToCallFrameHeader(GPRInfo::regT2);
+ jit.emitFunctionPrologue();
jit.emitPutImmediateToCallFrameHeader(jit.codeBlock(), JSStack::CodeBlock);
}
@@ -96,9 +95,7 @@
CCallHelpers::TrustedImmPtr(reinterpret_cast<void*>(state.generatedFunction)),
GPRInfo::nonArgGPR0);
jit.call(GPRInfo::nonArgGPR0);
- jit.emitGetReturnPCFromCallFrameHeaderPtr(GPRInfo::regT1);
- jit.emitGetCallerFrameFromCallFrameHeaderPtr(GPRInfo::callFrameRegister);
- jit.restoreReturnAddressBeforeReturn(GPRInfo::regT1);
+ jit.emitFunctionEpilogue();
jit.ret();
stackCheck.link(&jit);
@@ -165,9 +162,7 @@
CCallHelpers::TrustedImmPtr(reinterpret_cast<void*>(state.generatedFunction)),
GPRInfo::nonArgGPR0);
jit.call(GPRInfo::nonArgGPR0);
- jit.emitGetReturnPCFromCallFrameHeaderPtr(GPRInfo::regT1);
- jit.emitGetCallerFrameFromCallFrameHeaderPtr(GPRInfo::callFrameRegister);
- jit.restoreReturnAddressBeforeReturn(GPRInfo::regT1);
+ jit.emitFunctionEpilogue();
jit.ret();
linkBuffer = adoptPtr(new LinkBuffer(
Modified: branches/jsCStack/Source/JavaScriptCore/ftl/FTLThunks.cpp (160266 => 160267)
--- branches/jsCStack/Source/JavaScriptCore/ftl/FTLThunks.cpp 2013-12-07 02:55:23 UTC (rev 160266)
+++ branches/jsCStack/Source/JavaScriptCore/ftl/FTLThunks.cpp 2013-12-07 05:46:22 UTC (rev 160267)
@@ -74,6 +74,7 @@
// Prepare for tail call.
jit.pop(GPRInfo::regT1);
jit.pop(GPRInfo::regT1);
+ // &&&&& FIXME: Need to address the right way to adjust CFR and SP
jit.pop(MacroAssembler::framePointerRegister);
// At this point we're sitting on the return address - so if we did a jump right now, the
@@ -81,7 +82,8 @@
// restore all registers.
jit.restoreReturnAddressBeforeReturn(GPRInfo::regT0);
-
+ // &&&& Through here
+
restoreAllRegisters(jit, buffer);
jit.ret();
@@ -119,11 +121,13 @@
currentOffset += sizeof(double);
}
+ // &&&& FIXME: Need to do something like jit.emitFunctionPrologue();
jit.preserveReturnAddressAfterCall(GPRInfo::nonArgGPR0);
jit.storePtr(GPRInfo::nonArgGPR0, AssemblyHelpers::Address(MacroAssembler::stackPointerRegister, key.offset()));
JITCompiler::Call call = jit.call();
-
+
+ // &&&& FIXME: Need to do something like jit.emitFunctionEpilogue();
jit.loadPtr(AssemblyHelpers::Address(MacroAssembler::stackPointerRegister, key.offset()), GPRInfo::nonPreservedNonReturnGPR);
jit.restoreReturnAddressBeforeReturn(GPRInfo::nonPreservedNonReturnGPR);
Modified: branches/jsCStack/Source/JavaScriptCore/interpreter/Interpreter.cpp (160266 => 160267)
--- branches/jsCStack/Source/JavaScriptCore/interpreter/Interpreter.cpp 2013-12-07 02:55:23 UTC (rev 160266)
+++ branches/jsCStack/Source/JavaScriptCore/interpreter/Interpreter.cpp 2013-12-07 05:46:22 UTC (rev 160267)
@@ -456,7 +456,10 @@
CallFrame* callerFrame = callFrame->callerFrame();
if (callerFrame->isVMEntrySentinel()) {
- callFrame->vm().topCallFrame = callerFrame->vmEntrySentinelCallerFrame();
+ if (callerFrame->vmEntrySentinelCallerFrame())
+ callFrame->vm().topCallFrame = callerFrame->vmEntrySentinelCallerFrame();
+ else
+ callFrame->vm().topCallFrame = callFrame; // _returnFromJavaScript will pop the frame off.
return false;
}
return true;
Modified: branches/jsCStack/Source/JavaScriptCore/interpreter/ProtoCallFrame.cpp (160266 => 160267)
--- branches/jsCStack/Source/JavaScriptCore/interpreter/ProtoCallFrame.cpp 2013-12-07 02:55:23 UTC (rev 160266)
+++ branches/jsCStack/Source/JavaScriptCore/interpreter/ProtoCallFrame.cpp 2013-12-07 05:46:22 UTC (rev 160267)
@@ -43,6 +43,10 @@
if (paddedArgsCount < numParameters)
paddedArgsCount = numParameters;
}
+ // &&&& FIXME: Align the combination of sentinel frame + callee frame
+ // Maybe this should be in callToJavaScript.
+ if (!(paddedArgsCount & 1))
+ paddedArgsCount++;
this->setPaddedArgsCount(paddedArgsCount);
this->clearCurrentVPC();
this->setThisValue(thisValue);
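
The padding above is about keeping the C stack 16-byte aligned: each frame slot is 8 bytes, so the sentinel frame plus the callee frame must together occupy an even number of slots, and making paddedArgsCount odd is, per the FIXME, meant to make that total come out even for this branch's header sizes. A minimal sketch of the check, with the slot counts left as parameters rather than the branch's actual constants:

    #include <cstddef>

    constexpr size_t slotBytes = 8;        // one Register / JSValue slot
    constexpr size_t stackAlignment = 16;  // required SP alignment at calls

    // True if the sentinel frame plus the callee frame leaves SP 16-byte aligned.
    constexpr bool framesKeepStackAligned(size_t sentinelFrameSlots, size_t calleeFrameSlots)
    {
        return ((sentinelFrameSlots + calleeFrameSlots) * slotBytes) % stackAlignment == 0;
    }
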
Modified: branches/jsCStack/Source/JavaScriptCore/jit/AssemblyHelpers.h (160266 => 160267)
--- branches/jsCStack/Source/JavaScriptCore/jit/AssemblyHelpers.h 2013-12-07 02:55:23 UTC (rev 160266)
+++ branches/jsCStack/Source/JavaScriptCore/jit/AssemblyHelpers.h 2013-12-07 05:46:22 UTC (rev 160267)
@@ -60,6 +60,19 @@
AssemblerType_T& assembler() { return m_assembler; }
#if CPU(X86_64) || CPU(X86)
+ void emitFunctionPrologue()
+ {
+ push(framePointerRegister);
+ move(stackPointerRegister, framePointerRegister);
+ }
+
+ void emitFunctionEpilogue()
+ {
+ // FIXME: The stack pointer needs to be calculated from the framePointer
+ move(framePointerRegister, stackPointerRegister);
+ pop(framePointerRegister);
+ }
+
void preserveReturnAddressAfterCall(GPRReg reg)
{
pop(reg);
Modified: branches/jsCStack/Source/JavaScriptCore/jit/JIT.cpp (160266 => 160267)
--- branches/jsCStack/Source/JavaScriptCore/jit/JIT.cpp 2013-12-07 02:55:23 UTC (rev 160266)
+++ branches/jsCStack/Source/JavaScriptCore/jit/JIT.cpp 2013-12-07 05:46:22 UTC (rev 160267)
@@ -506,8 +506,7 @@
if (m_randomGenerator.getUint32() & 1)
nop();
- preserveReturnAddressAfterCall(regT2);
- emitPutReturnPCToCallFrameHeader(regT2);
+ emitFunctionPrologue();
emitPutImmediateToCallFrameHeader(m_codeBlock, JSStack::CodeBlock);
Label beginLabel(this);
@@ -564,8 +563,7 @@
arityCheck = label();
store8(TrustedImm32(0), &m_codeBlock->m_shouldAlwaysBeInlined);
- preserveReturnAddressAfterCall(regT2);
- emitPutReturnPCToCallFrameHeader(regT2);
+ emitFunctionPrologue();
emitPutImmediateToCallFrameHeader(m_codeBlock, JSStack::CodeBlock);
load32(payloadFor(JSStack::ArgumentCount), regT1);
Modified: branches/jsCStack/Source/JavaScriptCore/jit/JITCall.cpp (160266 => 160267)
--- branches/jsCStack/Source/JavaScriptCore/jit/JITCall.cpp 2013-12-07 02:55:23 UTC (rev 160266)
+++ branches/jsCStack/Source/JavaScriptCore/jit/JITCall.cpp 2013-12-07 05:46:22 UTC (rev 160267)
@@ -226,7 +226,7 @@
void JIT::privateCompileClosureCall(CallLinkInfo* callLinkInfo, CodeBlock* calleeCodeBlock, Structure* expectedStructure, ExecutableBase* expectedExecutable, MacroAssemblerCodePtr codePtr)
{
JumpList slowCases;
-
+
slowCases.append(branchTestPtr(NonZero, regT0, tagMaskRegister));
slowCases.append(branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), TrustedImmPtr(expectedStructure)));
slowCases.append(branchPtr(NotEqual, Address(regT0, JSFunction::offsetOfExecutable()), TrustedImmPtr(expectedExecutable)));
Modified: branches/jsCStack/Source/JavaScriptCore/jit/JITOpcodes.cpp (160266 => 160267)
--- branches/jsCStack/Source/JavaScriptCore/jit/JITOpcodes.cpp 2013-12-07 02:55:23 UTC (rev 160266)
+++ branches/jsCStack/Source/JavaScriptCore/jit/JITOpcodes.cpp 2013-12-07 05:46:22 UTC (rev 160267)
@@ -73,7 +73,7 @@
{
RELEASE_ASSERT(returnValueGPR != callFrameRegister);
emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);
- restoreReturnAddressBeforeReturn(Address(callFrameRegister, CallFrame::returnPCOffset()));
+ emitFunctionEpilogue();
ret();
}
@@ -262,14 +262,7 @@
// Return the result in %eax.
emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);
- // Grab the return address.
- emitGetReturnPCFromCallFrameHeaderPtr(regT1);
-
- // Restore our caller's "r".
- emitGetCallerFrameFromCallFrameHeaderPtr(callFrameRegister);
-
- // Return.
- restoreReturnAddressBeforeReturn(regT1);
+ emitFunctionEpilogue();
ret();
}
@@ -288,11 +281,8 @@
// Grab the return address.
emitGetReturnPCFromCallFrameHeaderPtr(regT1);
- // Restore our caller's "r".
- emitGetCallerFrameFromCallFrameHeaderPtr(callFrameRegister);
-
// Return.
- restoreReturnAddressBeforeReturn(regT1);
+ emitFunctionEpilogue();
ret();
// Return 'this' in %eax.
@@ -300,14 +290,8 @@
notObject.link(this);
emitGetVirtualRegister(currentInstruction[2].u.operand, returnValueGPR);
- // Grab the return address.
- emitGetReturnPCFromCallFrameHeaderPtr(regT1);
-
- // Restore our caller's "r".
- emitGetCallerFrameFromCallFrameHeaderPtr(callFrameRegister);
-
// Return.
- restoreReturnAddressBeforeReturn(regT1);
+ emitFunctionEpilogue();
ret();
}
Modified: branches/jsCStack/Source/JavaScriptCore/jit/Repatch.cpp (160266 => 160267)
--- branches/jsCStack/Source/JavaScriptCore/jit/Repatch.cpp 2013-12-07 02:55:23 UTC (rev 160266)
+++ branches/jsCStack/Source/JavaScriptCore/jit/Repatch.cpp 2013-12-07 05:46:22 UTC (rev 160267)
@@ -1384,6 +1384,9 @@
stubJit.move(CCallHelpers::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
#endif
stubJit.move(CCallHelpers::TrustedImmPtr(callLinkInfo.callReturnLocation.executableAddress()), GPRInfo::nonArgGPR2);
+
+ // &&&& FIXME: Think the following instruction should be a poke
+ stubJit.breakpoint();
stubJit.restoreReturnAddressBeforeReturn(GPRInfo::nonArgGPR2);
AssemblyHelpers::Jump slow = stubJit.jump();
Modified: branches/jsCStack/Source/JavaScriptCore/jit/ThunkGenerators.cpp (160266 => 160267)
--- branches/jsCStack/Source/JavaScriptCore/jit/ThunkGenerators.cpp 2013-12-07 02:55:23 UTC (rev 160266)
+++ branches/jsCStack/Source/JavaScriptCore/jit/ThunkGenerators.cpp 2013-12-07 05:46:22 UTC (rev 160267)
@@ -82,9 +82,9 @@
static void slowPathFor(
CCallHelpers& jit, VM* vm, P_JITOperation_E slowPathFunction)
{
- jit.preserveReturnAddressAfterCall(GPRInfo::nonArgGPR2);
- emitPointerValidation(jit, GPRInfo::nonArgGPR2);
- jit.emitPutReturnPCToCallFrameHeader(GPRInfo::nonArgGPR2);
+ // &&&& FIXME: Need to clean up the frame below like emitFunctionEpilogue()
+ jit.breakpoint();
+ jit.emitFunctionPrologue();
jit.storePtr(GPRInfo::callFrameRegister, &vm->topCallFrame);
jit.setupArgumentsExecState();
jit.move(CCallHelpers::TrustedImmPtr(bitwise_cast<void*>(slowPathFunction)), GPRInfo::nonArgGPR0);
@@ -250,7 +250,8 @@
int executableOffsetToFunction = NativeExecutable::offsetOfNativeFunctionFor(kind);
JSInterfaceJIT jit(vm);
-
+
+ jit.emitFunctionPrologue();
jit.emitPutImmediateToCallFrameHeader(0, JSStack::CodeBlock);
jit.storePtr(JSInterfaceJIT::callFrameRegister, &vm->topCallFrame);
@@ -261,9 +262,6 @@
jit.emitGetFromCallFrameHeaderPtr(JSStack::ScopeChain, JSInterfaceJIT::regT1, JSInterfaceJIT::regT0);
jit.emitPutCellToCallFrameHeader(JSInterfaceJIT::regT1, JSStack::ScopeChain);
- jit.peek(JSInterfaceJIT::regT1);
- jit.emitPutReturnPCToCallFrameHeader(JSInterfaceJIT::regT1);
-
// Calling convention: f(ecx, edx, ...);
// Host function signature: f(ExecState*);
jit.move(JSInterfaceJIT::callFrameRegister, X86Registers::ecx);
@@ -390,6 +388,7 @@
#endif
// Return.
+ jit.emitFunctionEpilogue();
jit.ret();
// Handle an exception
Modified: branches/jsCStack/Source/JavaScriptCore/llint/LowLevelInterpreter.asm (160266 => 160267)
--- branches/jsCStack/Source/JavaScriptCore/llint/LowLevelInterpreter.asm 2013-12-07 02:55:23 UTC (rev 160266)
+++ branches/jsCStack/Source/JavaScriptCore/llint/LowLevelInterpreter.asm 2013-12-07 05:46:22 UTC (rev 160267)
@@ -206,6 +206,29 @@
end
end
+macro preserveCallerPCAndCFR()
+ if C_LOOP or ARM or ARMv7 or ARMv7_TRADITIONAL or ARM64 or MIPS or SH4
+ # In C_LOOP case, we're only preserving the bytecode vPC.
+ # FIXME: Need to fix for other ports
+ # move lr, destinationRegister
+ elsif X86 or X86_64
+ push cfr
+ move sp, cfr
+ else
+ error
+ end
+end
+
+macro restoreCallerPCAndCFR()
+ if C_LOOP or ARM or ARMv7 or ARMv7_TRADITIONAL or ARM64 or MIPS or SH4
+ # In C_LOOP case, we're only preserving the bytecode vPC.
+ # FIXME: Need to fix for other ports
+ # move lr, destinationRegister
+ elsif X86 or X86_64
+ move cfr, sp
+ pop cfr
+ end
+end
macro preserveReturnAddressAfterCall(destinationRegister)
if C_LOOP or ARM or ARMv7 or ARMv7_TRADITIONAL or ARM64 or MIPS or SH4
# In C_LOOP case, we're only preserving the bytecode vPC.
@@ -234,11 +257,13 @@
end
end
-macro callTargetFunction(callLinkInfo)
+macro callTargetFunction(callLinkInfo, calleeFramePtr)
if C_LOOP
cloopCallJSFunction LLIntCallLinkInfo::machineCodeTarget[callLinkInfo]
else
+ prepareStackPointerForJSCall(calleeFramePtr)
call LLIntCallLinkInfo::machineCodeTarget[callLinkInfo]
+ restoreStackPointerAfterJSCall()
dispatchAfterCall()
end
end
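
The new calleeFramePtr argument feeds the stack pointer bookkeeping done by prepareStackPointerForJSCall and restoreStackPointerAfterJSCall (defined in LowLevelInterpreter64.asm later in this changeset). A rough, illustrative C++ equivalent of the two computations:

    #include <cstdint>

    // Before the call: SP sits two slots (16 bytes) above the callee frame, so
    // "call" and the callee prologue fill in ReturnPC and CallerFrame.
    inline uintptr_t stackPointerBeforeJSCall(uintptr_t calleeFramePtr)
    {
        return calleeFramePtr + 16;
    }

    // After the call returns: SP is recomputed from the frame pointer and the
    // CodeBlock's callee register count; locals live below cfr.
    inline uintptr_t stackPointerAfterJSCall(uintptr_t cfr, uint32_t numCalleeRegisters)
    {
        return cfr - numCalleeRegisters * 8;
    }
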
@@ -250,7 +275,9 @@
if C_LOOP
cloopCallJSFunction callee
else
+ prepareStackPointerForJSCall(t1) # The slow path leaves the callee ExecState* in t1
call callee
+ restoreStackPointerAfterJSCall()
dispatchAfterCall()
end
end)
@@ -313,10 +340,9 @@
# Do the bare minimum required to execute code. Sets up the PC, leave the CodeBlock*
# in t1. May also trigger prologue entry OSR.
macro prologue(codeBlockGetter, codeBlockSetter, osrSlowPath, traceSlowPath)
- preserveReturnAddressAfterCall(t2)
-
# Set up the call frame and check if we should OSR.
- storep t2, ReturnPC[cfr]
+ preserveCallerPCAndCFR()
+
if EXECUTION_TRACING
callSlowPath(traceSlowPath)
end
@@ -326,8 +352,10 @@
cCall2(osrSlowPath, cfr, PC)
move t1, cfr
btpz t0, .recover
- loadp ReturnPC[cfr], t2
- restoreReturnAddressBeforeReturn(t2)
+ # &&&& FIXME: Not sure this is right
+ break
+ # loadp ReturnPC[cfr], t2
+ # restoreReturnAddressBeforeReturn(t2)
jmp t0
.recover:
codeBlockGetter(t1)
@@ -335,6 +363,10 @@
end
codeBlockSetter(t1)
+ loadi CodeBlock::m_numCalleeRegisters[t1], t2
+ lshiftp 3, t2
+ subp t2, sp
+
# Set up the PC.
if JSVALUE64
loadp CodeBlock::m_instructions[t1], PB
@@ -415,9 +447,7 @@
end
macro doReturn()
- loadp ReturnPC[cfr], t2
- loadp CallerFrame[cfr], cfr
- restoreReturnAddressBeforeReturn(t2)
+ restoreCallerPCAndCFR()
ret
end
@@ -429,10 +459,10 @@
# Note, if these stubs or one of their related macros are changed, make the
# equivalent changes in jit/JITStubsX86.h and/or jit/JITStubsMSVC64.asm
_callToJavaScript:
- doCallToJavaScript(makeJavaScriptCall, doReturnFromJavaScript)
+ doCallToJavaScript(makeJavaScriptCall)
_callToNativeFunction:
- doCallToJavaScript(makeHostFunctionCall, doReturnFromHostFunction)
+ doCallToJavaScript(makeHostFunctionCall)
end
# Indicate the beginning of LLInt.
Modified: branches/jsCStack/Source/JavaScriptCore/llint/LowLevelInterpreter64.asm (160266 => 160267)
--- branches/jsCStack/Source/JavaScriptCore/llint/LowLevelInterpreter64.asm 2013-12-07 02:55:23 UTC (rev 160266)
+++ branches/jsCStack/Source/JavaScriptCore/llint/LowLevelInterpreter64.asm 2013-12-07 05:46:22 UTC (rev 160267)
@@ -52,6 +52,7 @@
end
macro cCall2(function, arg1, arg2)
+ checkStackPointerAlignment(t5, 0xbad0c002)
if X86_64
move arg1, t5
move arg2, t4
@@ -69,6 +70,7 @@
# This barely works. arg3 and arg4 should probably be immediates.
macro cCall4(function, arg1, arg2, arg3, arg4)
+ checkStackPointerAlignment(t5, 0xbad0c004)
if X86_64
move arg1, t5
move arg2, t4
@@ -88,6 +90,15 @@
end
end
+macro checkStackPointerAlignment(tempReg, location)
+ andp sp, 0xf, tempReg
+ btpz tempReg, .stackPointerOkay
+ move location, tempReg
+ break
+.stackPointerOkay:
+end
+
+
macro functionPrologue(extraStackSpace)
if X86_64
push cfr
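
The checkStackPointerAlignment macro added above behaves roughly like the C++ sketch below (illustrative only): it tests the low four bits of SP and, on failure, loads a recognizable tag such as 0xbad0c002 into the temp register before breaking, so the failing call site is easy to identify in the debugger.

    #include <cstdint>

    inline void checkStackPointerAlignment(uintptr_t sp, uintptr_t locationTag)
    {
        if (sp & 0xf) {
            // The LLInt macro moves the tag into a register and executes "break";
            // a compiler trap stands in for that here.
            volatile uintptr_t tag = locationTag;
            (void)tag;
            __builtin_trap();
        }
    }
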
@@ -95,32 +106,71 @@
elsif ARM64
pushLRAndFP
end
+ subp extraStackSpace, sp
+end
+
+macro functionEpilogue(extraStackSpace)
+ addp extraStackSpace, sp
+ if X86_64
+ pop cfr
+ elsif ARM64
+ popLRAndFP
+ end
+end
+
+macro callToJavaScriptPrologue(extraStackSpace)
+ if X86_64
+# push cfr
+# move sp, cfr
+ elsif ARM64
+ pushLRAndFP
+ end
pushCalleeSaves
if X86_64
subp extraStackSpace, sp
end
end
-macro functionEpilogue(extraStackSpace)
+macro callToJavaScriptEpilogue(extraStackSpace)
+ addp (CallFrameHeaderSlots-1)*8, cfr, sp
+ loadp CallerFrame[cfr], cfr
+
if X86_64
addp extraStackSpace, sp
end
popCalleeSaves
if X86_64
- pop cfr
+# pop cfr
elsif ARM64
popLRAndFP
end
end
-macro doCallToJavaScript(makeCall, doReturn)
+macro moveStackPointerForCallframe(codeblock)
+ loadi CodeBlock::m_numCalleeRegisters[codeblock], t1
+ lshiftp 3, t1
+ subp cfr, t1, sp
+end
+
+macro prepareStackPointerForJSCall(calleeFramePtr)
+ addp 16, calleeFramePtr, sp
+end
+
+macro restoreStackPointerAfterJSCall()
+ loadp CodeBlock[cfr], t1
+ loadi CodeBlock::m_numCalleeRegisters[t1], t1
+ lshiftp 3, t1
+ subp cfr, t1, sp
+end
+
+macro doCallToJavaScript(makeCall)
if X86_64
const entry = t5
const vmTopCallFrame = t4
const protoCallFrame = t1
const topOfStack = t2
- const extraStackSpace = 8
+ const extraStackSpace = 16
const previousCFR = t0
const previousPC = t6
const temp1 = t0
@@ -144,31 +194,31 @@
loadp [sp], previousPC
end
move cfr, previousCFR
- functionPrologue(extraStackSpace)
+ callToJavaScriptPrologue(extraStackSpace)
- move topOfStack, cfr
+ checkStackPointerAlignment(temp2, 0xbad0dc01)
+
+ move sp, cfr
subp (CallFrameHeaderSlots-1)*8, cfr
storep 0, ArgumentCount[cfr]
storep vmTopCallFrame, Callee[cfr]
- loadp [vmTopCallFrame], temp1
- storep temp1, ScopeChain[cfr]
+ loadp [vmTopCallFrame], temp2
+ storep temp2, ScopeChain[cfr]
storep 1, CodeBlock[cfr]
storep previousPC, ReturnPC[cfr]
storep previousCFR, CallerFrame[cfr]
- move cfr, temp1
loadi ProtoCallFrame::paddedArgCount[protoCallFrame], temp2
addp CallFrameHeaderSlots, temp2, temp2
lshiftp 3, temp2
- subp temp2, cfr
- storep temp1, CallerFrame[cfr]
+ subp cfr, temp2, sp
move 5, temp1
.copyHeaderLoop:
subi 1, temp1
loadp [protoCallFrame, temp1, 8], temp3
- storep temp3, CodeBlock[cfr, temp1, 8]
+ storep temp3, CodeBlock[sp, temp1, 8]
btinz temp1, .copyHeaderLoop
loadi ProtoCallFrame::argCountAndCodeOriginValue[protoCallFrame], temp2
@@ -180,7 +230,7 @@
move ValueUndefined, temp1
.fillExtraArgsLoop:
subi 1, temp3
- storep temp1, ThisArgumentOffset+8[cfr, temp3, 8]
+ storep temp1, ThisArgumentOffset+8[sp, temp3, 8]
bineq temp2, temp3, .fillExtraArgsLoop
.copyArgs:
@@ -190,17 +240,21 @@
btiz temp2, .copyArgsDone
subi 1, temp2
loadp [temp1, temp2, 8], temp3
- storep temp3, ThisArgumentOffset+8[cfr, temp2, 8]
+ storep temp3, ThisArgumentOffset+8[sp, temp2, 8]
jmp .copyArgsLoop
.copyArgsDone:
- storep cfr, [vmTopCallFrame]
+ storep sp, [vmTopCallFrame]
move 0xffff000000000000, csr1
addp 2, csr1, csr2
+ checkStackPointerAlignment(temp3, 0xbad0dc02)
+
makeCall(entry, temp1)
+ checkStackPointerAlignment(temp3, 0xbad0dc03)
+
bpeq CodeBlock[cfr], 1, .calleeFramePopped
loadp CallerFrame[cfr], cfr
@@ -209,34 +263,50 @@
loadp ScopeChain[cfr], temp3
storep temp3, [temp2]
- doReturn(extraStackSpace)
+ checkStackPointerAlignment(temp3, 0xbad0dc04)
+
+ callToJavaScriptEpilogue(extraStackSpace)
+ ret
end
+
macro makeJavaScriptCall(entry, temp)
+ addp 16, sp
call entry
+ subp 16, sp
end
+
macro makeHostFunctionCall(entry, temp)
move entry, temp
if X86_64
- move cfr, t5
+ move sp, t5
elsif ARM64 or C_LOOP
- move cfr, a0
+ move sp, a0
end
+ addp 16, sp
call temp
+ subp 16, sp
end
-macro doReturnFromJavaScript(extraStackSpace)
+
_returnFromJavaScript:
- functionEpilogue(extraStackSpace)
- ret
-end
+ subp 16, sp
-macro doReturnFromHostFunction(extraStackSpace)
- functionEpilogue(extraStackSpace)
+ checkStackPointerAlignment(t3, 0xbad0eeee)
+
+ bpeq CodeBlock[cfr], 1, .calleeFramePopped
+ loadp CallerFrame[cfr], cfr
+
+.calleeFramePopped:
+ loadp Callee[cfr], t3 # VM.topCallFrame
+ loadp ScopeChain[cfr], t6
+ storep t6, [t3]
+
+ callToJavaScriptEpilogue(16)
ret
-end
+
macro prepareStateForCCall()
leap [PB, PC, 8], PC
move PB, t3
@@ -273,7 +343,6 @@
storei PC, ArgumentCount + TagOffset[cfr]
prepareStateForCCall()
cCall2(slowPath, cfr, PC)
- move t1, cfr
action(t0)
end
@@ -1674,10 +1743,8 @@
storeq t0, ScopeChain[t3]
loadisFromInstruction(3, t2)
storei PC, ArgumentCount + TagOffset[cfr]
- storeq cfr, CallerFrame[t3]
storei t2, ArgumentCount + PayloadOffset[t3]
- move t3, cfr
- callTargetFunction(t1)
+ callTargetFunction(t1, t3)
.opCallSlow:
slowPathForCall(slowPath)
@@ -1831,11 +1898,13 @@
_llint_throw_during_call_trampoline:
- preserveReturnAddressAfterCall(t2)
+ pop t2
jmp _llint_throw_from_slow_path_trampoline
macro nativeCallTrampoline(executableOffsetToFunction)
+
+ functionPrologue(0)
storep 0, CodeBlock[cfr]
if X86_64
loadp ScopeChain[cfr], t0
@@ -1845,15 +1914,11 @@
loadp CallerFrame[cfr], t0
loadq ScopeChain[t0], t1
storeq t1, ScopeChain[cfr]
- peek 0, t1
- storep t1, ReturnPC[cfr]
move cfr, t5 # t5 = rdi
- subp 16 - 8, sp
loadp Callee[cfr], t4 # t4 = rsi
loadp JSFunction::m_executable[t4], t1
- move t0, cfr # Restore cfr to avoid loading from stack
+ checkStackPointerAlignment(t3, 0xdead0001)
call executableOffsetToFunction[t1]
- addp 16 - 8, sp
loadp ScopeChain[cfr], t3
andp MarkedBlockMask, t3
loadp MarkedBlock::m_weakSet + WeakSet::m_vm[t3], t3
@@ -1904,9 +1969,12 @@
end
btqnz VM::m_exception[t3], .exception
+ functionEpilogue(0)
ret
.exception:
- preserveReturnAddressAfterCall(t1) # This is really only needed on X86_64
+ if X86_64
+ pop t1
+ end
loadi ArgumentCount + TagOffset[cfr], PC
loadp CodeBlock[cfr], PB
loadp CodeBlock::m_vm[PB], t0