Reviewers: danno,
Message:
StackOperandForReturnAddress is used instead of StackOperandForPc.
Description:
Introduce a StackOperandForReturnAddress operand to access the return address on the stack.
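In short, the change adds a thin named wrapper around the raw rsp-relative operand and moves the stub-cache call sites onto it. A minimal sketch of the shape of the change (the exact hunks are in the diff below):

  // New helper in src/x64/macro-assembler-x64.h: a named alias for an
  // rsp-relative operand, so call sites state explicitly that they are
  // addressing the return-address slot rather than an arbitrary one.
  inline Operand StackOperandForReturnAddress(int32_t disp) {
    return Operand(rsp, disp);
  }

  // Call sites in src/x64/stub-cache-x64.cc then read, for example:
  //   __ movq(scratch, Operand(rsp, 0));                   // before
  //   __ movq(scratch, StackOperandForReturnAddress(0));   // after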
Please review this at https://codereview.chromium.org/20628003/
SVN Base: https://v8.googlecode.com/svn/branches/bleeding_edge
Affected files:
M src/x64/macro-assembler-x64.h
M src/x64/stub-cache-x64.cc
Index: src/x64/macro-assembler-x64.h
diff --git a/src/x64/macro-assembler-x64.h b/src/x64/macro-assembler-x64.h
index 9d5d2a31c5ceee61652cd923ca83e8a13d1388ca..e611c8ae27999a9520d0aa07bfa050b51a0bb831 100644
--- a/src/x64/macro-assembler-x64.h
+++ b/src/x64/macro-assembler-x64.h
@@ -1518,6 +1518,10 @@ inline Operand StackSpaceOperand(int index) {
}
+inline Operand StackOperandForReturnAddress(int32_t disp) {
+ return Operand(rsp, disp);
+}
+
#ifdef GENERATED_CODE_COVERAGE
extern void LogGeneratedCodeCoverage(const char* file_line);
Index: src/x64/stub-cache-x64.cc
diff --git a/src/x64/stub-cache-x64.cc b/src/x64/stub-cache-x64.cc
index 542018fddd084c20fb9fc1f024a4d8a92e41cccd..39ff656ec45be81f7fa5c9c633b4dbe92c227981 100644
--- a/src/x64/stub-cache-x64.cc
+++ b/src/x64/stub-cache-x64.cc
@@ -410,9 +410,9 @@ static void ReserveSpaceForFastApiCall(MacroAssembler* masm, Register scratch) {
// -- rsp[0] : return address
// -- rsp[8] : last argument in the internal frame of the caller
// -----------------------------------
- __ movq(scratch, Operand(rsp, 0));
+ __ movq(scratch, StackOperandForReturnAddress(0));
__ subq(rsp, Immediate(kFastApiCallArguments * kPointerSize));
- __ movq(Operand(rsp, 0), scratch);
+ __ movq(StackOperandForReturnAddress(0), scratch);
__ Move(scratch, Smi::FromInt(0));
for (int i = 1; i <= kFastApiCallArguments; i++) {
__ movq(Operand(rsp, i * kPointerSize), scratch);
@@ -431,8 +431,9 @@ static void FreeSpaceForFastApiCall(MacroAssembler* masm, Register scratch) {
// -- rsp[kFastApiCallArguments * 8 + 8] : last argument in the internal
// frame.
// -----------------------------------
- __ movq(scratch, Operand(rsp, 0));
- __ movq(Operand(rsp, kFastApiCallArguments * kPointerSize), scratch);
+ __ movq(scratch, StackOperandForReturnAddress(0));
+ __ movq(StackOperandForReturnAddress(kFastApiCallArguments * kPointerSize),
+ scratch);
__ addq(rsp, Immediate(kPointerSize * kFastApiCallArguments));
}
@@ -2350,8 +2351,9 @@ Handle<Code> CallStubCompiler::CompileFastApiCall(
name, depth, &miss);
// Move the return address on top of the stack.
- __ movq(rax, Operand(rsp, kFastApiCallArguments * kPointerSize));
- __ movq(Operand(rsp, 0 * kPointerSize), rax);
+ __ movq(rax,
+ StackOperandForReturnAddress(kFastApiCallArguments * kPointerSize));
+ __ movq(StackOperandForReturnAddress(0), rax);
GenerateFastApiCall(masm(), optimization, argc);