Reviewers: danno, Jakob,
Description:
Use kRegisterSize/kPCOnStackSize/kFPOnStackSize when calling C++ runtime
Please review this at https://codereview.chromium.org/20262004/
SVN Base: http://v8.googlecode.com/svn/branches/bleeding_edge/
Affected files:
M src/x64/code-stubs-x64.cc
M src/x64/debug-x64.cc
M src/x64/deoptimizer-x64.cc
M src/x64/lithium-codegen-x64.cc
M src/x64/macro-assembler-x64.cc
Index: src/x64/code-stubs-x64.cc
===================================================================
--- src/x64/code-stubs-x64.cc (revision 15866)
+++ src/x64/code-stubs-x64.cc (working copy)
@@ -2764,11 +2764,11 @@
// Argument 9: Pass current isolate address.
__ LoadAddress(kScratchRegister,
ExternalReference::isolate_address(masm->isolate()));
- __ movq(Operand(rsp, (argument_slots_on_stack - 1) * kPointerSize),
+ __ movq(Operand(rsp, (argument_slots_on_stack - 1) * kRegisterSize),
kScratchRegister);
// Argument 8: Indicate that this is a direct call from JavaScript.
- __ movq(Operand(rsp, (argument_slots_on_stack - 2) * kPointerSize),
+ __ movq(Operand(rsp, (argument_slots_on_stack - 2) * kRegisterSize),
Immediate(1));
// Argument 7: Start (high end) of backtracking stack memory area.
@@ -2776,13 +2776,13 @@
__ movq(r9, Operand(kScratchRegister, 0));
__ movq(kScratchRegister, address_of_regexp_stack_memory_size);
__ addq(r9, Operand(kScratchRegister, 0));
- __ movq(Operand(rsp, (argument_slots_on_stack - 3) * kPointerSize), r9);
+ __ movq(Operand(rsp, (argument_slots_on_stack - 3) * kRegisterSize), r9);
// Argument 6: Set the number of capture registers to zero to force global
// regexps to behave as non-global. This does not affect non-global regexps.
// Argument 6 is passed in r9 on Linux and on the stack on Windows.
#ifdef _WIN64
#ifdef _WIN64
- __ movq(Operand(rsp, (argument_slots_on_stack - 4) * kPointerSize),
+ __ movq(Operand(rsp, (argument_slots_on_stack - 4) * kRegisterSize),
Immediate(0));
#else
__ Set(r9, 0);
@@ -2793,7 +2793,7 @@
ExternalReference::address_of_static_offsets_vector(isolate));
// Argument 5 passed in r8 on Linux and on the stack on Windows.
#ifdef _WIN64
- __ movq(Operand(rsp, (argument_slots_on_stack - 5) * kPointerSize), r8);
+ __ movq(Operand(rsp, (argument_slots_on_stack - 5) * kRegisterSize), r8);
#endif
// rdi: subject string
@@ -3878,8 +3878,8 @@
// Read result values stored on stack. Result is stored
// above the four argument mirror slots and the two
// Arguments object slots.
- __ movq(rax, Operand(rsp, 6 * kPointerSize));
- __ movq(rdx, Operand(rsp, 7 * kPointerSize));
+ __ movq(rax, Operand(rsp, 6 * kRegisterSize));
+ __ movq(rdx, Operand(rsp, 7 * kRegisterSize));
}
#endif
__ lea(rcx, Operand(rax, 1));
@@ -6496,10 +6496,10 @@
__ push(arg_reg_2);
// Calculate the original stack pointer and store it in the second arg.
- __ lea(arg_reg_2, Operand(rsp, (kNumSavedRegisters + 1) * kPointerSize));
+ __ lea(arg_reg_2, Operand(rsp, (kNumSavedRegisters + 1) * kRegisterSize));
// Calculate the function address to the first arg.
- __ movq(arg_reg_1, Operand(rsp, kNumSavedRegisters * kPointerSize));
+ __ movq(arg_reg_1, Operand(rsp, kNumSavedRegisters * kRegisterSize));
__ subq(arg_reg_1, Immediate(Assembler::kShortCallInstructionLength));
// Save the remainder of the volatile registers.
Index: src/x64/debug-x64.cc
===================================================================
--- src/x64/debug-x64.cc (revision 15866)
+++ src/x64/debug-x64.cc (working copy)
@@ -176,7 +176,7 @@
// If this call did not replace a call but patched other code then there will
// be an unwanted return address left on the stack. Here we get rid of that.
if (convert_call_to_jmp) {
- __ addq(rsp, Immediate(kPointerSize));
+ __ addq(rsp, Immediate(kPCOnStackSize));
}
// Now that the break point has been handled, resume normal execution by
Index: src/x64/deoptimizer-x64.cc
===================================================================
--- src/x64/deoptimizer-x64.cc (revision 15866)
+++ src/x64/deoptimizer-x64.cc (working copy)
@@ -407,7 +407,7 @@
__ push(r);
}
- const int kSavedRegistersAreaSize = kNumberOfRegisters * kPointerSize +
+ const int kSavedRegistersAreaSize = kNumberOfRegisters * kRegisterSize +
kDoubleRegsSize;
// We use this to keep the value of the fifth argument temporarily.
@@ -421,8 +421,9 @@
// Get the address of the location in the code object
// and compute the fp-to-sp delta in register arg5.
__ movq(arg_reg_4,
- Operand(rsp, kSavedRegistersAreaSize + 1 * kPointerSize));
- __ lea(arg5, Operand(rsp, kSavedRegistersAreaSize + 2 * kPointerSize));
+ Operand(rsp, kSavedRegistersAreaSize + 1 * kPCOnStackSize));
+ __ lea(arg5, Operand(rsp, kSavedRegistersAreaSize + 1 * kPCOnStackSize +
+ 1 * kRegisterSize));
__ subq(arg5, rbp);
__ neg(arg5);
@@ -466,7 +467,7 @@
}
// Remove the bailout id and return address from the stack.
- __ addq(rsp, Immediate(2 * kPointerSize));
+ __ addq(rsp, Immediate(1 * kRegisterSize + 1 * kPCOnStackSize));
// Compute a pointer to the unwinding limit in register rcx; that is
// the first stack slot not part of the input frame.
Index: src/x64/lithium-codegen-x64.cc
===================================================================
--- src/x64/lithium-codegen-x64.cc (revision 15866)
+++ src/x64/lithium-codegen-x64.cc (working copy)
@@ -3062,7 +3062,7 @@
Register result = ToRegister(instr->result());
if (instr->hydrogen()->from_inlined()) {
- __ lea(result, Operand(rsp, -2 * kPointerSize));
+ __ lea(result, Operand(rsp, -1 * kPCOnStackSize + -1 * kFPOnStackSize));
} else {
// Check for arguments adapter frame.
Label done, adapted;
Index: src/x64/macro-assembler-x64.cc
===================================================================
--- src/x64/macro-assembler-x64.cc (revision 15866)
+++ src/x64/macro-assembler-x64.cc (working copy)
@@ -3507,7 +3507,7 @@
// Optionally save all XMM registers.
if (save_doubles) {
int space = XMMRegister::kMaxNumRegisters * kDoubleSize +
- arg_stack_space * kPointerSize;
+ arg_stack_space * kRegisterSize;
subq(rsp, Immediate(space));
int offset = -2 * kPointerSize;
for (int i = 0; i < XMMRegister::NumAllocatableRegisters(); i++) {
@@ -3515,7 +3515,7 @@
movsd(Operand(rbp, offset - ((i + 1) * kDoubleSize)), reg);
}
} else if (arg_stack_space > 0) {
- subq(rsp, Immediate(arg_stack_space * kPointerSize));
+ subq(rsp, Immediate(arg_stack_space * kRegisterSize));
}
// Get the required frame alignment for the OS.
@@ -3560,7 +3560,7 @@
}
}
// Get the return address from the stack and restore the frame pointer.
- movq(rcx, Operand(rbp, 1 * kPointerSize));
+ movq(rcx, Operand(rbp, 1 * kFPOnStackSize));
movq(rbp, Operand(rbp, 0 * kPointerSize));
// Drop everything up to and including the arguments and the receiver
@@ -4375,7 +4375,7 @@
ASSERT(IsPowerOf2(frame_alignment));
int argument_slots_on_stack =
ArgumentStackSlotsForCFunctionCall(num_arguments);
- subq(rsp, Immediate((argument_slots_on_stack + 1) * kPointerSize));
+ subq(rsp, Immediate((argument_slots_on_stack + 1) * kRegisterSize));
and_(rsp, Immediate(-frame_alignment));
movq(Operand(rsp, argument_slots_on_stack * kPointerSize),
kScratchRegister);
}
@@ -4400,7 +4400,7 @@
ASSERT(num_arguments >= 0);
int argument_slots_on_stack =
ArgumentStackSlotsForCFunctionCall(num_arguments);
- movq(rsp, Operand(rsp, argument_slots_on_stack * kPointerSize));
+ movq(rsp, Operand(rsp, argument_slots_on_stack * kRegisterSize));
}
--
--
v8-dev mailing list
[email protected]
http://groups.google.com/group/v8-dev
---
You received this message because you are subscribed to the Google Groups "v8-dev" group.
To unsubscribe from this group and stop receiving emails from it, send an email
to [email protected].
For more options, visit https://groups.google.com/groups/opt_out.