Reviewers: Michael Starzinger,
Description:
[builtins] Pass correct number of arguments after adapting arguments.
The call protocol requires that the register dedicated to the number of
actual arguments (i.e. rax on x64) always contains the actual number of
arguments. That means that after adapting arguments it must match the
number of expected arguments. But currently we pass some semi-random
value (usually some stack address) after adapting arguments.
It looks like this is currently not observable anywhere, because our
builtins and functions either don't look at the number of arguments and
just make hard-coded (unchecked) assumptions, or are marked as "don't
adapt arguments", which bypasses the broken code in the trampoline for
arguments adaptation. Nevertheless, this should be fixed.
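For illustration, here is a minimal standalone C++ sketch of the protocol
described above. This is an editor's model, not V8 code; the names Callee
and AdaptAndCall are invented, and the receiver is left out. The point is
only that the callee trusts whatever count it is handed, so the adaptor
must hand it the adapted (expected) count rather than a leftover value.

// Editor's model: the "count register" (rax on x64) must hold the adapted
// argument count when the callee is entered.
#include <cassert>
#include <cstddef>
#include <vector>

namespace {

// Models the callee: it trusts the count it receives, just like generated
// code trusts the dedicated register.
int Callee(size_t argc, const std::vector<int>& args) {
  assert(argc <= args.size());
  int sum = 0;
  for (size_t i = 0; i < argc; ++i) sum += args[i];
  return sum;
}

// Models the arguments adaptor: pad missing arguments (0 stands in for
// undefined) and pass the expected count along, not whatever value the
// copy loop happened to leave behind.
int AdaptAndCall(std::vector<int> actual, size_t expected) {
  actual.resize(expected, 0);
  return Callee(/*argc=*/expected, actual);
}

}  // namespace

int main() {
  // A call site passing two actual arguments to a function expecting four.
  assert(AdaptAndCall({1, 2}, 4) == 3);
  return 0;
}
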
[email protected]
Please review this at https://codereview.chromium.org/1306423003/
Base URL: https://chromium.googlesource.com/v8/v8.git@master
Affected files (+62, -35 lines):
M src/arm/builtins-arm.cc
M src/arm64/builtins-arm64.cc
M src/ia32/builtins-ia32.cc
M src/mips/builtins-mips.cc
M src/mips64/builtins-mips64.cc
M src/x64/builtins-x64.cc
Index: src/arm/builtins-arm.cc
diff --git a/src/arm/builtins-arm.cc b/src/arm/builtins-arm.cc
index 79f1cda6e3e70742f23e42c69938524134b9cf21..ab2d77e2898e7a9107925736cff15afd2e87c360 100644
--- a/src/arm/builtins-arm.cc
+++ b/src/arm/builtins-arm.cc
@@ -1755,7 +1755,7 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
__ bind(&enough);
EnterArgumentsAdaptorFrame(masm);
- // Calculate copy start address into r0 and copy end address into r2.
+ // Calculate copy start address into r0 and copy end address into r4.
// r0: actual number of arguments as a smi
// r1: function
// r2: expected number of arguments
@@ -1763,19 +1763,20 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
__ add(r0, fp, Operand::PointerOffsetFromSmiKey(r0));
// adjust for return address and receiver
__ add(r0, r0, Operand(2 * kPointerSize));
- __ sub(r2, r0, Operand(r2, LSL, kPointerSizeLog2));
+ __ sub(r4, r0, Operand(r2, LSL, kPointerSizeLog2));
// Copy the arguments (including the receiver) to the new stack frame.
// r0: copy start address
// r1: function
- // r2: copy end address
+ // r2: expected number of arguments
// r3: code entry to call
+ // r4: copy end address
Label copy;
__ bind(&copy);
__ ldr(ip, MemOperand(r0, 0));
__ push(ip);
- __ cmp(r0, r2); // Compare before moving to next argument.
+ __ cmp(r0, r4); // Compare before moving to next argument.
__ sub(r0, r0, Operand(kPointerSize));
__ b(ne, &copy);
@@ -1833,20 +1834,23 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
// r2: expected number of arguments
// r3: code entry to call
__ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
- __ sub(r2, fp, Operand(r2, LSL, kPointerSizeLog2));
+ __ sub(r4, fp, Operand(r2, LSL, kPointerSizeLog2));
// Adjust for frame.
- __ sub(r2, r2, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
+ __ sub(r4, r4, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
2 * kPointerSize));
Label fill;
__ bind(&fill);
__ push(ip);
- __ cmp(sp, r2);
+ __ cmp(sp, r4);
__ b(ne, &fill);
}
// Call the entry point.
__ bind(&invoke);
+ __ mov(r0, r2);
+ // r0 : expected number of arguments
+ // r1 : function (passed through to callee)
__ Call(r3);
// Store offset of return address for deoptimizer.
Index: src/arm64/builtins-arm64.cc
diff --git a/src/arm64/builtins-arm64.cc b/src/arm64/builtins-arm64.cc
index dc867815b0de534c3900c7574d0ed72b0a1f83a6..2dcfb1bd48afd1913ae0025a50d83195feafc5e3 100644
--- a/src/arm64/builtins-arm64.cc
+++ b/src/arm64/builtins-arm64.cc
@@ -1803,19 +1803,19 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
Register copy_to = x12;
Register scratch1 = x13, scratch2 = x14;
- __ Lsl(argc_expected, argc_expected, kPointerSizeLog2);
+ __ Lsl(scratch2, argc_expected, kPointerSizeLog2);
// Adjust for fp, lr, and the receiver.
__ Add(copy_start, fp, 3 * kPointerSize);
__ Add(copy_start, copy_start, Operand(argc_actual, LSL,
kPointerSizeLog2));
- __ Sub(copy_end, copy_start, argc_expected);
+ __ Sub(copy_end, copy_start, scratch2);
__ Sub(copy_end, copy_end, kPointerSize);
__ Mov(copy_to, jssp);
// Claim space for the arguments, the receiver, and one extra slot.
// The extra slot ensures we do not write under jssp. It will be popped
// later.
- __ Add(scratch1, argc_expected, 2 * kPointerSize);
+ __ Add(scratch1, scratch2, 2 * kPointerSize);
__ Claim(scratch1, 1);
// Copy the arguments (including the receiver) to the new stack frame.
@@ -1868,7 +1868,7 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
__ Bind(&no_strong_error);
EnterArgumentsAdaptorFrame(masm);
- __ Lsl(argc_expected, argc_expected, kPointerSizeLog2);
+ __ Lsl(scratch2, argc_expected, kPointerSizeLog2);
__ Lsl(argc_actual, argc_actual, kPointerSizeLog2);
// Adjust for fp, lr, and the receiver.
@@ -1881,7 +1881,7 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
// Claim space for the arguments, the receiver, and one extra slot.
// The extra slot ensures we do not write under jssp. It will be popped
// later.
- __ Add(scratch1, argc_expected, 2 * kPointerSize);
+ __ Add(scratch1, scratch2, 2 * kPointerSize);
__ Claim(scratch1, 1);
// Copy the arguments (including the receiver) to the new stack frame.
@@ -1913,6 +1913,9 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
// Arguments have been adapted. Now call the entry point.
__ Bind(&invoke);
+ __ Mov(argc_actual, argc_expected);
+ // x0 : expected number of arguments
+ // x1 : function (passed through to callee)
__ Call(code_entry);
// Store offset of return address for deoptimizer.
Index: src/ia32/builtins-ia32.cc
diff --git a/src/ia32/builtins-ia32.cc b/src/ia32/builtins-ia32.cc
index 672d69829cd6af62c70725cbedac3e28be89ff96..477856cf3f49e7b69cf5af8cdaebd14b8111c450 100644
--- a/src/ia32/builtins-ia32.cc
+++ b/src/ia32/builtins-ia32.cc
@@ -1643,16 +1643,17 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
// Copy receiver and all expected arguments.
const int offset = StandardFrameConstants::kCallerSPOffset;
- __ lea(eax, Operand(ebp, eax, times_4, offset));
- __ mov(edi, -1); // account for receiver
+ __ lea(edi, Operand(ebp, eax, times_4, offset));
+ __ mov(eax, -1); // account for receiver
Label copy;
__ bind(&copy);
- __ inc(edi);
- __ push(Operand(eax, 0));
- __ sub(eax, Immediate(kPointerSize));
- __ cmp(edi, ebx);
+ __ inc(eax);
+ __ push(Operand(edi, 0));
+ __ sub(edi, Immediate(kPointerSize));
+ __ cmp(eax, ebx);
__ j(less, &copy);
+ // eax now contains the expected number of arguments.
__ jmp(&invoke);
}
@@ -1681,6 +1682,9 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
__ bind(&no_strong_error);
EnterArgumentsAdaptorFrame(masm);
+ // Remember expected arguments in ecx.
+ __ mov(ecx, ebx);
+
// Copy receiver and all actual arguments.
const int offset = StandardFrameConstants::kCallerSPOffset;
__ lea(edi, Operand(ebp, eax, times_4, offset));
@@ -1705,12 +1709,17 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
__ push(Immediate(masm->isolate()->factory()->undefined_value()));
__ cmp(eax, ebx);
__ j(less, &fill);
+
+ // Restore expected arguments.
+ __ mov(eax, ecx);
}
// Call the entry point.
__ bind(&invoke);
// Restore function pointer.
__ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
+ // eax : expected number of arguments
+ // edi : function (passed through to callee)
__ call(edx);
// Store offset of return address for deoptimizer.
Index: src/mips/builtins-mips.cc
diff --git a/src/mips/builtins-mips.cc b/src/mips/builtins-mips.cc
index ca15f265cbc192ccb9b6280ee65c805f765ac562..78630a23b61fc28f3deddd02ce39d44bf7987391 100644
--- a/src/mips/builtins-mips.cc
+++ b/src/mips/builtins-mips.cc
@@ -1758,26 +1758,27 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
__ bind(&enough);
EnterArgumentsAdaptorFrame(masm);
- // Calculate copy start address into a0 and copy end address into a2.
+ // Calculate copy start address into a0 and copy end address into t1.
__ sll(a0, a0, kPointerSizeLog2 - kSmiTagSize);
__ Addu(a0, fp, a0);
// Adjust for return address and receiver.
__ Addu(a0, a0, Operand(2 * kPointerSize));
// Compute copy end address.
- __ sll(a2, a2, kPointerSizeLog2);
- __ subu(a2, a0, a2);
+ __ sll(t1, a2, kPointerSizeLog2);
+ __ subu(t1, a0, t1);
// Copy the arguments (including the receiver) to the new stack frame.
// a0: copy start address
// a1: function
- // a2: copy end address
+ // a2: expected number of arguments
// a3: code entry to call
+ // t1: copy end address
Label copy;
__ bind(&copy);
__ lw(t0, MemOperand(a0));
__ push(t0);
- __ Branch(USE_DELAY_SLOT, &copy, ne, a0, Operand(a2));
+ __ Branch(USE_DELAY_SLOT, &copy, ne, a0, Operand(t1));
__ addiu(a0, a0, -kPointerSize); // In delay slot.
__ jmp(&invoke);
@@ -1840,21 +1841,24 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
// a3: code entry to call
__ LoadRoot(t0, Heap::kUndefinedValueRootIndex);
__ sll(t2, a2, kPointerSizeLog2);
- __ Subu(a2, fp, Operand(t2));
+ __ Subu(t1, fp, Operand(t2));
// Adjust for frame.
- __ Subu(a2, a2, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
+ __ Subu(t1, t1, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
2 * kPointerSize));
Label fill;
__ bind(&fill);
__ Subu(sp, sp, kPointerSize);
- __ Branch(USE_DELAY_SLOT, &fill, ne, sp, Operand(a2));
+ __ Branch(USE_DELAY_SLOT, &fill, ne, sp, Operand(t1));
__ sw(t0, MemOperand(sp));
}
// Call the entry point.
__ bind(&invoke);
+ __ mov(a0, a2);
+ // a0: expected number of arguments
+ // a1: function
__ Call(a3);
// Store offset of return address for deoptimizer.
Index: src/mips64/builtins-mips64.cc
diff --git a/src/mips64/builtins-mips64.cc b/src/mips64/builtins-mips64.cc
index 11bc8516a4901d464d9acea5af01da8f8a61ab2b..147719dededde063a215e9b3bf00c342fb0628bf 100644
--- a/src/mips64/builtins-mips64.cc
+++ b/src/mips64/builtins-mips64.cc
@@ -1757,26 +1757,27 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
__ bind(&enough);
EnterArgumentsAdaptorFrame(masm);
- // Calculate copy start address into a0 and copy end address into a2.
+ // Calculate copy start address into a0 and copy end address into t0.
__ SmiScale(a0, a0, kPointerSizeLog2);
__ Daddu(a0, fp, a0);
// Adjust for return address and receiver.
__ Daddu(a0, a0, Operand(2 * kPointerSize));
// Compute copy end address.
- __ dsll(a2, a2, kPointerSizeLog2);
- __ dsubu(a2, a0, a2);
+ __ dsll(t0, a2, kPointerSizeLog2);
+ __ dsubu(t0, a0, t0);
// Copy the arguments (including the receiver) to the new stack frame.
// a0: copy start address
// a1: function
- // a2: copy end address
+ // a2: expected number of arguments
// a3: code entry to call
+ // t0: copy end address
Label copy;
__ bind(&copy);
__ ld(a4, MemOperand(a0));
__ push(a4);
- __ Branch(USE_DELAY_SLOT, &copy, ne, a0, Operand(a2));
+ __ Branch(USE_DELAY_SLOT, &copy, ne, a0, Operand(t0));
__ daddiu(a0, a0, -kPointerSize); // In delay slot.
__ jmp(&invoke);
@@ -1839,21 +1840,24 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
// a3: code entry to call
__ LoadRoot(a4, Heap::kUndefinedValueRootIndex);
__ dsll(a6, a2, kPointerSizeLog2);
- __ Dsubu(a2, fp, Operand(a6));
+ __ Dsubu(t1, fp, Operand(a6));
// Adjust for frame.
- __ Dsubu(a2, a2, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
- 2 * kPointerSize));
+ __ Dsubu(t1, t1, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
+ 2 * kPointerSize));
Label fill;
__ bind(&fill);
__ Dsubu(sp, sp, kPointerSize);
- __ Branch(USE_DELAY_SLOT, &fill, ne, sp, Operand(a2));
+ __ Branch(USE_DELAY_SLOT, &fill, ne, sp, Operand(t1));
__ sd(a4, MemOperand(sp));
}
// Call the entry point.
__ bind(&invoke);
+ __ mov(a0, a2);
+ // a0: expected number of arguments
+ // a1: function
__ Call(a3);
// Store offset of return address for deoptimizer.
Index: src/x64/builtins-x64.cc
diff --git a/src/x64/builtins-x64.cc b/src/x64/builtins-x64.cc
index e550835d42b4e6267817e8959cd542f722172243..eaae88aa3780639d30a27601499595a17a9e7701 100644
--- a/src/x64/builtins-x64.cc
+++ b/src/x64/builtins-x64.cc
@@ -1780,6 +1780,9 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
// Call the entry point.
__ bind(&invoke);
+ __ movp(rax, rbx);
+ // rax : expected number of arguments
+ // rdi: function (passed through to callee)
__ call(rdx);
// Store offset of return address for deoptimizer.
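
All six ports follow the same pattern: keep the expected argument count live
across the copy/fill loop (using a spare register for the scaled copy or fill
end address where needed) and move it into the actual-argument-count register
right before calling the code entry. As a summary, here is a standalone C++
sketch of that control flow; it is an editor's model with invented names
(ArgumentsAdaptor, CallCodeEntry), not the MacroAssembler code, and it omits
the receiver.

#include <cstddef>
#include <iostream>
#include <string>
#include <vector>

namespace {

const std::string kUndefined = "undefined";

// Stands in for "__ Call(code_entry)" with the count in rax/r0/a0/x0/eax.
void CallCodeEntry(size_t argc, const std::vector<std::string>& frame) {
  std::cout << "argc=" << argc << ":";
  for (const std::string& arg : frame) std::cout << " " << arg;
  std::cout << "\n";
}

void ArgumentsAdaptor(const std::vector<std::string>& actual, size_t expected) {
  std::vector<std::string> frame;
  if (actual.size() >= expected) {
    // "Enough parameters": copy only the first `expected` arguments into the
    // adaptor frame.
    frame.assign(actual.begin(),
                 actual.begin() + static_cast<std::ptrdiff_t>(expected));
  } else {
    // "Too few parameters": copy everything, then fill up with undefined.
    frame = actual;
    frame.resize(expected, kUndefined);
  }
  // The common fix: both paths converge on the call with the count set to the
  // expected number of arguments.
  CallCodeEntry(expected, frame);
}

}  // namespace

int main() {
  ArgumentsAdaptor({"a", "b", "c"}, 2);  // enough: argc=2: a b
  ArgumentsAdaptor({"a"}, 3);  // too few: argc=3: a undefined undefined
  return 0;
}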