Reviewers: fschneider,

Description:
Fix and enable the NEW_NON_STRICT_FAST ArgumentsAccess stub on x64.

The stub now zeroes r8 before checking for a parameter map, so the
no_parameter_map path starts from a zero map size; the tagged parameter
count in r9 is computed from the untagged count in rbx instead of being
reloaded from the stack; and the now-redundant SmiToInteger64 of r8 before
the arguments copy loop is dropped. Full codegen picks the stub type per
function: NEW_STRICT outside classic mode, NEW_NON_STRICT_SLOW for
functions with duplicate parameters, and NEW_NON_STRICT_FAST otherwise.

[email protected]
BUG=v8:1903
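
For the first code-stubs-x64.cc hunk: the new xor_(r8, r8) guarantees that
r8, which holds the parameter map size in bytes, is zero on the
no_parameter_map path instead of whatever the register happened to contain.
Below is a minimal standalone sketch of that size computation, in plain C++
rather than V8 code; the header-size constants are illustrative and assume
8-byte pointers as on x64.

  // Plain C++ sketch, not V8 code; constants are illustrative.
  #include <cstdio>

  int main() {
    const int kPointerSize = 8;                           // x64
    const int kFixedArrayHeaderSize = 2 * kPointerSize;   // map + length (assumed)
    const int kParameterMapHeaderSize =
        kFixedArrayHeaderSize + 2 * kPointerSize;

    // r8 carries the byte size of the parameter map; the patch zeroes it
    // up front so the no_parameter_map path does not use a stale value.
    const int counts[] = {0, 2, 5};                       // mapped parameter counts
    for (int mapped : counts) {
      int r8 = 0;                                         // __ xor_(r8, r8)
      if (mapped != 0) {                                  // __ testq(rbx, rbx) not zero
        // __ lea(r8, Operand(rbx, times_pointer_size, kParameterMapHeaderSize))
        r8 = mapped * kPointerSize + kParameterMapHeaderSize;
      }
      std::printf("mapped=%d -> parameter map size=%d bytes\n", mapped, r8);
    }
    return 0;
  }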


Please review this at http://codereview.chromium.org/9179010/

SVN Base: https://v8.googlecode.com/svn/branches/bleeding_edge

Affected files:
  M src/x64/code-stubs-x64.cc
  M src/x64/full-codegen-x64.cc


Index: src/x64/code-stubs-x64.cc
diff --git a/src/x64/code-stubs-x64.cc b/src/x64/code-stubs-x64.cc
index 3cd6740b684bab838e57b148bc82738d3debe84f..c280b37f79600d24bc99b3d4d31c93c23aada43d 100644
--- a/src/x64/code-stubs-x64.cc
+++ b/src/x64/code-stubs-x64.cc
@@ -2357,6 +2357,7 @@ void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) {
   const int kParameterMapHeaderSize =
       FixedArray::kHeaderSize + 2 * kPointerSize;
   Label no_parameter_map;
+  __ xor_(r8, r8);
   __ testq(rbx, rbx);
   __ j(zero, &no_parameter_map, Label::kNear);
   __ lea(r8, Operand(rbx, times_pointer_size, kParameterMapHeaderSize));
@@ -2450,16 +2451,13 @@ void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) {
   Label parameters_loop, parameters_test;

   // Load tagged parameter count into r9.
-  __ movq(r9, Operand(rsp, 1 * kPointerSize));
+  __ Integer64PlusConstantToSmi(r9, rbx, 0);
   __ Move(r8, Smi::FromInt(Context::MIN_CONTEXT_SLOTS));
-  __ addq(r8, Operand(rsp, 3 * kPointerSize));
+  __ addq(r8, Operand(rsp, 1 * kPointerSize));
   __ subq(r8, r9);
   __ Move(r11, factory->the_hole_value());
   __ movq(rdx, rdi);
-  __ SmiToInteger64(kScratchRegister, r9);
-  __ lea(rdi, Operand(rdi, kScratchRegister,
-                      times_pointer_size,
-                      kParameterMapHeaderSize));
+  __ lea(rdi, Operand(rdi, rbx, times_pointer_size, kParameterMapHeaderSize));
   // r9 = loop variable (tagged)
   // r8 = mapping index (tagged)
   // r11 = the hole value
@@ -2497,7 +2495,6 @@ void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) {
   __ movq(rdx, Operand(rsp, 2 * kPointerSize));
   // Untag rcx and r8 for the loop below.
   __ SmiToInteger64(rcx, rcx);
-  __ SmiToInteger64(r8, r8);
   __ lea(kScratchRegister, Operand(r8, times_pointer_size, 0));
   __ subq(rdx, kScratchRegister);
   __ jmp(&arguments_test, Label::kNear);
Index: src/x64/full-codegen-x64.cc
diff --git a/src/x64/full-codegen-x64.cc b/src/x64/full-codegen-x64.cc
index 24df20ba767bd76df3fe06cbae9109d56afe6512..c3dea0ea8df3c8465ba5f9dac8c0e5ba48bb1cdc 100644
--- a/src/x64/full-codegen-x64.cc
+++ b/src/x64/full-codegen-x64.cc
@@ -222,13 +222,19 @@ void FullCodeGenerator::Generate(CompilationInfo* info) {
            Operand(rbp, StandardFrameConstants::kCallerSPOffset + offset));
     __ push(rdx);
     __ Push(Smi::FromInt(num_parameters));
-    // Arguments to ArgumentsAccessStub:
+    // Arguments to ArgumentsAccessStub (all NEW_* variants):
     //   function, receiver address, parameter count.
     // The stub will rewrite receiver and parameter count if the previous
     // stack frame was an arguments adapter frame.
-    ArgumentsAccessStub stub(
-        is_classic_mode() ? ArgumentsAccessStub::NEW_NON_STRICT_SLOW
-                          : ArgumentsAccessStub::NEW_STRICT);
+    ArgumentsAccessStub::Type type;
+    if (!is_classic_mode()) {
+      type = ArgumentsAccessStub::NEW_STRICT;
+    } else if (function()->has_duplicate_parameters()) {
+      type = ArgumentsAccessStub::NEW_NON_STRICT_SLOW;
+    } else {
+      type = ArgumentsAccessStub::NEW_NON_STRICT_FAST;
+    }
+    ArgumentsAccessStub stub(type);
     __ CallStub(&stub);

     SetVar(arguments, rax, rbx, rdx);
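
For reference, the stub choice added to FullCodeGenerator::Generate above,
restated as a standalone sketch. This is plain C++, not V8 API: ChooseStub
and the local enum are hypothetical, while is_classic_mode and
has_duplicate_parameters mirror the predicates used in the diff.

  // Standalone restatement of the three-way choice; names are illustrative.
  enum ArgumentsStubType { NEW_STRICT, NEW_NON_STRICT_SLOW, NEW_NON_STRICT_FAST };

  ArgumentsStubType ChooseStub(bool is_classic_mode,
                               bool has_duplicate_parameters) {
    if (!is_classic_mode) {
      // Strict mode: the arguments object never aliases the formals.
      return NEW_STRICT;
    }
    if (has_duplicate_parameters) {
      // Duplicate parameter names keep using the slow stub.
      return NEW_NON_STRICT_SLOW;
    }
    // Classic mode without duplicate parameters: the fast stub,
    // now enabled on x64 by this patch.
    return NEW_NON_STRICT_FAST;
  }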

