Revision: 21499
Author:   [email protected]
Date:     Mon May 26 13:59:24 2014 UTC
Log: Reland "Customized support for feedback on calls to Array." and follow-up fixes.

Comparing one CallIC::State to another was not done correctly, leading to a failure to patch a CallIC when transitioning from monomorphic Array to megamorphic.

BUG=chromium:377198,chromium:377290
LOG=Y
[email protected]

Review URL: https://codereview.chromium.org/305493003
http://code.google.com/p/v8/source/detail?r=21499

Added:
 /branches/bleeding_edge/test/mjsunit/regress/regress-377290.js
Modified:
 /branches/bleeding_edge/src/arm/code-stubs-arm.cc
 /branches/bleeding_edge/src/arm64/code-stubs-arm64.cc
 /branches/bleeding_edge/src/ast.h
 /branches/bleeding_edge/src/code-stubs.h
 /branches/bleeding_edge/src/hydrogen.cc
 /branches/bleeding_edge/src/hydrogen.h
 /branches/bleeding_edge/src/ia32/code-stubs-ia32.cc
 /branches/bleeding_edge/src/ic.cc
 /branches/bleeding_edge/src/ic.h
 /branches/bleeding_edge/src/mips/code-stubs-mips.cc
 /branches/bleeding_edge/src/objects.cc
 /branches/bleeding_edge/src/type-info.cc
 /branches/bleeding_edge/src/type-info.h
 /branches/bleeding_edge/src/typing.cc
 /branches/bleeding_edge/src/x64/code-stubs-x64.cc
 /branches/bleeding_edge/test/mjsunit/array-constructor-feedback.js
 /branches/bleeding_edge/test/mjsunit/array-feedback.js

=======================================
--- /dev/null
+++ /branches/bleeding_edge/test/mjsunit/regress/regress-377290.js Mon May 26 13:59:24 2014 UTC
@@ -0,0 +1,17 @@
+// Copyright 2014 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Flags: --expose-gc
+
+Object.prototype.__defineGetter__('constructor', function() { throw 42; });
+__v_7 = [
+  function() { [].push() },
+];
+for (var __v_6 = 0; __v_6 < 5; ++__v_6) {
+  for (var __v_8 in __v_7) {
+    print(__v_8, " -> ", __v_7[__v_8]);
+    gc();
+    try { __v_7[__v_8](); } catch (e) {};
+  }
+}
=======================================
--- /branches/bleeding_edge/src/arm/code-stubs-arm.cc Mon May 26 09:04:00 2014 UTC
+++ /branches/bleeding_edge/src/arm/code-stubs-arm.cc Mon May 26 13:59:24 2014 UTC
@@ -2934,11 +2934,13 @@
 }


-void CallFunctionStub::Generate(MacroAssembler* masm) {
+static void CallFunctionNoFeedback(MacroAssembler* masm,
+                                   int argc, bool needs_checks,
+                                   bool call_as_method) {
   // r1 : the function to call
   Label slow, non_function, wrap, cont;

-  if (NeedsChecks()) {
+  if (needs_checks) {
     // Check that the function is really a JavaScript function.
     // r1: pushed function (to be verified)
     __ JumpIfSmi(r1, &non_function);
@@ -2950,18 +2952,17 @@

   // Fast-case: Invoke the function now.
   // r1: pushed function
-  int argc = argc_;
   ParameterCount actual(argc);

-  if (CallAsMethod()) {
-    if (NeedsChecks()) {
+  if (call_as_method) {
+    if (needs_checks) {
       EmitContinueIfStrictOrNative(masm, &cont);
     }

     // Compute the receiver in sloppy mode.
     __ ldr(r3, MemOperand(sp, argc * kPointerSize));

-    if (NeedsChecks()) {
+    if (needs_checks) {
       __ JumpIfSmi(r3, &wrap);
       __ CompareObjectType(r3, r4, r4, FIRST_SPEC_OBJECT_TYPE);
       __ b(lt, &wrap);
@@ -2974,17 +2975,22 @@

   __ InvokeFunction(r1, actual, JUMP_FUNCTION, NullCallWrapper());

-  if (NeedsChecks()) {
+  if (needs_checks) {
     // Slow-case: Non-function called.
     __ bind(&slow);
     EmitSlowCase(masm, argc, &non_function);
   }

-  if (CallAsMethod()) {
+  if (call_as_method) {
     __ bind(&wrap);
     EmitWrapCase(masm, argc, &cont);
   }
 }
+
+
+void CallFunctionStub::Generate(MacroAssembler* masm) {
+  CallFunctionNoFeedback(masm, argc_, NeedsChecks(), CallAsMethod());
+}


 void CallConstructStub::Generate(MacroAssembler* masm) {
@@ -3046,7 +3052,7 @@
   __ bind(&do_call);
   // Set expected number of arguments to zero (not changing r0).
   __ mov(r2, Operand::Zero());
-  __ Jump(isolate()->builtins()->ArgumentsAdaptorTrampoline(),
+  __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
           RelocInfo::CODE_TARGET);
 }

@@ -3058,6 +3064,51 @@
   __ ldr(vector, FieldMemOperand(vector,
SharedFunctionInfo::kFeedbackVectorOffset));
 }
+
+
+void CallICStub::Generate_MonomorphicArray(MacroAssembler* masm, Label* miss) {
+  // r1 - function
+  // r2 - feedback vector
+  // r3 - slot id
+  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r4);
+  __ cmp(r1, r4);
+  __ b(ne, miss);
+
+  __ mov(r0, Operand(arg_count()));
+  __ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3));
+  __ ldr(r2, FieldMemOperand(r4, FixedArray::kHeaderSize));
+  // Verify that r2 contains an AllocationSite
+  __ AssertUndefinedOrAllocationSite(r2, r4);
+  ArrayConstructorStub stub(masm->isolate(), arg_count());
+  __ TailCallStub(&stub);
+}
+
+
+void CallICStub::Generate_CustomFeedbackCall(MacroAssembler* masm) {
+  // r1 - function
+  // r2 - feedback vector
+  // r3 - slot id
+  Label miss;
+
+  if (state_.stub_type() == CallIC::MONOMORPHIC_ARRAY) {
+    Generate_MonomorphicArray(masm, &miss);
+  } else {
+    // So far there is only one customer for our custom feedback scheme.
+    UNREACHABLE();
+  }
+
+  __ bind(&miss);
+  GenerateMiss(masm);
+
+ // The slow case, we need this no matter what to complete a call after a miss.
+  CallFunctionNoFeedback(masm,
+                         arg_count(),
+                         true,
+                         CallAsMethod());
+
+  // Unreachable.
+  __ stop("Unexpected code address");
+}


 void CallICStub::Generate(MacroAssembler* masm) {
@@ -3071,6 +3122,11 @@

   EmitLoadTypeFeedbackVector(masm, r2);

+  if (state_.stub_type() != CallIC::DEFAULT) {
+    Generate_CustomFeedbackCall(masm);
+    return;
+  }
+
   // The checks. First, does r1 match the recorded monomorphic target?
   __ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3));
   __ ldr(r4, FieldMemOperand(r4, FixedArray::kHeaderSize));
=======================================
--- /branches/bleeding_edge/src/arm64/code-stubs-arm64.cc Mon May 26 09:04:00 2014 UTC
+++ /branches/bleeding_edge/src/arm64/code-stubs-arm64.cc Mon May 26 13:59:24 2014 UTC
@@ -3217,10 +3217,10 @@
 }


-void CallFunctionStub::Generate(MacroAssembler* masm) {
-  ASM_LOCATION("CallFunctionStub::Generate");
+static void CallFunctionNoFeedback(MacroAssembler* masm,
+                                   int argc, bool needs_checks,
+                                   bool call_as_method) {
   // x1  function    the function to call
-
   Register function = x1;
   Register type = x4;
   Label slow, non_function, wrap, cont;
@@ -3228,7 +3228,7 @@
// TODO(jbramley): This function has a lot of unnamed registers. Name them,
   // and tidy things up a bit.

-  if (NeedsChecks()) {
+  if (needs_checks) {
     // Check that the function is really a JavaScript function.
     __ JumpIfSmi(function, &non_function);

@@ -3238,18 +3238,17 @@

   // Fast-case: Invoke the function now.
   // x1  function  pushed function
-  int argc = argc_;
   ParameterCount actual(argc);

-  if (CallAsMethod()) {
-    if (NeedsChecks()) {
+  if (call_as_method) {
+    if (needs_checks) {
       EmitContinueIfStrictOrNative(masm, &cont);
     }

     // Compute the receiver in sloppy mode.
     __ Peek(x3, argc * kPointerSize);

-    if (NeedsChecks()) {
+    if (needs_checks) {
       __ JumpIfSmi(x3, &wrap);
__ JumpIfObjectType(x3, x10, type, FIRST_SPEC_OBJECT_TYPE, &wrap, lt);
     } else {
@@ -3263,18 +3262,23 @@
                     actual,
                     JUMP_FUNCTION,
                     NullCallWrapper());
-
-  if (NeedsChecks()) {
+  if (needs_checks) {
     // Slow-case: Non-function called.
     __ Bind(&slow);
     EmitSlowCase(masm, argc, function, type, &non_function);
   }

-  if (CallAsMethod()) {
+  if (call_as_method) {
     __ Bind(&wrap);
     EmitWrapCase(masm, argc, &cont);
   }
 }
+
+
+void CallFunctionStub::Generate(MacroAssembler* masm) {
+  ASM_LOCATION("CallFunctionStub::Generate");
+  CallFunctionNoFeedback(masm, argc_, NeedsChecks(), CallAsMethod());
+}


 void CallConstructStub::Generate(MacroAssembler* masm) {
@@ -3354,6 +3358,59 @@
   __ Ldr(vector, FieldMemOperand(vector,
SharedFunctionInfo::kFeedbackVectorOffset));
 }
+
+
+void CallICStub::Generate_MonomorphicArray(MacroAssembler* masm, Label* miss) {
+  // x1 - function
+  // x2 - feedback vector
+  // x3 - slot id
+  Register function = x1;
+  Register feedback_vector = x2;
+  Register index = x3;
+  Register scratch = x4;
+
+  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, scratch);
+  __ Cmp(function, scratch);
+  __ B(ne, miss);
+
+  Register allocation_site = feedback_vector;
+  __ Mov(x0, Operand(arg_count()));
+
+  __ Add(scratch, feedback_vector,
+         Operand::UntagSmiAndScale(index, kPointerSizeLog2));
+ __ Ldr(allocation_site, FieldMemOperand(scratch, FixedArray::kHeaderSize));
+
+  // Verify that x2 contains an AllocationSite
+  __ AssertUndefinedOrAllocationSite(allocation_site, scratch);
+  ArrayConstructorStub stub(masm->isolate(), arg_count());
+  __ TailCallStub(&stub);
+}
+
+
+void CallICStub::Generate_CustomFeedbackCall(MacroAssembler* masm) {
+  // x1 - function
+  // x2 - feedback vector
+  // x3 - slot id
+  Label miss;
+
+  if (state_.stub_type() == CallIC::MONOMORPHIC_ARRAY) {
+    Generate_MonomorphicArray(masm, &miss);
+  } else {
+    // So far there is only one customer for our custom feedback scheme.
+    UNREACHABLE();
+  }
+
+  __ bind(&miss);
+  GenerateMiss(masm);
+
+ // The slow case, we need this no matter what to complete a call after a miss.
+  CallFunctionNoFeedback(masm,
+                         arg_count(),
+                         true,
+                         CallAsMethod());
+
+  __ Unreachable();
+}


 void CallICStub::Generate(MacroAssembler* masm) {
@@ -3374,6 +3431,11 @@

   EmitLoadTypeFeedbackVector(masm, feedback_vector);

+  if (state_.stub_type() != CallIC::DEFAULT) {
+    Generate_CustomFeedbackCall(masm);
+    return;
+  }
+
   // The checks. First, does x1 match the recorded monomorphic target?
   __ Add(x4, feedback_vector,
          Operand::UntagSmiAndScale(index, kPointerSizeLog2));
=======================================
--- /branches/bleeding_edge/src/ast.h   Mon May 26 09:04:00 2014 UTC
+++ /branches/bleeding_edge/src/ast.h   Mon May 26 13:59:24 2014 UTC
@@ -1761,12 +1761,26 @@
     }
     return !target_.is_null();
   }
+
+  bool global_call() const {
+    VariableProxy* proxy = expression_->AsVariableProxy();
+    return proxy != NULL && proxy->var()->IsUnallocated();
+  }
+
+  bool known_global_function() const {
+    return global_call() && !target_.is_null();
+  }

   Handle<JSFunction> target() { return target_; }

   Handle<Cell> cell() { return cell_; }
+
+  Handle<AllocationSite> allocation_site() { return allocation_site_; }

   void set_target(Handle<JSFunction> target) { target_ = target; }
+  void set_allocation_site(Handle<AllocationSite> site) {
+    allocation_site_ = site;
+  }
bool ComputeGlobalTarget(Handle<GlobalObject> global, LookupResult* lookup);

   BailoutId ReturnId() const { return return_id_; }
@@ -1809,6 +1823,7 @@

   Handle<JSFunction> target_;
   Handle<Cell> cell_;
+  Handle<AllocationSite> allocation_site_;
   int call_feedback_slot_;

   const BailoutId return_id_;
=======================================
--- /branches/bleeding_edge/src/code-stubs.h    Mon May 26 09:04:00 2014 UTC
+++ /branches/bleeding_edge/src/code-stubs.h    Mon May 26 13:59:24 2014 UTC
@@ -824,6 +824,8 @@

   // Code generation helpers.
   void GenerateMiss(MacroAssembler* masm);
+  void Generate_CustomFeedbackCall(MacroAssembler* masm);
+  void Generate_MonomorphicArray(MacroAssembler* masm, Label* miss);

   CallIC::State state_;
 };
=======================================
--- /branches/bleeding_edge/src/hydrogen.cc     Mon May 26 13:10:52 2014 UTC
+++ /branches/bleeding_edge/src/hydrogen.cc     Mon May 26 13:59:24 2014 UTC
@@ -2474,14 +2474,14 @@
   }

   // Special loop unfolding case
-  static const int kLoopUnfoldLimit = 8;
-  STATIC_ASSERT(JSArray::kPreallocatedArrayElements <= kLoopUnfoldLimit);
+  STATIC_ASSERT(JSArray::kPreallocatedArrayElements <=
+                kElementLoopUnrollThreshold);
   int initial_capacity = -1;
   if (from->IsInteger32Constant() && to->IsInteger32Constant()) {
     int constant_from = from->GetInteger32Constant();
     int constant_to = to->GetInteger32Constant();

-    if (constant_from == 0 && constant_to <= kLoopUnfoldLimit) {
+    if (constant_from == 0 && constant_to <= kElementLoopUnrollThreshold) {
       initial_capacity = constant_to;
     }
   }
@@ -8232,6 +8232,56 @@
   }
   return graph()->GetConstantUndefined();
 }
+
+
+void HOptimizedGraphBuilder::BuildArrayCall(Expression* expression,
+                                            int arguments_count,
+                                            HValue* function,
+                                            Handle<AllocationSite> site) {
+  Add<HCheckValue>(function, array_function());
+
+  if (IsCallArrayInlineable(arguments_count, site)) {
+    BuildInlinedCallArray(expression, arguments_count, site);
+    return;
+  }
+
+  HInstruction* call = PreProcessCall(New<HCallNewArray>(
+      function, arguments_count + 1, site->GetElementsKind()));
+  if (expression->IsCall()) {
+    Drop(1);
+  }
+  ast_context()->ReturnInstruction(call, expression->id());
+}
+
+
+bool HOptimizedGraphBuilder::TryHandleArrayCall(Call* expr, HValue* function) {
+  if (!array_function().is_identical_to(expr->target())) {
+    return false;
+  }
+
+  Handle<AllocationSite> site = expr->allocation_site();
+  if (site.is_null()) return false;
+
+  BuildArrayCall(expr,
+                 expr->arguments()->length(),
+                 function,
+                 site);
+  return true;
+}
+
+
+bool HOptimizedGraphBuilder::TryHandleArrayCallNew(CallNew* expr,
+                                                   HValue* function) {
+  if (!array_function().is_identical_to(expr->target())) {
+    return false;
+  }
+
+  BuildArrayCall(expr,
+                 expr->arguments()->length(),
+                 function,
+                 expr->allocation_site());
+  return true;
+}


 void HOptimizedGraphBuilder::VisitCall(Call* expr) {
@@ -8328,8 +8378,7 @@
     // evaluation of the arguments.
     CHECK_ALIVE(VisitForValue(expr->expression()));
     HValue* function = Top();
-    bool global_call = proxy != NULL && proxy->var()->IsUnallocated();
-    if (global_call) {
+    if (expr->global_call()) {
       Variable* var = proxy->var();
       bool known_global_function = false;
// If there is a global property cell for the name at compile time and
@@ -8363,6 +8412,7 @@
           return;
         }
         if (TryInlineApiFunctionCall(expr, receiver)) return;
+        if (TryHandleArrayCall(expr, function)) return;
         if (TryInlineCall(expr)) return;

         PushArgumentsFromEnvironment(argument_count);
@@ -8412,20 +8462,21 @@
 }


-void HOptimizedGraphBuilder::BuildInlinedCallNewArray(CallNew* expr) {
+void HOptimizedGraphBuilder::BuildInlinedCallArray(
+    Expression* expression,
+    int argument_count,
+    Handle<AllocationSite> site) {
+  ASSERT(!site.is_null());
+  ASSERT(argument_count >= 0 && argument_count <= 1);
   NoObservableSideEffectsScope no_effects(this);

-  int argument_count = expr->arguments()->length();
   // We should at least have the constructor on the expression stack.
   HValue* constructor = environment()->ExpressionStackAt(argument_count);

-  ElementsKind kind = expr->elements_kind();
-  Handle<AllocationSite> site = expr->allocation_site();
-  ASSERT(!site.is_null());
-
// Register on the site for deoptimization if the transition feedback changes.
   AllocationSite::AddDependentCompilationInfo(
       site, AllocationSite::TRANSITIONS, top_info());
+  ElementsKind kind = site->GetElementsKind();
   HInstruction* site_instruction = Add<HConstant>(site);

// In the single constant argument case, we may have to adjust elements kind
@@ -8448,32 +8499,12 @@
                                site_instruction,
                                constructor,
                                DISABLE_ALLOCATION_SITES);
-  HValue* new_object;
-  if (argument_count == 0) {
-    new_object = array_builder.AllocateEmptyArray();
-  } else if (argument_count == 1) {
-    HValue* argument = environment()->Top();
-    new_object = BuildAllocateArrayFromLength(&array_builder, argument);
-  } else {
-    HValue* length = Add<HConstant>(argument_count);
-    // Smi arrays need to initialize array elements with the hole because
-    // bailout could occur if the arguments don't fit in a smi.
-    //
- // TODO(mvstanton): If all the arguments are constants in smi range, then
-    // we could set fill_with_hole to false and save a few instructions.
-    JSArrayBuilder::FillMode fill_mode = IsFastSmiElementsKind(kind)
-        ? JSArrayBuilder::FILL_WITH_HOLE
-        : JSArrayBuilder::DONT_FILL_WITH_HOLE;
-    new_object = array_builder.AllocateArray(length, length, fill_mode);
-    HValue* elements = array_builder.GetElementsLocation();
-    for (int i = 0; i < argument_count; i++) {
- HValue* value = environment()->ExpressionStackAt(argument_count - i - 1);
-      HValue* constant_i = Add<HConstant>(i);
-      Add<HStoreKeyed>(elements, constant_i, value, kind);
-    }
-  }
+  HValue* new_object = argument_count == 0
+      ? array_builder.AllocateEmptyArray()
+      : BuildAllocateArrayFromLength(&array_builder, Top());

-  Drop(argument_count + 1);  // drop constructor and args.
+  int args_to_drop = argument_count + (expression->IsCall() ? 2 : 1);
+  Drop(args_to_drop);
   ast_context()->ReturnValue(new_object);
 }

@@ -8487,14 +8518,13 @@
 }


-bool HOptimizedGraphBuilder::IsCallNewArrayInlineable(CallNew* expr) {
+bool HOptimizedGraphBuilder::IsCallArrayInlineable(
+    int argument_count,
+    Handle<AllocationSite> site) {
   Handle<JSFunction> caller = current_info()->closure();
-  Handle<JSFunction> target(isolate()->native_context()->array_function(),
-                            isolate());
-  int argument_count = expr->arguments()->length();
+  Handle<JSFunction> target = array_function();
// We should have the function plus array arguments on the environment stack.
   ASSERT(environment()->length() >= (argument_count + 1));
-  Handle<AllocationSite> site = expr->allocation_site();
   ASSERT(!site.is_null());

   bool inline_ok = false;
@@ -8504,22 +8534,24 @@
       HValue* argument = Top();
       if (argument->IsConstant()) {
         // Do not inline if the constant length argument is not a smi or
-        // outside the valid range for a fast array.
+        // outside the valid range for unrolled loop initialization.
         HConstant* constant_argument = HConstant::cast(argument);
         if (constant_argument->HasSmiValue()) {
           int value = constant_argument->Integer32Value();
-          inline_ok = value >= 0 &&
-              value < JSObject::kInitialMaxFastElementArray;
+          inline_ok = value >= 0 && value <= kElementLoopUnrollThreshold;
           if (!inline_ok) {
             TraceInline(target, caller,
-                        "Length outside of valid array range");
+ "Constant length outside of valid inlining range.");
           }
         }
       } else {
-        inline_ok = true;
+        TraceInline(target, caller,
+                    "Dont inline [new] Array(n) where n isn't constant.");
       }
-    } else {
+    } else if (argument_count == 0) {
       inline_ok = true;
+    } else {
+      TraceInline(target, caller, "Too many arguments to inline.");
     }
   } else {
     TraceInline(target, caller, "AllocationSite requested no inlining.");
@@ -8644,25 +8676,10 @@
   } else {
// The constructor function is both an operand to the instruction and an
     // argument to the construct call.
-    Handle<JSFunction> array_function(
-        isolate()->native_context()->array_function(), isolate());
- bool use_call_new_array = expr->target().is_identical_to(array_function);
-    if (use_call_new_array && IsCallNewArrayInlineable(expr)) {
- // Verify we are still calling the array function for our native context.
-      Add<HCheckValue>(function, array_function);
-      BuildInlinedCallNewArray(expr);
-      return;
-    }
+    if (TryHandleArrayCallNew(expr, function)) return;

-    HBinaryCall* call;
-    if (use_call_new_array) {
-      Add<HCheckValue>(function, array_function);
-      call = New<HCallNewArray>(function, argument_count,
-                                expr->elements_kind());
-    } else {
-      call = New<HCallNew>(function, argument_count);
-    }
-    PreProcessCall(call);
+    HInstruction* call =
+        PreProcessCall(New<HCallNew>(function, argument_count));
     return ast_context()->ReturnInstruction(call, expr->id());
   }
 }
=======================================
--- /branches/bleeding_edge/src/hydrogen.h      Mon May 26 09:04:00 2014 UTC
+++ /branches/bleeding_edge/src/hydrogen.h      Mon May 26 13:59:24 2014 UTC
@@ -1295,6 +1295,10 @@

void AddSimulate(BailoutId id, RemovableSimulate removable = FIXED_SIMULATE);

+ // When initializing arrays, we'll unfold the loop if the number of elements
+  // is known at compile time and is <= kElementLoopUnrollThreshold.
+  static const int kElementLoopUnrollThreshold = 8;
+
  protected:
   virtual bool BuildGraph() = 0;

@@ -2242,6 +2246,11 @@
   // Try to optimize fun.apply(receiver, arguments) pattern.
   bool TryCallApply(Call* expr);

+  bool TryHandleArrayCall(Call* expr, HValue* function);
+  bool TryHandleArrayCallNew(CallNew* expr, HValue* function);
+ void BuildArrayCall(Expression* expr, int arguments_count, HValue* function,
+                      Handle<AllocationSite> cell);
+
   HValue* ImplicitReceiverFor(HValue* function,
                               Handle<JSFunction> target);

@@ -2325,8 +2334,13 @@
       ElementsKind fixed_elements_kind,
       HValue* byte_length, HValue* length);

-  bool IsCallNewArrayInlineable(CallNew* expr);
-  void BuildInlinedCallNewArray(CallNew* expr);
+  Handle<JSFunction> array_function() {
+    return handle(isolate()->native_context()->array_function());
+  }
+
+ bool IsCallArrayInlineable(int argument_count, Handle<AllocationSite> site);
+  void BuildInlinedCallArray(Expression* expression, int argument_count,
+                             Handle<AllocationSite> site);

   class PropertyAccessInfo {
    public:
=======================================
--- /branches/bleeding_edge/src/ia32/code-stubs-ia32.cc Mon May 26 09:04:00 2014 UTC
+++ /branches/bleeding_edge/src/ia32/code-stubs-ia32.cc Mon May 26 13:59:24 2014 UTC
@@ -2335,11 +2335,13 @@
 }


-void CallFunctionStub::Generate(MacroAssembler* masm) {
+static void CallFunctionNoFeedback(MacroAssembler* masm,
+                                   int argc, bool needs_checks,
+                                   bool call_as_method) {
   // edi : the function to call
   Label slow, non_function, wrap, cont;

-  if (NeedsChecks()) {
+  if (needs_checks) {
     // Check that the function really is a JavaScript function.
     __ JumpIfSmi(edi, &non_function);

@@ -2349,17 +2351,17 @@
   }

   // Fast-case: Just invoke the function.
-  ParameterCount actual(argc_);
+  ParameterCount actual(argc);

-  if (CallAsMethod()) {
-    if (NeedsChecks()) {
+  if (call_as_method) {
+    if (needs_checks) {
       EmitContinueIfStrictOrNative(masm, &cont);
     }

     // Load the receiver from the stack.
-    __ mov(eax, Operand(esp, (argc_ + 1) * kPointerSize));
+    __ mov(eax, Operand(esp, (argc + 1) * kPointerSize));

-    if (NeedsChecks()) {
+    if (needs_checks) {
       __ JumpIfSmi(eax, &wrap);

       __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx);
@@ -2373,18 +2375,23 @@

   __ InvokeFunction(edi, actual, JUMP_FUNCTION, NullCallWrapper());

-  if (NeedsChecks()) {
+  if (needs_checks) {
     // Slow-case: Non-function called.
     __ bind(&slow);
     // (non_function is bound in EmitSlowCase)
-    EmitSlowCase(isolate(), masm, argc_, &non_function);
+    EmitSlowCase(masm->isolate(), masm, argc, &non_function);
   }

-  if (CallAsMethod()) {
+  if (call_as_method) {
     __ bind(&wrap);
-    EmitWrapCase(masm, argc_, &cont);
+    EmitWrapCase(masm, argc, &cont);
   }
 }
+
+
+void CallFunctionStub::Generate(MacroAssembler* masm) {
+  CallFunctionNoFeedback(masm, argc_, NeedsChecks(), CallAsMethod());
+}


 void CallConstructStub::Generate(MacroAssembler* masm) {
@@ -2461,6 +2468,51 @@
   __ mov(vector, FieldOperand(vector,
                               SharedFunctionInfo::kFeedbackVectorOffset));
 }
+
+
+void CallICStub::Generate_MonomorphicArray(MacroAssembler* masm, Label* miss) {
+  // edi - function
+  // ebx - feedback vector
+  // edx - slot id
+  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx);
+  __ cmp(edi, ecx);
+  __ j(not_equal, miss);
+
+  __ mov(eax, arg_count());
+  __ mov(ebx, FieldOperand(ebx, edx, times_half_pointer_size,
+                           FixedArray::kHeaderSize));
+  // Verify that ebx contains an AllocationSite
+  __ AssertUndefinedOrAllocationSite(ebx);
+  ArrayConstructorStub stub(masm->isolate(), arg_count());
+  __ TailCallStub(&stub);
+}
+
+
+void CallICStub::Generate_CustomFeedbackCall(MacroAssembler* masm) {
+  // edi - function
+  // ebx - feedback vector
+  // edx - slot id
+  Label miss;
+
+  if (state_.stub_type() == CallIC::MONOMORPHIC_ARRAY) {
+    Generate_MonomorphicArray(masm, &miss);
+  } else {
+    // So far there is only one customer for our custom feedback scheme.
+    UNREACHABLE();
+  }
+
+  __ bind(&miss);
+  GenerateMiss(masm);
+
+ // The slow case, we need this no matter what to complete a call after a miss.
+  CallFunctionNoFeedback(masm,
+                         arg_count(),
+                         true,
+                         CallAsMethod());
+
+  // Unreachable.
+  __ int3();
+}


 void CallICStub::Generate(MacroAssembler* masm) {
@@ -2475,6 +2527,11 @@

   EmitLoadTypeFeedbackVector(masm, ebx);

+  if (state_.stub_type() != CallIC::DEFAULT) {
+    Generate_CustomFeedbackCall(masm);
+    return;
+  }
+
   // The checks. First, does edi match the recorded monomorphic target?
   __ cmp(edi, FieldOperand(ebx, edx, times_half_pointer_size,
                            FixedArray::kHeaderSize));
=======================================
--- /branches/bleeding_edge/src/ic.cc   Mon May 26 09:04:00 2014 UTC
+++ /branches/bleeding_edge/src/ic.cc   Mon May 26 13:59:24 2014 UTC
@@ -501,7 +501,14 @@
                    Code* target,
                    ConstantPoolArray* constant_pool) {
   // Currently, CallIC doesn't have state changes.
-  ASSERT(target->ic_state() == v8::internal::GENERIC);
+  if (target->ic_state() != v8::internal::MONOMORPHIC) return;
+  CallIC::State existing_state(target->extra_ic_state());
+
+  // Monomorphic array stubs don't need to be cleared because
+  // 1) the stub doesn't store information that should be cleared, and
+  // 2) the AllocationSite stored in the type feedback vector is immune
+  //    from gc type feedback clearing.
+  ASSERT(existing_state.stub_type() == MONOMORPHIC_ARRAY);
 }


@@ -1818,16 +1825,48 @@

 CallIC::State::State(ExtraICState extra_ic_state)
     : argc_(ArgcBits::decode(extra_ic_state)),
-      call_type_(CallTypeBits::decode(extra_ic_state)) {
+      call_type_(CallTypeBits::decode(extra_ic_state)),
+      stub_type_(StubTypeBits::decode(extra_ic_state)) {
 }


 ExtraICState CallIC::State::GetExtraICState() const {
   ExtraICState extra_ic_state =
       ArgcBits::encode(argc_) |
-      CallTypeBits::encode(call_type_);
+      CallTypeBits::encode(call_type_) |
+      StubTypeBits::encode(stub_type_);
   return extra_ic_state;
 }
+
+
+bool CallIC::DoCustomHandler(Handle<Object> receiver,
+                             Handle<Object> function,
+                             Handle<FixedArray> vector,
+                             Handle<Smi> slot,
+                             const State& state) {
+  ASSERT(FLAG_use_ic && function->IsJSFunction());
+
+  // Are we the array function?
+  Handle<JSFunction> array_function = Handle<JSFunction>(
+      isolate()->context()->native_context()->array_function(), isolate());
+  if (array_function.is_identical_to(Handle<JSFunction>::cast(function))) {
+    // Alter the slot.
+ Handle<AllocationSite> new_site = isolate()->factory()->NewAllocationSite();
+    vector->set(slot->value(), *new_site);
+    State new_state = state.ToMonomorphicArrayCallState();
+    CallICStub stub(isolate(), new_state);
+    set_target(*stub.GetCode());
+    Handle<String> name;
+    if (array_function->shared()->name()->IsString()) {
+      name = Handle<String>(String::cast(array_function->shared()->name()),
+                            isolate());
+    }
+
+    TRACE_IC("CallIC (Array call)", name);
+    return true;
+  }
+  return false;
+}


 void CallIC::HandleMiss(Handle<Object> receiver,
@@ -1837,18 +1876,35 @@
   State state(target()->extra_ic_state());
   Object* feedback = vector->get(slot->value());

-  if (feedback->IsJSFunction() || !function->IsJSFunction()) {
+  if (feedback->IsJSFunction() || !function->IsJSFunction() ||
+      state.stub_type() != DEFAULT) {
     // We are going generic.
-    ASSERT(!function->IsJSFunction() || *function != feedback);
-
     vector->set(slot->value(),
                 *TypeFeedbackInfo::MegamorphicSentinel(isolate()),
                 SKIP_WRITE_BARRIER);
+
+    State new_state = state.ToGenericState();
+    if (new_state != state) {
+      // Only happens when the array ic goes generic.
+      ASSERT(state.stub_type() == MONOMORPHIC_ARRAY &&
+             FLAG_use_ic);
+      CallICStub stub(isolate(), new_state);
+      Handle<Code> code = stub.GetCode();
+      set_target(*code);
+    }
+
     TRACE_GENERIC_IC(isolate(), "CallIC", "megamorphic");
   } else {
     // If we came here feedback must be the uninitialized sentinel,
     // and we are going monomorphic.
ASSERT(feedback == *TypeFeedbackInfo::UninitializedSentinel(isolate()));
+
+    // Do we want to install a custom handler?
+    if (FLAG_use_ic &&
+        DoCustomHandler(receiver, function, vector, slot, state)) {
+      return;
+    }
+
     Handle<JSFunction> js_function = Handle<JSFunction>::cast(function);
     Handle<Object> name(js_function->shared()->name(), isolate());
     TRACE_IC("CallIC", name);
=======================================
--- /branches/bleeding_edge/src/ic.h    Mon May 26 09:04:00 2014 UTC
+++ /branches/bleeding_edge/src/ic.h    Mon May 26 13:59:24 2014 UTC
@@ -333,20 +333,34 @@
 class CallIC: public IC {
  public:
   enum CallType { METHOD, FUNCTION };
+  enum StubType { DEFAULT, MONOMORPHIC_ARRAY };

   class State V8_FINAL BASE_EMBEDDED {
    public:
     explicit State(ExtraICState extra_ic_state);

+    static State MonomorphicArrayCallState(int argc, CallType call_type) {
+      return State(argc, call_type, MONOMORPHIC_ARRAY);
+    }
+
     static State DefaultCallState(int argc, CallType call_type) {
-      return State(argc, call_type);
+      return State(argc, call_type, DEFAULT);
     }

-    static State MegamorphicCallState(int argc, CallType call_type) {
-      return State(argc, call_type);
+    // Transition from the current state to another.
+    State ToGenericState() const {
+      return DefaultCallState(arg_count(), call_type());
     }

-    InlineCacheState GetICState() const { return ::v8::internal::GENERIC; }
+    State ToMonomorphicArrayCallState() const {
+      return MonomorphicArrayCallState(arg_count(), call_type());
+    }
+
+    InlineCacheState GetICState() const {
+      return stub_type_ == CallIC::DEFAULT
+          ? ::v8::internal::GENERIC
+          : ::v8::internal::MONOMORPHIC;
+    }

     ExtraICState GetExtraICState() const;

@@ -355,6 +369,7 @@

     int arg_count() const { return argc_; }
     CallType call_type() const { return call_type_; }
+    StubType stub_type() const { return stub_type_; }

     bool CallAsMethod() const { return call_type_ == METHOD; }

@@ -362,7 +377,8 @@

     bool operator==(const State& other_state) const {
       return (argc_ == other_state.argc_ &&
-              call_type_ == other_state.call_type_);
+              call_type_ == other_state.call_type_ &&
+              stub_type_ == other_state.stub_type_);
     }

     bool operator!=(const State& other_state) const {
@@ -370,17 +386,20 @@
     }

    private:
-    State(int argc,
-          CallType call_type)
+    State(int argc, CallType call_type, StubType stub_type)
         : argc_(argc),
-        call_type_(call_type) {
+        call_type_(call_type),
+        stub_type_(stub_type) {
     }

     class ArgcBits: public BitField<int, 0, Code::kArgumentsBits> {};
class CallTypeBits: public BitField<CallType, Code::kArgumentsBits, 1> {};
+    class StubTypeBits:
+ public BitField<StubType, Code::kArgumentsBits + 1, 1> {}; // NOLINT

     const int argc_;
     const CallType call_type_;
+    const StubType stub_type_;
   };

   explicit CallIC(Isolate* isolate)
@@ -392,6 +411,13 @@
                   Handle<FixedArray> vector,
                   Handle<Smi> slot);

+  // Returns true if a custom handler was installed.
+  bool DoCustomHandler(Handle<Object> receiver,
+                       Handle<Object> function,
+                       Handle<FixedArray> vector,
+                       Handle<Smi> slot,
+                       const State& new_state);
+
   // Code generator routines.
   static Handle<Code> initialize_stub(Isolate* isolate,
                                       int argc,
=======================================
--- /branches/bleeding_edge/src/mips/code-stubs-mips.cc Mon May 26 09:04:00 2014 UTC +++ /branches/bleeding_edge/src/mips/code-stubs-mips.cc Mon May 26 13:59:24 2014 UTC
@@ -3094,11 +3094,13 @@
 }


-void CallFunctionStub::Generate(MacroAssembler* masm) {
+static void CallFunctionNoFeedback(MacroAssembler* masm,
+                                   int argc, bool needs_checks,
+                                   bool call_as_method) {
   // a1 : the function to call
   Label slow, non_function, wrap, cont;

-  if (NeedsChecks()) {
+  if (needs_checks) {
     // Check that the function is really a JavaScript function.
     // a1: pushed function (to be verified)
     __ JumpIfSmi(a1, &non_function);
@@ -3110,18 +3112,17 @@

   // Fast-case: Invoke the function now.
   // a1: pushed function
-  int argc = argc_;
   ParameterCount actual(argc);

-  if (CallAsMethod()) {
-    if (NeedsChecks()) {
+  if (call_as_method) {
+    if (needs_checks) {
       EmitContinueIfStrictOrNative(masm, &cont);
     }

     // Compute the receiver in sloppy mode.
     __ lw(a3, MemOperand(sp, argc * kPointerSize));

-    if (NeedsChecks()) {
+    if (needs_checks) {
       __ JumpIfSmi(a3, &wrap);
       __ GetObjectType(a3, t0, t0);
       __ Branch(&wrap, lt, t0, Operand(FIRST_SPEC_OBJECT_TYPE));
@@ -3134,18 +3135,23 @@

   __ InvokeFunction(a1, actual, JUMP_FUNCTION, NullCallWrapper());

-  if (NeedsChecks()) {
+  if (needs_checks) {
     // Slow-case: Non-function called.
     __ bind(&slow);
     EmitSlowCase(masm, argc, &non_function);
   }

-  if (CallAsMethod()) {
+  if (call_as_method) {
     __ bind(&wrap);
     // Wrap the receiver and patch it back onto the stack.
     EmitWrapCase(masm, argc, &cont);
   }
 }
+
+
+void CallFunctionStub::Generate(MacroAssembler* masm) {
+  CallFunctionNoFeedback(masm, argc_, NeedsChecks(), CallAsMethod());
+}


 void CallConstructStub::Generate(MacroAssembler* masm) {
@@ -3207,8 +3213,8 @@
   __ bind(&do_call);
   // Set expected number of arguments to zero (not changing r0).
   __ li(a2, Operand(0, RelocInfo::NONE32));
-  __ Jump(isolate()->builtins()->ArgumentsAdaptorTrampoline(),
-          RelocInfo::CODE_TARGET);
+  __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
+           RelocInfo::CODE_TARGET);
 }


@@ -3219,6 +3225,51 @@
   __ lw(vector, FieldMemOperand(vector,
SharedFunctionInfo::kFeedbackVectorOffset));
 }
+
+
+void CallICStub::Generate_MonomorphicArray(MacroAssembler* masm, Label* miss) {
+  // a1 - function
+  // a2 - feedback vector
+  // a3 - slot id
+  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, at);
+  __ Branch(miss, ne, a1, Operand(at));
+
+  __ li(a0, Operand(arg_count()));
+  __ sll(at, a3, kPointerSizeLog2 - kSmiTagSize);
+  __ Addu(at, a2, Operand(at));
+  __ lw(a2, FieldMemOperand(at, FixedArray::kHeaderSize));
+  // Verify that a2 contains an AllocationSite
+  __ AssertUndefinedOrAllocationSite(a2, at);
+  ArrayConstructorStub stub(masm->isolate(), arg_count());
+  __ TailCallStub(&stub);
+}
+
+
+void CallICStub::Generate_CustomFeedbackCall(MacroAssembler* masm) {
+  // a1 - function
+  // a2 - feedback vector
+  // a3 - slot id
+  Label miss;
+
+  if (state_.stub_type() == CallIC::MONOMORPHIC_ARRAY) {
+    Generate_MonomorphicArray(masm, &miss);
+  } else {
+    // So far there is only one customer for our custom feedback scheme.
+    UNREACHABLE();
+  }
+
+  __ bind(&miss);
+  GenerateMiss(masm);
+
+ // The slow case: we need this no matter what to complete a call after a miss.
+  CallFunctionNoFeedback(masm,
+                         arg_count(),
+                         true,
+                         CallAsMethod());
+
+  // Unreachable.
+  __ stop("Unexpected code address");
+}


 void CallICStub::Generate(MacroAssembler* masm) {
@@ -3232,6 +3283,11 @@

   EmitLoadTypeFeedbackVector(masm, a2);

+  if (state_.stub_type() != CallIC::DEFAULT) {
+    Generate_CustomFeedbackCall(masm);
+    return;
+  }
+
   // The checks. First, does r1 match the recorded monomorphic target?
   __ sll(t0, a3, kPointerSizeLog2 - kSmiTagSize);
   __ Addu(t0, a2, Operand(t0));
=======================================
--- /branches/bleeding_edge/src/objects.cc      Mon May 26 09:04:00 2014 UTC
+++ /branches/bleeding_edge/src/objects.cc      Mon May 26 13:59:24 2014 UTC
@@ -11205,13 +11205,30 @@
 void SharedFunctionInfo::ClearTypeFeedbackInfo() {
   FixedArray* vector = feedback_vector();
   Heap* heap = GetHeap();
-  for (int i = 0; i < vector->length(); i++) {
+  int length = vector->length();
+
+  for (int i = 0; i < length; i++) {
     Object* obj = vector->get(i);
-    if (!obj->IsAllocationSite()) {
-      vector->set(
-          i,
-          TypeFeedbackInfo::RawUninitializedSentinel(heap),
-          SKIP_WRITE_BARRIER);
+    if (obj->IsHeapObject()) {
+      InstanceType instance_type =
+          HeapObject::cast(obj)->map()->instance_type();
+      switch (instance_type) {
+        case ALLOCATION_SITE_TYPE:
+          // AllocationSites are not cleared because they do not store
+          // information that leaks.
+          break;
+        case JS_FUNCTION_TYPE:
+          // No need to clear the native context array function.
+          if (obj == JSFunction::cast(obj)->context()->native_context()->
+              get(Context::ARRAY_FUNCTION_INDEX)) {
+            break;
+          }
+          // Fall through...
+
+        default:
+          vector->set(i, TypeFeedbackInfo::RawUninitializedSentinel(heap),
+                      SKIP_WRITE_BARRIER);
+      }
     }
   }
 }
=======================================
--- /branches/bleeding_edge/src/type-info.cc    Mon May 26 09:04:00 2014 UTC
+++ /branches/bleeding_edge/src/type-info.cc    Mon May 26 13:59:24 2014 UTC
@@ -97,9 +97,7 @@

 bool TypeFeedbackOracle::CallIsMonomorphic(int slot) {
   Handle<Object> value = GetInfo(slot);
-  return FLAG_pretenuring_call_new
-      ? value->IsJSFunction()
-      : value->IsAllocationSite() || value->IsJSFunction();
+  return value->IsAllocationSite() || value->IsJSFunction();
 }


@@ -134,7 +132,10 @@

 Handle<JSFunction> TypeFeedbackOracle::GetCallTarget(int slot) {
   Handle<Object> info = GetInfo(slot);
-  if (FLAG_pretenuring_call_new || info->IsJSFunction()) {
+  if (info->IsAllocationSite()) {
+    ASSERT(!FLAG_pretenuring_call_new);
+ return Handle<JSFunction>(isolate()->native_context()->array_function());
+  } else {
     return Handle<JSFunction>::cast(info);
   }

@@ -152,6 +153,15 @@
   ASSERT(info->IsAllocationSite());
   return Handle<JSFunction>(isolate()->native_context()->array_function());
 }
+
+
+Handle<AllocationSite> TypeFeedbackOracle::GetCallAllocationSite(int slot) {
+  Handle<Object> info = GetInfo(slot);
+  if (info->IsAllocationSite()) {
+    return Handle<AllocationSite>::cast(info);
+  }
+  return Handle<AllocationSite>::null();
+}


Handle<AllocationSite> TypeFeedbackOracle::GetCallNewAllocationSite(int slot) {
=======================================
--- /branches/bleeding_edge/src/type-info.h     Mon May 26 09:04:00 2014 UTC
+++ /branches/bleeding_edge/src/type-info.h     Mon May 26 13:59:24 2014 UTC
@@ -65,6 +65,7 @@
                                     Context* native_context);

   Handle<JSFunction> GetCallTarget(int slot);
+  Handle<AllocationSite> GetCallAllocationSite(int slot);
   Handle<JSFunction> GetCallNewTarget(int slot);
   Handle<AllocationSite> GetCallNewAllocationSite(int slot);

=======================================
--- /branches/bleeding_edge/src/typing.cc       Mon May 26 09:04:00 2014 UTC
+++ /branches/bleeding_edge/src/typing.cc       Mon May 26 13:59:24 2014 UTC
@@ -511,6 +511,9 @@
       expr->IsUsingCallFeedbackSlot(isolate()) &&
       oracle()->CallIsMonomorphic(expr->CallFeedbackSlot())) {
     expr->set_target(oracle()->GetCallTarget(expr->CallFeedbackSlot()));
+    Handle<AllocationSite> site =
+        oracle()->GetCallAllocationSite(expr->CallFeedbackSlot());
+    expr->set_allocation_site(site);
   }

   ZoneList<Expression*>* args = expr->arguments();
=======================================
--- /branches/bleeding_edge/src/x64/code-stubs-x64.cc Mon May 26 09:04:00 2014 UTC +++ /branches/bleeding_edge/src/x64/code-stubs-x64.cc Mon May 26 13:59:24 2014 UTC
@@ -2228,16 +2228,17 @@
 }


-void CallFunctionStub::Generate(MacroAssembler* masm) {
+static void CallFunctionNoFeedback(MacroAssembler* masm,
+                                   int argc, bool needs_checks,
+                                   bool call_as_method) {
   // rdi : the function to call

// wrap_and_call can only be true if we are compiling a monomorphic method.
   Isolate* isolate = masm->isolate();
   Label slow, non_function, wrap, cont;
-  int argc = argc_;
   StackArgumentsAccessor args(rsp, argc);

-  if (NeedsChecks()) {
+  if (needs_checks) {
     // Check that the function really is a JavaScript function.
     __ JumpIfSmi(rdi, &non_function);

@@ -2249,15 +2250,15 @@
   // Fast-case: Just invoke the function.
   ParameterCount actual(argc);

-  if (CallAsMethod()) {
-    if (NeedsChecks()) {
+  if (call_as_method) {
+    if (needs_checks) {
       EmitContinueIfStrictOrNative(masm, &cont);
     }

     // Load the receiver from the stack.
     __ movp(rax, args.GetReceiverOperand());

-    if (NeedsChecks()) {
+    if (needs_checks) {
       __ JumpIfSmi(rax, &wrap);

       __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx);
@@ -2271,17 +2272,22 @@

   __ InvokeFunction(rdi, actual, JUMP_FUNCTION, NullCallWrapper());

-  if (NeedsChecks()) {
+  if (needs_checks) {
     // Slow-case: Non-function called.
     __ bind(&slow);
     EmitSlowCase(isolate, masm, &args, argc, &non_function);
   }

-  if (CallAsMethod()) {
+  if (call_as_method) {
     __ bind(&wrap);
     EmitWrapCase(masm, &args, &cont);
   }
 }
+
+
+void CallFunctionStub::Generate(MacroAssembler* masm) {
+  CallFunctionNoFeedback(masm, argc_, NeedsChecks(), CallAsMethod());
+}


 void CallConstructStub::Generate(MacroAssembler* masm) {
@@ -2356,6 +2362,54 @@
   __ movp(vector, FieldOperand(vector,
                                SharedFunctionInfo::kFeedbackVectorOffset));
 }
+
+
+void CallICStub::Generate_MonomorphicArray(MacroAssembler* masm, Label* miss) {
+  // rdi - function
+  // rbx - feedback vector
+  // rdx - slot id (as integer)
+  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, rcx);
+  __ cmpq(rdi, rcx);
+  __ j(not_equal, miss);
+
+  __ movq(rax, Immediate(arg_count()));
+  __ movp(rbx, FieldOperand(rbx, rdx, times_pointer_size,
+                            FixedArray::kHeaderSize));
+
+  // Verify that rbx contains an AllocationSite
+  __ AssertUndefinedOrAllocationSite(rbx);
+  ArrayConstructorStub stub(masm->isolate(), arg_count());
+  __ TailCallStub(&stub);
+}
+
+
+void CallICStub::Generate_CustomFeedbackCall(MacroAssembler* masm) {
+  // rdi - function
+  // rbx - feedback vector
+  // rdx - slot id
+  Label miss;
+
+  __ SmiToInteger32(rdx, rdx);
+
+  if (state_.stub_type() == CallIC::MONOMORPHIC_ARRAY) {
+    Generate_MonomorphicArray(masm, &miss);
+  } else {
+    // So far there is only one customer for our custom feedback scheme.
+    UNREACHABLE();
+  }
+
+  __ bind(&miss);
+  GenerateMiss(masm);
+
+ // The slow case: we need this no matter what to complete a call after a miss.
+  CallFunctionNoFeedback(masm,
+                         arg_count(),
+                         true,
+                         CallAsMethod());
+
+  // Unreachable.
+  __ int3();
+}


 void CallICStub::Generate(MacroAssembler* masm) {
@@ -2372,6 +2426,11 @@

   EmitLoadTypeFeedbackVector(masm, rbx);

+  if (state_.stub_type() != CallIC::DEFAULT) {
+    Generate_CustomFeedbackCall(masm);
+    return;
+  }
+
   // The checks. First, does rdi match the recorded monomorphic target?
   __ SmiToInteger32(rdx, rdx);
   __ cmpq(rdi, FieldOperand(rbx, rdx, times_pointer_size,
=======================================
--- /branches/bleeding_edge/test/mjsunit/array-constructor-feedback.js Mon May 26 09:04:00 2014 UTC +++ /branches/bleeding_edge/test/mjsunit/array-constructor-feedback.js Mon May 26 13:59:24 2014 UTC
@@ -150,18 +150,11 @@
     a = bar(10);
     assertKind(elements_kind.fast, a);
     assertOptimized(bar);
-    // bar should deopt because the length is too large.
     a = bar(100000);
-    assertUnoptimized(bar);
-    assertKind(elements_kind.dictionary, a);
- // The allocation site now has feedback that means the array constructor
-    // will not be inlined.
-    %OptimizeFunctionOnNextCall(bar);
-    a = bar(100000);
     assertKind(elements_kind.dictionary, a);
     assertOptimized(bar);

-    // If the argument isn't a smi, it bails out as well
+    // If the argument isn't a smi, things should still work.
     a = bar("oops");
     assertOptimized(bar);
     assertKind(elements_kind.fast, a);
@@ -176,12 +169,6 @@
     barn(1, 2, 3);
     assertOptimized(barn);
     a = barn(1, "oops", 3);
- // The method should deopt, but learn from the failure to avoid inlining
-    // the array.
-    assertKind(elements_kind.fast, a);
-    assertUnoptimized(barn);
-    %OptimizeFunctionOnNextCall(barn);
-    a = barn(1, "oops", 3);
     assertOptimized(barn);
   })();

@@ -228,10 +215,8 @@
     assertTrue(Realm.eval(contextB, "bar2() instanceof Array"));
   })();

-  // Test: create array with packed feedback, then optimize/inline
-  // function. Verify that if we ask for a holey array then we deopt.
-  // Reoptimization will proceed with the correct feedback and we
-  // won't deopt anymore.
+  // Test: create array with packed feedback, then optimize function, which
+  // should deal with arguments that create holey arrays.
   (function() {
     function bar(len) { return new Array(len); }
     bar(0);
@@ -241,15 +226,16 @@
     assertOptimized(bar);
     assertFalse(isHoley(a));
     a = bar(1);  // ouch!
-    assertUnoptimized(bar);
+    assertOptimized(bar);
     assertTrue(isHoley(a));
-    // Try again
-    %OptimizeFunctionOnNextCall(bar);
     a = bar(100);
-    assertOptimized(bar);
     assertTrue(isHoley(a));
     a = bar(0);
     assertOptimized(bar);
-    assertTrue(isHoley(a));
+    // Crankshafted functions don't use mementos, so feedback still
+ // indicates a packed array is desired. (unless --nocrankshaft is in use).
+    if (4 != %GetOptimizationStatus(bar)) {
+      assertFalse(isHoley(a));
+    }
   })();
 }
=======================================
--- /branches/bleeding_edge/test/mjsunit/array-feedback.js Mon May 26 09:04:00 2014 UTC +++ /branches/bleeding_edge/test/mjsunit/array-feedback.js Mon May 26 13:59:24 2014 UTC
@@ -85,69 +85,86 @@
   // Verify that basic elements kind feedback works for non-constructor
   // array calls (as long as the call is made through an IC, and not
   // a CallStub).
-  // (function (){
-  //   function create0() {
-  //     return Array();
-  //   }
+  (function (){
+    function create0() {
+      return Array();
+    }

-  //   // Calls through ICs need warm up through uninitialized, then
-  //   // premonomorphic first.
-  //   create0();
-  //   create0();
-  //   a = create0();
-  //   assertKind(elements_kind.fast_smi_only, a);
-  //   a[0] = 3.5;
-  //   b = create0();
-  //   assertKind(elements_kind.fast_double, b);
+    // Calls through ICs need warm up through uninitialized, then
+    // premonomorphic first.
+    create0();
+    a = create0();
+    assertKind(elements_kind.fast_smi_only, a);
+    a[0] = 3.5;
+    b = create0();
+    assertKind(elements_kind.fast_double, b);

-  //   function create1(arg) {
-  //     return Array(arg);
-  //   }
+    function create1(arg) {
+      return Array(arg);
+    }

-  //   create1(0);
-  //   create1(0);
-  //   a = create1(0);
-  //   assertFalse(isHoley(a));
-  //   assertKind(elements_kind.fast_smi_only, a);
-  //   a[0] = "hello";
-  //   b = create1(10);
-  //   assertTrue(isHoley(b));
-  //   assertKind(elements_kind.fast, b);
+    create1(0);
+    create1(0);
+    a = create1(0);
+    assertFalse(isHoley(a));
+    assertKind(elements_kind.fast_smi_only, a);
+    a[0] = "hello";
+    b = create1(10);
+    assertTrue(isHoley(b));
+    assertKind(elements_kind.fast, b);

-  //   a = create1(100000);
-  //   assertKind(elements_kind.dictionary, a);
+    a = create1(100000);
+    assertKind(elements_kind.dictionary, a);

-  //   function create3(arg1, arg2, arg3) {
-  //     return Array(arg1, arg2, arg3);
-  //   }
+    function create3(arg1, arg2, arg3) {
+      return Array(arg1, arg2, arg3);
+    }

-  //   create3();
-  //   create3();
-  //   a = create3(1,2,3);
-  //   a[0] = 3.5;
-  //   b = create3(1,2,3);
-  //   assertKind(elements_kind.fast_double, b);
-  //   assertFalse(isHoley(b));
-  // })();
+    create3(1,2,3);
+    create3(1,2,3);
+    a = create3(1,2,3);
+    a[0] = 3.035;
+    assertKind(elements_kind.fast_double, a);
+    b = create3(1,2,3);
+    assertKind(elements_kind.fast_double, b);
+    assertFalse(isHoley(b));
+  })();


   // Verify that keyed calls work
-  // (function (){
-  //   function create0(name) {
-  //     return this[name]();
-  //   }
+  (function (){
+    function create0(name) {
+      return this[name]();
+    }

-  //   name = "Array";
-  //   create0(name);
-  //   create0(name);
-  //   a = create0(name);
-  //   a[0] = 3.5;
-  //   b = create0(name);
-  //   assertKind(elements_kind.fast_double, b);
-  // })();
+    name = "Array";
+    create0(name);
+    create0(name);
+    a = create0(name);
+    a[0] = 3.5;
+    b = create0(name);
+    assertKind(elements_kind.fast_double, b);
+  })();
+
+
+  // Verify that feedback is turned off if the call site goes megamorphic.
+  (function (){
+    function foo(arg) { return arg(); }
+    foo(Array);
+    foo(function() {});
+    foo(Array);
+
+    gc();
+
+    a = foo(Array);
+    a[0] = 3.5;
+    b = foo(Array);
+    // b doesn't benefit from elements kind feedback at a megamorphic site.
+    assertKind(elements_kind.fast_smi_only, b);
+  })();


-  // Verify that the IC can't be spoofed by patching
+  // Verify that crankshaft consumes type feedback.
   (function (){
     function create0() {
       return Array();
@@ -156,41 +173,40 @@
     create0();
     create0();
     a = create0();
-    assertKind(elements_kind.fast_smi_only, a);
-    var oldArray = this.Array;
-    this.Array = function() { return ["hi"]; };
+    a[0] = 3.5;
+    %OptimizeFunctionOnNextCall(create0);
+    create0();
+    create0();
     b = create0();
-    assertEquals(["hi"], b);
-    this.Array = oldArray;
-  })();
+    assertKind(elements_kind.fast_double, b);
+    assertOptimized(create0);
+
+    function create1(arg) {
+      return Array(arg);
+    }

-  // Verify that calls are still made through an IC after crankshaft,
-  // though the type information is reset.
-  // TODO(mvstanton): instead, consume the type feedback gathered up
-  // until crankshaft time.
-  // (function (){
-  //   function create0() {
-  //     return Array();
-  //   }
+    create1(8);
+    create1(8);
+    a = create1(8);
+    a[0] = 3.5;
+    %OptimizeFunctionOnNextCall(create1);
+    b = create1(8);
+    assertKind(elements_kind.fast_double, b);
+    assertOptimized(create1);

-  //   create0();
-  //   create0();
-  //   a = create0();
-  //   a[0] = 3.5;
-  //   %OptimizeFunctionOnNextCall(create0);
-  //   create0();
-  //   // This test only makes sense if crankshaft is allowed
-  //   if (4 != %GetOptimizationStatus(create0)) {
-  //     create0();
-  //     b = create0();
-  //     assertKind(elements_kind.fast_smi_only, b);
-  //     b[0] = 3.5;
-  //     c = create0();
-  //     assertKind(elements_kind.fast_double, c);
-  //     assertOptimized(create0);
-  //   }
-  // })();
+    function createN(arg1, arg2, arg3) {
+      return Array(arg1, arg2, arg3);
+    }

+    createN(1, 2, 3);
+    createN(1, 2, 3);
+    a = createN(1, 2, 3);
+    a[0] = 3.5;
+    %OptimizeFunctionOnNextCall(createN);
+    b = createN(1, 2, 3);
+    assertKind(elements_kind.fast_double, b);
+    assertOptimized(createN);
+  })();

   // Verify that cross context calls work
   (function (){

--
--
v8-dev mailing list
[email protected]
http://groups.google.com/group/v8-dev
--- You received this message because you are subscribed to the Google Groups "v8-dev" group.
To unsubscribe from this group and stop receiving emails from it, send an email 
to [email protected].
For more options, visit https://groups.google.com/d/optout.

Reply via email to