Revision: 21230
Author:   [email protected]
Date:     Fri May  9 15:55:45 2014 UTC
Log:      Reland r20974: Unify and simplify the FastCloneShallowArrayStub

- Don't bake length/capacity into full codegen calls of stubs, allowing
boilerplates to increase their capacity without regenerating code.
- Unify all variants of the clone stub into a single,
length-independent version.
- Various tweaks to make sure that the clone stub doesn't spill and
therefore need an eager stack frame.
- Handle all lengths of array literals in the fast case.
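
For reference, the net effect on stub instantiation, shown as a before/after
sketch distilled from the full-codegen hunks below (not part of the original
log message):

  // Before: callers picked a Mode and baked the literal's length into the
  // stub's minor key, so each (mode, length) combination produced new code.
  FastCloneShallowArrayStub stub(isolate(),
                                 FastCloneShallowArrayStub::CLONE_ELEMENTS,
                                 allocation_site_mode,
                                 length);

  // After: a single length-independent stub; elements kind, COW-ness and
  // length are dispatched on at runtime inside the Hydrogen-generated code.
  FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);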

[email protected]

Review URL: https://codereview.chromium.org/272513004
http://code.google.com/p/v8/source/detail?r=21230
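
The descriptor side of the change can be summarized as follows (a sketch of
the per-platform hunks below; the register comments are inferred from the
full-codegen call sites, not from the commit itself):

  // Each InitializeInterfaceDescriptor now also records a Representation for
  // every register parameter, so Hydrogen no longer assumes Tagged() for all
  // of them (ARM shown; ia32/x64/arm64 are analogous).
  static Register registers[] = { r3, r2, r1 };
  static Representation representations[] = {
    Representation::Tagged(),  // r3: literals array
    Representation::Smi(),     // r2: literal index
    Representation::Tagged()   // r1: constant elements
  };
  descriptor->register_params_ = registers;
  descriptor->register_param_representations_ = representations;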

Modified:
 /branches/bleeding_edge/src/arm/code-stubs-arm.cc
 /branches/bleeding_edge/src/arm/full-codegen-arm.cc
 /branches/bleeding_edge/src/arm64/code-stubs-arm64.cc
 /branches/bleeding_edge/src/arm64/full-codegen-arm64.cc
 /branches/bleeding_edge/src/code-stubs-hydrogen.cc
 /branches/bleeding_edge/src/code-stubs.cc
 /branches/bleeding_edge/src/code-stubs.h
 /branches/bleeding_edge/src/compiler.h
 /branches/bleeding_edge/src/counters.h
 /branches/bleeding_edge/src/hydrogen-gvn.cc
 /branches/bleeding_edge/src/hydrogen-instructions.h
 /branches/bleeding_edge/src/hydrogen.cc
 /branches/bleeding_edge/src/hydrogen.h
 /branches/bleeding_edge/src/ia32/code-stubs-ia32.cc
 /branches/bleeding_edge/src/ia32/full-codegen-ia32.cc
 /branches/bleeding_edge/src/lithium.cc
 /branches/bleeding_edge/src/mips/full-codegen-mips.cc
 /branches/bleeding_edge/src/x64/code-stubs-x64.cc
 /branches/bleeding_edge/src/x64/full-codegen-x64.cc

=======================================
--- /branches/bleeding_edge/src/arm/code-stubs-arm.cc  Fri May  9 13:01:50 2014 UTC
+++ /branches/bleeding_edge/src/arm/code-stubs-arm.cc  Fri May  9 15:55:45 2014 UTC
@@ -58,6 +58,11 @@
   static Register registers[] = { r3, r2, r1 };
   descriptor->register_param_count_ = 3;
   descriptor->register_params_ = registers;
+  static Representation representations[] = {
+    Representation::Tagged(),
+    Representation::Smi(),
+    Representation::Tagged() };
+  descriptor->register_param_representations_ = representations;
   descriptor->deoptimization_handler_ =
       Runtime::FunctionForId(
           Runtime::kHiddenCreateArrayLiteralStubBailout)->entry;
@@ -201,6 +206,11 @@
     descriptor->stack_parameter_count_ = r0;
     descriptor->register_param_count_ = 3;
     descriptor->register_params_ = registers_variable_args;
+    static Representation representations[] = {
+        Representation::Tagged(),
+        Representation::Tagged(),
+        Representation::Integer32() };
+    descriptor->register_param_representations_ = representations;
   }

   descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count;
@@ -228,6 +238,10 @@
     descriptor->stack_parameter_count_ = r0;
     descriptor->register_param_count_ = 2;
     descriptor->register_params_ = registers_variable_args;
+    static Representation representations[] = {
+        Representation::Tagged(),
+        Representation::Integer32() };
+    descriptor->register_param_representations_ = representations;
   }

   descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count;
=======================================
--- /branches/bleeding_edge/src/arm/full-codegen-arm.cc  Fri May  9 14:28:59 2014 UTC
+++ /branches/bleeding_edge/src/arm/full-codegen-arm.cc  Fri May  9 15:55:45 2014 UTC
@@ -1792,33 +1792,12 @@
   __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
   __ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
   __ mov(r1, Operand(constant_elements));
-  if (has_fast_elements && constant_elements_values->map() ==
-      isolate()->heap()->fixed_cow_array_map()) {
-    FastCloneShallowArrayStub stub(
-        isolate(),
-        FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
-        allocation_site_mode,
-        length);
-    __ CallStub(&stub);
-    __ IncrementCounter(
-        isolate()->counters()->cow_arrays_created_stub(), 1, r1, r2);
-  } else if (expr->depth() > 1 || Serializer::enabled(isolate()) ||
-             length > FastCloneShallowArrayStub::kMaximumClonedLength) {
+  if (expr->depth() > 1) {
     __ mov(r0, Operand(Smi::FromInt(flags)));
     __ Push(r3, r2, r1, r0);
     __ CallRuntime(Runtime::kHiddenCreateArrayLiteral, 4);
   } else {
-    ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) ||
-           FLAG_smi_only_arrays);
-    FastCloneShallowArrayStub::Mode mode =
-        FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;
-
-    if (has_fast_elements) {
-      mode = FastCloneShallowArrayStub::CLONE_ELEMENTS;
-    }
-
-    FastCloneShallowArrayStub stub(isolate(), mode, allocation_site_mode,
-                                   length);
+    FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
     __ CallStub(&stub);
   }

=======================================
--- /branches/bleeding_edge/src/arm64/code-stubs-arm64.cc  Fri May  9 13:01:50 2014 UTC
+++ /branches/bleeding_edge/src/arm64/code-stubs-arm64.cc  Fri May  9 15:55:45 2014 UTC
@@ -65,6 +65,11 @@
   static Register registers[] = { x3, x2, x1 };
   descriptor->register_param_count_ = sizeof(registers) / sizeof(registers[0]);
   descriptor->register_params_ = registers;
+  static Representation representations[] = {
+    Representation::Tagged(),
+    Representation::Smi(),
+    Representation::Tagged() };
+  descriptor->register_param_representations_ = representations;
   descriptor->deoptimization_handler_ =
       Runtime::FunctionForId(
           Runtime::kHiddenCreateArrayLiteralStubBailout)->entry;
@@ -230,6 +235,11 @@
     descriptor->register_param_count_ =
         sizeof(registers_variable_args) / sizeof(registers_variable_args[0]);
     descriptor->register_params_ = registers_variable_args;
+    static Representation representations[] = {
+        Representation::Tagged(),
+        Representation::Tagged(),
+        Representation::Integer32() };
+    descriptor->register_param_representations_ = representations;
   }

   descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count;
@@ -276,6 +286,10 @@
     descriptor->register_param_count_ =
         sizeof(registers_variable_args) / sizeof(registers_variable_args[0]);
     descriptor->register_params_ = registers_variable_args;
+    static Representation representations[] = {
+        Representation::Tagged(),
+        Representation::Integer32() };
+    descriptor->register_param_representations_ = representations;
   }

   descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count;
=======================================
--- /branches/bleeding_edge/src/arm64/full-codegen-arm64.cc  Tue May  6 09:28:08 2014 UTC
+++ /branches/bleeding_edge/src/arm64/full-codegen-arm64.cc  Fri May  9 15:55:45 2014 UTC
@@ -1795,35 +1795,12 @@
   __ Ldr(x3, FieldMemOperand(x3, JSFunction::kLiteralsOffset));
   __ Mov(x2, Smi::FromInt(expr->literal_index()));
   __ Mov(x1, Operand(constant_elements));
-  if (has_fast_elements && constant_elements_values->map() ==
-      isolate()->heap()->fixed_cow_array_map()) {
-    FastCloneShallowArrayStub stub(
-        isolate(),
-        FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
-        allocation_site_mode,
-        length);
-    __ CallStub(&stub);
-    __ IncrementCounter(
-        isolate()->counters()->cow_arrays_created_stub(), 1, x10, x11);
-  } else if ((expr->depth() > 1) || Serializer::enabled(isolate()) ||
-             length > FastCloneShallowArrayStub::kMaximumClonedLength) {
+  if (expr->depth() > 1) {
     __ Mov(x0, Smi::FromInt(flags));
     __ Push(x3, x2, x1, x0);
     __ CallRuntime(Runtime::kHiddenCreateArrayLiteral, 4);
   } else {
-    ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) ||
-           FLAG_smi_only_arrays);
-    FastCloneShallowArrayStub::Mode mode =
-        FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;
-
-    if (has_fast_elements) {
-      mode = FastCloneShallowArrayStub::CLONE_ELEMENTS;
-    }
-
-    FastCloneShallowArrayStub stub(isolate(),
-                                   mode,
-                                   allocation_site_mode,
-                                   length);
+    FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
     __ CallStub(&stub);
   }

=======================================
--- /branches/bleeding_edge/src/code-stubs-hydrogen.cc  Fri May  9 08:28:25 2014 UTC
+++ /branches/bleeding_edge/src/code-stubs-hydrogen.cc  Fri May  9 15:55:45 2014 UTC
@@ -127,9 +127,9 @@
   bool runtime_stack_params = descriptor_->stack_parameter_count_.is_valid();
   HInstruction* stack_parameter_count = NULL;
   for (int i = 0; i < param_count; ++i) {
-    Representation r = descriptor_->IsParameterCountRegister(i)
-        ? Representation::Integer32()
-        : Representation::Tagged();
+    Representation r = descriptor_->register_param_representations_ == NULL
+        ? Representation::Tagged()
+        : descriptor_->register_param_representations_[i];
     HParameter* param = Add<HParameter>(i, HParameter::REGISTER_PARAMETER, r);
     start_environment->Bind(i, param);
     parameters_[i] = param;
@@ -330,8 +330,10 @@
   Factory* factory = isolate()->factory();
   HValue* undefined = graph()->GetConstantUndefined();
   AllocationSiteMode alloc_site_mode = casted_stub()->allocation_site_mode();
-  FastCloneShallowArrayStub::Mode mode = casted_stub()->mode();
-  int length = casted_stub()->length();
+
+  // This stub is very performance sensitive; the generated code must be
+  // tuned so that it doesn't build an eager frame.
+  info()->MarkMustNotHaveEagerFrame();

   HInstruction* allocation_site = Add<HLoadKeyed>(GetParameter(0),
                                                   GetParameter(1),
@@ -346,46 +348,40 @@
       AllocationSite::kTransitionInfoOffset);
   HInstruction* boilerplate = Add<HLoadNamedField>(
       allocation_site, static_cast<HValue*>(NULL), access);
-  HValue* push_value;
-  if (mode == FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS) {
-    HValue* elements = AddLoadElements(boilerplate);
-
-    IfBuilder if_fixed_cow(this);
-    if_fixed_cow.If<HCompareMap>(elements, factory->fixed_cow_array_map());
-    if_fixed_cow.Then();
-    push_value = BuildCloneShallowArray(boilerplate,
-                                        allocation_site,
-                                        alloc_site_mode,
-                                        FAST_ELEMENTS,
-                                        0/*copy-on-write*/);
-    environment()->Push(push_value);
-    if_fixed_cow.Else();
+  HValue* elements = AddLoadElements(boilerplate);
+  HValue* capacity = AddLoadFixedArrayLength(elements);
+  IfBuilder zero_capacity(this);
+  zero_capacity.If<HCompareNumericAndBranch>(capacity, graph()->GetConstant0(),
+                                             Token::EQ);
+  zero_capacity.Then();
+  Push(BuildCloneShallowArrayEmpty(boilerplate,
+                                   allocation_site,
+                                   alloc_site_mode));
+  zero_capacity.Else();
+  IfBuilder if_fixed_cow(this);
+  if_fixed_cow.If<HCompareMap>(elements, factory->fixed_cow_array_map());
+  if_fixed_cow.Then();
+  Push(BuildCloneShallowArrayCow(boilerplate,
+                                 allocation_site,
+                                 alloc_site_mode,
+                                 FAST_ELEMENTS));
+  if_fixed_cow.Else();
+  IfBuilder if_fixed(this);
+  if_fixed.If<HCompareMap>(elements, factory->fixed_array_map());
+  if_fixed.Then();
+  Push(BuildCloneShallowArrayNonEmpty(boilerplate,
+                                      allocation_site,
+                                      alloc_site_mode,
+                                      FAST_ELEMENTS));

-    IfBuilder if_fixed(this);
-    if_fixed.If<HCompareMap>(elements, factory->fixed_array_map());
-    if_fixed.Then();
-    push_value = BuildCloneShallowArray(boilerplate,
-                                        allocation_site,
-                                        alloc_site_mode,
-                                        FAST_ELEMENTS,
-                                        length);
-    environment()->Push(push_value);
-    if_fixed.Else();
-    push_value = BuildCloneShallowArray(boilerplate,
-                                        allocation_site,
-                                        alloc_site_mode,
-                                        FAST_DOUBLE_ELEMENTS,
-                                        length);
-    environment()->Push(push_value);
-  } else {
-    ElementsKind elements_kind = casted_stub()->ComputeElementsKind();
-    push_value = BuildCloneShallowArray(boilerplate,
-                                        allocation_site,
-                                        alloc_site_mode,
-                                        elements_kind,
-                                        length);
-    environment()->Push(push_value);
-  }
+  if_fixed.Else();
+  Push(BuildCloneShallowArrayNonEmpty(boilerplate,
+                                      allocation_site,
+                                      alloc_site_mode,
+                                      FAST_DOUBLE_ELEMENTS));
+  if_fixed.End();
+  if_fixed_cow.End();
+  zero_capacity.End();

   checker.ElseDeopt("Uninitialized boilerplate literals");
   checker.End();
@@ -644,6 +640,9 @@
   HValue* result = NULL;
   switch (argument_class) {
     case NONE:
+      // This stub is very performance sensitive; the generated code must be
+      // tuned so that it doesn't build an eager frame.
+      info()->MarkMustNotHaveEagerFrame();
       result = array_builder.AllocateEmptyArray();
       break;
     case SINGLE:
@@ -667,6 +666,9 @@
   HValue* result = NULL;
   switch (argument_class) {
     case NONE:
+      // This stub is very performance sensitive; the generated code must be
+      // tuned so that it doesn't build an eager frame.
+      info()->MarkMustNotHaveEagerFrame();
       result = array_builder.AllocateEmptyArray();
       break;
     case SINGLE:
=======================================
--- /branches/bleeding_edge/src/code-stubs.cc   Wed Apr 30 14:33:35 2014 UTC
+++ /branches/bleeding_edge/src/code-stubs.cc   Fri May  9 15:55:45 2014 UTC
@@ -22,6 +22,7 @@
       hint_stack_parameter_count_(-1),
       function_mode_(NOT_JS_FUNCTION_STUB_MODE),
       register_params_(NULL),
+      register_param_representations_(NULL),
       deoptimization_handler_(NULL),
       handler_arguments_mode_(DONT_PASS_ARGUMENTS),
       miss_handler_(),
@@ -733,9 +734,7 @@

 // static
 void FastCloneShallowArrayStub::InstallDescriptors(Isolate* isolate) {
-  FastCloneShallowArrayStub stub(isolate,
-                                 FastCloneShallowArrayStub::CLONE_ELEMENTS,
-                                 DONT_TRACK_ALLOCATION_SITE, 0);
+  FastCloneShallowArrayStub stub(isolate, DONT_TRACK_ALLOCATION_SITE);
   InstallDescriptor(isolate, &stub);
 }

=======================================
--- /branches/bleeding_edge/src/code-stubs.h    Fri May  9 13:01:50 2014 UTC
+++ /branches/bleeding_edge/src/code-stubs.h    Fri May  9 15:55:45 2014 UTC
@@ -277,6 +277,11 @@
   int hint_stack_parameter_count_;
   StubFunctionMode function_mode_;
   Register* register_params_;
+  // Specifies Representations for the stub's parameters. Points to an array
+  // of Representations of the same length as the number of parameters to the
+  // stub, or NULL (the default value), in which case the Representation of
+  // each parameter is assumed to be Tagged().
+  Representation* register_param_representations_;

   Address deoptimization_handler_;
   HandlerArgumentsMode handler_arguments_mode_;
@@ -581,50 +586,18 @@
 class FastCloneShallowArrayStub : public HydrogenCodeStub {
  public:
   // Maximum length of copied elements array.
-  static const int kMaximumClonedLength = 8;
-  enum Mode {
-    CLONE_ELEMENTS,
-    CLONE_DOUBLE_ELEMENTS,
-    COPY_ON_WRITE_ELEMENTS,
-    CLONE_ANY_ELEMENTS,
-    LAST_CLONE_MODE = CLONE_ANY_ELEMENTS
-  };
-
-  static const int kFastCloneModeCount = LAST_CLONE_MODE + 1;
+  static const int kMaximumInlinedCloneLength = 8;

   FastCloneShallowArrayStub(Isolate* isolate,
-                            Mode mode,
-                            AllocationSiteMode allocation_site_mode,
-                            int length)
+                            AllocationSiteMode allocation_site_mode)
       : HydrogenCodeStub(isolate),
-        mode_(mode),
-        allocation_site_mode_(allocation_site_mode),
-        length_((mode == COPY_ON_WRITE_ELEMENTS) ? 0 : length) {
-    ASSERT_GE(length_, 0);
-    ASSERT_LE(length_, kMaximumClonedLength);
-  }
+        allocation_site_mode_(allocation_site_mode) {}

-  Mode mode() const { return mode_; }
-  int length() const { return length_; }
   AllocationSiteMode allocation_site_mode() const {
     return allocation_site_mode_;
   }

-  ElementsKind ComputeElementsKind() const {
-    switch (mode()) {
-      case CLONE_ELEMENTS:
-      case COPY_ON_WRITE_ELEMENTS:
-        return FAST_ELEMENTS;
-      case CLONE_DOUBLE_ELEMENTS:
-        return FAST_DOUBLE_ELEMENTS;
-      case CLONE_ANY_ELEMENTS:
-        /*fall-through*/;
-    }
-    UNREACHABLE();
-    return LAST_ELEMENTS_KIND;
-  }
-
-  virtual Handle<Code> GenerateCode() V8_OVERRIDE;
+  virtual Handle<Code> GenerateCode();

   virtual void InitializeInterfaceDescriptor(
       CodeStubInterfaceDescriptor* descriptor) V8_OVERRIDE;
@@ -632,22 +605,13 @@
   static void InstallDescriptors(Isolate* isolate);

  private:
-  Mode mode_;
   AllocationSiteMode allocation_site_mode_;
-  int length_;

   class AllocationSiteModeBits: public BitField<AllocationSiteMode, 0, 1> {};
-  class ModeBits: public BitField<Mode, 1, 4> {};
-  class LengthBits: public BitField<int, 5, 4> {};
   // Ensure data fits within available bits.
-  STATIC_ASSERT(LAST_ALLOCATION_SITE_MODE == 1);
-  STATIC_ASSERT(kFastCloneModeCount < 16);
-  STATIC_ASSERT(kMaximumClonedLength < 16);
   Major MajorKey() { return FastCloneShallowArray; }
   int NotMissMinorKey() {
-    return AllocationSiteModeBits::encode(allocation_site_mode_)
-        | ModeBits::encode(mode_)
-        | LengthBits::encode(length_);
+    return AllocationSiteModeBits::encode(allocation_site_mode_);
   }
 };

=======================================
--- /branches/bleeding_edge/src/compiler.h      Fri May  2 08:08:23 2014 UTC
+++ /branches/bleeding_edge/src/compiler.h      Fri May  9 15:55:45 2014 UTC
@@ -142,6 +142,14 @@
   bool requires_frame() const {
     return RequiresFrame::decode(flags_);
   }
+
+  void MarkMustNotHaveEagerFrame() {
+    flags_ |= MustNotHaveEagerFrame::encode(true);
+  }
+
+  bool GetMustNotHaveEagerFrame() const {
+    return MustNotHaveEagerFrame::decode(flags_);
+  }

   void SetParseRestriction(ParseRestriction restriction) {
     flags_ = ParseRestricitonField::update(flags_, restriction);
@@ -368,6 +376,8 @@
   class ParseRestricitonField: public BitField<ParseRestriction, 12, 1> {};
   // If the function requires a frame (for unspecified reasons)
   class RequiresFrame: public BitField<bool, 13, 1> {};
+  // If the function cannot build a frame (for unspecified reasons)
+  class MustNotHaveEagerFrame: public BitField<bool, 14, 1> {};

   unsigned flags_;

=======================================
--- /branches/bleeding_edge/src/counters.h      Fri May  9 12:59:24 2014 UTC
+++ /branches/bleeding_edge/src/counters.h      Fri May  9 15:55:45 2014 UTC
@@ -381,6 +381,7 @@
   SC(call_premonomorphic_stubs, V8.CallPreMonomorphicStubs)           \
   SC(call_normal_stubs, V8.CallNormalStubs)                           \
   SC(call_megamorphic_stubs, V8.CallMegamorphicStubs)                 \
+  SC(inlined_copied_elements, V8.InlinedCopiedElements)              \
   SC(arguments_adaptors, V8.ArgumentsAdaptors)                        \
   SC(compilation_cache_hits, V8.CompilationCacheHits)                 \
   SC(compilation_cache_misses, V8.CompilationCacheMisses)             \
=======================================
--- /branches/bleeding_edge/src/hydrogen-gvn.cc Tue Apr 29 06:42:26 2014 UTC
+++ /branches/bleeding_edge/src/hydrogen-gvn.cc Fri May  9 15:55:45 2014 UTC
@@ -863,7 +863,8 @@
           stream.OutputToStdOut();
         }
       }
-      if (instr->CheckFlag(HValue::kUseGVN)) {
+      if (instr->CheckFlag(HValue::kUseGVN) &&
+          !instr->CheckFlag(HValue::kCantBeReplaced)) {
         ASSERT(!instr->HasObservableSideEffects());
         HInstruction* other = map->Lookup(instr);
         if (other != NULL) {
=======================================
--- /branches/bleeding_edge/src/hydrogen-instructions.h  Fri May  9 12:59:24 2014 UTC
+++ /branches/bleeding_edge/src/hydrogen-instructions.h  Fri May  9 15:55:45 2014 UTC
@@ -619,6 +619,10 @@
     // flag.
     kUint32,
     kHasNoObservableSideEffects,
+    // Indicates an instruction shouldn't be replaced by optimization; this
+    // flag is useful to set in cases where recomputing a value is cheaper
+    // than extending the value's live range and spilling it.
+    kCantBeReplaced,
     // Indicates the instruction is live during dead code elimination.
     kIsLive,

@@ -6257,6 +6261,7 @@
   virtual void PrintDataTo(StringStream* stream) V8_OVERRIDE;

   bool CanBeReplacedWith(HValue* other) const {
+    if (CheckFlag(HValue::kCantBeReplaced)) return false;
     if (!type().Equals(other->type())) return false;
     if (!representation().Equals(other->representation())) return false;
     if (!other->IsLoadNamedField()) return true;
=======================================
--- /branches/bleeding_edge/src/hydrogen.cc     Fri May  9 12:19:59 2014 UTC
+++ /branches/bleeding_edge/src/hydrogen.cc     Fri May  9 15:55:45 2014 UTC
@@ -2388,15 +2388,26 @@
 }


-HLoadNamedField* HGraphBuilder::AddLoadElements(HValue* object) {
+HLoadNamedField* HGraphBuilder::AddLoadElements(HValue* object,
+                                                HValue* dependency) {
   return Add<HLoadNamedField>(
-      object, static_cast<HValue*>(NULL), HObjectAccess::ForElementsPointer());
+      object, dependency, HObjectAccess::ForElementsPointer());
 }


-HLoadNamedField* HGraphBuilder::AddLoadFixedArrayLength(HValue* object) {
+HLoadNamedField* HGraphBuilder::AddLoadFixedArrayLength(
+    HValue* array,
+    HValue* dependency) {
   return Add<HLoadNamedField>(
-      object, static_cast<HValue*>(NULL), HObjectAccess::ForFixedArrayLength());
+      array, dependency, HObjectAccess::ForFixedArrayLength());
+}
+
+
+HLoadNamedField* HGraphBuilder::AddLoadArrayLength(HValue* array,
+                                                   ElementsKind kind,
+                                                   HValue* dependency) {
+  return Add<HLoadNamedField>(
+      array, dependency, HObjectAccess::ForArrayLength(kind));
 }


@@ -2429,9 +2440,8 @@
   HValue* new_elements = BuildAllocateElementsAndInitializeElementsHeader(
       new_kind, new_capacity);

-  BuildCopyElements(elements, kind,
-                    new_elements, new_kind,
-                    length, new_capacity);
+  BuildCopyElements(object, elements, kind, new_elements,
+                    new_kind, length, new_capacity);

   Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
                         new_elements);
@@ -2444,8 +2454,8 @@
                                               ElementsKind elements_kind,
                                               HValue* from,
                                               HValue* to) {
-  // Fast elements kinds need to be initialized in case statements below cause
-  // a garbage collection.
+  // Fast elements kinds need to be initialized in case statements below cause a
+  // garbage collection.
   Factory* factory = isolate()->factory();

   double nan_double = FixedDoubleArray::hole_nan_as_double();
@@ -2453,6 +2463,10 @@
       ? Add<HConstant>(factory->the_hole_value())
       : Add<HConstant>(nan_double);

+  if (to == NULL) {
+    to = AddLoadFixedArrayLength(elements);
+  }
+
   // Special loop unfolding case
   static const int kLoopUnfoldLimit = 8;
   STATIC_ASSERT(JSArray::kPreallocatedArrayElements <= kLoopUnfoldLimit);
@@ -2478,156 +2492,244 @@
       Add<HStoreKeyed>(elements, key, hole, elements_kind);
     }
   } else {
-    LoopBuilder builder(this, context(), LoopBuilder::kPostIncrement);
+ // Carefully loop backwards so that the "from" remains live through the loop + // rather than the to. This often corresponds to keeping length live rather + // then capacity, which helps register allocation, since length is used more
+    // other than capacity after filling with holes.
+    LoopBuilder builder(this, context(), LoopBuilder::kPostDecrement);

-    HValue* key = builder.BeginBody(from, to, Token::LT);
+    HValue* key = builder.BeginBody(to, from, Token::GT);

-    Add<HStoreKeyed>(elements, key, hole, elements_kind);
+    HValue* adjusted_key = AddUncasted<HSub>(key, graph()->GetConstant1());
+    adjusted_key->ClearFlag(HValue::kCanOverflow);
+
+    Add<HStoreKeyed>(elements, adjusted_key, hole, elements_kind);

     builder.EndBody();
   }
 }


-void HGraphBuilder::BuildCopyElements(HValue* from_elements,
+void HGraphBuilder::BuildCopyElements(HValue* array,
+                                      HValue* from_elements,
                                       ElementsKind from_elements_kind,
                                       HValue* to_elements,
                                       ElementsKind to_elements_kind,
                                       HValue* length,
                                       HValue* capacity) {
-  bool pre_fill_with_holes =
+  int constant_capacity = -1;
+  if (capacity != NULL &&
+      capacity->IsConstant() &&
+      HConstant::cast(capacity)->HasInteger32Value()) {
+    int constant_candidate = HConstant::cast(capacity)->Integer32Value();
+    if (constant_candidate <=
+        FastCloneShallowArrayStub::kMaximumInlinedCloneLength) {
+      constant_capacity = constant_candidate;
+    }
+  }
+
+  if (constant_capacity != -1) {
+    // Unroll the loop for small elements kinds.
+    for (int i = 0; i < constant_capacity; i++) {
+      HValue* key_constant = Add<HConstant>(i);
+      HInstruction* value = Add<HLoadKeyed>(from_elements, key_constant,
+                                            static_cast<HValue*>(NULL),
+                                            from_elements_kind);
+      Add<HStoreKeyed>(to_elements, key_constant, value, to_elements_kind);
+    }
+  } else {
+    bool pre_fill_with_holes =
       IsFastDoubleElementsKind(from_elements_kind) &&
       IsFastObjectElementsKind(to_elements_kind);

-  if (pre_fill_with_holes) {
-    // If the copy might trigger a GC, make sure that the FixedArray is
- // pre-initialized with holes to make sure that it's always in a consistent
-    // state.
-    BuildFillElementsWithHole(to_elements, to_elements_kind,
-                              graph()->GetConstant0(), capacity);
-  }
+    if (pre_fill_with_holes) {
+      // If the copy might trigger a GC, make sure that the FixedArray is
+      // pre-initialized with holes to make sure that it's always in a
+      // consistent state.
+      BuildFillElementsWithHole(to_elements, to_elements_kind,
+                                graph()->GetConstant0(), NULL);
+    } else if (capacity == NULL || !length->Equals(capacity)) {
+      BuildFillElementsWithHole(to_elements, to_elements_kind,
+                                length, NULL);
+    }

-  LoopBuilder builder(this, context(), LoopBuilder::kPostIncrement);
+    if (capacity == NULL) {
+      capacity = AddLoadFixedArrayLength(to_elements);
+    }

-  HValue* key = builder.BeginBody(graph()->GetConstant0(), length, Token::LT);
+    LoopBuilder builder(this, context(), LoopBuilder::kPostDecrement);

-  HValue* element = Add<HLoadKeyed>(from_elements, key,
-                                    static_cast<HValue*>(NULL),
-                                    from_elements_kind,
-                                    ALLOW_RETURN_HOLE);
+    HValue* key = builder.BeginBody(length, graph()->GetConstant0(),
+                                    Token::GT);

-  ElementsKind kind = (IsHoleyElementsKind(from_elements_kind) &&
-                       IsFastSmiElementsKind(to_elements_kind))
+    key = AddUncasted<HSub>(key, graph()->GetConstant1());
+    key->ClearFlag(HValue::kCanOverflow);
+
+    HValue* element = Add<HLoadKeyed>(from_elements, key,
+                                      static_cast<HValue*>(NULL),
+                                      from_elements_kind,
+                                      ALLOW_RETURN_HOLE);
+
+    ElementsKind kind = (IsHoleyElementsKind(from_elements_kind) &&
+                         IsFastSmiElementsKind(to_elements_kind))
       ? FAST_HOLEY_ELEMENTS : to_elements_kind;

-  if (IsHoleyElementsKind(from_elements_kind) &&
-      from_elements_kind != to_elements_kind) {
-    IfBuilder if_hole(this);
-    if_hole.If<HCompareHoleAndBranch>(element);
-    if_hole.Then();
-    HConstant* hole_constant = IsFastDoubleElementsKind(to_elements_kind)
+    if (IsHoleyElementsKind(from_elements_kind) &&
+        from_elements_kind != to_elements_kind) {
+      IfBuilder if_hole(this);
+      if_hole.If<HCompareHoleAndBranch>(element);
+      if_hole.Then();
+      HConstant* hole_constant = IsFastDoubleElementsKind(to_elements_kind)
         ? Add<HConstant>(FixedDoubleArray::hole_nan_as_double())
         : graph()->GetConstantHole();
-    Add<HStoreKeyed>(to_elements, key, hole_constant, kind);
-    if_hole.Else();
-    HStoreKeyed* store = Add<HStoreKeyed>(to_elements, key, element, kind);
-    store->SetFlag(HValue::kAllowUndefinedAsNaN);
-    if_hole.End();
-  } else {
-    HStoreKeyed* store = Add<HStoreKeyed>(to_elements, key, element, kind);
-    store->SetFlag(HValue::kAllowUndefinedAsNaN);
+      Add<HStoreKeyed>(to_elements, key, hole_constant, kind);
+      if_hole.Else();
+      HStoreKeyed* store = Add<HStoreKeyed>(to_elements, key, element, kind);
+      store->SetFlag(HValue::kAllowUndefinedAsNaN);
+      if_hole.End();
+    } else {
+      HStoreKeyed* store = Add<HStoreKeyed>(to_elements, key, element, kind);
+      store->SetFlag(HValue::kAllowUndefinedAsNaN);
+    }
+
+    builder.EndBody();
   }

-  builder.EndBody();
-
-  if (!pre_fill_with_holes && length != capacity) {
-    // Fill unused capacity with the hole.
-    BuildFillElementsWithHole(to_elements, to_elements_kind,
-                              key, capacity);
-  }
+  Counters* counters = isolate()->counters();
+  AddIncrementCounter(counters->inlined_copied_elements());
 }

-
-HValue* HGraphBuilder::BuildCloneShallowArray(HValue* boilerplate,
-                                              HValue* allocation_site,
-                                              AllocationSiteMode mode,
-                                              ElementsKind kind,
-                                              int length) {
-  NoObservableSideEffectsScope no_effects(this);
-
+HValue* HGraphBuilder::BuildCloneShallowArrayCommon(
+    HValue* boilerplate,
+    HValue* allocation_site,
+    HValue* extra_size,
+    HValue** return_elements,
+    AllocationSiteMode mode) {
   // All sizes here are multiples of kPointerSize.
-  int size = JSArray::kSize;
+  int array_size = JSArray::kSize;
   if (mode == TRACK_ALLOCATION_SITE) {
-    size += AllocationMemento::kSize;
+    array_size += AllocationMemento::kSize;
+  }
+
+  HValue* size_in_bytes = Add<HConstant>(array_size);
+  if (extra_size != NULL) {
+    size_in_bytes = AddUncasted<HAdd>(extra_size, size_in_bytes);
+    size_in_bytes->ClearFlag(HValue::kCanOverflow);
   }

-  HValue* size_in_bytes = Add<HConstant>(size);
   HInstruction* object = Add<HAllocate>(size_in_bytes,
                                         HType::JSObject(),
                                         NOT_TENURED,
                                         JS_OBJECT_TYPE);

   // Copy the JS array part.
-  for (int i = 0; i < JSArray::kSize; i += kPointerSize) {
-    if ((i != JSArray::kElementsOffset) || (length == 0)) {
-      HObjectAccess access = HObjectAccess::ForJSArrayOffset(i);
-      Add<HStoreNamedField>(
-          object, access, Add<HLoadNamedField>(
-              boilerplate, static_cast<HValue*>(NULL), access));
-    }
-  }
+  HValue* map = Add<HLoadNamedField>(boilerplate,
+      static_cast<HValue*>(NULL), HObjectAccess::ForMap());
+  Add<HStoreNamedField>(object, HObjectAccess::ForPropertiesPointer(),
+      Add<HConstant>(isolate()->factory()->empty_fixed_array()),
+                     INITIALIZING_STORE);
+  Add<HStoreNamedField>(object, HObjectAccess::ForMap(), map,
+                        INITIALIZING_STORE);

   // Create an allocation site info if requested.
   if (mode == TRACK_ALLOCATION_SITE) {
     BuildCreateAllocationMemento(
         object, Add<HConstant>(JSArray::kSize), allocation_site);
   }
+
+  if (extra_size != NULL) {
+    HValue* elements = Add<HInnerAllocatedObject>(object,
+        Add<HConstant>(array_size));
+    if (return_elements != NULL) *return_elements = elements;
+  }
+
+  return object;
+}
+
+
+HValue* HGraphBuilder::BuildCloneShallowArrayCow(HValue* boilerplate,
+                                                 HValue* allocation_site,
+                                                 AllocationSiteMode mode,
+                                                 ElementsKind kind) {
+  HValue* result = BuildCloneShallowArrayCommon(boilerplate,
+      allocation_site, NULL, NULL, mode);
+
+  HValue* elements = AddLoadElements(boilerplate);
+  HObjectAccess access = HObjectAccess::ForElementsPointer();
+  Add<HStoreNamedField>(result, access, elements, INITIALIZING_STORE);
+
+  HValue* length = AddLoadArrayLength(boilerplate, kind);
+  access = HObjectAccess::ForArrayLength(kind);
+  Add<HStoreNamedField>(result, access, length, INITIALIZING_STORE);
+
+  return result;
+}
+
+
+HValue* HGraphBuilder::BuildCloneShallowArrayEmpty(HValue* boilerplate,
+                                                   HValue* allocation_site,
+                                                   AllocationSiteMode mode) {
+  HValue* result = BuildCloneShallowArrayCommon(boilerplate,
+      allocation_site, NULL, NULL, mode);
+
+  HObjectAccess access = HObjectAccess::ForArrayLength(FAST_ELEMENTS);
+  Add<HStoreNamedField>(result, access, graph()->GetConstant0(),
+                        INITIALIZING_STORE);
+  access = HObjectAccess::ForElementsPointer();
+  Add<HStoreNamedField>(result, access,
+      Add<HConstant>(isolate()->factory()->empty_fixed_array()),
+                     INITIALIZING_STORE);
+
+  return result;
+}
+

-  if (length > 0) {
-    // We have to initialize the elements pointer if allocation folding is
-    // turned off.
-    if (!FLAG_use_gvn || !FLAG_use_allocation_folding) {
-      HConstant* empty_fixed_array = Add<HConstant>(
-          isolate()->factory()->empty_fixed_array());
-      Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
-          empty_fixed_array, INITIALIZING_STORE);
-    }
+HValue* HGraphBuilder::BuildCloneShallowArrayNonEmpty(HValue* boilerplate,
+                                                      HValue* allocation_site,
+                                                      AllocationSiteMode mode,
+                                                      ElementsKind kind) {
+  int elements_kind_size = IsFastDoubleElementsKind(kind)
+    ? kDoubleSize : kPointerSize;

-    HValue* boilerplate_elements = AddLoadElements(boilerplate);
-    HValue* object_elements;
-    if (IsFastDoubleElementsKind(kind)) {
-      HValue* elems_size = Add<HConstant>(FixedDoubleArray::SizeFor(length));
-      object_elements = Add<HAllocate>(elems_size, HType::Tagged(),
-          NOT_TENURED, FIXED_DOUBLE_ARRAY_TYPE);
-    } else {
-      HValue* elems_size = Add<HConstant>(FixedArray::SizeFor(length));
-      object_elements = Add<HAllocate>(elems_size, HType::Tagged(),
-          NOT_TENURED, FIXED_ARRAY_TYPE);
-    }
-    Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
-                          object_elements);
+  HValue* boilerplate_elements = AddLoadElements(boilerplate);
+  HValue* capacity = AddLoadFixedArrayLength(boilerplate_elements);
+  HValue* extra = AddUncasted<HMul>(capacity,
+                                    Add<HConstant>(elements_kind_size));
+  extra->ClearFlag(HValue::kCanOverflow);
+  extra = AddUncasted<HAdd>(extra, Add<HConstant>(FixedArray::kHeaderSize));
+  extra->ClearFlag(HValue::kCanOverflow);
+  HValue* elements = NULL;
+  HValue* result = BuildCloneShallowArrayCommon(boilerplate,
+      allocation_site, extra, &elements, mode);
+  Add<HStoreNamedField>(result, HObjectAccess::ForElementsPointer(),
+                        elements, INITIALIZING_STORE);

-    // Copy the elements array header.
-    for (int i = 0; i < FixedArrayBase::kHeaderSize; i += kPointerSize) {
-      HObjectAccess access = HObjectAccess::ForFixedArrayHeader(i);
-      Add<HStoreNamedField>(
-          object_elements, access, Add<HLoadNamedField>(
-              boilerplate_elements, static_cast<HValue*>(NULL), access));
-    }
+  // The allocation for the cloned array above causes register pressure on
+  // machines with low register counts. Force a reload of the boilerplate
+  // elements here to free up a register for the allocation to avoid
+  // unnecessary spillage.
+  boilerplate_elements = AddLoadElements(boilerplate);
+  boilerplate_elements->SetFlag(HValue::kCantBeReplaced);

-    // Copy the elements array contents.
-    // TODO(mstarzinger): Teach HGraphBuilder::BuildCopyElements to unfold
-    // copying loops with constant length up to a given boundary and use this
-    // helper here instead.
-    for (int i = 0; i < length; i++) {
-      HValue* key_constant = Add<HConstant>(i);
-      HInstruction* value = Add<HLoadKeyed>(boilerplate_elements, key_constant,
-                                            static_cast<HValue*>(NULL), kind);
-      Add<HStoreKeyed>(object_elements, key_constant, value, kind);
-    }
+  // Copy the elements array header.
+  for (int i = 0; i < FixedArrayBase::kHeaderSize; i += kPointerSize) {
+    HObjectAccess access = HObjectAccess::ForFixedArrayHeader(i);
+    Add<HStoreNamedField>(elements, access,
+        Add<HLoadNamedField>(boilerplate_elements,
+                             static_cast<HValue*>(NULL), access),
+        INITIALIZING_STORE);
   }

-  return object;
+  // And copy the boilerplate's length into the result.
+  HValue* length = Add<HLoadNamedField>(boilerplate, static_cast<HValue*>(NULL),
+                                        HObjectAccess::ForArrayLength(kind));
+  Add<HStoreNamedField>(result, HObjectAccess::ForArrayLength(kind),
+                        length, INITIALIZING_STORE);
+
+  BuildCopyElements(result, boilerplate_elements, kind, elements,
+                    kind, length, NULL);
+
+  return result;
 }


=======================================
--- /branches/bleeding_edge/src/hydrogen.h      Fri May  9 12:19:59 2014 UTC
+++ /branches/bleeding_edge/src/hydrogen.h      Fri May  9 15:55:45 2014 UTC
@@ -1397,7 +1397,8 @@
     store_map->SkipWriteBarrier();
     return store_map;
   }
-  HLoadNamedField* AddLoadElements(HValue* object);
+  HLoadNamedField* AddLoadElements(HValue* object,
+                                   HValue* dependency = NULL);

   bool MatchRotateRight(HValue* left,
                         HValue* right,
@@ -1413,7 +1414,12 @@
                                Maybe<int> fixed_right_arg,
                                HAllocationMode allocation_mode);

-  HLoadNamedField* AddLoadFixedArrayLength(HValue *object);
+  HLoadNamedField* AddLoadFixedArrayLength(HValue *object,
+                                           HValue *dependency = NULL);
+
+  HLoadNamedField* AddLoadArrayLength(HValue *object,
+                                      ElementsKind kind,
+                                      HValue *dependency = NULL);

   HValue* AddLoadJSBuiltin(Builtins::JavaScript builtin);

@@ -1753,18 +1759,33 @@
                                  HValue* from,
                                  HValue* to);

-  void BuildCopyElements(HValue* from_elements,
+  void BuildCopyElements(HValue* array,
+                         HValue* from_elements,
                          ElementsKind from_elements_kind,
                          HValue* to_elements,
                          ElementsKind to_elements_kind,
                          HValue* length,
                          HValue* capacity);

-  HValue* BuildCloneShallowArray(HValue* boilerplate,
-                                 HValue* allocation_site,
-                                 AllocationSiteMode mode,
-                                 ElementsKind kind,
-                                 int length);
+  HValue* BuildCloneShallowArrayCommon(HValue* boilerplate,
+                                       HValue* allocation_site,
+                                       HValue* extra_size,
+                                       HValue** return_elements,
+                                       AllocationSiteMode mode);
+
+  HValue* BuildCloneShallowArrayCow(HValue* boilerplate,
+                                    HValue* allocation_site,
+                                    AllocationSiteMode mode,
+                                    ElementsKind kind);
+
+  HValue* BuildCloneShallowArrayEmpty(HValue* boilerplate,
+                                      HValue* allocation_site,
+                                      AllocationSiteMode mode);
+
+  HValue* BuildCloneShallowArrayNonEmpty(HValue* boilerplate,
+                                         HValue* allocation_site,
+                                         AllocationSiteMode mode,
+                                         ElementsKind kind);

   HValue* BuildElementIndexHash(HValue* index);

=======================================
--- /branches/bleeding_edge/src/ia32/code-stubs-ia32.cc  Fri May  9 13:01:50 2014 UTC
+++ /branches/bleeding_edge/src/ia32/code-stubs-ia32.cc  Fri May  9 15:55:45 2014 UTC
@@ -63,6 +63,11 @@
   static Register registers[] = { eax, ebx, ecx };
   descriptor->register_param_count_ = 3;
   descriptor->register_params_ = registers;
+  static Representation representations[] = {
+    Representation::Tagged(),
+    Representation::Smi(),
+    Representation::Tagged() };
+  descriptor->register_param_representations_ = representations;
   descriptor->deoptimization_handler_ =
       Runtime::FunctionForId(
           Runtime::kHiddenCreateArrayLiteralStubBailout)->entry;
@@ -194,6 +199,11 @@
     descriptor->stack_parameter_count_ = eax;
     descriptor->register_param_count_ = 3;
     descriptor->register_params_ = registers_variable_args;
+    static Representation representations[] = {
+        Representation::Tagged(),
+        Representation::Tagged(),
+        Representation::Integer32() };
+    descriptor->register_param_representations_ = representations;
   }

   descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count;
@@ -221,6 +231,10 @@
     descriptor->stack_parameter_count_ = eax;
     descriptor->register_param_count_ = 2;
     descriptor->register_params_ = registers_variable_args;
+    static Representation representations[] = {
+        Representation::Tagged(),
+        Representation::Integer32() };
+    descriptor->register_param_representations_ = representations;
   }

   descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count;
=======================================
--- /branches/bleeding_edge/src/ia32/full-codegen-ia32.cc  Fri May  9 14:28:59 2014 UTC
+++ /branches/bleeding_edge/src/ia32/full-codegen-ia32.cc  Fri May  9 15:55:45 2014 UTC
@@ -1729,50 +1729,19 @@
     allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
   }

-  Heap* heap = isolate()->heap();
-  if (has_constant_fast_elements &&
-      constant_elements_values->map() == heap->fixed_cow_array_map()) {
-    // If the elements are already FAST_*_ELEMENTS, the boilerplate cannot
-    // change, so it's possible to specialize the stub in advance.
-    __ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(), 1);
+  if (expr->depth() > 1) {
     __ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
-    __ mov(eax, FieldOperand(ebx, JSFunction::kLiteralsOffset));
-    __ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
-    __ mov(ecx, Immediate(constant_elements));
-    FastCloneShallowArrayStub stub(
-        isolate(),
-        FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
-        allocation_site_mode,
-        length);
-    __ CallStub(&stub);
-  } else if (expr->depth() > 1 || Serializer::enabled(isolate()) ||
-             length > FastCloneShallowArrayStub::kMaximumClonedLength) {
-    __ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
     __ push(FieldOperand(ebx, JSFunction::kLiteralsOffset));
     __ push(Immediate(Smi::FromInt(expr->literal_index())));
     __ push(Immediate(constant_elements));
     __ push(Immediate(Smi::FromInt(flags)));
     __ CallRuntime(Runtime::kHiddenCreateArrayLiteral, 4);
   } else {
-    ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) ||
-           FLAG_smi_only_arrays);
-    FastCloneShallowArrayStub::Mode mode =
-        FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;
-
-    // If the elements are already FAST_*_ELEMENTS, the boilerplate cannot
-    // change, so it's possible to specialize the stub in advance.
-    if (has_constant_fast_elements) {
-      mode = FastCloneShallowArrayStub::CLONE_ELEMENTS;
-    }
-
     __ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
     __ mov(eax, FieldOperand(ebx, JSFunction::kLiteralsOffset));
     __ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
     __ mov(ecx, Immediate(constant_elements));
-    FastCloneShallowArrayStub stub(isolate(),
-                                   mode,
-                                   allocation_site_mode,
-                                   length);
+    FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
     __ CallStub(&stub);
   }

=======================================
--- /branches/bleeding_edge/src/lithium.cc      Tue May  6 12:11:00 2014 UTC
+++ /branches/bleeding_edge/src/lithium.cc      Fri May  9 15:55:45 2014 UTC
@@ -449,6 +449,8 @@
                    CodeEndLinePosInfoRecordEvent(*code, jit_handler_data));

     CodeGenerator::PrintCode(code, info());
+    ASSERT(!(info()->GetMustNotHaveEagerFrame() &&
+             generator.NeedsEagerFrame()));
     return code;
   }
   assembler.AbortedCodeGeneration();
=======================================
--- /branches/bleeding_edge/src/mips/full-codegen-mips.cc  Tue May  6 09:28:08 2014 UTC
+++ /branches/bleeding_edge/src/mips/full-codegen-mips.cc  Fri May  9 15:55:45 2014 UTC
@@ -1805,18 +1805,7 @@
   __ lw(a3, FieldMemOperand(a3, JSFunction::kLiteralsOffset));
   __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
   __ li(a1, Operand(constant_elements));
-  if (has_fast_elements && constant_elements_values->map() ==
-      isolate()->heap()->fixed_cow_array_map()) {
-    FastCloneShallowArrayStub stub(
-        isolate(),
-        FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
-        allocation_site_mode,
-        length);
-    __ CallStub(&stub);
-    __ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(),
-        1, a1, a2);
-  } else if (expr->depth() > 1 || Serializer::enabled(isolate()) ||
-             length > FastCloneShallowArrayStub::kMaximumClonedLength) {
+  if (expr->depth() > 1) {
     __ li(a0, Operand(Smi::FromInt(flags)));
     __ Push(a3, a2, a1, a0);
     __ CallRuntime(Runtime::kHiddenCreateArrayLiteral, 4);
=======================================
--- /branches/bleeding_edge/src/x64/code-stubs-x64.cc  Fri May  9 13:01:50 2014 UTC
+++ /branches/bleeding_edge/src/x64/code-stubs-x64.cc  Fri May  9 15:55:45 2014 UTC
@@ -59,6 +59,11 @@
   static Register registers[] = { rax, rbx, rcx };
   descriptor->register_param_count_ = 3;
   descriptor->register_params_ = registers;
+  static Representation representations[] = {
+    Representation::Tagged(),
+    Representation::Smi(),
+    Representation::Tagged() };
+  descriptor->register_param_representations_ = representations;
   descriptor->deoptimization_handler_ =
       Runtime::FunctionForId(
           Runtime::kHiddenCreateArrayLiteralStubBailout)->entry;
@@ -188,6 +193,11 @@
     descriptor->handler_arguments_mode_ = PASS_ARGUMENTS;
     descriptor->stack_parameter_count_ = rax;
     descriptor->register_param_count_ = 3;
+    static Representation representations[] = {
+        Representation::Tagged(),
+        Representation::Tagged(),
+        Representation::Integer32() };
+    descriptor->register_param_representations_ = representations;
     descriptor->register_params_ = registers_variable_args;
   }

@@ -216,6 +226,10 @@
     descriptor->stack_parameter_count_ = rax;
     descriptor->register_param_count_ = 2;
     descriptor->register_params_ = registers_variable_args;
+    static Representation representations[] = {
+        Representation::Tagged(),
+        Representation::Integer32() };
+    descriptor->register_param_representations_ = representations;
   }

   descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count;
=======================================
--- /branches/bleeding_edge/src/x64/full-codegen-x64.cc  Fri May  9 14:28:59 2014 UTC
+++ /branches/bleeding_edge/src/x64/full-codegen-x64.cc  Fri May  9 15:55:45 2014 UTC
@@ -1766,49 +1766,19 @@
     allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
   }

-  Heap* heap = isolate()->heap();
-  if (has_constant_fast_elements &&
-      constant_elements_values->map() == heap->fixed_cow_array_map()) {
-    // If the elements are already FAST_*_ELEMENTS, the boilerplate cannot
-    // change, so it's possible to specialize the stub in advance.
-    __ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(), 1);
+  if (expr->depth() > 1) {
     __ movp(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
-    __ movp(rax, FieldOperand(rbx, JSFunction::kLiteralsOffset));
-    __ Move(rbx, Smi::FromInt(expr->literal_index()));
-    __ Move(rcx, constant_elements);
-    FastCloneShallowArrayStub stub(
-        isolate(),
-        FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
-        allocation_site_mode,
-        length);
-    __ CallStub(&stub);
-  } else if (expr->depth() > 1 || Serializer::enabled(isolate()) ||
-             length > FastCloneShallowArrayStub::kMaximumClonedLength) {
-    __ movp(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
     __ Push(FieldOperand(rbx, JSFunction::kLiteralsOffset));
     __ Push(Smi::FromInt(expr->literal_index()));
     __ Push(constant_elements);
     __ Push(Smi::FromInt(flags));
     __ CallRuntime(Runtime::kHiddenCreateArrayLiteral, 4);
   } else {
-    ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) ||
-           FLAG_smi_only_arrays);
-    FastCloneShallowArrayStub::Mode mode =
-        FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;
-
-    // If the elements are already FAST_*_ELEMENTS, the boilerplate cannot
-    // change, so it's possible to specialize the stub in advance.
-    if (has_constant_fast_elements) {
-      mode = FastCloneShallowArrayStub::CLONE_ELEMENTS;
-    }
-
     __ movp(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
     __ movp(rax, FieldOperand(rbx, JSFunction::kLiteralsOffset));
     __ Move(rbx, Smi::FromInt(expr->literal_index()));
     __ Move(rcx, constant_elements);
-    FastCloneShallowArrayStub stub(isolate(),
-                                   mode,
-                                   allocation_site_mode, length);
+    FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
     __ CallStub(&stub);
   }
