Revision: 16855
Author:   [email protected]
Date:     Fri Sep 20 09:27:40 2013 UTC
Log:      Some cleanup fixes

* Consolidated CopyJSObject and CopyJSObjectWithAllocationSite into a single
  CopyJSObject entry point (sketched below).
* Added a Factory.h helper for struct maps.
* BuildFastLiteral shouldn't create allocation sites in pretenuring mode.
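
A minimal sketch of the consolidated interface, drawn only from the src/heap.h
and src/runtime.cc hunks below (illustration, not additional code in this patch):

    // One entry point; the defaulted argument replaces
    // CopyJSObjectWithAllocationSite.
    MUST_USE_RESULT MaybeObject* CopyJSObject(JSObject* source,
                                              AllocationSite* site = NULL);

    // Callers that track allocation sites pass the site; everyone else keeps
    // the old one-argument call:
    isolate->heap()->CopyJSObject(boilerplate, *site);  // appends a memento
    isolate->heap()->CopyJSObject(boilerplate);         // plain deep copy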

BUG=
[email protected]

Review URL: https://codereview.chromium.org/24255005
http://code.google.com/p/v8/source/detail?r=16855

Modified:
 /branches/bleeding_edge/src/arm/code-stubs-arm.cc
 /branches/bleeding_edge/src/arm/macro-assembler-arm.cc
 /branches/bleeding_edge/src/code-stubs-hydrogen.cc
 /branches/bleeding_edge/src/factory.h
 /branches/bleeding_edge/src/heap.cc
 /branches/bleeding_edge/src/heap.h
 /branches/bleeding_edge/src/hydrogen.cc
 /branches/bleeding_edge/src/ia32/code-stubs-ia32.cc
 /branches/bleeding_edge/src/ia32/macro-assembler-ia32.cc
 /branches/bleeding_edge/src/mips/code-stubs-mips.cc
 /branches/bleeding_edge/src/mips/macro-assembler-mips.cc
 /branches/bleeding_edge/src/runtime.cc
 /branches/bleeding_edge/src/x64/code-stubs-x64.cc

=======================================
--- /branches/bleeding_edge/src/arm/code-stubs-arm.cc Thu Sep 19 06:07:23 2013 UTC
+++ /branches/bleeding_edge/src/arm/code-stubs-arm.cc Fri Sep 20 09:27:40 2013 UTC
@@ -4257,6 +4257,7 @@
   // Cache the called function in a global property cell.  Cache states
   // are uninitialized, monomorphic (indicated by a JSFunction), and
   // megamorphic.
+  // r0 : number of arguments to the construct function
   // r1 : the function to call
   // r2 : cache cell for call target
   Label initialize, done, miss, megamorphic, not_array_function;
@@ -4278,9 +4279,6 @@
   // If we didn't have a matching function, and we didn't find the megamorph
   // sentinel, then we have in the cell either some other function or an
   // AllocationSite. Do a map check on the object in ecx.
-  Handle<Map> allocation_site_map(
-      masm->isolate()->heap()->allocation_site_map(),
-      masm->isolate());
   __ ldr(r5, FieldMemOperand(r3, 0));
   __ CompareRoot(r5, Heap::kAllocationSiteMapRootIndex);
   __ b(ne, &miss);
@@ -4317,6 +4315,7 @@
   {
     FrameScope scope(masm, StackFrame::INTERNAL);

+    // Arguments register must be smi-tagged to call out.
     __ SmiTag(r0);
     __ push(r0);
     __ push(r1);
=======================================
--- /branches/bleeding_edge/src/arm/macro-assembler-arm.cc Thu Sep 19 06:07:23 2013 UTC
+++ /branches/bleeding_edge/src/arm/macro-assembler-arm.cc Fri Sep 20 09:27:40 2013 UTC
@@ -3892,7 +3892,7 @@
   b(gt, &no_memento_available);
   ldr(scratch_reg, MemOperand(scratch_reg, -AllocationMemento::kSize));
   cmp(scratch_reg,
-      Operand(Handle<Map>(isolate()->heap()->allocation_memento_map())));
+      Operand(isolate()->factory()->allocation_memento_map()));
   bind(&no_memento_available);
 }

=======================================
--- /branches/bleeding_edge/src/code-stubs-hydrogen.cc Thu Sep 19 14:13:34 2013 UTC
+++ /branches/bleeding_edge/src/code-stubs-hydrogen.cc Fri Sep 20 09:27:40 2013 UTC
@@ -464,8 +464,7 @@
       JS_OBJECT_TYPE);

   // Store the map
-  Handle<Map> allocation_site_map(isolate()->heap()->allocation_site_map(),
-                                  isolate());
+ Handle<Map> allocation_site_map = isolate()->factory()->allocation_site_map();
   AddStoreMapConstant(object, allocation_site_map);

   // Store the payload (smi elements kind)
@@ -1064,11 +1063,12 @@
       Add<HConstant>(factory->empty_fixed_array());
   HValue* shared_info = GetParameter(0);

+  AddIncrementCounter(counters->fast_new_closure_total());
+
   // Create a new closure from the given function info in new space
   HValue* size = Add<HConstant>(JSFunction::kSize);
   HInstruction* js_function = Add<HAllocate>(size, HType::JSObject(),
NOT_TENURED, JS_FUNCTION_TYPE);
-  AddIncrementCounter(counters->fast_new_closure_total());

   int map_index = Context::FunctionMapIndex(casted_stub()->language_mode(),
                                             casted_stub()->is_generator());
=======================================
--- /branches/bleeding_edge/src/factory.h       Tue Sep 10 14:33:06 2013 UTC
+++ /branches/bleeding_edge/src/factory.h       Fri Sep 20 09:27:40 2013 UTC
@@ -462,7 +462,15 @@
&isolate()->heap()->roots_[Heap::k##camel_name##RootIndex])); \
   }
   ROOT_LIST(ROOT_ACCESSOR)
-#undef ROOT_ACCESSOR_ACCESSOR
+#undef ROOT_ACCESSOR
+
+#define STRUCT_MAP_ACCESSOR(NAME, Name, name) \
+  inline Handle<Map> name##_map() { \
+    return Handle<Map>(BitCast<Map**>( \
+        &isolate()->heap()->roots_[Heap::k##Name##MapRootIndex])); \
+    }
+  STRUCT_LIST(STRUCT_MAP_ACCESSOR)
+#undef STRUCT_MAP_ACCESSOR

#define STRING_ACCESSOR(name, str) \
  inline Handle<String> name() { \
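
For readability, the expansion the new STRUCT_MAP_ACCESSOR produces for one
STRUCT_LIST entry looks roughly like this (assuming AllocationSite's entry uses
the name allocation_site, as the factory()->allocation_site_map() call sites
elsewhere in this patch suggest):

    inline Handle<Map> allocation_site_map() {
      return Handle<Map>(BitCast<Map**>(
          &isolate()->heap()->roots_[Heap::kAllocationSiteMapRootIndex]));
    }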
=======================================
--- /branches/bleeding_edge/src/heap.cc Wed Sep 18 14:46:30 2013 UTC
+++ /branches/bleeding_edge/src/heap.cc Fri Sep 20 09:27:40 2013 UTC
@@ -2967,17 +2967,16 @@


 MaybeObject* Heap::AllocateAllocationSite() {
-  Object* result;
+  AllocationSite* site;
   MaybeObject* maybe_result = Allocate(allocation_site_map(),
                                        OLD_POINTER_SPACE);
-  if (!maybe_result->ToObject(&result)) return maybe_result;
-  AllocationSite* site = AllocationSite::cast(result);
+  if (!maybe_result->To(&site)) return maybe_result;
   site->Initialize();

   // Link the site
   site->set_weak_next(allocation_sites_list());
   set_allocation_sites_list(site);
-  return result;
+  return site;
 }


@@ -4924,7 +4923,7 @@
 }


-MaybeObject* Heap::CopyJSObject(JSObject* source) {
+MaybeObject* Heap::CopyJSObject(JSObject* source, AllocationSite* site) {
   // Never used to copy functions.  If functions need to be copied we
   // have to be careful to clear the literals array.
   SLOW_ASSERT(!source->IsJSFunction());
@@ -4934,6 +4933,9 @@
   int object_size = map->instance_size();
   Object* clone;

+  ASSERT(site == NULL || (AllocationSite::CanTrack(map->instance_type()) &&
+                          map->instance_type() == JS_ARRAY_TYPE));
+
   WriteBarrierMode wb_mode = UPDATE_WRITE_BARRIER;

   // If we're forced to always allocate, we use the general allocation
@@ -4954,7 +4956,10 @@
   } else {
     wb_mode = SKIP_WRITE_BARRIER;

-    { MaybeObject* maybe_clone = new_space_.AllocateRaw(object_size);
+    { int adjusted_object_size = site != NULL
+          ? object_size + AllocationMemento::kSize
+          : object_size;
+      MaybeObject* maybe_clone = new_space_.AllocateRaw(adjusted_object_size);
       if (!maybe_clone->ToObject(&clone)) return maybe_clone;
     }
     SLOW_ASSERT(InNewSpace(clone));
@@ -4963,117 +4968,14 @@
     CopyBlock(HeapObject::cast(clone)->address(),
               source->address(),
               object_size);
-  }
-
-  SLOW_ASSERT(
- JSObject::cast(clone)->GetElementsKind() == source->GetElementsKind());
-  FixedArrayBase* elements = FixedArrayBase::cast(source->elements());
-  FixedArray* properties = FixedArray::cast(source->properties());
-  // Update elements if necessary.
-  if (elements->length() > 0) {
-    Object* elem;
-    { MaybeObject* maybe_elem;
-      if (elements->map() == fixed_cow_array_map()) {
-        maybe_elem = FixedArray::cast(elements);
-      } else if (source->HasFastDoubleElements()) {
- maybe_elem = CopyFixedDoubleArray(FixedDoubleArray::cast(elements));
-      } else {
-        maybe_elem = CopyFixedArray(FixedArray::cast(elements));
-      }
-      if (!maybe_elem->ToObject(&elem)) return maybe_elem;
-    }
- JSObject::cast(clone)->set_elements(FixedArrayBase::cast(elem), wb_mode);
-  }
-  // Update properties if necessary.
-  if (properties->length() > 0) {
-    Object* prop;
-    { MaybeObject* maybe_prop = CopyFixedArray(properties);
-      if (!maybe_prop->ToObject(&prop)) return maybe_prop;
-    }
-    JSObject::cast(clone)->set_properties(FixedArray::cast(prop), wb_mode);
-  }
-  // Return the new clone.
-  return clone;
-}
-
-
-MaybeObject* Heap::CopyJSObjectWithAllocationSite(
-    JSObject* source,
-    AllocationSite* site) {
-  // Never used to copy functions.  If functions need to be copied we
-  // have to be careful to clear the literals array.
-  SLOW_ASSERT(!source->IsJSFunction());
-
-  // Make the clone.
-  Map* map = source->map();
-  int object_size = map->instance_size();
-  Object* clone;
-
-  ASSERT(AllocationSite::CanTrack(map->instance_type()));
-  ASSERT(map->instance_type() == JS_ARRAY_TYPE);
-  WriteBarrierMode wb_mode = UPDATE_WRITE_BARRIER;

-  // If we're forced to always allocate, we use the general allocation
-  // functions which may leave us with an object in old space.
-  int adjusted_object_size = object_size;
-  if (always_allocate()) {
-    // We'll only track origin if we are certain to allocate in new space
-    const int kMinFreeNewSpaceAfterGC = InitialSemiSpaceSize() * 3/4;
- if ((object_size + AllocationMemento::kSize) < kMinFreeNewSpaceAfterGC) {
-      adjusted_object_size += AllocationMemento::kSize;
+    if (site != NULL) {
+      AllocationMemento* alloc_memento = reinterpret_cast<AllocationMemento*>(
+          reinterpret_cast<Address>(clone) + object_size);
+      alloc_memento->set_map_no_write_barrier(allocation_memento_map());
+      ASSERT(site->map() == allocation_site_map());
+      alloc_memento->set_allocation_site(site, SKIP_WRITE_BARRIER);
     }
-
-    { MaybeObject* maybe_clone =
-          AllocateRaw(adjusted_object_size, NEW_SPACE, OLD_POINTER_SPACE);
-      if (!maybe_clone->ToObject(&clone)) return maybe_clone;
-    }
-    Address clone_address = HeapObject::cast(clone)->address();
-    CopyBlock(clone_address,
-              source->address(),
-              object_size);
-    // Update write barrier for all fields that lie beyond the header.
-    int write_barrier_offset = adjusted_object_size > object_size
-        ? JSArray::kSize + AllocationMemento::kSize
-        : JSObject::kHeaderSize;
-    if (((object_size - write_barrier_offset) / kPointerSize) > 0) {
-      RecordWrites(clone_address,
-                   write_barrier_offset,
-                   (object_size - write_barrier_offset) / kPointerSize);
-    }
-
- // Track allocation site information, if we failed to allocate it inline.
-    if (InNewSpace(clone) &&
-        adjusted_object_size == object_size) {
-      MaybeObject* maybe_alloc_memento =
-          AllocateStruct(ALLOCATION_MEMENTO_TYPE);
-      AllocationMemento* alloc_memento;
-      if (maybe_alloc_memento->To(&alloc_memento)) {
-        alloc_memento->set_map_no_write_barrier(allocation_memento_map());
-        ASSERT(site->map() == allocation_site_map());
-        alloc_memento->set_allocation_site(site, SKIP_WRITE_BARRIER);
-      }
-    }
-  } else {
-    wb_mode = SKIP_WRITE_BARRIER;
-    adjusted_object_size += AllocationMemento::kSize;
-
- { MaybeObject* maybe_clone = new_space_.AllocateRaw(adjusted_object_size);
-      if (!maybe_clone->ToObject(&clone)) return maybe_clone;
-    }
-    SLOW_ASSERT(InNewSpace(clone));
-    // Since we know the clone is allocated in new space, we can copy
-    // the contents without worrying about updating the write barrier.
-    CopyBlock(HeapObject::cast(clone)->address(),
-              source->address(),
-              object_size);
-  }
-
-  if (adjusted_object_size > object_size) {
- AllocationMemento* alloc_memento = reinterpret_cast<AllocationMemento*>(
-        reinterpret_cast<Address>(clone) + object_size);
-    alloc_memento->set_map_no_write_barrier(allocation_memento_map());
-    ASSERT(site->map() == allocation_site_map());
-    alloc_memento->set_allocation_site(site, SKIP_WRITE_BARRIER);
   }

   SLOW_ASSERT(
=======================================
--- /branches/bleeding_edge/src/heap.h  Wed Sep 18 14:46:30 2013 UTC
+++ /branches/bleeding_edge/src/heap.h  Fri Sep 20 09:27:40 2013 UTC
@@ -666,10 +666,9 @@
   // Returns a deep copy of the JavaScript object.
   // Properties and elements are copied too.
   // Returns failure if allocation failed.
-  MUST_USE_RESULT MaybeObject* CopyJSObject(JSObject* source);
-
-  MUST_USE_RESULT MaybeObject* CopyJSObjectWithAllocationSite(
-      JSObject* source, AllocationSite* site);
+  // Optionally takes an AllocationSite to be appended in an AllocationMemento.
+  MUST_USE_RESULT MaybeObject* CopyJSObject(JSObject* source,
+                                            AllocationSite* site = NULL);

   // Allocates the function prototype.
// Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
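
A rough picture of what "appended in an AllocationMemento" means, pieced
together from the heap.cc hunk above (illustration only, not code added by
this patch): when a site is passed and the clone goes to new space, the raw
allocation is enlarged by AllocationMemento::kSize and the memento is written
directly behind the object:

    // [ JSObject clone : object_size bytes ][ AllocationMemento ]
    //                                         map  -> allocation_memento_map()
    //                                         site -> the passed AllocationSite
    int adjusted_object_size = object_size + AllocationMemento::kSize;
    AllocationMemento* memento = reinterpret_cast<AllocationMemento*>(
        reinterpret_cast<Address>(clone) + object_size);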
=======================================
--- /branches/bleeding_edge/src/hydrogen.cc     Fri Sep 20 09:25:10 2013 UTC
+++ /branches/bleeding_edge/src/hydrogen.cc     Fri Sep 20 09:27:40 2013 UTC
@@ -1826,8 +1826,8 @@
   ASSERT(alloc_site != NULL);
   HInnerAllocatedObject* alloc_memento = Add<HInnerAllocatedObject>(
       previous_object, previous_object_size);
-  Handle<Map> alloc_memento_map(
-      isolate()->heap()->allocation_memento_map());
+  Handle<Map> alloc_memento_map =
+      isolate()->factory()->allocation_memento_map();
   AddStoreMapConstant(alloc_memento, alloc_memento_map);
   HObjectAccess access = HObjectAccess::ForAllocationMementoSite();
   Add<HStoreNamedField>(alloc_memento, access, alloc_site);
@@ -4297,17 +4297,26 @@
   ElementsKind boilerplate_elements_kind =
       Handle<JSObject>::cast(boilerplate_object)->GetElementsKind();

-  // TODO(mvstanton): This heuristic is only a temporary solution.  In the
- // end, we want to quit creating allocation site info after a certain number
-  // of GCs for a call site.
-  AllocationSiteMode mode = AllocationSite::GetMode(
-      boilerplate_elements_kind);
+  ASSERT(AllocationSite::CanTrack(boilerplate_object->map()->instance_type()));

   // Check whether to use fast or slow deep-copying for boilerplate.
   int max_properties = kMaxFastLiteralProperties;
   if (IsFastLiteral(boilerplate_object,
                     kMaxFastLiteralDepth,
                     &max_properties)) {
+    // TODO(mvstanton): This heuristic is only a temporary solution.  In the
+    // end, we want to quit creating allocation site info after a certain number
+    // of GCs for a call site.
+    AllocationSiteMode mode = AllocationSite::GetMode(
+        boilerplate_elements_kind);
+
+    // it doesn't make sense to create allocation mementos if we are going to
+    // create in old space.
+    if (mode == TRACK_ALLOCATION_SITE &&
+        isolate()->heap()->GetPretenureMode() == TENURED) {
+      mode = DONT_TRACK_ALLOCATION_SITE;
+    }
+
     literal = BuildFastLiteral(boilerplate_object,
                                site,
                                mode);
@@ -8238,13 +8247,15 @@
   int object_offset = object_size;

   InstanceType instance_type = boilerplate_object->map()->instance_type();
-  bool create_allocation_site_info = mode == TRACK_ALLOCATION_SITE &&
-      AllocationSite::CanTrack(instance_type);
+  bool create_allocation_site_info = mode == TRACK_ALLOCATION_SITE;

-  // If using allocation sites, then the payload on the site should already
-  // be filled in as a valid (boilerplate) array.
+  // If using allocation sites, then
+  // 1) the payload on the site should already be filled in as a valid
+  //    (boilerplate) array, and
+  // 2) we shouldn't be pretenuring the allocations.
   ASSERT(!create_allocation_site_info ||
-         AllocationSite::cast(*allocation_site_object)->IsLiteralSite());
+         (AllocationSite::cast(*allocation_site_object)->IsLiteralSite() &&
+          isolate()->heap()->GetPretenureMode() == NOT_TENURED));

   if (create_allocation_site_info) {
     object_size += AllocationMemento::kSize;
@@ -8257,7 +8268,6 @@
   HInstruction* object = Add<HAllocate>(object_size_constant, type,
       isolate()->heap()->GetPretenureMode(), instance_type);

-
   BuildEmitObjectHeader(boilerplate_object, object);

   if (create_allocation_site_info) {
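
Condensing the two hydrogen.cc changes above into one sketch (same identifiers
as in the patch; shown only to connect the hunks):

    AllocationSiteMode mode =
        AllocationSite::GetMode(boilerplate_elements_kind);
    // Mementos are only useful for new-space allocations, so under global
    // pretenuring the fast-literal path now switches tracking off...
    if (mode == TRACK_ALLOCATION_SITE &&
        isolate()->heap()->GetPretenureMode() == TENURED) {
      mode = DONT_TRACK_ALLOCATION_SITE;
    }
    // ...which lets BuildEmitDeepCopy assert that a tracked literal copy is
    // never pretenured:
    ASSERT(!create_allocation_site_info ||
           (AllocationSite::cast(*allocation_site_object)->IsLiteralSite() &&
            isolate()->heap()->GetPretenureMode() == NOT_TENURED));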
=======================================
--- /branches/bleeding_edge/src/ia32/code-stubs-ia32.cc Thu Sep 19 06:07:23 2013 UTC
+++ /branches/bleeding_edge/src/ia32/code-stubs-ia32.cc Fri Sep 20 09:27:40 2013 UTC
@@ -4119,6 +4119,7 @@
   // Cache the called function in a global property cell.  Cache states
   // are uninitialized, monomorphic (indicated by a JSFunction), and
   // megamorphic.
+  // eax : number of arguments to the construct function
   // ebx : cache cell for call target
   // edi : the function to call
   Isolate* isolate = masm->isolate();
@@ -4138,9 +4139,8 @@
   // If we didn't have a matching function, and we didn't find the megamorph
   // sentinel, then we have in the cell either some other function or an
   // AllocationSite. Do a map check on the object in ecx.
-  Handle<Map> allocation_site_map(
-      masm->isolate()->heap()->allocation_site_map(),
-      masm->isolate());
+  Handle<Map> allocation_site_map =
+      masm->isolate()->factory()->allocation_site_map();
   __ cmp(FieldOperand(ecx, 0), Immediate(allocation_site_map));
   __ j(not_equal, &miss);

@@ -4179,6 +4179,7 @@
   {
     FrameScope scope(masm, StackFrame::INTERNAL);

+    // Arguments register must be smi-tagged to call out.
     __ SmiTag(eax);
     __ push(eax);
     __ push(edi);
@@ -7211,9 +7212,8 @@
     __ inc(edx);
     __ mov(ecx, FieldOperand(ebx, Cell::kValueOffset));
     if (FLAG_debug_code) {
-      Handle<Map> allocation_site_map(
-          masm->isolate()->heap()->allocation_site_map(),
-          masm->isolate());
+      Handle<Map> allocation_site_map =
+          masm->isolate()->factory()->allocation_site_map();
       __ cmp(FieldOperand(ecx, 0), Immediate(allocation_site_map));
       __ Assert(equal, kExpectedAllocationSiteInCell);
     }
@@ -7358,8 +7358,8 @@
   __ cmp(ebx, Immediate(undefined_sentinel));
   __ j(equal, &no_info);
   __ mov(edx, FieldOperand(ebx, Cell::kValueOffset));
-  __ cmp(FieldOperand(edx, 0), Immediate(Handle<Map>(
-      masm->isolate()->heap()->allocation_site_map())));
+  __ cmp(FieldOperand(edx, 0), Immediate(
+      masm->isolate()->factory()->allocation_site_map()));
   __ j(not_equal, &no_info);

   __ mov(edx, FieldOperand(edx, AllocationSite::kTransitionInfoOffset));
=======================================
--- /branches/bleeding_edge/src/ia32/macro-assembler-ia32.cc Thu Sep 19 06:07:23 2013 UTC
+++ /branches/bleeding_edge/src/ia32/macro-assembler-ia32.cc Fri Sep 20 09:27:40 2013 UTC
@@ -3525,7 +3525,7 @@
   cmp(scratch_reg, Operand::StaticVariable(new_space_allocation_top));
   j(greater, &no_memento_available);
   cmp(MemOperand(scratch_reg, -AllocationMemento::kSize),
-      Immediate(Handle<Map>(isolate()->heap()->allocation_memento_map())));
+      Immediate(isolate()->factory()->allocation_memento_map()));
   bind(&no_memento_available);
 }

=======================================
--- /branches/bleeding_edge/src/mips/code-stubs-mips.cc Thu Sep 19 15:31:51 2013 UTC
+++ /branches/bleeding_edge/src/mips/code-stubs-mips.cc Fri Sep 20 09:27:40 2013 UTC
@@ -4340,6 +4340,7 @@
   // Cache the called function in a global property cell.  Cache states
   // are uninitialized, monomorphic (indicated by a JSFunction), and
   // megamorphic.
+  // a0 : number of arguments to the construct function
   // a1 : the function to call
   // a2 : cache cell for call target
   Label initialize, done, miss, megamorphic, not_array_function;
@@ -4360,9 +4361,6 @@
   // If we didn't have a matching function, and we didn't find the megamorph
   // sentinel, then we have in the cell either some other function or an
   // AllocationSite. Do a map check on the object in a3.
-  Handle<Map> allocation_site_map(
-      masm->isolate()->heap()->allocation_site_map(),
-      masm->isolate());
   __ lw(t1, FieldMemOperand(a3, 0));
   __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex);
   __ Branch(&miss, ne, t1, Operand(at));
@@ -4401,6 +4399,7 @@
         1 << 5  |  // a1
         1 << 6;    // a2

+    // Arguments register must be smi-tagged to call out.
     __ SmiTag(a0);
     __ MultiPush(kSavedRegs);

=======================================
--- /branches/bleeding_edge/src/mips/macro-assembler-mips.cc Thu Sep 19 15:31:51 2013 UTC
+++ /branches/bleeding_edge/src/mips/macro-assembler-mips.cc Fri Sep 20 09:27:40 2013 UTC
@@ -5603,7 +5603,7 @@
   Branch(&no_memento_available, gt, scratch_reg, Operand(at));
   lw(scratch_reg, MemOperand(scratch_reg, -AllocationMemento::kSize));
   Branch(allocation_memento_present, cond, scratch_reg,
-      Operand(Handle<Map>(isolate()->heap()->allocation_memento_map())));
+         Operand(isolate()->factory()->allocation_memento_map()));
   bind(&no_memento_available);
 }

=======================================
--- /branches/bleeding_edge/src/runtime.cc      Thu Sep 19 21:26:01 2013 UTC
+++ /branches/bleeding_edge/src/runtime.cc      Fri Sep 20 09:27:40 2013 UTC
@@ -518,7 +518,10 @@
     ASSERT(*elements != isolate->heap()->empty_fixed_array());
     Handle<Object> boilerplate =
         Runtime::CreateArrayLiteralBoilerplate(isolate, literals, elements);
-    if (boilerplate.is_null()) return site;
+    if (boilerplate.is_null()) {
+      ASSERT(site.is_null());
+      return site;
+    }
     site = isolate->factory()->NewAllocationSite();
     site->set_transition_info(*boilerplate);
     literals->set(literals_index, *site);
@@ -568,8 +571,7 @@
   AllocationSiteMode mode = AllocationSite::GetMode(
       boilerplate->GetElementsKind());
   if (mode == TRACK_ALLOCATION_SITE) {
-    return isolate->heap()->CopyJSObjectWithAllocationSite(
-        boilerplate, *site);
+    return isolate->heap()->CopyJSObject(boilerplate, *site);
   }

   return isolate->heap()->CopyJSObject(boilerplate);
=======================================
--- /branches/bleeding_edge/src/x64/code-stubs-x64.cc Thu Sep 19 06:07:23 2013 UTC
+++ /branches/bleeding_edge/src/x64/code-stubs-x64.cc Fri Sep 20 09:27:40 2013 UTC
@@ -3231,6 +3231,7 @@
   // Cache the called function in a global property cell.  Cache states
   // are uninitialized, monomorphic (indicated by a JSFunction), and
   // megamorphic.
+  // rax : number of arguments to the construct function
   // rbx : cache cell for call target
   // rdi : the function to call
   Isolate* isolate = masm->isolate();
@@ -3250,9 +3251,8 @@
   // If we didn't have a matching function, and we didn't find the megamorph
   // sentinel, then we have in the cell either some other function or an
   // AllocationSite. Do a map check on the object in rcx.
-  Handle<Map> allocation_site_map(
-      masm->isolate()->heap()->allocation_site_map(),
-      masm->isolate());
+  Handle<Map> allocation_site_map =
+      masm->isolate()->factory()->allocation_site_map();
   __ Cmp(FieldOperand(rcx, 0), allocation_site_map);
   __ j(not_equal, &miss);

@@ -3288,6 +3288,7 @@
   {
     FrameScope scope(masm, StackFrame::INTERNAL);

+    // Arguments register must be smi-tagged to call out.
     __ Integer32ToSmi(rax, rax);
     __ push(rax);
     __ push(rdi);
@@ -6297,9 +6298,8 @@
     __ incl(rdx);
     __ movq(rcx, FieldOperand(rbx, Cell::kValueOffset));
     if (FLAG_debug_code) {
-      Handle<Map> allocation_site_map(
-          masm->isolate()->heap()->allocation_site_map(),
-          masm->isolate());
+      Handle<Map> allocation_site_map =
+          masm->isolate()->factory()->allocation_site_map();
       __ Cmp(FieldOperand(rcx, 0), allocation_site_map);
       __ Assert(equal, kExpectedAllocationSiteInCell);
     }
@@ -6446,7 +6446,7 @@
   __ j(equal, &no_info);
   __ movq(rdx, FieldOperand(rbx, Cell::kValueOffset));
   __ Cmp(FieldOperand(rdx, 0),
-         Handle<Map>(masm->isolate()->heap()->allocation_site_map()));
+         masm->isolate()->factory()->allocation_site_map());
   __ j(not_equal, &no_info);

   __ movq(rdx, FieldOperand(rdx, AllocationSite::kTransitionInfoOffset));
