Revision: 17102
Author:   [email protected]
Date:     Fri Oct  4 07:25:24 2013 UTC
Log:      Make objects embedded in optimized code weak.

This introduces a global weak hash table that maps objects embedded in
optimized code to dependent code lists. Using this table we can deoptimize
optimized code whenever a weak object embedded in the code dies.

BUG=v8:2073
[email protected], [email protected]

Review URL: https://chromiumcodereview.appspot.com/23477061
http://code.google.com/p/v8/source/detail?r=17102

Modified:
 /branches/bleeding_edge/src/arm/lithium-codegen-arm.cc
 /branches/bleeding_edge/src/factory.cc
 /branches/bleeding_edge/src/factory.h
 /branches/bleeding_edge/src/flag-definitions.h
 /branches/bleeding_edge/src/handles.cc
 /branches/bleeding_edge/src/handles.h
 /branches/bleeding_edge/src/heap-inl.h
 /branches/bleeding_edge/src/heap.cc
 /branches/bleeding_edge/src/heap.h
 /branches/bleeding_edge/src/ia32/lithium-codegen-ia32.cc
 /branches/bleeding_edge/src/incremental-marking.cc
 /branches/bleeding_edge/src/mark-compact.cc
 /branches/bleeding_edge/src/mark-compact.h
 /branches/bleeding_edge/src/mips/lithium-codegen-mips.cc
 /branches/bleeding_edge/src/objects-debug.cc
 /branches/bleeding_edge/src/objects-inl.h
 /branches/bleeding_edge/src/objects-visiting-inl.h
 /branches/bleeding_edge/src/objects.cc
 /branches/bleeding_edge/src/objects.h
 /branches/bleeding_edge/src/serialize.cc
 /branches/bleeding_edge/src/x64/lithium-codegen-x64.cc

=======================================
--- /branches/bleeding_edge/src/arm/lithium-codegen-arm.cc     Fri Oct  4 07:13:43 2013 UTC
+++ /branches/bleeding_edge/src/arm/lithium-codegen-arm.cc     Fri Oct  4 07:25:24 2013 UTC
@@ -834,26 +834,31 @@

 void LCodeGen::RegisterDependentCodeForEmbeddedMaps(Handle<Code> code) {
   ZoneList<Handle<Map> > maps(1, zone());
+  ZoneList<Handle<JSObject> > objects(1, zone());
   int mode_mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
   for (RelocIterator it(*code, mode_mask); !it.done(); it.next()) {
-    RelocInfo::Mode mode = it.rinfo()->rmode();
-    if (mode == RelocInfo::EMBEDDED_OBJECT &&
-        it.rinfo()->target_object()->IsMap()) {
-      Handle<Map> map(Map::cast(it.rinfo()->target_object()));
-      if (map->CanTransition()) {
+    if (Code::IsWeakEmbeddedObject(code->kind(), it.rinfo()->target_object())) {
+      if (it.rinfo()->target_object()->IsMap()) {
+        Handle<Map> map(Map::cast(it.rinfo()->target_object()));
         maps.Add(map, zone());
+      } else if (it.rinfo()->target_object()->IsJSObject()) {
+        Handle<JSObject> object(JSObject::cast(it.rinfo()->target_object()));
+        objects.Add(object, zone());
       }
     }
   }
 #ifdef VERIFY_HEAP
-  // This disables verification of weak embedded maps after full GC.
+  // This disables verification of weak embedded objects after full GC.
   // AddDependentCode can cause a GC, which would observe the state where
   // this code is not yet in the depended code lists of the embedded maps.
-  NoWeakEmbeddedMapsVerificationScope disable_verification_of_embedded_maps;
+  NoWeakObjectVerificationScope disable_verification_of_embedded_objects;
 #endif
   for (int i = 0; i < maps.length(); i++) {
maps.at(i)->AddDependentCode(DependentCode::kWeaklyEmbeddedGroup, code);
   }
+  for (int i = 0; i < objects.length(); i++) {
+    AddWeakObjectToCodeDependency(isolate()->heap(), objects.at(i), code);
+  }
 }


=======================================
--- /branches/bleeding_edge/src/factory.cc      Mon Sep 23 11:11:31 2013 UTC
+++ /branches/bleeding_edge/src/factory.cc      Fri Oct  4 07:25:24 2013 UTC
@@ -124,6 +124,18 @@
                                                at_least_space_for),
                      ObjectHashTable);
 }
+
+
+Handle<WeakHashTable> Factory::NewWeakHashTable(int at_least_space_for) {
+  ASSERT(0 <= at_least_space_for);
+  CALL_HEAP_FUNCTION(
+      isolate(),
+      WeakHashTable::Allocate(isolate()->heap(),
+                              at_least_space_for,
+                              WeakHashTable::USE_DEFAULT_MINIMUM_CAPACITY,
+                              TENURED),
+      WeakHashTable);
+}


Handle<DescriptorArray> Factory::NewDescriptorArray(int number_of_descriptors,
@@ -598,8 +610,11 @@


 Handle<FixedArray> Factory::CopySizeFixedArray(Handle<FixedArray> array,
-                                               int new_length) {
-  CALL_HEAP_FUNCTION(isolate(), array->CopySize(new_length), FixedArray);
+                                               int new_length,
+                                               PretenureFlag pretenure) {
+  CALL_HEAP_FUNCTION(isolate(),
+                     array->CopySize(new_length, pretenure),
+                     FixedArray);
 }


=======================================
--- /branches/bleeding_edge/src/factory.h       Mon Sep 23 11:11:31 2013 UTC
+++ /branches/bleeding_edge/src/factory.h       Fri Oct  4 07:25:24 2013 UTC
@@ -71,6 +71,8 @@

   Handle<ObjectHashTable> NewObjectHashTable(int at_least_space_for);

+  Handle<WeakHashTable> NewWeakHashTable(int at_least_space_for);
+
   Handle<DescriptorArray> NewDescriptorArray(int number_of_descriptors,
                                              int slack = 0);
   Handle<DeoptimizationInputData> NewDeoptimizationInputData(
@@ -265,7 +267,8 @@
   Handle<FixedArray> CopyFixedArray(Handle<FixedArray> array);

   Handle<FixedArray> CopySizeFixedArray(Handle<FixedArray> array,
-                                        int new_length);
+                                        int new_length,
+ PretenureFlag pretenure = NOT_TENURED);

   Handle<FixedDoubleArray> CopyFixedDoubleArray(
       Handle<FixedDoubleArray> array);
=======================================
--- /branches/bleeding_edge/src/flag-definitions.h     Wed Oct  2 11:43:41 2013 UTC
+++ /branches/bleeding_edge/src/flag-definitions.h     Fri Oct  4 07:25:24 2013 UTC
@@ -511,6 +511,8 @@
             "garbage collect maps from which no objects can be reached")
 DEFINE_bool(weak_embedded_maps_in_optimized_code, true,
             "make maps embedded in optimized code weak")
+DEFINE_bool(weak_embedded_objects_in_optimized_code, true,
+            "make objects embedded in optimized code weak")
 DEFINE_bool(flush_code, true,
             "flush code that we expect not to use again (during full gc)")
 DEFINE_bool(flush_code_incrementally, true,
=======================================
--- /branches/bleeding_edge/src/handles.cc      Mon Sep 30 11:56:52 2013 UTC
+++ /branches/bleeding_edge/src/handles.cc      Fri Oct  4 07:25:24 2013 UTC
@@ -879,6 +879,17 @@
 #endif
   return deferred;
 }
+
+
+void AddWeakObjectToCodeDependency(Heap* heap,
+                                   Handle<Object> object,
+                                   Handle<Code> code) {
+  heap->EnsureWeakObjectToCodeTable();
+  Handle<DependentCode> dep(heap->LookupWeakObjectToCodeDependency(*object));
+  dep = DependentCode::Insert(dep, DependentCode::kWeaklyEmbeddedGroup, code);
+  CALL_HEAP_FUNCTION_VOID(heap->isolate(),
+                          heap->AddWeakObjectToCodeDependency(*object, *dep));
+}


 } }  // namespace v8::internal
=======================================
--- /branches/bleeding_edge/src/handles.h       Mon Sep 30 11:56:52 2013 UTC
+++ /branches/bleeding_edge/src/handles.h       Fri Oct  4 07:25:24 2013 UTC
@@ -321,6 +321,9 @@
                                                Handle<Object> key,
                                                Handle<Object> value);

+void AddWeakObjectToCodeDependency(Heap* heap,
+                                   Handle<Object> object,
+                                   Handle<Code> code);

 // Seal off the current HandleScope so that new handles can only be created
 // if a new HandleScope is entered.
=======================================
--- /branches/bleeding_edge/src/heap-inl.h      Wed Oct  2 11:04:54 2013 UTC
+++ /branches/bleeding_edge/src/heap-inl.h      Fri Oct  4 07:25:24 2013 UTC
@@ -846,15 +846,15 @@


 #ifdef VERIFY_HEAP
-NoWeakEmbeddedMapsVerificationScope::NoWeakEmbeddedMapsVerificationScope() {
+NoWeakObjectVerificationScope::NoWeakObjectVerificationScope() {
   Isolate* isolate = Isolate::Current();
-  isolate->heap()->no_weak_embedded_maps_verification_scope_depth_++;
+  isolate->heap()->no_weak_object_verification_scope_depth_++;
 }


-NoWeakEmbeddedMapsVerificationScope::~NoWeakEmbeddedMapsVerificationScope() {
+NoWeakObjectVerificationScope::~NoWeakObjectVerificationScope() {
   Isolate* isolate = Isolate::Current();
-  isolate->heap()->no_weak_embedded_maps_verification_scope_depth_--;
+  isolate->heap()->no_weak_object_verification_scope_depth_--;
 }
 #endif

=======================================
--- /branches/bleeding_edge/src/heap.cc Wed Oct  2 11:04:54 2013 UTC
+++ /branches/bleeding_edge/src/heap.cc Fri Oct  4 07:25:24 2013 UTC
@@ -143,7 +143,7 @@
       scavenges_since_last_idle_round_(kIdleScavengeThreshold),
       gcs_since_last_deopt_(0),
 #ifdef VERIFY_HEAP
-      no_weak_embedded_maps_verification_scope_depth_(0),
+      no_weak_object_verification_scope_depth_(0),
 #endif
       promotion_queue_(this),
       configured_(false),
@@ -6730,6 +6730,7 @@
   native_contexts_list_ = undefined_value();
   array_buffers_list_ = undefined_value();
   allocation_sites_list_ = undefined_value();
+  weak_object_to_code_table_ = undefined_value();
   return true;
 }

@@ -6875,6 +6876,34 @@
   }
   UNREACHABLE();
 }
+
+
+MaybeObject* Heap::AddWeakObjectToCodeDependency(Object* obj,
+                                                 DependentCode* dep) {
+  ASSERT(!InNewSpace(obj));
+  ASSERT(!InNewSpace(dep));
+  MaybeObject* maybe_obj =
+      WeakHashTable::cast(weak_object_to_code_table_)->Put(obj, dep);
+  WeakHashTable* table;
+  if (!maybe_obj->To(&table)) return maybe_obj;
+  set_weak_object_to_code_table(table);
+ ASSERT_EQ(dep, WeakHashTable::cast(weak_object_to_code_table_)->Lookup(obj));
+  return weak_object_to_code_table_;
+}
+
+
+DependentCode* Heap::LookupWeakObjectToCodeDependency(Object* obj) {
+ Object* dep = WeakHashTable::cast(weak_object_to_code_table_)->Lookup(obj);
+  if (dep->IsDependentCode()) return DependentCode::cast(dep);
+  return DependentCode::cast(empty_fixed_array());
+}
+
+
+void Heap::EnsureWeakObjectToCodeTable() {
+  if (!weak_object_to_code_table()->IsHashTable()) {
+ set_weak_object_to_code_table(*isolate()->factory()->NewWeakHashTable(16));
+  }
+}


 #ifdef DEBUG
=======================================
--- /branches/bleeding_edge/src/heap.h  Wed Oct  2 11:04:54 2013 UTC
+++ /branches/bleeding_edge/src/heap.h  Fri Oct  4 07:25:24 2013 UTC
@@ -1307,6 +1307,8 @@
   }
   Object* allocation_sites_list() { return allocation_sites_list_; }
Object** allocation_sites_list_address() { return &allocation_sites_list_; }
+
+ Object* weak_object_to_code_table() { return weak_object_to_code_table_; }

   // Number of mark-sweeps.
   unsigned int ms_count() { return ms_count_; }
@@ -1399,8 +1401,8 @@
   void Verify();


-  bool weak_embedded_maps_verification_enabled() {
-    return no_weak_embedded_maps_verification_scope_depth_ == 0;
+  bool weak_embedded_objects_verification_enabled() {
+    return no_weak_object_verification_scope_depth_ == 0;
   }
 #endif

@@ -1853,6 +1855,16 @@
     Heap* heap_;
   };

+ MaybeObject* AddWeakObjectToCodeDependency(Object* obj, DependentCode* dep);
+
+  DependentCode* LookupWeakObjectToCodeDependency(Object* obj);
+
+  void InitializeWeakObjectToCodeTable() {
+    set_weak_object_to_code_table(undefined_value());
+  }
+
+  void EnsureWeakObjectToCodeTable();
+
  private:
   Heap();

@@ -1967,10 +1979,16 @@
   bool old_gen_exhausted_;

   // Weak list heads, threaded through the objects.
+  // List heads are initialized lazily and contain the undefined_value at start.
   Object* native_contexts_list_;
   Object* array_buffers_list_;
   Object* allocation_sites_list_;

+  // WeakHashTable that maps objects embedded in optimized code to dependent
+  // code list. It is initialized lazily and contains the undefined_value at
+  // start.
+  Object* weak_object_to_code_table_;
+
   StoreBufferRebuilder store_buffer_rebuilder_;

   struct StringTypeTable {
@@ -2279,6 +2297,15 @@

   void ClearObjectStats(bool clear_last_time_stats = false);

+  void set_weak_object_to_code_table(Object* value) {
+    ASSERT(!InNewSpace(value));
+    weak_object_to_code_table_ = value;
+  }
+
+  Object** weak_object_to_code_table_address() {
+    return &weak_object_to_code_table_;
+  }
+
   static const int kInitialStringTableSize = 2048;
   static const int kInitialEvalCacheSize = 64;
   static const int kInitialNumberStringCacheSize = 256;
@@ -2334,7 +2361,7 @@
   int gcs_since_last_deopt_;

 #ifdef VERIFY_HEAP
-  int no_weak_embedded_maps_verification_scope_depth_;
+  int no_weak_object_verification_scope_depth_;
 #endif

   static const int kMaxMarkSweepsInIdleRound = 7;
@@ -2368,7 +2395,7 @@
   friend class MarkCompactMarkingVisitor;
   friend class MapCompact;
 #ifdef VERIFY_HEAP
-  friend class NoWeakEmbeddedMapsVerificationScope;
+  friend class NoWeakObjectVerificationScope;
 #endif

   DISALLOW_COPY_AND_ASSIGN(Heap);
@@ -2433,10 +2460,10 @@
 };

 #ifdef VERIFY_HEAP
-class NoWeakEmbeddedMapsVerificationScope {
+class NoWeakObjectVerificationScope {
  public:
-  inline NoWeakEmbeddedMapsVerificationScope();
-  inline ~NoWeakEmbeddedMapsVerificationScope();
+  inline NoWeakObjectVerificationScope();
+  inline ~NoWeakObjectVerificationScope();
 };
 #endif

=======================================
--- /branches/bleeding_edge/src/ia32/lithium-codegen-ia32.cc   Fri Oct  4 07:13:43 2013 UTC
+++ /branches/bleeding_edge/src/ia32/lithium-codegen-ia32.cc   Fri Oct  4 07:25:24 2013 UTC
@@ -1127,26 +1127,31 @@

 void LCodeGen::RegisterDependentCodeForEmbeddedMaps(Handle<Code> code) {
   ZoneList<Handle<Map> > maps(1, zone());
+  ZoneList<Handle<JSObject> > objects(1, zone());
   int mode_mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
   for (RelocIterator it(*code, mode_mask); !it.done(); it.next()) {
-    RelocInfo::Mode mode = it.rinfo()->rmode();
-    if (mode == RelocInfo::EMBEDDED_OBJECT &&
-        it.rinfo()->target_object()->IsMap()) {
-      Handle<Map> map(Map::cast(it.rinfo()->target_object()));
-      if (map->CanTransition()) {
+    if (Code::IsWeakEmbeddedObject(code->kind(), it.rinfo()->target_object())) {
+      if (it.rinfo()->target_object()->IsMap()) {
+        Handle<Map> map(Map::cast(it.rinfo()->target_object()));
         maps.Add(map, zone());
+      } else if (it.rinfo()->target_object()->IsJSObject()) {
+        Handle<JSObject> object(JSObject::cast(it.rinfo()->target_object()));
+        objects.Add(object, zone());
       }
     }
   }
 #ifdef VERIFY_HEAP
-  // This disables verification of weak embedded maps after full GC.
+  // This disables verification of weak embedded objects after full GC.
   // AddDependentCode can cause a GC, which would observe the state where
   // this code is not yet in the depended code lists of the embedded maps.
-  NoWeakEmbeddedMapsVerificationScope disable_verification_of_embedded_maps;
+  NoWeakObjectVerificationScope disable_verification_of_embedded_objects;
 #endif
   for (int i = 0; i < maps.length(); i++) {
maps.at(i)->AddDependentCode(DependentCode::kWeaklyEmbeddedGroup, code);
   }
+  for (int i = 0; i < objects.length(); i++) {
+    AddWeakObjectToCodeDependency(isolate()->heap(), objects.at(i), code);
+  }
 }


=======================================
--- /branches/bleeding_edge/src/incremental-marking.cc Wed Sep 11 18:30:01 2013 UTC
+++ /branches/bleeding_edge/src/incremental-marking.cc Fri Oct  4 07:25:24 2013 UTC
@@ -648,6 +648,8 @@
   IncrementalMarkingRootMarkingVisitor visitor(this);
   heap_->IterateStrongRoots(&visitor, VISIT_ONLY_STRONG);

+  heap_->mark_compact_collector()->MarkWeakObjectToCodeTable();
+
   // Ready to start incremental marking.
   if (FLAG_trace_incremental_marking) {
     PrintF("[IncrementalMarking] Running\n");
=======================================
--- /branches/bleeding_edge/src/mark-compact.cc Wed Oct  2 11:04:54 2013 UTC
+++ /branches/bleeding_edge/src/mark-compact.cc Fri Oct  4 07:25:24 2013 UTC
@@ -91,10 +91,8 @@

   void VisitEmbeddedPointer(RelocInfo* rinfo) {
     ASSERT(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
-    if (!FLAG_weak_embedded_maps_in_optimized_code || !FLAG_collect_maps ||
-        rinfo->host()->kind() != Code::OPTIMIZED_FUNCTION ||
-        !rinfo->target_object()->IsMap() ||
-        !Map::cast(rinfo->target_object())->CanTransition()) {
+    if (!Code::IsWeakEmbeddedObject(rinfo->host()->kind(),
+                                    rinfo->target_object())) {
       VisitPointer(rinfo->target_object_address());
     }
   }
@@ -433,9 +431,8 @@
 #endif

 #ifdef VERIFY_HEAP
-  if (FLAG_collect_maps && FLAG_weak_embedded_maps_in_optimized_code &&
-      heap()->weak_embedded_maps_verification_enabled()) {
-    VerifyWeakEmbeddedMapsInOptimizedCode();
+  if (heap()->weak_embedded_objects_verification_enabled()) {
+    VerifyWeakEmbeddedObjectsInOptimizedCode();
   }
   if (FLAG_collect_maps && FLAG_omit_map_checks_for_leaf_maps) {
     VerifyOmittedMapChecks();
@@ -501,7 +498,7 @@
 }


-void MarkCompactCollector::VerifyWeakEmbeddedMapsInOptimizedCode() {
+void MarkCompactCollector::VerifyWeakEmbeddedObjectsInOptimizedCode() {
   HeapObjectIterator code_iterator(heap()->code_space());
   for (HeapObject* obj = code_iterator.Next();
        obj != NULL;
@@ -509,7 +506,7 @@
     Code* code = Code::cast(obj);
     if (code->kind() != Code::OPTIMIZED_FUNCTION) continue;
     if (WillBeDeoptimized(code)) continue;
-    code->VerifyEmbeddedMapsDependency();
+    code->VerifyEmbeddedObjectsDependency();
   }
 }

@@ -1473,7 +1470,7 @@
     // Mark the backing hash table without pushing it on the marking stack.
     Object* table_object = weak_collection->table();
     if (!table_object->IsHashTable()) return;
-    ObjectHashTable* table = ObjectHashTable::cast(table_object);
+    WeakHashTable* table = WeakHashTable::cast(table_object);
     Object** table_slot =
HeapObject::RawField(weak_collection, JSWeakCollection::kTableOffset);
     MarkBit table_mark = Marking::MarkBitFrom(table);
@@ -2115,6 +2112,8 @@
   // Handle the string table specially.
   MarkStringTable(visitor);

+  MarkWeakObjectToCodeTable();
+
   // There may be overflowed objects in the heap.  Visit them now.
   while (marking_deque_.overflowed()) {
     RefillMarkingDeque();
@@ -2153,6 +2152,16 @@
   }
   ref_groups->Rewind(last);
 }
+
+
+void MarkCompactCollector::MarkWeakObjectToCodeTable() {
+  HeapObject* weak_object_to_code_table =
+      HeapObject::cast(heap()->weak_object_to_code_table());
+  if (!IsMarked(weak_object_to_code_table)) {
+    MarkBit mark = Marking::MarkBitFrom(weak_object_to_code_table);
+    SetMark(weak_object_to_code_table, mark);
+  }
+}


 // Mark all objects reachable from the objects on the marking stack.
@@ -2522,7 +2531,8 @@
     if (map_mark.Get()) {
       ClearNonLiveDependentCode(map->dependent_code());
     } else {
-      ClearAndDeoptimizeDependentCode(map);
+      ClearAndDeoptimizeDependentCode(map->dependent_code());
+ map->set_dependent_code(DependentCode::cast(heap()->empty_fixed_array()));
     }
   }

@@ -2536,6 +2546,31 @@
ClearNonLiveDependentCode(PropertyCell::cast(cell)->dependent_code());
     }
   }
+
+  if (heap_->weak_object_to_code_table()->IsHashTable()) {
+    WeakHashTable* table =
+        WeakHashTable::cast(heap_->weak_object_to_code_table());
+    uint32_t capacity = table->Capacity();
+    for (uint32_t i = 0; i < capacity; i++) {
+      uint32_t key_index = table->EntryToIndex(i);
+      Object* key = table->get(key_index);
+      if (!table->IsKey(key)) continue;
+      uint32_t value_index = table->EntryToValueIndex(i);
+      Object* value = table->get(value_index);
+      if (IsMarked(key)) {
+        if (!IsMarked(value)) {
+          HeapObject* obj = HeapObject::cast(value);
+          MarkBit mark = Marking::MarkBitFrom(obj);
+          SetMark(obj, mark);
+        }
+        ClearNonLiveDependentCode(DependentCode::cast(value));
+      } else {
+        ClearAndDeoptimizeDependentCode(DependentCode::cast(value));
+        table->set(key_index, heap_->the_hole_value());
+        table->set(value_index, heap_->the_hole_value());
+      }
+    }
+  }
 }


@@ -2601,9 +2636,9 @@
 }


-void MarkCompactCollector::ClearAndDeoptimizeDependentCode(Map* map) {
+void MarkCompactCollector::ClearAndDeoptimizeDependentCode(
+    DependentCode* entries) {
   DisallowHeapAllocation no_allocation;
-  DependentCode* entries = map->dependent_code();
   DependentCode::GroupStartIndexes starts(entries);
   int number_of_entries = starts.number_of_entries();
   if (number_of_entries == 0) return;
@@ -2619,7 +2654,6 @@
     }
     entries->clear_at(i);
   }
- map->set_dependent_code(DependentCode::cast(heap()->empty_fixed_array()));
 }


@@ -3457,6 +3491,13 @@
   updating_visitor.VisitPointer(heap_->native_contexts_list_address());

   heap_->string_table()->Iterate(&updating_visitor);
+ updating_visitor.VisitPointer(heap_->weak_object_to_code_table_address());
+  if (heap_->weak_object_to_code_table()->IsHashTable()) {
+    WeakHashTable* table =
+        WeakHashTable::cast(heap_->weak_object_to_code_table());
+    table->Iterate(&updating_visitor);
+    table->Rehash(heap_->undefined_value());
+  }

   // Update pointers from external string table.
   heap_->UpdateReferencesInExternalStringTable(
=======================================
--- /branches/bleeding_edge/src/mark-compact.h  Mon Sep 30 14:06:43 2013 UTC
+++ /branches/bleeding_edge/src/mark-compact.h  Fri Oct  4 07:25:24 2013 UTC
@@ -637,7 +637,7 @@
   void VerifyMarkbitsAreClean();
   static void VerifyMarkbitsAreClean(PagedSpace* space);
   static void VerifyMarkbitsAreClean(NewSpace* space);
-  void VerifyWeakEmbeddedMapsInOptimizedCode();
+  void VerifyWeakEmbeddedObjectsInOptimizedCode();
   void VerifyOmittedMapChecks();
 #endif

@@ -734,6 +734,10 @@
   bool sequential_sweeping() const {
     return sequential_sweeping_;
   }
+
+ // Mark the global table which maps weak objects to dependent code without
+  // marking its contents.
+  void MarkWeakObjectToCodeTable();

  private:
   MarkCompactCollector();
@@ -884,7 +888,7 @@
   void ClearNonLivePrototypeTransitions(Map* map);
   void ClearNonLiveMapTransitions(Map* map, MarkBit map_mark);

-  void ClearAndDeoptimizeDependentCode(Map* map);
+  void ClearAndDeoptimizeDependentCode(DependentCode* dependent_code);
   void ClearNonLiveDependentCode(DependentCode* dependent_code);

   // Marking detaches initial maps from SharedFunctionInfo objects
=======================================
--- /branches/bleeding_edge/src/mips/lithium-codegen-mips.cc   Fri Oct  4 07:13:43 2013 UTC
+++ /branches/bleeding_edge/src/mips/lithium-codegen-mips.cc   Fri Oct  4 07:25:24 2013 UTC
@@ -807,26 +807,31 @@

 void LCodeGen::RegisterDependentCodeForEmbeddedMaps(Handle<Code> code) {
   ZoneList<Handle<Map> > maps(1, zone());
+  ZoneList<Handle<JSObject> > objects(1, zone());
   int mode_mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
   for (RelocIterator it(*code, mode_mask); !it.done(); it.next()) {
-    RelocInfo::Mode mode = it.rinfo()->rmode();
-    if (mode == RelocInfo::EMBEDDED_OBJECT &&
-        it.rinfo()->target_object()->IsMap()) {
-      Handle<Map> map(Map::cast(it.rinfo()->target_object()));
-      if (map->CanTransition()) {
+    if (Code::IsWeakEmbeddedObject(code->kind(), it.rinfo()->target_object())) {
+      if (it.rinfo()->target_object()->IsMap()) {
+        Handle<Map> map(Map::cast(it.rinfo()->target_object()));
         maps.Add(map, zone());
+      } else if (it.rinfo()->target_object()->IsJSObject()) {
+        Handle<JSObject> object(JSObject::cast(it.rinfo()->target_object()));
+        objects.Add(object, zone());
       }
     }
   }
 #ifdef VERIFY_HEAP
-  // This disables verification of weak embedded maps after full GC.
+  // This disables verification of weak embedded objects after full GC.
   // AddDependentCode can cause a GC, which would observe the state where
   // this code is not yet in the depended code lists of the embedded maps.
-  NoWeakEmbeddedMapsVerificationScope disable_verification_of_embedded_maps;
+  NoWeakObjectVerificationScope disable_verification_of_embedded_objects;
 #endif
   for (int i = 0; i < maps.length(); i++) {
maps.at(i)->AddDependentCode(DependentCode::kWeaklyEmbeddedGroup, code);
   }
+  for (int i = 0; i < objects.length(); i++) {
+    AddWeakObjectToCodeDependency(isolate()->heap(), objects.at(i), code);
+  }
 }


=======================================
--- /branches/bleeding_edge/src/objects-debug.cc       Mon Sep 16 16:50:41 2013 UTC
+++ /branches/bleeding_edge/src/objects-debug.cc       Fri Oct  4 07:25:24 2013 UTC
@@ -665,16 +665,20 @@
 }


-void Code::VerifyEmbeddedMapsDependency() {
+void Code::VerifyEmbeddedObjectsDependency() {
   int mode_mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
   for (RelocIterator it(this, mode_mask); !it.done(); it.next()) {
-    RelocInfo::Mode mode = it.rinfo()->rmode();
-    if (mode == RelocInfo::EMBEDDED_OBJECT &&
-      it.rinfo()->target_object()->IsMap()) {
-      Map* map = Map::cast(it.rinfo()->target_object());
-      if (map->CanTransition()) {
+    Object* obj = it.rinfo()->target_object();
+    if (IsWeakEmbeddedObject(kind(), obj)) {
+      if (obj->IsMap()) {
+        Map* map = Map::cast(obj);
         CHECK(map->dependent_code()->Contains(
             DependentCode::kWeaklyEmbeddedGroup, this));
+      } else if (obj->IsJSObject()) {
+ Object* raw_table = GetIsolate()->heap()->weak_object_to_code_table();
+        WeakHashTable* table = WeakHashTable::cast(raw_table);
+        CHECK(DependentCode::cast(table->Lookup(obj))->Contains(
+            DependentCode::kWeaklyEmbeddedGroup, this));
       }
     }
   }
=======================================
--- /branches/bleeding_edge/src/objects-inl.h   Wed Oct  2 08:27:33 2013 UTC
+++ /branches/bleeding_edge/src/objects-inl.h   Fri Oct  4 07:25:24 2013 UTC
@@ -5953,6 +5953,34 @@
                                                        Object* key) {
   return key;
 }
+
+
+template <int entrysize>
+bool WeakHashTableShape<entrysize>::IsMatch(Object* key, Object* other) {
+  return key->SameValue(other);
+}
+
+
+template <int entrysize>
+uint32_t WeakHashTableShape<entrysize>::Hash(Object* key) {
+  intptr_t hash = reinterpret_cast<intptr_t>(key);
+  return (uint32_t)(hash & 0xFFFFFFFF);
+}
+
+
+template <int entrysize>
+uint32_t WeakHashTableShape<entrysize>::HashForObject(Object* key,
+                                                      Object* other) {
+  intptr_t hash = reinterpret_cast<intptr_t>(other);
+  return (uint32_t)(hash & 0xFFFFFFFF);
+}
+
+
+template <int entrysize>
+MaybeObject* WeakHashTableShape<entrysize>::AsObject(Heap* heap,
+                                                    Object* key) {
+  return key;
+}


 void Map::ClearCodeCache(Heap* heap) {
=======================================
--- /branches/bleeding_edge/src/objects-visiting-inl.h Wed Sep 11 10:51:06 2013 UTC
+++ /branches/bleeding_edge/src/objects-visiting-inl.h Fri Oct  4 07:25:24 2013 UTC
@@ -261,10 +261,8 @@
   ASSERT(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
   ASSERT(!rinfo->target_object()->IsConsString());
   HeapObject* object = HeapObject::cast(rinfo->target_object());
-  if (!FLAG_weak_embedded_maps_in_optimized_code || !FLAG_collect_maps ||
-      rinfo->host()->kind() != Code::OPTIMIZED_FUNCTION ||
-      !object->IsMap() || !Map::cast(object)->CanTransition()) {
-    heap->mark_compact_collector()->RecordRelocSlot(rinfo, object);
+  heap->mark_compact_collector()->RecordRelocSlot(rinfo, object);
+  if (!Code::IsWeakEmbeddedObject(rinfo->host()->kind(), object)) {
     StaticVisitor::MarkObject(heap, object);
   }
 }
=======================================
--- /branches/bleeding_edge/src/objects.cc      Wed Oct  2 17:23:30 2013 UTC
+++ /branches/bleeding_edge/src/objects.cc      Fri Oct  4 07:25:24 2013 UTC
@@ -7732,11 +7732,11 @@
 }


-MaybeObject* FixedArray::CopySize(int new_length) {
+MaybeObject* FixedArray::CopySize(int new_length, PretenureFlag pretenure) {
   Heap* heap = GetHeap();
   if (new_length == 0) return heap->empty_fixed_array();
   Object* obj;
-  { MaybeObject* maybe_obj = heap->AllocateFixedArray(new_length);
+ { MaybeObject* maybe_obj = heap->AllocateFixedArray(new_length, pretenure);
     if (!maybe_obj->ToObject(&obj)) return maybe_obj;
   }
   FixedArray* result = FixedArray::cast(obj);
@@ -11022,6 +11022,22 @@
 }


+bool Code::IsWeakEmbeddedObject(Kind kind, Object* object) {
+  if (kind != Code::OPTIMIZED_FUNCTION) return false;
+
+  if (object->IsMap()) {
+    return Map::cast(object)->CanTransition() &&
+           FLAG_collect_maps &&
+           FLAG_weak_embedded_maps_in_optimized_code;
+  }
+
+  if (object->IsJSObject()) {
+    return FLAG_weak_embedded_objects_in_optimized_code;
+  }
+
+  return false;
+}
+
 MaybeObject* JSObject::SetFastDoubleElementsCapacityAndLength(
     int capacity,
     int length) {
@@ -11382,7 +11398,7 @@
     int capacity = kCodesStartIndex + number_of_entries + 1;
     if (capacity > 5) capacity = capacity * 5 / 4;
     Handle<DependentCode> new_entries = Handle<DependentCode>::cast(
-        factory->CopySizeFixedArray(entries, capacity));
+        factory->CopySizeFixedArray(entries, capacity, TENURED));
     // The number of codes can change after GC.
     starts.Recompute(*entries);
     start = starts.at(group);
@@ -13887,7 +13903,9 @@


 template<typename Shape, typename Key>
-MaybeObject* HashTable<Shape, Key>::EnsureCapacity(int n, Key key) {
+MaybeObject* HashTable<Shape, Key>::EnsureCapacity(int n,
+                                                   Key key,
+ PretenureFlag pretenure) {
   int capacity = Capacity();
   int nof = NumberOfElements() + n;
   int nod = NumberOfDeletedElements();
@@ -13900,14 +13918,14 @@
   }

   const int kMinCapacityForPretenure = 256;
-  bool pretenure =
- (capacity > kMinCapacityForPretenure) && !GetHeap()->InNewSpace(this);
+  bool should_pretenure = pretenure == TENURED ||
+ ((capacity > kMinCapacityForPretenure) && !GetHeap()->InNewSpace(this));
   Object* obj;
   { MaybeObject* maybe_obj =
         Allocate(GetHeap(),
                  nof * 2,
                  USE_DEFAULT_MINIMUM_CAPACITY,
-                 pretenure ? TENURED : NOT_TENURED);
+                 should_pretenure ? TENURED : NOT_TENURED);
     if (!maybe_obj->ToObject(&obj)) return maybe_obj;
   }

@@ -13975,6 +13993,8 @@

 template class HashTable<ObjectHashTableShape<2>, Object*>;

+template class HashTable<WeakHashTableShape<2>, Object*>;
+
 template class Dictionary<NameDictionaryShape, Name*>;

 template class Dictionary<SeededNumberDictionaryShape, uint32_t>;
@@ -15615,6 +15635,41 @@
   set_the_hole(EntryToIndex(entry) + 1);
   ElementRemoved();
 }
+
+
+Object* WeakHashTable::Lookup(Object* key) {
+  ASSERT(IsKey(key));
+  int entry = FindEntry(key);
+  if (entry == kNotFound) return GetHeap()->the_hole_value();
+  return get(EntryToValueIndex(entry));
+}
+
+
+MaybeObject* WeakHashTable::Put(Object* key, Object* value) {
+  ASSERT(IsKey(key));
+  int entry = FindEntry(key);
+  // Key is already in table, just overwrite value.
+  if (entry != kNotFound) {
+    set(EntryToValueIndex(entry), value);
+    return this;
+  }
+
+  // Check whether the hash table should be extended.
+  Object* obj;
+  { MaybeObject* maybe_obj = EnsureCapacity(1, key, TENURED);
+    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
+  }
+  WeakHashTable* table = WeakHashTable::cast(obj);
+  table->AddEntry(table->FindInsertionEntry(Hash(key)), key, value);
+  return table;
+}
+
+
+void WeakHashTable::AddEntry(int entry, Object* key, Object* value) {
+  set(EntryToIndex(entry), key);
+  set(EntryToValueIndex(entry), value);
+  ElementAdded();
+}


 DeclaredAccessorDescriptorIterator::DeclaredAccessorDescriptorIterator(
=======================================
--- /branches/bleeding_edge/src/objects.h       Wed Oct  2 11:04:54 2013 UTC
+++ /branches/bleeding_edge/src/objects.h       Fri Oct  4 07:25:24 2013 UTC
@@ -1044,7 +1044,8 @@
   V(AccessCheckNeeded)                         \
   V(Cell)                                      \
   V(PropertyCell)                              \
-  V(ObjectHashTable)
+  V(ObjectHashTable)                           \
+  V(WeakHashTable)


 #define ERROR_MESSAGES_LIST(V) \
@@ -2891,7 +2892,8 @@

   // Copy operations.
   MUST_USE_RESULT inline MaybeObject* Copy();
-  MUST_USE_RESULT MaybeObject* CopySize(int new_length);
+  MUST_USE_RESULT MaybeObject* CopySize(int new_length,
+                                        PretenureFlag pretenure = NOT_TENURED);

   // Add the elements of a JSArray to this FixedArray.
   MUST_USE_RESULT MaybeObject* AddKeysFromJSArray(JSArray* array);
@@ -3526,7 +3528,10 @@
   MUST_USE_RESULT MaybeObject* Shrink(Key key);

   // Ensure enough space for n additional elements.
-  MUST_USE_RESULT MaybeObject* EnsureCapacity(int n, Key key);
+  MUST_USE_RESULT MaybeObject* EnsureCapacity(
+      int n,
+      Key key,
+      PretenureFlag pretenure = NOT_TENURED);
 };


@@ -3958,6 +3963,49 @@
   void AddEntry(int entry, Object* key, Object* value);
   void RemoveEntry(int entry);

+  // Returns the index to the value of an entry.
+  static inline int EntryToValueIndex(int entry) {
+    return EntryToIndex(entry) + 1;
+  }
+};
+
+
+template <int entrysize>
+class WeakHashTableShape : public BaseShape<Object*> {
+ public:
+  static inline bool IsMatch(Object* key, Object* other);
+  static inline uint32_t Hash(Object* key);
+  static inline uint32_t HashForObject(Object* key, Object* object);
+  MUST_USE_RESULT static inline MaybeObject* AsObject(Heap* heap,
+                                                      Object* key);
+  static const int kPrefixSize = 0;
+  static const int kEntrySize = entrysize;
+};
+
+
+// WeakHashTable maps keys that are arbitrary objects to object values.
+// It is used for the global weak hash table that maps objects
+// embedded in optimized code to dependent code lists.
+class WeakHashTable: public HashTable<WeakHashTableShape<2>, Object*> {
+ public:
+  static inline WeakHashTable* cast(Object* obj) {
+    ASSERT(obj->IsHashTable());
+    return reinterpret_cast<WeakHashTable*>(obj);
+  }
+
+  // Looks up the value associated with the given key. The hole value is
+  // returned in case the key is not present.
+  Object* Lookup(Object* key);
+
+  // Adds (or overwrites) the value associated with the given key. Mapping a
+  // key to the hole value causes removal of the whole entry.
+  MUST_USE_RESULT MaybeObject* Put(Object* key, Object* value);
+
+ private:
+  friend class MarkCompactCollector;
+
+  void AddEntry(int entry, Object* key, Object* value);
+
   // Returns the index to the value of an entry.
   static inline int EntryToValueIndex(int entry) {
     return EntryToIndex(entry) + 1;
@@ -5144,9 +5192,11 @@
   bool CanDeoptAt(Address pc);

 #ifdef VERIFY_HEAP
-  void VerifyEmbeddedMapsDependency();
+  void VerifyEmbeddedObjectsDependency();
 #endif

+  static bool IsWeakEmbeddedObject(Kind kind, Object* object);
+
   // Max loop nesting marker used to postpose OSR. We don't take loop
   // nesting that is deeper than 5 levels into account.
   static const int kMaxLoopNestingMarker = 6;
=======================================
--- /branches/bleeding_edge/src/serialize.cc    Fri Sep 20 13:19:40 2013 UTC
+++ /branches/bleeding_edge/src/serialize.cc    Fri Oct  4 07:25:24 2013 UTC
@@ -834,6 +834,8 @@
     isolate_->heap()->set_allocation_sites_list(
         isolate_->heap()->undefined_value());
   }
+
+  isolate_->heap()->InitializeWeakObjectToCodeTable();

   // Update data pointers to the external strings containing natives sources.
   for (int i = 0; i < Natives::GetBuiltinsCount(); i++) {
=======================================
--- /branches/bleeding_edge/src/x64/lithium-codegen-x64.cc  Fri Oct  4 07:13:43 2013 UTC
+++ /branches/bleeding_edge/src/x64/lithium-codegen-x64.cc  Fri Oct  4 07:25:24 2013 UTC
@@ -89,9 +89,7 @@
   ASSERT(is_done());
   code->set_stack_slots(GetStackSlotCount());
   code->set_safepoint_table_offset(safepoints_.GetCodeOffset());
-  if (FLAG_weak_embedded_maps_in_optimized_code) {
-    RegisterDependentCodeForEmbeddedMaps(code);
-  }
+  RegisterDependentCodeForEmbeddedMaps(code);
   PopulateDeoptimizationData(code);
   info()->CommitDependencies(code);
 }
@@ -707,26 +705,31 @@

 void LCodeGen::RegisterDependentCodeForEmbeddedMaps(Handle<Code> code) {
   ZoneList<Handle<Map> > maps(1, zone());
+  ZoneList<Handle<JSObject> > objects(1, zone());
   int mode_mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
   for (RelocIterator it(*code, mode_mask); !it.done(); it.next()) {
-    RelocInfo::Mode mode = it.rinfo()->rmode();
-    if (mode == RelocInfo::EMBEDDED_OBJECT &&
-        it.rinfo()->target_object()->IsMap()) {
-      Handle<Map> map(Map::cast(it.rinfo()->target_object()));
-      if (map->CanTransition()) {
+    if (Code::IsWeakEmbeddedObject(code->kind(), it.rinfo()->target_object())) {
+      if (it.rinfo()->target_object()->IsMap()) {
+        Handle<Map> map(Map::cast(it.rinfo()->target_object()));
         maps.Add(map, zone());
+      } else if (it.rinfo()->target_object()->IsJSObject()) {
+        Handle<JSObject> object(JSObject::cast(it.rinfo()->target_object()));
+        objects.Add(object, zone());
       }
     }
   }
 #ifdef VERIFY_HEAP
-  // This disables verification of weak embedded maps after full GC.
+  // This disables verification of weak embedded objects after full GC.
   // AddDependentCode can cause a GC, which would observe the state where
   // this code is not yet in the depended code lists of the embedded maps.
-  NoWeakEmbeddedMapsVerificationScope disable_verification_of_embedded_maps;
+  NoWeakObjectVerificationScope disable_verification_of_embedded_objects;
 #endif
   for (int i = 0; i < maps.length(); i++) {
     maps.at(i)->AddDependentCode(DependentCode::kWeaklyEmbeddedGroup, code);
   }
+  for (int i = 0; i < objects.length(); i++) {
+    AddWeakObjectToCodeDependency(isolate()->heap(), objects.at(i), code);
+  }
 }


--
--
v8-dev mailing list
[email protected]
http://groups.google.com/group/v8-dev
--- You received this message because you are subscribed to the Google Groups "v8-dev" group.
To unsubscribe from this group and stop receiving emails from it, send an email 
to [email protected].
For more options, visit https://groups.google.com/groups/opt_out.

Reply via email to