Reviewers: ulan,

Description:
Just visit young array buffers during scavenge.

BUG=

Please review this at https://codereview.chromium.org/881763005/

Base URL: https://chromium.googlesource.com/v8/v8.git@split-up-native-context

Affected files (+55, -25 lines):
  M src/heap/heap.h
  M src/heap/heap.cc
  M src/heap/mark-compact.cc
  M src/heap/objects-visiting.h
  M src/heap/objects-visiting.cc


Index: src/heap/heap.cc
diff --git a/src/heap/heap.cc b/src/heap/heap.cc
index cec871403a718d8f2a88b8666af713e48ba56f54..68d709e98c13e4914bd687947448c6c0faad858c 100644
--- a/src/heap/heap.cc
+++ b/src/heap/heap.cc
@@ -145,7 +145,8 @@ Heap::Heap()
       external_string_table_(this),
       chunks_queued_for_free_(NULL),
       gc_callbacks_depth_(0),
-      deserialization_complete_(false) {
+      deserialization_complete_(false),
+      promotion_failure_(false) {
 // Allow build-time customization of the max semispace size. Building
 // V8 with snapshots and a non-default max semispace size is much
 // easier if you can define it as part of the build environment.
@@ -609,6 +610,8 @@ void Heap::GarbageCollectionEpilogue() {
   // Process pretenuring feedback and update allocation sites.
   ProcessPretenuringFeedback();

+  promotion_failure_ = false;
+
 #ifdef VERIFY_HEAP
   if (FLAG_verify_heap) {
     Verify();
@@ -1690,28 +1693,30 @@ void Heap::UpdateReferencesInExternalStringTable(


 void Heap::ProcessAllWeakReferences(WeakObjectRetainer* retainer) {
-  ProcessArrayBuffers(retainer);
+  ProcessArrayBuffers(retainer, false);
   ProcessNativeContexts(retainer);
   ProcessAllocationSites(retainer);
 }


 void Heap::ProcessYoungWeakReferences(WeakObjectRetainer* retainer) {
-  ProcessArrayBuffers(retainer);
+  ProcessArrayBuffers(retainer, true);
   ProcessNativeContexts(retainer);
 }


 void Heap::ProcessNativeContexts(WeakObjectRetainer* retainer) {
-  Object* head = VisitWeakList<Context>(this, native_contexts_list(), retainer);
+  Object* head =
+      VisitWeakList<Context>(this, native_contexts_list(), retainer, false);
   // Update the head of the list of contexts.
   set_native_contexts_list(head);
 }


-void Heap::ProcessArrayBuffers(WeakObjectRetainer* retainer) {
-  Object* array_buffer_obj =
-      VisitWeakList<JSArrayBuffer>(this, array_buffers_list(), retainer);
+void Heap::ProcessArrayBuffers(WeakObjectRetainer* retainer,
+                               bool stop_after_young) {
+  Object* array_buffer_obj = VisitWeakList<JSArrayBuffer>(
+      this, array_buffers_list(), retainer, stop_after_young);
   set_array_buffers_list(array_buffer_obj);
 }

@@ -1728,8 +1733,8 @@ void Heap::TearDownArrayBuffers() {


 void Heap::ProcessAllocationSites(WeakObjectRetainer* retainer) {
-  Object* allocation_site_obj =
-      VisitWeakList<AllocationSite>(this, allocation_sites_list(), retainer);
+  Object* allocation_site_obj = VisitWeakList<AllocationSite>(
+      this, allocation_sites_list(), retainer, false);
   set_allocation_sites_list(allocation_site_obj);
 }

@@ -2162,6 +2167,8 @@ class ScavengingVisitor : public StaticVisitorBase {
       return;
     }

+    heap->set_promotion_failure(true);
+
    // If promotion failed, we try to copy the object to the other semi-space
    if (SemiSpaceCopyObject<alignment>(map, slot, object, object_size)) return;

Index: src/heap/heap.h
diff --git a/src/heap/heap.h b/src/heap/heap.h
index 643a1519f5fe980f5792ba0f6af5dc2ffcaa0b33..7fb189428b165ec8e17f49f99eb1077ae5fc3cee 100644
--- a/src/heap/heap.h
+++ b/src/heap/heap.h
@@ -1442,6 +1442,11 @@ class Heap {

bool deserialization_complete() const { return deserialization_complete_; }

+  bool promotion_failure() const { return promotion_failure_; }
+  void set_promotion_failure(bool promotion_failure) {
+    promotion_failure_ = promotion_failure;
+  }
+
  protected:
   // Methods made available to tests.

@@ -1951,7 +1956,7 @@ class Heap {
   void MarkCompactEpilogue();

   void ProcessNativeContexts(WeakObjectRetainer* retainer);
-  void ProcessArrayBuffers(WeakObjectRetainer* retainer);
+ void ProcessArrayBuffers(WeakObjectRetainer* retainer, bool stop_after_young);
   void ProcessAllocationSites(WeakObjectRetainer* retainer);

// Deopts all code that contains allocation instruction which are tenured or
@@ -2124,6 +2129,10 @@ class Heap {

   bool deserialization_complete_;

+  // Indicates if promotion to old space failed and at least one object was
+  // moved to the other semi-space instead.
+  bool promotion_failure_;
+
   friend class AlwaysAllocateScope;
   friend class Deserializer;
   friend class Factory;
Index: src/heap/mark-compact.cc
diff --git a/src/heap/mark-compact.cc b/src/heap/mark-compact.cc
index fa366eefb6b6a2e5003aa1efe550bd74c4a7ca4f..2cc114737793c42a3367e67b31383d2449a985f5 100644
--- a/src/heap/mark-compact.cc
+++ b/src/heap/mark-compact.cc
@@ -1880,6 +1880,7 @@ int MarkCompactCollector::DiscoverAndEvacuateBlackObjectsOnPage(

       MigrateObject(HeapObject::cast(target), object, size, NEW_SPACE);
       heap()->IncrementSemiSpaceCopiedObjectSize(size);
+      heap()->set_promotion_failure(true);
     }
     *cells = 0;
   }
Index: src/heap/objects-visiting.cc
diff --git a/src/heap/objects-visiting.cc b/src/heap/objects-visiting.cc
index 7b2e2d9a388939440e0aac0bb5c3263fedc4676f..9d83dae1ab94a348755c90285815ecf2c978b480 100644
--- a/src/heap/objects-visiting.cc
+++ b/src/heap/objects-visiting.cc
@@ -191,7 +191,8 @@ struct WeakListVisitor;


 template <class T>
-Object* VisitWeakList(Heap* heap, Object* list, WeakObjectRetainer* retainer) {
+Object* VisitWeakList(Heap* heap, Object* list, WeakObjectRetainer* retainer,
+                      bool stop_after_young) {
   Object* undefined = heap->undefined_value();
   Object* head = undefined;
   T* tail = NULL;
@@ -200,6 +201,8 @@ Object* VisitWeakList(Heap* heap, Object* list, WeakObjectRetainer* retainer) {
   while (list != undefined) {
     // Check whether to keep the candidate in the list.
     T* candidate = reinterpret_cast<T*>(list);
+    T* original_candidate = candidate;
+
     Object* retained = retainer->RetainAs(list);
     if (retained != NULL) {
       if (head == undefined) {
@@ -227,6 +230,19 @@ Object* VisitWeakList(Heap* heap, Object* list, WeakObjectRetainer* retainer) {
       WeakListVisitor<T>::VisitPhantomObject(heap, candidate);
     }

+    // The list of weak objects is usually ordered. It starts with objects
+    // recently allocated in the young generation followed by objects
+    // allocated in the old generation. This invariant holds unless a
+    // promotion failure happens. In that case we have to visit the
+    // whole list. The next successful young generation collection establishes
+    // the invariant again.
+    // For young generation collections we just have to visit young
+    // generation objects.
+    if (stop_after_young && !heap->promotion_failure() && retained != NULL &&
+        !heap->InNewSpace(original_candidate)) {
+      return head;
+    }
+
     // Move to next element in the list.
     list = WeakListVisitor<T>::WeakNext(candidate);
   }
@@ -316,7 +332,8 @@ struct WeakListVisitor<Context> {
   static void DoWeakList(Heap* heap, Context* context,
                          WeakObjectRetainer* retainer, int index) {
     // Visit the weak list, removing dead intermediate elements.
-    Object* list_head = VisitWeakList<T>(heap, context->get(index), retainer);
+    Object* list_head =
+        VisitWeakList<T>(heap, context->get(index), retainer, false);

     // Update the list head.
     context->set(index, list_head, UPDATE_WRITE_BARRIER);
@@ -368,7 +385,7 @@ struct WeakListVisitor<JSArrayBuffer> {
   static void VisitLiveObject(Heap* heap, JSArrayBuffer* array_buffer,
                               WeakObjectRetainer* retainer) {
     Object* typed_array_obj = VisitWeakList<JSArrayBufferView>(
-        heap, array_buffer->weak_first_view(), retainer);
+        heap, array_buffer->weak_first_view(), retainer, false);
     array_buffer->set_weak_first_view(typed_array_obj);
    if (typed_array_obj != heap->undefined_value() && MustRecordSlots(heap)) {
       Object** slot = HeapObject::RawField(array_buffer,
@@ -399,23 +416,18 @@ struct WeakListVisitor<AllocationSite> {
 };


-template Object* VisitWeakList<Code>(Heap* heap, Object* list,
-                                     WeakObjectRetainer* retainer);
-
-
-template Object* VisitWeakList<JSFunction>(Heap* heap, Object* list,
-                                           WeakObjectRetainer* retainer);
-
-
 template Object* VisitWeakList<Context>(Heap* heap, Object* list,
-                                        WeakObjectRetainer* retainer);
+                                        WeakObjectRetainer* retainer,
+                                        bool stop_after_young);


 template Object* VisitWeakList<JSArrayBuffer>(Heap* heap, Object* list,
-                                              WeakObjectRetainer* retainer);
+                                              WeakObjectRetainer* retainer,
+                                              bool stop_after_young);


 template Object* VisitWeakList<AllocationSite>(Heap* heap, Object* list,
-                                               WeakObjectRetainer* retainer);
+                                               WeakObjectRetainer* retainer,
+                                               bool stop_after_young);
 }
 }  // namespace v8::internal
Index: src/heap/objects-visiting.h
diff --git a/src/heap/objects-visiting.h b/src/heap/objects-visiting.h
index a442867569606c1d4471a3ee2720a1fc89cf5b13..5f512f74d928086f15babbc65c944b1bfd40c678 100644
--- a/src/heap/objects-visiting.h
+++ b/src/heap/objects-visiting.h
@@ -489,7 +489,8 @@ class WeakObjectRetainer;
// pointers. The template parameter T is a WeakListVisitor that defines how to
 // access the next-element pointers.
 template <class T>
-Object* VisitWeakList(Heap* heap, Object* list, WeakObjectRetainer* retainer);
+Object* VisitWeakList(Heap* heap, Object* list, WeakObjectRetainer* retainer,
+                      bool stop_after_young);
 }
 }  // namespace v8::internal



--
--
v8-dev mailing list
[email protected]
http://groups.google.com/group/v8-dev
--- You received this message because you are subscribed to the Google Groups "v8-dev" group.
To unsubscribe from this group and stop receiving emails from it, send an email 
to [email protected].
For more options, visit https://groups.google.com/d/optout.

Reply via email to