Reviewers: Vyacheslav Egorov,

Description:
Fix some correctness issues with the store buffer that showed up in
the Mozilla tests.
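
For reviewers skimming the diff: apart from turning the heap.cc write into an
assert, the patch routes every mode change through the new
set_store_buffer_mode(), which traces transitions under --trace-gc.  Below is
a small standalone sketch of those transitions (not V8 code; the enum values
and trace strings are taken from the diff, everything else is simplified for
illustration):

  // Toy model of the store buffer mode transitions; not V8 code.
  #include <cstdio>

  enum StoreBufferMode {
    kStoreBufferFunctional,
    kStoreBufferDisabled,
    kStoreBufferBeingRebuilt
  };

  static bool trace_gc = true;  // stands in for FLAG_trace_gc
  static StoreBufferMode store_buffer_mode = kStoreBufferFunctional;

  // Mirrors the new set_store_buffer_mode(): only real transitions are traced.
  static void SetStoreBufferMode(StoreBufferMode mode) {
    if (trace_gc && mode != store_buffer_mode) {
      if (mode == kStoreBufferDisabled) {
        std::printf("Store buffer overflowed.\n");
      } else if (mode == kStoreBufferBeingRebuilt) {
        std::printf("Store buffer being rebuilt.\n");
      } else if (mode == kStoreBufferFunctional) {
        std::printf("Store buffer reenabled.\n");
      }
    }
    store_buffer_mode = mode;
  }

  int main() {
    SetStoreBufferMode(kStoreBufferDisabled);      // overflow disables the buffer
    SetStoreBufferMode(kStoreBufferBeingRebuilt);  // the next iteration rebuilds it
    SetStoreBufferMode(kStoreBufferFunctional);    // the GC epilogue reenables it
    return 0;
  }

The other functional change appears to be that IteratePointersToNewSpace() now
calls SortUniq() before checking the mode and re-checks it afterwards, so an
overflow detected during compaction falls through to the full rebuild path
instead of iterating an incomplete buffer.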

Please review this at http://codereview.chromium.org/6312147/

SVN Base: http://v8.googlecode.com/svn/branches/experimental/gc/

Affected files:
  M     src/heap.cc
  M     src/store-buffer-inl.h
  M     src/store-buffer.h
  M     src/store-buffer.cc


Index: src/heap.cc
===================================================================
--- src/heap.cc (revision 6616)
+++ src/heap.cc (working copy)
@@ -4125,8 +4125,8 @@
       if (Heap::InNewSpace(*slot)) {
         ASSERT(Heap::InToSpace(*slot));
         ASSERT((*slot)->IsHeapObject());
-        StoreBuffer::EnterDirectlyIntoStoreBuffer(
-            reinterpret_cast<Address>(slot));
+        ASSERT(StoreBuffer::CellIsInStoreBuffer(
+            reinterpret_cast<Address>(slot)));
       }
     }
     slot_address += kPointerSize;
@@ -4231,7 +4231,6 @@
       end = page->CachedAllocationWatermark();
     }

-
     Address map_aligned_current = page->ObjectAreaStart();

     ASSERT(map_aligned_current == MapStartAlign(map_aligned_current));
Index: src/store-buffer-inl.h
===================================================================
--- src/store-buffer-inl.h      (revision 6616)
+++ src/store-buffer-inl.h      (working copy)
@@ -52,6 +52,22 @@
 }


+void StoreBuffer::set_store_buffer_mode(StoreBuffer::StoreBufferMode mode) {
+  if (FLAG_trace_gc) {
+    if (mode != store_buffer_mode_) {
+      if (mode == kStoreBufferDisabled) {
+        PrintF("Store buffer overflowed.\n");
+      } else if (mode == kStoreBufferBeingRebuilt) {
+        PrintF("Store buffer being rebuilt.\n");
+      } else if (mode == kStoreBufferFunctional) {
+        PrintF("Store buffer reenabled.\n");
+      }
+    }
+  }
+  store_buffer_mode_ = mode;
+}
+
+
 void StoreBuffer::EnterDirectlyIntoStoreBuffer(Address addr) {
   if (store_buffer_rebuilding_enabled_) {
     Address* top = old_top_;
@@ -59,9 +75,10 @@
     old_top_ = top;
     if (top >= old_limit_) {
       Counters::store_buffer_overflows.Increment();
-      store_buffer_mode_ = kStoreBufferDisabled;
+      set_store_buffer_mode(kStoreBufferDisabled);
       old_top_ = old_start_;
     }
+    old_buffer_is_sorted_ = false;
   }
 }

Index: src/store-buffer.cc
===================================================================
--- src/store-buffer.cc (revision 6616)
+++ src/store-buffer.cc (working copy)
@@ -81,7 +81,7 @@
   Heap::AddGCPrologueCallback(&GCPrologue, kGCTypeAll);
   Heap::AddGCEpilogueCallback(&GCEpilogue, kGCTypeAll);

-  GCPrologue(kGCTypeMarkSweepCompact, kNoGCCallbackFlags);
+  ZapHashTables();
 }


@@ -146,7 +146,7 @@
 void StoreBuffer::SortUniq() {
   Compact();
   if (old_buffer_is_sorted_) return;
-  if (store_buffer_mode_ == kStoreBufferDisabled) {
+  if (store_buffer_mode() == kStoreBufferDisabled) {
     old_top_ = old_start_;
     return;
   }
@@ -163,7 +163,7 @@

 #ifdef DEBUG
 void StoreBuffer::Clean() {
-  if (store_buffer_mode_ == kStoreBufferDisabled) {
+  if (store_buffer_mode() == kStoreBufferDisabled) {
     old_top_ = old_start_;  // Just clear the cache.
     return;
   }
@@ -194,7 +194,7 @@

 bool StoreBuffer::CellIsInStoreBuffer(Address cell_address) {
   if (!FLAG_enable_slow_asserts) return true;
-  if (store_buffer_mode_ != kStoreBufferFunctional) return true;
+  if (store_buffer_mode() != kStoreBufferFunctional) return true;
   if (in_store_buffer_1_element_cache != NULL &&
       *in_store_buffer_1_element_cache == cell_address) {
     return true;
@@ -236,7 +236,7 @@
 void StoreBuffer::Verify() {
 #ifdef DEBUG
   if (FLAG_verify_heap &&
-      StoreBuffer::store_buffer_mode_ == kStoreBufferFunctional) {
+      StoreBuffer::store_buffer_mode() == kStoreBufferFunctional) {
     Heap::OldPointerSpaceCheckStoreBuffer(Heap::WATERMARK_SHOULD_BE_VALID);
     Heap::MapSpaceCheckStoreBuffer(Heap::WATERMARK_SHOULD_BE_VALID);
     Heap::LargeObjectSpaceCheckStoreBuffer();
@@ -247,19 +247,22 @@

 void StoreBuffer::GCEpilogue(GCType type, GCCallbackFlags flags) {
   during_gc_ = false;
-  if (store_buffer_mode_ == kStoreBufferBeingRebuilt) {
-    store_buffer_mode_ = kStoreBufferFunctional;
+  if (store_buffer_mode() == kStoreBufferBeingRebuilt) {
+    set_store_buffer_mode(kStoreBufferFunctional);
   }
   Verify();
 }


 void StoreBuffer::IteratePointersToNewSpace(ObjectSlotCallback callback) {
-  if (store_buffer_mode_ != kStoreBufferFunctional) {
+  if (store_buffer_mode() == kStoreBufferFunctional) {
+    SortUniq();
+  }
+  if (store_buffer_mode() != kStoreBufferFunctional) {
     old_top_ = old_start_;
     ZapHashTables();
     Heap::public_set_store_buffer_top(start_);
-    store_buffer_mode_ = kStoreBufferBeingRebuilt;
+    set_store_buffer_mode(kStoreBufferBeingRebuilt);
     Heap::IteratePointers(Heap::old_pointer_space(),
                           &Heap::IteratePointersToNewSpace,
                           callback,
@@ -272,7 +275,6 @@

     Heap::lo_space()->IteratePointersToNewSpace(callback);
   } else {
-    SortUniq();
     Address* limit = old_top_;
     old_top_ = old_start_;
     {
@@ -311,7 +313,7 @@
   if (top - start_ > old_limit_ - old_top_) {
     CheckForFullBuffer();
   }
-  if (store_buffer_mode_ == kStoreBufferDisabled) return;
+  if (store_buffer_mode() == kStoreBufferDisabled) return;
   ASSERT(may_move_store_buffer_entries_);
   // Goes through the addresses in the store buffer attempting to remove
   // duplicates.  In the interest of speed this is a lossy operation.  Some
@@ -353,7 +355,9 @@
     // the next two compressions will have enough space in the buffer.  We
     // start by trying a more agressive compression. If this frees up at least
     // half the space then we can keep going, otherwise it is time to brake.
-    SortUniq();
+    if (!during_gc_) {
+      SortUniq();
+    }
     if (old_limit_ - old_top_ < old_limit_ - old_top_) {
       return;
     }
@@ -365,7 +369,7 @@
       // compression to be guaranteed to succeed.
       // TODO(gc): Set a flag to scan all of memory.
       Counters::store_buffer_overflows.Increment();
-      store_buffer_mode_ = kStoreBufferDisabled;
+      set_store_buffer_mode(kStoreBufferDisabled);
     }
   }
 }
Index: src/store-buffer.h
===================================================================
--- src/store-buffer.h  (revision 6616)
+++ src/store-buffer.h  (working copy)
@@ -64,10 +64,6 @@
   // queue and they can overflow this buffer, which we must check for.
   static inline void EnterDirectlyIntoStoreBuffer(Address addr);

-  enum RebuildStoreBufferMode {
-    kRebuildStoreBufferWhileIterating,
-    kPreserveStoreBufferWhileIterating};
-
  // Iterates over all pointers that go from old space to new space. It will
   // delete the store buffer as it starts so the callback should reenter
   // surviving old-to-new pointers into the store buffer to rebuild it.
@@ -94,6 +90,7 @@
   };

   static StoreBufferMode store_buffer_mode() { return store_buffer_mode_; }
+  static inline void set_store_buffer_mode(StoreBufferMode mode);
   static bool old_buffer_is_sorted() { return old_buffer_is_sorted_; }

   // Goes through the store buffer removing pointers to things that have

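For context on the IteratePointersToNewSpace() comment above (the buffer is
emptied as iteration starts and the callback is expected to reenter surviving
old-to-new pointers), here is a minimal sketch of that contract.  It is not
V8 code: the Object typedef, the InNewSpace() predicate and the scavenge
callback are placeholders invented for the sketch.

  #include <cstddef>
  #include <vector>

  typedef void* Object;
  typedef void (*ObjectSlotCallback)(Object** slot);

  static std::vector<Object**> store_buffer;  // stands in for the real buffer

  static bool InNewSpace(Object* obj) {
    return obj != 0;  // placeholder predicate, just for the sketch
  }

  // Surviving old->new pointers are reentered by the callback.
  static void EnterDirectlyIntoStoreBuffer(Object** slot) {
    store_buffer.push_back(slot);
  }

  // The buffer is emptied up front, so only what the callback reenters remains.
  static void IteratePointersToNewSpace(ObjectSlotCallback callback) {
    std::vector<Object**> old_entries;
    old_entries.swap(store_buffer);
    for (size_t i = 0; i < old_entries.size(); i++) {
      Object** slot = old_entries[i];
      if (InNewSpace(*slot)) callback(slot);
    }
  }

  static void ScavengeSlot(Object** slot) {
    // ...evacuate or promote *slot here...
    if (InNewSpace(*slot)) EnterDirectlyIntoStoreBuffer(slot);
  }

  int main() {
    static Object dummy;            // pretend this object lives in new space
    static Object* field = &dummy;  // an old-space slot pointing at it
    store_buffer.push_back(&field);
    IteratePointersToNewSpace(&ScavengeSlot);
    return 0;
  }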

--
v8-dev mailing list
[email protected]
http://groups.google.com/group/v8-dev
