Reviewers: Vyacheslav Egorov,

Message:
Please take a look when you have time. I will commit it once NewGC is stable.

Description:
Shrink the new space and uncommit the marking deque on low-memory notification.

BUG=v8:1669
TEST=cctest/test-heap/CollectingAllAvailableGarbageShrinksNewSpace
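
For context, a rough embedder-side sketch of how this path is reached (not part
of the patch; it assumes the usual v8::V8::LowMemoryNotification() entry point,
which routes into Heap::CollectAllAvailableGarbage(), the function modified
below; OnSystemMemoryPressure() is a hypothetical embedder callback):

  #include <v8.h>

  // Hedged sketch: an embedder reacting to system memory pressure.
  void OnSystemMemoryPressure() {
    // Runs the "collect all available garbage" sequence; with this patch it
    // additionally shrinks the new-space semispaces and uncommits the 4 MB
    // marking deque reservation.
    v8::V8::LowMemoryNotification();
  }

The marking deque's VirtualMemory reservation is kept and only uncommitted, so
EnsureMarkingDequeIsCommitted() can recommit it cheaply before the next
incremental marking cycle.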


Please review this at http://codereview.chromium.org/8065003/

SVN Base: https://v8.googlecode.com/svn/branches/bleeding_edge

Affected files:
  M src/heap.cc
  M src/incremental-marking.h
  M src/incremental-marking.cc
  M test/cctest/test-heap.cc


Index: src/heap.cc
diff --git a/src/heap.cc b/src/heap.cc
index 580a764d334462a5b930aff5cea827151c77d508..ca9e92f9aefce98ea70cc821ce9817c78324d8d0 100644
--- a/src/heap.cc
+++ b/src/heap.cc
@@ -455,6 +455,8 @@ void Heap::CollectAllAvailableGarbage() {
     }
   }
   mark_compact_collector()->SetFlags(kNoGCFlags);
+  new_space_.Shrink();
+  incremental_marking()->UncommitMarkingDeque();
 }


Index: src/incremental-marking.cc
diff --git a/src/incremental-marking.cc b/src/incremental-marking.cc
index d9725244a804aa3c1f6881ed5e2e1b39199a6f66..c0b28f90caf47fb53a1dd2a3ab39883d940fc25d 100644
--- a/src/incremental-marking.cc
+++ b/src/incremental-marking.cc
@@ -41,6 +41,7 @@ IncrementalMarking::IncrementalMarking(Heap* heap)
     : heap_(heap),
       state_(STOPPED),
       marking_deque_memory_(NULL),
+      marking_deque_memory_committed_(false),
       steps_count_(0),
       steps_took_(0),
       longest_step_(0.0),
@@ -370,10 +371,25 @@ static void PatchIncrementalMarkingRecordWriteStubs(
 void IncrementalMarking::EnsureMarkingDequeIsCommitted() {
   if (marking_deque_memory_ == NULL) {
     marking_deque_memory_ = new VirtualMemory(4 * MB);
-    marking_deque_memory_->Commit(
+  }
+  if (!marking_deque_memory_committed_) {
+    bool success = marking_deque_memory_->Commit(
         reinterpret_cast<Address>(marking_deque_memory_->address()),
         marking_deque_memory_->size(),
         false);  // Not executable.
+    CHECK(success);
+    marking_deque_memory_committed_ = true;
+  }
+}
+
+void IncrementalMarking::UncommitMarkingDeque() {
+  ASSERT(state_ == STOPPED);
+  if (marking_deque_memory_committed_) {
+    bool success = marking_deque_memory_->Uncommit(
+        reinterpret_cast<Address>(marking_deque_memory_->address()),
+        marking_deque_memory_->size());
+    CHECK(success);
+    marking_deque_memory_committed_ = false;
   }
 }

Index: src/incremental-marking.h
diff --git a/src/incremental-marking.h b/src/incremental-marking.h
index cd1a41142107b22e36012739ab6651f53b594f00..d8cf159d12c979cd6545613be7f5796d6a48f00b 100644
--- a/src/incremental-marking.h
+++ b/src/incremental-marking.h
@@ -197,6 +197,8 @@ class IncrementalMarking {
     }
   }

+  void UncommitMarkingDeque();
+
  private:
   void set_should_hurry(bool val) {
     should_hurry_ = val;
@@ -229,6 +231,7 @@ class IncrementalMarking {
   bool is_compacting_;

   VirtualMemory* marking_deque_memory_;
+  bool marking_deque_memory_committed_;
   MarkingDeque marking_deque_;

   int steps_count_;
Index: test/cctest/test-heap.cc
diff --git a/test/cctest/test-heap.cc b/test/cctest/test-heap.cc
index 40482be7edbd755cc06430d60b8816f448ad2cb2..98ef77925ed5eb575053cf4390b42ff05ccf01be 100644
--- a/test/cctest/test-heap.cc
+++ b/test/cctest/test-heap.cc
@@ -1222,6 +1222,19 @@ TEST(TestSizeOfObjectsVsHeapIteratorPrecision) {
 }


+static void FillUpNewSpace(NewSpace* new_space) {
+  // Fill up new space to the point that it is completely full. Make sure
+  // that the scavenger does not undo the filling.
+  v8::HandleScope scope;
+  AlwaysAllocateScope always_allocate;
+  intptr_t available = new_space->EffectiveCapacity() - new_space->Size();
+  intptr_t number_of_fillers = (available / FixedArray::SizeFor(1000)) - 10;
+  for (intptr_t i = 0; i < number_of_fillers; i++) {
+    CHECK(HEAP->InNewSpace(*FACTORY->NewFixedArray(1000, NOT_TENURED)));
+  }
+}
+
+
 TEST(GrowAndShrinkNewSpace) {
   InitializeVM();
   NewSpace* new_space = HEAP->new_space();
@@ -1233,18 +1246,8 @@ TEST(GrowAndShrinkNewSpace) {
   new_capacity = new_space->Capacity();
   CHECK(2 * old_capacity == new_capacity);

-  // Fill up new space to the point that it is completely full. Make sure
-  // that the scavenger does not undo the filling.
   old_capacity = new_space->Capacity();
-  {
-    v8::HandleScope scope;
-    AlwaysAllocateScope always_allocate;
-    intptr_t available = new_space->EffectiveCapacity() - new_space->Size();
-    intptr_t number_of_fillers = (available / FixedArray::SizeFor(1000)) - 10;
-    for (intptr_t i = 0; i < number_of_fillers; i++) {
-      CHECK(HEAP->InNewSpace(*FACTORY->NewFixedArray(1000, NOT_TENURED)));
-    }
-  }
+  FillUpNewSpace(new_space);
   new_capacity = new_space->Capacity();
   CHECK(old_capacity == new_capacity);

@@ -1272,3 +1275,19 @@ TEST(GrowAndShrinkNewSpace) {
   new_capacity = new_space->Capacity();
   CHECK(old_capacity == new_capacity);
 }
+
+
+TEST(CollectingAllAvailableGarbageShrinksNewSpace) {
+  InitializeVM();
+  v8::HandleScope scope;
+  NewSpace* new_space = HEAP->new_space();
+  intptr_t old_capacity, new_capacity;
+  old_capacity = new_space->Capacity();
+  new_space->Grow();
+  new_capacity = new_space->Capacity();
+  CHECK(2 * old_capacity == new_capacity);
+  FillUpNewSpace(new_space);
+  HEAP->CollectAllAvailableGarbage();
+  new_capacity = new_space->Capacity();
+  CHECK(old_capacity == new_capacity);
+}

