Revision: 9586
Author:   ri...@chromium.org
Date:     Tue Oct 11 12:09:42 2011
Log:      Merge revision 9585 and revision 9580 to trunk.

Revision 9585:
Fix free list node ending up on evacuation candidate.

This is a temporary fix which avoids compaction when incremental marking
is restarted during an old-space-step. That could turn the page that
holds the chosen free list node into an evacuation candidate. It could
also cause several other inconsistencies if it happens during scavenge.

Revision 9580:
Adjust assertions in the incremental marking.

Now we are completely transferring marks when shifting arrays in-place.

This might lead to white free space objects in the marking deque.
Review URL: http://codereview.chromium.org/8194012
http://code.google.com/p/v8/source/detail?r=9586

Modified:
 /trunk/src/incremental-marking.cc
 /trunk/src/incremental-marking.h
 /trunk/src/spaces.cc
 /trunk/src/version.cc

=======================================
--- /trunk/src/incremental-marking.cc   Mon Oct 10 06:20:40 2011
+++ /trunk/src/incremental-marking.cc   Tue Oct 11 12:09:42 2011
@@ -410,7 +410,7 @@

   if (heap_->old_pointer_space()->IsSweepingComplete() &&
       heap_->old_data_space()->IsSweepingComplete()) {
-    StartMarking();
+    StartMarking(ALLOW_COMPACTION);
   } else {
     if (FLAG_trace_incremental_marking) {
       PrintF("[IncrementalMarking] Start sweeping.\n");
@@ -435,12 +435,12 @@
 }


-void IncrementalMarking::StartMarking() {
+void IncrementalMarking::StartMarking(CompactionFlag flag) {
   if (FLAG_trace_incremental_marking) {
     PrintF("[IncrementalMarking] Start marking\n");
   }

-  is_compacting_ = !FLAG_never_compact &&
+  is_compacting_ = !FLAG_never_compact && (flag == ALLOW_COMPACTION) &&
       heap_->mark_compact_collector()->StartCompaction();

   state_ = MARKING;
@@ -517,7 +517,11 @@
         array[new_top] = dest;
         new_top = ((new_top + 1) & mask);
         ASSERT(new_top != marking_deque_.bottom());
-        ASSERT(Marking::IsGrey(Marking::MarkBitFrom(obj)));
+#ifdef DEBUG
+        MarkBit mark_bit = Marking::MarkBitFrom(obj);
+        ASSERT(Marking::IsGrey(mark_bit) ||
+               (obj->IsFiller() && Marking::IsWhite(mark_bit)));
+#endif
       }
     } else if (obj->map() != filler_map) {
       // Skip one word filler objects that appear on the
@@ -525,7 +529,11 @@
       array[new_top] = obj;
       new_top = ((new_top + 1) & mask);
       ASSERT(new_top != marking_deque_.bottom());
-      ASSERT(Marking::IsGrey(Marking::MarkBitFrom(obj)));
+#ifdef DEBUG
+        MarkBit mark_bit = Marking::MarkBitFrom(obj);
+        ASSERT(Marking::IsGrey(mark_bit) ||
+               (obj->IsFiller() && Marking::IsWhite(mark_bit)));
+#endif
     }
   }
   marking_deque_.set_top(new_top);
@@ -696,7 +704,7 @@
   if (state_ == SWEEPING) {
     if (heap_->old_pointer_space()->AdvanceSweeper(bytes_to_process) &&
         heap_->old_data_space()->AdvanceSweeper(bytes_to_process)) {
-      StartMarking();
+      StartMarking(PREVENT_COMPACTION);
     }
   } else if (state_ == MARKING) {
     Map* filler_map = heap_->one_pointer_filler_map();
@@ -710,7 +718,6 @@
       Map* map = obj->map();
       if (map == filler_map) continue;

-      ASSERT(Marking::IsGrey(Marking::MarkBitFrom(obj)));
       int size = obj->SizeFromMap(map);
       bytes_to_process -= size;
       MarkBit map_mark_bit = Marking::MarkBitFrom(map);
@@ -733,7 +740,8 @@
       }

       MarkBit obj_mark_bit = Marking::MarkBitFrom(obj);
-      ASSERT(!Marking::IsBlack(obj_mark_bit));
+      ASSERT(Marking::IsGrey(obj_mark_bit) ||
+             (obj->IsFiller() && Marking::IsWhite(obj_mark_bit)));
       Marking::MarkBlack(obj_mark_bit);
       MemoryChunk::IncrementLiveBytes(obj->address(), size);
     }
=======================================
--- /trunk/src/incremental-marking.h    Wed Oct  5 14:44:48 2011
+++ /trunk/src/incremental-marking.h    Tue Oct 11 12:09:42 2011
@@ -206,7 +206,9 @@

   void ResetStepCounters();

-  void StartMarking();
+  enum CompactionFlag { ALLOW_COMPACTION, PREVENT_COMPACTION };
+
+  void StartMarking(CompactionFlag flag);

   void ActivateIncrementalWriteBarrier(PagedSpace* space);
   static void ActivateIncrementalWriteBarrier(NewSpace* space);
=======================================
--- /trunk/src/spaces.cc        Wed Oct  5 14:44:48 2011
+++ /trunk/src/spaces.cc        Tue Oct 11 12:09:42 2011
@@ -1798,6 +1798,11 @@
   owner_->heap()->incremental_marking()->OldSpaceStep(
       size_in_bytes - old_linear_size);

+  // The old-space-step might have finished sweeping and restarted marking.
+  // Verify that it did not turn the page of the new node into an evacuation
+  // candidate.
+  ASSERT(!MarkCompactCollector::IsOnEvacuationCandidate(new_node));
+
   const int kThreshold = IncrementalMarking::kAllocatedThreshold;

  // Memory in the linear allocation area is counted as allocated. We may free
=======================================
--- /trunk/src/version.cc       Mon Oct 10 06:20:40 2011
+++ /trunk/src/version.cc       Tue Oct 11 12:09:42 2011
@@ -35,7 +35,7 @@
 #define MAJOR_VERSION     3
 #define MINOR_VERSION     6
 #define BUILD_NUMBER      6
-#define PATCH_LEVEL       0
+#define PATCH_LEVEL       1
 // Use 1 for candidates and 0 otherwise.
 // (Boolean macro values are not supported by all preprocessors.)
 #define IS_CANDIDATE_VERSION 0

--
v8-dev mailing list
v8-dev@googlegroups.com
http://groups.google.com/group/v8-dev

Reply via email to