Revision: 12922
Author:   [email protected]
Date:     Fri Nov  9 06:34:11 2012
Log:      Revert accidental r12918 ("Refactoring incremental marking.") from trunk.

[email protected]

Review URL: https://codereview.chromium.org/11377071
http://code.google.com/p/v8/source/detail?r=12922

Modified:
 /trunk/src/incremental-marking.cc
 /trunk/src/incremental-marking.h
 /trunk/src/objects-visiting.h
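
For readers skimming the diff: the net effect of this revert is to drop the
ProcessMarkingDeque()/VisitObject() helpers that r12918 factored out and to
fold the marking loop back inline into IncrementalMarking::Hurry() and
IncrementalMarking::Step(). As rough orientation, here is a minimal,
self-contained sketch of the budgeted tri-color step loop that Step()
implements; Obj, Color, MarkingStep and the std::deque are illustrative
stand-ins, not V8's actual types or API.

#include <cstdint>
#include <deque>
#include <vector>

enum class Color { WHITE, GREY, BLACK };

struct Obj {
  Color color = Color::WHITE;
  intptr_t size = 16;          // object size in bytes
  std::vector<Obj*> fields;    // outgoing pointers
};

// Pop grey objects, grey their white children (enqueueing them for a later
// step), then blacken the parent; stop once the per-step byte budget runs
// out. This mirrors the shape of the bytes_to_process loop restored below.
void MarkingStep(std::deque<Obj*>* deque, intptr_t bytes_to_process) {
  while (!deque->empty() && bytes_to_process > 0) {
    Obj* obj = deque->back();
    deque->pop_back();
    bytes_to_process -= obj->size;
    for (Obj* field : obj->fields) {
      if (field->color == Color::WHITE) {  // white -> grey: seen, not visited
        field->color = Color::GREY;
        deque->push_back(field);
      }
    }
    obj->color = Color::BLACK;             // grey -> black: body visited
  }
}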

=======================================
--- /trunk/src/incremental-marking.cc   Fri Nov  9 04:30:58 2012
+++ /trunk/src/incremental-marking.cc   Fri Nov  9 06:34:11 2012
@@ -173,19 +173,6 @@
     }
   }
 }
-
-
-static void MarkObjectGreyDoNotEnqueue(Object* obj) {
-  if (obj->IsHeapObject()) {
-    HeapObject* heap_obj = HeapObject::cast(obj);
-    MarkBit mark_bit = Marking::MarkBitFrom(HeapObject::cast(obj));
-    if (Marking::IsBlack(mark_bit)) {
-      MemoryChunk::IncrementLiveBytesFromGC(heap_obj->address(),
-                                            -heap_obj->Size());
-    }
-    Marking::AnyToGrey(mark_bit);
-  }
-}


 class IncrementalMarkingMarkingVisitor
@@ -194,18 +181,8 @@
   static void Initialize() {
     StaticMarkingVisitor<IncrementalMarkingMarkingVisitor>::Initialize();

-    table_.Register(kVisitNativeContext, &VisitNativeContextIncremental);
     table_.Register(kVisitJSRegExp, &VisitJSRegExp);
   }
-
-  static void VisitNativeContextIncremental(Map* map, HeapObject* object) {
-    Context* context = Context::cast(object);
-
-    // We will mark cache black with a separate pass
-    // when we finish marking.
-    MarkObjectGreyDoNotEnqueue(context->normalized_map_cache());
-    VisitNativeContext(map, context);
-  }

   static void VisitJSWeakMap(Map* map, HeapObject* object) {
     Heap* heap = map->GetHeap();
@@ -515,6 +492,19 @@

   heap_->new_space()->LowerInlineAllocationLimit(kAllocatedThreshold);
 }
+
+
+static void MarkObjectGreyDoNotEnqueue(Object* obj) {
+  if (obj->IsHeapObject()) {
+    HeapObject* heap_obj = HeapObject::cast(obj);
+    MarkBit mark_bit = Marking::MarkBitFrom(HeapObject::cast(obj));
+    if (Marking::IsBlack(mark_bit)) {
+      MemoryChunk::IncrementLiveBytesFromGC(heap_obj->address(),
+                                            -heap_obj->Size());
+    }
+    Marking::AnyToGrey(mark_bit);
+  }
+}


 void IncrementalMarking::StartMarking(CompactionFlag flag) {
@@ -627,54 +617,6 @@
   steps_count_since_last_gc_ = 0;
   longest_step_ = 0.0;
 }
-
-
-void IncrementalMarking::VisitObject(Map* map, HeapObject* obj, int size) {
-  MarkBit map_mark_bit = Marking::MarkBitFrom(map);
-  if (Marking::IsWhite(map_mark_bit)) {
-    WhiteToGreyAndPush(map, map_mark_bit);
-  }
-
-  IncrementalMarkingMarkingVisitor::IterateBody(map, obj);
-
-  MarkBit obj_mark_bit = Marking::MarkBitFrom(obj);
-  SLOW_ASSERT(Marking::IsGrey(obj_mark_bit) ||
-             (obj->IsFiller() && Marking::IsWhite(obj_mark_bit)));
-  Marking::MarkBlack(obj_mark_bit);
-  MemoryChunk::IncrementLiveBytesFromGC(obj->address(), size);
-}
-
-
-void IncrementalMarking::ProcessMarkingDeque(intptr_t bytes_to_process) {
-  Map* filler_map = heap_->one_pointer_filler_map();
-  while (!marking_deque_.IsEmpty() && bytes_to_process > 0) {
-    HeapObject* obj = marking_deque_.Pop();
-
-    // Explicitly skip one word fillers. Incremental markbit patterns are
-    // correct only for objects that occupy at least two words.
-    Map* map = obj->map();
-    if (map == filler_map) continue;
-
-    int size = obj->SizeFromMap(map);
-    bytes_to_process -= size;
-    VisitObject(map, obj, size);
-  }
-}
-
-
-void IncrementalMarking::ProcessMarkingDeque() {
-  Map* filler_map = heap_->one_pointer_filler_map();
-  while (!marking_deque_.IsEmpty()) {
-    HeapObject* obj = marking_deque_.Pop();
-
-    // Explicitly skip one word fillers. Incremental markbit patterns are
-    // correct only for objects that occupy at least two words.
-    Map* map = obj->map();
-    if (map == filler_map) continue;
-
-    VisitObject(map, obj, obj->SizeFromMap(map));
-  }
-}


 void IncrementalMarking::Hurry() {
@@ -686,7 +628,32 @@
     }
     // TODO(gc) hurry can mark objects it encounters black as mutator
     // was stopped.
-    ProcessMarkingDeque();
+    Map* filler_map = heap_->one_pointer_filler_map();
+    Map* native_context_map = heap_->native_context_map();
+    while (!marking_deque_.IsEmpty()) {
+      HeapObject* obj = marking_deque_.Pop();
+
+      // Explicitly skip one word fillers. Incremental markbit patterns are
+      // correct only for objects that occupy at least two words.
+      Map* map = obj->map();
+      if (map == filler_map) {
+        continue;
+      } else if (map == native_context_map) {
+        // Native contexts have weak fields.
+        IncrementalMarkingMarkingVisitor::VisitNativeContext(map, obj);
+      } else {
+        MarkBit map_mark_bit = Marking::MarkBitFrom(map);
+        if (Marking::IsWhite(map_mark_bit)) {
+          WhiteToGreyAndPush(map, map_mark_bit);
+        }
+        IncrementalMarkingMarkingVisitor::IterateBody(map, obj);
+      }
+
+      MarkBit mark_bit = Marking::MarkBitFrom(obj);
+      ASSERT(!Marking::IsBlack(mark_bit));
+      Marking::MarkBlack(mark_bit);
+      MemoryChunk::IncrementLiveBytesFromGC(obj->address(), obj->Size());
+    }
     state_ = COMPLETE;
     if (FLAG_trace_incremental_marking) {
       double end = OS::TimeCurrentMillis();
@@ -825,7 +792,43 @@
       StartMarking(PREVENT_COMPACTION);
     }
   } else if (state_ == MARKING) {
-    ProcessMarkingDeque(bytes_to_process);
+    Map* filler_map = heap_->one_pointer_filler_map();
+    Map* native_context_map = heap_->native_context_map();
+    while (!marking_deque_.IsEmpty() && bytes_to_process > 0) {
+      HeapObject* obj = marking_deque_.Pop();
+
+      // Explicitly skip one word fillers. Incremental markbit patterns are
+      // correct only for objects that occupy at least two words.
+      Map* map = obj->map();
+      if (map == filler_map) continue;
+
+      int size = obj->SizeFromMap(map);
+      bytes_to_process -= size;
+      MarkBit map_mark_bit = Marking::MarkBitFrom(map);
+      if (Marking::IsWhite(map_mark_bit)) {
+        WhiteToGreyAndPush(map, map_mark_bit);
+      }
+
+      // TODO(gc) switch to static visitor instead of normal visitor.
+      if (map == native_context_map) {
+        // Native contexts have weak fields.
+        Context* ctx = Context::cast(obj);
+
+        // We will mark cache black with a separate pass
+        // when we finish marking.
+        MarkObjectGreyDoNotEnqueue(ctx->normalized_map_cache());
+
+        IncrementalMarkingMarkingVisitor::VisitNativeContext(map, ctx);
+      } else {
+        IncrementalMarkingMarkingVisitor::IterateBody(map, obj);
+      }
+
+      MarkBit obj_mark_bit = Marking::MarkBitFrom(obj);
+      SLOW_ASSERT(Marking::IsGrey(obj_mark_bit) ||
+                  (obj->IsFiller() && Marking::IsWhite(obj_mark_bit)));
+      Marking::MarkBlack(obj_mark_bit);
+      MemoryChunk::IncrementLiveBytesFromGC(obj->address(), size);
+    }
     if (marking_deque_.IsEmpty()) MarkingComplete(action);
   }

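A note on MarkObjectGreyDoNotEnqueue(), which the hunks above move back next
to StartMarking(): live-byte counts are credited to a page when an object
turns black, so demoting an already-black object to grey has to subtract its
size again, or the bytes would be double-counted when the object is blackened
a second time. A minimal model of that bookkeeping, with Page and Obj as
simplified stand-ins for V8's MemoryChunk and HeapObject:

#include <cstdint>

struct Page {
  intptr_t live_bytes = 0;
};

struct Obj {
  bool black = false;
  intptr_t size = 0;
  Page* page = nullptr;
};

void MarkBlack(Obj* obj) {
  if (!obj->black) {
    obj->black = true;
    obj->page->live_bytes += obj->size;  // credited once per blackening
  }
}

void GreyDoNotEnqueue(Obj* obj) {
  if (obj->black) {
    obj->page->live_bytes -= obj->size;  // undo the credit before demoting
    obj->black = false;
  }
  // The object is now grey but deliberately not pushed on the marking deque;
  // a separate pass at the end of marking blackens it and re-credits it.
}
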
=======================================
--- /trunk/src/incremental-marking.h    Fri Nov  9 04:30:58 2012
+++ /trunk/src/incremental-marking.h    Fri Nov  9 06:34:11 2012
@@ -259,12 +259,6 @@

   void EnsureMarkingDequeIsCommitted();

-  INLINE(void ProcessMarkingDeque());
-
-  INLINE(void ProcessMarkingDeque(intptr_t bytes_to_process));
-
-  INLINE(void VisitObject(Map* map, HeapObject* obj, int size));
-
   Heap* heap_;

   State state_;
=======================================
--- /trunk/src/objects-visiting.h       Fri Nov  9 04:30:58 2012
+++ /trunk/src/objects-visiting.h       Fri Nov  9 06:34:11 2012
@@ -393,6 +393,9 @@
   static inline void VisitCodeTarget(Heap* heap, RelocInfo* rinfo);
   static inline void VisitExternalReference(RelocInfo* rinfo) { }
   static inline void VisitRuntimeEntry(RelocInfo* rinfo) { }
+
+  // TODO(mstarzinger): This should be made protected once refactoring is done.
+  static inline void VisitNativeContext(Map* map, HeapObject* object);

  // TODO(mstarzinger): This should be made protected once refactoring is done.
  // Mark non-optimized code for functions inlined into the given optimized
@@ -405,7 +408,6 @@
   static inline void VisitSharedFunctionInfo(Map* map, HeapObject* object);
   static inline void VisitJSFunction(Map* map, HeapObject* object);
   static inline void VisitJSRegExp(Map* map, HeapObject* object);
-  static inline void VisitNativeContext(Map* map, HeapObject* object);

   // Mark pointers in a Map and its TransitionArray together, possibly
   // treating transitions or back pointers weak.
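
On the objects-visiting.h change above: the VisitNativeContext() declaration
moves up beside the other publicly visible visitors (with a TODO to make it
protected again later) so that the re-inlined loops in Hurry() and Step() can
call IncrementalMarkingMarkingVisitor::VisitNativeContext(map, obj) directly
for native contexts, while everything else still dispatches through the
registered table. A compact sketch of that static-visitor dispatch-table
pattern; Kind, Obj and Table are simplified stand-ins, not V8's
StaticVisitorBase machinery:

#include <cstdio>

enum Kind { kVisitJSRegExp, kVisitNativeContext, kNumKinds };

struct Obj {
  Kind kind;
};

typedef void (*Callback)(Obj* obj);

struct Table {
  Callback callbacks[kNumKinds];
  void Register(Kind kind, Callback callback) { callbacks[kind] = callback; }
  void Visit(Obj* obj) { callbacks[obj->kind](obj); }
};

static void VisitJSRegExp(Obj*) { std::puts("regexp body"); }
static void VisitNativeContext(Obj*) { std::puts("context, weak fields"); }

int main() {
  Table table = {};
  table.Register(kVisitJSRegExp, &VisitJSRegExp);
  table.Register(kVisitNativeContext, &VisitNativeContext);

  Obj context = { kVisitNativeContext };
  table.Visit(&context);         // generic path: dispatch through the table
  VisitNativeContext(&context);  // special case: call the visitor directly
  return 0;
}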

--
v8-dev mailing list
[email protected]
http://groups.google.com/group/v8-dev
