Reviewers: Vyacheslav Egorov

Description:
Fix the invariant so that we cannot record relocation slots for
white objects when compacting.  Add a flag for incremental code
compaction.

Please review this at https://chromiumcodereview.appspot.com/10907174/

SVN Base: http://v8.googlecode.com/svn/branches/bleeding_edge/

Affected files:
  M     src/flag-definitions.h
  M     src/incremental-marking-inl.h
  M     src/incremental-marking.h
  M     src/incremental-marking.cc
  M     src/mark-compact.h
  M     src/mark-compact.cc


Index: src/flag-definitions.h
===================================================================
--- src/flag-definitions.h      (revision 12482)
+++ src/flag-definitions.h      (working copy)
@@ -412,6 +412,8 @@
             "Never perform compaction on full GC - testing only")
 DEFINE_bool(compact_code_space, true,
             "Compact code space on full non-incremental collections")
+DEFINE_bool(incremental_code_compaction, false,
+            "Compact code space on full incremental collections")
 DEFINE_bool(cleanup_code_caches_at_gc, true,
             "Flush inline caches prior to mark compact collection and "
             "flush code caches in maps during mark compact cycle.")
Index: src/incremental-marking-inl.h
===================================================================
--- src/incremental-marking-inl.h       (revision 12482)
+++ src/incremental-marking-inl.h       (working copy)
@@ -48,7 +48,9 @@
     // Object is either grey or white.  It will be scanned if survives.
     return false;
   }
-  return true;
+  if (!is_compacting_) return false;
+  MarkBit obj_bit = Marking::MarkBitFrom(obj);
+  return Marking::IsBlack(obj_bit);
 }


Index: src/incremental-marking.cc
===================================================================
--- src/incremental-marking.cc  (revision 12482)
+++ src/incremental-marking.cc  (working copy)
@@ -67,7 +67,7 @@
 void IncrementalMarking::RecordWriteSlow(HeapObject* obj,
                                          Object** slot,
                                          Object* value) {
- if (BaseRecordWrite(obj, slot, value) && is_compacting_ && slot != NULL) {
+  if (BaseRecordWrite(obj, slot, value) && slot != NULL) {
     MarkBit obj_bit = Marking::MarkBitFrom(obj);
     if (Marking::IsBlack(obj_bit)) {
       // Object is not going to be rescanned we need to record the slot.
@@ -127,9 +127,9 @@


 void IncrementalMarking::RecordWriteOfCodeEntrySlow(JSFunction* host,
-                                                Object** slot,
-                                                Code* value) {
-  if (BaseRecordWrite(host, slot, value) && is_compacting_) {
+                                                    Object** slot,
+                                                    Code* value) {
+  if (BaseRecordWrite(host, slot, value)) {
     ASSERT(slot != NULL);
     heap_->mark_compact_collector()->
         RecordCodeEntrySlot(reinterpret_cast<Address>(slot), value);
Index: src/incremental-marking.h
===================================================================
--- src/incremental-marking.h   (revision 12482)
+++ src/incremental-marking.h   (working copy)
@@ -132,6 +132,7 @@
                                                Object** slot,
                                                Isolate* isolate);

+  // Returns true if the slot needs to be recorded.
INLINE(bool BaseRecordWrite(HeapObject* obj, Object** slot, Object* value));
   INLINE(void RecordWrite(HeapObject* obj, Object** slot, Object* value));
   INLINE(void RecordWriteIntoCode(HeapObject* obj,
Index: src/mark-compact.cc
===================================================================
--- src/mark-compact.cc (revision 12482)
+++ src/mark-compact.cc (working copy)
@@ -343,7 +343,9 @@
     CollectEvacuationCandidates(heap()->old_pointer_space());
     CollectEvacuationCandidates(heap()->old_data_space());

-    if (FLAG_compact_code_space && mode == NON_INCREMENTAL_COMPACTION) {
+    if (FLAG_compact_code_space &&
+        (mode == NON_INCREMENTAL_COMPACTION ||
+         FLAG_incremental_code_compaction)) {
       CollectEvacuationCandidates(heap()->code_space());
     } else if (FLAG_trace_fragmentation) {
       TraceFragmentation(heap()->code_space());
Index: src/mark-compact.h
===================================================================
--- src/mark-compact.h  (revision 12482)
+++ src/mark-compact.h  (working copy)
@@ -304,6 +304,26 @@
     NUMBER_OF_SLOT_TYPES
   };

+  static const char* SlotTypeToString(SlotType type) {
+    switch (type) {
+      case EMBEDDED_OBJECT_SLOT:
+        return "EMBEDDED_OBJECT_SLOT";
+      case RELOCATED_CODE_OBJECT:
+        return "RELOCATED_CODE_OBJECT";
+      case CODE_TARGET_SLOT:
+        return "CODE_TARGET_SLOT";
+      case CODE_ENTRY_SLOT:
+        return "CODE_ENTRY_SLOT";
+      case DEBUG_TARGET_SLOT:
+        return "DEBUG_TARGET_SLOT";
+      case JS_RETURN_SLOT:
+        return "JS_RETURN_SLOT";
+      case NUMBER_OF_SLOT_TYPES:
+        return "NUMBER_OF_SLOT_TYPES";
+    }
+    return "UNKNOWN SlotType";
+  }
+
   void UpdateSlots(Heap* heap);

   void UpdateSlotsWithFilter(Heap* heap);


--
v8-dev mailing list
[email protected]
http://groups.google.com/group/v8-dev

Reply via email to