Revision: 7443
Author:   [email protected]
Date:     Wed Mar 30 08:17:10 2011
Log: Specialize ScavengingVisitor for the case when all logging and profiling is disabled.

By default, use the specialized static visitor and fall back to the more generic one when an isolate with logging/profiling enabled hits a GC.

Review URL: http://codereview.chromium.org/6777011
http://code.google.com/p/v8/source/detail?r=7443

Modified:
 /branches/bleeding_edge/src/heap.cc
 /branches/bleeding_edge/src/heap.h
 /branches/bleeding_edge/src/objects-visiting.h
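
For illustration, here is a minimal standalone sketch of the pattern this
change applies (not V8 code; Mode, Visitor, active_visitor, and
SwitchIfProfilingEnabled are invented names for the sketch): one class
template is instantiated for both modes, the compiler folds the logging
branch away in the plain instantiation, and a function pointer selects the
active variant, upgrading once when profiling turns on.

#include <cstdio>

enum Mode { PLAIN, INSTRUMENTED };

typedef void (*Callback)(int payload);

template <Mode mode>
struct Visitor {
  static void Visit(int payload) {
    // mode is a compile-time constant: the PLAIN instantiation
    // contains no logging code at all.
    if (mode == INSTRUMENTED) {
      std::printf("moved object %d\n", payload);
    }
    // ... the actual evacuation work would go here ...
  }
};

// Fast default; upgraded once if profiling is ever enabled.
static Callback active_visitor = &Visitor<PLAIN>::Visit;

static void SwitchIfProfilingEnabled(bool profiling) {
  if (profiling) active_visitor = &Visitor<INSTRUMENTED>::Visit;
}

int main() {
  active_visitor(1);               // plain specialization: silent
  SwitchIfProfilingEnabled(true);
  active_visitor(2);               // instrumented specialization: logs
  return 0;
}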

=======================================
--- /branches/bleeding_edge/src/heap.cc Mon Mar 28 06:05:36 2011
+++ /branches/bleeding_edge/src/heap.cc Wed Mar 30 08:17:10 2011
@@ -941,6 +941,8 @@

   gc_state_ = SCAVENGE;

+  SwitchScavengingVisitorsTableIfProfilingWasEnabled();
+
   Page::FlipMeaningOfInvalidatedWatermarkFlag(this);
 #ifdef DEBUG
   VerifyPageWatermarkValidity(old_pointer_space_, ALL_VALID);
@@ -1232,6 +1234,32 @@
 }


+enum LoggingAndProfiling {
+  LOGGING_AND_PROFILING_ENABLED,
+  LOGGING_AND_PROFILING_DISABLED
+};
+
+
+typedef void (*ScavengingCallback)(Map* map,
+                                   HeapObject** slot,
+                                   HeapObject* object);
+
+
+static Atomic32 scavenging_visitors_table_mode_;
+static VisitorDispatchTable<ScavengingCallback> scavenging_visitors_table_;
+
+
+INLINE(static void DoScavengeObject(Map* map,
+                                    HeapObject** slot,
+                                    HeapObject* obj));
+
+
+void DoScavengeObject(Map* map, HeapObject** slot, HeapObject* obj) {
+  scavenging_visitors_table_.GetVisitor(map)(map, slot, obj);
+}
+
+
+template<LoggingAndProfiling logging_and_profiling_mode>
 class ScavengingVisitor : public StaticVisitorBase {
  public:
   static void Initialize() {
@@ -1240,23 +1268,22 @@
     table_.Register(kVisitShortcutCandidate, &EvacuateShortcutCandidate);
     table_.Register(kVisitByteArray, &EvacuateByteArray);
     table_.Register(kVisitFixedArray, &EvacuateFixedArray);
+
     table_.Register(kVisitGlobalContext,
                     &ObjectEvacuationStrategy<POINTER_OBJECT>::
-                        VisitSpecialized<Context::kSize>);
-
-    typedef ObjectEvacuationStrategy<POINTER_OBJECT> PointerObject;
+                        template VisitSpecialized<Context::kSize>);

     table_.Register(kVisitConsString,
                     &ObjectEvacuationStrategy<POINTER_OBJECT>::
-                        VisitSpecialized<ConsString::kSize>);
+                        template VisitSpecialized<ConsString::kSize>);

     table_.Register(kVisitSharedFunctionInfo,
                     &ObjectEvacuationStrategy<POINTER_OBJECT>::
-                        VisitSpecialized<SharedFunctionInfo::kSize>);
+                        template VisitSpecialized<SharedFunctionInfo::kSize>);

     table_.Register(kVisitJSFunction,
                     &ObjectEvacuationStrategy<POINTER_OBJECT>::
-                    VisitSpecialized<JSFunction::kSize>);
+                        template VisitSpecialized<JSFunction::kSize>);

     table_.RegisterSpecializations<ObjectEvacuationStrategy<DATA_OBJECT>,
                                    kVisitDataObject,
@@ -1271,11 +1298,9 @@
                                    kVisitStructGeneric>();
   }

-
-  static inline void Scavenge(Map* map, HeapObject** slot, HeapObject* obj) {
-    table_.GetVisitor(map)(map, slot, obj);
-  }
-
+  static VisitorDispatchTable<ScavengingCallback>* GetTable() {
+    return &table_;
+  }

  private:
   enum ObjectContents  { DATA_OBJECT, POINTER_OBJECT };
@@ -1313,21 +1338,24 @@
     // Set the forwarding address.
     source->set_map_word(MapWord::FromForwardingAddress(target));

+    if (logging_and_profiling_mode == LOGGING_AND_PROFILING_ENABLED) {
 #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
-    // Update NewSpace stats if necessary.
-    RecordCopiedObject(heap, target);
+      // Update NewSpace stats if necessary.
+      RecordCopiedObject(heap, target);
 #endif
-    HEAP_PROFILE(heap, ObjectMoveEvent(source->address(), target->address()));
+      HEAP_PROFILE(heap,
+                   ObjectMoveEvent(source->address(), target->address()));
 #if defined(ENABLE_LOGGING_AND_PROFILING)
-    Isolate* isolate = heap->isolate();
-    if (isolate->logger()->is_logging() ||
-        isolate->cpu_profiler()->is_profiling()) {
-      if (target->IsSharedFunctionInfo()) {
-        PROFILE(isolate, SharedFunctionInfoMoveEvent(
-            source->address(), target->address()));
-      }
-    }
+      Isolate* isolate = heap->isolate();
+      if (isolate->logger()->is_logging() ||
+          isolate->cpu_profiler()->is_profiling()) {
+        if (target->IsSharedFunctionInfo()) {
+          PROFILE(isolate, SharedFunctionInfoMoveEvent(
+              source->address(), target->address()));
+        }
+      }
 #endif
+    }
+
     return target;
   }

@@ -1443,7 +1471,7 @@
         return;
       }

-      Scavenge(first->map(), slot, first);
+      DoScavengeObject(first->map(), slot, first);
       object->set_map_word(MapWord::FromForwardingAddress(*slot));
       return;
     }
@@ -1470,13 +1498,51 @@
     }
   };

-  typedef void (*Callback)(Map* map, HeapObject** slot, HeapObject* object);
-
-  static VisitorDispatchTable<Callback> table_;
+  static VisitorDispatchTable<ScavengingCallback> table_;
 };


-VisitorDispatchTable<ScavengingVisitor::Callback> ScavengingVisitor::table_;
+template<LoggingAndProfiling logging_and_profiling_mode>
+VisitorDispatchTable<ScavengingCallback>
+    ScavengingVisitor<logging_and_profiling_mode>::table_;
+
+
+static void InitializeScavengingVisitorsTables() {
+  ScavengingVisitor<LOGGING_AND_PROFILING_DISABLED>::Initialize();
+  ScavengingVisitor<LOGGING_AND_PROFILING_ENABLED>::Initialize();
+  scavenging_visitors_table_.CopyFrom(
+      ScavengingVisitor<LOGGING_AND_PROFILING_DISABLED>::GetTable());
+  scavenging_visitors_table_mode_ = LOGGING_AND_PROFILING_DISABLED;
+}
+
+
+void Heap::SwitchScavengingVisitorsTableIfProfilingWasEnabled() {
+  if (scavenging_visitors_table_mode_ == LOGGING_AND_PROFILING_ENABLED) {
+    // Table was already updated by some isolate.
+    return;
+  }
+
+  if (isolate()->logger()->is_logging() ||
+      isolate()->cpu_profiler()->is_profiling() ||
+      (isolate()->heap_profiler() != NULL &&
+       isolate()->heap_profiler()->is_profiling())) {
+    // If one of the isolates is doing a scavenge at this moment, it
+    // might see this table in an inconsistent state when some of the
+    // callbacks point to
+    // ScavengingVisitor<LOGGING_AND_PROFILING_ENABLED> and others to
+    // ScavengingVisitor<LOGGING_AND_PROFILING_DISABLED>.
+    // However, this does not lead to any bugs as such an isolate does
+    // not have profiling enabled, and any isolate with profiling
+    // enabled is guaranteed to see the table in a consistent state.
+    scavenging_visitors_table_.CopyFrom(
+        ScavengingVisitor<LOGGING_AND_PROFILING_ENABLED>::GetTable());
+
+    // We use Release_Store to prevent reordering of this write before writes
+    // to the table.
+    Release_Store(&scavenging_visitors_table_mode_,
+                  LOGGING_AND_PROFILING_ENABLED);
+  }
+}


 void Heap::ScavengeObjectSlow(HeapObject** p, HeapObject* object) {
@@ -1484,7 +1550,7 @@
   MapWord first_word = object->map_word();
   ASSERT(!first_word.IsForwardingAddress());
   Map* map = first_word.ToMap();
-  ScavengingVisitor::Scavenge(map, p, object);
+  DoScavengeObject(map, p, object);
 }


@@ -4757,10 +4823,10 @@
   gc_initializer_mutex->Lock();
   static bool initialized_gc = false;
   if (!initialized_gc) {
-      initialized_gc = true;
-      ScavengingVisitor::Initialize();
-      NewSpaceScavenger::Initialize();
-      MarkCompactCollector::Initialize();
+    initialized_gc = true;
+    InitializeScavengingVisitorsTables();
+    NewSpaceScavenger::Initialize();
+    MarkCompactCollector::Initialize();
   }
   gc_initializer_mutex->Unlock();

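For reference, a hedged sketch of the publication protocol used by
SwitchScavengingVisitorsTableIfProfilingWasEnabled above, translated to
C++11 atomics (V8 itself uses its own atomicops; TableMode, table_mode,
and copy_enabled_table are invented names for the sketch). The mode flag
is written with release semantics only after the table copy completes,
so any thread that observes ENABLED also observes a fully updated table;
a thread racing with the copy can, by construction, only be one without
profiling enabled, for which either callback variant is correct.

#include <atomic>

enum TableMode { DISABLED, ENABLED };

static std::atomic<int> table_mode(DISABLED);

// copy_enabled_table stands in for the per-element table copy shown in
// the objects-visiting.h hunk below.
void SwitchTableIfProfilingWasEnabled(bool profiling_on,
                                      void (*copy_enabled_table)()) {
  if (table_mode.load(std::memory_order_relaxed) == ENABLED) {
    return;  // some isolate already switched the table
  }
  if (profiling_on) {
    copy_enabled_table();
    // Release: the flag write cannot be reordered before the table
    // stores, so observing ENABLED implies a fully copied table.
    table_mode.store(ENABLED, std::memory_order_release);
  }
}
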
=======================================
--- /branches/bleeding_edge/src/heap.h  Fri Mar 18 13:35:07 2011
+++ /branches/bleeding_edge/src/heap.h  Wed Mar 30 08:17:10 2011
@@ -1451,6 +1451,8 @@
   // Allocate empty fixed array.
   MUST_USE_RESULT MaybeObject* AllocateEmptyFixedArray();

+  void SwitchScavengingVisitorsTableIfProfilingWasEnabled();
+
   // Performs a minor collection in new generation.
   void Scavenge();

=======================================
--- /branches/bleeding_edge/src/objects-visiting.h      Fri Mar 18 13:35:07 2011
+++ /branches/bleeding_edge/src/objects-visiting.h      Wed Mar 30 08:17:10 2011
@@ -141,13 +141,22 @@
 template<typename Callback>
 class VisitorDispatchTable {
  public:
+  void CopyFrom(VisitorDispatchTable* other) {
+    // We are not using memcpy to guarantee that during the update
+    // every element of the callbacks_ array remains a valid
+    // pointer (memcpy might be implemented as a byte-copying loop).
+    for (int i = 0; i < StaticVisitorBase::kVisitorIdCount; i++) {
+      NoBarrier_Store(&callbacks_[i], other->callbacks_[i]);
+    }
+  }
+
   inline Callback GetVisitor(Map* map) {
-    return callbacks_[map->visitor_id()];
+    return reinterpret_cast<Callback>(callbacks_[map->visitor_id()]);
   }

   void Register(StaticVisitorBase::VisitorId id, Callback callback) {
     ASSERT(id < StaticVisitorBase::kVisitorIdCount);  // id is unsigned.
-    callbacks_[id] = callback;
+    callbacks_[id] = reinterpret_cast<AtomicWord>(callback);
   }

   template<typename Visitor,
@@ -179,7 +188,7 @@
   }

  private:
-  Callback callbacks_[StaticVisitorBase::kVisitorIdCount];
+  AtomicWord callbacks_[StaticVisitorBase::kVisitorIdCount];
 };
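
A hedged sketch of why CopyFrom stores each slot individually instead of
calling memcpy (C++11 atomics stand in for V8's NoBarrier_Store;
DispatchTable and kSlots are invented names for the sketch): each
function pointer is copied with one word-sized store, so a concurrent
reader always sees either the old or the new pointer, never a torn value
from a byte-wise copy loop.

#include <atomic>
#include <cstddef>

static const std::size_t kSlots = 32;

struct DispatchTable {
  std::atomic<void*> slots[kSlots];

  void CopyFrom(const DispatchTable* other) {
    for (std::size_t i = 0; i < kSlots; i++) {
      // One word-sized store per slot: a racing reader observes either
      // the old or the new pointer, never a half-written one.
      slots[i].store(other->slots[i].load(std::memory_order_relaxed),
                     std::memory_order_relaxed);
    }
  }
};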


--
v8-dev mailing list
[email protected]
http://groups.google.com/group/v8-dev
