Revision: 8186
Author:   [email protected]
Date:     Mon Jun  6 08:23:04 2011
Log:      Refactor storage of global handles.

We used to have a linked list of nodes that were internally
block-allocated.

I kept the node blocks and put them on two lists: 1) the list of all
allocated blocks, 2) the list of blocks with used nodes. (1) is used
to reclaim blocks and (2) is used for traversal during GC. To make
traversal on scavenges faster the nodes holding new space objects are
grouped in an auxiliary array.

This changes the minimum memory usage from 5 words per global handle
to 4. An additional word is used for new-space handles.

Review URL: http://codereview.chromium.org/7054072
http://code.google.com/p/v8/source/detail?r=8186

Modified:
 /branches/bleeding_edge/src/api.cc
 /branches/bleeding_edge/src/global-handles.cc
 /branches/bleeding_edge/src/global-handles.h
 /branches/bleeding_edge/src/heap.cc
 /branches/bleeding_edge/src/heap.h

=======================================
--- /branches/bleeding_edge/src/api.cc  Fri Jun  3 12:45:59 2011
+++ /branches/bleeding_edge/src/api.cc  Mon Jun  6 08:23:04 2011
@@ -176,8 +176,8 @@
   heap_stats.pending_global_handle_count = &pending_global_handle_count;
   int near_death_global_handle_count;
heap_stats.near_death_global_handle_count = &near_death_global_handle_count;
-  int destroyed_global_handle_count;
- heap_stats.destroyed_global_handle_count = &destroyed_global_handle_count;
+  int free_global_handle_count;
+  heap_stats.free_global_handle_count = &free_global_handle_count;
   intptr_t memory_allocator_size;
   heap_stats.memory_allocator_size = &memory_allocator_size;
   intptr_t memory_allocator_capacity;
=======================================
--- /branches/bleeding_edge/src/global-handles.cc       Wed May 18 08:28:43 2011
+++ /branches/bleeding_edge/src/global-handles.cc       Mon Jun  6 08:23:04 2011
@@ -41,80 +41,151 @@
 }


-class GlobalHandles::Node : public Malloced {
+class GlobalHandles::Node {
  public:
-
-  void Initialize(Object* object) {
-    // Set the initial value of the handle.
-    object_ = object;
-    class_id_ = v8::HeapProfiler::kPersistentHandleNoClassId;
-    independent_ = false;
-    state_  = NORMAL;
-    parameter_or_next_free_.parameter = NULL;
-    callback_ = NULL;
+  // State transition diagram:
+ // FREE -> NORMAL <-> WEAK -> PENDING -> NEAR_DEATH -> { NORMAL, WEAK, FREE }
+  enum State {
+    FREE,
+    NORMAL,     // Normal global handle.
+    WEAK,       // Flagged as weak but not yet finalized.
+    PENDING,    // Has been recognized as only reachable by weak handles.
+    NEAR_DEATH  // Callback has informed the handle is near death.
+  };
+
+  // Maps handle location (slot) to the containing node.
+  static Node* FromLocation(Object** location) {
+    ASSERT(OFFSET_OF(Node, object_) == 0);
+    return reinterpret_cast<Node*>(location);
   }

-  Node() {
-    state_ = DESTROYED;
-  }
-
-  explicit Node(Object* object) {
-    Initialize(object);
-    // Initialize link structure.
-    next_ = NULL;
-  }
-
-  ~Node() {
-    if (state_ != DESTROYED) Destroy(Isolate::Current()->global_handles());
+  Node() {}
+
 #ifdef DEBUG
+  ~Node() {
+ // TODO(1428): if it's a weak handle we should have invoked its callback.
     // Zap the values for eager trapping.
     object_ = NULL;
-    next_ = NULL;
+    class_id_ = v8::HeapProfiler::kPersistentHandleNoClassId;
+    index_ = 0;
+    independent_ = false;
+    in_new_space_list_ = false;
     parameter_or_next_free_.next_free = NULL;
+    callback_ = NULL;
+  }
 #endif
+
+  void Initialize(int index, Node** first_free) {
+    index_ = static_cast<uint8_t>(index);
+    ASSERT(static_cast<int>(index_) == index);
+    state_ = FREE;
+    in_new_space_list_ = false;
+    parameter_or_next_free_.next_free = *first_free;
+    *first_free = this;
   }

-  void Destroy(GlobalHandles* global_handles) {
-    if (state_ == WEAK || IsNearDeath()) {
+  void Acquire(Object* object, GlobalHandles* global_handles) {
+    ASSERT(state_ == FREE);
+    object_ = object;
+    class_id_ = v8::HeapProfiler::kPersistentHandleNoClassId;
+    independent_ = false;
+    state_  = NORMAL;
+    parameter_or_next_free_.parameter = NULL;
+    callback_ = NULL;
+    IncreaseBlockUses(global_handles);
+  }
+
+  void Release(GlobalHandles* global_handles) {
+    ASSERT(state_ != FREE);
+    if (IsWeakRetainer()) {
       global_handles->number_of_weak_handles_--;
       if (object_->IsJSGlobalObject()) {
         global_handles->number_of_global_object_weak_handles_--;
       }
     }
-    state_ = DESTROYED;
+    state_ = FREE;
+    parameter_or_next_free_.next_free = global_handles->first_free_;
+    global_handles->first_free_ = this;
+    DecreaseBlockUses(global_handles);
   }

-  // Accessors for next_.
-  Node* next() { return next_; }
-  void set_next(Node* value) { next_ = value; }
-  Node** next_addr() { return &next_; }
+  // Object slot accessors.
+  Object* object() const { return object_; }
+  Object** location() { return &object_; }
+  Handle<Object> handle() { return Handle<Object>(location()); }
+
+  // Wrapper class ID accessors.
+  bool has_wrapper_class_id() const {
+    return class_id_ != v8::HeapProfiler::kPersistentHandleNoClassId;
+  }
+  uint16_t wrapper_class_id() const { return class_id_; }
+  void set_wrapper_class_id(uint16_t class_id) {
+    class_id_ = class_id;
+  }
+
+  // State accessors.
+
+  State state() const { return state_; }
+
+  bool IsNearDeath() const {
+ // Check for PENDING to ensure correct answer when processing callbacks.
+    return state_ == PENDING || state_ == NEAR_DEATH;
+  }
+
+  bool IsWeak() const { return state_ == WEAK; }
+
+  bool IsRetainer() const { return state_ != FREE; }
+
+  bool IsStrongRetainer() const { return state_ == NORMAL; }
+
+  bool IsWeakRetainer() const {
+    return state_ == WEAK || state_ == PENDING || state_ == NEAR_DEATH;
+  }
+
+  void MarkPending() {
+    ASSERT(state_ == WEAK);
+    state_ = PENDING;
+  }
+
+  // Independent flag accessors.
+  void MarkIndependent() {
+    ASSERT(state_ != FREE);
+    independent_ = true;
+  }
+  bool is_independent() const { return independent_; }
+
+  // In-new-space-list flag accessors.
+  void set_in_new_space_list(bool v) { in_new_space_list_ = v; }
+  bool is_in_new_space_list() const { return in_new_space_list_; }
+
+  // Callback accessor.
+  WeakReferenceCallback callback() { return callback_; }
+
+  // Callback parameter accessors.
+  void set_parameter(void* parameter) {
+    ASSERT(state_ != FREE);
+    parameter_or_next_free_.parameter = parameter;
+  }
+  void* parameter() const {
+    ASSERT(state_ != FREE);
+    return parameter_or_next_free_.parameter;
+  }

   // Accessors for next free node in the free list.
   Node* next_free() {
-    ASSERT(state_ == DESTROYED);
+    ASSERT(state_ == FREE);
     return parameter_or_next_free_.next_free;
   }
   void set_next_free(Node* value) {
-    ASSERT(state_ == DESTROYED);
+    ASSERT(state_ == FREE);
     parameter_or_next_free_.next_free = value;
   }

-  // Returns a link from the handle.
-  static Node* FromLocation(Object** location) {
-    ASSERT(OFFSET_OF(Node, object_) == 0);
-    return reinterpret_cast<Node*>(location);
-  }
-
-  // Returns the handle.
-  Handle<Object> handle() { return Handle<Object>(&object_); }
-
-  // Make this handle weak.
-  void MakeWeak(GlobalHandles* global_handles, void* parameter,
+  void MakeWeak(GlobalHandles* global_handles,
+                void* parameter,
                 WeakReferenceCallback callback) {
-    LOG(global_handles->isolate(),
-        HandleEvent("GlobalHandle::MakeWeak", handle().location()));
-    ASSERT(state_ != DESTROYED);
-    if (state_ != WEAK && !IsNearDeath()) {
+    ASSERT(state_ != FREE);
+    if (!IsWeakRetainer()) {
       global_handles->number_of_weak_handles_++;
       if (object_->IsJSGlobalObject()) {
         global_handles->number_of_global_object_weak_handles_++;
@@ -126,10 +197,8 @@
   }

   void ClearWeakness(GlobalHandles* global_handles) {
-    LOG(global_handles->isolate(),
-        HandleEvent("GlobalHandle::ClearWeakness", handle().location()));
-    ASSERT(state_ != DESTROYED);
-    if (state_ == WEAK || IsNearDeath()) {
+    ASSERT(state_ != FREE);
+    if (IsWeakRetainer()) {
       global_handles->number_of_weak_handles_--;
       if (object_->IsJSGlobalObject()) {
         global_handles->number_of_global_object_weak_handles_--;
@@ -138,51 +207,13 @@
     state_ = NORMAL;
     set_parameter(NULL);
   }
-
-  void MarkIndependent(GlobalHandles* global_handles) {
-    LOG(global_handles->isolate(),
-        HandleEvent("GlobalHandle::MarkIndependent", handle().location()));
-    ASSERT(state_ != DESTROYED);
-    independent_ = true;
-  }
-
-  bool IsNearDeath() {
- // Check for PENDING to ensure correct answer when processing callbacks.
-    return state_ == PENDING || state_ == NEAR_DEATH;
-  }
-
-  bool IsWeak() {
-    return state_ == WEAK;
-  }
-
-  bool CanBeRetainer() {
-    return state_ != DESTROYED && state_ != NEAR_DEATH;
-  }
-
-  void SetWrapperClassId(uint16_t class_id) {
-    class_id_ = class_id;
-  }
-
-  // Returns the id for this weak handle.
-  void set_parameter(void* parameter) {
-    ASSERT(state_ != DESTROYED);
-    parameter_or_next_free_.parameter = parameter;
-  }
-  void* parameter() {
-    ASSERT(state_ != DESTROYED);
-    return parameter_or_next_free_.parameter;
-  }
-
-  // Returns the callback for this weak handle.
-  WeakReferenceCallback callback() { return callback_; }

   bool PostGarbageCollectionProcessing(Isolate* isolate,
                                        GlobalHandles* global_handles) {
     if (state_ != Node::PENDING) return false;
- LOG(isolate, HandleEvent("GlobalHandle::Processing", handle().location()));
     WeakReferenceCallback func = callback();
     if (func == NULL) {
-      Destroy(global_handles);
+      Release(global_handles);
       return false;
     }
     void* par = parameter();
@@ -191,13 +222,6 @@

     v8::Persistent<v8::Object> object = ToApi<v8::Object>(handle());
     {
- // Forbid reuse of destroyed nodes as they might be already deallocated. - // It's fine though to reuse nodes that were destroyed in weak callback - // as those cannot be deallocated until we are back from the callback.
-      global_handles->set_first_free(NULL);
-      if (global_handles->first_deallocated()) {
- global_handles->first_deallocated()->set_next(global_handles->head());
-      }
       // Check that we are not passing a finalized external string to
       // the callback.
       ASSERT(!object_->IsExternalAsciiString() ||
@@ -214,97 +238,145 @@
     return true;
   }

-  // Place the handle address first to avoid offset computation.
-  Object* object_;  // Storage for object pointer.
-
+ private:
+  inline NodeBlock* FindBlock();
+  inline void IncreaseBlockUses(GlobalHandles* global_handles);
+  inline void DecreaseBlockUses(GlobalHandles* global_handles);
+
+  // Storage for object pointer.
+  // Placed first to avoid offset computation.
+  Object* object_;
+
+  // Next word stores class_id, index, state, and independent.
+  // Note: the most aligned fields should go first.
+
+  // Wrapper class ID.
   uint16_t class_id_;

-  // Transition diagram:
- // NORMAL <-> WEAK -> PENDING -> NEAR_DEATH -> { NORMAL, WEAK, DESTROYED }
-  enum State {
-    NORMAL,      // Normal global handle.
-    WEAK,        // Flagged as weak but not yet finalized.
-    PENDING,     // Has been recognized as only reachable by weak handles.
-    NEAR_DEATH,  // Callback has informed the handle is near death.
-    DESTROYED
-  };
- State state_ : 4; // Need one more bit for MSVC as it treats enums as signed.
+  // Index in the containing handle block.
+  uint8_t index_;
+
+  // Need one more bit for MSVC as it treats enums as signed.
+  State state_ : 4;

   bool independent_ : 1;
-
- private:
+  bool in_new_space_list_ : 1;
+
   // Handle specific callback.
   WeakReferenceCallback callback_;
-  // Provided data for callback.  In DESTROYED state, this is used for
+
+  // Provided data for callback.  In FREE state, this is used for
   // the free list link.
   union {
     void* parameter;
     Node* next_free;
   } parameter_or_next_free_;

-  // Linkage for the list.
-  Node* next_;
-
+  DISALLOW_COPY_AND_ASSIGN(Node);
+};
+
+
+class GlobalHandles::NodeBlock {
  public:
-  TRACK_MEMORY("GlobalHandles::Node")
-};
-
-
-class GlobalHandles::Pool {
-  public:
-    Pool() {
-      current_ = new Chunk();
-      current_->previous = NULL;
-      next_ = current_->nodes;
-      limit_ = current_->nodes + kNodesPerChunk;
-    }
-
-    ~Pool() {
-      if (current_ != NULL) {
-        Release();
-      }
-    }
-
-    Node* Allocate() {
-      if (next_ < limit_) {
-        return next_++;
-      }
-      return SlowAllocate();
-    }
-
-    void Release() {
-      Chunk* current = current_;
- ASSERT(current != NULL); // At least a single block must by allocated
-      do {
-        Chunk* previous = current->previous;
-        delete current;
-        current = previous;
-      } while (current != NULL);
-      current_ = NULL;
-      next_ = limit_ = NULL;
-    }
-
-  private:
-    static const int kNodesPerChunk = (1 << 12) - 1;
-    struct Chunk : public Malloced {
-      Chunk* previous;
-      Node nodes[kNodesPerChunk];
-    };
-
-    Node* SlowAllocate() {
-      Chunk* chunk = new Chunk();
-      chunk->previous = current_;
-      current_ = chunk;
-
-      Node* new_nodes = current_->nodes;
-      next_ = new_nodes + 1;
-      limit_ = new_nodes + kNodesPerChunk;
-      return new_nodes;
-    }
-
-    Chunk* current_;
-    Node* next_;
-    Node* limit_;
+  static const int kSize = 256;
+
+  explicit NodeBlock(NodeBlock* next)
+      : next_(next), used_nodes_(0), next_used_(NULL), prev_used_(NULL) {}
+
+  void PutNodesOnFreeList(Node** first_free) {
+    for (int i = kSize - 1; i >= 0; --i) {
+      nodes_[i].Initialize(i, first_free);
+    }
+  }
+
+  Node* node_at(int index) {
+    ASSERT(0 <= index && index < kSize);
+    return &nodes_[index];
+  }
+
+  void IncreaseUses(GlobalHandles* global_handles) {
+    ASSERT(used_nodes_ < kSize);
+    if (used_nodes_++ == 0) {
+      NodeBlock* old_first = global_handles->first_used_block_;
+      global_handles->first_used_block_ = this;
+      next_used_ = old_first;
+      prev_used_ = NULL;
+      if (old_first == NULL) return;
+      old_first->prev_used_ = this;
+    }
+  }
+
+  void DecreaseUses(GlobalHandles* global_handles) {
+    ASSERT(used_nodes_ > 0);
+    if (--used_nodes_ == 0) {
+      if (next_used_ != NULL) next_used_->prev_used_ = prev_used_;
+      if (prev_used_ != NULL) prev_used_->next_used_ = next_used_;
+      if (this == global_handles->first_used_block_) {
+        global_handles->first_used_block_ = next_used_;
+      }
+    }
+  }
+
+  // Next block in the list of all blocks.
+  NodeBlock* next() const { return next_; }
+
+  // Next/previous block in the list of blocks with used nodes.
+  NodeBlock* next_used() const { return next_used_; }
+  NodeBlock* prev_used() const { return prev_used_; }
+
+ private:
+  Node nodes_[kSize];
+  NodeBlock* const next_;
+  int used_nodes_;
+  NodeBlock* next_used_;
+  NodeBlock* prev_used_;
+};
+
+
+GlobalHandles::NodeBlock* GlobalHandles::Node::FindBlock() {
+  intptr_t ptr = reinterpret_cast<intptr_t>(this);
+  ptr = ptr - index_ * sizeof(Node);
+  NodeBlock* block = reinterpret_cast<NodeBlock*>(ptr);
+  ASSERT(block->node_at(index_) == this);
+  return block;
+}
+
+
+void GlobalHandles::Node::IncreaseBlockUses(GlobalHandles* global_handles) {
+  FindBlock()->IncreaseUses(global_handles);
+}
+
+
+void GlobalHandles::Node::DecreaseBlockUses(GlobalHandles* global_handles) {
+  FindBlock()->DecreaseUses(global_handles);
+}
+
+
+class GlobalHandles::NodeIterator {
+ public:
+  explicit NodeIterator(GlobalHandles* global_handles)
+      : block_(global_handles->first_used_block_),
+        index_(0) {}
+
+  bool done() const { return block_ == NULL; }
+
+  Node* node() const {
+    ASSERT(!done());
+    return block_->node_at(index_);
+  }
+
+  void Advance() {
+    ASSERT(!done());
+    if (++index_ < NodeBlock::kSize) return;
+    index_ = 0;
+    block_ = block_->next_used();
+  }
+
+ private:
+  NodeBlock* block_;
+  int index_;
+
+  DISALLOW_COPY_AND_ASSIGN(NodeIterator);
 };


@@ -312,41 +384,39 @@
     : isolate_(isolate),
       number_of_weak_handles_(0),
       number_of_global_object_weak_handles_(0),
-      head_(NULL),
+      first_block_(NULL),
+      first_used_block_(NULL),
       first_free_(NULL),
-      first_deallocated_(NULL),
-      pool_(new Pool()),
-      post_gc_processing_count_(0),
-      object_groups_(4) {
-}
+      post_gc_processing_count_(0) {}


 GlobalHandles::~GlobalHandles() {
-  delete pool_;
-  pool_ = 0;
+  NodeBlock* block = first_block_;
+  while (block != NULL) {
+    NodeBlock* tmp = block->next();
+    delete block;
+    block = tmp;
+  }
+  first_block_ = NULL;
 }


 Handle<Object> GlobalHandles::Create(Object* value) {
   isolate_->counters()->global_handles()->Increment();
-  Node* result;
-  if (first_free()) {
-    // Take the first node in the free list.
-    result = first_free();
-    set_first_free(result->next_free());
-  } else if (first_deallocated()) {
-    // Next try deallocated list
-    result = first_deallocated();
-    set_first_deallocated(result->next_free());
-    ASSERT(result->next() == head());
-    set_head(result);
-  } else {
-    // Allocate a new node.
-    result = pool_->Allocate();
-    result->set_next(head());
-    set_head(result);
-  }
-  result->Initialize(value);
+  if (first_free_ == NULL) {
+    first_block_ = new NodeBlock(first_block_);
+    first_block_->PutNodesOnFreeList(&first_free_);
+  }
+  ASSERT(first_free_ != NULL);
+  // Take the first node in the free list.
+  Node* result = first_free_;
+  first_free_ = result->next_free();
+  result->Acquire(value, this);
+  if (isolate_->heap()->InNewSpace(value) &&
+      !result->is_in_new_space_list()) {
+    new_space_nodes_.Add(result);
+    result->set_in_new_space_list(true);
+  }
   return result->handle();
 }

@@ -354,11 +424,7 @@
 void GlobalHandles::Destroy(Object** location) {
   isolate_->counters()->global_handles()->Decrement();
   if (location == NULL) return;
-  Node* node = Node::FromLocation(location);
-  node->Destroy(this);
-  // Link the destroyed.
-  node->set_next_free(first_free());
-  set_first_free(node);
+  Node::FromLocation(location)->Release(this);
 }


@@ -375,7 +441,7 @@


 void GlobalHandles::MarkIndependent(Object** location) {
-  Node::FromLocation(location)->MarkIndependent(this);
+  Node::FromLocation(location)->MarkIndependent();
 }


@@ -390,32 +456,23 @@


void GlobalHandles::SetWrapperClassId(Object** location, uint16_t class_id) {
-  Node::FromLocation(location)->SetWrapperClassId(class_id);
+  Node::FromLocation(location)->set_wrapper_class_id(class_id);
 }


 void GlobalHandles::IterateWeakRoots(ObjectVisitor* v) {
-  // Traversal of GC roots in the global handle list that are marked as
-  // WEAK, PENDING or NEAR_DEATH.
-  for (Node* current = head_; current != NULL; current = current->next()) {
-    if (current->state_ == Node::WEAK
-      || current->state_ == Node::PENDING
-      || current->state_ == Node::NEAR_DEATH) {
-      v->VisitPointer(&current->object_);
-    }
+  for (NodeIterator it(this); !it.done(); it.Advance()) {
+ if (it.node()->IsWeakRetainer()) v->VisitPointer(it.node()->location());
   }
 }


-void GlobalHandles::IterateWeakIndependentRoots(ObjectVisitor* v) {
-  // Traversal of GC roots in the global handle list that are independent
-  // and marked as WEAK, PENDING or NEAR_DEATH.
-  for (Node* current = head_; current != NULL; current = current->next()) {
-    if (!current->independent_) continue;
-    if (current->state_ == Node::WEAK
-      || current->state_ == Node::PENDING
-      || current->state_ == Node::NEAR_DEATH) {
-      v->VisitPointer(&current->object_);
+void GlobalHandles::IterateNewSpaceWeakIndependentRoots(ObjectVisitor* v) {
+  for (int i = 0; i < new_space_nodes_.length(); ++i) {
+    Node* node = new_space_nodes_[i];
+    ASSERT(node->is_in_new_space_list());
+    if (node->is_independent() && node->IsWeakRetainer()) {
+      v->VisitPointer(node->location());
     }
   }
 }
@@ -423,35 +480,31 @@

 void GlobalHandles::IterateWeakRoots(WeakReferenceGuest f,
                                      WeakReferenceCallback callback) {
-  for (Node* current = head_; current != NULL; current = current->next()) {
-    if (current->IsWeak() && current->callback() == callback) {
-      f(current->object_, current->parameter());
+  for (NodeIterator it(this); !it.done(); it.Advance()) {
+    if (it.node()->IsWeak() && it.node()->callback() == callback) {
+      f(it.node()->object(), it.node()->parameter());
     }
   }
 }


 void GlobalHandles::IdentifyWeakHandles(WeakSlotCallback f) {
-  for (Node* current = head_; current != NULL; current = current->next()) {
-    if (current->state_ == Node::WEAK) {
-      if (f(&current->object_)) {
-        current->state_ = Node::PENDING;
-        LOG(isolate_,
- HandleEvent("GlobalHandle::Pending", current->handle().location()));
-      }
+  for (NodeIterator it(this); !it.done(); it.Advance()) {
+    if (it.node()->IsWeak() && f(it.node()->location())) {
+      it.node()->MarkPending();
     }
   }
 }


-void GlobalHandles::IdentifyWeakIndependentHandles(WeakSlotCallbackWithHeap f) {
-  for (Node* current = head_; current != NULL; current = current->next()) {
-    if (current->state_ == Node::WEAK && current->independent_) {
-      if (f(isolate_->heap(), &current->object_)) {
-        current->state_ = Node::PENDING;
-        LOG(isolate_,
- HandleEvent("GlobalHandle::Pending", current->handle().location()));
-      }
+void GlobalHandles::IdentifyNewSpaceWeakIndependentHandles(
+    WeakSlotCallbackWithHeap f) {
+  for (int i = 0; i < new_space_nodes_.length(); ++i) {
+    Node* node = new_space_nodes_[i];
+    ASSERT(node->is_in_new_space_list());
+    if (node->is_independent() && node->IsWeak() &&
+        f(isolate_->heap(), node->location())) {
+      node->MarkPending();
     }
   }
 }
@@ -462,98 +515,96 @@
   // Process weak global handle callbacks. This must be done after the
   // GC is completely done, because the callbacks may invoke arbitrary
   // API functions.
-  // At the same time deallocate all DESTROYED nodes.
   ASSERT(isolate_->heap()->gc_state() == Heap::NOT_IN_GC);
   const int initial_post_gc_processing_count = ++post_gc_processing_count_;
   bool next_gc_likely_to_collect_more = false;
-  Node** p = &head_;
-  while (*p != NULL) {
-    // Skip dependent handles. Their weak callbacks might expect to be
-    // called between two global garbage collection callbacks which
-    // are not called for minor collections.
-    if (collector == SCAVENGER && !(*p)->independent_) {
-      p = (*p)->next_addr();
-      continue;
-    }
-
-    if ((*p)->PostGarbageCollectionProcessing(isolate_, this)) {
-      if (initial_post_gc_processing_count != post_gc_processing_count_) {
-        // Weak callback triggered another GC and another round of
-        // PostGarbageCollection processing.  The current node might
-        // have been deleted in that round, so we need to bail out (or
-        // restart the processing).
-        break;
+  if (collector == SCAVENGER) {
+    for (int i = 0; i < new_space_nodes_.length(); ++i) {
+      Node* node = new_space_nodes_[i];
+      ASSERT(node->is_in_new_space_list());
+      // Skip dependent handles. Their weak callbacks might expect to be
+      // called between two global garbage collection callbacks which
+      // are not called for minor collections.
+      if (!node->is_independent()) continue;
+      if (node->PostGarbageCollectionProcessing(isolate_, this)) {
+ if (initial_post_gc_processing_count != post_gc_processing_count_) {
+          // Weak callback triggered another GC and another round of
+          // PostGarbageCollection processing.  The current node might
+          // have been deleted in that round, so we need to bail out (or
+          // restart the processing).
+          return next_gc_likely_to_collect_more;
+        }
+      }
+      if (!node->IsRetainer()) {
+        next_gc_likely_to_collect_more = true;
       }
     }
-    if ((*p)->state_ == Node::DESTROYED) {
-      // Delete the link.
-      Node* node = *p;
-      *p = node->next();  // Update the link.
-      if (first_deallocated()) {
-        first_deallocated()->set_next(node);
-      }
-      node->set_next_free(first_deallocated());
-      set_first_deallocated(node);
-      next_gc_likely_to_collect_more = true;
+  } else {
+    for (NodeIterator it(this); !it.done(); it.Advance()) {
+      if (it.node()->PostGarbageCollectionProcessing(isolate_, this)) {
+ if (initial_post_gc_processing_count != post_gc_processing_count_) {
+          // See the comment above.
+          return next_gc_likely_to_collect_more;
+        }
+      }
+      if (!it.node()->IsRetainer()) {
+        next_gc_likely_to_collect_more = true;
+      }
+    }
+  }
+  // Update the list of new space nodes.
+  int last = 0;
+  for (int i = 0; i < new_space_nodes_.length(); ++i) {
+    Node* node = new_space_nodes_[i];
+    ASSERT(node->is_in_new_space_list());
+ if (node->IsRetainer() && isolate_->heap()->InNewSpace(node->object())) {
+      new_space_nodes_[last++] = node;
     } else {
-      p = (*p)->next_addr();
+      node->set_in_new_space_list(false);
     }
   }
-  set_first_free(NULL);
-  if (first_deallocated()) {
-    first_deallocated()->set_next(head());
-  }
-
+  new_space_nodes_.Rewind(last);
   return next_gc_likely_to_collect_more;
 }


 void GlobalHandles::IterateStrongRoots(ObjectVisitor* v) {
-  // Traversal of global handles marked as NORMAL.
-  for (Node* current = head_; current != NULL; current = current->next()) {
-    if (current->state_ == Node::NORMAL) {
-      v->VisitPointer(&current->object_);
+  for (NodeIterator it(this); !it.done(); it.Advance()) {
+    if (it.node()->IsStrongRetainer()) {
+      v->VisitPointer(it.node()->location());
     }
   }
 }


 void GlobalHandles::IterateAllRoots(ObjectVisitor* v) {
-  for (Node* current = head_; current != NULL; current = current->next()) {
-    if (current->state_ != Node::DESTROYED) {
-      v->VisitPointer(&current->object_);
+  for (NodeIterator it(this); !it.done(); it.Advance()) {
+    if (it.node()->IsRetainer()) {
+      v->VisitPointer(it.node()->location());
     }
   }
 }


-void GlobalHandles::IterateStrongAndDependentRoots(ObjectVisitor* v) {
-  for (Node* current = head_; current != NULL; current = current->next()) {
-    if ((current->independent_ && current->state_ == Node::NORMAL) ||
-        (!current->independent_ && current->state_ != Node::DESTROYED)) {
-      v->VisitPointer(&current->object_);
+void GlobalHandles::IterateNewSpaceStrongAndDependentRoots(ObjectVisitor* v) {
+  for (int i = 0; i < new_space_nodes_.length(); ++i) {
+    Node* node = new_space_nodes_[i];
+    if (node->IsStrongRetainer() ||
+        (node->IsWeakRetainer() && !node->is_independent())) {
+      v->VisitPointer(node->location());
     }
   }
 }


 void GlobalHandles::IterateAllRootsWithClassIds(ObjectVisitor* v) {
-  for (Node* current = head_; current != NULL; current = current->next()) {
- if (current->class_id_ != v8::HeapProfiler::kPersistentHandleNoClassId &&
-        current->CanBeRetainer()) {
-      v->VisitEmbedderReference(&current->object_, current->class_id_);
+  for (NodeIterator it(this); !it.done(); it.Advance()) {
+    if (it.node()->has_wrapper_class_id() && it.node()->IsRetainer()) {
+      v->VisitEmbedderReference(it.node()->location(),
+                                it.node()->wrapper_class_id());
     }
   }
 }
-
-
-void GlobalHandles::TearDown() {
-  // Reset all the lists.
-  set_head(NULL);
-  set_first_free(NULL);
-  set_first_deallocated(NULL);
-  pool_->Release();
-}


 void GlobalHandles::RecordStats(HeapStats* stats) {
@@ -561,17 +612,17 @@
   *stats->weak_global_handle_count = 0;
   *stats->pending_global_handle_count = 0;
   *stats->near_death_global_handle_count = 0;
-  *stats->destroyed_global_handle_count = 0;
-  for (Node* current = head_; current != NULL; current = current->next()) {
+  *stats->free_global_handle_count = 0;
+  for (NodeIterator it(this); !it.done(); it.Advance()) {
     *stats->global_handle_count += 1;
-    if (current->state_ == Node::WEAK) {
+    if (it.node()->state() == Node::WEAK) {
       *stats->weak_global_handle_count += 1;
-    } else if (current->state_ == Node::PENDING) {
+    } else if (it.node()->state() == Node::PENDING) {
       *stats->pending_global_handle_count += 1;
-    } else if (current->state_ == Node::NEAR_DEATH) {
+    } else if (it.node()->state() == Node::NEAR_DEATH) {
       *stats->near_death_global_handle_count += 1;
-    } else if (current->state_ == Node::DESTROYED) {
-      *stats->destroyed_global_handle_count += 1;
+    } else if (it.node()->state() == Node::FREE) {
+      *stats->free_global_handle_count += 1;
     }
   }
 }
@@ -585,12 +636,12 @@
   int near_death = 0;
   int destroyed = 0;

-  for (Node* current = head_; current != NULL; current = current->next()) {
+  for (NodeIterator it(this); !it.done(); it.Advance()) {
     total++;
-    if (current->state_ == Node::WEAK) weak++;
-    if (current->state_ == Node::PENDING) pending++;
-    if (current->state_ == Node::NEAR_DEATH) near_death++;
-    if (current->state_ == Node::DESTROYED) destroyed++;
+    if (it.node()->state() == Node::WEAK) weak++;
+    if (it.node()->state() == Node::PENDING) pending++;
+    if (it.node()->state() == Node::NEAR_DEATH) near_death++;
+    if (it.node()->state() == Node::FREE) destroyed++;
   }

   PrintF("Global Handle Statistics:\n");
@@ -598,17 +649,17 @@
   PrintF("  # weak       = %d\n", weak);
   PrintF("  # pending    = %d\n", pending);
   PrintF("  # near_death = %d\n", near_death);
-  PrintF("  # destroyed  = %d\n", destroyed);
+  PrintF("  # free       = %d\n", destroyed);
   PrintF("  # total      = %d\n", total);
 }

 void GlobalHandles::Print() {
   PrintF("Global handles:\n");
-  for (Node* current = head_; current != NULL; current = current->next()) {
-    PrintF("  handle %p to %p (weak=%d)\n",
-           reinterpret_cast<void*>(current->handle().location()),
-           reinterpret_cast<void*>(*current->handle()),
-           current->state_ == Node::WEAK);
+  for (NodeIterator it(this); !it.done(); it.Advance()) {
+    PrintF("  handle %p to %p%s\n",
+           reinterpret_cast<void*>(it.node()->location()),
+           reinterpret_cast<void*>(it.node()->object()),
+           it.node()->IsWeak() ? " (weak)" : "");
   }
 }

@@ -621,7 +672,7 @@
                                    v8::RetainedObjectInfo* info) {
 #ifdef DEBUG
   for (size_t i = 0; i < length; ++i) {
-    ASSERT(!Node::FromLocation(handles[i])->independent_);
+    ASSERT(!Node::FromLocation(handles[i])->is_independent());
   }
 #endif
   if (length == 0) {
@@ -636,9 +687,9 @@
                                           Object*** children,
                                           size_t length) {
 #ifdef DEBUG
-  ASSERT(!Node::FromLocation(BitCast<Object**>(parent))->independent_);
+  ASSERT(!Node::FromLocation(BitCast<Object**>(parent))->is_independent());
   for (size_t i = 0; i < length; ++i) {
-    ASSERT(!Node::FromLocation(children[i])->independent_);
+    ASSERT(!Node::FromLocation(children[i])->is_independent());
   }
 #endif
   if (length == 0) return;
@@ -660,6 +711,11 @@
   }
   implicit_ref_groups_.Clear();
 }
+
+
+void GlobalHandles::TearDown() {
+  // TODO(1428): invoke weak callbacks.
+}


 } }  // namespace v8::internal
=======================================
--- /branches/bleeding_edge/src/global-handles.h        Tue May 17 05:18:19 2011
+++ /branches/bleeding_edge/src/global-handles.h        Mon Jun  6 08:23:04 2011
@@ -163,7 +163,7 @@
   void IterateStrongRoots(ObjectVisitor* v);

   // Iterates over all strong and dependent handles.
-  void IterateStrongAndDependentRoots(ObjectVisitor* v);
+  void IterateNewSpaceStrongAndDependentRoots(ObjectVisitor* v);

   // Iterates over all handles.
   void IterateAllRoots(ObjectVisitor* v);
@@ -175,7 +175,7 @@
   void IterateWeakRoots(ObjectVisitor* v);

   // Iterates over all weak independent roots in heap.
-  void IterateWeakIndependentRoots(ObjectVisitor* v);
+  void IterateNewSpaceWeakIndependentRoots(ObjectVisitor* v);

   // Iterates over weak roots that are bound to a given callback.
   void IterateWeakRoots(WeakReferenceGuest f,
@@ -187,7 +187,7 @@

   // Find all weak independent handles satisfying the callback predicate, mark
   // them as pending.
-  void IdentifyWeakIndependentHandles(WeakSlotCallbackWithHeap f);
+  void IdentifyNewSpaceWeakIndependentHandles(WeakSlotCallbackWithHeap f);

   // Add an object group.
   // Should be only used in GC callback function before a collection.
@@ -224,12 +224,14 @@
   void PrintStats();
   void Print();
 #endif
-  class Pool;
+
  private:
   explicit GlobalHandles(Isolate* isolate);

-  // Internal node structure, one for each global handle.
+  // Internal node structures.
   class Node;
+  class NodeBlock;
+  class NodeIterator;

   Isolate* isolate_;

@@ -241,35 +243,21 @@
   // number_of_weak_handles_.
   int number_of_global_object_weak_handles_;

-  // Global handles are kept in a single linked list pointed to by head_.
-  Node* head_;
-  Node* head() { return head_; }
-  void set_head(Node* value) { head_ = value; }
-
-  // Free list for DESTROYED global handles not yet deallocated.
+  // List of all allocated node blocks.
+  NodeBlock* first_block_;
+
+  // List of node blocks with used nodes.
+  NodeBlock* first_used_block_;
+
+  // Free list of nodes.
   Node* first_free_;
-  Node* first_free() { return first_free_; }
-  void set_first_free(Node* value) { first_free_ = value; }
-
-  // List of deallocated nodes.
-  // Deallocated nodes form a prefix of all the nodes and
-  // |first_deallocated| points to last deallocated node before
-  // |head|.  Those deallocated nodes are additionally linked
-  // by |next_free|:
-  //                                    1st deallocated  head
-  //                                           |          |
-  //                                           V          V
-  //    node          node        ...         node       node
-  //      .next      -> .next ->                .next ->
-  //   <- .next_free <- .next_free           <- .next_free
-  Node* first_deallocated_;
-  Node* first_deallocated() { return first_deallocated_; }
-  void set_first_deallocated(Node* value) {
-    first_deallocated_ = value;
-  }
-
-  Pool* pool_;
+
+  // Contains all nodes holding new space objects. Note: when the list
+  // is accessed, some of the objects may have been promoted already.
+  List<Node*> new_space_nodes_;
+
   int post_gc_processing_count_;
+
   List<ObjectGroup*> object_groups_;
   List<ImplicitRefGroup*> implicit_ref_groups_;

=======================================
--- /branches/bleeding_edge/src/heap.cc Mon Jun  6 06:15:11 2011
+++ /branches/bleeding_edge/src/heap.cc Mon Jun  6 08:23:04 2011
@@ -1063,9 +1063,10 @@
   scavenge_visitor.VisitPointer(BitCast<Object**>(&global_contexts_list_));

   new_space_front = DoScavenge(&scavenge_visitor, new_space_front);
-  isolate_->global_handles()->IdentifyWeakIndependentHandles(
+  isolate_->global_handles()->IdentifyNewSpaceWeakIndependentHandles(
       &IsUnscavengedHeapObject);
-  isolate_->global_handles()->IterateWeakIndependentRoots(&scavenge_visitor);
+  isolate_->global_handles()->IterateNewSpaceWeakIndependentRoots(
+      &scavenge_visitor);
   new_space_front = DoScavenge(&scavenge_visitor, new_space_front);


@@ -4612,7 +4613,7 @@
       isolate_->global_handles()->IterateStrongRoots(v);
       break;
     case VISIT_ALL_IN_SCAVENGE:
-      isolate_->global_handles()->IterateStrongAndDependentRoots(v);
+      isolate_->global_handles()->IterateNewSpaceStrongAndDependentRoots(v);
       break;
     case VISIT_ALL_IN_SWEEP_NEWSPACE:
     case VISIT_ALL:
=======================================
--- /branches/bleeding_edge/src/heap.h  Mon Jun  6 06:15:11 2011
+++ /branches/bleeding_edge/src/heap.h  Mon Jun  6 08:23:04 2011
@@ -1637,7 +1637,7 @@
   int* weak_global_handle_count;        // 15
   int* pending_global_handle_count;     // 16
   int* near_death_global_handle_count;  // 17
-  int* destroyed_global_handle_count;   // 18
+  int* free_global_handle_count;        // 18
   intptr_t* memory_allocator_size;           // 19
   intptr_t* memory_allocator_capacity;       // 20
   int* objects_per_type;                // 21

--
v8-dev mailing list
[email protected]
http://groups.google.com/group/v8-dev

Reply via email to