Revision: 12825
Author:   [email protected]
Date:     Fri Oct 26 02:44:34 2012
Log:      Fix code flusher to process weak function links.

This fixes a corner case where the weak function links of the code flushing
candidates list were invalidated by scavenges that ran during incremental
marking. Those weak function links are now updated while the scavenge is in
progress.
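
For readers who do not have the code flusher paged in, here is a minimal
sketch of the failure mode and the fix. All names below are stand-ins for
illustration, not the real V8 classes; the actual change is in the
mark-compact.cc hunk further down:

  // Sketch only; every type here is a hypothetical stand-in.
  struct Object;
  struct ObjectVisitor {
    // A scavenge visitor evacuates the target if needed and rewrites *p.
    virtual void VisitPointer(Object** p) = 0;
  };
  struct Heap {
    bool InFromSpace(void* obj);  // true while obj still lives in from-space
  };

  // The candidates list is threaded through a weak field of each function,
  // so an ordinary scavenge never visits these slots and the links end up
  // pointing at stale from-space copies once objects are evacuated.
  struct Function {
    Function* next_candidate;  // stands in for next_function_link
  };

  // Walk the list during scavenge and let the scavenge visitor rewrite any
  // slot whose target still sits in from-space.
  void UpdateWeakCandidateLinks(Function** head, ObjectVisitor* v, Heap* heap) {
    for (Function** slot = head; *slot != NULL; ) {
      if (heap->InFromSpace(*slot)) {
        v->VisitPointer(reinterpret_cast<Object**>(slot));  // may rewrite *slot
      }
      slot = &(*slot)->next_candidate;  // advance via the possibly updated link
    }
  }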

[email protected]
TEST=cctest/test-heap/TestCodeFlushingIncrementalScavenge

Review URL: https://codereview.chromium.org/11271006
http://code.google.com/p/v8/source/detail?r=12825

Modified:
 /branches/bleeding_edge/src/heap.cc
 /branches/bleeding_edge/src/mark-compact.cc
 /branches/bleeding_edge/src/mark-compact.h
 /branches/bleeding_edge/src/objects-debug.cc
 /branches/bleeding_edge/test/cctest/test-heap.cc

=======================================
--- /branches/bleeding_edge/src/heap.cc Thu Oct 25 04:52:37 2012
+++ /branches/bleeding_edge/src/heap.cc Fri Oct 26 02:44:34 2012
@@ -1326,6 +1326,12 @@
       scavenge_visitor.VisitPointer(reinterpret_cast<Object**>(value_address));
     }
   }
+
+  // Copy objects reachable from the code flushing candidates list.
+  MarkCompactCollector* collector = mark_compact_collector();
+  if (collector->is_code_flushing_enabled()) {
+    collector->code_flusher()->IteratePointersToFromSpace(&scavenge_visitor);
+  }

   // Scavenge objects reachable from the native contexts list directly.
   scavenge_visitor.VisitPointer(BitCast<Object**>(&native_contexts_list_));
@@ -5541,6 +5547,7 @@
   }
   return symbol_table()->LookupSymbolIfExists(string, symbol);
 }
+

 void Heap::ZapFromSpace() {
   NewSpacePageIterator it(new_space_.FromSpaceStart(),
=======================================
--- /branches/bleeding_edge/src/mark-compact.cc Tue Oct 23 01:25:04 2012
+++ /branches/bleeding_edge/src/mark-compact.cc Fri Oct 26 02:44:34 2012
@@ -950,6 +950,21 @@
     }
   }
 }
+
+
+void CodeFlusher::IteratePointersToFromSpace(ObjectVisitor* v) {
+  Heap* heap = isolate_->heap();
+
+  JSFunction** slot = &jsfunction_candidates_head_;
+  JSFunction* candidate = jsfunction_candidates_head_;
+  while (candidate != NULL) {
+    if (heap->InFromSpace(candidate)) {
+      v->VisitPointer(reinterpret_cast<Object**>(slot));
+    }
+    candidate = GetNextCandidate(*slot);
+    slot = GetNextCandidateSlot(*slot);
+  }
+}


 MarkCompactCollector::~MarkCompactCollector() {
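
A note on the traversal in the hunk above: VisitPointer may rewrite *slot to
the candidate's new to-space address, so the loop deliberately re-reads the
slot before advancing. An annotated copy of the new loop body, with comments
added here for clarity (they are not in the commit):

  if (heap->InFromSpace(candidate)) {
    v->VisitPointer(reinterpret_cast<Object**>(slot));  // may rewrite *slot
  }
  candidate = GetNextCandidate(*slot);   // re-read through the updated slot
  slot = GetNextCandidateSlot(*slot);    // link field of the to-space copy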
=======================================
--- /branches/bleeding_edge/src/mark-compact.h  Tue Oct 23 01:25:04 2012
+++ /branches/bleeding_edge/src/mark-compact.h  Fri Oct 26 02:44:34 2012
@@ -440,11 +440,18 @@
     ProcessSharedFunctionInfoCandidates();
     ProcessJSFunctionCandidates();
   }
+
+  void IteratePointersToFromSpace(ObjectVisitor* v);

  private:
   void ProcessJSFunctionCandidates();
   void ProcessSharedFunctionInfoCandidates();

+  static JSFunction** GetNextCandidateSlot(JSFunction* candidate) {
+    return reinterpret_cast<JSFunction**>(
+        HeapObject::RawField(candidate, JSFunction::kNextFunctionLinkOffset));
+  }
+
   static JSFunction* GetNextCandidate(JSFunction* candidate) {
     Object* next_candidate = candidate->next_function_link();
     return reinterpret_cast<JSFunction*>(next_candidate);
=======================================
--- /branches/bleeding_edge/src/objects-debug.cc        Fri Oct 12 04:41:14 2012
+++ /branches/bleeding_edge/src/objects-debug.cc        Fri Oct 26 02:44:34 2012
@@ -499,7 +499,8 @@
   VerifyObjectField(kPrototypeOrInitialMapOffset);
   VerifyObjectField(kNextFunctionLinkOffset);
   CHECK(code()->IsCode());
-  CHECK(next_function_link()->IsUndefined() ||
+  CHECK(next_function_link() == NULL ||
+        next_function_link()->IsUndefined() ||
         next_function_link()->IsJSFunction());
 }

=======================================
--- /branches/bleeding_edge/test/cctest/test-heap.cc Tue Oct 23 01:25:04 2012
+++ /branches/bleeding_edge/test/cctest/test-heap.cc Fri Oct 26 02:44:34 2012
@@ -1067,6 +1067,70 @@
   CHECK(function->shared()->is_compiled() || !function->IsOptimized());
   CHECK(function->is_compiled() || !function->IsOptimized());
 }
+
+
+TEST(TestCodeFlushingIncrementalScavenge) {
+  // If we do not flush code this test is invalid.
+  if (!FLAG_flush_code) return;
+  i::FLAG_allow_natives_syntax = true;
+  InitializeVM();
+  v8::HandleScope scope;
+  const char* source = "var foo = function() {"
+                       "  var x = 42;"
+                       "  var y = 42;"
+                       "  var z = x + y;"
+                       "};"
+                       "foo();"
+                       "var bar = function() {"
+                       "  var x = 23;"
+                       "};"
+                       "bar();";
+  Handle<String> foo_name = FACTORY->LookupAsciiSymbol("foo");
+  Handle<String> bar_name = FACTORY->LookupAsciiSymbol("bar");
+
+  // Perform one initial GC to enable code flushing.
+  HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+
+  // This compile will add the code to the compilation cache.
+  { v8::HandleScope scope;
+    CompileRun(source);
+  }
+
+  // Check functions are compiled.
+  Object* func_value = Isolate::Current()->context()->global_object()->
+      GetProperty(*foo_name)->ToObjectChecked();
+  CHECK(func_value->IsJSFunction());
+  Handle<JSFunction> function(JSFunction::cast(func_value));
+  CHECK(function->shared()->is_compiled());
+  Object* func_value2 = Isolate::Current()->context()->global_object()->
+      GetProperty(*bar_name)->ToObjectChecked();
+  CHECK(func_value2->IsJSFunction());
+  Handle<JSFunction> function2(JSFunction::cast(func_value2));
+  CHECK(function2->shared()->is_compiled());
+
+  // Clear references to functions so that one of them can die.
+  { v8::HandleScope scope;
+    CompileRun("foo = 0; bar = 0;");
+  }
+
+  // Bump the code age so that flushing is triggered while the function
+  // object is still located in new-space.
+  const int kAgingThreshold = 6;
+  function->shared()->set_code_age(kAgingThreshold);
+  function2->shared()->set_code_age(kAgingThreshold);
+
+  // Simulate incremental marking so that the functions are enqueued as
+  // code flushing candidates. Then kill one of the functions. Finally
+  // perform a scavenge while incremental marking is still running.
+  SimulateIncrementalMarking();
+  *function2.location() = NULL;
+  HEAP->CollectGarbage(NEW_SPACE, "test scavenge while marking");
+
+  // Simulate one final GC to make sure the candidate queue is sane.
+  HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+  CHECK(!function->shared()->is_compiled() || function->IsOptimized());
+  CHECK(!function->is_compiled() || function->IsOptimized());
+}


 // Count the number of native contexts in the weak list of native contexts.
