Revision: 9253
Author: [email protected]
Date: Tue Sep 13 02:41:04 2011
Log: Reduce imprecision of incremental marking.
Don't mark through caches that will be flushed by MarkCompactPrologue. Make
caches sticky grey (don't push them onto the marking stack) to prevent the
write barrier or the marker from marking them.
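
As an aside, here is a self-contained toy model of what "sticky grey" means
(this is not V8 code; the bit layout is inferred from the Marking::AnyToGrey
helper added below, where white = first bit clear and grey = both bits set):

  #include <cassert>

  // Two mark bits per object, modelled as plain bools.
  struct MarkBits { bool first = false; bool second = false; };

  // Mirrors Marking::AnyToGrey: force the object grey from any color.
  void AnyToGrey(MarkBits* m) { m->first = m->second = true; }

  bool IsWhite(const MarkBits& m) { return !m.first; }
  bool IsGrey(const MarkBits& m) { return m.first && m.second; }

  int main() {
    MarkBits cache;        // a freshly cleared cache starts out white
    AnyToGrey(&cache);     // sticky grey: colored, but never enqueued
    // The write barrier and the marker only push white objects, so the
    // cache is never traced through; a separate pass blackens it when
    // marking finishes (see Hurry() below).
    assert(IsGrey(cache) && !IsWhite(cache));
  }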
Don't mark through weak links connecting global contexts.
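
The weak-link handling (VisitGlobalContext below) visits the strong body of a
context as usual but only records the weak tail slots for later pointer
updating instead of tracing through them. A toy analogue (not V8 code; the
slot layout is simplified):

  #include <cstdio>
  #include <vector>

  struct Slot { int target; bool weak; };

  int main() {
    // A "context": strong slots first, weak links at the tail, as with
    // Context::FIRST_WEAK_SLOT (e.g. NEXT_CONTEXT_LINK).
    std::vector<Slot> context = {{1, false}, {2, false}, {3, true}};
    std::vector<int> marking_deque;  // strong targets get traced
    std::vector<int*> recorded;      // weak slots only get RecordSlot'ed
    for (Slot& s : context) {
      if (s.weak) recorded.push_back(&s.target);
      else marking_deque.push_back(s.target);
    }
    std::printf("traced %zu strong, recorded %zu weak slots\n",
                marking_deque.size(), recorded.size());
  }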
Add tests that started failing after the merge from bleeding edge to
mjsunit.status and cctest.status.
[email protected]
BUG=v8:1679
Review URL: http://codereview.chromium.org/7867040
http://code.google.com/p/v8/source/detail?r=9253
Modified:
/branches/experimental/gc/src/heap.cc
/branches/experimental/gc/src/heap.h
/branches/experimental/gc/src/incremental-marking.cc
/branches/experimental/gc/src/incremental-marking.h
/branches/experimental/gc/src/mark-compact.h
/branches/experimental/gc/test/cctest/cctest.status
/branches/experimental/gc/test/mjsunit/mjsunit.status
=======================================
--- /branches/experimental/gc/src/heap.cc Sat Sep 10 07:48:09 2011
+++ /branches/experimental/gc/src/heap.cc Tue Sep 13 02:41:04 2011
@@ -650,7 +650,10 @@
void Heap::ClearNormalizedMapCaches() {
- if (isolate_->bootstrapper()->IsActive()) return;
+ if (isolate_->bootstrapper()->IsActive() &&
+ !incremental_marking()->IsMarking()) {
+ return;
+ }
Object* context = global_contexts_list_;
while (!context->IsUndefined()) {
=======================================
--- /branches/experimental/gc/src/heap.h Mon Sep 5 04:54:49 2011
+++ /branches/experimental/gc/src/heap.h Tue Sep 13 02:41:04 2011
@@ -1412,6 +1412,10 @@
void QueueMemoryChunkForFree(MemoryChunk* chunk);
void FreeQueuedChunks();
+  // Completely clear the Instanceof cache (to stop it keeping objects alive
+  // around a GC).
+ inline void CompletelyClearInstanceofCache();
+
private:
Heap();
@@ -1655,10 +1659,6 @@
// Code to be run before and after mark-compact.
void MarkCompactPrologue();
-  // Completely clear the Instanceof cache (to stop it keeping objects alive
-  // around a GC).
- inline void CompletelyClearInstanceofCache();
-
// Record statistics before and after garbage collection.
void ReportStatisticsBeforeGC();
void ReportStatisticsAfterGC();
=======================================
--- /branches/experimental/gc/src/incremental-marking.cc Sat Sep 10 07:48:09 2011
+++ /branches/experimental/gc/src/incremental-marking.cc Tue Sep 13 02:41:04 2011
@@ -30,6 +30,7 @@
#include "incremental-marking.h"
#include "code-stubs.h"
+#include "compilation-cache.h"
#include "v8conversions.h"
namespace v8 {
@@ -398,6 +399,14 @@
heap_->new_space()->LowerInlineAllocationLimit(kAllocatedThreshold);
}
+
+
+static void MarkObjectGreyDoNotEnqueue(Object* obj) {
+ if (obj->IsHeapObject()) {
+ MarkBit mark_bit = Marking::MarkBitFrom(HeapObject::cast(obj));
+ Marking::AnyToGrey(mark_bit);
+ }
+}
void IncrementalMarking::StartMarking() {
@@ -429,6 +438,15 @@
// Marking bits are cleared by the sweeper.
heap_->mark_compact_collector()->VerifyMarkbitsAreClean();
#endif
+
+ heap_->CompletelyClearInstanceofCache();
+ heap_->isolate()->compilation_cache()->MarkCompactPrologue();
+
+ if (FLAG_cleanup_code_caches_at_gc) {
+ // We will mark cache black with a separate pass
+ // when we finish marking.
+ MarkObjectGreyDoNotEnqueue(heap_->polymorphic_code_cache());
+ }
// Mark strong roots grey.
IncrementalMarkingRootMarkingVisitor visitor(heap_, this);
@@ -490,6 +508,24 @@
steps_count_since_last_gc_ = 0;
longest_step_ = 0.0;
}
+
+
+void IncrementalMarking::VisitGlobalContext(Context* ctx, ObjectVisitor* v) {
+ v->VisitPointers(
+ HeapObject::RawField(
+ ctx, Context::MarkCompactBodyDescriptor::kStartOffset),
+ HeapObject::RawField(
+ ctx, Context::MarkCompactBodyDescriptor::kEndOffset));
+
+ MarkCompactCollector* collector = heap_->mark_compact_collector();
+ for (int idx = Context::FIRST_WEAK_SLOT;
+ idx < Context::GLOBAL_CONTEXT_SLOTS;
+ ++idx) {
+ Object** slot =
+ HeapObject::RawField(ctx, FixedArray::OffsetOfElementAt(idx));
+ collector->RecordSlot(slot, slot, *slot);
+ }
+}
void IncrementalMarking::Hurry() {
@@ -502,19 +538,27 @@
// TODO(gc) hurry can mark objects it encounters black as mutator
// was stopped.
Map* filler_map = heap_->one_pointer_filler_map();
+ Map* global_context_map = heap_->global_context_map();
IncrementalMarkingMarkingVisitor marking_visitor(heap_, this);
while (!marking_deque_.IsEmpty()) {
HeapObject* obj = marking_deque_.Pop();
// Explicitly skip one word fillers. Incremental markbit patterns are
// correct only for objects that occupy at least two words.
- if (obj->map() != filler_map) {
+    Map* map = obj->map();
+ if (map == filler_map) {
+ continue;
+ } else if (map == global_context_map) {
+ // Global contexts have weak fields.
+ VisitGlobalContext(Context::cast(obj), &marking_visitor);
+ } else {
obj->Iterate(&marking_visitor);
- MarkBit mark_bit = Marking::MarkBitFrom(obj);
- ASSERT(!Marking::IsBlack(mark_bit));
- Marking::MarkBlack(mark_bit);
- MemoryChunk::IncrementLiveBytes(obj->address(), obj->Size());
- }
+ }
+
+ MarkBit mark_bit = Marking::MarkBitFrom(obj);
+ ASSERT(!Marking::IsBlack(mark_bit));
+ Marking::MarkBlack(mark_bit);
+ MemoryChunk::IncrementLiveBytes(obj->address(), obj->Size());
}
state_ = COMPLETE;
if (FLAG_trace_incremental_marking) {
@@ -523,6 +567,18 @@
static_cast<int>(end - start));
}
}
+
+ if (FLAG_cleanup_code_caches_at_gc) {
+    Marking::GreyToBlack(Marking::MarkBitFrom(heap_->polymorphic_code_cache()));
+ }
+
+ Object* context = heap_->global_contexts_list();
+ while (!context->IsUndefined()) {
+    NormalizedMapCache* cache =
+        Context::cast(context)->normalized_map_cache();
+ MarkBit mark_bit = Marking::MarkBitFrom(cache);
+ if (Marking::IsGrey(mark_bit)) Marking::GreyToBlack(mark_bit);
+ context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK);
+ }
}
@@ -602,6 +658,7 @@
}
} else if (state_ == MARKING) {
Map* filler_map = heap_->one_pointer_filler_map();
+ Map* global_context_map = heap_->global_context_map();
IncrementalMarkingMarkingVisitor marking_visitor(heap_, this);
while (!marking_deque_.IsEmpty() && bytes_to_process > 0) {
HeapObject* obj = marking_deque_.Pop();
@@ -609,21 +666,34 @@
// Explicitly skip one word fillers. Incremental markbit patterns are
// correct only for objects that occupy at least two words.
Map* map = obj->map();
- if (map != filler_map) {
- ASSERT(Marking::IsGrey(Marking::MarkBitFrom(obj)));
- int size = obj->SizeFromMap(map);
- bytes_to_process -= size;
- MarkBit map_mark_bit = Marking::MarkBitFrom(map);
- if (Marking::IsWhite(map_mark_bit)) {
- WhiteToGreyAndPush(map, map_mark_bit);
- }
- // TODO(gc) switch to static visitor instead of normal visitor.
+ if (map == filler_map) continue;
+
+ ASSERT(Marking::IsGrey(Marking::MarkBitFrom(obj)));
+ int size = obj->SizeFromMap(map);
+ bytes_to_process -= size;
+ MarkBit map_mark_bit = Marking::MarkBitFrom(map);
+ if (Marking::IsWhite(map_mark_bit)) {
+ WhiteToGreyAndPush(map, map_mark_bit);
+ }
+
+ // TODO(gc) switch to static visitor instead of normal visitor.
+ if (map == global_context_map) {
+ // Global contexts have weak fields.
+ Context* ctx = Context::cast(obj);
+
+ // We will mark cache black with a separate pass
+ // when we finish marking.
+ MarkObjectGreyDoNotEnqueue(ctx->normalized_map_cache());
+
+ VisitGlobalContext(ctx, &marking_visitor);
+ } else {
obj->IterateBody(map->instance_type(), size, &marking_visitor);
- MarkBit obj_mark_bit = Marking::MarkBitFrom(obj);
- ASSERT(!Marking::IsBlack(obj_mark_bit));
- Marking::MarkBlack(obj_mark_bit);
- MemoryChunk::IncrementLiveBytes(obj->address(), size);
- }
+ }
+
+ MarkBit obj_mark_bit = Marking::MarkBitFrom(obj);
+ ASSERT(!Marking::IsBlack(obj_mark_bit));
+ Marking::MarkBlack(obj_mark_bit);
+ MemoryChunk::IncrementLiveBytes(obj->address(), size);
}
if (marking_deque_.IsEmpty()) MarkingComplete();
}
=======================================
--- /branches/experimental/gc/src/incremental-marking.h Tue Sep 6 08:11:38 2011
+++ /branches/experimental/gc/src/incremental-marking.h Tue Sep 13 02:41:04 2011
@@ -218,6 +218,8 @@
void EnsureMarkingDequeIsCommitted();
+ void VisitGlobalContext(Context* ctx, ObjectVisitor* v);
+
Heap* heap_;
State state_;
=======================================
--- /branches/experimental/gc/src/mark-compact.h Thu Sep 8 05:15:24 2011
+++ /branches/experimental/gc/src/mark-compact.h Tue Sep 13 02:41:04 2011
@@ -118,6 +118,12 @@
ASSERT(obj->Size() >= 2 * kPointerSize);
BlackToGrey(MarkBitFrom(obj));
}
+
+ static inline void AnyToGrey(MarkBit markbit) {
+ markbit.Set();
+ markbit.Next().Set();
+ ASSERT(IsGrey(markbit));
+ }
// Returns true if the object whose mark is transferred is marked black.
bool TransferMark(Address old_start, Address new_start);
=======================================
--- /branches/experimental/gc/test/cctest/cctest.status Wed Aug 10 05:50:30 2011
+++ /branches/experimental/gc/test/cctest/cctest.status Tue Sep 13 02:41:04 2011
@@ -52,6 +52,11 @@
# We do not yet shrink weak maps after they have been emptied by the GC
test-weakmaps/Shrinking: FAIL
+# Known failures after merge from the bleeding edge (issue 1672)
+ScriptMakingExternalString: SKIP
+ScriptMakingExternalAsciiString: SKIP
+StressJS: SKIP
+
##############################################################################
[ $arch == arm ]
=======================================
--- /branches/experimental/gc/test/mjsunit/mjsunit.status Tue Jul 12 16:04:25 2011
+++ /branches/experimental/gc/test/mjsunit/mjsunit.status Tue Sep 13 02:41:04 2011
@@ -60,6 +60,15 @@
debug-liveedit-check-stack: SKIP
debug-liveedit-patch-positions-replace: SKIP
+# Known failures after merge from the bleeding edge (issue 1672)
+debug-scopes: PASS, SKIP if ($mode == debug)
+mirror-object: PASS, SKIP if ($mode == debug)
+debug-return-value: PASS, SKIP if ($mode == debug)
+tickprocessor: PASS, SKIP if ($mode == debug)
+debug-blockscopes: PASS, SKIP if ($mode == debug)
+mirror-error: PASS, SKIP if ($mode == debug)
+mirror-array: PASS, SKIP if ($mode == debug)
##############################################################################
[ $arch == arm ]