Revision: 12978
Author: [email protected]
Date: Thu Nov 15 09:57:40 2012
Log: Implement progress bar for large objects.
This implements incremental scanning of large objects using a progress
bar in the page header of such objects. Note that this requires the write
barrier to perform white-to-gray transitions on written values, and hence the
feature is disabled by default for now.
[email protected],[email protected]
Review URL: https://codereview.chromium.org/11362246
http://code.google.com/p/v8/source/detail?r=12978
Modified:
/branches/bleeding_edge/src/flag-definitions.h
/branches/bleeding_edge/src/incremental-marking.cc
/branches/bleeding_edge/src/incremental-marking.h
/branches/bleeding_edge/src/mark-compact.cc
/branches/bleeding_edge/src/objects-visiting-inl.h
/branches/bleeding_edge/src/objects-visiting.h
/branches/bleeding_edge/src/spaces.cc
/branches/bleeding_edge/src/spaces.h
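
Before the diff itself, a minimal, self-contained sketch of the idea may help:
scan a bounded chunk of a large array, remember how far scanning got in a
progress counter, and re-queue the object until the counter reaches the end.
All names here (LargeArray, ScanIncrementally, kChunk) are invented for
illustration; the actual implementation is the VisitFixedArrayIncremental code
in incremental-marking.cc below.

  #include <algorithm>
  #include <cstdio>
  #include <deque>
  #include <vector>

  // Illustrative stand-ins for a large object and the progress counter that
  // the real patch keeps in the page header.
  struct LargeArray {
    std::vector<int> slots;
    int progress_bar;
  };

  const int kChunk = 4;  // Stand-in for kProgressBarScanningChunk.

  // Scans at most kChunk slots per call, records how far it got, and
  // re-queues the object until the recorded offset reaches the end.
  void ScanIncrementally(LargeArray* obj, std::deque<LargeArray*>* worklist) {
    int size = static_cast<int>(obj->slots.size());
    int start = obj->progress_bar;
    int end = std::min(size, start + kChunk);
    for (int i = start; i < end; i++) {
      // ... visit obj->slots[i]: mark the referenced object, record the slot ...
    }
    obj->progress_bar = end;
    if (end < size) {
      // Re-queue at the bottom of the worklist (cf. UnshiftGrey in the patch)
      // so other marking work runs before the next chunk is scanned.
      worklist->push_front(obj);
    }
  }

  int main() {
    LargeArray big;
    big.slots.resize(10);
    big.progress_bar = 0;
    std::deque<LargeArray*> worklist;
    worklist.push_back(&big);
    while (!worklist.empty()) {
      LargeArray* obj = worklist.back();
      worklist.pop_back();
      ScanIncrementally(obj, &worklist);
      std::printf("progress: %d/%d\n", obj->progress_bar,
                  static_cast<int>(obj->slots.size()));
    }
    return 0;
  }

In the patch the counter lives in the page header (MemoryChunk::progress_bar_),
re-queueing is done with UnshiftGrey, and scanning falls back to running
through to the end of the object when the marking deque is full.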
=======================================
--- /branches/bleeding_edge/src/flag-definitions.h Fri Nov 9 05:04:51 2012
+++ /branches/bleeding_edge/src/flag-definitions.h Thu Nov 15 09:57:40 2012
@@ -438,6 +438,9 @@
DEFINE_bool(cleanup_code_caches_at_gc, true,
"Flush inline caches prior to mark compact collection and "
"flush code caches in maps during mark compact cycle.")
+DEFINE_bool(use_marking_progress_bar, false,
+ "Use a progress bar to scan large objects in increments when "
+ "incremental marking is active.")
DEFINE_int(random_seed, 0,
"Default seed for initializing random generator "
"(0, the default, means to use system random).")
=======================================
--- /branches/bleeding_edge/src/incremental-marking.cc Mon Nov 12 02:12:35 2012
+++ /branches/bleeding_edge/src/incremental-marking.cc Thu Nov 15 09:57:40 2012
@@ -186,6 +186,28 @@
Marking::AnyToGrey(mark_bit);
}
}
+
+
+static inline void MarkBlackOrKeepGrey(HeapObject* heap_object,
+ MarkBit mark_bit,
+ int size) {
+ ASSERT(!Marking::IsImpossible(mark_bit));
+ if (mark_bit.Get()) return;
+ mark_bit.Set();
+ MemoryChunk::IncrementLiveBytesFromGC(heap_object->address(), size);
+ ASSERT(Marking::IsBlack(mark_bit));
+}
+
+
+static inline void MarkBlackOrKeepBlack(HeapObject* heap_object,
+ MarkBit mark_bit,
+ int size) {
+ ASSERT(!Marking::IsImpossible(mark_bit));
+ if (Marking::IsBlack(mark_bit)) return;
+ Marking::MarkBlack(mark_bit);
+ MemoryChunk::IncrementLiveBytesFromGC(heap_object->address(), size);
+ ASSERT(Marking::IsBlack(mark_bit));
+}
class IncrementalMarkingMarkingVisitor
@@ -193,10 +215,50 @@
public:
static void Initialize() {
StaticMarkingVisitor<IncrementalMarkingMarkingVisitor>::Initialize();
-
+ table_.Register(kVisitFixedArray, &VisitFixedArrayIncremental);
table_.Register(kVisitNativeContext, &VisitNativeContextIncremental);
table_.Register(kVisitJSRegExp, &VisitJSRegExp);
}
+
+ static const int kProgressBarScanningChunk = 32 * 1024;
+
+ static void VisitFixedArrayIncremental(Map* map, HeapObject* object) {
+ MemoryChunk* chunk = MemoryChunk::FromAddress(object->address());
+ // TODO(mstarzinger): Move setting of the flag to the allocation site of
+ // the array. The visitor should just check the flag.
+ if (FLAG_use_marking_progress_bar &&
+ chunk->owner()->identity() == LO_SPACE) {
+ chunk->SetFlag(MemoryChunk::HAS_PROGRESS_BAR);
+ }
+ if (chunk->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR)) {
+ Heap* heap = map->GetHeap();
+ // When using a progress bar for large fixed arrays, scan only a chunk of
+ // the array and try to push it onto the marking deque again until it is
+ // fully scanned. Fall back to scanning it through to the end in case this
+ // fails because of a full deque.
+ int object_size = FixedArray::BodyDescriptor::SizeOf(map, object);
+ int start_offset = Max(FixedArray::BodyDescriptor::kStartOffset,
+ chunk->progress_bar());
+ int end_offset = Min(object_size,
+ start_offset + kProgressBarScanningChunk);
+ bool scan_until_end = false;
+ do {
+ VisitPointersWithAnchor(heap,
+ HeapObject::RawField(object, 0),
+ HeapObject::RawField(object, start_offset),
+ HeapObject::RawField(object, end_offset));
+ start_offset = end_offset;
+ end_offset = Min(object_size, end_offset + kProgressBarScanningChunk);
+ scan_until_end = heap->incremental_marking()->marking_deque()->IsFull();
+ } while (scan_until_end && start_offset < object_size);
+ chunk->set_progress_bar(start_offset);
+ if (start_offset < object_size) {
+ heap->incremental_marking()->marking_deque()->UnshiftGrey(object);
+ }
+ } else {
+ FixedArrayVisitor::Visit(map, object);
+ }
+ }
static void VisitNativeContextIncremental(Map* map, HeapObject* object) {
Context* context = Context::cast(object);
@@ -233,16 +295,26 @@
}
}
}
+
+ INLINE(static void VisitPointersWithAnchor(Heap* heap,
+ Object** anchor,
+ Object** start,
+ Object** end)) {
+ for (Object** p = start; p < end; p++) {
+ Object* obj = *p;
+ if (obj->NonFailureIsHeapObject()) {
+ heap->mark_compact_collector()->RecordSlot(anchor, p, obj);
+ MarkObject(heap, obj);
+ }
+ }
+ }
// Marks the object grey and pushes it on the marking stack.
INLINE(static void MarkObject(Heap* heap, Object* obj)) {
HeapObject* heap_object = HeapObject::cast(obj);
MarkBit mark_bit = Marking::MarkBitFrom(heap_object);
if (mark_bit.data_only()) {
- if (heap->incremental_marking()->MarkBlackOrKeepGrey(mark_bit)) {
- MemoryChunk::IncrementLiveBytesFromGC(heap_object->address(),
- heap_object->Size());
- }
+ MarkBlackOrKeepGrey(heap_object, mark_bit, heap_object->Size());
} else if (Marking::IsWhite(mark_bit)) {
heap->incremental_marking()->WhiteToGreyAndPush(heap_object, mark_bit);
}
@@ -288,10 +360,7 @@
HeapObject* heap_object = HeapObject::cast(obj);
MarkBit mark_bit = Marking::MarkBitFrom(heap_object);
if (mark_bit.data_only()) {
- if (incremental_marking_->MarkBlackOrKeepGrey(mark_bit)) {
- MemoryChunk::IncrementLiveBytesFromGC(heap_object->address(),
- heap_object->Size());
- }
+ MarkBlackOrKeepGrey(heap_object, mark_bit, heap_object->Size());
} else {
if (Marking::IsWhite(mark_bit)) {
incremental_marking_->WhiteToGreyAndPush(heap_object, mark_bit);
@@ -616,8 +685,11 @@
ASSERT(new_top != marking_deque_.bottom());
#ifdef DEBUG
MarkBit mark_bit = Marking::MarkBitFrom(obj);
+ MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address());
ASSERT(Marking::IsGrey(mark_bit) ||
- (obj->IsFiller() && Marking::IsWhite(mark_bit)));
+ (obj->IsFiller() && Marking::IsWhite(mark_bit)) ||
+ (chunk->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR) &&
+ Marking::IsBlack(mark_bit)));
#endif
}
}
@@ -637,11 +709,15 @@
IncrementalMarkingMarkingVisitor::IterateBody(map, obj);
- MarkBit obj_mark_bit = Marking::MarkBitFrom(obj);
- SLOW_ASSERT(Marking::IsGrey(obj_mark_bit) ||
- (obj->IsFiller() && Marking::IsWhite(obj_mark_bit)));
- Marking::MarkBlack(obj_mark_bit);
- MemoryChunk::IncrementLiveBytesFromGC(obj->address(), size);
+ MarkBit mark_bit = Marking::MarkBitFrom(obj);
+#ifdef DEBUG
+ MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address());
+ SLOW_ASSERT(Marking::IsGrey(mark_bit) ||
+ (obj->IsFiller() && Marking::IsWhite(mark_bit)) ||
+ (chunk->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR) &&
+ Marking::IsBlack(mark_bit)));
+#endif
+ MarkBlackOrKeepBlack(obj, mark_bit, size);
}
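
The commit log points out that a partially scanned large object is kept black,
so the write barrier has to grey the values written into it instead of
re-greying the host; that barrier change is not part of this diff. The sketch
below only illustrates the intended invariant, with every type and name
(Obj, Color, RecordWriteSketch) invented for the example.

  #include <cassert>

  // Illustrative marking colors and object header; none of this is V8 code.
  enum Color { WHITE, GREY, BLACK };

  struct Obj {
    Color color;
    bool has_progress_bar;
  };

  // A black, partially scanned host stays black, and a white value written
  // into it is turned grey so the marker will still visit it.
  void RecordWriteSketch(Obj* host, Obj* value) {
    if (host->color == BLACK && host->has_progress_bar &&
        value->color == WHITE) {
      value->color = GREY;  // Forward the white -> grey transition to the value.
    }
  }

  int main() {
    Obj host = { BLACK, true };
    Obj value = { WHITE, false };
    RecordWriteSketch(&host, &value);
    assert(value.color == GREY);  // The value will still be scanned.
    assert(host.color == BLACK);  // The partially scanned host stays black.
    return 0;
  }

This invariant is also why the DEBUG assertions above now accept a black
object on a HAS_PROGRESS_BAR page sitting in the marking deque.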
=======================================
--- /branches/bleeding_edge/src/incremental-marking.h Mon Nov 12 02:12:35 2012
+++ /branches/bleeding_edge/src/incremental-marking.h Thu Nov 15 09:57:40 2012
@@ -163,19 +163,6 @@
inline void BlackToGreyAndUnshift(HeapObject* obj, MarkBit mark_bit);
inline void WhiteToGreyAndPush(HeapObject* obj, MarkBit mark_bit);
-
- // Does white->black or keeps gray or black color. Returns true if converting
- // white to black.
- inline bool MarkBlackOrKeepGrey(MarkBit mark_bit) {
- ASSERT(!Marking::IsImpossible(mark_bit));
- if (mark_bit.Get()) {
- // Grey or black: Keep the color.
- return false;
- }
- mark_bit.Set();
- ASSERT(Marking::IsBlack(mark_bit));
- return true;
- }
inline int steps_count() {
return steps_count_;
=======================================
--- /branches/bleeding_edge/src/mark-compact.cc Thu Nov 8 04:18:11 2012
+++ /branches/bleeding_edge/src/mark-compact.cc Thu Nov 15 09:57:40 2012
@@ -488,6 +488,7 @@
MarkBit mark_bit = Marking::MarkBitFrom(obj);
mark_bit.Clear();
mark_bit.Next().Clear();
+ Page::FromAddress(obj->address())->ResetProgressBar();
Page::FromAddress(obj->address())->ResetLiveBytes();
}
}
=======================================
--- /branches/bleeding_edge/src/objects-visiting-inl.h Thu Nov 15 05:31:27 2012
+++ /branches/bleeding_edge/src/objects-visiting-inl.h Thu Nov 15 09:57:40 2012
@@ -110,10 +110,7 @@
SlicedString::BodyDescriptor,
void>::Visit);
- table_.Register(kVisitFixedArray,
- &FlexibleBodyVisitor<StaticVisitor,
- FixedArray::BodyDescriptor,
- void>::Visit);
+ table_.Register(kVisitFixedArray, &FixedArrayVisitor::Visit);
table_.Register(kVisitFixedDoubleArray, &DataObjectVisitor::Visit);
=======================================
--- /branches/bleeding_edge/src/objects-visiting.h Thu Nov 15 05:31:27 2012
+++ /branches/bleeding_edge/src/objects-visiting.h Thu Nov 15 09:57:40 2012
@@ -434,6 +434,10 @@
}
};
+ typedef FlexibleBodyVisitor<StaticVisitor,
+ FixedArray::BodyDescriptor,
+ void> FixedArrayVisitor;
+
typedef FlexibleBodyVisitor<StaticVisitor,
JSObject::BodyDescriptor,
void> JSObjectVisitor;
=======================================
--- /branches/bleeding_edge/src/spaces.cc Mon Oct 22 11:25:10 2012
+++ /branches/bleeding_edge/src/spaces.cc Thu Nov 15 09:57:40 2012
@@ -448,6 +448,7 @@
chunk->slots_buffer_ = NULL;
chunk->skip_list_ = NULL;
chunk->write_barrier_counter_ = kWriteBarrierCounterGranularity;
+ chunk->progress_bar_ = 0;
chunk->high_water_mark_ = static_cast<int>(area_start - base);
chunk->ResetLiveBytes();
Bitmap::Clear(chunk);
@@ -2784,7 +2785,8 @@
MarkBit mark_bit = Marking::MarkBitFrom(object);
if (mark_bit.Get()) {
mark_bit.Clear();
- MemoryChunk::IncrementLiveBytesFromGC(object->address(), -object->Size());
+ Page::FromAddress(object->address())->ResetProgressBar();
+ Page::FromAddress(object->address())->ResetLiveBytes();
previous = current;
current = current->next_page();
} else {
=======================================
--- /branches/bleeding_edge/src/spaces.h Mon Oct 22 09:33:10 2012
+++ /branches/bleeding_edge/src/spaces.h Thu Nov 15 09:57:40 2012
@@ -397,6 +397,12 @@
WAS_SWEPT_PRECISELY,
WAS_SWEPT_CONSERVATIVELY,
+ // Large objects can have a progress bar in their page header. These objects
+ // are scanned in increments and will be kept black while being scanned.
+ // Even if the mutator writes to them they will be kept black and a white
+ // to grey transition is performed on the value.
+ HAS_PROGRESS_BAR,
+
// Last flag, keep at bottom.
NUM_MEMORY_CHUNK_FLAGS
};
@@ -480,6 +486,23 @@
write_barrier_counter_ = counter;
}
+ int progress_bar() {
+ ASSERT(IsFlagSet(HAS_PROGRESS_BAR));
+ return progress_bar_;
+ }
+
+ void set_progress_bar(int progress_bar) {
+ ASSERT(IsFlagSet(HAS_PROGRESS_BAR));
+ progress_bar_ = progress_bar;
+ }
+
+ void ResetProgressBar() {
+ if (IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR)) {
+ set_progress_bar(0);
+ ClearFlag(MemoryChunk::HAS_PROGRESS_BAR);
+ }
+ }
+
static void IncrementLiveBytesFromGC(Address address, int by) {
MemoryChunk::FromAddress(address)->IncrementLiveBytes(by);
@@ -505,7 +528,7 @@
kSlotsBufferOffset + kPointerSize + kPointerSize;
static const size_t kHeaderSize =
- kWriteBarrierCounterOffset + kPointerSize + kPointerSize;
+ kWriteBarrierCounterOffset + kPointerSize + kIntSize + kIntSize;
static const int kBodyOffset =
CODE_POINTER_ALIGN(kHeaderSize + Bitmap::kSize);
@@ -649,6 +672,9 @@
SlotsBuffer* slots_buffer_;
SkipList* skip_list_;
intptr_t write_barrier_counter_;
+ // Used by the incremental marker to keep track of the scanning progress in
+ // large objects that have a progress bar and are scanned in increments.
+ int progress_bar_;
// Assuming the initial allocation on a page is sequential,
// count highest number of bytes ever allocated on the page.
int high_water_mark_;
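
A note on the kHeaderSize change above: write_barrier_counter_ is an intptr_t,
while the new progress_bar_ and the existing high_water_mark_ are plain ints,
so the counted tail of the header becomes kIntSize + kIntSize instead of a
second kPointerSize. The arithmetic, using stand-in constants rather than the
real MemoryChunk computation and assuming the common ILP32/LP64 data models:

  #include <cstddef>

  // Stand-ins for kPointerSize/kIntSize from globals.h.
  constexpr std::size_t kPointerSize = sizeof(void*);
  constexpr std::size_t kIntSize = sizeof(int);

  // Header tail after kWriteBarrierCounterOffset:
  //   before: write_barrier_counter_ (intptr_t) + one pointer-sized slot
  //   after:  write_barrier_counter_ + progress_bar_ (int) + high_water_mark_ (int)
  constexpr std::size_t kOldTail = kPointerSize + kPointerSize;
  constexpr std::size_t kNewTail = kPointerSize + kIntSize + kIntSize;

  // On a 64-bit target the two ints occupy the space previously counted as
  // one pointer, so the header size is unchanged; on 32-bit it grows by one int.
  static_assert(sizeof(void*) != 8 || kNewTail == kOldTail, "64-bit unchanged");
  static_assert(sizeof(void*) != 4 || kNewTail == kOldTail + kIntSize,
                "32-bit grows by one int");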