Reviewers: Vyacheslav Egorov,
Message:
Brand new CL, this time against bleeding edge.
Description:
Fix free list node ending up on an evacuation candidate.
This is a temporary fix that avoids starting compaction when incremental
marking is restarted during an old-space-step. Starting compaction at that
point could turn the page that holds the just-chosen free list node into an
evacuation candidate. It could also cause several other inconsistencies if
it happens during a scavenge.
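
For context, a rough sketch of the approach (a simplified stand-in class,
not the actual V8 code): the caller tells StartMarking whether compaction
may begin, and the restart path taken from an old-space-step passes
PREVENT_COMPACTION because a free list node is in flight at that point.

  // Sketch only: CompactionFlag gating, with stand-in methods.
  enum CompactionFlag { ALLOW_COMPACTION, PREVENT_COMPACTION };

  class IncrementalMarkingSketch {
   public:
    void Start() {
      // A fresh start may compact; no allocation is in flight here.
      StartMarking(ALLOW_COMPACTION);
    }
    void Step() {
      // Restarting from within an old-space-step happens inside
      // FreeList::Allocate, after a node has been chosen but before it
      // is returned, so selecting evacuation candidates is forbidden.
      StartMarking(PREVENT_COMPACTION);
    }
   private:
    void StartMarking(CompactionFlag flag) {
      is_compacting_ = (flag == ALLOW_COMPACTION) && StartCompaction();
    }
    bool StartCompaction() { return true; }  // stand-in for candidate selection
    bool is_compacting_ = false;
  };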
[email protected]
Please review this at http://codereview.chromium.org/8228010/
SVN Base: https://v8.googlecode.com/svn/branches/bleeding_edge
Affected files:
M src/incremental-marking.h
M src/incremental-marking.cc
M src/spaces.cc
Index: src/incremental-marking.cc
diff --git a/src/incremental-marking.cc b/src/incremental-marking.cc
index 6cb7aa4858d80ca1e096a14b06d862a3a1b8e46a..79139a068f65782c7382b6a2367bd21bb887a47b 100644
--- a/src/incremental-marking.cc
+++ b/src/incremental-marking.cc
@@ -410,7 +410,7 @@ void IncrementalMarking::Start() {
if (heap_->old_pointer_space()->IsSweepingComplete() &&
heap_->old_data_space()->IsSweepingComplete()) {
- StartMarking();
+ StartMarking(ALLOW_COMPACTION);
} else {
if (FLAG_trace_incremental_marking) {
PrintF("[IncrementalMarking] Start sweeping.\n");
@@ -435,12 +435,12 @@ static void MarkObjectGreyDoNotEnqueue(Object* obj) {
}
-void IncrementalMarking::StartMarking() {
+void IncrementalMarking::StartMarking(CompactionFlag flag) {
if (FLAG_trace_incremental_marking) {
PrintF("[IncrementalMarking] Start marking\n");
}
- is_compacting_ = !FLAG_never_compact &&
+ is_compacting_ = !FLAG_never_compact && (flag == ALLOW_COMPACTION) &&
heap_->mark_compact_collector()->StartCompaction();
state_ = MARKING;
@@ -696,7 +696,7 @@ void IncrementalMarking::Step(intptr_t allocated_bytes) {
if (state_ == SWEEPING) {
if (heap_->old_pointer_space()->AdvanceSweeper(bytes_to_process) &&
heap_->old_data_space()->AdvanceSweeper(bytes_to_process)) {
- StartMarking();
+ StartMarking(PREVENT_COMPACTION);
}
} else if (state_ == MARKING) {
Map* filler_map = heap_->one_pointer_filler_map();
Index: src/incremental-marking.h
diff --git a/src/incremental-marking.h b/src/incremental-marking.h
index 30dbbc1605c7c5415767101469cc1f55c2d504ef..d1627bcba5c306834376c750b3ce31af31476566 100644
--- a/src/incremental-marking.h
+++ b/src/incremental-marking.h
@@ -206,7 +206,9 @@ class IncrementalMarking {
void ResetStepCounters();
- void StartMarking();
+ enum CompactionFlag { ALLOW_COMPACTION, PREVENT_COMPACTION };
+
+ void StartMarking(CompactionFlag flag);
void ActivateIncrementalWriteBarrier(PagedSpace* space);
static void ActivateIncrementalWriteBarrier(NewSpace* space);
Index: src/spaces.cc
diff --git a/src/spaces.cc b/src/spaces.cc
index 2aaca5b742f911d47be7816d0419a1321774f4b4..61b318118ae1e333027fc1081e45238af07c424f 100644
--- a/src/spaces.cc
+++ b/src/spaces.cc
@@ -1798,6 +1798,11 @@ HeapObject* FreeList::Allocate(int size_in_bytes) {
owner_->heap()->incremental_marking()->OldSpaceStep(
size_in_bytes - old_linear_size);
+ // The old-space-step might have finished sweeping and restarted marking.
+ // Verify that it did not turn the page of the new node into an evacuation
+ // candidate.
+ ASSERT(!MarkCompactCollector::IsOnEvacuationCandidate(new_node));
+
const int kThreshold = IncrementalMarking::kAllocatedThreshold;
// Memory in the linear allocation area is counted as allocated. We may free
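
To make the assertion's context concrete, here is a simplified shape of the
allocation path it protects (PickNode, PageOf, and OldSpaceStep are
hypothetical stand-ins, not the real V8 helpers): the node is carved off the
free list first, then the incremental step runs, so the step must not turn
the node's page into an evacuation candidate.

  #include <cassert>

  // Hypothetical stand-in types for the V8 objects involved.
  struct Page { bool evacuation_candidate = false; };
  struct HeapObject { Page* page; };

  Page* PageOf(HeapObject* obj) { return obj->page; }
  void OldSpaceStep(int /*bytes*/) { /* may restart incremental marking */ }
  HeapObject* PickNode(int /*size*/) {
    static Page p;
    static HeapObject o = { &p };
    return &o;
  }

  HeapObject* Allocate(int size_in_bytes) {
    HeapObject* new_node = PickNode(size_in_bytes);  // node chosen here...
    OldSpaceStep(size_in_bytes);  // ...marking may restart inside this step.
    // With StartMarking(PREVENT_COMPACTION) on that path, the step can no
    // longer select new_node's page for evacuation; the assert checks that.
    assert(!PageOf(new_node)->evacuation_candidate);
    return new_node;
  }

  int main() { Allocate(32); }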