Reviewers: Vyacheslav Egorov,
Message:
PTAL.
Description:
Fix free list node ending up on evacuation candidate.
This is a temporary fix that avoids compaction when incremental marking
is restarted during an old-space-step. Restarting marking with compaction
enabled could turn the page that holds the chosen free list node into an
evacuation candidate, and could cause several other inconsistencies if it
happens during a scavenge.
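
For context, below is a minimal, self-contained sketch of the control flow this
patch introduces. IncrementalMarkingSketch, its boolean parameters, main(), and
the printouts are simplified stand-ins invented for illustration (FLAG_never_compact
is omitted); only the allow_compaction plumbing mirrors the actual change.

// Sketch only: shows that StartMarking() allows compaction when reached from
// Start(), but suppresses it when reached from Step() during sweeping.
#include <cstdio>

class IncrementalMarkingSketch {
 public:
  enum State { STOPPED, SWEEPING, MARKING };

  // Start() runs from a safe point, so compaction may be enabled.
  void Start(bool sweeping_complete) {
    if (sweeping_complete) {
      StartMarking(true);   // compaction allowed
    } else {
      state_ = SWEEPING;    // finish sweeping first
    }
  }

  // Step() can be reached from FreeList::Allocate() via OldSpaceStep(); if it
  // restarts marking there, compaction must stay off so the page holding the
  // just-allocated free list node cannot become an evacuation candidate.
  void Step(bool sweeping_now_complete) {
    if (state_ == SWEEPING && sweeping_now_complete) {
      StartMarking(false);  // compaction suppressed
    }
  }

  bool is_compacting() const { return is_compacting_; }

 private:
  void StartMarking(bool allow_compaction) {
    // Mirrors the patched condition:
    //   is_compacting_ = !FLAG_never_compact && allow_compaction && ...
    is_compacting_ = allow_compaction && CollectorStartCompaction();
    state_ = MARKING;
  }

  // Stand-in for MarkCompactCollector::StartCompaction().
  bool CollectorStartCompaction() { return true; }

  State state_ = STOPPED;
  bool is_compacting_ = false;
};

int main() {
  IncrementalMarkingSketch from_start;
  from_start.Start(/*sweeping_complete=*/true);
  std::printf("restart from Start(): compacting=%d\n", from_start.is_compacting());

  IncrementalMarkingSketch from_step;
  from_step.Start(/*sweeping_complete=*/false);
  from_step.Step(/*sweeping_now_complete=*/true);
  std::printf("restart from Step():  compacting=%d\n", from_step.is_compacting());
  return 0;
}
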
[email protected]
Please review this at http://codereview.chromium.org/8230018/
SVN Base: https://v8.googlecode.com/svn/tags/3.6.6
Affected files:
M src/incremental-marking.h
M src/incremental-marking.cc
M src/spaces.cc
Index: src/incremental-marking.cc
diff --git a/src/incremental-marking.cc b/src/incremental-marking.cc
index 6cb7aa4858d80ca1e096a14b06d862a3a1b8e46a..149489d7612add3f802f53161eaab370edf4e0a6 100644
--- a/src/incremental-marking.cc
+++ b/src/incremental-marking.cc
@@ -410,7 +410,7 @@ void IncrementalMarking::Start() {
if (heap_->old_pointer_space()->IsSweepingComplete() &&
heap_->old_data_space()->IsSweepingComplete()) {
- StartMarking();
+ StartMarking(true);
} else {
if (FLAG_trace_incremental_marking) {
PrintF("[IncrementalMarking] Start sweeping.\n");
@@ -435,12 +435,12 @@ static void MarkObjectGreyDoNotEnqueue(Object* obj) {
}
-void IncrementalMarking::StartMarking() {
+void IncrementalMarking::StartMarking(bool allow_compaction) {
if (FLAG_trace_incremental_marking) {
PrintF("[IncrementalMarking] Start marking\n");
}
- is_compacting_ = !FLAG_never_compact &&
+ is_compacting_ = !FLAG_never_compact && allow_compaction &&
heap_->mark_compact_collector()->StartCompaction();
state_ = MARKING;
@@ -696,7 +696,7 @@ void IncrementalMarking::Step(intptr_t allocated_bytes) {
if (state_ == SWEEPING) {
if (heap_->old_pointer_space()->AdvanceSweeper(bytes_to_process) &&
heap_->old_data_space()->AdvanceSweeper(bytes_to_process)) {
- StartMarking();
+ StartMarking(false);
}
} else if (state_ == MARKING) {
Map* filler_map = heap_->one_pointer_filler_map();
Index: src/incremental-marking.h
diff --git a/src/incremental-marking.h b/src/incremental-marking.h
index 30dbbc1605c7c5415767101469cc1f55c2d504ef..b47ae3896d0063e3fa53a041d6225c66e39cf6d7 100644
--- a/src/incremental-marking.h
+++ b/src/incremental-marking.h
@@ -206,7 +206,7 @@ class IncrementalMarking {
void ResetStepCounters();
- void StartMarking();
+ void StartMarking(bool allow_compaction);
void ActivateIncrementalWriteBarrier(PagedSpace* space);
static void ActivateIncrementalWriteBarrier(NewSpace* space);
Index: src/spaces.cc
diff --git a/src/spaces.cc b/src/spaces.cc
index 2aaca5b742f911d47be7816d0419a1321774f4b4..61b318118ae1e333027fc1081e45238af07c424f 100644
--- a/src/spaces.cc
+++ b/src/spaces.cc
@@ -1798,6 +1798,11 @@ HeapObject* FreeList::Allocate(int size_in_bytes) {
owner_->heap()->incremental_marking()->OldSpaceStep(
size_in_bytes - old_linear_size);
+ // The old-space-step might have finished sweeping and restarted marking.
+ // Verify that it did not turn the page of the new node into an evacuation
+ // candidate.
+ ASSERT(!MarkCompactCollector::IsOnEvacuationCandidate(new_node));
+
const int kThreshold = IncrementalMarking::kAllocatedThreshold;
// Memory in the linear allocation area is counted as allocated. We may free