Reviewers: Toon Verwaest,
Description:
Implement verification of context separation.
This adds the --verify-context-separation flag, which can be used to
verify that no code object keeps more than one context alive. It uses
the same definition of connectivity as the full GC does. At the moment
we only verify optimized code, but this should be extended to other
kinds of code later.
[email protected]
Please review this at https://chromiumcodereview.appspot.com/10694157/
SVN Base: https://v8.googlecode.com/svn/branches/bleeding_edge
Affected files:
M src/flag-definitions.h
M src/mark-compact.h
M src/mark-compact.cc
Index: src/flag-definitions.h
diff --git a/src/flag-definitions.h b/src/flag-definitions.h
index
1c4f914f204b3fe1f065ac1379423f7a5fbcd8ea..5bf2d85a90f5bfd964a147ca65a20052bc0ef443
100644
--- a/src/flag-definitions.h
+++ b/src/flag-definitions.h
@@ -541,6 +541,8 @@ DEFINE_bool(gc_verbose, false, "print stuff during
garbage collection")
DEFINE_bool(heap_stats, false, "report heap statistics before and after
GC")
DEFINE_bool(code_stats, false, "report code statistics after GC")
DEFINE_bool(verify_heap, false, "verify heap pointers before and after GC")
+DEFINE_bool(verify_context_separation, false,
+ "verify that code objects keep at most one context alive after
GC")
DEFINE_bool(print_handles, false, "report handles after GC")
DEFINE_bool(print_global_handles, false, "report global handles after GC")
Index: src/mark-compact.cc
diff --git a/src/mark-compact.cc b/src/mark-compact.cc
index
e61457991e1769c6a589d1c40c7550de5f9854b2..28a9466648ea60adf0c0e1a394d8d8fc9aa3462e
100644
--- a/src/mark-compact.cc
+++ b/src/mark-compact.cc
@@ -296,6 +296,16 @@ void MarkCompactCollector::CollectGarbage() {
if (!FLAG_collect_maps) ReattachInitialMaps();
+#ifdef DEBUG
+ if (FLAG_verify_context_separation) {
+ bool was_code_flushing_enabled = is_code_flushing_enabled();
+ EnableCodeFlushing(false);
+ VerifyMarkbitsAreClean();
+ VerifyContextSeparation();
+ EnableCodeFlushing(was_code_flushing_enabled);
+ }
+#endif
+
Finish();
tracer_ = NULL;
@@ -313,6 +323,7 @@ void
MarkCompactCollector::VerifyMarkbitsAreClean(PagedSpace* space) {
}
}
+
void MarkCompactCollector::VerifyMarkbitsAreClean(NewSpace* space) {
NewSpacePageIterator it(space->bottom(), space->top());
@@ -323,6 +334,7 @@ void
MarkCompactCollector::VerifyMarkbitsAreClean(NewSpace* space) {
}
}
+
void MarkCompactCollector::VerifyMarkbitsAreClean() {
VerifyMarkbitsAreClean(heap_->old_pointer_space());
VerifyMarkbitsAreClean(heap_->old_data_space());
@@ -1620,6 +1632,69 @@ class SharedFunctionInfoMarkingVisitor : public
ObjectVisitor {
};
+#ifdef DEBUG
+static bool IsKnownInternalContext(Heap* heap, Object* context) {
+ Handle<Context> debug_context =
heap->isolate()->debug()->debug_context();
+ if (!debug_context.is_null() && context == *debug_context) return true;
+ return false;
+}
+
+
+void MarkCompactCollector::VerifyContextSeparation() {
+ // Disabling of code cache flushing in maps is known to introduce cross
+ // context leaks and verification will most certainly fail.
+ if (!FLAG_cleanup_code_caches_at_gc) {
+ PrintF("[VerifyContextSeparation: Incompatible flags. Skipped.]\n");
+ return;
+ }
+
+ HeapObjectIterator it(heap_->code_space());
+ for (HeapObject* object = it.Next(); object != NULL; object = it.Next())
{
+ Code* code = Code::cast(object);
+
+ // TODO(mstarzinger): We currently limit this verification to optimized
+ // code, but should extend it to cover all kinds of code objects.
+ if (code->kind() != Code::OPTIMIZED_FUNCTION) continue;
+
+ // Mark the code object and process transitive closure.
+ MarkingVisitor marker(heap_);
+ code->Iterate(&marker);
+ ProcessMarkingDeque();
+
+ // Count the number of global contexts that are kept alive by this
+ // code object.
+ int number_of_live_global_contexts = 0;
+ Object* context = heap_->global_contexts_list();
+ while (!context->IsUndefined()) {
+ ASSERT(context->IsGlobalContext());
+ if (!IsKnownInternalContext(heap_, context) &&
+ Marking::MarkBitFrom(HeapObject::cast(context)).Get()) {
+ number_of_live_global_contexts++;
+ }
+ context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK);
+ }
+ if (number_of_live_global_contexts > 1) {
+ PrintF("[VerifyContextSeparation: %p keeps %d contexts alive.]\n",
+ reinterpret_cast<void*>(code),
number_of_live_global_contexts);
+#ifdef OBJECT_PRINT
+ code->PrintLn();
+ context = heap_->global_contexts_list();
+ while (!context->IsUndefined()) {
+ if (Marking::MarkBitFrom(HeapObject::cast(context)).Get()) {
+ context->PrintLn();
+ }
+ context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK);
+ }
+#endif
+ }
+ CHECK_LE(number_of_live_global_contexts, 1);
+ ClearMarkbits();
+ ReattachInitialMaps();
+ }
+}
+#endif
+
+
void MarkCompactCollector::MarkInlinedFunctionsCode(Code* code) {
// For optimized functions we should retain both non-optimized version
// of it's code and non-optimized version of all inlined functions.
@@ -3960,9 +4035,9 @@ void MarkCompactCollector::SweepSpaces() {
#ifdef DEBUG
state_ = SWEEP_SPACES;
#endif
- SweeperType how_to_sweep =
- FLAG_lazy_sweeping ? LAZY_CONSERVATIVE : CONSERVATIVE;
- if (FLAG_expose_gc) how_to_sweep = CONSERVATIVE;
+ bool lazy_sweeping =
+ FLAG_lazy_sweeping && !FLAG_expose_gc
&& !FLAG_verify_context_separation;
+ SweeperType how_to_sweep = lazy_sweeping ? LAZY_CONSERVATIVE :
CONSERVATIVE;
if (sweep_precisely_) how_to_sweep = PRECISE;
// Noncompacting collections simply sweep the spaces to clear the mark
// bits and free the nonlive blocks (for old and map spaces). We sweep
Index: src/mark-compact.h
diff --git a/src/mark-compact.h b/src/mark-compact.h
index
c2a70fc385d8eb8069e84eeb8ac9ba3c671d064e..baf128fab5b01ffe291e05202f21de723b11e05a
100644
--- a/src/mark-compact.h
+++ b/src/mark-compact.h
@@ -507,6 +507,7 @@ class MarkCompactCollector {
};
#ifdef DEBUG
+ void VerifyContextSeparation();
void VerifyMarkbitsAreClean();
static void VerifyMarkbitsAreClean(PagedSpace* space);
static void VerifyMarkbitsAreClean(NewSpace* space);
--
v8-dev mailing list
[email protected]
http://groups.google.com/group/v8-dev