Revision: 9484
Author: [email protected]
Date: Thu Sep 29 06:51:47 2011
Log: Enable code flushing for full (non-incremental) collections.
This will ensure that we will flush code when we hit memory limits.
[email protected]
BUG=v8:1609
Review URL: http://codereview.chromium.org/8060053
http://code.google.com/p/v8/source/detail?r=9484
Modified:
/branches/bleeding_edge/src/flag-definitions.h
/branches/bleeding_edge/src/mark-compact.cc
/branches/bleeding_edge/test/cctest/test-heap.cc
=======================================
--- /branches/bleeding_edge/src/flag-definitions.h Thu Sep 29 01:55:36 2011
+++ /branches/bleeding_edge/src/flag-definitions.h Thu Sep 29 06:51:47 2011
@@ -258,7 +258,7 @@
"report fragmentation for old pointer and data pages")
DEFINE_bool(collect_maps, true,
"garbage collect maps from which no objects can be reached")
-DEFINE_bool(flush_code, false,
+DEFINE_bool(flush_code, true,
"flush code that we expect not to use again before full gc")
DEFINE_bool(incremental_marking, true, "use incremental marking")
DEFINE_bool(incremental_marking_steps, true, "do incremental marking steps")
=======================================
--- /branches/bleeding_edge/src/mark-compact.cc Thu Sep 29 05:27:31 2011
+++ /branches/bleeding_edge/src/mark-compact.cc Thu Sep 29 06:51:47 2011
@@ -467,8 +467,6 @@
void MarkCompactCollector::Prepare(GCTracer* tracer) {
- FLAG_flush_code = false;
-
was_marked_incrementally_ = heap()->incremental_marking()->IsMarking();
// Disable collection of maps if incremental marking is enabled.
@@ -485,7 +483,6 @@
state_ = PREPARE_GC;
#endif
- // TODO(1726) Revert this into an assertion when compaction is enabled.
ASSERT(!FLAG_never_compact || !FLAG_always_compact);
if (collect_maps_) CreateBackPointers();
@@ -1422,7 +1419,8 @@
void MarkCompactCollector::PrepareForCodeFlushing() {
ASSERT(heap() == Isolate::Current()->heap());
- if (!FLAG_flush_code) {
+ // TODO(1609) Currently incremental marker does not support code flushing.
+ if (!FLAG_flush_code || was_marked_incrementally_) {
EnableCodeFlushing(false);
return;
}
@@ -1434,6 +1432,7 @@
return;
}
#endif
+
EnableCodeFlushing(true);
// Ensure that empty descriptor array is marked. Method MarkDescriptorArray
@@ -3641,9 +3640,6 @@
}
-// TODO(1466) ReportDeleteIfNeeded is not called currently.
-// Our profiling tools do not expect intersections between
-// code objects. We should either reenable it or change our tools.
void MarkCompactCollector::EnableCodeFlushing(bool enable) {
if (enable) {
if (code_flusher_ != NULL) return;
@@ -3656,6 +3652,9 @@
}
+// TODO(1466) ReportDeleteIfNeeded is not called currently.
+// Our profiling tools do not expect intersections between
+// code objects. We should either reenable it or change our tools.
void MarkCompactCollector::ReportDeleteIfNeeded(HeapObject* obj,
Isolate* isolate) {
#ifdef ENABLE_GDB_JIT_INTERFACE
=======================================
--- /branches/bleeding_edge/test/cctest/test-heap.cc Thu Sep 22 09:40:07 2011
+++ /branches/bleeding_edge/test/cctest/test-heap.cc Thu Sep 29 06:51:47 2011
@@ -961,17 +961,18 @@
Handle<JSFunction> function(JSFunction::cast(func_value));
CHECK(function->shared()->is_compiled());
- HEAP->CollectAllGarbage(Heap::kNoGCFlags);
- HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+ // TODO(1609) Currently incremental marker does not support code flushing.
+ HEAP->CollectAllGarbage(Heap::kMakeHeapIterableMask);
+ HEAP->CollectAllGarbage(Heap::kMakeHeapIterableMask);
CHECK(function->shared()->is_compiled());
- HEAP->CollectAllGarbage(Heap::kNoGCFlags);
- HEAP->CollectAllGarbage(Heap::kNoGCFlags);
- HEAP->CollectAllGarbage(Heap::kNoGCFlags);
- HEAP->CollectAllGarbage(Heap::kNoGCFlags);
- HEAP->CollectAllGarbage(Heap::kNoGCFlags);
- HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+ HEAP->CollectAllGarbage(Heap::kMakeHeapIterableMask);
+ HEAP->CollectAllGarbage(Heap::kMakeHeapIterableMask);
+ HEAP->CollectAllGarbage(Heap::kMakeHeapIterableMask);
+ HEAP->CollectAllGarbage(Heap::kMakeHeapIterableMask);
+ HEAP->CollectAllGarbage(Heap::kMakeHeapIterableMask);
+ HEAP->CollectAllGarbage(Heap::kMakeHeapIterableMask);
// foo should no longer be in the compilation cache
CHECK(!function->shared()->is_compiled() || function->IsOptimized());
--
v8-dev mailing list
[email protected]
http://groups.google.com/group/v8-dev