Revision: 5455
Author: [email protected]
Date: Tue Sep 14 10:48:56 2010
Log: Attempt to collect more garbage before panicking with out of memory.
Currently weak handles retain an object for another GC round (often a
major GC round). Instrumenting Chromium shows that navigation leaves
many global objects which are only collected in the next round. Let's
attempt to collect more garbage when approaching an OOM condition.
Review URL: http://codereview.chromium.org/3327021
http://code.google.com/p/v8/source/detail?r=5455
Modified:
/branches/bleeding_edge/src/global-handles.cc
/branches/bleeding_edge/src/global-handles.h
/branches/bleeding_edge/src/heap-inl.h
/branches/bleeding_edge/src/heap.cc
/branches/bleeding_edge/src/heap.h
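For context, the pattern the log message refers to: an embedder creates a weak
persistent handle whose callback releases native resources. A minimal sketch
against the v8 public API of this era; Wrapper and MakeWeakWrapper are
illustrative names, not part of this change:

  #include <v8.h>

  struct Wrapper { /* native resources owned by a JS object */ };

  // Invoked during post-GC processing once the wrapped object is only weakly
  // reachable. The JS object itself is typically reclaimed only by a
  // subsequent major GC, which is why a navigation's leftover globals used to
  // need one more collection round.
  static void WeakCallback(v8::Persistent<v8::Value> object, void* parameter) {
    delete static_cast<Wrapper*>(parameter);
    object.Dispose();
  }

  v8::Persistent<v8::Object> MakeWeakWrapper(v8::Handle<v8::Object> obj,
                                             Wrapper* wrapper) {
    v8::Persistent<v8::Object> handle = v8::Persistent<v8::Object>::New(obj);
    handle.MakeWeak(wrapper, WeakCallback);
    return handle;
  }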
=======================================
--- /branches/bleeding_edge/src/global-handles.cc Wed Jul 28 05:34:41 2010
+++ /branches/bleeding_edge/src/global-handles.cc Tue Sep 14 10:48:56 2010
@@ -372,13 +372,14 @@
int post_gc_processing_count = 0;
-void GlobalHandles::PostGarbageCollectionProcessing() {
+bool GlobalHandles::PostGarbageCollectionProcessing() {
// Process weak global handle callbacks. This must be done after the
// GC is completely done, because the callbacks may invoke arbitrary
// API functions.
// At the same time deallocate all DESTROYED nodes.
ASSERT(Heap::gc_state() == Heap::NOT_IN_GC);
const int initial_post_gc_processing_count = ++post_gc_processing_count;
+ bool weak_callback_invoked = false;
Node** p = &head_;
while (*p != NULL) {
if ((*p)->PostGarbageCollectionProcessing()) {
@@ -389,6 +390,7 @@
// restart the processing).
break;
}
+ weak_callback_invoked = true;
}
if ((*p)->state_ == Node::DESTROYED) {
// Delete the link.
@@ -407,6 +409,7 @@
if (first_deallocated()) {
first_deallocated()->set_next(head());
}
+ return weak_callback_invoked;
}
=======================================
--- /branches/bleeding_edge/src/global-handles.h Thu Dec 3 02:16:37 2009
+++ /branches/bleeding_edge/src/global-handles.h Tue Sep 14 10:48:56 2010
@@ -95,8 +95,9 @@
// Tells whether global handle is weak.
static bool IsWeak(Object** location);
- // Process pending weak handles.
- static void PostGarbageCollectionProcessing();
+ // Process pending weak handles. Returns true if any weak handle
+ // callback has been invoked.
+ static bool PostGarbageCollectionProcessing();
// Iterates over all strong handles.
static void IterateStrongRoots(ObjectVisitor* v);
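A condensed sketch of how the new return value is consumed; the real loop
lives in Heap::PerformGarbageCollection in the heap.cc hunk further down:

  // Rerun major GC while weak callbacks keep firing: objects released by a
  // callback only become collectable on the next major GC. The loop is
  // bounded because callbacks may execute arbitrary code and could keep
  // producing new weak work indefinitely.
  const int kMaxNumberOfAttempts = 7;
  for (int attempt = 0; attempt < kMaxNumberOfAttempts; attempt++) {
    if (!GlobalHandles::PostGarbageCollectionProcessing()) break;  // nothing fired
    MarkCompact(tracer);       // another major GC pass
    UpdateOldSpaceLimits();    // callbacks may have allocated in old space
  }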
=======================================
--- /branches/bleeding_edge/src/heap-inl.h Mon Aug 30 00:10:40 2010
+++ /branches/bleeding_edge/src/heap-inl.h Tue Sep 14 10:48:56 2010
@@ -35,6 +35,16 @@
namespace v8 {
namespace internal {
+void Heap::UpdateOldSpaceLimits() {
+ int old_gen_size = PromotedSpaceSize();
+ old_gen_promotion_limit_ =
+ old_gen_size + Max(kMinimumPromotionLimit, old_gen_size / 3);
+ old_gen_allocation_limit_ =
+ old_gen_size + Max(kMinimumAllocationLimit, old_gen_size / 2);
+ old_gen_exhausted_ = false;
+}
+
+
int Heap::MaxObjectSizeInPagedSpace() {
return Page::kMaxHeapObjectSize;
}
@@ -403,7 +413,7 @@
} \
if (!__object__->IsRetryAfterGC()) RETURN_EMPTY; \
Counters::gc_last_resort_from_handles.Increment(); \
- Heap::CollectAllGarbage(false); \
+ Heap::CollectAllAvailableGarbage(); \
{ \
AlwaysAllocateScope __scope__; \
__object__ = FUNCTION_CALL; \
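The UpdateOldSpaceLimits() helper added in the first hunk above factors out
the limit computation that heap.cc previously did inline (and that now has to
be repeated after every aggressive re-collection, see below). With a
hypothetical promoted old-generation size of 60 MB the formulas give:

  // Hypothetical numbers, only to make the formula concrete.
  int old_gen_size = 60 * MB;                                           // PromotedSpaceSize()
  int promotion_limit  = old_gen_size + Max(2 * MB, old_gen_size / 3);  // 60 + 20 = 80 MB
  int allocation_limit = old_gen_size + Max(8 * MB, old_gen_size / 2);  // 60 + 30 = 90 MB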
=======================================
--- /branches/bleeding_edge/src/heap.cc Tue Sep 14 07:52:53 2010
+++ /branches/bleeding_edge/src/heap.cc Tue Sep 14 10:48:56 2010
@@ -55,7 +55,6 @@
String* Heap::hidden_symbol_;
Object* Heap::roots_[Heap::kRootListLength];
-
NewSpace Heap::new_space_;
OldSpace* Heap::old_pointer_space_ = NULL;
OldSpace* Heap::old_data_space_ = NULL;
@@ -64,9 +63,6 @@
CellSpace* Heap::cell_space_ = NULL;
LargeObjectSpace* Heap::lo_space_ = NULL;
-static const int kMinimumPromotionLimit = 2*MB;
-static const int kMinimumAllocationLimit = 8*MB;
-
int Heap::old_gen_promotion_limit_ = kMinimumPromotionLimit;
int Heap::old_gen_allocation_limit_ = kMinimumAllocationLimit;
@@ -405,17 +401,26 @@
}
-void Heap::CollectAllGarbage(bool force_compaction) {
+void Heap::CollectAllGarbage(bool force_compaction,
+ CollectionPolicy collectionPolicy) {
// Since we are ignoring the return value, the exact choice of space does
// not matter, so long as we do not specify NEW_SPACE, which would not
// cause a full GC.
MarkCompactCollector::SetForceCompaction(force_compaction);
- CollectGarbage(0, OLD_POINTER_SPACE);
+ CollectGarbage(0, OLD_POINTER_SPACE, collectionPolicy);
MarkCompactCollector::SetForceCompaction(false);
}
-bool Heap::CollectGarbage(int requested_size, AllocationSpace space) {
+void Heap::CollectAllAvailableGarbage() {
+ CompilationCache::Clear();
+ CollectAllGarbage(true, AGGRESSIVE);
+}
+
+
+bool Heap::CollectGarbage(int requested_size,
+ AllocationSpace space,
+ CollectionPolicy collectionPolicy) {
// The VM is in the GC state until exiting this function.
VMState state(GC);
@@ -442,7 +447,7 @@
? &Counters::gc_scavenger
: &Counters::gc_compactor;
rate->Start();
- PerformGarbageCollection(space, collector, &tracer);
+ PerformGarbageCollection(collector, &tracer, collectionPolicy);
rate->Stop();
GarbageCollectionEpilogue();
@@ -475,7 +480,7 @@
void Heap::PerformScavenge() {
GCTracer tracer;
- PerformGarbageCollection(NEW_SPACE, SCAVENGER, &tracer);
+ PerformGarbageCollection(SCAVENGER, &tracer, NORMAL);
}
@@ -664,9 +669,9 @@
survival_rate_ = survival_rate;
}
-void Heap::PerformGarbageCollection(AllocationSpace space,
- GarbageCollector collector,
- GCTracer* tracer) {
+void Heap::PerformGarbageCollection(GarbageCollector collector,
+ GCTracer* tracer,
+ CollectionPolicy collectionPolicy) {
VerifySymbolTable();
if (collector == MARK_COMPACTOR && global_gc_prologue_callback_) {
ASSERT(!allocation_allowed_);
@@ -696,25 +701,45 @@
UpdateSurvivalRateTrend(start_new_space_size);
- int old_gen_size = PromotedSpaceSize();
- old_gen_promotion_limit_ =
- old_gen_size + Max(kMinimumPromotionLimit, old_gen_size / 3);
- old_gen_allocation_limit_ =
- old_gen_size + Max(kMinimumAllocationLimit, old_gen_size / 2);
-
- if (high_survival_rate_during_scavenges &&
- IsStableOrIncreasingSurvivalTrend()) {
- // Stable high survival rates of young objects both during partial and
- // full collection indicate that mutator is either building or modifying
- // a structure with a long lifetime.
- // In this case we aggressively raise old generation memory limits to
- // postpone subsequent mark-sweep collection and thus trade memory
- // space for the mutation speed.
- old_gen_promotion_limit_ *= 2;
- old_gen_allocation_limit_ *= 2;
+ UpdateOldSpaceLimits();
+
+ // Major GC would invoke weak handle callbacks on weakly reachable
+ // handles, but won't collect weakly reachable objects until next
+ // major GC. Therefore if we collect aggressively and weak handle callback
+ // has been invoked, we rerun major GC to release objects which become
+ // garbage.
+ if (collectionPolicy == AGGRESSIVE) {
+ // Note: as weak callbacks can execute arbitrary code, we cannot
+ // hope that eventually there will be no weak callbacks invocations.
+ // Therefore stop recollecting after several attempts.
+ const int kMaxNumberOfAttempts = 7;
+ for (int attempt = 0; attempt < kMaxNumberOfAttempts; attempt++) {
+ { DisableAssertNoAllocation allow_allocation;
+ GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL);
+ if (!GlobalHandles::PostGarbageCollectionProcessing()) break;
+ }
+ MarkCompact(tracer);
+ // Weak handle callbacks can allocate data, so keep limits correct.
+ UpdateOldSpaceLimits();
+ }
+ } else {
+ if (high_survival_rate_during_scavenges &&
+ IsStableOrIncreasingSurvivalTrend()) {
+ // Stable high survival rates of young objects both during partial and
+ // full collection indicate that mutator is either building or modifying
+ // a structure with a long lifetime.
+ // In this case we aggressively raise old generation memory limits to
+ // postpone subsequent mark-sweep collection and thus trade memory
+ // space for the mutation speed.
+ old_gen_promotion_limit_ *= 2;
+ old_gen_allocation_limit_ *= 2;
+ }
}
- old_gen_exhausted_ = false;
+ { DisableAssertNoAllocation allow_allocation;
+ GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL);
+ GlobalHandles::PostGarbageCollectionProcessing();
+ }
} else {
tracer_ = tracer;
Scavenge();
@@ -725,12 +750,6 @@
Counters::objs_since_last_young.Set(0);
- if (collector == MARK_COMPACTOR) {
- DisableAssertNoAllocation allow_allocation;
- GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL);
- GlobalHandles::PostGarbageCollectionProcessing();
- }
-
// Update relocatables.
Relocatable::PostGarbageCollectionProcessing();
=======================================
--- /branches/bleeding_edge/src/heap.h Tue Sep 14 07:52:53 2010
+++ /branches/bleeding_edge/src/heap.h Tue Sep 14 10:48:56 2010
@@ -687,13 +687,21 @@
static void GarbageCollectionPrologue();
static void GarbageCollectionEpilogue();
+ enum CollectionPolicy { NORMAL, AGGRESSIVE };
+
// Performs garbage collection operation.
// Returns whether required_space bytes are available after the collection.
- static bool CollectGarbage(int required_space, AllocationSpace space);
+ static bool CollectGarbage(int required_space,
+ AllocationSpace space,
+ CollectionPolicy collectionPolicy = NORMAL);
// Performs a full garbage collection. Force compaction if the
// parameter is true.
- static void CollectAllGarbage(bool force_compaction);
+ static void CollectAllGarbage(bool force_compaction,
+ CollectionPolicy collectionPolicy = NORMAL);
+
+ // Last hope GC, should try to squeeze as much as possible.
+ static void CollectAllAvailableGarbage();
// Notify the heap that a context has been disposed.
static int NotifyContextDisposed() { return ++contexts_disposed_; }
@@ -1214,9 +1222,14 @@
static GarbageCollector SelectGarbageCollector(AllocationSpace space);
// Performs garbage collection
- static void PerformGarbageCollection(AllocationSpace space,
- GarbageCollector collector,
- GCTracer* tracer);
+ static void PerformGarbageCollection(GarbageCollector collector,
+ GCTracer* tracer,
+ CollectionPolicy collectionPolicy);
+
+ static const int kMinimumPromotionLimit = 2 * MB;
+ static const int kMinimumAllocationLimit = 8 * MB;
+
+ inline static void UpdateOldSpaceLimits();
// Allocate an uninitialized object in map space. The behavior is identical
// to Heap::AllocateRaw(size_in_bytes, MAP_SPACE), except that (a) it doesn't
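Because both new parameters default to NORMAL, existing call sites compile
unchanged; only the last-resort path in heap-inl.h opts into the aggressive
policy. Roughly:

  Heap::CollectAllGarbage(false);              // same as (false, Heap::NORMAL)
  Heap::CollectGarbage(0, OLD_POINTER_SPACE);  // same as (..., Heap::NORMAL)
  // Last-resort OOM path: clears the compilation cache, then runs
  // CollectAllGarbage(true, AGGRESSIVE), rerunning major GC while weak
  // handle callbacks keep releasing objects.
  Heap::CollectAllAvailableGarbage();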