Reviewers: mvstanton, Michael Starzinger,
Description:
Fast literals are allocation site pretenured.
Removes the deopt-all-optimized-code switch and adds bailout code for local
pretenuring. From now on, supporting an allocation site with local pretenuring
will just require local changes.
BUG=
Please review this at https://codereview.chromium.org/96783002/
SVN Base: https://v8.googlecode.com/svn/branches/bleeding_edge
Affected files (+53, -23 lines):
M src/flag-definitions.h
M src/heap.h
M src/heap.cc
M src/hydrogen.cc
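For context, a minimal standalone sketch of the idea the description refers to
(illustrative C++ only, hypothetical names, not the V8 API): each allocation
site digests its own allocation/survival feedback after a GC, and only code
that depends on a site whose decision flips has to be deoptimized. The patch
wires exactly this kind of flip into Heap::GarbageCollectionEpilogue() via
DeoptimizeDependentCodeGroup, replacing the old global FullDeopt.

// Illustrative sketch only; hypothetical names, not the V8 API.
#include <cstdio>

struct SiteFeedback {
  int allocations = 0;   // objects created from this site since the last GC
  int survivors = 0;     // how many of those survived that GC
  bool tenured = false;  // current pretenuring decision for this site
};

// Re-evaluate one site after a GC. Returns true if the decision flipped,
// i.e. optimized code depending on this site would have to be deoptimized.
bool DigestFeedback(SiteFeedback* site, int tenure_threshold_percent) {
  if (site->allocations == 0) return false;
  int survival = site->survivors * 100 / site->allocations;
  bool should_tenure = survival >= tenure_threshold_percent;
  bool flipped = should_tenure != site->tenured;
  site->tenured = should_tenure;
  site->allocations = site->survivors = 0;  // reset for the next GC cycle
  return flipped;
}

int main() {
  SiteFeedback site;
  site.allocations = 100;
  site.survivors = 90;  // 90% of this site's objects survived the GC
  if (DigestFeedback(&site, 85)) {
    std::printf("decision flipped, site is now %s\n",
                site.tenured ? "TENURED" : "NOT_TENURED");
  }
  return 0;
}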
Index: src/flag-definitions.h
diff --git a/src/flag-definitions.h b/src/flag-definitions.h
index 35977e63943f4f95cb6f6ffc865fc6d47ae0c1cf..702390395a8b836d13f05597ee875c5bfcc01028 100644
--- a/src/flag-definitions.h
+++ b/src/flag-definitions.h
@@ -213,7 +213,7 @@ DEFINE_bool(pretenuring, true, "allocate objects in old space")
// TODO(hpayer): We will remove this flag as soon as we have pretenuring
// support for specific allocation sites.
DEFINE_bool(pretenuring_call_new, false, "pretenure call new")
-DEFINE_bool(allocation_site_pretenuring, false,
+DEFINE_bool(allocation_site_pretenuring, true,
"pretenure with allocation sites")
DEFINE_bool(trace_pretenuring, false,
"trace pretenuring decisions of HAllocate instructions")
Index: src/heap.cc
diff --git a/src/heap.cc b/src/heap.cc
index d5c40ad154ad0e8de49a1ba86de2a659c352dbb7..aa219f6d20d5a24f1f834447931ca457fe18cd76 100644
--- a/src/heap.cc
+++ b/src/heap.cc
@@ -517,6 +517,9 @@ void Heap::GarbageCollectionEpilogue() {
if (casted->DigestPretenuringFeedback()) {
if (casted->GetPretenureMode() == TENURED) {
tenure_decisions++;
+ casted->dependent_code()->DeoptimizeDependentCodeGroup(
+ isolate_,
+ DependentCode::kAllocationSiteTenuringChangedGroup);
} else {
dont_tenure_decisions++;
}
@@ -1072,13 +1075,6 @@ bool Heap::PerformGarbageCollection(GarbageCollector collector,
      PrintPID("Limited new space size due to high promotion rate: %d MB\n",
               new_space_.InitialCapacity() / MB);
}
- // Support for global pre-tenuring uses the high promotion mode as a
- // heuristic indicator of whether to pretenure or not, we trigger
- // deoptimization here to take advantage of pre-tenuring as soon as
- // possible.
- if (FLAG_pretenuring) {
- isolate_->stack_guard()->FullDeopt();
- }
} else if (new_space_high_promotion_mode_active_ &&
IsStableOrDecreasingSurvivalTrend() &&
IsLowSurvivalRate()) {
@@ -1090,11 +1086,6 @@ bool Heap::PerformGarbageCollection(GarbageCollector collector,
      PrintPID("Unlimited new space size due to low promotion rate: %d MB\n",
               new_space_.MaximumCapacity() / MB);
}
- // Trigger deoptimization here to turn off pre-tenuring as soon as
- // possible.
- if (FLAG_pretenuring) {
- isolate_->stack_guard()->FullDeopt();
- }
}
if (new_space_high_promotion_mode_active_ &&
@@ -1184,6 +1175,8 @@ void Heap::MarkCompact(GCTracer* tracer) {
gc_state_ = MARK_COMPACT;
LOG(isolate_, ResourceEvent("markcompact", "begin"));
+ int64_t objects_before_gc = SizeOfObjects();
+
mark_compact_collector_.Prepare(tracer);
ms_count_++;
@@ -1200,6 +1193,8 @@ void Heap::MarkCompact(GCTracer* tracer) {
isolate_->counters()->objs_since_last_full()->Set(0);
flush_monomorphic_ics_ = false;
+
+ EvaluateLocalPretenuring(objects_before_gc);
}
@@ -1939,6 +1934,38 @@ void Heap::ProcessAllocationSites(WeakObjectRetainer* retainer,
}
+void Heap::DeoptAllAllocationSitesDependentCode() {
+ Object* cur = allocation_sites_list();
+ while (cur->IsAllocationSite()) {
+ AllocationSite* casted = AllocationSite::cast(cur);
+ casted->dependent_code()->DeoptimizeDependentCodeGroup(
+ isolate_,
+ DependentCode::kAllocationSiteTenuringChangedGroup);
+ cur = casted->weak_next();
+ }
+}
+
+
+void Heap::EvaluateLocalPretenuring(int64_t objects_before_gc) {
+ int64_t objects_after_gc = SizeOfObjects();
+ int64_t old_generation_survival_rate =
+ (objects_after_gc * 100) / objects_before_gc;
+
+ if (old_generation_survival_rate < kOldSurvivalRateLowThreshold) {
+ // Too many objects died in the old generation, pretenuring of wrong
+ // allocation sites may be the cause for that. We have to deopt all
+ // dependent code registered in the allocation sites to re-evaluate
+ // our pretenuring decisions.
+ DeoptAllAllocationSitesDependentCode();
+ if (FLAG_trace_pretenuring) {
+      PrintF("Deopt all allocation sites dependent code due to low survival "
+             "rate in the old generation %d\n",
+             static_cast<int>(old_generation_survival_rate));
+ }
+ }
+}
+
+
void Heap::VisitExternalResources(v8::ExternalResourceVisitor* visitor) {
DisallowHeapAllocation no_allocation;
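The new Heap::EvaluateLocalPretenuring() boils down to a single integer
survival-rate check over the whole old generation, using the new
kOldSurvivalRateLowThreshold constant from heap.h. A self-contained sketch of
that arithmetic (hypothetical names, not the V8 API; the zero guard is only
for this standalone example):

// Standalone sketch of the check performed in Heap::EvaluateLocalPretenuring().
#include <cstdint>
#include <cstdio>

const int64_t kOldSurvivalRateLowThreshold = 20;  // percent, as in heap.h

bool ShouldDeoptAllSites(int64_t objects_before_gc, int64_t objects_after_gc) {
  if (objects_before_gc <= 0) return false;  // guard for this example only
  int64_t survival_rate = (objects_after_gc * 100) / objects_before_gc;
  return survival_rate < kOldSurvivalRateLowThreshold;
}

int main() {
  const int64_t MB = 1024 * 1024;
  // 100 MB of old-generation objects before mark-compact, 15 MB after:
  // 15% survival is below the 20% threshold, so all allocation-site-dependent
  // code is deoptimized and the pretenuring decisions are re-evaluated.
  std::printf("%d\n", ShouldDeoptAllSites(100 * MB, 15 * MB));  // prints 1
  // 60% survival: the current decisions are kept.
  std::printf("%d\n", ShouldDeoptAllSites(100 * MB, 60 * MB));  // prints 0
  return 0;
}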
Index: src/heap.h
diff --git a/src/heap.h b/src/heap.h
index 1c8e0e16e60405f0c03b08b971057b51fa7a9d45..cf9121f47025824718febad017eb4438b400e6db 100644
--- a/src/heap.h
+++ b/src/heap.h
@@ -2186,6 +2186,13 @@ class Heap {
  void ProcessArrayBuffers(WeakObjectRetainer* retainer, bool record_slots);
  void ProcessAllocationSites(WeakObjectRetainer* retainer, bool record_slots);
+  // Deopts all dependent code of the currently registered allocation sites.
+ void DeoptAllAllocationSitesDependentCode();
+
+  // Evaluates local pretenuring and calls DeoptAllAllocationSitesDependentCode
+ // if too many objects died in the old generation.
+ void EvaluateLocalPretenuring(int64_t objects_before_gc);
+
// Called on heap tear-down.
void TearDownArrayBuffers();
@@ -2229,6 +2236,8 @@ class Heap {
static const int kYoungSurvivalRateLowThreshold = 10;
static const int kYoungSurvivalRateAllowedDeviation = 15;
+ static const int kOldSurvivalRateLowThreshold = 20;
+
int young_survivors_after_last_gc_;
int high_survival_rate_period_length_;
int low_survival_rate_period_length_;
Index: src/hydrogen.cc
diff --git a/src/hydrogen.cc b/src/hydrogen.cc
index b6498b1dbbce8786fa2d021df5196e7e274e0df6..bdd2f43c54dc897f6180f520455b226f564119a9 100644
--- a/src/hydrogen.cc
+++ b/src/hydrogen.cc
@@ -9344,16 +9344,10 @@ HInstruction* HOptimizedGraphBuilder::BuildFastLiteral(
HValue* object_size_constant = Add<HConstant>(
boilerplate_object->map()->instance_size());
- // We should pull pre-tenure mode from the allocation site.
- // For now, just see what it says, and remark on it if it sez
-  // we should pretenure. That means the rudimentary counting in the garbage
-  // collector is having an effect.
- PretenureFlag pretenure_flag = isolate()->heap()->GetPretenureMode();
- if (FLAG_allocation_site_pretenuring) {
- pretenure_flag = site_context->current()->GetPretenureMode()
- ? TENURED
- : NOT_TENURED;
- }
+  PretenureFlag pretenure_flag = site_context->current()->GetPretenureMode();
+
+ site_context->current()->AddDependentCompilationInfo(
+ AllocationSite::TENURING, top_info());
HInstruction* object = Add<HAllocate>(object_size_constant, type,
pretenure_flag, instance_type, site_context->current());
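The hunk above bakes the site's current pretenure mode into the HAllocate and,
via AddDependentCompilationInfo(AllocationSite::TENURING, ...), registers the
optimized code with that site. A rough model of the dependency pattern this
relies on (hypothetical types, not the V8 classes): the site keeps a list of
dependent code objects and invalidates them when its tenuring decision changes,
which is what DeoptimizeDependentCodeGroup does in the heap.cc hunks.

// Rough model of the tenuring dependency; hypothetical types, not V8 classes.
#include <cstdio>
#include <vector>

struct OptimizedCode {
  bool valid = true;
  void Deoptimize() { valid = false; }
};

struct AllocationSiteModel {
  bool tenured = false;
  std::vector<OptimizedCode*> dependent_code;

  // Compile time: code that baked in the current decision registers itself.
  void AddDependentCode(OptimizedCode* code) { dependent_code.push_back(code); }

  // GC time: a flipped decision invalidates exactly the registered code.
  void SetTenuringDecision(bool new_decision) {
    if (new_decision == tenured) return;
    tenured = new_decision;
    for (OptimizedCode* code : dependent_code) code->Deoptimize();
    dependent_code.clear();
  }
};

int main() {
  AllocationSiteModel site;
  OptimizedCode code;
  site.AddDependentCode(&code);    // literal compiled with NOT_TENURED baked in
  site.SetTenuringDecision(true);  // GC epilogue flips the site to TENURED
  std::printf("code still valid? %d\n", code.valid);  // prints 0
  return 0;
}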