Revision: 10657
Author: [email protected]
Date: Thu Feb 9 02:19:46 2012
Log: Count-based profiling for primitive functions (hidden behind a flag)
Review URL: https://chromiumcodereview.appspot.com/9361026
http://code.google.com/p/v8/source/detail?r=10657
Modified:
/branches/bleeding_edge/src/arm/full-codegen-arm.cc
/branches/bleeding_edge/src/ast.cc
/branches/bleeding_edge/src/ast.h
/branches/bleeding_edge/src/compiler.cc
/branches/bleeding_edge/src/compiler.h
/branches/bleeding_edge/src/flag-definitions.h
/branches/bleeding_edge/src/full-codegen.cc
/branches/bleeding_edge/src/heap.cc
/branches/bleeding_edge/src/ia32/full-codegen-ia32.cc
/branches/bleeding_edge/src/ic-inl.h
/branches/bleeding_edge/src/ic.cc
/branches/bleeding_edge/src/ic.h
/branches/bleeding_edge/src/mark-compact.cc
/branches/bleeding_edge/src/objects-inl.h
/branches/bleeding_edge/src/objects.h
/branches/bleeding_edge/src/runtime-profiler.cc
/branches/bleeding_edge/src/runtime-profiler.h
/branches/bleeding_edge/src/runtime.cc
/branches/bleeding_edge/src/x64/full-codegen-x64.cc
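In brief: behind the new --counting_profiler flag, this patch adds a counter-based path to the runtime profiler alongside the existing stack-sampling one. Small "primitive" functions get a self-optimization header in their full-codegen prologue that counts down calls and jumps to lazy recompilation when the counter hits zero; IC patching resets per-function tick counts (IC::PostPatching) so optimization is deferred until type feedback has settled; and the profiler's periodic stack walk now marks functions that are "hot and stable", very small, or stable at startup.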
=======================================
--- /branches/bleeding_edge/src/arm/full-codegen-arm.cc Fri Jan 27 05:03:19 2012
+++ /branches/bleeding_edge/src/arm/full-codegen-arm.cc Thu Feb 9 02:19:46 2012
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -138,6 +138,27 @@
__ stop("stop-at");
}
#endif
+
+ // We can optionally optimize based on counters rather than statistical
+ // sampling.
+ if (info->ShouldSelfOptimize()) {
+ if (FLAG_trace_opt) {
+ PrintF("[adding self-optimization header to %s]\n",
+ *info->function()->debug_name()->ToCString());
+ }
+ MaybeObject* maybe_cell = isolate()->heap()->AllocateJSGlobalPropertyCell(
+ Smi::FromInt(Compiler::kCallsUntilPrimitiveOpt));
+ JSGlobalPropertyCell* cell;
+ if (maybe_cell->To(&cell)) {
+ __ mov(r2, Operand(Handle<JSGlobalPropertyCell>(cell)));
+ __ ldr(r3, FieldMemOperand(r2, JSGlobalPropertyCell::kValueOffset));
+ __ sub(r3, r3, Operand(Smi::FromInt(1)), SetCC);
+ __ str(r3, FieldMemOperand(r2, JSGlobalPropertyCell::kValueOffset));
+ Handle<Code> compile_stub(
+ isolate()->builtins()->builtin(Builtins::kLazyRecompile));
+ __ Jump(compile_stub, RelocInfo::CODE_TARGET, eq);
+ }
+ }
// Strict mode functions and builtins need to replace the receiver
// with undefined when called as functions (without an explicit
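For readers not fluent in ARM assembly, the emitted header is logically
equivalent to the following sketch (illustrative pseudo-C++ only; the real
check runs as generated machine code in the function prologue):

    // Each compiled function gets its own property cell, pre-loaded with
    // Compiler::kCallsUntilPrimitiveOpt (200, see compiler.h below).
    int counter = Smi::cast(cell->value())->value();
    cell->set_value(Smi::FromInt(--counter));  // decrement on every call
    if (counter == 0) {
      // Jump to the LazyRecompile builtin, which triggers optimization.
    }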
=======================================
--- /branches/bleeding_edge/src/ast.cc Wed Feb 8 01:56:33 2012
+++ /branches/bleeding_edge/src/ast.cc Thu Feb 9 02:19:46 2012
@@ -166,11 +166,6 @@
LanguageMode FunctionLiteral::language_mode() const {
return scope()->language_mode();
}
-
-
-bool FunctionLiteral::ShouldSelfOptimize() {
- return !flags()->Contains(kDontSelfOptimize);
-}
ObjectLiteral::Property::Property(Literal* key, Expression* value) {
=======================================
--- /branches/bleeding_edge/src/ast.h Wed Feb 8 01:56:33 2012
+++ /branches/bleeding_edge/src/ast.h Thu Feb 9 02:19:46 2012
@@ -1816,8 +1816,6 @@
bool has_duplicate_parameters() {
return HasDuplicateParameters::decode(bitfield_);
}
-
- bool ShouldSelfOptimize();
int ast_node_count() { return ast_properties_.node_count(); }
AstProperties::Flags* flags() { return ast_properties_.flags(); }
=======================================
--- /branches/bleeding_edge/src/compiler.cc Wed Feb 8 01:56:33 2012
+++ /branches/bleeding_edge/src/compiler.cc Thu Feb 9 02:19:46 2012
@@ -108,6 +108,18 @@
!scope_->inside_with();
SetMode(is_optimizable_closure ? BASE : NONOPT);
}
+
+
+// Primitive functions are unlikely to be picked up by the stack-walking
+// profiler, so they trigger their own optimization when they're called
+// for the Compiler::kCallsUntilPrimitiveOpt-th time.
+bool CompilationInfo::ShouldSelfOptimize() {
+ return FLAG_counting_profiler &&
+ FLAG_crankshaft &&
+ !Serializer::enabled() &&
+ !function()->flags()->Contains(kDontSelfOptimize) &&
+ (shared_info().is_null() || !shared_info()->optimization_disabled());
+}
void CompilationInfo::AbortOptimization() {
@@ -654,6 +666,7 @@
shared->set_code_age(0);
shared->set_dont_crankshaft(lit->flags()->Contains(kDontOptimize));
shared->set_dont_inline(lit->flags()->Contains(kDontInline));
+ shared->set_ast_node_count(lit->ast_node_count());
if (info->AllowOptimize() && !shared->optimization_disabled()) {
// If we're asked to always optimize, we compile the optimized
=======================================
--- /branches/bleeding_edge/src/compiler.h Thu Nov 24 07:17:04 2011
+++ /branches/bleeding_edge/src/compiler.h Thu Feb 9 02:19:46 2012
@@ -167,6 +167,9 @@
bool AllowOptimize() {
return V8::UseCrankshaft() && !closure_.is_null();
}
+
+ // Determines whether or not to insert a self-optimization header.
+ bool ShouldSelfOptimize();
// Disable all optimization attempts of this info for the rest of the
// current compilation pipeline.
@@ -280,6 +283,9 @@
static const int kMaxInliningLevels = 3;
+ // Call count before primitive functions trigger their own optimization.
+ static const int kCallsUntilPrimitiveOpt = 200;
+
// All routines return a SharedFunctionInfo.
// If an error occurs an exception is raised and the return handle
// contains NULL.
=======================================
--- /branches/bleeding_edge/src/flag-definitions.h Wed Feb 8 07:44:07 2012
+++ /branches/bleeding_edge/src/flag-definitions.h Thu Feb 9 02:19:46 2012
@@ -165,6 +165,9 @@
DEFINE_bool(optimize_closures, true, "optimize closures")
DEFINE_int(loop_weight, 1, "loop weight for representation inference")
+// Count-based optimization decisions.
+DEFINE_bool(counting_profiler, false, "use experimental counter-based profiler")
+
// assembler-ia32.cc / assembler-arm.cc / assembler-x64.cc
DEFINE_bool(debug_code, false,
"generate extra code (assertions) for debugging")
=======================================
--- /branches/bleeding_edge/src/full-codegen.cc Thu Feb 9 01:43:37 2012
+++ /branches/bleeding_edge/src/full-codegen.cc Thu Feb 9 02:19:46 2012
@@ -297,6 +297,9 @@
code->set_stack_check_table_offset(table_offset);
CodeGenerator::PrintCode(code, info);
info->SetCode(code); // May be an empty handle.
+ if (!code.is_null()) {
+ isolate->runtime_profiler()->NotifyCodeGenerated(code->instruction_size());
+ }
#ifdef ENABLE_GDB_JIT_INTERFACE
if (FLAG_gdbjit && !code.is_null()) {
GDBJITLineInfo* lineinfo =
=======================================
--- /branches/bleeding_edge/src/heap.cc Wed Feb 8 07:39:41 2012
+++ /branches/bleeding_edge/src/heap.cc Thu Feb 9 02:19:46 2012
@@ -1201,7 +1201,9 @@
promotion_queue_.Destroy();
LiveObjectList::UpdateReferencesForScavengeGC();
- isolate()->runtime_profiler()->UpdateSamplesAfterScavenge();
+ if (!FLAG_counting_profiler) {
+ isolate()->runtime_profiler()->UpdateSamplesAfterScavenge();
+ }
incremental_marking()->UpdateMarkingDequeAfterScavenge();
ASSERT(new_space_front == new_space_.top());
@@ -2866,6 +2868,7 @@
share->set_initial_map(undefined_value(), SKIP_WRITE_BARRIER);
share->set_this_property_assignments(undefined_value(), SKIP_WRITE_BARRIER);
share->set_deopt_counter(FLAG_deopt_every_n_times);
+ share->set_profiler_ticks(0);
share->set_ast_node_count(0);
// Set integer fields (smi or int, depending on the architecture).
=======================================
--- /branches/bleeding_edge/src/ia32/full-codegen-ia32.cc Fri Jan 27 05:03:19 2012
+++ /branches/bleeding_edge/src/ia32/full-codegen-ia32.cc Thu Feb 9 02:19:46 2012
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -128,6 +128,26 @@
__ int3();
}
#endif
+
+ // We can optionally optimize based on counters rather than statistical
+ // sampling.
+ if (info->ShouldSelfOptimize()) {
+ if (FLAG_trace_opt) {
+ PrintF("[adding self-optimization header to %s]\n",
+ *info->function()->debug_name()->ToCString());
+ }
+ MaybeObject* maybe_cell = isolate()->heap()->AllocateJSGlobalPropertyCell(
+ Smi::FromInt(Compiler::kCallsUntilPrimitiveOpt));
+ JSGlobalPropertyCell* cell;
+ if (maybe_cell->To(&cell)) {
+ __ sub(Operand::Cell(Handle<JSGlobalPropertyCell>(cell)),
+ Immediate(Smi::FromInt(1)));
+ Handle<Code> compile_stub(
+ isolate()->builtins()->builtin(Builtins::kLazyRecompile));
+ STATIC_ASSERT(kSmiTag == 0);
+ __ j(zero, compile_stub);
+ }
+ }
// Strict mode functions and builtins need to replace the receiver
// with undefined when called as functions (without an explicit
=======================================
--- /branches/bleeding_edge/src/ic-inl.h Mon Dec 5 13:54:45 2011
+++ /branches/bleeding_edge/src/ic-inl.h Thu Feb 9 02:19:46 2012
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -29,6 +29,8 @@
#define V8_IC_INL_H_
#include "ic.h"
+
+#include "compiler.h"
#include "debug.h"
#include "macro-assembler.h"
@@ -89,6 +91,7 @@
Assembler::set_target_address_at(address, target->instruction_start());
target->GetHeap()->incremental_marking()->RecordCodeTargetPatch(address,
target);
+ PostPatching();
}
=======================================
--- /branches/bleeding_edge/src/ic.cc Thu Feb 9 01:11:04 2012
+++ /branches/bleeding_edge/src/ic.cc Thu Feb 9 02:19:46 2012
@@ -290,6 +290,31 @@
type, HandleVector(&name, 1));
return isolate()->Throw(*error);
}
+
+
+void IC::PostPatching() {
+ if (FLAG_counting_profiler) {
+ Isolate::Current()->runtime_profiler()->NotifyICChanged();
+ // We do not want to optimize until the ICs have settled down,
+ // so when they are patched, we postpone optimization for the
+ // current function and the functions above it on the stack that
+ // might want to inline this one.
+ StackFrameIterator it;
+ if (it.done()) return;
+ it.Advance();
+ static const int kStackFramesToMark = Compiler::kMaxInliningLevels - 1;
+ for (int i = 0; i < kStackFramesToMark; ++i) {
+ if (it.done()) return;
+ StackFrame* raw_frame = it.frame();
+ if (raw_frame->is_java_script()) {
+ JSFunction* function =
+ JSFunction::cast(JavaScriptFrame::cast(raw_frame)->function());
+ function->shared()->set_profiler_ticks(0);
+ }
+ it.Advance();
+ }
+ }
+}
void IC::Clear(Address address) {
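Note the walk's shape: the first Advance() skips the topmost frame (the one
doing the patching), and the loop then clears profiler_ticks on up to
Compiler::kMaxInliningLevels - 1 JavaScript frames, i.e. exactly the callers
that might later try to inline the function whose IC just changed.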
=======================================
--- /branches/bleeding_edge/src/ic.h Fri Dec 9 01:26:14 2011
+++ /branches/bleeding_edge/src/ic.h Thu Feb 9 02:19:46 2012
@@ -165,6 +165,7 @@
// Access the target code for the given IC address.
static inline Code* GetTargetAtAddress(Address address);
static inline void SetTargetAtAddress(Address address, Code* target);
+ static void PostPatching();
private:
// Frame pointer for the frame that uses (calls) the IC.
=======================================
--- /branches/bleeding_edge/src/mark-compact.cc Wed Feb 8 07:44:07 2012
+++ /branches/bleeding_edge/src/mark-compact.cc Thu Feb 9 02:19:46 2012
@@ -2373,8 +2373,10 @@
code_flusher_->ProcessCandidates();
}
- // Clean up dead objects from the runtime profiler.
- heap()->isolate()->runtime_profiler()->RemoveDeadSamples();
+ if (!FLAG_counting_profiler) {
+ // Clean up dead objects from the runtime profiler.
+ heap()->isolate()->runtime_profiler()->RemoveDeadSamples();
+ }
}
@@ -3381,9 +3383,11 @@
heap_->UpdateReferencesInExternalStringTable(
&UpdateReferenceInExternalStringTableEntry);
- // Update JSFunction pointers from the runtime profiler.
- heap()->isolate()->runtime_profiler()->UpdateSamplesAfterCompact(
- &updating_visitor);
+ if (!FLAG_counting_profiler) {
+ // Update JSFunction pointers from the runtime profiler.
+ heap()->isolate()->runtime_profiler()->UpdateSamplesAfterCompact(
+ &updating_visitor);
+ }
EvacuationWeakObjectRetainer evacuation_object_retainer;
heap()->ProcessWeakReferences(&evacuation_object_retainer);
=======================================
--- /branches/bleeding_edge/src/objects-inl.h Wed Feb 8 01:56:33 2012
+++ /branches/bleeding_edge/src/objects-inl.h Thu Feb 9 02:19:46 2012
@@ -3530,6 +3530,8 @@
ACCESSORS(SharedFunctionInfo, this_property_assignments, Object,
kThisPropertyAssignmentsOffset)
+SMI_ACCESSORS(SharedFunctionInfo, profiler_ticks, kProfilerTicksOffset)
+
BOOL_ACCESSORS(FunctionTemplateInfo, flag, hidden_prototype,
kHiddenPrototypeBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, undetectable, kUndetectableBit)
=======================================
--- /branches/bleeding_edge/src/objects.h Thu Feb 9 00:58:19 2012
+++ /branches/bleeding_edge/src/objects.h Thu Feb 9 02:19:46 2012
@@ -5200,6 +5200,9 @@
inline int deopt_counter();
inline void set_deopt_counter(int counter);
+ inline int profiler_ticks();
+ inline void set_profiler_ticks(int ticks);
+
inline int ast_node_count();
inline void set_ast_node_count(int count);
@@ -5375,10 +5378,12 @@
kInferredNameOffset + kPointerSize;
static const int kThisPropertyAssignmentsOffset =
kInitialMapOffset + kPointerSize;
+ static const int kProfilerTicksOffset =
+ kThisPropertyAssignmentsOffset + kPointerSize;
#if V8_HOST_ARCH_32_BIT
// Smi fields.
static const int kLengthOffset =
- kThisPropertyAssignmentsOffset + kPointerSize;
+ kProfilerTicksOffset + kPointerSize;
static const int kFormalParameterCountOffset = kLengthOffset + kPointerSize;
static const int kExpectedNofPropertiesOffset =
kFormalParameterCountOffset + kPointerSize;
@@ -5412,7 +5417,7 @@
// word is not set and thus this word cannot be treated as pointer
// to HeapObject during old space traversal.
static const int kLengthOffset =
- kThisPropertyAssignmentsOffset + kPointerSize;
+ kProfilerTicksOffset + kPointerSize;
static const int kFormalParameterCountOffset =
kLengthOffset + kIntSize;
=======================================
--- /branches/bleeding_edge/src/runtime-profiler.cc Fri Jan 13 05:09:52 2012
+++ /branches/bleeding_edge/src/runtime-profiler.cc Thu Feb 9 02:19:46 2012
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -46,6 +46,8 @@
// Optimization sampler constants.
static const int kSamplerFrameCount = 2;
+
+// Constants for statistical profiler.
static const int kSamplerFrameWeight[kSamplerFrameCount] = { 2, 1 };
static const int kSamplerTicksBetweenThresholdAdjustment = 32;
@@ -58,6 +60,16 @@
static const int kSizeLimit = 1500;
+// Constants for counter-based profiler.
+
+// Number of times a function has to be seen on the stack before it is
+// optimized.
+static const int kProfilerTicksBeforeOptimization = 2;
+
+// Maximum size in bytes of generated code for a function to be optimized
+// the very first time it is seen on the stack.
+static const int kMaxSizeEarlyOpt = 500;
+
Atomic32 RuntimeProfiler::state_ = 0;
// TODO(isolates): Create the semaphore lazily and clean it up when no
@@ -90,13 +102,13 @@
}
-void RuntimeProfiler::Optimize(JSFunction* function) {
+void RuntimeProfiler::Optimize(JSFunction* function, const char* reason) {
ASSERT(function->IsOptimizable());
if (FLAG_trace_opt) {
PrintF("[marking ");
function->PrintName();
PrintF(" 0x%" V8PRIxPTR,
reinterpret_cast<intptr_t>(function->address()));
- PrintF(" for recompilation");
+ PrintF(" for recompilation, reason: %s", reason);
PrintF("]\n");
}
@@ -192,17 +204,19 @@
JavaScriptFrame* frame = it.frame();
JSFunction* function = JSFunction::cast(frame->function());
- // Adjust threshold each time we have processed
- // a certain number of ticks.
- if (sampler_ticks_until_threshold_adjustment_ > 0) {
- sampler_ticks_until_threshold_adjustment_--;
- if (sampler_ticks_until_threshold_adjustment_ <= 0) {
- // If the threshold is not already at the minimum
- // modify and reset the ticks until next adjustment.
- if (sampler_threshold_ > kSamplerThresholdMin) {
- sampler_threshold_ -= kSamplerThresholdDelta;
- sampler_ticks_until_threshold_adjustment_ =
- kSamplerTicksBetweenThresholdAdjustment;
+ if (!FLAG_counting_profiler) {
+ // Adjust threshold each time we have processed
+ // a certain number of ticks.
+ if (sampler_ticks_until_threshold_adjustment_ > 0) {
+ sampler_ticks_until_threshold_adjustment_--;
+ if (sampler_ticks_until_threshold_adjustment_ <= 0) {
+ // If the threshold is not already at the minimum
+ // modify and reset the ticks until next adjustment.
+ if (sampler_threshold_ > kSamplerThresholdMin) {
+ sampler_threshold_ -= kSamplerThresholdDelta;
+ sampler_ticks_until_threshold_adjustment_ =
+ kSamplerTicksBetweenThresholdAdjustment;
+ }
}
}
}
@@ -217,25 +231,55 @@
// Do not record non-optimizable functions.
if (!function->IsOptimizable()) continue;
- samples[sample_count++] = function;
-
- int function_size = function->shared()->SourceSize();
- int threshold_size_factor = (function_size > kSizeLimit)
- ? sampler_threshold_size_factor_
- : 1;
-
- int threshold = sampler_threshold_ * threshold_size_factor;
-
- if (LookupSample(function) >= threshold) {
- Optimize(function);
+
+ if (FLAG_counting_profiler) {
+ int ticks = function->shared()->profiler_ticks();
+
+ if (ticks >= kProfilerTicksBeforeOptimization) {
+ // If this particular function hasn't had any ICs patched for enough
+ // ticks, optimize it now.
+ Optimize(function, "hot and stable");
+ } else if (!any_ic_changed_ &&
+ function->shared()->code()->instruction_size() < kMaxSizeEarlyOpt) {
+ // If no IC was patched since the last tick and this function is very
+ // small, optimistically optimize it now.
+ Optimize(function, "small function");
+ } else if (!code_generated_ &&
+ !any_ic_changed_ &&
+ total_code_generated_ > 0 &&
+ total_code_generated_ < 2000) {
+ // If no code was generated and no IC was patched since the last tick,
+ // but a little code has already been generated since last Reset(),
+ // then type info might already be stable and we can optimize now.
+ Optimize(function, "stable on startup");
+ } else {
+ function->shared()->set_profiler_ticks(ticks + 1);
+ }
+ } else { // !FLAG_counting_profiler
+ samples[sample_count++] = function;
+
+ int function_size = function->shared()->SourceSize();
+ int threshold_size_factor = (function_size > kSizeLimit)
+ ? sampler_threshold_size_factor_
+ : 1;
+
+ int threshold = sampler_threshold_ * threshold_size_factor;
+
+ if (LookupSample(function) >= threshold) {
+ Optimize(function, "sampler window lookup");
+ }
}
}
-
- // Add the collected functions as samples. It's important not to do
- // this as part of collecting them because this will interfere with
- // the sample lookup in case of recursive functions.
- for (int i = 0; i < sample_count; i++) {
- AddSample(samples[i], kSamplerFrameWeight[i]);
+ if (FLAG_counting_profiler) {
+ any_ic_changed_ = false;
+ code_generated_ = false;
+ } else { // !FLAG_counting_profiler
+ // Add the collected functions as samples. It's important not to do
+ // this as part of collecting them because this will interfere with
+ // the sample lookup in case of recursive functions.
+ for (int i = 0; i < sample_count; i++) {
+ AddSample(samples[i], kSamplerFrameWeight[i]);
+ }
}
}
@@ -247,7 +291,9 @@
void RuntimeProfiler::SetUp() {
ASSERT(has_been_globally_set_up_);
- ClearSampleBuffer();
+ if (!FLAG_counting_profiler) {
+ ClearSampleBuffer();
+ }
// If the ticker hasn't already started, make sure to do so to get
// the ticks for the runtime profiler.
if (IsEnabled()) isolate_->logger()->EnsureTickerStarted();
@@ -255,10 +301,14 @@
void RuntimeProfiler::Reset() {
- sampler_threshold_ = kSamplerThresholdInit;
- sampler_threshold_size_factor_ = kSamplerThresholdSizeFactorInit;
- sampler_ticks_until_threshold_adjustment_ =
- kSamplerTicksBetweenThresholdAdjustment;
+ if (FLAG_counting_profiler) {
+ total_code_generated_ = 0;
+ } else { // !FLAG_counting_profiler
+ sampler_threshold_ = kSamplerThresholdInit;
+ sampler_threshold_size_factor_ = kSamplerThresholdSizeFactorInit;
+ sampler_ticks_until_threshold_adjustment_ =
+ kSamplerTicksBetweenThresholdAdjustment;
+ }
}
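Condensed, the counter-based decision made per function during the stack walk
is (a restatement of the hunk above, not new code):

    if (ticks >= kProfilerTicksBeforeOptimization) {
      Optimize(function, "hot and stable");     // no IC churn for 2+ ticks
    } else if (!any_ic_changed_ &&
               function->shared()->code()->instruction_size() < kMaxSizeEarlyOpt) {
      Optimize(function, "small function");     // tiny and quiet: optimize early
    } else if (!code_generated_ && !any_ic_changed_ &&
               total_code_generated_ > 0 && total_code_generated_ < 2000) {
      Optimize(function, "stable on startup");  // little code generated, all quiet
    } else {
      function->shared()->set_profiler_ticks(ticks + 1);
    }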
=======================================
--- /branches/bleeding_edge/src/runtime-profiler.h Fri Jan 13 05:09:52 2012
+++ /branches/bleeding_edge/src/runtime-profiler.h Thu Feb 9 02:19:46 2012
@@ -1,4 +1,4 @@
-// Copyright 2010 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -60,6 +60,15 @@
Object** SamplerWindowAddress();
int SamplerWindowSize();
+
+ void NotifyICChanged() { any_ic_changed_ = true; }
+
+ void NotifyCodeGenerated(int generated_code_size) {
+ if (FLAG_counting_profiler) {
+ code_generated_ = true;
+ total_code_generated_ += generated_code_size;
+ }
+ }
// Rate limiting support.
@@ -97,7 +106,7 @@
static void HandleWakeUp(Isolate* isolate);
- void Optimize(JSFunction* function);
+ void Optimize(JSFunction* function, const char* reason);
void AttemptOnStackReplacement(JSFunction* function);
@@ -119,6 +128,10 @@
int sampler_window_position_;
int sampler_window_weight_[kSamplerWindowSize];
+ bool any_ic_changed_;
+ bool code_generated_;
+ int total_code_generated_;
+
// Possible state values:
// -1 => the profiler thread is waiting on the semaphore
// 0 or positive => the number of isolates running JavaScript code.
=======================================
--- /branches/bleeding_edge/src/runtime.cc Thu Feb 9 01:11:04 2012
+++ /branches/bleeding_edge/src/runtime.cc Thu Feb 9 02:19:46 2012
@@ -8427,6 +8427,8 @@
ASSERT(args.length() == 1);
Handle<JSFunction> function = args.at<JSFunction>(0);
+ function->shared()->set_profiler_ticks(0);
+
// If the function is not compiled ignore the lazy
// recompilation. This can happen if the debugger is activated and
// the function is returned to the not compiled state.
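Resetting profiler_ticks on entry to lazy recompilation means a function that
was just marked (or whose recompilation is ignored because it is not
compiled) has to accumulate ticks from scratch before it can be marked again.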
=======================================
--- /branches/bleeding_edge/src/x64/full-codegen-x64.cc Fri Jan 27 05:03:19 2012
+++ /branches/bleeding_edge/src/x64/full-codegen-x64.cc Thu Feb 9 02:19:46 2012
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -128,6 +128,27 @@
__ int3();
}
#endif
+
+ // We can optionally optimize based on counters rather than statistical
+ // sampling.
+ if (info->ShouldSelfOptimize()) {
+ if (FLAG_trace_opt) {
+ PrintF("[adding self-optimization header to %s]\n",
+ *info->function()->debug_name()->ToCString());
+ }
+ MaybeObject* maybe_cell = isolate()->heap()->AllocateJSGlobalPropertyCell(
+ Smi::FromInt(Compiler::kCallsUntilPrimitiveOpt));
+ JSGlobalPropertyCell* cell;
+ if (maybe_cell->To(&cell)) {
+ __ movq(rax, Handle<JSGlobalPropertyCell>(cell),
+ RelocInfo::EMBEDDED_OBJECT);
+ __ SmiAddConstant(FieldOperand(rax, JSGlobalPropertyCell::kValueOffset),
+ Smi::FromInt(-1));
+ Handle<Code> compile_stub(
+ isolate()->builtins()->builtin(Builtins::kLazyRecompile));
+ __ j(zero, compile_stub, RelocInfo::CODE_TARGET);
+ }
+ }
// Strict mode functions and builtins need to replace the receiver
// with undefined when called as functions (without an explicit
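All three ports emit the same header; only the instruction selection differs:
ARM decrements with sub(..., SetCC) and jumps on eq, ia32 subtracts 1
directly on the cell's memory operand and branches on the zero flag, and x64
adds Smi::FromInt(-1) via SmiAddConstant. In each case the test relies on the
zero Smi having an all-zero bit pattern (kSmiTag == 0, as the ia32
STATIC_ASSERT notes).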