Revision: 18266
Author:   [email protected]
Date:     Fri Dec  6 09:52:40 2013 UTC
Log:      Remove outdated profiler flags

[email protected]

Review URL: https://codereview.chromium.org/103293006
http://code.google.com/p/v8/source/detail?r=18266

Modified:
 /branches/bleeding_edge/src/api.cc
 /branches/bleeding_edge/src/arm/full-codegen-arm.cc
 /branches/bleeding_edge/src/compiler.cc
 /branches/bleeding_edge/src/flag-definitions.h
 /branches/bleeding_edge/src/heap.cc
 /branches/bleeding_edge/src/ia32/full-codegen-ia32.cc
 /branches/bleeding_edge/src/ic.cc
 /branches/bleeding_edge/src/isolate.cc
 /branches/bleeding_edge/src/mark-compact.cc
 /branches/bleeding_edge/src/mips/full-codegen-mips.cc
 /branches/bleeding_edge/src/runtime-profiler.cc
 /branches/bleeding_edge/src/runtime-profiler.h
 /branches/bleeding_edge/src/x64/full-codegen-x64.cc

=======================================
--- /branches/bleeding_edge/src/api.cc  Mon Dec  2 18:12:01 2013 UTC
+++ /branches/bleeding_edge/src/api.cc  Fri Dec  6 09:52:40 2013 UTC
@@ -5318,7 +5318,6 @@
       global_constructor->set_needs_access_check(
           proxy_constructor->needs_access_check());
     }
-    isolate->runtime_profiler()->Reset();
   }
   // Leave V8.

=======================================
--- /branches/bleeding_edge/src/arm/full-codegen-arm.cc Mon Dec  2 13:14:07 2013 UTC
+++ /branches/bleeding_edge/src/arm/full-codegen-arm.cc Fri Dec  6 09:52:40 2013 UTC
@@ -334,10 +334,6 @@

 void FullCodeGenerator::EmitProfilingCounterReset() {
   int reset_value = FLAG_interrupt_budget;
-  if (info_->ShouldSelfOptimize() && !FLAG_retry_self_opt) {
-    // Self-optimization is a one-off thing: if it fails, don't try again.
-    reset_value = Smi::kMaxValue;
-  }
   if (isolate()->IsDebuggerActive()) {
     // Detect debug break requests as soon as possible.
     reset_value = FLAG_interrupt_budget >> 4;
@@ -355,13 +351,10 @@
   Assembler::BlockConstPoolScope block_const_pool(masm_);
   Label ok;

-  int weight = 1;
-  if (FLAG_weighted_back_edges) {
-    ASSERT(back_edge_target->is_bound());
-    int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
-    weight = Min(kMaxBackEdgeWeight,
-                 Max(1, distance / kCodeSizeMultiplier));
-  }
+  ASSERT(back_edge_target->is_bound());
+  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
+  int weight = Min(kMaxBackEdgeWeight,
+                   Max(1, distance / kCodeSizeMultiplier));
   EmitProfilingCounterDecrement(weight);
   __ b(pl, &ok);
   __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
@@ -394,32 +387,24 @@
       __ push(r0);
       __ CallRuntime(Runtime::kTraceExit, 1);
     }
-    if (FLAG_interrupt_at_exit || FLAG_self_optimization) {
-      // Pretend that the exit is a backwards jump to the entry.
-      int weight = 1;
-      if (info_->ShouldSelfOptimize()) {
-        weight = FLAG_interrupt_budget / FLAG_self_opt_count;
-      } else if (FLAG_weighted_back_edges) {
-        int distance = masm_->pc_offset();
-        weight = Min(kMaxBackEdgeWeight,
-                     Max(1, distance / kCodeSizeMultiplier));
-      }
-      EmitProfilingCounterDecrement(weight);
-      Label ok;
-      __ b(pl, &ok);
-      __ push(r0);
-      if (info_->ShouldSelfOptimize() && FLAG_direct_self_opt) {
-        __ ldr(r2, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
-        __ push(r2);
-        __ CallRuntime(Runtime::kOptimizeFunctionOnNextCall, 1);
-      } else {
-        __ Call(isolate()->builtins()->InterruptCheck(),
-                RelocInfo::CODE_TARGET);
-      }
-      __ pop(r0);
-      EmitProfilingCounterReset();
-      __ bind(&ok);
+    // Pretend that the exit is a backwards jump to the entry.
+    int weight = 1;
+    if (info_->ShouldSelfOptimize()) {
+      weight = FLAG_interrupt_budget / FLAG_self_opt_count;
+    } else {
+      int distance = masm_->pc_offset();
+      weight = Min(kMaxBackEdgeWeight,
+                   Max(1, distance / kCodeSizeMultiplier));
     }
+    EmitProfilingCounterDecrement(weight);
+    Label ok;
+    __ b(pl, &ok);
+    __ push(r0);
+    __ Call(isolate()->builtins()->InterruptCheck(),
+            RelocInfo::CODE_TARGET);
+    __ pop(r0);
+    EmitProfilingCounterReset();
+    __ bind(&ok);

 #ifdef DEBUG
     // Add a label for checking the size of the code used for returning.
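
With FLAG_weighted_back_edges removed, the distance-weighted back-edge
bookkeeping above is now unconditional on every port. A minimal standalone
sketch of the surviving clamp -- not V8 source, and the two constants are
illustrative placeholders rather than the real per-architecture values:

  // Sketch: back-edge interrupt weight grows with the byte distance of
  // the jump, clamped to [1, kMaxBackEdgeWeight].
  #include <algorithm>
  #include <cstdio>

  static const int kMaxBackEdgeWeight = 127;   // illustrative
  static const int kCodeSizeMultiplier = 150;  // illustrative

  static int BackEdgeWeight(int distance_in_bytes) {
    return std::min(kMaxBackEdgeWeight,
                    std::max(1, distance_in_bytes / kCodeSizeMultiplier));
  }

  int main() {
    // A tight loop drains the budget slowly; a huge loop body, quickly.
    std::printf("%d %d\n", BackEdgeWeight(40), BackEdgeWeight(100000));
    return 0;
  }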
=======================================
--- /branches/bleeding_edge/src/compiler.cc     Thu Dec  5 16:17:44 2013 UTC
+++ /branches/bleeding_edge/src/compiler.cc     Fri Dec  6 09:52:40 2013 UTC
@@ -235,8 +235,7 @@
 // profiler, so they trigger their own optimization when they're called
 // for the SharedFunctionInfo::kCallsUntilPrimitiveOptimization-th time.
 bool CompilationInfo::ShouldSelfOptimize() {
-  return FLAG_self_optimization &&
-      FLAG_crankshaft &&
+  return FLAG_crankshaft &&
       !function()->flags()->Contains(kDontSelfOptimize) &&
       !function()->dont_optimize() &&
       function()->scope()->AllowsLazyCompilation() &&
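
ShouldSelfOptimize() thus no longer consults FLAG_self_optimization:
self-optimization of primitive functions is gated only by Crankshaft
availability and per-function properties. A hedged paraphrase of the
reduced predicate -- the struct and flag below are hypothetical stand-ins,
not V8 types:

  struct FunctionTraits {
    bool dont_self_optimize;       // the kDontSelfOptimize bit was set
    bool dont_optimize;            // optimization disabled outright
    bool allows_lazy_compilation;  // scope permits lazy compilation
  };

  static bool FLAG_crankshaft = true;  // stand-in for the real flag

  static bool ShouldSelfOptimize(const FunctionTraits& f) {
    return FLAG_crankshaft &&
           !f.dont_self_optimize &&
           !f.dont_optimize &&
           f.allows_lazy_compilation;
  }

  int main() {
    FunctionTraits f = {false, false, true};
    return ShouldSelfOptimize(f) ? 0 : 1;
  }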
=======================================
--- /branches/bleeding_edge/src/flag-definitions.h Thu Dec  5 17:26:22 2013 UTC
+++ /branches/bleeding_edge/src/flag-definitions.h Fri Dec  6 09:52:40 2013 UTC
@@ -347,19 +347,8 @@

 DEFINE_bool(new_string_add, true, "enable new string addition")

-// Experimental profiler changes.
-DEFINE_bool(experimental_profiler, true, "enable all profiler experiments")
-DEFINE_bool(watch_ic_patching, false, "profiler considers IC stability")
+// Profiler flags.
DEFINE_int(frame_count, 1, "number of stack frames inspected by the profiler")
-DEFINE_bool(self_optimization, false,
-            "primitive functions trigger their own optimization")
-DEFINE_bool(direct_self_opt, false,
-            "call recompile stub directly when self-optimizing")
-DEFINE_bool(retry_self_opt, false, "re-try self-optimization if it failed")
-DEFINE_bool(interrupt_at_exit, false,
-            "insert an interrupt check at function exit")
-DEFINE_bool(weighted_back_edges, false,
-            "weight back edges by jump distance for interrupt triggering")
            // 0x1700 fits in the immediate field of an ARM instruction.
 DEFINE_int(interrupt_budget, 0x1700,
            "execution budget before interrupt is triggered")
@@ -367,13 +356,6 @@
"percentage of ICs that must have type info to allow optimization")
 DEFINE_int(self_opt_count, 130, "call count before self-optimization")

-DEFINE_implication(experimental_profiler, watch_ic_patching)
-DEFINE_implication(experimental_profiler, self_optimization)
-// Not implying direct_self_opt here because it seems to be a bad idea.
-DEFINE_implication(experimental_profiler, retry_self_opt)
-DEFINE_implication(experimental_profiler, interrupt_at_exit)
-DEFINE_implication(experimental_profiler, weighted_back_edges)
-
 DEFINE_bool(trace_opt_verbose, false, "extra verbose compilation tracing")
 DEFINE_implication(trace_opt_verbose, trace_opt)
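
Only the counter-based knobs survive: frame_count, interrupt_budget,
type_info_threshold and self_opt_count. A quick standalone check of the
budget arithmetic (0x1700 decrements before an interrupt check fires; the
debugger-active path in EmitProfilingCounterReset() shifts the budget
right by four):

  #include <cstdio>

  int main() {
    const int interrupt_budget = 0x1700;                          // 5888 ticks
    std::printf("budget:          %d\n", interrupt_budget);       // 5888
    std::printf("debugger-active: %d\n", interrupt_budget >> 4);  // 368
    return 0;
  }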

=======================================
--- /branches/bleeding_edge/src/heap.cc Fri Nov 29 09:54:38 2013 UTC
+++ /branches/bleeding_edge/src/heap.cc Fri Dec  6 09:52:40 2013 UTC
@@ -1548,9 +1548,6 @@

   promotion_queue_.Destroy();

-  if (!FLAG_watch_ic_patching) {
-    isolate()->runtime_profiler()->UpdateSamplesAfterScavenge();
-  }
   incremental_marking()->UpdateMarkingDequeAfterScavenge();

   ScavengeWeakObjectRetainer weak_object_retainer(this);
=======================================
--- /branches/bleeding_edge/src/ia32/full-codegen-ia32.cc Mon Dec  2 13:14:07 2013 UTC
+++ /branches/bleeding_edge/src/ia32/full-codegen-ia32.cc Fri Dec  6 09:52:40 2013 UTC
@@ -319,10 +319,6 @@

 void FullCodeGenerator::EmitProfilingCounterReset() {
   int reset_value = FLAG_interrupt_budget;
-  if (info_->ShouldSelfOptimize() && !FLAG_retry_self_opt) {
-    // Self-optimization is a one-off thing: if it fails, don't try again.
-    reset_value = Smi::kMaxValue;
-  }
   __ mov(ebx, Immediate(profiling_counter_));
   __ mov(FieldOperand(ebx, Cell::kValueOffset),
          Immediate(Smi::FromInt(reset_value)));
@@ -334,13 +330,10 @@
   Comment cmnt(masm_, "[ Back edge bookkeeping");
   Label ok;

-  int weight = 1;
-  if (FLAG_weighted_back_edges) {
-    ASSERT(back_edge_target->is_bound());
-    int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
-    weight = Min(kMaxBackEdgeWeight,
-                 Max(1, distance / kCodeSizeMultiplier));
-  }
+  ASSERT(back_edge_target->is_bound());
+  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
+  int weight = Min(kMaxBackEdgeWeight,
+                   Max(1, distance / kCodeSizeMultiplier));
   EmitProfilingCounterDecrement(weight);
   __ j(positive, &ok, Label::kNear);
   __ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
@@ -372,31 +365,24 @@
       __ push(eax);
       __ CallRuntime(Runtime::kTraceExit, 1);
     }
-    if (FLAG_interrupt_at_exit || FLAG_self_optimization) {
-      // Pretend that the exit is a backwards jump to the entry.
-      int weight = 1;
-      if (info_->ShouldSelfOptimize()) {
-        weight = FLAG_interrupt_budget / FLAG_self_opt_count;
-      } else if (FLAG_weighted_back_edges) {
-        int distance = masm_->pc_offset();
-        weight = Min(kMaxBackEdgeWeight,
-                     Max(1, distance / kCodeSizeMultiplier));
-      }
-      EmitProfilingCounterDecrement(weight);
-      Label ok;
-      __ j(positive, &ok, Label::kNear);
-      __ push(eax);
-      if (info_->ShouldSelfOptimize() && FLAG_direct_self_opt) {
-        __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
-        __ CallRuntime(Runtime::kOptimizeFunctionOnNextCall, 1);
-      } else {
-        __ call(isolate()->builtins()->InterruptCheck(),
-                RelocInfo::CODE_TARGET);
-      }
-      __ pop(eax);
-      EmitProfilingCounterReset();
-      __ bind(&ok);
+    // Pretend that the exit is a backwards jump to the entry.
+    int weight = 1;
+    if (info_->ShouldSelfOptimize()) {
+      weight = FLAG_interrupt_budget / FLAG_self_opt_count;
+    } else {
+      int distance = masm_->pc_offset();
+      weight = Min(kMaxBackEdgeWeight,
+                   Max(1, distance / kCodeSizeMultiplier));
     }
+    EmitProfilingCounterDecrement(weight);
+    Label ok;
+    __ j(positive, &ok, Label::kNear);
+    __ push(eax);
+    __ call(isolate()->builtins()->InterruptCheck(),
+            RelocInfo::CODE_TARGET);
+    __ pop(eax);
+    EmitProfilingCounterReset();
+    __ bind(&ok);
 #ifdef DEBUG
     // Add a label for checking the size of the code used for returning.
     Label check_exit_codesize;
=======================================
--- /branches/bleeding_edge/src/ic.cc   Wed Dec  4 09:27:48 2013 UTC
+++ /branches/bleeding_edge/src/ic.cc   Fri Dec  6 09:52:40 2013 UTC
@@ -440,9 +440,6 @@


 void IC::PostPatching(Address address, Code* target, Code* old_target) {
-  if (FLAG_type_info_threshold == 0 && !FLAG_watch_ic_patching) {
-    return;
-  }
   Isolate* isolate = target->GetHeap()->isolate();
   Code* host = isolate->
       inner_pointer_to_code_cache()->GetCacheEntry(address)->code;
@@ -465,10 +462,8 @@
         TypeFeedbackInfo::cast(host->type_feedback_info());
     info->change_own_type_change_checksum();
   }
-  if (FLAG_watch_ic_patching) {
-    host->set_profiler_ticks(0);
-    isolate->runtime_profiler()->NotifyICChanged();
-  }
+  host->set_profiler_ticks(0);
+  isolate->runtime_profiler()->NotifyICChanged();
   // TODO(2029): When an optimized function is patched, it would
   // be nice to propagate the corresponding type information to its
   // unoptimized version for the benefit of later inlining.
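
Every IC patch now unconditionally zeroes the host code's tick counter and
notifies the profiler, so only functions whose ICs have stopped changing
can accumulate the ticks needed for the "hot and stable" path. A toy model
of that interaction -- sketch only, no V8 types involved:

  #include <cstdio>

  int main() {
    const int kProfilerTicksBeforeOptimization = 2;  // from this change
    int ticks = 0;
    bool ic_patched_since_last_tick[] = {true, true, false, false, false};
    for (bool patched : ic_patched_since_last_tick) {
      // PostPatching resets the counter; OptimizeNow bumps it otherwise.
      ticks = patched ? 0 : ticks + 1;
      std::printf("ticks=%d%s\n", ticks,
                  ticks >= kProfilerTicksBeforeOptimization
                      ? "  -> eligible for optimization" : "");
    }
    return 0;
  }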
=======================================
--- /branches/bleeding_edge/src/isolate.cc      Mon Dec  2 13:14:07 2013 UTC
+++ /branches/bleeding_edge/src/isolate.cc      Fri Dec  6 09:52:40 2013 UTC
@@ -1688,7 +1688,6 @@
     bootstrapper_->TearDown();

     if (runtime_profiler_ != NULL) {
-      runtime_profiler_->TearDown();
       delete runtime_profiler_;
       runtime_profiler_ = NULL;
     }
@@ -2049,7 +2048,6 @@
   if (!create_heap_objects) Assembler::QuietNaN(heap_.nan_value());

   runtime_profiler_ = new RuntimeProfiler(this);
-  runtime_profiler_->SetUp();

   // If we are deserializing, log non-function code objects and compiled
   // functions found in the snapshot.
=======================================
--- /branches/bleeding_edge/src/mark-compact.cc Fri Nov 29 09:47:32 2013 UTC
+++ /branches/bleeding_edge/src/mark-compact.cc Fri Dec  6 09:52:40 2013 UTC
@@ -2421,11 +2421,6 @@
       EnableCodeFlushing(false);
     }
   }
-
-  if (!FLAG_watch_ic_patching) {
-    // Clean up dead objects from the runtime profiler.
-    heap()->isolate()->runtime_profiler()->RemoveDeadSamples();
-  }

   if (FLAG_track_gc_object_stats) {
     heap()->CheckpointObjectStats();
@@ -3514,12 +3509,6 @@
   heap_->UpdateReferencesInExternalStringTable(
       &UpdateReferenceInExternalStringTableEntry);

-  if (!FLAG_watch_ic_patching) {
-    // Update JSFunction pointers from the runtime profiler.
-    heap()->isolate()->runtime_profiler()->UpdateSamplesAfterCompact(
-        &updating_visitor);
-  }
-
   EvacuationWeakObjectRetainer evacuation_object_retainer;
   heap()->ProcessWeakReferences(&evacuation_object_retainer);

=======================================
--- /branches/bleeding_edge/src/mips/full-codegen-mips.cc Mon Dec  2 21:17:16 2013 UTC
+++ /branches/bleeding_edge/src/mips/full-codegen-mips.cc Fri Dec  6 09:52:40 2013 UTC
@@ -341,10 +341,6 @@

 void FullCodeGenerator::EmitProfilingCounterReset() {
   int reset_value = FLAG_interrupt_budget;
-  if (info_->ShouldSelfOptimize() && !FLAG_retry_self_opt) {
-    // Self-optimization is a one-off thing: if it fails, don't try again.
-    reset_value = Smi::kMaxValue;
-  }
   if (isolate()->IsDebuggerActive()) {
     // Detect debug break requests as soon as possible.
     reset_value = FLAG_interrupt_budget >> 4;
@@ -365,13 +361,10 @@
   Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
   Comment cmnt(masm_, "[ Back edge bookkeeping");
   Label ok;
-  int weight = 1;
-  if (FLAG_weighted_back_edges) {
-    ASSERT(back_edge_target->is_bound());
-    int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
-    weight = Min(kMaxBackEdgeWeight,
-                 Max(1, distance / kCodeSizeMultiplier));
-  }
+  ASSERT(back_edge_target->is_bound());
+  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
+  int weight = Min(kMaxBackEdgeWeight,
+                   Max(1, distance / kCodeSizeMultiplier));
   EmitProfilingCounterDecrement(weight);
   __ slt(at, a3, zero_reg);
   __ beq(at, zero_reg, &ok);
@@ -404,32 +397,24 @@
       __ push(v0);
       __ CallRuntime(Runtime::kTraceExit, 1);
     }
-    if (FLAG_interrupt_at_exit || FLAG_self_optimization) {
-      // Pretend that the exit is a backwards jump to the entry.
-      int weight = 1;
-      if (info_->ShouldSelfOptimize()) {
-        weight = FLAG_interrupt_budget / FLAG_self_opt_count;
-      } else if (FLAG_weighted_back_edges) {
-        int distance = masm_->pc_offset();
-        weight = Min(kMaxBackEdgeWeight,
-                     Max(1, distance / kCodeSizeMultiplier));
-      }
-      EmitProfilingCounterDecrement(weight);
-      Label ok;
-      __ Branch(&ok, ge, a3, Operand(zero_reg));
-      __ push(v0);
-      if (info_->ShouldSelfOptimize() && FLAG_direct_self_opt) {
-        __ lw(a2, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
-        __ push(a2);
-        __ CallRuntime(Runtime::kOptimizeFunctionOnNextCall, 1);
-      } else {
-        __ Call(isolate()->builtins()->InterruptCheck(),
-                RelocInfo::CODE_TARGET);
-      }
-      __ pop(v0);
-      EmitProfilingCounterReset();
-      __ bind(&ok);
+    // Pretend that the exit is a backwards jump to the entry.
+    int weight = 1;
+    if (info_->ShouldSelfOptimize()) {
+      weight = FLAG_interrupt_budget / FLAG_self_opt_count;
+    } else {
+      int distance = masm_->pc_offset();
+      weight = Min(kMaxBackEdgeWeight,
+                   Max(1, distance / kCodeSizeMultiplier));
     }
+    EmitProfilingCounterDecrement(weight);
+    Label ok;
+    __ Branch(&ok, ge, a3, Operand(zero_reg));
+    __ push(v0);
+    __ Call(isolate()->builtins()->InterruptCheck(),
+            RelocInfo::CODE_TARGET);
+    __ pop(v0);
+    EmitProfilingCounterReset();
+    __ bind(&ok);

 #ifdef DEBUG
     // Add a label for checking the size of the code used for returning.
=======================================
--- /branches/bleeding_edge/src/runtime-profiler.cc Tue Nov 19 11:52:47 2013 UTC
+++ /branches/bleeding_edge/src/runtime-profiler.cc Fri Dec  6 09:52:40 2013 UTC
@@ -45,24 +45,6 @@
 namespace internal {


-// Optimization sampler constants.
-static const int kSamplerFrameCount = 2;
-
-// Constants for statistical profiler.
-static const int kSamplerFrameWeight[kSamplerFrameCount] = { 2, 1 };
-
-static const int kSamplerTicksBetweenThresholdAdjustment = 32;
-
-static const int kSamplerThresholdInit = 3;
-static const int kSamplerThresholdMin = 1;
-static const int kSamplerThresholdDelta = 1;
-
-static const int kSamplerThresholdSizeFactorInit = 3;
-
-static const int kSizeLimit = 1500;
-
-// Constants for counter based profiler.
-
 // Number of times a function has to be seen on the stack before it is
 // optimized.
 static const int kProfilerTicksBeforeOptimization = 2;
@@ -94,14 +76,7 @@

 RuntimeProfiler::RuntimeProfiler(Isolate* isolate)
     : isolate_(isolate),
-      sampler_threshold_(kSamplerThresholdInit),
-      sampler_threshold_size_factor_(kSamplerThresholdSizeFactorInit),
-      sampler_ticks_until_threshold_adjustment_(
-          kSamplerTicksBetweenThresholdAdjustment),
-      sampler_window_position_(0),
-      any_ic_changed_(false),
-      code_generated_(false) {
-  ClearSampleBuffer();
+      any_ic_changed_(false) {
 }


@@ -187,38 +162,6 @@

   BackEdgeTable::Patch(isolate_, shared->code());
 }
-
-
-void RuntimeProfiler::ClearSampleBuffer() {
-  memset(sampler_window_, 0, sizeof(sampler_window_));
-  memset(sampler_window_weight_, 0, sizeof(sampler_window_weight_));
-}
-
-
-int RuntimeProfiler::LookupSample(JSFunction* function) {
-  int weight = 0;
-  for (int i = 0; i < kSamplerWindowSize; i++) {
-    Object* sample = sampler_window_[i];
-    if (sample != NULL) {
-      bool fits = FLAG_lookup_sample_by_shared
-          ? (function->shared() == JSFunction::cast(sample)->shared())
-          : (function == JSFunction::cast(sample));
-      if (fits) {
-        weight += sampler_window_weight_[i];
-      }
-    }
-  }
-  return weight;
-}
-
-
-void RuntimeProfiler::AddSample(JSFunction* function, int weight) {
-  ASSERT(IsPowerOf2(kSamplerWindowSize));
-  sampler_window_[sampler_window_position_] = function;
-  sampler_window_weight_[sampler_window_position_] = weight;
-  sampler_window_position_ = (sampler_window_position_ + 1) &
-      (kSamplerWindowSize - 1);
-}


 void RuntimeProfiler::OptimizeNow() {
@@ -231,34 +174,14 @@
   // Run through the JavaScript frames and collect them. If we already
   // have a sample of the function, we mark it for optimizations
   // (eagerly or lazily).
-  JSFunction* samples[kSamplerFrameCount];
-  int sample_count = 0;
   int frame_count = 0;
-  int frame_count_limit = FLAG_watch_ic_patching ? FLAG_frame_count
-                                                 : kSamplerFrameCount;
+  int frame_count_limit = FLAG_frame_count;
   for (JavaScriptFrameIterator it(isolate_);
        frame_count++ < frame_count_limit && !it.done();
        it.Advance()) {
     JavaScriptFrame* frame = it.frame();
     JSFunction* function = frame->function();

-    if (!FLAG_watch_ic_patching) {
-      // Adjust threshold each time we have processed
-      // a certain number of ticks.
-      if (sampler_ticks_until_threshold_adjustment_ > 0) {
-        sampler_ticks_until_threshold_adjustment_--;
-        if (sampler_ticks_until_threshold_adjustment_ <= 0) {
-          // If the threshold is not already at the minimum
-          // modify and reset the ticks until next adjustment.
-          if (sampler_threshold_ > kSamplerThresholdMin) {
-            sampler_threshold_ -= kSamplerThresholdDelta;
-            sampler_ticks_until_threshold_adjustment_ =
-                kSamplerTicksBetweenThresholdAdjustment;
-          }
-        }
-      }
-    }
-
     SharedFunctionInfo* shared = function->shared();
     Code* shared_code = shared->code();

@@ -322,116 +245,36 @@
     }
     if (!function->IsOptimizable()) continue;

-    if (FLAG_watch_ic_patching) {
-      int ticks = shared_code->profiler_ticks();
+    int ticks = shared_code->profiler_ticks();

-      if (ticks >= kProfilerTicksBeforeOptimization) {
-        int typeinfo, total, percentage;
-        GetICCounts(shared_code, &typeinfo, &total, &percentage);
-        if (percentage >= FLAG_type_info_threshold) {
-          // If this particular function hasn't had any ICs patched for enough
-          // ticks, optimize it now.
-          Optimize(function, "hot and stable");
-        } else if (ticks >= kTicksWhenNotEnoughTypeInfo) {
-          Optimize(function, "not much type info but very hot");
-        } else {
-          shared_code->set_profiler_ticks(ticks + 1);
-          if (FLAG_trace_opt_verbose) {
-            PrintF("[not yet optimizing ");
-            function->PrintName();
-            PrintF(", not enough type info: %d/%d (%d%%)]\n",
-                   typeinfo, total, percentage);
-          }
-        }
-      } else if (!any_ic_changed_ &&
-                 shared_code->instruction_size() < kMaxSizeEarlyOpt) {
-        // If no IC was patched since the last tick and this function is very
-        // small, optimistically optimize it now.
-        Optimize(function, "small function");
+    if (ticks >= kProfilerTicksBeforeOptimization) {
+      int typeinfo, total, percentage;
+      GetICCounts(shared_code, &typeinfo, &total, &percentage);
+      if (percentage >= FLAG_type_info_threshold) {
+        // If this particular function hasn't had any ICs patched for enough
+        // ticks, optimize it now.
+        Optimize(function, "hot and stable");
+      } else if (ticks >= kTicksWhenNotEnoughTypeInfo) {
+        Optimize(function, "not much type info but very hot");
       } else {
         shared_code->set_profiler_ticks(ticks + 1);
+        if (FLAG_trace_opt_verbose) {
+          PrintF("[not yet optimizing ");
+          function->PrintName();
+          PrintF(", not enough type info: %d/%d (%d%%)]\n",
+                 typeinfo, total, percentage);
+        }
       }
-    } else {  // !FLAG_watch_ic_patching
-      samples[sample_count++] = function;
-
-      int function_size = function->shared()->SourceSize();
-      int threshold_size_factor = (function_size > kSizeLimit)
-          ? sampler_threshold_size_factor_
-          : 1;
-
-      int threshold = sampler_threshold_ * threshold_size_factor;
-
-      if (LookupSample(function) >= threshold) {
-        Optimize(function, "sampler window lookup");
-      }
+    } else if (!any_ic_changed_ &&
+               shared_code->instruction_size() < kMaxSizeEarlyOpt) {
+      // If no IC was patched since the last tick and this function is very
+      // small, optimistically optimize it now.
+      Optimize(function, "small function");
+    } else {
+      shared_code->set_profiler_ticks(ticks + 1);
     }
   }
-  if (FLAG_watch_ic_patching) {
-    any_ic_changed_ = false;
-  } else {  // !FLAG_watch_ic_patching
-    // Add the collected functions as samples. It's important not to do
-    // this as part of collecting them because this will interfere with
-    // the sample lookup in case of recursive functions.
-    for (int i = 0; i < sample_count; i++) {
-      AddSample(samples[i], kSamplerFrameWeight[i]);
-    }
-  }
-}
-
-
-void RuntimeProfiler::SetUp() {
-  if (!FLAG_watch_ic_patching) {
-    ClearSampleBuffer();
-  }
-}
-
-
-void RuntimeProfiler::Reset() {
-  if (!FLAG_watch_ic_patching) {
-    sampler_threshold_ = kSamplerThresholdInit;
-    sampler_threshold_size_factor_ = kSamplerThresholdSizeFactorInit;
-    sampler_ticks_until_threshold_adjustment_ =
-        kSamplerTicksBetweenThresholdAdjustment;
-  }
-}
-
-
-void RuntimeProfiler::TearDown() {
-  // Nothing to do.
-}
-
-
-// Update the pointers in the sampler window after a GC.
-void RuntimeProfiler::UpdateSamplesAfterScavenge() {
-  for (int i = 0; i < kSamplerWindowSize; i++) {
-    Object* function = sampler_window_[i];
-    if (function != NULL && isolate_->heap()->InNewSpace(function)) {
-      MapWord map_word = HeapObject::cast(function)->map_word();
-      if (map_word.IsForwardingAddress()) {
-        sampler_window_[i] = map_word.ToForwardingAddress();
-      } else {
-        sampler_window_[i] = NULL;
-      }
-    }
-  }
-}
-
-
-void RuntimeProfiler::RemoveDeadSamples() {
-  for (int i = 0; i < kSamplerWindowSize; i++) {
-    Object* function = sampler_window_[i];
-    if (function != NULL &&
-        !Marking::MarkBitFrom(HeapObject::cast(function)).Get()) {
-      sampler_window_[i] = NULL;
-    }
-  }
-}
-
-
-void RuntimeProfiler::UpdateSamplesAfterCompact(ObjectVisitor* visitor) {
-  for (int i = 0; i < kSamplerWindowSize; i++) {
-    visitor->VisitPointer(&sampler_window_[i]);
-  }
+  any_ic_changed_ = false;
 }
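
OptimizeNow() is left with only the counter-based policy. A condensed,
standalone sketch of the per-function decision -- names mirror the diff,
but the thresholds are passed in (and the values in main() are
illustrative), since only kProfilerTicksBeforeOptimization = 2 is visible
in this change:

  #include <cstdio>
  #include <string>

  struct Thresholds {
    int ticks_before_opt;        // kProfilerTicksBeforeOptimization
    int type_info_threshold;     // FLAG_type_info_threshold
    int ticks_without_typeinfo;  // kTicksWhenNotEnoughTypeInfo
    int max_size_early_opt;      // kMaxSizeEarlyOpt
  };

  // Returns the optimization reason, or "" to just bump profiler ticks.
  static std::string Decide(int ticks, int typeinfo_percentage,
                            bool any_ic_changed, int instruction_size,
                            const Thresholds& t) {
    if (ticks >= t.ticks_before_opt) {
      if (typeinfo_percentage >= t.type_info_threshold)
        return "hot and stable";
      if (ticks >= t.ticks_without_typeinfo)
        return "not much type info but very hot";
      return "";
    }
    if (!any_ic_changed && instruction_size < t.max_size_early_opt)
      return "small function";  // optimistic early optimization
    return "";
  }

  int main() {
    const Thresholds t = {2, 25, 100, 150};
    std::printf("%s\n", Decide(3, 30, true, 500, t).c_str());
    return 0;
  }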


=======================================
--- /branches/bleeding_edge/src/runtime-profiler.h Mon Jul 29 19:02:16 2013 UTC
+++ /branches/bleeding_edge/src/runtime-profiler.h Fri Dec  6 09:52:40 2013 UTC
@@ -45,47 +45,18 @@

   void OptimizeNow();

-  void SetUp();
-  void Reset();
-  void TearDown();
-
   void NotifyICChanged() { any_ic_changed_ = true; }
-
-  // Rate limiting support.
-
-  void UpdateSamplesAfterScavenge();
-  void RemoveDeadSamples();
-  void UpdateSamplesAfterCompact(ObjectVisitor* visitor);

   void AttemptOnStackReplacement(JSFunction* function);

  private:
-  static const int kSamplerWindowSize = 16;
-
   void Optimize(JSFunction* function, const char* reason);

-  void ClearSampleBuffer();
-
-  void ClearSampleBufferNewSpaceEntries();
-
-  int LookupSample(JSFunction* function);
-
-  void AddSample(JSFunction* function, int weight);
-
   bool CodeSizeOKForOSR(Code* shared_code);

   Isolate* isolate_;

-  int sampler_threshold_;
-  int sampler_threshold_size_factor_;
-  int sampler_ticks_until_threshold_adjustment_;
-
-  Object* sampler_window_[kSamplerWindowSize];
-  int sampler_window_position_;
-  int sampler_window_weight_[kSamplerWindowSize];
-
   bool any_ic_changed_;
-  bool code_generated_;
 };

 } }  // namespace v8::internal
=======================================
--- /branches/bleeding_edge/src/x64/full-codegen-x64.cc Mon Dec  2 13:14:07 2013 UTC
+++ /branches/bleeding_edge/src/x64/full-codegen-x64.cc Fri Dec  6 09:52:40 2013 UTC
@@ -310,10 +310,6 @@

 void FullCodeGenerator::EmitProfilingCounterReset() {
   int reset_value = FLAG_interrupt_budget;
-  if (info_->ShouldSelfOptimize() && !FLAG_retry_self_opt) {
-    // Self-optimization is a one-off thing; if it fails, don't try again.
-    reset_value = Smi::kMaxValue;
-  }
   __ movq(rbx, profiling_counter_, RelocInfo::EMBEDDED_OBJECT);
   __ Move(kScratchRegister, Smi::FromInt(reset_value));
   __ movq(FieldOperand(rbx, Cell::kValueOffset), kScratchRegister);
@@ -325,13 +321,10 @@
   Comment cmnt(masm_, "[ Back edge bookkeeping");
   Label ok;

-  int weight = 1;
-  if (FLAG_weighted_back_edges) {
-    ASSERT(back_edge_target->is_bound());
-    int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
-    weight = Min(kMaxBackEdgeWeight,
-                 Max(1, distance / kCodeSizeMultiplier));
-  }
+  ASSERT(back_edge_target->is_bound());
+  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
+  int weight = Min(kMaxBackEdgeWeight,
+                   Max(1, distance / kCodeSizeMultiplier));
   EmitProfilingCounterDecrement(weight);
   __ j(positive, &ok, Label::kNear);
   __ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
@@ -362,31 +355,24 @@
       __ push(rax);
       __ CallRuntime(Runtime::kTraceExit, 1);
     }
-    if (FLAG_interrupt_at_exit || FLAG_self_optimization) {
-      // Pretend that the exit is a backwards jump to the entry.
-      int weight = 1;
-      if (info_->ShouldSelfOptimize()) {
-        weight = FLAG_interrupt_budget / FLAG_self_opt_count;
-      } else if (FLAG_weighted_back_edges) {
-        int distance = masm_->pc_offset();
-        weight = Min(kMaxBackEdgeWeight,
-                     Max(1, distance / kCodeSizeMultiplier));
-      }
-      EmitProfilingCounterDecrement(weight);
-      Label ok;
-      __ j(positive, &ok, Label::kNear);
-      __ push(rax);
-      if (info_->ShouldSelfOptimize() && FLAG_direct_self_opt) {
-        __ push(Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
-        __ CallRuntime(Runtime::kOptimizeFunctionOnNextCall, 1);
-      } else {
-        __ call(isolate()->builtins()->InterruptCheck(),
-                RelocInfo::CODE_TARGET);
-      }
-      __ pop(rax);
-      EmitProfilingCounterReset();
-      __ bind(&ok);
+    // Pretend that the exit is a backwards jump to the entry.
+    int weight = 1;
+    if (info_->ShouldSelfOptimize()) {
+      weight = FLAG_interrupt_budget / FLAG_self_opt_count;
+    } else {
+      int distance = masm_->pc_offset();
+      weight = Min(kMaxBackEdgeWeight,
+                   Max(1, distance / kCodeSizeMultiplier));
     }
+    EmitProfilingCounterDecrement(weight);
+    Label ok;
+    __ j(positive, &ok, Label::kNear);
+    __ push(rax);
+    __ call(isolate()->builtins()->InterruptCheck(),
+            RelocInfo::CODE_TARGET);
+    __ pop(rax);
+    EmitProfilingCounterReset();
+    __ bind(&ok);
 #ifdef DEBUG
     // Add a label for checking the size of the code used for returning.
     Label check_exit_codesize;
