Revision: 21493
Author:   [email protected]
Date:     Mon May 26 12:58:55 2014 UTC
Log:      Remove high promotion mode.

BUG=
[email protected]

Review URL: https://codereview.chromium.org/296413004
http://code.google.com/p/v8/source/detail?r=21493

Modified:
 /branches/bleeding_edge/src/arm/macro-assembler-arm.cc
 /branches/bleeding_edge/src/arm64/macro-assembler-arm64.cc
 /branches/bleeding_edge/src/assembler.cc
 /branches/bleeding_edge/src/assembler.h
 /branches/bleeding_edge/src/execution.cc
 /branches/bleeding_edge/src/execution.h
 /branches/bleeding_edge/src/heap.cc
 /branches/bleeding_edge/src/heap.h
 /branches/bleeding_edge/src/ia32/macro-assembler-ia32.cc
 /branches/bleeding_edge/src/mips/macro-assembler-mips.cc
 /branches/bleeding_edge/src/serialize.cc
 /branches/bleeding_edge/src/x64/macro-assembler-x64.cc
 /branches/bleeding_edge/src/x87/macro-assembler-x87.cc
 /branches/bleeding_edge/test/cctest/test-heap.cc

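Summary, for readers skimming the diffs below: the heap drops the "new space
high promotion mode" flag together with the survival-rate-trend machinery that
drove it, the per-architecture AllocateAsciiConsString helpers stop pretenuring
cons strings into old pointer space, and the FULL_DEOPT stack-guard interrupt
that the mode used to request is retired. The net effect on pretenuring is the
one-line change to Heap::GetPretenureMode in the heap.h hunk; a standalone
before/after sketch of that accessor (mock flag and enum, only so the snippet
compiles on its own):

    enum PretenureFlag { NOT_TENURED, TENURED };
    static bool FLAG_pretenuring = false;  // mock stand-in for --pretenuring

    // Before r21493 (simplified): pretenure only while the mode was active.
    //   return FLAG_pretenuring && new_space_high_promotion_mode_active_
    //       ? TENURED : NOT_TENURED;
    //
    // After r21493: --pretenuring alone decides.
    PretenureFlag GetPretenureMode() {
      return FLAG_pretenuring ? TENURED : NOT_TENURED;
    }
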
=======================================
--- /branches/bleeding_edge/src/arm/macro-assembler-arm.cc     Fri May 16 15:18:24 2014 UTC
+++ /branches/bleeding_edge/src/arm/macro-assembler-arm.cc     Mon May 26 12:58:55 2014 UTC
@@ -1980,34 +1980,12 @@
                                              Register scratch1,
                                              Register scratch2,
                                              Label* gc_required) {
-  Label allocate_new_space, install_map;
-  AllocationFlags flags = TAG_OBJECT;
-
-  ExternalReference high_promotion_mode = ExternalReference::
-      new_space_high_promotion_mode_active_address(isolate());
-  mov(scratch1, Operand(high_promotion_mode));
-  ldr(scratch1, MemOperand(scratch1, 0));
-  cmp(scratch1, Operand::Zero());
-  b(eq, &allocate_new_space);
-
   Allocate(ConsString::kSize,
            result,
            scratch1,
            scratch2,
            gc_required,
-           static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE));
-
-  jmp(&install_map);
-
-  bind(&allocate_new_space);
-  Allocate(ConsString::kSize,
-           result,
-           scratch1,
-           scratch2,
-           gc_required,
-           flags);
-
-  bind(&install_map);
+           TAG_OBJECT);

   InitializeNewString(result,
                       length,
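
The same edit repeats in each macro-assembler below (arm64, ia32, mips, x64,
x87): the generated code no longer loads the high-promotion-mode cell to choose
between two allocation paths, it simply allocates the ConsString in new space
with TAG_OBJECT. A self-contained sketch of the decision being deleted (the
flag values and function names here are illustrative, not the real
AllocationFlags definitions):

    #include <cstdint>

    enum AllocationFlags : int {   // mock bit values for illustration
      TAG_OBJECT = 1 << 0,
      PRETENURE_OLD_POINTER_SPACE = 1 << 1
    };

    // Before: the flags passed to Allocate() depended on the runtime cell
    // behind new_space_high_promotion_mode_active_address().
    AllocationFlags ConsStringFlagsBefore(std::intptr_t mode_active) {
      return mode_active != 0
                 ? static_cast<AllocationFlags>(TAG_OBJECT |
                                                PRETENURE_OLD_POINTER_SPACE)
                 : TAG_OBJECT;
    }

    // After: unconditionally TAG_OBJECT, i.e. a plain new-space allocation.
    AllocationFlags ConsStringFlagsAfter() { return TAG_OBJECT; }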
=======================================
--- /branches/bleeding_edge/src/arm64/macro-assembler-arm64.cc     Fri May 23 14:06:42 2014 UTC
+++ /branches/bleeding_edge/src/arm64/macro-assembler-arm64.cc     Mon May 26 12:58:55 2014 UTC
@@ -3539,33 +3539,12 @@
                                              Register scratch1,
                                              Register scratch2,
                                              Label* gc_required) {
-  Label allocate_new_space, install_map;
-  AllocationFlags flags = TAG_OBJECT;
-
-  ExternalReference high_promotion_mode = ExternalReference::
-      new_space_high_promotion_mode_active_address(isolate());
-  Mov(scratch1, high_promotion_mode);
-  Ldr(scratch1, MemOperand(scratch1));
-  Cbz(scratch1, &allocate_new_space);
-
   Allocate(ConsString::kSize,
            result,
            scratch1,
            scratch2,
            gc_required,
-           static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE));
-
-  B(&install_map);
-
-  Bind(&allocate_new_space);
-  Allocate(ConsString::kSize,
-           result,
-           scratch1,
-           scratch2,
-           gc_required,
-           flags);
-
-  Bind(&install_map);
+           TAG_OBJECT);

   InitializeNewString(result,
                       length,
=======================================
--- /branches/bleeding_edge/src/assembler.cc    Fri May 23 16:37:27 2014 UTC
+++ /branches/bleeding_edge/src/assembler.cc    Mon May 26 12:58:55 2014 UTC
@@ -1202,13 +1202,6 @@
   return ExternalReference(
       isolate->heap()->OldDataSpaceAllocationLimitAddress());
 }
-
-
-ExternalReference ExternalReference::
-    new_space_high_promotion_mode_active_address(Isolate* isolate) {
-  return ExternalReference(
-      isolate->heap()->NewSpaceHighPromotionModeActiveAddress());
-}


 ExternalReference ExternalReference::handle_scope_level_address(
=======================================
--- /branches/bleeding_edge/src/assembler.h     Thu May 22 07:57:33 2014 UTC
+++ /branches/bleeding_edge/src/assembler.h     Mon May 26 12:58:55 2014 UTC
@@ -874,8 +874,6 @@
       Isolate* isolate);
   static ExternalReference old_data_space_allocation_limit_address(
       Isolate* isolate);
-  static ExternalReference new_space_high_promotion_mode_active_address(
-      Isolate* isolate);

   static ExternalReference mod_two_doubles_operation(Isolate* isolate);
   static ExternalReference power_double_double_function(Isolate* isolate);
=======================================
--- /branches/bleeding_edge/src/execution.cc    Tue May 20 08:52:42 2014 UTC
+++ /branches/bleeding_edge/src/execution.cc    Mon May 26 12:58:55 2014 UTC
@@ -728,10 +728,6 @@
     if (CheckAndClearInterrupt(TERMINATE_EXECUTION, access)) {
       return isolate_->TerminateExecution();
     }
-
-    if (CheckAndClearInterrupt(FULL_DEOPT, access)) {
-      Deoptimizer::DeoptimizeAll(isolate_);
-    }

     if (CheckAndClearInterrupt(DEOPT_MARKED_ALLOCATION_SITES, access)) {
       isolate_->heap()->DeoptMarkedAllocationSites();
=======================================
--- /branches/bleeding_edge/src/execution.h     Mon May 19 07:57:04 2014 UTC
+++ /branches/bleeding_edge/src/execution.h     Mon May 26 12:58:55 2014 UTC
@@ -155,7 +155,6 @@
   V(DEBUGCOMMAND, DebugCommand)                                 \
   V(TERMINATE_EXECUTION, TerminateExecution)                    \
   V(GC_REQUEST, GC)                                             \
-  V(FULL_DEOPT, FullDeopt)                                      \
   V(INSTALL_CODE, InstallCode)                                  \
   V(API_INTERRUPT, ApiInterrupt)                                \
   V(DEOPT_MARKED_ALLOCATION_SITES, DeoptMarkedAllocationSites)
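
The interrupt list in execution.h is an X-macro: each V(FLAG, Name) row defines
the flag constant and, judging by the RequestFullDeopt() call removed from
heap.cc further down, a generated Request<Name>() helper as well, so deleting
the single FULL_DEOPT row retires both. A generic, self-contained sketch of the
pattern (not V8's exact expansion; the class and member names are illustrative):

    #define INTERRUPT_LIST(V)      \
      V(GC_REQUEST, GC)            \
      V(INSTALL_CODE, InstallCode)

    class StackGuardSketch {
     public:
      enum InterruptFlag {
    #define V(NAME, Name) NAME,
        INTERRUPT_LIST(V)
    #undef V
      };

      // One Request<Name>() per row: RequestGC(), RequestInstallCode(), ...
    #define V(NAME, Name) \
      void Request##Name() { RequestInterrupt(NAME); }
      INTERRUPT_LIST(V)
    #undef V

     private:
      void RequestInterrupt(InterruptFlag flag) { pending_ |= 1u << flag; }
      unsigned pending_ = 0;
    };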
=======================================
--- /branches/bleeding_edge/src/heap.cc Fri May 23 08:52:05 2014 UTC
+++ /branches/bleeding_edge/src/heap.cc Mon May 26 12:58:55 2014 UTC
@@ -84,7 +84,6 @@
 #ifdef DEBUG
       allocation_timeout_(0),
 #endif  // DEBUG
-      new_space_high_promotion_mode_active_(false),
       old_generation_allocation_limit_(kMinimumOldGenerationAllocationLimit),
       size_of_old_gen_at_last_old_space_gc_(0),
       external_allocation_limit_(0),
@@ -98,14 +97,10 @@
       total_regexp_code_generated_(0),
       tracer_(NULL),
       high_survival_rate_period_length_(0),
-      low_survival_rate_period_length_(0),
-      survival_rate_(0),
       promoted_objects_size_(0),
       promotion_rate_(0),
       semi_space_copied_object_size_(0),
       semi_space_copied_rate_(0),
-      previous_survival_rate_trend_(Heap::STABLE),
-      survival_rate_trend_(Heap::STABLE),
       max_gc_pause_(0.0),
       total_gc_time_ms_(0.0),
       max_alive_after_gc_(0),
@@ -1013,7 +1008,7 @@
 }


-void Heap::UpdateSurvivalRateTrend(int start_new_space_size) {
+void Heap::UpdateSurvivalStatistics(int start_new_space_size) {
   if (start_new_space_size == 0) return;

   promotion_rate_ =
@@ -1031,24 +1026,6 @@
   } else {
     high_survival_rate_period_length_ = 0;
   }
-
-  if (survival_rate < kYoungSurvivalRateLowThreshold) {
-    low_survival_rate_period_length_++;
-  } else {
-    low_survival_rate_period_length_ = 0;
-  }
-
-  double survival_rate_diff = survival_rate_ - survival_rate;
-
-  if (survival_rate_diff > kYoungSurvivalRateAllowedDeviation) {
-    set_survival_rate_trend(DECREASING);
-  } else if (survival_rate_diff < -kYoungSurvivalRateAllowedDeviation) {
-    set_survival_rate_trend(INCREASING);
-  } else {
-    set_survival_rate_trend(STABLE);
-  }
-
-  survival_rate_ = survival_rate;
 }

 bool Heap::PerformGarbageCollection(
@@ -1108,51 +1085,7 @@
     tracer_ = NULL;
   }

-  UpdateSurvivalRateTrend(start_new_space_size);
-
-  if (!new_space_high_promotion_mode_active_ &&
-      new_space_.Capacity() == new_space_.MaximumCapacity() &&
-      IsStableOrIncreasingSurvivalTrend() &&
-      IsHighSurvivalRate()) {
-    // Stable high survival rates even though young generation is at
-    // maximum capacity indicates that most objects will be promoted.
-    // To decrease scavenger pauses and final mark-sweep pauses, we
-    // have to limit maximal capacity of the young generation.
-    SetNewSpaceHighPromotionModeActive(true);
-    if (FLAG_trace_gc) {
- PrintPID("Limited new space size due to high promotion rate: %d MB\n",
-               new_space_.InitialCapacity() / MB);
-    }
-    // The high promotion mode is our indicator to turn on pretenuring. We have
-    // to deoptimize all optimized code in global pretenuring mode and all
-    // code which should be tenured in local pretenuring mode.
-    if (FLAG_pretenuring) {
-      if (!FLAG_allocation_site_pretenuring) {
-        isolate_->stack_guard()->RequestFullDeopt();
-      }
-    }
-  } else if (new_space_high_promotion_mode_active_ &&
-      IsStableOrDecreasingSurvivalTrend() &&
-      IsLowSurvivalRate()) {
-    // Decreasing low survival rates might indicate that the above high
-    // promotion mode is over and we should allow the young generation
-    // to grow again.
-    SetNewSpaceHighPromotionModeActive(false);
-    if (FLAG_trace_gc) {
- PrintPID("Unlimited new space size due to low promotion rate: %d MB\n",
-               new_space_.MaximumCapacity() / MB);
-    }
-    // Trigger deoptimization here to turn off global pretenuring as soon as
-    // possible.
-    if (FLAG_pretenuring && !FLAG_allocation_site_pretenuring) {
-      isolate_->stack_guard()->RequestFullDeopt();
-    }
-  }
-
-  if (new_space_high_promotion_mode_active_ &&
-      new_space_.Capacity() > new_space_.InitialCapacity()) {
-    new_space_.Shrink();
-  }
+  UpdateSurvivalStatistics(start_new_space_size);

   isolate_->counters()->objs_since_last_young()->Set(0);

@@ -1353,8 +1286,7 @@

 void Heap::CheckNewSpaceExpansionCriteria() {
   if (new_space_.Capacity() < new_space_.MaximumCapacity() &&
-      survived_since_last_expansion_ > new_space_.Capacity() &&
-      !new_space_high_promotion_mode_active_) {
+      survived_since_last_expansion_ > new_space_.Capacity()) {
     // Grow the size of new space if there is room to grow, enough data
     // has survived scavenge since the last expansion and we are not in
     // high promotion mode.
@@ -6203,7 +6135,6 @@
     PrintF("nodes_died_in_new=%d ", nodes_died_in_new_space_);
     PrintF("nodes_copied_in_new=%d ", nodes_copied_in_new_space_);
     PrintF("nodes_promoted=%d ", nodes_promoted_);
-    PrintF("survival_rate=%.1f%% ", heap_->survival_rate_);
     PrintF("promotion_rate=%.1f%% ", heap_->promotion_rate_);
     PrintF("semi_space_copy_rate=%.1f%% ", heap_->semi_space_copied_rate_);

=======================================
--- /branches/bleeding_edge/src/heap.h  Mon May 26 11:28:08 2014 UTC
+++ /branches/bleeding_edge/src/heap.h  Mon May 26 12:58:55 2014 UTC
@@ -1033,21 +1033,11 @@
   // Returns the adjusted value.
   inline int64_t AdjustAmountOfExternalAllocatedMemory(
       int64_t change_in_bytes);
-
-  // This is only needed for testing high promotion mode.
-  void SetNewSpaceHighPromotionModeActive(bool mode) {
-    new_space_high_promotion_mode_active_ = mode;
-  }

   // Returns the allocation mode (pre-tenuring) based on observed promotion
   // rates of previous collections.
   inline PretenureFlag GetPretenureMode() {
-    return FLAG_pretenuring && new_space_high_promotion_mode_active_
-        ? TENURED : NOT_TENURED;
-  }
-
-  inline Address* NewSpaceHighPromotionModeActiveAddress() {
-    return reinterpret_cast<Address*>(&new_space_high_promotion_mode_active_);
+    return FLAG_pretenuring ? TENURED : NOT_TENURED;
   }

   inline intptr_t PromotedTotalSize() {
@@ -1581,11 +1571,6 @@
   int allocation_timeout_;
 #endif  // DEBUG

-  // Indicates that the new space should be kept small due to high promotion
-  // rates caused by the mutator allocating a lot of long-lived objects.
-  // TODO(hpayer): change to bool if no longer accessed from generated code
-  intptr_t new_space_high_promotion_mode_active_;
-
   // Limit that triggers a global GC on the next (normally caused) GC. This
   // is checked when we have already decided to do a GC to help determine
   // which collector to invoke, before expanding a paged space in the old
@@ -2026,75 +2011,24 @@
   void AddAllocationSiteToScratchpad(AllocationSite* site,
                                      ScratchpadSlotMode mode);

-  void UpdateSurvivalRateTrend(int start_new_space_size);
-
-  enum SurvivalRateTrend { INCREASING, STABLE, DECREASING, FLUCTUATING };
+  void UpdateSurvivalStatistics(int start_new_space_size);

   static const int kYoungSurvivalRateHighThreshold = 90;
-  static const int kYoungSurvivalRateLowThreshold = 10;
   static const int kYoungSurvivalRateAllowedDeviation = 15;

   static const int kOldSurvivalRateLowThreshold = 10;

   int high_survival_rate_period_length_;
-  int low_survival_rate_period_length_;
-  double survival_rate_;
   intptr_t promoted_objects_size_;
   double promotion_rate_;
   intptr_t semi_space_copied_object_size_;
   double semi_space_copied_rate_;
-  SurvivalRateTrend previous_survival_rate_trend_;
-  SurvivalRateTrend survival_rate_trend_;

-  void set_survival_rate_trend(SurvivalRateTrend survival_rate_trend) {
-    ASSERT(survival_rate_trend != FLUCTUATING);
-    previous_survival_rate_trend_ = survival_rate_trend_;
-    survival_rate_trend_ = survival_rate_trend;
-  }
-
-  SurvivalRateTrend survival_rate_trend() {
-    if (survival_rate_trend_ == STABLE) {
-      return STABLE;
-    } else if (previous_survival_rate_trend_ == STABLE) {
-      return survival_rate_trend_;
-    } else if (survival_rate_trend_ != previous_survival_rate_trend_) {
-      return FLUCTUATING;
-    } else {
-      return survival_rate_trend_;
-    }
-  }
-
-  bool IsStableOrIncreasingSurvivalTrend() {
-    switch (survival_rate_trend()) {
-      case STABLE:
-      case INCREASING:
-        return true;
-      default:
-        return false;
-    }
-  }
-
-  bool IsStableOrDecreasingSurvivalTrend() {
-    switch (survival_rate_trend()) {
-      case STABLE:
-      case DECREASING:
-        return true;
-      default:
-        return false;
-    }
-  }
-
-  bool IsIncreasingSurvivalTrend() {
-    return survival_rate_trend() == INCREASING;
-  }
-
+  // TODO(hpayer): Allocation site pretenuring may make this method obsolete.
+  // Re-visit incremental marking heuristics.
   bool IsHighSurvivalRate() {
     return high_survival_rate_period_length_ > 0;
   }
-
-  bool IsLowSurvivalRate() {
-    return low_survival_rate_period_length_ > 0;
-  }

   void SelectScavengingVisitorsTable();

=======================================
--- /branches/bleeding_edge/src/ia32/macro-assembler-ia32.cc     Mon May 26 06:41:21 2014 UTC
+++ /branches/bleeding_edge/src/ia32/macro-assembler-ia32.cc     Mon May 26 12:58:55 2014 UTC
@@ -1796,32 +1796,13 @@
                                              Register scratch1,
                                              Register scratch2,
                                              Label* gc_required) {
-  Label allocate_new_space, install_map;
-  AllocationFlags flags = TAG_OBJECT;
-
-  ExternalReference high_promotion_mode = ExternalReference::
-      new_space_high_promotion_mode_active_address(isolate());
-
-  test(Operand::StaticVariable(high_promotion_mode), Immediate(1));
-  j(zero, &allocate_new_space);
-
   Allocate(ConsString::kSize,
            result,
            scratch1,
            scratch2,
            gc_required,
-           static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE));
-  jmp(&install_map);
+           TAG_OBJECT);

-  bind(&allocate_new_space);
-  Allocate(ConsString::kSize,
-           result,
-           scratch1,
-           scratch2,
-           gc_required,
-           flags);
-
-  bind(&install_map);
   // Set the map. The other fields are left uninitialized.
   mov(FieldOperand(result, HeapObject::kMapOffset),
       Immediate(isolate()->factory()->cons_ascii_string_map()));
=======================================
--- /branches/bleeding_edge/src/mips/macro-assembler-mips.cc     Thu May 15 12:10:00 2014 UTC
+++ /branches/bleeding_edge/src/mips/macro-assembler-mips.cc     Mon May 26 12:58:55 2014 UTC
@@ -3117,33 +3117,12 @@
                                              Register scratch1,
                                              Register scratch2,
                                              Label* gc_required) {
-  Label allocate_new_space, install_map;
-  AllocationFlags flags = TAG_OBJECT;
-
-  ExternalReference high_promotion_mode = ExternalReference::
-      new_space_high_promotion_mode_active_address(isolate());
-  li(scratch1, Operand(high_promotion_mode));
-  lw(scratch1, MemOperand(scratch1, 0));
-  Branch(&allocate_new_space, eq, scratch1, Operand(zero_reg));
-
   Allocate(ConsString::kSize,
            result,
            scratch1,
            scratch2,
            gc_required,
-           static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE));
-
-  jmp(&install_map);
-
-  bind(&allocate_new_space);
-  Allocate(ConsString::kSize,
-           result,
-           scratch1,
-           scratch2,
-           gc_required,
-           flags);
-
-  bind(&install_map);
+           TAG_OBJECT);

   InitializeNewString(result,
                       length,
=======================================
--- /branches/bleeding_edge/src/serialize.cc    Thu May 22 09:36:20 2014 UTC
+++ /branches/bleeding_edge/src/serialize.cc    Mon May 26 12:58:55 2014 UTC
@@ -487,54 +487,49 @@
       UNCLASSIFIED,
       58,
       "Heap::OldDataSpaceAllocationLimitAddress");
-  Add(ExternalReference::new_space_high_promotion_mode_active_address(isolate).
-      address(),
+  Add(ExternalReference::allocation_sites_list_address(isolate).address(),
       UNCLASSIFIED,
       59,
-      "Heap::NewSpaceAllocationLimitAddress");
-  Add(ExternalReference::allocation_sites_list_address(isolate).address(),
-      UNCLASSIFIED,
-      60,
       "Heap::allocation_sites_list_address()");
   Add(ExternalReference::address_of_uint32_bias().address(),
       UNCLASSIFIED,
-      61,
+      60,
       "uint32_bias");
   Add(ExternalReference::get_mark_code_as_executed_function(isolate).address(),
       UNCLASSIFIED,
-      62,
+      61,
       "Code::MarkCodeAsExecuted");

   Add(ExternalReference::is_profiling_address(isolate).address(),
       UNCLASSIFIED,
-      63,
+      62,
       "CpuProfiler::is_profiling");

   Add(ExternalReference::scheduled_exception_address(isolate).address(),
       UNCLASSIFIED,
-      64,
+      63,
       "Isolate::scheduled_exception");

   Add(ExternalReference::invoke_function_callback(isolate).address(),
       UNCLASSIFIED,
-      65,
+      64,
       "InvokeFunctionCallback");

   Add(ExternalReference::invoke_accessor_getter_callback(isolate).address(),
       UNCLASSIFIED,
-      66,
+      65,
       "InvokeAccessorGetterCallback");

   // Debug addresses
   Add(ExternalReference::debug_after_break_target_address(isolate).address(),
       UNCLASSIFIED,
-      67,
+      66,
       "Debug::after_break_target_address()");

   Add(ExternalReference::debug_restarter_frame_function_pointer_address(
           isolate).address(),
       UNCLASSIFIED,
-      68,
+      67,
       "Debug::restarter_frame_function_pointer_address()");

   // Add a small set of deopt entry addresses to encoder without generating the
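
With entry 59 (the high-promotion-mode cell) gone, every later UNCLASSIFIED id
in the external-reference table slides down by one, which is all the remaining
serialize.cc churn amounts to. A tiny self-contained sketch of the property the
renumbering preserves, using names from the hunk above:

    #include <cassert>
    #include <cstddef>
    #include <string>
    #include <utility>
    #include <vector>

    int main() {
      // After the removal the ids stay consecutive: 60..68 become 59..67.
      std::vector<std::pair<int, std::string>> unclassified = {
          {58, "Heap::OldDataSpaceAllocationLimitAddress"},
          {59, "Heap::allocation_sites_list_address()"},  // was 60
          {60, "uint32_bias"},                            // was 61
          {61, "Code::MarkCodeAsExecuted"},               // was 62
      };
      for (std::size_t i = 1; i < unclassified.size(); ++i) {
        assert(unclassified[i].first == unclassified[i - 1].first + 1);
      }
      return 0;
    }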
=======================================
--- /branches/bleeding_edge/src/x64/macro-assembler-x64.cc     Mon May 26 06:41:21 2014 UTC
+++ /branches/bleeding_edge/src/x64/macro-assembler-x64.cc     Mon May 26 12:58:55 2014 UTC
@@ -4560,33 +4560,12 @@
                                              Register scratch1,
                                              Register scratch2,
                                              Label* gc_required) {
-  Label allocate_new_space, install_map;
-  AllocationFlags flags = TAG_OBJECT;
-
-  ExternalReference high_promotion_mode = ExternalReference::
-      new_space_high_promotion_mode_active_address(isolate());
-
-  Load(scratch1, high_promotion_mode);
-  testb(scratch1, Immediate(1));
-  j(zero, &allocate_new_space);
   Allocate(ConsString::kSize,
            result,
            scratch1,
            scratch2,
            gc_required,
-           static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE));
-
-  jmp(&install_map);
-
-  bind(&allocate_new_space);
-  Allocate(ConsString::kSize,
-           result,
-           scratch1,
-           scratch2,
-           gc_required,
-           flags);
-
-  bind(&install_map);
+           TAG_OBJECT);

   // Set the map. The other fields are left uninitialized.
   LoadRoot(kScratchRegister, Heap::kConsAsciiStringMapRootIndex);
=======================================
--- /branches/bleeding_edge/src/x87/macro-assembler-x87.cc     Fri May 23 16:37:27 2014 UTC
+++ /branches/bleeding_edge/src/x87/macro-assembler-x87.cc     Mon May 26 12:58:55 2014 UTC
@@ -1689,32 +1689,13 @@
                                              Register scratch1,
                                              Register scratch2,
                                              Label* gc_required) {
-  Label allocate_new_space, install_map;
-  AllocationFlags flags = TAG_OBJECT;
-
-  ExternalReference high_promotion_mode = ExternalReference::
-      new_space_high_promotion_mode_active_address(isolate());
-
-  test(Operand::StaticVariable(high_promotion_mode), Immediate(1));
-  j(zero, &allocate_new_space);
-
   Allocate(ConsString::kSize,
            result,
            scratch1,
            scratch2,
            gc_required,
-           static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE));
-  jmp(&install_map);
+           TAG_OBJECT);

-  bind(&allocate_new_space);
-  Allocate(ConsString::kSize,
-           result,
-           scratch1,
-           scratch2,
-           gc_required,
-           flags);
-
-  bind(&install_map);
   // Set the map. The other fields are left uninitialized.
   mov(FieldOperand(result, HeapObject::kMapOffset),
       Immediate(isolate()->factory()->cons_ascii_string_map()));
=======================================
--- /branches/bleeding_edge/test/cctest/test-heap.cc     Thu May 22 15:27:57 2014 UTC
+++ /branches/bleeding_edge/test/cctest/test-heap.cc     Mon May 26 12:58:55 2014 UTC
@@ -2200,7 +2200,6 @@
   if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
   if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
   v8::HandleScope scope(CcTest::isolate());
-  CcTest::heap()->SetNewSpaceHighPromotionModeActive(true);

   v8::Local<v8::Value> res = CompileRun(
       "function DataObject() {"
@@ -2243,7 +2242,6 @@
   if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
   if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
   v8::HandleScope scope(CcTest::isolate());
-  CcTest::heap()->SetNewSpaceHighPromotionModeActive(true);

   v8::Local<v8::Value> res = CompileRun(
       "var number_elements = 30000;"
@@ -2590,7 +2588,6 @@
   if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
   if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
   v8::HandleScope scope(CcTest::isolate());
-  CcTest::heap()->SetNewSpaceHighPromotionModeActive(true);

   AlwaysAllocateScope always_allocate(CcTest::i_isolate());
   v8::Local<v8::Value> res = CompileRun(
@@ -3723,10 +3720,6 @@
   CcTest::heap()->DisableInlineAllocation();
   CompileRun("run()");

-  // Run test with inline allocation disabled and pretenuring.
-  CcTest::heap()->SetNewSpaceHighPromotionModeActive(true);
-  CompileRun("run()");
-
   // Run test with inline allocation re-enabled.
   CcTest::heap()->EnableInlineAllocation();
   CompileRun("run()");
