Revision: 12898
Author:   [email protected]
Date:     Thu Nov  8 04:18:11 2012
Log:      Add code again to allow reclaiming old unexecuted functions.

When code objects in the heap for FUNCTIONs and OPTIMIZED_FUNCTIONs are marked by the GC, their prologue is patched with a call to a stub that removes the patch. This allows the collector to quickly identify code objects that haven't been executed since the last full collection (they are the ones that still contain the patch). The functionality is currently disabled, but can be activated by specifying the "--age-code" flag.

[email protected]

Review URL: https://codereview.chromium.org/10837037
http://code.google.com/p/v8/source/detail?r=12898

Modified:
 /branches/bleeding_edge/src/arm/assembler-arm-inl.h
 /branches/bleeding_edge/src/arm/builtins-arm.cc
 /branches/bleeding_edge/src/arm/codegen-arm.cc
 /branches/bleeding_edge/src/arm/codegen-arm.h
 /branches/bleeding_edge/src/arm/full-codegen-arm.cc
 /branches/bleeding_edge/src/arm/lithium-codegen-arm.cc
 /branches/bleeding_edge/src/assembler.cc
 /branches/bleeding_edge/src/assembler.h
 /branches/bleeding_edge/src/builtins.h
 /branches/bleeding_edge/src/code-stubs.h
 /branches/bleeding_edge/src/compiler.cc
 /branches/bleeding_edge/src/debug.cc
 /branches/bleeding_edge/src/flag-definitions.h
 /branches/bleeding_edge/src/ia32/assembler-ia32-inl.h
 /branches/bleeding_edge/src/ia32/assembler-ia32.cc
 /branches/bleeding_edge/src/ia32/builtins-ia32.cc
 /branches/bleeding_edge/src/ia32/codegen-ia32.cc
 /branches/bleeding_edge/src/ia32/codegen-ia32.h
 /branches/bleeding_edge/src/ia32/full-codegen-ia32.cc
 /branches/bleeding_edge/src/ia32/lithium-codegen-ia32.cc
 /branches/bleeding_edge/src/liveedit.cc
 /branches/bleeding_edge/src/mark-compact.cc
 /branches/bleeding_edge/src/mark-compact.h
 /branches/bleeding_edge/src/objects-visiting-inl.h
 /branches/bleeding_edge/src/objects-visiting.h
 /branches/bleeding_edge/src/objects.cc
 /branches/bleeding_edge/src/objects.h
 /branches/bleeding_edge/src/serialize.cc
 /branches/bleeding_edge/src/x64/assembler-x64-inl.h
 /branches/bleeding_edge/src/x64/assembler-x64.cc
 /branches/bleeding_edge/src/x64/builtins-x64.cc
 /branches/bleeding_edge/src/x64/codegen-x64.cc
 /branches/bleeding_edge/src/x64/codegen-x64.h
 /branches/bleeding_edge/src/x64/full-codegen-x64.cc
 /branches/bleeding_edge/src/x64/lithium-codegen-x64.cc

=======================================
--- /branches/bleeding_edge/src/arm/assembler-arm-inl.h Mon Oct 29 09:27:54 2012 +++ /branches/bleeding_edge/src/arm/assembler-arm-inl.h Thu Nov 8 04:18:11 2012
@@ -163,6 +163,24 @@
         host(), NULL, cell);
   }
 }
+
+
+static const int kNoCodeAgeSequenceLength = 3;
+
+Code* RelocInfo::code_age_stub() {
+  ASSERT(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
+  return Code::GetCodeFromTargetAddress(
+      Memory::Address_at(pc_ + Assembler::kInstrSize *
+                         (kNoCodeAgeSequenceLength - 1)));
+}
+
+
+void RelocInfo::set_code_age_stub(Code* stub) {
+  ASSERT(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
+  Memory::Address_at(pc_ + Assembler::kInstrSize *
+                     (kNoCodeAgeSequenceLength - 1)) =
+      stub->instruction_start();
+}


 Address RelocInfo::call_address() {
@@ -238,6 +256,8 @@
     visitor->VisitGlobalPropertyCell(this);
   } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
     visitor->VisitExternalReference(this);
+  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
+    visitor->VisitCodeAgeSequence(this);
 #ifdef ENABLE_DEBUGGER_SUPPORT
   // TODO(isolates): Get a cached isolate below.
   } else if (((RelocInfo::IsJSReturn(mode) &&
@@ -264,6 +284,8 @@
     StaticVisitor::VisitGlobalPropertyCell(heap, this);
   } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
     StaticVisitor::VisitExternalReference(this);
+  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
+    StaticVisitor::VisitCodeAgeSequence(heap, this);
 #ifdef ENABLE_DEBUGGER_SUPPORT
   } else if (heap->isolate()->debug()->has_break_points() &&
              ((RelocInfo::IsJSReturn(mode) &&
=======================================
--- /branches/bleeding_edge/src/arm/builtins-arm.cc     Fri Aug 17 05:59:00 2012
+++ /branches/bleeding_edge/src/arm/builtins-arm.cc     Thu Nov  8 04:18:11 2012
@@ -1226,6 +1226,39 @@
 }


+static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
+ // For now, we are relying on the fact that make_code_young doesn't do any + // garbage collection which allows us to save/restore the registers without
+  // worrying about which of them contain pointers. We also don't build an
+ // internal frame to make the code faster, since we shouldn't have to do stack
+  // crawls in MakeCodeYoung. This seems a bit fragile.
+
+ // The following registers must be saved and restored when calling through to
+  // the runtime:
+  //   r0 - contains return address (beginning of patch sequence)
+  //   r1 - function object
+  FrameScope scope(masm, StackFrame::MANUAL);
+  __ stm(db_w, sp, r0.bit() | r1.bit() | fp.bit() | lr.bit());
+  __ PrepareCallCFunction(1, 0, r1);
+  __ CallCFunction(
+      ExternalReference::get_make_code_young_function(masm->isolate()), 1);
+  __ ldm(ia_w, sp, r0.bit() | r1.bit() | fp.bit() | lr.bit());
+  __ mov(pc, r0);
+}
+
+#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                 \
+void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking(  \
+    MacroAssembler* masm) {                                  \
+  GenerateMakeCodeYoungAgainCommon(masm);                    \
+}                                                            \
+void Builtins::Generate_Make##C##CodeYoungAgainOddMarking(   \
+    MacroAssembler* masm) {                                  \
+  GenerateMakeCodeYoungAgainCommon(masm);                    \
+}
+CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
+#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR
+
+
 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
Deoptimizer::BailoutType type) {
   {
=======================================
--- /branches/bleeding_edge/src/arm/codegen-arm.cc      Wed Jul 25 08:26:16 2012
+++ /branches/bleeding_edge/src/arm/codegen-arm.cc      Thu Nov  8 04:18:11 2012
@@ -452,6 +452,92 @@

 #undef __

+// add(r0, pc, Operand(-8))
+static const uint32_t kCodeAgePatchFirstInstruction = 0xe24f0008;
+
+static byte* GetNoCodeAgeSequence(uint32_t* length) {
+  // The sequence of instructions that is patched out for aging code is the
+ // following boilerplate stack-building prologue that is found in FUNCTIONS
+  static bool initialized = false;
+  static uint32_t sequence[kNoCodeAgeSequenceLength];
+  byte* byte_sequence = reinterpret_cast<byte*>(sequence);
+  *length = kNoCodeAgeSequenceLength * Assembler::kInstrSize;
+  if (!initialized) {
+    CodePatcher patcher(byte_sequence, kNoCodeAgeSequenceLength);
+ patcher.masm()->stm(db_w, sp, r1.bit() | cp.bit() | fp.bit() | lr.bit());
+    patcher.masm()->LoadRoot(ip, Heap::kUndefinedValueRootIndex);
+    patcher.masm()->add(fp, sp, Operand(2 * kPointerSize));
+    initialized = true;
+  }
+  return byte_sequence;
+}
+
+
+byte* Code::FindPlatformCodeAgeSequence() {
+  byte* start = instruction_start();
+  uint32_t young_length;
+  byte* young_sequence = GetNoCodeAgeSequence(&young_length);
+  if (!memcmp(start, young_sequence, young_length) ||
+      Memory::uint32_at(start) == kCodeAgePatchFirstInstruction) {
+    return start;
+  } else {
+    byte* start_after_strict = NULL;
+    if (kind() == FUNCTION) {
+      start_after_strict = start + kSizeOfFullCodegenStrictModePrologue;
+    } else {
+      ASSERT(kind() == OPTIMIZED_FUNCTION);
+      start_after_strict = start + kSizeOfOptimizedStrictModePrologue;
+    }
+    ASSERT(!memcmp(start_after_strict, young_sequence, young_length) ||
+           Memory::uint32_at(start_after_strict) ==
+           kCodeAgePatchFirstInstruction);
+    return start_after_strict;
+  }
+}
+
+
+bool Code::IsYoungSequence(byte* sequence) {
+  uint32_t young_length;
+  byte* young_sequence = GetNoCodeAgeSequence(&young_length);
+  bool result = !memcmp(sequence, young_sequence, young_length);
+  ASSERT(result ||
+         Memory::uint32_at(sequence) == kCodeAgePatchFirstInstruction);
+  return result;
+}
+
+
+void Code::GetCodeAgeAndParity(byte* sequence, Age* age,
+                               MarkingParity* parity) {
+  if (IsYoungSequence(sequence)) {
+    *age = kNoAge;
+    *parity = NO_MARKING_PARITY;
+  } else {
+    Address target_address = Memory::Address_at(
+        sequence + Assembler::kInstrSize * (kNoCodeAgeSequenceLength - 1));
+    Code* stub = GetCodeFromTargetAddress(target_address);
+    GetCodeAgeAndParity(stub, age, parity);
+  }
+}
+
+
+void Code::PatchPlatformCodeAge(byte* sequence,
+                                Code::Age age,
+                                MarkingParity parity) {
+  uint32_t young_length;
+  byte* young_sequence = GetNoCodeAgeSequence(&young_length);
+  if (age == kNoAge) {
+    memcpy(sequence, young_sequence, young_length);
+    CPU::FlushICache(sequence, young_length);
+  } else {
+    Code* stub = GetCodeAgeStub(age, parity);
+    CodePatcher patcher(sequence, young_length / Assembler::kInstrSize);
+    patcher.masm()->add(r0, pc, Operand(-8));
+    patcher.masm()->ldr(pc, MemOperand(pc, -4));
+ patcher.masm()->dd(reinterpret_cast<uint32_t>(stub->instruction_start()));
+  }
+}
+
+
 } }  // namespace v8::internal

 #endif  // V8_TARGET_ARCH_ARM
=======================================
--- /branches/bleeding_edge/src/arm/codegen-arm.h       Thu Nov 24 03:07:39 2011
+++ /branches/bleeding_edge/src/arm/codegen-arm.h       Thu Nov  8 04:18:11 2012
@@ -34,6 +34,9 @@
 namespace v8 {
 namespace internal {

+static const int kSizeOfFullCodegenStrictModePrologue = 16;
+static const int kSizeOfOptimizedStrictModePrologue = 16;
+
 // Forward declarations
 class CompilationInfo;

=======================================
--- /branches/bleeding_edge/src/arm/full-codegen-arm.cc Fri Oct 19 05:39:59 2012 +++ /branches/bleeding_edge/src/arm/full-codegen-arm.cc Thu Nov 8 04:18:11 2012
@@ -149,12 +149,15 @@
   // function calls.
   if (!info->is_classic_mode() || info->is_native()) {
     Label ok;
+    Label begin;
+    __ bind(&begin);
     __ cmp(r5, Operand(0));
     __ b(eq, &ok);
     int receiver_offset = info->scope()->num_parameters() * kPointerSize;
     __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
     __ str(r2, MemOperand(sp, receiver_offset));
     __ bind(&ok);
+ ASSERT_EQ(kSizeOfFullCodegenStrictModePrologue, ok.pos() - begin.pos());
   }

// Open a frame scope to indicate that there is a frame on the stack. The
@@ -164,12 +167,12 @@

   int locals_count = info->scope()->num_stack_slots();

-  __ Push(lr, fp, cp, r1);
-  if (locals_count > 0) {
-    // Load undefined value here, so the value is ready for the loop
-    // below.
-    __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
-  }
+ // The following four instructions must remain together and unmodified for
+  // code aging to work properly.
+  __ stm(db_w, sp, r1.bit() | cp.bit() | fp.bit() | lr.bit());
+  // Load undefined value here, so the value is ready for the loop
+  // below.
+  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
   // Adjust fp to point to caller's fp.
   __ add(fp, sp, Operand(2 * kPointerSize));

=======================================
--- /branches/bleeding_edge/src/arm/lithium-codegen-arm.cc Wed Nov 7 13:38:46 2012 +++ /branches/bleeding_edge/src/arm/lithium-codegen-arm.cc Thu Nov 8 04:18:11 2012
@@ -138,15 +138,23 @@
   // function calls.
   if (!info_->is_classic_mode() || info_->is_native()) {
     Label ok;
+    Label begin;
+    __ bind(&begin);
     __ cmp(r5, Operand(0));
     __ b(eq, &ok);
     int receiver_offset = scope()->num_parameters() * kPointerSize;
     __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
     __ str(r2, MemOperand(sp, receiver_offset));
     __ bind(&ok);
+    ASSERT_EQ(kSizeOfOptimizedStrictModePrologue, ok.pos() - begin.pos());
   }

+ // The following three instructions must remain together and unmodified for
+  // code aging to work properly.
   __ stm(db_w, sp, r1.bit() | cp.bit() | fp.bit() | lr.bit());
+  // Add unused load of ip to ensure prologue sequence is identical for
+  // full-codegen and lithium-codegen.
+  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
__ add(fp, sp, Operand(2 * kPointerSize)); // Adjust FP to point to saved FP.

   // Reserve space for the stack slots needed by the code.
=======================================
--- /branches/bleeding_edge/src/assembler.cc    Fri Oct 12 04:41:14 2012
+++ /branches/bleeding_edge/src/assembler.cc    Thu Nov  8 04:18:11 2012
@@ -313,6 +313,7 @@
 #ifdef DEBUG
   byte* begin_pos = pos_;
 #endif
+  ASSERT(rinfo->rmode() < RelocInfo::NUMBER_OF_MODES);
   ASSERT(rinfo->pc() - last_pc_ >= 0);
ASSERT(RelocInfo::LAST_STANDARD_NONCOMPACT_ENUM - RelocInfo::LAST_COMPACT_ENUM
          <= kMaxStandardNonCompactModes);
@@ -570,6 +571,15 @@
       }
     }
   }
+  if (code_age_sequence_ != NULL) {
+    byte* old_code_age_sequence = code_age_sequence_;
+    code_age_sequence_ = NULL;
+    if (SetMode(RelocInfo::CODE_AGE_SEQUENCE)) {
+      rinfo_.data_ = 0;
+      rinfo_.pc_ = old_code_age_sequence;
+      return;
+    }
+  }
   done_ = true;
 }

@@ -585,6 +595,12 @@
   mode_mask_ = mode_mask;
   last_id_ = 0;
   last_position_ = 0;
+  byte* sequence = code->FindCodeAgeSequence();
+  if (sequence != NULL && !Code::IsYoungSequence(sequence)) {
+    code_age_sequence_ = sequence;
+  } else {
+    code_age_sequence_ = NULL;
+  }
   if (mode_mask_ == 0) pos_ = end_;
   next();
 }
@@ -600,6 +616,7 @@
   mode_mask_ = mode_mask;
   last_id_ = 0;
   last_position_ = 0;
+  code_age_sequence_ = NULL;
   if (mode_mask_ == 0) pos_ = end_;
   next();
 }
@@ -652,6 +669,8 @@
       UNREACHABLE();
 #endif
       return "debug break slot";
+    case RelocInfo::CODE_AGE_SEQUENCE:
+      return "code_age_sequence";
     case RelocInfo::NUMBER_OF_MODES:
       UNREACHABLE();
       return "number_of_modes";
@@ -739,6 +758,9 @@
     case NUMBER_OF_MODES:
       UNREACHABLE();
       break;
+    case CODE_AGE_SEQUENCE:
+      ASSERT(Code::IsYoungSequence(pc_) || code_age_stub()->IsCode());
+      break;
   }
 }
 #endif  // VERIFY_HEAP
@@ -872,6 +894,13 @@
     Isolate* isolate) {
return ExternalReference(Redirect(isolate, FUNCTION_ADDR(JSDate::GetField)));
 }
+
+
+ExternalReference ExternalReference::get_make_code_young_function(
+    Isolate* isolate) {
+  return ExternalReference(Redirect(
+      isolate, FUNCTION_ADDR(Code::MakeCodeAgeSequenceYoung)));
+}


 ExternalReference ExternalReference::date_cache_stamp(Isolate* isolate) {
=======================================
--- /branches/bleeding_edge/src/assembler.h     Thu Oct 18 05:21:42 2012
+++ /branches/bleeding_edge/src/assembler.h     Thu Nov  8 04:18:11 2012
@@ -211,6 +211,12 @@
     // Pseudo-types
NUMBER_OF_MODES, // There are at most 15 modes with noncompact encoding.
     NONE,  // never recorded
+    CODE_AGE_SEQUENCE,  // Not stored in RelocInfo array, used explictly by
+                        // code aging.
+    FIRST_REAL_RELOC_MODE = CODE_TARGET,
+    LAST_REAL_RELOC_MODE = CONST_POOL,
+    FIRST_PSEUDO_RELOC_MODE = CODE_AGE_SEQUENCE,
+    LAST_PSEUDO_RELOC_MODE = CODE_AGE_SEQUENCE,
     LAST_CODE_ENUM = DEBUG_BREAK,
     LAST_GCED_ENUM = GLOBAL_PROPERTY_CELL,
     // Modes <= LAST_COMPACT_ENUM are guaranteed to have compact encoding.
@@ -225,6 +231,15 @@
       : pc_(pc), rmode_(rmode), data_(data), host_(host) {
   }

+  static inline bool IsRealRelocMode(Mode mode) {
+    return mode >= FIRST_REAL_RELOC_MODE &&
+        mode <= LAST_REAL_RELOC_MODE;
+  }
+  static inline bool IsPseudoRelocMode(Mode mode) {
+    ASSERT(!IsRealRelocMode(mode));
+    return mode >= FIRST_PSEUDO_RELOC_MODE &&
+        mode <= LAST_PSEUDO_RELOC_MODE;
+  }
   static inline bool IsConstructCall(Mode mode) {
     return mode == CONSTRUCT_CALL;
   }
@@ -262,6 +277,9 @@
   static inline bool IsDebugBreakSlot(Mode mode) {
     return mode == DEBUG_BREAK_SLOT;
   }
+  static inline bool IsCodeAgeSequence(Mode mode) {
+    return mode == CODE_AGE_SEQUENCE;
+  }
   static inline int ModeMask(Mode mode) { return 1 << mode; }

   // Accessors
@@ -294,7 +312,8 @@
   INLINE(Handle<JSGlobalPropertyCell> target_cell_handle());
   INLINE(void set_target_cell(JSGlobalPropertyCell* cell,
WriteBarrierMode mode = UPDATE_WRITE_BARRIER));
-
+  INLINE(Code* code_age_stub());
+  INLINE(void set_code_age_stub(Code* stub));

   // Read the address of the word containing the target_address in an
// instruction stream. What this means exactly is architecture-independent.
@@ -487,6 +506,7 @@

   byte* pos_;
   byte* end_;
+  byte* code_age_sequence_;
   RelocInfo rinfo_;
   bool done_;
   int mode_mask_;
@@ -595,6 +615,8 @@
   static ExternalReference get_date_field_function(Isolate* isolate);
   static ExternalReference date_cache_stamp(Isolate* isolate);

+  static ExternalReference get_make_code_young_function(Isolate* isolate);
+
   // Deoptimization support.
   static ExternalReference new_deoptimizer_function(Isolate* isolate);
static ExternalReference compute_output_frames_function(Isolate* isolate);
=======================================
--- /branches/bleeding_edge/src/builtins.h      Fri Sep  7 02:01:54 2012
+++ /branches/bleeding_edge/src/builtins.h      Thu Nov  8 04:18:11 2012
@@ -38,6 +38,25 @@
 };


+#define CODE_AGE_LIST_WITH_ARG(V, A)     \
+  V(Quadragenarian, A)                   \
+  V(Quinquagenarian, A)                  \
+  V(Sexagenarian, A)                     \
+  V(Septuagenarian, A)                   \
+  V(Octogenarian, A)
+
+#define CODE_AGE_LIST_IGNORE_ARG(X, V) V(X)
+
+#define CODE_AGE_LIST(V) \
+  CODE_AGE_LIST_WITH_ARG(CODE_AGE_LIST_IGNORE_ARG, V)
+
+#define DECLARE_CODE_AGE_BUILTIN(C, V)             \
+  V(Make##C##CodeYoungAgainOddMarking, BUILTIN,    \
+    UNINITIALIZED, Code::kNoExtraICState)          \
+  V(Make##C##CodeYoungAgainEvenMarking, BUILTIN,   \
+    UNINITIALIZED, Code::kNoExtraICState)
+
+
 // Define list of builtins implemented in C++.
 #define BUILTIN_LIST_C(V)                                           \
   V(Illegal, NO_EXTRA_ARGUMENTS)                                    \
@@ -195,8 +214,8 @@
                                     Code::kNoExtraICState)              \
                                                                         \
   V(OnStackReplacement,             BUILTIN, UNINITIALIZED,             \
-                                    Code::kNoExtraICState)
-
+                                    Code::kNoExtraICState)              \
+  CODE_AGE_LIST_WITH_ARG(DECLARE_CODE_AGE_BUILTIN, V)

 #ifdef ENABLE_DEBUGGER_SUPPORT
 // Define list of builtins used by the debugger implemented in assembly.
@@ -379,6 +398,14 @@
   static void Generate_StringConstructCode(MacroAssembler* masm);
   static void Generate_OnStackReplacement(MacroAssembler* masm);

+#define DECLARE_CODE_AGE_BUILTIN_GENERATOR(C)                \
+  static void Generate_Make##C##CodeYoungAgainEvenMarking(   \
+      MacroAssembler* masm);                                 \
+  static void Generate_Make##C##CodeYoungAgainOddMarking(    \
+      MacroAssembler* masm);
+  CODE_AGE_LIST(DECLARE_CODE_AGE_BUILTIN_GENERATOR)
+#undef DECLARE_CODE_AGE_BUILTIN_GENERATOR
+
   static void InitBuiltinFunctionTable();

   bool initialized_;
=======================================
--- /branches/bleeding_edge/src/code-stubs.h    Thu Sep 27 04:31:26 2012
+++ /branches/bleeding_edge/src/code-stubs.h    Thu Nov  8 04:18:11 2012
@@ -1170,6 +1170,8 @@
   // false on an attempt to replace a non-NULL entry hook with another
   // non-NULL hook.
   static bool SetFunctionEntryHook(FunctionEntryHook entry_hook);
+
+  static bool HasEntryHook() { return entry_hook_ != NULL; }

  private:
   static void EntryHookTrampoline(intptr_t function,
=======================================
--- /branches/bleeding_edge/src/compiler.cc     Mon Nov  5 02:25:32 2012
+++ /branches/bleeding_edge/src/compiler.cc     Thu Nov  8 04:18:11 2012
@@ -611,6 +611,7 @@
     if (result->ic_age() != HEAP->global_ic_age()) {
       result->ResetForNewContext(HEAP->global_ic_age());
     }
+    result->code()->MakeYoung();
   }

   if (result.is_null()) isolate->ReportPendingMessages();
@@ -672,6 +673,7 @@
     if (result->ic_age() != HEAP->global_ic_age()) {
       result->ResetForNewContext(HEAP->global_ic_age());
     }
+    result->code()->MakeYoung();
   }

   return result;
=======================================
--- /branches/bleeding_edge/src/debug.cc        Thu Oct 18 05:21:42 2012
+++ /branches/bleeding_edge/src/debug.cc        Thu Nov  8 04:18:11 2012
@@ -261,8 +261,12 @@
   // Create relocation iterators for the two code objects.
   if (reloc_iterator_ != NULL) delete reloc_iterator_;
   if (reloc_iterator_original_ != NULL) delete reloc_iterator_original_;
-  reloc_iterator_ = new RelocIterator(debug_info_->code());
- reloc_iterator_original_ = new RelocIterator(debug_info_->original_code());
+  reloc_iterator_ = new RelocIterator(
+      debug_info_->code(),
+      ~RelocInfo::ModeMask(RelocInfo::CODE_AGE_SEQUENCE));
+  reloc_iterator_original_ = new RelocIterator(
+      debug_info_->original_code(),
+      ~RelocInfo::ModeMask(RelocInfo::CODE_AGE_SEQUENCE));

   // Position at the first break point.
   break_point_ = -1;
=======================================
--- /branches/bleeding_edge/src/flag-definitions.h      Tue Nov  6 04:32:36 2012
+++ /branches/bleeding_edge/src/flag-definitions.h      Thu Nov  8 04:18:11 2012
@@ -396,6 +396,9 @@
             "flush code that we expect not to use again (during full gc)")
 DEFINE_bool(flush_code_incrementally, false,
             "flush code that we expect not to use again (incrementally)")
+DEFINE_bool(age_code, false,
+            "track un-executed functions to age code and flush only "
+            "old code")
 DEFINE_bool(incremental_marking, true, "use incremental marking")
DEFINE_bool(incremental_marking_steps, true, "do incremental marking steps")
 DEFINE_bool(trace_incremental_marking, false,
=======================================
--- /branches/bleeding_edge/src/ia32/assembler-ia32-inl.h Thu Oct 18 05:21:42 2012 +++ /branches/bleeding_edge/src/ia32/assembler-ia32-inl.h Thu Nov 8 04:18:11 2012
@@ -46,12 +46,21 @@
 namespace internal {


+static const byte kCallOpcode = 0xE8;
+
+
 // The modes possibly affected by apply must be in kApplyMask.
 void RelocInfo::apply(intptr_t delta) {
   if (rmode_ == RUNTIME_ENTRY || IsCodeTarget(rmode_)) {
     int32_t* p = reinterpret_cast<int32_t*>(pc_);
     *p -= delta;  // Relocate entry.
     CPU::FlushICache(p, sizeof(uint32_t));
+  } else if (rmode_ == CODE_AGE_SEQUENCE) {
+    if (*pc_ == kCallOpcode) {
+      int32_t* p = reinterpret_cast<int32_t*>(pc_ + 1);
+      *p -= delta;  // Relocate entry.
+      CPU::FlushICache(p, sizeof(uint32_t));
+    }
   } else if (rmode_ == JS_RETURN && IsPatchedReturnSequence()) {
     // Special handling of js_return when a break point is set (call
     // instruction has been inserted).
@@ -167,6 +176,21 @@
         host(), NULL, cell);
   }
 }
+
+
+Code* RelocInfo::code_age_stub() {
+  ASSERT(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
+  ASSERT(*pc_ == kCallOpcode);
+  return Code::GetCodeFromTargetAddress(
+      Assembler::target_address_at(pc_ + 1));
+}
+
+
+void RelocInfo::set_code_age_stub(Code* stub) {
+  ASSERT(*pc_ == kCallOpcode);
+  ASSERT(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
+  Assembler::set_target_address_at(pc_ + 1, stub->instruction_start());
+}


 Address RelocInfo::call_address() {
@@ -206,7 +230,7 @@


 bool RelocInfo::IsPatchedReturnSequence() {
-  return *pc_ == 0xE8;
+  return *pc_ == kCallOpcode;
 }


@@ -227,7 +251,9 @@
   } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
     visitor->VisitExternalReference(this);
     CPU::FlushICache(pc_, sizeof(Address));
-#ifdef ENABLE_DEBUGGER_SUPPORT
+  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
+    visitor->VisitCodeAgeSequence(this);
+  #ifdef ENABLE_DEBUGGER_SUPPORT
   // TODO(isolates): Get a cached isolate below.
   } else if (((RelocInfo::IsJSReturn(mode) &&
               IsPatchedReturnSequence()) ||
@@ -255,6 +281,8 @@
   } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
     StaticVisitor::VisitExternalReference(this);
     CPU::FlushICache(pc_, sizeof(Address));
+  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
+    StaticVisitor::VisitCodeAgeSequence(heap, this);
 #ifdef ENABLE_DEBUGGER_SUPPORT
   } else if (heap->isolate()->debug()->has_break_points() &&
              ((RelocInfo::IsJSReturn(mode) &&
=======================================
--- /branches/bleeding_edge/src/ia32/assembler-ia32.cc Mon Nov 5 05:28:10 2012 +++ /branches/bleeding_edge/src/ia32/assembler-ia32.cc Thu Nov 8 04:18:11 2012
@@ -169,7 +169,7 @@
 const int RelocInfo::kApplyMask =
   RelocInfo::kCodeTargetMask | 1 << RelocInfo::RUNTIME_ENTRY |
     1 << RelocInfo::JS_RETURN | 1 << RelocInfo::INTERNAL_REFERENCE |
-    1 << RelocInfo::DEBUG_BREAK_SLOT;
+    1 << RelocInfo::DEBUG_BREAK_SLOT | 1 << RelocInfo::CODE_AGE_SEQUENCE;


 bool RelocInfo::IsCodedSpecially() {
=======================================
--- /branches/bleeding_edge/src/ia32/builtins-ia32.cc Fri Aug 17 05:59:00 2012 +++ /branches/bleeding_edge/src/ia32/builtins-ia32.cc Thu Nov 8 04:18:11 2012
@@ -538,6 +538,42 @@
 }


+static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
+ // For now, we are relying on the fact that make_code_young doesn't do any + // garbage collection which allows us to save/restore the registers without
+  // worrying about which of them contain pointers. We also don't build an
+ // internal frame to make the code faster, since we shouldn't have to do stack
+  // crawls in MakeCodeYoung. This seems a bit fragile.
+
+  // Re-execute the code that was patched back to the young age when
+  // the stub returns.
+  __ sub(Operand(esp, 0), Immediate(5));
+  __ pushad();
+  __ mov(eax, Operand(esp, 8 * kPointerSize));
+  {
+    FrameScope scope(masm, StackFrame::MANUAL);
+    __ PrepareCallCFunction(1, ebx);
+    __ mov(Operand(esp, 0), eax);
+    __ CallCFunction(
+ ExternalReference::get_make_code_young_function(masm->isolate()), 1);
+  }
+  __ popad();
+  __ ret(0);
+}
+
+#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                 \
+void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking(  \
+    MacroAssembler* masm) {                                  \
+  GenerateMakeCodeYoungAgainCommon(masm);                    \
+}                                                            \
+void Builtins::Generate_Make##C##CodeYoungAgainOddMarking(   \
+    MacroAssembler* masm) {                                  \
+  GenerateMakeCodeYoungAgainCommon(masm);                    \
+}
+CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
+#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR
+
+
 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
Deoptimizer::BailoutType type) {
   {
=======================================
--- /branches/bleeding_edge/src/ia32/codegen-ia32.cc Thu Nov 8 04:14:29 2012 +++ /branches/bleeding_edge/src/ia32/codegen-ia32.cc Thu Nov 8 04:18:11 2012
@@ -757,6 +757,103 @@

 #undef __

+static const int kNoCodeAgeSequenceLength = 5;
+
+static byte* GetNoCodeAgeSequence(uint32_t* length) {
+  static bool initialized = false;
+  static byte sequence[kNoCodeAgeSequenceLength];
+  *length = kNoCodeAgeSequenceLength;
+  if (!initialized) {
+ // The sequence of instructions that is patched out for aging code is the
+    // following boilerplate stack-building prologue that is found both in
+    // FUNCTION and OPTIMIZED_FUNCTION code:
+    CodePatcher patcher(sequence, kNoCodeAgeSequenceLength);
+    patcher.masm()->push(ebp);
+    patcher.masm()->mov(ebp, esp);
+    patcher.masm()->push(esi);
+    patcher.masm()->push(edi);
+    initialized = true;
+  }
+  return sequence;
+}
+
+
+byte* Code::FindPlatformCodeAgeSequence() {
+  byte* start = instruction_start();
+  uint32_t young_length;
+  byte* young_sequence = GetNoCodeAgeSequence(&young_length);
+  if (!memcmp(start, young_sequence, young_length) ||
+      *start == kCallOpcode) {
+    return start;
+  } else {
+    if (kind() == FUNCTION) {
+      byte* start_after_strict =
+          start + kSizeOfFullCodegenStrictModePrologue;
+      ASSERT(!memcmp(start_after_strict, young_sequence, young_length) ||
+             start[kSizeOfFullCodegenStrictModePrologue] == kCallOpcode);
+      return start_after_strict;
+    } else {
+      ASSERT(kind() == OPTIMIZED_FUNCTION);
+      start = instruction_start() + kSizeOfOptimizedStrictModePrologue;
+      if (!memcmp(start, young_sequence, young_length) ||
+          *start == kCallOpcode) {
+        return start;
+      }
+      start = instruction_start() + kSizeOfOptimizedAlignStackPrologue;
+      if (!memcmp(start, young_sequence, young_length) ||
+          *start == kCallOpcode) {
+        return start;
+      }
+      start = instruction_start() + kSizeOfOptimizedAlignStackPrologue +
+          kSizeOfOptimizedStrictModePrologue;
+      ASSERT(!memcmp(start, young_sequence, young_length) ||
+             *start == kCallOpcode);
+      return start;
+    }
+  }
+}
+
+
+bool Code::IsYoungSequence(byte* sequence) {
+  uint32_t young_length;
+  byte* young_sequence = GetNoCodeAgeSequence(&young_length);
+  bool result = (!memcmp(sequence, young_sequence, young_length));
+  ASSERT(result || *sequence == kCallOpcode);
+  return result;
+}
+
+
+void Code::GetCodeAgeAndParity(byte* sequence, Age* age,
+                               MarkingParity* parity) {
+  if (IsYoungSequence(sequence)) {
+    *age = kNoAge;
+    *parity = NO_MARKING_PARITY;
+  } else {
+    sequence++;  // Skip the kCallOpcode byte
+    Address target_address = sequence + *reinterpret_cast<int*>(sequence) +
+        Assembler::kCallTargetAddressOffset;
+    Code* stub = GetCodeFromTargetAddress(target_address);
+    GetCodeAgeAndParity(stub, age, parity);
+  }
+}
+
+
+void Code::PatchPlatformCodeAge(byte* sequence,
+                                Code::Age age,
+                                MarkingParity parity) {
+  uint32_t young_length;
+  byte* young_sequence = GetNoCodeAgeSequence(&young_length);
+  if (age == kNoAge) {
+    memcpy(sequence, young_sequence, young_length);
+    CPU::FlushICache(sequence, young_length);
+  } else {
+    Code* stub = GetCodeAgeStub(age, parity);
+    CodePatcher patcher(sequence, young_length);
+    patcher.masm()->call(stub->instruction_start(), RelocInfo::NONE);
+  }
+}
+
+
 } }  // namespace v8::internal

 #endif  // V8_TARGET_ARCH_IA32
=======================================
--- /branches/bleeding_edge/src/ia32/codegen-ia32.h     Thu Nov 24 02:16:39 2011
+++ /branches/bleeding_edge/src/ia32/codegen-ia32.h     Thu Nov  8 04:18:11 2012
@@ -37,6 +37,10 @@
 // Forward declarations
 class CompilationInfo;

+static const int kSizeOfFullCodegenStrictModePrologue = 34;
+static const int kSizeOfOptimizedStrictModePrologue = 12;
+static const int kSizeOfOptimizedAlignStackPrologue = 44;
+
// -------------------------------------------------------------------------
 // CodeGenerator

=======================================
--- /branches/bleeding_edge/src/ia32/full-codegen-ia32.cc Thu Nov 8 04:14:29 2012 +++ /branches/bleeding_edge/src/ia32/full-codegen-ia32.cc Thu Nov 8 04:18:11 2012
@@ -138,6 +138,8 @@
   // function calls.
   if (!info->is_classic_mode() || info->is_native()) {
     Label ok;
+    Label start;
+    __ bind(&start);
     __ test(ecx, ecx);
     __ j(zero, &ok, Label::kNear);
     // +1 for return address.
@@ -149,6 +151,7 @@
     __ mov(Operand(esp, receiver_offset),
            Immediate(isolate()->factory()->undefined_value()));
     __ bind(&ok);
+ ASSERT_EQ(kSizeOfFullCodegenStrictModePrologue, ok.pos() - start.pos());
   }

// Open a frame scope to indicate that there is a frame on the stack. The
=======================================
--- /branches/bleeding_edge/src/ia32/lithium-codegen-ia32.cc Mon Nov 5 05:28:10 2012 +++ /branches/bleeding_edge/src/ia32/lithium-codegen-ia32.cc Thu Nov 8 04:18:11 2012
@@ -140,6 +140,8 @@
   // receiver object). ecx is zero for method calls and non-zero for
   // function calls.
   if (!info_->is_classic_mode() || info_->is_native()) {
+    Label begin;
+    __ bind(&begin);
     Label ok;
     __ test(ecx, Operand(ecx));
     __ j(zero, &ok, Label::kNear);
@@ -148,10 +150,13 @@
     __ mov(Operand(esp, receiver_offset),
            Immediate(isolate()->factory()->undefined_value()));
     __ bind(&ok);
+    ASSERT_EQ(kSizeOfOptimizedStrictModePrologue, ok.pos() - begin.pos());
   }


   if (dynamic_frame_alignment_) {
+    Label begin;
+    __ bind(&begin);
     // Move state of dynamic frame alignment into edx.
     __ mov(edx, Immediate(kNoAlignmentPadding));

@@ -174,6 +179,8 @@
     __ j(not_zero, &align_loop, Label::kNear);
     __ mov(Operand(ebx, 0), Immediate(kAlignmentZapValue));
     __ bind(&do_not_pad);
+    ASSERT_EQ(kSizeOfOptimizedAlignStackPrologue,
+              do_not_pad.pos() - begin.pos());
   }

   __ push(ebp);  // Caller's frame pointer.
=======================================
--- /branches/bleeding_edge/src/liveedit.cc     Fri Sep 14 06:31:11 2012
+++ /branches/bleeding_edge/src/liveedit.cc     Thu Nov  8 04:18:11 2012
@@ -1287,7 +1287,9 @@
           continue;
         }
       }
-      buffer_writer.Write(it.rinfo());
+      if (RelocInfo::IsRealRelocMode(rinfo->rmode())) {
+        buffer_writer.Write(it.rinfo());
+      }
     }
   }

=======================================
--- /branches/bleeding_edge/src/mark-compact.cc Thu Nov  8 02:26:50 2012
+++ /branches/bleeding_edge/src/mark-compact.cc Thu Nov  8 04:18:11 2012
@@ -62,6 +62,7 @@
       sweep_precisely_(false),
       reduce_memory_footprint_(false),
       abort_incremental_marking_(false),
+      marking_parity_(ODD_MARKING_PARITY),
       compacting_(false),
       was_marked_incrementally_(false),
       tracer_(NULL),
@@ -404,6 +405,13 @@

   Finish();

+  if (marking_parity_ == EVEN_MARKING_PARITY) {
+    marking_parity_ = ODD_MARKING_PARITY;
+  } else {
+    ASSERT(marking_parity_ == ODD_MARKING_PARITY);
+    marking_parity_ = EVEN_MARKING_PARITY;
+  }
+
   tracer_ = NULL;
 }

@@ -2396,6 +2404,16 @@
       rinfo->set_target_address(Code::cast(target)->instruction_start());
     }
   }
+
+  void VisitCodeAgeSequence(RelocInfo* rinfo) {
+    ASSERT(RelocInfo::IsCodeAgeSequence(rinfo->rmode()));
+    Object* stub = rinfo->code_age_stub();
+    ASSERT(stub != NULL);
+    VisitPointer(&stub);
+    if (stub != rinfo->code_age_stub()) {
+      rinfo->set_code_age_stub(Code::cast(stub));
+    }
+  }

   void VisitDebugTarget(RelocInfo* rinfo) {
     ASSERT((RelocInfo::IsJSReturn(rinfo->rmode()) &&
=======================================
--- /branches/bleeding_edge/src/mark-compact.h  Thu Nov  8 02:26:50 2012
+++ /branches/bleeding_edge/src/mark-compact.h  Thu Nov  8 04:18:11 2012
@@ -659,6 +659,8 @@
   void ClearMarkbits();

   bool is_compacting() const { return compacting_; }
+
+  MarkingParity marking_parity() { return marking_parity_; }

  private:
   MarkCompactCollector();
@@ -692,6 +694,8 @@

   bool abort_incremental_marking_;

+  MarkingParity marking_parity_;
+
   // True if we are collecting slots to perform evacuation from evacuation
   // candidates.
   bool compacting_;
=======================================
--- /branches/bleeding_edge/src/objects-visiting-inl.h Wed Oct 17 06:04:49 2012
+++ /branches/bleeding_edge/src/objects-visiting-inl.h Thu Nov  8 04:18:11 2012
@@ -222,6 +222,17 @@
   heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
   StaticVisitor::MarkObject(heap, target);
 }
+
+
+template<typename StaticVisitor>
+void StaticMarkingVisitor<StaticVisitor>::VisitCodeAgeSequence(
+    Heap* heap, RelocInfo* rinfo) {
+  ASSERT(RelocInfo::IsCodeAgeSequence(rinfo->rmode()));
+  Code* target = rinfo->code_age_stub();
+  ASSERT(target != NULL);
+  heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
+  StaticVisitor::MarkObject(heap, target);
+}


 template<typename StaticVisitor>
@@ -276,6 +287,9 @@
   if (FLAG_cleanup_code_caches_at_gc) {
     code->ClearTypeFeedbackCells(heap);
   }
+  if (FLAG_age_code && !Serializer::enabled()) {
+    code->MakeOlder(heap->mark_compact_collector()->marking_parity());
+  }
   code->CodeIterateBody<StaticVisitor>(heap);
 }

@@ -449,8 +463,10 @@
   // by optimized version of function.
   MarkBit code_mark = Marking::MarkBitFrom(function->code());
   if (code_mark.Get()) {
-    if (!Marking::MarkBitFrom(shared_info).Get()) {
-      shared_info->set_code_age(0);
+    if (!FLAG_age_code) {
+      if (!Marking::MarkBitFrom(shared_info).Get()) {
+        shared_info->set_code_age(0);
+      }
     }
     return false;
   }
@@ -460,10 +476,15 @@
     return false;
   }

-  // We do not flush code for optimized functions.
+  // We do not (yet) flush code for optimized functions.
   if (function->code() != shared_info->code()) {
     return false;
   }
+
+  // Check age of optimized code.
+  if (FLAG_age_code && !function->code()->IsOld()) {
+    return false;
+  }

   return IsFlushable(heap, shared_info);
 }
@@ -506,20 +527,20 @@
     return false;
   }

-  // TODO(mstarzinger): The following will soon be replaced by a new way of
-  // aging code, that is based on an aging stub in the function prologue.
-
- // How many collections newly compiled code object will survive before being
-  // flushed.
-  static const int kCodeAgeThreshold = 5;
+  if (FLAG_age_code) {
+    return shared_info->code()->IsOld();
+  } else {
+ // How many collections newly compiled code object will survive before being
+    // flushed.
+    static const int kCodeAgeThreshold = 5;

-  // Age this shared function info.
-  if (shared_info->code_age() < kCodeAgeThreshold) {
-    shared_info->set_code_age(shared_info->code_age() + 1);
-    return false;
+    // Age this shared function info.
+    if (shared_info->code_age() < kCodeAgeThreshold) {
+      shared_info->set_code_age(shared_info->code_age() + 1);
+      return false;
+    }
+    return true;
   }
-
-  return true;
 }


=======================================
--- /branches/bleeding_edge/src/objects-visiting.h      Fri Oct 12 05:41:29 2012
+++ /branches/bleeding_edge/src/objects-visiting.h      Thu Nov  8 04:18:11 2012
@@ -391,6 +391,7 @@
   static inline void VisitGlobalPropertyCell(Heap* heap, RelocInfo* rinfo);
   static inline void VisitDebugTarget(Heap* heap, RelocInfo* rinfo);
   static inline void VisitCodeTarget(Heap* heap, RelocInfo* rinfo);
+  static inline void VisitCodeAgeSequence(Heap* heap, RelocInfo* rinfo);
   static inline void VisitExternalReference(RelocInfo* rinfo) { }
   static inline void VisitRuntimeEntry(RelocInfo* rinfo) { }

=======================================
--- /branches/bleeding_edge/src/objects.cc      Thu Nov  8 04:14:29 2012
+++ /branches/bleeding_edge/src/objects.cc      Thu Nov  8 04:18:11 2012
@@ -7792,6 +7792,7 @@
   ASSERT(code != NULL);
ASSERT(function->context()->native_context() == code_map->get(index - 1));
   function->ReplaceCode(code);
+  code->MakeYoung();
 }


@@ -8430,6 +8431,15 @@
   VisitPointer(&target);
CHECK_EQ(target, old_target); // VisitPointer doesn't change Code* *target.
 }
+
+
+void ObjectVisitor::VisitCodeAgeSequence(RelocInfo* rinfo) {
+  ASSERT(RelocInfo::IsCodeAgeSequence(rinfo->rmode()));
+  Object* stub = rinfo->code_age_stub();
+  if (stub) {
+    VisitPointer(&stub);
+  }
+}


 void ObjectVisitor::VisitCodeEntry(Address entry_address) {
@@ -8642,6 +8652,99 @@
   return is_keyed_load_stub() || is_keyed_store_stub() ||
(is_compare_ic_stub() && compare_state() == CompareIC::KNOWN_OBJECTS);
 }
+
+
+void Code::MakeCodeAgeSequenceYoung(byte* sequence) {
+  PatchPlatformCodeAge(sequence, kNoAge, NO_MARKING_PARITY);
+}
+
+
+void Code::MakeYoung() {
+  byte* sequence = FindCodeAgeSequence();
+  if (sequence != NULL) {
+    PatchPlatformCodeAge(sequence, kNoAge, NO_MARKING_PARITY);
+  }
+}
+
+
+void Code::MakeOlder(MarkingParity current_parity) {
+  byte* sequence = FindCodeAgeSequence();
+  if (sequence != NULL) {
+    Age age;
+    MarkingParity code_parity;
+    GetCodeAgeAndParity(sequence, &age, &code_parity);
+    if (age != kLastCodeAge && code_parity != current_parity) {
+      PatchPlatformCodeAge(sequence, static_cast<Age>(age + 1),
+                           current_parity);
+    }
+  }
+}
+
+
+bool Code::IsOld() {
+  byte* sequence = FindCodeAgeSequence();
+  if (sequence == NULL) return false;
+  Age age;
+  MarkingParity parity;
+  GetCodeAgeAndParity(sequence, &age, &parity);
+  return age >= kSexagenarianCodeAge;
+}
+
+
+byte* Code::FindCodeAgeSequence() {
+  if (kind() != FUNCTION && kind() != OPTIMIZED_FUNCTION) return NULL;
+  if (strlen(FLAG_stop_at) == 0 &&
+      !ProfileEntryHookStub::HasEntryHook() &&
+      (kind() == FUNCTION && !has_debug_break_slots())) {
+    return FindPlatformCodeAgeSequence();
+  }
+  return NULL;
+}
+
+
+void Code::GetCodeAgeAndParity(Code* code, Age* age,
+                               MarkingParity* parity) {
+  Isolate* isolate = Isolate::Current();
+  Builtins* builtins = isolate->builtins();
+  Code* stub = NULL;
+#define HANDLE_CODE_AGE(AGE)                                            \
+  stub = *builtins->Make##AGE##CodeYoungAgainEvenMarking();             \
+  if (code == stub) {                                                   \
+    *age = k##AGE##CodeAge;                                             \
+    *parity = EVEN_MARKING_PARITY;                                      \
+    return;                                                             \
+  }                                                                     \
+  stub = *builtins->Make##AGE##CodeYoungAgainOddMarking();              \
+  if (code == stub) {                                                   \
+    *age = k##AGE##CodeAge;                                             \
+    *parity = ODD_MARKING_PARITY;                                       \
+    return;                                                             \
+  }
+  CODE_AGE_LIST(HANDLE_CODE_AGE)
+#undef HANDLE_CODE_AGE
+  UNREACHABLE();
+}
+
+
+Code* Code::GetCodeAgeStub(Age age, MarkingParity parity) {
+  Isolate* isolate = Isolate::Current();
+  Builtins* builtins = isolate->builtins();
+  switch (age) {
+#define HANDLE_CODE_AGE(AGE)                                            \
+    case k##AGE##CodeAge: {                                             \
+      Code* stub = parity == EVEN_MARKING_PARITY                        \
+          ? *builtins->Make##AGE##CodeYoungAgainEvenMarking()           \
+          : *builtins->Make##AGE##CodeYoungAgainOddMarking();           \
+      return stub;                                                      \
+    }
+    CODE_AGE_LIST(HANDLE_CODE_AGE)
+#undef HANDLE_CODE_AGE
+    default:
+      UNREACHABLE();
+      break;
+  }
+  return NULL;
+}


 #ifdef ENABLE_DISASSEMBLER
=======================================
--- /branches/bleeding_edge/src/objects.h       Thu Nov  8 04:14:29 2012
+++ /branches/bleeding_edge/src/objects.h       Thu Nov  8 04:18:11 2012
@@ -194,6 +194,18 @@
   OWN_DESCRIPTORS
 };

+// The GC maintains a bit of information, the MarkingParity, which toggles
+// from odd to even and back every time marking is completed. Incremental
+// marking can visit an object twice during a marking phase, so algorithms
+// that piggy-back on marking can use the parity to ensure that they only
+// perform an operation on an object once per marking phase: they record the
+// MarkingParity when they visit an object, and only re-visit the object when it
+// is marked again and the MarkingParity changes.
+enum MarkingParity {
+  NO_MARKING_PARITY,
+  ODD_MARKING_PARITY,
+  EVEN_MARKING_PARITY
+};

 // Instance size sentinel for objects of variable size.
 const int kVariableSizeSentinel = 0;
@@ -4540,6 +4552,23 @@
   void ClearInlineCaches();
   void ClearTypeFeedbackCells(Heap* heap);

+#define DECLARE_CODE_AGE_ENUM(X) k##X##CodeAge,
+  enum Age {
+    kNoAge = 0,
+    CODE_AGE_LIST(DECLARE_CODE_AGE_ENUM)
+    kAfterLastCodeAge,
+    kLastCodeAge = kAfterLastCodeAge - 1,
+    kCodeAgeCount = kAfterLastCodeAge - 1
+  };
+#undef DECLARE_CODE_AGE_ENUM
+
+  // Code aging
+  static void MakeCodeAgeSequenceYoung(byte* sequence);
+  void MakeYoung();
+  void MakeOlder(MarkingParity);
+  static bool IsYoungSequence(byte* sequence);
+  bool IsOld();
+
   // Max loop nesting marker used to postpose OSR. We don't take loop
   // nesting that is deeper than 5 levels into account.
   static const int kMaxLoopNestingMarker = 6;
@@ -4668,6 +4697,21 @@
       TypeField::kMask | CacheHolderField::kMask;

  private:
+  friend class RelocIterator;
+
+  // Code aging
+  byte* FindCodeAgeSequence();
+  static void  GetCodeAgeAndParity(Code* code, Age* age,
+                                   MarkingParity* parity);
+  static void GetCodeAgeAndParity(byte* sequence, Age* age,
+                                  MarkingParity* parity);
+  static Code* GetCodeAgeStub(Age age, MarkingParity parity);
+
+  // Code aging -- platform-specific
+  byte* FindPlatformCodeAgeSequence();
+  static void PatchPlatformCodeAge(byte* sequence, Age age,
+                                   MarkingParity parity);
+
   DISALLOW_IMPLICIT_CONSTRUCTORS(Code);
 };

@@ -8916,6 +8960,10 @@
   // Visits a debug call target in the instruction stream.
   virtual void VisitDebugTarget(RelocInfo* rinfo);

+ // Visits the byte sequence in a function's prologue that contains information
+  // about the code's age.
+  virtual void VisitCodeAgeSequence(RelocInfo* rinfo);
+
   // Handy shorthand for visiting a single pointer.
   virtual void VisitPointer(Object** p) { VisitPointers(p, p + 1); }

=======================================
--- /branches/bleeding_edge/src/serialize.cc    Thu Oct 25 04:52:37 2012
+++ /branches/bleeding_edge/src/serialize.cc    Thu Nov  8 04:18:11 2012
@@ -523,6 +523,10 @@
       UNCLASSIFIED,
       50,
       "pending_message_script");
+  Add(ExternalReference::get_make_code_young_function(isolate).address(),
+      UNCLASSIFIED,
+      51,
+      "Code::MakeCodeYoung");
 }


=======================================
--- /branches/bleeding_edge/src/x64/assembler-x64-inl.h Thu Oct 18 05:21:42 2012
+++ /branches/bleeding_edge/src/x64/assembler-x64-inl.h Thu Nov  8 04:18:11 2012
@@ -42,6 +42,9 @@
 // Implementation of Assembler


+static const byte kCallOpcode = 0xE8;
+
+
 void Assembler::emitl(uint32_t x) {
   Memory::uint32_at(pc_) = x;
   pc_ += sizeof(uint32_t);
@@ -217,6 +220,12 @@
   } else if (IsCodeTarget(rmode_)) {
     Memory::int32_at(pc_) -= static_cast<int32_t>(delta);
     CPU::FlushICache(pc_, sizeof(int32_t));
+  } else if (rmode_ == CODE_AGE_SEQUENCE) {
+    if (*pc_ == kCallOpcode) {
+      int32_t* p = reinterpret_cast<int32_t*>(pc_ + 1);
+      *p -= delta;  // Relocate entry.
+      CPU::FlushICache(p, sizeof(uint32_t));
+    }
   }
 }

@@ -353,6 +362,21 @@
 bool RelocInfo::IsPatchedDebugBreakSlotSequence() {
   return !Assembler::IsNop(pc());
 }
+
+
+Code* RelocInfo::code_age_stub() {
+  ASSERT(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
+  ASSERT(*pc_ == kCallOpcode);
+  return Code::GetCodeFromTargetAddress(
+      Assembler::target_address_at(pc_ + 1));
+}
+
+
+void RelocInfo::set_code_age_stub(Code* stub) {
+  ASSERT(*pc_ == kCallOpcode);
+  ASSERT(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
+  Assembler::set_target_address_at(pc_ + 1, stub->instruction_start());
+}


 Address RelocInfo::call_address() {
@@ -408,6 +432,8 @@
   } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
     visitor->VisitExternalReference(this);
     CPU::FlushICache(pc_, sizeof(Address));
+  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
+    visitor->VisitCodeAgeSequence(this);
 #ifdef ENABLE_DEBUGGER_SUPPORT
   // TODO(isolates): Get a cached isolate below.
   } else if (((RelocInfo::IsJSReturn(mode) &&
@@ -436,6 +462,8 @@
   } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
     StaticVisitor::VisitExternalReference(this);
     CPU::FlushICache(pc_, sizeof(Address));
+  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
+    StaticVisitor::VisitCodeAgeSequence(heap, this);
 #ifdef ENABLE_DEBUGGER_SUPPORT
   } else if (heap->isolate()->debug()->has_break_points() &&
              ((RelocInfo::IsJSReturn(mode) &&
=======================================
--- /branches/bleeding_edge/src/x64/assembler-x64.cc Mon Aug  6 07:13:09 2012
+++ /branches/bleeding_edge/src/x64/assembler-x64.cc Thu Nov  8 04:18:11 2012
@@ -3047,7 +3047,8 @@


 const int RelocInfo::kApplyMask = RelocInfo::kCodeTargetMask |
-                                  1 << RelocInfo::INTERNAL_REFERENCE;
+    1 << RelocInfo::INTERNAL_REFERENCE |
+    1 << RelocInfo::CODE_AGE_SEQUENCE;


 bool RelocInfo::IsCodedSpecially() {
=======================================
--- /branches/bleeding_edge/src/x64/builtins-x64.cc     Fri Aug 17 05:59:00 2012
+++ /branches/bleeding_edge/src/x64/builtins-x64.cc     Thu Nov  8 04:18:11 2012
@@ -606,6 +606,46 @@
 }


+static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
+  // For now, we are relying on the fact that make_code_young doesn't do any
+  // garbage collection which allows us to save/restore the registers without
+  // worrying about which of them contain pointers. We also don't build an
+  // internal frame to make the code faster, since we shouldn't have to do stack
+  // crawls in MakeCodeYoung. This seems a bit fragile.
+
+  // Re-execute the code that was patched back to the young age when
+  // the stub returns.
+  __ subq(Operand(rsp, 0), Immediate(5));
+  __ Pushad();
+#ifdef _WIN64
+  __ movq(rcx, Operand(rsp, kNumSafepointRegisters * kPointerSize));
+#else
+  __ movq(rdi, Operand(rsp, kNumSafepointRegisters * kPointerSize));
+#endif
+  {  // NOLINT
+    FrameScope scope(masm, StackFrame::MANUAL);
+    __ PrepareCallCFunction(1);
+    __ CallCFunction(
+ ExternalReference::get_make_code_young_function(masm->isolate()), 1);
+  }
+  __ Popad();
+  __ ret(0);
+}
+
+
+#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                 \
+void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking(  \
+    MacroAssembler* masm) {                                  \
+  GenerateMakeCodeYoungAgainCommon(masm);                    \
+}                                                            \
+void Builtins::Generate_Make##C##CodeYoungAgainOddMarking(   \
+    MacroAssembler* masm) {                                  \
+  GenerateMakeCodeYoungAgainCommon(masm);                    \
+}
+CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
+#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR
+
+
 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
Deoptimizer::BailoutType type) {
   // Enter an internal frame.
=======================================
--- /branches/bleeding_edge/src/x64/codegen-x64.cc      Thu Nov  8 04:14:29 2012
+++ /branches/bleeding_edge/src/x64/codegen-x64.cc      Thu Nov  8 04:18:11 2012
@@ -577,6 +577,91 @@

 #undef __

+
+static const int kNoCodeAgeSequenceLength = 6;
+
+static byte* GetNoCodeAgeSequence(uint32_t* length) {
+  static bool initialized = false;
+  static byte sequence[kNoCodeAgeSequenceLength];
+  *length = kNoCodeAgeSequenceLength;
+  if (!initialized) {
+ // The sequence of instructions that is patched out for aging code is the
+    // following boilerplate stack-building prologue that is found both in
+    // FUNCTION and OPTIMIZED_FUNCTION code:
+    CodePatcher patcher(sequence, kNoCodeAgeSequenceLength);
+    patcher.masm()->push(rbp);
+    patcher.masm()->movq(rbp, rsp);
+    patcher.masm()->push(rsi);
+    patcher.masm()->push(rdi);
+    initialized = true;
+  }
+  return sequence;
+}
+
+
+byte* Code::FindPlatformCodeAgeSequence() {
+  byte* start = instruction_start();
+  uint32_t young_length;
+  byte* young_sequence = GetNoCodeAgeSequence(&young_length);
+  if (!memcmp(start, young_sequence, young_length) ||
+      *start == kCallOpcode) {
+    return start;
+  } else {
+    byte* start_after_strict = NULL;
+    if (kind() == FUNCTION) {
+      start_after_strict = start + kSizeOfFullCodegenStrictModePrologue;
+    } else {
+      ASSERT(kind() == OPTIMIZED_FUNCTION);
+      start_after_strict = start + kSizeOfOptimizedStrictModePrologue;
+    }
+    ASSERT(!memcmp(start_after_strict, young_sequence, young_length) ||
+           *start_after_strict == kCallOpcode);
+    return start_after_strict;
+  }
+}
+
+
+bool Code::IsYoungSequence(byte* sequence) {
+  uint32_t young_length;
+  byte* young_sequence = GetNoCodeAgeSequence(&young_length);
+  bool result = (!memcmp(sequence, young_sequence, young_length));
+  ASSERT(result || *sequence == kCallOpcode);
+  return result;
+}
+
+
+void Code::GetCodeAgeAndParity(byte* sequence, Age* age,
+                               MarkingParity* parity) {
+  if (IsYoungSequence(sequence)) {
+    *age = kNoAge;
+    *parity = NO_MARKING_PARITY;
+  } else {
+    sequence++;  // Skip the kCallOpcode byte
+    Address target_address = sequence + *reinterpret_cast<int*>(sequence) +
+        Assembler::kCallTargetAddressOffset;
+    Code* stub = GetCodeFromTargetAddress(target_address);
+    GetCodeAgeAndParity(stub, age, parity);
+  }
+}
+
+
+void Code::PatchPlatformCodeAge(byte* sequence,
+                                Code::Age age,
+                                MarkingParity parity) {
+  uint32_t young_length;
+  byte* young_sequence = GetNoCodeAgeSequence(&young_length);
+  if (age == kNoAge) {
+    memcpy(sequence, young_sequence, young_length);
+    CPU::FlushICache(sequence, young_length);
+  } else {
+    Code* stub = GetCodeAgeStub(age, parity);
+    CodePatcher patcher(sequence, young_length);
+    patcher.masm()->call(stub->instruction_start());
+    patcher.masm()->nop();
+  }
+}
+
+
 } }  // namespace v8::internal

 #endif  // V8_TARGET_ARCH_X64
=======================================
--- /branches/bleeding_edge/src/x64/codegen-x64.h       Thu Nov 24 02:16:39 2011
+++ /branches/bleeding_edge/src/x64/codegen-x64.h       Thu Nov  8 04:18:11 2012
@@ -39,6 +39,8 @@

 enum TypeofState { INSIDE_TYPEOF, NOT_INSIDE_TYPEOF };

+static const int kSizeOfFullCodegenStrictModePrologue = 14;
+static const int kSizeOfOptimizedStrictModePrologue = 14;

// -------------------------------------------------------------------------
 // CodeGenerator
=======================================
--- /branches/bleeding_edge/src/x64/full-codegen-x64.cc Thu Nov  8 04:14:29 2012
+++ /branches/bleeding_edge/src/x64/full-codegen-x64.cc Thu Nov  8 04:18:11 2012
@@ -138,6 +138,8 @@
   // function calls.
   if (!info->is_classic_mode() || info->is_native()) {
     Label ok;
+    Label begin;
+    __ bind(&begin);
     __ testq(rcx, rcx);
     __ j(zero, &ok, Label::kNear);
     // +1 for return address.
@@ -145,6 +147,7 @@
     __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
     __ movq(Operand(rsp, receiver_offset), kScratchRegister);
     __ bind(&ok);
+ ASSERT_EQ(kSizeOfFullCodegenStrictModePrologue, ok.pos() - begin.pos());
   }

// Open a frame scope to indicate that there is a frame on the stack. The
=======================================
--- /branches/bleeding_edge/src/x64/lithium-codegen-x64.cc Mon Nov  5 05:28:10 2012
+++ /branches/bleeding_edge/src/x64/lithium-codegen-x64.cc Thu Nov  8 04:18:11 2012
@@ -133,6 +133,8 @@
   // object). rcx is zero for method calls and non-zero for function
   // calls.
   if (!info_->is_classic_mode() || info_->is_native()) {
+    Label begin;
+    __ bind(&begin);
     Label ok;
     __ testq(rcx, rcx);
     __ j(zero, &ok, Label::kNear);
@@ -141,6 +143,7 @@
     __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
     __ movq(Operand(rsp, receiver_offset), kScratchRegister);
     __ bind(&ok);
+    ASSERT_EQ(kSizeOfOptimizedStrictModePrologue, ok.pos() - begin.pos());
   }

   __ push(rbp);  // Caller's frame pointer.

--
v8-dev mailing list
[email protected]
http://groups.google.com/group/v8-dev

Reply via email to