Revision: 14650
Author:   [email protected]
Date:     Mon May 13 12:22:18 2013
Log:      Only flush SharedFunctionInfo optimized code cache when necessary

[email protected]

Review URL: https://codereview.chromium.org/14604007
http://code.google.com/p/v8/source/detail?r=14650

Modified:
 /branches/bleeding_edge/src/arm/deoptimizer-arm.cc
 /branches/bleeding_edge/src/ia32/deoptimizer-ia32.cc
 /branches/bleeding_edge/src/objects.cc
 /branches/bleeding_edge/src/objects.h
 /branches/bleeding_edge/src/runtime.cc
 /branches/bleeding_edge/src/x64/deoptimizer-x64.cc
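
Before this change, every deoptimization called SharedFunctionInfo::ClearOptimizedCodeMap() and dropped the cached optimized code for all native contexts the function had been optimized in. The new EvictFromOptimizedCodeMap() removes only the entry for the code object actually being deoptimized, so other contexts keep their cache entries. A minimal, self-contained sketch of that difference (plain C++ with illustrative names, not the V8 API):

  #include <algorithm>
  #include <cstdio>
  #include <vector>

  // Illustrative stand-in for one optimized-code-map entry: the real map is
  // keyed by native context and also carries a literals array.
  struct CodeMapEntry { int native_context; int code; };

  // Old behavior: any deoptimization flushed the whole cache.
  void ClearAll(std::vector<CodeMapEntry>& code_map) { code_map.clear(); }

  // New behavior: drop only the entry whose code was deoptimized.
  void EvictOne(std::vector<CodeMapEntry>& code_map, int deoptimized_code) {
    code_map.erase(std::remove_if(code_map.begin(), code_map.end(),
                                  [&](const CodeMapEntry& e) {
                                    return e.code == deoptimized_code;
                                  }),
                   code_map.end());
  }

  int main() {
    std::vector<CodeMapEntry> code_map = {{1, 100}, {2, 200}, {3, 300}};
    EvictOne(code_map, 200);  // only context 2 loses its cached optimized code
    std::printf("entries left: %zu\n", code_map.size());  // prints 2
  }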

=======================================
--- /branches/bleeding_edge/src/arm/deoptimizer-arm.cc Mon May 13 04:10:31 2013
+++ /branches/bleeding_edge/src/arm/deoptimizer-arm.cc Mon May 13 12:22:18 2013
@@ -53,14 +53,13 @@
   ASSERT(function->IsOptimized());
   ASSERT(function->FunctionsInFunctionListShareSameCode());

-  // The optimized code is going to be patched, so we cannot use it
-  // any more.  Play safe and reset the whole cache.
-  function->shared()->ClearOptimizedCodeMap("deoptimized function");
-
   // Get the optimized code.
   Code* code = function->code();
   Address code_start_address = code->instruction_start();

+  // The optimized code is going to be patched, so we cannot use it any more.
+  function->shared()->EvictFromOptimizedCodeMap(code, "deoptimized function");
+
  // Invalidate the relocation information, as it will become invalid by the
   // code patching below, and is not needed any more.
   code->InvalidateRelocation();
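
The same reordering appears in all three architecture-specific deoptimizers (arm here, ia32 and x64 below): the eviction call needs the optimized Code object, so it now runs after the code has been fetched and before the code is patched. A compressed sketch of the new ordering, using hypothetical stand-in types rather than the real V8 classes:

  #include <cstdio>

  // Hypothetical stand-ins for the V8 classes touched in this hunk; only the
  // ordering matters: fetch the code, evict exactly that entry, then patch.
  struct Code {
    void InvalidateRelocation() { std::puts("relocation info invalidated"); }
  };

  struct SharedFunctionInfo {
    void EvictFromOptimizedCodeMap(Code*, const char* reason) {
      std::printf("evicted one code-map entry (%s)\n", reason);
    }
  };

  struct JSFunction {
    Code code_object;
    SharedFunctionInfo shared_info;
    Code* code() { return &code_object; }
    SharedFunctionInfo* shared() { return &shared_info; }
  };

  void DeoptimizeFunctionSketch(JSFunction* function) {
    Code* code = function->code();        // the optimized code about to be patched
    function->shared()->EvictFromOptimizedCodeMap(code, "deoptimized function");
    code->InvalidateRelocation();         // safe: the map no longer hands it out
  }

  int main() { JSFunction f; DeoptimizeFunctionSketch(&f); }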
=======================================
--- /branches/bleeding_edge/src/ia32/deoptimizer-ia32.cc Mon May 13 04:10:31 2013
+++ /branches/bleeding_edge/src/ia32/deoptimizer-ia32.cc Mon May 13 12:22:18 2013
@@ -123,14 +123,13 @@
   ASSERT(function->IsOptimized());
   ASSERT(function->FunctionsInFunctionListShareSameCode());

-  // The optimized code is going to be patched, so we cannot use it
-  // any more.  Play safe and reset the whole cache.
-  function->shared()->ClearOptimizedCodeMap("deoptimized function");
-
   // Get the optimized code.
   Code* code = function->code();
   Address code_start_address = code->instruction_start();

+  // The optimized code is going to be patched, so we cannot use it any more.
+  function->shared()->EvictFromOptimizedCodeMap(code, "deoptimized function");
+
   // We will overwrite the code's relocation info in-place. Relocation info
   // is written backward. The relocation info is the payload of a byte
   // array.  Later on we will slide this to the start of the byte array and
=======================================
--- /branches/bleeding_edge/src/objects.cc      Mon May 13 06:20:47 2013
+++ /branches/bleeding_edge/src/objects.cc      Mon May 13 12:22:18 2013
@@ -9034,13 +9034,48 @@
 void SharedFunctionInfo::ClearOptimizedCodeMap(const char* reason) {
   if (!optimized_code_map()->IsSmi()) {
     if (FLAG_trace_opt) {
-      PrintF("[clearing optimizing code map (%s) for ", reason);
+      PrintF("[clearing entire optimizing code map (%s) for ", reason);
       ShortPrint();
       PrintF("]\n");
     }
     set_optimized_code_map(Smi::FromInt(0));
   }
 }
+
+
+void SharedFunctionInfo::EvictFromOptimizedCodeMap(Code* optimized_code,
+                                                   const char* reason) {
+  if (optimized_code_map()->IsSmi()) return;
+
+  int i;
+  bool removed_entry = false;
+  FixedArray* code_map = FixedArray::cast(optimized_code_map());
+  for (i = 0; i < code_map->length(); i += kEntryLength) {
+    ASSERT(code_map->get(i)->IsNativeContext());
+    if (Code::cast(code_map->get(i + 1)) == optimized_code) {
+      if (FLAG_trace_opt) {
+        PrintF("[clearing optimizing code map (%s) for ", reason);
+        ShortPrint();
+        PrintF("]\n");
+      }
+      removed_entry = true;
+      break;
+    }
+  }
+  while (i < (code_map->length() - kEntryLength)) {
+    code_map->set(i, code_map->get(i + kEntryLength));
+    code_map->set(i + 1, code_map->get(i + 1 + kEntryLength));
+    code_map->set(i + 2, code_map->get(i + 2 + kEntryLength));
+    i += kEntryLength;
+  }
+  if (removed_entry) {
+    if (code_map->length() > kEntryLength) {
+      RightTrimFixedArray<FROM_MUTATOR>(GetHeap(), code_map, kEntryLength);
+    } else {
+      ClearOptimizedCodeMap(reason);
+    }
+  }
+}


 bool JSFunction::CompileLazy(Handle<JSFunction> function,
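
The optimized code map here is a flat FixedArray of fixed-size entries; in this revision each entry is a (native context, code, literals) triple, which is why the compaction loop above copies slots i, i+1 and i+2. Eviction locates the entry whose code matches, slides every later entry down by one, and then right-trims the array, or clears the whole map when the removed entry was the last one. A rough equivalent of that compaction using a std::vector instead of a heap FixedArray (illustrative types only, not V8 API):

  #include <cstddef>
  #include <vector>

  // Each entry mirrors one kEntryLength-sized slice of the FixedArray.
  struct Entry { const void* native_context; const void* code; const void* literals; };

  // Remove the entry whose code matches; later entries slide down and the
  // container shrinks by one (the real code right-trims the FixedArray, or
  // clears the whole map when this was the only entry).
  bool EvictSketch(std::vector<Entry>& code_map, const void* optimized_code) {
    for (std::size_t i = 0; i < code_map.size(); ++i) {
      if (code_map[i].code == optimized_code) {
        code_map.erase(code_map.begin() + i);
        return true;
      }
    }
    return false;  // the code was not cached for any native context
  }

  int main() {
    int dummy_code = 0;                       // stands in for a Code* address
    std::vector<Entry> map = {{nullptr, &dummy_code, nullptr}};
    return (EvictSketch(map, &dummy_code) && map.empty()) ? 0 : 1;
  }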
=======================================
--- /branches/bleeding_edge/src/objects.h       Mon May 13 04:10:31 2013
+++ /branches/bleeding_edge/src/objects.h       Mon May 13 12:22:18 2013
@@ -5822,6 +5822,9 @@
   // Clear optimized code map.
   void ClearOptimizedCodeMap(const char* reason);

+  // Removes a specific optimized code object from the optimized code map.
+  void EvictFromOptimizedCodeMap(Code* optimized_code, const char* reason);
+
   // Add a new entry to the optimized code map.
   static void AddToOptimizedCodeMap(Handle<SharedFunctionInfo> shared,
                                     Handle<Context> native_context,
=======================================
--- /branches/bleeding_edge/src/runtime.cc      Mon May 13 04:10:31 2013
+++ /branches/bleeding_edge/src/runtime.cc      Mon May 13 12:22:18 2013
@@ -7973,6 +7973,7 @@
   JavaScriptFrame* frame = it.frame();
   RUNTIME_ASSERT(frame->function()->IsJSFunction());
  Handle<JSFunction> function(JSFunction::cast(frame->function()), isolate);
+  Handle<Code> optimized_code(function->code());
   RUNTIME_ASSERT(type != Deoptimizer::EAGER || function->IsOptimized());

   // Avoid doing too much work when running with --always-opt and keep
@@ -8010,8 +8011,10 @@
   } else {
     Deoptimizer::DeoptimizeFunction(*function);
   }
-  // Flush optimized code cache for this function.
-  function->shared()->ClearOptimizedCodeMap("notify deoptimized");
+  // Evict optimized code for this function from the cache so that it doesn't
+  // get used for new closures.
+  function->shared()->EvictFromOptimizedCodeMap(*optimized_code,
+                                                "notify deoptimized");

   return isolate->heap()->undefined_value();
 }
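
Note the ordering in this hunk: the handle to the optimized code is captured before Deoptimizer::DeoptimizeFunction() runs, because deoptimization replaces function->code() with unoptimized code; evicting through the saved handle therefore removes the entry for the code that was actually deoptimized. A toy illustration of why capturing first matters (plain C++, hypothetical names):

  #include <cassert>

  struct Code { bool optimized; };

  struct JSFunction {
    Code* code;                        // what function->code() would return
  };

  Code optimized_code{true};
  Code unoptimized_code{false};

  // Deoptimization swaps the function over to unoptimized code.
  void DeoptimizeFunction(JSFunction* f) { f->code = &unoptimized_code; }

  int main() {
    JSFunction f{&optimized_code};
    Code* saved = f.code;              // capture before deoptimizing, as the diff does
    DeoptimizeFunction(&f);
    assert(saved->optimized);          // the saved handle still names the optimized code
    assert(!f.code->optimized);        // function->code() no longer does
    return 0;
  }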
=======================================
--- /branches/bleeding_edge/src/x64/deoptimizer-x64.cc Mon May 13 04:10:31 2013
+++ /branches/bleeding_edge/src/x64/deoptimizer-x64.cc Mon May 13 12:22:18 2013
@@ -55,13 +55,12 @@
   ASSERT(function->IsOptimized());
   ASSERT(function->FunctionsInFunctionListShareSameCode());

-  // The optimized code is going to be patched, so we cannot use it
-  // any more.  Play safe and reset the whole cache.
-  function->shared()->ClearOptimizedCodeMap("deoptimized function");
-
   // Get the optimized code.
   Code* code = function->code();

+  // The optimized code is going to be patched, so we cannot use it any more.
+  function->shared()->EvictFromOptimizedCodeMap(code, "deoptimized function");
+
  // Invalidate the relocation information, as it will become invalid by the
   // code patching below, and is not needed any more.
   code->InvalidateRelocation();
