Revision: 25142
Author:   [email protected]
Date:     Wed Nov  5 11:12:27 2014 UTC
Log:      Reland "Optimize function across closures."

[email protected]

Review URL: https://codereview.chromium.org/703603003
https://code.google.com/p/v8/source/detail?r=25142

Modified:
 /branches/bleeding_edge/src/code-stubs-hydrogen.cc
 /branches/bleeding_edge/src/compiler.cc
 /branches/bleeding_edge/src/factory.cc
 /branches/bleeding_edge/src/hydrogen-instructions.h
 /branches/bleeding_edge/src/objects-inl.h
 /branches/bleeding_edge/src/objects.cc
 /branches/bleeding_edge/src/objects.h
 /branches/bleeding_edge/src/runtime/runtime-test.cc
 /branches/bleeding_edge/src/runtime-profiler.cc

=======================================
--- /branches/bleeding_edge/src/code-stubs-hydrogen.cc Tue Nov  4 12:58:17 2014 UTC
+++ /branches/bleeding_edge/src/code-stubs-hydrogen.cc Wed Nov  5 11:12:27 2014 UTC
@@ -1549,47 +1549,67 @@

   AddIncrementCounter(counters->fast_new_closure_total());

-  // Create a new closure from the given function info in new space
-  HValue* size = Add<HConstant>(JSFunction::kSize);
-  HInstruction* js_function = Add<HAllocate>(size, HType::JSObject(),
-                                             NOT_TENURED, JS_FUNCTION_TYPE);
+  IfBuilder optimize_now(this);
+  HInstruction* compile_hint = Add<HLoadNamedField>(
+      shared_info, static_cast<HValue*>(NULL), HObjectAccess::ForCompileHint());
+  HValue* hint_mask = Add<HConstant>(
+      static_cast<int32_t>(1 << SharedFunctionInfo::kOptimizeNextClosure));
+  HInstruction* optimize =
+      AddUncasted<HBitwise>(Token::BIT_AND, compile_hint, hint_mask);
+  optimize_now.If<HCompareNumericAndBranch>(optimize, hint_mask, Token::EQ);
+  optimize_now.Then();
+  {
+    Add<HPushArguments>(context(), shared_info, graph()->GetConstantFalse());
+    Push(Add<HCallRuntime>(isolate()->factory()->empty_string(),
+                           Runtime::FunctionForId(Runtime::kNewClosure), 3));
+  }
+  optimize_now.Else();
+  {
+    // Create a new closure from the given function info in new space
+    HValue* size = Add<HConstant>(JSFunction::kSize);
+    HInstruction* js_function =
+        Add<HAllocate>(size, HType::JSObject(), NOT_TENURED, JS_FUNCTION_TYPE);

-  int map_index = Context::FunctionMapIndex(casted_stub()->strict_mode(),
-                                            casted_stub()->kind());
+    int map_index = Context::FunctionMapIndex(casted_stub()->strict_mode(),
+                                              casted_stub()->kind());

-  // Compute the function map in the current native context and set that
-  // as the map of the allocated object.
-  HInstruction* native_context = BuildGetNativeContext();
-  HInstruction* map_slot_value = Add<HLoadNamedField>(
-      native_context, static_cast<HValue*>(NULL),
-      HObjectAccess::ForContextSlot(map_index));
-  Add<HStoreNamedField>(js_function, HObjectAccess::ForMap(), map_slot_value);
+    // Compute the function map in the current native context and set that
+    // as the map of the allocated object.
+    HInstruction* native_context = BuildGetNativeContext();
+    HInstruction* map_slot_value =
+        Add<HLoadNamedField>(native_context, static_cast<HValue*>(NULL),
+                             HObjectAccess::ForContextSlot(map_index));
+    Add<HStoreNamedField>(js_function, HObjectAccess::ForMap(), map_slot_value);

-  // Initialize the rest of the function.
-  Add<HStoreNamedField>(js_function, HObjectAccess::ForPropertiesPointer(),
-                        empty_fixed_array);
-  Add<HStoreNamedField>(js_function, HObjectAccess::ForElementsPointer(),
-                        empty_fixed_array);
-  Add<HStoreNamedField>(js_function, HObjectAccess::ForLiteralsPointer(),
-                        empty_fixed_array);
-  Add<HStoreNamedField>(js_function, HObjectAccess::ForPrototypeOrInitialMap(),
-                        graph()->GetConstantHole());
-  Add<HStoreNamedField>(js_function,
-                        HObjectAccess::ForSharedFunctionInfoPointer(),
-                        shared_info);
-  Add<HStoreNamedField>(js_function, HObjectAccess::ForFunctionContextPointer(),
-                        context());
+    // Initialize the rest of the function.
+    Add<HStoreNamedField>(js_function, HObjectAccess::ForPropertiesPointer(),
+                          empty_fixed_array);
+    Add<HStoreNamedField>(js_function, HObjectAccess::ForElementsPointer(),
+                          empty_fixed_array);
+    Add<HStoreNamedField>(js_function, HObjectAccess::ForLiteralsPointer(),
+                          empty_fixed_array);
+    Add<HStoreNamedField>(js_function,
+                          HObjectAccess::ForPrototypeOrInitialMap(),
+                          graph()->GetConstantHole());
+    Add<HStoreNamedField>(js_function,
+                          HObjectAccess::ForSharedFunctionInfoPointer(),
+                          shared_info);
+    Add<HStoreNamedField>(
+        js_function, HObjectAccess::ForFunctionContextPointer(), context());

-  // Initialize the code pointer in the function to be the one
-  // found in the shared function info object.
-  // But first check if there is an optimized version for our context.
-  if (FLAG_cache_optimized_code) {
-    BuildInstallFromOptimizedCodeMap(js_function, shared_info, native_context);
-  } else {
-    BuildInstallCode(js_function, shared_info);
+    // Initialize the code pointer in the function to be the one
+    // found in the shared function info object.
+    // But first check if there is an optimized version for our context.
+    if (FLAG_cache_optimized_code) {
+      BuildInstallFromOptimizedCodeMap(js_function, shared_info,
+                                       native_context);
+    } else {
+      BuildInstallCode(js_function, shared_info);
+    }
+    Push(js_function);
   }
-
-  return js_function;
+  optimize_now.End();
+  return Pop();
 }
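
Note: the hunk above makes the FastNewClosure stub test the new kOptimizeNextClosure bit of SharedFunctionInfo::compiler_hints before taking the fast inline-allocation path; when the bit is set it calls Runtime::kNewClosure instead, so the freshly created closure can be optimized right away. A minimal standalone C++ sketch of that branch follows; every type and helper in it is a hypothetical stand-in, only the bit test mirrors the patch.

// Hypothetical sketch only; not V8 code. Models the stub's new branch.
#include <cstdint>

struct SharedInfoSketch {
  uint32_t compiler_hints;                    // packed boolean hints
  static const int kOptimizeNextClosure = 2;  // bit index, illustrative
};

struct ClosureSketch {
  const SharedInfoSketch* shared;
  bool queued_for_optimization;
};

// Slow-path stand-in for Runtime::kNewClosure: may queue optimization.
ClosureSketch NewClosureViaRuntime(const SharedInfoSketch* info) {
  return ClosureSketch{info, true};
}

// Fast-path stand-in: plain new-space allocation, no optimization.
ClosureSketch NewClosureFast(const SharedInfoSketch* info) {
  return ClosureSketch{info, false};
}

ClosureSketch NewClosure(const SharedInfoSketch* info) {
  const uint32_t mask = 1u << SharedInfoSketch::kOptimizeNextClosure;
  if ((info->compiler_hints & mask) == mask) {
    // Hint set by the runtime profiler: take the runtime path.
    return NewClosureViaRuntime(info);
  }
  return NewClosureFast(info);  // unchanged fast allocation path
}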


=======================================
--- /branches/bleeding_edge/src/compiler.cc     Tue Nov  4 11:40:21 2014 UTC
+++ /branches/bleeding_edge/src/compiler.cc     Wed Nov  5 11:12:27 2014 UTC
@@ -1361,6 +1361,7 @@
   PostponeInterruptsScope postpone(isolate);

   Handle<SharedFunctionInfo> shared = info->shared_info();
+  shared->set_optimize_next_closure(false);
   if (shared->code()->kind() != Code::FUNCTION ||
       ScopeInfo::Empty(isolate) == shared->scope_info()) {
     // The function was never compiled. Compile it unoptimized first.
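
The one added line above clears the hint as soon as optimized compilation of the shared function actually starts, so the bit behaves as a one-shot trigger: the runtime profiler sets it (runtime-profiler.cc hunk below), new closures consume it, and the compiler resets it. A toy lifecycle sketch, with purely illustrative names:

#include <cassert>
#include <cstdint>

// Illustrative stand-in for the packed compiler_hints bit; not V8 code.
struct OneShotHint {
  uint32_t compiler_hints = 0;
  static const int kOptimizeNextClosure = 2;  // bit position, illustrative
  bool get() const { return (compiler_hints >> kOptimizeNextClosure) & 1u; }
  void set(bool v) {
    const uint32_t mask = 1u << kOptimizeNextClosure;
    compiler_hints = v ? (compiler_hints | mask) : (compiler_hints & ~mask);
  }
};

int main() {
  OneShotHint shared;
  shared.set(true);       // runtime profiler: the function got hot
  assert(shared.get());   // closures created now take the optimizing path
  shared.set(false);      // compiler: cleared when optimized compilation starts
  assert(!shared.get());
  return 0;
}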
=======================================
--- /branches/bleeding_edge/src/factory.cc      Tue Nov  4 11:40:21 2014 UTC
+++ /branches/bleeding_edge/src/factory.cc      Wed Nov  5 11:12:27 2014 UTC
@@ -1356,6 +1356,14 @@

   return prototype;
 }
+
+
+static bool ShouldOptimizeNewClosure(Isolate* isolate,
+                                     Handle<SharedFunctionInfo> info) {
+  return isolate->use_crankshaft() && !info->is_toplevel() &&
+         info->is_compiled() && info->allows_lazy_compilation() &&
+         !info->optimization_disabled() && !isolate->DebuggerHasBreakPoints();
+}


 Handle<JSFunction> Factory::NewFunctionFromSharedFunctionInfo(
@@ -1395,14 +1403,11 @@
     return result;
   }

-  if (isolate()->use_crankshaft() &&
-      FLAG_always_opt &&
-      result->is_compiled() &&
-      !info->is_toplevel() &&
-      info->allows_lazy_compilation() &&
-      !info->optimization_disabled() &&
-      !isolate()->DebuggerHasBreakPoints()) {
+  if (FLAG_always_opt && ShouldOptimizeNewClosure(isolate(), info)) {
     result->MarkForOptimization();
+  } else if (info->optimize_next_closure() &&
+             ShouldOptimizeNewClosure(isolate(), info)) {
+    result->AttemptConcurrentOptimization();
   }
   return result;
 }
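
For reference, a simplified restatement of the decision Factory::NewFunctionFromSharedFunctionInfo now makes for each new closure. The flag plumbing below is a hypothetical stand-in for the Isolate and SharedFunctionInfo state; only the predicate and the two branches mirror the hunk above.

// Hypothetical sketch only; not V8 code.
struct InfoSketch {
  bool is_toplevel;
  bool is_compiled;
  bool allows_lazy_compilation;
  bool optimization_disabled;
  bool optimize_next_closure;  // the new compiler hint
};

bool ShouldOptimizeNewClosure(bool use_crankshaft, bool debugger_break_points,
                              const InfoSketch& info) {
  return use_crankshaft && !info.is_toplevel && info.is_compiled &&
         info.allows_lazy_compilation && !info.optimization_disabled &&
         !debugger_break_points;
}

enum class Action { kNothing, kMarkForOptimization, kAttemptConcurrent };

Action DecideForNewClosure(bool always_opt, bool use_crankshaft,
                           bool debugger_break_points, const InfoSketch& info) {
  const bool ok = ShouldOptimizeNewClosure(use_crankshaft,
                                           debugger_break_points, info);
  if (always_opt && ok) return Action::kMarkForOptimization;
  if (info.optimize_next_closure && ok) return Action::kAttemptConcurrent;
  return Action::kNothing;
}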
=======================================
--- /branches/bleeding_edge/src/hydrogen-instructions.h Tue Nov  4 11:40:21 2014 UTC
+++ /branches/bleeding_edge/src/hydrogen-instructions.h Wed Nov  5 11:12:27 2014 UTC
@@ -6120,6 +6120,11 @@
     return HObjectAccess(kInobject,
                          SharedFunctionInfo::kOptimizedCodeMapOffset);
   }
+
+  static HObjectAccess ForCompileHint() {
+    return HObjectAccess(kInobject, SharedFunctionInfo::kCompilerHintsOffset,
+                         Representation::Smi());
+  }

   static HObjectAccess ForFunctionContextPointer() {
     return HObjectAccess(kInobject, JSFunction::kContextOffset);
=======================================
--- /branches/bleeding_edge/src/objects-inl.h   Wed Nov  5 09:26:22 2014 UTC
+++ /branches/bleeding_edge/src/objects-inl.h   Wed Nov  5 11:12:27 2014 UTC
@@ -5516,9 +5516,9 @@
 BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_toplevel,
                kIsTopLevelBit)

-BOOL_ACCESSORS(SharedFunctionInfo,
-               compiler_hints,
-               allows_lazy_compilation,
+BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, optimize_next_closure,
+               kOptimizeNextClosure)
+BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, allows_lazy_compilation,
                kAllowLazyCompilation)
 BOOL_ACCESSORS(SharedFunctionInfo,
                compiler_hints,
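
The added BOOL_ACCESSORS line maps optimize_next_closure onto a single bit of the packed compiler_hints field. An illustrative, macro-free expansion of what the generated accessors amount to (not the actual macro output; the bit index matches the enum position added in objects.h below):

#include <cstdint>

// Illustrative only: what the generated getter/setter amounts to.
struct SharedFunctionInfoSketch {
  uint32_t compiler_hints = 0;
  static const int kOptimizeNextClosure = 2;  // position in CompilerHints enum

  bool optimize_next_closure() const {
    return (compiler_hints >> kOptimizeNextClosure) & 1u;
  }
  void set_optimize_next_closure(bool value) {
    const uint32_t mask = 1u << kOptimizeNextClosure;
    compiler_hints = value ? (compiler_hints | mask) : (compiler_hints & ~mask);
  }
};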
=======================================
--- /branches/bleeding_edge/src/objects.cc      Wed Nov  5 09:26:22 2014 UTC
+++ /branches/bleeding_edge/src/objects.cc      Wed Nov  5 11:12:27 2014 UTC
@@ -9204,12 +9204,27 @@
 }


-void JSFunction::MarkForConcurrentOptimization() {
-  DCHECK(is_compiled() || GetIsolate()->DebuggerHasBreakPoints());
+void JSFunction::AttemptConcurrentOptimization() {
+  Isolate* isolate = GetIsolate();
+  if (!isolate->concurrent_recompilation_enabled() ||
+      isolate->bootstrapper()->IsActive()) {
+    MarkForOptimization();
+    return;
+  }
+  if (isolate->concurrent_osr_enabled() &&
+      isolate->optimizing_compiler_thread()->IsQueuedForOSR(this)) {
+    // Do not attempt regular recompilation if we already queued this for OSR.
+    // TODO(yangguo): This is necessary so that we don't install optimized
+    // code on a function that is already optimized, since OSR and regular
+    // recompilation race.  This goes away as soon as OSR becomes one-shot.
+    return;
+  }
+  DCHECK(!IsInOptimizationQueue());
+  DCHECK(is_compiled() || isolate->DebuggerHasBreakPoints());
   DCHECK(!IsOptimized());
   DCHECK(shared()->allows_lazy_compilation() || code()->optimizable());
   DCHECK(!shared()->is_generator());
-  DCHECK(GetIsolate()->concurrent_recompilation_enabled());
+  DCHECK(isolate->concurrent_recompilation_enabled());
   if (FLAG_trace_concurrent_recompilation) {
     PrintF("  ** Marking ");
     ShortPrint();
=======================================
--- /branches/bleeding_edge/src/objects.h       Wed Nov  5 09:26:22 2014 UTC
+++ /branches/bleeding_edge/src/objects.h       Wed Nov  5 11:12:27 2014 UTC
@@ -6787,6 +6787,8 @@
   inline int ic_age();
   inline void set_ic_age(int age);

+  DECL_BOOLEAN_ACCESSORS(optimize_next_closure)
+
   // Indicates if this function can be lazy compiled.
   // This is used to determine if we can safely flush code from a function
   // when doing GC if we expect that the function will no longer be used.
@@ -7077,6 +7079,7 @@
   enum CompilerHints {
     kAllowLazyCompilation,
     kAllowLazyCompilationWithoutContext,
+    kOptimizeNextClosure,
     kOptimizationDisabled,
     kStrictModeFunction,
     kUsesArguments,
@@ -7302,7 +7305,7 @@
   // Mark this function for lazy recompilation. The function will be
   // recompiled the next time it is executed.
   void MarkForOptimization();
-  void MarkForConcurrentOptimization();
+  void AttemptConcurrentOptimization();
   void MarkInOptimizationQueue();

   // Tells whether or not the function is already marked for lazy
=======================================
--- /branches/bleeding_edge/src/runtime/runtime-test.cc Thu Oct 23 05:57:01 2014 UTC
+++ /branches/bleeding_edge/src/runtime/runtime-test.cc Wed Nov  5 11:12:27 2014 UTC
@@ -75,7 +75,7 @@
           *function, Code::kMaxLoopNestingMarker);
     } else if (type->IsOneByteEqualTo(STATIC_CHAR_VECTOR("concurrent")) &&
                isolate->concurrent_recompilation_enabled()) {
-      function->MarkForConcurrentOptimization();
+      function->AttemptConcurrentOptimization();
     }
   }

=======================================
--- /branches/bleeding_edge/src/runtime-profiler.cc Tue Nov  4 11:40:21 2014 UTC
+++ /branches/bleeding_edge/src/runtime-profiler.cc Wed Nov  5 11:12:27 2014 UTC
@@ -106,23 +106,8 @@
     PrintF("]\n");
   }

-
-  if (isolate_->concurrent_recompilation_enabled() &&
-      !isolate_->bootstrapper()->IsActive()) {
-    if (isolate_->concurrent_osr_enabled() &&
-        isolate_->optimizing_compiler_thread()->IsQueuedForOSR(function)) {
-      // Do not attempt regular recompilation if we already queued this for OSR.
-      // TODO(yangguo): This is necessary so that we don't install optimized
-      // code on a function that is already optimized, since OSR and regular
-      // recompilation race.  This goes away as soon as OSR becomes one-shot.
-      return;
-    }
-    DCHECK(!function->IsInOptimizationQueue());
-    function->MarkForConcurrentOptimization();
-  } else {
-    // The next call to the function will trigger optimization.
-    function->MarkForOptimization();
-  }
+  function->shared()->set_optimize_next_closure(true);
+  function->AttemptConcurrentOptimization();
 }
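
The logic deleted here now lives in JSFunction::AttemptConcurrentOptimization (objects.cc hunk above); the profiler only sets the shared-info hint and delegates. A simplified, hypothetical restatement of the decision that method makes; the parameters are stand-ins for the real Isolate and compiler-thread queries.

// Hypothetical sketch only; not V8 code.
enum class Outcome {
  kMarkForOptimization,      // synchronous: optimize on the next call
  kSkipAlreadyQueuedForOsr,  // avoid racing an in-flight OSR compile
  kMarkConcurrent            // hand off to the concurrent recompiler
};

Outcome AttemptConcurrentOptimizationSketch(bool concurrent_recompilation,
                                            bool bootstrapper_active,
                                            bool concurrent_osr,
                                            bool queued_for_osr) {
  if (!concurrent_recompilation || bootstrapper_active) {
    return Outcome::kMarkForOptimization;
  }
  if (concurrent_osr && queued_for_osr) {
    return Outcome::kSkipAlreadyQueuedForOsr;
  }
  return Outcome::kMarkConcurrent;
}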

