Revision: 16681
Author: [email protected]
Date: Thu Sep 12 11:30:56 2013 UTC
Log: Simplify installing concurrently recompiled code.
Instead of overwriting the code entry of the function, we trigger
an interrupt to install the code on the main thread.
[email protected]
BUG=
Review URL: https://codereview.chromium.org/23542029
http://code.google.com/p/v8/source/detail?r=16681
Modified:
/branches/bleeding_edge/src/arm/builtins-arm.cc
/branches/bleeding_edge/src/builtins.h
/branches/bleeding_edge/src/compiler.cc
/branches/bleeding_edge/src/debug.cc
/branches/bleeding_edge/src/execution.cc
/branches/bleeding_edge/src/execution.h
/branches/bleeding_edge/src/ia32/builtins-ia32.cc
/branches/bleeding_edge/src/mips/builtins-mips.cc
/branches/bleeding_edge/src/objects-inl.h
/branches/bleeding_edge/src/objects.cc
/branches/bleeding_edge/src/objects.h
/branches/bleeding_edge/src/optimizing-compiler-thread.cc
/branches/bleeding_edge/src/optimizing-compiler-thread.h
/branches/bleeding_edge/src/runtime-profiler.cc
/branches/bleeding_edge/src/runtime.cc
/branches/bleeding_edge/src/runtime.h
/branches/bleeding_edge/src/x64/builtins-x64.cc
/branches/bleeding_edge/test/mjsunit/fuzz-natives-part1.js
/branches/bleeding_edge/test/mjsunit/fuzz-natives-part2.js
/branches/bleeding_edge/test/mjsunit/fuzz-natives-part3.js
/branches/bleeding_edge/test/mjsunit/fuzz-natives-part4.js
=======================================
--- /branches/bleeding_edge/src/arm/builtins-arm.cc Wed Sep 11 12:39:00
2013 UTC
+++ /branches/bleeding_edge/src/arm/builtins-arm.cc Thu Sep 12 11:30:56
2013 UTC
@@ -289,70 +289,57 @@
}
__ Ret();
}
+
+
+static void CallRuntimePassFunction(MacroAssembler* masm,
+ Runtime::FunctionId function_id) {
+ FrameScope scope(masm, StackFrame::INTERNAL);
+ // Push a copy of the function onto the stack.
+ __ push(r1);
+ // Push call kind information.
+ __ push(r5);
+ // Function is also the parameter to the runtime call.
+ __ push(r1);
+
+ __ CallRuntime(function_id, 1);
+ // Restore call kind information.
+ __ pop(r5);
+ // Restore receiver.
+ __ pop(r1);
+}
static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
__ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
__ ldr(r2, FieldMemOperand(r2, SharedFunctionInfo::kCodeOffset));
__ add(r2, r2, Operand(Code::kHeaderSize - kHeapObjectTag));
- __ mov(pc, r2);
+ __ Jump(r2);
}
void Builtins::Generate_InRecompileQueue(MacroAssembler* masm) {
- GenerateTailCallToSharedCode(masm);
-}
+ // Checking whether the queued function is ready for install is optional,
+ // since we come across interrupts and stack checks elsewhere. However,
+ // not checking may delay installing ready functions, and always checking
+ // would be quite expensive. A good compromise is to first check against
+ // stack limit as a cue for an interrupt signal.
+ Label ok;
+ __ LoadRoot(ip, Heap::kStackLimitRootIndex);
+ __ cmp(sp, Operand(ip));
+ __ b(hs, &ok);
-
-void Builtins::Generate_InstallRecompiledCode(MacroAssembler* masm) {
- // Enter an internal frame.
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
-
- // Preserve the function.
- __ push(r1);
- // Push call kind information.
- __ push(r5);
-
- // Push the function on the stack as the argument to the runtime function.
- __ push(r1);
- __ CallRuntime(Runtime::kInstallRecompiledCode, 1);
- // Calculate the entry point.
- __ add(r2, r0, Operand(Code::kHeaderSize - kHeapObjectTag));
-
- // Restore call kind information.
- __ pop(r5);
- // Restore saved function.
- __ pop(r1);
-
- // Tear down internal frame.
- }
+ CallRuntimePassFunction(masm, Runtime::kTryInstallRecompiledCode);
+ // Tail call to returned code.
+ __ add(r0, r0, Operand(Code::kHeaderSize - kHeapObjectTag));
+ __ Jump(r0);
- // Do a tail-call of the compiled function.
- __ Jump(r2);
+ __ bind(&ok);
+ GenerateTailCallToSharedCode(masm);
}
void Builtins::Generate_ConcurrentRecompile(MacroAssembler* masm) {
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
-
- // Push a copy of the function onto the stack.
- __ push(r1);
- // Push call kind information.
- __ push(r5);
-
- __ push(r1); // Function is also the parameter to the runtime call.
- __ CallRuntime(Runtime::kConcurrentRecompile, 1);
-
- // Restore call kind information.
- __ pop(r5);
- // Restore receiver.
- __ pop(r1);
-
- // Tear down internal frame.
- }
-
+ CallRuntimePassFunction(masm, Runtime::kConcurrentRecompile);
GenerateTailCallToSharedCode(masm);
}
@@ -795,59 +782,17 @@
void Builtins::Generate_LazyCompile(MacroAssembler* masm) {
- // Enter an internal frame.
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
-
- // Preserve the function.
- __ push(r1);
- // Push call kind information.
- __ push(r5);
-
- // Push the function on the stack as the argument to the runtime function.
- __ push(r1);
- __ CallRuntime(Runtime::kLazyCompile, 1);
- // Calculate the entry point.
- __ add(r2, r0, Operand(Code::kHeaderSize - kHeapObjectTag));
-
- // Restore call kind information.
- __ pop(r5);
- // Restore saved function.
- __ pop(r1);
-
- // Tear down internal frame.
- }
-
+ CallRuntimePassFunction(masm, Runtime::kLazyCompile);
// Do a tail-call of the compiled function.
+ __ add(r2, r0, Operand(Code::kHeaderSize - kHeapObjectTag));
__ Jump(r2);
}
void Builtins::Generate_LazyRecompile(MacroAssembler* masm) {
- // Enter an internal frame.
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
-
- // Preserve the function.
- __ push(r1);
- // Push call kind information.
- __ push(r5);
-
- // Push the function on the stack as the argument to the runtime function.
- __ push(r1);
- __ CallRuntime(Runtime::kLazyRecompile, 1);
- // Calculate the entry point.
- __ add(r2, r0, Operand(Code::kHeaderSize - kHeapObjectTag));
-
- // Restore call kind information.
- __ pop(r5);
- // Restore saved function.
- __ pop(r1);
-
- // Tear down internal frame.
- }
-
+ CallRuntimePassFunction(masm, Runtime::kLazyRecompile);
// Do a tail-call of the compiled function.
+ __ add(r2, r0, Operand(Code::kHeaderSize - kHeapObjectTag));
__ Jump(r2);
}
=======================================
--- /branches/bleeding_edge/src/builtins.h Tue Sep 3 13:33:54 2013 UTC
+++ /branches/bleeding_edge/src/builtins.h Thu Sep 12 11:30:56 2013 UTC
@@ -87,8 +87,6 @@
Code::kNoExtraICState) \
V(InRecompileQueue, BUILTIN, UNINITIALIZED, \
Code::kNoExtraICState) \
- V(InstallRecompiledCode, BUILTIN, UNINITIALIZED, \
- Code::kNoExtraICState) \
V(JSConstructStubCountdown, BUILTIN, UNINITIALIZED, \
Code::kNoExtraICState) \
V(JSConstructStubGeneric, BUILTIN, UNINITIALIZED, \
@@ -382,7 +380,6 @@
CFunctionId id,
BuiltinExtraArguments extra_args);
static void Generate_InRecompileQueue(MacroAssembler* masm);
- static void Generate_InstallRecompiledCode(MacroAssembler* masm);
static void Generate_ConcurrentRecompile(MacroAssembler* masm);
static void Generate_JSConstructStubCountdown(MacroAssembler* masm);
static void Generate_JSConstructStubGeneric(MacroAssembler* masm);
=======================================
--- /branches/bleeding_edge/src/compiler.cc Wed Sep 11 12:39:00 2013 UTC
+++ /branches/bleeding_edge/src/compiler.cc Thu Sep 12 11:30:56 2013 UTC
@@ -1067,7 +1067,7 @@
info->closure()->PrintName();
PrintF(" as it has been disabled.\n");
}
- ASSERT(!info->closure()->IsMarkedForInstallingRecompiledCode());
+ ASSERT(!info->closure()->IsInRecompileQueue());
return Handle<Code>::null();
}
@@ -1114,7 +1114,7 @@
// Optimized code is finally replacing unoptimized code. Reset the
latter's
// profiler ticks to prevent too soon re-opt after a deopt.
info->shared_info()->code()->set_profiler_ticks(0);
- ASSERT(!info->closure()->IsMarkedForInstallingRecompiledCode());
+ ASSERT(!info->closure()->IsInRecompileQueue());
return (status == OptimizingCompiler::SUCCEEDED) ? info->code()
: Handle<Code>::null();
}
=======================================
--- /branches/bleeding_edge/src/debug.cc Tue Sep 10 14:26:07 2013 UTC
+++ /branches/bleeding_edge/src/debug.cc Thu Sep 12 11:30:56 2013 UTC
@@ -2110,8 +2110,7 @@
function->set_code(*lazy_compile);
function->shared()->set_code(*lazy_compile);
} else if (kind == Code::BUILTIN &&
- (function->IsMarkedForInstallingRecompiledCode() ||
- function->IsInRecompileQueue() ||
+ (function->IsInRecompileQueue() ||
function->IsMarkedForLazyRecompilation() ||
function->IsMarkedForConcurrentRecompilation())) {
// Abort in-flight compilation.
=======================================
--- /branches/bleeding_edge/src/execution.cc Mon Sep 9 07:52:52 2013 UTC
+++ /branches/bleeding_edge/src/execution.cc Thu Sep 12 11:30:56 2013 UTC
@@ -457,6 +457,22 @@
isolate_->heap()->SetStackLimits();
}
}
+
+
+bool StackGuard::IsInstallCodeRequest() {
+ ExecutionAccess access(isolate_);
+ return (thread_local_.interrupt_flags_ & INSTALL_CODE) != 0;
+}
+
+
+void StackGuard::RequestInstallCode() {
+ ExecutionAccess access(isolate_);
+ thread_local_.interrupt_flags_ |= INSTALL_CODE;
+ if (thread_local_.postpone_interrupts_nesting_ == 0) {
+ thread_local_.jslimit_ = thread_local_.climit_ = kInterruptLimit;
+ isolate_->heap()->SetStackLimits();
+ }
+}
bool StackGuard::IsFullDeopt() {
@@ -916,7 +932,6 @@
isolate->counters()->stack_interrupts()->Increment();
isolate->counters()->runtime_profiler_ticks()->Increment();
- isolate->runtime_profiler()->OptimizeNow();
#ifdef ENABLE_DEBUGGER_SUPPORT
if (stack_guard->IsDebugBreak() || stack_guard->IsDebugCommand()) {
DebugBreakHelper(isolate);
@@ -935,6 +950,12 @@
stack_guard->Continue(FULL_DEOPT);
Deoptimizer::DeoptimizeAll(isolate);
}
+ if (stack_guard->IsInstallCodeRequest()) {
+ ASSERT(FLAG_concurrent_recompilation);
+ stack_guard->Continue(INSTALL_CODE);
+ isolate->optimizing_compiler_thread()->InstallOptimizedFunctions();
+ }
+ isolate->runtime_profiler()->OptimizeNow();
return isolate->heap()->undefined_value();
}
=======================================
--- /branches/bleeding_edge/src/execution.h Thu Sep 5 08:48:34 2013 UTC
+++ /branches/bleeding_edge/src/execution.h Thu Sep 12 11:30:56 2013 UTC
@@ -42,7 +42,8 @@
PREEMPT = 1 << 3,
TERMINATE = 1 << 4,
GC_REQUEST = 1 << 5,
- FULL_DEOPT = 1 << 6
+ FULL_DEOPT = 1 << 6,
+ INSTALL_CODE = 1 << 7
};
@@ -213,6 +214,8 @@
#endif
bool IsGCRequest();
void RequestGC();
+ bool IsInstallCodeRequest();
+ void RequestInstallCode();
bool IsFullDeopt();
void FullDeopt();
void Continue(InterruptFlag after_what);
=======================================
--- /branches/bleeding_edge/src/ia32/builtins-ia32.cc Wed Sep 11 12:39:00
2013 UTC
+++ /branches/bleeding_edge/src/ia32/builtins-ia32.cc Thu Sep 12 11:30:56
2013 UTC
@@ -72,6 +72,24 @@
__ add(eax, Immediate(num_extra_args + 1));
__ JumpToExternalReference(ExternalReference(id, masm->isolate()));
}
+
+
+static void CallRuntimePassFunction(MacroAssembler* masm,
+ Runtime::FunctionId function_id) {
+ FrameScope scope(masm, StackFrame::INTERNAL);
+ // Push a copy of the function.
+ __ push(edi);
+ // Push call kind information.
+ __ push(ecx);
+ // Function is also the parameter to the runtime call.
+ __ push(edi);
+
+ __ CallRuntime(function_id, 1);
+ // Restore call kind information.
+ __ pop(ecx);
+ // Restore receiver.
+ __ pop(edi);
+}
static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
@@ -83,56 +101,29 @@
void Builtins::Generate_InRecompileQueue(MacroAssembler* masm) {
- GenerateTailCallToSharedCode(masm);
-}
+ // Checking whether the queued function is ready for install is optional,
+ // since we come across interrupts and stack checks elsewhere. However,
+ // not checking may delay installing ready functions, and always checking
+ // would be quite expensive. A good compromise is to first check against
+ // stack limit as a cue for an interrupt signal.
+ Label ok;
+ ExternalReference stack_limit =
+ ExternalReference::address_of_stack_limit(masm->isolate());
+ __ cmp(esp, Operand::StaticVariable(stack_limit));
+ __ j(above_equal, &ok, Label::kNear);
-
-void Builtins::Generate_InstallRecompiledCode(MacroAssembler* masm) {
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
-
- // Push a copy of the function.
- __ push(edi);
- // Push call kind information.
- __ push(ecx);
-
- __ push(edi); // Function is also the parameter to the runtime call.
- __ CallRuntime(Runtime::kInstallRecompiledCode, 1);
-
- // Restore call kind information.
- __ pop(ecx);
- // Restore receiver.
- __ pop(edi);
-
- // Tear down internal frame.
- }
-
- // Do a tail-call of the compiled function.
+ CallRuntimePassFunction(masm, Runtime::kTryInstallRecompiledCode);
+ // Tail call to returned code.
__ lea(eax, FieldOperand(eax, Code::kHeaderSize));
__ jmp(eax);
+
+ __ bind(&ok);
+ GenerateTailCallToSharedCode(masm);
}
void Builtins::Generate_ConcurrentRecompile(MacroAssembler* masm) {
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
-
- // Push a copy of the function onto the stack.
- __ push(edi);
- // Push call kind information.
- __ push(ecx);
-
- __ push(edi); // Function is also the parameter to the runtime call.
- __ CallRuntime(Runtime::kConcurrentRecompile, 1);
-
- // Restore call kind information.
- __ pop(ecx);
- // Restore receiver.
- __ pop(edi);
-
- // Tear down internal frame.
- }
-
+ CallRuntimePassFunction(masm, Runtime::kConcurrentRecompile);
GenerateTailCallToSharedCode(masm);
}
@@ -519,25 +510,7 @@
void Builtins::Generate_LazyCompile(MacroAssembler* masm) {
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
-
- // Push a copy of the function.
- __ push(edi);
- // Push call kind information.
- __ push(ecx);
-
- __ push(edi); // Function is also the parameter to the runtime call.
- __ CallRuntime(Runtime::kLazyCompile, 1);
-
- // Restore call kind information.
- __ pop(ecx);
- // Restore receiver.
- __ pop(edi);
-
- // Tear down internal frame.
- }
-
+ CallRuntimePassFunction(masm, Runtime::kLazyCompile);
// Do a tail-call of the compiled function.
__ lea(eax, FieldOperand(eax, Code::kHeaderSize));
__ jmp(eax);
@@ -545,25 +518,7 @@
void Builtins::Generate_LazyRecompile(MacroAssembler* masm) {
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
-
- // Push a copy of the function onto the stack.
- __ push(edi);
- // Push call kind information.
- __ push(ecx);
-
- __ push(edi); // Function is also the parameter to the runtime call.
- __ CallRuntime(Runtime::kLazyRecompile, 1);
-
- // Restore call kind information.
- __ pop(ecx);
- // Restore receiver.
- __ pop(edi);
-
- // Tear down internal frame.
- }
-
+ CallRuntimePassFunction(masm, Runtime::kLazyRecompile);
// Do a tail-call of the compiled function.
__ lea(eax, FieldOperand(eax, Code::kHeaderSize));
__ jmp(eax);
=======================================
--- /branches/bleeding_edge/src/mips/builtins-mips.cc Wed Sep 11 12:39:00
2013 UTC
+++ /branches/bleeding_edge/src/mips/builtins-mips.cc Thu Sep 12 11:30:56
2013 UTC
@@ -297,6 +297,24 @@
}
__ Ret();
}
+
+
+static void CallRuntimePassFunction(MacroAssembler* masm,
+ Runtime::FunctionId function_id) {
+ FrameScope scope(masm, StackFrame::INTERNAL);
+ // Push a copy of the function onto the stack.
+ __ push(a1);
+ // Push call kind information.
+ __ push(t1);
+ // Function is also the parameter to the runtime call.
+ __ push(a1);
+
+ __ CallRuntime(function_id, 1);
+ // Restore call kind information.
+ __ pop(t1);
+ // Restore receiver.
+ __ pop(a1);
+}
static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
@@ -308,59 +326,27 @@
void Builtins::Generate_InRecompileQueue(MacroAssembler* masm) {
- GenerateTailCallToSharedCode(masm);
-}
-
-
-void Builtins::Generate_InstallRecompiledCode(MacroAssembler* masm) {
- // Enter an internal frame.
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
-
- // Preserve the function.
- __ push(a1);
- // Push call kind information.
- __ push(t1);
-
- // Push the function on the stack as the argument to the runtime function.
- __ push(a1);
- __ CallRuntime(Runtime::kInstallRecompiledCode, 1);
- // Calculate the entry point.
- __ Addu(t9, v0, Operand(Code::kHeaderSize - kHeapObjectTag));
-
- // Restore call kind information.
- __ pop(t1);
- // Restore saved function.
- __ pop(a1);
+ // Checking whether the queued function is ready for install is optional,
+ // since we come across interrupts and stack checks elsewhere. However,
+ // not checking may delay installing ready functions, and always checking
+ // would be quite expensive. A good compromise is to first check against
+ // stack limit as a cue for an interrupt signal.
+ Label ok;
+ __ LoadRoot(t0, Heap::kStackLimitRootIndex);
+ __ Branch(&ok, hs, sp, Operand(t0));
- // Tear down temporary frame.
- }
+ CallRuntimePassFunction(masm, Runtime::kTryInstallRecompiledCode);
+ // Tail call to returned code.
+ __ Addu(at, a2, Operand(Code::kHeaderSize - kHeapObjectTag));
+ __ Jump(at);
- // Do a tail-call of the compiled function.
- __ Jump(t9);
+ __ bind(&ok);
+ GenerateTailCallToSharedCode(masm);
}
void Builtins::Generate_ConcurrentRecompile(MacroAssembler* masm) {
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
-
- // Push a copy of the function onto the stack.
- __ push(a1);
- // Push call kind information.
- __ push(t1);
-
- __ push(a1); // Function is also the parameter to the runtime call.
- __ CallRuntime(Runtime::kConcurrentRecompile, 1);
-
- // Restore call kind information.
- __ pop(t1);
- // Restore receiver.
- __ pop(a1);
-
- // Tear down internal frame.
- }
-
+ CallRuntimePassFunction(masm, Runtime::kConcurrentRecompile);
GenerateTailCallToSharedCode(masm);
}
@@ -815,60 +801,17 @@
void Builtins::Generate_LazyCompile(MacroAssembler* masm) {
- // Enter an internal frame.
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
-
- // Preserve the function.
- __ push(a1);
- // Push call kind information.
- __ push(t1);
-
- // Push the function on the stack as the argument to the runtime function.
- __ push(a1);
- // Call the runtime function.
- __ CallRuntime(Runtime::kLazyCompile, 1);
- // Calculate the entry point.
- __ addiu(t9, v0, Code::kHeaderSize - kHeapObjectTag);
-
- // Restore call kind information.
- __ pop(t1);
- // Restore saved function.
- __ pop(a1);
-
- // Tear down temporary frame.
- }
-
+ CallRuntimePassFunction(masm, Runtime::kLazyCompile);
// Do a tail-call of the compiled function.
+ __ Addu(t9, v0, Operand(Code::kHeaderSize - kHeapObjectTag));
__ Jump(t9);
}
void Builtins::Generate_LazyRecompile(MacroAssembler* masm) {
- // Enter an internal frame.
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
-
- // Preserve the function.
- __ push(a1);
- // Push call kind information.
- __ push(t1);
-
- // Push the function on the stack as the argument to the runtime function.
- __ push(a1);
- __ CallRuntime(Runtime::kLazyRecompile, 1);
- // Calculate the entry point.
- __ Addu(t9, v0, Operand(Code::kHeaderSize - kHeapObjectTag));
-
- // Restore call kind information.
- __ pop(t1);
- // Restore saved function.
- __ pop(a1);
-
- // Tear down temporary frame.
- }
-
+ CallRuntimePassFunction(masm, Runtime::kLazyRecompile);
// Do a tail-call of the compiled function.
+ __ Addu(t9, v0, Operand(Code::kHeaderSize - kHeapObjectTag));
__ Jump(t9);
}
=======================================
--- /branches/bleeding_edge/src/objects-inl.h Wed Sep 11 07:14:41 2013 UTC
+++ /branches/bleeding_edge/src/objects-inl.h Thu Sep 12 11:30:56 2013 UTC
@@ -4968,12 +4968,6 @@
bool JSFunction::IsMarkedForLazyRecompilation() {
return code() ==
GetIsolate()->builtins()->builtin(Builtins::kLazyRecompile);
}
-
-
-bool JSFunction::IsMarkedForInstallingRecompiledCode() {
- return code() == GetIsolate()->builtins()->builtin(
- Builtins::kInstallRecompiledCode);
-}
bool JSFunction::IsMarkedForConcurrentRecompilation() {
=======================================
--- /branches/bleeding_edge/src/objects.cc Thu Sep 12 11:03:27 2013 UTC
+++ /branches/bleeding_edge/src/objects.cc Thu Sep 12 11:30:56 2013 UTC
@@ -9319,18 +9319,6 @@
GetIsolate()->builtins()->builtin(Builtins::kConcurrentRecompile));
// No write barrier required, since the builtin is part of the root set.
}
-
-
-void JSFunction::MarkForInstallingRecompiledCode() {
- // The debugger could have switched the builtin to lazy compile.
- // In that case, simply carry on. It will be dealt with later.
- ASSERT(!IsOptimized());
- ASSERT(shared()->allows_lazy_compilation() || code()->optimizable());
- ASSERT(FLAG_concurrent_recompilation);
- set_code_no_write_barrier(
- GetIsolate()->builtins()->builtin(Builtins::kInstallRecompiledCode));
- // No write barrier required, since the builtin is part of the root set.
-}
void JSFunction::MarkInRecompileQueue() {
=======================================
--- /branches/bleeding_edge/src/objects.h Thu Sep 12 11:03:27 2013 UTC
+++ /branches/bleeding_edge/src/objects.h Thu Sep 12 11:30:56 2013 UTC
@@ -6989,7 +6989,6 @@
// recompiled the next time it is executed.
void MarkForLazyRecompilation();
void MarkForConcurrentRecompilation();
- void MarkForInstallingRecompiledCode();
void MarkInRecompileQueue();
// Helpers to compile this function. Returns true on success, false on
@@ -7008,7 +7007,6 @@
// recompilation.
inline bool IsMarkedForLazyRecompilation();
inline bool IsMarkedForConcurrentRecompilation();
- inline bool IsMarkedForInstallingRecompiledCode();
// Tells whether or not the function is on the concurrent recompilation queue.
inline bool IsInRecompileQueue();
=======================================
--- /branches/bleeding_edge/src/optimizing-compiler-thread.cc Wed Sep 11
12:39:00 2013 UTC
+++ /branches/bleeding_edge/src/optimizing-compiler-thread.cc Thu Sep 12
11:30:56 2013 UTC
@@ -114,11 +114,8 @@
osr_candidates_.RemoveElement(optimizing_compiler);
ready_for_osr_.Add(optimizing_compiler);
} else {
- LockGuard<Mutex> mark_and_queue(&install_mutex_);
- Heap::RelocationLock relocation_lock(isolate_->heap());
- AllowHandleDereference ahd;
-
optimizing_compiler->info()->closure()->MarkForInstallingRecompiledCode();
output_queue_.Enqueue(optimizing_compiler);
+ isolate_->stack_guard()->RequestInstallCode();
}
}
@@ -201,10 +198,7 @@
HandleScope handle_scope(isolate_);
OptimizingCompiler* compiler;
while (true) {
- { // Memory barrier to ensure marked functions are queued.
- LockGuard<Mutex> marked_and_queued(&install_mutex_);
- if (!output_queue_.Dequeue(&compiler)) return;
- }
+ if (!output_queue_.Dequeue(&compiler)) return;
Compiler::InstallOptimizedCode(compiler);
}
=======================================
--- /branches/bleeding_edge/src/optimizing-compiler-thread.h Wed Sep 11
12:39:00 2013 UTC
+++ /branches/bleeding_edge/src/optimizing-compiler-thread.h Thu Sep 12
11:30:56 2013 UTC
@@ -120,7 +120,6 @@
// List of recompilation tasks ready for OSR.
List<OptimizingCompiler*> ready_for_osr_;
- Mutex install_mutex_;
volatile AtomicWord stop_thread_;
volatile Atomic32 queue_length_;
TimeDelta time_spent_compiling_;
=======================================
--- /branches/bleeding_edge/src/runtime-profiler.cc Wed Sep 11 12:39:00
2013 UTC
+++ /branches/bleeding_edge/src/runtime-profiler.cc Thu Sep 12 11:30:56
2013 UTC
@@ -149,7 +149,6 @@
// recompilation race. This goes away as soon as OSR becomes
one-shot.
return;
}
- ASSERT(!function->IsMarkedForInstallingRecompiledCode());
ASSERT(!function->IsInRecompileQueue());
function->MarkForConcurrentRecompilation();
} else {
@@ -227,12 +226,6 @@
if (isolate_->DebuggerHasBreakPoints()) return;
- if (FLAG_concurrent_recompilation) {
- // Take this as opportunity to process the optimizing compiler thread's
- // output queue so that it does not unnecessarily keep objects alive.
- isolate_->optimizing_compiler_thread()->InstallOptimizedFunctions();
- }
-
DisallowHeapAllocation no_gc;
// Run through the JavaScript frames and collect them. If we already
=======================================
--- /branches/bleeding_edge/src/runtime.cc Wed Sep 11 20:03:54 2013 UTC
+++ /branches/bleeding_edge/src/runtime.cc Thu Sep 12 11:30:56 2013 UTC
@@ -8344,16 +8344,6 @@
}
return isolate->heap()->undefined_value();
}
-
-
-RUNTIME_FUNCTION(MaybeObject*, Runtime_InstallRecompiledCode) {
- HandleScope handle_scope(isolate);
- ASSERT(args.length() == 1);
- CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
- ASSERT(isolate->use_crankshaft() && FLAG_concurrent_recompilation);
- isolate->optimizing_compiler_thread()->InstallOptimizedFunctions();
- return function->code();
-}
class ActivationsFinder : public ThreadVisitor {
@@ -8553,8 +8543,7 @@
}
CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
if (FLAG_concurrent_recompilation && sync_with_compiler_thread) {
- while (function->IsInRecompileQueue() ||
- function->IsMarkedForInstallingRecompiledCode()) {
+ while (function->IsInRecompileQueue()) {
isolate->optimizing_compiler_thread()->InstallOptimizedFunctions();
OS::Sleep(50);
}
@@ -9380,13 +9369,29 @@
SealHandleScope shs(isolate);
ASSERT(args.length() == 0);
+ // First check if this is a real stack overflow.
+ if (isolate->stack_guard()->IsStackOverflow()) {
+ return isolate->StackOverflow();
+ }
+
+ return Execution::HandleStackGuardInterrupt(isolate);
+}
+
+
+RUNTIME_FUNCTION(MaybeObject*, Runtime_TryInstallRecompiledCode) {
+ HandleScope scope(isolate);
+ ASSERT(args.length() == 1);
+ CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
+
// First check if this is a real stack overflow.
if (isolate->stack_guard()->IsStackOverflow()) {
SealHandleScope shs(isolate);
return isolate->StackOverflow();
}
- return Execution::HandleStackGuardInterrupt(isolate);
+ isolate->optimizing_compiler_thread()->InstallOptimizedFunctions();
+ return (function->IsOptimized()) ? function->code()
+ : function->shared()->code();
}
=======================================
--- /branches/bleeding_edge/src/runtime.h Wed Sep 11 20:03:54 2013 UTC
+++ /branches/bleeding_edge/src/runtime.h Thu Sep 12 11:30:56 2013 UTC
@@ -88,7 +88,7 @@
F(LazyCompile, 1, 1) \
F(LazyRecompile, 1, 1) \
F(ConcurrentRecompile, 1, 1) \
- F(InstallRecompiledCode, 1, 1) \
+ F(TryInstallRecompiledCode, 1, 1) \
F(NotifyDeoptimized, 1, 1) \
F(NotifyStubFailure, 0, 1) \
F(NotifyOSR, 0, 1) \
=======================================
--- /branches/bleeding_edge/src/x64/builtins-x64.cc Wed Sep 11 12:39:00
2013 UTC
+++ /branches/bleeding_edge/src/x64/builtins-x64.cc Thu Sep 12 11:30:56
2013 UTC
@@ -71,6 +71,24 @@
__ addq(rax, Immediate(num_extra_args + 1));
__ JumpToExternalReference(ExternalReference(id, masm->isolate()), 1);
}
+
+
+static void CallRuntimePassFunction(MacroAssembler* masm,
+ Runtime::FunctionId function_id) {
+ FrameScope scope(masm, StackFrame::INTERNAL);
+ // Push a copy of the function onto the stack.
+ __ push(rdi);
+ // Push call kind information.
+ __ push(rcx);
+ // Function is also the parameter to the runtime call.
+ __ push(rdi);
+
+ __ CallRuntime(function_id, 1);
+ // Restore call kind information.
+ __ pop(rcx);
+ // Restore receiver.
+ __ pop(rdi);
+}
static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
@@ -84,57 +102,27 @@
void Builtins::Generate_InRecompileQueue(MacroAssembler* masm) {
- GenerateTailCallToSharedCode(masm);
-}
+ // Checking whether the queued function is ready for install is optional,
+ // since we come across interrupts and stack checks elsewhere. However,
+ // not checking may delay installing ready functions, and always checking
+ // would be quite expensive. A good compromise is to first check against
+ // stack limit as a cue for an interrupt signal.
+ Label ok;
+ __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
+ __ j(above_equal, &ok);
-
-void Builtins::Generate_InstallRecompiledCode(MacroAssembler* masm) {
- // Enter an internal frame.
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
-
- // Push a copy of the function onto the stack.
- __ push(rdi);
- // Push call kind information.
- __ push(rcx);
-
- __ push(rdi); // Function is also the parameter to the runtime call.
- __ CallRuntime(Runtime::kInstallRecompiledCode, 1);
-
- // Restore call kind information.
- __ pop(rcx);
- // Restore function.
- __ pop(rdi);
-
- // Tear down internal frame.
- }
-
- // Do a tail-call of the compiled function.
+ CallRuntimePassFunction(masm, Runtime::kTryInstallRecompiledCode);
+ // Tail call to returned code.
__ lea(rax, FieldOperand(rax, Code::kHeaderSize));
__ jmp(rax);
+
+ __ bind(&ok);
+ GenerateTailCallToSharedCode(masm);
}
void Builtins::Generate_ConcurrentRecompile(MacroAssembler* masm) {
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
-
- // Push a copy of the function onto the stack.
- __ push(rdi);
- // Push call kind information.
- __ push(rcx);
-
- __ push(rdi); // Function is also the parameter to the runtime call.
- __ CallRuntime(Runtime::kConcurrentRecompile, 1);
-
- // Restore call kind information.
- __ pop(rcx);
- // Restore receiver.
- __ pop(rdi);
-
- // Tear down internal frame.
- }
-
+ CallRuntimePassFunction(masm, Runtime::kConcurrentRecompile);
GenerateTailCallToSharedCode(masm);
}
@@ -586,26 +574,7 @@
void Builtins::Generate_LazyCompile(MacroAssembler* masm) {
- // Enter an internal frame.
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
-
- // Push a copy of the function onto the stack.
- __ push(rdi);
- // Push call kind information.
- __ push(rcx);
-
- __ push(rdi); // Function is also the parameter to the runtime call.
- __ CallRuntime(Runtime::kLazyCompile, 1);
-
- // Restore call kind information.
- __ pop(rcx);
- // Restore receiver.
- __ pop(rdi);
-
- // Tear down internal frame.
- }
-
+ CallRuntimePassFunction(masm, Runtime::kLazyCompile);
// Do a tail-call of the compiled function.
__ lea(rax, FieldOperand(rax, Code::kHeaderSize));
__ jmp(rax);
@@ -613,26 +582,7 @@
void Builtins::Generate_LazyRecompile(MacroAssembler* masm) {
- // Enter an internal frame.
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
-
- // Push a copy of the function onto the stack.
- __ push(rdi);
- // Push call kind information.
- __ push(rcx);
-
- __ push(rdi); // Function is also the parameter to the runtime call.
- __ CallRuntime(Runtime::kLazyRecompile, 1);
-
- // Restore call kind information.
- __ pop(rcx);
- // Restore function.
- __ pop(rdi);
-
- // Tear down internal frame.
- }
-
+ CallRuntimePassFunction(masm, Runtime::kLazyRecompile);
// Do a tail-call of the compiled function.
__ lea(rax, FieldOperand(rax, Code::kHeaderSize));
__ jmp(rax);
=======================================
--- /branches/bleeding_edge/test/mjsunit/fuzz-natives-part1.js Thu Aug 22
16:14:37 2013 UTC
+++ /branches/bleeding_edge/test/mjsunit/fuzz-natives-part1.js Thu Sep 12
11:30:56 2013 UTC
@@ -151,7 +151,6 @@
"LazyCompile": true,
"LazyRecompile": true,
"ConcurrentRecompile": true,
- "InstallRecompiledCode": true,
"NotifyDeoptimized": true,
"NotifyStubFailure": true,
"NotifyOSR": true,
=======================================
--- /branches/bleeding_edge/test/mjsunit/fuzz-natives-part2.js Mon Aug 26
11:59:14 2013 UTC
+++ /branches/bleeding_edge/test/mjsunit/fuzz-natives-part2.js Thu Sep 12
11:30:56 2013 UTC
@@ -151,7 +151,6 @@
"LazyCompile": true,
"LazyRecompile": true,
"ConcurrentRecompile": true,
- "InstallRecompiledCode": true,
"NotifyDeoptimized": true,
"NotifyStubFailure": true,
"NotifyOSR": true,
=======================================
--- /branches/bleeding_edge/test/mjsunit/fuzz-natives-part3.js Thu Aug 22
16:14:37 2013 UTC
+++ /branches/bleeding_edge/test/mjsunit/fuzz-natives-part3.js Thu Sep 12
11:30:56 2013 UTC
@@ -151,7 +151,6 @@
"LazyCompile": true,
"LazyRecompile": true,
"ConcurrentRecompile": true,
- "InstallRecompiledCode": true,
"NotifyDeoptimized": true,
"NotifyStubFailure": true,
"NotifyOSR": true,
=======================================
--- /branches/bleeding_edge/test/mjsunit/fuzz-natives-part4.js Thu Aug 22
16:14:37 2013 UTC
+++ /branches/bleeding_edge/test/mjsunit/fuzz-natives-part4.js Thu Sep 12
11:30:56 2013 UTC
@@ -151,7 +151,6 @@
"LazyCompile": true,
"LazyRecompile": true,
"ConcurrentRecompile": true,
- "InstallRecompiledCode": true,
"NotifyDeoptimized": true,
"NotifyStubFailure": true,
"NotifyOSR": true,
--
--
v8-dev mailing list
[email protected]
http://groups.google.com/group/v8-dev
---
You received this message because you are subscribed to the Google Groups "v8-dev" group.
To unsubscribe from this group and stop receiving emails from it, send an email
to [email protected].
For more options, visit https://groups.google.com/groups/opt_out.