Revision: 6187
Author: [email protected]
Date: Wed Jan 5 09:09:24 2011
Log: Add deoptimization support to full-codegen-x64.cc (insert
PrepareForBailout in many places.)
Review URL: http://codereview.chromium.org/6031014
http://code.google.com/p/v8/source/detail?r=6187
Modified:
/branches/bleeding_edge/src/x64/full-codegen-x64.cc
=======================================
--- /branches/bleeding_edge/src/x64/full-codegen-x64.cc Mon Jan 3 03:39:22
2011
+++ /branches/bleeding_edge/src/x64/full-codegen-x64.cc Wed Jan 5 09:09:24
2011
@@ -459,7 +459,7 @@
void FullCodeGenerator::TestContext::Plug(bool flag) const {
- codegen()->PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL);
+ codegen()->PrepareForBailoutBeforeSplit(TOS_REG, true, NULL, NULL);
if (flag) {
if (true_label_ != fall_through_) __ jmp(true_label_);
} else {
@@ -555,6 +555,25 @@
bool should_normalize,
Label* if_true,
Label* if_false) {
+ // Only prepare for bailouts before splits if we're in a test
+ // context. Otherwise, we let the Visit function deal with the
+ // preparation to avoid preparing with the same AST id twice.
+ if (!context()->IsTest() || !info_->IsOptimizable()) return;
+
+ NearLabel skip;
+ if (should_normalize) __ jmp(&skip);
+
+ ForwardBailoutStack* current = forward_bailout_stack_;
+ while (current != NULL) {
+ PrepareForBailout(current->expr(), state);
+ current = current->parent();
+ }
+
+ if (should_normalize) {
+ __ CompareRoot(rax, Heap::kTrueValueRootIndex);
+ Split(equal, if_true, if_false, NULL);
+ __ bind(&skip);
+ }
}
@@ -669,8 +688,10 @@
Comment cmnt(masm_, "[ SwitchStatement");
Breakable nested_statement(this, stmt);
SetStatementPosition(stmt);
+
// Keep the switch value on the stack until a case matches.
VisitForStackValue(stmt->tag());
+ PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
ZoneList<CaseClause*>* clauses = stmt->cases();
CaseClause* default_clause = NULL; // Can occur anywhere in the list.
@@ -735,6 +756,7 @@
}
__ bind(nested_statement.break_target());
+ PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
}
@@ -1224,6 +1246,7 @@
if (property->emit_store()) {
Handle<Code>
ic(Builtins::builtin(Builtins::StoreIC_Initialize));
EmitCallIC(ic, RelocInfo::CODE_TARGET);
+ PrepareForBailoutForId(key->id(), NO_REGISTERS);
}
break;
}
@@ -1311,6 +1334,8 @@
// Update the write barrier for the array store.
__ RecordWrite(rbx, offset, result_register(), rcx);
+
+ PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
}
if (result_saved) {
@@ -1382,6 +1407,12 @@
break;
}
}
+
+ // For property compound assignments we need another deoptimization
+ // point after the property load.
+ if (property != NULL) {
+ PrepareForBailoutForId(expr->CompoundLoadId(), TOS_REG);
+ }
Token::Value op = expr->binary_op();
ConstantOperand constant = ShouldInlineSmiCase(op)
@@ -1408,6 +1439,8 @@
} else {
EmitBinaryOp(op, mode);
}
+ // Deoptimization point in case the binary operation may have side effects.
+ PrepareForBailout(expr->binary_operation(), TOS_REG);
} else {
VisitForAccumulatorValue(expr->value());
}
@@ -1420,6 +1453,7 @@
case VARIABLE:
EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
expr->op());
+ PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
context()->Plug(rax);
break;
case NAMED_PROPERTY:
@@ -1529,7 +1563,7 @@
}
-void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_id) {
+void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) {
// Invalid left-hand sides are rewritten to have a 'throw
// ReferenceError' on the left-hand side.
if (!expr->IsValidLeftHandSide()) {
@@ -1577,6 +1611,7 @@
break;
}
}
+ PrepareForBailoutForId(bailout_ast_id, TOS_REG);
context()->Plug(rax);
}
@@ -1688,6 +1723,7 @@
__ pop(rax);
__ Drop(1);
}
+ PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
context()->Plug(rax);
}
@@ -1726,6 +1762,7 @@
__ pop(rax);
}
+ PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
context()->Plug(rax);
}
@@ -1766,6 +1803,7 @@
InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
Handle<Code> ic = StubCache::ComputeCallInitialize(arg_count, in_loop);
EmitCallIC(ic, mode);
+ RecordJSReturnSite(expr);
// Restore context register.
__ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
context()->Plug(rax);
@@ -1799,6 +1837,7 @@
Handle<Code> ic = StubCache::ComputeKeyedCallInitialize(arg_count,
in_loop);
__ movq(rcx, Operand(rsp, (arg_count + 1) * kPointerSize)); // Key.
EmitCallIC(ic, mode);
+ RecordJSReturnSite(expr);
// Restore context register.
__ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
context()->DropAndPlug(1, rax); // Drop the key still on the stack.
@@ -1819,6 +1858,7 @@
InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
CallFunctionStub stub(arg_count, in_loop, RECEIVER_MIGHT_BE_VALUE);
__ CallStub(&stub);
+ RecordJSReturnSite(expr);
// Restore context register.
__ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
// Discard the function left on TOS.
@@ -1827,6 +1867,12 @@
void FullCodeGenerator::VisitCall(Call* expr) {
+#ifdef DEBUG
+ // We want to verify that RecordJSReturnSite gets called on all paths
+ // through this function. Avoid early returns.
+ expr->return_is_recorded_ = false;
+#endif
+
Comment cmnt(masm_, "[ Call");
Expression* fun = expr->expression();
Variable* var = fun->AsVariableProxy()->AsVariable();
@@ -1834,7 +1880,7 @@
if (var != NULL && var->is_possibly_eval()) {
// In a call to eval, we first call %ResolvePossiblyDirectEval to
// resolve the function we need to call and the receiver of the
- // call. The we call the resolved function using the given
+ // call. Then we call the resolved function using the given
// arguments.
ZoneList<Expression*>* args = expr->arguments();
int arg_count = args->length();
@@ -1871,6 +1917,7 @@
InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
CallFunctionStub stub(arg_count, in_loop, RECEIVER_MIGHT_BE_VALUE);
__ CallStub(&stub);
+ RecordJSReturnSite(expr);
// Restore context register.
__ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
context()->DropAndPlug(1, rax);
@@ -1893,32 +1940,31 @@
&done);
__ bind(&slow);
- // Call the runtime to find the function to call (returned in rax)
- // and the object holding it (returned in rdx).
- __ push(context_register());
- __ Push(var->name());
- __ CallRuntime(Runtime::kLoadContextSlot, 2);
- __ push(rax); // Function.
- __ push(rdx); // Receiver.
-
- // If fast case code has been generated, emit code to push the
- // function and receiver and have the slow path jump around this
- // code.
- if (done.is_linked()) {
- NearLabel call;
- __ jmp(&call);
- __ bind(&done);
- // Push function.
- __ push(rax);
- // Push global receiver.
+ }
+ // Call the runtime to find the function to call (returned in rax)
+ // and the object holding it (returned in rdx).
+ __ push(context_register());
+ __ Push(var->name());
+ __ CallRuntime(Runtime::kLoadContextSlot, 2);
+ __ push(rax); // Function.
+ __ push(rdx); // Receiver.
+
+ // If fast case code has been generated, emit code to push the
+ // function and receiver and have the slow path jump around this
+ // code.
+ if (done.is_linked()) {
+ NearLabel call;
+ __ jmp(&call);
+ __ bind(&done);
+ // Push function.
+ __ push(rax);
+ // Push global receiver.
__ movq(rbx, GlobalObjectOperand());
__ push(FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset));
__ bind(&call);
- }
}
EmitCallWithStub(expr);
-
} else if (fun->AsProperty() != NULL) {
// Call to an object property.
Property* prop = fun->AsProperty();
@@ -1932,24 +1978,23 @@
} else {
// Call to a keyed property.
// For a synthetic property use keyed load IC followed by function call,
- // for a regular property use KeyedCallIC.
+ // for a regular property use keyed EmitCallIC.
{ PreservePositionScope scope(masm()->positions_recorder());
VisitForStackValue(prop->obj());
}
if (prop->is_synthetic()) {
{ PreservePositionScope scope(masm()->positions_recorder());
VisitForAccumulatorValue(prop->key());
- __ movq(rdx, Operand(rsp, 0));
}
// Record source code position for IC call.
SetSourcePosition(prop->position());
+ __ pop(rdx); // We do not need to keep the receiver.
+
Handle<Code>
ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
EmitCallIC(ic, RelocInfo::CODE_TARGET);
- // Pop receiver.
- __ pop(rbx);
// Push result (function).
__ push(rax);
- // Push receiver object on stack.
+ // Push Global receiver.
__ movq(rcx, GlobalObjectOperand());
__ push(FieldOperand(rcx, GlobalObject::kGlobalReceiverOffset));
EmitCallWithStub(expr);
@@ -1960,7 +2005,7 @@
} else {
// Call to some other expression. If the expression is an anonymous
// function literal not called in a loop, mark it as one that should
- // also use the fast code generator.
+ // also use the full code generator.
FunctionLiteral* lit = fun->AsFunctionLiteral();
if (lit != NULL &&
lit->name()->Equals(Heap::empty_string()) &&
@@ -1976,6 +2021,11 @@
// Emit function call.
EmitCallWithStub(expr);
}
+
+#ifdef DEBUG
+ // RecordJSReturnSite should have been called.
+ ASSERT(expr->return_is_recorded_);
+#endif
}
@@ -2023,6 +2073,7 @@
context()->PrepareTest(&materialize_true, &materialize_false,
&if_true, &if_false, &fall_through);
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
__ JumpIfSmi(rax, if_true);
__ jmp(if_false);
@@ -2042,6 +2093,7 @@
context()->PrepareTest(&materialize_true, &materialize_false,
&if_true, &if_false, &fall_through);
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
Condition non_negative_smi = masm()->CheckNonNegativeSmi(rax);
Split(non_negative_smi, if_true, if_false, fall_through);
@@ -2073,6 +2125,7 @@
__ cmpq(rbx, Immediate(FIRST_JS_OBJECT_TYPE));
__ j(below, if_false);
__ cmpq(rbx, Immediate(LAST_JS_OBJECT_TYPE));
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
Split(below_equal, if_true, if_false, fall_through);
context()->Plug(if_true, if_false);
@@ -2093,6 +2146,7 @@
__ JumpIfSmi(rax, if_false);
__ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rbx);
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
Split(above_equal, if_true, if_false, fall_through);
context()->Plug(if_true, if_false);
@@ -2115,6 +2169,7 @@
__ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
__ testb(FieldOperand(rbx, Map::kBitFieldOffset),
Immediate(1 << Map::kIsUndetectable));
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
Split(not_zero, if_true, if_false, fall_through);
context()->Plug(if_true, if_false);
@@ -2137,6 +2192,7 @@
// Just indicate false, as %_IsStringWrapperSafeForDefaultValueOf() is only
// used in a few functions in runtime.js which should not normally be hit by
// this compiler.
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
__ jmp(if_false);
context()->Plug(if_true, if_false);
}
@@ -2156,6 +2212,7 @@
__ JumpIfSmi(rax, if_false);
__ CmpObjectType(rax, JS_FUNCTION_TYPE, rbx);
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
Split(equal, if_true, if_false, fall_through);
context()->Plug(if_true, if_false);
@@ -2176,6 +2233,7 @@
__ JumpIfSmi(rax, if_false);
__ CmpObjectType(rax, JS_ARRAY_TYPE, rbx);
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
Split(equal, if_true, if_false, fall_through);
context()->Plug(if_true, if_false);
@@ -2196,6 +2254,7 @@
__ JumpIfSmi(rax, if_false);
__ CmpObjectType(rax, JS_REGEXP_TYPE, rbx);
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
Split(equal, if_true, if_false, fall_through);
context()->Plug(if_true, if_false);
@@ -2227,6 +2286,7 @@
__ bind(&check_frame_marker);
__ SmiCompare(Operand(rax, StandardFrameConstants::kMarkerOffset),
Smi::FromInt(StackFrame::CONSTRUCT));
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
Split(equal, if_true, if_false, fall_through);
context()->Plug(if_true, if_false);
@@ -2249,6 +2309,7 @@
__ pop(rbx);
__ cmpq(rax, rbx);
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
Split(equal, if_true, if_false, fall_through);
context()->Plug(if_true, if_false);
@@ -2822,6 +2883,7 @@
__ testl(FieldOperand(rax, String::kHashFieldOffset),
Immediate(String::kContainsCachedArrayIndexMask));
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
__ j(zero, if_true);
__ jmp(if_false);
@@ -2943,6 +3005,7 @@
// Notice that the labels are swapped.
context()->PrepareTest(&materialize_true, &materialize_false,
&if_false, &if_true, &fall_through);
+ if (context()->IsTest()) ForwardBailoutToChild(expr);
VisitForControl(expr->expression(), if_true, if_false, fall_through);
context()->Plug(if_false, if_true); // Labels swapped.
break;
@@ -3063,6 +3126,10 @@
EmitKeyedPropertyLoad(prop);
}
}
+
+ // We need a second deoptimization point after loading the value
+ // in case evaluating the property load may have a side effect.
+ PrepareForBailout(expr->increment(), TOS_REG);
// Call ToNumber only if operand is not a smi.
NearLabel no_conversion;
@@ -3133,6 +3200,7 @@
{ EffectContext context(this);
EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
Token::ASSIGN);
+ PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
context.Plug(rax);
}
// For all contexts except kEffect: We have the result on
@@ -3144,6 +3212,7 @@
// Perform the assignment as if via '='.
EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
Token::ASSIGN);
+ PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
context()->Plug(rax);
}
break;
@@ -3152,6 +3221,7 @@
__ pop(rdx);
Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
EmitCallIC(ic, RelocInfo::CODE_TARGET);
+ PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
if (expr->is_postfix()) {
if (!context()->IsEffect()) {
context()->PlugTOS();
@@ -3166,6 +3236,7 @@
__ pop(rdx);
Handle<Code>
ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
EmitCallIC(ic, RelocInfo::CODE_TARGET);
+ PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
if (expr->is_postfix()) {
if (!context()->IsEffect()) {
context()->PlugTOS();
@@ -3192,6 +3263,7 @@
// Use a regular load, not a contextual load, to avoid a reference
// error.
EmitCallIC(ic, RelocInfo::CODE_TARGET);
+ PrepareForBailout(expr, TOS_REG);
context()->Plug(rax);
} else if (proxy != NULL &&
proxy->var()->AsSlot() != NULL &&
@@ -3207,12 +3279,13 @@
__ push(rsi);
__ Push(proxy->name());
__ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
+ PrepareForBailout(expr, TOS_REG);
__ bind(&done);
context()->Plug(rax);
} else {
// This expression cannot throw a reference error at the top level.
- Visit(expr);
+ context()->HandleExpression(expr);
}
}
@@ -3237,6 +3310,7 @@
{ AccumulatorValueContext context(this);
VisitForTypeofValue(left_unary->expression());
}
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
if (check->Equals(Heap::number_symbol())) {
Condition is_smi = masm_->CheckSmi(rax);
@@ -3330,6 +3404,7 @@
case Token::IN:
VisitForStackValue(expr->right());
__ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
+ PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL);
__ CompareRoot(rax, Heap::kTrueValueRootIndex);
Split(equal, if_true, if_false, fall_through);
break;
@@ -3338,6 +3413,7 @@
VisitForStackValue(expr->right());
InstanceofStub stub(InstanceofStub::kNoFlags);
__ CallStub(&stub);
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
__ testq(rax, rax);
// The stub returns 0 for true.
Split(zero, if_true, if_false, fall_through);
@@ -3396,6 +3472,8 @@
: NO_COMPARE_FLAGS;
CompareStub stub(cc, strict, flags);
__ CallStub(&stub);
+
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
__ testq(rax, rax);
Split(cc, if_true, if_false, fall_through);
}
@@ -3417,6 +3495,7 @@
&if_true, &if_false, &fall_through);
VisitForAccumulatorValue(expr->expression());
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
__ CompareRoot(rax, Heap::kNullValueRootIndex);
if (expr->is_strict()) {
Split(equal, if_true, if_false, fall_through);
--
v8-dev mailing list
[email protected]
http://groups.google.com/group/v8-dev