Revision: 5970
Author: [email protected]
Date: Fri Dec 10 06:33:20 2010
Log: Improve our type feedback by recognizing never-executed IC calls for
binary operations.
In the case of inlined smi code in non-optimized code we could not
distinguish between the smi-only case and the case that the operation was
never executed.
With this change the first execution of a binary operation always jumps
to the stub which in turn patches the smi-check into the correct
conditional branch, so that we benefit from inlined smi code after the
first invocation.
A nop instruction after the call to the BinaryOpIC indicates that no
smi code was inlined. A "test eax" instruction says that there was smi
code inlined and encodes the delta to the patch site and the condition
code of the branch at the patch site to restore the original jump.
Review URL: http://codereview.chromium.org/5714001
http://code.google.com/p/v8/source/detail?r=5970
Modified:
/branches/bleeding_edge/src/arm/ic-arm.cc
/branches/bleeding_edge/src/full-codegen.h
/branches/bleeding_edge/src/ia32/assembler-ia32.h
/branches/bleeding_edge/src/ia32/code-stubs-ia32.cc
/branches/bleeding_edge/src/ia32/code-stubs-ia32.h
/branches/bleeding_edge/src/ia32/full-codegen-ia32.cc
/branches/bleeding_edge/src/ia32/ic-ia32.cc
/branches/bleeding_edge/src/ia32/lithium-codegen-ia32.cc
/branches/bleeding_edge/src/ic.cc
/branches/bleeding_edge/src/ic.h
/branches/bleeding_edge/src/type-info.cc
=======================================
--- /branches/bleeding_edge/src/arm/ic-arm.cc Tue Dec 7 03:31:57 2010
+++ /branches/bleeding_edge/src/arm/ic-arm.cc Fri Dec 10 06:33:20 2010
@@ -2360,10 +2360,8 @@
void CompareIC::UpdateCaches(Handle<Object> x, Handle<Object> y) {
HandleScope scope;
Handle<Code> rewritten;
-#ifdef DEBUG
State previous_state = GetState();
-#endif
- State state = TargetState(x, y);
+ State state = TargetState(previous_state, x, y);
if (state == GENERIC) {
CompareStub stub(GetCondition(), strict(), NO_COMPARE_FLAGS, r1, r0);
rewritten = stub.GetCode();
@@ -2382,6 +2380,12 @@
}
#endif
}
+
+
+void PatchInlinedSmiCode(Address address) {
+ UNIMPLEMENTED();
+}
+
} } // namespace v8::internal
=======================================
--- /branches/bleeding_edge/src/full-codegen.h Tue Dec 7 03:31:57 2010
+++ /branches/bleeding_edge/src/full-codegen.h Fri Dec 10 06:33:20 2010
@@ -38,6 +38,9 @@
namespace v8 {
namespace internal {
+// Forward declarations.
+class JumpPatchSite;
+
// AST node visitor which can tell whether a given statement will be
breakable
// when the code is compiled by the full compiler in the debugger. This
means
// that there will be an IC (load/store/call) in the code generated for the
@@ -533,6 +536,10 @@
// Helper for calling an IC stub.
void EmitCallIC(Handle<Code> ic, RelocInfo::Mode mode);
+ // Helper for calling an IC stub with a patch site. Passing NULL for
patch_site
+ // indicates no inlined smi code and emits a nop after the IC call.
+ void EmitCallIC(Handle<Code> ic, JumpPatchSite* patch_site);
+
// Set fields in the stack frame. Offsets are the frame pointer relative
// offsets defined in, e.g., StandardFrameConstants.
void StoreToFrameField(int frame_offset, Register value);
=======================================
--- /branches/bleeding_edge/src/ia32/assembler-ia32.h Tue Dec 7 03:31:57
2010
+++ /branches/bleeding_edge/src/ia32/assembler-ia32.h Fri Dec 10 06:33:20
2010
@@ -571,6 +571,14 @@
static const byte kTestEaxByte = 0xA9;
// One byte opcode for test al, 0xXX.
static const byte kTestAlByte = 0xA8;
+ // One byte opcode for nop.
+ static const byte kNopByte = 0x90;
+
+ // One byte opcode for a short unconditional jump.
+ static const byte kJmpShortOpcode = 0xEB;
+ // One byte prefix for a short conditional jump.
+ static const byte kJccShortPrefix = 0x70;
+
//
---------------------------------------------------------------------------
// Code generation
=======================================
--- /branches/bleeding_edge/src/ia32/code-stubs-ia32.cc Tue Dec 7 03:31:57
2010
+++ /branches/bleeding_edge/src/ia32/code-stubs-ia32.cc Fri Dec 10 06:33:20
2010
@@ -1366,8 +1366,8 @@
if (op_ == Token::DIV || op_ == Token::MOD) {
left = eax;
right = ebx;
- __ mov(ebx, eax);
- __ mov(eax, edx);
+ __ mov(ebx, eax);
+ __ mov(eax, edx);
}
=======================================
--- /branches/bleeding_edge/src/ia32/code-stubs-ia32.h Tue Dec 7 03:31:57
2010
+++ /branches/bleeding_edge/src/ia32/code-stubs-ia32.h Fri Dec 10 06:33:20
2010
@@ -231,7 +231,8 @@
ASSERT(OpBits::is_valid(Token::NUM_TOKENS));
}
- TypeRecordingBinaryOpStub(int key,
+ TypeRecordingBinaryOpStub(
+ int key,
TRBinaryOpIC::TypeInfo operands_type,
TRBinaryOpIC::TypeInfo result_type = TRBinaryOpIC::UNINITIALIZED)
: op_(OpBits::decode(key)),
@@ -239,8 +240,7 @@
use_sse3_(SSE3Bits::decode(key)),
operands_type_(operands_type),
result_type_(result_type),
- name_(NULL) {
- }
+ name_(NULL) { }
// Generate code to call the stub with the supplied arguments. This will
add
// code at the call site to prepare arguments either in registers or on
the
=======================================
--- /branches/bleeding_edge/src/ia32/full-codegen-ia32.cc Tue Dec 7
03:31:57 2010
+++ /branches/bleeding_edge/src/ia32/full-codegen-ia32.cc Fri Dec 10
06:33:20 2010
@@ -41,6 +41,46 @@
namespace v8 {
namespace internal {
+
+class JumpPatchSite BASE_EMBEDDED {
+ public:
+ explicit JumpPatchSite(MacroAssembler* masm)
+ : masm_(masm) {
+#ifdef DEBUG
+ info_emitted_ = false;
+#endif
+ }
+
+ ~JumpPatchSite() {
+ ASSERT(patch_site_.is_bound() == info_emitted_);
+ }
+
+ void EmitJump(NearLabel* target) {
+ ASSERT(!patch_site_.is_bound() && !info_emitted_);
+ masm_->bind(&patch_site_);
+ masm_->jmp(target);
+ }
+
+ void EmitPatchInfo() {
+ int delta_to_patch_site =
masm_->SizeOfCodeGeneratedSince(&patch_site_);
+ ASSERT(is_int8(delta_to_patch_site));
+ masm_->test(eax, Immediate(delta_to_patch_site));
+#ifdef DEBUG
+ info_emitted_ = true;
+#endif
+ }
+
+ bool is_bound() const { return patch_site_.is_bound(); }
+
+ private:
+ MacroAssembler* masm_;
+ Label patch_site_;
+#ifdef DEBUG
+ bool info_emitted_;
+#endif
+};
+
+
#define __ ACCESS_MASM(masm_)
// Generate code for a JS function. On entry to the function the receiver
@@ -715,12 +755,13 @@
// Perform the comparison as if via '==='.
__ mov(edx, Operand(esp, 0)); // Switch value.
bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
+ JumpPatchSite patch_site(masm_);
if (inline_smi_code) {
NearLabel slow_case;
__ mov(ecx, edx);
__ or_(ecx, Operand(eax));
__ test(ecx, Immediate(kSmiTagMask));
- __ j(not_zero, &slow_case, not_taken);
+ patch_site.EmitJump(&slow_case);
__ cmp(edx, Operand(eax));
__ j(not_equal, &next_test);
__ Drop(1); // Switch value is no longer needed.
@@ -730,9 +771,8 @@
// Record position before stub call for type feedback.
SetSourcePosition(clause->position());
-
Handle<Code> ic = CompareIC::GetUninitialized(Token::EQ_STRICT);
- __ call(ic, RelocInfo::CODE_TARGET);
+ EmitCallIC(ic, &patch_site);
__ test(eax, Operand(eax));
__ j(not_equal, &next_test);
@@ -1552,12 +1592,13 @@
OverwriteMode mode,
bool left_is_constant_smi,
Smi* value) {
- NearLabel call_stub;
- Label done;
+ NearLabel call_stub, done;
__ add(Operand(eax), Immediate(value));
__ j(overflow, &call_stub);
__ test(eax, Immediate(kSmiTagMask));
- __ j(zero, &done);
+ JumpPatchSite patch_site(masm_);
+ patch_site.EmitJump(&call_stub);
+ __ jmp(&done);
// Undo the optimistic add operation and call the shared stub.
__ bind(&call_stub);
@@ -1570,7 +1611,8 @@
__ mov(edx, eax);
__ mov(eax, Immediate(value));
}
- __ CallStub(&stub);
+ EmitCallIC(stub.GetCode(), &patch_site);
+
__ bind(&done);
context()->Plug(eax);
}
@@ -1580,7 +1622,7 @@
OverwriteMode mode,
bool left_is_constant_smi,
Smi* value) {
- Label call_stub, done;
+ NearLabel call_stub, done;
if (left_is_constant_smi) {
__ mov(ecx, eax);
__ mov(eax, Immediate(value));
@@ -1590,7 +1632,9 @@
}
__ j(overflow, &call_stub);
__ test(eax, Immediate(kSmiTagMask));
- __ j(zero, &done);
+ JumpPatchSite patch_site(masm_);
+ patch_site.EmitJump(&call_stub);
+ __ jmp(&done);
__ bind(&call_stub);
if (left_is_constant_smi) {
@@ -1603,7 +1647,8 @@
}
Token::Value op = Token::SUB;
TypeRecordingBinaryOpStub stub(op, mode);
- __ CallStub(&stub);
+ EmitCallIC(stub.GetCode(), &patch_site);
+
__ bind(&done);
context()->Plug(eax);
}
@@ -1613,20 +1658,15 @@
Token::Value op,
OverwriteMode mode,
Smi* value) {
- Label call_stub, smi_case, done;
+ NearLabel call_stub, done;
int shift_value = value->value() & 0x1f;
__ test(eax, Immediate(kSmiTagMask));
- __ j(zero, &smi_case);
-
- __ bind(&call_stub);
- __ mov(edx, eax);
- __ mov(eax, Immediate(value));
- TypeRecordingBinaryOpStub stub(op, mode);
- __ CallStub(&stub);
- __ jmp(&done);
-
- __ bind(&smi_case);
+ // Patch site.
+ JumpPatchSite patch_site(masm_);
+ patch_site.EmitJump(&call_stub);
+
+ // Smi case.
switch (op) {
case Token::SHL:
if (shift_value != 0) {
@@ -1665,6 +1705,14 @@
default:
UNREACHABLE();
}
+ __ jmp(&done);
+
+ // Call stub.
+ __ bind(&call_stub);
+ __ mov(edx, eax);
+ __ mov(eax, Immediate(value));
+ TypeRecordingBinaryOpStub stub(op, mode);
+ EmitCallIC(stub.GetCode(), &patch_site);
__ bind(&done);
context()->Plug(eax);
@@ -1675,18 +1723,14 @@
Token::Value op,
OverwriteMode mode,
Smi* value) {
- Label smi_case, done;
+ NearLabel call_stub, done;
__ test(eax, Immediate(kSmiTagMask));
- __ j(zero, &smi_case);
-
- // The order of the arguments does not matter for bit-ops with a
- // constant operand.
- __ mov(edx, Immediate(value));
- TypeRecordingBinaryOpStub stub(op, mode);
- __ CallStub(&stub);
- __ jmp(&done);
-
- __ bind(&smi_case);
+ // Patch site. The first invocation of the stub will patch the jmp
with
+ // the required conditional jump.
+ JumpPatchSite patch_site(masm_);
+ patch_site.EmitJump(&call_stub);
+
+ // Smi case.
switch (op) {
case Token::BIT_OR:
__ or_(Operand(eax), Immediate(value));
@@ -1700,6 +1744,14 @@
default:
UNREACHABLE();
}
+ __ jmp(&done);
+
+ // The order of the arguments does not matter for bit-ops with a
+ // constant operand.
+ __ bind(&call_stub);
+ __ mov(edx, Immediate(value));
+ TypeRecordingBinaryOpStub stub(op, mode);
+ EmitCallIC(stub.GetCode(), &patch_site);
__ bind(&done);
context()->Plug(eax);
@@ -1753,20 +1805,15 @@
// Do combined smi check of the operands. Left operand is on the
// stack. Right operand is in eax.
- Label done, stub_call, smi_case;
+ NearLabel done, stub_call;
__ pop(edx);
__ mov(ecx, eax);
__ or_(eax, Operand(edx));
__ test(eax, Immediate(kSmiTagMask));
- __ j(zero, &smi_case);
-
- __ bind(&stub_call);
- __ mov(eax, ecx);
- TypeRecordingBinaryOpStub stub(op, mode);
- __ CallStub(&stub);
- __ jmp(&done);
-
- __ bind(&smi_case);
+ JumpPatchSite patch_site(masm_);
+ patch_site.EmitJump(&stub_call);
+
+ // Smi case.
__ mov(eax, edx); // Copy left operand in case of a stub call.
switch (op) {
@@ -1834,6 +1881,12 @@
default:
UNREACHABLE();
}
+ __ jmp(&done);
+
+ __ bind(&stub_call);
+ __ mov(eax, ecx);
+ TypeRecordingBinaryOpStub stub(op, mode);
+ EmitCallIC(stub.GetCode(), &patch_site);
__ bind(&done);
context()->Plug(eax);
@@ -1844,7 +1897,7 @@
OverwriteMode mode) {
__ pop(edx);
TypeRecordingBinaryOpStub stub(op, mode);
- __ CallStub(&stub);
+ EmitCallIC(stub.GetCode(), NULL); // NULL signals no inlined smi code.
context()->Plug(eax);
}
@@ -3709,6 +3762,7 @@
// Inline smi case if we are in a loop.
NearLabel stub_call;
+ JumpPatchSite patch_site(masm_);
Label done;
if (ShouldInlineSmiCase(expr->op())) {
if (expr->op() == Token::INC) {
@@ -3720,7 +3774,9 @@
// We could eliminate this smi check if we split the code at
// the first smi check before calling ToNumber.
__ test(eax, Immediate(kSmiTagMask));
- __ j(zero, &done);
+ patch_site.EmitJump(&stub_call);
+ __ jmp(&done);
+
__ bind(&stub_call);
// Call stub. Undo operation first.
if (expr->op() == Token::INC) {
@@ -3738,9 +3794,9 @@
__ mov(eax, Immediate(Smi::FromInt(1)));
TypeRecordingBinaryOpStub stub(expr->binary_op(),
NO_OVERWRITE);
- __ CallStub(&stub);
- __ bind(&done);
-
+ EmitCallIC(stub.GetCode(), &patch_site);
+
+ __ bind(&done);
// Store the value returned in eax.
switch (assign_type) {
case VARIABLE:
@@ -4005,21 +4061,23 @@
}
bool inline_smi_code = ShouldInlineSmiCase(op);
+ JumpPatchSite patch_site(masm_);
if (inline_smi_code) {
NearLabel slow_case;
__ mov(ecx, Operand(edx));
__ or_(ecx, Operand(eax));
__ test(ecx, Immediate(kSmiTagMask));
- __ j(not_zero, &slow_case, not_taken);
+ patch_site.EmitJump(&slow_case);
__ cmp(edx, Operand(eax));
Split(cc, if_true, if_false, NULL);
__ bind(&slow_case);
}
// Record position and call the compare IC.
- Handle<Code> ic = CompareIC::GetUninitialized(op);
SetSourcePosition(expr->position());
- __ call(ic, RelocInfo::CODE_TARGET);
+ Handle<Code> ic = CompareIC::GetUninitialized(op);
+ EmitCallIC(ic, &patch_site);
+
PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
__ test(eax, Operand(eax));
Split(cc, if_true, if_false, fall_through);
@@ -4121,6 +4179,16 @@
break;
}
}
+
+
+void FullCodeGenerator::EmitCallIC(Handle<Code> ic, JumpPatchSite*
patch_site) {
+ __ call(ic, RelocInfo::CODE_TARGET);
+ if (patch_site != NULL && patch_site->is_bound()) {
+ patch_site->EmitPatchInfo();
+ } else {
+ __ nop(); // Signals no inlined code.
+ }
+}
void FullCodeGenerator::StoreToFrameField(int frame_offset, Register
value) {
=======================================
--- /branches/bleeding_edge/src/ia32/ic-ia32.cc Tue Dec 7 03:31:57 2010
+++ /branches/bleeding_edge/src/ia32/ic-ia32.cc Fri Dec 10 06:33:20 2010
@@ -2052,10 +2052,9 @@
void CompareIC::UpdateCaches(Handle<Object> x, Handle<Object> y) {
HandleScope scope;
Handle<Code> rewritten;
-#ifdef DEBUG
State previous_state = GetState();
-#endif
- State state = TargetState(x, y);
+
+ State state = TargetState(previous_state, x, y);
if (state == GENERIC) {
CompareStub stub(GetCondition(), strict(), NO_COMPARE_FLAGS);
rewritten = stub.GetCode();
@@ -2073,6 +2072,40 @@
Token::Name(op_));
}
#endif
+
+ // Activate inlined smi code.
+ if (previous_state == UNINITIALIZED) {
+ PatchInlinedSmiCode(address());
+ }
+}
+
+
+void PatchInlinedSmiCode(Address address) {
+ // The address of the instruction following the call.
+ Address test_instruction_address =
+ address + Assembler::kCallTargetAddressOffset;
+
+ // If the instruction following the call is not a test al, nothing
+ // was inlined.
+ if (*test_instruction_address != Assembler::kTestAlByte) {
+ ASSERT(*test_instruction_address == Assembler::kNopByte);
+ return;
+ }
+
+ Address delta_address = test_instruction_address + 1;
+ // The delta to the start of the map check instruction and the
+ // condition code uses at the patched jump.
+ int8_t delta = *reinterpret_cast<int8_t*>(delta_address);
+ if (FLAG_trace_ic) {
+ PrintF("[ patching ic at %p, test=%p, delta=%d\n",
+ address, test_instruction_address, delta);
+ }
+
+ // Patch with a short conditional jump. There must be an unconditional
+ // short jump at this position.
+ Address jmp_address = test_instruction_address - delta;
+ ASSERT(*jmp_address == Assembler::kJmpShortOpcode);
+ *jmp_address = static_cast<byte>(Assembler::kJccShortPrefix | not_zero);
}
=======================================
--- /branches/bleeding_edge/src/ia32/lithium-codegen-ia32.cc Wed Dec 8
06:32:40 2010
+++ /branches/bleeding_edge/src/ia32/lithium-codegen-ia32.cc Fri Dec 10
06:33:20 2010
@@ -315,6 +315,13 @@
__ call(code, mode);
RecordSafepoint(&no_pointers, Safepoint::kNoDeoptimizationIndex);
}
+
+ // Signal that we don't inline smi code before these stubs in the
+ // optimizing code generator.
+ if (code->kind() == Code::TYPE_RECORDING_BINARY_OP_IC ||
+ code->kind() == Code::COMPARE_IC) {
+ __ nop();
+ }
}
=======================================
--- /branches/bleeding_edge/src/ic.cc Tue Dec 7 03:31:57 2010
+++ /branches/bleeding_edge/src/ic.cc Fri Dec 10 06:33:20 2010
@@ -1951,7 +1951,7 @@
TRBinaryOpIC::TypeInfo TRBinaryOpIC::JoinTypes(TRBinaryOpIC::TypeInfo x,
- TRBinaryOpIC::TypeInfo y) {
+ TRBinaryOpIC::TypeInfo y) {
if (x == UNINITIALIZED) return y;
if (y == UNINITIALIZED) return x;
if (x == STRING && y == STRING) return STRING;
@@ -2041,6 +2041,11 @@
TRBinaryOpIC::GetName(result_type),
Token::Name(op));
}
+
+ // Activate inlined smi code.
+ if (previous_type == TRBinaryOpIC::UNINITIALIZED) {
+ PatchInlinedSmiCode(ic.address());
+ }
}
Handle<JSBuiltinsObject> builtins = Top::builtins();
@@ -2127,8 +2132,9 @@
}
-CompareIC::State CompareIC::TargetState(Handle<Object> x, Handle<Object>
y) {
- State state = GetState();
+CompareIC::State CompareIC::TargetState(State state,
+ Handle<Object> x,
+ Handle<Object> y) {
if (state != UNINITIALIZED) return GENERIC;
if (x->IsSmi() && y->IsSmi()) return SMIS;
if (x->IsNumber() && y->IsNumber()) return HEAP_NUMBERS;
=======================================
--- /branches/bleeding_edge/src/ic.h Tue Dec 7 03:31:57 2010
+++ /branches/bleeding_edge/src/ic.h Fri Dec 10 06:33:20 2010
@@ -582,7 +582,7 @@
static const char* GetStateName(State state);
private:
- State TargetState(Handle<Object> x, Handle<Object> y);
+ State TargetState(State state, Handle<Object> x, Handle<Object> y);
bool strict() const { return op_ == Token::EQ_STRICT; }
Condition GetCondition() const { return ComputeCondition(op_); }
@@ -591,6 +591,8 @@
Token::Value op_;
};
+// Helper for TRBinaryOpIC and CompareIC.
+void PatchInlinedSmiCode(Address address);
} } // namespace v8::internal
=======================================
--- /branches/bleeding_edge/src/type-info.cc Tue Dec 7 03:53:19 2010
+++ /branches/bleeding_edge/src/type-info.cc Fri Dec 10 06:33:20 2010
@@ -142,6 +142,8 @@
CompareIC::State state =
static_cast<CompareIC::State>(code->compare_state());
switch (state) {
case CompareIC::UNINITIALIZED:
+ // Uninitialized state means never executed.
+ return unknown;
case CompareIC::SMIS:
return TypeInfo::Smi();
case CompareIC::HEAP_NUMBERS:
@@ -184,6 +186,8 @@
switch (type) {
case TRBinaryOpIC::UNINITIALIZED:
+ // Uninitialized state means never executed.
+ return unknown;
case TRBinaryOpIC::SMI:
switch (result_type) {
case TRBinaryOpIC::UNINITIALIZED:
@@ -224,6 +228,8 @@
CompareIC::State state =
static_cast<CompareIC::State>(code->compare_state());
switch (state) {
case CompareIC::UNINITIALIZED:
+ // Uninitialized state means never executed.
+ return unknown;
case CompareIC::SMIS:
return TypeInfo::Smi();
case CompareIC::HEAP_NUMBERS:
--
v8-dev mailing list
[email protected]
http://groups.google.com/group/v8-dev