Revision: 11491
Author: [email protected]
Date: Thu May 3 03:54:17 2012
Log: Implement clearing of CompareICs.
This allows CompareICs to be cleared during garbage collection to avoid
cross-context garbage retention through maps stored in CompareIC stubs
for the KNOWN_OBJECTS state.
[email protected]
BUG=v8:2102
Review URL: https://chromiumcodereview.appspot.com/10263008
http://code.google.com/p/v8/source/detail?r=11491
Modified:
/branches/bleeding_edge/src/arm/ic-arm.cc
/branches/bleeding_edge/src/arm/macro-assembler-arm.cc
/branches/bleeding_edge/src/code-stubs.h
/branches/bleeding_edge/src/ia32/assembler-ia32.h
/branches/bleeding_edge/src/ia32/ic-ia32.cc
/branches/bleeding_edge/src/ia32/macro-assembler-ia32.cc
/branches/bleeding_edge/src/ic.cc
/branches/bleeding_edge/src/ic.h
/branches/bleeding_edge/src/objects-inl.h
/branches/bleeding_edge/src/objects.cc
/branches/bleeding_edge/src/objects.h
/branches/bleeding_edge/src/x64/assembler-x64.h
/branches/bleeding_edge/src/x64/ic-x64.cc
/branches/bleeding_edge/src/x64/macro-assembler-x64.cc
=======================================
--- /branches/bleeding_edge/src/arm/ic-arm.cc Wed Feb 29 02:45:59 2012
+++ /branches/bleeding_edge/src/arm/ic-arm.cc Thu May 3 03:54:17 2012
@@ -1690,12 +1690,12 @@
// Activate inlined smi code.
if (previous_state == UNINITIALIZED) {
- PatchInlinedSmiCode(address());
+ PatchInlinedSmiCode(address(), ENABLE_INLINED_SMI_CHECK);
}
}
-void PatchInlinedSmiCode(Address address) {
+void PatchInlinedSmiCode(Address address, InlinedSmiCheck check) {
Address cmp_instruction_address =
address + Assembler::kCallTargetAddressOffset;
@@ -1729,34 +1729,31 @@
Instr instr_at_patch = Assembler::instr_at(patch_address);
Instr branch_instr =
Assembler::instr_at(patch_address + Instruction::kInstrSize);
- ASSERT(Assembler::IsCmpRegister(instr_at_patch));
- ASSERT_EQ(Assembler::GetRn(instr_at_patch).code(),
- Assembler::GetRm(instr_at_patch).code());
+ // This is patching a conditional "jump if not smi/jump if smi" site.
+ // Enabling by changing from
+ // cmp rx, rx
+ // b eq/ne, <target>
+ // to
+ // tst rx, #kSmiTagMask
+ // b ne/eq, <target>
+ // and vice-versa to be disabled again.
+ CodePatcher patcher(patch_address, 2);
+ Register reg = Assembler::GetRn(instr_at_patch);
+ if (check == ENABLE_INLINED_SMI_CHECK) {
+ ASSERT(Assembler::IsCmpRegister(instr_at_patch));
+ ASSERT_EQ(Assembler::GetRn(instr_at_patch).code(),
+ Assembler::GetRm(instr_at_patch).code());
+ patcher.masm()->tst(reg, Operand(kSmiTagMask));
+ } else {
+ ASSERT(check == DISABLE_INLINED_SMI_CHECK);
+ ASSERT(Assembler::IsTstImmediate(instr_at_patch));
+ patcher.masm()->cmp(reg, reg);
+ }
ASSERT(Assembler::IsBranch(branch_instr));
if (Assembler::GetCondition(branch_instr) == eq) {
- // This is patching a "jump if not smi" site to be active.
- // Changing
- // cmp rx, rx
- // b eq, <target>
- // to
- // tst rx, #kSmiTagMask
- // b ne, <target>
- CodePatcher patcher(patch_address, 2);
- Register reg = Assembler::GetRn(instr_at_patch);
- patcher.masm()->tst(reg, Operand(kSmiTagMask));
patcher.EmitCondition(ne);
} else {
ASSERT(Assembler::GetCondition(branch_instr) == ne);
- // This is patching a "jump if smi" site to be active.
- // Changing
- // cmp rx, rx
- // b ne, <target>
- // to
- // tst rx, #kSmiTagMask
- // b eq, <target>
- CodePatcher patcher(patch_address, 2);
- Register reg = Assembler::GetRn(instr_at_patch);
- patcher.masm()->tst(reg, Operand(kSmiTagMask));
patcher.EmitCondition(eq);
}
}
=======================================
--- /branches/bleeding_edge/src/arm/macro-assembler-arm.cc Tue Apr 24 08:59:07 2012
+++ /branches/bleeding_edge/src/arm/macro-assembler-arm.cc Thu May 3 03:54:17 2012
@@ -3738,7 +3738,7 @@
: address_(address),
instructions_(instructions),
size_(instructions * Assembler::kInstrSize),
- masm_(Isolate::Current(), address, size_ + Assembler::kGap) {
+ masm_(NULL, address, size_ + Assembler::kGap) {
// Create a new macro assembler pointing to the address of the code to patch.
// The size is adjusted with kGap in order for the assembler to generate size
// bytes of instructions without failing with buffer size constraints.
=======================================
--- /branches/bleeding_edge/src/code-stubs.h Tue Feb 14 06:00:31 2012
+++ /branches/bleeding_edge/src/code-stubs.h Thu May 3 03:54:17 2012
@@ -498,6 +498,7 @@
virtual void FinishCode(Handle<Code> code) {
code->set_compare_state(state_);
+ code->set_compare_operation(op_);
}
virtual CodeStub::Major MajorKey() { return CompareIC; }
=======================================
--- /branches/bleeding_edge/src/ia32/assembler-ia32.h Wed Mar 21 07:29:14 2012
+++ /branches/bleeding_edge/src/ia32/assembler-ia32.h Thu May 3 03:54:17 2012
@@ -640,6 +640,9 @@
static const byte kJccShortPrefix = 0x70;
static const byte kJncShortOpcode = kJccShortPrefix | not_carry;
static const byte kJcShortOpcode = kJccShortPrefix | carry;
+ static const byte kJnzShortOpcode = kJccShortPrefix | not_zero;
+ static const byte kJzShortOpcode = kJccShortPrefix | zero;
+
// ---------------------------------------------------------------------------
// Code generation
=======================================
--- /branches/bleeding_edge/src/ia32/ic-ia32.cc Fri Apr 27 06:05:45 2012
+++ /branches/bleeding_edge/src/ia32/ic-ia32.cc Thu May 3 03:54:17 2012
@@ -1727,12 +1727,12 @@
// Activate inlined smi code.
if (previous_state == UNINITIALIZED) {
- PatchInlinedSmiCode(address());
+ PatchInlinedSmiCode(address(), ENABLE_INLINED_SMI_CHECK);
}
}
-void PatchInlinedSmiCode(Address address) {
+void PatchInlinedSmiCode(Address address, InlinedSmiCheck check) {
// The address of the instruction following the call.
Address test_instruction_address =
address + Assembler::kCallTargetAddressOffset;
@@ -1753,14 +1753,18 @@
address, test_instruction_address, delta);
}
- // Patch with a short conditional jump. There must be a
- // short jump-if-carry/not-carry at this position.
+ // Patch with a short conditional jump. Enabling means switching from a short
+ // jump-if-carry/not-carry to jump-if-zero/not-zero, whereas disabling is the
+ // reverse operation of that.
Address jmp_address = test_instruction_address - delta;
- ASSERT(*jmp_address == Assembler::kJncShortOpcode ||
- *jmp_address == Assembler::kJcShortOpcode);
- Condition cc = *jmp_address == Assembler::kJncShortOpcode
- ? not_zero
- : zero;
+ ASSERT((check == ENABLE_INLINED_SMI_CHECK)
+ ? (*jmp_address == Assembler::kJncShortOpcode ||
+ *jmp_address == Assembler::kJcShortOpcode)
+ : (*jmp_address == Assembler::kJnzShortOpcode ||
+ *jmp_address == Assembler::kJzShortOpcode));
+ Condition cc = (check == ENABLE_INLINED_SMI_CHECK)
+ ? (*jmp_address == Assembler::kJncShortOpcode ? not_zero : zero)
+ : (*jmp_address == Assembler::kJnzShortOpcode ? not_carry : carry);
*jmp_address = static_cast<byte>(Assembler::kJccShortPrefix | cc);
}
=======================================
--- /branches/bleeding_edge/src/ia32/macro-assembler-ia32.cc Wed Feb 22 04:47:42 2012
+++ /branches/bleeding_edge/src/ia32/macro-assembler-ia32.cc Thu May 3 03:54:17 2012
@@ -2566,7 +2566,7 @@
CodePatcher::CodePatcher(byte* address, int size)
: address_(address),
size_(size),
- masm_(Isolate::Current(), address, size + Assembler::kGap) {
+ masm_(NULL, address, size + Assembler::kGap) {
// Create a new macro assembler pointing to the address of the code to patch.
// The size is adjusted with kGap in order for the assembler to generate size
// bytes of instructions without failing with buffer size constraints.
=======================================
--- /branches/bleeding_edge/src/ic.cc Thu Apr 12 01:35:30 2012
+++ /branches/bleeding_edge/src/ic.cc Thu May 3 03:54:17 2012
@@ -352,9 +352,9 @@
return KeyedStoreIC::Clear(address, target);
case Code::CALL_IC: return CallIC::Clear(address, target);
case Code::KEYED_CALL_IC: return KeyedCallIC::Clear(address, target);
+ case Code::COMPARE_IC: return CompareIC::Clear(address, target);
case Code::UNARY_OP_IC:
case Code::BINARY_OP_IC:
- case Code::COMPARE_IC:
case Code::TO_BOOLEAN_IC:
// Clearing these is tricky and does not
// make any performance difference.
@@ -365,9 +365,8 @@
void CallICBase::Clear(Address address, Code* target) {
+ if (target->ic_state() == UNINITIALIZED) return;
bool contextual =
CallICBase::Contextual::decode(target->extra_ic_state());
- State state = target->ic_state();
- if (state == UNINITIALIZED) return;
Code* code =
Isolate::Current()->stub_cache()->FindCallInitialize(
target->arguments_count(),
@@ -408,6 +407,17 @@
? initialize_stub_strict()
: initialize_stub());
}
+
+
+void CompareIC::Clear(Address address, Code* target) {
+ // Only clear ICCompareStubs, we currently cannot clear generic CompareStubs.
+ if (target->major_key() != CodeStub::CompareIC) return;
+ // Only clear CompareICs that can retain objects.
+ if (target->compare_state() != KNOWN_OBJECTS) return;
+ Token::Value op = CompareIC::ComputeOperation(target);
+ SetTargetAtAddress(address, GetRawUninitialized(op));
+ PatchInlinedSmiCode(address, DISABLE_INLINED_SMI_CHECK);
+}
static bool HasInterceptorGetter(JSObject* object) {
@@ -2396,7 +2406,7 @@
// Activate inlined smi code.
if (previous_type == BinaryOpIC::UNINITIALIZED) {
- PatchInlinedSmiCode(ic.address());
+ PatchInlinedSmiCode(ic.address(), ENABLE_INLINED_SMI_CHECK);
}
}
@@ -2455,6 +2465,14 @@
}
return *result;
}
+
+
+Code* CompareIC::GetRawUninitialized(Token::Value op) {
+ ICCompareStub stub(op, UNINITIALIZED);
+ Code* code = NULL;
+ CHECK(stub.FindCodeInCache(&code));
+ return code;
+}
Handle<Code> CompareIC::GetUninitialized(Token::Value op) {
@@ -2469,6 +2487,12 @@
ASSERT(key == CodeStub::CompareIC);
return static_cast<State>(target->compare_state());
}
+
+
+Token::Value CompareIC::ComputeOperation(Code* target) {
+ ASSERT(target->major_key() == CodeStub::CompareIC);
+ return static_cast<Token::Value>(target->compare_operation());
+}
const char* CompareIC::GetStateName(State state) {
=======================================
--- /branches/bleeding_edge/src/ic.h Mon Feb 20 04:57:23 2012
+++ /branches/bleeding_edge/src/ic.h Thu May 3 03:54:17 2012
@@ -794,6 +794,9 @@
// Helper function for determining the state of a compare IC.
static State ComputeState(Code* target);
+ // Helper function for determining the operation a compare IC is for.
+ static Token::Value ComputeOperation(Code* target);
+
static const char* GetStateName(State state);
private:
@@ -803,8 +806,14 @@
bool strict() const { return op_ == Token::EQ_STRICT; }
Condition GetCondition() const { return ComputeCondition(op_); }
State GetState() { return ComputeState(target()); }
+
+ static Code* GetRawUninitialized(Token::Value op);
+
+ static void Clear(Address address, Code* target);
Token::Value op_;
+
+ friend class IC;
};
@@ -817,7 +826,8 @@
// Helper for BinaryOpIC and CompareIC.
-void PatchInlinedSmiCode(Address address);
+enum InlinedSmiCheck { ENABLE_INLINED_SMI_CHECK, DISABLE_INLINED_SMI_CHECK };
+void PatchInlinedSmiCode(Address address, InlinedSmiCheck check);
} } // namespace v8::internal
=======================================
--- /branches/bleeding_edge/src/objects-inl.h Wed May 2 02:58:42 2012
+++ /branches/bleeding_edge/src/objects-inl.h Thu May 3 03:54:17 2012
@@ -3191,6 +3191,18 @@
ASSERT(is_compare_ic_stub());
WRITE_BYTE_FIELD(this, kCompareStateOffset, value);
}
+
+
+byte Code::compare_operation() {
+ ASSERT(is_compare_ic_stub());
+ return READ_BYTE_FIELD(this, kCompareOperationOffset);
+}
+
+
+void Code::set_compare_operation(byte value) {
+ ASSERT(is_compare_ic_stub());
+ WRITE_BYTE_FIELD(this, kCompareOperationOffset, value);
+}
byte Code::to_boolean_state() {
=======================================
--- /branches/bleeding_edge/src/objects.cc Mon Apr 30 04:54:34 2012
+++ /branches/bleeding_edge/src/objects.cc Thu May 3 03:54:17 2012
@@ -8378,6 +8378,10 @@
CompareIC::State state = CompareIC::ComputeState(this);
PrintF(out, "compare_state = %s\n", CompareIC::GetStateName(state));
}
+ if (is_compare_ic_stub() && major_key() == CodeStub::CompareIC) {
+ Token::Value op = CompareIC::ComputeOperation(this);
+ PrintF(out, "compare_operation = %s\n", Token::Name(op));
+ }
}
if ((name != NULL) && (name[0] != '\0')) {
PrintF(out, "name = %s\n", name);
=======================================
--- /branches/bleeding_edge/src/objects.h Mon Apr 30 04:54:34 2012
+++ /branches/bleeding_edge/src/objects.h Thu May 3 03:54:17 2012
@@ -4291,6 +4291,11 @@
inline byte compare_state();
inline void set_compare_state(byte value);
+ // [compare_operation]: For kind COMPARE_IC tells what compare operation the
+ // stub was generated for.
+ inline byte compare_operation();
+ inline void set_compare_operation(byte value);
+
// [to_boolean_foo]: For kind TO_BOOLEAN_IC tells what state the stub is in.
inline byte to_boolean_state();
inline void set_to_boolean_state(byte value);
@@ -4474,6 +4479,8 @@
static const int kBinaryOpReturnTypeOffset = kBinaryOpTypeOffset + 1;
+ static const int kCompareOperationOffset = kCompareStateOffset + 1;
+
static const int kAllowOSRAtLoopNestingLevelOffset = kFullCodeFlags + 1;
static const int kProfilerTicksOffset =
kAllowOSRAtLoopNestingLevelOffset + 1;
=======================================
--- /branches/bleeding_edge/src/x64/assembler-x64.h Wed Mar 21 07:29:14 2012
+++ /branches/bleeding_edge/src/x64/assembler-x64.h Thu May 3 03:54:17 2012
@@ -629,7 +629,8 @@
static const byte kJccShortPrefix = 0x70;
static const byte kJncShortOpcode = kJccShortPrefix | not_carry;
static const byte kJcShortOpcode = kJccShortPrefix | carry;
-
+ static const byte kJnzShortOpcode = kJccShortPrefix | not_zero;
+ static const byte kJzShortOpcode = kJccShortPrefix | zero;
// ---------------------------------------------------------------------------
=======================================
--- /branches/bleeding_edge/src/x64/ic-x64.cc Thu Feb 2 05:17:46 2012
+++ /branches/bleeding_edge/src/x64/ic-x64.cc Thu May 3 03:54:17 2012
@@ -1741,11 +1741,11 @@
// Activate inlined smi code.
if (previous_state == UNINITIALIZED) {
- PatchInlinedSmiCode(address());
+ PatchInlinedSmiCode(address(), ENABLE_INLINED_SMI_CHECK);
}
}
-void PatchInlinedSmiCode(Address address) {
+void PatchInlinedSmiCode(Address address, InlinedSmiCheck check) {
// The address of the instruction following the call.
Address test_instruction_address =
address + Assembler::kCallTargetAddressOffset;
@@ -1766,14 +1766,18 @@
address, test_instruction_address, delta);
}
- // Patch with a short conditional jump. There must be a
- // short jump-if-carry/not-carry at this position.
+ // Patch with a short conditional jump. Enabling means switching from a short
+ // jump-if-carry/not-carry to jump-if-zero/not-zero, whereas disabling is the
+ // reverse operation of that.
Address jmp_address = test_instruction_address - delta;
- ASSERT(*jmp_address == Assembler::kJncShortOpcode ||
- *jmp_address == Assembler::kJcShortOpcode);
- Condition cc = *jmp_address == Assembler::kJncShortOpcode
- ? not_zero
- : zero;
+ ASSERT((check == ENABLE_INLINED_SMI_CHECK)
+ ? (*jmp_address == Assembler::kJncShortOpcode ||
+ *jmp_address == Assembler::kJcShortOpcode)
+ : (*jmp_address == Assembler::kJnzShortOpcode ||
+ *jmp_address == Assembler::kJzShortOpcode));
+ Condition cc = (check == ENABLE_INLINED_SMI_CHECK)
+ ? (*jmp_address == Assembler::kJncShortOpcode ? not_zero : zero)
+ : (*jmp_address == Assembler::kJnzShortOpcode ? not_carry : carry);
*jmp_address = static_cast<byte>(Assembler::kJccShortPrefix | cc);
}
=======================================
--- /branches/bleeding_edge/src/x64/macro-assembler-x64.cc Mon Apr 16 03:53:26 2012
+++ /branches/bleeding_edge/src/x64/macro-assembler-x64.cc Thu May 3 03:54:17 2012
@@ -4188,7 +4188,7 @@
CodePatcher::CodePatcher(byte* address, int size)
: address_(address),
size_(size),
- masm_(Isolate::Current(), address, size + Assembler::kGap) {
+ masm_(NULL, address, size + Assembler::kGap) {
// Create a new macro assembler pointing to the address of the code to patch.
// The size is adjusted with kGap in order for the assembler to generate size
// bytes of instructions without failing with buffer size constraints.
--
v8-dev mailing list
[email protected]
http://groups.google.com/group/v8-dev