Revision: 24123
Author: [email protected]
Date: Mon Sep 22 14:16:38 2014 UTC
Log: Further improve deopt reason output.
* Make the detailed deopt reason mandatory on x64, other platforms
will follow in separate CLs.
* Extracted and improved jump table entry sharing logic: When
--trace-deopt is on, we get separate entries for different deopt
reasons. This enables us to distinguish the several reasons single
instructions can have.
* Don't emit superfluous jump table comments: The bailout ID is still
visible, and the jump table entry number is not interesting (but
easy to determine if really needed).
* Unify the internal name of the jump table member across platforms.
[email protected]
Review URL: https://codereview.chromium.org/595513002
https://code.google.com/p/v8/source/detail?r=24123
Modified:
/branches/bleeding_edge/src/arm/lithium-codegen-arm.cc
/branches/bleeding_edge/src/arm/lithium-codegen-arm.h
/branches/bleeding_edge/src/arm64/lithium-codegen-arm64.cc
/branches/bleeding_edge/src/deoptimizer.h
/branches/bleeding_edge/src/ia32/lithium-codegen-ia32.cc
/branches/bleeding_edge/src/mips/lithium-codegen-mips.cc
/branches/bleeding_edge/src/mips/lithium-codegen-mips.h
/branches/bleeding_edge/src/mips64/lithium-codegen-mips64.cc
/branches/bleeding_edge/src/mips64/lithium-codegen-mips64.h
/branches/bleeding_edge/src/utils.h
/branches/bleeding_edge/src/x64/lithium-codegen-x64.cc
/branches/bleeding_edge/src/x64/lithium-codegen-x64.h
/branches/bleeding_edge/src/x87/lithium-codegen-x87.cc
=======================================
--- /branches/bleeding_edge/src/arm/lithium-codegen-arm.cc Mon Sep 22
09:50:12 2014 UTC
+++ /branches/bleeding_edge/src/arm/lithium-codegen-arm.cc Mon Sep 22
14:16:38 2014 UTC
@@ -319,29 +319,25 @@
// Each entry in the jump table generates one instruction and inlines one
// 32bit data after it.
if (!is_int24((masm()->pc_offset() / Assembler::kInstrSize) +
- deopt_jump_table_.length() * 7)) {
+ jump_table_.length() * 7)) {
Abort(kGeneratedCodeIsTooLarge);
}
- if (deopt_jump_table_.length() > 0) {
+ if (jump_table_.length() > 0) {
Label needs_frame, call_deopt_entry;
Comment(";;; -------------------- Jump table --------------------");
- Address base = deopt_jump_table_[0].address;
+ Address base = jump_table_[0].address;
Register entry_offset = scratch0();
- int length = deopt_jump_table_.length();
+ int length = jump_table_.length();
for (int i = 0; i < length; i++) {
- Deoptimizer::JumpTableEntry* table_entry = &deopt_jump_table_[i];
+ Deoptimizer::JumpTableEntry* table_entry = &jump_table_[i];
__ bind(&table_entry->label);
- Deoptimizer::BailoutType type = table_entry->bailout_type;
- DCHECK(type == deopt_jump_table_[0].bailout_type);
+ DCHECK_EQ(jump_table_[0].bailout_type, table_entry->bailout_type);
Address entry = table_entry->address;
- int id = Deoptimizer::GetDeoptimizationId(isolate(), entry, type);
- DCHECK_NE(Deoptimizer::kNotDeoptimizationEntry, id);
- Comment(";;; jump table entry %d: deoptimization bailout %d.", i, id);
DeoptComment(table_entry->reason);
// Second-level deopt table entries are contiguous and small, so instead
@@ -909,17 +905,15 @@
DeoptComment(reason);
__ Call(entry, RelocInfo::RUNTIME_ENTRY);
} else {
+ Deoptimizer::JumpTableEntry table_entry(entry, reason, bailout_type,
+ !frame_is_built_);
// We often have several deopts to the same entry, reuse the last
// jump entry if this is the case.
- if (deopt_jump_table_.is_empty() ||
- (deopt_jump_table_.last().address != entry) ||
- (deopt_jump_table_.last().bailout_type != bailout_type) ||
- (deopt_jump_table_.last().needs_frame != !frame_is_built_)) {
- Deoptimizer::JumpTableEntry table_entry(entry, reason, bailout_type,
- !frame_is_built_);
- deopt_jump_table_.Add(table_entry, zone());
+ if (jump_table_.is_empty() ||
+ !table_entry.IsEquivalentTo(jump_table_.last())) {
+ jump_table_.Add(table_entry, zone());
}
- __ b(condition, &deopt_jump_table_.last().label);
+ __ b(condition, &jump_table_.last().label);
}
}
=======================================
--- /branches/bleeding_edge/src/arm/lithium-codegen-arm.h Mon Sep 22
06:36:57 2014 UTC
+++ /branches/bleeding_edge/src/arm/lithium-codegen-arm.h Mon Sep 22
14:16:38 2014 UTC
@@ -26,7 +26,7 @@
LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info)
: LCodeGenBase(chunk, assembler, info),
deoptimizations_(4, info->zone()),
- deopt_jump_table_(4, info->zone()),
+ jump_table_(4, info->zone()),
deoptimization_literals_(8, info->zone()),
inlined_function_count_(0),
scope_(info->scope()),
@@ -332,7 +332,7 @@
void EmitVectorLoadICRegisters(T* instr);
ZoneList<LEnvironment*> deoptimizations_;
- ZoneList<Deoptimizer::JumpTableEntry> deopt_jump_table_;
+ ZoneList<Deoptimizer::JumpTableEntry> jump_table_;
ZoneList<Handle<Object> > deoptimization_literals_;
int inlined_function_count_;
Scope* const scope_;
=======================================
--- /branches/bleeding_edge/src/arm64/lithium-codegen-arm64.cc Mon Sep 22
09:50:12 2014 UTC
+++ /branches/bleeding_edge/src/arm64/lithium-codegen-arm64.cc Mon Sep 22
14:16:38 2014 UTC
@@ -839,11 +839,7 @@
Deoptimizer::JumpTableEntry* table_entry = jump_table_[i];
__ Bind(&table_entry->label);
- Deoptimizer::BailoutType type = table_entry->bailout_type;
Address entry = table_entry->address;
- int id = Deoptimizer::GetDeoptimizationId(isolate(), entry, type);
- DCHECK_NE(Deoptimizer::kNotDeoptimizationEntry, id);
- Comment(";;; jump table entry %d: deoptimization bailout %d.", i, id);
DeoptComment(table_entry->reason);
// Second-level deopt table entries are contiguous and small, so instead
@@ -1053,14 +1049,13 @@
DeoptComment(reason);
__ Call(entry, RelocInfo::RUNTIME_ENTRY);
} else {
+ Deoptimizer::JumpTableEntry* table_entry =
+ new (zone()) Deoptimizer::JumpTableEntry(entry, reason, bailout_type,
+ !frame_is_built_);
// We often have several deopts to the same entry, reuse the last
// jump entry if this is the case.
- if (jump_table_.is_empty() || (jump_table_.last()->address != entry) ||
- (jump_table_.last()->bailout_type != bailout_type) ||
- (jump_table_.last()->needs_frame != !frame_is_built_)) {
- Deoptimizer::JumpTableEntry* table_entry =
- new (zone()) Deoptimizer::JumpTableEntry(entry, reason, bailout_type,
- !frame_is_built_);
+ if (jump_table_.is_empty() ||
+ !table_entry->IsEquivalentTo(*jump_table_.last())) {
jump_table_.Add(table_entry, zone());
}
__ B(&jump_table_.last()->label, branch_type, reg, bit);
=======================================
--- /branches/bleeding_edge/src/deoptimizer.h Mon Sep 22 09:05:22 2014 UTC
+++ /branches/bleeding_edge/src/deoptimizer.h Mon Sep 22 14:16:38 2014 UTC
@@ -104,6 +104,15 @@
struct Reason {
Reason(int r, const char* m, const char* d)
: raw_position(r), mnemonic(m), detail(d) {}
+
+ bool operator==(const Reason& other) const {
+ return raw_position == other.raw_position &&
+ CStringEquals(mnemonic, other.mnemonic) &&
+ CStringEquals(detail, other.detail);
+ }
+
+ bool operator!=(const Reason& other) const { return !(*this == other); }
+
int raw_position;
const char* mnemonic;
const char* detail;
@@ -117,6 +126,13 @@
reason(the_reason),
bailout_type(type),
needs_frame(frame) {}
+
+ bool IsEquivalentTo(const JumpTableEntry& other) const {
+ return address == other.address && bailout_type == other.bailout_type &&
+ needs_frame == other.needs_frame &&
+ (!FLAG_trace_deopt || reason == other.reason);
+ }
+
Label label;
Address address;
Reason reason;
=======================================
--- /branches/bleeding_edge/src/ia32/lithium-codegen-ia32.cc Mon Sep 22
09:50:12 2014 UTC
+++ /branches/bleeding_edge/src/ia32/lithium-codegen-ia32.cc Mon Sep 22
14:16:38 2014 UTC
@@ -386,10 +386,6 @@
Deoptimizer::JumpTableEntry* table_entry = &jump_table_[i];
__ bind(&table_entry->label);
Address entry = table_entry->address;
- Deoptimizer::BailoutType type = table_entry->bailout_type;
- int id = Deoptimizer::GetDeoptimizationId(isolate(), entry, type);
- DCHECK_NE(Deoptimizer::kNotDeoptimizationEntry, id);
- Comment(";;; jump table entry %d: deoptimization bailout %d.", i, id);
DeoptComment(table_entry->reason);
if (table_entry->needs_frame) {
DCHECK(!info()->saves_caller_doubles());
@@ -874,14 +870,12 @@
DeoptComment(reason);
__ call(entry, RelocInfo::RUNTIME_ENTRY);
} else {
+ Deoptimizer::JumpTableEntry table_entry(entry, reason, bailout_type,
+ !frame_is_built_);
// We often have several deopts to the same entry, reuse the last
// jump entry if this is the case.
if (jump_table_.is_empty() ||
- jump_table_.last().address != entry ||
- jump_table_.last().needs_frame != !frame_is_built_ ||
- jump_table_.last().bailout_type != bailout_type) {
- Deoptimizer::JumpTableEntry table_entry(entry, reason, bailout_type,
- !frame_is_built_);
+ !table_entry.IsEquivalentTo(jump_table_.last())) {
jump_table_.Add(table_entry, zone());
}
if (cc == no_condition) {
=======================================
--- /branches/bleeding_edge/src/mips/lithium-codegen-mips.cc Mon Sep 22
09:50:12 2014 UTC
+++ /branches/bleeding_edge/src/mips/lithium-codegen-mips.cc Mon Sep 22
14:16:38 2014 UTC
@@ -324,25 +324,21 @@
bool LCodeGen::GenerateJumpTable() {
- if (deopt_jump_table_.length() > 0) {
+ if (jump_table_.length() > 0) {
Label needs_frame, call_deopt_entry;
Comment(";;; -------------------- Jump table --------------------");
- Address base = deopt_jump_table_[0].address;
+ Address base = jump_table_[0].address;
Register entry_offset = t9;
- int length = deopt_jump_table_.length();
+ int length = jump_table_.length();
for (int i = 0; i < length; i++) {
- Deoptimizer::JumpTableEntry* table_entry = &deopt_jump_table_[i];
+ Deoptimizer::JumpTableEntry* table_entry = &jump_table_[i];
__ bind(&table_entry->label);
- Deoptimizer::BailoutType type = table_entry->bailout_type;
- DCHECK(type == deopt_jump_table_[0].bailout_type);
+ DCHECK(table_entry->bailout_type == jump_table_[0].bailout_type);
Address entry = table_entry->address;
- int id = Deoptimizer::GetDeoptimizationId(isolate(), entry, type);
- DCHECK_NE(Deoptimizer::kNotDeoptimizationEntry, id);
- Comment(";;; jump table entry %d: deoptimization bailout %d.", i, id);
DeoptComment(table_entry->reason);
// Second-level deopt table entries are contiguous and small, so instead
@@ -872,17 +868,15 @@
DeoptComment(reason);
__ Call(entry, RelocInfo::RUNTIME_ENTRY, condition, src1, src2);
} else {
+ Deoptimizer::JumpTableEntry table_entry(entry, reason, bailout_type,
+ !frame_is_built_);
// We often have several deopts to the same entry, reuse the last
// jump entry if this is the case.
- if (deopt_jump_table_.is_empty() ||
- (deopt_jump_table_.last().address != entry) ||
- (deopt_jump_table_.last().bailout_type != bailout_type) ||
- (deopt_jump_table_.last().needs_frame != !frame_is_built_)) {
- Deoptimizer::JumpTableEntry table_entry(entry, reason, bailout_type,
- !frame_is_built_);
- deopt_jump_table_.Add(table_entry, zone());
+ if (jump_table_.is_empty() ||
+ !table_entry.IsEquivalentTo(jump_table_.last())) {
+ jump_table_.Add(table_entry, zone());
}
- __ Branch(&deopt_jump_table_.last().label, condition, src1, src2);
+ __ Branch(&jump_table_.last().label, condition, src1, src2);
}
}
=======================================
--- /branches/bleeding_edge/src/mips/lithium-codegen-mips.h Mon Sep 22
06:36:57 2014 UTC
+++ /branches/bleeding_edge/src/mips/lithium-codegen-mips.h Mon Sep 22
14:16:38 2014 UTC
@@ -25,7 +25,7 @@
LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info)
: LCodeGenBase(chunk, assembler, info),
deoptimizations_(4, info->zone()),
- deopt_jump_table_(4, info->zone()),
+ jump_table_(4, info->zone()),
deoptimization_literals_(8, info->zone()),
inlined_function_count_(0),
scope_(info->scope()),
@@ -364,7 +364,7 @@
void EmitVectorLoadICRegisters(T* instr);
ZoneList<LEnvironment*> deoptimizations_;
- ZoneList<Deoptimizer::JumpTableEntry> deopt_jump_table_;
+ ZoneList<Deoptimizer::JumpTableEntry> jump_table_;
ZoneList<Handle<Object> > deoptimization_literals_;
int inlined_function_count_;
Scope* const scope_;
=======================================
--- /branches/bleeding_edge/src/mips64/lithium-codegen-mips64.cc Mon Sep 22
09:50:12 2014 UTC
+++ /branches/bleeding_edge/src/mips64/lithium-codegen-mips64.cc Mon Sep 22
14:16:38 2014 UTC
@@ -300,21 +300,17 @@
bool LCodeGen::GenerateJumpTable() {
- if (deopt_jump_table_.length() > 0) {
+ if (jump_table_.length() > 0) {
Comment(";;; -------------------- Jump table --------------------");
}
Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
Label table_start;
__ bind(&table_start);
Label needs_frame;
- for (int i = 0; i < deopt_jump_table_.length(); i++) {
- Deoptimizer::JumpTableEntry* table_entry = &deopt_jump_table_[i];
+ for (int i = 0; i < jump_table_.length(); i++) {
+ Deoptimizer::JumpTableEntry* table_entry = &jump_table_[i];
__ bind(&table_entry->label);
Address entry = table_entry->address;
- Deoptimizer::BailoutType type = table_entry->bailout_type;
- int id = Deoptimizer::GetDeoptimizationId(isolate(), entry, type);
- DCHECK_NE(Deoptimizer::kNotDeoptimizationEntry, id);
- Comment(";;; jump table entry %d: deoptimization bailout %d.", i, id);
DeoptComment(table_entry->reason);
__ li(t9, Operand(ExternalReference::ForDeoptEntry(entry)));
if (table_entry->needs_frame) {
@@ -822,17 +818,15 @@
DeoptComment(reason);
__ Call(entry, RelocInfo::RUNTIME_ENTRY, condition, src1, src2);
} else {
+ Deoptimizer::JumpTableEntry table_entry(entry, reason, bailout_type,
+ !frame_is_built_);
// We often have several deopts to the same entry, reuse the last
// jump entry if this is the case.
- if (deopt_jump_table_.is_empty() ||
- (deopt_jump_table_.last().address != entry) ||
- (deopt_jump_table_.last().bailout_type != bailout_type) ||
- (deopt_jump_table_.last().needs_frame != !frame_is_built_)) {
- Deoptimizer::JumpTableEntry table_entry(entry, reason, bailout_type,
- !frame_is_built_);
- deopt_jump_table_.Add(table_entry, zone());
+ if (jump_table_.is_empty() ||
+ !table_entry.IsEquivalentTo(jump_table_.last())) {
+ jump_table_.Add(table_entry, zone());
}
- __ Branch(&deopt_jump_table_.last().label, condition, src1, src2);
+ __ Branch(&jump_table_.last().label, condition, src1, src2);
}
}
=======================================
--- /branches/bleeding_edge/src/mips64/lithium-codegen-mips64.h Mon Sep 22
06:36:57 2014 UTC
+++ /branches/bleeding_edge/src/mips64/lithium-codegen-mips64.h Mon Sep 22
14:16:38 2014 UTC
@@ -25,7 +25,7 @@
LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info)
: LCodeGenBase(chunk, assembler, info),
deoptimizations_(4, info->zone()),
- deopt_jump_table_(4, info->zone()),
+ jump_table_(4, info->zone()),
deoptimization_literals_(8, info->zone()),
inlined_function_count_(0),
scope_(info->scope()),
@@ -365,7 +365,7 @@
void EmitVectorLoadICRegisters(T* instr);
ZoneList<LEnvironment*> deoptimizations_;
- ZoneList<Deoptimizer::JumpTableEntry> deopt_jump_table_;
+ ZoneList<Deoptimizer::JumpTableEntry> jump_table_;
ZoneList<Handle<Object> > deoptimization_literals_;
int inlined_function_count_;
Scope* const scope_;
=======================================
--- /branches/bleeding_edge/src/utils.h Mon Sep 15 10:54:49 2014 UTC
+++ /branches/bleeding_edge/src/utils.h Mon Sep 22 14:16:38 2014 UTC
@@ -26,6 +26,13 @@
// ----------------------------------------------------------------------------
// General helper functions
+
+// Returns true iff the two C strings are equal; unlike strcmp, tolerates NULL
+// arguments (two NULLs compare equal, NULL never equals a non-NULL string).
+inline bool CStringEquals(const char* s1, const char* s2) {
+ return (s1 == s2) || (s1 != NULL && s2 != NULL && strcmp(s1, s2) == 0);
+}
+
+
// X must be a power of 2. Returns the number of trailing zeros.
inline int WhichPowerOf2(uint32_t x) {
DCHECK(base::bits::IsPowerOfTwo32(x));
=======================================
--- /branches/bleeding_edge/src/x64/lithium-codegen-x64.cc Mon Sep 22
09:50:12 2014 UTC
+++ /branches/bleeding_edge/src/x64/lithium-codegen-x64.cc Mon Sep 22
14:16:38 2014 UTC
@@ -306,10 +306,6 @@
Deoptimizer::JumpTableEntry* table_entry = &jump_table_[i];
__ bind(&table_entry->label);
Address entry = table_entry->address;
- Deoptimizer::BailoutType type = table_entry->bailout_type;
- int id = Deoptimizer::GetDeoptimizationId(isolate(), entry, type);
- DCHECK_NE(Deoptimizer::kNotDeoptimizationEntry, id);
- Comment(";;; jump table entry %d: deoptimization bailout %d.", i, id);
DeoptComment(table_entry->reason);
if (table_entry->needs_frame) {
DCHECK(!info()->saves_caller_doubles());
@@ -784,14 +780,12 @@
DeoptComment(reason);
__ call(entry, RelocInfo::RUNTIME_ENTRY);
} else {
+ Deoptimizer::JumpTableEntry table_entry(entry, reason, bailout_type,
+ !frame_is_built_);
// We often have several deopts to the same entry, reuse the last
// jump entry if this is the case.
if (jump_table_.is_empty() ||
- jump_table_.last().address != entry ||
- jump_table_.last().needs_frame != !frame_is_built_ ||
- jump_table_.last().bailout_type != bailout_type) {
- Deoptimizer::JumpTableEntry table_entry(entry, reason, bailout_type,
- !frame_is_built_);
+ !table_entry.IsEquivalentTo(jump_table_.last())) {
jump_table_.Add(table_entry, zone());
}
if (cc == no_condition) {
@@ -1039,7 +1033,7 @@
__ andl(dividend, Immediate(mask));
__ negl(dividend);
if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) {
- DeoptimizeIf(zero, instr);
+ DeoptimizeIf(zero, instr, "minus zero");
}
__ jmp(&done, Label::kNear);
}
@@ -1056,7 +1050,7 @@
DCHECK(ToRegister(instr->result()).is(rax));
if (divisor == 0) {
- DeoptimizeIf(no_condition, instr);
+ DeoptimizeIf(no_condition, instr, "division by zero");
return;
}
@@ -1071,7 +1065,7 @@
Label remainder_not_zero;
__ j(not_zero, &remainder_not_zero, Label::kNear);
__ cmpl(dividend, Immediate(0));
- DeoptimizeIf(less, instr);
+ DeoptimizeIf(less, instr, "minus zero");
__ bind(&remainder_not_zero);
}
}
@@ -1093,7 +1087,7 @@
// deopt in this case because we can't return a NaN.
if (hmod->CheckFlag(HValue::kCanBeDivByZero)) {
__ testl(right_reg, right_reg);
- DeoptimizeIf(zero, instr);
+ DeoptimizeIf(zero, instr, "division by zero");
}
// Check for kMinInt % -1, idiv would signal a divide error. We
@@ -1104,7 +1098,7 @@
__ j(not_zero, &no_overflow_possible, Label::kNear);
__ cmpl(right_reg, Immediate(-1));
if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) {
- DeoptimizeIf(equal, instr);
+ DeoptimizeIf(equal, instr, "minus zero");
} else {
__ j(not_equal, &no_overflow_possible, Label::kNear);
__ Set(result_reg, 0);
@@ -1124,7 +1118,7 @@
__ j(not_sign, &positive_left, Label::kNear);
__ idivl(right_reg);
__ testl(result_reg, result_reg);
- DeoptimizeIf(zero, instr);
+ DeoptimizeIf(zero, instr, "minus zero");
__ jmp(&done, Label::kNear);
__ bind(&positive_left);
}
@@ -1150,13 +1144,13 @@
// If the divisor is negative, we have to negate and handle edge cases.
__ negl(dividend);
if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
- DeoptimizeIf(zero, instr);
+ DeoptimizeIf(zero, instr, "minus zero");
}
// Dividing by -1 is basically negation, unless we overflow.
if (divisor == -1) {
if (instr->hydrogen()->CheckFlag(HValue::kLeftCanBeMinInt)) {
- DeoptimizeIf(overflow, instr);
+ DeoptimizeIf(overflow, instr, "overflow");
}
return;
}
@@ -1183,7 +1177,7 @@
DCHECK(ToRegister(instr->result()).is(rdx));
if (divisor == 0) {
- DeoptimizeIf(no_condition, instr);
+ DeoptimizeIf(no_condition, instr, "division by zero");
return;
}
@@ -1191,7 +1185,7 @@
HMathFloorOfDiv* hdiv = instr->hydrogen();
if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) {
__ testl(dividend, dividend);
- DeoptimizeIf(zero, instr);
+ DeoptimizeIf(zero, instr, "minus zero");
}
// Easy case: We need no dynamic check for the dividend and the flooring
@@ -1238,7 +1232,7 @@
// Check for x / 0.
if (hdiv->CheckFlag(HValue::kCanBeDivByZero)) {
__ testl(divisor, divisor);
- DeoptimizeIf(zero, instr);
+ DeoptimizeIf(zero, instr, "division by zero");
}
// Check for (0 / -x) that will produce negative zero.
@@ -1247,7 +1241,7 @@
__ testl(dividend, dividend);
__ j(not_zero, &dividend_not_zero, Label::kNear);
__ testl(divisor, divisor);
- DeoptimizeIf(sign, instr);
+ DeoptimizeIf(sign, instr, "minus zero");
__ bind(&dividend_not_zero);
}
@@ -1257,7 +1251,7 @@
__ cmpl(dividend, Immediate(kMinInt));
__ j(not_zero, &dividend_not_min_int, Label::kNear);
__ cmpl(divisor, Immediate(-1));
- DeoptimizeIf(zero, instr);
+ DeoptimizeIf(zero, instr, "overflow");
__ bind(&dividend_not_min_int);
}
@@ -1286,19 +1280,19 @@
HDiv* hdiv = instr->hydrogen();
if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) {
__ testl(dividend, dividend);
- DeoptimizeIf(zero, instr);
+ DeoptimizeIf(zero, instr, "minus zero");
}
// Check for (kMinInt / -1).
if (hdiv->CheckFlag(HValue::kCanOverflow) && divisor == -1) {
__ cmpl(dividend, Immediate(kMinInt));
- DeoptimizeIf(zero, instr);
+ DeoptimizeIf(zero, instr, "overflow");
}
// Deoptimize if remainder will not be 0.
if (!hdiv->CheckFlag(HInstruction::kAllUsesTruncatingToInt32) &&
divisor != 1 && divisor != -1) {
int32_t mask = divisor < 0 ? -(divisor + 1) : (divisor - 1);
__ testl(dividend, Immediate(mask));
- DeoptimizeIf(not_zero, instr);
+ DeoptimizeIf(not_zero, instr, "remainder not zero");
}
__ Move(result, dividend);
int32_t shift = WhichPowerOf2Abs(divisor);
@@ -1319,7 +1313,7 @@
DCHECK(ToRegister(instr->result()).is(rdx));
if (divisor == 0) {
- DeoptimizeIf(no_condition, instr);
+ DeoptimizeIf(no_condition, instr, "division by zero");
return;
}
@@ -1327,7 +1321,7 @@
HDiv* hdiv = instr->hydrogen();
if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) {
__ testl(dividend, dividend);
- DeoptimizeIf(zero, instr);
+ DeoptimizeIf(zero, instr, "minus zero");
}
__ TruncatingDiv(dividend, Abs(divisor));
@@ -1337,7 +1331,7 @@
__ movl(rax, rdx);
__ imull(rax, rax, Immediate(divisor));
__ subl(rax, dividend);
- DeoptimizeIf(not_equal, instr);
+ DeoptimizeIf(not_equal, instr, "remainder not zero");
}
}
@@ -1357,7 +1351,7 @@
// Check for x / 0.
if (hdiv->CheckFlag(HValue::kCanBeDivByZero)) {
__ testl(divisor, divisor);
- DeoptimizeIf(zero, instr);
+ DeoptimizeIf(zero, instr, "division by zero");
}
// Check for (0 / -x) that will produce negative zero.
@@ -1366,7 +1360,7 @@
__ testl(dividend, dividend);
__ j(not_zero, &dividend_not_zero, Label::kNear);
__ testl(divisor, divisor);
- DeoptimizeIf(sign, instr);
+ DeoptimizeIf(sign, instr, "minus zero");
__ bind(&dividend_not_zero);
}
@@ -1376,7 +1370,7 @@
__ cmpl(dividend, Immediate(kMinInt));
__ j(not_zero, &dividend_not_min_int, Label::kNear);
__ cmpl(divisor, Immediate(-1));
- DeoptimizeIf(zero, instr);
+ DeoptimizeIf(zero, instr, "overflow");
__ bind(&dividend_not_min_int);
}
@@ -1387,7 +1381,7 @@
if (!hdiv->CheckFlag(HValue::kAllUsesTruncatingToInt32)) {
// Deoptimize if remainder is not 0.
__ testl(remainder, remainder);
- DeoptimizeIf(not_zero, instr);
+ DeoptimizeIf(not_zero, instr, "remainder not zero");
}
}
@@ -1464,7 +1458,7 @@
}
if (can_overflow) {
- DeoptimizeIf(overflow, instr);
+ DeoptimizeIf(overflow, instr, "overflow");
}
if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
@@ -1483,10 +1477,10 @@
? !instr->hydrogen_value()->representation().IsSmi()
: SmiValuesAre31Bits());
if (ToInteger32(LConstantOperand::cast(right)) < 0) {
- DeoptimizeIf(no_condition, instr);
+ DeoptimizeIf(no_condition, instr, "minus zero");
} else if (ToInteger32(LConstantOperand::cast(right)) == 0) {
__ cmpl(kScratchRegister, Immediate(0));
- DeoptimizeIf(less, instr);
+ DeoptimizeIf(less, instr, "minus zero");
}
} else if (right->IsStackSlot()) {
if (instr->hydrogen_value()->representation().IsSmi()) {
@@ -1494,7 +1488,7 @@
} else {
__ orl(kScratchRegister, ToOperand(right));
}
- DeoptimizeIf(sign, instr);
+ DeoptimizeIf(sign, instr, "minus zero");
} else {
// Test the non-zero operand for negative sign.
if (instr->hydrogen_value()->representation().IsSmi()) {
@@ -1502,7 +1496,7 @@
} else {
__ orl(kScratchRegister, ToRegister(right));
}
- DeoptimizeIf(sign, instr);
+ DeoptimizeIf(sign, instr, "minus zero");
}
__ bind(&done);
}
@@ -1615,7 +1609,7 @@
__ shrl_cl(ToRegister(left));
if (instr->can_deopt()) {
__ testl(ToRegister(left), ToRegister(left));
- DeoptimizeIf(negative, instr);
+ DeoptimizeIf(negative, instr, "value to shift was negative");
}
break;
case Token::SHL:
@@ -1644,7 +1638,7 @@
__ shrl(ToRegister(left), Immediate(shift_count));
} else if (instr->can_deopt()) {
__ testl(ToRegister(left), ToRegister(left));
- DeoptimizeIf(negative, instr);
+ DeoptimizeIf(negative, instr, "value to shift was negative");
}
break;
case Token::SHL:
@@ -1659,7 +1653,7 @@
__ shll(ToRegister(left), Immediate(shift_count - 1));
}
__ Integer32ToSmi(ToRegister(left), ToRegister(left));
- DeoptimizeIf(overflow, instr);
+ DeoptimizeIf(overflow, instr, "overflow");
} else {
__ shll(ToRegister(left), Immediate(shift_count));
}
@@ -1702,7 +1696,7 @@
}
if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
- DeoptimizeIf(overflow, instr);
+ DeoptimizeIf(overflow, instr, "overflow");
}
}
@@ -1767,9 +1761,9 @@
DCHECK(object.is(rax));
Condition cc = masm()->CheckSmi(object);
- DeoptimizeIf(cc, instr);
+ DeoptimizeIf(cc, instr, "not an object");
__ CmpObjectType(object, JS_DATE_TYPE, kScratchRegister);
- DeoptimizeIf(not_equal, instr);
+ DeoptimizeIf(not_equal, instr, "not a date object");
if (index->value() == 0) {
__ movp(result, FieldOperand(object, JSDate::kValueOffset));
@@ -1933,7 +1927,7 @@
}
}
if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
- DeoptimizeIf(overflow, instr);
+ DeoptimizeIf(overflow, instr, "overflow");
}
}
}
@@ -2178,7 +2172,7 @@
} else if (expected.NeedsMap()) {
// If we need a map later and have a Smi -> deopt.
__ testb(reg, Immediate(kSmiTagMask));
- DeoptimizeIf(zero, instr);
+ DeoptimizeIf(zero, instr, "Smi");
}
const Register map = kScratchRegister;
@@ -2232,7 +2226,7 @@
if (!expected.IsGeneric()) {
// We've seen something for the first time -> deopt.
// This can only happen if we are not generic already.
- DeoptimizeIf(no_condition, instr);
+ DeoptimizeIf(no_condition, instr, "unexpected object");
}
}
}
@@ -2848,7 +2842,7 @@
__ LoadGlobalCell(result, instr->hydrogen()->cell().handle());
if (instr->hydrogen()->RequiresHoleCheck()) {
__ CompareRoot(result, Heap::kTheHoleValueRootIndex);
- DeoptimizeIf(equal, instr);
+ DeoptimizeIf(equal, instr, "hole");
}
}
@@ -2896,7 +2890,7 @@
DCHECK(!value.is(cell));
__ Move(cell, cell_handle, RelocInfo::CELL);
__ CompareRoot(Operand(cell, 0), Heap::kTheHoleValueRootIndex);
- DeoptimizeIf(equal, instr);
+ DeoptimizeIf(equal, instr, "hole");
// Store the value.
__ movp(Operand(cell, 0), value);
} else {
@@ -2915,7 +2909,7 @@
if (instr->hydrogen()->RequiresHoleCheck()) {
__ CompareRoot(result, Heap::kTheHoleValueRootIndex);
if (instr->hydrogen()->DeoptimizesOnHole()) {
- DeoptimizeIf(equal, instr);
+ DeoptimizeIf(equal, instr, "hole");
} else {
Label is_not_hole;
__ j(not_equal, &is_not_hole, Label::kNear);
@@ -2936,7 +2930,7 @@
if (instr->hydrogen()->RequiresHoleCheck()) {
__ CompareRoot(target, Heap::kTheHoleValueRootIndex);
if (instr->hydrogen()->DeoptimizesOnHole()) {
- DeoptimizeIf(equal, instr);
+ DeoptimizeIf(equal, instr, "hole");
} else {
__ j(not_equal, &skip_assignment);
}
@@ -3034,7 +3028,7 @@
// Check that the function has a prototype or an initial map.
__ CompareRoot(result, Heap::kTheHoleValueRootIndex);
- DeoptimizeIf(equal, instr);
+ DeoptimizeIf(equal, instr, "hole");
// If the function does not have an initial map, we're done.
Label done;
@@ -3146,7 +3140,7 @@
__ movl(result, operand);
if (!instr->hydrogen()->CheckFlag(HInstruction::kUint32)) {
__ testl(result, result);
- DeoptimizeIf(negative, instr);
+ DeoptimizeIf(negative, instr, "negative value");
}
break;
case EXTERNAL_FLOAT32_ELEMENTS:
@@ -3185,7 +3179,7 @@
FAST_DOUBLE_ELEMENTS,
instr->base_offset() + sizeof(kHoleNanLower32));
__ cmpl(hole_check_operand, Immediate(kHoleNanUpper32));
- DeoptimizeIf(equal, instr);
+ DeoptimizeIf(equal, instr, "hole");
}
Operand double_load_operand = BuildFastArrayOperand(
@@ -3242,10 +3236,10 @@
if (requires_hole_check) {
if (IsFastSmiElementsKind(hinstr->elements_kind())) {
Condition smi = __ CheckSmi(result);
- DeoptimizeIf(NegateCondition(smi), instr);
+ DeoptimizeIf(NegateCondition(smi), instr, "not a Smi");
} else {
__ CompareRoot(result, Heap::kTheHoleValueRootIndex);
- DeoptimizeIf(equal, instr);
+ DeoptimizeIf(equal, instr, "hole");
}
}
}
@@ -3394,9 +3388,9 @@
// The receiver should be a JS object.
Condition is_smi = __ CheckSmi(receiver);
- DeoptimizeIf(is_smi, instr);
+ DeoptimizeIf(is_smi, instr, "not an object");
__ CmpObjectType(receiver, FIRST_SPEC_OBJECT_TYPE, kScratchRegister);
- DeoptimizeIf(below, instr);
+ DeoptimizeIf(below, instr, "not a spec object");
__ jmp(&receiver_ok, Label::kNear);
__ bind(&global_object);
@@ -3423,7 +3417,7 @@
// adaptor frame below it.
const uint32_t kArgumentsLimit = 1 * KB;
__ cmpp(length, Immediate(kArgumentsLimit));
- DeoptimizeIf(above, instr);
+ DeoptimizeIf(above, instr, "too many arguments");
__ Push(receiver);
__ movp(receiver, length);
@@ -3618,7 +3612,7 @@
Register input_reg = ToRegister(instr->value());
__ CompareRoot(FieldOperand(input_reg, HeapObject::kMapOffset),
Heap::kHeapNumberMapRootIndex);
- DeoptimizeIf(not_equal, instr);
+ DeoptimizeIf(not_equal, instr, "not a heap number");
Label slow, allocated, done;
Register tmp = input_reg.is(rax) ? rcx : rax;
@@ -3664,7 +3658,7 @@
Label is_positive;
__ j(not_sign, &is_positive, Label::kNear);
__ negl(input_reg); // Sets flags.
- DeoptimizeIf(negative, instr);
+ DeoptimizeIf(negative, instr, "overflow");
__ bind(&is_positive);
}
@@ -3675,7 +3669,7 @@
Label is_positive;
__ j(not_sign, &is_positive, Label::kNear);
__ negp(input_reg); // Sets flags.
- DeoptimizeIf(negative, instr);
+ DeoptimizeIf(negative, instr, "overflow");
__ bind(&is_positive);
}
@@ -3730,18 +3724,18 @@
// Deoptimize if minus zero.
__ movq(output_reg, input_reg);
__ subq(output_reg, Immediate(1));
- DeoptimizeIf(overflow, instr);
+ DeoptimizeIf(overflow, instr, "minus zero");
}
__ roundsd(xmm_scratch, input_reg, Assembler::kRoundDown);
__ cvttsd2si(output_reg, xmm_scratch);
__ cmpl(output_reg, Immediate(0x1));
- DeoptimizeIf(overflow, instr);
+ DeoptimizeIf(overflow, instr, "overflow");
} else {
Label negative_sign, done;
// Deoptimize on unordered.
__ xorps(xmm_scratch, xmm_scratch); // Zero the register.
__ ucomisd(input_reg, xmm_scratch);
- DeoptimizeIf(parity_even, instr);
+ DeoptimizeIf(parity_even, instr, "unordered");
__ j(below, &negative_sign, Label::kNear);
if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
@@ -3750,7 +3744,7 @@
__ j(above, &positive_sign, Label::kNear);
__ movmskpd(output_reg, input_reg);
__ testq(output_reg, Immediate(1));
- DeoptimizeIf(not_zero, instr);
+ DeoptimizeIf(not_zero, instr, "minus zero");
__ Set(output_reg, 0);
__ jmp(&done);
__ bind(&positive_sign);
@@ -3760,7 +3754,7 @@
__ cvttsd2si(output_reg, input_reg);
// Overflow is signalled with minint.
__ cmpl(output_reg, Immediate(0x1));
- DeoptimizeIf(overflow, instr);
+ DeoptimizeIf(overflow, instr, "overflow");
__ jmp(&done, Label::kNear);
// Non-zero negative reaches here.
@@ -3771,7 +3765,7 @@
__ ucomisd(input_reg, xmm_scratch);
__ j(equal, &done, Label::kNear);
__ subl(output_reg, Immediate(1));
- DeoptimizeIf(overflow, instr);
+ DeoptimizeIf(overflow, instr, "overflow");
__ bind(&done);
}
@@ -3908,7 +3902,7 @@
Label no_deopt;
__ JumpIfSmi(tagged_exponent, &no_deopt, Label::kNear);
__ CmpObjectType(tagged_exponent, HEAP_NUMBER_TYPE, rcx);
- DeoptimizeIf(not_equal, instr);
+ DeoptimizeIf(not_equal, instr, "not a heap number");
__ bind(&no_deopt);
MathPowStub stub(isolate(), MathPowStub::TAGGED);
__ CallStub(&stub);
@@ -4280,7 +4274,7 @@
__ int3();
__ bind(&done);
} else {
- DeoptimizeIf(cc, instr);
+ DeoptimizeIf(cc, instr, "out of bounds");
}
}
@@ -4528,7 +4522,7 @@
Register temp = ToRegister(instr->temp());
Label no_memento_found;
__ TestJSArrayForAllocationMemento(object, temp, &no_memento_found);
- DeoptimizeIf(equal, instr);
+ DeoptimizeIf(equal, instr, "memento found");
__ bind(&no_memento_found);
}
@@ -4847,12 +4841,12 @@
if (hchange->CheckFlag(HValue::kCanOverflow) &&
hchange->value()->CheckFlag(HValue::kUint32)) {
Condition is_smi = __ CheckUInteger32ValidSmiValue(input);
- DeoptimizeIf(NegateCondition(is_smi), instr);
+ DeoptimizeIf(NegateCondition(is_smi), instr, "not a smi");
}
__ Integer32ToSmi(output, input);
if (hchange->CheckFlag(HValue::kCanOverflow) &&
!hchange->value()->CheckFlag(HValue::kUint32)) {
- DeoptimizeIf(overflow, instr);
+ DeoptimizeIf(overflow, instr, "overflow");
}
}
@@ -4862,7 +4856,7 @@
Register input = ToRegister(instr->value());
if (instr->needs_check()) {
Condition is_smi = __ CheckSmi(input);
- DeoptimizeIf(NegateCondition(is_smi), instr);
+ DeoptimizeIf(NegateCondition(is_smi), instr, "not a smi");
} else {
__ AssertSmi(input);
}
@@ -4893,7 +4887,7 @@
if (can_convert_undefined_to_nan) {
__ j(not_equal, &convert, Label::kNear);
} else {
- DeoptimizeIf(not_equal, instr);
+ DeoptimizeIf(not_equal, instr, "not a heap number");
}
if (deoptimize_on_minus_zero) {
@@ -4903,7 +4897,7 @@
__ j(not_equal, &done, Label::kNear);
__ movmskpd(kScratchRegister, result_reg);
__ testq(kScratchRegister, Immediate(1));
- DeoptimizeIf(not_zero, instr);
+ DeoptimizeIf(not_zero, instr, "minus zero");
}
__ jmp(&done, Label::kNear);
@@ -4912,7 +4906,7 @@
// Convert undefined (and hole) to NaN. Compute NaN as 0/0.
__ CompareRoot(input_reg, Heap::kUndefinedValueRootIndex);
- DeoptimizeIf(not_equal, instr);
+ DeoptimizeIf(not_equal, instr, "neither a heap number nor undefined");
__ xorps(result_reg, result_reg);
__ divsd(result_reg, result_reg);
@@ -5049,7 +5043,7 @@
__ jmp(&done, Label::kNear);
__ bind(&bailout);
- DeoptimizeIf(no_condition, instr);
+ DeoptimizeIf(no_condition, instr, "conversion failed");
__ bind(&done);
}
}
@@ -5071,18 +5065,18 @@
__ jmp(&done, Label::kNear);
__ bind(&bailout);
- DeoptimizeIf(no_condition, instr);
+ DeoptimizeIf(no_condition, instr, "conversion failed");
__ bind(&done);
__ Integer32ToSmi(result_reg, result_reg);
- DeoptimizeIf(overflow, instr);
+ DeoptimizeIf(overflow, instr, "overflow");
}
void LCodeGen::DoCheckSmi(LCheckSmi* instr) {
LOperand* input = instr->value();
Condition cc = masm()->CheckSmi(ToRegister(input));
- DeoptimizeIf(NegateCondition(cc), instr);
+ DeoptimizeIf(NegateCondition(cc), instr, "not a Smi");
}
@@ -5090,7 +5084,7 @@
if (!instr->hydrogen()->value()->type().IsHeapObject()) {
LOperand* input = instr->value();
Condition cc = masm()->CheckSmi(ToRegister(input));
- DeoptimizeIf(cc, instr);
+ DeoptimizeIf(cc, instr, "Smi");
}
}
@@ -5110,14 +5104,14 @@
// If there is only one type in the interval check for equality.
if (first == last) {
- DeoptimizeIf(not_equal, instr);
+ DeoptimizeIf(not_equal, instr, "wrong instance type");
} else {
- DeoptimizeIf(below, instr);
+ DeoptimizeIf(below, instr, "wrong instance type");
// Omit check for the last type.
if (last != LAST_TYPE) {
__ cmpb(FieldOperand(kScratchRegister, Map::kInstanceTypeOffset),
Immediate(static_cast<int8_t>(last)));
- DeoptimizeIf(above, instr);
+ DeoptimizeIf(above, instr, "wrong instance type");
}
}
} else {
@@ -5129,13 +5123,13 @@
DCHECK(tag == 0 || base::bits::IsPowerOfTwo32(tag));
__ testb(FieldOperand(kScratchRegister, Map::kInstanceTypeOffset),
Immediate(mask));
- DeoptimizeIf(tag == 0 ? not_zero : zero, instr);
+ DeoptimizeIf(tag == 0 ? not_zero : zero, instr, "wrong instance type");
} else {
__ movzxbl(kScratchRegister,
FieldOperand(kScratchRegister, Map::kInstanceTypeOffset));
__ andb(kScratchRegister, Immediate(mask));
__ cmpb(kScratchRegister, Immediate(tag));
- DeoptimizeIf(not_equal, instr);
+ DeoptimizeIf(not_equal, instr, "wrong instance type");
}
}
}
@@ -5144,7 +5138,7 @@
void LCodeGen::DoCheckValue(LCheckValue* instr) {
Register reg = ToRegister(instr->value());
__ Cmp(reg, instr->hydrogen()->object().handle());
- DeoptimizeIf(not_equal, instr);
+ DeoptimizeIf(not_equal, instr, "value mismatch");
}
@@ -5159,7 +5153,7 @@
__ testp(rax, Immediate(kSmiTagMask));
}
- DeoptimizeIf(zero, instr);
+ DeoptimizeIf(zero, instr, "instance migration failed");
}
@@ -5212,7 +5206,7 @@
if (instr->hydrogen()->HasMigrationTarget()) {
__ j(not_equal, deferred->entry());
} else {
- DeoptimizeIf(not_equal, instr);
+ DeoptimizeIf(not_equal, instr, "wrong map");
}
__ bind(&success);
@@ -5251,7 +5245,7 @@
// Check for undefined. Undefined is converted to zero for clamping
// conversions.
__ Cmp(input_reg, factory()->undefined_value());
- DeoptimizeIf(not_equal, instr);
+ DeoptimizeIf(not_equal, instr, "neither a heap number nor undefined");
__ xorl(input_reg, input_reg);
__ jmp(&done, Label::kNear);
@@ -5732,19 +5726,19 @@
void LCodeGen::DoForInPrepareMap(LForInPrepareMap* instr) {
DCHECK(ToRegister(instr->context()).is(rsi));
__ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
- DeoptimizeIf(equal, instr);
+ DeoptimizeIf(equal, instr, "undefined");
Register null_value = rdi;
__ LoadRoot(null_value, Heap::kNullValueRootIndex);
__ cmpp(rax, null_value);
- DeoptimizeIf(equal, instr);
+ DeoptimizeIf(equal, instr, "null");
Condition cc = masm()->CheckSmi(rax);
- DeoptimizeIf(cc, instr);
+ DeoptimizeIf(cc, instr, "Smi");
STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
__ CmpObjectType(rax, LAST_JS_PROXY_TYPE, rcx);
- DeoptimizeIf(below_equal, instr);
+ DeoptimizeIf(below_equal, instr, "wrong instance type");
Label use_cache, call_runtime;
__ CheckEnumCache(null_value, &call_runtime);
@@ -5759,7 +5753,7 @@
__ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
Heap::kMetaMapRootIndex);
- DeoptimizeIf(not_equal, instr);
+ DeoptimizeIf(not_equal, instr, "not a meta map");
__ bind(&use_cache);
}
@@ -5781,7 +5775,7 @@
FieldOperand(result, FixedArray::SizeFor(instr->idx())));
__ bind(&done);
Condition cc = masm()->CheckSmi(result);
- DeoptimizeIf(cc, instr);
+ DeoptimizeIf(cc, instr, "Smi");
}
@@ -5789,7 +5783,7 @@
Register object = ToRegister(instr->value());
__ cmpp(ToRegister(instr->map()),
FieldOperand(object, HeapObject::kMapOffset));
- DeoptimizeIf(not_equal, instr);
+ DeoptimizeIf(not_equal, instr, "wrong map");
}
=======================================
--- /branches/bleeding_edge/src/x64/lithium-codegen-x64.h Mon Sep 22
06:36:57 2014 UTC
+++ /branches/bleeding_edge/src/x64/lithium-codegen-x64.h Mon Sep 22
14:16:38 2014 UTC
@@ -208,8 +208,7 @@
Safepoint::DeoptMode mode);
void DeoptimizeIf(Condition cc, LInstruction* instr, const char* detail,
Deoptimizer::BailoutType bailout_type);
- void DeoptimizeIf(Condition cc, LInstruction* instr,
- const char* detail = NULL);
+ void DeoptimizeIf(Condition cc, LInstruction* instr, const char* detail);
bool DeoptEveryNTimes() {
return FLAG_deopt_every_n_times != 0 && !info()->IsStub();
=======================================
--- /branches/bleeding_edge/src/x87/lithium-codegen-x87.cc Mon Sep 22
09:50:12 2014 UTC
+++ /branches/bleeding_edge/src/x87/lithium-codegen-x87.cc Mon Sep 22
14:16:38 2014 UTC
@@ -1155,14 +1155,12 @@
DeoptComment(reason);
__ call(entry, RelocInfo::RUNTIME_ENTRY);
} else {
+ Deoptimizer::JumpTableEntry table_entry(entry, reason, bailout_type,
+ !frame_is_built_);
// We often have several deopts to the same entry, reuse the last
// jump entry if this is the case.
if (jump_table_.is_empty() ||
- jump_table_.last().address != entry ||
- jump_table_.last().needs_frame != !frame_is_built_ ||
- jump_table_.last().bailout_type != bailout_type) {
- Deoptimizer::JumpTableEntry table_entry(entry, reason, bailout_type,
- !frame_is_built_);
+ !table_entry.IsEquivalentTo(jump_table_.last())) {
jump_table_.Add(table_entry, zone());
}
if (cc == no_condition) {
--
--
v8-dev mailing list
[email protected]
http://groups.google.com/group/v8-dev
---
You received this message because you are subscribed to the Google Groups "v8-dev" group.
To unsubscribe from this group and stop receiving emails from it, send an email
to [email protected].
For more options, visit https://groups.google.com/d/optout.