Revision: 14658
Author: [email protected]
Date: Tue May 14 04:45:33 2013
Log: Ensure that soft-deopts don't count against opt_count
This makes sure that Crankshaft doesn't disable optimization too early on
hot functions that still contain unexecuted code without type information.
[email protected]
Review URL: https://codereview.chromium.org/14738009
http://code.google.com/p/v8/source/detail?r=14658
Modified:
/branches/bleeding_edge/src/arm/builtins-arm.cc
/branches/bleeding_edge/src/arm/deoptimizer-arm.cc
/branches/bleeding_edge/src/arm/lithium-codegen-arm.cc
/branches/bleeding_edge/src/arm/lithium-codegen-arm.h
/branches/bleeding_edge/src/builtins.h
/branches/bleeding_edge/src/deoptimizer.cc
/branches/bleeding_edge/src/deoptimizer.h
/branches/bleeding_edge/src/disassembler.cc
/branches/bleeding_edge/src/ia32/builtins-ia32.cc
/branches/bleeding_edge/src/ia32/deoptimizer-ia32.cc
/branches/bleeding_edge/src/ia32/lithium-codegen-ia32.cc
/branches/bleeding_edge/src/ia32/lithium-codegen-ia32.h
/branches/bleeding_edge/src/objects.cc
/branches/bleeding_edge/src/runtime.cc
/branches/bleeding_edge/src/x64/builtins-x64.cc
/branches/bleeding_edge/src/x64/deoptimizer-x64.cc
/branches/bleeding_edge/src/x64/lithium-codegen-x64.cc
/branches/bleeding_edge/src/x64/lithium-codegen-x64.h
=======================================
--- /branches/bleeding_edge/src/arm/builtins-arm.cc Thu Apr 25 09:00:32 2013
+++ /branches/bleeding_edge/src/arm/builtins-arm.cc Tue May 14 04:45:33 2013
@@ -1338,6 +1338,11 @@
void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}
+
+
+void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
+ Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
+}
void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
=======================================
--- /branches/bleeding_edge/src/arm/deoptimizer-arm.cc Mon May 13 12:22:18
2013
+++ /branches/bleeding_edge/src/arm/deoptimizer-arm.cc Tue May 14 04:45:33
2013
@@ -544,9 +544,14 @@
// Set the continuation for the topmost frame.
if (is_topmost && bailout_type_ != DEBUGGER) {
Builtins* builtins = isolate_->builtins();
- Code* continuation = (bailout_type_ == EAGER)
- ? builtins->builtin(Builtins::kNotifyDeoptimized)
- : builtins->builtin(Builtins::kNotifyLazyDeoptimized);
+ Code* continuation = builtins->builtin(Builtins::kNotifyDeoptimized);
+ if (bailout_type_ == LAZY) {
+ continuation = builtins->builtin(Builtins::kNotifyLazyDeoptimized);
+ } else if (bailout_type_ == SOFT) {
+ continuation = builtins->builtin(Builtins::kNotifySoftDeoptimized);
+ } else {
+ ASSERT(bailout_type_ == EAGER);
+ }
output_frame->SetContinuation(
reinterpret_cast<uint32_t>(continuation->entry()));
}
@@ -639,7 +644,7 @@
// Get the address of the location in the code object if possible (r3)
(return
// address for lazy deoptimization) and compute the fp-to-sp delta in
// register r4.
- if (type() == EAGER) {
+ if (type() == EAGER || type() == SOFT) {
__ mov(r3, Operand::Zero());
// Correct one word for bailout id.
__ add(r4, sp, Operand(kSavedRegistersAreaSize + (1 * kPointerSize)));
@@ -694,7 +699,7 @@
// Remove the bailout id, eventually return address, and the saved
registers
// from the stack.
- if (type() == EAGER || type() == OSR) {
+ if (type() == EAGER || type() == SOFT || type() == OSR) {
__ add(sp, sp, Operand(kSavedRegistersAreaSize + (1 * kPointerSize)));
} else {
__ add(sp, sp, Operand(kSavedRegistersAreaSize + (2 * kPointerSize)));
@@ -813,7 +818,7 @@
for (int i = 0; i < count(); i++) {
int start = masm()->pc_offset();
USE(start);
- if (type() == EAGER) {
+ if (type() == EAGER || type() == SOFT) {
__ nop();
} else {
// Emulate ia32 like call by pushing return address to stack.
=======================================
--- /branches/bleeding_edge/src/arm/lithium-codegen-arm.cc Mon May 13
00:35:26 2013
+++ /branches/bleeding_edge/src/arm/lithium-codegen-arm.cc Tue May 14
04:45:33 2013
@@ -360,9 +360,7 @@
for (int i = 0; i < deopt_jump_table_.length(); i++) {
__ bind(&deopt_jump_table_[i].label);
Address entry = deopt_jump_table_[i].address;
- bool is_lazy_deopt = deopt_jump_table_[i].is_lazy_deopt;
- Deoptimizer::BailoutType type =
- is_lazy_deopt ? Deoptimizer::LAZY : Deoptimizer::EAGER;
+ Deoptimizer::BailoutType type = deopt_jump_table_[i].bailout_type;
int id = Deoptimizer::GetDeoptimizationId(isolate(), entry, type);
if (id == Deoptimizer::kNotDeoptimizationEntry) {
Comment(";;; jump table entry %d.", i);
@@ -371,7 +369,7 @@
}
if (deopt_jump_table_[i].needs_frame) {
__ mov(ip, Operand(ExternalReference::ForDeoptEntry(entry)));
- if (is_lazy_deopt) {
+ if (type == Deoptimizer::LAZY) {
if (needs_frame_is_call.is_bound()) {
__ b(&needs_frame_is_call);
} else {
@@ -404,7 +402,7 @@
}
}
} else {
- if (is_lazy_deopt) {
+ if (type == Deoptimizer::LAZY) {
__ mov(lr, Operand(pc), LeaveCC, al);
__ mov(pc, Operand(ExternalReference::ForDeoptEntry(entry)));
} else {
@@ -833,14 +831,13 @@
}
-void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) {
+void LCodeGen::DeoptimizeIf(Condition cc,
+ LEnvironment* environment,
+ Deoptimizer::BailoutType bailout_type) {
RegisterEnvironmentForDeoptimization(environment,
Safepoint::kNoLazyDeopt);
ASSERT(environment->HasBeenRegistered());
int id = environment->deoptimization_index();
ASSERT(info()->IsOptimizing() || info()->IsStub());
- Deoptimizer::BailoutType bailout_type = info()->IsStub()
- ? Deoptimizer::LAZY
- : Deoptimizer::EAGER;
Address entry =
Deoptimizer::GetDeoptimizationEntry(isolate(), id, bailout_type);
if (entry == NULL) {
@@ -873,14 +870,31 @@
// jump entry if this is the case.
if (deopt_jump_table_.is_empty() ||
(deopt_jump_table_.last().address != entry) ||
- (deopt_jump_table_.last().is_lazy_deopt != needs_lazy_deopt) ||
+ (deopt_jump_table_.last().bailout_type != bailout_type) ||
(deopt_jump_table_.last().needs_frame != !frame_is_built_)) {
- JumpTableEntry table_entry(entry, !frame_is_built_,
needs_lazy_deopt);
+ Deoptimizer::JumpTableEntry table_entry(entry,
+ bailout_type,
+ !frame_is_built_);
deopt_jump_table_.Add(table_entry, zone());
}
__ b(cc, &deopt_jump_table_.last().label);
}
}
+
+
+void LCodeGen::DeoptimizeIf(Condition cc,
+ LEnvironment* environment) {
+ Deoptimizer::BailoutType bailout_type = info()->IsStub()
+ ? Deoptimizer::LAZY
+ : Deoptimizer::EAGER;
+ DeoptimizeIf(cc, environment, bailout_type);
+}
+
+
+void LCodeGen::SoftDeoptimize(LEnvironment* environment) {
+ ASSERT(!info()->IsStub());
+ DeoptimizeIf(al, environment, Deoptimizer::SOFT);
+}
void LCodeGen::RegisterDependentCodeForEmbeddedMaps(Handle<Code> code) {
@@ -5722,7 +5736,11 @@
void LCodeGen::DoDeoptimize(LDeoptimize* instr) {
- DeoptimizeIf(al, instr->environment());
+ if (instr->hydrogen_value()->IsSoftDeoptimize()) {
+ SoftDeoptimize(instr->environment());
+ } else {
+ DeoptimizeIf(al, instr->environment());
+ }
}
=======================================
--- /branches/bleeding_edge/src/arm/lithium-codegen-arm.h Fri Apr 26
08:30:41 2013
+++ /branches/bleeding_edge/src/arm/lithium-codegen-arm.h Tue May 14
04:45:33 2013
@@ -290,7 +290,11 @@
void RegisterEnvironmentForDeoptimization(LEnvironment* environment,
Safepoint::DeoptMode mode);
+ void DeoptimizeIf(Condition cc,
+ LEnvironment* environment,
+ Deoptimizer::BailoutType bailout_type);
void DeoptimizeIf(Condition cc, LEnvironment* environment);
+ void SoftDeoptimize(LEnvironment* environment);
void AddToTranslation(Translation* translation,
LOperand* op,
@@ -387,18 +391,6 @@
Register scratch,
LEnvironment* environment);
- struct JumpTableEntry {
- inline JumpTableEntry(Address entry, bool frame, bool is_lazy)
- : label(),
- address(entry),
- needs_frame(frame),
- is_lazy_deopt(is_lazy) { }
- Label label;
- Address address;
- bool needs_frame;
- bool is_lazy_deopt;
- };
-
void EnsureSpaceForLazyDeopt();
void DoLoadKeyedExternalArray(LLoadKeyed* instr);
void DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr);
@@ -416,7 +408,7 @@
int current_instruction_;
const ZoneList<LInstruction*>* instructions_;
ZoneList<LEnvironment*> deoptimizations_;
- ZoneList<JumpTableEntry> deopt_jump_table_;
+ ZoneList<Deoptimizer::JumpTableEntry> deopt_jump_table_;
ZoneList<Handle<Object> > deoptimization_literals_;
ZoneList<Handle<Map> > prototype_maps_;
ZoneList<Handle<Map> > transition_maps_;
=======================================
--- /branches/bleeding_edge/src/builtins.h Wed May 8 08:02:08 2013
+++ /branches/bleeding_edge/src/builtins.h Tue May 14 04:45:33 2013
@@ -107,6 +107,8 @@
Code::kNoExtraICState) \
V(NotifyDeoptimized, BUILTIN, UNINITIALIZED, \
Code::kNoExtraICState) \
+ V(NotifySoftDeoptimized, BUILTIN, UNINITIALIZED, \
+ Code::kNoExtraICState) \
V(NotifyLazyDeoptimized, BUILTIN, UNINITIALIZED, \
Code::kNoExtraICState) \
V(NotifyStubFailure, BUILTIN, UNINITIALIZED, \
@@ -380,6 +382,7 @@
static void Generate_LazyCompile(MacroAssembler* masm);
static void Generate_LazyRecompile(MacroAssembler* masm);
static void Generate_NotifyDeoptimized(MacroAssembler* masm);
+ static void Generate_NotifySoftDeoptimized(MacroAssembler* masm);
static void Generate_NotifyLazyDeoptimized(MacroAssembler* masm);
static void Generate_NotifyOSR(MacroAssembler* masm);
static void Generate_NotifyStubFailure(MacroAssembler* masm);
=======================================
--- /branches/bleeding_edge/src/deoptimizer.cc Thu Apr 18 02:50:46 2013
+++ /branches/bleeding_edge/src/deoptimizer.cc Tue May 14 04:45:33 2013
@@ -50,22 +50,23 @@
DeoptimizerData::DeoptimizerData(MemoryAllocator* allocator)
: allocator_(allocator),
- eager_deoptimization_entry_code_entries_(-1),
- lazy_deoptimization_entry_code_entries_(-1),
- eager_deoptimization_entry_code_(AllocateCodeChunk(allocator)),
- lazy_deoptimization_entry_code_(AllocateCodeChunk(allocator)),
current_(NULL),
#ifdef ENABLE_DEBUGGER_SUPPORT
deoptimized_frame_info_(NULL),
#endif
- deoptimizing_code_list_(NULL) { }
+ deoptimizing_code_list_(NULL) {
+ for (int i = 0; i < Deoptimizer::kBailoutTypesWithCodeEntry; ++i) {
+ deopt_entry_code_entries_[i] = -1;
+ deopt_entry_code_[i] = AllocateCodeChunk(allocator);
+ }
+}
DeoptimizerData::~DeoptimizerData() {
- allocator_->Free(eager_deoptimization_entry_code_);
- eager_deoptimization_entry_code_ = NULL;
- allocator_->Free(lazy_deoptimization_entry_code_);
- lazy_deoptimization_entry_code_ = NULL;
+ for (int i = 0; i < Deoptimizer::kBailoutTypesWithCodeEntry; ++i) {
+ allocator_->Free(deopt_entry_code_[i]);
+ deopt_entry_code_[i] = NULL;
+ }
DeoptimizingCodeListNode* current = deoptimizing_code_list_;
while (current != NULL) {
@@ -488,6 +489,7 @@
StackFrame::Type frame_type) {
switch (deopt_type) {
case EAGER:
+ case SOFT:
case LAZY:
case DEBUGGER:
return (frame_type == StackFrame::STUB)
@@ -504,6 +506,7 @@
const char* Deoptimizer::MessageFor(BailoutType type) {
switch (type) {
case EAGER:
+ case SOFT:
case LAZY:
return "DEOPT";
case DEBUGGER:
@@ -545,6 +548,13 @@
}
if (function != NULL && function->IsOptimized()) {
function->shared()->increment_deopt_count();
+ if (bailout_type_ == Deoptimizer::SOFT) {
+ // Soft deopts shouldn't count against the overall re-optimization
count
+ // that can eventually lead to disabling optimization for a function.
+ int opt_count = function->shared()->opt_count();
+ if (opt_count > 0) opt_count--;
+ function->shared()->set_opt_count(opt_count);
+ }
}
compiled_code_ = FindOptimizedCode(function, optimized_code);
StackFrame::Type frame_type = function == NULL
@@ -562,6 +572,7 @@
Code* Deoptimizer::FindOptimizedCode(JSFunction* function,
Code* optimized_code) {
switch (bailout_type_) {
+ case Deoptimizer::SOFT:
case Deoptimizer::EAGER:
ASSERT(from_ == NULL);
return function->code();
@@ -597,7 +608,9 @@
bailout_id_,
reinterpret_cast<intptr_t>(from_),
fp_to_sp_delta_ - (2 * kPointerSize));
- if (bailout_type_ == EAGER)
compiled_code_->PrintDeoptLocation(bailout_id_);
+ if (bailout_type_ == EAGER || bailout_type_ == SOFT) {
+ compiled_code_->PrintDeoptLocation(bailout_id_);
+ }
}
@@ -639,9 +652,8 @@
ASSERT(mode == CALCULATE_ENTRY_ADDRESS);
}
DeoptimizerData* data = isolate->deoptimizer_data();
- MemoryChunk* base = (type == EAGER)
- ? data->eager_deoptimization_entry_code_
- : data->lazy_deoptimization_entry_code_;
+ ASSERT(type < kBailoutTypesWithCodeEntry);
+ MemoryChunk* base = data->deopt_entry_code_[type];
return base->area_start() + (id * table_entry_size_);
}
@@ -650,9 +662,7 @@
Address addr,
BailoutType type) {
DeoptimizerData* data = isolate->deoptimizer_data();
- MemoryChunk* base = (type == EAGER)
- ? data->eager_deoptimization_entry_code_
- : data->lazy_deoptimization_entry_code_;
+ MemoryChunk* base = data->deopt_entry_code_[type];
Address start = base->area_start();
if (base == NULL ||
addr < start ||
@@ -2206,11 +2216,9 @@
// cause us to emit relocation information for the external
// references. This is fine because the deoptimizer's code section
// isn't meant to be serialized at all.
- ASSERT(type == EAGER || type == LAZY);
+ ASSERT(type == EAGER || type == SOFT || type == LAZY);
DeoptimizerData* data = isolate->deoptimizer_data();
- int entry_count = (type == EAGER)
- ? data->eager_deoptimization_entry_code_entries_
- : data->lazy_deoptimization_entry_code_entries_;
+ int entry_count = data->deopt_entry_code_entries_[type];
if (max_entry_id < entry_count) return;
entry_count = Max(entry_count, Deoptimizer::kMinNumberOfEntries);
while (max_entry_id >= entry_count) entry_count *= 2;
@@ -2223,9 +2231,7 @@
masm.GetCode(&desc);
ASSERT(!RelocInfo::RequiresRelocation(desc));
- MemoryChunk* chunk = (type == EAGER)
- ? data->eager_deoptimization_entry_code_
- : data->lazy_deoptimization_entry_code_;
+ MemoryChunk* chunk = data->deopt_entry_code_[type];
ASSERT(static_cast<int>(Deoptimizer::GetMaxDeoptTableSize()) >=
desc.instr_size);
chunk->CommitArea(desc.instr_size);
@@ -2233,11 +2239,7 @@
static_cast<size_t>(desc.instr_size));
CPU::FlushICache(chunk->area_start(), desc.instr_size);
- if (type == EAGER) {
- data->eager_deoptimization_entry_code_entries_ = entry_count;
- } else {
- data->lazy_deoptimization_entry_code_entries_ = entry_count;
- }
+ data->deopt_entry_code_entries_[type] = entry_count;
}
=======================================
--- /branches/bleeding_edge/src/deoptimizer.h Wed Apr 24 00:39:35 2013
+++ /branches/bleeding_edge/src/deoptimizer.h Tue May 14 04:45:33 2013
@@ -98,53 +98,34 @@
class Deoptimizer;
-class DeoptimizerData {
- public:
- explicit DeoptimizerData(MemoryAllocator* allocator);
- ~DeoptimizerData();
-
-#ifdef ENABLE_DEBUGGER_SUPPORT
- void Iterate(ObjectVisitor* v);
-#endif
-
- Code* FindDeoptimizingCode(Address addr);
- void RemoveDeoptimizingCode(Code* code);
-
- private:
- MemoryAllocator* allocator_;
- int eager_deoptimization_entry_code_entries_;
- int lazy_deoptimization_entry_code_entries_;
- MemoryChunk* eager_deoptimization_entry_code_;
- MemoryChunk* lazy_deoptimization_entry_code_;
- Deoptimizer* current_;
-
-#ifdef ENABLE_DEBUGGER_SUPPORT
- DeoptimizedFrameInfo* deoptimized_frame_info_;
-#endif
-
- // List of deoptimized code which still have references from active stack
- // frames. These code objects are needed by the deoptimizer when
deoptimizing
- // a frame for which the code object for the function function has been
- // changed from the code present when deoptimizing was done.
- DeoptimizingCodeListNode* deoptimizing_code_list_;
-
- friend class Deoptimizer;
-
- DISALLOW_COPY_AND_ASSIGN(DeoptimizerData);
-};
-
-
class Deoptimizer : public Malloced {
public:
enum BailoutType {
EAGER,
LAZY,
+ SOFT,
OSR,
// This last bailout type is not really a bailout, but used by the
// debugger to deoptimize stack frames to allow inspection.
DEBUGGER
};
+ static const int kBailoutTypesWithCodeEntry = SOFT + 1;
+
+ struct JumpTableEntry {
+ inline JumpTableEntry(Address entry,
+ Deoptimizer::BailoutType type,
+ bool frame)
+ : label(),
+ address(entry),
+ bailout_type(type),
+ needs_frame(frame) { }
+ Label label;
+ Address address;
+ Deoptimizer::BailoutType bailout_type;
+ bool needs_frame;
+ };
+
static bool TraceEnabledFor(BailoutType deopt_type,
StackFrame::Type frame_type);
static const char* MessageFor(BailoutType type);
@@ -626,6 +607,40 @@
};
+class DeoptimizerData {
+ public:
+ explicit DeoptimizerData(MemoryAllocator* allocator);
+ ~DeoptimizerData();
+
+#ifdef ENABLE_DEBUGGER_SUPPORT
+ void Iterate(ObjectVisitor* v);
+#endif
+
+ Code* FindDeoptimizingCode(Address addr);
+ void RemoveDeoptimizingCode(Code* code);
+
+ private:
+ MemoryAllocator* allocator_;
+ int deopt_entry_code_entries_[Deoptimizer::kBailoutTypesWithCodeEntry];
+ MemoryChunk* deopt_entry_code_[Deoptimizer::kBailoutTypesWithCodeEntry];
+ Deoptimizer* current_;
+
+#ifdef ENABLE_DEBUGGER_SUPPORT
+ DeoptimizedFrameInfo* deoptimized_frame_info_;
+#endif
+
+ // List of deoptimized code which still have references from active stack
+ // frames. These code objects are needed by the deoptimizer when
deoptimizing
+ // a frame for which the code object for the function function has been
+ // changed from the code present when deoptimizing was done.
+ DeoptimizingCodeListNode* deoptimizing_code_list_;
+
+ friend class Deoptimizer;
+
+ DISALLOW_COPY_AND_ASSIGN(DeoptimizerData);
+};
+
+
class TranslationBuffer BASE_EMBEDDED {
public:
explicit TranslationBuffer(Zone* zone) : contents_(256, zone) { }
=======================================
--- /branches/bleeding_edge/src/disassembler.cc Thu Apr 18 02:50:46 2013
+++ /branches/bleeding_edge/src/disassembler.cc Tue May 14 04:45:33 2013
@@ -293,7 +293,14 @@
addr,
Deoptimizer::LAZY);
if (id == Deoptimizer::kNotDeoptimizationEntry) {
- out.AddFormatted(" ;; %s", RelocInfo::RelocModeName(rmode));
+ id = Deoptimizer::GetDeoptimizationId(isolate,
+ addr,
+ Deoptimizer::SOFT);
+ if (id == Deoptimizer::kNotDeoptimizationEntry) {
+ out.AddFormatted(" ;; %s",
RelocInfo::RelocModeName(rmode));
+ } else {
+ out.AddFormatted(" ;; soft deoptimization bailout %d",
id);
+ }
} else {
out.AddFormatted(" ;; lazy deoptimization bailout %d", id);
}
=======================================
--- /branches/bleeding_edge/src/ia32/builtins-ia32.cc Thu Apr 25 09:00:32
2013
+++ /branches/bleeding_edge/src/ia32/builtins-ia32.cc Tue May 14 04:45:33
2013
@@ -655,6 +655,11 @@
void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}
+
+
+void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
+ Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
+}
void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
=======================================
--- /branches/bleeding_edge/src/ia32/deoptimizer-ia32.cc Mon May 13
12:22:18 2013
+++ /branches/bleeding_edge/src/ia32/deoptimizer-ia32.cc Tue May 14
04:45:33 2013
@@ -659,9 +659,14 @@
// Set the continuation for the topmost frame.
if (is_topmost && bailout_type_ != DEBUGGER) {
Builtins* builtins = isolate_->builtins();
- Code* continuation = (bailout_type_ == EAGER)
- ? builtins->builtin(Builtins::kNotifyDeoptimized)
- : builtins->builtin(Builtins::kNotifyLazyDeoptimized);
+ Code* continuation = builtins->builtin(Builtins::kNotifyDeoptimized);
+ if (bailout_type_ == LAZY) {
+ continuation = builtins->builtin(Builtins::kNotifyLazyDeoptimized);
+ } else if (bailout_type_ == SOFT) {
+ continuation = builtins->builtin(Builtins::kNotifySoftDeoptimized);
+ } else {
+ ASSERT(bailout_type_ == EAGER);
+ }
output_frame->SetContinuation(
reinterpret_cast<uint32_t>(continuation->entry()));
}
@@ -740,7 +745,7 @@
// Get the address of the location in the code object if possible
// and compute the fp-to-sp delta in register edx.
- if (type() == EAGER) {
+ if (type() == EAGER || type() == SOFT) {
__ Set(ecx, Immediate(0));
__ lea(edx, Operand(esp, kSavedRegistersAreaSize + 1 * kPointerSize));
} else {
@@ -793,7 +798,7 @@
__ fnclex();
// Remove the bailout id and the double registers from the stack.
- if (type() == EAGER) {
+ if (type() == EAGER || type() == SOFT) {
__ add(esp, Immediate(kDoubleRegsSize + kPointerSize));
} else {
__ add(esp, Immediate(kDoubleRegsSize + 2 * kPointerSize));
=======================================
--- /branches/bleeding_edge/src/ia32/lithium-codegen-ia32.cc Mon May 13
00:35:26 2013
+++ /branches/bleeding_edge/src/ia32/lithium-codegen-ia32.cc Tue May 14
04:45:33 2013
@@ -385,9 +385,7 @@
for (int i = 0; i < jump_table_.length(); i++) {
__ bind(&jump_table_[i].label);
Address entry = jump_table_[i].address;
- bool is_lazy_deopt = jump_table_[i].is_lazy_deopt;
- Deoptimizer::BailoutType type =
- is_lazy_deopt ? Deoptimizer::LAZY : Deoptimizer::EAGER;
+ Deoptimizer::BailoutType type = jump_table_[i].bailout_type;
int id = Deoptimizer::GetDeoptimizationId(isolate(), entry, type);
if (id == Deoptimizer::kNotDeoptimizationEntry) {
Comment(";;; jump table entry %d.", i);
@@ -396,7 +394,7 @@
}
if (jump_table_[i].needs_frame) {
__ push(Immediate(ExternalReference::ForDeoptEntry(entry)));
- if (is_lazy_deopt) {
+ if (type == Deoptimizer::LAZY) {
if (needs_frame_is_call.is_bound()) {
__ jmp(&needs_frame_is_call);
} else {
@@ -441,7 +439,7 @@
}
}
} else {
- if (is_lazy_deopt) {
+ if (type == Deoptimizer::LAZY) {
__ call(entry, RelocInfo::RUNTIME_ENTRY);
} else {
__ jmp(entry, RelocInfo::RUNTIME_ENTRY);
@@ -893,16 +891,15 @@
}
-void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) {
+void LCodeGen::DeoptimizeIf(Condition cc,
+ LEnvironment* environment,
+ Deoptimizer::BailoutType bailout_type) {
RegisterEnvironmentForDeoptimization(environment,
Safepoint::kNoLazyDeopt);
ASSERT(environment->HasBeenRegistered());
// It's an error to deoptimize with the x87 fp stack in use.
ASSERT(x87_stack_depth_ == 0);
int id = environment->deoptimization_index();
ASSERT(info()->IsOptimizing() || info()->IsStub());
- Deoptimizer::BailoutType bailout_type = info()->IsStub()
- ? Deoptimizer::LAZY
- : Deoptimizer::EAGER;
Address entry =
Deoptimizer::GetDeoptimizationEntry(isolate(), id, bailout_type);
if (entry == NULL) {
@@ -948,9 +945,8 @@
}
ASSERT(info()->IsStub() || frame_is_built_);
- bool needs_lazy_deopt = info()->IsStub();
if (cc == no_condition && frame_is_built_) {
- if (needs_lazy_deopt) {
+ if (bailout_type == Deoptimizer::LAZY) {
__ call(entry, RelocInfo::RUNTIME_ENTRY);
} else {
__ jmp(entry, RelocInfo::RUNTIME_ENTRY);
@@ -961,8 +957,10 @@
if (jump_table_.is_empty() ||
jump_table_.last().address != entry ||
jump_table_.last().needs_frame != !frame_is_built_ ||
- jump_table_.last().is_lazy_deopt != needs_lazy_deopt) {
- JumpTableEntry table_entry(entry, !frame_is_built_,
needs_lazy_deopt);
+ jump_table_.last().bailout_type != bailout_type) {
+ Deoptimizer::JumpTableEntry table_entry(entry,
+ bailout_type,
+ !frame_is_built_);
jump_table_.Add(table_entry, zone());
}
if (cc == no_condition) {
@@ -972,6 +970,21 @@
}
}
}
+
+
+void LCodeGen::DeoptimizeIf(Condition cc,
+ LEnvironment* environment) {
+ Deoptimizer::BailoutType bailout_type = info()->IsStub()
+ ? Deoptimizer::LAZY
+ : Deoptimizer::EAGER;
+ DeoptimizeIf(cc, environment, bailout_type);
+}
+
+
+void LCodeGen::SoftDeoptimize(LEnvironment* environment) {
+ ASSERT(!info()->IsStub());
+ DeoptimizeIf(no_condition, environment, Deoptimizer::SOFT);
+}
void LCodeGen::RegisterDependentCodeForEmbeddedMaps(Handle<Code> code) {
@@ -6316,7 +6329,11 @@
void LCodeGen::DoDeoptimize(LDeoptimize* instr) {
- DeoptimizeIf(no_condition, instr->environment());
+ if (instr->hydrogen_value()->IsSoftDeoptimize()) {
+ SoftDeoptimize(instr->environment());
+ } else {
+ DeoptimizeIf(no_condition, instr->environment());
+ }
}
=======================================
--- /branches/bleeding_edge/src/ia32/lithium-codegen-ia32.h Fri Apr 26
08:30:41 2013
+++ /branches/bleeding_edge/src/ia32/lithium-codegen-ia32.h Tue May 14
04:45:33 2013
@@ -276,7 +276,11 @@
void RegisterEnvironmentForDeoptimization(LEnvironment* environment,
Safepoint::DeoptMode mode);
+ void DeoptimizeIf(Condition cc,
+ LEnvironment* environment,
+ Deoptimizer::BailoutType bailout_type);
void DeoptimizeIf(Condition cc, LEnvironment* environment);
+ void SoftDeoptimize(LEnvironment* environment);
void AddToTranslation(Translation* translation,
LOperand* op,
@@ -397,23 +401,11 @@
MacroAssembler* const masm_;
CompilationInfo* const info_;
- struct JumpTableEntry {
- inline JumpTableEntry(Address entry, bool frame, bool is_lazy)
- : label(),
- address(entry),
- needs_frame(frame),
- is_lazy_deopt(is_lazy) { }
- Label label;
- Address address;
- bool needs_frame;
- bool is_lazy_deopt;
- };
-
int current_block_;
int current_instruction_;
const ZoneList<LInstruction*>* instructions_;
ZoneList<LEnvironment*> deoptimizations_;
- ZoneList<JumpTableEntry> jump_table_;
+ ZoneList<Deoptimizer::JumpTableEntry> jump_table_;
ZoneList<Handle<Object> > deoptimization_literals_;
ZoneList<Handle<Map> > prototype_maps_;
ZoneList<Handle<Map> > transition_maps_;
=======================================
--- /branches/bleeding_edge/src/objects.cc Mon May 13 12:22:18 2013
+++ /branches/bleeding_edge/src/objects.cc Tue May 14 04:45:33 2013
@@ -10147,12 +10147,15 @@
RelocInfo* info = it.rinfo();
if (info->rmode() == RelocInfo::COMMENT) {
last_comment = reinterpret_cast<const char*>(info->data());
- } else if (last_comment != NULL &&
- bailout_id == Deoptimizer::GetDeoptimizationId(
- GetIsolate(), info->target_address(),
Deoptimizer::EAGER)) {
- CHECK(RelocInfo::IsRuntimeEntry(info->rmode()));
- PrintF(" %s\n", last_comment);
- return;
+ } else if (last_comment != NULL) {
+ if ((bailout_id == Deoptimizer::GetDeoptimizationId(
+ GetIsolate(), info->target_address(), Deoptimizer::EAGER)) ||
+ (bailout_id == Deoptimizer::GetDeoptimizationId(
+ GetIsolate(), info->target_address(), Deoptimizer::SOFT))) {
+ CHECK(RelocInfo::IsRuntimeEntry(info->rmode()));
+ PrintF(" %s\n", last_comment);
+ return;
+ }
}
}
}
=======================================
--- /branches/bleeding_edge/src/runtime.cc Mon May 13 12:22:18 2013
+++ /branches/bleeding_edge/src/runtime.cc Tue May 14 04:45:33 2013
@@ -7974,7 +7974,8 @@
RUNTIME_ASSERT(frame->function()->IsJSFunction());
Handle<JSFunction> function(JSFunction::cast(frame->function()),
isolate);
Handle<Code> optimized_code(function->code());
- RUNTIME_ASSERT(type != Deoptimizer::EAGER || function->IsOptimized());
+ RUNTIME_ASSERT((type != Deoptimizer::EAGER &&
+ type != Deoptimizer::SOFT) || function->IsOptimized());
// Avoid doing too much work when running with --always-opt and keep
// the optimized code around.
=======================================
--- /branches/bleeding_edge/src/x64/builtins-x64.cc Thu Apr 25 09:00:32 2013
+++ /branches/bleeding_edge/src/x64/builtins-x64.cc Tue May 14 04:45:33 2013
@@ -727,6 +727,11 @@
void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}
+
+
+void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
+ Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
+}
void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
=======================================
--- /branches/bleeding_edge/src/x64/deoptimizer-x64.cc Mon May 13 12:22:18
2013
+++ /branches/bleeding_edge/src/x64/deoptimizer-x64.cc Tue May 14 04:45:33
2013
@@ -530,9 +530,15 @@
// Set the continuation for the topmost frame.
if (is_topmost && bailout_type_ != DEBUGGER) {
- Code* continuation = (bailout_type_ == EAGER)
- ? isolate_->builtins()->builtin(Builtins::kNotifyDeoptimized)
- : isolate_->builtins()->builtin(Builtins::kNotifyLazyDeoptimized);
+ Builtins* builtins = isolate_->builtins();
+ Code* continuation = builtins->builtin(Builtins::kNotifyDeoptimized);
+ if (bailout_type_ == LAZY) {
+ continuation = builtins->builtin(Builtins::kNotifyLazyDeoptimized);
+ } else if (bailout_type_ == SOFT) {
+ continuation = builtins->builtin(Builtins::kNotifySoftDeoptimized);
+ } else {
+ ASSERT(bailout_type_ == EAGER);
+ }
output_frame->SetContinuation(
reinterpret_cast<intptr_t>(continuation->entry()));
}
@@ -618,7 +624,7 @@
// Get the address of the location in the code object if possible
// and compute the fp-to-sp delta in register arg5.
- if (type() == EAGER) {
+ if (type() == EAGER || type() == SOFT) {
__ Set(arg_reg_4, 0);
__ lea(arg5, Operand(rsp, kSavedRegistersAreaSize + 1 * kPointerSize));
} else {
@@ -669,7 +675,7 @@
}
// Remove the bailout id from the stack.
- if (type() == EAGER) {
+ if (type() == EAGER || type() == SOFT) {
__ addq(rsp, Immediate(kPointerSize));
} else {
__ addq(rsp, Immediate(2 * kPointerSize));
=======================================
--- /branches/bleeding_edge/src/x64/lithium-codegen-x64.cc Mon May 13
00:35:26 2013
+++ /branches/bleeding_edge/src/x64/lithium-codegen-x64.cc Tue May 14
04:45:33 2013
@@ -301,9 +301,7 @@
for (int i = 0; i < jump_table_.length(); i++) {
__ bind(&jump_table_[i].label);
Address entry = jump_table_[i].address;
- bool is_lazy_deopt = jump_table_[i].is_lazy_deopt;
- Deoptimizer::BailoutType type =
- is_lazy_deopt ? Deoptimizer::LAZY : Deoptimizer::EAGER;
+ Deoptimizer::BailoutType type = jump_table_[i].bailout_type;
int id = Deoptimizer::GetDeoptimizationId(isolate(), entry, type);
if (id == Deoptimizer::kNotDeoptimizationEntry) {
Comment(";;; jump table entry %d.", i);
@@ -312,7 +310,7 @@
}
if (jump_table_[i].needs_frame) {
__ movq(kScratchRegister, ExternalReference::ForDeoptEntry(entry));
- if (is_lazy_deopt) {
+ if (type == Deoptimizer::LAZY) {
if (needs_frame_is_call.is_bound()) {
__ jmp(&needs_frame_is_call);
} else {
@@ -348,7 +346,7 @@
}
}
} else {
- if (is_lazy_deopt) {
+ if (type == Deoptimizer::LAZY) {
__ call(entry, RelocInfo::RUNTIME_ENTRY);
} else {
__ jmp(entry, RelocInfo::RUNTIME_ENTRY);
@@ -719,14 +717,13 @@
}
-void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) {
+void LCodeGen::DeoptimizeIf(Condition cc,
+ LEnvironment* environment,
+ Deoptimizer::BailoutType bailout_type) {
RegisterEnvironmentForDeoptimization(environment,
Safepoint::kNoLazyDeopt);
ASSERT(environment->HasBeenRegistered());
int id = environment->deoptimization_index();
ASSERT(info()->IsOptimizing() || info()->IsStub());
- Deoptimizer::BailoutType bailout_type = info()->IsStub()
- ? Deoptimizer::LAZY
- : Deoptimizer::EAGER;
Address entry =
Deoptimizer::GetDeoptimizationEntry(isolate(), id, bailout_type);
if (entry == NULL) {
@@ -759,8 +756,10 @@
if (jump_table_.is_empty() ||
jump_table_.last().address != entry ||
jump_table_.last().needs_frame != !frame_is_built_ ||
- jump_table_.last().is_lazy_deopt != needs_lazy_deopt) {
- JumpTableEntry table_entry(entry, !frame_is_built_,
needs_lazy_deopt);
+ jump_table_.last().bailout_type != bailout_type) {
+ Deoptimizer::JumpTableEntry table_entry(entry,
+ bailout_type,
+ !frame_is_built_);
jump_table_.Add(table_entry, zone());
}
if (cc == no_condition) {
@@ -770,6 +769,21 @@
}
}
}
+
+
+void LCodeGen::DeoptimizeIf(Condition cc,
+ LEnvironment* environment) {
+ Deoptimizer::BailoutType bailout_type = info()->IsStub()
+ ? Deoptimizer::LAZY
+ : Deoptimizer::EAGER;
+ DeoptimizeIf(cc, environment, bailout_type);
+}
+
+
+void LCodeGen::SoftDeoptimize(LEnvironment* environment) {
+ ASSERT(!info()->IsStub());
+ DeoptimizeIf(no_condition, environment, Deoptimizer::SOFT);
+}
void LCodeGen::RegisterDependentCodeForEmbeddedMaps(Handle<Code> code) {
@@ -5414,7 +5428,11 @@
void LCodeGen::DoDeoptimize(LDeoptimize* instr) {
- DeoptimizeIf(no_condition, instr->environment());
+ if (instr->hydrogen_value()->IsSoftDeoptimize()) {
+ SoftDeoptimize(instr->environment());
+ } else {
+ DeoptimizeIf(no_condition, instr->environment());
+ }
}
=======================================
--- /branches/bleeding_edge/src/x64/lithium-codegen-x64.h Fri Apr 26
08:30:41 2013
+++ /branches/bleeding_edge/src/x64/lithium-codegen-x64.h Tue May 14
04:45:33 2013
@@ -247,8 +247,11 @@
int argc);
void RegisterEnvironmentForDeoptimization(LEnvironment* environment,
Safepoint::DeoptMode mode);
+ void DeoptimizeIf(Condition cc,
+ LEnvironment* environment,
+ Deoptimizer::BailoutType bailout_type);
void DeoptimizeIf(Condition cc, LEnvironment* environment);
-
+ void SoftDeoptimize(LEnvironment* environment);
void AddToTranslation(Translation* translation,
LOperand* op,
bool is_tagged,
@@ -340,18 +343,6 @@
int* offset,
AllocationSiteMode mode);
- struct JumpTableEntry {
- inline JumpTableEntry(Address entry, bool frame, bool is_lazy)
- : label(),
- address(entry),
- needs_frame(frame),
- is_lazy_deopt(is_lazy) { }
- Label label;
- Address address;
- bool needs_frame;
- bool is_lazy_deopt;
- };
-
void EnsureSpaceForLazyDeopt(int space_needed);
void DoLoadKeyedExternalArray(LLoadKeyed* instr);
void DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr);
@@ -369,7 +360,7 @@
int current_instruction_;
const ZoneList<LInstruction*>* instructions_;
ZoneList<LEnvironment*> deoptimizations_;
- ZoneList<JumpTableEntry> jump_table_;
+ ZoneList<Deoptimizer::JumpTableEntry> jump_table_;
ZoneList<Handle<Object> > deoptimization_literals_;
ZoneList<Handle<Map> > prototype_maps_;
ZoneList<Handle<Map> > transition_maps_;
--
--
v8-dev mailing list
[email protected]
http://groups.google.com/group/v8-dev
---
You received this message because you are subscribed to the Google Groups "v8-dev" group.
To unsubscribe from this group and stop receiving emails from it, send an email
to [email protected].
For more options, visit https://groups.google.com/groups/opt_out.