Revision: 18496
Author: [email protected]
Date: Wed Jan 8 15:21:56 2014 UTC
Log: Merged r18000, r18013, r18298, r18319 into 3.22 branch.
Restore saved caller FP registers on stub failure and preserve FP registers
on NotifyStubFailure.
Invalidate embedded objects in optimized code if it was marked for
deoptimization.
Initialize Date parse cache with SMI instead of double to work around
sharing mutable heap numbers in snapshot.
Fix NotifyStubFailureSaveDoubles generation for ia32.
BUG=320532,v8:2996,280531
[email protected]
Review URL: https://codereview.chromium.org/128303002
http://code.google.com/p/v8/source/detail?r=18496
Added:
/branches/3.22/test/mjsunit/regress/regress-280531.js
Modified:
/branches/3.22/src/arguments.cc
/branches/3.22/src/arguments.h
/branches/3.22/src/arm/builtins-arm.cc
/branches/3.22/src/arm/deoptimizer-arm.cc
/branches/3.22/src/arm/lithium-codegen-arm.cc
/branches/3.22/src/arm/lithium-codegen-arm.h
/branches/3.22/src/arm/macro-assembler-arm.h
/branches/3.22/src/builtins.h
/branches/3.22/src/date.js
/branches/3.22/src/deoptimizer.cc
/branches/3.22/src/deoptimizer.h
/branches/3.22/src/ia32/builtins-ia32.cc
/branches/3.22/src/ia32/deoptimizer-ia32.cc
/branches/3.22/src/ia32/lithium-codegen-ia32.cc
/branches/3.22/src/ia32/lithium-codegen-ia32.h
/branches/3.22/src/ia32/macro-assembler-ia32.h
/branches/3.22/src/mark-compact.cc
/branches/3.22/src/mips/builtins-mips.cc
/branches/3.22/src/mips/deoptimizer-mips.cc
/branches/3.22/src/mips/lithium-codegen-mips.cc
/branches/3.22/src/mips/lithium-codegen-mips.h
/branches/3.22/src/mips/macro-assembler-mips.h
/branches/3.22/src/objects.cc
/branches/3.22/src/objects.h
/branches/3.22/src/version.cc
/branches/3.22/src/x64/builtins-x64.cc
/branches/3.22/src/x64/deoptimizer-x64.cc
/branches/3.22/src/x64/lithium-codegen-x64.cc
/branches/3.22/src/x64/lithium-codegen-x64.h
/branches/3.22/src/x64/macro-assembler-x64.h
=======================================
--- /dev/null
+++ /branches/3.22/test/mjsunit/regress/regress-280531.js Wed Jan 8
15:21:56 2014 UTC
@@ -0,0 +1,32 @@
+// Copyright 2013 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+var contextA = Realm.create();
+var date1 = Realm.eval(contextA, "new Date('Thu, 29 Aug 2013 00:00:00
UTC')");
+new Date('Thu, 29 Aug 2013 00:00:01 UTC');
+var date2 = Realm.eval(contextA, "new Date('Thu, 29 Aug 2013 00:00:00
UTC')");
+assertEquals(date1, date2);
=======================================
--- /branches/3.22/src/arguments.cc Wed Oct 2 11:41:02 2013 UTC
+++ /branches/3.22/src/arguments.cc Wed Jan 8 15:21:56 2014 UTC
@@ -117,4 +117,12 @@
#undef WRITE_CALL_2_VOID
+double ClobberDoubleRegisters(double x1, double x2, double x3, double x4) {
+ // TODO(ulan): This clobbers only subset of registers depending on
compiler,
+ // Rewrite this in assembly to really clobber all registers.
+ // GCC for ia32 uses the FPU and does not touch XMM registers.
+ return x1 * 1.01 + x2 * 2.02 + x3 * 3.03 + x4 * 4.04;
+}
+
+
} } // namespace v8::internal
=======================================
--- /branches/3.22/src/arguments.h Wed Oct 2 11:41:02 2013 UTC
+++ /branches/3.22/src/arguments.h Wed Jan 8 15:21:56 2014 UTC
@@ -289,12 +289,23 @@
};
+double ClobberDoubleRegisters(double x1, double x2, double x3, double x4);
+
+
+#ifdef DEBUG
+#define CLOBBER_DOUBLE_REGISTERS() ClobberDoubleRegisters(1, 2, 3, 4);
+#else
+#define CLOBBER_DOUBLE_REGISTERS()
+#endif
+
+
#define DECLARE_RUNTIME_FUNCTION(Type, Name) \
Type Name(int args_length, Object** args_object, Isolate* isolate)
#define RUNTIME_FUNCTION(Type, Name) \
static Type __RT_impl_##Name(Arguments args, Isolate* isolate); \
Type Name(int args_length, Object** args_object, Isolate* isolate) { \
+ CLOBBER_DOUBLE_REGISTERS(); \
Arguments args(args_length, args_object); \
return __RT_impl_##Name(args, isolate); \
} \
=======================================
--- /branches/3.22/src/arm/builtins-arm.cc Thu Oct 24 06:31:36 2013 UTC
+++ /branches/3.22/src/arm/builtins-arm.cc Wed Jan 8 15:21:56 2014 UTC
@@ -859,7 +859,8 @@
}
-void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
+static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
+ SaveFPRegsMode save_doubles) {
{
FrameScope scope(masm, StackFrame::INTERNAL);
@@ -868,13 +869,23 @@
// registers.
__ stm(db_w, sp, kJSCallerSaved | kCalleeSaved);
// Pass the function and deoptimization type to the runtime system.
- __ CallRuntime(Runtime::kNotifyStubFailure, 0);
+ __ CallRuntime(Runtime::kNotifyStubFailure, 0, save_doubles);
__ ldm(ia_w, sp, kJSCallerSaved | kCalleeSaved);
}
__ add(sp, sp, Operand(kPointerSize)); // Ignore state
__ mov(pc, lr); // Jump to miss handler
}
+
+
+void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
+ Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
+}
+
+
+void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm)
{
+ Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
+}
static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
=======================================
--- /branches/3.22/src/arm/deoptimizer-arm.cc Mon Oct 21 07:19:36 2013 UTC
+++ /branches/3.22/src/arm/deoptimizer-arm.cc Wed Jan 8 15:21:56 2014 UTC
@@ -125,6 +125,11 @@
// There is no dynamic alignment padding on ARM in the input frame.
return false;
}
+
+
+Code* Deoptimizer::NotifyStubFailureBuiltin() {
+ return
isolate_->builtins()->builtin(Builtins::kNotifyStubFailureSaveDoubles);
+}
#define __ masm()->
=======================================
--- /branches/3.22/src/arm/lithium-codegen-arm.cc Mon Nov 18 08:39:25 2013
UTC
+++ /branches/3.22/src/arm/lithium-codegen-arm.cc Wed Jan 8 15:21:56 2014
UTC
@@ -96,6 +96,38 @@
info()->set_bailout_reason(reason);
status_ = ABORTED;
}
+
+
+void LCodeGen::SaveCallerDoubles() {
+ ASSERT(info()->saves_caller_doubles());
+ ASSERT(NeedsEagerFrame());
+ Comment(";;; Save clobbered callee double registers");
+ int count = 0;
+ BitVector* doubles = chunk()->allocated_double_registers();
+ BitVector::Iterator save_iterator(doubles);
+ while (!save_iterator.Done()) {
+ __ vstr(DwVfpRegister::FromAllocationIndex(save_iterator.Current()),
+ MemOperand(sp, count * kDoubleSize));
+ save_iterator.Advance();
+ count++;
+ }
+}
+
+
+void LCodeGen::RestoreCallerDoubles() {
+ ASSERT(info()->saves_caller_doubles());
+ ASSERT(NeedsEagerFrame());
+ Comment(";;; Restore clobbered callee double registers");
+ BitVector* doubles = chunk()->allocated_double_registers();
+ BitVector::Iterator save_iterator(doubles);
+ int count = 0;
+ while (!save_iterator.Done()) {
+ __ vldr(DwVfpRegister::FromAllocationIndex(save_iterator.Current()),
+ MemOperand(sp, count * kDoubleSize));
+ save_iterator.Advance();
+ count++;
+ }
+}
bool LCodeGen::GeneratePrologue() {
@@ -158,16 +190,7 @@
}
if (info()->saves_caller_doubles()) {
- Comment(";;; Save clobbered callee double registers");
- int count = 0;
- BitVector* doubles = chunk()->allocated_double_registers();
- BitVector::Iterator save_iterator(doubles);
- while (!save_iterator.Done()) {
- __ vstr(DwVfpRegister::FromAllocationIndex(save_iterator.Current()),
- MemOperand(sp, count * kDoubleSize));
- save_iterator.Advance();
- count++;
- }
+ SaveCallerDoubles();
}
// Possibly allocate a local context.
@@ -313,6 +336,7 @@
Comment(";;; jump table entry %d: deoptimization bailout %d.", i,
id);
}
if (deopt_jump_table_[i].needs_frame) {
+ ASSERT(!info()->saves_caller_doubles());
__ mov(ip, Operand(ExternalReference::ForDeoptEntry(entry)));
if (needs_frame.is_bound()) {
__ b(&needs_frame);
@@ -330,6 +354,10 @@
__ mov(pc, ip);
}
} else {
+ if (info()->saves_caller_doubles()) {
+ ASSERT(info()->IsStub());
+ RestoreCallerDoubles();
+ }
__ mov(lr, Operand(pc), LeaveCC, al);
__ mov(pc, Operand(ExternalReference::ForDeoptEntry(entry)));
}
@@ -783,7 +811,10 @@
}
ASSERT(info()->IsStub() || frame_is_built_);
- if (condition == al && frame_is_built_) {
+ // Go through jump table if we need to handle condition, build frame, or
+ // restore caller doubles.
+ if (condition == al && frame_is_built_ &&
+ !info()->saves_caller_doubles()) {
__ Call(entry, RelocInfo::RUNTIME_ENTRY);
} else {
// We often have several deopts to the same entry, reuse the last
@@ -2853,16 +2884,7 @@
__ CallRuntime(Runtime::kTraceExit, 1);
}
if (info()->saves_caller_doubles()) {
- ASSERT(NeedsEagerFrame());
- BitVector* doubles = chunk()->allocated_double_registers();
- BitVector::Iterator save_iterator(doubles);
- int count = 0;
- while (!save_iterator.Done()) {
- __ vldr(DwVfpRegister::FromAllocationIndex(save_iterator.Current()),
- MemOperand(sp, count * kDoubleSize));
- save_iterator.Advance();
- count++;
- }
+ RestoreCallerDoubles();
}
int no_frame_start = -1;
if (NeedsEagerFrame()) {
=======================================
--- /branches/3.22/src/arm/lithium-codegen-arm.h Fri Oct 25 09:22:31 2013
UTC
+++ /branches/3.22/src/arm/lithium-codegen-arm.h Wed Jan 8 15:21:56 2014
UTC
@@ -185,6 +185,9 @@
void Abort(BailoutReason reason);
void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone());
}
+
+ void SaveCallerDoubles();
+ void RestoreCallerDoubles();
// Code generation passes. Returns true if code generation should
// continue.
=======================================
--- /branches/3.22/src/arm/macro-assembler-arm.h Fri Oct 25 09:22:31 2013
UTC
+++ /branches/3.22/src/arm/macro-assembler-arm.h Wed Jan 8 15:21:56 2014
UTC
@@ -1045,8 +1045,10 @@
}
// Convenience function: Same as above, but takes the fid instead.
- void CallRuntime(Runtime::FunctionId id, int num_arguments) {
- CallRuntime(Runtime::FunctionForId(id), num_arguments);
+ void CallRuntime(Runtime::FunctionId id,
+ int num_arguments,
+ SaveFPRegsMode save_doubles = kDontSaveFPRegs) {
+ CallRuntime(Runtime::FunctionForId(id), num_arguments, save_doubles);
}
// Convenience function: call an external reference.
=======================================
--- /branches/3.22/src/builtins.h Fri Oct 25 09:22:31 2013 UTC
+++ /branches/3.22/src/builtins.h Wed Jan 8 15:21:56 2014 UTC
@@ -112,6 +112,8 @@
Code::kNoExtraICState) \
V(NotifyStubFailure, BUILTIN, UNINITIALIZED, \
Code::kNoExtraICState) \
+ V(NotifyStubFailureSaveDoubles, BUILTIN, UNINITIALIZED, \
+ Code::kNoExtraICState) \
\
V(LoadIC_Miss, BUILTIN, UNINITIALIZED, \
Code::kNoExtraICState) \
@@ -400,6 +402,7 @@
static void Generate_NotifySoftDeoptimized(MacroAssembler* masm);
static void Generate_NotifyLazyDeoptimized(MacroAssembler* masm);
static void Generate_NotifyStubFailure(MacroAssembler* masm);
+ static void Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm);
static void Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm);
static void Generate_FunctionCall(MacroAssembler* masm);
=======================================
--- /branches/3.22/src/date.js Fri Oct 18 10:34:25 2013 UTC
+++ /branches/3.22/src/date.js Wed Jan 8 15:21:56 2014 UTC
@@ -132,7 +132,7 @@
// strings over and over again.
var Date_cache = {
// Cached time value.
- time: NAN,
+ time: 0,
// String input for which the cached time is valid.
string: null
};
=======================================
--- /branches/3.22/src/deoptimizer.cc Wed Oct 23 06:51:00 2013 UTC
+++ /branches/3.22/src/deoptimizer.cc Wed Jan 8 15:21:56 2014 UTC
@@ -1574,8 +1574,7 @@
output_frame->SetPc(reinterpret_cast<intptr_t>(
trampoline->instruction_start()));
output_frame->SetState(Smi::FromInt(FullCodeGenerator::NO_REGISTERS));
- Code* notify_failure =
- isolate_->builtins()->builtin(Builtins::kNotifyStubFailure);
+ Code* notify_failure = NotifyStubFailureBuiltin();
output_frame->SetContinuation(
reinterpret_cast<intptr_t>(notify_failure->entry()));
}
=======================================
--- /branches/3.22/src/deoptimizer.h Mon Oct 28 18:03:37 2013 UTC
+++ /branches/3.22/src/deoptimizer.h Wed Jan 8 15:21:56 2014 UTC
@@ -412,6 +412,10 @@
// at the dynamic alignment state slot inside the frame.
bool HasAlignmentPadding(JSFunction* function);
+ // Select the version of NotifyStubFailure builtin that either saves or
+ // doesn't save the double registers depending on CPU features.
+ Code* NotifyStubFailureBuiltin();
+
Isolate* isolate_;
JSFunction* function_;
Code* compiled_code_;
=======================================
--- /branches/3.22/src/ia32/builtins-ia32.cc Thu Oct 24 06:31:36 2013 UTC
+++ /branches/3.22/src/ia32/builtins-ia32.cc Wed Jan 8 15:21:56 2014 UTC
@@ -601,7 +601,8 @@
}
-void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
+static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
+ SaveFPRegsMode save_doubles) {
// Enter an internal frame.
{
FrameScope scope(masm, StackFrame::INTERNAL);
@@ -610,7 +611,7 @@
// stubs that tail call the runtime on deopts passing their parameters
in
// registers.
__ pushad();
- __ CallRuntime(Runtime::kNotifyStubFailure, 0);
+ __ CallRuntime(Runtime::kNotifyStubFailure, 0, save_doubles);
__ popad();
// Tear down internal frame.
}
@@ -618,6 +619,21 @@
__ pop(MemOperand(esp, 0)); // Ignore state offset
__ ret(0); // Return to IC Miss stub, continuation still on stack.
}
+
+
+void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
+ Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
+}
+
+
+void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm)
{
+ if (Serializer::enabled()) {
+ PlatformFeatureScope sse2(SSE2);
+ Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
+ } else {
+ Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
+ }
+}
static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
=======================================
--- /branches/3.22/src/ia32/deoptimizer-ia32.cc Mon Oct 21 07:19:36 2013 UTC
+++ /branches/3.22/src/ia32/deoptimizer-ia32.cc Wed Jan 8 15:21:56 2014 UTC
@@ -229,6 +229,13 @@
int32_t alignment_state = input_->GetFrameSlot(alignment_state_offset);
return (alignment_state == kAlignmentPaddingPushed);
}
+
+
+Code* Deoptimizer::NotifyStubFailureBuiltin() {
+ Builtins::Name name = CpuFeatures::IsSupported(SSE2) ?
+ Builtins::kNotifyStubFailureSaveDoubles :
Builtins::kNotifyStubFailure;
+ return isolate_->builtins()->builtin(name);
+}
#define __ masm()->
=======================================
--- /branches/3.22/src/ia32/lithium-codegen-ia32.cc Mon Nov 18 08:39:25
2013 UTC
+++ /branches/3.22/src/ia32/lithium-codegen-ia32.cc Wed Jan 8 15:21:56
2014 UTC
@@ -130,6 +130,40 @@
#endif
+void LCodeGen::SaveCallerDoubles() {
+ ASSERT(info()->saves_caller_doubles());
+ ASSERT(NeedsEagerFrame());
+ Comment(";;; Save clobbered callee double registers");
+ CpuFeatureScope scope(masm(), SSE2);
+ int count = 0;
+ BitVector* doubles = chunk()->allocated_double_registers();
+ BitVector::Iterator save_iterator(doubles);
+ while (!save_iterator.Done()) {
+ __ movsd(MemOperand(esp, count * kDoubleSize),
+ XMMRegister::FromAllocationIndex(save_iterator.Current()));
+ save_iterator.Advance();
+ count++;
+ }
+}
+
+
+void LCodeGen::RestoreCallerDoubles() {
+ ASSERT(info()->saves_caller_doubles());
+ ASSERT(NeedsEagerFrame());
+ Comment(";;; Restore clobbered callee double registers");
+ CpuFeatureScope scope(masm(), SSE2);
+ BitVector* doubles = chunk()->allocated_double_registers();
+ BitVector::Iterator save_iterator(doubles);
+ int count = 0;
+ while (!save_iterator.Done()) {
+ __ movsd(XMMRegister::FromAllocationIndex(save_iterator.Current()),
+ MemOperand(esp, count * kDoubleSize));
+ save_iterator.Advance();
+ count++;
+ }
+}
+
+
bool LCodeGen::GeneratePrologue() {
ASSERT(is_generating());
@@ -244,17 +278,7 @@
}
if (info()->saves_caller_doubles() && CpuFeatures::IsSupported(SSE2)) {
- Comment(";;; Save clobbered callee double registers");
- CpuFeatureScope scope(masm(), SSE2);
- int count = 0;
- BitVector* doubles = chunk()->allocated_double_registers();
- BitVector::Iterator save_iterator(doubles);
- while (!save_iterator.Done()) {
- __ movsd(MemOperand(esp, count * kDoubleSize),
-
XMMRegister::FromAllocationIndex(save_iterator.Current()));
- save_iterator.Advance();
- count++;
- }
+ SaveCallerDoubles();
}
}
@@ -399,6 +423,7 @@
Comment(";;; jump table entry %d: deoptimization bailout %d.", i,
id);
}
if (jump_table_[i].needs_frame) {
+ ASSERT(!info()->saves_caller_doubles());
__ push(Immediate(ExternalReference::ForDeoptEntry(entry)));
if (needs_frame.is_bound()) {
__ jmp(&needs_frame);
@@ -425,6 +450,9 @@
__ ret(0); // Call the continuation without clobbering registers.
}
} else {
+ if (info()->saves_caller_doubles() &&
CpuFeatures::IsSupported(SSE2)) {
+ RestoreCallerDoubles();
+ }
__ call(entry, RelocInfo::RUNTIME_ENTRY);
}
}
@@ -3056,17 +3084,7 @@
__ CallRuntime(Runtime::kTraceExit, 1);
}
if (info()->saves_caller_doubles() && CpuFeatures::IsSupported(SSE2)) {
- ASSERT(NeedsEagerFrame());
- CpuFeatureScope scope(masm(), SSE2);
- BitVector* doubles = chunk()->allocated_double_registers();
- BitVector::Iterator save_iterator(doubles);
- int count = 0;
- while (!save_iterator.Done()) {
- __ movsd(XMMRegister::FromAllocationIndex(save_iterator.Current()),
- MemOperand(esp, count * kDoubleSize));
- save_iterator.Advance();
- count++;
- }
+ RestoreCallerDoubles();
}
if (dynamic_frame_alignment_) {
// Fetch the state of the dynamic frame alignment.
=======================================
--- /branches/3.22/src/ia32/lithium-codegen-ia32.h Tue Oct 22 08:00:09 2013
UTC
+++ /branches/3.22/src/ia32/lithium-codegen-ia32.h Wed Jan 8 15:21:56 2014
UTC
@@ -197,6 +197,9 @@
void Abort(BailoutReason reason);
void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone());
}
+
+ void SaveCallerDoubles();
+ void RestoreCallerDoubles();
// Code generation passes. Returns true if code generation should
// continue.
=======================================
--- /branches/3.22/src/ia32/macro-assembler-ia32.h Thu Oct 24 06:31:36 2013
UTC
+++ /branches/3.22/src/ia32/macro-assembler-ia32.h Wed Jan 8 15:21:56 2014
UTC
@@ -773,8 +773,10 @@
}
// Convenience function: Same as above, but takes the fid instead.
- void CallRuntime(Runtime::FunctionId id, int num_arguments) {
- CallRuntime(Runtime::FunctionForId(id), num_arguments);
+ void CallRuntime(Runtime::FunctionId id,
+ int num_arguments,
+ SaveFPRegsMode save_doubles = kDontSaveFPRegs) {
+ CallRuntime(Runtime::FunctionForId(id), num_arguments, save_doubles);
}
// Convenience function: call an external reference.
=======================================
--- /branches/3.22/src/mark-compact.cc Fri Oct 25 09:22:31 2013 UTC
+++ /branches/3.22/src/mark-compact.cc Wed Jan 8 15:21:56 2014 UTC
@@ -2643,6 +2643,7 @@
if (IsMarked(code) && !code->marked_for_deoptimization()) {
code->set_marked_for_deoptimization(true);
+ code->InvalidateEmbeddedObjects();
have_code_to_deoptimize_ = true;
}
entries->clear_at(i);
=======================================
--- /branches/3.22/src/mips/builtins-mips.cc Thu Oct 24 06:31:36 2013 UTC
+++ /branches/3.22/src/mips/builtins-mips.cc Wed Jan 8 15:21:56 2014 UTC
@@ -900,7 +900,8 @@
}
-void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
+static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
+ SaveFPRegsMode save_doubles) {
{
FrameScope scope(masm, StackFrame::INTERNAL);
@@ -909,13 +910,23 @@
// registers.
__ MultiPush(kJSCallerSaved | kCalleeSaved);
// Pass the function and deoptimization type to the runtime system.
- __ CallRuntime(Runtime::kNotifyStubFailure, 0);
+ __ CallRuntime(Runtime::kNotifyStubFailure, 0, save_doubles);
__ MultiPop(kJSCallerSaved | kCalleeSaved);
}
__ Addu(sp, sp, Operand(kPointerSize)); // Ignore state
__ Jump(ra); // Jump to miss handler
}
+
+
+void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
+ Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
+}
+
+
+void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm)
{
+ Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
+}
static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
=======================================
--- /branches/3.22/src/mips/deoptimizer-mips.cc Mon Oct 21 07:19:36 2013 UTC
+++ /branches/3.22/src/mips/deoptimizer-mips.cc Wed Jan 8 15:21:56 2014 UTC
@@ -123,6 +123,11 @@
// There is no dynamic alignment padding on MIPS in the input frame.
return false;
}
+
+
+Code* Deoptimizer::NotifyStubFailureBuiltin() {
+ return
isolate_->builtins()->builtin(Builtins::kNotifyStubFailureSaveDoubles);
+}
#define __ masm()->
=======================================
--- /branches/3.22/src/mips/lithium-codegen-mips.cc Mon Nov 18 08:39:25
2013 UTC
+++ /branches/3.22/src/mips/lithium-codegen-mips.cc Wed Jan 8 15:21:56
2014 UTC
@@ -96,6 +96,38 @@
info()->set_bailout_reason(reason);
status_ = ABORTED;
}
+
+
+void LCodeGen::SaveCallerDoubles() {
+ ASSERT(info()->saves_caller_doubles());
+ ASSERT(NeedsEagerFrame());
+ Comment(";;; Save clobbered callee double registers");
+ int count = 0;
+ BitVector* doubles = chunk()->allocated_double_registers();
+ BitVector::Iterator save_iterator(doubles);
+ while (!save_iterator.Done()) {
+ __ sdc1(DoubleRegister::FromAllocationIndex(save_iterator.Current()),
+ MemOperand(sp, count * kDoubleSize));
+ save_iterator.Advance();
+ count++;
+ }
+}
+
+
+void LCodeGen::RestoreCallerDoubles() {
+ ASSERT(info()->saves_caller_doubles());
+ ASSERT(NeedsEagerFrame());
+ Comment(";;; Restore clobbered callee double registers");
+ BitVector* doubles = chunk()->allocated_double_registers();
+ BitVector::Iterator save_iterator(doubles);
+ int count = 0;
+ while (!save_iterator.Done()) {
+ __ ldc1(DoubleRegister::FromAllocationIndex(save_iterator.Current()),
+ MemOperand(sp, count * kDoubleSize));
+ save_iterator.Advance();
+ count++;
+ }
+}
bool LCodeGen::GeneratePrologue() {
@@ -160,16 +192,7 @@
}
if (info()->saves_caller_doubles()) {
- Comment(";;; Save clobbered callee double registers");
- int count = 0;
- BitVector* doubles = chunk()->allocated_double_registers();
- BitVector::Iterator save_iterator(doubles);
- while (!save_iterator.Done()) {
- __ sdc1(DoubleRegister::FromAllocationIndex(save_iterator.Current()),
- MemOperand(sp, count * kDoubleSize));
- save_iterator.Advance();
- count++;
- }
+ SaveCallerDoubles();
}
// Possibly allocate a local context.
@@ -298,6 +321,7 @@
}
__ li(t9, Operand(ExternalReference::ForDeoptEntry(entry)));
if (deopt_jump_table_[i].needs_frame) {
+ ASSERT(!info()->saves_caller_doubles());
if (needs_frame.is_bound()) {
__ Branch(&needs_frame);
} else {
@@ -313,6 +337,10 @@
__ Call(t9);
}
} else {
+ if (info()->saves_caller_doubles()) {
+ ASSERT(info()->IsStub());
+ RestoreCallerDoubles();
+ }
__ Call(t9);
}
}
@@ -757,7 +785,10 @@
}
ASSERT(info()->IsStub() || frame_is_built_);
- if (condition == al && frame_is_built_) {
+ // Go through jump table if we need to handle condition, build frame, or
+ // restore caller doubles.
+ if (condition == al && frame_is_built_ &&
+ !info()->saves_caller_doubles()) {
__ Call(entry, RelocInfo::RUNTIME_ENTRY, condition, src1, src2);
} else {
// We often have several deopts to the same entry, reuse the last
@@ -2706,16 +2737,7 @@
__ CallRuntime(Runtime::kTraceExit, 1);
}
if (info()->saves_caller_doubles()) {
- ASSERT(NeedsEagerFrame());
- BitVector* doubles = chunk()->allocated_double_registers();
- BitVector::Iterator save_iterator(doubles);
- int count = 0;
- while (!save_iterator.Done()) {
- __ ldc1(DoubleRegister::FromAllocationIndex(save_iterator.Current()),
- MemOperand(sp, count * kDoubleSize));
- save_iterator.Advance();
- count++;
- }
+ RestoreCallerDoubles();
}
int no_frame_start = -1;
if (NeedsEagerFrame()) {
=======================================
--- /branches/3.22/src/mips/lithium-codegen-mips.h Tue Oct 22 08:00:09 2013
UTC
+++ /branches/3.22/src/mips/lithium-codegen-mips.h Wed Jan 8 15:21:56 2014
UTC
@@ -185,6 +185,9 @@
void Abort(BailoutReason reason);
void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone());
}
+
+ void SaveCallerDoubles();
+ void RestoreCallerDoubles();
// Code generation passes. Returns true if code generation should
// continue.
=======================================
--- /branches/3.22/src/mips/macro-assembler-mips.h Thu Oct 24 06:31:36 2013
UTC
+++ /branches/3.22/src/mips/macro-assembler-mips.h Wed Jan 8 15:21:56 2014
UTC
@@ -1210,8 +1210,10 @@
}
// Convenience function: Same as above, but takes the fid instead.
- void CallRuntime(Runtime::FunctionId id, int num_arguments) {
- CallRuntime(Runtime::FunctionForId(id), num_arguments);
+ void CallRuntime(Runtime::FunctionId id,
+ int num_arguments,
+ SaveFPRegsMode save_doubles = kDontSaveFPRegs) {
+ CallRuntime(Runtime::FunctionForId(id), num_arguments, save_doubles);
}
// Convenience function: call an external reference.
=======================================
--- /branches/3.22/src/objects.cc Thu Nov 21 14:01:32 2013 UTC
+++ /branches/3.22/src/objects.cc Wed Jan 8 15:21:56 2014 UTC
@@ -10330,6 +10330,18 @@
void Code::InvalidateRelocation() {
set_relocation_info(GetHeap()->empty_byte_array());
}
+
+
+void Code::InvalidateEmbeddedObjects() {
+ Object* undefined = GetHeap()->undefined_value();
+ int mode_mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
+ for (RelocIterator it(this, mode_mask); !it.done(); it.next()) {
+ RelocInfo::Mode mode = it.rinfo()->rmode();
+ if (mode == RelocInfo::EMBEDDED_OBJECT) {
+ it.rinfo()->set_target_object(undefined, SKIP_WRITE_BARRIER);
+ }
+ }
+}
void Code::Relocate(intptr_t delta) {
=======================================
--- /branches/3.22/src/objects.h Tue Nov 12 13:33:55 2013 UTC
+++ /branches/3.22/src/objects.h Wed Jan 8 15:21:56 2014 UTC
@@ -5008,6 +5008,7 @@
// [relocation_info]: Code relocation information
DECL_ACCESSORS(relocation_info, ByteArray)
void InvalidateRelocation();
+ void InvalidateEmbeddedObjects();
// [handler_table]: Fixed array containing offsets of exception handlers.
DECL_ACCESSORS(handler_table, FixedArray)
=======================================
--- /branches/3.22/src/version.cc Tue Dec 24 08:32:14 2013 UTC
+++ /branches/3.22/src/version.cc Wed Jan 8 15:21:56 2014 UTC
@@ -35,7 +35,7 @@
#define MAJOR_VERSION 3
#define MINOR_VERSION 22
#define BUILD_NUMBER 24
-#define PATCH_LEVEL 10
+#define PATCH_LEVEL 11
// Use 1 for candidates and 0 otherwise.
// (Boolean macro values are not supported by all preprocessors.)
#define IS_CANDIDATE_VERSION 0
=======================================
--- /branches/3.22/src/x64/builtins-x64.cc Thu Oct 24 09:23:47 2013 UTC
+++ /branches/3.22/src/x64/builtins-x64.cc Wed Jan 8 15:21:56 2014 UTC
@@ -663,7 +663,8 @@
}
-void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
+static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
+ SaveFPRegsMode save_doubles) {
// Enter an internal frame.
{
FrameScope scope(masm, StackFrame::INTERNAL);
@@ -672,7 +673,7 @@
// stubs that tail call the runtime on deopts passing their parameters
in
// registers.
__ Pushad();
- __ CallRuntime(Runtime::kNotifyStubFailure, 0);
+ __ CallRuntime(Runtime::kNotifyStubFailure, 0, save_doubles);
__ Popad();
// Tear down internal frame.
}
@@ -680,6 +681,16 @@
__ pop(MemOperand(rsp, 0)); // Ignore state offset
__ ret(0); // Return to IC Miss stub, continuation still on stack.
}
+
+
+void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
+ Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
+}
+
+
+void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm)
{
+ Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
+}
static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
=======================================
--- /branches/3.22/src/x64/deoptimizer-x64.cc Mon Oct 21 07:19:36 2013 UTC
+++ /branches/3.22/src/x64/deoptimizer-x64.cc Wed Jan 8 15:21:56 2014 UTC
@@ -124,6 +124,11 @@
// There is no dynamic alignment padding on x64 in the input frame.
return false;
}
+
+
+Code* Deoptimizer::NotifyStubFailureBuiltin() {
+ return
isolate_->builtins()->builtin(Builtins::kNotifyStubFailureSaveDoubles);
+}
#define __ masm()->
=======================================
--- /branches/3.22/src/x64/lithium-codegen-x64.cc Mon Nov 18 08:39:25 2013
UTC
+++ /branches/3.22/src/x64/lithium-codegen-x64.cc Wed Jan 8 15:21:56 2014
UTC
@@ -111,6 +111,38 @@
#endif
+void LCodeGen::SaveCallerDoubles() {
+ ASSERT(info()->saves_caller_doubles());
+ ASSERT(NeedsEagerFrame());
+ Comment(";;; Save clobbered callee double registers");
+ int count = 0;
+ BitVector* doubles = chunk()->allocated_double_registers();
+ BitVector::Iterator save_iterator(doubles);
+ while (!save_iterator.Done()) {
+ __ movsd(MemOperand(rsp, count * kDoubleSize),
+ XMMRegister::FromAllocationIndex(save_iterator.Current()));
+ save_iterator.Advance();
+ count++;
+ }
+}
+
+
+void LCodeGen::RestoreCallerDoubles() {
+ ASSERT(info()->saves_caller_doubles());
+ ASSERT(NeedsEagerFrame());
+ Comment(";;; Restore clobbered callee double registers");
+ BitVector* doubles = chunk()->allocated_double_registers();
+ BitVector::Iterator save_iterator(doubles);
+ int count = 0;
+ while (!save_iterator.Done()) {
+ __ movsd(XMMRegister::FromAllocationIndex(save_iterator.Current()),
+ MemOperand(rsp, count * kDoubleSize));
+ save_iterator.Advance();
+ count++;
+ }
+}
+
+
bool LCodeGen::GeneratePrologue() {
ASSERT(is_generating());
@@ -173,16 +205,7 @@
}
if (info()->saves_caller_doubles()) {
- Comment(";;; Save clobbered callee double registers");
- int count = 0;
- BitVector* doubles = chunk()->allocated_double_registers();
- BitVector::Iterator save_iterator(doubles);
- while (!save_iterator.Done()) {
- __ movsd(MemOperand(rsp, count * kDoubleSize),
-
XMMRegister::FromAllocationIndex(save_iterator.Current()));
- save_iterator.Advance();
- count++;
- }
+ SaveCallerDoubles();
}
}
@@ -261,6 +284,7 @@
Comment(";;; jump table entry %d: deoptimization bailout %d.", i,
id);
}
if (jump_table_[i].needs_frame) {
+ ASSERT(!info()->saves_caller_doubles());
__ movq(kScratchRegister, ExternalReference::ForDeoptEntry(entry));
if (needs_frame.is_bound()) {
__ jmp(&needs_frame);
@@ -279,6 +303,10 @@
__ call(kScratchRegister);
}
} else {
+ if (info()->saves_caller_doubles()) {
+ ASSERT(info()->IsStub());
+ RestoreCallerDoubles();
+ }
__ call(entry, RelocInfo::RUNTIME_ENTRY);
}
}
@@ -661,7 +689,10 @@
}
ASSERT(info()->IsStub() || frame_is_built_);
- if (cc == no_condition && frame_is_built_) {
+ // Go through jump table if we need to handle condition, build frame, or
+ // restore caller doubles.
+ if (cc == no_condition && frame_is_built_ &&
+ !info()->saves_caller_doubles()) {
__ call(entry, RelocInfo::RUNTIME_ENTRY);
} else {
// We often have several deopts to the same entry, reuse the last
@@ -2551,16 +2582,7 @@
__ CallRuntime(Runtime::kTraceExit, 1);
}
if (info()->saves_caller_doubles()) {
- ASSERT(NeedsEagerFrame());
- BitVector* doubles = chunk()->allocated_double_registers();
- BitVector::Iterator save_iterator(doubles);
- int count = 0;
- while (!save_iterator.Done()) {
- __ movsd(XMMRegister::FromAllocationIndex(save_iterator.Current()),
- MemOperand(rsp, count * kDoubleSize));
- save_iterator.Advance();
- count++;
- }
+ RestoreCallerDoubles();
}
int no_frame_start = -1;
if (NeedsEagerFrame()) {
=======================================
--- /branches/3.22/src/x64/lithium-codegen-x64.h Tue Oct 22 08:00:09 2013
UTC
+++ /branches/3.22/src/x64/lithium-codegen-x64.h Wed Jan 8 15:21:56 2014
UTC
@@ -152,6 +152,10 @@
void Abort(BailoutReason reason);
void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone());
}
+
+
+ void SaveCallerDoubles();
+ void RestoreCallerDoubles();
// Code generation passes. Returns true if code generation should
// continue.
=======================================
--- /branches/3.22/src/x64/macro-assembler-x64.h Fri Oct 25 09:22:31 2013
UTC
+++ /branches/3.22/src/x64/macro-assembler-x64.h Wed Jan 8 15:21:56 2014
UTC
@@ -1248,8 +1248,10 @@
}
// Convenience function: Same as above, but takes the fid instead.
- void CallRuntime(Runtime::FunctionId id, int num_arguments) {
- CallRuntime(Runtime::FunctionForId(id), num_arguments);
+ void CallRuntime(Runtime::FunctionId id,
+ int num_arguments,
+ SaveFPRegsMode save_doubles = kDontSaveFPRegs) {
+ CallRuntime(Runtime::FunctionForId(id), num_arguments, save_doubles);
}
// Convenience function: call an external reference.
--
--
v8-dev mailing list
[email protected]
http://groups.google.com/group/v8-dev
---
You received this message because you are subscribed to the Google Groups "v8-dev" group.
To unsubscribe from this group and stop receiving emails from it, send an email
to [email protected].
For more options, visit https://groups.google.com/groups/opt_out.