Revision: 6324
Author: [email protected]
Date: Fri Jan 14 05:16:48 2011
Log: X64 Crankshaft: Added GeneratePrologue implementation.
Review URL: http://codereview.chromium.org/6326003
http://code.google.com/p/v8/source/detail?r=6324
Modified:
/branches/bleeding_edge/src/ia32/lithium-codegen-ia32.cc
/branches/bleeding_edge/src/lithium-allocator.h
/branches/bleeding_edge/src/v8globals.h
/branches/bleeding_edge/src/x64/lithium-codegen-x64.cc
/branches/bleeding_edge/src/x64/lithium-x64.cc
=======================================
--- /branches/bleeding_edge/src/ia32/lithium-codegen-ia32.cc Fri Jan 14 04:50:03 2011
+++ /branches/bleeding_edge/src/ia32/lithium-codegen-ia32.cc Fri Jan 14 05:16:48 2011
@@ -290,6 +290,17 @@
__ j(not_zero, &loop);
} else {
__ sub(Operand(esp), Immediate(slots * kPointerSize));
+#ifdef _MSC_VER
+ // On Windows, you may not access the stack more than one page below
+ // the most recently mapped page. To make the allocated area randomly
+ // accessible, we write to each page in turn (the value is irrelevant).
+ const int kPageSize = 4 * KB;
+ for (int offset = slots * kPointerSize - kPageSize;
+ offset > 0;
+ offset -= kPageSize) {
+ __ mov(Operand(esp, offset), eax);
+ }
+#endif
}
}
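
Note on the _MSC_VER block above: Windows commits stack guard pages one at a
time, so code may not touch the stack more than one page below the lowest page
already touched. A minimal standalone sketch of the same page-touching idea in
plain C++ (not V8 code; the function and names are illustrative):

  const int kPageSize = 4 * 1024;

  void TouchReservedPages(volatile char* base, int bytes) {
    // Store one word into each 4 KB page of the reservation, walking from
    // the high end down toward the new stack top, so each guard page is
    // committed in order. The stored value is irrelevant.
    for (int offset = bytes - kPageSize; offset > 0; offset -= kPageSize) {
      base[offset] = 0;
    }
  }

The loop stops above offset 0 because the new stack top is already within one
page of the last write, just as in the generated code.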
=======================================
--- /branches/bleeding_edge/src/lithium-allocator.h Mon Jan 10 04:19:15 2011
+++ /branches/bleeding_edge/src/lithium-allocator.h Fri Jan 14 05:16:48 2011
@@ -705,6 +705,7 @@
bool HasAllocatedSpillOperand() const {
return spill_operand_ != NULL && !spill_operand_->IsUnallocated();
}
+
LOperand* GetSpillOperand() const { return spill_operand_; }
void SetSpillOperand(LOperand* operand) {
ASSERT(!operand->IsUnallocated());
@@ -722,7 +723,6 @@
bool Covers(LifetimePosition position);
LifetimePosition FirstIntersection(LiveRange* other);
-
// Add a new interval or a new use position to this live range.
void EnsureInterval(LifetimePosition start, LifetimePosition end);
void AddUseInterval(LifetimePosition start, LifetimePosition end);
=======================================
--- /branches/bleeding_edge/src/v8globals.h Tue Dec 7 03:31:57 2010
+++ /branches/bleeding_edge/src/v8globals.h Fri Jan 14 05:16:48 2011
@@ -77,7 +77,8 @@
reinterpret_cast<Address>(V8_UINT64_C(0x1baddead0baddead));
const Address kFromSpaceZapValue =
reinterpret_cast<Address>(V8_UINT64_C(0x1beefdad0beefdad));
-const uint64_t kDebugZapValue = 0xbadbaddbbadbaddb;
+const uint64_t kDebugZapValue = V8_UINT64_C(0xbadbaddbbadbaddb);
+const uint64_t kSlotsZapValue = V8_UINT64_C(0xbeefdeadbeefdeed);
#else
const Address kZapValue = reinterpret_cast<Address>(0xdeadbeed);
const Address kHandleZapValue = reinterpret_cast<Address>(0xbaddead);
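
The new kSlotsZapValue follows the existing zap-value convention: in debug
builds, freshly reserved memory is filled with a distinctive 64-bit pattern so
a read of an uninitialized slot is immediately recognizable in a debugger or
crash dump. A minimal sketch of the idea (plain C++; the constant name comes
from the diff, the fill helper is illustrative):

  #include <cstdint>

  const uint64_t kSlotsZapValue = UINT64_C(0xbeefdeadbeefdeed);

  void ZapSlots(uint64_t* slots, int count) {
    // Debug-only fill: any slot later observed holding 0xbeefdeadbeefdeed
    // was read before it was written.
    for (int i = 0; i < count; i++) slots[i] = kSlotsZapValue;
  }

This is in effect what the FLAG_debug_code path of the new x64 prologue does,
pushing kSlotsZapValue once per stack slot.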
=======================================
--- /branches/bleeding_edge/src/x64/lithium-codegen-x64.cc Fri Jan 14 04:50:03 2011
+++ /branches/bleeding_edge/src/x64/lithium-codegen-x64.cc Fri Jan 14 05:16:48 2011
@@ -242,8 +242,52 @@
bool LCodeGen::GeneratePrologue() {
- Abort("Unimplemented: %s", "GeneratePrologue");
- return false;
+ ASSERT(is_generating());
+
+#ifdef DEBUG
+ if (strlen(FLAG_stop_at) > 0 &&
+ info_->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
+ __ int3();
+ }
+#endif
+
+ __ push(rbp); // Caller's frame pointer.
+ __ movq(rbp, rsp);
+ __ push(rsi); // Callee's context.
+ __ push(rdi); // Callee's JS function.
+
+ // Reserve space for the stack slots needed by the code.
+ int slots = StackSlotCount();
+ if (slots > 0) {
+ if (FLAG_debug_code) {
+ __ movl(rax, Immediate(slots));
+ __ movq(kScratchRegister, kSlotsZapValue, RelocInfo::NONE);
+ Label loop;
+ __ bind(&loop);
+ __ push(kScratchRegister);
+ __ decl(rax);
+ __ j(not_zero, &loop);
+ } else {
+ __ subq(rsp, Immediate(slots * kPointerSize));
+#ifdef _MSC_VER
+ // On Windows, you may not access the stack more than one page below
+ // the most recently mapped page. To make the allocated area randomly
+ // accessible, we write to each page in turn (the value is irrelevant).
+ const int kPageSize = 4 * KB;
+ for (int offset = slots * kPointerSize - kPageSize;
+ offset > 0;
+ offset -= kPageSize) {
+ __ movq(Operand(rsp, offset), rax);
+ }
+#endif
+ }
+ }
+
+ // Trace the call.
+ if (FLAG_trace) {
+ __ CallRuntime(Runtime::kTraceEnter, 0);
+ }
+ return !is_aborted();
}
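
For reference, the frame this prologue builds is the standard JS frame layout:
return address and saved rbp at and above rbp, then the pushed context and JS
function, then the spill slots below. A sketch of the resulting rbp-relative
slot addressing (the helper is hypothetical, but the offsets follow directly
from the three pushes and 8-byte pointers):

  const int kPointerSize = 8;

  // Frame after the pushes in GeneratePrologue:
  //   rbp + 8 : return address
  //   rbp + 0 : caller's rbp    (push rbp)
  //   rbp - 8 : context         (push rsi)
  //   rbp - 16: JS function     (push rdi)
  //   rbp - 24: spill slot 0, and so on downward to rsp.
  int SlotOffsetFromFp(int index) {
    return -(index + 3) * kPointerSize;
  }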
=======================================
--- /branches/bleeding_edge/src/x64/lithium-x64.cc Fri Jan 14 02:27:25 2011
+++ /branches/bleeding_edge/src/x64/lithium-x64.cc Fri Jan 14 05:16:48 2011
@@ -305,15 +305,20 @@
int LChunk::GetNextSpillIndex(bool is_double) {
- // Need to consider what index means: Is it 32 bit or 64 bit index?
- UNIMPLEMENTED();
- return 0;
+ return spill_slot_count_++;
}
LOperand* LChunk::GetNextSpillSlot(bool is_double) {
- UNIMPLEMENTED();
- return NULL;
+ // All stack slots are Double stack slots on x64.
+ // Alternatively, at some point, start using half-size
+ // stack slots for int32 values.
+ int index = GetNextSpillIndex(is_double);
+ if (is_double) {
+ return LDoubleStackSlot::Create(index);
+ } else {
+ return LStackSlot::Create(index);
+ }
}
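
The simplification above works because every spill slot on x64 is
pointer-sized (8 bytes), so doubles and tagged values consume one index each
and a single counter suffices. On a 32-bit target the same counter would have
to advance by two for a double and keep it 8-byte aligned; a sketch of that
contrast (illustrative only, not V8's ia32 code):

  struct SpillSlots32 {
    int count = 0;
    int Next(bool is_double) {
      if (!is_double) return count++;   // one 4-byte slot
      if (count % 2 != 0) count++;      // keep doubles 8-byte aligned
      int index = count;
      count += 2;                       // a double spans two 4-byte slots
      return index;
    }
  };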
@@ -736,6 +741,7 @@
Abort("Unimplemented: %s", "DoArithmeticT");
return NULL;
}
+
void LChunkBuilder::DoBasicBlock(HBasicBlock* block, HBasicBlock* next_block) {
ASSERT(is_building());