Revision: 3592
Author: [email protected]
Date: Wed Jan 13 00:16:02 2010
Log: Port FastNewContextStub to x64 and arm.
BUG=551
Review URL: http://codereview.chromium.org/541027
http://code.google.com/p/v8/source/detail?r=3592
Modified:
/branches/bleeding_edge/src/arm/codegen-arm.cc
/branches/bleeding_edge/src/x64/codegen-x64.cc
=======================================
--- /branches/bleeding_edge/src/arm/codegen-arm.cc Tue Jan 12 09:22:57 2010
+++ /branches/bleeding_edge/src/arm/codegen-arm.cc Wed Jan 13 00:16:02 2010
@@ -187,12 +187,18 @@
function_return_is_shadowed_ = false;
VirtualFrame::SpilledScope spilled_scope;
- if (scope_->num_heap_slots() > 0) {
+ int heap_slots = scope_->num_heap_slots();
+ if (heap_slots > 0) {
// Allocate local context.
// Get outer context and create a new context based on it.
__ ldr(r0, frame_->Function());
frame_->EmitPush(r0);
- frame_->CallRuntime(Runtime::kNewContext, 1); // r0 holds the result
+ if (heap_slots <= FastNewContextStub::kMaximumSlots) {
+ FastNewContextStub stub(heap_slots);
+ frame_->CallStub(&stub, 1);
+ } else {
+ frame_->CallRuntime(Runtime::kNewContext, 1);
+ }
#ifdef DEBUG
JumpTarget verified_true;
@@ -4442,6 +4448,55 @@
__ push(r3);
__ TailCallRuntime(ExternalReference(Runtime::kNewClosure), 2, 1);
}
+
+
+void FastNewContextStub::Generate(MacroAssembler* masm) {
+ // Try to allocate the context in new space.
+ Label gc;
+ int length = slots_ + Context::MIN_CONTEXT_SLOTS;
+
+ // Pop the function from the stack.
+ __ pop(r3);
+
+ // Attempt to allocate the context in new space.
+ __ AllocateInNewSpace(length + (FixedArray::kHeaderSize / kPointerSize),
+ r0,
+ r1,
+ r2,
+ &gc,
+ TAG_OBJECT);
+
+ // Setup the object header.
+ __ LoadRoot(r2, Heap::kContextMapRootIndex);
+ __ str(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
+ __ mov(r2, Operand(length));
+ __ str(r2, FieldMemOperand(r0, Array::kLengthOffset));
+
+ // Setup the fixed slots.
+ __ mov(r1, Operand(Smi::FromInt(0)));
+ __ str(r3, MemOperand(r0, Context::SlotOffset(Context::CLOSURE_INDEX)));
+ __ str(r0, MemOperand(r0, Context::SlotOffset(Context::FCONTEXT_INDEX)));
+ __ str(r1, MemOperand(r0, Context::SlotOffset(Context::PREVIOUS_INDEX)));
+ __ str(r1, MemOperand(r0, Context::SlotOffset(Context::EXTENSION_INDEX)));
+
+ // Copy the global object from the surrounding context.
+ __ ldr(r1, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
+ __ str(r1, MemOperand(r0, Context::SlotOffset(Context::GLOBAL_INDEX)));
+
+ // Initialize the rest of the slots to undefined.
+ __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
+ for (int i = Context::MIN_CONTEXT_SLOTS; i < length; i++) {
+ __ str(r1, MemOperand(r0, Context::SlotOffset(i)));
+ }
+
+ // Return. The on-stack parameter has already been popped.
+ __ mov(cp, r0);
+ __ Ret();
+
+ // Need to collect. Call into runtime system.
+ __ bind(&gc);
+ __ TailCallRuntime(ExternalReference(Runtime::kNewContext), 1, 1);
+}
// Count leading zeros in a 32 bit word. On ARM5 and later it uses the clz
=======================================
--- /branches/bleeding_edge/src/x64/codegen-x64.cc Tue Jan 12 09:22:57 2010
+++ /branches/bleeding_edge/src/x64/codegen-x64.cc Wed Jan 13 00:16:02 2010
@@ -326,12 +326,19 @@
function_return_is_shadowed_ = false;
// Allocate the local context if needed.
- if (scope_->num_heap_slots() > 0) {
+ int heap_slots = scope_->num_heap_slots();
+ if (heap_slots > 0) {
Comment cmnt(masm_, "[ allocate local context");
// Allocate local context.
// Get outer context and create a new context based on it.
frame_->PushFunction();
- Result context = frame_->CallRuntime(Runtime::kNewContext, 1);
+ Result context;
+ if (heap_slots <= FastNewContextStub::kMaximumSlots) {
+ FastNewContextStub stub(heap_slots);
+ context = frame_->CallStub(&stub, 1);
+ } else {
+ context = frame_->CallRuntime(Runtime::kNewContext, 1);
+ }
// Update context local.
frame_->SaveContextRegister();
@@ -6114,6 +6121,48 @@
__ push(rcx); // Restore return address.
__ TailCallRuntime(ExternalReference(Runtime::kNewClosure), 2, 1);
}
+
+
+void FastNewContextStub::Generate(MacroAssembler* masm) {
+ // Try to allocate the context in new space.
+ Label gc;
+ int length = slots_ + Context::MIN_CONTEXT_SLOTS;
+ __ AllocateInNewSpace((length * kPointerSize) + FixedArray::kHeaderSize,
+ rax, rbx, rcx, &gc, TAG_OBJECT);
+
+ // Get the function from the stack.
+ __ movq(rcx, Operand(rsp, 1 * kPointerSize));
+
+ // Setup the object header.
+ __ LoadRoot(kScratchRegister, Heap::kContextMapRootIndex);
+ __ movq(FieldOperand(rax, HeapObject::kMapOffset), kScratchRegister);
+ __ movl(FieldOperand(rax, Array::kLengthOffset), Immediate(length));
+
+ // Setup the fixed slots.
+ __ xor_(rbx, rbx); // Set to NULL.
+ __ movq(Operand(rax, Context::SlotOffset(Context::CLOSURE_INDEX)), rcx);
+ __ movq(Operand(rax, Context::SlotOffset(Context::FCONTEXT_INDEX)), rax);
+ __ movq(Operand(rax, Context::SlotOffset(Context::PREVIOUS_INDEX)), rbx);
+ __ movq(Operand(rax, Context::SlotOffset(Context::EXTENSION_INDEX)), rbx);
+
+ // Copy the global object from the surrounding context.
+ __ movq(rbx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
+ __ movq(Operand(rax, Context::SlotOffset(Context::GLOBAL_INDEX)), rbx);
+
+ // Initialize the rest of the slots to undefined.
+ __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
+ for (int i = Context::MIN_CONTEXT_SLOTS; i < length; i++) {
+ __ movq(Operand(rax, Context::SlotOffset(i)), rbx);
+ }
+
+ // Return and remove the on-stack parameter.
+ __ movq(rsi, rax);
+ __ ret(1 * kPointerSize);
+
+ // Need to collect. Call into runtime system.
+ __ bind(&gc);
+ __ TailCallRuntime(ExternalReference(Runtime::kNewContext), 1, 1);
+}
void ToBooleanStub::Generate(MacroAssembler* masm) {
--
v8-dev mailing list
[email protected]
http://groups.google.com/group/v8-dev