Revision: 9542
Author:   [email protected]
Date:     Thu Oct  6 08:59:02 2011
Log:      Fast allocation of block contexts.

Review URL: http://codereview.chromium.org/8066002
http://code.google.com/p/v8/source/detail?r=9542

Modified:
 /branches/bleeding_edge/src/arm/code-stubs-arm.cc
 /branches/bleeding_edge/src/code-stubs.h
 /branches/bleeding_edge/src/full-codegen.cc
 /branches/bleeding_edge/src/ia32/code-stubs-ia32.cc
 /branches/bleeding_edge/src/x64/code-stubs-x64.cc
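
In outline, this CL adds a FastNewBlockContextStub for ARM, ia32 and x64 that allocates a block context in new space directly from generated code, and teaches full codegen to call it instead of Runtime::kPushBlockContext whenever the block's slot count fits the stub's limit. All three backends build the same object; the sketch below (a plain struct, purely for illustration) summarizes the layout they fill in, using the names that appear in the stubs:

  // Illustrative layout only; V8 really stores contexts as tagged FixedArrays,
  // and CLOSURE_INDEX/PREVIOUS_INDEX/EXTENSION_INDEX/GLOBAL_INDEX are symbolic
  // slot indices rather than struct fields.
  struct BlockContextSketch {
    void* map;        // Heap::kBlockContextMapRootIndex
    void* length;     // Smi::FromInt(slots_ + Context::MIN_CONTEXT_SLOTS)
    void* closure;    // function from the stack, or the global context's closure
                      //   when the smi-0 sentinel was pushed instead
    void* previous;   // the context register at entry (cp / esi / rsi)
    void* extension;  // the serialized scope info pushed by full codegen
    void* global;     // copied from the previous context's GLOBAL_INDEX slot
    // ...followed by slots_ further entries, all initialized to the hole value.
  };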

=======================================
--- /branches/bleeding_edge/src/arm/code-stubs-arm.cc Tue Oct  4 02:07:50 2011
+++ /branches/bleeding_edge/src/arm/code-stubs-arm.cc Thu Oct  6 08:59:02 2011
@@ -187,6 +187,72 @@
   __ bind(&gc);
   __ TailCallRuntime(Runtime::kNewFunctionContext, 1, 1);
 }
+
+
+void FastNewBlockContextStub::Generate(MacroAssembler* masm) {
+  // Stack layout on entry:
+  //
+  // [sp]: function.
+  // [sp + kPointerSize]: serialized scope info
+
+  // Try to allocate the context in new space.
+  Label gc;
+  int length = slots_ + Context::MIN_CONTEXT_SLOTS;
+  __ AllocateInNewSpace(FixedArray::SizeFor(length),
+                        r0, r1, r2, &gc, TAG_OBJECT);
+
+  // Load the function from the stack.
+  __ ldr(r3, MemOperand(sp, 0));
+
+  // Load the serialized scope info from the stack.
+  __ ldr(r1, MemOperand(sp, 1 * kPointerSize));
+
+  // Setup the object header.
+  __ LoadRoot(r2, Heap::kBlockContextMapRootIndex);
+  __ str(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
+  __ mov(r2, Operand(Smi::FromInt(length)));
+  __ str(r2, FieldMemOperand(r0, FixedArray::kLengthOffset));
+
+  // If this block context is nested in the global context we get a smi
+  // sentinel instead of a function. The block context should get the
+  // canonical empty function of the global context as its closure which
+  // we still have to look up.
+  Label after_sentinel;
+  __ JumpIfNotSmi(r3, &after_sentinel);
+  if (FLAG_debug_code) {
+    const char* message = "Expected 0 as a Smi sentinel";
+    __ cmp(r3, Operand::Zero());
+    __ Assert(eq, message);
+  }
+  __ ldr(r3, GlobalObjectOperand());
+  __ ldr(r3, FieldMemOperand(r3, GlobalObject::kGlobalContextOffset));
+  __ ldr(r3, ContextOperand(r3, Context::CLOSURE_INDEX));
+  __ bind(&after_sentinel);
+
+  // Setup the fixed slots.
+  __ str(r3, ContextOperand(r0, Context::CLOSURE_INDEX));
+  __ str(cp, ContextOperand(r0, Context::PREVIOUS_INDEX));
+  __ str(r1, ContextOperand(r0, Context::EXTENSION_INDEX));
+
+  // Copy the global object from the previous context.
+  __ ldr(r1, ContextOperand(cp, Context::GLOBAL_INDEX));
+  __ str(r1, ContextOperand(r0, Context::GLOBAL_INDEX));
+
+  // Initialize the rest of the slots to the hole value.
+  __ LoadRoot(r1, Heap::kTheHoleValueRootIndex);
+  for (int i = 0; i < slots_; i++) {
+    __ str(r1, ContextOperand(r0, i + Context::MIN_CONTEXT_SLOTS));
+  }
+
+  // Remove the on-stack argument and return.
+  __ mov(cp, r0);
+  __ add(sp, sp, Operand(2 * kPointerSize));
+  __ Ret();
+
+  // Need to collect. Call into runtime system.
+  __ bind(&gc);
+  __ TailCallRuntime(Runtime::kPushBlockContext, 2, 1);
+}
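
The smi-sentinel branch above is shared by all three backends: when the block is nested directly in the global context, full codegen pushes the smi 0 instead of a closure, and the stub looks up the canonical empty function of the global context instead. A standalone C++ model of that decision, a sketch only (the helper names and the tagging detail that smi 0 is the word 0 are assumptions, not part of this diff):

  #include <cstdint>

  // Smis carry a zero tag bit in V8, so the smi 0 sentinel is simply the word 0.
  static inline bool IsSmiZeroSentinel(intptr_t tagged) { return tagged == 0; }

  // Returns the value that ends up in the new block context's CLOSURE_INDEX slot.
  intptr_t ResolveBlockContextClosure(intptr_t function_or_sentinel,
                                      intptr_t global_context_closure) {
    return IsSmiZeroSentinel(function_or_sentinel) ? global_context_closure
                                                   : function_or_sentinel;
  }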


 void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
=======================================
--- /branches/bleeding_edge/src/code-stubs.h    Mon Oct  3 04:13:20 2011
+++ /branches/bleeding_edge/src/code-stubs.h    Thu Oct  6 08:59:02 2011
@@ -55,6 +55,7 @@
   V(StackCheck)                          \
   V(FastNewClosure)                      \
   V(FastNewContext)                      \
+  V(FastNewBlockContext)                 \
   V(FastCloneShallowArray)               \
   V(RevertToNumber)                      \
   V(ToBoolean)                           \
@@ -323,7 +324,7 @@
   static const int kMaximumSlots = 64;

   explicit FastNewContextStub(int slots) : slots_(slots) {
-    ASSERT(slots_ > 0 && slots <= kMaximumSlots);
+    ASSERT(slots_ > 0 && slots_ <= kMaximumSlots);
   }

   void Generate(MacroAssembler* masm);
@@ -336,6 +337,24 @@
 };


+class FastNewBlockContextStub : public CodeStub {
+ public:
+  static const int kMaximumSlots = 64;
+
+  explicit FastNewBlockContextStub(int slots) : slots_(slots) {
+    ASSERT(slots_ > 0 && slots_ <= kMaximumSlots);
+  }
+
+  void Generate(MacroAssembler* masm);
+
+ private:
+  int slots_;
+
+  Major MajorKey() { return FastNewBlockContext; }
+  int MinorKey() { return slots_; }
+};
+
+
 class FastCloneShallowArrayStub : public CodeStub {
  public:
   // Maximum length of copied elements array.
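
One consequence of the new stub class worth spelling out: MinorKey() returns slots_, so every distinct slot count produces its own specialized stub (cached under the usual CodeStub major/minor key scheme, which is assumed here rather than shown in this diff). That is why kMaximumSlots has to bound the fast path:

  FastNewBlockContextStub two_slots(2);    // MinorKey() == 2
  FastNewBlockContextStub three_slots(3);  // MinorKey() == 3 -> separate generated code
  // Blocks that would need more than kMaximumSlots (64) context slots never get
  // a stub; full codegen keeps calling Runtime::kPushBlockContext for them, as
  // the full-codegen.cc hunk below shows.
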
=======================================
--- /branches/bleeding_edge/src/full-codegen.cc Sat Oct  1 01:47:12 2011
+++ /branches/bleeding_edge/src/full-codegen.cc Thu Oct  6 08:59:02 2011
@@ -820,9 +820,19 @@
   if (stmt->block_scope() != NULL) {
     { Comment cmnt(masm_, "[ Extend block context");
       scope_ = stmt->block_scope();
-      __ Push(scope_->GetSerializedScopeInfo());
+      Handle<SerializedScopeInfo> scope_info = scope_->GetSerializedScopeInfo();
+      int heap_slots =
+          scope_info->NumberOfContextSlots() - Context::MIN_CONTEXT_SLOTS;
+      __ Push(scope_info);
       PushFunctionArgumentForContextAllocation();
-      __ CallRuntime(Runtime::kPushBlockContext, 2);
+      if (heap_slots <= FastNewBlockContextStub::kMaximumSlots) {
+        FastNewBlockContextStub stub(heap_slots);
+        __ CallStub(&stub);
+      } else {
+        __ CallRuntime(Runtime::kPushBlockContext, 2);
+      }
+
+      // Replace the context stored in the frame.
       StoreToFrameField(StandardFrameConstants::kContextOffset,
                         context_register());
     }
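
To make the slot arithmetic concrete: NumberOfContextSlots() counts the fixed slots as well, so only the excess is baked into the stub, and the stub adds the fixed slots back when sizing the allocation. A worked example, assuming Context::MIN_CONTEXT_SLOTS == 4 (an assumption for illustration, not stated in this diff):

  // A block scope with two heap-allocated `let` bindings reports 6 context slots.
  int heap_slots = 6 - 4;        // NumberOfContextSlots() - MIN_CONTEXT_SLOTS == 2
  int length = heap_slots + 4;   // the stub's slots_ + MIN_CONTEXT_SLOTS == 6 again
  // 2 <= FastNewBlockContextStub::kMaximumSlots, so full codegen emits CallStub
  // rather than the runtime call, and the stub allocates FixedArray::SizeFor(6)
  // bytes in new space.
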
=======================================
--- /branches/bleeding_edge/src/ia32/code-stubs-ia32.cc Tue Oct  4 04:38:12 2011
+++ /branches/bleeding_edge/src/ia32/code-stubs-ia32.cc Thu Oct  6 08:59:02 2011
@@ -157,6 +157,77 @@
   __ bind(&gc);
   __ TailCallRuntime(Runtime::kNewFunctionContext, 1, 1);
 }
+
+
+void FastNewBlockContextStub::Generate(MacroAssembler* masm) {
+  // Stack layout on entry:
+  //
+  // [esp + (1 * kPointerSize)]: function
+  // [esp + (2 * kPointerSize)]: serialized scope info
+
+  // Try to allocate the context in new space.
+  Label gc;
+  int length = slots_ + Context::MIN_CONTEXT_SLOTS;
+  __ AllocateInNewSpace(FixedArray::SizeFor(length),
+                        eax, ebx, ecx, &gc, TAG_OBJECT);
+
+  // Get the function or sentinel from the stack.
+  __ mov(ecx, Operand(esp, 1 * kPointerSize));
+
+  // Get the serialized scope info from the stack.
+  __ mov(ebx, Operand(esp, 2 * kPointerSize));
+
+  // Setup the object header.
+  Factory* factory = masm->isolate()->factory();
+  __ mov(FieldOperand(eax, HeapObject::kMapOffset),
+         factory->block_context_map());
+  __ mov(FieldOperand(eax, Context::kLengthOffset),
+         Immediate(Smi::FromInt(length)));
+
+  // If this block context is nested in the global context we get a smi
+  // sentinel instead of a function. The block context should get the
+  // canonical empty function of the global context as its closure which
+  // we still have to look up.
+  Label after_sentinel;
+  __ JumpIfNotSmi(ecx, &after_sentinel, Label::kNear);
+  if (FLAG_debug_code) {
+    const char* message = "Expected 0 as a Smi sentinel";
+    __ cmp(ecx, 0);
+    __ Assert(equal, message);
+  }
+  __ mov(ecx, GlobalObjectOperand());
+  __ mov(ecx, FieldOperand(ecx, GlobalObject::kGlobalContextOffset));
+  __ mov(ecx, ContextOperand(ecx, Context::CLOSURE_INDEX));
+  __ bind(&after_sentinel);
+
+  // Setup the fixed slots.
+  __ mov(ContextOperand(eax, Context::CLOSURE_INDEX), ecx);
+  __ mov(ContextOperand(eax, Context::PREVIOUS_INDEX), esi);
+  __ mov(ContextOperand(eax, Context::EXTENSION_INDEX), ebx);
+
+  // Copy the global object from the previous context.
+  __ mov(ebx, ContextOperand(esi, Context::GLOBAL_INDEX));
+  __ mov(ContextOperand(eax, Context::GLOBAL_INDEX), ebx);
+
+  // Initialize the rest of the slots to the hole value.
+  if (slots_ == 1) {
+    __ mov(ContextOperand(eax, Context::MIN_CONTEXT_SLOTS),
+           factory->the_hole_value());
+  } else {
+    __ mov(ebx, factory->the_hole_value());
+    for (int i = 0; i < slots_; i++) {
+      __ mov(ContextOperand(eax, i + Context::MIN_CONTEXT_SLOTS), ebx);
+    }
+  }
+
+  // Return and remove the on-stack parameters.
+  __ mov(esi, eax);
+  __ ret(2 * kPointerSize);
+
+  // Need to collect. Call into runtime system.
+  __ bind(&gc);
+  __ TailCallRuntime(Runtime::kPushBlockContext, 2, 1);
+}
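
For reference, the caller-side protocol this stub (and its ARM and x64 siblings) expects matches the full-codegen.cc change above: the serialized scope info is pushed first, then the function or smi-0 sentinel, and the stub returns the new context in eax/esi and pops both arguments itself with ret(2 * kPointerSize). A caller-side sketch using the names from this CL (surrounding plumbing elided; stack offsets are those seen inside the stub, after the call pushes its return address):

  __ Push(scope_info);                         // [esp + 2 * kPointerSize] in the stub
  PushFunctionArgumentForContextAllocation();  // function or smi 0, [esp + 1 * kPointerSize]
  FastNewBlockContextStub stub(heap_slots);
  __ CallStub(&stub);                          // returns with the new context in eax and esi;
                                               // both arguments already popped by the stub

One backend-specific detail above: unlike the ARM and x64 versions, the ia32 stub special-cases slots_ == 1, storing the hole as an immediate handle instead of staging it in ebx first.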


 void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
=======================================
--- /branches/bleeding_edge/src/x64/code-stubs-x64.cc Wed Sep 28 05:23:40 2011
+++ /branches/bleeding_edge/src/x64/code-stubs-x64.cc Thu Oct  6 08:59:02 2011
@@ -153,6 +153,70 @@
   __ bind(&gc);
   __ TailCallRuntime(Runtime::kNewFunctionContext, 1, 1);
 }
+
+
+void FastNewBlockContextStub::Generate(MacroAssembler* masm) {
+  // Stack layout on entry:
+  //
+  // [rsp + (1 * kPointerSize)]: function
+  // [rsp + (2 * kPointerSize)]: serialized scope info
+
+  // Try to allocate the context in new space.
+  Label gc;
+  int length = slots_ + Context::MIN_CONTEXT_SLOTS;
+  __ AllocateInNewSpace(FixedArray::SizeFor(length),
+                        rax, rbx, rcx, &gc, TAG_OBJECT);
+
+  // Get the function from the stack.
+  __ movq(rcx, Operand(rsp, 1 * kPointerSize));
+
+  // Get the serialized scope info from the stack.
+  __ movq(rbx, Operand(rsp, 2 * kPointerSize));
+
+  // Setup the object header.
+  __ LoadRoot(kScratchRegister, Heap::kBlockContextMapRootIndex);
+  __ movq(FieldOperand(rax, HeapObject::kMapOffset), kScratchRegister);
+  __ Move(FieldOperand(rax, FixedArray::kLengthOffset), Smi::FromInt(length));
+
+  // If this block context is nested in the global context we get a smi
+  // sentinel instead of a function. The block context should get the
+  // canonical empty function of the global context as its closure which
+  // we still have to look up.
+  Label after_sentinel;
+  __ JumpIfNotSmi(rcx, &after_sentinel, Label::kNear);
+  if (FLAG_debug_code) {
+    const char* message = "Expected 0 as a Smi sentinel";
+    __ cmpq(rcx, Immediate(0));
+    __ Assert(equal, message);
+  }
+  __ movq(rcx, GlobalObjectOperand());
+  __ movq(rcx, FieldOperand(rcx, GlobalObject::kGlobalContextOffset));
+  __ movq(rcx, ContextOperand(rcx, Context::CLOSURE_INDEX));
+  __ bind(&after_sentinel);
+
+  // Setup the fixed slots.
+  __ movq(ContextOperand(rax, Context::CLOSURE_INDEX), rcx);
+  __ movq(ContextOperand(rax, Context::PREVIOUS_INDEX), rsi);
+  __ movq(ContextOperand(rax, Context::EXTENSION_INDEX), rbx);
+
+  // Copy the global object from the previous context.
+  __ movq(rbx, ContextOperand(rsi, Context::GLOBAL_INDEX));
+  __ movq(ContextOperand(rax, Context::GLOBAL_INDEX), rbx);
+
+  // Initialize the rest of the slots to the hole value.
+  __ LoadRoot(rbx, Heap::kTheHoleValueRootIndex);
+  for (int i = 0; i < slots_; i++) {
+    __ movq(ContextOperand(rax, i + Context::MIN_CONTEXT_SLOTS), rbx);
+  }
+
+  // Return and remove the on-stack parameter.
+  __ movq(rsi, rax);
+  __ ret(2 * kPointerSize);
+
+  // Need to collect. Call into runtime system.
+  __ bind(&gc);
+  __ TailCallRuntime(Runtime::kPushBlockContext, 2, 1);
+}


 void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
