Revision: 21105
Author:   [email protected]
Date:     Wed Apr 30 20:19:45 2014 UTC
Log:      MIPS: CallICStub with a "never patch" approach by default.

Port r21093 (21e3836)

Original commit message:
Patching will occur only when custom feedback needs to be gathered (future CLs).

Now rebased on https://codereview.chromium.org/254623002/, which moves the type feedback vector to the SharedFunctionInfo.

BUG=
[email protected]

Review URL: https://codereview.chromium.org/260753004

Patch from Balazs Kilvady <[email protected]>.
http://code.google.com/p/v8/source/detail?r=21105

Modified:
 /branches/bleeding_edge/src/mips/builtins-mips.cc
 /branches/bleeding_edge/src/mips/code-stubs-mips.cc
 /branches/bleeding_edge/src/mips/debug-mips.cc
 /branches/bleeding_edge/src/mips/full-codegen-mips.cc
 /branches/bleeding_edge/src/mips/lithium-codegen-mips.cc
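
Because of the rebase mentioned above, the stub no longer receives the feedback vector in a2 from the call site; it reloads it from the SharedFunctionInfo of the function in the current frame (the new EmitLoadTypeFeedbackVector helper below) and indexes it with the Smi slot id in a3. The following C++ sketch is purely illustrative — the struct layouts and names are invented, not the real V8 object model — but shows the equivalent lookup:

#include <cstdint>

// Hypothetical sketch, not V8 source: these types only stand in for
// JSFunction, SharedFunctionInfo and the FixedArray-backed vector.
struct TypeFeedbackVectorLike { void** slots; };
struct SharedFunctionInfoLike { TypeFeedbackVectorLike* feedback_vector; };
struct JSFunctionLike { SharedFunctionInfoLike* shared; };

// The slot id arrives as a Smi (value << 1 on 32-bit MIPS), so untag it
// before indexing. This mirrors `sll t0, a3, kPointerSizeLog2 - kSmiTagSize`
// followed by the FieldMemOperand load at FixedArray::kHeaderSize.
inline void* LoadFeedbackSlot(JSFunctionLike* fn, intptr_t smi_slot_id) {
  TypeFeedbackVectorLike* vector = fn->shared->feedback_vector;
  intptr_t index = smi_slot_id >> 1;  // kSmiTagSize == 1
  return vector->slots[index];
}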

=======================================
--- /branches/bleeding_edge/src/mips/builtins-mips.cc Tue Apr 29 06:42:26 2014 UTC
+++ /branches/bleeding_edge/src/mips/builtins-mips.cc Wed Apr 30 20:19:45 2014 UTC
@@ -799,7 +799,7 @@
     if (is_construct) {
       // No type feedback cell is available
       __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
-      CallConstructStub stub(masm->isolate(), NO_CALL_FUNCTION_FLAGS);
+      CallConstructStub stub(masm->isolate(), NO_CALL_CONSTRUCTOR_FLAGS);
       __ CallStub(&stub);
     } else {
       ParameterCount actual(a0);
=======================================
--- /branches/bleeding_edge/src/mips/code-stubs-mips.cc Tue Apr 29 06:42:26 2014 UTC
+++ /branches/bleeding_edge/src/mips/code-stubs-mips.cc Wed Apr 30 20:19:45 2014 UTC
@@ -3015,13 +3015,63 @@

   __ bind(&done);
 }
+
+
+static void EmitContinueIfStrictOrNative(MacroAssembler* masm, Label* cont) {
+  __ lw(a3, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
+  __ lw(t0, FieldMemOperand(a3, SharedFunctionInfo::kCompilerHintsOffset));
+
+  // Do not transform the receiver for strict mode functions.
+  int32_t strict_mode_function_mask =
+      1 <<  (SharedFunctionInfo::kStrictModeFunction + kSmiTagSize);
+  // Do not transform the receiver for native (Compilerhints already in a3).
+  int32_t native_mask = 1 << (SharedFunctionInfo::kNative + kSmiTagSize);
+  __ And(at, t0, Operand(strict_mode_function_mask | native_mask));
+  __ Branch(cont, ne, at, Operand(zero_reg));
+}
+
+
+static void EmitSlowCase(MacroAssembler* masm,
+                         int argc,
+                         Label* non_function) {
+  // Check for function proxy.
+  __ Branch(non_function, ne, t0, Operand(JS_FUNCTION_PROXY_TYPE));
+  __ push(a1);  // put proxy as additional argument
+  __ li(a0, Operand(argc + 1, RelocInfo::NONE32));
+  __ mov(a2, zero_reg);
+  __ GetBuiltinFunction(a1, Builtins::CALL_FUNCTION_PROXY);
+  {
+    Handle<Code> adaptor =
+        masm->isolate()->builtins()->ArgumentsAdaptorTrampoline();
+    __ Jump(adaptor, RelocInfo::CODE_TARGET);
+  }
+
+  // CALL_NON_FUNCTION expects the non-function callee as receiver (instead
+  // of the original receiver from the call site).
+  __ bind(non_function);
+  __ sw(a1, MemOperand(sp, argc * kPointerSize));
+  __ li(a0, Operand(argc));  // Set up the number of arguments.
+  __ mov(a2, zero_reg);
+  __ GetBuiltinFunction(a1, Builtins::CALL_NON_FUNCTION);
+  __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
+          RelocInfo::CODE_TARGET);
+}
+
+
+static void EmitWrapCase(MacroAssembler* masm, int argc, Label* cont) {
+  // Wrap the receiver and patch it back onto the stack.
+  { FrameScope frame_scope(masm, StackFrame::INTERNAL);
+    __ Push(a1, a3);
+    __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
+    __ pop(a1);
+  }
+  __ Branch(USE_DELAY_SLOT, cont);
+  __ sw(v0, MemOperand(sp, argc * kPointerSize));
+}


 void CallFunctionStub::Generate(MacroAssembler* masm) {
   // a1 : the function to call
-  // a2 : feedback vector
-  // a3 : (only if a2 is not the megamorphic symbol) slot in feedback
-  //      vector (Smi)
   Label slow, non_function, wrap, cont;

   if (NeedsChecks()) {
@@ -3032,34 +3082,20 @@
     // Goto slow case if we do not have a function.
     __ GetObjectType(a1, t0, t0);
     __ Branch(&slow, ne, t0, Operand(JS_FUNCTION_TYPE));
-
-    if (RecordCallTarget()) {
-      GenerateRecordCallTarget(masm);
-      // Type information was updated. Because we may call Array, which
-      // expects either undefined or an AllocationSite in a2 we need
-      // to set a2 to undefined.
-      __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
-    }
   }

   // Fast-case: Invoke the function now.
   // a1: pushed function
-  ParameterCount actual(argc_);
+  int argc = argc_;
+  ParameterCount actual(argc);

   if (CallAsMethod()) {
     if (NeedsChecks()) {
-      // Do not transform the receiver for strict mode functions and natives.
-      __ lw(a3, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
-      __ lw(t0, FieldMemOperand(a3, SharedFunctionInfo::kCompilerHintsOffset));
-      int32_t strict_mode_function_mask =
-          1 <<  (SharedFunctionInfo::kStrictModeFunction + kSmiTagSize);
-      int32_t native_mask = 1 << (SharedFunctionInfo::kNative + kSmiTagSize);
-      __ And(at, t0, Operand(strict_mode_function_mask | native_mask));
-      __ Branch(&cont, ne, at, Operand(zero_reg));
+      EmitContinueIfStrictOrNative(masm, &cont);
     }

     // Compute the receiver in sloppy mode.
-    __ lw(a3, MemOperand(sp, argc_ * kPointerSize));
+    __ lw(a3, MemOperand(sp, argc * kPointerSize));

     if (NeedsChecks()) {
       __ JumpIfSmi(a3, &wrap);
@@ -3071,56 +3107,19 @@

     __ bind(&cont);
   }
+
   __ InvokeFunction(a1, actual, JUMP_FUNCTION, NullCallWrapper());

   if (NeedsChecks()) {
     // Slow-case: Non-function called.
     __ bind(&slow);
-    if (RecordCallTarget()) {
-      // If there is a call target cache, mark it megamorphic in the
-      // non-function case.  MegamorphicSentinel is an immortal immovable
-      // object (megamorphic symbol) so no write barrier is needed.
-      ASSERT_EQ(*TypeFeedbackInfo::MegamorphicSentinel(isolate()),
-                isolate()->heap()->megamorphic_symbol());
-      __ sll(t1, a3, kPointerSizeLog2 - kSmiTagSize);
-      __ Addu(t1, a2, Operand(t1));
-      __ LoadRoot(at, Heap::kMegamorphicSymbolRootIndex);
-      __ sw(at, FieldMemOperand(t1, FixedArray::kHeaderSize));
-    }
-    // Check for function proxy.
-    __ Branch(&non_function, ne, t0, Operand(JS_FUNCTION_PROXY_TYPE));
-    __ push(a1);  // Put proxy as additional argument.
-    __ li(a0, Operand(argc_ + 1, RelocInfo::NONE32));
-    __ li(a2, Operand(0, RelocInfo::NONE32));
-    __ GetBuiltinFunction(a1, Builtins::CALL_FUNCTION_PROXY);
-    {
-      Handle<Code> adaptor =
-        isolate()->builtins()->ArgumentsAdaptorTrampoline();
-      __ Jump(adaptor, RelocInfo::CODE_TARGET);
-    }
-
-    // CALL_NON_FUNCTION expects the non-function callee as receiver (instead
-    // of the original receiver from the call site).
-    __ bind(&non_function);
-    __ sw(a1, MemOperand(sp, argc_ * kPointerSize));
-    __ li(a0, Operand(argc_));  // Set up the number of arguments.
-    __ li(a2, Operand(0, RelocInfo::NONE32));
-    __ GetBuiltinFunction(a1, Builtins::CALL_NON_FUNCTION);
-    __ Jump(isolate()->builtins()->ArgumentsAdaptorTrampoline(),
-            RelocInfo::CODE_TARGET);
+    EmitSlowCase(masm, argc, &non_function);
   }

   if (CallAsMethod()) {
     __ bind(&wrap);
     // Wrap the receiver and patch it back onto the stack.
-    { FrameScope frame_scope(masm, StackFrame::INTERNAL);
-      __ Push(a1, a3);
-      __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
-      __ pop(a1);
-    }
-    __ mov(a0, v0);
-    __ sw(a0, MemOperand(sp, argc_ * kPointerSize));
-    __ jmp(&cont);
+    EmitWrapCase(masm, argc, &cont);
   }
 }

@@ -3187,6 +3186,110 @@
   __ Jump(isolate()->builtins()->ArgumentsAdaptorTrampoline(),
           RelocInfo::CODE_TARGET);
 }
+
+
+static void EmitLoadTypeFeedbackVector(MacroAssembler* masm, Register vector) {
+  __ lw(vector, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
+  __ lw(vector, FieldMemOperand(vector,
+                                JSFunction::kSharedFunctionInfoOffset));
+  __ lw(vector, FieldMemOperand(vector,
+                                SharedFunctionInfo::kFeedbackVectorOffset));
+}
+
+
+void CallICStub::Generate(MacroAssembler* masm) {
+  // a1 - function
+  // a3 - slot id (Smi)
+  Label extra_checks_or_miss, slow_start;
+  Label slow, non_function, wrap, cont;
+  Label have_js_function;
+  int argc = state_.arg_count();
+  ParameterCount actual(argc);
+
+  EmitLoadTypeFeedbackVector(masm, a2);
+
+  // The checks. First, does a1 match the recorded monomorphic target?
+  __ sll(t0, a3, kPointerSizeLog2 - kSmiTagSize);
+  __ Addu(t0, a2, Operand(t0));
+  __ lw(t0, FieldMemOperand(t0, FixedArray::kHeaderSize));
+  __ Branch(&extra_checks_or_miss, ne, a1, Operand(t0));
+
+  __ bind(&have_js_function);
+  if (state_.CallAsMethod()) {
+    EmitContinueIfStrictOrNative(masm, &cont);
+    // Compute the receiver in sloppy mode.
+    __ lw(a3, MemOperand(sp, argc * kPointerSize));
+
+    __ JumpIfSmi(a3, &wrap);
+    __ GetObjectType(a3, t0, t0);
+    __ Branch(&wrap, lt, t0, Operand(FIRST_SPEC_OBJECT_TYPE));
+
+    __ bind(&cont);
+  }
+
+  __ InvokeFunction(a1, actual, JUMP_FUNCTION, NullCallWrapper());
+
+  __ bind(&slow);
+  EmitSlowCase(masm, argc, &non_function);
+
+  if (state_.CallAsMethod()) {
+    __ bind(&wrap);
+    EmitWrapCase(masm, argc, &cont);
+  }
+
+  __ bind(&extra_checks_or_miss);
+  Label miss;
+
+  __ LoadRoot(at, Heap::kMegamorphicSymbolRootIndex);
+  __ Branch(&slow_start, eq, t0, Operand(at));
+  __ LoadRoot(at, Heap::kUninitializedSymbolRootIndex);
+  __ Branch(&miss, eq, t0, Operand(at));
+
+  if (!FLAG_trace_ic) {
+    // We are going megamorphic, and we don't want to visit the runtime.
+    __ sll(t0, a3, kPointerSizeLog2 - kSmiTagSize);
+    __ Addu(t0, a2, Operand(t0));
+    __ LoadRoot(at, Heap::kMegamorphicSymbolRootIndex);
+    __ sw(at, FieldMemOperand(t0, FixedArray::kHeaderSize));
+    __ Branch(&slow_start);
+  }
+
+  // We are here because tracing is on or we are going monomorphic.
+  __ bind(&miss);
+  GenerateMiss(masm);
+
+  // the slow case
+  __ bind(&slow_start);
+  // Check that the function is really a JavaScript function.
+  // a1: pushed function (to be verified)
+  __ JumpIfSmi(a1, &non_function);
+
+  // Goto slow case if we do not have a function.
+  __ GetObjectType(a1, t0, t0);
+  __ Branch(&slow, ne, t0, Operand(JS_FUNCTION_TYPE));
+  __ Branch(&have_js_function);
+}
+
+
+void CallICStub::GenerateMiss(MacroAssembler* masm) {
+  // Get the receiver of the function from the stack; 1 ~ return address.
+  __ lw(t0, MemOperand(sp, (state_.arg_count() + 1) * kPointerSize));
+
+  {
+    FrameScope scope(masm, StackFrame::INTERNAL);
+
+    // Push the receiver and the function and feedback info.
+    __ Push(t0, a1, a2, a3);
+
+    // Call the entry.
+    ExternalReference miss = ExternalReference(IC_Utility(IC::kCallIC_Miss),
+                                               masm->isolate());
+    __ CallExternalReference(miss, 4);
+
+    // Move result to a1 and exit the internal frame.
+    __ mov(a1, v0);
+  }
+}


 // StringCharCodeAtGenerator.
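
The control flow that the new CallICStub::Generate emits above is hard to follow in raw MIPS assembly, so here is a rough C++ sketch of the "never patch by default" dispatch. Everything below (the enum, the sentinel constants, the Dispatch function) is invented for illustration and is not V8 API; the sentinels stand in for the megamorphic/uninitialized root symbols the stub compares against.

// Illustrative only: models the branch structure of CallICStub::Generate.
enum class CallAction { kFastCall, kGenericCall, kRuntimeMiss };

struct FeedbackEntry { void* value; };  // one slot of the feedback vector

static void* const kMegamorphicSentinel   = reinterpret_cast<void*>(1);
static void* const kUninitializedSentinel = reinterpret_cast<void*>(2);

CallAction Dispatch(FeedbackEntry* slot, void* callee, bool trace_ic) {
  if (slot->value == callee) {
    return CallAction::kFastCall;        // monomorphic hit: invoke directly
  }
  if (slot->value == kMegamorphicSentinel) {
    return CallAction::kGenericCall;     // already generic: the slow_start path
  }
  if (slot->value == kUninitializedSentinel) {
    return CallAction::kRuntimeMiss;     // let CallIC_Miss record the target
  }
  // A different function was recorded. With the "never patch" default the
  // stub flips the slot straight to megamorphic instead of re-patching,
  // unless --trace-ic wants the runtime to observe the transition.
  if (!trace_ic) {
    slot->value = kMegamorphicSentinel;
    return CallAction::kGenericCall;
  }
  return CallAction::kRuntimeMiss;
}

This matches the commit message: all call feedback now lives in the vector, the generated IC itself is never patched, and only future CLs that gather custom feedback will reintroduce patching.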
=======================================
--- /branches/bleeding_edge/src/mips/debug-mips.cc Tue Apr 29 06:42:26 2014 UTC
+++ /branches/bleeding_edge/src/mips/debug-mips.cc Wed Apr 30 20:19:45 2014 UTC
@@ -161,6 +161,16 @@
   __ lw(t9, MemOperand(t9));
   __ Jump(t9);
 }
+
+
+void Debug::GenerateCallICStubDebugBreak(MacroAssembler* masm) {
+  // Register state for CallICStub
+  // ----------- S t a t e -------------
+  //  -- a1 : function
+  //  -- a3 : slot in feedback array (smi)
+  // -----------------------------------
+  Generate_DebugBreakCallHelper(masm, a1.bit() | a3.bit(), 0);
+}


 void Debug::GenerateLoadICDebugBreak(MacroAssembler* masm) {
@@ -217,15 +227,6 @@
   // -----------------------------------
   Generate_DebugBreakCallHelper(masm, a0.bit(), 0);
 }
-
-
-void Debug::GenerateCallICDebugBreak(MacroAssembler* masm) {
-  // Calling convention for IC call (from ic-mips.cc).
-  // ----------- S t a t e -------------
-  //  -- a2: name
-  // -----------------------------------
-  Generate_DebugBreakCallHelper(masm, a2.bit(), 0);
-}


 void Debug::GenerateReturnDebugBreak(MacroAssembler* masm) {
@@ -243,17 +244,6 @@
   // -----------------------------------
   Generate_DebugBreakCallHelper(masm, a1.bit(), 0);
 }
-
-
-void Debug::GenerateCallFunctionStubRecordDebugBreak(MacroAssembler* masm) {
-  // Register state for CallFunctionStub (from code-stubs-mips.cc).
-  // ----------- S t a t e -------------
-  //  -- a1 : function
-  //  -- a2 : feedback array
-  //  -- a3 : slot in feedback array
-  // -----------------------------------
-  Generate_DebugBreakCallHelper(masm, a1.bit() | a2.bit() | a3.bit(), 0);
-}


 void Debug::GenerateCallConstructStubDebugBreak(MacroAssembler* masm) {
=======================================
--- /branches/bleeding_edge/src/mips/full-codegen-mips.cc Wed Apr 30 15:25:47 2014 UTC
+++ /branches/bleeding_edge/src/mips/full-codegen-mips.cc Wed Apr 30 20:19:45 2014 UTC
@@ -2624,14 +2624,15 @@


 // Code common for calls using the IC.
-void FullCodeGenerator::EmitCallWithIC(Call* expr) {
+void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
   Expression* callee = expr->expression();
-  ZoneList<Expression*>* args = expr->arguments();
-  int arg_count = args->length();

-  CallFunctionFlags flags;
+  CallIC::CallType call_type = callee->IsVariableProxy()
+      ? CallIC::FUNCTION
+      : CallIC::METHOD;
+
   // Get the target function.
-  if (callee->IsVariableProxy()) {
+  if (call_type == CallIC::FUNCTION) {
     { StackValueContext context(this);
       EmitVariableLoad(callee->AsVariableProxy());
       PrepareForBailout(callee, NO_REGISTERS);
@@ -2639,7 +2640,6 @@
     // Push undefined as receiver. This is patched in the method prologue if it
     // is a sloppy mode method.
     __ Push(isolate()->factory()->undefined_value());
-    flags = NO_CALL_FUNCTION_FLAGS;
   } else {
     // Load the function from the receiver.
     ASSERT(callee->IsProperty());
@@ -2650,39 +2650,19 @@
     __ lw(at, MemOperand(sp, 0));
     __ push(at);
     __ sw(v0, MemOperand(sp, kPointerSize));
-    flags = CALL_AS_METHOD;
   }

-  // Load the arguments.
-  { PreservePositionScope scope(masm()->positions_recorder());
-    for (int i = 0; i < arg_count; i++) {
-      VisitForStackValue(args->at(i));
-    }
-  }
-  // Record source position for debugger.
-  SetSourcePosition(expr->position());
-  CallFunctionStub stub(isolate(), arg_count, flags);
-  __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
-  __ CallStub(&stub);
-
-  RecordJSReturnSite(expr);
-
-  // Restore context register.
-  __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
-
-  context()->DropAndPlug(1, v0);
+  EmitCall(expr, call_type);
 }


 // Code common for calls using the IC.
-void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
-                                            Expression* key) {
+void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
+                                                Expression* key) {
   // Load the key.
   VisitForAccumulatorValue(key);

   Expression* callee = expr->expression();
-  ZoneList<Expression*>* args = expr->arguments();
-  int arg_count = args->length();

   // Load the function from the receiver.
   ASSERT(callee->IsProperty());
@@ -2695,28 +2675,12 @@
   __ push(at);
   __ sw(v0, MemOperand(sp, kPointerSize));

-  { PreservePositionScope scope(masm()->positions_recorder());
-    for (int i = 0; i < arg_count; i++) {
-      VisitForStackValue(args->at(i));
-    }
-  }
-
-  // Record source position for debugger.
-  SetSourcePosition(expr->position());
-  CallFunctionStub stub(isolate(), arg_count, CALL_AS_METHOD);
-  __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
-  __ CallStub(&stub);
-
-  RecordJSReturnSite(expr);
-  // Restore context register.
-  __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
-
-  context()->DropAndPlug(1, v0);
+  EmitCall(expr, CallIC::METHOD);
 }


-void FullCodeGenerator::EmitCallWithStub(Call* expr) {
-  // Code common for calls using the call stub.
+void FullCodeGenerator::EmitCall(Call* expr, CallIC::CallType call_type) {
+  // Load the arguments.
   ZoneList<Expression*>* args = expr->arguments();
   int arg_count = args->length();
   { PreservePositionScope scope(masm()->positions_recorder());
@@ -2724,16 +2688,17 @@
       VisitForStackValue(args->at(i));
     }
   }
-  // Record source position for debugger.
+
+  // Record source position of the IC call.
   SetSourcePosition(expr->position());
-
-  __ li(a2, FeedbackVector());
+  Handle<Code> ic = CallIC::initialize_stub(
+      isolate(), arg_count, call_type);
   __ li(a3, Operand(Smi::FromInt(expr->CallFeedbackSlot())));
+  __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
+  // Don't assign a type feedback id to the IC, since type feedback is provided
+  // by the vector above.
+  CallIC(ic);

-  // Record call targets in unoptimized code.
-  CallFunctionStub stub(isolate(), arg_count, RECORD_CALL_TARGET);
-  __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
-  __ CallStub(&stub);
   RecordJSReturnSite(expr);
   // Restore context register.
   __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
@@ -2815,7 +2780,7 @@
     __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
     context()->DropAndPlug(1, v0);
   } else if (call_type == Call::GLOBAL_CALL) {
-    EmitCallWithIC(expr);
+    EmitCallWithLoadIC(expr);
   } else if (call_type == Call::LOOKUP_SLOT_CALL) {
     // Call to a lookup slot (dynamically introduced variable).
     VariableProxy* proxy = callee->AsVariableProxy();
@@ -2854,16 +2819,16 @@

     // The receiver is either the global receiver or an object found
     // by LoadContextSlot.
-    EmitCallWithStub(expr);
+    EmitCall(expr);
   } else if (call_type == Call::PROPERTY_CALL) {
     Property* property = callee->AsProperty();
     { PreservePositionScope scope(masm()->positions_recorder());
       VisitForStackValue(property->obj());
     }
     if (property->key()->IsPropertyName()) {
-      EmitCallWithIC(expr);
+      EmitCallWithLoadIC(expr);
     } else {
-      EmitKeyedCallWithIC(expr, property->key());
+      EmitKeyedCallWithLoadIC(expr, property->key());
     }
   } else {
     ASSERT(call_type == Call::OTHER_CALL);
@@ -2874,7 +2839,7 @@
     __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
     __ push(a1);
     // Emit function call.
-    EmitCallWithStub(expr);
+    EmitCall(expr);
   }

 #ifdef DEBUG
@@ -2920,7 +2885,7 @@
   __ li(a2, FeedbackVector());
   __ li(a3, Operand(Smi::FromInt(expr->CallNewFeedbackSlot())));

-  CallConstructStub stub(isolate(), RECORD_CALL_TARGET);
+  CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
   __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
   PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
   context()->Plug(v0);
=======================================
--- /branches/bleeding_edge/src/mips/lithium-codegen-mips.cc Mon Apr 28 15:33:16 2014 UTC
+++ /branches/bleeding_edge/src/mips/lithium-codegen-mips.cc Wed Apr 30 20:19:45 2014 UTC
@@ -3981,7 +3981,7 @@
   __ li(a0, Operand(instr->arity()));
   // No cell in a2 for construct type feedback in optimized code
   __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
-  CallConstructStub stub(isolate(), NO_CALL_FUNCTION_FLAGS);
+  CallConstructStub stub(isolate(), NO_CALL_CONSTRUCTOR_FLAGS);
   CallCode(stub.GetCode(), RelocInfo::CONSTRUCT_CALL, instr);
 }
