Revision: 20091
Author: [email protected]
Date: Wed Mar 19 20:00:29 2014 UTC
Log: MIPS: Pretenure call new support.
Port r20076 (b7b1372)
Original commit message:
When FLAG_pretenure_call_new is on, we emit mementos on new object creation
in full code, and consume the feedback in crankshaft. A key difference in the
generated code for stubs is the allocation of an additional type vector slot
for the CallNew AST node, which simplifies the CallConstructStub and
CallFunctionStub considerably.
Some performance tuning still needs to be addressed, therefore the flag is
off at this moment, though fully functional. The goal is to remove the flag
as soon as possible, which allows much code deletion (yay).
BUG=
[email protected]
Review URL: https://codereview.chromium.org/203463013
Patch from Balazs Kilvady <[email protected]>.
http://code.google.com/p/v8/source/detail?r=20091
Modified:
/branches/bleeding_edge/src/mips/builtins-mips.cc
/branches/bleeding_edge/src/mips/code-stubs-mips.cc
/branches/bleeding_edge/src/mips/full-codegen-mips.cc
=======================================
--- /branches/bleeding_edge/src/mips/builtins-mips.cc Wed Mar 19 15:46:24
2014 UTC
+++ /branches/bleeding_edge/src/mips/builtins-mips.cc Wed Mar 19 20:00:29
2014 UTC
@@ -342,10 +342,12 @@
static void Generate_JSConstructStubHelper(MacroAssembler* masm,
bool is_api_function,
- bool count_constructions) {
+ bool count_constructions,
+ bool create_memento) {
// ----------- S t a t e -------------
// -- a0 : number of arguments
// -- a1 : constructor function
+ // -- a2 : allocation site or undefined
// -- ra : return address
// -- sp[...]: constructor arguments
// -----------------------------------
@@ -353,6 +355,12 @@
// Should never count constructions for api objects.
ASSERT(!is_api_function || !count_constructions);
+ // Should never create mementos for api functions.
+ ASSERT(!is_api_function || !create_memento);
+
+ // Should never create mementos before slack tracking is finished.
+ ASSERT(!count_constructions || !create_memento);
+
Isolate* isolate = masm->isolate();
// ----------- S t a t e -------------
@@ -366,6 +374,11 @@
{
FrameScope scope(masm, StackFrame::CONSTRUCT);
+ if (create_memento) {
+ __ AssertUndefinedOrAllocationSite(a2, a3);
+ __ push(a2);
+ }
+
// Preserve the two incoming parameters on the stack.
__ sll(a0, a0, kSmiTagSize); // Tag arguments count.
__ MultiPushReversed(a0.bit() | a1.bit());
@@ -425,13 +438,17 @@
// a1: constructor function
// a2: initial map
__ lbu(a3, FieldMemOperand(a2, Map::kInstanceSizeOffset));
+ if (create_memento) {
+ __ Addu(a3, a3, Operand(AllocationMemento::kSize / kPointerSize));
+ }
+
__ Allocate(a3, t4, t5, t6, &rt_call, SIZE_IN_WORDS);
// Allocated the JSObject, now initialize the fields. Map is set to
// initial map and properties and elements are set to empty fixed
array.
// a1: constructor function
// a2: initial map
- // a3: object size
+ // a3: object size (not including memento if create_memento)
// t4: JSObject (not tagged)
__ LoadRoot(t6, Heap::kEmptyFixedArrayRootIndex);
__ mov(t5, t4);
@@ -446,14 +463,13 @@
// Fill all the in-object properties with appropriate filler.
// a1: constructor function
// a2: initial map
- // a3: object size (in words)
+ // a3: object size (in words, including memento if create_memento)
// t4: JSObject (not tagged)
// t5: First in-object property of JSObject (not tagged)
- __ sll(t0, a3, kPointerSizeLog2);
- __ addu(t6, t4, t0); // End of object.
ASSERT_EQ(3 * kPointerSize, JSObject::kHeaderSize);
- __ LoadRoot(t7, Heap::kUndefinedValueRootIndex);
+
if (count_constructions) {
+ __ LoadRoot(t7, Heap::kUndefinedValueRootIndex);
__ lw(a0, FieldMemOperand(a2, Map::kInstanceSizesOffset));
__ Ext(a0, a0, Map::kPreAllocatedPropertyFieldsByte * kBitsPerByte,
kBitsPerByte);
@@ -467,8 +483,33 @@
__ InitializeFieldsWithFiller(t5, a0, t7);
// To allow for truncation.
__ LoadRoot(t7, Heap::kOnePointerFillerMapRootIndex);
+ __ sll(at, a3, kPointerSizeLog2);
+ __ Addu(a0, t4, Operand(at)); // End of object.
+ __ InitializeFieldsWithFiller(t5, a0, t7);
+ } else if (create_memento) {
+ __ Subu(t7, a3, Operand(AllocationMemento::kSize / kPointerSize));
+ __ sll(at, t7, kPointerSizeLog2);
+ __ Addu(a0, t4, Operand(at)); // End of object.
+ __ LoadRoot(t7, Heap::kUndefinedValueRootIndex);
+ __ InitializeFieldsWithFiller(t5, a0, t7);
+
+ // Fill in memento fields.
+ // t5: points to the allocated but uninitialized memento.
+ __ LoadRoot(t7, Heap::kAllocationMementoMapRootIndex);
+ ASSERT_EQ(0 * kPointerSize, AllocationMemento::kMapOffset);
+ __ sw(t7, MemOperand(t5));
+ __ Addu(t5, t5, kPointerSize);
+ // Load the AllocationSite.
+ __ lw(t7, MemOperand(sp, 2 * kPointerSize));
+ ASSERT_EQ(1 * kPointerSize,
AllocationMemento::kAllocationSiteOffset);
+ __ sw(t7, MemOperand(t5));
+ __ Addu(t5, t5, kPointerSize);
+ } else {
+ __ LoadRoot(t7, Heap::kUndefinedValueRootIndex);
+ __ sll(at, a3, kPointerSizeLog2);
+ __ Addu(a0, t4, Operand(at)); // End of object.
+ __ InitializeFieldsWithFiller(t5, a0, t7);
}
- __ InitializeFieldsWithFiller(t5, t6, t7);
// Add the object tag to make the JSObject real, so that we can
continue
// and jump into the continuation code at any time from now on. Any
@@ -572,15 +613,48 @@
__ UndoAllocationInNewSpace(t4, t5);
}
- __ bind(&rt_call);
// Allocate the new receiver object using the runtime call.
// a1: constructor function
+ __ bind(&rt_call);
+ if (create_memento) {
+ // Get the cell or allocation site.
+ __ lw(a2, MemOperand(sp, 2 * kPointerSize));
+ __ push(a2);
+ }
+
__ push(a1); // Argument for Runtime_NewObject.
- __ CallRuntime(Runtime::kNewObject, 1);
+ if (create_memento) {
+ __ CallRuntime(Runtime::kNewObjectWithAllocationSite, 2);
+ } else {
+ __ CallRuntime(Runtime::kNewObject, 1);
+ }
__ mov(t4, v0);
+
+ // If we ended up using the runtime, and we want a memento, then the
+ // runtime call made it for us, and we shouldn't do create count
+ // increment.
+ Label count_incremented;
+ if (create_memento) {
+ __ jmp(&count_incremented);
+ }
// Receiver for constructor call allocated.
// t4: JSObject
+
+ if (create_memento) {
+ __ lw(a2, MemOperand(sp, kPointerSize * 2));
+ __ LoadRoot(t5, Heap::kUndefinedValueRootIndex);
+ __ Branch(&count_incremented, eq, a2, Operand(t5));
+ // a2 is an AllocationSite. We are creating a memento from it, so we
+ // need to increment the memento create count.
+ __ lw(a3, FieldMemOperand(a2,
+
AllocationSite::kPretenureCreateCountOffset));
+ __ Addu(a3, a3, Operand(Smi::FromInt(1)));
+ __ sw(a3, FieldMemOperand(a2,
+
AllocationSite::kPretenureCreateCountOffset));
+ __ bind(&count_incremented);
+ }
+
__ bind(&allocated);
__ Push(t4, t4);
@@ -682,17 +756,17 @@
void Builtins::Generate_JSConstructStubCountdown(MacroAssembler* masm) {
- Generate_JSConstructStubHelper(masm, false, true);
+ Generate_JSConstructStubHelper(masm, false, true, false);
}
void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
- Generate_JSConstructStubHelper(masm, false, false);
+ Generate_JSConstructStubHelper(masm, false, false,
FLAG_pretenuring_call_new);
}
void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
- Generate_JSConstructStubHelper(masm, true, false);
+ Generate_JSConstructStubHelper(masm, true, false, false);
}
=======================================
--- /branches/bleeding_edge/src/mips/code-stubs-mips.cc Wed Mar 19 15:46:24
2014 UTC
+++ /branches/bleeding_edge/src/mips/code-stubs-mips.cc Wed Mar 19 20:00:29
2014 UTC
@@ -3093,18 +3093,20 @@
// function without changing the state.
__ Branch(&done, eq, t0, Operand(a1));
- // If we came here, we need to see if we are the array function.
- // If we didn't have a matching function, and we didn't find the
megamorph
- // sentinel, then we have in the slot either some other function or an
- // AllocationSite. Do a map check on the object in a3.
- __ lw(t1, FieldMemOperand(t0, 0));
- __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex);
- __ Branch(&miss, ne, t1, Operand(at));
+ if (!FLAG_pretenuring_call_new) {
+ // If we came here, we need to see if we are the array function.
+ // If we didn't have a matching function, and we didn't find the
megamorph
+ // sentinel, then we have in the slot either some other function or an
+ // AllocationSite. Do a map check on the object in a3.
+ __ lw(t1, FieldMemOperand(t0, 0));
+ __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex);
+ __ Branch(&miss, ne, t1, Operand(at));
- // Make sure the function is the Array() function
- __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, t0);
- __ Branch(&megamorphic, ne, a1, Operand(t0));
- __ jmp(&done);
+ // Make sure the function is the Array() function
+ __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, t0);
+ __ Branch(&megamorphic, ne, a1, Operand(t0));
+ __ jmp(&done);
+ }
__ bind(&miss);
@@ -3121,36 +3123,38 @@
__ sw(at, FieldMemOperand(t0, FixedArray::kHeaderSize));
__ jmp(&done);
- // An uninitialized cache is patched with the function or sentinel to
- // indicate the ElementsKind if function is the Array constructor.
+ // An uninitialized cache is patched with the function.
__ bind(&initialize);
- // Make sure the function is the Array() function
- __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, t0);
- __ Branch(¬_array_function, ne, a1, Operand(t0));
+ if (!FLAG_pretenuring_call_new) {
+ // Make sure the function is the Array() function.
+ __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, t0);
+ __ Branch(¬_array_function, ne, a1, Operand(t0));
- // The target function is the Array constructor.
- // Create an AllocationSite if we don't already have it, store it in the
slot.
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
- const RegList kSavedRegs =
- 1 << 4 | // a0
- 1 << 5 | // a1
- 1 << 6 | // a2
- 1 << 7; // a3
+ // The target function is the Array constructor,
+ // Create an AllocationSite if we don't already have it, store it in
the
+ // slot.
+ {
+ FrameScope scope(masm, StackFrame::INTERNAL);
+ const RegList kSavedRegs =
+ 1 << 4 | // a0
+ 1 << 5 | // a1
+ 1 << 6 | // a2
+ 1 << 7; // a3
+
+ // Arguments register must be smi-tagged to call out.
+ __ SmiTag(a0);
+ __ MultiPush(kSavedRegs);
- // Arguments register must be smi-tagged to call out.
- __ SmiTag(a0);
- __ MultiPush(kSavedRegs);
+ CreateAllocationSiteStub create_stub;
+ __ CallStub(&create_stub);
- CreateAllocationSiteStub create_stub;
- __ CallStub(&create_stub);
+ __ MultiPop(kSavedRegs);
+ __ SmiUntag(a0);
+ }
+ __ Branch(&done);
- __ MultiPop(kSavedRegs);
- __ SmiUntag(a0);
+ __ bind(¬_array_function);
}
- __ Branch(&done);
-
- __ bind(¬_array_function);
__ sll(t0, a3, kPointerSizeLog2 - kSmiTagSize);
__ Addu(t0, a2, Operand(t0));
@@ -3288,18 +3292,26 @@
__ Branch(&slow, ne, t0, Operand(JS_FUNCTION_TYPE));
if (RecordCallTarget()) {
- Label feedback_register_initialized;
GenerateRecordCallTarget(masm);
- // Put the AllocationSite from the feedback vector into a2, or
undefined.
__ sll(at, a3, kPointerSizeLog2 - kSmiTagSize);
__ Addu(t1, a2, at);
- __ lw(a2, FieldMemOperand(t1, FixedArray::kHeaderSize));
- __ lw(t1, FieldMemOperand(a2, AllocationSite::kMapOffset));
- __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex);
- __ Branch(&feedback_register_initialized, eq, t1, Operand(at));
- __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
- __ bind(&feedback_register_initialized);
+ if (FLAG_pretenuring_call_new) {
+ // Put the AllocationSite from the feedback vector into a2.
+ // By adding kPointerSize we encode that we know the AllocationSite
+ // entry is at the feedback vector slot given by a3 + 1.
+ __ lw(a2, FieldMemOperand(t1, FixedArray::kHeaderSize +
kPointerSize));
+ } else {
+ Label feedback_register_initialized;
+ // Put the AllocationSite from the feedback vector into a2, or
undefined.
+ __ lw(a2, FieldMemOperand(t1, FixedArray::kHeaderSize));
+ __ lw(t1, FieldMemOperand(a2, AllocationSite::kMapOffset));
+ __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex);
+ __ Branch(&feedback_register_initialized, eq, t1, Operand(at));
+ __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
+ __ bind(&feedback_register_initialized);
+ }
+
__ AssertUndefinedOrAllocationSite(a2, t1);
}
=======================================
--- /branches/bleeding_edge/src/mips/full-codegen-mips.cc Mon Mar 17
15:11:04 2014 UTC
+++ /branches/bleeding_edge/src/mips/full-codegen-mips.cc Wed Mar 19
20:00:29 2014 UTC
@@ -2916,6 +2916,13 @@
Handle<Object> uninitialized =
TypeFeedbackInfo::UninitializedSentinel(isolate());
StoreFeedbackVectorSlot(expr->CallNewFeedbackSlot(), uninitialized);
+ if (FLAG_pretenuring_call_new) {
+ StoreFeedbackVectorSlot(expr->AllocationSiteFeedbackSlot(),
+ isolate()->factory()->NewAllocationSite());
+ ASSERT(expr->AllocationSiteFeedbackSlot() ==
+ expr->CallNewFeedbackSlot() + 1);
+ }
+
__ li(a2, FeedbackVector());
__ li(a3, Operand(Smi::FromInt(expr->CallNewFeedbackSlot())));
--
--
v8-dev mailing list
[email protected]
http://groups.google.com/group/v8-dev
---
You received this message because you are subscribed to the Google Groups "v8-dev" group.
To unsubscribe from this group and stop receiving emails from it, send an email
to [email protected].
For more options, visit https://groups.google.com/d/optout.