Revision: 17741
Author: [email protected]
Date: Thu Nov 14 12:05:09 2013 UTC
Log: Inline zero-argument array constructor.
Patch from issue 54583003 (dependent code).
Zero arguments - very easy
1 argument - three special cases:
a) If length is a constant in valid array length range,
no need to check it at runtime.
b) respect DoNotInline feedback on the AllocationSite for
cases where the argument is not a smi, or is an integer
with a length that would create a dictionary.
c) if kind feedback is non-holey and length is non-constant,
we'd have to generate a lot of code to be correct.
Don't inline this case.
N arguments - one special case:
a) If a deopt ever occurs because an input argument isn't
compatible with the elements kind, then set the
DoNotInline flag.
BUG=
[email protected]
Review URL: https://codereview.chromium.org/55933002
http://code.google.com/p/v8/source/detail?r=17741
Modified:
/branches/bleeding_edge/src/arm/code-stubs-arm.cc
/branches/bleeding_edge/src/arm/stub-cache-arm.cc
/branches/bleeding_edge/src/code-stubs-hydrogen.cc
/branches/bleeding_edge/src/elements-kind.cc
/branches/bleeding_edge/src/elements-kind.h
/branches/bleeding_edge/src/heap.cc
/branches/bleeding_edge/src/hydrogen.cc
/branches/bleeding_edge/src/hydrogen.h
/branches/bleeding_edge/src/ia32/code-stubs-ia32.cc
/branches/bleeding_edge/src/ia32/stub-cache-ia32.cc
/branches/bleeding_edge/src/objects-inl.h
/branches/bleeding_edge/src/objects-printer.cc
/branches/bleeding_edge/src/objects.cc
/branches/bleeding_edge/src/objects.h
/branches/bleeding_edge/src/runtime.cc
/branches/bleeding_edge/src/x64/code-stubs-x64.cc
/branches/bleeding_edge/src/x64/stub-cache-x64.cc
/branches/bleeding_edge/test/mjsunit/array-constructor-feedback.js
=======================================
--- /branches/bleeding_edge/src/arm/code-stubs-arm.cc Wed Nov 13 10:07:04
2013 UTC
+++ /branches/bleeding_edge/src/arm/code-stubs-arm.cc Thu Nov 14 12:05:09
2013 UTC
@@ -5850,11 +5850,13 @@
__ ldr(r5, FieldMemOperand(r2, Cell::kValueOffset));
}
- // Save the resulting elements kind in type info
- __ SmiTag(r3);
- __ ldr(r5, FieldMemOperand(r2, Cell::kValueOffset));
- __ str(r3, FieldMemOperand(r5, AllocationSite::kTransitionInfoOffset));
- __ SmiUntag(r3);
+ // Save the resulting elements kind in type info. We can't just store
r3
+ // in the AllocationSite::transition_info field because elements kind
is
+ // restricted to a portion of the field...upper bits need to be left
alone.
+ STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
+ __ ldr(r4, FieldMemOperand(r5, AllocationSite::kTransitionInfoOffset));
+ __ add(r4, r4, Operand(Smi::FromInt(kFastElementsKindPackedToHoley)));
+ __ str(r4, FieldMemOperand(r5, AllocationSite::kTransitionInfoOffset));
__ bind(&normal_sequence);
int last_index = GetSequenceIndexFromFastElementsKind(
@@ -5996,6 +5998,8 @@
__ ldr(r3, FieldMemOperand(r3, AllocationSite::kTransitionInfoOffset));
__ SmiUntag(r3);
+ STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
+ __ and_(r3, r3, Operand(AllocationSite::ElementsKindBits::kMask));
GenerateDispatchToArrayStub(masm, DONT_OVERRIDE);
__ bind(&no_info);
=======================================
--- /branches/bleeding_edge/src/arm/stub-cache-arm.cc Thu Nov 7 10:17:13
2013 UTC
+++ /branches/bleeding_edge/src/arm/stub-cache-arm.cc Thu Nov 14 12:05:09
2013 UTC
@@ -1686,7 +1686,7 @@
}
Handle<AllocationSite> site = isolate()->factory()->NewAllocationSite();
- site->set_transition_info(Smi::FromInt(GetInitialFastElementsKind()));
+ site->SetElementsKind(GetInitialFastElementsKind());
Handle<Cell> site_feedback_cell = isolate()->factory()->NewCell(site);
__ mov(r0, Operand(argc));
__ mov(r2, Operand(site_feedback_cell));
=======================================
--- /branches/bleeding_edge/src/code-stubs-hydrogen.cc Wed Nov 13 10:07:04
2013 UTC
+++ /branches/bleeding_edge/src/code-stubs-hydrogen.cc Thu Nov 14 12:05:09
2013 UTC
@@ -694,27 +694,7 @@
HInstruction* argument = Add<HAccessArgumentsAt>(
elements, constant_one, constant_zero);
- HConstant* max_alloc_length =
- Add<HConstant>(JSObject::kInitialMaxFastElementArray);
- const int initial_capacity = JSArray::kPreallocatedArrayElements;
- HConstant* initial_capacity_node = Add<HConstant>(initial_capacity);
-
- HInstruction* checked_arg = Add<HBoundsCheck>(argument,
max_alloc_length);
- IfBuilder if_builder(this);
- if_builder.If<HCompareNumericAndBranch>(checked_arg, constant_zero,
- Token::EQ);
- if_builder.Then();
- Push(initial_capacity_node); // capacity
- Push(constant_zero); // length
- if_builder.Else();
- Push(checked_arg); // capacity
- Push(checked_arg); // length
- if_builder.End();
-
- // Figure out total size
- HValue* length = Pop();
- HValue* capacity = Pop();
- return array_builder->AllocateArray(capacity, length, true);
+ return BuildAllocateArrayFromLength(array_builder, argument);
}
@@ -725,11 +705,16 @@
// the array because they aren't compatible with a smi array.
// If it's a double array, no problem, and if it's fast then no
// problem either because doubles are boxed.
+ //
+ // TODO(mvstanton): consider an instruction to memset fill the array
+ // with zero in this case instead.
HValue* length = GetArgumentsLength();
- bool fill_with_hole = IsFastSmiElementsKind(kind);
+ JSArrayBuilder::FillMode fill_mode = IsFastSmiElementsKind(kind)
+ ? JSArrayBuilder::FILL_WITH_HOLE
+ : JSArrayBuilder::DONT_FILL_WITH_HOLE;
HValue* new_object = array_builder->AllocateArray(length,
length,
- fill_with_hole);
+ fill_mode);
HValue* elements = array_builder->GetElementsLocation();
ASSERT(elements != NULL);
=======================================
--- /branches/bleeding_edge/src/elements-kind.cc Fri Jul 5 09:52:11 2013
UTC
+++ /branches/bleeding_edge/src/elements-kind.cc Thu Nov 14 12:05:09 2013
UTC
@@ -68,6 +68,14 @@
fast_elements_kind_sequence[3] = FAST_HOLEY_DOUBLE_ELEMENTS;
fast_elements_kind_sequence[4] = FAST_ELEMENTS;
fast_elements_kind_sequence[5] = FAST_HOLEY_ELEMENTS;
+
+ // Verify that kFastElementsKindPackedToHoley is correct.
+ STATIC_ASSERT(FAST_SMI_ELEMENTS + kFastElementsKindPackedToHoley ==
+ FAST_HOLEY_SMI_ELEMENTS);
+ STATIC_ASSERT(FAST_DOUBLE_ELEMENTS + kFastElementsKindPackedToHoley ==
+ FAST_HOLEY_DOUBLE_ELEMENTS);
+ STATIC_ASSERT(FAST_ELEMENTS + kFastElementsKindPackedToHoley ==
+ FAST_HOLEY_ELEMENTS);
}
};
=======================================
--- /branches/bleeding_edge/src/elements-kind.h Mon Jul 15 15:23:52 2013 UTC
+++ /branches/bleeding_edge/src/elements-kind.h Thu Nov 14 12:05:09 2013 UTC
@@ -77,6 +77,10 @@
const int kFastElementsKindCount = LAST_FAST_ELEMENTS_KIND -
FIRST_FAST_ELEMENTS_KIND + 1;
+// The number to add to a packed elements kind to reach a holey elements
kind
+const int kFastElementsKindPackedToHoley =
+ FAST_HOLEY_SMI_ELEMENTS - FAST_SMI_ELEMENTS;
+
const char* ElementsKindToString(ElementsKind kind);
void PrintElementsKind(FILE* out, ElementsKind kind);
=======================================
--- /branches/bleeding_edge/src/heap.cc Wed Nov 13 10:34:06 2013 UTC
+++ /branches/bleeding_edge/src/heap.cc Thu Nov 14 12:05:09 2013 UTC
@@ -4598,8 +4598,7 @@
// advice
Map* initial_map = constructor->initial_map();
- Smi* smi = Smi::cast(allocation_site->transition_info());
- ElementsKind to_kind = static_cast<ElementsKind>(smi->value());
+ ElementsKind to_kind = allocation_site->GetElementsKind();
AllocationSiteMode mode = TRACK_ALLOCATION_SITE;
if (to_kind != initial_map->elements_kind()) {
MaybeObject* maybe_new_map = initial_map->AsElementsKind(to_kind);
=======================================
--- /branches/bleeding_edge/src/hydrogen.cc Wed Nov 13 17:03:11 2013 UTC
+++ /branches/bleeding_edge/src/hydrogen.cc Thu Nov 14 12:05:09 2013 UTC
@@ -1908,6 +1908,48 @@
elements_kind, is_store, load_mode);
}
+
+
+HValue* HGraphBuilder::BuildAllocateArrayFromLength(
+ JSArrayBuilder* array_builder,
+ HValue* length_argument) {
+ if (length_argument->IsConstant() &&
+ HConstant::cast(length_argument)->HasSmiValue()) {
+ int array_length = HConstant::cast(length_argument)->Integer32Value();
+ HValue* new_object = array_length == 0
+ ? array_builder->AllocateEmptyArray()
+ : array_builder->AllocateArray(length_argument, length_argument);
+ return new_object;
+ }
+
+ HValue* constant_zero = graph()->GetConstant0();
+ HConstant* max_alloc_length =
+ Add<HConstant>(JSObject::kInitialMaxFastElementArray);
+ HInstruction* checked_length = Add<HBoundsCheck>(length_argument,
+ max_alloc_length);
+ IfBuilder if_builder(this);
+ if_builder.If<HCompareNumericAndBranch>(checked_length, constant_zero,
+ Token::EQ);
+ if_builder.Then();
+ const int initial_capacity = JSArray::kPreallocatedArrayElements;
+ HConstant* initial_capacity_node = Add<HConstant>(initial_capacity);
+ Push(initial_capacity_node); // capacity
+ Push(constant_zero); // length
+ if_builder.Else();
+ if (!(top_info()->IsStub()) &&
+ IsFastPackedElementsKind(array_builder->kind())) {
+ // We'll come back later with better (holey) feedback.
+ if_builder.Deopt("Holey array despite packed elements_kind feedback");
+ }
+ Push(checked_length); // capacity
+ Push(checked_length); // length
+ if_builder.End();
+
+ // Figure out total size
+ HValue* length = Pop();
+ HValue* capacity = Pop();
+ return array_builder->AllocateArray(capacity, length);
+}
HValue* HGraphBuilder::BuildAllocateElements(ElementsKind kind,
HValue* capacity) {
@@ -2097,19 +2139,18 @@
: Add<HConstant>(nan_double);
// Special loop unfolding case
- static const int kLoopUnfoldLimit = 4;
- bool unfold_loop = false;
- int initial_capacity = JSArray::kPreallocatedArrayElements;
- if (from->ActualValue()->IsConstant() && to->ActualValue()->IsConstant()
&&
- initial_capacity <= kLoopUnfoldLimit) {
+ static const int kLoopUnfoldLimit = 8;
+ STATIC_ASSERT(JSArray::kPreallocatedArrayElements <= kLoopUnfoldLimit);
+ int initial_capacity = -1;
+ if (from->ActualValue()->IsConstant() &&
to->ActualValue()->IsConstant()) {
HConstant* constant_from = HConstant::cast(from->ActualValue());
HConstant* constant_to = HConstant::cast(to->ActualValue());
if (constant_from->HasInteger32Value() &&
constant_from->Integer32Value() == 0 &&
constant_to->HasInteger32Value() &&
- constant_to->Integer32Value() == initial_capacity) {
- unfold_loop = true;
+ constant_to->Integer32Value() <= kLoopUnfoldLimit) {
+ initial_capacity = constant_to->Integer32Value();
}
}
@@ -2119,7 +2160,7 @@
elements_kind = FAST_HOLEY_ELEMENTS;
}
- if (unfold_loop) {
+ if (initial_capacity >= 0) {
for (int i = 0; i < initial_capacity; i++) {
HInstruction* key = Add<HConstant>(i);
Add<HStoreKeyed>(elements, key, hole, elements_kind);
@@ -2378,6 +2419,13 @@
HValue* HGraphBuilder::JSArrayBuilder::EmitMapCode() {
+ if (!builder()->top_info()->IsStub()) {
+ // A constant map is fine.
+ Handle<Map> map(builder()->isolate()->get_initial_js_array_map(kind_),
+ builder()->isolate());
+ return builder()->Add<HConstant>(map);
+ }
+
if (kind_ == GetInitialFastElementsKind()) {
// No need for a context lookup if the kind_ matches the initial
// map, because we can just load the map in that case.
@@ -2420,12 +2468,14 @@
HInstruction* elements_size_value =
builder()->Add<HConstant>(elements_size());
- HInstruction* mul = builder()->Add<HMul>(length_node,
elements_size_value);
- mul->ClearFlag(HValue::kCanOverflow);
-
+ HInstruction* mul = HMul::NewImul(builder()->zone(),
builder()->context(),
+ length_node, elements_size_value);
+ builder()->AddInstruction(mul);
HInstruction* base = builder()->Add<HConstant>(base_size);
- HInstruction* total_size = builder()->Add<HAdd>(base, mul);
+ HInstruction* total_size = HAdd::New(builder()->zone(),
builder()->context(),
+ base, mul);
total_size->ClearFlag(HValue::kCanOverflow);
+ builder()->AddInstruction(total_size);
return total_size;
}
@@ -2449,23 +2499,22 @@
HConstant* capacity = builder()->Add<HConstant>(initial_capacity());
return AllocateArray(size_in_bytes,
capacity,
- builder()->graph()->GetConstant0(),
- true);
+ builder()->graph()->GetConstant0());
}
HValue* HGraphBuilder::JSArrayBuilder::AllocateArray(HValue* capacity,
HValue* length_field,
- bool fill_with_hole) {
+ FillMode fill_mode) {
HValue* size_in_bytes = EstablishAllocationSize(capacity);
- return AllocateArray(size_in_bytes, capacity, length_field,
fill_with_hole);
+ return AllocateArray(size_in_bytes, capacity, length_field, fill_mode);
}
HValue* HGraphBuilder::JSArrayBuilder::AllocateArray(HValue* size_in_bytes,
HValue* capacity,
HValue* length_field,
- bool fill_with_hole) {
+ FillMode fill_mode) {
// These HForceRepresentations are because we store these as fields in
the
// objects we construct, and an int32-to-smi HChange could deopt. Accept
// the deopt possibility now, before allocation occurs.
@@ -2499,7 +2548,7 @@
// Initialize the elements
builder()->BuildInitializeElementsHeader(elements_location_, kind_,
capacity);
- if (fill_with_hole) {
+ if (fill_mode == FILL_WITH_HOLE) {
builder()->BuildFillElementsWithHole(elements_location_, kind_,
graph()->GetConstant0(),
capacity);
}
@@ -7533,6 +7582,71 @@
return ast_context()->ReturnInstruction(call, expr->id());
}
+
+
+void HOptimizedGraphBuilder::BuildInlinedCallNewArray(CallNew* expr) {
+ NoObservableSideEffectsScope no_effects(this);
+
+ int argument_count = expr->arguments()->length();
+ // We should at least have the constructor on the expression stack.
+ HValue* constructor = environment()->ExpressionStackAt(argument_count);
+
+ ElementsKind kind = expr->elements_kind();
+ Handle<Cell> cell = expr->allocation_info_cell();
+ AllocationSite* site = AllocationSite::cast(cell->value());
+
+ // Register on the site for deoptimization if the cell value changes.
+ site->AddDependentCompilationInfo(AllocationSite::TRANSITIONS,
top_info());
+ HInstruction* cell_instruction = Add<HConstant>(cell);
+
+ // In the single constant argument case, we may have to adjust elements
kind
+ // to avoid creating a packed non-empty array.
+ if (argument_count == 1 && !IsHoleyElementsKind(kind)) {
+ HValue* argument = environment()->Top();
+ if (argument->IsConstant()) {
+ HConstant* constant_argument = HConstant::cast(argument);
+ ASSERT(constant_argument->HasSmiValue());
+ int constant_array_size = constant_argument->Integer32Value();
+ if (constant_array_size != 0) {
+ kind = GetHoleyElementsKind(kind);
+ }
+ }
+ }
+
+ // Build the array.
+ JSArrayBuilder array_builder(this,
+ kind,
+ cell_instruction,
+ constructor,
+ DISABLE_ALLOCATION_SITES);
+ HValue* new_object;
+ if (argument_count == 0) {
+ new_object = array_builder.AllocateEmptyArray();
+ } else if (argument_count == 1) {
+ HValue* argument = environment()->Top();
+ new_object = BuildAllocateArrayFromLength(&array_builder, argument);
+ } else {
+ HValue* length = Add<HConstant>(argument_count);
+ // Smi arrays need to initialize array elements with the hole because
+ // bailout could occur if the arguments don't fit in a smi.
+ //
+ // TODO(mvstanton): If all the arguments are constants in smi range,
then
+ // we could set fill_with_hole to false and save a few instructions.
+ JSArrayBuilder::FillMode fill_mode = IsFastSmiElementsKind(kind)
+ ? JSArrayBuilder::FILL_WITH_HOLE
+ : JSArrayBuilder::DONT_FILL_WITH_HOLE;
+ new_object = array_builder.AllocateArray(length, length, fill_mode);
+ HValue* elements = array_builder.GetElementsLocation();
+ for (int i = 0; i < argument_count; i++) {
+ HValue* value = environment()->ExpressionStackAt(argument_count - i
- 1);
+ HValue* constant_i = Add<HConstant>(i);
+ Add<HStoreKeyed>(elements, constant_i, value, kind);
+ }
+ }
+
+ Drop(argument_count + 1); // drop constructor and args.
+ ast_context()->ReturnValue(new_object);
+}
// Checks whether allocation using the given constructor can be inlined.
@@ -7542,6 +7656,50 @@
constructor->initial_map()->instance_size() <
HAllocate::kMaxInlineSize &&
constructor->initial_map()->InitialPropertiesLength() == 0;
}
+
+
+bool HOptimizedGraphBuilder::IsCallNewArrayInlineable(CallNew* expr) {
+ bool inline_ok = false;
+ Handle<JSFunction> caller = current_info()->closure();
+ Handle<JSFunction> target(isolate()->global_context()->array_function(),
+ isolate());
+ int argument_count = expr->arguments()->length();
+ // We should have the function plus array arguments on the environment
stack.
+ ASSERT(environment()->length() >= (argument_count + 1));
+ Handle<Cell> cell = expr->allocation_info_cell();
+ AllocationSite* site = AllocationSite::cast(cell->value());
+ if (site->CanInlineCall()) {
+ // We also want to avoid inlining in certain 1 argument scenarios.
+ if (argument_count == 1) {
+ HValue* argument = Top();
+ if (argument->IsConstant()) {
+ // Do not inline if the constant length argument is not a smi or
+ // outside the valid range for a fast array.
+ HConstant* constant_argument = HConstant::cast(argument);
+ if (constant_argument->HasSmiValue()) {
+ int value = constant_argument->Integer32Value();
+ inline_ok = value >= 0 &&
+ value < JSObject::kInitialMaxFastElementArray;
+ if (!inline_ok) {
+ TraceInline(target, caller,
+ "Length outside of valid array range");
+ }
+ }
+ } else {
+ inline_ok = true;
+ }
+ } else {
+ inline_ok = true;
+ }
+ } else {
+ TraceInline(target, caller, "AllocationSite requested no inlining.");
+ }
+
+ if (inline_ok) {
+ TraceInline(target, caller, NULL);
+ }
+ return inline_ok;
+}
void HOptimizedGraphBuilder::VisitCallNew(CallNew* expr) {
@@ -7552,14 +7710,15 @@
int argument_count = expr->arguments()->length() + 1; // Plus
constructor.
Factory* factory = isolate()->factory();
+ // The constructor function is on the stack in the unoptimized code
+ // during evaluation of the arguments.
+ CHECK_ALIVE(VisitForValue(expr->expression()));
+ HValue* function = Top();
+ CHECK_ALIVE(VisitExpressions(expr->arguments()));
+
if (FLAG_inline_construct &&
expr->IsMonomorphic() &&
IsAllocationInlineable(expr->target())) {
- // The constructor function is on the stack in the unoptimized code
- // during evaluation of the arguments.
- CHECK_ALIVE(VisitForValue(expr->expression()));
- HValue* function = Top();
- CHECK_ALIVE(VisitExpressions(expr->arguments()));
Handle<JSFunction> constructor = expr->target();
HValue* check = Add<HCheckValue>(function, constructor);
@@ -7646,19 +7805,24 @@
// argument to the construct call.
Handle<JSFunction> array_function(
isolate()->global_context()->array_function(), isolate());
- CHECK_ALIVE(VisitArgument(expr->expression()));
- HValue* constructor = HPushArgument::cast(Top())->argument();
- CHECK_ALIVE(VisitArgumentList(expr->arguments()));
+ bool use_call_new_array =
expr->target().is_identical_to(array_function);
+ Handle<Cell> cell = expr->allocation_info_cell();
+ if (use_call_new_array && IsCallNewArrayInlineable(expr)) {
+ // Verify we are still calling the array function for our native
context.
+ Add<HCheckValue>(function, array_function);
+ BuildInlinedCallNewArray(expr);
+ return;
+ }
+
HBinaryCall* call;
- if (expr->target().is_identical_to(array_function)) {
- Handle<Cell> cell = expr->allocation_info_cell();
- Add<HCheckValue>(constructor, array_function);
- call = New<HCallNewArray>(constructor, argument_count,
- cell, expr->elements_kind());
+ if (use_call_new_array) {
+ Add<HCheckValue>(function, array_function);
+ call = New<HCallNewArray>(function, argument_count, cell,
+ expr->elements_kind());
} else {
- call = New<HCallNew>(constructor, argument_count);
+ call = New<HCallNew>(function, argument_count);
}
- Drop(argument_count);
+ PreProcessCall(call);
return ast_context()->ReturnInstruction(call, expr->id());
}
}
=======================================
--- /branches/bleeding_edge/src/hydrogen.h Tue Nov 12 10:21:08 2013 UTC
+++ /branches/bleeding_edge/src/hydrogen.h Thu Nov 14 12:05:09 2013 UTC
@@ -1585,9 +1585,16 @@
ElementsKind kind,
HValue* constructor_function);
+ enum FillMode {
+ DONT_FILL_WITH_HOLE,
+ FILL_WITH_HOLE
+ };
+
+ ElementsKind kind() { return kind_; }
+
HValue* AllocateEmptyArray();
HValue* AllocateArray(HValue* capacity, HValue* length_field,
- bool fill_with_hole);
+ FillMode fill_mode = FILL_WITH_HOLE);
HValue* GetElementsLocation() { return elements_location_; }
private:
@@ -1607,7 +1614,8 @@
HValue* EstablishEmptyArrayAllocationSize();
HValue* EstablishAllocationSize(HValue* length_node);
HValue* AllocateArray(HValue* size_in_bytes, HValue* capacity,
- HValue* length_field, bool fill_with_hole);
+ HValue* length_field,
+ FillMode fill_mode = FILL_WITH_HOLE);
HGraphBuilder* builder_;
ElementsKind kind_;
@@ -1617,6 +1625,9 @@
HInnerAllocatedObject* elements_location_;
};
+ HValue* BuildAllocateArrayFromLength(JSArrayBuilder* array_builder,
+ HValue* length_argument);
+
HValue* BuildAllocateElements(ElementsKind kind,
HValue* capacity);
@@ -2101,6 +2112,9 @@
SmallMapList* types,
Handle<String> name);
+ bool IsCallNewArrayInlineable(CallNew* expr);
+ void BuildInlinedCallNewArray(CallNew* expr);
+
class PropertyAccessInfo {
public:
PropertyAccessInfo(Isolate* isolate, Handle<Map> map, Handle<String>
name)
=======================================
--- /branches/bleeding_edge/src/ia32/code-stubs-ia32.cc Wed Nov 13 10:07:04
2013 UTC
+++ /branches/bleeding_edge/src/ia32/code-stubs-ia32.cc Thu Nov 14 12:05:09
2013 UTC
@@ -5785,10 +5785,12 @@
__ Assert(equal, kExpectedAllocationSiteInCell);
}
- // Save the resulting elements kind in type info
- __ SmiTag(edx);
- __ mov(FieldOperand(ecx, AllocationSite::kTransitionInfoOffset), edx);
- __ SmiUntag(edx);
+ // Save the resulting elements kind in type info. We can't just store
r3
+ // in the AllocationSite::transition_info field because elements kind
is
+ // restricted to a portion of the field...upper bits need to be left
alone.
+ STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
+ __ add(FieldOperand(ecx, AllocationSite::kTransitionInfoOffset),
+ Immediate(Smi::FromInt(kFastElementsKindPackedToHoley)));
__ bind(&normal_sequence);
int last_index = GetSequenceIndexFromFastElementsKind(
@@ -5929,8 +5931,11 @@
masm->isolate()->factory()->allocation_site_map()));
__ j(not_equal, &no_info);
+ // Only look at the lower 16 bits of the transition info.
__ mov(edx, FieldOperand(edx, AllocationSite::kTransitionInfoOffset));
__ SmiUntag(edx);
+ STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
+ __ and_(edx, Immediate(AllocationSite::ElementsKindBits::kMask));
GenerateDispatchToArrayStub(masm, DONT_OVERRIDE);
__ bind(&no_info);
=======================================
--- /branches/bleeding_edge/src/ia32/stub-cache-ia32.cc Thu Nov 7 10:17:13
2013 UTC
+++ /branches/bleeding_edge/src/ia32/stub-cache-ia32.cc Thu Nov 14 12:05:09
2013 UTC
@@ -1705,7 +1705,7 @@
}
Handle<AllocationSite> site = isolate()->factory()->NewAllocationSite();
- site->set_transition_info(Smi::FromInt(GetInitialFastElementsKind()));
+ site->SetElementsKind(GetInitialFastElementsKind());
Handle<Cell> site_feedback_cell = isolate()->factory()->NewCell(site);
__ mov(eax, Immediate(argc));
__ mov(ebx, site_feedback_cell);
=======================================
--- /branches/bleeding_edge/src/objects-inl.h Wed Nov 13 10:34:06 2013 UTC
+++ /branches/bleeding_edge/src/objects-inl.h Thu Nov 14 12:05:09 2013 UTC
@@ -1330,6 +1330,7 @@
void AllocationSite::Initialize() {
+ set_transition_info(Smi::FromInt(0));
SetElementsKind(GetInitialFastElementsKind());
set_nested_site(Smi::FromInt(0));
set_dependent_code(DependentCode::cast(GetHeap()->empty_fixed_array()),
@@ -1365,6 +1366,21 @@
inline bool AllocationSite::CanTrack(InstanceType type) {
return type == JS_ARRAY_TYPE;
}
+
+
+inline DependentCode::DependencyGroup AllocationSite::ToDependencyGroup(
+ Reason reason) {
+ switch (reason) {
+ case TENURING:
+ return DependentCode::kAllocationSiteTenuringChangedGroup;
+ break;
+ case TRANSITIONS:
+ return DependentCode::kAllocationSiteTransitionChangedGroup;
+ break;
+ }
+ UNREACHABLE();
+ return DependentCode::kAllocationSiteTransitionChangedGroup;
+}
void JSObject::EnsureCanContainHeapObjectElements(Handle<JSObject> object)
{
=======================================
--- /branches/bleeding_edge/src/objects-printer.cc Wed Nov 13 10:34:06 2013
UTC
+++ /branches/bleeding_edge/src/objects-printer.cc Thu Nov 14 12:05:09 2013
UTC
@@ -1126,17 +1126,12 @@
PrintF(out, "\n - nested site: ");
nested_site()->ShortPrint(out);
PrintF(out, "\n - transition_info: ");
- if (transition_info()->IsCell()) {
- Cell* cell = Cell::cast(transition_info());
- Object* cell_contents = cell->value();
- if (cell_contents->IsSmi()) {
- ElementsKind kind = static_cast<ElementsKind>(
- Smi::cast(cell_contents)->value());
- PrintF(out, "Array allocation with ElementsKind ");
- PrintElementsKind(out, kind);
- PrintF(out, "\n");
- return;
- }
+ if (transition_info()->IsSmi()) {
+ ElementsKind kind = GetElementsKind();
+ PrintF(out, "Array allocation with ElementsKind ");
+ PrintElementsKind(out, kind);
+ PrintF(out, "\n");
+ return;
} else if (transition_info()->IsJSArray()) {
PrintF(out, "Array literal ");
transition_info()->ShortPrint(out);
=======================================
--- /branches/bleeding_edge/src/objects.cc Tue Nov 12 14:43:18 2013 UTC
+++ /branches/bleeding_edge/src/objects.cc Thu Nov 14 12:05:09 2013 UTC
@@ -11633,6 +11633,9 @@
AllowDeferredHandleDereference dependencies_are_safe;
if (group == DependentCode::kPropertyCellChangedGroup) {
return Handle<PropertyCell>::cast(object)->dependent_code();
+ } else if (group == DependentCode::kAllocationSiteTenuringChangedGroup ||
+ group == DependentCode::kAllocationSiteTransitionChangedGroup) {
+ return Handle<AllocationSite>::cast(object)->dependent_code();
}
return Handle<Map>::cast(object)->dependent_code();
}
@@ -12804,21 +12807,11 @@
}
-MaybeObject* JSObject::UpdateAllocationSite(ElementsKind to_kind) {
- if (!FLAG_track_allocation_sites || !IsJSArray()) {
- return this;
- }
-
- AllocationMemento* memento = AllocationMemento::FindForJSObject(this);
- if (memento == NULL || !memento->IsValid()) {
- return this;
- }
+MaybeObject* AllocationSite::DigestTransitionFeedback(ElementsKind
to_kind) {
+ Isolate* isolate = GetIsolate();
- // Walk through to the Allocation Site
- AllocationSite* site = memento->GetAllocationSite();
- if (site->SitePointsToLiteral() &&
- site->transition_info()->IsJSArray()) {
- JSArray* transition_info = JSArray::cast(site->transition_info());
+ if (SitePointsToLiteral() && transition_info()->IsJSArray()) {
+ JSArray* transition_info = JSArray::cast(this->transition_info());
ElementsKind kind = transition_info->GetElementsKind();
// if kind is holey ensure that to_kind is as well.
if (IsHoleyElementsKind(kind)) {
@@ -12829,9 +12822,9 @@
// function, so we shouldn't make new instances of it very often.
uint32_t length = 0;
CHECK(transition_info->length()->ToArrayIndex(&length));
- if (length <= AllocationSite::kMaximumArrayBytesToPretransition) {
+ if (length <= kMaximumArrayBytesToPretransition) {
if (FLAG_trace_track_allocation_sites) {
- bool is_nested = site->IsNestedSite();
+ bool is_nested = IsNestedSite();
PrintF(
"AllocationSite: JSArray %p boilerplate %s updated %s->%s\n",
reinterpret_cast<void*>(this),
@@ -12839,11 +12832,14 @@
ElementsKindToString(kind),
ElementsKindToString(to_kind));
}
- return transition_info->TransitionElementsKind(to_kind);
+ MaybeObject* result =
transition_info->TransitionElementsKind(to_kind);
+ if (result->IsFailure()) return result;
+ dependent_code()->DeoptimizeDependentCodeGroup(
+ isolate, DependentCode::kAllocationSiteTransitionChangedGroup);
}
}
} else {
- ElementsKind kind = site->GetElementsKind();
+ ElementsKind kind = GetElementsKind();
// if kind is holey ensure that to_kind is as well.
if (IsHoleyElementsKind(kind)) {
to_kind = GetHoleyElementsKind(to_kind);
@@ -12855,11 +12851,48 @@
ElementsKindToString(kind),
ElementsKindToString(to_kind));
}
- site->set_transition_info(Smi::FromInt(to_kind));
+ SetElementsKind(to_kind);
+ dependent_code()->DeoptimizeDependentCodeGroup(
+ isolate, DependentCode::kAllocationSiteTransitionChangedGroup);
}
}
return this;
}
+
+
+void AllocationSite::AddDependentCompilationInfo(Reason reason,
+ CompilationInfo* info) {
+ DependentCode::DependencyGroup group = ToDependencyGroup(reason);
+ Handle<DependentCode> dep(dependent_code());
+ Handle<DependentCode> codes =
+ DependentCode::Insert(dep, group, info->object_wrapper());
+ if (*codes != dependent_code()) set_dependent_code(*codes);
+ info->dependencies(group)->Add(Handle<HeapObject>(this), info->zone());
+}
+
+
+void AllocationSite::AddDependentCode(Reason reason, Handle<Code> code) {
+ DependentCode::DependencyGroup group = ToDependencyGroup(reason);
+ Handle<DependentCode> codes = DependentCode::Insert(
+ Handle<DependentCode>(dependent_code()), group, code);
+ if (*codes != dependent_code()) set_dependent_code(*codes);
+}
+
+
+MaybeObject* JSObject::UpdateAllocationSite(ElementsKind to_kind) {
+ if (!FLAG_track_allocation_sites || !IsJSArray()) {
+ return this;
+ }
+
+ AllocationMemento* memento = AllocationMemento::FindForJSObject(this);
+ if (memento == NULL || !memento->IsValid()) {
+ return this;
+ }
+
+ // Walk through to the Allocation Site
+ AllocationSite* site = memento->GetAllocationSite();
+ return site->DigestTransitionFeedback(to_kind);
+}
MaybeObject* JSObject::TransitionElementsKind(ElementsKind to_kind) {
=======================================
--- /branches/bleeding_edge/src/objects.h Wed Nov 13 10:34:06 2013 UTC
+++ /branches/bleeding_edge/src/objects.h Thu Nov 14 12:05:09 2013 UTC
@@ -5565,7 +5565,13 @@
// Group of code that depends on global property values in property
cells
// not being changed.
kPropertyCellChangedGroup,
- kGroupCount = kPropertyCellChangedGroup + 1
+ // Group of code that depends on tenuring information in
AllocationSites
+ // not being changed.
+ kAllocationSiteTenuringChangedGroup,
+ // Group of code that depends on element transition information in
+ // AllocationSites not being changed.
+ kAllocationSiteTransitionChangedGroup,
+ kGroupCount = kAllocationSiteTransitionChangedGroup + 1
};
// Array for holding the index of the first code object of each group.
@@ -8087,13 +8093,31 @@
// This method is expensive, it should only be called for reporting.
bool IsNestedSite();
+ class ElementsKindBits: public BitField<ElementsKind, 0, 15> {};
+ class UnusedBits: public BitField<int, 15, 14> {};
+ class DoNotInlineBit: public BitField<bool, 29, 1> {};
+
ElementsKind GetElementsKind() {
ASSERT(!SitePointsToLiteral());
- return
static_cast<ElementsKind>(Smi::cast(transition_info())->value());
+ int value = Smi::cast(transition_info())->value();
+ return ElementsKindBits::decode(value);
}
void SetElementsKind(ElementsKind kind) {
- set_transition_info(Smi::FromInt(static_cast<int>(kind)));
+ int value = Smi::cast(transition_info())->value();
+ set_transition_info(Smi::FromInt(ElementsKindBits::update(value,
kind)),
+ SKIP_WRITE_BARRIER);
+ }
+
+ bool CanInlineCall() {
+ int value = Smi::cast(transition_info())->value();
+ return DoNotInlineBit::decode(value) == 0;
+ }
+
+ void SetDoNotInlineCall() {
+ int value = Smi::cast(transition_info())->value();
+ set_transition_info(Smi::FromInt(DoNotInlineBit::update(value, true)),
+ SKIP_WRITE_BARRIER);
}
bool SitePointsToLiteral() {
@@ -8102,6 +8126,16 @@
// for an object or array literal.
return transition_info()->IsJSArray() ||
transition_info()->IsJSObject();
}
+
+ MaybeObject* DigestTransitionFeedback(ElementsKind to_kind);
+
+ enum Reason {
+ TENURING,
+ TRANSITIONS
+ };
+
+ void AddDependentCompilationInfo(Reason reason, CompilationInfo* info);
+ void AddDependentCode(Reason reason, Handle<Code> code);
DECLARE_PRINTER(AllocationSite)
DECLARE_VERIFIER(AllocationSite)
@@ -8123,6 +8157,7 @@
kSize> BodyDescriptor;
private:
+ inline DependentCode::DependencyGroup ToDependencyGroup(Reason reason);
DISALLOW_IMPLICIT_CONSTRUCTORS(AllocationSite);
};
=======================================
--- /branches/bleeding_edge/src/runtime.cc Thu Nov 14 06:20:48 2013 UTC
+++ /branches/bleeding_edge/src/runtime.cc Thu Nov 14 12:05:09 2013 UTC
@@ -14688,7 +14688,7 @@
static MaybeObject* ArrayConstructorCommon(Isolate* isolate,
Handle<JSFunction> constructor,
- Handle<Object> type_info,
+ Handle<AllocationSite> site,
Arguments* caller_args) {
bool holey = false;
bool can_use_type_feedback = true;
@@ -14710,14 +14710,7 @@
JSArray* array;
MaybeObject* maybe_array;
- if (!type_info.is_null() &&
- *type_info != isolate->heap()->undefined_value() &&
- Cell::cast(*type_info)->value()->IsAllocationSite() &&
- can_use_type_feedback) {
- Handle<Cell> cell = Handle<Cell>::cast(type_info);
- Handle<AllocationSite> site = Handle<AllocationSite>(
- AllocationSite::cast(cell->value()), isolate);
- ASSERT(!site->SitePointsToLiteral());
+ if (!site.is_null() && can_use_type_feedback) {
ElementsKind to_kind = site->GetElementsKind();
if (holey && !IsFastHoleyElementsKind(to_kind)) {
to_kind = GetHoleyElementsKind(to_kind);
@@ -14743,8 +14736,17 @@
maybe_array = isolate->heap()->AllocateJSArrayStorage(array, 0, 0,
DONT_INITIALIZE_ARRAY_ELEMENTS);
if (maybe_array->IsFailure()) return maybe_array;
+ ElementsKind old_kind = array->GetElementsKind();
maybe_array = ArrayConstructInitializeElements(array, caller_args);
if (maybe_array->IsFailure()) return maybe_array;
+ if (!site.is_null() &&
+ (old_kind != array->GetElementsKind() ||
+ !can_use_type_feedback)) {
+ // The arguments passed in caused a transition. This kind of complexity
+ // can't be dealt with in the inlined hydrogen array constructor case.
+ // We must mark the allocationsite as un-inlinable.
+ site->SetDoNotInlineCall();
+ }
return array;
}
@@ -14771,9 +14773,19 @@
ASSERT(arg_count == caller_args->length());
}
#endif
+
+ Handle<AllocationSite> site;
+ if (!type_info.is_null() &&
+ *type_info != isolate->heap()->undefined_value() &&
+ Cell::cast(*type_info)->value()->IsAllocationSite()) {
+ site = Handle<AllocationSite>(
+ AllocationSite::cast(Cell::cast(*type_info)->value()), isolate);
+ ASSERT(!site->SitePointsToLiteral());
+ }
+
return ArrayConstructorCommon(isolate,
constructor,
- type_info,
+ site,
caller_args);
}
@@ -14796,7 +14808,7 @@
#endif
return ArrayConstructorCommon(isolate,
constructor,
- Handle<Object>::null(),
+ Handle<AllocationSite>::null(),
caller_args);
}
=======================================
--- /branches/bleeding_edge/src/x64/code-stubs-x64.cc Wed Nov 13 10:07:04
2013 UTC
+++ /branches/bleeding_edge/src/x64/code-stubs-x64.cc Thu Nov 14 12:05:09
2013 UTC
@@ -5593,10 +5593,12 @@
__ Assert(equal, kExpectedAllocationSiteInCell);
}
- // Save the resulting elements kind in type info
- __ Integer32ToSmi(rdx, rdx);
- __ movq(FieldOperand(rcx, AllocationSite::kTransitionInfoOffset), rdx);
- __ SmiToInteger32(rdx, rdx);
+ // Save the resulting elements kind in type info. We can't just store r3
+ // in the AllocationSite::transition_info field because elements kind is
+ // restricted to a portion of the field...upper bits need to be left alone.
+ STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
+ __ SmiAddConstant(FieldOperand(rcx, AllocationSite::kTransitionInfoOffset),
+ Smi::FromInt(kFastElementsKindPackedToHoley));
__ bind(&normal_sequence);
int last_index = GetSequenceIndexFromFastElementsKind(
@@ -5738,8 +5740,11 @@
masm->isolate()->factory()->allocation_site_map());
__ j(not_equal, &no_info);
+ // Only look at the lower 16 bits of the transition info.
__ movq(rdx, FieldOperand(rdx, AllocationSite::kTransitionInfoOffset));
__ SmiToInteger32(rdx, rdx);
+ STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
+ __ and_(rdx, Immediate(AllocationSite::ElementsKindBits::kMask));
GenerateDispatchToArrayStub(masm, DONT_OVERRIDE);
__ bind(&no_info);
=======================================
--- /branches/bleeding_edge/src/x64/stub-cache-x64.cc Thu Nov 7 10:17:13
2013 UTC
+++ /branches/bleeding_edge/src/x64/stub-cache-x64.cc Thu Nov 14 12:05:09
2013 UTC
@@ -1629,7 +1629,7 @@
}
Handle<AllocationSite> site = isolate()->factory()->NewAllocationSite();
- site->set_transition_info(Smi::FromInt(GetInitialFastElementsKind()));
+ site->SetElementsKind(GetInitialFastElementsKind());
Handle<Cell> site_feedback_cell = isolate()->factory()->NewCell(site);
__ movq(rax, Immediate(argc));
__ Move(rbx, site_feedback_cell);
=======================================
--- /branches/bleeding_edge/test/mjsunit/array-constructor-feedback.js Mon
Jul 22 09:16:33 2013 UTC
+++ /branches/bleeding_edge/test/mjsunit/array-constructor-feedback.js Thu
Nov 14 12:05:09 2013 UTC
@@ -138,8 +138,8 @@
})();
- // Test: Ensure that bailouts from the stub don't deopt a crankshafted
- // method with a call to that stub.
+ // Test: Ensure that inlined array calls in crankshaft learn from deopts
+ // based on the move to a dictionary for the array.
(function() {
function bar(len) {
return new Array(len);
@@ -152,10 +152,16 @@
a = bar(10);
assertKind(elements_kind.fast, a);
assertOptimized(bar);
- // The stub bails out, but the method call should be fine.
+ // bar should deopt because the length is too large.
a = bar(100000);
+ assertUnoptimized(bar);
+ assertKind(elements_kind.dictionary, a);
+ // The allocation site now has feedback that means the array constructor
+ // will not be inlined.
+ %OptimizeFunctionOnNextCall(bar);
+ a = bar(100000);
+ assertKind(elements_kind.dictionary, a);
assertOptimized(bar);
- assertKind(elements_kind.dictionary, a);
// If the argument isn't a smi, it bails out as well
a = bar("oops");
@@ -172,8 +178,12 @@
barn(1, 2, 3);
assertOptimized(barn);
a = barn(1, "oops", 3);
- // The stub should bail out but the method should remain optimized.
+ // The method should deopt, but learn from the failure to avoid inlining
+ // the array.
assertKind(elements_kind.fast, a);
+ assertUnoptimized(barn);
+ %OptimizeFunctionOnNextCall(barn);
+ a = barn(1, "oops", 3);
assertOptimized(barn);
})();
@@ -219,4 +229,29 @@
assertFalse(Realm.eval(contextB, "bar2();") instanceof Array);
assertTrue(Realm.eval(contextB, "bar2() instanceof Array"));
})();
+
+ // Test: create array with packed feedback, then optimize/inline
+ // function. Verify that if we ask for a holey array then we deopt.
+ // Reoptimization will proceed with the correct feedback and we
+ // won't deopt anymore.
+ (function() {
+ function bar(len) { return new Array(len); }
+ bar(0);
+ bar(0);
+ %OptimizeFunctionOnNextCall(bar);
+ a = bar(0);
+ assertOptimized(bar);
+ assertFalse(isHoley(a));
+ a = bar(1); // ouch!
+ assertUnoptimized(bar);
+ assertTrue(isHoley(a));
+ // Try again
+ %OptimizeFunctionOnNextCall(bar);
+ a = bar(100);
+ assertOptimized(bar);
+ assertTrue(isHoley(a));
+ a = bar(0);
+ assertOptimized(bar);
+ assertTrue(isHoley(a));
+ })();
}
--
--
v8-dev mailing list
[email protected]
http://groups.google.com/group/v8-dev
---
You received this message because you are subscribed to the Google Groups "v8-dev" group.
To unsubscribe from this group and stop receiving emails from it, send an email
to [email protected].
For more options, visit https://groups.google.com/groups/opt_out.