Reviewers: mvstanton, danno, Paul Lind, kisg,
Description:
MIPS: Adapt Danno's Track Allocation Info idea to fast literals.
Port r13330 (6d9ce8a8)
Original commit message:
Adapt Danno's Track Allocation Info idea to fast literals. When allocating a
literal array, we store an AllocationSiteInfo object right after the JSArray,
with a pointer to the boilerplate object. Later, if the array transitions we
check for the continued existence of the temporary AllocationSiteInfo object
(has no roots). If found, we'll use it to transition the boilerplate array as
well.
Danno's original changeset: https://codereview.chromium.org/10615002/
BUG=
TEST=
Please review this at https://chromiumcodereview.appspot.com/11783048/
SVN Base: https://v8.googlecode.com/svn/branches/bleeding_edge
Affected files:
M src/mips/code-stubs-mips.cc
M src/mips/codegen-mips.cc
M src/mips/full-codegen-mips.cc
M src/mips/macro-assembler-mips.h
M src/mips/macro-assembler-mips.cc
Index: src/mips/code-stubs-mips.cc
diff --git a/src/mips/code-stubs-mips.cc b/src/mips/code-stubs-mips.cc
index 9f0d535416c6c8c3c04b2c3bc63ed94f067b690a..e920abf82c9d5d696f59870277276ffd598594d9 100644
--- a/src/mips/code-stubs-mips.cc
+++ b/src/mips/code-stubs-mips.cc
@@ -343,6 +343,7 @@ static void GenerateFastCloneShallowArrayCommon(
MacroAssembler* masm,
int length,
FastCloneShallowArrayStub::Mode mode,
+ AllocationSiteInfoMode allocation_site_info_mode,
Label* fail) {
// Registers on entry:
// a3: boilerplate literal array.
@@ -355,7 +356,12 @@ static void GenerateFastCloneShallowArrayCommon(
? FixedDoubleArray::SizeFor(length)
: FixedArray::SizeFor(length);
}
- int size = JSArray::kSize + elements_size;
+ int size = JSArray::kSize;
+ int allocation_info_start = size;
+ if (allocation_site_info_mode == TRACK_ALLOCATION_SITE_INFO) {
+ size += AllocationSiteInfo::kSize;
+ }
+ size += elements_size;
// Allocate both the JS array and the elements array in one big
// allocation. This avoids multiple limit checks.
@@ -366,6 +372,13 @@ static void GenerateFastCloneShallowArrayCommon(
fail,
TAG_OBJECT);
+ if (allocation_site_info_mode == TRACK_ALLOCATION_SITE_INFO) {
+ __ li(a2, Operand(Handle<Map>(masm->isolate()->heap()->
+ allocation_site_info_map())));
+ __ sw(a2, FieldMemOperand(v0, allocation_info_start));
+ __ sw(a3, FieldMemOperand(v0, allocation_info_start + kPointerSize));
+ }
+
// Copy the JS array part.
for (int i = 0; i < JSArray::kSize; i += kPointerSize) {
if ((i != JSArray::kElementsOffset) || (length == 0)) {
@@ -378,7 +391,11 @@ static void GenerateFastCloneShallowArrayCommon(
// Get hold of the elements array of the boilerplate and setup the
// elements pointer in the resulting object.
__ lw(a3, FieldMemOperand(a3, JSArray::kElementsOffset));
- __ Addu(a2, v0, Operand(JSArray::kSize));
+ if (allocation_site_info_mode == TRACK_ALLOCATION_SITE_INFO) {
+ __ Addu(a2, v0, Operand(JSArray::kSize + AllocationSiteInfo::kSize));
+ } else {
+ __ Addu(a2, v0, Operand(JSArray::kSize));
+ }
__ sw(a2, FieldMemOperand(v0, JSArray::kElementsOffset));
// Copy the elements array.
@@ -407,6 +424,12 @@ void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
__ Branch(&slow_case, eq, a3, Operand(t1));
FastCloneShallowArrayStub::Mode mode = mode_;
+ AllocationSiteInfoMode allocation_site_info_mode =
+ DONT_TRACK_ALLOCATION_SITE_INFO;
+ if (mode == CLONE_ANY_ELEMENTS_WITH_ALLOCATION_SITE_INFO) {
+ mode = CLONE_ANY_ELEMENTS;
+ allocation_site_info_mode = TRACK_ALLOCATION_SITE_INFO;
+ }
if (mode == CLONE_ANY_ELEMENTS) {
Label double_elements, check_fast_elements;
__ lw(v0, FieldMemOperand(a3, JSArray::kElementsOffset));
@@ -414,7 +437,9 @@ void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
__ LoadRoot(t1, Heap::kFixedCOWArrayMapRootIndex);
__ Branch(&check_fast_elements, ne, v0, Operand(t1));
GenerateFastCloneShallowArrayCommon(masm, 0,
- COPY_ON_WRITE_ELEMENTS, &slow_case);
+ COPY_ON_WRITE_ELEMENTS,
+ allocation_site_info_mode,
+ &slow_case);
// Return and remove the on-stack parameters.
__ DropAndRet(3);
@@ -422,7 +447,9 @@ void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
__ LoadRoot(t1, Heap::kFixedArrayMapRootIndex);
__ Branch(&double_elements, ne, v0, Operand(t1));
GenerateFastCloneShallowArrayCommon(masm, length_,
- CLONE_ELEMENTS, &slow_case);
+ CLONE_ELEMENTS,
+ allocation_site_info_mode,
+ &slow_case);
// Return and remove the on-stack parameters.
__ DropAndRet(3);
@@ -453,7 +480,8 @@ void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
__ pop(a3);
}
- GenerateFastCloneShallowArrayCommon(masm, length_, mode, &slow_case);
+ GenerateFastCloneShallowArrayCommon(masm, length_, mode,
+ allocation_site_info_mode,
+ &slow_case);
// Return and remove the on-stack parameters.
__ DropAndRet(3);
Index: src/mips/codegen-mips.cc
diff --git a/src/mips/codegen-mips.cc b/src/mips/codegen-mips.cc
index 1da808963564079376ceb6c31060476425286ed9..27835af6f33485a8eabfcb1de81eb0b60f4f0ae9 100644
--- a/src/mips/codegen-mips.cc
+++ b/src/mips/codegen-mips.cc
@@ -180,6 +180,10 @@ void ElementsTransitionGenerator::GenerateSmiToDouble(
Register scratch = t6;
+ if (FLAG_track_allocation_sites) {
+ masm->TestJSArrayForAllocationSiteInfo(a2, t0, fail);
+ }
+
// Check for empty arrays, which only require a map transition and no changes
// to the backing store.
__ lw(t0, FieldMemOperand(a2, JSObject::kElementsOffset));
Index: src/mips/full-codegen-mips.cc
diff --git a/src/mips/full-codegen-mips.cc b/src/mips/full-codegen-mips.cc
index e5a870094801e9e350de82bf0eb52a93ac7535bd..b162d267236cc1f32ddeaee0ea0f08b96065743a 100644
--- a/src/mips/full-codegen-mips.cc
+++ b/src/mips/full-codegen-mips.cc
@@ -1744,6 +1744,15 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
FastCloneShallowArrayStub::Mode mode = has_fast_elements
? FastCloneShallowArrayStub::CLONE_ELEMENTS
: FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;
+
+ // Tracking allocation info allows us to pre-transition later if it makes
+ // sense.
+ if (mode == FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS &&
+ FLAG_track_allocation_sites) {
+ mode = FastCloneShallowArrayStub::
+ CLONE_ANY_ELEMENTS_WITH_ALLOCATION_SITE_INFO;
+ }
+
FastCloneShallowArrayStub stub(mode, length);
__ CallStub(&stub);
}
Index: src/mips/macro-assembler-mips.cc
diff --git a/src/mips/macro-assembler-mips.cc b/src/mips/macro-assembler-mips.cc
index f5ff751268e4687c405c7315022b9ce91e6e3714..e0c5787d3b990c86f354033b0daeec41a06c2cc5 100644
--- a/src/mips/macro-assembler-mips.cc
+++ b/src/mips/macro-assembler-mips.cc
@@ -5449,6 +5449,27 @@ void MacroAssembler::ClampDoubleToUint8(Register result_reg,
}
+void MacroAssembler::TestJSArrayForAllocationSiteInfo(
+ Register receiver_reg,
+ Register scratch_reg,
+ Label* allocation_info_present) {
+ Label no_info_available;
+ ExternalReference new_space_start =
+ ExternalReference::new_space_start(isolate());
+ ExternalReference new_space_allocation_top =
+ ExternalReference::new_space_allocation_top_address(isolate());
+ lw(scratch_reg, FieldMemOperand(receiver_reg,
+ JSArray::kSize + AllocationSiteInfo::kSize));
+ Branch(&no_info_available, lt, scratch_reg, Operand(new_space_start));
+ Branch(&no_info_available, hs, scratch_reg,
+ Operand(new_space_allocation_top));
+ lw(scratch_reg, MemOperand(scratch_reg));
+ Branch(allocation_info_present, eq, scratch_reg,
+ Operand(Handle<Map>(isolate()->heap()->allocation_site_info_map())));
+ bind(&no_info_available);
+}
+
+
bool AreAliased(Register r1, Register r2, Register r3, Register r4) {
if (r1.is(r2)) return true;
if (r1.is(r3)) return true;
Index: src/mips/macro-assembler-mips.h
diff --git a/src/mips/macro-assembler-mips.h b/src/mips/macro-assembler-mips.h
index 4aedce48e44e3e5efb5653438ace99ad58d64d28..1a37acaf464ab7a299c30e2d33875ad6daf17d8c 100644
--- a/src/mips/macro-assembler-mips.h
+++ b/src/mips/macro-assembler-mips.h
@@ -1440,6 +1440,16 @@ class MacroAssembler: public Assembler {
// in a0. Assumes that any other register can be used as a scratch.
void CheckEnumCache(Register null_value, Label* call_runtime);
+ // AllocationSiteInfo support. Arrays may have an associated
+ // AllocationSiteInfo object that can be checked for in order to pretransition
+ // to another type.
+ // On entry, receiver_reg should point to the array object.
+ // scratch_reg gets clobbered.
+ // If allocation info is present, jump to allocation_info_present
+ void TestJSArrayForAllocationSiteInfo(Register receiver_reg,
+ Register scratch_reg,
+ Label* allocation_info_present);
+
private:
void CallCFunctionHelper(Register function,
int num_reg_arguments,
--
v8-dev mailing list
[email protected]
http://groups.google.com/group/v8-dev