Revision: 16878
Author: [email protected]
Date: Mon Sep 23 11:26:03 2013 UTC
Log: Merged r16346, r16357, r16381 into 3.20 branch.
Added allocation folding support for old space allocations.
Prefill allocated objects with one word fillers if we do not use allocation
folding.
Clear next map word when folding allocations into js arrays.
Also manually merged fix for chromium bug 284577, from r16790.
[email protected]
BUG=
Review URL: https://codereview.chromium.org/24279003
http://code.google.com/p/v8/source/detail?r=16878
Modified:
/branches/3.20/src/heap.cc
/branches/3.20/src/hydrogen-instructions.cc
/branches/3.20/src/hydrogen-instructions.h
/branches/3.20/src/hydrogen.cc
/branches/3.20/src/objects-inl.h
/branches/3.20/src/objects.h
/branches/3.20/src/version.cc
/branches/3.20/test/cctest/test-heap.cc
/branches/3.20/test/mjsunit/allocation-folding.js
=======================================
--- /branches/3.20/src/heap.cc Thu Aug 29 16:58:06 2013 UTC
+++ /branches/3.20/src/heap.cc Mon Sep 23 11:26:03 2013 UTC
@@ -4970,7 +4970,7 @@
int object_size = map->instance_size();
Object* clone;
- ASSERT(map->CanTrackAllocationSite());
+ ASSERT(AllocationSite::CanTrack(map->instance_type()));
ASSERT(map->instance_type() == JS_ARRAY_TYPE);
WriteBarrierMode wb_mode = UPDATE_WRITE_BARRIER;
=======================================
--- /branches/3.20/src/hydrogen-instructions.cc Wed Aug 14 17:13:49 2013 UTC
+++ /branches/3.20/src/hydrogen-instructions.cc Mon Sep 23 11:26:03 2013 UTC
@@ -2364,6 +2364,24 @@
ASSERT(!type.IsTaggedNumber());
Initialize(r);
}
+
+
+HConstant::HConstant(Handle<Map> handle,
+ UniqueValueId unique_id)
+ : HTemplateInstruction<0>(HType::Tagged()),
+ handle_(handle),
+ unique_id_(unique_id),
+ has_smi_value_(false),
+ has_int32_value_(false),
+ has_double_value_(false),
+ has_external_reference_value_(false),
+ is_internalized_string_(false),
+ is_not_in_new_space_(true),
+ is_cell_(false),
+ boolean_value_(false) {
+ ASSERT(!handle.is_null());
+ Initialize(Representation::Tagged());
+}
HConstant::HConstant(int32_t integer_value,
@@ -3183,6 +3201,7 @@
void HAllocate::HandleSideEffectDominator(GVNFlag side_effect,
HValue* dominator) {
ASSERT(side_effect == kChangesNewSpacePromotion);
+ Zone* zone = block()->zone();
if (!FLAG_use_allocation_folding) return;
// Try to fold allocations together with their dominating allocations.
@@ -3194,31 +3213,44 @@
return;
}
- HAllocate* dominator_allocate_instr = HAllocate::cast(dominator);
- HValue* dominator_size = dominator_allocate_instr->size();
+ HAllocate* dominator_allocate = HAllocate::cast(dominator);
+ HValue* dominator_size = dominator_allocate->size();
HValue* current_size = size();
- // We can just fold allocations that are guaranteed in new space.
+
// TODO(hpayer): Add support for non-constant allocation in dominator.
- if (!IsNewSpaceAllocation() || !current_size->IsInteger32Constant() ||
- !dominator_allocate_instr->IsNewSpaceAllocation() ||
+ if (!current_size->IsInteger32Constant() ||
!dominator_size->IsInteger32Constant()) {
if (FLAG_trace_allocation_folding) {
- PrintF("#%d (%s) cannot fold into #%d (%s)\n",
+ PrintF("#%d (%s) cannot fold into #%d (%s), dynamic allocation
size\n",
id(), Mnemonic(), dominator->id(), dominator->Mnemonic());
}
return;
}
+
+ dominator_allocate = GetFoldableDominator(dominator_allocate);
+ if (dominator_allocate == NULL) {
+ return;
+ }
+
+ ASSERT((IsNewSpaceAllocation() &&
+ dominator_allocate->IsNewSpaceAllocation()) ||
+ (IsOldDataSpaceAllocation() &&
+ dominator_allocate->IsOldDataSpaceAllocation()) ||
+ (IsOldPointerSpaceAllocation() &&
+ dominator_allocate->IsOldPointerSpaceAllocation()));
// First update the size of the dominator allocate instruction.
- int32_t dominator_size_constant =
+ dominator_size = dominator_allocate->size();
+ int32_t original_object_size =
HConstant::cast(dominator_size)->GetInteger32Constant();
+ int32_t dominator_size_constant = original_object_size;
int32_t current_size_constant =
HConstant::cast(current_size)->GetInteger32Constant();
int32_t new_dominator_size = dominator_size_constant +
current_size_constant;
if (MustAllocateDoubleAligned()) {
- if (!dominator_allocate_instr->MustAllocateDoubleAligned()) {
- dominator_allocate_instr->MakeDoubleAligned();
+ if (!dominator_allocate->MustAllocateDoubleAligned()) {
+ dominator_allocate->MakeDoubleAligned();
}
if ((dominator_size_constant & kDoubleAlignmentMask) != 0) {
dominator_size_constant += kDoubleSize / 2;
@@ -3229,36 +3261,167 @@
if (new_dominator_size > Page::kMaxNonCodeHeapObjectSize) {
if (FLAG_trace_allocation_folding) {
PrintF("#%d (%s) cannot fold into #%d (%s) due to size: %d\n",
- id(), Mnemonic(), dominator->id(), dominator->Mnemonic(),
- new_dominator_size);
+ id(), Mnemonic(), dominator_allocate->id(),
+ dominator_allocate->Mnemonic(), new_dominator_size);
}
return;
}
- HBasicBlock* block = dominator->block();
- Zone* zone = block->zone();
- HInstruction* new_dominator_size_constant =
- HConstant::New(zone, context(), new_dominator_size);
- new_dominator_size_constant->InsertBefore(dominator_allocate_instr);
- dominator_allocate_instr->UpdateSize(new_dominator_size_constant);
+
+ HInstruction* new_dominator_size_constant =
HConstant::CreateAndInsertBefore(
+ zone, context(), new_dominator_size, dominator_allocate);
+ dominator_allocate->UpdateSize(new_dominator_size_constant);
#ifdef VERIFY_HEAP
- if (FLAG_verify_heap) {
- dominator_allocate_instr->MakePrefillWithFiller();
+ if (FLAG_verify_heap && dominator_allocate->IsNewSpaceAllocation()) {
+ dominator_allocate->MakePrefillWithFiller();
+ } else {
+ // TODO(hpayer): This is a short-term hack to make allocation mementos
+ // work again in new space.
+ ClearNextMapWord(original_object_size);
}
+#else
+ // TODO(hpayer): This is a short-term hack to make allocation mementos
+ // work again in new space.
+ ClearNextMapWord(original_object_size);
#endif
+ dominator_allocate->clear_next_map_word_ = clear_next_map_word_;
+
// After that replace the dominated allocate instruction.
HInstruction* dominated_allocate_instr =
HInnerAllocatedObject::New(zone,
context(),
- dominator_allocate_instr,
+ dominator_allocate,
dominator_size_constant,
type());
dominated_allocate_instr->InsertBefore(this);
DeleteAndReplaceWith(dominated_allocate_instr);
if (FLAG_trace_allocation_folding) {
PrintF("#%d (%s) folded into #%d (%s)\n",
- id(), Mnemonic(), dominator->id(), dominator->Mnemonic());
+ id(), Mnemonic(), dominator_allocate->id(),
+ dominator_allocate->Mnemonic());
+ }
+}
+
+
+HAllocate* HAllocate::GetFoldableDominator(HAllocate* dominator) {
+ if (!IsFoldable(dominator)) {
+ // We cannot hoist old space allocations over new space allocations.
+ if (IsNewSpaceAllocation() || dominator->IsNewSpaceAllocation()) {
+ if (FLAG_trace_allocation_folding) {
+ PrintF("#%d (%s) cannot fold into #%d (%s), new space hoisting\n",
+ id(), Mnemonic(), dominator->id(), dominator->Mnemonic());
+ }
+ return NULL;
+ }
+
+ HAllocate* dominator_dominator = dominator->dominating_allocate_;
+
+ // We can hoist old data space allocations over an old pointer space
+ // allocation and vice versa. For that we have to check the dominator
+ // of the dominator allocate instruction.
+ if (dominator_dominator == NULL) {
+ dominating_allocate_ = dominator;
+ if (FLAG_trace_allocation_folding) {
+ PrintF("#%d (%s) cannot fold into #%d (%s), different spaces\n",
+ id(), Mnemonic(), dominator->id(), dominator->Mnemonic());
+ }
+ return NULL;
+ }
+
+ // We can just fold old space allocations that are in the same basic
block,
+ // since it is not guaranteed that we fill up the whole allocated old
+ // space memory.
+  // TODO(hpayer): Remove this limitation and add filler maps for each
+  // allocation as soon as we have store elimination.
+ if (block()->block_id() != dominator_dominator->block()->block_id()) {
+ if (FLAG_trace_allocation_folding) {
+ PrintF("#%d (%s) cannot fold into #%d (%s), different basic
blocks\n",
+ id(), Mnemonic(), dominator_dominator->id(),
+ dominator_dominator->Mnemonic());
+ }
+ return NULL;
+ }
+
+ ASSERT((IsOldDataSpaceAllocation() &&
+ dominator_dominator->IsOldDataSpaceAllocation()) ||
+ (IsOldPointerSpaceAllocation() &&
+ dominator_dominator->IsOldPointerSpaceAllocation()));
+
+ int32_t current_size = HConstant::cast(size())->GetInteger32Constant();
+ HStoreNamedField* dominator_free_space_size =
+ dominator->filler_free_space_size_;
+ if (dominator_free_space_size != NULL) {
+ // We already hoisted one old space allocation, i.e., we already
installed
+ // a filler map. Hence, we just have to update the free space size.
+ dominator->UpdateFreeSpaceFiller(current_size);
+ } else {
+ // This is the first old space allocation that gets hoisted. We have
to
+    // install a filler map since the following allocation may cause a GC.
+ dominator->CreateFreeSpaceFiller(current_size);
+ }
+
+ // We can hoist the old space allocation over the actual dominator.
+ return dominator_dominator;
+ }
+ return dominator;
+}
+
+
+void HAllocate::UpdateFreeSpaceFiller(int32_t free_space_size) {
+ ASSERT(filler_free_space_size_ != NULL);
+ Zone* zone = block()->zone();
+ HConstant* new_free_space_size = HConstant::CreateAndInsertBefore(
+ zone,
+ context(),
+ filler_free_space_size_->value()->GetInteger32Constant() +
+ free_space_size,
+ filler_free_space_size_);
+ filler_free_space_size_->UpdateValue(new_free_space_size);
+}
+
+
+void HAllocate::CreateFreeSpaceFiller(int32_t free_space_size) {
+ ASSERT(filler_free_space_size_ == NULL);
+ Zone* zone = block()->zone();
+ int32_t dominator_size =
+
HConstant::cast(dominating_allocate_->size())->GetInteger32Constant();
+ HInstruction* free_space_instr =
+ HInnerAllocatedObject::New(zone, context(), dominating_allocate_,
+ dominator_size, type());
+ free_space_instr->InsertBefore(this);
+ HConstant* filler_map = HConstant::New(
+ zone,
+ context(),
+ isolate()->factory()->free_space_map(),
+ UniqueValueId(isolate()->heap()->free_space_map()));
+ filler_map->InsertAfter(free_space_instr);
+ HInstruction* store_map = HStoreNamedField::New(zone, context(),
+ free_space_instr, HObjectAccess::ForMap(), filler_map);
+ store_map->SetFlag(HValue::kHasNoObservableSideEffects);
+ store_map->InsertAfter(filler_map);
+
+ HConstant* filler_size = HConstant::CreateAndInsertAfter(
+ zone, context(), free_space_size, store_map);
+ HObjectAccess access =
+ HObjectAccess::ForJSObjectOffset(FreeSpace::kSizeOffset);
+ HStoreNamedField* store_size = HStoreNamedField::New(zone, context(),
+ free_space_instr, access, filler_size);
+ store_size->SetFlag(HValue::kHasNoObservableSideEffects);
+ store_size->InsertAfter(filler_size);
+ filler_free_space_size_ = store_size;
+}
+
+
+void HAllocate::ClearNextMapWord(int offset) {
+ if (clear_next_map_word_) {
+ Zone* zone = block()->zone();
+ HObjectAccess access = HObjectAccess::ForJSObjectOffset(offset);
+ HStoreNamedField* clear_next_map =
+ HStoreNamedField::New(zone, context(), this, access,
+ block()->graph()->GetConstantNull());
+ clear_next_map->ClearAllSideEffects();
+ clear_next_map->InsertAfter(this);
}
}
=======================================
--- /branches/3.20/src/hydrogen-instructions.h Mon Sep 2 16:36:28 2013 UTC
+++ /branches/3.20/src/hydrogen-instructions.h Mon Sep 23 11:26:03 2013 UTC
@@ -49,11 +49,11 @@
class HInferRepresentationPhase;
class HInstruction;
class HLoopInformation;
+class HStoreNamedField;
class HValue;
class LInstruction;
class LChunkBuilder;
-
#define HYDROGEN_ABSTRACT_INSTRUCTION_LIST(V) \
V(ArithmeticBinaryOperation) \
V(BinaryOperation) \
@@ -3204,7 +3204,26 @@
DECLARE_INSTRUCTION_FACTORY_P2(HConstant, int32_t, Representation);
DECLARE_INSTRUCTION_FACTORY_P1(HConstant, double);
DECLARE_INSTRUCTION_FACTORY_P1(HConstant, Handle<Object>);
+ DECLARE_INSTRUCTION_FACTORY_P2(HConstant, Handle<Map>, UniqueValueId);
DECLARE_INSTRUCTION_FACTORY_P1(HConstant, ExternalReference);
+
+ static HConstant* CreateAndInsertAfter(Zone* zone,
+ HValue* context,
+ int32_t value,
+ HInstruction* instruction) {
+ HConstant* new_constant = HConstant::New(zone, context, value);
+ new_constant->InsertAfter(instruction);
+ return new_constant;
+ }
+
+ static HConstant* CreateAndInsertBefore(Zone* zone,
+ HValue* context,
+ int32_t value,
+ HInstruction* instruction) {
+ HConstant* new_constant = HConstant::New(zone, context, value);
+ new_constant->InsertBefore(instruction);
+ return new_constant;
+ }
Handle<Object> handle() {
if (handle_.is_null()) {
@@ -3408,6 +3427,8 @@
bool is_not_in_new_space,
bool is_cell,
bool boolean_value);
+ HConstant(Handle<Map> handle,
+ UniqueValueId unique_id);
explicit HConstant(ExternalReference reference);
void Initialize(Representation r);
@@ -5098,7 +5119,10 @@
HType type,
PretenureFlag pretenure_flag,
InstanceType instance_type)
- : HTemplateInstruction<2>(type) {
+ : HTemplateInstruction<2>(type),
+ dominating_allocate_(NULL),
+ filler_free_space_size_(NULL),
+ clear_next_map_word_(false) {
SetOperandAt(0, context);
SetOperandAt(1, size);
set_representation(Representation::Tagged());
@@ -5110,13 +5134,40 @@
? ALLOCATE_IN_OLD_POINTER_SPACE : ALLOCATE_IN_OLD_DATA_SPACE)
: ALLOCATE_IN_NEW_SPACE;
if (instance_type == FIXED_DOUBLE_ARRAY_TYPE) {
- flags_ = static_cast<HAllocate::Flags>(flags_ |
- ALLOCATE_DOUBLE_ALIGNED);
+ flags_ = static_cast<HAllocate::Flags>(flags_ |
ALLOCATE_DOUBLE_ALIGNED);
+ }
+ // We have to fill the allocated object with one word fillers if we do
+ // not use allocation folding since some allocations may depend on each
+ // other, i.e., have a pointer to each other. A GC in between these
+ // allocations may leave such objects behind in a not completely
initialized
+ // state.
+ if (!FLAG_use_gvn || !FLAG_use_allocation_folding) {
+ flags_ = static_cast<HAllocate::Flags>(flags_ | PREFILL_WITH_FILLER);
}
+ clear_next_map_word_ = pretenure_flag == NOT_TENURED &&
+ AllocationSite::CanTrack(instance_type);
}
+
+ HAllocate* GetFoldableDominator(HAllocate* dominator);
+
+ void UpdateFreeSpaceFiller(int32_t filler_size);
+
+ void CreateFreeSpaceFiller(int32_t filler_size);
+
+ bool IsFoldable(HAllocate* allocate) {
+ return (IsNewSpaceAllocation() && allocate->IsNewSpaceAllocation()) ||
+ (IsOldDataSpaceAllocation() &&
allocate->IsOldDataSpaceAllocation()) ||
+ (IsOldPointerSpaceAllocation() &&
+ allocate->IsOldPointerSpaceAllocation());
+ }
+
+ void ClearNextMapWord(int offset);
Flags flags_;
Handle<Map> known_initial_map_;
+ HAllocate* dominating_allocate_;
+ HStoreNamedField* filler_free_space_size_;
+ bool clear_next_map_word_;
};
@@ -5972,6 +6023,10 @@
Representation field_representation() const {
return access_.representation();
}
+
+ void UpdateValue(HValue* value) {
+ SetOperandAt(1, value);
+ }
private:
HStoreNamedField(HValue* obj,
=======================================
--- /branches/3.20/src/hydrogen.cc Mon Sep 2 16:36:28 2013 UTC
+++ /branches/3.20/src/hydrogen.cc Mon Sep 23 11:26:03 2013 UTC
@@ -8241,7 +8241,10 @@
int pointer_size,
AllocationSiteMode mode) {
NoObservableSideEffectsScope no_effects(this);
-
+ InstanceType instance_type = boilerplate_object->map()->instance_type();
+ ASSERT(instance_type == JS_ARRAY_TYPE || instance_type ==
JS_OBJECT_TYPE);
+ HType type = instance_type == JS_ARRAY_TYPE
+ ? HType::JSArray() : HType::JSObject();
HInstruction* target = NULL;
HInstruction* data_target = NULL;
@@ -8258,14 +8261,11 @@
}
if (pointer_size != 0) {
HValue* size_in_bytes = Add<HConstant>(pointer_size);
- target = Add<HAllocate>(size_in_bytes, HType::JSObject(), TENURED,
- JS_OBJECT_TYPE);
+ target = Add<HAllocate>(size_in_bytes, type, TENURED, instance_type);
}
} else {
- InstanceType instance_type =
boilerplate_object->map()->instance_type();
HValue* size_in_bytes = Add<HConstant>(data_size + pointer_size);
- target = Add<HAllocate>(size_in_bytes, HType::JSObject(), NOT_TENURED,
- instance_type);
+ target = Add<HAllocate>(size_in_bytes, type, NOT_TENURED,
instance_type);
}
int offset = 0;
@@ -8287,7 +8287,7 @@
int* data_offset,
AllocationSiteMode mode) {
bool create_allocation_site_info = mode == TRACK_ALLOCATION_SITE &&
- boilerplate_object->map()->CanTrackAllocationSite();
+ AllocationSite::CanTrack(boilerplate_object->map()->instance_type());
// If using allocation sites, then the payload on the site should already
// be filled in as a valid (boilerplate) array.
@@ -8343,7 +8343,7 @@
// Create allocation site info.
if (mode == TRACK_ALLOCATION_SITE &&
- boilerplate_object->map()->CanTrackAllocationSite()) {
+
AllocationSite::CanTrack(boilerplate_object->map()->instance_type())) {
elements_offset += AllocationMemento::kSize;
*offset += AllocationMemento::kSize;
BuildCreateAllocationMemento(target, JSArray::kSize, allocation_site);
=======================================
--- /branches/3.20/src/objects-inl.h Thu Aug 29 13:18:43 2013 UTC
+++ /branches/3.20/src/objects-inl.h Mon Sep 23 11:26:03 2013 UTC
@@ -1312,7 +1312,7 @@
bool JSObject::ShouldTrackAllocationInfo() {
- if (map()->CanTrackAllocationSite()) {
+ if (AllocationSite::CanTrack(map()->instance_type())) {
if (!IsJSArray()) {
return true;
}
@@ -1349,6 +1349,11 @@
}
+inline bool AllocationSite::CanTrack(InstanceType type) {
+ return type == JS_ARRAY_TYPE;
+}
+
+
MaybeObject* JSObject::EnsureCanContainHeapObjectElements() {
ValidateElements();
ElementsKind elements_kind = map()->elements_kind();
@@ -3576,11 +3581,6 @@
Code::Flags Code::flags() {
return static_cast<Flags>(READ_INT_FIELD(this, kFlagsOffset));
}
-
-
-inline bool Map::CanTrackAllocationSite() {
- return instance_type() == JS_ARRAY_TYPE;
-}
void Map::set_owns_descriptors(bool is_shared) {
=======================================
--- /branches/3.20/src/objects.h Mon Sep 2 14:34:05 2013 UTC
+++ /branches/3.20/src/objects.h Mon Sep 23 11:26:03 2013 UTC
@@ -5760,7 +5760,6 @@
set_bit_field3(EnumLengthBits::update(bit_field3(), length));
}
- inline bool CanTrackAllocationSite();
inline bool owns_descriptors();
inline void set_owns_descriptors(bool is_shared);
inline bool is_observed();
@@ -7829,6 +7828,7 @@
static inline AllocationSiteMode GetMode(
ElementsKind boilerplate_elements_kind);
static inline AllocationSiteMode GetMode(ElementsKind from, ElementsKind
to);
+ static inline bool CanTrack(InstanceType type);
static const int kTransitionInfoOffset = HeapObject::kHeaderSize;
static const int kWeakNextOffset = kTransitionInfoOffset + kPointerSize;
=======================================
--- /branches/3.20/src/version.cc Thu Sep 19 08:18:03 2013 UTC
+++ /branches/3.20/src/version.cc Mon Sep 23 11:26:03 2013 UTC
@@ -35,7 +35,7 @@
#define MAJOR_VERSION 3
#define MINOR_VERSION 20
#define BUILD_NUMBER 17
-#define PATCH_LEVEL 8
+#define PATCH_LEVEL 9
// Use 1 for candidates and 0 otherwise.
// (Boolean macro values are not supported by all preprocessors.)
#define IS_CANDIDATE_VERSION 0
=======================================
--- /branches/3.20/test/cctest/test-heap.cc Tue Aug 13 17:09:37 2013 UTC
+++ /branches/3.20/test/cctest/test-heap.cc Mon Sep 23 11:26:03 2013 UTC
@@ -2113,6 +2113,78 @@
CHECK(HEAP->InNewSpace(*o));
}
+
+
+TEST(OptimizedPretenuringAllocationFolding) {
+ i::FLAG_allow_natives_syntax = true;
+ CcTest::InitializeVM();
+ if (!i::V8::UseCrankshaft() || i::FLAG_always_opt) return;
+ if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
+ v8::HandleScope scope(CcTest::isolate());
+ HEAP->SetNewSpaceHighPromotionModeActive(true);
+
+ v8::Local<v8::Value> res = CompileRun(
+ "function DataObject() {"
+ " this.a = 1.1;"
+ " this.b = [{}];"
+ " this.c = 1.2;"
+ " this.d = [{}];"
+ " this.e = 1.3;"
+ " this.f = [{}];"
+ "}"
+ "function f() {"
+ " return new DataObject();"
+ "};"
+ "f(); f(); f();"
+ "%OptimizeFunctionOnNextCall(f);"
+ "f();");
+
+ Handle<JSObject> o =
+ v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
+
+ CHECK(HEAP->InOldDataSpace(o->RawFastPropertyAt(0)));
+ CHECK(HEAP->InOldPointerSpace(o->RawFastPropertyAt(1)));
+ CHECK(HEAP->InOldDataSpace(o->RawFastPropertyAt(2)));
+ CHECK(HEAP->InOldPointerSpace(o->RawFastPropertyAt(3)));
+ CHECK(HEAP->InOldDataSpace(o->RawFastPropertyAt(4)));
+ CHECK(HEAP->InOldPointerSpace(o->RawFastPropertyAt(5)));
+}
+
+
+TEST(OptimizedPretenuringAllocationFoldingBlocks) {
+ i::FLAG_allow_natives_syntax = true;
+ CcTest::InitializeVM();
+ if (!i::V8::UseCrankshaft() || i::FLAG_always_opt) return;
+ if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
+ v8::HandleScope scope(CcTest::isolate());
+ HEAP->SetNewSpaceHighPromotionModeActive(true);
+
+ v8::Local<v8::Value> res = CompileRun(
+ "function DataObject() {"
+ " this.a = [{}];"
+ " this.b = [{}];"
+ " this.c = 1.1;"
+ " this.d = 1.2;"
+ " this.e = [{}];"
+ " this.f = 1.3;"
+ "}"
+ "function f() {"
+ " return new DataObject();"
+ "};"
+ "f(); f(); f();"
+ "%OptimizeFunctionOnNextCall(f);"
+ "f();");
+
+ Handle<JSObject> o =
+ v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
+
+ CHECK(HEAP->InOldPointerSpace(o->RawFastPropertyAt(0)));
+ CHECK(HEAP->InOldPointerSpace(o->RawFastPropertyAt(1)));
+ CHECK(HEAP->InOldDataSpace(o->RawFastPropertyAt(2)));
+ CHECK(HEAP->InOldDataSpace(o->RawFastPropertyAt(3)));
+ CHECK(HEAP->InOldPointerSpace(o->RawFastPropertyAt(4)));
+ CHECK(HEAP->InOldDataSpace(o->RawFastPropertyAt(5)));
+}
TEST(OptimizedPretenuringObjectArrayLiterals) {
=======================================
--- /branches/3.20/test/mjsunit/allocation-folding.js Wed Jul 24 08:18:28
2013 UTC
+++ /branches/3.20/test/mjsunit/allocation-folding.js Mon Sep 23 11:26:03
2013 UTC
@@ -56,7 +56,7 @@
doubles(); doubles(); doubles();
%OptimizeFunctionOnNextCall(doubles);
-var result = doubles();
+result = doubles();
gc();
@@ -72,8 +72,31 @@
doubles_int(); doubles_int(); doubles_int();
%OptimizeFunctionOnNextCall(doubles_int);
-var result = doubles_int();
+result = doubles_int();
gc();
assertEquals(result[1], 3.1);
+
+// Test allocation folding over a branch.
+
+function branch_int(left) {
+ var elem1 = [1, 2];
+ var elem2;
+ if (left) {
+ elem2 = [3, 4];
+ } else {
+ elem2 = [5, 6];
+ }
+ return elem2;
+}
+
+branch_int(1); branch_int(1); branch_int(1);
+%OptimizeFunctionOnNextCall(branch_int);
+result = branch_int(1);
+var result2 = branch_int(0);
+
+gc();
+
+assertEquals(result[1], 4);
+assertEquals(result2[1], 6);
--
--
v8-dev mailing list
[email protected]
http://groups.google.com/group/v8-dev
---
You received this message because you are subscribed to the Google Groups "v8-dev" group.
To unsubscribe from this group and stop receiving emails from it, send an email
to [email protected].
For more options, visit https://groups.google.com/groups/opt_out.