Revision: 5510
Author: [email protected]
Date: Thu Sep 23 02:15:26 2010
Log: Dynamically determine optimal instance size.
The number of inobject properties used to be derived from the number
of this-property assignments in the constructor (and increased by 2 to
allow for properties added later). This very often led to wasted
inobject slots.

This patch reclaims some of the unused inobject space by the following
method:
- For each constructor function, the first several objects are allocated
  using the initial ("generous") instance size estimate (this is called
  the 'tracking phase').
- During the tracking phase, map transitions are tracked and actual
  property counts are collected.
- At the end of the tracking phase, instance sizes in the maps are
  decreased if necessary (starting with the function's initial map and
  traversing the transition tree).
- All further allocations use the more realistic instance size estimate.

Shrinking generously allocated objects without a costly heap traversal is
made possible by initializing their inobject properties with
one_pointer_filler_map (instead of undefined).

The initial slack for the generous allocation is increased from 2 to 6,
which really helps some tests.
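In rough outline, the countdown mechanism behaves like the following
standalone C++ sketch. This is a toy model for illustration only:
ToyShared, ToyMap and OnConstruct are made-up names, and only the counter
value (16) and the min-then-shrink computation mirror the patch.

  #include <climits>
  #include <vector>

  struct ToyMap { int instance_words; int unused_fields; };

  struct ToyShared {
    int construction_count = 16;           // kGenerousAllocationCount
    std::vector<ToyMap*> transition_tree;  // initial map plus transitions

    // What the countdown construct stub does on every 'new F()'.
    void OnConstruct() {
      if (--construction_count != 0) return;  // fast path: just count down
      // Last counted construction: find the slack common to all maps...
      int slack = INT_MAX;
      for (ToyMap* m : transition_tree)
        if (m->unused_fields < slack) slack = m->unused_fields;
      // ...and shrink every map. Existing objects need no fixing because
      // their unused slots hold one_pointer_filler_map, not undefined.
      for (ToyMap* m : transition_tree) {
        m->instance_words -= slack;
        m->unused_fields -= slack;
      }
      // The runtime then swaps the countdown stub back for a regular one.
    }
  };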
Review URL: http://codereview.chromium.org/3329019
http://code.google.com/p/v8/source/detail?r=5510
Modified:
/branches/bleeding_edge/src/arm/builtins-arm.cc
/branches/bleeding_edge/src/builtins.h
/branches/bleeding_edge/src/handles.cc
/branches/bleeding_edge/src/heap.cc
/branches/bleeding_edge/src/ia32/assembler-ia32.cc
/branches/bleeding_edge/src/ia32/assembler-ia32.h
/branches/bleeding_edge/src/ia32/builtins-ia32.cc
/branches/bleeding_edge/src/mark-compact.cc
/branches/bleeding_edge/src/objects-inl.h
/branches/bleeding_edge/src/objects.cc
/branches/bleeding_edge/src/objects.h
/branches/bleeding_edge/src/runtime.cc
/branches/bleeding_edge/src/runtime.h
/branches/bleeding_edge/src/x64/builtins-x64.cc
/branches/bleeding_edge/test/cctest/test-api.cc
=======================================
--- /branches/bleeding_edge/src/arm/builtins-arm.cc Tue Sep 7 04:09:45 2010
+++ /branches/bleeding_edge/src/arm/builtins-arm.cc Thu Sep 23 02:15:26 2010
@@ -521,7 +521,11 @@
static void Generate_JSConstructStubHelper(MacroAssembler* masm,
- bool is_api_function) {
+ bool is_api_function,
+ bool count_constructions) {
+ // Should never count constructions for api objects.
+ ASSERT(!is_api_function || !count_constructions);
+
// Enter a construct frame.
__ EnterConstructFrame();
@@ -530,9 +534,6 @@
__ push(r0); // Smi-tagged arguments count.
__ push(r1); // Constructor function.
- // Use r7 for holding undefined which is used in several places below.
- __ LoadRoot(r7, Heap::kUndefinedValueRootIndex);
-
// Try to allocate the object without transitioning into C code. If any of the
// preconditions is not met, the code bails out to the runtime call.
Label rt_call, allocated;
@@ -549,7 +550,6 @@
// Load the initial map and verify that it is in fact a map.
// r1: constructor function
- // r7: undefined value
__ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
__ tst(r2, Operand(kSmiTagMask));
__ b(eq, &rt_call);
@@ -561,14 +561,35 @@
// instance type would be JS_FUNCTION_TYPE.
// r1: constructor function
// r2: initial map
- // r7: undefined value
__ CompareInstanceType(r2, r3, JS_FUNCTION_TYPE);
__ b(eq, &rt_call);
+ if (count_constructions) {
+ Label allocate;
+ // Decrease generous allocation count.
+ __ ldr(r3, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
+ MemOperand constructor_count =
+ FieldMemOperand(r3, SharedFunctionInfo::kConstructionCountOffset);
+ __ ldrb(r4, constructor_count);
+ __ sub(r4, r4, Operand(1), SetCC);
+ __ strb(r4, constructor_count);
+ __ b(ne, &allocate);
+
+ __ Push(r1, r2);
+
+ __ push(r1); // constructor
+ // The call will replace the stub, so the countdown is only done once.
+ __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);
+
+ __ pop(r2);
+ __ pop(r1);
+
+ __ bind(&allocate);
+ }
+
// Now allocate the JSObject on the heap.
// r1: constructor function
// r2: initial map
- // r7: undefined value
__ ldrb(r3, FieldMemOperand(r2, Map::kInstanceSizeOffset));
__ AllocateInNewSpace(r3, r4, r5, r6, &rt_call, SIZE_IN_WORDS);
@@ -578,7 +599,6 @@
// r2: initial map
// r3: object size
// r4: JSObject (not tagged)
- // r7: undefined value
__ LoadRoot(r6, Heap::kEmptyFixedArrayRootIndex);
__ mov(r5, r4);
ASSERT_EQ(0 * kPointerSize, JSObject::kMapOffset);
@@ -588,16 +608,21 @@
ASSERT_EQ(2 * kPointerSize, JSObject::kElementsOffset);
__ str(r6, MemOperand(r5, kPointerSize, PostIndex));
- // Fill all the in-object properties with undefined.
+ // Fill all the in-object properties with the appropriate filler.
// r1: constructor function
// r2: initial map
// r3: object size (in words)
// r4: JSObject (not tagged)
// r5: First in-object property of JSObject (not tagged)
- // r7: undefined value
__ add(r6, r4, Operand(r3, LSL, kPointerSizeLog2)); // End of object.
ASSERT_EQ(3 * kPointerSize, JSObject::kHeaderSize);
{ Label loop, entry;
+ if (count_constructions) {
+ // To allow for truncation.
+ __ LoadRoot(r7, Heap::kOnePointerFillerMapRootIndex);
+ } else {
+ __ LoadRoot(r7, Heap::kUndefinedValueRootIndex);
+ }
__ b(&entry);
__ bind(&loop);
__ str(r7, MemOperand(r5, kPointerSize, PostIndex));
@@ -617,7 +642,6 @@
// r1: constructor function
// r4: JSObject
// r5: start of next object (not tagged)
- // r7: undefined value
__ ldrb(r3, FieldMemOperand(r2, Map::kUnusedPropertyFieldsOffset));
// The field instance sizes contains both pre-allocated property fields and
// in-object properties.
@@ -637,7 +661,6 @@
// r3: number of elements in properties array
// r4: JSObject
// r5: start of next object
- // r7: undefined value
__ add(r0, r3, Operand(FixedArray::kHeaderSize / kPointerSize));
__ AllocateInNewSpace(
r0,
@@ -652,7 +675,6 @@
// r3: number of elements in properties array
// r4: JSObject
// r5: FixedArray (not tagged)
- // r7: undefined value
__ LoadRoot(r6, Heap::kFixedArrayMapRootIndex);
__ mov(r2, r5);
ASSERT_EQ(0 * kPointerSize, JSObject::kMapOffset);
@@ -667,10 +689,16 @@
// r3: number of elements in properties array
// r4: JSObject
// r5: FixedArray (not tagged)
- // r7: undefined
__ add(r6, r2, Operand(r3, LSL, kPointerSizeLog2)); // End of object.
ASSERT_EQ(2 * kPointerSize, FixedArray::kHeaderSize);
{ Label loop, entry;
+ if (count_constructions) {
+ __ LoadRoot(r7, Heap::kUndefinedValueRootIndex);
+ } else if (FLAG_debug_code) {
+ __ LoadRoot(r8, Heap::kUndefinedValueRootIndex);
+ __ cmp(r7, r8);
+ __ Assert(eq, "Undefined value not loaded.");
+ }
__ b(&entry);
__ bind(&loop);
__ str(r7, MemOperand(r2, kPointerSize, PostIndex));
@@ -820,15 +848,20 @@
__ IncrementCounter(&Counters::constructed_objects, 1, r1, r2);
__ Jump(lr);
}
+
+
+void Builtins::Generate_JSConstructStubCountdown(MacroAssembler* masm) {
+ Generate_JSConstructStubHelper(masm, false, true);
+}
void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
- Generate_JSConstructStubHelper(masm, false);
+ Generate_JSConstructStubHelper(masm, false, false);
}
void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
- Generate_JSConstructStubHelper(masm, true);
+ Generate_JSConstructStubHelper(masm, true, false);
}
=======================================
--- /branches/bleeding_edge/src/builtins.h Thu Aug 26 06:59:37 2010
+++ /branches/bleeding_edge/src/builtins.h Thu Sep 23 02:15:26 2010
@@ -65,6 +65,7 @@
#define BUILTIN_LIST_A(V) \
V(ArgumentsAdaptorTrampoline, BUILTIN, UNINITIALIZED) \
V(JSConstructCall, BUILTIN, UNINITIALIZED) \
+ V(JSConstructStubCountdown, BUILTIN, UNINITIALIZED) \
V(JSConstructStubGeneric, BUILTIN, UNINITIALIZED) \
V(JSConstructStubApi, BUILTIN, UNINITIALIZED) \
V(JSEntryTrampoline, BUILTIN, UNINITIALIZED) \
@@ -249,6 +250,7 @@
CFunctionId id,
BuiltinExtraArguments extra_args);
static void Generate_JSConstructCall(MacroAssembler* masm);
+ static void Generate_JSConstructStubCountdown(MacroAssembler* masm);
static void Generate_JSConstructStubGeneric(MacroAssembler* masm);
static void Generate_JSConstructStubApi(MacroAssembler* masm);
static void Generate_JSEntryTrampoline(MacroAssembler* masm);
=======================================
--- /branches/bleeding_edge/src/handles.cc Thu Sep 23 01:27:51 2010
+++ /branches/bleeding_edge/src/handles.cc Thu Sep 23 02:15:26 2010
@@ -142,6 +142,13 @@
void SetExpectedNofProperties(Handle<JSFunction> func, int nof) {
+ // If objects constructed from this function exist then changing
+ // 'estimated_nof_properties' is dangerous since the previous value might
+ // have been compiled into the fast construct stub. Moreover, the inobject
+ // slack tracking logic might have adjusted the previous value, so even
+ // passing the same value is risky.
+ if (func->shared()->live_objects_may_exist()) return;
+
func->shared()->set_expected_nof_properties(nof);
if (func->has_initial_map()) {
Handle<Map> new_initial_map =
@@ -158,16 +165,25 @@
static int ExpectedNofPropertiesFromEstimate(int estimate) {
- // TODO(1231235): We need dynamic feedback to estimate the number
- // of expected properties in an object. The static hack below
- // is barely a solution.
- if (estimate == 0) return 4;
- return estimate + 2;
+ // If no properties are added in the constructor, they are more likely
+ // to be added later.
+ if (estimate == 0) estimate = 2;
+
+ // We do not shrink objects that go into a snapshot (yet), so we adjust
+ // the estimate conservatively.
+ if (Serializer::enabled()) return estimate + 2;
+
+ // Inobject slack tracking will reclaim redundant inobject space later,
+ // so we can afford to adjust the estimate generously.
+ return estimate + 6;
}
void SetExpectedNofPropertiesFromEstimate(Handle<SharedFunctionInfo> shared,
int estimate) {
+ // See the comment in SetExpectedNofProperties.
+ if (shared->live_objects_may_exist()) return;
+
shared->set_expected_nof_properties(
ExpectedNofPropertiesFromEstimate(estimate));
}
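For reference, the new estimate policy can be restated as a self-contained
function; the snapshotting parameter stands in for the Serializer::enabled()
check:

  // Restatement of ExpectedNofPropertiesFromEstimate after this patch.
  int ExpectedNofPropertiesFromEstimate(int estimate, bool snapshotting) {
    // No this-property assignments in the constructor usually means the
    // properties are added later, so start from a nonzero baseline.
    if (estimate == 0) estimate = 2;
    // Objects that go into a snapshot are not shrunk (yet): stay
    // conservative there.
    if (snapshotting) return estimate + 2;
    // Slack tracking will reclaim the excess, so the slack can be generous.
    return estimate + 6;
  }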
=======================================
--- /branches/bleeding_edge/src/heap.cc Thu Sep 16 03:55:37 2010
+++ /branches/bleeding_edge/src/heap.cc Thu Sep 23 02:15:26 2010
@@ -2068,6 +2068,7 @@
share->set_debug_info(undefined_value());
share->set_inferred_name(empty_string());
share->set_compiler_hints(0);
+ share->set_initial_map(undefined_value());
share->set_this_property_assignments_count(0);
share->set_this_property_assignments(undefined_value());
share->set_num_literals(0);
@@ -2726,6 +2727,9 @@
}
}
}
+
+ fun->shared()->StartInobjectSlackTracking(map);
+
return map;
}
@@ -2742,7 +2746,20 @@
// fixed array (eg, Heap::empty_fixed_array()). Currently, the object
// verification code has to cope with (temporarily) invalid objects. See
// for example, JSArray::JSArrayVerify).
- obj->InitializeBody(map->instance_size());
+ Object* filler;
+ // We cannot always fill with one_pointer_filler_map because objects
+ // created from API functions expect their internal fields to be
+ // initialized with undefined_value.
+ if (map->constructor()->IsJSFunction() &&
+ JSFunction::cast(map->constructor())->shared()->
+ IsInobjectSlackTrackingInProgress()) {
+ // We might want to shrink the object later.
+ ASSERT(obj->GetInternalFieldCount() == 0);
+ filler = Heap::one_pointer_filler_map();
+ } else {
+ filler = Heap::undefined_value();
+ }
+ obj->InitializeBody(map->instance_size(), filler);
}
@@ -2925,19 +2942,13 @@
Object* Heap::ReinitializeJSGlobalProxy(JSFunction* constructor,
JSGlobalProxy* object) {
- // Allocate initial map if absent.
- if (!constructor->has_initial_map()) {
- Object* initial_map = AllocateInitialMap(constructor);
- if (initial_map->IsFailure()) return initial_map;
- constructor->set_initial_map(Map::cast(initial_map));
- Map::cast(initial_map)->set_constructor(constructor);
- }
-
+ ASSERT(constructor->has_initial_map());
Map* map = constructor->initial_map();
- // Check that the already allocated object has the same size as
+ // Check that the already allocated object has the same size and type as
// objects allocated using the constructor.
ASSERT(map->instance_size() == object->map()->instance_size());
+ ASSERT(map->instance_type() == object->map()->instance_type());
// Allocate the backing storage for the properties.
int prop_size = map->unused_property_fields() - map->inobject_properties();
=======================================
--- /branches/bleeding_edge/src/ia32/assembler-ia32.cc Tue Sep 21 05:54:12 2010
+++ /branches/bleeding_edge/src/ia32/assembler-ia32.cc Thu Sep 23 02:15:26 2010
@@ -991,6 +991,14 @@
EMIT(0xFE);
EMIT(0xC8 | dst.code());
}
+
+
+void Assembler::dec_b(const Operand& dst) {
+ EnsureSpace ensure_space(this);
+ last_pc_ = pc_;
+ EMIT(0xFE);
+ emit_operand(ecx, dst);
+}
void Assembler::dec(Register dst) {
=======================================
--- /branches/bleeding_edge/src/ia32/assembler-ia32.h Tue Sep 21 05:54:12 2010
+++ /branches/bleeding_edge/src/ia32/assembler-ia32.h Thu Sep 23 02:15:26 2010
@@ -595,6 +595,7 @@
void cmp(const Operand& op, Handle<Object> handle);
void dec_b(Register dst);
+ void dec_b(const Operand& dst);
void dec(Register dst);
void dec(const Operand& dst);
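A note on the new dec_b(const Operand&) overload: DEC r/m8 is opcode FE /1,
and the /1 opcode extension travels in the reg field of the ModRM byte.
Passing ecx (register code 1) to emit_operand() encodes that extension; the
ecx register itself is not involved. Sketch of the byte layout:

  #include <cstdint>

  // ModRM layout: [mod:2][reg:3][rm:3]. For FE /1 (DEC r/m8) the reg field
  // holds the constant 1, which happens to be ecx's register code.
  uint8_t ModRM(int mod, int reg_or_ext, int rm) {
    return static_cast<uint8_t>((mod << 6) | (reg_or_ext << 3) | rm);
  }
  // Example: 'dec byte ptr [eax]' encodes as FE 08, i.e. FE, ModRM(0, 1, 0).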
=======================================
--- /branches/bleeding_edge/src/ia32/builtins-ia32.cc Mon Aug 30 04:48:07 2010
+++ /branches/bleeding_edge/src/ia32/builtins-ia32.cc Thu Sep 23 02:15:26 2010
@@ -105,7 +105,11 @@
static void Generate_JSConstructStubHelper(MacroAssembler* masm,
- bool is_api_function) {
+ bool is_api_function,
+ bool count_constructions) {
+ // Should never count constructions for api objects.
+ ASSERT(!is_api_function || !count_constructions);
+
// Enter a construct frame.
__ EnterConstructFrame();
@@ -148,6 +152,26 @@
__ CmpInstanceType(eax, JS_FUNCTION_TYPE);
__ j(equal, &rt_call);
+ if (count_constructions) {
+ Label allocate;
+ // Decrease generous allocation count.
+ __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
+ __ dec_b(FieldOperand(ecx, SharedFunctionInfo::kConstructionCountOffset));
+ __ j(not_zero, &allocate);
+
+ __ push(eax);
+ __ push(edi);
+
+ __ push(edi); // constructor
+ // The call will replace the stub, so the countdown is only done once.
+ __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);
+
+ __ pop(edi);
+ __ pop(eax);
+
+ __ bind(&allocate);
+ }
+
// Now allocate the JSObject on the heap.
// edi: constructor
// eax: initial map
@@ -167,7 +191,12 @@
// ebx: JSObject
// edi: start of next object
{ Label loop, entry;
- __ mov(edx, Factory::undefined_value());
+ // To allow for truncation.
+ if (count_constructions) {
+ __ mov(edx, Factory::one_pointer_filler_map());
+ } else {
+ __ mov(edx, Factory::undefined_value());
+ }
__ lea(ecx, Operand(ebx, JSObject::kHeaderSize));
__ jmp(&entry);
__ bind(&loop);
@@ -349,15 +378,20 @@
__ IncrementCounter(&Counters::constructed_objects, 1);
__ ret(0);
}
+
+
+void Builtins::Generate_JSConstructStubCountdown(MacroAssembler* masm) {
+ Generate_JSConstructStubHelper(masm, false, true);
+}
void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
- Generate_JSConstructStubHelper(masm, false);
+ Generate_JSConstructStubHelper(masm, false, false);
}
void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
- Generate_JSConstructStubHelper(masm, true);
+ Generate_JSConstructStubHelper(masm, true, false);
}
=======================================
--- /branches/bleeding_edge/src/mark-compact.cc Tue Sep 7 02:15:15 2010
+++ /branches/bleeding_edge/src/mark-compact.cc Thu Sep 23 02:15:26 2010
@@ -282,10 +282,7 @@
FixedArray::BodyDescriptor,
void>::Visit);
- table_.Register(kVisitSharedFunctionInfo,
- &FixedBodyVisitor<StaticMarkingVisitor,
- SharedFunctionInfo::BodyDescriptor,
- void>::Visit);
+ table_.Register(kVisitSharedFunctionInfo, &VisitSharedFunctionInfo);
table_.Register(kVisitByteArray, &DataObjectVisitor::Visit);
table_.Register(kVisitSeqAsciiString, &DataObjectVisitor::Visit);
@@ -535,6 +532,17 @@
return true;
}
+
+
+ static void VisitSharedFunctionInfo(Map* map, HeapObject* object) {
+ SharedFunctionInfo* shared =
+ reinterpret_cast<SharedFunctionInfo*>(object);
+ if (shared->IsInobjectSlackTrackingInProgress()) {
+ shared->DetachInitialMap();
+ }
+ FixedBodyVisitor<StaticMarkingVisitor,
+ SharedFunctionInfo::BodyDescriptor,
+ void>::Visit(map, object);
+ }
static void VisitCodeEntry(Address entry_address) {
@@ -1139,6 +1147,14 @@
// Only JSObject and subtypes have map transitions and back pointers.
if (map->instance_type() < FIRST_JS_OBJECT_TYPE) continue;
if (map->instance_type() > JS_FUNCTION_TYPE) continue;
+
+ if (map->IsMarked() && map->attached_to_shared_function_info()) {
+ // This map is used for inobject slack tracking and has been detached
+ // from SharedFunctionInfo during the mark phase.
+ // Since it survived the GC, reattach it now.
+ map->unchecked_constructor()->unchecked_shared()->AttachInitialMap(map);
+ }
+
// Follow the chain of back pointers to find the prototype.
Map* current = map;
while (SafeIsMap(current)) {
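The detach/reattach pair above makes the SharedFunctionInfo -> initial_map
reference weak for the duration of marking: the link is cut before marking
and restored afterwards only if the map proved reachable some other way.
Schematically (toy types and names, not the real object layout):

  struct ToyShared;

  struct ToyMap {
    bool marked;                    // set by the marker if reachable
    bool attached_to_shared;        // the kAttachedToSharedFunctionInfo bit
    ToyShared* constructor_shared;  // reached via map->constructor in V8
  };

  struct ToyShared { ToyMap* initial_map; };

  // Mark phase: sever the strong link so it cannot keep the map alive.
  void DetachInitialMap(ToyShared* s) {
    s->initial_map->attached_to_shared = true;  // remember how to reattach
    s->initial_map = nullptr;                   // the reference is now weak
  }

  // Post-marking pass over maps: restore the link only for survivors.
  void MaybeReattach(ToyMap* m) {
    if (m->marked && m->attached_to_shared) {
      m->attached_to_shared = false;
      m->constructor_shared->initial_map = m;
    }
  }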
=======================================
--- /branches/bleeding_edge/src/objects-inl.h Mon Sep 6 05:50:11 2010
+++ /branches/bleeding_edge/src/objects-inl.h Thu Sep 23 02:15:26 2010
@@ -1343,8 +1343,8 @@
-void JSObject::InitializeBody(int object_size) {
- Object* value = Heap::undefined_value();
+void JSObject::InitializeBody(int object_size, Object* value) {
+ ASSERT(!value->IsHeapObject() || !Heap::InNewSpace(value));
for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
WRITE_FIELD(this, offset, value);
}
@@ -2279,6 +2279,23 @@
}
+void Map::set_attached_to_shared_function_info(bool value) {
+ if (value) {
+ set_bit_field2(bit_field2() | (1 << kAttachedToSharedFunctionInfo));
+ } else {
+ set_bit_field2(bit_field2() & ~(1 << kAttachedToSharedFunctionInfo));
+ }
+}
+
+bool Map::attached_to_shared_function_info() {
+ return ((1 << kAttachedToSharedFunctionInfo) & bit_field2()) != 0;
+}
+
+
+JSFunction* Map::unchecked_constructor() {
+ return reinterpret_cast<JSFunction*>(READ_FIELD(this, kConstructorOffset));
+}
+
Code::Flags Code::flags() {
return static_cast<Flags>(READ_INT_FIELD(this, kFlagsOffset));
@@ -2571,6 +2588,7 @@
ACCESSORS(SharedFunctionInfo, name, Object, kNameOffset)
ACCESSORS(SharedFunctionInfo, construct_stub, Code, kConstructStubOffset)
+ACCESSORS(SharedFunctionInfo, initial_map, Object, kInitialMapOffset)
ACCESSORS(SharedFunctionInfo, instance_class_name, Object,
kInstanceClassNameOffset)
ACCESSORS(SharedFunctionInfo, function_data, Object, kFunctionDataOffset)
@@ -2662,6 +2680,37 @@
kThisPropertyAssignmentsCountOffset)
#endif
+
+int SharedFunctionInfo::construction_count() {
+ return READ_BYTE_FIELD(this, kConstructionCountOffset);
+}
+
+
+void SharedFunctionInfo::set_construction_count(int value) {
+ ASSERT(0 <= value && value < 256);
+ WRITE_BYTE_FIELD(this, kConstructionCountOffset, static_cast<byte>(value));
+}
+
+
+bool SharedFunctionInfo::live_objects_may_exist() {
+ return (compiler_hints() & (1 << kLiveObjectsMayExist)) != 0;
+}
+
+
+void SharedFunctionInfo::set_live_objects_may_exist(bool value) {
+ if (value) {
+ set_compiler_hints(compiler_hints() | (1 << kLiveObjectsMayExist));
+ } else {
+ set_compiler_hints(compiler_hints() & ~(1 << kLiveObjectsMayExist));
+ }
+}
+
+
+bool SharedFunctionInfo::IsInobjectSlackTrackingInProgress() {
+ return initial_map() != Heap::undefined_value();
+}
+
+
ACCESSORS(CodeCache, default_cache, FixedArray, kDefaultCacheOffset)
ACCESSORS(CodeCache, normal_type_cache, Object, kNormalTypeCacheOffset)
=======================================
--- /branches/bleeding_edge/src/objects.cc Thu Sep 23 01:27:51 2010
+++ /branches/bleeding_edge/src/objects.cc Thu Sep 23 02:15:26 2010
@@ -1476,8 +1476,8 @@
FixedArray* new_properties = 0;  // Will always be NULL or a valid pointer.
int new_unused_property_fields = map()->unused_property_fields() - 1;
if (map()->unused_property_fields() == 0) {
- new_unused_property_fields = kFieldsAdded - 1;
- Object* new_properties_unchecked =
+ new_unused_property_fields = kFieldsAdded - 1;
+ Object* new_properties_unchecked =
properties()->CopySize(properties()->length() + kFieldsAdded);
if (new_properties_unchecked->IsFailure()) return new_properties_unchecked;
new_properties = FixedArray::cast(new_properties_unchecked);
@@ -3269,6 +3269,47 @@
ASSERT(!code_cache()->IsFixedArray());
CodeCache::cast(code_cache())->RemoveByIndex(name, code, index);
}
+
+
+void Map::TraverseTransitionTree(TraverseCallback callback, void* data) {
+ Map* current = this;
+ while (current != Heap::meta_map()) {
+ DescriptorArray* d = reinterpret_cast<DescriptorArray*>(
+ *RawField(current, Map::kInstanceDescriptorsOffset));
+ if (d == Heap::empty_descriptor_array()) {
+ Map* prev = current->map();
+ current->set_map(Heap::meta_map());
+ callback(current, data);
+ current = prev;
+ continue;
+ }
+
+ FixedArray* contents = reinterpret_cast<FixedArray*>(
+ d->get(DescriptorArray::kContentArrayIndex));
+ Object** map_or_index_field = RawField(contents, HeapObject::kMapOffset);
+ Object* map_or_index = *map_or_index_field;
+ bool map_done = true;
+ for (int i = map_or_index->IsSmi() ? Smi::cast(map_or_index)->value() : 0;
+ i < contents->length();
+ i += 2) {
+ PropertyDetails details(Smi::cast(contents->get(i + 1)));
+ if (details.IsTransition()) {
+ Map* next = reinterpret_cast<Map*>(contents->get(i));
+ next->set_map(current);
+ *map_or_index_field = Smi::FromInt(i + 2);
+ current = next;
+ map_done = false;
+ break;
+ }
+ }
+ if (!map_done) continue;
+ *map_or_index_field = Heap::fixed_array_map();
+ Map* prev = current->map();
+ current->set_map(Heap::meta_map());
+ callback(current, data);
+ current = prev;
+ }
+}
Object* CodeCache::Update(String* name, Code* code) {
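TraverseTransitionTree (added above) visits every map in the transition tree
without recursion and without an allocated stack: it temporarily stores the
parent in the child's map slot and a resume index in the descriptor
contents' map slot, restoring both on the way back up (a pointer-reversal
walk). Its net effect is the plain post-order traversal below (toy types; in
the patch the callback likewise fires as each map is left):

  #include <vector>

  struct ToyMap {
    std::vector<ToyMap*> transitions;  // children in the transition tree
    int unused_fields;
  };

  // Visit every map exactly once, children before parents. V8 achieves the
  // same order in O(1) extra space via the pointer reversal above.
  void Traverse(ToyMap* map, void (*callback)(ToyMap*, void*), void* data) {
    for (ToyMap* child : map->transitions) Traverse(child, callback, data);
    callback(map, data);
  }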
@@ -5375,6 +5416,107 @@
accumulator->Put(script_source, start_position(), end_position());
}
}
+
+
+void SharedFunctionInfo::StartInobjectSlackTracking(Map* map) {
+ ASSERT(!IsInobjectSlackTrackingInProgress());
+
+ // Only initiate the tracking the first time.
+ if (live_objects_may_exist()) return;
+ set_live_objects_may_exist(true);
+
+ // No tracking during the snapshot construction phase.
+ if (Serializer::enabled()) return;
+
+ if (map->unused_property_fields() == 0) return;
+
+ // Nonzero counter is a leftover from the previous attempt interrupted
+ // by GC, keep it.
+ if (construction_count() == 0) {
+ set_construction_count(kGenerousAllocationCount);
+ }
+ set_initial_map(map);
+ ASSERT_EQ(Builtins::builtin(Builtins::JSConstructStubGeneric),
+ construct_stub());
+ set_construct_stub(Builtins::builtin(Builtins::JSConstructStubCountdown));
+}
+
+
+// Called from GC, hence reinterpret_cast and unchecked accessors.
+void SharedFunctionInfo::DetachInitialMap() {
+ Map* map = reinterpret_cast<Map*>(initial_map());
+
+ // Make the map remember to restore the link if it survives the GC.
+ map->set_bit_field2(
+ map->bit_field2() | (1 << Map::kAttachedToSharedFunctionInfo));
+
+ // Undo state changes made by StartInobjectSlackTracking (except the
+ // construction_count). This way if the initial map does not survive the
+ // GC then StartInobjectSlackTracking will be called again the next time
+ // the constructor is called. The countdown will continue and (possibly
+ // after several more GCs) CompleteInobjectSlackTracking will eventually
+ // be called.
+ set_initial_map(Heap::raw_unchecked_undefined_value());
+ ASSERT_EQ(Builtins::builtin(Builtins::JSConstructStubCountdown),
+ *RawField(this, kConstructStubOffset));
+ set_construct_stub(Builtins::builtin(Builtins::JSConstructStubGeneric));
+ // It is safe to clear the flag: it will be set again if the map is live.
+ set_live_objects_may_exist(false);
+}
+
+
+// Called from GC, hence reinterpret_cast and unchecked accessors.
+void SharedFunctionInfo::AttachInitialMap(Map* map) {
+ map->set_bit_field2(
+ map->bit_field2() & ~(1 << Map::kAttachedToSharedFunctionInfo));
+
+ // Resume inobject slack tracking.
+ set_initial_map(map);
+ ASSERT_EQ(Builtins::builtin(Builtins::JSConstructStubGeneric),
+ *RawField(this, kConstructStubOffset));
+ set_construct_stub(Builtins::builtin(Builtins::JSConstructStubCountdown));
+ // The map survived the gc, so there may be objects referencing it.
+ set_live_objects_may_exist(true);
+}
+
+
+static void GetMinInobjectSlack(Map* map, void* data) {
+ int slack = map->unused_property_fields();
+ if (*reinterpret_cast<int*>(data) > slack) {
+ *reinterpret_cast<int*>(data) = slack;
+ }
+}
+
+
+static void ShrinkInstanceSize(Map* map, void* data) {
+ int slack = *reinterpret_cast<int*>(data);
+ map->set_inobject_properties(map->inobject_properties() - slack);
+ map->set_unused_property_fields(map->unused_property_fields() - slack);
+ map->set_instance_size(map->instance_size() - slack * kPointerSize);
+
+ // Visitor id might depend on the instance size, recalculate it.
+ map->set_visitor_id(StaticVisitorBase::GetVisitorId(map));
+}
+
+
+void SharedFunctionInfo::CompleteInobjectSlackTracking() {
+ ASSERT(live_objects_may_exist() && IsInobjectSlackTrackingInProgress());
+ Map* map = Map::cast(initial_map());
+
+ set_initial_map(Heap::undefined_value());
+ ASSERT_EQ(Builtins::builtin(Builtins::JSConstructStubCountdown),
+ construct_stub());
+ set_construct_stub(Builtins::builtin(Builtins::JSConstructStubGeneric));
+
+ int slack = map->unused_property_fields();
+ map->TraverseTransitionTree(&GetMinInobjectSlack, &slack);
+ if (slack != 0) {
+ // Resize the initial map and all maps in its transition tree.
+ map->TraverseTransitionTree(&ShrinkInstanceSize, &slack);
+ // Give the correct expected_nof_properties to initial maps created later.
+ ASSERT(expected_nof_properties() >= slack);
+ set_expected_nof_properties(expected_nof_properties() - slack);
+ }
+}
void ObjectVisitor::VisitCodeTarget(RelocInfo* rinfo) {
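CompleteInobjectSlackTracking therefore makes two passes over the tree: the
first finds the minimum unused_property_fields (only slack common to every
map can be reclaimed, since all maps in the tree describe objects allocated
at the same generous size), and the second subtracts it everywhere. For
example, with unused counts of 3, 5 and 2 the common slack is 2, so every
instance gives back two words (8 bytes on a 32-bit heap). Reusing the toy
Traverse from the previous sketch:

  static void GetMinSlack(ToyMap* m, void* data) {
    int* slack = static_cast<int*>(data);
    if (*slack > m->unused_fields) *slack = m->unused_fields;
  }

  static void Shrink(ToyMap* m, void* data) {
    m->unused_fields -= *static_cast<int*>(data);
    // The real code also lowers instance_size and inobject_properties here.
  }

  void CompleteTracking(ToyMap* initial_map) {
    int slack = initial_map->unused_fields;
    Traverse(initial_map, &GetMinSlack, &slack);
    if (slack != 0) Traverse(initial_map, &Shrink, &slack);
  }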
=======================================
--- /branches/bleeding_edge/src/objects.h Thu Sep 16 03:55:37 2010
+++ /branches/bleeding_edge/src/objects.h Thu Sep 23 02:15:26 2010
@@ -1576,7 +1576,7 @@
// initialized by set_properties
// Note: this call does not update write barrier, it is caller's
// responsibility to ensure that *v* can be collected without WB here.
- inline void InitializeBody(int object_size);
+ inline void InitializeBody(int object_size, Object* value);
// Check whether this object references another object
bool ReferencesObject(Object* obj);
@@ -3150,6 +3150,12 @@
inline bool has_fast_elements() {
return ((1 << kHasFastElements) & bit_field2()) != 0;
}
+
+ // Tells whether the map is attached to SharedFunctionInfo
+ // (for inobject slack tracking).
+ inline void set_attached_to_shared_function_info(bool value);
+
+ inline bool attached_to_shared_function_info();
// Tells whether the instance needs security checks when accessing its
// properties.
@@ -3162,6 +3168,8 @@
// [constructor]: points back to the function responsible for this map.
DECL_ACCESSORS(constructor, Object)
+ inline JSFunction* unchecked_constructor();
+
// [instance descriptors]: describes the object.
DECL_ACCESSORS(instance_descriptors, DescriptorArray)
@@ -3240,6 +3248,10 @@
inline int visitor_id();
inline void set_visitor_id(int visitor_id);
+ typedef void (*TraverseCallback)(Map* map, void* data);
+
+ void TraverseTransitionTree(TraverseCallback callback, void* data);
+
static const int kMaxPreAllocatedPropertyFields = 255;
// Layout description.
@@ -3293,6 +3305,7 @@
static const int kFunctionWithPrototype = 1;
static const int kHasFastElements = 2;
static const int kStringWrapperSafeForDefaultValueOf = 3;
+ static const int kAttachedToSharedFunctionInfo = 4;
// Layout of the default cache. It holds alternating name and code objects.
static const int kCodeCacheEntrySize = 2;
@@ -3447,6 +3460,100 @@
inline int expected_nof_properties();
inline void set_expected_nof_properties(int value);
+ // Inobject slack tracking is the way to reclaim unused inobject space.
+ //
+ // The instance size is initially determined by adding some slack to
+ // expected_nof_properties (to allow for a few extra properties added
+ // after the constructor). There is no guarantee that the extra space
+ // will not be wasted.
+ //
+ // Here is the algorithm to reclaim the unused inobject space:
+ // - Detect the first constructor call for this SharedFunctionInfo.
+ // When it happens enter the "in progress" state: remember the
+ // constructor's initial_map and install a special construct stub that
+ // counts constructor calls.
+ // - While the tracking is in progress create objects filled with
+ // one_pointer_filler_map instead of undefined_value. This way they
+ // can be resized quickly and safely.
+ // - Once enough (kGenerousAllocationCount) objects have been created
+ // compute the 'slack' (traverse the map transition tree starting from
+ // the initial_map and find the lowest value of unused_property_fields).
+ // - Traverse the transition tree again and decrease the instance size
+ // of every map. Existing objects will resize automatically (they are
+ // filled with one_pointer_filler_map). All further allocations will
+ // use the adjusted instance size.
+ // - Decrease expected_nof_properties so that allocations made from
+ // another context will use the adjusted instance size too.
+ // - Exit the "in progress" state by clearing the reference to the
+ // initial_map and setting the regular construct stub (generic or
+ // inline).
+ //
+ // The above is the main event sequence. Some special cases are possible
+ // while the tracking is in progress:
+ //
+ // - GC occurs.
+ // Check if the initial_map is referenced by any live objects (except
+ // this SharedFunctionInfo). If it is, continue tracking as usual.
+ // If it is not, clear the reference and reset the tracking state. The
+ // tracking will be initiated again on the next constructor call.
+ //
+ // - The constructor is called from another context.
+ // Immediately complete the tracking, perform all the necessary changes
+ // to maps. This is necessary because there is no efficient way to
+ // track multiple initial_maps.
+ // Proceed to create an object in the current context (with the
+ // adjusted size).
+ //
+ // - A different constructor function sharing the same SharedFunctionInfo
+ // is called in the same context. This could be another closure in the
+ // same context, or the first function could have been disposed.
+ // This is handled the same way as the previous case.
+ //
+ // Important: inobject slack tracking is not attempted during the
+ // snapshot creation.
+
+ static const int kGenerousAllocationCount = 16;
+
+ // [construction_count]: Counter for constructor calls made during
+ // the tracking phase.
+ inline int construction_count();
+ inline void set_construction_count(int value);
+
+ // [initial_map]: initial map of the first function called as a
+ // constructor. Saved for the duration of the tracking phase.
+ // This is a weak link (GC resets it to undefined_value if no other live
+ // objects reference this map).
+ DECL_ACCESSORS(initial_map, Object)
+
+ // True if the initial_map is not undefined and the countdown stub is
+ // installed.
+ inline bool IsInobjectSlackTrackingInProgress();
+
+ // Starts the tracking.
+ // Stores the initial map and installs the countdown stub.
+ // IsInobjectSlackTrackingInProgress is normally true after this call,
+ // except when the tracking has not been started (e.g. the map has no
+ // unused properties or the snapshot is being built).
+ void StartInobjectSlackTracking(Map* map);
+
+ // Completes the tracking.
+ // IsInobjectSlackTrackingInProgress is false after this call.
+ void CompleteInobjectSlackTracking();
+
+ // Clears the initial_map before the GC marking phase to ensure the
+ // reference is weak. IsInobjectSlackTrackingInProgress is false after
+ // this call.
+ void DetachInitialMap();
+
+ // Restores the link to the initial map after the GC marking phase.
+ // IsInobjectSlackTrackingInProgress is true after this call.
+ void AttachInitialMap(Map* map);
+
+ // False if there are definitely no live objects created from this
+ // function. True if live objects _may_ exist (existence not guaranteed).
+ // May go back from true to false after GC.
+ inline bool live_objects_may_exist();
+
+ inline void set_live_objects_may_exist(bool value);
+
// [instance class name]: class name for instances.
DECL_ACCESSORS(instance_class_name, Object)
@@ -3598,8 +3705,10 @@
static const int kScriptOffset = kFunctionDataOffset + kPointerSize;
static const int kDebugInfoOffset = kScriptOffset + kPointerSize;
static const int kInferredNameOffset = kDebugInfoOffset + kPointerSize;
- static const int kThisPropertyAssignmentsOffset =
+ static const int kInitialMapOffset =
kInferredNameOffset + kPointerSize;
+ static const int kThisPropertyAssignmentsOffset =
+ kInitialMapOffset + kPointerSize;
#if V8_HOST_ARCH_32_BIT
// Smi fields.
static const int kLengthOffset =
@@ -3623,7 +3732,7 @@
static const int kSize = kThisPropertyAssignmentsCountOffset + kPointerSize;
#else
// The only reason to use smi fields instead of int fields
- // is to allow interation without maps decoding during
+ // is to allow iteration without maps decoding during
// garbage collections.
// To avoid wasting space on 64-bit architectures we use
// the following trick: we group integer fields into pairs
@@ -3658,6 +3767,18 @@
static const int kSize = kThisPropertyAssignmentsCountOffset + kIntSize;
#endif
+
+ // The construction counter for inobject slack tracking is stored in the
+ // most significant byte of compiler_hints which is otherwise unused.
+ // Its offset depends on the endian-ness of the architecture.
+#if __BYTE_ORDER == __LITTLE_ENDIAN
+ static const int kConstructionCountOffset = kCompilerHintsOffset + 3;
+#elif __BYTE_ORDER == __BIG_ENDIAN
+ static const int kConstructionCountOffset = kCompilerHintsOffset + 0;
+#else
+#error Unknown byte ordering
+#endif
+
static const int kAlignedSize = POINTER_SIZE_ALIGN(kSize);
typedef FixedBodyDescriptor<kNameOffset,
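Storing the counter in the most significant byte of compiler_hints works
because that byte is otherwise unused and a single-byte access (ldrb/strb on
ARM, dec_b on ia32, decb on x64 in the stubs above) can address it directly;
the +3 / +0 offsets simply select that byte for the host endianness. In
plain C++ terms, assuming a little-endian host:

  #include <cstdint>
  #include <cstring>

  // On little-endian hosts the most significant byte of a 32-bit field
  // sits at byte offset +3, which is where kConstructionCountOffset points.
  uint8_t ReadCounter(uint32_t compiler_hints) {
    uint8_t bytes[4];
    std::memcpy(bytes, &compiler_hints, sizeof bytes);
    return bytes[3];  // equals (compiler_hints >> 24) on little-endian
  }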
@@ -3677,7 +3798,8 @@
static const int kHasOnlySimpleThisPropertyAssignments = 0;
static const int kTryFullCodegen = 1;
static const int kAllowLazyCompilation = 2;
- static const int kCodeAgeShift = 3;
+ static const int kLiveObjectsMayExist = 3;
+ static const int kCodeAgeShift = 4;
static const int kCodeAgeMask = 7;
DISALLOW_IMPLICIT_CONSTRUCTORS(SharedFunctionInfo);
=======================================
--- /branches/bleeding_edge/src/runtime.cc Thu Sep 23 01:27:51 2010
+++ /branches/bleeding_edge/src/runtime.cc Thu Sep 23 02:15:26 2010
@@ -6284,7 +6284,7 @@
}
-static Code* ComputeConstructStub(Handle<JSFunction> function) {
+static void TrySettingInlineConstructStub(Handle<JSFunction> function) {
Handle<Object> prototype = Factory::null_value();
if (function->has_instance_prototype()) {
prototype = Handle<Object>(function->instance_prototype());
@@ -6292,13 +6292,10 @@
if (function->shared()->CanGenerateInlineConstructor(*prototype)) {
ConstructStubCompiler compiler;
Object* code = compiler.CompileConstructStub(function->shared());
- if (code->IsFailure()) {
- return Builtins::builtin(Builtins::JSConstructStubGeneric);
- }
- return Code::cast(code);
- }
-
- return function->shared()->construct_stub();
+ if (!code->IsFailure()) {
+ function->shared()->set_construct_stub(Code::cast(code));
+ }
+ }
}
@@ -6355,12 +6352,20 @@
Handle<SharedFunctionInfo> shared(function->shared());
EnsureCompiled(shared, CLEAR_EXCEPTION);
- bool first_allocation = !function->has_initial_map();
+ if (!function->has_initial_map() &&
+ shared->IsInobjectSlackTrackingInProgress()) {
+ // The tracking is already in progress for another function. We can
+ // only track one initial_map at a time, so we force the completion
+ // before the function is called as a constructor for the first time.
+ shared->CompleteInobjectSlackTracking();
+ TrySettingInlineConstructStub(function);
+ }
+
+ bool first_allocation = !shared->live_objects_may_exist();
Handle<JSObject> result = Factory::NewJSObject(function);
- if (first_allocation) {
- Handle<Code> stub = Handle<Code>(
- ComputeConstructStub(Handle<JSFunction>(function)));
- shared->set_construct_stub(*stub);
+ // Delay setting the stub if inobject slack tracking is in progress.
+ if (first_allocation && !shared->IsInobjectSlackTrackingInProgress()) {
+ TrySettingInlineConstructStub(function);
}
Counters::constructed_objects.Increment();
@@ -6368,6 +6373,18 @@
return *result;
}
+
+
+static Object* Runtime_FinalizeInstanceSize(Arguments args) {
+ HandleScope scope;
+ ASSERT(args.length() == 1);
+
+ CONVERT_ARG_CHECKED(JSFunction, function, 0);
+ function->shared()->CompleteInobjectSlackTracking();
+ TrySettingInlineConstructStub(function);
+
+ return Heap::undefined_value();
+}
static Object* Runtime_LazyCompile(Arguments args) {
=======================================
--- /branches/bleeding_edge/src/runtime.h Thu Sep 16 14:40:42 2010
+++ /branches/bleeding_edge/src/runtime.h Thu Sep 23 02:15:26 2010
@@ -263,6 +263,7 @@
F(NewClosure, 2, 1) \
F(NewObject, 1, 1) \
F(NewObjectFromBound, 2, 1) \
+ F(FinalizeInstanceSize, 1, 1) \
F(Throw, 1, 1) \
F(ReThrow, 1, 1) \
F(ThrowReferenceError, 1, 1) \
=======================================
--- /branches/bleeding_edge/src/x64/builtins-x64.cc Thu Aug 26 06:59:37 2010
+++ /branches/bleeding_edge/src/x64/builtins-x64.cc Thu Sep 23 02:15:26 2010
@@ -913,7 +913,11 @@
static void Generate_JSConstructStubHelper(MacroAssembler* masm,
- bool is_api_function) {
+ bool is_api_function,
+ bool count_constructions) {
+ // Should never count constructions for api objects.
+ ASSERT(!is_api_function || !count_constructions);
+
// Enter a construct frame.
__ EnterConstructFrame();
@@ -958,6 +962,26 @@
__ CmpInstanceType(rax, JS_FUNCTION_TYPE);
__ j(equal, &rt_call);
+ if (count_constructions) {
+ Label allocate;
+ // Decrease generous allocation count.
+ __ movq(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
+ __ decb(FieldOperand(rcx, SharedFunctionInfo::kConstructionCountOffset));
+ __ j(not_zero, &allocate);
+
+ __ push(rax);
+ __ push(rdi);
+
+ __ push(rdi); // constructor
+ // The call will replace the stub, so the countdown is only done once.
+ __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);
+
+ __ pop(rdi);
+ __ pop(rax);
+
+ __ bind(&allocate);
+ }
+
// Now allocate the JSObject on the heap.
__ movzxbq(rdi, FieldOperand(rax, Map::kInstanceSizeOffset));
__ shl(rdi, Immediate(kPointerSizeLog2));
@@ -981,7 +1005,12 @@
// rbx: JSObject
// rdi: start of next object
{ Label loop, entry;
- __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
+ // To allow for truncation.
+ if (count_constructions) {
+ __ LoadRoot(rdx, Heap::kOnePointerFillerMapRootIndex);
+ } else {
+ __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
+ }
__ lea(rcx, Operand(rbx, JSObject::kHeaderSize));
__ jmp(&entry);
__ bind(&loop);
@@ -1162,15 +1191,20 @@
__ IncrementCounter(&Counters::constructed_objects, 1);
__ ret(0);
}
+
+
+void Builtins::Generate_JSConstructStubCountdown(MacroAssembler* masm) {
+ Generate_JSConstructStubHelper(masm, false, true);
+}
void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
- Generate_JSConstructStubHelper(masm, false);
+ Generate_JSConstructStubHelper(masm, false, false);
}
void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
- Generate_JSConstructStubHelper(masm, true);
+ Generate_JSConstructStubHelper(masm, true, false);
}
=======================================
--- /branches/bleeding_edge/test/cctest/test-api.cc Fri Sep 17 02:56:47 2010
+++ /branches/bleeding_edge/test/cctest/test-api.cc Thu Sep 23 02:15:26 2010
@@ -10947,7 +10947,9 @@
CompileRun(source);
script = v8::Script::Compile(v8_str("new C1();"));
- for (int i = 0; i < 10; i++) {
+ // Allow enough iterations for the inobject slack tracking logic
+ // to finalize instance size and install the fast construct stub.
+ for (int i = 0; i < 256; i++) {
v8::Handle<v8::Object> c1 = v8::Handle<v8::Object>::Cast(script->Run());
CHECK_EQ(23, c1->Get(v8_str("x"))->Int32Value());
CHECK_EQ(42, c1->Get(v8_str("y"))->Int32Value());