Revision: 5516
Author: [email protected]
Date: Thu Sep 23 14:56:02 2010
Log: During GC the StaticVisitor is used heavily; adding a Heap*
parameter makes it possible to remove the Isolate::Current() lookup from
some very hot and cheap operations (like marking), giving a measurable
improvement.
Review URL: http://codereview.chromium.org/3397021
http://code.google.com/p/v8/source/detail?r=5516
Modified:
/branches/experimental/isolates/src/arm/assembler-arm-inl.h
/branches/experimental/isolates/src/assembler.h
/branches/experimental/isolates/src/globals.h
/branches/experimental/isolates/src/heap.cc
/branches/experimental/isolates/src/ia32/assembler-ia32-inl.h
/branches/experimental/isolates/src/mark-compact.cc
/branches/experimental/isolates/src/objects-debug.cc
/branches/experimental/isolates/src/objects-visiting.h
/branches/experimental/isolates/src/objects.h
/branches/experimental/isolates/src/spaces.h
/branches/experimental/isolates/src/x64/assembler-x64-inl.h
/branches/experimental/isolates/test/cctest/test-spaces.cc
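In a nutshell: the static visitor callbacks gain an explicit Heap* argument,
so the hot per-pointer operations cost an argument pass and a field load
instead of a thread-local Isolate::Current() lookup. A minimal before/after
sketch (hypothetical class name; NewSpaceScavenger in heap.cc below is the
real instance):

  // Before: every pointer visit re-derives the heap from thread-local state.
  struct ScavengeVisitor {
    static inline void VisitPointer(Object** p) {
      if (!Isolate::Current()->heap()->InNewSpace(*p)) return;  // TLS lookup
      // ... scavenge the object ...
    }
  };

  // After: the caller threads the Heap* through explicitly.
  struct ScavengeVisitor {
    static inline void VisitPointer(Heap* heap, Object** p) {
      if (!heap->InNewSpace(*p)) return;  // plain field access, no TLS
      // ... scavenge the object ...
    }
  };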
=======================================
--- /branches/experimental/isolates/src/arm/assembler-arm-inl.h Wed Sep 1 10:01:38 2010
+++ /branches/experimental/isolates/src/arm/assembler-arm-inl.h Thu Sep 23 14:56:02 2010
@@ -192,16 +192,16 @@
template<typename StaticVisitor>
-void RelocInfo::Visit() {
+void RelocInfo::Visit(Heap* heap) {
RelocInfo::Mode mode = rmode();
if (mode == RelocInfo::EMBEDDED_OBJECT) {
- StaticVisitor::VisitPointer(target_object_address());
+ StaticVisitor::VisitPointer(heap, target_object_address());
} else if (RelocInfo::IsCodeTarget(mode)) {
StaticVisitor::VisitCodeTarget(this);
} else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
StaticVisitor::VisitExternalReference(target_reference_address());
#ifdef ENABLE_DEBUGGER_SUPPORT
- } else if (Isolate::Current()->debug()->has_break_points() &&
+ } else if (heap->isolate()->debug()->has_break_points() &&
((RelocInfo::IsJSReturn(mode) &&
IsPatchedReturnSequence()) ||
(RelocInfo::IsDebugBreakSlot(mode) &&
=======================================
--- /branches/experimental/isolates/src/assembler.h Wed Sep 1 10:01:38 2010
+++ /branches/experimental/isolates/src/assembler.h Thu Sep 23 14:56:02 2010
@@ -234,7 +234,7 @@
INLINE(void set_call_object(Object* target));
INLINE(Object** call_object_address());
- template<typename StaticVisitor> inline void Visit();
+ template<typename StaticVisitor> inline void Visit(Heap* heap);
inline void Visit(ObjectVisitor* v);
// Patch the code with some other code.
=======================================
--- /branches/experimental/isolates/src/globals.h Thu Sep 9 17:53:48 2010
+++ /branches/experimental/isolates/src/globals.h Thu Sep 23 14:56:02 2010
@@ -214,6 +214,12 @@
const intptr_t kMapAlignment = (1 << kMapAlignmentBits);
const intptr_t kMapAlignmentMask = kMapAlignment - 1;
+// Desired alignment for generated code.
+// Code entry points are aligned to 32 bytes (cache line size in some CPUs).
+const int kCodeAlignmentBits = 5;
+const intptr_t kCodeAlignment = 1 << kCodeAlignmentBits;
+const intptr_t kCodeAlignmentMask = kCodeAlignment - 1;
+
// Tag information for Failure.
const int kFailureTag = 3;
const int kFailureTagSize = 2;
@@ -589,6 +595,10 @@
#define MAP_POINTER_ALIGN(value) \
(((value) + kMapAlignmentMask) & ~kMapAlignmentMask)
+// CODE_POINTER_ALIGN returns the value aligned as a generated code segment.
+#define CODE_POINTER_ALIGN(value) \
+ (((value) + kCodeAlignmentMask) & ~kCodeAlignmentMask)
+
// The expression OFFSET_OF(type, field) computes the byte-offset
// of the specified field relative to the containing type. This
// corresponds to 'offsetof' (in stddef.h), except that it doesn't
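For reference, the new macro rounds its argument up to the next multiple of
kCodeAlignment (32 bytes). A standalone sanity check, using the constants
exactly as defined above:

  #include <cassert>
  #include <stdint.h>

  const int kCodeAlignmentBits = 5;
  const intptr_t kCodeAlignment = 1 << kCodeAlignmentBits;  // 32
  const intptr_t kCodeAlignmentMask = kCodeAlignment - 1;   // 31 (0x1f)

  #define CODE_POINTER_ALIGN(value) \
    (((value) + kCodeAlignmentMask) & ~kCodeAlignmentMask)

  int main() {
    assert(CODE_POINTER_ALIGN(1) == 32);   // rounds up
    assert(CODE_POINTER_ALIGN(32) == 32);  // already aligned, unchanged
    assert(CODE_POINTER_ALIGN(33) == 64);  // next 32-byte boundary
    return 0;
  }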
=======================================
--- /branches/experimental/isolates/src/heap.cc Thu Sep 16 17:50:24 2010
+++ /branches/experimental/isolates/src/heap.cc Thu Sep 23 14:56:02 2010
@@ -1039,9 +1039,9 @@
class NewSpaceScavenger : public StaticNewSpaceVisitor<NewSpaceScavenger> {
public:
- static inline void VisitPointer(Object** p) {
+ static inline void VisitPointer(Heap* heap, Object** p) {
Object* object = *p;
- if (!HEAP->InNewSpace(object)) return;
+ if (!heap->InNewSpace(object)) return;
Heap::ScavengeObject(reinterpret_cast<HeapObject**>(p),
reinterpret_cast<HeapObject*>(object));
}
@@ -2390,7 +2390,7 @@
// Compute size
int body_size = RoundUp(desc.instr_size, kObjectAlignment);
int obj_size = Code::SizeFor(body_size);
- ASSERT(IsAligned(obj_size, Code::kCodeAlignment));
+ ASSERT(IsAligned(static_cast<intptr_t>(obj_size), kCodeAlignment));
Object* result;
if (obj_size > MaxObjectSizeInPagedSpace()) {
result = lo_space_->AllocateRawCode(obj_size);
@@ -2787,7 +2787,7 @@
// Setup the global object as a normalized object.
global->set_map(new_map);
- global->map()->set_instance_descriptors(HEAP->empty_descriptor_array());
+ global->map()->set_instance_descriptors(empty_descriptor_array());
global->set_properties(dictionary);
// Make sure result is a global object with properties in dictionary.
=======================================
--- /branches/experimental/isolates/src/ia32/assembler-ia32-inl.h Wed Sep 1 10:01:38 2010
+++ /branches/experimental/isolates/src/ia32/assembler-ia32-inl.h Thu Sep 23 14:56:02 2010
@@ -185,16 +185,16 @@
template<typename StaticVisitor>
-void RelocInfo::Visit() {
+void RelocInfo::Visit(Heap* heap) {
RelocInfo::Mode mode = rmode();
if (mode == RelocInfo::EMBEDDED_OBJECT) {
- StaticVisitor::VisitPointer(target_object_address());
+ StaticVisitor::VisitPointer(heap, target_object_address());
} else if (RelocInfo::IsCodeTarget(mode)) {
StaticVisitor::VisitCodeTarget(this);
} else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
StaticVisitor::VisitExternalReference(target_reference_address());
#ifdef ENABLE_DEBUGGER_SUPPORT
- } else if (Isolate::Current()->debug()->has_break_points() &&
+ } else if (heap->isolate()->debug()->has_break_points() &&
((RelocInfo::IsJSReturn(mode) &&
IsPatchedReturnSequence()) ||
(RelocInfo::IsDebugBreakSlot(mode) &&
=======================================
--- /branches/experimental/isolates/src/mark-compact.cc Thu Sep 16 17:50:24 2010
+++ /branches/experimental/isolates/src/mark-compact.cc Thu Sep 23 14:56:02 2010
@@ -320,18 +320,18 @@
kVisitStructGeneric>();
}
- INLINE(static void VisitPointer(Object** p)) {
- MarkObjectByPointer(p);
+ INLINE(static void VisitPointer(Heap* heap, Object** p)) {
+ MarkObjectByPointer(heap, p);
}
- INLINE(static void VisitPointers(Object** start, Object** end)) {
+ INLINE(static void VisitPointers(Heap* heap, Object** start, Object** end)) {
// Mark all objects pointed to in [start, end).
const int kMinRangeForMarkingRecursion = 64;
if (end - start >= kMinRangeForMarkingRecursion) {
- if (VisitUnmarkedObjects(start, end)) return;
+ if (VisitUnmarkedObjects(heap, start, end)) return;
// We are close to a stack overflow, so just mark the objects.
}
- for (Object** p = start; p < end; p++) MarkObjectByPointer(p);
+ for (Object** p = start; p < end; p++) MarkObjectByPointer(heap, p);
}
static inline void VisitCodeTarget(RelocInfo* rinfo) {
@@ -356,10 +356,10 @@
}
// Mark object pointed to by p.
- INLINE(static void MarkObjectByPointer(Object** p)) {
+ INLINE(static void MarkObjectByPointer(Heap* heap, Object** p)) {
if (!(*p)->IsHeapObject()) return;
HeapObject* object = ShortCircuitConsString(p);
- HEAP->mark_compact_collector()->MarkObject(object);
+ heap->mark_compact_collector()->MarkObject(object);
}
@@ -370,17 +370,20 @@
ASSERT(!obj->IsMarked());
#endif
Map* map = obj->map();
- map->heap()->mark_compact_collector()->SetMark(obj);
+ MarkCompactCollector* collector = map->heap()->mark_compact_collector();
+ collector->SetMark(obj);
// Mark the map pointer and the body.
- map->heap()->mark_compact_collector()->MarkObject(map);
+ collector->MarkObject(map);
IterateBody(map, obj);
}
// Visit all unmarked objects pointed to by [start, end).
// Returns false if the operation fails (lack of stack space).
- static inline bool VisitUnmarkedObjects(Object** start, Object** end) {
+ static inline bool VisitUnmarkedObjects(Heap* heap,
+ Object** start,
+ Object** end) {
// Return false if we are close to the stack limit.
- StackLimitCheck check(Isolate::Current());
+ StackLimitCheck check(heap->isolate());
if (check.HasOverflowed()) return false;
// Visit the unmarked objects.
@@ -416,7 +419,8 @@
void> StructObjectVisitor;
static void VisitCode(Map* map, HeapObject* object) {
- reinterpret_cast<Code*>(object)->CodeIterateBody<StaticMarkingVisitor>();
+ reinterpret_cast<Code*>(object)->CodeIterateBody<StaticMarkingVisitor>(
+ map->heap());
}
// Code flushing support.
@@ -536,10 +540,10 @@
}
- static void VisitCodeEntry(Address entry_address) {
+ static void VisitCodeEntry(Heap* heap, Address entry_address) {
Object* code = Code::GetObjectFromEntryAddress(entry_address);
Object* old_code = code;
- VisitPointer(&code);
+ VisitPointer(heap, &code);
if (code != old_code) {
Memory::Address_at(entry_address) =
reinterpret_cast<Code*>(code)->entry();
@@ -560,13 +564,15 @@
static void VisitJSFunction(Map* map, HeapObject* object) {
#define SLOT_ADDR(obj, offset) \
reinterpret_cast<Object**>((obj)->address() + offset)
-
- VisitPointers(SLOT_ADDR(object, JSFunction::kPropertiesOffset),
+ Heap* heap = map->heap();
+ VisitPointers(heap,
+ SLOT_ADDR(object, JSFunction::kPropertiesOffset),
SLOT_ADDR(object, JSFunction::kCodeEntryOffset));
- VisitCodeEntry(object->address() + JSFunction::kCodeEntryOffset);
-
- VisitPointers(SLOT_ADDR(object,
+ VisitCodeEntry(heap, object->address() + JSFunction::kCodeEntryOffset);
+
+ VisitPointers(heap,
+ SLOT_ADDR(object,
JSFunction::kCodeEntryOffset + kPointerSize),
SLOT_ADDR(object, JSFunction::kSize));
#undef SLOT_ADDR
@@ -585,12 +591,14 @@
class MarkingVisitor : public ObjectVisitor {
public:
+ explicit MarkingVisitor(Heap* heap) : heap_(heap) { }
+
void VisitPointer(Object** p) {
- StaticMarkingVisitor::VisitPointer(p);
+ StaticMarkingVisitor::VisitPointer(heap_, p);
}
void VisitPointers(Object** start, Object** end) {
- StaticMarkingVisitor::VisitPointers(start, end);
+ StaticMarkingVisitor::VisitPointers(heap_, start, end);
}
void VisitCodeTarget(RelocInfo* rinfo) {
@@ -600,6 +608,9 @@
void VisitDebugTarget(RelocInfo* rinfo) {
StaticMarkingVisitor::VisitDebugTarget(rinfo);
}
+
+ private:
+ Heap* heap_;
};
@@ -664,6 +675,9 @@
// Visitor class for marking heap roots.
class RootMarkingVisitor : public ObjectVisitor {
public:
+ explicit RootMarkingVisitor(Heap* heap)
+ : collector_(heap->mark_compact_collector()) { }
+
void VisitPointer(Object** p) {
MarkObjectByPointer(p);
}
@@ -682,16 +696,18 @@
Map* map = object->map();
// Mark the object.
- HEAP->mark_compact_collector()->SetMark(object);
+ collector_->SetMark(object);
// Mark the map pointer and body, and push them on the marking stack.
- HEAP->mark_compact_collector()->MarkObject(map);
+ collector_->MarkObject(map);
StaticMarkingVisitor::IterateBody(map, object);
// Mark all the objects reachable from the map and body. May leave
// overflowed objects in the heap.
- HEAP->mark_compact_collector()->EmptyMarkingStack();
- }
+ collector_->EmptyMarkingStack();
+ }
+
+ MarkCompactCollector* collector_;
};
@@ -762,7 +778,7 @@
Object** end_slot = HeapObject::RawField(map,
Map::kPointerFieldsEndOffset);
- StaticMarkingVisitor::VisitPointers(start_slot, end_slot);
+ StaticMarkingVisitor::VisitPointers(map->heap(), start_slot, end_slot);
}
@@ -861,11 +877,11 @@
void MarkCompactCollector::MarkSymbolTable() {
- SymbolTable* symbol_table = HEAP->raw_unchecked_symbol_table();
+ SymbolTable* symbol_table = heap_->raw_unchecked_symbol_table();
// Mark the symbol table itself.
SetMark(symbol_table);
// Explicitly mark the prefix.
- MarkingVisitor marker;
+ MarkingVisitor marker(heap_);
symbol_table->IteratePrefix(&marker);
ProcessMarkingStack();
}
@@ -930,7 +946,7 @@
while (!marking_stack_.is_empty()) {
HeapObject* object = marking_stack_.Pop();
ASSERT(object->IsHeapObject());
- ASSERT(HEAP->Contains(object));
+ ASSERT(heap_->Contains(object));
ASSERT(object->IsMarked());
ASSERT(!object->IsOverflowed());
@@ -1019,14 +1035,14 @@
#endif
// The to space contains live objects, the from space is used as a marking
// stack.
- marking_stack_.Initialize(HEAP->new_space()->FromSpaceLow(),
- HEAP->new_space()->FromSpaceHigh());
+ marking_stack_.Initialize(heap_->new_space()->FromSpaceLow(),
+ heap_->new_space()->FromSpaceHigh());
ASSERT(!marking_stack_.overflowed());
PrepareForCodeFlushing();
- RootMarkingVisitor root_visitor;
+ RootMarkingVisitor root_visitor(heap_);
MarkRoots(&root_visitor);
// The objects reachable from the roots are marked, yet unreachable
@@ -1437,14 +1453,14 @@
class StaticPointersToNewGenUpdatingVisitor : public
StaticNewSpaceVisitor<StaticPointersToNewGenUpdatingVisitor> {
public:
- static inline void VisitPointer(Object** p) {
+ static inline void VisitPointer(Heap* heap, Object** p) {
if (!(*p)->IsHeapObject()) return;
HeapObject* obj = HeapObject::cast(*p);
Address old_addr = obj->address();
- if (HEAP->new_space()->Contains(obj)) {
- ASSERT(HEAP->InFromSpace(*p));
+ if (heap->new_space()->Contains(obj)) {
+ ASSERT(heap->InFromSpace(*p));
*p = HeapObject::FromAddress(Memory::Address_at(old_addr));
}
}
@@ -1455,13 +1471,15 @@
// It does not expect to encounter pointers to dead objects.
class PointersToNewGenUpdatingVisitor: public ObjectVisitor {
public:
+ explicit PointersToNewGenUpdatingVisitor(Heap* heap) : heap_(heap) { }
+
void VisitPointer(Object** p) {
- StaticPointersToNewGenUpdatingVisitor::VisitPointer(p);
+ StaticPointersToNewGenUpdatingVisitor::VisitPointer(heap_, p);
}
void VisitPointers(Object** start, Object** end) {
for (Object** p = start; p < end; p++) {
- StaticPointersToNewGenUpdatingVisitor::VisitPointer(p);
+ StaticPointersToNewGenUpdatingVisitor::VisitPointer(heap_, p);
}
}
@@ -1481,6 +1499,8 @@
VisitPointer(&target);
rinfo->set_call_address(Code::cast(target)->instruction_start());
}
+ private:
+ Heap* heap_;
};
@@ -1597,7 +1617,7 @@
}
// Second pass: find pointers to new space and update them.
- PointersToNewGenUpdatingVisitor updating_visitor;
+ PointersToNewGenUpdatingVisitor updating_visitor(heap);
// Update pointers in to space.
Address current = space->bottom();
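The hunks above all follow the same adapter shape: each dynamic (virtual)
ObjectVisitor captures the Heap* once at construction and forwards it to the
static visitor, so nothing on the per-pointer path touches thread-local
state. Stripped to its essentials (hypothetical name; MarkingVisitor above
is the real instance):

  class HeapThreadingVisitor : public ObjectVisitor {
   public:
    explicit HeapThreadingVisitor(Heap* heap) : heap_(heap) { }

    virtual void VisitPointer(Object** p) {
      // heap_ was captured once; each visit is just a forwarding call.
      StaticMarkingVisitor::VisitPointer(heap_, p);
    }

   private:
    Heap* heap_;
  };

Call sites construct it with the collector's heap, as in
MarkingVisitor marker(heap_) above.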
=======================================
--- /branches/experimental/isolates/src/objects-debug.cc Wed Sep 1 10:01:38 2010
+++ /branches/experimental/isolates/src/objects-debug.cc Thu Sep 23 14:56:02 2010
@@ -904,7 +904,7 @@
void Code::CodeVerify() {
CHECK(IsAligned(reinterpret_cast<intptr_t>(instruction_start()),
- static_cast<intptr_t>(kCodeAlignment)));
+ kCodeAlignment));
Address last_gc_pc = NULL;
for (RelocIterator it(this); !it.done(); it.next()) {
it.rinfo()->Verify();
=======================================
--- /branches/experimental/isolates/src/objects-visiting.h Thu Sep 9 17:53:48 2010
+++ /branches/experimental/isolates/src/objects-visiting.h Thu Sep 23 14:56:02 2010
@@ -187,14 +187,15 @@
template<typename StaticVisitor>
class BodyVisitorBase : public AllStatic {
public:
- static inline void IteratePointers(HeapObject* object,
+ static inline void IteratePointers(Heap* heap,
+ HeapObject* object,
int start_offset,
int end_offset) {
Object** start_slot = reinterpret_cast<Object**>(object->address() +
start_offset);
Object** end_slot = reinterpret_cast<Object**>(object->address() +
end_offset);
- StaticVisitor::VisitPointers(start_slot, end_slot);
+ StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}
};
@@ -205,7 +206,10 @@
static inline ReturnType Visit(Map* map, HeapObject* object) {
int object_size = BodyDescriptor::SizeOf(map, object);
BodyVisitorBase<StaticVisitor>::IteratePointers(
- object, BodyDescriptor::kStartOffset, object_size);
+ map->heap(),
+ object,
+ BodyDescriptor::kStartOffset,
+ object_size);
return static_cast<ReturnType>(object_size);
}
@@ -213,7 +217,10 @@
static inline ReturnType VisitSpecialized(Map* map, HeapObject* object) {
ASSERT(BodyDescriptor::SizeOf(map, object) == object_size);
BodyVisitorBase<StaticVisitor>::IteratePointers(
- object, BodyDescriptor::kStartOffset, object_size);
+ map->heap(),
+ object,
+ BodyDescriptor::kStartOffset,
+ object_size);
return static_cast<ReturnType>(object_size);
}
};
@@ -224,7 +231,10 @@
public:
static inline ReturnType Visit(Map* map, HeapObject* object) {
BodyVisitorBase<StaticVisitor>::IteratePointers(
- object, BodyDescriptor::kStartOffset, BodyDescriptor::kEndOffset);
+ map->heap(),
+ object,
+ BodyDescriptor::kStartOffset,
+ BodyDescriptor::kEndOffset);
return static_cast<ReturnType>(BodyDescriptor::kSize);
}
};
@@ -295,8 +305,8 @@
return table_.GetVisitor(map)(map, obj);
}
- static inline void VisitPointers(Object** start, Object** end) {
- for (Object** p = start; p < end; p++) StaticVisitor::VisitPointer(p);
+ static inline void VisitPointers(Heap* heap, Object** start, Object** end) {
+ for (Object** p = start; p < end; p++) StaticVisitor::VisitPointer(heap, p);
}
private:
@@ -368,7 +378,7 @@
template<typename StaticVisitor>
-void Code::CodeIterateBody() {
+void Code::CodeIterateBody(Heap* heap) {
int mode_mask = RelocInfo::kCodeTargetMask |
RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
@@ -381,10 +391,11 @@
RelocIterator it(this, mode_mask);
StaticVisitor::VisitPointer(
+ heap,
reinterpret_cast<Object**>(this->address() + kRelocationInfoOffset));
for (; !it.done(); it.next()) {
- it.rinfo()->template Visit<StaticVisitor>();
+ it.rinfo()->template Visit<StaticVisitor>(heap);
}
}
=======================================
--- /branches/experimental/isolates/src/objects.h Wed Sep 15 08:20:49 2010
+++ /branches/experimental/isolates/src/objects.h Thu Sep 23 14:56:02 2010
@@ -2994,15 +2994,11 @@
inline void CodeIterateBody(ObjectVisitor* v);
template<typename StaticVisitor>
- inline void CodeIterateBody();
+ inline void CodeIterateBody(Heap* heap);
#ifdef DEBUG
void CodePrint();
void CodeVerify();
#endif
- // Code entry points are aligned to 32 bytes.
- static const int kCodeAlignmentBits = 5;
- static const int kCodeAlignment = 1 << kCodeAlignmentBits;
- static const int kCodeAlignmentMask = kCodeAlignment - 1;
// Layout description.
static const int kInstructionSizeOffset = HeapObject::kHeaderSize;
=======================================
--- /branches/experimental/isolates/src/spaces.h Thu Sep 16 17:50:24 2010
+++ /branches/experimental/isolates/src/spaces.h Thu Sep 23 14:56:02 2010
@@ -245,8 +245,11 @@
static const int kPageHeaderSize = kPointerSize + kPointerSize + kIntSize +
kIntSize + kPointerSize + kPointerSize;
- // The start offset of the object area in a page.
- static const int kObjectStartOffset = MAP_POINTER_ALIGN(kPageHeaderSize);
+ // The start offset of the object area in a page. Aligned to both maps and
+ // code alignment to be suitable for both.
+ static const int kObjectStartOffset =
+ CODE_POINTER_ALIGN(MAP_POINTER_ALIGN(kPageHeaderSize));
+
// Object area size in bytes.
static const int kObjectAreaSize = kPageSize - kObjectStartOffset;
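Nesting the two macros is sound because both alignments are powers of two:
rounding a value that is already a multiple of one power of two up to another
power of two leaves it a multiple of both. A small check with illustrative
values (32 bytes for both; the real constants are platform-dependent):

  #include <cassert>
  #include <stdint.h>

  const intptr_t kMapAlignmentMask = (1 << 5) - 1;   // assume 32-byte maps
  const intptr_t kCodeAlignmentMask = (1 << 5) - 1;  // 32-byte code, as above

  #define MAP_POINTER_ALIGN(value) \
    (((value) + kMapAlignmentMask) & ~kMapAlignmentMask)
  #define CODE_POINTER_ALIGN(value) \
    (((value) + kCodeAlignmentMask) & ~kCodeAlignmentMask)

  int main() {
    // e.g. a 24-byte header: 24 -> 32 (map align) -> 32 (code align)
    intptr_t offset = CODE_POINTER_ALIGN(MAP_POINTER_ALIGN(24));
    assert(offset == 32);
    assert(offset % 32 == 0);  // satisfies both alignments
    return 0;
  }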
=======================================
--- /branches/experimental/isolates/src/x64/assembler-x64-inl.h Wed Sep 1 10:01:38 2010
+++ /branches/experimental/isolates/src/x64/assembler-x64-inl.h Thu Sep 23 14:56:02 2010
@@ -352,16 +352,16 @@
template<typename StaticVisitor>
-void RelocInfo::Visit() {
+void RelocInfo::Visit(Heap* heap) {
RelocInfo::Mode mode = rmode();
if (mode == RelocInfo::EMBEDDED_OBJECT) {
- StaticVisitor::VisitPointer(target_object_address());
+ StaticVisitor::VisitPointer(heap, target_object_address());
} else if (RelocInfo::IsCodeTarget(mode)) {
StaticVisitor::VisitCodeTarget(this);
} else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
StaticVisitor::VisitExternalReference(target_reference_address());
#ifdef ENABLE_DEBUGGER_SUPPORT
- } else if (Isolate::Current()->debug()->has_break_points() &&
+ } else if (heap->isolate()->debug()->has_break_points() &&
((RelocInfo::IsJSReturn(mode) &&
IsPatchedReturnSequence()) ||
(RelocInfo::IsDebugBreakSlot(mode) &&
=======================================
--- /branches/experimental/isolates/test/cctest/test-spaces.cc Fri Sep 10 11:52:02 2010
+++ /branches/experimental/isolates/test/cctest/test-spaces.cc Thu Sep 23 14:56:02 2010
@@ -65,6 +65,8 @@
Address page_start = RoundUp(start, Page::kPageSize);
Page* p = Page::FromAddress(page_start);
+ // An initialized Page has a heap pointer, normally set by the memory_allocator.
+ p->heap_ = HEAP;
CHECK(p->address() == page_start);
CHECK(p->is_valid());