Author: [email protected]
Date: Wed May 20 01:05:12 2009
New Revision: 2007
Modified:
branches/bleeding_edge/src/heap-inl.h
branches/bleeding_edge/src/heap.cc
branches/bleeding_edge/src/objects.h
branches/bleeding_edge/src/runtime.cc
branches/bleeding_edge/src/spaces-inl.h
branches/bleeding_edge/src/spaces.h
branches/bleeding_edge/src/utils.h
branches/bleeding_edge/src/v8threads.cc
Log:
X64: Disabled RSet in 64-bit mode.
Made a few more places use intptr_t instead of int for pointer arithmetic.
Ensure that objects have a declared size that matches heap object alignment.
Review URL: http://codereview.chromium.org/115559
Modified: branches/bleeding_edge/src/heap-inl.h
==============================================================================
--- branches/bleeding_edge/src/heap-inl.h (original)
+++ branches/bleeding_edge/src/heap-inl.h Wed May 20 01:05:12 2009
@@ -145,7 +145,9 @@
if (new_space_.Contains(address)) return;
ASSERT(!new_space_.FromSpaceContains(address));
SLOW_ASSERT(Contains(address + offset));
+#ifndef V8_HOST_ARCH_64_BIT
Page::SetRSet(address, offset);
+#endif // V8_HOST_ARCH_64_BIT
}
Modified: branches/bleeding_edge/src/heap.cc
==============================================================================
--- branches/bleeding_edge/src/heap.cc (original)
+++ branches/bleeding_edge/src/heap.cc Wed May 20 01:05:12 2009
@@ -667,11 +667,33 @@
// Copy objects reachable from weak pointers.
GlobalHandles::IterateWeakRoots(&scavenge_visitor);
+#if V8_HOST_ARCH_64_BIT
+ // TODO(X64): Make this go away again. We currently disable RSets for
+ // 64-bit-mode.
+ HeapObjectIterator old_pointer_iterator(old_pointer_space_);
+ while (old_pointer_iterator.has_next()) {
+ HeapObject* heap_object = old_pointer_iterator.next();
+ heap_object->Iterate(&scavenge_visitor);
+ }
+ HeapObjectIterator map_iterator(map_space_);
+ while (map_iterator.has_next()) {
+ HeapObject* heap_object = map_iterator.next();
+ heap_object->Iterate(&scavenge_visitor);
+ }
+ LargeObjectIterator lo_iterator(lo_space_);
+ while (lo_iterator.has_next()) {
+ HeapObject* heap_object = lo_iterator.next();
+ if (heap_object->IsFixedArray()) {
+ heap_object->Iterate(&scavenge_visitor);
+ }
+ }
+#else // V8_HOST_ARCH_64_BIT
// Copy objects reachable from the old generation. By definition,
// there are no intergenerational pointers in code or data spaces.
IterateRSet(old_pointer_space_, &ScavengePointer);
IterateRSet(map_space_, &ScavengePointer);
lo_space_->IterateRSet(&ScavengePointer);
+#endif // V8_HOST_ARCH_64_BIT
do {
ASSERT(new_space_front <= new_space_.top());
@@ -999,7 +1021,7 @@
meta_map_ = reinterpret_cast<Map*>(obj);
meta_map()->set_map(meta_map());
- obj = AllocatePartialMap(FIXED_ARRAY_TYPE, Array::kHeaderSize);
+ obj = AllocatePartialMap(FIXED_ARRAY_TYPE, FixedArray::kHeaderSize);
if (obj->IsFailure()) return false;
fixed_array_map_ = Map::cast(obj);
@@ -1056,37 +1078,37 @@
STRING_TYPE_LIST(ALLOCATE_STRING_MAP);
#undef ALLOCATE_STRING_MAP
- obj = AllocateMap(SHORT_STRING_TYPE, SeqTwoByteString::kHeaderSize);
+ obj = AllocateMap(SHORT_STRING_TYPE, SeqTwoByteString::kAlignedSize);
if (obj->IsFailure()) return false;
undetectable_short_string_map_ = Map::cast(obj);
undetectable_short_string_map_->set_is_undetectable();
- obj = AllocateMap(MEDIUM_STRING_TYPE, SeqTwoByteString::kHeaderSize);
+ obj = AllocateMap(MEDIUM_STRING_TYPE, SeqTwoByteString::kAlignedSize);
if (obj->IsFailure()) return false;
undetectable_medium_string_map_ = Map::cast(obj);
undetectable_medium_string_map_->set_is_undetectable();
- obj = AllocateMap(LONG_STRING_TYPE, SeqTwoByteString::kHeaderSize);
+ obj = AllocateMap(LONG_STRING_TYPE, SeqTwoByteString::kAlignedSize);
if (obj->IsFailure()) return false;
undetectable_long_string_map_ = Map::cast(obj);
undetectable_long_string_map_->set_is_undetectable();
- obj = AllocateMap(SHORT_ASCII_STRING_TYPE, SeqAsciiString::kHeaderSize);
+ obj = AllocateMap(SHORT_ASCII_STRING_TYPE, SeqAsciiString::kAlignedSize);
if (obj->IsFailure()) return false;
undetectable_short_ascii_string_map_ = Map::cast(obj);
undetectable_short_ascii_string_map_->set_is_undetectable();
- obj = AllocateMap(MEDIUM_ASCII_STRING_TYPE, SeqAsciiString::kHeaderSize);
+ obj = AllocateMap(MEDIUM_ASCII_STRING_TYPE,
SeqAsciiString::kAlignedSize);
if (obj->IsFailure()) return false;
undetectable_medium_ascii_string_map_ = Map::cast(obj);
undetectable_medium_ascii_string_map_->set_is_undetectable();
- obj = AllocateMap(LONG_ASCII_STRING_TYPE, SeqAsciiString::kHeaderSize);
+ obj = AllocateMap(LONG_ASCII_STRING_TYPE, SeqAsciiString::kAlignedSize);
if (obj->IsFailure()) return false;
undetectable_long_ascii_string_map_ = Map::cast(obj);
undetectable_long_ascii_string_map_->set_is_undetectable();
- obj = AllocateMap(BYTE_ARRAY_TYPE, Array::kHeaderSize);
+ obj = AllocateMap(BYTE_ARRAY_TYPE, Array::kAlignedSize);
if (obj->IsFailure()) return false;
byte_array_map_ = Map::cast(obj);
Modified: branches/bleeding_edge/src/objects.h
==============================================================================
--- branches/bleeding_edge/src/objects.h (original)
+++ branches/bleeding_edge/src/objects.h Wed May 20 01:05:12 2009
@@ -296,12 +296,12 @@
// Since string types are not consecutive, this macro is used to
// iterate over them.
#define STRING_TYPE_LIST(V) \
- V(SHORT_SYMBOL_TYPE, SeqTwoByteString::kHeaderSize,
short_symbol) \
- V(MEDIUM_SYMBOL_TYPE, SeqTwoByteString::kHeaderSize,
medium_symbol) \
- V(LONG_SYMBOL_TYPE, SeqTwoByteString::kHeaderSize,
long_symbol) \
- V(SHORT_ASCII_SYMBOL_TYPE, SeqAsciiString::kHeaderSize,
short_ascii_symbol) \
- V(MEDIUM_ASCII_SYMBOL_TYPE, SeqAsciiString::kHeaderSize,
medium_ascii_symbol)\
- V(LONG_ASCII_SYMBOL_TYPE, SeqAsciiString::kHeaderSize,
long_ascii_symbol) \
+ V(SHORT_SYMBOL_TYPE, SeqTwoByteString::kAlignedSize,
short_symbol) \
+ V(MEDIUM_SYMBOL_TYPE, SeqTwoByteString::kAlignedSize,
medium_symbol) \
+ V(LONG_SYMBOL_TYPE, SeqTwoByteString::kAlignedSize,
long_symbol) \
+ V(SHORT_ASCII_SYMBOL_TYPE, SeqAsciiString::kAlignedSize,
short_ascii_symbol) \
+ V(MEDIUM_ASCII_SYMBOL_TYPE, SeqAsciiString::kAlignedSize,
medium_ascii_symbol)\
+ V(LONG_ASCII_SYMBOL_TYPE, SeqAsciiString::kAlignedSize,
long_ascii_symbol) \
V(SHORT_CONS_SYMBOL_TYPE, ConsString::kSize,
short_cons_symbol) \
V(MEDIUM_CONS_SYMBOL_TYPE, ConsString::kSize,
medium_cons_symbol) \
V(LONG_CONS_SYMBOL_TYPE, ConsString::kSize,
long_cons_symbol) \
@@ -338,12 +338,12 @@
V(LONG_EXTERNAL_ASCII_SYMBOL_TYPE,
\
ExternalAsciiString::kSize, \
long_external_ascii_symbol) \
- V(SHORT_STRING_TYPE, SeqTwoByteString::kHeaderSize,
short_string) \
- V(MEDIUM_STRING_TYPE, SeqTwoByteString::kHeaderSize,
medium_string) \
- V(LONG_STRING_TYPE, SeqTwoByteString::kHeaderSize,
long_string) \
- V(SHORT_ASCII_STRING_TYPE, SeqAsciiString::kHeaderSize,
short_ascii_string) \
- V(MEDIUM_ASCII_STRING_TYPE, SeqAsciiString::kHeaderSize,
medium_ascii_string)\
- V(LONG_ASCII_STRING_TYPE, SeqAsciiString::kHeaderSize,
long_ascii_string) \
+ V(SHORT_STRING_TYPE, SeqTwoByteString::kAlignedSize,
short_string) \
+ V(MEDIUM_STRING_TYPE, SeqTwoByteString::kAlignedSize,
medium_string) \
+ V(LONG_STRING_TYPE, SeqTwoByteString::kAlignedSize,
long_string) \
+ V(SHORT_ASCII_STRING_TYPE, SeqAsciiString::kAlignedSize,
short_ascii_string) \
+ V(MEDIUM_ASCII_STRING_TYPE, SeqAsciiString::kAlignedSize,
medium_ascii_string)\
+ V(LONG_ASCII_STRING_TYPE, SeqAsciiString::kAlignedSize,
long_ascii_string) \
V(SHORT_CONS_STRING_TYPE, ConsString::kSize,
short_cons_string) \
V(MEDIUM_CONS_STRING_TYPE, ConsString::kSize,
medium_cons_string) \
V(LONG_CONS_STRING_TYPE, ConsString::kSize,
long_cons_string) \
@@ -1553,6 +1553,7 @@
// Layout descriptor.
static const int kLengthOffset = HeapObject::kHeaderSize;
static const int kHeaderSize = kLengthOffset + kIntSize;
+ static const int kAlignedSize = POINTER_SIZE_ALIGN(kHeaderSize);
private:
DISALLOW_IMPLICIT_CONSTRUCTORS(Array);
@@ -3373,6 +3374,7 @@
// Layout description.
static const int kLengthOffset = HeapObject::kHeaderSize;
static const int kSize = kLengthOffset + kIntSize;
+ // Notice: kSize is not pointer-size aligned if pointers are 64-bit.
// Limits on sizes of different types of strings.
static const int kMaxShortStringSize = 63;
@@ -3526,6 +3528,7 @@
// Layout description.
static const int kHeaderSize = String::kSize;
+ static const int kAlignedSize = POINTER_SIZE_ALIGN(kHeaderSize);
// Support for StringInputBuffer.
inline void SeqAsciiStringReadBlockIntoBuffer(ReadBlockBuffer* buffer,
@@ -3571,6 +3574,7 @@
// Layout description.
static const int kHeaderSize = String::kSize;
+ static const int kAlignedSize = POINTER_SIZE_ALIGN(kHeaderSize);
// Support for StringInputBuffer.
inline void SeqTwoByteStringReadBlockIntoBuffer(ReadBlockBuffer* buffer,
Modified: branches/bleeding_edge/src/runtime.cc
==============================================================================
--- branches/bleeding_edge/src/runtime.cc (original)
+++ branches/bleeding_edge/src/runtime.cc Wed May 20 01:05:12 2009
@@ -5428,7 +5428,7 @@
// Helper functions for wrapping and unwrapping stack frame ids.
static Smi* WrapFrameId(StackFrame::Id id) {
- ASSERT(IsAligned(OffsetFrom(id), 4));
+ ASSERT(IsAligned(OffsetFrom(id), static_cast<intptr_t>(4)));
return Smi::FromInt(id >> 2);
}
Modified: branches/bleeding_edge/src/spaces-inl.h
==============================================================================
--- branches/bleeding_edge/src/spaces-inl.h (original)
+++ branches/bleeding_edge/src/spaces-inl.h Wed May 20 01:05:12 2009
@@ -92,8 +92,10 @@
void Page::ClearRSet() {
+#ifndef V8_HOST_ARCH_64_BIT
// This method can be called in all rset states.
memset(RSetStart(), 0, kRSetEndOffset - kRSetStartOffset);
+#endif
}
@@ -194,7 +196,7 @@
Page* MemoryAllocator::GetNextPage(Page* p) {
ASSERT(p->is_valid());
- int raw_addr = p->opaque_header & ~Page::kPageAlignmentMask;
+ intptr_t raw_addr = p->opaque_header & ~Page::kPageAlignmentMask;
return Page::FromAddress(AddressFrom<Address>(raw_addr));
}
@@ -207,7 +209,7 @@
void MemoryAllocator::SetNextPage(Page* prev, Page* next) {
ASSERT(prev->is_valid());
- int chunk_id = prev->opaque_header & Page::kPageAlignmentMask;
+ int chunk_id = GetChunkId(prev);
ASSERT_PAGE_ALIGNED(next->address());
prev->opaque_header = OffsetFrom(next->address()) | chunk_id;
}
Modified: branches/bleeding_edge/src/spaces.h
==============================================================================
--- branches/bleeding_edge/src/spaces.h (original)
+++ branches/bleeding_edge/src/spaces.h Wed May 20 01:05:12 2009
@@ -98,6 +98,7 @@
// its page offset by 32. Therefore, the object area in a page starts at the
// 256th byte (8K/32). Bytes 0 to 255 do not need the remembered set, so that
// the first two words (64 bits) in a page can be used for other purposes.
+// TODO(X64): This description only represents the 32-bit layout.
//
// The mark-compact collector transforms a map pointer into a page index and a
// page offset. The map space can have up to 1024 pages, and 8M bytes (1024 *
@@ -213,7 +214,7 @@
static const int kPageSize = 1 << kPageSizeBits;
// Page size mask.
- static const int kPageAlignmentMask = (1 << kPageSizeBits) - 1;
+ static const intptr_t kPageAlignmentMask = (1 << kPageSizeBits) - 1;
// The end offset of the remembered set in a page
// (heaps are aligned to pointer size).
@@ -242,7 +243,7 @@
// in the current page. If a page is in the large object space, the first
// word *may* (if the page start and large object chunk start are the
// same) contain the address of the next large object chunk.
- int opaque_header;
+ intptr_t opaque_header;
// If the page is not in the large object space, the low-order bit of the
// second word is set. If the page is in the large object space, the
Modified: branches/bleeding_edge/src/utils.h
==============================================================================
--- branches/bleeding_edge/src/utils.h (original)
+++ branches/bleeding_edge/src/utils.h Wed May 20 01:05:12 2009
@@ -54,7 +54,7 @@
// This allows conversion of Addresses and integral types into
// 0-relative int offsets.
template <typename T>
-static inline int OffsetFrom(T x) {
+static inline intptr_t OffsetFrom(T x) {
return x - static_cast<T>(0);
}
@@ -63,7 +63,7 @@
// This allows conversion of 0-relative int offsets into Addresses and
// integral types.
template <typename T>
-static inline T AddressFrom(int x) {
+static inline T AddressFrom(intptr_t x) {
return static_cast<T>(0) + x;
}
Modified: branches/bleeding_edge/src/v8threads.cc
==============================================================================
--- branches/bleeding_edge/src/v8threads.cc (original)
+++ branches/bleeding_edge/src/v8threads.cc Wed May 20 01:05:12 2009
@@ -261,6 +261,8 @@
ThreadState* state = lazily_archived_thread_state_;
state->LinkInto(ThreadState::IN_USE_LIST);
char* to = state->data();
+ // Ensure that data containing GC roots are archived first, and handle them
+ // in ThreadManager::Iterate(ObjectVisitor*).
to = HandleScopeImplementer::ArchiveThread(to);
to = Top::ArchiveThread(to);
#ifdef ENABLE_DEBUGGER_SUPPORT
--~--~---------~--~----~------------~-------~--~----~
v8-dev mailing list
[email protected]
http://groups.google.com/group/v8-dev
-~----------~----~----~----~------~----~------~--~---