Reviewers: jarin, Michael Starzinger,
Description:
Synchronize store buffer processing and concurrent sweeping.
BUG=
Please review this at https://codereview.chromium.org/227533006/
SVN Base: https://v8.googlecode.com/svn/branches/bleeding_edge
Affected files (+56, -25 lines):
M src/heap.h
M src/heap.cc
M src/heap-inl.h
M src/mark-compact.cc
M src/objects.h
M src/objects-inl.h
M src/spaces.cc
M src/store-buffer.h
M src/store-buffer.cc
Index: src/heap-inl.h
diff --git a/src/heap-inl.h b/src/heap-inl.h
index
ea7dcda3b0acd8aee62f2507a5ab5852bd7196d1..cac969caedc5ac6bac24a21d4e68d457f8367c72
100644
--- a/src/heap-inl.h
+++ b/src/heap-inl.h
@@ -524,7 +524,9 @@ void Heap::UpdateAllocationSiteFeedback(HeapObject* object,
}
-void Heap::ScavengeObject(HeapObject** p, HeapObject* object) {
+void Heap::ScavengeObject(HeapObject** p,
+ HeapObject* object,
+ Object* old_object) {
ASSERT(object->GetIsolate()->heap()->InFromSpace(object));
// We use the first word (where the map pointer usually is) of a heap
Index: src/heap.cc
diff --git a/src/heap.cc b/src/heap.cc
index
d115d664f337948623a1f9d937a69bb4fb332ad1..5e71a715b4965994226a52e9c7808095c8a47870
100644
--- a/src/heap.cc
+++ b/src/heap.cc
@@ -5934,7 +5934,8 @@ void Heap::IterateAndMarkPointersToFromSpace(Address start,
if (object->IsHeapObject()) {
if (Heap::InFromSpace(object)) {
callback(reinterpret_cast<HeapObject**>(slot),
- HeapObject::cast(object));
+ HeapObject::cast(object),
+ NULL);
Object* new_object = *slot;
if (InNewSpace(new_object)) {
SLOW_ASSERT(Heap::InToSpace(new_object));
Index: src/heap.h
diff --git a/src/heap.h b/src/heap.h
index
758213719c3792678e8b2f89446ef6d34c53e5b9..1ac80d32563a8e72972ffd231ef8f08c8e5df142
100644
--- a/src/heap.h
+++ b/src/heap.h
@@ -1474,7 +1474,13 @@ class Heap {
// ensure the precondition that the object is (a) a heap object and (b) in
// the heap's from space.
static inline void ScavengePointer(HeapObject** p);
- static inline void ScavengeObject(HeapObject** p, HeapObject* object);
+
+ // p is the slot pointing to the object in new space
+ // object is the object in new space
+ // old_object is not used in the ScavengeObject callback
+ static inline void ScavengeObject(HeapObject** p,
+ HeapObject* object,
+ Object* old_object = NULL);
enum ScratchpadSlotMode {
IGNORE_SCRATCHPAD_SLOT,
Index: src/mark-compact.cc
diff --git a/src/mark-compact.cc b/src/mark-compact.cc
index
f04a8bcb9a64105a496b8e4926cc833d06b7bc3a..fbbc0dc58c9a89efbd16fb7d8bd4c306a2711ac3
100644
--- a/src/mark-compact.cc
+++ b/src/mark-compact.cc
@@ -2988,25 +2988,32 @@ class PointersUpdatingVisitor: public ObjectVisitor {
};
-static void UpdatePointer(HeapObject** p, HeapObject* object) {
- ASSERT(*p == object);
-
- Address old_addr = object->address();
-
- Address new_addr = Memory::Address_at(old_addr);
+static void UpdatePointer(HeapObject** address,
+ HeapObject* new_value,
+ Object* old_value) {
+ Address new_addr = Memory::Address_at(new_value->address());
// The new space sweep will overwrite the map word of dead objects
// with NULL. In this case we do not need to transfer this entry to
// the store buffer which we are rebuilding.
+ // We perform the pointer update with a no barrier compare-and-swap. The
+ // compare and swap may fail in the case where the pointer update tries to
+ // update garbage memory which was concurrently accessed by the sweeper.
if (new_addr != NULL) {
- *p = HeapObject::FromAddress(new_addr);
+ NoBarrier_CompareAndSwap(
+ reinterpret_cast<AtomicWord*>(address),
+ reinterpret_cast<AtomicWord>(old_value),
+ reinterpret_cast<AtomicWord>(HeapObject::FromAddress(new_addr)));
} else {
// We have to zap this pointer, because the store buffer may overflow later,
// and then we have to scan the entire heap and we don't want to find
// spurious newspace pointers in the old space.
// TODO(mstarzinger): This was changed to a sentinel value to track down
// rare crashes, change it back to Smi::FromInt(0) later.
- *p = reinterpret_cast<HeapObject*>(Smi::FromInt(0x0f100d00 >> 1)); // flood
+ NoBarrier_CompareAndSwap(
+ reinterpret_cast<AtomicWord*>(address),
+ reinterpret_cast<AtomicWord>(old_value),
+ reinterpret_cast<AtomicWord>(Smi::FromInt(0x0f100d00 >> 1)));
}
}
Index: src/objects-inl.h
diff --git a/src/objects-inl.h b/src/objects-inl.h
index
3939921bf7304378b13540fc4a1fda3f426216b3..d3c8f00c273ffcb9d4b38cf8192b5b1c17d07837
100644
--- a/src/objects-inl.h
+++ b/src/objects-inl.h
@@ -2989,6 +2989,7 @@ SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)
SYNCHRONIZED_SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)
SMI_ACCESSORS(FreeSpace, size, kSizeOffset)
+SYNCHRONIZED_SMI_ACCESSORS(FreeSpace, size, kSizeOffset)
SMI_ACCESSORS(String, length, kLengthOffset)
SYNCHRONIZED_SMI_ACCESSORS(String, length, kLengthOffset)
@@ -4016,7 +4017,7 @@ int HeapObject::SizeFromMap(Map* map) {
return reinterpret_cast<ByteArray*>(this)->ByteArraySize();
}
if (instance_type == FREE_SPACE_TYPE) {
- return reinterpret_cast<FreeSpace*>(this)->size();
+ return reinterpret_cast<FreeSpace*>(this)->synchronized_size();
}
if (instance_type == STRING_TYPE ||
instance_type == INTERNALIZED_STRING_TYPE) {
Index: src/objects.h
diff --git a/src/objects.h b/src/objects.h
index
89267e5c6fc23b2aacd33aa81c7335a1acfb1a81..c76cfcfb555d10f333a654e9711aba705f1dfb38
100644
--- a/src/objects.h
+++ b/src/objects.h
@@ -4807,6 +4807,9 @@ class FreeSpace: public HeapObject {
inline int size();
inline void set_size(int value);
+ inline int synchronized_size();
+ inline void synchronized_set_size(int value);
+
inline int Size() { return size(); }
// Casting.
Index: src/spaces.cc
diff --git a/src/spaces.cc b/src/spaces.cc
index
7831ef1ecdf66ecd1f4bcbd9c92abd8a392349a1..046d27500d91261e33994da827474d2bc51fb27f
100644
--- a/src/spaces.cc
+++ b/src/spaces.cc
@@ -2020,7 +2020,7 @@ void FreeListNode::set_size(Heap* heap, int size_in_bytes) {
set_map_no_write_barrier(heap->raw_unchecked_free_space_map());
// Can't use FreeSpace::cast because it fails during deserialization.
FreeSpace* this_as_free_space = reinterpret_cast<FreeSpace*>(this);
- this_as_free_space->set_size(size_in_bytes);
+ this_as_free_space->synchronized_set_size(size_in_bytes);
} else if (size_in_bytes == kPointerSize) {
set_map_no_write_barrier(heap->raw_unchecked_one_pointer_filler_map());
} else if (size_in_bytes == 2 * kPointerSize) {
@@ -2064,11 +2064,11 @@ void FreeListNode::set_next(FreeListNode* next) {
// stage.
if (map() == GetHeap()->raw_unchecked_free_space_map()) {
ASSERT(map() == NULL || Size() >= kNextOffset + kPointerSize);
- Memory::Address_at(address() + kNextOffset) =
- reinterpret_cast<Address>(next);
+ Release_Store(reinterpret_cast<AtomicWord*>(address() + kNextOffset),
+ reinterpret_cast<AtomicWord>(next));
} else {
- Memory::Address_at(address() + kPointerSize) =
- reinterpret_cast<Address>(next);
+ Release_Store(reinterpret_cast<AtomicWord*>(address() + kPointerSize),
+ reinterpret_cast<AtomicWord>(next));
}
}
Index: src/store-buffer.cc
diff --git a/src/store-buffer.cc b/src/store-buffer.cc
index
a1479b2b9aaaac37c8bf6a8fa251f592b6fd33a7..82edc2bffe5b85561acfa145064c82a03f3b96c9
100644
--- a/src/store-buffer.cc
+++ b/src/store-buffer.cc
@@ -355,7 +355,7 @@ void StoreBuffer::GCPrologue() {
#ifdef VERIFY_HEAP
-static void DummyScavengePointer(HeapObject** p, HeapObject* o) {
+static void DummyScavengePointer(HeapObject** p, HeapObject* o, Object* old) {
// Do nothing.
}
@@ -427,13 +427,14 @@ void StoreBuffer::FindPointersToNewSpaceInRegion(
slot_address < end;
slot_address += kPointerSize) {
Object** slot = reinterpret_cast<Object**>(slot_address);
- if (heap_->InNewSpace(*slot)) {
+ Object* old_value = *slot;
+ if (heap_->InNewSpace(old_value)) {
HeapObject* object = reinterpret_cast<HeapObject*>(*slot);
ASSERT(object->IsHeapObject());
// The new space object was not promoted if it still contains a map
// pointer. Clear the map field now lazily.
if (clear_maps) ClearDeadObject(object);
- slot_callback(reinterpret_cast<HeapObject**>(slot), object);
+ slot_callback(reinterpret_cast<HeapObject**>(slot), object, old_value);
if (heap_->InNewSpace(*slot)) {
EnterDirectlyIntoStoreBuffer(slot_address);
}
@@ -531,7 +532,11 @@ void StoreBuffer::FindPointersToNewSpaceOnPage(
Object* constant_pool_array_map = heap_->constant_pool_array_map();
while (visitable_end < end_of_page) {
- Object* o = *reinterpret_cast<Object**>(visitable_end);
+ // The sweeper thread may concurrently write free space maps and sizes to
+ // this page. We need acquire load here to make sure that we get a
+ // consistent view of maps and their sizes.
+ Object* o = reinterpret_cast<Object*>(
+ Acquire_Load(reinterpret_cast<AtomicWord*>(visitable_end)));
// Skip fillers or constant pool arrays (which never contain new-space
// pointers but can contain pointers which can be confused for fillers)
// but not things that look like fillers in the special garbage section
@@ -594,14 +599,18 @@ void StoreBuffer::IteratePointersInStoreBuffer(
#ifdef DEBUG
Address* saved_top = old_top_;
#endif
- Object** slot = reinterpret_cast<Object**>(*current);
- Object* object = *slot;
+ Object** slot = reinterpret_cast<Object**>(
+ NoBarrier_Load(reinterpret_cast<AtomicWord*>(current)));
+ Object* object = reinterpret_cast<Object*>(
+ NoBarrier_Load(reinterpret_cast<AtomicWord*>(slot)));
if (heap_->InFromSpace(object)) {
HeapObject* heap_object = reinterpret_cast<HeapObject*>(object);
// The new space object was not promoted if it still contains a map
// pointer. Clear the map field now lazily.
if (clear_maps) ClearDeadObject(heap_object);
- slot_callback(reinterpret_cast<HeapObject**>(slot), heap_object);
+ slot_callback(reinterpret_cast<HeapObject**>(slot),
+ heap_object,
+ object);
if (heap_->InNewSpace(*slot)) {
EnterDirectlyIntoStoreBuffer(reinterpret_cast<Address>(slot));
}
Index: src/store-buffer.h
diff --git a/src/store-buffer.h b/src/store-buffer.h
index
01e7cbeb8d2a439784617f20a16a61793df14459..73b04e19e2a3d387eccd9572bd345a3b48fd4be7
100644
--- a/src/store-buffer.h
+++ b/src/store-buffer.h
@@ -41,7 +41,9 @@ class Page;
class PagedSpace;
class StoreBuffer;
-typedef void (*ObjectSlotCallback)(HeapObject** from, HeapObject* to);
+typedef void (*ObjectSlotCallback)(HeapObject** address,
+ HeapObject* new_value,
+ Object* old_value);
typedef void (StoreBuffer::*RegionCallback)(Address start,
Address end,
--
--
v8-dev mailing list
[email protected]
http://groups.google.com/group/v8-dev
---
You received this message because you are subscribed to the Google Groups "v8-dev" group.
To unsubscribe from this group and stop receiving emails from it, send an email
to [email protected].
For more options, visit https://groups.google.com/d/optout.