Revision: 5202
Author: [email protected]
Date: Fri Aug 6 16:03:39 2010
Log: [Isolates] Add a pointer to Heap to a meta map.
Review URL: http://codereview.chromium.org/3089005
http://code.google.com/p/v8/source/detail?r=5202
Modified:
/branches/experimental/isolates/src/heap-inl.h
/branches/experimental/isolates/src/heap.cc
/branches/experimental/isolates/src/heap.h
/branches/experimental/isolates/src/mark-compact.cc
/branches/experimental/isolates/src/objects-inl.h
/branches/experimental/isolates/src/objects.cc
/branches/experimental/isolates/src/objects.h
/branches/experimental/isolates/src/serialize.cc
=======================================
--- /branches/experimental/isolates/src/heap-inl.h Thu Aug 5 13:38:56 2010
+++ /branches/experimental/isolates/src/heap-inl.h Fri Aug 6 16:03:39 2010
@@ -381,6 +381,10 @@
roots_[kLastScriptIdRootIndex] = last_script_id;
}
+Isolate* Heap::isolate() {
+ return reinterpret_cast<Isolate*>(reinterpret_cast<intptr_t>(this) -
+ reinterpret_cast<size_t>(reinterpret_cast<Isolate*>(4)->heap()) + 4);
+}
#define GC_GREEDY_CHECK() \
ASSERT(!FLAG_gc_greedy || HEAP->GarbageCollectionGreedyCheck())
=======================================
--- /branches/experimental/isolates/src/heap.cc Thu Aug 5 13:38:56 2010
+++ /branches/experimental/isolates/src/heap.cc Fri Aug 6 16:03:39 2010
@@ -1360,8 +1360,12 @@
reinterpret_cast<Map*>(result)->set_map(raw_unchecked_meta_map());
reinterpret_cast<Map*>(result)->set_instance_type(instance_type);
reinterpret_cast<Map*>(result)->set_instance_size(instance_size);
- reinterpret_cast<Map*>(result)->
- set_scavenger(GetScavenger(instance_type, instance_size));
+ if (instance_type == MAP_TYPE) {
+ reinterpret_cast<Map*>(result)->set_heap(this);
+ } else {
+ reinterpret_cast<Map*>(result)->set_scavenger(GetScavenger(instance_type,
+ instance_size));
+ }
reinterpret_cast<Map*>(result)->set_inobject_properties(0);
reinterpret_cast<Map*>(result)->set_pre_allocated_property_fields(0);
reinterpret_cast<Map*>(result)->set_unused_property_fields(0);
@@ -1872,7 +1876,7 @@
// Flush the number to string cache.
int len = number_string_cache()->length();
for (int i = 0; i < len; i++) {
- number_string_cache()->set_undefined(i);
+ number_string_cache()->set_undefined(this, i);
}
}
=======================================
--- /branches/experimental/isolates/src/heap.h Fri Aug 6 15:56:35 2010
+++ /branches/experimental/isolates/src/heap.h Fri Aug 6 16:03:39 2010
@@ -1120,6 +1120,8 @@
ExternalStringTable* external_string_table() {
return &external_string_table_;
}
+
+ inline Isolate* isolate();
private:
Heap();
=======================================
--- /branches/experimental/isolates/src/mark-compact.cc Thu Jul 15 20:09:25 2010
+++ /branches/experimental/isolates/src/mark-compact.cc Fri Aug 6 16:03:39 2010
@@ -403,7 +403,7 @@
if (object->IsMap()) {
Map* map = Map::cast(object);
if (FLAG_cleanup_caches_in_maps_at_gc) {
- map->ClearCodeCache();
+ map->ClearCodeCache(heap_);
}
SetMark(map);
if (FLAG_collect_maps &&
@@ -829,7 +829,7 @@
// This test will always be false on the first iteration.
if (on_dead_path && current->IsMarked()) {
on_dead_path = false;
- current->ClearNonLiveTransitions(real_prototype);
+ current->ClearNonLiveTransitions(heap_, real_prototype);
}
*HeapObject::RawField(current, Map::kPrototypeOffset) =
real_prototype;
=======================================
--- /branches/experimental/isolates/src/objects-inl.h Thu Aug 5 13:38:56 2010
+++ /branches/experimental/isolates/src/objects-inl.h Fri Aug 6 16:03:39 2010
@@ -105,6 +105,9 @@
void holder::set_##name(bool value) { \
set_##field(BooleanBit::set(field(), offset, value)); \
}
+
+
+#define GET_HEAP (HeapObject::cast(this)->GetHeap())
bool Object::IsInstanceOf(FunctionTemplateInfo* expected) {
@@ -449,21 +452,21 @@
bool Object::IsContext() {
return Object::IsHeapObject()
- && (HeapObject::cast(this)->map() == HEAP->context_map() ||
- HeapObject::cast(this)->map() == HEAP->catch_context_map() ||
- HeapObject::cast(this)->map() == HEAP->global_context_map());
+ && (HeapObject::cast(this)->map() == GET_HEAP->context_map() ||
+ HeapObject::cast(this)->map() == GET_HEAP->catch_context_map() ||
+ HeapObject::cast(this)->map() == GET_HEAP->global_context_map());
}
bool Object::IsCatchContext() {
return Object::IsHeapObject()
- && HeapObject::cast(this)->map() == HEAP->catch_context_map();
+ && HeapObject::cast(this)->map() == GET_HEAP->catch_context_map();
}
bool Object::IsGlobalContext() {
return Object::IsHeapObject()
- && HeapObject::cast(this)->map() == HEAP->global_context_map();
+ && HeapObject::cast(this)->map() == GET_HEAP->global_context_map();
}
@@ -546,17 +549,17 @@
bool Object::IsHashTable() {
return Object::IsHeapObject()
- && HeapObject::cast(this)->map() == HEAP->hash_table_map();
+ && HeapObject::cast(this)->map() == GET_HEAP->hash_table_map();
}
bool Object::IsDictionary() {
- return IsHashTable() && this != HEAP->symbol_table();
+ return IsHashTable() && this != GET_HEAP->symbol_table();
}
bool Object::IsSymbolTable() {
- return IsHashTable() && this == HEAP->raw_unchecked_symbol_table();
+ return IsHashTable() && this == GET_HEAP->raw_unchecked_symbol_table();
}
@@ -738,17 +741,17 @@
// TODO(isolates): Pass heap in to these macros.
#define WRITE_BARRIER(object, offset) \
- HEAP->RecordWrite(object->address(), offset);
+ object->GetHeap()->RecordWrite(object->address(), offset);
// CONDITIONAL_WRITE_BARRIER must be issued after the actual
// write due to the assert validating the written value.
#define CONDITIONAL_WRITE_BARRIER(object, offset, mode) \
if (mode == UPDATE_WRITE_BARRIER) { \
- HEAP->RecordWrite(object->address(), offset); \
+ object->GetHeap()->RecordWrite(object->address(), offset); \
} else { \
ASSERT(mode == SKIP_WRITE_BARRIER); \
- ASSERT(HEAP->InNewSpace(object) || \
- !HEAP->InNewSpace(READ_FIELD(object, offset)) || \
+ ASSERT(object->GetHeap()->InNewSpace(object) || \
+ !object->GetHeap()->InNewSpace(READ_FIELD(object, offset)) || \
Page::FromAddress(object->address())-> \
IsRegionDirty(object->address() + offset)); \
}
@@ -1043,6 +1046,16 @@
#endif
+Heap* HeapObject::GetHeap() {
+ // During GC, the map pointer in HeapObject is used in various ways that
+ // prevent us from retrieving Heap from the map.
+ // Assert that we are not in GC, implement GC code in a way that it doesn't
+ // pull heap from the map.
+ ASSERT(HEAP->gc_state() == Heap::NOT_IN_GC);
+ return map()->map()->heap();
+}
+
+
Map* HeapObject::map() {
return map_word().ToMap();
}
@@ -1179,14 +1192,14 @@
void JSObject::initialize_properties() {
ASSERT(!HEAP->InNewSpace(HEAP->empty_fixed_array()));
- WRITE_FIELD(this, kPropertiesOffset, HEAP->empty_fixed_array());
+ WRITE_FIELD(this, kPropertiesOffset, GetHeap()->empty_fixed_array());
}
void JSObject::initialize_elements() {
ASSERT(map()->has_fast_elements());
ASSERT(!HEAP->InNewSpace(HEAP->empty_fixed_array()));
- WRITE_FIELD(this, kElementsOffset, HEAP->empty_fixed_array());
+ WRITE_FIELD(this, kElementsOffset, GetHeap()->empty_fixed_array());
}
@@ -1339,7 +1352,7 @@
void JSObject::InitializeBody(int object_size) {
- Object* value = HEAP->undefined_value();
+ Object* value = GetHeap()->undefined_value();
for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
WRITE_FIELD(this, offset, value);
}
@@ -1362,7 +1375,7 @@
void Struct::InitializeBody(int object_size) {
- Object* value = HEAP->undefined_value();
+ Object* value = GetHeap()->undefined_value();
for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
WRITE_FIELD(this, offset, value);
}
@@ -1423,7 +1436,7 @@
WriteBarrierMode HeapObject::GetWriteBarrierMode(const AssertNoAllocation&) {
- if (HEAP->InNewSpace(this)) return SKIP_WRITE_BARRIER;
+ if (GetHeap()->InNewSpace(this)) return SKIP_WRITE_BARRIER;
return UPDATE_WRITE_BARRIER;
}
@@ -1446,24 +1459,36 @@
void FixedArray::set_undefined(int index) {
+ set_undefined(GetHeap(), index);
+}
+
+
+void FixedArray::set_undefined(Heap* heap, int index) {
ASSERT(index >= 0 && index < this->length());
- ASSERT(!HEAP->InNewSpace(HEAP->undefined_value()));
+ ASSERT(!heap->InNewSpace(heap->undefined_value()));
WRITE_FIELD(this, kHeaderSize + index * kPointerSize,
- HEAP->undefined_value());
+ heap->undefined_value());
}
void FixedArray::set_null(int index) {
+ set_null(GetHeap(),index);
+}
+
+
+void FixedArray::set_null(Heap* heap, int index) {
ASSERT(index >= 0 && index < this->length());
- ASSERT(!HEAP->InNewSpace(HEAP->null_value()));
- WRITE_FIELD(this, kHeaderSize + index * kPointerSize, HEAP->null_value());
+ ASSERT(!heap->InNewSpace(heap->null_value()));
+ WRITE_FIELD(this, kHeaderSize + index * kPointerSize, heap->null_value());
}
void FixedArray::set_the_hole(int index) {
ASSERT(index >= 0 && index < this->length());
ASSERT(!HEAP->InNewSpace(HEAP->the_hole_value()));
- WRITE_FIELD(this, kHeaderSize + index * kPointerSize, HEAP->the_hole_value());
+ WRITE_FIELD(this,
+ kHeaderSize + index * kPointerSize,
+ GetHeap()->the_hole_value());
}
@@ -1473,9 +1498,9 @@
bool DescriptorArray::IsEmpty() {
- ASSERT(this == HEAP->empty_descriptor_array() ||
- this->length() > 2);
- return this == HEAP->empty_descriptor_array();
+ ASSERT(this->length() > kFirstIndex ||
+ this == HEAP->empty_descriptor_array());
+ return length() <= kFirstIndex;
}
@@ -1902,7 +1927,7 @@
void JSFunctionResultCache::Clear() {
int cache_size = Smi::cast(get(kCacheSizeIndex))->value();
Object** entries_start = RawField(this, OffsetOfElementAt(kEntriesIndex));
- MemsetPointer(entries_start, HEAP->the_hole_value(), cache_size);
+ MemsetPointer(entries_start, GetHeap()->the_hole_value(), cache_size);
MakeZeroSize();
}
@@ -2075,6 +2100,7 @@
Scavenger callback = reinterpret_cast<Scavenger>(
READ_INTPTR_FIELD(this, kScavengerCallbackOffset));
+ ASSERT(instance_type() != MAP_TYPE); // MAP_TYPE has Heap pointer instead.
ASSERT(callback == Heap::GetScavenger(instance_type(),
instance_size()));
@@ -2082,6 +2108,7 @@
}
inline void Map::set_scavenger(Scavenger callback) {
+ ASSERT(instance_type() != MAP_TYPE); // MAP_TYPE has Heap pointer instead.
WRITE_INTPTR_FIELD(this,
kScavengerCallbackOffset,
reinterpret_cast<intptr_t>(callback));
@@ -2392,6 +2419,28 @@
Code* result = reinterpret_cast<Code*>(code);
return result;
}
+
+
+Heap* Map::heap() {
+ ASSERT(instance_type() == MAP_TYPE);
+ ASSERT(this == map());
+ Heap* heap = reinterpret_cast<Heap*>(
+ READ_INTPTR_FIELD(this, kScavengerCallbackOffset));
+ ASSERT(heap != NULL);
+ ASSERT(heap->isolate() == Isolate::Current());
+ return heap;
+}
+
+
+void Map::set_heap(Heap* heap) {
+ ASSERT(heap != NULL);
+ ASSERT(heap->isolate() == Isolate::Current());
+ ASSERT(instance_type() == MAP_TYPE);
+ // WRITE_FIELD does not invoke write barrier, but there is no need here.
+ WRITE_INTPTR_FIELD(this,
+ kScavengerCallbackOffset,
+ reinterpret_cast<intptr_t>(heap));
+}
Object* Map::prototype() {
@@ -2950,12 +2999,11 @@
HeapObject* array = elements();
if (array->IsFixedArray()) {
// FAST_ELEMENTS or DICTIONARY_ELEMENTS are both stored in a FixedArray.
- if (array->map() == HEAP->fixed_array_map()) {
- ASSERT(map()->has_fast_elements());
+ if (map()->has_fast_elements()) {
+ ASSERT(array->map() == GetHeap()->fixed_array_map());
return FAST_ELEMENTS;
}
ASSERT(array->IsDictionary());
- ASSERT(!map()->has_fast_elements());
return DICTIONARY_ELEMENTS;
}
ASSERT(!map()->has_fast_elements());
@@ -3180,7 +3228,7 @@
Object* JSObject::BypassGlobalProxy() {
if (IsJSGlobalProxy()) {
Object* proto = GetPrototype();
- if (proto->IsNull()) return HEAP->undefined_value();
+ if (proto->IsNull()) return GetHeap()->undefined_value();
ASSERT(proto->IsJSGlobalObject());
return proto;
}
@@ -3191,7 +3239,7 @@
bool JSObject::HasHiddenPropertiesObject() {
ASSERT(!IsJSGlobalProxy());
return GetPropertyAttributePostInterceptor(this,
- HEAP->hidden_symbol(),
+ GetHeap()->hidden_symbol(),
false) != ABSENT;
}
@@ -3200,14 +3248,14 @@
ASSERT(!IsJSGlobalProxy());
PropertyAttributes attributes;
return GetLocalPropertyPostInterceptor(this,
- HEAP->hidden_symbol(),
+ GetHeap()->hidden_symbol(),
&attributes);
}
Object* JSObject::SetHiddenPropertiesObject(Object* hidden_obj) {
ASSERT(!IsJSGlobalProxy());
- return SetPropertyPostInterceptor(HEAP->hidden_symbol(),
+ return SetPropertyPostInterceptor(GetHeap()->hidden_symbol(),
hidden_obj,
DONT_ENUM);
}
@@ -3274,12 +3322,12 @@
}
-void Map::ClearCodeCache() {
+void Map::ClearCodeCache(Heap* heap) {
// No write barrier is needed since empty_fixed_array is not in new space.
// Please note this function is used during marking:
// - MarkCompactCollector::MarkUnmarkedObject
- ASSERT(!HEAP->InNewSpace(HEAP->raw_unchecked_empty_fixed_array()));
- WRITE_FIELD(this, kCodeCacheOffset,
HEAP->raw_unchecked_empty_fixed_array());
+ ASSERT(!heap->InNewSpace(heap->raw_unchecked_empty_fixed_array()));
+ WRITE_FIELD(this, kCodeCacheOffset,
heap->raw_unchecked_empty_fixed_array());
}
@@ -3292,7 +3340,7 @@
// constantly growing.
Expand(required_size + (required_size >> 3));
// It's a performance benefit to keep a frequently used array in new-space.
- } else if (!HEAP->new_space()->Contains(elts) &&
+ } else if (!GetHeap()->new_space()->Contains(elts) &&
required_size < kArraySizeThatFitsComfortablyInNewSpace) {
// Expand will allocate a new backing store in new space even if the size
// we asked for isn't larger than what we had before.
@@ -3314,7 +3362,7 @@
Object* FixedArray::Copy() {
if (length() == 0) return this;
- return HEAP->CopyFixedArray(this);
+ return GetHeap()->CopyFixedArray(this);
}
=======================================
--- /branches/experimental/isolates/src/objects.cc Thu Aug 5 13:38:56 2010
+++ /branches/experimental/isolates/src/objects.cc Fri Aug 6 16:03:39 2010
@@ -3082,7 +3082,7 @@
Object* Map::CopyDropDescriptors() {
- Object* result = HEAP->AllocateMap(instance_type(), instance_size());
+ Object* result = GetHeap()->AllocateMap(instance_type(),
instance_size());
if (result->IsFailure()) return result;
Map::cast(result)->set_prototype(prototype());
Map::cast(result)->set_constructor(constructor());
@@ -3091,7 +3091,8 @@
// pointing to the same transition which is bad because the garbage
// collector relies on being able to reverse pointers from transitions
// to maps. If properties need to be retained use CopyDropTransitions.
- Map::cast(result)->set_instance_descriptors(HEAP->empty_descriptor_array());
+ Map::cast(result)->set_instance_descriptors(
+ GetHeap()->empty_descriptor_array());
// Please note instance_type and instance_size are set when allocated.
Map::cast(result)->set_inobject_properties(inobject_properties());
Map::cast(result)->set_unused_property_fields(unused_property_fields());
@@ -3111,7 +3112,7 @@
}
Map::cast(result)->set_bit_field(bit_field());
Map::cast(result)->set_bit_field2(bit_field2());
- Map::cast(result)->ClearCodeCache();
+ Map::cast(result)->ClearCodeCache(GetHeap());
return result;
}
@@ -5023,12 +5024,12 @@
}
-void Map::ClearNonLiveTransitions(Object* real_prototype) {
+void Map::ClearNonLiveTransitions(Heap* heap, Object* real_prototype) {
// Live DescriptorArray objects will be marked, so we must use
// low-level accessors to get and modify their data.
DescriptorArray* d = reinterpret_cast<DescriptorArray*>(
*RawField(this, Map::kInstanceDescriptorsOffset));
- if (d == HEAP->raw_unchecked_empty_descriptor_array()) return;
+ if (d == heap->raw_unchecked_empty_descriptor_array()) return;
Smi* NullDescriptorDetails =
PropertyDetails(NONE, NULL_DESCRIPTOR).AsSmi();
FixedArray* contents = reinterpret_cast<FixedArray*>(
@@ -5047,7 +5048,7 @@
if (!target->IsMarked()) {
ASSERT(target->IsMap());
contents->set(i + 1, NullDescriptorDetails);
- contents->set_null(i);
+ contents->set_null(heap, i);
ASSERT(target->prototype() == this ||
target->prototype() == real_prototype);
// Getter prototype() is read-only, set_prototype() has side effects.
=======================================
--- /branches/experimental/isolates/src/objects.h Thu Aug 5 13:38:56 2010
+++ /branches/experimental/isolates/src/objects.h Fri Aug 6 16:03:39 2010
@@ -1000,6 +1000,10 @@
inline MapWord map_word();
inline void set_map_word(MapWord map_word);
+ // The Heap the object was allocated in. Used also to access Isolate.
+ // This method can not be used during GC, it ASSERTs this.
+ inline Heap* GetHeap();
+
// Converts an address to a HeapObject pointer.
static inline HeapObject* FromAddress(Address address);
@@ -1651,7 +1655,11 @@
// Setters for frequently used oddballs located in old space.
inline void set_undefined(int index);
+ // TODO(isolates): duplicate.
+ inline void set_undefined(Heap* heap, int index);
inline void set_null(int index);
+ // TODO(isolates): duplicate.
+ inline void set_null(Heap* heap, int index);
inline void set_the_hole(int index);
// Gives access to raw memory which stores the array's data.
@@ -1737,7 +1745,9 @@
// Returns the number of descriptors in the array.
int number_of_descriptors() {
- return IsEmpty() ? 0 : length() - kFirstIndex;
+ ASSERT(length() > kFirstIndex || IsEmpty());
+ int len = length();
+ return len <= kFirstIndex ? 0 : len - kFirstIndex;
}
int NextEnumerationIndex() {
@@ -3058,7 +3068,7 @@
// Code cache operations.
// Clears the code cache.
- inline void ClearCodeCache();
+ inline void ClearCodeCache(Heap* heap);
// Update code cache.
Object* UpdateCodeCache(String* name, Code* code);
@@ -3082,7 +3092,7 @@
// Also, restore the original prototype on the targets of these
// transitions, so that we do not process this map again while
// following back pointers.
- void ClearNonLiveTransitions(Object* real_prototype);
+ void ClearNonLiveTransitions(Heap* heap, Object* real_prototype);
// Dispatched behavior.
void MapIterateBody(ObjectVisitor* v);
@@ -3094,6 +3104,10 @@
inline Scavenger scavenger();
inline void set_scavenger(Scavenger callback);
+ // Meta map has a heap pointer for fast access to Heap and Isolate.
+ inline Heap* heap();
+ inline void set_heap(Heap* heap);
+
inline void Scavenge(HeapObject** slot, HeapObject* obj) {
scavenger()(this, slot, obj);
}
=======================================
--- /branches/experimental/isolates/src/serialize.cc Fri Aug 6 15:56:35 2010
+++ /branches/experimental/isolates/src/serialize.cc Fri Aug 6 16:03:39 2010
@@ -695,8 +695,14 @@
ASSERT(size == Map::kSize);
HeapObject* obj = HeapObject::FromAddress(address);
Map* map = reinterpret_cast<Map*>(obj);
- map->set_scavenger(Heap::GetScavenger(map->instance_type(),
- map->instance_size()));
+ if (map->instance_type() == MAP_TYPE) {
+ // Meta map has Heap pointer instead of scavenger.
+ ASSERT(map == map->map());
+ map->set_heap(HEAP);
+ } else {
+ map->set_scavenger(Heap::GetScavenger(map->instance_type(),
+ map->instance_size()));
+ }
}
}
--
v8-dev mailing list
[email protected]
http://groups.google.com/group/v8-dev