Revision: 22971
Author: [email protected]
Date: Thu Aug 7 12:21:01 2014 UTC
Log: Move objects-visiting into heap.
BUG=
[email protected], [email protected]
Review URL: https://codereview.chromium.org/443933002
http://code.google.com/p/v8/source/detail?r=22971
Added:
/branches/bleeding_edge/src/heap/objects-visiting-inl.h
/branches/bleeding_edge/src/heap/objects-visiting.cc
/branches/bleeding_edge/src/heap/objects-visiting.h
Deleted:
/branches/bleeding_edge/src/objects-visiting-inl.h
/branches/bleeding_edge/src/objects-visiting.cc
/branches/bleeding_edge/src/objects-visiting.h
Modified:
/branches/bleeding_edge/BUILD.gn
/branches/bleeding_edge/src/heap/heap.cc
/branches/bleeding_edge/src/heap/heap.h
/branches/bleeding_edge/src/heap/incremental-marking.cc
/branches/bleeding_edge/src/heap/mark-compact.cc
/branches/bleeding_edge/src/objects-debug.cc
/branches/bleeding_edge/src/objects-inl.h
/branches/bleeding_edge/src/objects-printer.cc
/branches/bleeding_edge/src/objects.cc
/branches/bleeding_edge/tools/gyp/v8.gyp
=======================================
--- /dev/null
+++ /branches/bleeding_edge/src/heap/objects-visiting-inl.h Thu Aug 7
12:21:01 2014 UTC
@@ -0,0 +1,932 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef V8_OBJECTS_VISITING_INL_H_
+#define V8_OBJECTS_VISITING_INL_H_
+
+
+namespace v8 {
+namespace internal {
+
// Fills in the static dispatch table used during new-space (scavenge)
// visits. Each entry maps a VisitorId to a callback that iterates the
// pointer fields of that instance type and returns the object size (int).
template <typename StaticVisitor>
void StaticNewSpaceVisitor<StaticVisitor>::Initialize() {
  // Strings with fixed layouts are handled by FixedBodyVisitor over their
  // BodyDescriptor.
  table_.Register(
      kVisitShortcutCandidate,
      &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor, int>::Visit);

  table_.Register(
      kVisitConsString,
      &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor, int>::Visit);

  table_.Register(kVisitSlicedString,
                  &FixedBodyVisitor<StaticVisitor, SlicedString::BodyDescriptor,
                                    int>::Visit);

  table_.Register(
      kVisitSymbol,
      &FixedBodyVisitor<StaticVisitor, Symbol::BodyDescriptor, int>::Visit);

  // FixedArray length is dynamic, so a FlexibleBodyVisitor is used.
  table_.Register(kVisitFixedArray,
                  &FlexibleBodyVisitor<StaticVisitor,
                                       FixedArray::BodyDescriptor, int>::Visit);

  table_.Register(kVisitFixedDoubleArray, &VisitFixedDoubleArray);
  table_.Register(kVisitFixedTypedArray, &VisitFixedTypedArray);
  table_.Register(kVisitFixedFloat64Array, &VisitFixedTypedArray);

  table_.Register(
      kVisitNativeContext,
      &FixedBodyVisitor<StaticVisitor, Context::ScavengeBodyDescriptor,
                        int>::Visit);

  table_.Register(kVisitByteArray, &VisitByteArray);

  table_.Register(
      kVisitSharedFunctionInfo,
      &FixedBodyVisitor<StaticVisitor, SharedFunctionInfo::BodyDescriptor,
                        int>::Visit);

  table_.Register(kVisitSeqOneByteString, &VisitSeqOneByteString);

  table_.Register(kVisitSeqTwoByteString, &VisitSeqTwoByteString);

  table_.Register(kVisitJSFunction, &VisitJSFunction);

  table_.Register(kVisitJSArrayBuffer, &VisitJSArrayBuffer);

  table_.Register(kVisitJSTypedArray, &VisitJSTypedArray);

  table_.Register(kVisitJSDataView, &VisitJSDataView);

  table_.Register(kVisitFreeSpace, &VisitFreeSpace);

  // Weak collections and regexps get no special new-space treatment; they
  // are scavenged like ordinary JS objects.
  table_.Register(kVisitJSWeakCollection, &JSObjectVisitor::Visit);

  table_.Register(kVisitJSRegExp, &JSObjectVisitor::Visit);

  // Register size-specialized variants (one entry per known instance size
  // plus a generic fallback) for data objects, JS objects and structs.
  table_.template RegisterSpecializations<DataObjectVisitor, kVisitDataObject,
                                          kVisitDataObjectGeneric>();

  table_.template RegisterSpecializations<JSObjectVisitor, kVisitJSObject,
                                          kVisitJSObjectGeneric>();
  table_.template RegisterSpecializations<StructVisitor, kVisitStruct,
                                          kVisitStructGeneric>();
}
+
+
// Visits a JSArrayBuffer during scavenge. The weak-next and weak-first-view
// fields are deliberately skipped: they link the heap's weak list of array
// buffers and must not keep their targets alive. Returns the object size.
template <typename StaticVisitor>
int StaticNewSpaceVisitor<StaticVisitor>::VisitJSArrayBuffer(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  // The two weak fields must be adjacent so a single 2-word gap skips both.
  STATIC_ASSERT(JSArrayBuffer::kWeakFirstViewOffset ==
                JSArrayBuffer::kWeakNextOffset + kPointerSize);
  VisitPointers(heap, HeapObject::RawField(
                          object, JSArrayBuffer::BodyDescriptor::kStartOffset),
                HeapObject::RawField(object, JSArrayBuffer::kWeakNextOffset));
  VisitPointers(
      heap, HeapObject::RawField(
                object, JSArrayBuffer::kWeakNextOffset + 2 * kPointerSize),
      HeapObject::RawField(object, JSArrayBuffer::kSizeWithInternalFields));
  return JSArrayBuffer::kSizeWithInternalFields;
}
+
+
// Visits a JSTypedArray during scavenge, skipping the single weak-next
// field (the weak list link of typed arrays). Returns the object size.
template <typename StaticVisitor>
int StaticNewSpaceVisitor<StaticVisitor>::VisitJSTypedArray(
    Map* map, HeapObject* object) {
  VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSTypedArray::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSTypedArray::kWeakNextOffset));
  VisitPointers(
      map->GetHeap(), HeapObject::RawField(
                          object, JSTypedArray::kWeakNextOffset + kPointerSize),
      HeapObject::RawField(object, JSTypedArray::kSizeWithInternalFields));
  return JSTypedArray::kSizeWithInternalFields;
}
+
+
+template <typename StaticVisitor>
+int StaticNewSpaceVisitor<StaticVisitor>::VisitJSDataView(Map* map,
+ HeapObject*
object) {
+ VisitPointers(
+ map->GetHeap(),
+ HeapObject::RawField(object,
JSDataView::BodyDescriptor::kStartOffset),
+ HeapObject::RawField(object, JSDataView::kWeakNextOffset));
+ VisitPointers(
+ map->GetHeap(),
+ HeapObject::RawField(object, JSDataView::kWeakNextOffset +
kPointerSize),
+ HeapObject::RawField(object, JSDataView::kSizeWithInternalFields));
+ return JSDataView::kSizeWithInternalFields;
+}
+
+
// Fills in the static dispatch table used during full-GC marking. Unlike
// the new-space table, callbacks return void and several types get custom
// weak-reference handling (maps, code, shared function infos, etc.).
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::Initialize() {
  table_.Register(kVisitShortcutCandidate,
                  &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor,
                                    void>::Visit);

  table_.Register(kVisitConsString,
                  &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor,
                                    void>::Visit);

  table_.Register(kVisitSlicedString,
                  &FixedBodyVisitor<StaticVisitor, SlicedString::BodyDescriptor,
                                    void>::Visit);

  table_.Register(
      kVisitSymbol,
      &FixedBodyVisitor<StaticVisitor, Symbol::BodyDescriptor, void>::Visit);

  table_.Register(kVisitFixedArray, &FixedArrayVisitor::Visit);

  // Pointer-free payloads are treated as plain data during marking.
  table_.Register(kVisitFixedDoubleArray, &DataObjectVisitor::Visit);

  table_.Register(kVisitFixedTypedArray, &DataObjectVisitor::Visit);

  table_.Register(kVisitFixedFloat64Array, &DataObjectVisitor::Visit);

  table_.Register(kVisitConstantPoolArray, &VisitConstantPoolArray);

  table_.Register(kVisitNativeContext, &VisitNativeContext);

  table_.Register(kVisitAllocationSite, &VisitAllocationSite);

  table_.Register(kVisitByteArray, &DataObjectVisitor::Visit);

  table_.Register(kVisitFreeSpace, &DataObjectVisitor::Visit);

  table_.Register(kVisitSeqOneByteString, &DataObjectVisitor::Visit);

  table_.Register(kVisitSeqTwoByteString, &DataObjectVisitor::Visit);

  table_.Register(kVisitJSWeakCollection, &VisitWeakCollection);

  table_.Register(
      kVisitOddball,
      &FixedBodyVisitor<StaticVisitor, Oddball::BodyDescriptor, void>::Visit);

  table_.Register(kVisitMap, &VisitMap);

  table_.Register(kVisitCode, &VisitCode);

  table_.Register(kVisitSharedFunctionInfo, &VisitSharedFunctionInfo);

  table_.Register(kVisitJSFunction, &VisitJSFunction);

  table_.Register(kVisitJSArrayBuffer, &VisitJSArrayBuffer);

  table_.Register(kVisitJSTypedArray, &VisitJSTypedArray);

  table_.Register(kVisitJSDataView, &VisitJSDataView);

  // Registration for kVisitJSRegExp is done by StaticVisitor.

  table_.Register(
      kVisitCell,
      &FixedBodyVisitor<StaticVisitor, Cell::BodyDescriptor, void>::Visit);

  table_.Register(kVisitPropertyCell, &VisitPropertyCell);

  // Register size-specialized variants (one entry per known instance size
  // plus a generic fallback) for data objects, JS objects and structs.
  table_.template RegisterSpecializations<DataObjectVisitor, kVisitDataObject,
                                          kVisitDataObjectGeneric>();

  table_.template RegisterSpecializations<JSObjectVisitor, kVisitJSObject,
                                          kVisitJSObjectGeneric>();

  table_.template RegisterSpecializations<StructObjectVisitor, kVisitStruct,
                                          kVisitStructGeneric>();
}
+
+
+template <typename StaticVisitor>
+void StaticMarkingVisitor<StaticVisitor>::VisitCodeEntry(
+ Heap* heap, Address entry_address) {
+ Code* code = Code::cast(Code::GetObjectFromEntryAddress(entry_address));
+ heap->mark_compact_collector()->RecordCodeEntrySlot(entry_address, code);
+ StaticVisitor::MarkObject(heap, code);
+}
+
+
// Marks a heap object embedded in code. The reloc slot is always recorded
// (so compaction can update it), but weakly embedded objects are not marked
// here, keeping the embedded reference weak.
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitEmbeddedPointer(
    Heap* heap, RelocInfo* rinfo) {
  DCHECK(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
  HeapObject* object = HeapObject::cast(rinfo->target_object());
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, object);
  // TODO(ulan): It could be better to record slots only for strongly embedded
  // objects here and record slots for weakly embedded object during clearing
  // of non-live references in mark-compact.
  if (!rinfo->host()->IsWeakObject(object)) {
    StaticVisitor::MarkObject(heap, object);
  }
}
+
+
// Marks a Cell referenced from code, unless the host code holds it weakly.
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCell(Heap* heap,
                                                    RelocInfo* rinfo) {
  DCHECK(rinfo->rmode() == RelocInfo::CELL);
  Cell* cell = rinfo->target_cell();
  // No need to record slots because the cell space is not compacted during GC.
  if (!rinfo->host()->IsWeakObject(cell)) {
    StaticVisitor::MarkObject(heap, cell);
  }
}
+
+
// Marks the Code target of a patched debug break location (JS return or
// debug break slot) and records the reloc slot for compaction.
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitDebugTarget(Heap* heap,
                                                           RelocInfo* rinfo) {
  DCHECK((RelocInfo::IsJSReturn(rinfo->rmode()) &&
          rinfo->IsPatchedReturnSequence()) ||
         (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
          rinfo->IsPatchedDebugBreakSlotSequence()));
  Code* target = Code::GetCodeFromTargetAddress(rinfo->call_address());
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}
+
+
// Marks the Code target of a call site. Inline caches that might keep a
// Context alive (or that must not survive serialization) are cleared first,
// after which the (possibly new) target is re-read, recorded and marked.
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeTarget(Heap* heap,
                                                          RelocInfo* rinfo) {
  DCHECK(RelocInfo::IsCodeTarget(rinfo->rmode()));
  Code* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
  // Monomorphic ICs are preserved when possible, but need to be flushed
  // when they might be keeping a Context alive, or when the heap is about
  // to be serialized.
  if (FLAG_cleanup_code_caches_at_gc && target->is_inline_cache_stub() &&
      (target->ic_state() == MEGAMORPHIC || target->ic_state() == GENERIC ||
       target->ic_state() == POLYMORPHIC || heap->flush_monomorphic_ics() ||
       heap->isolate()->serializer_enabled() ||
       target->ic_age() != heap->global_ic_age() ||
       target->is_invalidated_weak_stub())) {
    IC::Clear(heap->isolate(), rinfo->pc(), rinfo->host()->constant_pool());
    // Clearing may have patched the call site; re-fetch the target.
    target = Code::GetCodeFromTargetAddress(rinfo->target_address());
  }
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}
+
+
// Marks the code-age stub referenced by a code aging sequence and records
// the reloc slot for compaction.
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeAgeSequence(
    Heap* heap, RelocInfo* rinfo) {
  DCHECK(RelocInfo::IsCodeAgeSequence(rinfo->rmode()));
  Code* target = rinfo->code_age_stub();
  DCHECK(target != NULL);
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}
+
+
// Visits a native context: strong slots go through the MarkCompact body
// descriptor; the trailing weak slots are only recorded (not marked) so the
// references they hold stay weak and can be cleared later.
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitNativeContext(
    Map* map, HeapObject* object) {
  FixedBodyVisitor<StaticVisitor, Context::MarkCompactBodyDescriptor,
                   void>::Visit(map, object);

  MarkCompactCollector* collector = map->GetHeap()->mark_compact_collector();
  for (int idx = Context::FIRST_WEAK_SLOT; idx < Context::NATIVE_CONTEXT_SLOTS;
       ++idx) {
    Object** slot = Context::cast(object)->RawFieldOfElementAt(idx);
    collector->RecordSlot(slot, slot, *slot);
  }
}
+
+
// Visits a Map object. When map collection is enabled and the map can
// transition, its contents are marked specially (MarkMapContents) so that
// transition/back-pointer links remain weak; otherwise all pointer fields
// are visited strongly.
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitMap(Map* map,
                                                   HeapObject* object) {
  Heap* heap = map->GetHeap();
  Map* map_object = Map::cast(object);

  // Clears the cache of ICs related to this map.
  if (FLAG_cleanup_code_caches_at_gc) {
    map_object->ClearCodeCache(heap);
  }

  // When map collection is enabled we have to mark through map's transitions
  // and back pointers in a special way to make these links weak.
  if (FLAG_collect_maps && map_object->CanTransition()) {
    MarkMapContents(heap, map_object);
  } else {
    StaticVisitor::VisitPointers(
        heap, HeapObject::RawField(object, Map::kPointerFieldsBeginOffset),
        HeapObject::RawField(object, Map::kPointerFieldsEndOffset));
  }
}
+
+
// Visits a PropertyCell. The dependent-code array is treated weakly when
// map collection is on (marked without pushing), strongly otherwise; all
// remaining pointer fields are visited strongly.
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitPropertyCell(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  Object** slot =
      HeapObject::RawField(object, PropertyCell::kDependentCodeOffset);
  if (FLAG_collect_maps) {
    // Mark property cell dependent codes array but do not push it onto marking
    // stack, this will make references from it weak. We will clean dead
    // codes when we iterate over property cells in ClearNonLiveReferences.
    HeapObject* obj = HeapObject::cast(*slot);
    heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
    StaticVisitor::MarkObjectWithoutPush(heap, obj);
  } else {
    StaticVisitor::VisitPointer(heap, slot);
  }

  StaticVisitor::VisitPointers(
      heap,
      HeapObject::RawField(object, PropertyCell::kPointerFieldsBeginOffset),
      HeapObject::RawField(object, PropertyCell::kPointerFieldsEndOffset));
}
+
+
// Visits an AllocationSite. Mirrors VisitPropertyCell: the dependent-code
// array is weak under FLAG_collect_maps, strong otherwise; remaining
// pointer fields are visited strongly.
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitAllocationSite(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  Object** slot =
      HeapObject::RawField(object, AllocationSite::kDependentCodeOffset);
  if (FLAG_collect_maps) {
    // Mark allocation site dependent codes array but do not push it onto
    // marking stack, this will make references from it weak. We will clean
    // dead codes when we iterate over allocation sites in
    // ClearNonLiveReferences.
    HeapObject* obj = HeapObject::cast(*slot);
    heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
    StaticVisitor::MarkObjectWithoutPush(heap, obj);
  } else {
    StaticVisitor::VisitPointer(heap, slot);
  }

  StaticVisitor::VisitPointers(
      heap,
      HeapObject::RawField(object, AllocationSite::kPointerFieldsBeginOffset),
      HeapObject::RawField(object, AllocationSite::kPointerFieldsEndOffset));
}
+
+
// Visits a JSWeakCollection (WeakMap/WeakSet). The collection is enqueued
// on the heap's list for post-processing; its backing table and next-link
// are skipped here so the key/value references stay weak.
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitWeakCollection(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  JSWeakCollection* weak_collection =
      reinterpret_cast<JSWeakCollection*>(object);

  // Enqueue weak collection in linked list of encountered weak collections.
  if (weak_collection->next() == heap->undefined_value()) {
    weak_collection->set_next(heap->encountered_weak_collections());
    heap->set_encountered_weak_collections(weak_collection);
  }

  // Skip visiting the backing hash table containing the mappings and the
  // pointer to the other enqueued weak collections, both are post-processed.
  StaticVisitor::VisitPointers(
      heap, HeapObject::RawField(object, JSWeakCollection::kPropertiesOffset),
      HeapObject::RawField(object, JSWeakCollection::kTableOffset));
  STATIC_ASSERT(JSWeakCollection::kTableOffset + kPointerSize ==
                JSWeakCollection::kNextOffset);
  STATIC_ASSERT(JSWeakCollection::kNextOffset + kPointerSize ==
                JSWeakCollection::kSize);

  // Partially initialized weak collection is enqueued, but table is ignored.
  if (!weak_collection->table()->IsHashTable()) return;

  // Mark the backing hash table without pushing it on the marking stack.
  Object** slot = HeapObject::RawField(object, JSWeakCollection::kTableOffset);
  HeapObject* obj = HeapObject::cast(*slot);
  heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
  StaticVisitor::MarkObjectWithoutPush(heap, obj);
}
+
+
// Visits a Code object: ages it (when code aging is on and we are not
// serializing) and then iterates its body via the templated CodeIterateBody.
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCode(Map* map,
                                                    HeapObject* object) {
  Heap* heap = map->GetHeap();
  Code* code = Code::cast(object);
  if (FLAG_age_code && !heap->isolate()->serializer_enabled()) {
    code->MakeOlder(heap->mark_compact_collector()->marking_parity());
  }
  code->CodeIterateBody<StaticVisitor>(heap);
}
+
+
// Visits a SharedFunctionInfo. Resets IC-age-dependent state, optionally
// flushes type feedback and the optimized code map, and — when code
// flushing is enabled — either registers the SFI as a flushing candidate
// (visiting its code weakly) or visits it strongly.
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfo(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  SharedFunctionInfo* shared = SharedFunctionInfo::cast(object);
  if (shared->ic_age() != heap->global_ic_age()) {
    shared->ResetForNewContext(heap->global_ic_age());
  }
  if (FLAG_cleanup_code_caches_at_gc) {
    shared->ClearTypeFeedbackInfo();
  }
  if (FLAG_cache_optimized_code && FLAG_flush_optimized_code_cache &&
      !shared->optimized_code_map()->IsSmi()) {
    // Always flush the optimized code map if requested by flag.
    shared->ClearOptimizedCodeMap();
  }
  MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->is_code_flushing_enabled()) {
    if (FLAG_cache_optimized_code && !shared->optimized_code_map()->IsSmi()) {
      // Add the shared function info holding an optimized code map to
      // the code flusher for processing of code maps after marking.
      collector->code_flusher()->AddOptimizedCodeMap(shared);
      // Treat all references within the code map weakly by marking the
      // code map itself but not pushing it onto the marking deque.
      FixedArray* code_map = FixedArray::cast(shared->optimized_code_map());
      StaticVisitor::MarkObjectWithoutPush(heap, code_map);
    }
    if (IsFlushable(heap, shared)) {
      // This function's code looks flushable. But we have to postpone
      // the decision until we see all functions that point to the same
      // SharedFunctionInfo because some of them might be optimized.
      // That would also make the non-optimized version of the code
      // non-flushable, because it is required for bailing out from
      // optimized code.
      collector->code_flusher()->AddCandidate(shared);
      // Treat the reference to the code object weakly.
      VisitSharedFunctionInfoWeakCode(heap, object);
      return;
    }
  } else {
    if (FLAG_cache_optimized_code && !shared->optimized_code_map()->IsSmi()) {
      // Flush optimized code map on major GCs without code flushing,
      // needed because cached code doesn't contain breakpoints.
      shared->ClearOptimizedCodeMap();
    }
  }
  VisitSharedFunctionInfoStrongCode(heap, object);
}
+
+
// Visits a ConstantPoolArray: code-pointer entries go through
// VisitCodeEntry; heap-pointer entries are recorded and marked strongly
// unless the array's weak-object state classifies them as weak.
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitConstantPoolArray(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  ConstantPoolArray* array = ConstantPoolArray::cast(object);
  ConstantPoolArray::Iterator code_iter(array, ConstantPoolArray::CODE_PTR);
  while (!code_iter.is_finished()) {
    Address code_entry = reinterpret_cast<Address>(
        array->RawFieldOfElementAt(code_iter.next_index()));
    StaticVisitor::VisitCodeEntry(heap, code_entry);
  }

  ConstantPoolArray::Iterator heap_iter(array, ConstantPoolArray::HEAP_PTR);
  while (!heap_iter.is_finished()) {
    Object** slot = array->RawFieldOfElementAt(heap_iter.next_index());
    // NOTE(review): this local shadows the `object` parameter; the outer
    // value is not needed past this point, but a distinct name would be
    // clearer.
    HeapObject* object = HeapObject::cast(*slot);
    heap->mark_compact_collector()->RecordSlot(slot, slot, object);
    bool is_weak_object =
        (array->get_weak_object_state() ==
             ConstantPoolArray::WEAK_OBJECTS_IN_OPTIMIZED_CODE &&
         Code::IsWeakObjectInOptimizedCode(object)) ||
        (array->get_weak_object_state() ==
             ConstantPoolArray::WEAK_OBJECTS_IN_IC &&
         Code::IsWeakObjectInIC(object));
    if (!is_weak_object) {
      StaticVisitor::MarkObject(heap, object);
    }
  }
}
+
+
// Visits a JSFunction. With code flushing enabled, a flushable function is
// registered as a candidate and its code reference is visited weakly;
// otherwise its unoptimized code (and that of inlined functions) is kept
// alive and the function is visited strongly.
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunction(Map* map,
                                                          HeapObject* object) {
  Heap* heap = map->GetHeap();
  JSFunction* function = JSFunction::cast(object);
  MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->is_code_flushing_enabled()) {
    if (IsFlushable(heap, function)) {
      // This function's code looks flushable. But we have to postpone
      // the decision until we see all functions that point to the same
      // SharedFunctionInfo because some of them might be optimized.
      // That would also make the non-optimized version of the code
      // non-flushable, because it is required for bailing out from
      // optimized code.
      collector->code_flusher()->AddCandidate(function);
      // Visit shared function info immediately to avoid double checking
      // of its flushability later. This is just an optimization because
      // the shared function info would eventually be visited.
      SharedFunctionInfo* shared = function->shared();
      if (StaticVisitor::MarkObjectWithoutPush(heap, shared)) {
        StaticVisitor::MarkObject(heap, shared->map());
        VisitSharedFunctionInfoWeakCode(heap, shared);
      }
      // Treat the reference to the code object weakly.
      VisitJSFunctionWeakCode(heap, object);
      return;
    } else {
      // Visit all unoptimized code objects to prevent flushing them.
      StaticVisitor::MarkObject(heap, function->shared()->code());
      if (function->code()->kind() == Code::OPTIMIZED_FUNCTION) {
        MarkInlinedFunctionsCode(heap, function->code());
      }
    }
  }
  VisitJSFunctionStrongCode(heap, object);
}
+
+
+template <typename StaticVisitor>
+void StaticMarkingVisitor<StaticVisitor>::VisitJSRegExp(Map* map,
+ HeapObject*
object) {
+ int last_property_offset =
+ JSRegExp::kSize + kPointerSize * map->inobject_properties();
+ StaticVisitor::VisitPointers(
+ map->GetHeap(), HeapObject::RawField(object,
JSRegExp::kPropertiesOffset),
+ HeapObject::RawField(object, last_property_offset));
+}
+
+
// Marking-time visit of a JSArrayBuffer; like the new-space variant, the
// adjacent weak-next/weak-first-view pair is skipped to keep those links
// weak.
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSArrayBuffer(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  STATIC_ASSERT(JSArrayBuffer::kWeakFirstViewOffset ==
                JSArrayBuffer::kWeakNextOffset + kPointerSize);
  StaticVisitor::VisitPointers(
      heap,
      HeapObject::RawField(object, JSArrayBuffer::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSArrayBuffer::kWeakNextOffset));
  StaticVisitor::VisitPointers(
      heap, HeapObject::RawField(
                object, JSArrayBuffer::kWeakNextOffset + 2 * kPointerSize),
      HeapObject::RawField(object, JSArrayBuffer::kSizeWithInternalFields));
}
+
+
// Marking-time visit of a JSTypedArray, skipping the weak-next list link.
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSTypedArray(
    Map* map, HeapObject* object) {
  StaticVisitor::VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSTypedArray::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSTypedArray::kWeakNextOffset));
  StaticVisitor::VisitPointers(
      map->GetHeap(), HeapObject::RawField(
                          object, JSTypedArray::kWeakNextOffset + kPointerSize),
      HeapObject::RawField(object, JSTypedArray::kSizeWithInternalFields));
}
+
+
// Marking-time visit of a JSDataView, skipping the weak-next list link.
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSDataView(Map* map,
                                                          HeapObject* object) {
  StaticVisitor::VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSDataView::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSDataView::kWeakNextOffset));
  StaticVisitor::VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSDataView::kWeakNextOffset + kPointerSize),
      HeapObject::RawField(object, JSDataView::kSizeWithInternalFields));
}
+
+
// Marks the contents of a transitioning Map so that transition links, back
// pointers and dependent code stay weak: those sub-objects are marked
// without being pushed on the marking deque, so objects reachable only
// through them are not kept alive.
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkMapContents(Heap* heap,
                                                          Map* map) {
  // Make sure that the back pointer stored either in the map itself or
  // inside its transitions array is marked. Skip recording the back
  // pointer slot since map space is not compacted.
  StaticVisitor::MarkObject(heap, HeapObject::cast(map->GetBackPointer()));

  // Treat pointers in the transitions array as weak and also mark that
  // array to prevent visiting it later. Skip recording the transition
  // array slot, since it will be implicitly recorded when the pointer
  // fields of this map are visited.
  if (map->HasTransitionArray()) {
    TransitionArray* transitions = map->transitions();
    MarkTransitionArray(heap, transitions);
  }

  // Since descriptor arrays are potentially shared, ensure that only the
  // descriptors that belong to this map are marked. The first time a
  // non-empty descriptor array is marked, its header is also visited. The slot
  // holding the descriptor array will be implicitly recorded when the pointer
  // fields of this map are visited.
  DescriptorArray* descriptors = map->instance_descriptors();
  if (StaticVisitor::MarkObjectWithoutPush(heap, descriptors) &&
      descriptors->length() > 0) {
    StaticVisitor::VisitPointers(heap, descriptors->GetFirstElementAddress(),
                                 descriptors->GetDescriptorEndSlot(0));
  }
  int start = 0;
  int end = map->NumberOfOwnDescriptors();
  if (start < end) {
    StaticVisitor::VisitPointers(heap,
                                 descriptors->GetDescriptorStartSlot(start),
                                 descriptors->GetDescriptorEndSlot(end));
  }

  // Mark prototype dependent codes array but do not push it onto marking
  // stack, this will make references from it weak. We will clean dead
  // codes when we iterate over maps in ClearNonLiveTransitions.
  Object** slot = HeapObject::RawField(map, Map::kDependentCodeOffset);
  HeapObject* obj = HeapObject::cast(*slot);
  heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
  StaticVisitor::MarkObjectWithoutPush(heap, obj);

  // Mark the pointer fields of the Map. Since the transitions array has
  // been marked already, it is fine that one of these fields contains a
  // pointer to it.
  StaticVisitor::VisitPointers(
      heap, HeapObject::RawField(map, Map::kPointerFieldsBeginOffset),
      HeapObject::RawField(map, Map::kPointerFieldsEndOffset));
}
+
+
// Marks a TransitionArray weakly: the array itself is kept alive (without
// being pushed on the deque), prototype transitions stay weak, and only
// the transition keys are visited strongly.
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkTransitionArray(
    Heap* heap, TransitionArray* transitions) {
  // Already marked — nothing more to do.
  if (!StaticVisitor::MarkObjectWithoutPush(heap, transitions)) return;

  // Simple transitions do not have keys nor prototype transitions.
  if (transitions->IsSimpleTransition()) return;

  if (transitions->HasPrototypeTransitions()) {
    // Mark prototype transitions array but do not push it onto marking
    // stack, this will make references from it weak. We will clean dead
    // prototype transitions in ClearNonLiveTransitions.
    Object** slot = transitions->GetPrototypeTransitionsSlot();
    HeapObject* obj = HeapObject::cast(*slot);
    heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
    StaticVisitor::MarkObjectWithoutPush(heap, obj);
  }

  for (int i = 0; i < transitions->number_of_transitions(); ++i) {
    StaticVisitor::VisitPointer(heap, transitions->GetKeySlot(i));
  }
}
+
+
// Marks the unoptimized code of every function inlined into the given
// optimized code object, so bailouts from that code remain possible.
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkInlinedFunctionsCode(Heap* heap,
                                                                   Code* code) {
  // Skip in absence of inlining.
  // TODO(turbofan): Revisit once we support inlining.
  if (code->is_turbofanned()) return;
  // For optimized functions we should retain both non-optimized version
  // of its code and non-optimized version of all inlined functions.
  // This is required to support bailing out from inlined code.
  DeoptimizationInputData* data =
      DeoptimizationInputData::cast(code->deoptimization_data());
  FixedArray* literals = data->LiteralArray();
  for (int i = 0, count = data->InlinedFunctionCount()->value(); i < count;
       i++) {
    JSFunction* inlined = JSFunction::cast(literals->get(i));
    StaticVisitor::MarkObject(heap, inlined->shared()->code());
  }
}
+
+
+inline static bool IsValidNonBuiltinContext(Object* context) {
+ return context->IsContext() &&
+ !Context::cast(context)->global_object()->IsJSBuiltinsObject();
+}
+
+
+inline static bool HasSourceCode(Heap* heap, SharedFunctionInfo* info) {
+ Object* undefined = heap->undefined_value();
+ return (info->script() != undefined) &&
+ (reinterpret_cast<Script*>(info->script())->source() !=
undefined);
+}
+
+
// Decides whether this JSFunction's code may be flushed. Performs the
// function-specific checks, then defers to the SharedFunctionInfo overload.
template <typename StaticVisitor>
bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(Heap* heap,
                                                      JSFunction* function) {
  SharedFunctionInfo* shared_info = function->shared();

  // Code is either on stack, in compilation cache or referenced
  // by optimized version of function.
  MarkBit code_mark = Marking::MarkBitFrom(function->code());
  if (code_mark.Get()) {
    return false;
  }

  // The function must have a valid context and not be a builtin.
  if (!IsValidNonBuiltinContext(function->context())) {
    return false;
  }

  // We do not (yet) flush code for optimized functions.
  if (function->code() != shared_info->code()) {
    return false;
  }

  // Check age of code: when code aging is on, only old code is flushable.
  if (FLAG_age_code && !function->code()->IsOld()) {
    return false;
  }

  return IsFlushable(heap, shared_info);
}
+
+
// Decides whether the code attached to this SharedFunctionInfo may be
// flushed. Each early return names one condition that makes the code
// non-flushable.
template <typename StaticVisitor>
bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(
    Heap* heap, SharedFunctionInfo* shared_info) {
  // Code is either on stack, in compilation cache or referenced
  // by optimized version of function.
  MarkBit code_mark = Marking::MarkBitFrom(shared_info->code());
  if (code_mark.Get()) {
    return false;
  }

  // The function must be compiled and have the source code available,
  // to be able to recompile it in case we need the function again.
  if (!(shared_info->is_compiled() && HasSourceCode(heap, shared_info))) {
    return false;
  }

  // We never flush code for API functions.
  Object* function_data = shared_info->function_data();
  if (function_data->IsFunctionTemplateInfo()) {
    return false;
  }

  // Only flush code for functions.
  if (shared_info->code()->kind() != Code::FUNCTION) {
    return false;
  }

  // Function must be lazy compilable.
  if (!shared_info->allows_lazy_compilation()) {
    return false;
  }

  // We do not (yet?) flush code for generator functions, because we don't know
  // if there are still live activations (generator objects) on the heap.
  if (shared_info->is_generator()) {
    return false;
  }

  // If this is a full script wrapped in a function we do not flush the code.
  if (shared_info->is_toplevel()) {
    return false;
  }

  // If this is a function initialized with %SetCode then the one-to-one
  // relation between SharedFunctionInfo and Code is broken.
  if (shared_info->dont_flush()) {
    return false;
  }

  // Check age of code. If code aging is disabled we never flush.
  if (!FLAG_age_code || !shared_info->code()->IsOld()) {
    return false;
  }

  return true;
}
+
+
// Visits every pointer field of a SharedFunctionInfo, including its code —
// the strong (non-flushing) counterpart of VisitSharedFunctionInfoWeakCode.
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoStrongCode(
    Heap* heap, HeapObject* object) {
  Object** start_slot = HeapObject::RawField(
      object, SharedFunctionInfo::BodyDescriptor::kStartOffset);
  Object** end_slot = HeapObject::RawField(
      object, SharedFunctionInfo::BodyDescriptor::kEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}
+
+
// Visits a SharedFunctionInfo while treating its code reference weakly:
// the name field is visited, the code slot is skipped, and visiting
// resumes at the optimized code map.
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoWeakCode(
    Heap* heap, HeapObject* object) {
  Object** name_slot =
      HeapObject::RawField(object, SharedFunctionInfo::kNameOffset);
  StaticVisitor::VisitPointer(heap, name_slot);

  // Skip visiting kCodeOffset as it is treated weakly here.
  STATIC_ASSERT(SharedFunctionInfo::kNameOffset + kPointerSize ==
                SharedFunctionInfo::kCodeOffset);
  STATIC_ASSERT(SharedFunctionInfo::kCodeOffset + kPointerSize ==
                SharedFunctionInfo::kOptimizedCodeMapOffset);

  Object** start_slot =
      HeapObject::RawField(object, SharedFunctionInfo::kOptimizedCodeMapOffset);
  Object** end_slot = HeapObject::RawField(
      object, SharedFunctionInfo::BodyDescriptor::kEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}
+
+
// Visits a JSFunction's pointer fields including its code entry (strongly),
// stopping at kNonWeakFieldsEndOffset so the trailing weak fields are
// skipped.
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionStrongCode(
    Heap* heap, HeapObject* object) {
  Object** start_slot =
      HeapObject::RawField(object, JSFunction::kPropertiesOffset);
  Object** end_slot =
      HeapObject::RawField(object, JSFunction::kCodeEntryOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);

  // The code entry is a raw address, not a tagged pointer; visit it
  // specially so the slot is recorded and the code object marked.
  VisitCodeEntry(heap, object->address() + JSFunction::kCodeEntryOffset);
  STATIC_ASSERT(JSFunction::kCodeEntryOffset + kPointerSize ==
                JSFunction::kPrototypeOrInitialMapOffset);

  start_slot =
      HeapObject::RawField(object, JSFunction::kPrototypeOrInitialMapOffset);
  end_slot = HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}
+
+
// Visits a JSFunction while treating its code entry weakly: identical to
// VisitJSFunctionStrongCode except the code entry slot is skipped.
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionWeakCode(
    Heap* heap, HeapObject* object) {
  Object** start_slot =
      HeapObject::RawField(object, JSFunction::kPropertiesOffset);
  Object** end_slot =
      HeapObject::RawField(object, JSFunction::kCodeEntryOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);

  // Skip visiting kCodeEntryOffset as it is treated weakly here.
  STATIC_ASSERT(JSFunction::kCodeEntryOffset + kPointerSize ==
                JSFunction::kPrototypeOrInitialMapOffset);

  start_slot =
      HeapObject::RawField(object, JSFunction::kPrototypeOrInitialMapOffset);
  end_slot = HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}
+
+
// Iterates the body of a Code object with a dynamic ObjectVisitor: the
// fixed header pointer fields first, then every relocation entry matching
// the mode mask.
void Code::CodeIterateBody(ObjectVisitor* v) {
  int mode_mask = RelocInfo::kCodeTargetMask |
                  RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
                  RelocInfo::ModeMask(RelocInfo::CELL) |
                  RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
                  RelocInfo::ModeMask(RelocInfo::JS_RETURN) |
                  RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) |
                  RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);

  // There are two places where we iterate code bodies: here and the
  // templated CodeIterateBody (below). They should be kept in sync.
  IteratePointer(v, kRelocationInfoOffset);
  IteratePointer(v, kHandlerTableOffset);
  IteratePointer(v, kDeoptimizationDataOffset);
  IteratePointer(v, kTypeFeedbackInfoOffset);
  IterateNextCodeLink(v, kNextCodeLinkOffset);
  IteratePointer(v, kConstantPoolOffset);

  RelocIterator it(this, mode_mask);
  Isolate* isolate = this->GetIsolate();
  for (; !it.done(); it.next()) {
    it.rinfo()->Visit(isolate, v);
  }
}
+
+
// Static-dispatch twin of the ObjectVisitor overload above: iterates the
// same header fields and relocation entries through StaticVisitor calls,
// avoiding virtual dispatch on the marking hot path.
template <typename StaticVisitor>
void Code::CodeIterateBody(Heap* heap) {
  int mode_mask = RelocInfo::kCodeTargetMask |
                  RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
                  RelocInfo::ModeMask(RelocInfo::CELL) |
                  RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
                  RelocInfo::ModeMask(RelocInfo::JS_RETURN) |
                  RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) |
                  RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);

  // There are two places where we iterate code bodies: here and the non-
  // templated CodeIterateBody (above). They should be kept in sync.
  StaticVisitor::VisitPointer(
      heap,
      reinterpret_cast<Object**>(this->address() + kRelocationInfoOffset));
  StaticVisitor::VisitPointer(
      heap, reinterpret_cast<Object**>(this->address() + kHandlerTableOffset));
  StaticVisitor::VisitPointer(
      heap,
      reinterpret_cast<Object**>(this->address() + kDeoptimizationDataOffset));
  StaticVisitor::VisitPointer(
      heap,
      reinterpret_cast<Object**>(this->address() + kTypeFeedbackInfoOffset));
  StaticVisitor::VisitNextCodeLink(
      heap, reinterpret_cast<Object**>(this->address() + kNextCodeLinkOffset));
  StaticVisitor::VisitPointer(
      heap, reinterpret_cast<Object**>(this->address() + kConstantPoolOffset));


  RelocIterator it(this, mode_mask);
  for (; !it.done(); it.next()) {
    it.rinfo()->template Visit<StaticVisitor>(heap);
  }
}
+}
+} // namespace v8::internal
+
+#endif // V8_OBJECTS_VISITING_INL_H_
=======================================
--- /dev/null
+++ /branches/bleeding_edge/src/heap/objects-visiting.cc Thu Aug 7
12:21:01 2014 UTC
@@ -0,0 +1,414 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "src/v8.h"
+
+#include "src/heap/objects-visiting.h"
+#include "src/ic-inl.h"
+
+namespace v8 {
+namespace internal {
+
+
// Maps an (instance_type, instance_size) pair to the id of the
// specialized visitor to use for objects of that type. The size only
// matters for families that register per-size specializations (data
// objects, JS objects and structs; see GetVisitorIdForSize).
StaticVisitorBase::VisitorId StaticVisitorBase::GetVisitorId(
    int instance_type, int instance_size) {
  if (instance_type < FIRST_NONSTRING_TYPE) {
    // Strings dispatch on their representation tag and encoding bits.
    switch (instance_type & kStringRepresentationMask) {
      case kSeqStringTag:
        if ((instance_type & kStringEncodingMask) == kOneByteStringTag) {
          return kVisitSeqOneByteString;
        } else {
          return kVisitSeqTwoByteString;
        }

      case kConsStringTag:
        if (IsShortcutCandidate(instance_type)) {
          return kVisitShortcutCandidate;
        } else {
          return kVisitConsString;
        }

      case kSlicedStringTag:
        return kVisitSlicedString;

      case kExternalStringTag:
        // External string payload lives off-heap; visit as plain data.
        return GetVisitorIdForSize(kVisitDataObject, kVisitDataObjectGeneric,
                                   instance_size);
    }
    UNREACHABLE();
  }

  switch (instance_type) {
    case BYTE_ARRAY_TYPE:
      return kVisitByteArray;

    case FREE_SPACE_TYPE:
      return kVisitFreeSpace;

    case FIXED_ARRAY_TYPE:
      return kVisitFixedArray;

    case FIXED_DOUBLE_ARRAY_TYPE:
      return kVisitFixedDoubleArray;

    case CONSTANT_POOL_ARRAY_TYPE:
      return kVisitConstantPoolArray;

    case ODDBALL_TYPE:
      return kVisitOddball;

    case MAP_TYPE:
      return kVisitMap;

    case CODE_TYPE:
      return kVisitCode;

    case CELL_TYPE:
      return kVisitCell;

    case PROPERTY_CELL_TYPE:
      return kVisitPropertyCell;

    case JS_SET_TYPE:
      return GetVisitorIdForSize(kVisitStruct, kVisitStructGeneric,
                                 JSSet::kSize);

    case JS_MAP_TYPE:
      return GetVisitorIdForSize(kVisitStruct, kVisitStructGeneric,
                                 JSMap::kSize);

    case JS_WEAK_MAP_TYPE:
    case JS_WEAK_SET_TYPE:
      return kVisitJSWeakCollection;

    case JS_REGEXP_TYPE:
      return kVisitJSRegExp;

    case SHARED_FUNCTION_INFO_TYPE:
      return kVisitSharedFunctionInfo;

    case JS_PROXY_TYPE:
      return GetVisitorIdForSize(kVisitStruct, kVisitStructGeneric,
                                 JSProxy::kSize);

    case JS_FUNCTION_PROXY_TYPE:
      return GetVisitorIdForSize(kVisitStruct, kVisitStructGeneric,
                                 JSFunctionProxy::kSize);

    case FOREIGN_TYPE:
      return GetVisitorIdForSize(kVisitDataObject, kVisitDataObjectGeneric,
                                 Foreign::kSize);

    case SYMBOL_TYPE:
      return kVisitSymbol;

    case FILLER_TYPE:
      return kVisitDataObjectGeneric;

    case JS_ARRAY_BUFFER_TYPE:
      return kVisitJSArrayBuffer;

    case JS_TYPED_ARRAY_TYPE:
      return kVisitJSTypedArray;

    case JS_DATA_VIEW_TYPE:
      return kVisitJSDataView;

    // Plain JS objects: pick a size-specialized JSObject visitor.
    case JS_OBJECT_TYPE:
    case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
    case JS_GENERATOR_OBJECT_TYPE:
    case JS_MODULE_TYPE:
    case JS_VALUE_TYPE:
    case JS_DATE_TYPE:
    case JS_ARRAY_TYPE:
    case JS_GLOBAL_PROXY_TYPE:
    case JS_GLOBAL_OBJECT_TYPE:
    case JS_BUILTINS_OBJECT_TYPE:
    case JS_MESSAGE_OBJECT_TYPE:
    case JS_SET_ITERATOR_TYPE:
    case JS_MAP_ITERATOR_TYPE:
      return GetVisitorIdForSize(kVisitJSObject, kVisitJSObjectGeneric,
                                 instance_size);

    case JS_FUNCTION_TYPE:
      return kVisitJSFunction;

    // Pointer-free payloads: heap numbers and external arrays are
    // visited as plain data.
    case HEAP_NUMBER_TYPE:
    case MUTABLE_HEAP_NUMBER_TYPE:
#define EXTERNAL_ARRAY_CASE(Type, type, TYPE, ctype, size) \
  case EXTERNAL_##TYPE##_ARRAY_TYPE:

      TYPED_ARRAYS(EXTERNAL_ARRAY_CASE)
      return GetVisitorIdForSize(kVisitDataObject, kVisitDataObjectGeneric,
                                 instance_size);
#undef EXTERNAL_ARRAY_CASE

    case FIXED_UINT8_ARRAY_TYPE:
    case FIXED_INT8_ARRAY_TYPE:
    case FIXED_UINT16_ARRAY_TYPE:
    case FIXED_INT16_ARRAY_TYPE:
    case FIXED_UINT32_ARRAY_TYPE:
    case FIXED_INT32_ARRAY_TYPE:
    case FIXED_FLOAT32_ARRAY_TYPE:
    case FIXED_UINT8_CLAMPED_ARRAY_TYPE:
      return kVisitFixedTypedArray;

    case FIXED_FLOAT64_ARRAY_TYPE:
      return kVisitFixedFloat64Array;

#define MAKE_STRUCT_CASE(NAME, Name, name) case NAME##_TYPE:
      STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
      // AllocationSite is part of STRUCT_LIST but needs its own visitor
      // because it participates in a weak list.
      if (instance_type == ALLOCATION_SITE_TYPE) {
        return kVisitAllocationSite;
      }

      return GetVisitorIdForSize(kVisitStruct, kVisitStructGeneric,
                                 instance_size);

    default:
      UNREACHABLE();
      return kVisitorIdCount;
  }
}
+
+
+// We don't record weak slots during marking or scavenges. Instead we do it
+// once when we complete mark-compact cycle. Note that write barrier has
no
+// effect if we are already in the middle of compacting mark-sweep cycle
and we
+// have to record slots manually.
+static bool MustRecordSlots(Heap* heap) {
+ return heap->gc_state() == Heap::MARK_COMPACT &&
+ heap->mark_compact_collector()->is_compacting();
+}
+
+
+template <class T>
+struct WeakListVisitor;
+
+
// Walks the weak list headed by |list|, dropping elements the retainer
// declines to keep. Live elements are re-linked (with their next-element
// slots recorded for compaction when required) and reported via
// VisitLiveObject; dead elements get a VisitPhantomObject callback.
// Returns the new head of the list, or undefined if it became empty.
template <class T>
Object* VisitWeakList(Heap* heap, Object* list, WeakObjectRetainer* retainer) {
  Object* undefined = heap->undefined_value();
  Object* head = undefined;
  T* tail = NULL;  // Last element retained so far.
  MarkCompactCollector* collector = heap->mark_compact_collector();
  bool record_slots = MustRecordSlots(heap);
  while (list != undefined) {
    // Check whether to keep the candidate in the list.
    T* candidate = reinterpret_cast<T*>(list);
    Object* retained = retainer->RetainAs(list);
    if (retained != NULL) {
      if (head == undefined) {
        // First element in the list.
        head = retained;
      } else {
        // Subsequent elements in the list.
        DCHECK(tail != NULL);
        WeakListVisitor<T>::SetWeakNext(tail, retained);
        if (record_slots) {
          // Mid-compaction the write barrier is a no-op, so record the
          // updated next-element slot manually.
          Object** next_slot =
              HeapObject::RawField(tail, WeakListVisitor<T>::WeakNextOffset());
          collector->RecordSlot(next_slot, next_slot, retained);
        }
      }
      // Retained object is new tail.
      DCHECK(!retained->IsUndefined());
      candidate = reinterpret_cast<T*>(retained);
      tail = candidate;


      // tail is a live object, visit it.
      WeakListVisitor<T>::VisitLiveObject(heap, tail, retainer);
    } else {
      WeakListVisitor<T>::VisitPhantomObject(heap, candidate);
    }

    // Move to next element in the list.
    list = WeakListVisitor<T>::WeakNext(candidate);
  }

  // Terminate the list if there is one or more elements.
  if (tail != NULL) {
    WeakListVisitor<T>::SetWeakNext(tail, undefined);
  }
  return head;
}
+
+
+template <class T>
+static void ClearWeakList(Heap* heap, Object* list) {
+ Object* undefined = heap->undefined_value();
+ while (list != undefined) {
+ T* candidate = reinterpret_cast<T*>(list);
+ list = WeakListVisitor<T>::WeakNext(candidate);
+ WeakListVisitor<T>::SetWeakNext(candidate, undefined);
+ }
+}
+
+
// WeakListVisitor policy for lists of JSFunctions (used for the
// per-context optimized-functions list), linked through the
// next-function-link field.
template <>
struct WeakListVisitor<JSFunction> {
  static void SetWeakNext(JSFunction* function, Object* next) {
    function->set_next_function_link(next);
  }

  static Object* WeakNext(JSFunction* function) {
    return function->next_function_link();
  }

  static int WeakNextOffset() { return JSFunction::kNextFunctionLinkOffset; }

  // Nothing extra to do for live or dead functions.
  static void VisitLiveObject(Heap*, JSFunction*, WeakObjectRetainer*) {}

  static void VisitPhantomObject(Heap*, JSFunction*) {}
};
+
+
// WeakListVisitor policy for lists of Code objects (used for the
// per-context optimized/deoptimized code lists), linked through the
// next-code-link field.
template <>
struct WeakListVisitor<Code> {
  static void SetWeakNext(Code* code, Object* next) {
    code->set_next_code_link(next);
  }

  static Object* WeakNext(Code* code) { return code->next_code_link(); }

  static int WeakNextOffset() { return Code::kNextCodeLinkOffset; }

  // Nothing extra to do for live or dead code objects.
  static void VisitLiveObject(Heap*, Code*, WeakObjectRetainer*) {}

  static void VisitPhantomObject(Heap*, Code*) {}
};
+
+
// WeakListVisitor policy for the list of contexts, linked through the
// NEXT_CONTEXT_LINK slot. Each live context additionally owns three
// weak lists of its own that are pruned here.
template <>
struct WeakListVisitor<Context> {
  static void SetWeakNext(Context* context, Object* next) {
    context->set(Context::NEXT_CONTEXT_LINK, next, UPDATE_WRITE_BARRIER);
  }

  static Object* WeakNext(Context* context) {
    return context->get(Context::NEXT_CONTEXT_LINK);
  }

  // Byte offset of the NEXT_CONTEXT_LINK slot within the context.
  static int WeakNextOffset() {
    return FixedArray::SizeFor(Context::NEXT_CONTEXT_LINK);
  }

  static void VisitLiveObject(Heap* heap, Context* context,
                              WeakObjectRetainer* retainer) {
    // Process the three weak lists linked off the context.
    DoWeakList<JSFunction>(heap, context, retainer,
                           Context::OPTIMIZED_FUNCTIONS_LIST);
    DoWeakList<Code>(heap, context, retainer, Context::OPTIMIZED_CODE_LIST);
    DoWeakList<Code>(heap, context, retainer, Context::DEOPTIMIZED_CODE_LIST);
  }

  // Prunes the weak list stored in context slot |index| and writes the
  // (possibly new) head back into the slot.
  template <class T>
  static void DoWeakList(Heap* heap, Context* context,
                         WeakObjectRetainer* retainer, int index) {
    // Visit the weak list, removing dead intermediate elements.
    Object* list_head = VisitWeakList<T>(heap, context->get(index), retainer);

    // Update the list head.
    context->set(index, list_head, UPDATE_WRITE_BARRIER);

    if (MustRecordSlots(heap)) {
      // Record the updated slot if necessary.
      Object** head_slot =
          HeapObject::RawField(context, FixedArray::SizeFor(index));
      heap->mark_compact_collector()->RecordSlot(head_slot, head_slot,
                                                 list_head);
    }
  }

  // A dead context is not visited further; just unlink the elements of
  // its weak lists so they don't keep stale links.
  static void VisitPhantomObject(Heap* heap, Context* context) {
    ClearWeakList<JSFunction>(heap,
                              context->get(Context::OPTIMIZED_FUNCTIONS_LIST));
    ClearWeakList<Code>(heap, context->get(Context::OPTIMIZED_CODE_LIST));
    ClearWeakList<Code>(heap, context->get(Context::DEOPTIMIZED_CODE_LIST));
  }
};
+
+
// WeakListVisitor policy for the per-array-buffer list of views, linked
// through their weak_next field.
template <>
struct WeakListVisitor<JSArrayBufferView> {
  static void SetWeakNext(JSArrayBufferView* obj, Object* next) {
    obj->set_weak_next(next);
  }

  static Object* WeakNext(JSArrayBufferView* obj) { return obj->weak_next(); }

  static int WeakNextOffset() { return JSArrayBufferView::kWeakNextOffset; }

  // Nothing extra to do for live or dead views.
  static void VisitLiveObject(Heap*, JSArrayBufferView*, WeakObjectRetainer*) {}

  static void VisitPhantomObject(Heap*, JSArrayBufferView*) {}
};
+
+
// WeakListVisitor policy for the list of array buffers, linked through
// their weak_next field. Live buffers prune their own weak list of
// views; dead buffers release their external allocation.
template <>
struct WeakListVisitor<JSArrayBuffer> {
  static void SetWeakNext(JSArrayBuffer* obj, Object* next) {
    obj->set_weak_next(next);
  }

  static Object* WeakNext(JSArrayBuffer* obj) { return obj->weak_next(); }

  static int WeakNextOffset() { return JSArrayBuffer::kWeakNextOffset; }

  static void VisitLiveObject(Heap* heap, JSArrayBuffer* array_buffer,
                              WeakObjectRetainer* retainer) {
    // Prune the buffer's weak list of views and store the new head.
    Object* typed_array_obj = VisitWeakList<JSArrayBufferView>(
        heap, array_buffer->weak_first_view(), retainer);
    array_buffer->set_weak_first_view(typed_array_obj);
    if (typed_array_obj != heap->undefined_value() && MustRecordSlots(heap)) {
      // Record the updated first-view slot for compaction.
      Object** slot = HeapObject::RawField(array_buffer,
                                           JSArrayBuffer::kWeakFirstViewOffset);
      heap->mark_compact_collector()->RecordSlot(slot, slot, typed_array_obj);
    }
  }

  static void VisitPhantomObject(Heap* heap, JSArrayBuffer* phantom) {
    // Free the externally allocated memory of the dead buffer.
    Runtime::FreeArrayBuffer(heap->isolate(), phantom);
  }
};
+
+
// WeakListVisitor policy for the list of allocation sites, linked
// through their weak_next field.
template <>
struct WeakListVisitor<AllocationSite> {
  static void SetWeakNext(AllocationSite* obj, Object* next) {
    obj->set_weak_next(next);
  }

  static Object* WeakNext(AllocationSite* obj) { return obj->weak_next(); }

  static int WeakNextOffset() { return AllocationSite::kWeakNextOffset; }

  // Nothing extra to do for live or dead allocation sites.
  static void VisitLiveObject(Heap*, AllocationSite*, WeakObjectRetainer*) {}

  static void VisitPhantomObject(Heap*, AllocationSite*) {}
};
+
+
// Explicit instantiations for every weak-list element type used by the
// heap. The template definition lives in this translation unit, so the
// instantiations other translation units link against must be emitted
// here.
template Object* VisitWeakList<Code>(Heap* heap, Object* list,
                                     WeakObjectRetainer* retainer);


template Object* VisitWeakList<JSFunction>(Heap* heap, Object* list,
                                           WeakObjectRetainer* retainer);


template Object* VisitWeakList<Context>(Heap* heap, Object* list,
                                        WeakObjectRetainer* retainer);


template Object* VisitWeakList<JSArrayBuffer>(Heap* heap, Object* list,
                                              WeakObjectRetainer* retainer);


template Object* VisitWeakList<AllocationSite>(Heap* heap, Object* list,
                                               WeakObjectRetainer* retainer);
+}
+} // namespace v8::internal
=======================================
--- /dev/null
+++ /branches/bleeding_edge/src/heap/objects-visiting.h Thu Aug 7 12:21:01
2014 UTC
@@ -0,0 +1,452 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef V8_OBJECTS_VISITING_H_
+#define V8_OBJECTS_VISITING_H_
+
+#include "src/allocation.h"
+
+// This file provides base classes and auxiliary methods for defining
+// static object visitors used during GC.
+// Visiting HeapObject body with a normal ObjectVisitor requires performing
+// two switches on object's instance type to determine object size and
layout
+// and one or more virtual method calls on visitor itself.
+// Static visitor is different: it provides a dispatch table which contains
+// pointers to specialized visit functions. Each map has the visitor_id
+// field which contains an index of specialized visitor to use.
+
+namespace v8 {
+namespace internal {
+
+
// Base class for all static visitors: defines the visitor-id space and
// the mapping from instance type/size to visitor id.
class StaticVisitorBase : public AllStatic {
 public:
  // One entry per specialized visitor; the resulting enum values index
  // the dispatch table (see VisitorDispatchTable below).
#define VISITOR_ID_LIST(V) \
  V(SeqOneByteString)      \
  V(SeqTwoByteString)      \
  V(ShortcutCandidate)     \
  V(ByteArray)             \
  V(FreeSpace)             \
  V(FixedArray)            \
  V(FixedDoubleArray)      \
  V(FixedTypedArray)       \
  V(FixedFloat64Array)     \
  V(ConstantPoolArray)     \
  V(NativeContext)         \
  V(AllocationSite)        \
  V(DataObject2)           \
  V(DataObject3)           \
  V(DataObject4)           \
  V(DataObject5)           \
  V(DataObject6)           \
  V(DataObject7)           \
  V(DataObject8)           \
  V(DataObject9)           \
  V(DataObjectGeneric)     \
  V(JSObject2)             \
  V(JSObject3)             \
  V(JSObject4)             \
  V(JSObject5)             \
  V(JSObject6)             \
  V(JSObject7)             \
  V(JSObject8)             \
  V(JSObject9)             \
  V(JSObjectGeneric)       \
  V(Struct2)               \
  V(Struct3)               \
  V(Struct4)               \
  V(Struct5)               \
  V(Struct6)               \
  V(Struct7)               \
  V(Struct8)               \
  V(Struct9)               \
  V(StructGeneric)         \
  V(ConsString)            \
  V(SlicedString)          \
  V(Symbol)                \
  V(Oddball)               \
  V(Code)                  \
  V(Map)                   \
  V(Cell)                  \
  V(PropertyCell)          \
  V(SharedFunctionInfo)    \
  V(JSFunction)            \
  V(JSWeakCollection)      \
  V(JSArrayBuffer)         \
  V(JSTypedArray)          \
  V(JSDataView)            \
  V(JSRegExp)

  // For data objects, JS objects and structs along with generic visitor which
  // can visit object of any size we provide visitors specialized by
  // object size in words.
  // Ids of specialized visitors are declared in a linear order (without
  // holes) starting from the id of visitor specialized for 2 words objects
  // (base visitor id) and ending with the id of generic visitor.
  // Method GetVisitorIdForSize depends on this ordering to calculate visitor
  // id of specialized visitor from given instance size, base visitor id and
  // generic visitor's id.
  enum VisitorId {
#define VISITOR_ID_ENUM_DECL(id) kVisit##id,
    VISITOR_ID_LIST(VISITOR_ID_ENUM_DECL)
#undef VISITOR_ID_ENUM_DECL
    kVisitorIdCount,
    kVisitDataObject = kVisitDataObject2,
    kVisitJSObject = kVisitJSObject2,
    kVisitStruct = kVisitStruct2,
    kMinObjectSizeInWords = 2
  };

  // Visitor ID should fit in one byte.
  STATIC_ASSERT(kVisitorIdCount <= 256);

  // Determine which specialized visitor should be used for given instance type
  // and instance size.
  static VisitorId GetVisitorId(int instance_type, int instance_size);

  // Convenience overload reading instance type and size from the map.
  static VisitorId GetVisitorId(Map* map) {
    return GetVisitorId(map->instance_type(), map->instance_size());
  }

  // For visitors that allow specialization by size calculate VisitorId based
  // on size, base visitor id and generic visitor id.
  static VisitorId GetVisitorIdForSize(VisitorId base, VisitorId generic,
                                       int object_size) {
    DCHECK((base == kVisitDataObject) || (base == kVisitStruct) ||
           (base == kVisitJSObject));
    DCHECK(IsAligned(object_size, kPointerSize));
    DCHECK(kMinObjectSizeInWords * kPointerSize <= object_size);
    DCHECK(object_size <= Page::kMaxRegularHeapObjectSize);

    // Sizes beyond the largest specialization clamp to the generic id.
    const VisitorId specialization = static_cast<VisitorId>(
        base + (object_size >> kPointerSizeLog2) - kMinObjectSizeInWords);

    return Min(specialization, generic);
  }
};
+
+
+template <typename Callback>
+class VisitorDispatchTable {
+ public:
+ void CopyFrom(VisitorDispatchTable* other) {
+ // We are not using memcpy to guarantee that during update
+ // every element of callbacks_ array will remain correct
+ // pointer (memcpy might be implemented as a byte copying loop).
+ for (int i = 0; i < StaticVisitorBase::kVisitorIdCount; i++) {
+ base::NoBarrier_Store(&callbacks_[i], other->callbacks_[i]);
+ }
+ }
+
+ inline Callback GetVisitorById(StaticVisitorBase::VisitorId id) {
+ return reinterpret_cast<Callback>(callbacks_[id]);
+ }
+
+ inline Callback GetVisitor(Map* map) {
+ return reinterpret_cast<Callback>(callbacks_[map->visitor_id()]);
+ }
+
+ void Register(StaticVisitorBase::VisitorId id, Callback callback) {
+ DCHECK(id < StaticVisitorBase::kVisitorIdCount); // id is unsigned.
+ callbacks_[id] = reinterpret_cast<base::AtomicWord>(callback);
+ }
+
+ template <typename Visitor, StaticVisitorBase::VisitorId base,
+ StaticVisitorBase::VisitorId generic, int object_size_in_words>
+ void RegisterSpecialization() {
+ static const int size = object_size_in_words * kPointerSize;
+ Register(StaticVisitorBase::GetVisitorIdForSize(base, generic, size),
+ &Visitor::template VisitSpecialized<size>);
+ }
+
+
+ template <typename Visitor, StaticVisitorBase::VisitorId base,
+ StaticVisitorBase::VisitorId generic>
+ void RegisterSpecializations() {
+ STATIC_ASSERT((generic - base +
StaticVisitorBase::kMinObjectSizeInWords) ==
+ 10);
+ RegisterSpecialization<Visitor, base, generic, 2>();
+ RegisterSpecialization<Visitor, base, generic, 3>();
+ RegisterSpecialization<Visitor, base, generic, 4>();
+ RegisterSpecialization<Visitor, base, generic, 5>();
+ RegisterSpecialization<Visitor, base, generic, 6>();
+ RegisterSpecialization<Visitor, base, generic, 7>();
+ RegisterSpecialization<Visitor, base, generic, 8>();
+ RegisterSpecialization<Visitor, base, generic, 9>();
+ Register(generic, &Visitor::Visit);
+ }
+
+ private:
+ base::AtomicWord callbacks_[StaticVisitorBase::kVisitorIdCount];
+};
+
+
// Shared helper for body visitors: visits all pointer slots of |object|
// in the half-open byte-offset range [start_offset, end_offset).
template <typename StaticVisitor>
class BodyVisitorBase : public AllStatic {
 public:
  INLINE(static void IteratePointers(Heap* heap, HeapObject* object,
                                     int start_offset, int end_offset)) {
    Object** start_slot =
        reinterpret_cast<Object**>(object->address() + start_offset);
    Object** end_slot =
        reinterpret_cast<Object**>(object->address() + end_offset);
    StaticVisitor::VisitPointers(heap, start_slot, end_slot);
  }
};
+
+
// Visitor for objects whose size depends on the instance (computed via
// BodyDescriptor::SizeOf). Visits all pointer fields and returns the
// object size cast to ReturnType.
template <typename StaticVisitor, typename BodyDescriptor, typename ReturnType>
class FlexibleBodyVisitor : public BodyVisitorBase<StaticVisitor> {
 public:
  INLINE(static ReturnType Visit(Map* map, HeapObject* object)) {
    int object_size = BodyDescriptor::SizeOf(map, object);
    BodyVisitorBase<StaticVisitor>::IteratePointers(
        map->GetHeap(), object, BodyDescriptor::kStartOffset, object_size);
    return static_cast<ReturnType>(object_size);
  }

  // Compile-time-size variant installed by RegisterSpecialization.
  template <int object_size>
  static inline ReturnType VisitSpecialized(Map* map, HeapObject* object) {
    DCHECK(BodyDescriptor::SizeOf(map, object) == object_size);
    BodyVisitorBase<StaticVisitor>::IteratePointers(
        map->GetHeap(), object, BodyDescriptor::kStartOffset, object_size);
    return static_cast<ReturnType>(object_size);
  }
};
+
+
// Visitor for objects whose layout and size are fixed at compile time
// by the BodyDescriptor. Returns BodyDescriptor::kSize as ReturnType.
template <typename StaticVisitor, typename BodyDescriptor, typename ReturnType>
class FixedBodyVisitor : public BodyVisitorBase<StaticVisitor> {
 public:
  INLINE(static ReturnType Visit(Map* map, HeapObject* object)) {
    BodyVisitorBase<StaticVisitor>::IteratePointers(
        map->GetHeap(), object, BodyDescriptor::kStartOffset,
        BodyDescriptor::kEndOffset);
    return static_cast<ReturnType>(BodyDescriptor::kSize);
  }
};
+
+
// Base class for visitors used for a linear new space iteration.
// IterateBody returns size of visited object.
// Certain types of objects (i.e. Code objects) are not handled
// by dispatch table of this visitor because they cannot appear
// in the new space.
//
// This class is intended to be used in the following way:
//
//   class SomeVisitor : public StaticNewSpaceVisitor<SomeVisitor> {
//     ...
//   }
//
// This is an example of Curiously recurring template pattern
// (see http://en.wikipedia.org/wiki/Curiously_recurring_template_pattern).
// We use CRTP to guarantee aggressive compile time optimizations (i.e.
// inlining and specialization of StaticVisitor::VisitPointers methods).
template <typename StaticVisitor>
class StaticNewSpaceVisitor : public StaticVisitorBase {
 public:
  // Fills the dispatch table; must run before the first IterateBody.
  static void Initialize();

  // Dispatches on the map's visitor id and returns the object size.
  INLINE(static int IterateBody(Map* map, HeapObject* obj)) {
    return table_.GetVisitor(map)(map, obj);
  }

  // Visits every pointer slot in the half-open range [start, end).
  INLINE(static void VisitPointers(Heap* heap, Object** start, Object** end)) {
    for (Object** p = start; p < end; p++) StaticVisitor::VisitPointer(heap, p);
  }

 private:
  INLINE(static int VisitJSFunction(Map* map, HeapObject* object)) {
    Heap* heap = map->GetHeap();
    // Visit the fields before the code entry ...
    VisitPointers(heap,
                  HeapObject::RawField(object, JSFunction::kPropertiesOffset),
                  HeapObject::RawField(object, JSFunction::kCodeEntryOffset));

    // Don't visit code entry. We are using this visitor only during scavenges.

    // ... and the non-weak fields after it.
    VisitPointers(
        heap, HeapObject::RawField(object,
                                   JSFunction::kCodeEntryOffset + kPointerSize),
        HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset));
    return JSFunction::kSize;
  }

  // The visitors below only compute the object size; these objects have
  // no tagged pointers that need visiting during a scavenge.
  INLINE(static int VisitByteArray(Map* map, HeapObject* object)) {
    return reinterpret_cast<ByteArray*>(object)->ByteArraySize();
  }

  INLINE(static int VisitFixedDoubleArray(Map* map, HeapObject* object)) {
    int length = reinterpret_cast<FixedDoubleArray*>(object)->length();
    return FixedDoubleArray::SizeFor(length);
  }

  INLINE(static int VisitFixedTypedArray(Map* map, HeapObject* object)) {
    return reinterpret_cast<FixedTypedArrayBase*>(object)->size();
  }

  INLINE(static int VisitJSObject(Map* map, HeapObject* object)) {
    return JSObjectVisitor::Visit(map, object);
  }

  INLINE(static int VisitSeqOneByteString(Map* map, HeapObject* object)) {
    return SeqOneByteString::cast(object)
        ->SeqOneByteStringSize(map->instance_type());
  }

  INLINE(static int VisitSeqTwoByteString(Map* map, HeapObject* object)) {
    return SeqTwoByteString::cast(object)
        ->SeqTwoByteStringSize(map->instance_type());
  }

  INLINE(static int VisitFreeSpace(Map* map, HeapObject* object)) {
    return FreeSpace::cast(object)->Size();
  }

  // Out-of-line; implemented in objects-visiting-inl.h.
  INLINE(static int VisitJSArrayBuffer(Map* map, HeapObject* object));
  INLINE(static int VisitJSTypedArray(Map* map, HeapObject* object));
  INLINE(static int VisitJSDataView(Map* map, HeapObject* object));

  // Visitor for pointer-free objects: only reports their size.
  class DataObjectVisitor {
   public:
    template <int object_size>
    static inline int VisitSpecialized(Map* map, HeapObject* object) {
      return object_size;
    }

    INLINE(static int Visit(Map* map, HeapObject* object)) {
      return map->instance_size();
    }
  };

  typedef FlexibleBodyVisitor<StaticVisitor, StructBodyDescriptor, int>
      StructVisitor;

  typedef FlexibleBodyVisitor<StaticVisitor, JSObject::BodyDescriptor, int>
      JSObjectVisitor;

  // Signature of all entries in the dispatch table.
  typedef int (*Callback)(Map* map, HeapObject* object);

  static VisitorDispatchTable<Callback> table_;
};
+
+
// Storage for the per-instantiation static dispatch table.
template <typename StaticVisitor>
VisitorDispatchTable<typename StaticNewSpaceVisitor<StaticVisitor>::Callback>
    StaticNewSpaceVisitor<StaticVisitor>::table_;
+
+
// Base class for visitors used to transitively mark the entire heap.
// IterateBody returns nothing.
// Certain types of objects might not be handled by this base class and
// no visitor function is registered by the generic initialization. A
// specialized visitor function needs to be provided by the inheriting
// class itself for those cases.
//
// This class is intended to be used in the following way:
//
//   class SomeVisitor : public StaticMarkingVisitor<SomeVisitor> {
//     ...
//   }
//
// This is an example of Curiously recurring template pattern.
template <typename StaticVisitor>
class StaticMarkingVisitor : public StaticVisitorBase {
 public:
  // Fills the dispatch table; must run before the first IterateBody.
  static void Initialize();

  // Dispatches to the visitor registered for the map's visitor id.
  INLINE(static void IterateBody(Map* map, HeapObject* obj)) {
    table_.GetVisitor(map)(map, obj);
  }

  // Visitors for special fields and relocation entries; out-of-line,
  // implemented in objects-visiting-inl.h.
  INLINE(static void VisitPropertyCell(Map* map, HeapObject* object));
  INLINE(static void VisitCodeEntry(Heap* heap, Address entry_address));
  INLINE(static void VisitEmbeddedPointer(Heap* heap, RelocInfo* rinfo));
  INLINE(static void VisitCell(Heap* heap, RelocInfo* rinfo));
  INLINE(static void VisitDebugTarget(Heap* heap, RelocInfo* rinfo));
  INLINE(static void VisitCodeTarget(Heap* heap, RelocInfo* rinfo));
  INLINE(static void VisitCodeAgeSequence(Heap* heap, RelocInfo* rinfo));
  // External references and runtime entries are not heap objects, so
  // there is nothing to mark.
  INLINE(static void VisitExternalReference(RelocInfo* rinfo)) {}
  INLINE(static void VisitRuntimeEntry(RelocInfo* rinfo)) {}
  // Skip the weak next code link in a code object.
  INLINE(static void VisitNextCodeLink(Heap* heap, Object** slot)) {}

  // TODO(mstarzinger): This should be made protected once refactoring is done.
  // Mark non-optimize code for functions inlined into the given optimized
  // code. This will prevent it from being flushed.
  static void MarkInlinedFunctionsCode(Heap* heap, Code* code);

 protected:
  // Per-type marking visitors; out-of-line, see objects-visiting-inl.h.
  INLINE(static void VisitMap(Map* map, HeapObject* object));
  INLINE(static void VisitCode(Map* map, HeapObject* object));
  INLINE(static void VisitSharedFunctionInfo(Map* map, HeapObject* object));
  INLINE(static void VisitConstantPoolArray(Map* map, HeapObject* object));
  INLINE(static void VisitAllocationSite(Map* map, HeapObject* object));
  INLINE(static void VisitWeakCollection(Map* map, HeapObject* object));
  INLINE(static void VisitJSFunction(Map* map, HeapObject* object));
  INLINE(static void VisitJSRegExp(Map* map, HeapObject* object));
  INLINE(static void VisitJSArrayBuffer(Map* map, HeapObject* object));
  INLINE(static void VisitJSTypedArray(Map* map, HeapObject* object));
  INLINE(static void VisitJSDataView(Map* map, HeapObject* object));
  INLINE(static void VisitNativeContext(Map* map, HeapObject* object));

  // Mark pointers in a Map and its TransitionArray together, possibly
  // treating transitions or back pointers weak.
  static void MarkMapContents(Heap* heap, Map* map);
  static void MarkTransitionArray(Heap* heap, TransitionArray* transitions);

  // Code flushing support.
  INLINE(static bool IsFlushable(Heap* heap, JSFunction* function));
  INLINE(static bool IsFlushable(Heap* heap, SharedFunctionInfo* shared_info));

  // Helpers used by code flushing support that visit pointer fields and treat
  // references to code objects either strongly or weakly.
  static void VisitSharedFunctionInfoStrongCode(Heap* heap, HeapObject* object);
  static void VisitSharedFunctionInfoWeakCode(Heap* heap, HeapObject* object);
  static void VisitJSFunctionStrongCode(Heap* heap, HeapObject* object);
  static void VisitJSFunctionWeakCode(Heap* heap, HeapObject* object);

  // Pointer-free objects need no marking work at all.
  class DataObjectVisitor {
   public:
    template <int size>
    static inline void VisitSpecialized(Map* map, HeapObject* object) {}

    INLINE(static void Visit(Map* map, HeapObject* object)) {}
  };

  typedef FlexibleBodyVisitor<StaticVisitor, FixedArray::BodyDescriptor, void>
      FixedArrayVisitor;

  typedef FlexibleBodyVisitor<StaticVisitor, JSObject::BodyDescriptor, void>
      JSObjectVisitor;

  typedef FlexibleBodyVisitor<StaticVisitor, StructBodyDescriptor, void>
      StructObjectVisitor;

  // Signature of all entries in the dispatch table.
  typedef void (*Callback)(Map* map, HeapObject* object);

  static VisitorDispatchTable<Callback> table_;
};
+
+
// Storage for the per-instantiation static dispatch table.
template <typename StaticVisitor>
VisitorDispatchTable<typename StaticMarkingVisitor<StaticVisitor>::Callback>
    StaticMarkingVisitor<StaticVisitor>::table_;
+
+
+class WeakObjectRetainer;
+
+
+// A weak list is single linked list where each element has a weak pointer
to
+// the next element. Given the head of the list, this function removes dead
+// elements from the list and if requested records slots for next-element
+// pointers. The template parameter T is a WeakListVisitor that defines
how to
+// access the next-element pointers.
+template <class T>
+Object* VisitWeakList(Heap* heap, Object* list, WeakObjectRetainer*
retainer);
+}
+} // namespace v8::internal
+
+#endif // V8_OBJECTS_VISITING_H_
=======================================
--- /branches/bleeding_edge/src/objects-visiting-inl.h Mon Aug 4 11:34:54
2014 UTC
+++ /dev/null
@@ -1,971 +0,0 @@
-// Copyright 2012 the V8 project authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef V8_OBJECTS_VISITING_INL_H_
-#define V8_OBJECTS_VISITING_INL_H_
-
-
-namespace v8 {
-namespace internal {
-
-template<typename StaticVisitor>
-void StaticNewSpaceVisitor<StaticVisitor>::Initialize() {
- table_.Register(kVisitShortcutCandidate,
- &FixedBodyVisitor<StaticVisitor,
- ConsString::BodyDescriptor,
- int>::Visit);
-
- table_.Register(kVisitConsString,
- &FixedBodyVisitor<StaticVisitor,
- ConsString::BodyDescriptor,
- int>::Visit);
-
- table_.Register(kVisitSlicedString,
- &FixedBodyVisitor<StaticVisitor,
- SlicedString::BodyDescriptor,
- int>::Visit);
-
- table_.Register(kVisitSymbol,
- &FixedBodyVisitor<StaticVisitor,
- Symbol::BodyDescriptor,
- int>::Visit);
-
- table_.Register(kVisitFixedArray,
- &FlexibleBodyVisitor<StaticVisitor,
- FixedArray::BodyDescriptor,
- int>::Visit);
-
- table_.Register(kVisitFixedDoubleArray, &VisitFixedDoubleArray);
- table_.Register(kVisitFixedTypedArray, &VisitFixedTypedArray);
- table_.Register(kVisitFixedFloat64Array, &VisitFixedTypedArray);
-
- table_.Register(kVisitNativeContext,
- &FixedBodyVisitor<StaticVisitor,
- Context::ScavengeBodyDescriptor,
- int>::Visit);
-
- table_.Register(kVisitByteArray, &VisitByteArray);
-
- table_.Register(kVisitSharedFunctionInfo,
- &FixedBodyVisitor<StaticVisitor,
- SharedFunctionInfo::BodyDescriptor,
- int>::Visit);
-
- table_.Register(kVisitSeqOneByteString, &VisitSeqOneByteString);
-
- table_.Register(kVisitSeqTwoByteString, &VisitSeqTwoByteString);
-
- table_.Register(kVisitJSFunction, &VisitJSFunction);
-
- table_.Register(kVisitJSArrayBuffer, &VisitJSArrayBuffer);
-
- table_.Register(kVisitJSTypedArray, &VisitJSTypedArray);
-
- table_.Register(kVisitJSDataView, &VisitJSDataView);
-
- table_.Register(kVisitFreeSpace, &VisitFreeSpace);
-
- table_.Register(kVisitJSWeakCollection, &JSObjectVisitor::Visit);
-
- table_.Register(kVisitJSRegExp, &JSObjectVisitor::Visit);
-
- table_.template RegisterSpecializations<DataObjectVisitor,
- kVisitDataObject,
- kVisitDataObjectGeneric>();
-
- table_.template RegisterSpecializations<JSObjectVisitor,
- kVisitJSObject,
- kVisitJSObjectGeneric>();
- table_.template RegisterSpecializations<StructVisitor,
- kVisitStruct,
- kVisitStructGeneric>();
-}
-
-
-template<typename StaticVisitor>
-int StaticNewSpaceVisitor<StaticVisitor>::VisitJSArrayBuffer(
- Map* map, HeapObject* object) {
- Heap* heap = map->GetHeap();
-
- STATIC_ASSERT(
- JSArrayBuffer::kWeakFirstViewOffset ==
- JSArrayBuffer::kWeakNextOffset + kPointerSize);
- VisitPointers(
- heap,
- HeapObject::RawField(object,
JSArrayBuffer::BodyDescriptor::kStartOffset),
- HeapObject::RawField(object, JSArrayBuffer::kWeakNextOffset));
- VisitPointers(
- heap,
- HeapObject::RawField(object,
- JSArrayBuffer::kWeakNextOffset + 2 * kPointerSize),
- HeapObject::RawField(object,
JSArrayBuffer::kSizeWithInternalFields));
- return JSArrayBuffer::kSizeWithInternalFields;
-}
-
-
-template<typename StaticVisitor>
-int StaticNewSpaceVisitor<StaticVisitor>::VisitJSTypedArray(
- Map* map, HeapObject* object) {
- VisitPointers(
- map->GetHeap(),
- HeapObject::RawField(object,
JSTypedArray::BodyDescriptor::kStartOffset),
- HeapObject::RawField(object, JSTypedArray::kWeakNextOffset));
- VisitPointers(
- map->GetHeap(),
- HeapObject::RawField(object,
- JSTypedArray::kWeakNextOffset + kPointerSize),
- HeapObject::RawField(object, JSTypedArray::kSizeWithInternalFields));
- return JSTypedArray::kSizeWithInternalFields;
-}
-
-
-template<typename StaticVisitor>
-int StaticNewSpaceVisitor<StaticVisitor>::VisitJSDataView(
- Map* map, HeapObject* object) {
- VisitPointers(
- map->GetHeap(),
- HeapObject::RawField(object,
JSDataView::BodyDescriptor::kStartOffset),
- HeapObject::RawField(object, JSDataView::kWeakNextOffset));
- VisitPointers(
- map->GetHeap(),
- HeapObject::RawField(object,
- JSDataView::kWeakNextOffset + kPointerSize),
- HeapObject::RawField(object, JSDataView::kSizeWithInternalFields));
- return JSDataView::kSizeWithInternalFields;
-}
-
-
-template<typename StaticVisitor>
-void StaticMarkingVisitor<StaticVisitor>::Initialize() {
- table_.Register(kVisitShortcutCandidate,
- &FixedBodyVisitor<StaticVisitor,
- ConsString::BodyDescriptor,
- void>::Visit);
-
- table_.Register(kVisitConsString,
- &FixedBodyVisitor<StaticVisitor,
- ConsString::BodyDescriptor,
- void>::Visit);
-
- table_.Register(kVisitSlicedString,
- &FixedBodyVisitor<StaticVisitor,
- SlicedString::BodyDescriptor,
- void>::Visit);
-
- table_.Register(kVisitSymbol,
- &FixedBodyVisitor<StaticVisitor,
- Symbol::BodyDescriptor,
- void>::Visit);
-
- table_.Register(kVisitFixedArray, &FixedArrayVisitor::Visit);
-
- table_.Register(kVisitFixedDoubleArray, &DataObjectVisitor::Visit);
-
- table_.Register(kVisitFixedTypedArray, &DataObjectVisitor::Visit);
-
- table_.Register(kVisitFixedFloat64Array, &DataObjectVisitor::Visit);
-
- table_.Register(kVisitConstantPoolArray, &VisitConstantPoolArray);
-
- table_.Register(kVisitNativeContext, &VisitNativeContext);
-
- table_.Register(kVisitAllocationSite, &VisitAllocationSite);
-
- table_.Register(kVisitByteArray, &DataObjectVisitor::Visit);
-
- table_.Register(kVisitFreeSpace, &DataObjectVisitor::Visit);
-
- table_.Register(kVisitSeqOneByteString, &DataObjectVisitor::Visit);
-
- table_.Register(kVisitSeqTwoByteString, &DataObjectVisitor::Visit);
-
- table_.Register(kVisitJSWeakCollection, &VisitWeakCollection);
-
- table_.Register(kVisitOddball,
- &FixedBodyVisitor<StaticVisitor,
- Oddball::BodyDescriptor,
- void>::Visit);
-
- table_.Register(kVisitMap, &VisitMap);
-
- table_.Register(kVisitCode, &VisitCode);
-
- table_.Register(kVisitSharedFunctionInfo, &VisitSharedFunctionInfo);
-
- table_.Register(kVisitJSFunction, &VisitJSFunction);
-
- table_.Register(kVisitJSArrayBuffer, &VisitJSArrayBuffer);
-
- table_.Register(kVisitJSTypedArray, &VisitJSTypedArray);
-
- table_.Register(kVisitJSDataView, &VisitJSDataView);
-
- // Registration for kVisitJSRegExp is done by StaticVisitor.
-
- table_.Register(kVisitCell,
- &FixedBodyVisitor<StaticVisitor,
- Cell::BodyDescriptor,
- void>::Visit);
-
- table_.Register(kVisitPropertyCell, &VisitPropertyCell);
-
- table_.template RegisterSpecializations<DataObjectVisitor,
- kVisitDataObject,
- kVisitDataObjectGeneric>();
-
- table_.template RegisterSpecializations<JSObjectVisitor,
- kVisitJSObject,
- kVisitJSObjectGeneric>();
-
- table_.template RegisterSpecializations<StructObjectVisitor,
- kVisitStruct,
- kVisitStructGeneric>();
-}
-
-
-template<typename StaticVisitor>
-void StaticMarkingVisitor<StaticVisitor>::VisitCodeEntry(
- Heap* heap, Address entry_address) {
- Code* code = Code::cast(Code::GetObjectFromEntryAddress(entry_address));
- heap->mark_compact_collector()->RecordCodeEntrySlot(entry_address, code);
- StaticVisitor::MarkObject(heap, code);
-}
-
-
-template<typename StaticVisitor>
-void StaticMarkingVisitor<StaticVisitor>::VisitEmbeddedPointer(
- Heap* heap, RelocInfo* rinfo) {
- DCHECK(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
- HeapObject* object = HeapObject::cast(rinfo->target_object());
- heap->mark_compact_collector()->RecordRelocSlot(rinfo, object);
- // TODO(ulan): It could be better to record slots only for strongly
embedded
- // objects here and record slots for weakly embedded object during
clearing
- // of non-live references in mark-compact.
- if (!rinfo->host()->IsWeakObject(object)) {
- StaticVisitor::MarkObject(heap, object);
- }
-}
-
-
-template<typename StaticVisitor>
-void StaticMarkingVisitor<StaticVisitor>::VisitCell(
- Heap* heap, RelocInfo* rinfo) {
- DCHECK(rinfo->rmode() == RelocInfo::CELL);
- Cell* cell = rinfo->target_cell();
- // No need to record slots because the cell space is not compacted
during GC.
- if (!rinfo->host()->IsWeakObject(cell)) {
- StaticVisitor::MarkObject(heap, cell);
- }
-}
-
-
-template<typename StaticVisitor>
-void StaticMarkingVisitor<StaticVisitor>::VisitDebugTarget(
- Heap* heap, RelocInfo* rinfo) {
- DCHECK((RelocInfo::IsJSReturn(rinfo->rmode()) &&
- rinfo->IsPatchedReturnSequence()) ||
- (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
- rinfo->IsPatchedDebugBreakSlotSequence()));
- Code* target = Code::GetCodeFromTargetAddress(rinfo->call_address());
- heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
- StaticVisitor::MarkObject(heap, target);
-}
-
-
-template<typename StaticVisitor>
-void StaticMarkingVisitor<StaticVisitor>::VisitCodeTarget(
- Heap* heap, RelocInfo* rinfo) {
- DCHECK(RelocInfo::IsCodeTarget(rinfo->rmode()));
- Code* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
- // Monomorphic ICs are preserved when possible, but need to be flushed
- // when they might be keeping a Context alive, or when the heap is about
- // to be serialized.
- if (FLAG_cleanup_code_caches_at_gc && target->is_inline_cache_stub()
- && (target->ic_state() == MEGAMORPHIC || target->ic_state() ==
GENERIC ||
- target->ic_state() == POLYMORPHIC ||
heap->flush_monomorphic_ics() ||
- heap->isolate()->serializer_enabled() ||
- target->ic_age() != heap->global_ic_age() ||
- target->is_invalidated_weak_stub())) {
- IC::Clear(heap->isolate(), rinfo->pc(),
rinfo->host()->constant_pool());
- target = Code::GetCodeFromTargetAddress(rinfo->target_address());
- }
- heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
- StaticVisitor::MarkObject(heap, target);
-}
-
-
-template<typename StaticVisitor>
-void StaticMarkingVisitor<StaticVisitor>::VisitCodeAgeSequence(
- Heap* heap, RelocInfo* rinfo) {
- DCHECK(RelocInfo::IsCodeAgeSequence(rinfo->rmode()));
- Code* target = rinfo->code_age_stub();
- DCHECK(target != NULL);
- heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
- StaticVisitor::MarkObject(heap, target);
-}
-
-
-template<typename StaticVisitor>
-void StaticMarkingVisitor<StaticVisitor>::VisitNativeContext(
- Map* map, HeapObject* object) {
- FixedBodyVisitor<StaticVisitor,
- Context::MarkCompactBodyDescriptor,
- void>::Visit(map, object);
-
- MarkCompactCollector* collector =
map->GetHeap()->mark_compact_collector();
- for (int idx = Context::FIRST_WEAK_SLOT;
- idx < Context::NATIVE_CONTEXT_SLOTS;
- ++idx) {
- Object** slot = Context::cast(object)->RawFieldOfElementAt(idx);
- collector->RecordSlot(slot, slot, *slot);
- }
-}
-
-
-template<typename StaticVisitor>
-void StaticMarkingVisitor<StaticVisitor>::VisitMap(
- Map* map, HeapObject* object) {
- Heap* heap = map->GetHeap();
- Map* map_object = Map::cast(object);
-
- // Clears the cache of ICs related to this map.
- if (FLAG_cleanup_code_caches_at_gc) {
- map_object->ClearCodeCache(heap);
- }
-
- // When map collection is enabled we have to mark through map's
transitions
- // and back pointers in a special way to make these links weak.
- if (FLAG_collect_maps && map_object->CanTransition()) {
- MarkMapContents(heap, map_object);
- } else {
- StaticVisitor::VisitPointers(heap,
- HeapObject::RawField(object, Map::kPointerFieldsBeginOffset),
- HeapObject::RawField(object, Map::kPointerFieldsEndOffset));
- }
-}
-
-
-template<typename StaticVisitor>
-void StaticMarkingVisitor<StaticVisitor>::VisitPropertyCell(
- Map* map, HeapObject* object) {
- Heap* heap = map->GetHeap();
-
- Object** slot =
- HeapObject::RawField(object, PropertyCell::kDependentCodeOffset);
- if (FLAG_collect_maps) {
- // Mark property cell dependent codes array but do not push it onto
marking
- // stack, this will make references from it weak. We will clean dead
- // codes when we iterate over property cells in ClearNonLiveReferences.
- HeapObject* obj = HeapObject::cast(*slot);
- heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
- StaticVisitor::MarkObjectWithoutPush(heap, obj);
- } else {
- StaticVisitor::VisitPointer(heap, slot);
- }
-
- StaticVisitor::VisitPointers(heap,
- HeapObject::RawField(object,
PropertyCell::kPointerFieldsBeginOffset),
- HeapObject::RawField(object, PropertyCell::kPointerFieldsEndOffset));
-}
-
-
-template<typename StaticVisitor>
-void StaticMarkingVisitor<StaticVisitor>::VisitAllocationSite(
- Map* map, HeapObject* object) {
- Heap* heap = map->GetHeap();
-
- Object** slot =
- HeapObject::RawField(object, AllocationSite::kDependentCodeOffset);
- if (FLAG_collect_maps) {
- // Mark allocation site dependent codes array but do not push it onto
- // marking stack, this will make references from it weak. We will clean
- // dead codes when we iterate over allocation sites in
- // ClearNonLiveReferences.
- HeapObject* obj = HeapObject::cast(*slot);
- heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
- StaticVisitor::MarkObjectWithoutPush(heap, obj);
- } else {
- StaticVisitor::VisitPointer(heap, slot);
- }
-
- StaticVisitor::VisitPointers(heap,
- HeapObject::RawField(object,
AllocationSite::kPointerFieldsBeginOffset),
- HeapObject::RawField(object,
AllocationSite::kPointerFieldsEndOffset));
-}
-
-
-template<typename StaticVisitor>
-void StaticMarkingVisitor<StaticVisitor>::VisitWeakCollection(
- Map* map, HeapObject* object) {
- Heap* heap = map->GetHeap();
- JSWeakCollection* weak_collection =
- reinterpret_cast<JSWeakCollection*>(object);
-
- // Enqueue weak collection in linked list of encountered weak
collections.
- if (weak_collection->next() == heap->undefined_value()) {
- weak_collection->set_next(heap->encountered_weak_collections());
- heap->set_encountered_weak_collections(weak_collection);
- }
-
- // Skip visiting the backing hash table containing the mappings and the
- // pointer to the other enqueued weak collections, both are
post-processed.
- StaticVisitor::VisitPointers(heap,
- HeapObject::RawField(object, JSWeakCollection::kPropertiesOffset),
- HeapObject::RawField(object, JSWeakCollection::kTableOffset));
- STATIC_ASSERT(JSWeakCollection::kTableOffset + kPointerSize ==
- JSWeakCollection::kNextOffset);
- STATIC_ASSERT(JSWeakCollection::kNextOffset + kPointerSize ==
- JSWeakCollection::kSize);
-
- // Partially initialized weak collection is enqueued, but table is
ignored.
- if (!weak_collection->table()->IsHashTable()) return;
-
- // Mark the backing hash table without pushing it on the marking stack.
- Object** slot = HeapObject::RawField(object,
JSWeakCollection::kTableOffset);
- HeapObject* obj = HeapObject::cast(*slot);
- heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
- StaticVisitor::MarkObjectWithoutPush(heap, obj);
-}
-
-
-template<typename StaticVisitor>
-void StaticMarkingVisitor<StaticVisitor>::VisitCode(
- Map* map, HeapObject* object) {
- Heap* heap = map->GetHeap();
- Code* code = Code::cast(object);
- if (FLAG_age_code && !heap->isolate()->serializer_enabled()) {
- code->MakeOlder(heap->mark_compact_collector()->marking_parity());
- }
- code->CodeIterateBody<StaticVisitor>(heap);
-}
-
-
-template<typename StaticVisitor>
-void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfo(
- Map* map, HeapObject* object) {
- Heap* heap = map->GetHeap();
- SharedFunctionInfo* shared = SharedFunctionInfo::cast(object);
- if (shared->ic_age() != heap->global_ic_age()) {
- shared->ResetForNewContext(heap->global_ic_age());
- }
- if (FLAG_cleanup_code_caches_at_gc) {
- shared->ClearTypeFeedbackInfo();
- }
- if (FLAG_cache_optimized_code &&
- FLAG_flush_optimized_code_cache &&
- !shared->optimized_code_map()->IsSmi()) {
- // Always flush the optimized code map if requested by flag.
- shared->ClearOptimizedCodeMap();
- }
- MarkCompactCollector* collector = heap->mark_compact_collector();
- if (collector->is_code_flushing_enabled()) {
- if (FLAG_cache_optimized_code
&& !shared->optimized_code_map()->IsSmi()) {
- // Add the shared function info holding an optimized code map to
- // the code flusher for processing of code maps after marking.
- collector->code_flusher()->AddOptimizedCodeMap(shared);
- // Treat all references within the code map weakly by marking the
- // code map itself but not pushing it onto the marking deque.
- FixedArray* code_map =
FixedArray::cast(shared->optimized_code_map());
- StaticVisitor::MarkObjectWithoutPush(heap, code_map);
- }
- if (IsFlushable(heap, shared)) {
- // This function's code looks flushable. But we have to postpone
- // the decision until we see all functions that point to the same
- // SharedFunctionInfo because some of them might be optimized.
- // That would also make the non-optimized version of the code
- // non-flushable, because it is required for bailing out from
- // optimized code.
- collector->code_flusher()->AddCandidate(shared);
- // Treat the reference to the code object weakly.
- VisitSharedFunctionInfoWeakCode(heap, object);
- return;
- }
- } else {
- if (FLAG_cache_optimized_code
&& !shared->optimized_code_map()->IsSmi()) {
- // Flush optimized code map on major GCs without code flushing,
- // needed because cached code doesn't contain breakpoints.
- shared->ClearOptimizedCodeMap();
- }
- }
- VisitSharedFunctionInfoStrongCode(heap, object);
-}
-
-
-template<typename StaticVisitor>
-void StaticMarkingVisitor<StaticVisitor>::VisitConstantPoolArray(
- Map* map, HeapObject* object) {
- Heap* heap = map->GetHeap();
- ConstantPoolArray* array = ConstantPoolArray::cast(object);
- ConstantPoolArray::Iterator code_iter(array,
ConstantPoolArray::CODE_PTR);
- while (!code_iter.is_finished()) {
- Address code_entry = reinterpret_cast<Address>(
- array->RawFieldOfElementAt(code_iter.next_index()));
- StaticVisitor::VisitCodeEntry(heap, code_entry);
- }
-
- ConstantPoolArray::Iterator heap_iter(array,
ConstantPoolArray::HEAP_PTR);
- while (!heap_iter.is_finished()) {
- Object** slot = array->RawFieldOfElementAt(heap_iter.next_index());
- HeapObject* object = HeapObject::cast(*slot);
- heap->mark_compact_collector()->RecordSlot(slot, slot, object);
- bool is_weak_object =
- (array->get_weak_object_state() ==
- ConstantPoolArray::WEAK_OBJECTS_IN_OPTIMIZED_CODE &&
- Code::IsWeakObjectInOptimizedCode(object)) ||
- (array->get_weak_object_state() ==
- ConstantPoolArray::WEAK_OBJECTS_IN_IC &&
- Code::IsWeakObjectInIC(object));
- if (!is_weak_object) {
- StaticVisitor::MarkObject(heap, object);
- }
- }
-}
-
-
-template<typename StaticVisitor>
-void StaticMarkingVisitor<StaticVisitor>::VisitJSFunction(
- Map* map, HeapObject* object) {
- Heap* heap = map->GetHeap();
- JSFunction* function = JSFunction::cast(object);
- MarkCompactCollector* collector = heap->mark_compact_collector();
- if (collector->is_code_flushing_enabled()) {
- if (IsFlushable(heap, function)) {
- // This function's code looks flushable. But we have to postpone
- // the decision until we see all functions that point to the same
- // SharedFunctionInfo because some of them might be optimized.
- // That would also make the non-optimized version of the code
- // non-flushable, because it is required for bailing out from
- // optimized code.
- collector->code_flusher()->AddCandidate(function);
- // Visit shared function info immediately to avoid double checking
- // of its flushability later. This is just an optimization because
- // the shared function info would eventually be visited.
- SharedFunctionInfo* shared = function->shared();
- if (StaticVisitor::MarkObjectWithoutPush(heap, shared)) {
- StaticVisitor::MarkObject(heap, shared->map());
- VisitSharedFunctionInfoWeakCode(heap, shared);
- }
- // Treat the reference to the code object weakly.
- VisitJSFunctionWeakCode(heap, object);
- return;
- } else {
- // Visit all unoptimized code objects to prevent flushing them.
- StaticVisitor::MarkObject(heap, function->shared()->code());
- if (function->code()->kind() == Code::OPTIMIZED_FUNCTION) {
- MarkInlinedFunctionsCode(heap, function->code());
- }
- }
- }
- VisitJSFunctionStrongCode(heap, object);
-}
-
-
-template<typename StaticVisitor>
-void StaticMarkingVisitor<StaticVisitor>::VisitJSRegExp(
- Map* map, HeapObject* object) {
- int last_property_offset =
- JSRegExp::kSize + kPointerSize * map->inobject_properties();
- StaticVisitor::VisitPointers(map->GetHeap(),
- HeapObject::RawField(object, JSRegExp::kPropertiesOffset),
- HeapObject::RawField(object, last_property_offset));
-}
-
-
-template<typename StaticVisitor>
-void StaticMarkingVisitor<StaticVisitor>::VisitJSArrayBuffer(
- Map* map, HeapObject* object) {
- Heap* heap = map->GetHeap();
-
- STATIC_ASSERT(
- JSArrayBuffer::kWeakFirstViewOffset ==
- JSArrayBuffer::kWeakNextOffset + kPointerSize);
- StaticVisitor::VisitPointers(
- heap,
- HeapObject::RawField(object,
JSArrayBuffer::BodyDescriptor::kStartOffset),
- HeapObject::RawField(object, JSArrayBuffer::kWeakNextOffset));
- StaticVisitor::VisitPointers(
- heap,
- HeapObject::RawField(object,
- JSArrayBuffer::kWeakNextOffset + 2 * kPointerSize),
- HeapObject::RawField(object,
JSArrayBuffer::kSizeWithInternalFields));
-}
-
-
-template<typename StaticVisitor>
-void StaticMarkingVisitor<StaticVisitor>::VisitJSTypedArray(
- Map* map, HeapObject* object) {
- StaticVisitor::VisitPointers(
- map->GetHeap(),
- HeapObject::RawField(object,
JSTypedArray::BodyDescriptor::kStartOffset),
- HeapObject::RawField(object, JSTypedArray::kWeakNextOffset));
- StaticVisitor::VisitPointers(
- map->GetHeap(),
- HeapObject::RawField(object,
- JSTypedArray::kWeakNextOffset + kPointerSize),
- HeapObject::RawField(object, JSTypedArray::kSizeWithInternalFields));
-}
-
-
-template<typename StaticVisitor>
-void StaticMarkingVisitor<StaticVisitor>::VisitJSDataView(
- Map* map, HeapObject* object) {
- StaticVisitor::VisitPointers(
- map->GetHeap(),
- HeapObject::RawField(object,
JSDataView::BodyDescriptor::kStartOffset),
- HeapObject::RawField(object, JSDataView::kWeakNextOffset));
- StaticVisitor::VisitPointers(
- map->GetHeap(),
- HeapObject::RawField(object,
- JSDataView::kWeakNextOffset + kPointerSize),
- HeapObject::RawField(object, JSDataView::kSizeWithInternalFields));
-}
-
-
-template<typename StaticVisitor>
-void StaticMarkingVisitor<StaticVisitor>::MarkMapContents(
- Heap* heap, Map* map) {
- // Make sure that the back pointer stored either in the map itself or
- // inside its transitions array is marked. Skip recording the back
- // pointer slot since map space is not compacted.
- StaticVisitor::MarkObject(heap, HeapObject::cast(map->GetBackPointer()));
-
- // Treat pointers in the transitions array as weak and also mark that
- // array to prevent visiting it later. Skip recording the transition
- // array slot, since it will be implicitly recorded when the pointer
- // fields of this map are visited.
- if (map->HasTransitionArray()) {
- TransitionArray* transitions = map->transitions();
- MarkTransitionArray(heap, transitions);
- }
-
- // Since descriptor arrays are potentially shared, ensure that only the
- // descriptors that belong to this map are marked. The first time a
- // non-empty descriptor array is marked, its header is also visited. The
slot
- // holding the descriptor array will be implicitly recorded when the
pointer
- // fields of this map are visited.
- DescriptorArray* descriptors = map->instance_descriptors();
- if (StaticVisitor::MarkObjectWithoutPush(heap, descriptors) &&
- descriptors->length() > 0) {
- StaticVisitor::VisitPointers(heap,
- descriptors->GetFirstElementAddress(),
- descriptors->GetDescriptorEndSlot(0));
- }
- int start = 0;
- int end = map->NumberOfOwnDescriptors();
- if (start < end) {
- StaticVisitor::VisitPointers(heap,
- descriptors->GetDescriptorStartSlot(start),
- descriptors->GetDescriptorEndSlot(end));
- }
-
- // Mark prototype dependent codes array but do not push it onto marking
- // stack, this will make references from it weak. We will clean dead
- // codes when we iterate over maps in ClearNonLiveTransitions.
- Object** slot = HeapObject::RawField(map, Map::kDependentCodeOffset);
- HeapObject* obj = HeapObject::cast(*slot);
- heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
- StaticVisitor::MarkObjectWithoutPush(heap, obj);
-
- // Mark the pointer fields of the Map. Since the transitions array has
- // been marked already, it is fine that one of these fields contains a
- // pointer to it.
- StaticVisitor::VisitPointers(heap,
- HeapObject::RawField(map, Map::kPointerFieldsBeginOffset),
- HeapObject::RawField(map, Map::kPointerFieldsEndOffset));
-}
-
-
-template<typename StaticVisitor>
-void StaticMarkingVisitor<StaticVisitor>::MarkTransitionArray(
- Heap* heap, TransitionArray* transitions) {
- if (!StaticVisitor::MarkObjectWithoutPush(heap, transitions)) return;
-
- // Simple transitions do not have keys nor prototype transitions.
- if (transitions->IsSimpleTransition()) return;
-
- if (transitions->HasPrototypeTransitions()) {
- // Mark prototype transitions array but do not push it onto marking
- // stack, this will make references from it weak. We will clean dead
- // prototype transitions in ClearNonLiveTransitions.
- Object** slot = transitions->GetPrototypeTransitionsSlot();
- HeapObject* obj = HeapObject::cast(*slot);
- heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
- StaticVisitor::MarkObjectWithoutPush(heap, obj);
- }
-
- for (int i = 0; i < transitions->number_of_transitions(); ++i) {
- StaticVisitor::VisitPointer(heap, transitions->GetKeySlot(i));
- }
-}
-
-
-template<typename StaticVisitor>
-void StaticMarkingVisitor<StaticVisitor>::MarkInlinedFunctionsCode(
- Heap* heap, Code* code) {
- // Skip in absence of inlining.
- // TODO(turbofan): Revisit once we support inlining.
- if (code->is_turbofanned()) return;
- // For optimized functions we should retain both non-optimized version
- // of its code and non-optimized version of all inlined functions.
- // This is required to support bailing out from inlined code.
- DeoptimizationInputData* data =
- DeoptimizationInputData::cast(code->deoptimization_data());
- FixedArray* literals = data->LiteralArray();
- for (int i = 0, count = data->InlinedFunctionCount()->value();
- i < count;
- i++) {
- JSFunction* inlined = JSFunction::cast(literals->get(i));
- StaticVisitor::MarkObject(heap, inlined->shared()->code());
- }
-}
-
-
-inline static bool IsValidNonBuiltinContext(Object* context) {
- return context->IsContext() &&
- !Context::cast(context)->global_object()->IsJSBuiltinsObject();
-}
-
-
-inline static bool HasSourceCode(Heap* heap, SharedFunctionInfo* info) {
- Object* undefined = heap->undefined_value();
- return (info->script() != undefined) &&
- (reinterpret_cast<Script*>(info->script())->source() != undefined);
-}
-
-
-template<typename StaticVisitor>
-bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(
- Heap* heap, JSFunction* function) {
- SharedFunctionInfo* shared_info = function->shared();
-
- // Code is either on stack, in compilation cache or referenced
- // by optimized version of function.
- MarkBit code_mark = Marking::MarkBitFrom(function->code());
- if (code_mark.Get()) {
- return false;
- }
-
- // The function must have a valid context and not be a builtin.
- if (!IsValidNonBuiltinContext(function->context())) {
- return false;
- }
-
- // We do not (yet) flush code for optimized functions.
- if (function->code() != shared_info->code()) {
- return false;
- }
-
- // Check age of optimized code.
- if (FLAG_age_code && !function->code()->IsOld()) {
- return false;
- }
-
- return IsFlushable(heap, shared_info);
-}
-
-
-template<typename StaticVisitor>
-bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(
- Heap* heap, SharedFunctionInfo* shared_info) {
- // Code is either on stack, in compilation cache or referenced
- // by optimized version of function.
- MarkBit code_mark = Marking::MarkBitFrom(shared_info->code());
- if (code_mark.Get()) {
- return false;
- }
-
- // The function must be compiled and have the source code available,
- // to be able to recompile it in case we need the function again.
- if (!(shared_info->is_compiled() && HasSourceCode(heap, shared_info))) {
- return false;
- }
-
- // We never flush code for API functions.
- Object* function_data = shared_info->function_data();
- if (function_data->IsFunctionTemplateInfo()) {
- return false;
- }
-
- // Only flush code for functions.
- if (shared_info->code()->kind() != Code::FUNCTION) {
- return false;
- }
-
- // Function must be lazy compilable.
- if (!shared_info->allows_lazy_compilation()) {
- return false;
- }
-
- // We do not (yet?) flush code for generator functions, because we don't
know
- // if there are still live activations (generator objects) on the heap.
- if (shared_info->is_generator()) {
- return false;
- }
-
- // If this is a full script wrapped in a function we do not flush the
code.
- if (shared_info->is_toplevel()) {
- return false;
- }
-
- // If this is a function initialized with %SetCode then the one-to-one
- // relation between SharedFunctionInfo and Code is broken.
- if (shared_info->dont_flush()) {
- return false;
- }
-
- // Check age of code. If code aging is disabled we never flush.
- if (!FLAG_age_code || !shared_info->code()->IsOld()) {
- return false;
- }
-
- return true;
-}
-
-
-template<typename StaticVisitor>
-void
StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoStrongCode(
- Heap* heap, HeapObject* object) {
- Object** start_slot =
- HeapObject::RawField(object,
-
SharedFunctionInfo::BodyDescriptor::kStartOffset);
- Object** end_slot =
- HeapObject::RawField(object,
- SharedFunctionInfo::BodyDescriptor::kEndOffset);
- StaticVisitor::VisitPointers(heap, start_slot, end_slot);
-}
-
-
-template<typename StaticVisitor>
-void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoWeakCode(
- Heap* heap, HeapObject* object) {
- Object** name_slot =
- HeapObject::RawField(object, SharedFunctionInfo::kNameOffset);
- StaticVisitor::VisitPointer(heap, name_slot);
-
- // Skip visiting kCodeOffset as it is treated weakly here.
- STATIC_ASSERT(SharedFunctionInfo::kNameOffset + kPointerSize ==
- SharedFunctionInfo::kCodeOffset);
- STATIC_ASSERT(SharedFunctionInfo::kCodeOffset + kPointerSize ==
- SharedFunctionInfo::kOptimizedCodeMapOffset);
-
- Object** start_slot =
- HeapObject::RawField(object,
- SharedFunctionInfo::kOptimizedCodeMapOffset);
- Object** end_slot =
- HeapObject::RawField(object,
- SharedFunctionInfo::BodyDescriptor::kEndOffset);
- StaticVisitor::VisitPointers(heap, start_slot, end_slot);
-}
-
-
-template<typename StaticVisitor>
-void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionStrongCode(
- Heap* heap, HeapObject* object) {
- Object** start_slot =
- HeapObject::RawField(object, JSFunction::kPropertiesOffset);
- Object** end_slot =
- HeapObject::RawField(object, JSFunction::kCodeEntryOffset);
- StaticVisitor::VisitPointers(heap, start_slot, end_slot);
-
- VisitCodeEntry(heap, object->address() + JSFunction::kCodeEntryOffset);
- STATIC_ASSERT(JSFunction::kCodeEntryOffset + kPointerSize ==
- JSFunction::kPrototypeOrInitialMapOffset);
-
- start_slot =
- HeapObject::RawField(object,
JSFunction::kPrototypeOrInitialMapOffset);
- end_slot =
- HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset);
- StaticVisitor::VisitPointers(heap, start_slot, end_slot);
-}
-
-
-template<typename StaticVisitor>
-void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionWeakCode(
- Heap* heap, HeapObject* object) {
- Object** start_slot =
- HeapObject::RawField(object, JSFunction::kPropertiesOffset);
- Object** end_slot =
- HeapObject::RawField(object, JSFunction::kCodeEntryOffset);
- StaticVisitor::VisitPointers(heap, start_slot, end_slot);
-
- // Skip visiting kCodeEntryOffset as it is treated weakly here.
- STATIC_ASSERT(JSFunction::kCodeEntryOffset + kPointerSize ==
- JSFunction::kPrototypeOrInitialMapOffset);
-
- start_slot =
- HeapObject::RawField(object,
JSFunction::kPrototypeOrInitialMapOffset);
- end_slot =
- HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset);
- StaticVisitor::VisitPointers(heap, start_slot, end_slot);
-}
-
-
-void Code::CodeIterateBody(ObjectVisitor* v) {
- int mode_mask = RelocInfo::kCodeTargetMask |
- RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
- RelocInfo::ModeMask(RelocInfo::CELL) |
- RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
- RelocInfo::ModeMask(RelocInfo::JS_RETURN) |
- RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) |
- RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);
-
- // There are two places where we iterate code bodies: here and the
- // templated CodeIterateBody (below). They should be kept in sync.
- IteratePointer(v, kRelocationInfoOffset);
- IteratePointer(v, kHandlerTableOffset);
- IteratePointer(v, kDeoptimizationDataOffset);
- IteratePointer(v, kTypeFeedbackInfoOffset);
- IterateNextCodeLink(v, kNextCodeLinkOffset);
- IteratePointer(v, kConstantPoolOffset);
-
- RelocIterator it(this, mode_mask);
- Isolate* isolate = this->GetIsolate();
- for (; !it.done(); it.next()) {
- it.rinfo()->Visit(isolate, v);
- }
-}
-
-
-template<typename StaticVisitor>
-void Code::CodeIterateBody(Heap* heap) {
- int mode_mask = RelocInfo::kCodeTargetMask |
- RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
- RelocInfo::ModeMask(RelocInfo::CELL) |
- RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
- RelocInfo::ModeMask(RelocInfo::JS_RETURN) |
- RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) |
- RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);
-
- // There are two places where we iterate code bodies: here and the non-
- // templated CodeIterateBody (above). They should be kept in sync.
- StaticVisitor::VisitPointer(
- heap,
- reinterpret_cast<Object**>(this->address() + kRelocationInfoOffset));
- StaticVisitor::VisitPointer(
- heap,
- reinterpret_cast<Object**>(this->address() + kHandlerTableOffset));
- StaticVisitor::VisitPointer(
- heap,
- reinterpret_cast<Object**>(this->address() +
kDeoptimizationDataOffset));
- StaticVisitor::VisitPointer(
- heap,
- reinterpret_cast<Object**>(this->address() +
kTypeFeedbackInfoOffset));
- StaticVisitor::VisitNextCodeLink(
- heap,
- reinterpret_cast<Object**>(this->address() + kNextCodeLinkOffset));
- StaticVisitor::VisitPointer(
- heap,
- reinterpret_cast<Object**>(this->address() + kConstantPoolOffset));
-
-
- RelocIterator it(this, mode_mask);
- for (; !it.done(); it.next()) {
- it.rinfo()->template Visit<StaticVisitor>(heap);
- }
-}
-
-
-} } // namespace v8::internal
-
-#endif // V8_OBJECTS_VISITING_INL_H_
=======================================
--- /branches/bleeding_edge/src/objects-visiting.cc Mon Aug 4 11:34:54
2014 UTC
+++ /dev/null
@@ -1,455 +0,0 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "src/v8.h"
-
-#include "src/ic-inl.h"
-#include "src/objects-visiting.h"
-
-namespace v8 {
-namespace internal {
-
-
-StaticVisitorBase::VisitorId StaticVisitorBase::GetVisitorId(
- int instance_type,
- int instance_size) {
- if (instance_type < FIRST_NONSTRING_TYPE) {
- switch (instance_type & kStringRepresentationMask) {
- case kSeqStringTag:
- if ((instance_type & kStringEncodingMask) == kOneByteStringTag) {
- return kVisitSeqOneByteString;
- } else {
- return kVisitSeqTwoByteString;
- }
-
- case kConsStringTag:
- if (IsShortcutCandidate(instance_type)) {
- return kVisitShortcutCandidate;
- } else {
- return kVisitConsString;
- }
-
- case kSlicedStringTag:
- return kVisitSlicedString;
-
- case kExternalStringTag:
- return GetVisitorIdForSize(kVisitDataObject,
- kVisitDataObjectGeneric,
- instance_size);
- }
- UNREACHABLE();
- }
-
- switch (instance_type) {
- case BYTE_ARRAY_TYPE:
- return kVisitByteArray;
-
- case FREE_SPACE_TYPE:
- return kVisitFreeSpace;
-
- case FIXED_ARRAY_TYPE:
- return kVisitFixedArray;
-
- case FIXED_DOUBLE_ARRAY_TYPE:
- return kVisitFixedDoubleArray;
-
- case CONSTANT_POOL_ARRAY_TYPE:
- return kVisitConstantPoolArray;
-
- case ODDBALL_TYPE:
- return kVisitOddball;
-
- case MAP_TYPE:
- return kVisitMap;
-
- case CODE_TYPE:
- return kVisitCode;
-
- case CELL_TYPE:
- return kVisitCell;
-
- case PROPERTY_CELL_TYPE:
- return kVisitPropertyCell;
-
- case JS_SET_TYPE:
- return GetVisitorIdForSize(kVisitStruct,
- kVisitStructGeneric,
- JSSet::kSize);
-
- case JS_MAP_TYPE:
- return GetVisitorIdForSize(kVisitStruct,
- kVisitStructGeneric,
- JSMap::kSize);
-
- case JS_WEAK_MAP_TYPE:
- case JS_WEAK_SET_TYPE:
- return kVisitJSWeakCollection;
-
- case JS_REGEXP_TYPE:
- return kVisitJSRegExp;
-
- case SHARED_FUNCTION_INFO_TYPE:
- return kVisitSharedFunctionInfo;
-
- case JS_PROXY_TYPE:
- return GetVisitorIdForSize(kVisitStruct,
- kVisitStructGeneric,
- JSProxy::kSize);
-
- case JS_FUNCTION_PROXY_TYPE:
- return GetVisitorIdForSize(kVisitStruct,
- kVisitStructGeneric,
- JSFunctionProxy::kSize);
-
- case FOREIGN_TYPE:
- return GetVisitorIdForSize(kVisitDataObject,
- kVisitDataObjectGeneric,
- Foreign::kSize);
-
- case SYMBOL_TYPE:
- return kVisitSymbol;
-
- case FILLER_TYPE:
- return kVisitDataObjectGeneric;
-
- case JS_ARRAY_BUFFER_TYPE:
- return kVisitJSArrayBuffer;
-
- case JS_TYPED_ARRAY_TYPE:
- return kVisitJSTypedArray;
-
- case JS_DATA_VIEW_TYPE:
- return kVisitJSDataView;
-
- case JS_OBJECT_TYPE:
- case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
- case JS_GENERATOR_OBJECT_TYPE:
- case JS_MODULE_TYPE:
- case JS_VALUE_TYPE:
- case JS_DATE_TYPE:
- case JS_ARRAY_TYPE:
- case JS_GLOBAL_PROXY_TYPE:
- case JS_GLOBAL_OBJECT_TYPE:
- case JS_BUILTINS_OBJECT_TYPE:
- case JS_MESSAGE_OBJECT_TYPE:
- case JS_SET_ITERATOR_TYPE:
- case JS_MAP_ITERATOR_TYPE:
- return GetVisitorIdForSize(kVisitJSObject,
- kVisitJSObjectGeneric,
- instance_size);
-
- case JS_FUNCTION_TYPE:
- return kVisitJSFunction;
-
- case HEAP_NUMBER_TYPE:
- case MUTABLE_HEAP_NUMBER_TYPE:
-#define EXTERNAL_ARRAY_CASE(Type, type, TYPE, ctype,
size) \
- case EXTERNAL_##TYPE##_ARRAY_TYPE:
-
- TYPED_ARRAYS(EXTERNAL_ARRAY_CASE)
- return GetVisitorIdForSize(kVisitDataObject,
- kVisitDataObjectGeneric,
- instance_size);
-#undef EXTERNAL_ARRAY_CASE
-
- case FIXED_UINT8_ARRAY_TYPE:
- case FIXED_INT8_ARRAY_TYPE:
- case FIXED_UINT16_ARRAY_TYPE:
- case FIXED_INT16_ARRAY_TYPE:
- case FIXED_UINT32_ARRAY_TYPE:
- case FIXED_INT32_ARRAY_TYPE:
- case FIXED_FLOAT32_ARRAY_TYPE:
- case FIXED_UINT8_CLAMPED_ARRAY_TYPE:
- return kVisitFixedTypedArray;
-
- case FIXED_FLOAT64_ARRAY_TYPE:
- return kVisitFixedFloat64Array;
-
-#define MAKE_STRUCT_CASE(NAME, Name, name) \
- case NAME##_TYPE:
- STRUCT_LIST(MAKE_STRUCT_CASE)
-#undef MAKE_STRUCT_CASE
- if (instance_type == ALLOCATION_SITE_TYPE) {
- return kVisitAllocationSite;
- }
-
- return GetVisitorIdForSize(kVisitStruct,
- kVisitStructGeneric,
- instance_size);
-
- default:
- UNREACHABLE();
- return kVisitorIdCount;
- }
-}
-
-
-// We don't record weak slots during marking or scavenges. Instead we do it
-// once when we complete mark-compact cycle. Note that write barrier has
no
-// effect if we are already in the middle of compacting mark-sweep cycle
and we
-// have to record slots manually.
-static bool MustRecordSlots(Heap* heap) {
- return heap->gc_state() == Heap::MARK_COMPACT &&
- heap->mark_compact_collector()->is_compacting();
-}
-
-
-template <class T>
-struct WeakListVisitor;
-
-
-template <class T>
-Object* VisitWeakList(Heap* heap,
- Object* list,
- WeakObjectRetainer* retainer) {
- Object* undefined = heap->undefined_value();
- Object* head = undefined;
- T* tail = NULL;
- MarkCompactCollector* collector = heap->mark_compact_collector();
- bool record_slots = MustRecordSlots(heap);
- while (list != undefined) {
- // Check whether to keep the candidate in the list.
- T* candidate = reinterpret_cast<T*>(list);
- Object* retained = retainer->RetainAs(list);
- if (retained != NULL) {
- if (head == undefined) {
- // First element in the list.
- head = retained;
- } else {
- // Subsequent elements in the list.
- DCHECK(tail != NULL);
- WeakListVisitor<T>::SetWeakNext(tail, retained);
- if (record_slots) {
- Object** next_slot =
- HeapObject::RawField(tail,
WeakListVisitor<T>::WeakNextOffset());
- collector->RecordSlot(next_slot, next_slot, retained);
- }
- }
- // Retained object is new tail.
- DCHECK(!retained->IsUndefined());
- candidate = reinterpret_cast<T*>(retained);
- tail = candidate;
-
-
- // tail is a live object, visit it.
- WeakListVisitor<T>::VisitLiveObject(heap, tail, retainer);
- } else {
- WeakListVisitor<T>::VisitPhantomObject(heap, candidate);
- }
-
- // Move to next element in the list.
- list = WeakListVisitor<T>::WeakNext(candidate);
- }
-
- // Terminate the list if there is one or more elements.
- if (tail != NULL) {
- WeakListVisitor<T>::SetWeakNext(tail, undefined);
- }
- return head;
-}
-
-
-template <class T>
-static void ClearWeakList(Heap* heap,
- Object* list) {
- Object* undefined = heap->undefined_value();
- while (list != undefined) {
- T* candidate = reinterpret_cast<T*>(list);
- list = WeakListVisitor<T>::WeakNext(candidate);
- WeakListVisitor<T>::SetWeakNext(candidate, undefined);
- }
-}
-
-
-template<>
-struct WeakListVisitor<JSFunction> {
- static void SetWeakNext(JSFunction* function, Object* next) {
- function->set_next_function_link(next);
- }
-
- static Object* WeakNext(JSFunction* function) {
- return function->next_function_link();
- }
-
- static int WeakNextOffset() {
- return JSFunction::kNextFunctionLinkOffset;
- }
-
- static void VisitLiveObject(Heap*, JSFunction*, WeakObjectRetainer*) {}
-
- static void VisitPhantomObject(Heap*, JSFunction*) {}
-};
-
-
-template<>
-struct WeakListVisitor<Code> {
- static void SetWeakNext(Code* code, Object* next) {
- code->set_next_code_link(next);
- }
-
- static Object* WeakNext(Code* code) {
- return code->next_code_link();
- }
-
- static int WeakNextOffset() {
- return Code::kNextCodeLinkOffset;
- }
-
- static void VisitLiveObject(Heap*, Code*, WeakObjectRetainer*) {}
-
- static void VisitPhantomObject(Heap*, Code*) {}
-};
-
-
-template<>
-struct WeakListVisitor<Context> {
- static void SetWeakNext(Context* context, Object* next) {
- context->set(Context::NEXT_CONTEXT_LINK,
- next,
- UPDATE_WRITE_BARRIER);
- }
-
- static Object* WeakNext(Context* context) {
- return context->get(Context::NEXT_CONTEXT_LINK);
- }
-
- static int WeakNextOffset() {
- return FixedArray::SizeFor(Context::NEXT_CONTEXT_LINK);
- }
-
- static void VisitLiveObject(Heap* heap,
- Context* context,
- WeakObjectRetainer* retainer) {
- // Process the three weak lists linked off the context.
- DoWeakList<JSFunction>(heap, context, retainer,
- Context::OPTIMIZED_FUNCTIONS_LIST);
- DoWeakList<Code>(heap, context, retainer,
Context::OPTIMIZED_CODE_LIST);
- DoWeakList<Code>(heap, context, retainer,
Context::DEOPTIMIZED_CODE_LIST);
- }
-
- template<class T>
- static void DoWeakList(Heap* heap,
- Context* context,
- WeakObjectRetainer* retainer,
- int index) {
- // Visit the weak list, removing dead intermediate elements.
- Object* list_head = VisitWeakList<T>(heap, context->get(index),
retainer);
-
- // Update the list head.
- context->set(index, list_head, UPDATE_WRITE_BARRIER);
-
- if (MustRecordSlots(heap)) {
- // Record the updated slot if necessary.
- Object** head_slot = HeapObject::RawField(
- context, FixedArray::SizeFor(index));
- heap->mark_compact_collector()->RecordSlot(
- head_slot, head_slot, list_head);
- }
- }
-
- static void VisitPhantomObject(Heap* heap, Context* context) {
- ClearWeakList<JSFunction>(heap,
- context->get(Context::OPTIMIZED_FUNCTIONS_LIST));
- ClearWeakList<Code>(heap, context->get(Context::OPTIMIZED_CODE_LIST));
- ClearWeakList<Code>(heap,
context->get(Context::DEOPTIMIZED_CODE_LIST));
- }
-};
-
-
-template<>
-struct WeakListVisitor<JSArrayBufferView> {
- static void SetWeakNext(JSArrayBufferView* obj, Object* next) {
- obj->set_weak_next(next);
- }
-
- static Object* WeakNext(JSArrayBufferView* obj) {
- return obj->weak_next();
- }
-
- static int WeakNextOffset() {
- return JSArrayBufferView::kWeakNextOffset;
- }
-
- static void VisitLiveObject(Heap*, JSArrayBufferView*,
WeakObjectRetainer*) {}
-
- static void VisitPhantomObject(Heap*, JSArrayBufferView*) {}
-};
-
-
-template<>
-struct WeakListVisitor<JSArrayBuffer> {
- static void SetWeakNext(JSArrayBuffer* obj, Object* next) {
- obj->set_weak_next(next);
- }
-
- static Object* WeakNext(JSArrayBuffer* obj) {
- return obj->weak_next();
- }
-
- static int WeakNextOffset() {
- return JSArrayBuffer::kWeakNextOffset;
- }
-
- static void VisitLiveObject(Heap* heap,
- JSArrayBuffer* array_buffer,
- WeakObjectRetainer* retainer) {
- Object* typed_array_obj =
- VisitWeakList<JSArrayBufferView>(
- heap,
- array_buffer->weak_first_view(),
- retainer);
- array_buffer->set_weak_first_view(typed_array_obj);
- if (typed_array_obj != heap->undefined_value() &&
MustRecordSlots(heap)) {
- Object** slot = HeapObject::RawField(
- array_buffer, JSArrayBuffer::kWeakFirstViewOffset);
- heap->mark_compact_collector()->RecordSlot(slot, slot,
typed_array_obj);
- }
- }
-
- static void VisitPhantomObject(Heap* heap, JSArrayBuffer* phantom) {
- Runtime::FreeArrayBuffer(heap->isolate(), phantom);
- }
-};
-
-
-template<>
-struct WeakListVisitor<AllocationSite> {
- static void SetWeakNext(AllocationSite* obj, Object* next) {
- obj->set_weak_next(next);
- }
-
- static Object* WeakNext(AllocationSite* obj) {
- return obj->weak_next();
- }
-
- static int WeakNextOffset() {
- return AllocationSite::kWeakNextOffset;
- }
-
- static void VisitLiveObject(Heap*, AllocationSite*, WeakObjectRetainer*)
{}
-
- static void VisitPhantomObject(Heap*, AllocationSite*) {}
-};
-
-
-template Object* VisitWeakList<Code>(
- Heap* heap, Object* list, WeakObjectRetainer* retainer);
-
-
-template Object* VisitWeakList<JSFunction>(
- Heap* heap, Object* list, WeakObjectRetainer* retainer);
-
-
-template Object* VisitWeakList<Context>(
- Heap* heap, Object* list, WeakObjectRetainer* retainer);
-
-
-template Object* VisitWeakList<JSArrayBuffer>(
- Heap* heap, Object* list, WeakObjectRetainer* retainer);
-
-
-template Object* VisitWeakList<AllocationSite>(
- Heap* heap, Object* list, WeakObjectRetainer* retainer);
-
-} } // namespace v8::internal
=======================================
--- /branches/bleeding_edge/src/objects-visiting.h Mon Aug 4 11:34:54 2014
UTC
+++ /dev/null
@@ -1,476 +0,0 @@
-// Copyright 2012 the V8 project authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef V8_OBJECTS_VISITING_H_
-#define V8_OBJECTS_VISITING_H_
-
-#include "src/allocation.h"
-
-// This file provides base classes and auxiliary methods for defining
-// static object visitors used during GC.
-// Visiting HeapObject body with a normal ObjectVisitor requires performing
-// two switches on object's instance type to determine object size and
layout
-// and one or more virtual method calls on visitor itself.
-// Static visitor is different: it provides a dispatch table which contains
-// pointers to specialized visit functions. Each map has the visitor_id
-// field which contains an index of specialized visitor to use.
-
-namespace v8 {
-namespace internal {
-
-
-// Base class for all static visitors.
-class StaticVisitorBase : public AllStatic {
- public:
-#define VISITOR_ID_LIST(V) \
- V(SeqOneByteString) \
- V(SeqTwoByteString) \
- V(ShortcutCandidate) \
- V(ByteArray) \
- V(FreeSpace) \
- V(FixedArray) \
- V(FixedDoubleArray) \
- V(FixedTypedArray) \
- V(FixedFloat64Array) \
- V(ConstantPoolArray) \
- V(NativeContext) \
- V(AllocationSite) \
- V(DataObject2) \
- V(DataObject3) \
- V(DataObject4) \
- V(DataObject5) \
- V(DataObject6) \
- V(DataObject7) \
- V(DataObject8) \
- V(DataObject9) \
- V(DataObjectGeneric) \
- V(JSObject2) \
- V(JSObject3) \
- V(JSObject4) \
- V(JSObject5) \
- V(JSObject6) \
- V(JSObject7) \
- V(JSObject8) \
- V(JSObject9) \
- V(JSObjectGeneric) \
- V(Struct2) \
- V(Struct3) \
- V(Struct4) \
- V(Struct5) \
- V(Struct6) \
- V(Struct7) \
- V(Struct8) \
- V(Struct9) \
- V(StructGeneric) \
- V(ConsString) \
- V(SlicedString) \
- V(Symbol) \
- V(Oddball) \
- V(Code) \
- V(Map) \
- V(Cell) \
- V(PropertyCell) \
- V(SharedFunctionInfo) \
- V(JSFunction) \
- V(JSWeakCollection) \
- V(JSArrayBuffer) \
- V(JSTypedArray) \
- V(JSDataView) \
- V(JSRegExp)
-
- // For data objects, JS objects and structs along with generic visitor
which
- // can visit object of any size we provide visitors specialized by
- // object size in words.
- // Ids of specialized visitors are declared in a linear order (without
- // holes) starting from the id of visitor specialized for 2 words objects
- // (base visitor id) and ending with the id of generic visitor.
- // Method GetVisitorIdForSize depends on this ordering to calculate
visitor
- // id of specialized visitor from given instance size, base visitor id
and
- // generic visitor's id.
- enum VisitorId {
-#define VISITOR_ID_ENUM_DECL(id) kVisit##id,
- VISITOR_ID_LIST(VISITOR_ID_ENUM_DECL)
-#undef VISITOR_ID_ENUM_DECL
- kVisitorIdCount,
- kVisitDataObject = kVisitDataObject2,
- kVisitJSObject = kVisitJSObject2,
- kVisitStruct = kVisitStruct2,
- kMinObjectSizeInWords = 2
- };
-
- // Visitor ID should fit in one byte.
- STATIC_ASSERT(kVisitorIdCount <= 256);
-
- // Determine which specialized visitor should be used for given instance
type
- // and instance type.
- static VisitorId GetVisitorId(int instance_type, int instance_size);
-
- static VisitorId GetVisitorId(Map* map) {
- return GetVisitorId(map->instance_type(), map->instance_size());
- }
-
- // For visitors that allow specialization by size calculate VisitorId
based
- // on size, base visitor id and generic visitor id.
- static VisitorId GetVisitorIdForSize(VisitorId base,
- VisitorId generic,
- int object_size) {
- DCHECK((base == kVisitDataObject) ||
- (base == kVisitStruct) ||
- (base == kVisitJSObject));
- DCHECK(IsAligned(object_size, kPointerSize));
- DCHECK(kMinObjectSizeInWords * kPointerSize <= object_size);
- DCHECK(object_size <= Page::kMaxRegularHeapObjectSize);
-
- const VisitorId specialization = static_cast<VisitorId>(
- base + (object_size >> kPointerSizeLog2) - kMinObjectSizeInWords);
-
- return Min(specialization, generic);
- }
-};
-
-
-template<typename Callback>
-class VisitorDispatchTable {
- public:
- void CopyFrom(VisitorDispatchTable* other) {
- // We are not using memcpy to guarantee that during update
- // every element of callbacks_ array will remain correct
- // pointer (memcpy might be implemented as a byte copying loop).
- for (int i = 0; i < StaticVisitorBase::kVisitorIdCount; i++) {
- base::NoBarrier_Store(&callbacks_[i], other->callbacks_[i]);
- }
- }
-
- inline Callback GetVisitorById(StaticVisitorBase::VisitorId id) {
- return reinterpret_cast<Callback>(callbacks_[id]);
- }
-
- inline Callback GetVisitor(Map* map) {
- return reinterpret_cast<Callback>(callbacks_[map->visitor_id()]);
- }
-
- void Register(StaticVisitorBase::VisitorId id, Callback callback) {
- DCHECK(id < StaticVisitorBase::kVisitorIdCount); // id is unsigned.
- callbacks_[id] = reinterpret_cast<base::AtomicWord>(callback);
- }
-
- template<typename Visitor,
- StaticVisitorBase::VisitorId base,
- StaticVisitorBase::VisitorId generic,
- int object_size_in_words>
- void RegisterSpecialization() {
- static const int size = object_size_in_words * kPointerSize;
- Register(StaticVisitorBase::GetVisitorIdForSize(base, generic, size),
- &Visitor::template VisitSpecialized<size>);
- }
-
-
- template<typename Visitor,
- StaticVisitorBase::VisitorId base,
- StaticVisitorBase::VisitorId generic>
- void RegisterSpecializations() {
- STATIC_ASSERT(
- (generic - base + StaticVisitorBase::kMinObjectSizeInWords) == 10);
- RegisterSpecialization<Visitor, base, generic, 2>();
- RegisterSpecialization<Visitor, base, generic, 3>();
- RegisterSpecialization<Visitor, base, generic, 4>();
- RegisterSpecialization<Visitor, base, generic, 5>();
- RegisterSpecialization<Visitor, base, generic, 6>();
- RegisterSpecialization<Visitor, base, generic, 7>();
- RegisterSpecialization<Visitor, base, generic, 8>();
- RegisterSpecialization<Visitor, base, generic, 9>();
- Register(generic, &Visitor::Visit);
- }
-
- private:
- base::AtomicWord callbacks_[StaticVisitorBase::kVisitorIdCount];
-};
-
-
-template<typename StaticVisitor>
-class BodyVisitorBase : public AllStatic {
- public:
- INLINE(static void IteratePointers(Heap* heap,
- HeapObject* object,
- int start_offset,
- int end_offset)) {
- Object** start_slot = reinterpret_cast<Object**>(object->address() +
- start_offset);
- Object** end_slot = reinterpret_cast<Object**>(object->address() +
- end_offset);
- StaticVisitor::VisitPointers(heap, start_slot, end_slot);
- }
-};
-
-
-template<typename StaticVisitor, typename BodyDescriptor, typename
ReturnType>
-class FlexibleBodyVisitor : public BodyVisitorBase<StaticVisitor> {
- public:
- INLINE(static ReturnType Visit(Map* map, HeapObject* object)) {
- int object_size = BodyDescriptor::SizeOf(map, object);
- BodyVisitorBase<StaticVisitor>::IteratePointers(
- map->GetHeap(),
- object,
- BodyDescriptor::kStartOffset,
- object_size);
- return static_cast<ReturnType>(object_size);
- }
-
- template<int object_size>
- static inline ReturnType VisitSpecialized(Map* map, HeapObject* object) {
- DCHECK(BodyDescriptor::SizeOf(map, object) == object_size);
- BodyVisitorBase<StaticVisitor>::IteratePointers(
- map->GetHeap(),
- object,
- BodyDescriptor::kStartOffset,
- object_size);
- return static_cast<ReturnType>(object_size);
- }
-};
-
-
-template<typename StaticVisitor, typename BodyDescriptor, typename
ReturnType>
-class FixedBodyVisitor : public BodyVisitorBase<StaticVisitor> {
- public:
- INLINE(static ReturnType Visit(Map* map, HeapObject* object)) {
- BodyVisitorBase<StaticVisitor>::IteratePointers(
- map->GetHeap(),
- object,
- BodyDescriptor::kStartOffset,
- BodyDescriptor::kEndOffset);
- return static_cast<ReturnType>(BodyDescriptor::kSize);
- }
-};
-
-
-// Base class for visitors used for a linear new space iteration.
-// IterateBody returns size of visited object.
-// Certain types of objects (i.e. Code objects) are not handled
-// by dispatch table of this visitor because they cannot appear
-// in the new space.
-//
-// This class is intended to be used in the following way:
-//
-// class SomeVisitor : public StaticNewSpaceVisitor<SomeVisitor> {
-// ...
-// }
-//
-// This is an example of Curiously recurring template pattern
-// (see http://en.wikipedia.org/wiki/Curiously_recurring_template_pattern).
-// We use CRTP to guarantee aggressive compile time optimizations (i.e.
-// inlining and specialization of StaticVisitor::VisitPointers methods).
-template<typename StaticVisitor>
-class StaticNewSpaceVisitor : public StaticVisitorBase {
- public:
- static void Initialize();
-
- INLINE(static int IterateBody(Map* map, HeapObject* obj)) {
- return table_.GetVisitor(map)(map, obj);
- }
-
- INLINE(static void VisitPointers(Heap* heap, Object** start, Object**
end)) {
- for (Object** p = start; p < end; p++)
StaticVisitor::VisitPointer(heap, p);
- }
-
- private:
- INLINE(static int VisitJSFunction(Map* map, HeapObject* object)) {
- Heap* heap = map->GetHeap();
- VisitPointers(heap,
- HeapObject::RawField(object,
JSFunction::kPropertiesOffset),
- HeapObject::RawField(object,
JSFunction::kCodeEntryOffset));
-
- // Don't visit code entry. We are using this visitor only during
scavenges.
-
- VisitPointers(
- heap,
- HeapObject::RawField(object,
- JSFunction::kCodeEntryOffset + kPointerSize),
- HeapObject::RawField(object,
- JSFunction::kNonWeakFieldsEndOffset));
- return JSFunction::kSize;
- }
-
- INLINE(static int VisitByteArray(Map* map, HeapObject* object)) {
- return reinterpret_cast<ByteArray*>(object)->ByteArraySize();
- }
-
- INLINE(static int VisitFixedDoubleArray(Map* map, HeapObject* object)) {
- int length = reinterpret_cast<FixedDoubleArray*>(object)->length();
- return FixedDoubleArray::SizeFor(length);
- }
-
- INLINE(static int VisitFixedTypedArray(Map* map, HeapObject* object)) {
- return reinterpret_cast<FixedTypedArrayBase*>(object)->size();
- }
-
- INLINE(static int VisitJSObject(Map* map, HeapObject* object)) {
- return JSObjectVisitor::Visit(map, object);
- }
-
- INLINE(static int VisitSeqOneByteString(Map* map, HeapObject* object)) {
- return SeqOneByteString::cast(object)->
- SeqOneByteStringSize(map->instance_type());
- }
-
- INLINE(static int VisitSeqTwoByteString(Map* map, HeapObject* object)) {
- return SeqTwoByteString::cast(object)->
- SeqTwoByteStringSize(map->instance_type());
- }
-
- INLINE(static int VisitFreeSpace(Map* map, HeapObject* object)) {
- return FreeSpace::cast(object)->Size();
- }
-
- INLINE(static int VisitJSArrayBuffer(Map* map, HeapObject* object));
- INLINE(static int VisitJSTypedArray(Map* map, HeapObject* object));
- INLINE(static int VisitJSDataView(Map* map, HeapObject* object));
-
- class DataObjectVisitor {
- public:
- template<int object_size>
- static inline int VisitSpecialized(Map* map, HeapObject* object) {
- return object_size;
- }
-
- INLINE(static int Visit(Map* map, HeapObject* object)) {
- return map->instance_size();
- }
- };
-
- typedef FlexibleBodyVisitor<StaticVisitor,
- StructBodyDescriptor,
- int> StructVisitor;
-
- typedef FlexibleBodyVisitor<StaticVisitor,
- JSObject::BodyDescriptor,
- int> JSObjectVisitor;
-
- typedef int (*Callback)(Map* map, HeapObject* object);
-
- static VisitorDispatchTable<Callback> table_;
-};
-
-
-template<typename StaticVisitor>
-VisitorDispatchTable<typename
StaticNewSpaceVisitor<StaticVisitor>::Callback>
- StaticNewSpaceVisitor<StaticVisitor>::table_;
-
-
-// Base class for visitors used to transitively mark the entire heap.
-// IterateBody returns nothing.
-// Certain types of objects might not be handled by this base class and
-// no visitor function is registered by the generic initialization. A
-// specialized visitor function needs to be provided by the inheriting
-// class itself for those cases.
-//
-// This class is intended to be used in the following way:
-//
-// class SomeVisitor : public StaticMarkingVisitor<SomeVisitor> {
-// ...
-// }
-//
-// This is an example of Curiously recurring template pattern.
-template<typename StaticVisitor>
-class StaticMarkingVisitor : public StaticVisitorBase {
- public:
- static void Initialize();
-
- INLINE(static void IterateBody(Map* map, HeapObject* obj)) {
- table_.GetVisitor(map)(map, obj);
- }
-
- INLINE(static void VisitPropertyCell(Map* map, HeapObject* object));
- INLINE(static void VisitCodeEntry(Heap* heap, Address entry_address));
- INLINE(static void VisitEmbeddedPointer(Heap* heap, RelocInfo* rinfo));
- INLINE(static void VisitCell(Heap* heap, RelocInfo* rinfo));
- INLINE(static void VisitDebugTarget(Heap* heap, RelocInfo* rinfo));
- INLINE(static void VisitCodeTarget(Heap* heap, RelocInfo* rinfo));
- INLINE(static void VisitCodeAgeSequence(Heap* heap, RelocInfo* rinfo));
- INLINE(static void VisitExternalReference(RelocInfo* rinfo)) { }
- INLINE(static void VisitRuntimeEntry(RelocInfo* rinfo)) { }
- // Skip the weak next code link in a code object.
- INLINE(static void VisitNextCodeLink(Heap* heap, Object** slot)) { }
-
- // TODO(mstarzinger): This should be made protected once refactoring is
done.
- // Mark non-optimize code for functions inlined into the given optimized
- // code. This will prevent it from being flushed.
- static void MarkInlinedFunctionsCode(Heap* heap, Code* code);
-
- protected:
- INLINE(static void VisitMap(Map* map, HeapObject* object));
- INLINE(static void VisitCode(Map* map, HeapObject* object));
- INLINE(static void VisitSharedFunctionInfo(Map* map, HeapObject*
object));
- INLINE(static void VisitConstantPoolArray(Map* map, HeapObject* object));
- INLINE(static void VisitAllocationSite(Map* map, HeapObject* object));
- INLINE(static void VisitWeakCollection(Map* map, HeapObject* object));
- INLINE(static void VisitJSFunction(Map* map, HeapObject* object));
- INLINE(static void VisitJSRegExp(Map* map, HeapObject* object));
- INLINE(static void VisitJSArrayBuffer(Map* map, HeapObject* object));
- INLINE(static void VisitJSTypedArray(Map* map, HeapObject* object));
- INLINE(static void VisitJSDataView(Map* map, HeapObject* object));
- INLINE(static void VisitNativeContext(Map* map, HeapObject* object));
-
- // Mark pointers in a Map and its TransitionArray together, possibly
- // treating transitions or back pointers weak.
- static void MarkMapContents(Heap* heap, Map* map);
- static void MarkTransitionArray(Heap* heap, TransitionArray*
transitions);
-
- // Code flushing support.
- INLINE(static bool IsFlushable(Heap* heap, JSFunction* function));
- INLINE(static bool IsFlushable(Heap* heap, SharedFunctionInfo*
shared_info));
-
- // Helpers used by code flushing support that visit pointer fields and
treat
- // references to code objects either strongly or weakly.
- static void VisitSharedFunctionInfoStrongCode(Heap* heap, HeapObject*
object);
- static void VisitSharedFunctionInfoWeakCode(Heap* heap, HeapObject*
object);
- static void VisitJSFunctionStrongCode(Heap* heap, HeapObject* object);
- static void VisitJSFunctionWeakCode(Heap* heap, HeapObject* object);
-
- class DataObjectVisitor {
- public:
- template<int size>
- static inline void VisitSpecialized(Map* map, HeapObject* object) {
- }
-
- INLINE(static void Visit(Map* map, HeapObject* object)) {
- }
- };
-
- typedef FlexibleBodyVisitor<StaticVisitor,
- FixedArray::BodyDescriptor,
- void> FixedArrayVisitor;
-
- typedef FlexibleBodyVisitor<StaticVisitor,
- JSObject::BodyDescriptor,
- void> JSObjectVisitor;
-
- typedef FlexibleBodyVisitor<StaticVisitor,
- StructBodyDescriptor,
- void> StructObjectVisitor;
-
- typedef void (*Callback)(Map* map, HeapObject* object);
-
- static VisitorDispatchTable<Callback> table_;
-};
-
-
-template<typename StaticVisitor>
-VisitorDispatchTable<typename
StaticMarkingVisitor<StaticVisitor>::Callback>
- StaticMarkingVisitor<StaticVisitor>::table_;
-
-
-class WeakObjectRetainer;
-
-
-// A weak list is single linked list where each element has a weak pointer
to
-// the next element. Given the head of the list, this function removes dead
-// elements from the list and if requested records slots for next-element
-// pointers. The template parameter T is a WeakListVisitor that defines
how to
-// access the next-element pointers.
-template <class T>
-Object* VisitWeakList(Heap* heap, Object* list, WeakObjectRetainer*
retainer);
-
-} } // namespace v8::internal
-
-#endif // V8_OBJECTS_VISITING_H_
=======================================
--- /branches/bleeding_edge/BUILD.gn Thu Aug 7 08:02:37 2014 UTC
+++ /branches/bleeding_edge/BUILD.gn Thu Aug 7 12:21:01 2014 UTC
@@ -640,6 +640,9 @@
"src/heap/mark-compact-inl.h",
"src/heap/mark-compact.cc",
"src/heap/mark-compact.h",
+ "src/heap/objects-visiting-inl.h",
+ "src/heap/objects-visiting.cc",
+ "src/heap/objects-visiting.h",
"src/heap/spaces-inl.h",
"src/heap/spaces.cc",
"src/heap/spaces.h",
@@ -742,8 +745,6 @@
"src/objects-debug.cc",
"src/objects-inl.h",
"src/objects-printer.cc",
- "src/objects-visiting.cc",
- "src/objects-visiting.h",
"src/objects.cc",
"src/objects.h",
"src/optimizing-compiler-thread.cc",
=======================================
--- /branches/bleeding_edge/src/heap/heap.cc Tue Aug 5 12:09:23 2014 UTC
+++ /branches/bleeding_edge/src/heap/heap.cc Thu Aug 7 12:21:01 2014 UTC
@@ -18,11 +18,11 @@
#include "src/global-handles.h"
#include "src/heap/incremental-marking.h"
#include "src/heap/mark-compact.h"
+#include "src/heap/objects-visiting-inl.h"
+#include "src/heap/objects-visiting.h"
#include "src/heap-profiler.h"
#include "src/isolate-inl.h"
#include "src/natives.h"
-#include "src/objects-visiting-inl.h"
-#include "src/objects-visiting.h"
#include "src/runtime-profiler.h"
#include "src/scopeinfo.h"
#include "src/snapshot.h"
=======================================
--- /branches/bleeding_edge/src/heap/heap.h Tue Aug 5 11:16:11 2014 UTC
+++ /branches/bleeding_edge/src/heap/heap.h Thu Aug 7 12:21:01 2014 UTC
@@ -14,9 +14,9 @@
#include "src/heap/gc-tracer.h"
#include "src/heap/incremental-marking.h"
#include "src/heap/mark-compact.h"
+#include "src/heap/objects-visiting.h"
#include "src/heap/spaces.h"
#include "src/list.h"
-#include "src/objects-visiting.h"
#include "src/splay-tree-inl.h"
#include "src/store-buffer.h"
=======================================
--- /branches/bleeding_edge/src/heap/incremental-marking.cc Tue Aug 5
09:40:09 2014 UTC
+++ /branches/bleeding_edge/src/heap/incremental-marking.cc Thu Aug 7
12:21:01 2014 UTC
@@ -9,8 +9,8 @@
#include "src/code-stubs.h"
#include "src/compilation-cache.h"
#include "src/conversions.h"
-#include "src/objects-visiting.h"
-#include "src/objects-visiting-inl.h"
+#include "src/heap/objects-visiting.h"
+#include "src/heap/objects-visiting-inl.h"
namespace v8 {
namespace internal {
=======================================
--- /branches/bleeding_edge/src/heap/mark-compact.cc Tue Aug 5 12:04:58
2014 UTC
+++ /branches/bleeding_edge/src/heap/mark-compact.cc Thu Aug 7 12:21:01
2014 UTC
@@ -14,12 +14,12 @@
#include "src/global-handles.h"
#include "src/heap/incremental-marking.h"
#include "src/heap/mark-compact.h"
+#include "src/heap/objects-visiting.h"
+#include "src/heap/objects-visiting-inl.h"
#include "src/heap/spaces-inl.h"
#include "src/heap/sweeper-thread.h"
#include "src/heap-profiler.h"
#include "src/ic-inl.h"
-#include "src/objects-visiting.h"
-#include "src/objects-visiting-inl.h"
#include "src/stub-cache.h"
namespace v8 {
=======================================
--- /branches/bleeding_edge/src/objects-debug.cc Tue Aug 5 17:06:01 2014
UTC
+++ /branches/bleeding_edge/src/objects-debug.cc Thu Aug 7 12:21:01 2014
UTC
@@ -6,9 +6,9 @@
#include "src/disasm.h"
#include "src/disassembler.h"
+#include "src/heap/objects-visiting.h"
#include "src/jsregexp.h"
#include "src/macro-assembler.h"
-#include "src/objects-visiting.h"
#include "src/ostreams.h"
namespace v8 {
=======================================
--- /branches/bleeding_edge/src/objects-inl.h Wed Aug 6 08:02:21 2014 UTC
+++ /branches/bleeding_edge/src/objects-inl.h Thu Aug 7 12:21:01 2014 UTC
@@ -21,11 +21,11 @@
#include "src/heap/heap-inl.h"
#include "src/heap/heap.h"
#include "src/heap/incremental-marking.h"
+#include "src/heap/objects-visiting.h"
#include "src/heap/spaces.h"
#include "src/isolate.h"
#include "src/lookup.h"
#include "src/objects.h"
-#include "src/objects-visiting.h"
#include "src/property.h"
#include "src/prototype.h"
#include "src/store-buffer.h"
=======================================
--- /branches/bleeding_edge/src/objects-printer.cc Tue Aug 5 17:06:01 2014
UTC
+++ /branches/bleeding_edge/src/objects-printer.cc Thu Aug 7 12:21:01 2014
UTC
@@ -6,8 +6,8 @@
#include "src/disasm.h"
#include "src/disassembler.h"
+#include "src/heap/objects-visiting.h"
#include "src/jsregexp.h"
-#include "src/objects-visiting.h"
#include "src/ostreams.h"
namespace v8 {
=======================================
--- /branches/bleeding_edge/src/objects.cc Tue Aug 5 11:58:24 2014 UTC
+++ /branches/bleeding_edge/src/objects.cc Thu Aug 7 12:21:01 2014 UTC
@@ -21,13 +21,13 @@
#include "src/field-index.h"
#include "src/full-codegen.h"
#include "src/heap/mark-compact.h"
+#include "src/heap/objects-visiting-inl.h"
#include "src/hydrogen.h"
#include "src/isolate-inl.h"
#include "src/log.h"
#include "src/lookup.h"
#include "src/macro-assembler.h"
#include "src/objects-inl.h"
-#include "src/objects-visiting-inl.h"
#include "src/prototype.h"
#include "src/safepoint-table.h"
#include "src/string-search.h"
=======================================
--- /branches/bleeding_edge/tools/gyp/v8.gyp Wed Aug 6 15:50:40 2014 UTC
+++ /branches/bleeding_edge/tools/gyp/v8.gyp Thu Aug 7 12:21:01 2014 UTC
@@ -524,6 +524,9 @@
'../../src/heap/mark-compact-inl.h',
'../../src/heap/mark-compact.cc',
'../../src/heap/mark-compact.h',
+ '../../src/heap/objects-visiting-inl.h',
+ '../../src/heap/objects-visiting.cc',
+ '../../src/heap/objects-visiting.h',
'../../src/heap/spaces-inl.h',
'../../src/heap/spaces.cc',
'../../src/heap/spaces.h',
@@ -627,8 +630,6 @@
'../../src/objects-debug.cc',
'../../src/objects-inl.h',
'../../src/objects-printer.cc',
- '../../src/objects-visiting.cc',
- '../../src/objects-visiting.h',
'../../src/objects.cc',
'../../src/objects.h',
'../../src/optimizing-compiler-thread.cc',
--
--
v8-dev mailing list
[email protected]
http://groups.google.com/group/v8-dev
---
You received this message because you are subscribed to the Google Groups "v8-dev" group.
To unsubscribe from this group and stop receiving emails from it, send an email
to [email protected].
For more options, visit https://groups.google.com/d/optout.