Revision: 7762
Author:   [email protected]
Date:     Tue May  3 05:23:50 2011
Log:      Read isolate from page, not through map.

Review URL: http://codereview.chromium.org/6904166
http://code.google.com/p/v8/source/detail?r=7762

Modified:
 /branches/experimental/gc/src/heap.cc
 /branches/experimental/gc/src/mark-compact.cc
 /branches/experimental/gc/src/objects-inl.h
 /branches/experimental/gc/src/objects-visiting.h
 /branches/experimental/gc/src/objects.cc
 /branches/experimental/gc/src/objects.h

=======================================
--- /branches/experimental/gc/src/heap.cc       Mon May  2 05:23:52 2011
+++ /branches/experimental/gc/src/heap.cc       Tue May  3 05:23:50 2011
@@ -1405,7 +1405,7 @@
            (object_size <= Page::kMaxHeapObjectSize));
     ASSERT(object->Size() == object_size);

-    Heap* heap = map->heap();
+    Heap* heap = map->GetHeap();
     if (heap->ShouldBePromoted(object->address(), object_size)) {
       MaybeObject* maybe_result;

@@ -1493,13 +1493,13 @@

     if (marks_handling == IGNORE_MARKS &&
         ConsString::cast(object)->unchecked_second() ==
-        map->heap()->empty_string()) {
+        map->GetHeap()->empty_string()) {
       HeapObject* first =
           HeapObject::cast(ConsString::cast(object)->unchecked_first());

       *slot = first;

-      if (!map->heap()->InNewSpace(first)) {
+      if (!map->GetHeap()->InNewSpace(first)) {
         object->set_map_word(MapWord::FromForwardingAddress(first));
         return;
       }
=======================================
--- /branches/experimental/gc/src/mark-compact.cc       Sun Apr 24 04:36:08 2011
+++ /branches/experimental/gc/src/mark-compact.cc       Tue May  3 05:23:50 2011
@@ -546,7 +546,7 @@
   if ((type & kShortcutTypeMask) != kShortcutTypeTag) return object;

Object* second = reinterpret_cast<ConsString*>(object)->unchecked_second();
-  Heap* heap = map->heap();
+  Heap* heap = map->GetHeap();
   if (second != heap->raw_unchecked_empty_string()) {
     return object;
   }
@@ -749,7 +749,7 @@

   static void VisitCode(Map* map, HeapObject* object) {
     reinterpret_cast<Code*>(object)->CodeIterateBody<StaticMarkingVisitor>(
-        map->heap());
+        map->GetHeap());
   }

   // Code flushing support.
@@ -895,7 +895,7 @@

   static void VisitSharedFunctionInfoAndFlushCode(Map* map,
                                                   HeapObject* object) {
-    MarkCompactCollector* collector = map->heap()->mark_compact_collector();
+    MarkCompactCollector* collector = map->GetHeap()->mark_compact_collector();
     if (!collector->is_code_flushing_enabled()) {
       VisitSharedFunctionInfoGeneric(map, object);
       return;
@@ -906,7 +906,7 @@

   static void VisitSharedFunctionInfoAndFlushCodeGeneric(
       Map* map, HeapObject* object, bool known_flush_code_candidate) {
-    Heap* heap = map->heap();
+    Heap* heap = map->GetHeap();
SharedFunctionInfo* shared = reinterpret_cast<SharedFunctionInfo*>(object);

if (shared->IsInobjectSlackTrackingInProgress()) shared->DetachInitialMap();
@@ -934,7 +934,7 @@


   static void VisitJSFunctionAndFlushCode(Map* map, HeapObject* object) {
-    Heap* heap = map->heap();
+    Heap* heap = map->GetHeap();
     MarkCompactCollector* collector = heap->mark_compact_collector();
     if (!collector->is_code_flushing_enabled()) {
       VisitJSFunction(map, object);
@@ -996,7 +996,7 @@
   static inline void VisitJSFunctionFields(Map* map,
                                            JSFunction* object,
                                            bool flush_code_candidate) {
-    Heap* heap = map->heap();
+    Heap* heap = map->GetHeap();

     VisitPointers(heap,
                   SLOT_ADDR(object, JSFunction::kPropertiesOffset),
@@ -1307,7 +1307,7 @@

Object** end_slot = HeapObject::RawField(map, Map::kPointerFieldsEndOffset);

-  StaticMarkingVisitor::VisitPointers(map->heap(), start_slot, end_slot);
+    StaticMarkingVisitor::VisitPointers(map->GetHeap(), start_slot, end_slot);
 }


=======================================
--- /branches/experimental/gc/src/objects-inl.h Sun Apr 24 04:36:08 2011
+++ /branches/experimental/gc/src/objects-inl.h Tue May  3 05:23:50 2011
@@ -1,4 +1,4 @@
-// Copyright 2010 the V8 project authors. All rights reserved.
+// Copyright 2011 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
@@ -1051,18 +1051,14 @@


 Heap* HeapObject::GetHeap() {
-  // During GC, the map pointer in HeapObject is used in various ways that
-  // prevent us from retrieving Heap from the map.
- // Assert that we are not in GC, implement GC code in a way that it doesn't
-  // pull heap from the map.
-  return map()->heap();
+  return MemoryChunk::FromAddress(address())->heap();
 }


 Isolate* HeapObject::GetIsolate() {
-  Isolate* i = GetHeap()->isolate();
-  ASSERT(i == Isolate::Current());
-  return i;
+  Isolate* isolate = MemoryChunk::FromAddress(address())->heap()->isolate();
+  ASSERT(isolate == Isolate::Current());
+  return isolate;
 }


@@ -1074,7 +1070,7 @@
 void HeapObject::set_map(Map* value) {
   set_map_word(MapWord::FromMap(value));
   if (value != NULL) {
-    value->heap()->incremental_marking()->RecordWrite(this, value);
+    value->GetHeap()->incremental_marking()->RecordWrite(this, value);
   }
 }

@@ -2709,15 +2705,6 @@
   Code* result = reinterpret_cast<Code*>(code);
   return result;
 }
-
-
-Heap* Map::heap() {
-  // NOTE: address() helper is not used to save one instruction.
-  Heap* heap = Page::FromAddress(reinterpret_cast<Address>(this))->heap();
-  ASSERT(heap != NULL);
-  ASSERT(heap->isolate() == Isolate::Current());
-  return heap;
-}


 Object* Code::GetObjectFromEntryAddress(Address location_of_address) {
=======================================
--- /branches/experimental/gc/src/objects-visiting.h Sun Apr 24 04:36:08 2011
+++ /branches/experimental/gc/src/objects-visiting.h Tue May  3 05:23:50 2011
@@ -1,4 +1,4 @@
-// Copyright 2006-2009 the V8 project authors. All rights reserved.
+// Copyright 2011 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
@@ -212,7 +212,7 @@
   static inline ReturnType Visit(Map* map, HeapObject* object) {
     int object_size = BodyDescriptor::SizeOf(map, object);
     BodyVisitorBase<StaticVisitor>::IteratePointers(
-        map->heap(),
+        map->GetHeap(),
         object,
         BodyDescriptor::kStartOffset,
         object_size);
@@ -223,7 +223,7 @@
   static inline ReturnType VisitSpecialized(Map* map, HeapObject* object) {
     ASSERT(BodyDescriptor::SizeOf(map, object) == object_size);
     BodyVisitorBase<StaticVisitor>::IteratePointers(
-        map->heap(),
+        map->GetHeap(),
         object,
         BodyDescriptor::kStartOffset,
         object_size);
@@ -237,7 +237,7 @@
  public:
   static inline ReturnType Visit(Map* map, HeapObject* object) {
     BodyVisitorBase<StaticVisitor>::IteratePointers(
-        map->heap(),
+        map->GetHeap(),
         object,
         BodyDescriptor::kStartOffset,
         BodyDescriptor::kEndOffset);
=======================================
--- /branches/experimental/gc/src/objects.cc    Sun Apr 24 04:36:08 2011
+++ /branches/experimental/gc/src/objects.cc    Tue May  3 05:23:50 2011
@@ -1,4 +1,4 @@
-// Copyright 2010 the V8 project authors. All rights reserved.
+// Copyright 2011 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
@@ -1810,7 +1810,8 @@
                               String* name,
                               LookupResult* result) {
   DescriptorArray* descriptors = instance_descriptors();
- DescriptorLookupCache* cache = heap()->isolate()->descriptor_lookup_cache();
+  DescriptorLookupCache* cache =
+      GetHeap()->isolate()->descriptor_lookup_cache();
   int number = cache->Lookup(descriptors, name);
   if (number == DescriptorLookupCache::kAbsent) {
     number = descriptors->Search(name);
@@ -3577,11 +3578,11 @@

 void Map::TraverseTransitionTree(TraverseCallback callback, void* data) {
   Map* current = this;
-  Map* meta_map = heap()->meta_map();
+  Map* meta_map = GetHeap()->meta_map();
   while (current != meta_map) {
     DescriptorArray* d = reinterpret_cast<DescriptorArray*>(
         *RawField(current, Map::kInstanceDescriptorsOffset));
-    if (d == heap()->empty_descriptor_array()) {
+    if (d == GetHeap()->empty_descriptor_array()) {
       Map* prev = current->map();
       current->set_map(meta_map);
       callback(current, data);
@@ -3608,7 +3609,7 @@
       }
     }
     if (!map_done) continue;
-    *map_or_index_field = heap()->fixed_array_map();
+    *map_or_index_field = GetHeap()->fixed_array_map();
     Map* prev = current->map();
     current->set_map(meta_map);
     callback(current, data);
@@ -5944,7 +5945,7 @@
     set_construction_count(kGenerousAllocationCount);
   }
   set_initial_map(map);
-  Builtins* builtins = map->heap()->isolate()->builtins();
+  Builtins* builtins = map->GetHeap()->isolate()->builtins();
   ASSERT_EQ(builtins->builtin(Builtins::JSConstructStubGeneric),
             construct_stub());
set_construct_stub(builtins->builtin(Builtins::JSConstructStubCountdown));
@@ -5964,8 +5965,9 @@
   // then StartInobjectTracking will be called again the next time the
   // constructor is called. The countdown will continue and (possibly after
// several more GCs) CompleteInobjectSlackTracking will eventually be called.
-  set_initial_map(map->heap()->raw_unchecked_undefined_value());
-  Builtins* builtins = map->heap()->isolate()->builtins();
+  Heap* heap = map->GetHeap();
+  set_initial_map(heap->raw_unchecked_undefined_value());
+  Builtins* builtins = heap->isolate()->builtins();
   ASSERT_EQ(builtins->builtin(Builtins::JSConstructStubCountdown),
             *RawField(this, kConstructStubOffset));
   set_construct_stub(builtins->builtin(Builtins::JSConstructStubGeneric));
@@ -5981,7 +5983,7 @@

   // Resume inobject slack tracking.
   set_initial_map(map);
-  Builtins* builtins = map->heap()->isolate()->builtins();
+  Builtins* builtins = map->GetHeap()->isolate()->builtins();
   ASSERT_EQ(builtins->builtin(Builtins::JSConstructStubGeneric),
             *RawField(this, kConstructStubOffset));
set_construct_stub(builtins->builtin(Builtins::JSConstructStubCountdown));
@@ -6013,7 +6015,7 @@
   ASSERT(live_objects_may_exist() && IsInobjectSlackTrackingInProgress());
   Map* map = Map::cast(initial_map());

-  Heap* heap = map->heap();
+  Heap* heap = map->GetHeap();
   set_initial_map(heap->undefined_value());
   Builtins* builtins = heap->isolate()->builtins();
   ASSERT_EQ(builtins->builtin(Builtins::JSConstructStubCountdown),
=======================================
--- /branches/experimental/gc/src/objects.h     Sun Apr 24 04:36:08 2011
+++ /branches/experimental/gc/src/objects.h     Tue May  3 05:23:50 2011
@@ -1,4 +1,4 @@
-// Copyright 2010 the V8 project authors. All rights reserved.
+// Copyright 2011 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
@@ -3725,9 +3725,6 @@
   inline int visitor_id();
   inline void set_visitor_id(int visitor_id);

-  // Returns the heap this map belongs to.
-  inline Heap* heap();
-
   typedef void (*TraverseCallback)(Map* map, void* data);

   void TraverseTransitionTree(TraverseCallback callback, void* data);

--
v8-dev mailing list
[email protected]
http://groups.google.com/group/v8-dev

Reply via email to