Revision: 16631
Author:   [email protected]
Date:     Wed Sep 11 07:14:41 2013 UTC
Log:      move HEAP to /test

[email protected]
BUG=

Review URL: https://codereview.chromium.org/23468021
http://code.google.com/p/v8/source/detail?r=16631

Modified:
 /branches/bleeding_edge/src/cpu-profiler.cc
 /branches/bleeding_edge/src/disassembler.cc
 /branches/bleeding_edge/src/factory.cc
 /branches/bleeding_edge/src/heap-snapshot-generator.cc
 /branches/bleeding_edge/src/isolate.h
 /branches/bleeding_edge/src/mark-compact.cc
 /branches/bleeding_edge/src/mksnapshot.cc
 /branches/bleeding_edge/src/objects-debug.cc
 /branches/bleeding_edge/src/objects-inl.h
 /branches/bleeding_edge/src/objects.cc
 /branches/bleeding_edge/src/objects.h
 /branches/bleeding_edge/src/profile-generator.cc
 /branches/bleeding_edge/src/profile-generator.h
 /branches/bleeding_edge/src/property.h
 /branches/bleeding_edge/src/runtime.cc
 /branches/bleeding_edge/src/scopeinfo.cc
 /branches/bleeding_edge/src/serialize.cc
 /branches/bleeding_edge/src/serialize.h
 /branches/bleeding_edge/src/spaces-inl.h
 /branches/bleeding_edge/src/spaces.h
 /branches/bleeding_edge/src/store-buffer-inl.h
 /branches/bleeding_edge/src/store-buffer.cc
 /branches/bleeding_edge/src/string-stream.cc
 /branches/bleeding_edge/src/stub-cache.cc
 /branches/bleeding_edge/test/cctest/cctest.h
 /branches/bleeding_edge/test/cctest/test-cpu-profiler.cc
 /branches/bleeding_edge/test/cctest/test-profile-generator.cc

=======================================
--- /branches/bleeding_edge/src/cpu-profiler.cc Fri Sep  6 06:25:06 2013 UTC
+++ /branches/bleeding_edge/src/cpu-profiler.cc Wed Sep 11 07:14:41 2013 UTC
@@ -373,7 +373,7 @@
     : isolate_(isolate),
       sampling_interval_(TimeDelta::FromMicroseconds(
           FLAG_cpu_profiler_sampling_interval)),
-      profiles_(new CpuProfilesCollection()),
+      profiles_(new CpuProfilesCollection(isolate->heap())),
       next_profile_uid_(1),
       generator_(NULL),
       processor_(NULL),
@@ -410,7 +410,7 @@

 void CpuProfiler::ResetProfiles() {
   delete profiles_;
-  profiles_ = new CpuProfilesCollection();
+  profiles_ = new CpuProfilesCollection(isolate()->heap());
 }


=======================================
--- /branches/bleeding_edge/src/disassembler.cc Tue Sep  3 11:54:08 2013 UTC
+++ /branches/bleeding_edge/src/disassembler.cc Wed Sep 11 07:14:41 2013 UTC
@@ -118,7 +118,7 @@
   SealHandleScope shs(isolate);
   DisallowHeapAllocation no_alloc;
   ExternalReferenceEncoder ref_encoder(isolate);
-  Heap* heap = HEAP;
+  Heap* heap = isolate->heap();

   v8::internal::EmbeddedVector<char, 128> decode_buffer;
   v8::internal::EmbeddedVector<char, kOutBufferSize> out_buffer;
=======================================
--- /branches/bleeding_edge/src/factory.cc      Mon Sep  9 14:29:47 2013 UTC
+++ /branches/bleeding_edge/src/factory.cc      Wed Sep 11 07:14:41 2013 UTC
@@ -141,7 +141,8 @@
     PretenureFlag pretenure) {
   ASSERT(deopt_entry_count > 0);
   CALL_HEAP_FUNCTION(isolate(),
-                     DeoptimizationInputData::Allocate(deopt_entry_count,
+                     DeoptimizationInputData::Allocate(isolate(),
+                                                       deopt_entry_count,
                                                        pretenure),
                      DeoptimizationInputData);
 }
@@ -152,7 +153,8 @@
     PretenureFlag pretenure) {
   ASSERT(deopt_entry_count > 0);
   CALL_HEAP_FUNCTION(isolate(),
-                     DeoptimizationOutputData::Allocate(deopt_entry_count,
+                     DeoptimizationOutputData::Allocate(isolate(),
+                                                        deopt_entry_count,
                                                         pretenure),
                      DeoptimizationOutputData);
 }
=======================================
--- /branches/bleeding_edge/src/heap-snapshot-generator.cc Tue Sep 10 14:30:36 2013 UTC
+++ /branches/bleeding_edge/src/heap-snapshot-generator.cc Wed Sep 11 07:14:41 2013 UTC
@@ -586,6 +586,7 @@

 HeapSnapshotsCollection::HeapSnapshotsCollection(Heap* heap)
     : is_tracking_objects_(false),
+      names_(heap),
       ids_(heap) {
 }

=======================================
--- /branches/bleeding_edge/src/isolate.h       Tue Sep 10 14:30:36 2013 UTC
+++ /branches/bleeding_edge/src/isolate.h       Wed Sep 11 07:14:41 2013 UTC
@@ -1520,7 +1520,6 @@

 // Temporary macros for accessing current isolate and its subobjects.
 // They provide better readability, especially when used a lot in the code.
-#define HEAP (v8::internal::Isolate::Current()->heap())
 #define ISOLATE (v8::internal::Isolate::Current())


=======================================
--- /branches/bleeding_edge/src/mark-compact.cc Thu Sep  5 11:27:22 2013 UTC
+++ /branches/bleeding_edge/src/mark-compact.cc Wed Sep 11 07:14:41 2013 UTC
@@ -79,11 +79,13 @@
 #ifdef VERIFY_HEAP
 class VerifyMarkingVisitor: public ObjectVisitor {
  public:
+  explicit VerifyMarkingVisitor(Heap* heap) : heap_(heap) {}
+
   void VisitPointers(Object** start, Object** end) {
     for (Object** current = start; current < end; current++) {
       if ((*current)->IsHeapObject()) {
         HeapObject* object = HeapObject::cast(*current);
-        CHECK(HEAP->mark_compact_collector()->IsMarked(object));
+        CHECK(heap_->mark_compact_collector()->IsMarked(object));
       }
     }
   }
@@ -97,11 +99,14 @@
       VisitPointer(rinfo->target_object_address());
     }
   }
+
+ private:
+  Heap* heap_;
 };


-static void VerifyMarking(Address bottom, Address top) {
-  VerifyMarkingVisitor visitor;
+static void VerifyMarking(Heap* heap, Address bottom, Address top) {
+  VerifyMarkingVisitor visitor(heap);
   HeapObject* object;
   Address next_object_must_be_here_or_later = bottom;

@@ -129,7 +134,7 @@
     NewSpacePage* page = it.next();
     Address limit = it.has_next() ? page->area_end() : end;
     CHECK(limit == end || !page->Contains(end));
-    VerifyMarking(page->area_start(), limit);
+    VerifyMarking(space->heap(), page->area_start(), limit);
   }
 }

@@ -139,7 +144,7 @@

   while (it.has_next()) {
     Page* p = it.next();
-    VerifyMarking(p->area_start(), p->area_end());
+    VerifyMarking(space->heap(), p->area_start(), p->area_end());
   }
 }

@@ -153,7 +158,7 @@
   VerifyMarking(heap->map_space());
   VerifyMarking(heap->new_space());

-  VerifyMarkingVisitor visitor;
+  VerifyMarkingVisitor visitor(heap);

   LargeObjectIterator it(heap->lo_space());
   for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) {
@@ -1409,7 +1414,7 @@
                                          HeapObject* obj)) {
 #ifdef DEBUG
     ASSERT(collector->heap()->Contains(obj));
-    ASSERT(!HEAP->mark_compact_collector()->IsMarked(obj));
+    ASSERT(!collector->heap()->mark_compact_collector()->IsMarked(obj));
 #endif
     Map* map = obj->map();
     Heap* heap = obj->GetHeap();
=======================================
--- /branches/bleeding_edge/src/mksnapshot.cc   Tue Sep  3 11:54:08 2013 UTC
+++ /branches/bleeding_edge/src/mksnapshot.cc   Wed Sep 11 07:14:41 2013 UTC
@@ -397,7 +397,8 @@
   }
   // If we don't do this then we end up with a stray root pointing at the
   // context even after we have disposed of the context.
-  HEAP->CollectAllGarbage(i::Heap::kNoGCFlags, "mksnapshot");
+  internal_isolate->heap()->CollectAllGarbage(
+      i::Heap::kNoGCFlags, "mksnapshot");
   i::Object* raw_context = *v8::Utils::OpenPersistent(context);
   context.Dispose();
   CppByteSink sink(argv[1]);
=======================================
--- /branches/bleeding_edge/src/objects-debug.cc Wed Sep  4 07:45:36 2013 UTC
+++ /branches/bleeding_edge/src/objects-debug.cc Wed Sep 11 07:14:41 2013 UTC
@@ -230,7 +230,8 @@

 void HeapObject::VerifyHeapPointer(Object* p) {
   CHECK(p->IsHeapObject());
-  CHECK(HEAP->Contains(HeapObject::cast(p)));
+  HeapObject* ho = HeapObject::cast(p);
+  CHECK(ho->GetHeap()->Contains(ho));
 }


@@ -337,11 +338,12 @@


 void Map::MapVerify() {
-  CHECK(!HEAP->InNewSpace(this));
+  Heap* heap = GetHeap();
+  CHECK(!heap->InNewSpace(this));
   CHECK(FIRST_TYPE <= instance_type() && instance_type() <= LAST_TYPE);
   CHECK(instance_size() == kVariableSizeSentinel ||
          (kPointerSize <= instance_size() &&
-          instance_size() < HEAP->Capacity()));
+          instance_size() < heap->Capacity()));
   VerifyHeapPointer(prototype());
   VerifyHeapPointer(instance_descriptors());
   SLOW_ASSERT(instance_descriptors()->IsSortedNoDuplicates());
@@ -523,7 +525,7 @@
   CHECK(IsString());
   CHECK(length() >= 0 && length() <= Smi::kMaxValue);
   if (IsInternalizedString()) {
-    CHECK(!HEAP->InNewSpace(this));
+    CHECK(!GetHeap()->InNewSpace(this));
   }
   if (IsConsString()) {
     ConsString::cast(this)->ConsStringVerify();
@@ -615,7 +617,7 @@
   VerifyHeapPointer(to_string());
   Object* number = to_number();
   if (number->IsHeapObject()) {
-    CHECK(number == HEAP->nan_value());
+    CHECK(number == HeapObject::cast(number)->GetHeap()->nan_value());
   } else {
     CHECK(number->IsSmi());
     int value = Smi::cast(number)->value();
@@ -1043,7 +1045,7 @@
       int holes = 0;
       FixedArray* e = FixedArray::cast(elements());
       int len = e->length();
-      Heap* heap = HEAP;
+      Heap* heap = GetHeap();
       for (int i = 0; i < len; i++) {
         if (e->get(i) == heap->the_hole_value()) holes++;
       }
=======================================
--- /branches/bleeding_edge/src/objects-inl.h   Mon Sep  9 14:29:47 2013 UTC
+++ /branches/bleeding_edge/src/objects-inl.h   Wed Sep 11 07:14:41 2013 UTC
@@ -1963,7 +1963,7 @@


 void FixedArray::set(int index, Smi* value) {
-  ASSERT(map() != HEAP->fixed_cow_array_map());
+  ASSERT(map() != GetHeap()->fixed_cow_array_map());
   ASSERT(index >= 0 && index < this->length());
   ASSERT(reinterpret_cast<Object*>(value)->IsSmi());
   int offset = kHeaderSize + index * kPointerSize;
@@ -1972,7 +1972,7 @@


 void FixedArray::set(int index, Object* value) {
-  ASSERT(map() != HEAP->fixed_cow_array_map());
+  ASSERT(map() != GetHeap()->fixed_cow_array_map());
   ASSERT(index >= 0 && index < this->length());
   int offset = kHeaderSize + index * kPointerSize;
   WRITE_FIELD(this, offset, value);
@@ -1998,8 +1998,8 @@


 double FixedDoubleArray::get_scalar(int index) {
-  ASSERT(map() != HEAP->fixed_cow_array_map() &&
-         map() != HEAP->fixed_array_map());
+  ASSERT(map() != GetHeap()->fixed_cow_array_map() &&
+         map() != GetHeap()->fixed_array_map());
   ASSERT(index >= 0 && index < this->length());
double result = READ_DOUBLE_FIELD(this, kHeaderSize + index * kDoubleSize);
   ASSERT(!is_the_hole_nan(result));
@@ -2007,8 +2007,8 @@
 }

 int64_t FixedDoubleArray::get_representation(int index) {
-  ASSERT(map() != HEAP->fixed_cow_array_map() &&
-         map() != HEAP->fixed_array_map());
+  ASSERT(map() != GetHeap()->fixed_cow_array_map() &&
+         map() != GetHeap()->fixed_array_map());
   ASSERT(index >= 0 && index < this->length());
   return READ_INT64_FIELD(this, kHeaderSize + index * kDoubleSize);
 }
@@ -2023,8 +2023,8 @@


 void FixedDoubleArray::set(int index, double value) {
-  ASSERT(map() != HEAP->fixed_cow_array_map() &&
-         map() != HEAP->fixed_array_map());
+  ASSERT(map() != GetHeap()->fixed_cow_array_map() &&
+         map() != GetHeap()->fixed_array_map());
   int offset = kHeaderSize + index * kDoubleSize;
   if (std::isnan(value)) value = canonical_not_the_hole_nan_as_double();
   WRITE_DOUBLE_FIELD(this, offset, value);
@@ -2032,8 +2032,8 @@


 void FixedDoubleArray::set_the_hole(int index) {
-  ASSERT(map() != HEAP->fixed_cow_array_map() &&
-         map() != HEAP->fixed_array_map());
+  ASSERT(map() != GetHeap()->fixed_cow_array_map() &&
+         map() != GetHeap()->fixed_array_map());
   int offset = kHeaderSize + index * kDoubleSize;
   WRITE_DOUBLE_FIELD(this, offset, hole_nan_as_double());
 }
@@ -2057,7 +2057,7 @@
 void FixedArray::set(int index,
                      Object* value,
                      WriteBarrierMode mode) {
-  ASSERT(map() != HEAP->fixed_cow_array_map());
+  ASSERT(map() != GetHeap()->fixed_cow_array_map());
   ASSERT(index >= 0 && index < this->length());
   int offset = kHeaderSize + index * kPointerSize;
   WRITE_FIELD(this, offset, value);
@@ -2068,7 +2068,7 @@
 void FixedArray::NoIncrementalWriteBarrierSet(FixedArray* array,
                                               int index,
                                               Object* value) {
-  ASSERT(array->map() != HEAP->fixed_cow_array_map());
+  ASSERT(array->map() != array->GetHeap()->fixed_cow_array_map());
   ASSERT(index >= 0 && index < array->length());
   int offset = kHeaderSize + index * kPointerSize;
   WRITE_FIELD(array, offset, value);
@@ -2082,9 +2082,9 @@
 void FixedArray::NoWriteBarrierSet(FixedArray* array,
                                    int index,
                                    Object* value) {
-  ASSERT(array->map() != HEAP->fixed_cow_array_map());
+  ASSERT(array->map() != array->GetHeap()->fixed_cow_array_map());
   ASSERT(index >= 0 && index < array->length());
-  ASSERT(!HEAP->InNewSpace(value));
+  ASSERT(!array->GetHeap()->InNewSpace(value));
   WRITE_FIELD(array, kHeaderSize + index * kPointerSize, value);
 }

@@ -2109,9 +2109,9 @@


 void FixedArray::set_the_hole(int index) {
-  ASSERT(map() != HEAP->fixed_cow_array_map());
+  ASSERT(map() != GetHeap()->fixed_cow_array_map());
   ASSERT(index >= 0 && index < this->length());
-  ASSERT(!HEAP->InNewSpace(HEAP->the_hole_value()));
+  ASSERT(!GetHeap()->InNewSpace(GetHeap()->the_hole_value()));
   WRITE_FIELD(this,
               kHeaderSize + index * kPointerSize,
               GetHeap()->the_hole_value());
@@ -2130,7 +2130,7 @@

 bool DescriptorArray::IsEmpty() {
   ASSERT(length() >= kFirstIndex ||
-         this == HEAP->empty_descriptor_array());
+         this == GetHeap()->empty_descriptor_array());
   return length() < kFirstIndex;
 }

@@ -4995,7 +4995,7 @@


 void JSFunction::set_code(Code* value) {
-  ASSERT(!HEAP->InNewSpace(value));
+  ASSERT(!GetHeap()->InNewSpace(value));
   Address entry = value->entry();
WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
   GetHeap()->incremental_marking()->RecordWriteOfCodeEntry(
@@ -5006,7 +5006,7 @@


 void JSFunction::set_code_no_write_barrier(Code* value) {
-  ASSERT(!HEAP->InNewSpace(value));
+  ASSERT(!GetHeap()->InNewSpace(value));
   Address entry = value->entry();
WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
 }
@@ -5156,7 +5156,7 @@
                                                    Code* value) {
   ASSERT(id < kJSBuiltinsCount);  // id is unsigned.
   WRITE_FIELD(this, OffsetOfCodeWithId(id), value);
-  ASSERT(!HEAP->InNewSpace(value));
+  ASSERT(!GetHeap()->InNewSpace(value));
 }


=======================================
--- /branches/bleeding_edge/src/objects.cc      Tue Sep 10 16:41:46 2013 UTC
+++ /branches/bleeding_edge/src/objects.cc      Wed Sep 11 07:14:41 2013 UTC
@@ -2188,9 +2188,9 @@

 template<RightTrimMode trim_mode>
static void RightTrimFixedArray(Heap* heap, FixedArray* elms, int to_trim) {
-  ASSERT(elms->map() != HEAP->fixed_cow_array_map());
+  ASSERT(elms->map() != heap->fixed_cow_array_map());
// For now this trick is only applied to fixed arrays in new and paged space.
-  ASSERT(!HEAP->lo_space()->Contains(elms));
+  ASSERT(!heap->lo_space()->Contains(elms));

   const int len = elms->length();

@@ -7980,19 +7980,21 @@
 }


-MaybeObject* DeoptimizationInputData::Allocate(int deopt_entry_count,
+MaybeObject* DeoptimizationInputData::Allocate(Isolate* isolate,
+                                               int deopt_entry_count,
                                                PretenureFlag pretenure) {
   ASSERT(deopt_entry_count > 0);
-  return HEAP->AllocateFixedArray(LengthFor(deopt_entry_count),
-                                  pretenure);
+  return isolate->heap()->AllocateFixedArray(LengthFor(deopt_entry_count),
+                                             pretenure);
 }


-MaybeObject* DeoptimizationOutputData::Allocate(int number_of_deopt_points,
+MaybeObject* DeoptimizationOutputData::Allocate(Isolate* isolate,
+                                                int number_of_deopt_points,
                                                 PretenureFlag pretenure) {
-  if (number_of_deopt_points == 0) return HEAP->empty_fixed_array();
- return HEAP->AllocateFixedArray(LengthOfFixedArray(number_of_deopt_points),
-                                  pretenure);
+ if (number_of_deopt_points == 0) return isolate->heap()->empty_fixed_array();
+  return isolate->heap()->AllocateFixedArray(
+      LengthOfFixedArray(number_of_deopt_points), pretenure);
 }


=======================================
--- /branches/bleeding_edge/src/objects.h       Tue Sep 10 14:33:06 2013 UTC
+++ /branches/bleeding_edge/src/objects.h       Wed Sep 11 07:14:41 2013 UTC
@@ -4635,7 +4635,8 @@
   }

   // Allocates a DeoptimizationInputData.
-  MUST_USE_RESULT static MaybeObject* Allocate(int deopt_entry_count,
+  MUST_USE_RESULT static MaybeObject* Allocate(Isolate* isolate,
+                                               int deopt_entry_count,
                                                PretenureFlag pretenure);

   // Casting.
@@ -4681,7 +4682,8 @@
   }

   // Allocates a DeoptimizationOutputData.
-  MUST_USE_RESULT static MaybeObject* Allocate(int number_of_deopt_points,
+  MUST_USE_RESULT static MaybeObject* Allocate(Isolate* isolate,
+                                               int number_of_deopt_points,
                                                PretenureFlag pretenure);

   // Casting.
=======================================
--- /branches/bleeding_edge/src/profile-generator.cc Thu Sep  5 13:20:51 2013 UTC
+++ /branches/bleeding_edge/src/profile-generator.cc Wed Sep 11 07:14:41 2013 UTC
@@ -41,8 +41,8 @@
 namespace internal {


-StringsStorage::StringsStorage()
-    : names_(StringsMatch) {
+StringsStorage::StringsStorage(Heap* heap)
+    : hash_seed_(heap->HashSeed()), names_(StringsMatch) {
 }


@@ -61,7 +61,7 @@
   OS::StrNCpy(dst, src, len);
   dst[len] = '\0';
   uint32_t hash =
- StringHasher::HashSequentialString(dst.start(), len, HEAP->HashSeed());
+      StringHasher::HashSequentialString(dst.start(), len, hash_seed_);
   return AddOrDisposeString(dst.start(), hash);
 }

@@ -95,7 +95,7 @@
     return format;
   }
   uint32_t hash = StringHasher::HashSequentialString(
-      str.start(), len, HEAP->HashSeed());
+      str.start(), len, hash_seed_);
   return AddOrDisposeString(str.start(), hash);
 }

@@ -443,8 +443,9 @@
 }


-CpuProfilesCollection::CpuProfilesCollection()
-    : current_profiles_semaphore_(1) {
+CpuProfilesCollection::CpuProfilesCollection(Heap* heap)
+    : function_and_resource_names_(heap),
+      current_profiles_semaphore_(1) {
 }


=======================================
--- /branches/bleeding_edge/src/profile-generator.h Thu Sep  5 13:20:51 2013 UTC
+++ /branches/bleeding_edge/src/profile-generator.h Wed Sep 11 07:14:41 2013 UTC
@@ -41,7 +41,7 @@
 // forever, even if they disappear from JS heap or external storage.
 class StringsStorage {
  public:
-  StringsStorage();
+  explicit StringsStorage(Heap* heap);
   ~StringsStorage();

   const char* GetCopy(const char* src);
@@ -63,6 +63,7 @@
   const char* AddOrDisposeString(char* str, uint32_t hash);

   // Mapping of strings by String::Hash to const char* strings.
+  uint32_t hash_seed_;
   HashMap names_;

   DISALLOW_COPY_AND_ASSIGN(StringsStorage);
@@ -277,7 +278,7 @@

 class CpuProfilesCollection {
  public:
-  CpuProfilesCollection();
+  explicit CpuProfilesCollection(Heap* heap);
   ~CpuProfilesCollection();

bool StartProfiling(const char* title, unsigned uid, bool record_samples);
=======================================
--- /branches/bleeding_edge/src/property.h      Mon Sep  2 12:16:02 2013 UTC
+++ /branches/bleeding_edge/src/property.h      Wed Sep 11 07:14:41 2013 UTC
@@ -46,7 +46,8 @@
  public:
   MUST_USE_RESULT MaybeObject* KeyToUniqueName() {
     if (!key_->IsUniqueName()) {
- MaybeObject* maybe_result = HEAP->InternalizeString(String::cast(key_));
+      MaybeObject* maybe_result =
+ key_->GetIsolate()->heap()->InternalizeString(String::cast(key_));
       if (!maybe_result->To(&key_)) return maybe_result;
     }
     return key_;
=======================================
--- /branches/bleeding_edge/src/runtime.cc      Mon Sep  9 16:34:40 2013 UTC
+++ /branches/bleeding_edge/src/runtime.cc      Wed Sep 11 07:14:41 2013 UTC
@@ -12950,7 +12950,7 @@
   }
   FixedArray* instances = FixedArray::cast(object);

-  ASSERT(HEAP->IsHeapIterable());
+  ASSERT(isolate->heap()->IsHeapIterable());
   // Fill the referencing objects.
   HeapIterator heap_iterator2(heap);
   count = DebugConstructedBy(&heap_iterator2,
=======================================
--- /branches/bleeding_edge/src/scopeinfo.cc    Tue Sep  3 11:54:08 2013 UTC
+++ /branches/bleeding_edge/src/scopeinfo.cc    Wed Sep 11 07:14:41 2013 UTC
@@ -445,7 +445,8 @@
                               int slot_index) {
   String* internalized_name;
   ASSERT(slot_index > kNotFound);
-  if (HEAP->InternalizeStringIfExists(name, &internalized_name)) {
+  if (name->GetIsolate()->heap()->InternalizeStringIfExists(
+          name, &internalized_name)) {
     int index = Hash(data, internalized_name);
     Key& key = keys_[index];
     key.data = data;
@@ -472,7 +473,8 @@
                                      InitializationFlag init_flag,
                                      int slot_index) {
   String* internalized_name;
-  if (HEAP->InternalizeStringIfExists(name, &internalized_name)) {
+  if (name->GetIsolate()->heap()->InternalizeStringIfExists(
+          name, &internalized_name)) {
     int index = Hash(data, name);
     Key& key = keys_[index];
     ASSERT(key.data == data);
=======================================
--- /branches/bleeding_edge/src/serialize.cc    Tue Sep  3 11:54:08 2013 UTC
+++ /branches/bleeding_edge/src/serialize.cc    Wed Sep 11 07:14:41 2013 UTC
@@ -1307,7 +1307,7 @@
   // We don't support serializing installed extensions.
   CHECK(!isolate->has_installed_extensions());

-  HEAP->IterateStrongRoots(this, VISIT_ONLY_STRONG);
+  isolate->heap()->IterateStrongRoots(this, VISIT_ONLY_STRONG);
 }


@@ -1485,7 +1485,7 @@
   // uses to know it is done deserializing the array.
   Object* undefined = isolate()->heap()->undefined_value();
   VisitPointer(&undefined);
-  HEAP->IterateWeakRoots(this, VISIT_ALL);
+  isolate()->heap()->IterateWeakRoots(this, VISIT_ALL);
   Pad();
 }

@@ -1498,7 +1498,7 @@
   if (how_to_code == kPlain &&
       where_to_point == kStartOfObject &&
       root_index < kRootArrayNumberOfConstantEncodings &&
-      !HEAP->InNewSpace(object)) {
+      !isolate()->heap()->InNewSpace(object)) {
     if (skip == 0) {
       sink_->Put(kRootArrayConstants + kNoSkipDistance + root_index,
                  "RootConstant");
@@ -1631,7 +1631,7 @@
           root_index != kInvalidRootIndex &&
           root_index < kRootArrayNumberOfConstantEncodings &&
           current_contents == current[-1]) {
-        ASSERT(!HEAP->InNewSpace(current_contents));
+ ASSERT(!serializer_->isolate()->heap()->InNewSpace(current_contents));
         int repeat_count = 1;
while (current < end - 1 && current[repeat_count] == current_contents) {
           repeat_count++;
@@ -1748,7 +1748,8 @@
   Address references_start = reinterpret_cast<Address>(resource_pointer);
   OutputRawData(references_start);
   for (int i = 0; i < Natives::GetBuiltinsCount(); i++) {
-    Object* source = HEAP->natives_source_cache()->get(i);
+    Object* source =
+        serializer_->isolate()->heap()->natives_source_cache()->get(i);
     if (!source->IsUndefined()) {
       ExternalAsciiString* string = ExternalAsciiString::cast(source);
       typedef v8::String::ExternalAsciiStringResource Resource;
@@ -1817,7 +1818,7 @@
 int Serializer::SpaceOfObject(HeapObject* object) {
   for (int i = FIRST_SPACE; i <= LAST_SPACE; i++) {
     AllocationSpace s = static_cast<AllocationSpace>(i);
-    if (HEAP->InSpace(object, s)) {
+    if (object->GetHeap()->InSpace(object, s)) {
       ASSERT(i < kNumberOfSpaces);
       return i;
     }
=======================================
--- /branches/bleeding_edge/src/serialize.h     Tue Sep  3 11:54:08 2013 UTC
+++ /branches/bleeding_edge/src/serialize.h     Wed Sep 11 07:14:41 2013 UTC
@@ -620,7 +620,8 @@
     return o->IsName() || o->IsSharedFunctionInfo() ||
            o->IsHeapNumber() || o->IsCode() ||
            o->IsScopeInfo() ||
-           o->map() == HEAP->fixed_cow_array_map();
+           o->map() ==
+ startup_serializer_->isolate()->heap()->fixed_cow_array_map();
   }

  private:
=======================================
--- /branches/bleeding_edge/src/spaces-inl.h    Thu Mar 28 11:19:38 2013 UTC
+++ /branches/bleeding_edge/src/spaces-inl.h    Wed Sep 11 07:14:41 2013 UTC
@@ -194,11 +194,11 @@
 }


-MemoryChunk* MemoryChunk::FromAnyPointerAddress(Address addr) {
+MemoryChunk* MemoryChunk::FromAnyPointerAddress(Heap* heap, Address addr) {
   MemoryChunk* maybe = reinterpret_cast<MemoryChunk*>(
       OffsetFrom(addr) & ~Page::kPageAlignmentMask);
   if (maybe->owner() != NULL) return maybe;
-  LargeObjectIterator iterator(HEAP->lo_space());
+  LargeObjectIterator iterator(heap->lo_space());
   for (HeapObject* o = iterator.Next(); o != NULL; o = iterator.Next()) {
     // Fixed arrays are the only pointer-containing objects in large object
     // space.
@@ -315,12 +315,12 @@
 #ifdef DEBUG
   // If we are stressing compaction we waste some memory in new space
   // in order to get more frequent GCs.
-  if (FLAG_stress_compaction && !HEAP->linear_allocation()) {
+  if (FLAG_stress_compaction && !heap()->linear_allocation()) {
     if (allocation_info_.limit - old_top >= size_in_bytes * 4) {
       int filler_size = size_in_bytes * 4;
       for (int i = 0; i < filler_size; i += kPointerSize) {
         *(reinterpret_cast<Object**>(old_top + i)) =
-            HEAP->one_pointer_filler_map();
+            heap()->one_pointer_filler_map();
       }
       old_top += filler_size;
       allocation_info_.top += filler_size;
=======================================
--- /branches/bleeding_edge/src/spaces.h        Thu Sep  5 08:17:57 2013 UTC
+++ /branches/bleeding_edge/src/spaces.h        Wed Sep 11 07:14:41 2013 UTC
@@ -307,7 +307,7 @@
   }

   // Only works for addresses in pointer spaces, not data or code spaces.
-  static inline MemoryChunk* FromAnyPointerAddress(Address addr);
+ static inline MemoryChunk* FromAnyPointerAddress(Heap* heap, Address addr);

   Address address() { return reinterpret_cast<Address>(this); }

=======================================
--- /branches/bleeding_edge/src/store-buffer-inl.h Thu Jul 11 09:17:03 2013 UTC
+++ /branches/bleeding_edge/src/store-buffer-inl.h Wed Sep 11 07:14:41 2013 UTC
@@ -67,7 +67,7 @@
     if (top >= old_limit_) {
       ASSERT(callback_ != NULL);
       (*callback_)(heap_,
-                   MemoryChunk::FromAnyPointerAddress(addr),
+                   MemoryChunk::FromAnyPointerAddress(heap_, addr),
                    kStoreBufferFullEvent);
     }
   }
=======================================
--- /branches/bleeding_edge/src/store-buffer.cc Fri Sep  6 12:25:46 2013 UTC
+++ /branches/bleeding_edge/src/store-buffer.cc Wed Sep 11 07:14:41 2013 UTC
@@ -221,7 +221,7 @@
     if (previous_chunk != NULL && previous_chunk->Contains(addr)) {
       containing_chunk = previous_chunk;
     } else {
-      containing_chunk = MemoryChunk::FromAnyPointerAddress(addr);
+      containing_chunk = MemoryChunk::FromAnyPointerAddress(heap_, addr);
     }
     int old_counter = containing_chunk->store_buffer_counter();
     if (old_counter == threshold) {
@@ -247,7 +247,7 @@
     if (previous_chunk != NULL && previous_chunk->Contains(addr)) {
       containing_chunk = previous_chunk;
     } else {
-      containing_chunk = MemoryChunk::FromAnyPointerAddress(addr);
+      containing_chunk = MemoryChunk::FromAnyPointerAddress(heap_, addr);
       previous_chunk = containing_chunk;
     }
     if (!containing_chunk->IsFlagSet(flag)) {
=======================================
--- /branches/bleeding_edge/src/string-stream.cc Tue Sep  3 11:54:08 2013 UTC
+++ /branches/bleeding_edge/src/string-stream.cc Wed Sep 11 07:14:41 2013 UTC
@@ -350,7 +350,7 @@

 void StringStream::PrintUsingMap(JSObject* js_object) {
   Map* map = js_object->map();
-  if (!HEAP->Contains(map) ||
+  if (!js_object->GetHeap()->Contains(map) ||
       !map->IsHeapObject() ||
       !map->IsMap()) {
     Add("<Invalid map>\n");
@@ -384,7 +384,7 @@


 void StringStream::PrintFixedArray(FixedArray* array, unsigned int limit) {
-  Heap* heap = HEAP;
+  Heap* heap = array->GetHeap();
   for (unsigned int i = 0; i < 10 && i < limit; i++) {
     Object* element = array->get(i);
     if (element != heap->the_hole_value()) {
@@ -491,48 +491,39 @@


void StringStream::PrintFunction(Object* f, Object* receiver, Code** code) {
-  if (f->IsHeapObject() &&
-      HEAP->Contains(HeapObject::cast(f)) &&
-      HEAP->Contains(HeapObject::cast(f)->map()) &&
-      HeapObject::cast(f)->map()->IsMap()) {
-    if (f->IsJSFunction()) {
-      JSFunction* fun = JSFunction::cast(f);
-      // Common case: on-stack function present and resolved.
-      PrintPrototype(fun, receiver);
-      *code = fun->code();
-    } else if (f->IsInternalizedString()) {
-      // Unresolved and megamorphic calls: Instead of the function
-      // we have the function name on the stack.
-      PrintName(f);
-      Add("/* unresolved */ ");
-    } else {
- // Unless this is the frame of a built-in function, we should always have
-      // the callee function or name on the stack. If we don't, we have a
-      // problem or a change of the stack frame layout.
-      Add("%o", f);
-      Add("/* warning: no JSFunction object or function name found */ ");
-    }
-    /* } else if (is_trampoline()) {
-       Print("trampoline ");
-    */
+  if (!f->IsHeapObject()) {
+    Add("/* warning: 'function' was not a heap object */ ");
+    return;
+  }
+  Heap* heap = HeapObject::cast(f)->GetHeap();
+  if (!heap->Contains(HeapObject::cast(f))) {
+    Add("/* warning: 'function' was not on the heap */ ");
+    return;
+  }
+  if (!heap->Contains(HeapObject::cast(f)->map())) {
+    Add("/* warning: function's map was not on the heap */ ");
+    return;
+  }
+  if (!HeapObject::cast(f)->map()->IsMap()) {
+    Add("/* warning: function's map was not a valid map */ ");
+    return;
+  }
+  if (f->IsJSFunction()) {
+    JSFunction* fun = JSFunction::cast(f);
+    // Common case: on-stack function present and resolved.
+    PrintPrototype(fun, receiver);
+    *code = fun->code();
+  } else if (f->IsInternalizedString()) {
+    // Unresolved and megamorphic calls: Instead of the function
+    // we have the function name on the stack.
+    PrintName(f);
+    Add("/* unresolved */ ");
   } else {
-    if (!f->IsHeapObject()) {
-      Add("/* warning: 'function' was not a heap object */ ");
-      return;
-    }
-    if (!HEAP->Contains(HeapObject::cast(f))) {
-      Add("/* warning: 'function' was not on the heap */ ");
-      return;
-    }
-    if (!HEAP->Contains(HeapObject::cast(f)->map())) {
-      Add("/* warning: function's map was not on the heap */ ");
-      return;
-    }
-    if (!HeapObject::cast(f)->map()->IsMap()) {
-      Add("/* warning: function's map was not a valid map */ ");
-      return;
-    }
-    Add("/* warning: Invalid JSFunction object found */ ");
+ // Unless this is the frame of a built-in function, we should always have
+    // the callee function or name on the stack. If we don't, we have a
+    // problem or a change of the stack frame layout.
+    Add("%o", f);
+    Add("/* warning: no JSFunction object or function name found */ ");
   }
 }

=======================================
--- /branches/bleeding_edge/src/stub-cache.cc   Fri Sep  6 11:31:25 2013 UTC
+++ /branches/bleeding_edge/src/stub-cache.cc   Wed Sep 11 07:14:41 2013 UTC
@@ -1309,7 +1309,7 @@
   // can't use either LoadIC or KeyedLoadIC constructors.
   IC ic(IC::NO_EXTRA_FRAME, isolate);
   ASSERT(ic.target()->is_load_stub() || ic.target()->is_keyed_load_stub());
-  if (!ic.SlowIsUndeclaredGlobal()) return HEAP->undefined_value();
+  if (!ic.SlowIsUndeclaredGlobal()) return isolate->heap()->undefined_value();

   // Throw a reference error.
   HandleScope scope(isolate);
=======================================
--- /branches/bleeding_edge/test/cctest/cctest.h Thu Sep  5 08:48:34 2013 UTC
+++ /branches/bleeding_edge/test/cctest/cctest.h Wed Sep 11 07:14:41 2013 UTC
@@ -71,6 +71,10 @@
   EXTENSION_LIST(DEFINE_EXTENSION_FLAG)
 #undef DEFINE_EXTENSION_FLAG

+// Temporary macros for accessing current isolate and its subobjects.
+// They provide better readability, especially when used a lot in the code.
+#define HEAP (v8::internal::Isolate::Current()->heap())
+
 class CcTest {
  public:
   typedef void (TestFunction)();
=======================================
--- /branches/bleeding_edge/test/cctest/test-cpu-profiler.cc Thu Aug 29 09:15:13 2013 UTC
+++ /branches/bleeding_edge/test/cctest/test-cpu-profiler.cc Wed Sep 11 07:14:41 2013 UTC
@@ -49,7 +49,8 @@


 TEST(StartStop) {
-  CpuProfilesCollection profiles;
+  i::Isolate* isolate = CcTest::i_isolate();
+  CpuProfilesCollection profiles(isolate->heap());
   ProfileGenerator generator(&profiles);
SmartPointer<ProfilerEventsProcessor> processor(new ProfilerEventsProcessor(
           &generator, NULL, TimeDelta::FromMicroseconds(100)));
@@ -140,7 +141,7 @@
   i::Code* args3_code = CreateCode(&env);
   i::Code* args4_code = CreateCode(&env);

-  CpuProfilesCollection* profiles = new CpuProfilesCollection;
+  CpuProfilesCollection* profiles = new CpuProfilesCollection(isolate->heap());
   profiles->StartProfiling("", 1, false);
   ProfileGenerator generator(profiles);
SmartPointer<ProfilerEventsProcessor> processor(new ProfilerEventsProcessor(
@@ -202,7 +203,7 @@
   i::Code* frame2_code = CreateCode(&env);
   i::Code* frame3_code = CreateCode(&env);

-  CpuProfilesCollection* profiles = new CpuProfilesCollection;
+  CpuProfilesCollection* profiles = new CpuProfilesCollection(isolate->heap());
   profiles->StartProfiling("", 1, false);
   ProfileGenerator generator(profiles);
SmartPointer<ProfilerEventsProcessor> processor(new ProfilerEventsProcessor(
@@ -271,7 +272,7 @@

   i::Code* code = CreateCode(&env);

-  CpuProfilesCollection* profiles = new CpuProfilesCollection;
+  CpuProfilesCollection* profiles = new CpuProfilesCollection(isolate->heap());
   profiles->StartProfiling("", 1, false);
   ProfileGenerator generator(profiles);
SmartPointer<ProfilerEventsProcessor> processor(new ProfilerEventsProcessor(
=======================================
--- /branches/bleeding_edge/test/cctest/test-profile-generator.cc Thu Sep  5 13:20:51 2013 UTC
+++ /branches/bleeding_edge/test/cctest/test-profile-generator.cc Wed Sep 11 07:14:41 2013 UTC
@@ -399,7 +399,7 @@

 TEST(RecordTickSample) {
   TestSetup test_setup;
-  CpuProfilesCollection profiles;
+  CpuProfilesCollection profiles(CcTest::i_isolate()->heap());
   profiles.StartProfiling("", 1, false);
   ProfileGenerator generator(&profiles);
CodeEntry* entry1 = profiles.NewCodeEntry(i::Logger::FUNCTION_TAG, "aaa");
@@ -465,7 +465,7 @@

 TEST(SampleIds) {
   TestSetup test_setup;
-  CpuProfilesCollection profiles;
+  CpuProfilesCollection profiles(CcTest::i_isolate()->heap());
   profiles.StartProfiling("", 1, true);
   ProfileGenerator generator(&profiles);
CodeEntry* entry1 = profiles.NewCodeEntry(i::Logger::FUNCTION_TAG, "aaa");
@@ -513,7 +513,7 @@

 TEST(NoSamples) {
   TestSetup test_setup;
-  CpuProfilesCollection profiles;
+  CpuProfilesCollection profiles(CcTest::i_isolate()->heap());
   profiles.StartProfiling("", 1, false);
   ProfileGenerator generator(&profiles);
CodeEntry* entry1 = profiles.NewCodeEntry(i::Logger::FUNCTION_TAG, "aaa");
@@ -652,7 +652,7 @@


 TEST(Issue51919) {
-  CpuProfilesCollection collection;
+  CpuProfilesCollection collection(CcTest::i_isolate()->heap());
   i::EmbeddedVector<char*,
       CpuProfilesCollection::kMaxSimultaneousProfiles> titles;
for (int i = 0; i < CpuProfilesCollection::kMaxSimultaneousProfiles; ++i) {

--
--
v8-dev mailing list
[email protected]
http://groups.google.com/group/v8-dev
--- You received this message because you are subscribed to the Google Groups "v8-dev" group.
To unsubscribe from this group and stop receiving emails from it, send an email 
to [email protected].
For more options, visit https://groups.google.com/groups/opt_out.

Reply via email to