Revision: 16622
Author:   [email protected]
Date:     Tue Sep 10 14:30:36 2013 UTC
Log:      Get rid of most uses of 'Temporary macro' HEAP

[email protected]
BUG=

Review URL: https://codereview.chromium.org/23708030
http://code.google.com/p/v8/source/detail?r=16622

Modified:
 /branches/bleeding_edge/src/accessors.cc
 /branches/bleeding_edge/src/ast.cc
 /branches/bleeding_edge/src/bootstrapper.h
 /branches/bleeding_edge/src/builtins.cc
 /branches/bleeding_edge/src/compiler.cc
 /branches/bleeding_edge/src/d8.cc
 /branches/bleeding_edge/src/elements.cc
 /branches/bleeding_edge/src/extensions/externalize-string-extension.cc
 /branches/bleeding_edge/src/extensions/gc-extension.cc
 /branches/bleeding_edge/src/frames.cc
 /branches/bleeding_edge/src/heap-inl.h
 /branches/bleeding_edge/src/heap-snapshot-generator.cc
 /branches/bleeding_edge/src/heap-snapshot-generator.h
 /branches/bleeding_edge/src/heap.cc
 /branches/bleeding_edge/src/isolate.h
 /branches/bleeding_edge/src/jsregexp.h
 /branches/bleeding_edge/src/liveedit.cc
 /branches/bleeding_edge/src/mark-compact-inl.h
 /branches/bleeding_edge/src/x64/assembler-x64.cc
 /branches/bleeding_edge/src/x64/macro-assembler-x64.cc

=======================================
--- /branches/bleeding_edge/src/accessors.cc    Tue Sep  3 06:59:01 2013 UTC
+++ /branches/bleeding_edge/src/accessors.cc    Tue Sep 10 14:30:36 2013 UTC
@@ -400,7 +400,7 @@
       return *GetScriptWrapper(eval_from_script);
     }
   }
-  return HEAP->undefined_value();
+  return isolate->heap()->undefined_value();
 }


=======================================
--- /branches/bleeding_edge/src/ast.cc  Fri Sep  6 11:32:46 2013 UTC
+++ /branches/bleeding_edge/src/ast.cc  Tue Sep 10 14:30:36 2013 UTC
@@ -599,7 +599,7 @@
     Handle<JSFunction> candidate(JSFunction::cast(cell_->value()));
     // If the function is in new space we assume it's more likely to
     // change and thus prefer the general IC code.
-    if (!HEAP->InNewSpace(*candidate)) {
+    if (!lookup->isolate()->heap()->InNewSpace(*candidate)) {
       target_ = candidate;
       return true;
     }
=======================================
--- /branches/bleeding_edge/src/bootstrapper.h  Tue Jun  4 10:30:05 2013 UTC
+++ /branches/bleeding_edge/src/bootstrapper.h  Tue Sep 10 14:30:36 2013 UTC
@@ -45,7 +45,8 @@
explicit SourceCodeCache(Script::Type type): type_(type), cache_(NULL) { }

   void Initialize(bool create_heap_objects) {
-    cache_ = create_heap_objects ? HEAP->empty_fixed_array() : NULL;
+    cache_ = create_heap_objects ?
+        Isolate::Current()->heap()->empty_fixed_array() : NULL;
   }

   void Iterate(ObjectVisitor* v) {
=======================================
--- /branches/bleeding_edge/src/builtins.cc     Thu Sep  5 11:27:22 2013 UTC
+++ /branches/bleeding_edge/src/builtins.cc     Tue Sep 10 14:30:36 2013 UTC
@@ -303,11 +303,11 @@
   } else {
     entry_size = kDoubleSize;
   }
-  ASSERT(elms->map() != HEAP->fixed_cow_array_map());
+  ASSERT(elms->map() != heap->fixed_cow_array_map());
// For now this trick is only applied to fixed arrays in new and paged space.
   // In large object space the object's start must coincide with chunk
   // and thus the trick is just not applicable.
-  ASSERT(!HEAP->lo_space()->Contains(elms));
+  ASSERT(!heap->lo_space()->Contains(elms));

   STATIC_ASSERT(FixedArrayBase::kMapOffset == 0);
   STATIC_ASSERT(FixedArrayBase::kLengthOffset == kPointerSize);
=======================================
--- /branches/bleeding_edge/src/compiler.cc     Tue Sep 10 11:09:22 2013 UTC
+++ /branches/bleeding_edge/src/compiler.cc     Tue Sep 10 14:30:36 2013 UTC
@@ -725,7 +725,7 @@
     }
     script->set_is_shared_cross_origin(is_shared_cross_origin);

-    script->set_data(script_data.is_null() ? HEAP->undefined_value()
+ script->set_data(script_data.is_null() ? isolate->heap()->undefined_value()
                                            : *script_data);

     // Compile the function and add it to the cache.
@@ -742,8 +742,8 @@
       compilation_cache->PutScript(source, context, result);
     }
   } else {
-    if (result->ic_age() != HEAP->global_ic_age()) {
-      result->ResetForNewContext(HEAP->global_ic_age());
+    if (result->ic_age() != isolate->heap()->global_ic_age()) {
+      result->ResetForNewContext(isolate->heap()->global_ic_age());
     }
   }

@@ -805,8 +805,8 @@
       }
     }
   } else {
-    if (result->ic_age() != HEAP->global_ic_age()) {
-      result->ResetForNewContext(HEAP->global_ic_age());
+    if (result->ic_age() != isolate->heap()->global_ic_age()) {
+      result->ResetForNewContext(isolate->heap()->global_ic_age());
     }
   }

=======================================
--- /branches/bleeding_edge/src/d8.cc   Tue Sep  3 07:34:34 2013 UTC
+++ /branches/bleeding_edge/src/d8.cc   Tue Sep 10 14:30:36 2013 UTC
@@ -772,7 +772,8 @@
       = i::Handle<i::JSObject>(debug->debug_context()->global_object());
   utility_context->Global()->Set(String::New("$debug"),
                                   Utils::ToLocal(js_debug));
-  debug->debug_context()->set_security_token(HEAP->undefined_value());
+  debug->debug_context()->set_security_token(
+      reinterpret_cast<i::Isolate*>(isolate)->heap()->undefined_value());
 #endif  // ENABLE_DEBUGGER_SUPPORT

   // Run the d8 shell utility script in the utility context
=======================================
--- /branches/bleeding_edge/src/elements.cc     Tue Sep  3 06:59:01 2013 UTC
+++ /branches/bleeding_edge/src/elements.cc     Tue Sep 10 14:30:36 2013 UTC
@@ -154,7 +154,8 @@
                                        ElementsKind to_kind,
                                        uint32_t to_start,
                                        int raw_copy_size) {
-  ASSERT(to_base->map() != HEAP->fixed_cow_array_map());
+  ASSERT(to_base->map() !=
+      from_base->GetIsolate()->heap()->fixed_cow_array_map());
   DisallowHeapAllocation no_allocation;
   int copy_size = raw_copy_size;
   if (raw_copy_size < 0) {
=======================================
--- /branches/bleeding_edge/src/extensions/externalize-string-extension.cc Wed Jun  5 12:36:33 2013 UTC
+++ /branches/bleeding_edge/src/extensions/externalize-string-extension.cc Tue Sep 10 14:30:36 2013 UTC
@@ -103,7 +103,8 @@
         reinterpret_cast<char*>(data), string->length());
     result = string->MakeExternal(resource);
     if (result && !string->IsInternalizedString()) {
-      HEAP->external_string_table()->AddString(*string);
+ i::Isolate* isolate = reinterpret_cast<i::Isolate*>(args.GetIsolate());
+      isolate->heap()->external_string_table()->AddString(*string);
     }
     if (!result) delete resource;
   } else {
@@ -113,7 +114,8 @@
         data, string->length());
     result = string->MakeExternal(resource);
     if (result && !string->IsInternalizedString()) {
-      HEAP->external_string_table()->AddString(*string);
+ i::Isolate* isolate = reinterpret_cast<i::Isolate*>(args.GetIsolate());
+      isolate->heap()->external_string_table()->AddString(*string);
     }
     if (!result) delete resource;
   }
=======================================
--- /branches/bleeding_edge/src/extensions/gc-extension.cc Wed Jun  5 12:36:33 2013 UTC
+++ /branches/bleeding_edge/src/extensions/gc-extension.cc Tue Sep 10 14:30:36 2013 UTC
@@ -39,10 +39,11 @@


 void GCExtension::GC(const v8::FunctionCallbackInfo<v8::Value>& args) {
+  i::Isolate* isolate = reinterpret_cast<i::Isolate*>(args.GetIsolate());
   if (args[0]->BooleanValue()) {
-    HEAP->CollectGarbage(NEW_SPACE, "gc extension");
+    isolate->heap()->CollectGarbage(NEW_SPACE, "gc extension");
   } else {
-    HEAP->CollectAllGarbage(Heap::kNoGCFlags, "gc extension");
+    isolate->heap()->CollectAllGarbage(Heap::kNoGCFlags, "gc extension");
   }
 }

=======================================
--- /branches/bleeding_edge/src/frames.cc       Tue Sep  3 11:47:16 2013 UTC
+++ /branches/bleeding_edge/src/frames.cc       Tue Sep 10 14:30:36 2013 UTC
@@ -489,7 +489,7 @@


 Code* EntryFrame::unchecked_code() const {
-  return HEAP->js_entry_code();
+  return isolate()->heap()->js_entry_code();
 }


@@ -512,7 +512,7 @@


 Code* EntryConstructFrame::unchecked_code() const {
-  return HEAP->js_construct_entry_code();
+  return isolate()->heap()->js_construct_entry_code();
 }


=======================================
--- /branches/bleeding_edge/src/heap-inl.h      Tue Sep  3 11:47:16 2013 UTC
+++ /branches/bleeding_edge/src/heap-inl.h      Tue Sep 10 14:30:36 2013 UTC
@@ -69,7 +69,7 @@
   *(--rear_) = size;
   // Assert no overflow into live objects.
 #ifdef DEBUG
-  SemiSpace::AssertValidRange(HEAP->new_space()->top(),
+ SemiSpace::AssertValidRange(target->GetIsolate()->heap()->new_space()->top(),
                               reinterpret_cast<Address>(rear_));
 #endif
 }
@@ -508,7 +508,7 @@


 void Heap::ScavengeObject(HeapObject** p, HeapObject* object) {
-  ASSERT(HEAP->InFromSpace(object));
+  ASSERT(object->GetIsolate()->heap()->InFromSpace(object));

   // We use the first word (where the map pointer usually is) of a heap
   // object to record the forwarding pointer.  A forwarding pointer can
@@ -520,7 +520,7 @@
   // copied.
   if (first_word.IsForwardingAddress()) {
     HeapObject* dest = first_word.ToForwardingAddress();
-    ASSERT(HEAP->InFromSpace(*p));
+    ASSERT(object->GetIsolate()->heap()->InFromSpace(*p));
     *p = dest;
     return;
   }
@@ -613,10 +613,10 @@


 #ifdef DEBUG
-#define GC_GREEDY_CHECK() \
-  if (FLAG_gc_greedy) HEAP->GarbageCollectionGreedyCheck()
+#define GC_GREEDY_CHECK(ISOLATE) \
+  if (FLAG_gc_greedy) (ISOLATE)->heap()->GarbageCollectionGreedyCheck()
 #else
-#define GC_GREEDY_CHECK() { }
+#define GC_GREEDY_CHECK(ISOLATE) { }
 #endif

 // Calls the FUNCTION_CALL function and retries it up to three times
@@ -628,7 +628,7 @@

 #define CALL_AND_RETRY(ISOLATE, FUNCTION_CALL, RETURN_VALUE, RETURN_EMPTY, OOM)\
   do {                                                                        \
-    GC_GREEDY_CHECK();                                                        \
+    GC_GREEDY_CHECK(ISOLATE);                                                 \
     MaybeObject* __maybe_object__ = FUNCTION_CALL;                            \
     Object* __object__ = NULL;                                                \
     if (__maybe_object__->ToObject(&__object__)) RETURN_VALUE;                \
@@ -636,7 +636,7 @@
       OOM;                                                                    \
     }                                                                         \
     if (!__maybe_object__->IsRetryAfterGC()) RETURN_EMPTY;                    \
-    ISOLATE->heap()->CollectGarbage(Failure::cast(__maybe_object__)->         \
+    (ISOLATE)->heap()->CollectGarbage(Failure::cast(__maybe_object__)->       \
                                     allocation_space(),                       \
                                     "allocation failure");                    \
     __maybe_object__ = FUNCTION_CALL;                                         \
@@ -645,8 +645,8 @@
       OOM;                                                                    \
     }                                                                         \
     if (!__maybe_object__->IsRetryAfterGC()) RETURN_EMPTY;                    \
-    ISOLATE->counters()->gc_last_resort_from_handles()->Increment();          \
-    ISOLATE->heap()->CollectAllAvailableGarbage("last resort gc");            \
+    (ISOLATE)->counters()->gc_last_resort_from_handles()->Increment();        \
+    (ISOLATE)->heap()->CollectAllAvailableGarbage("last resort gc");          \
     {                                                                         \
       AlwaysAllocateScope __scope__;                                          \
       __maybe_object__ = FUNCTION_CALL;                                       \
@@ -719,12 +719,12 @@
   for (int i = 0; i < new_space_strings_.length(); ++i) {
     Object* obj = Object::cast(new_space_strings_[i]);
     ASSERT(heap_->InNewSpace(obj));
-    ASSERT(obj != HEAP->the_hole_value());
+    ASSERT(obj != heap_->the_hole_value());
   }
   for (int i = 0; i < old_space_strings_.length(); ++i) {
     Object* obj = Object::cast(old_space_strings_[i]);
     ASSERT(!heap_->InNewSpace(obj));
-    ASSERT(obj != HEAP->the_hole_value());
+    ASSERT(obj != heap_->the_hole_value());
   }
 #endif
 }
@@ -831,25 +831,29 @@
   // non-handle code to call handle code. The code still works but
   // performance will degrade, so we want to catch this situation
   // in debug mode.
-  ASSERT(HEAP->always_allocate_scope_depth_ == 0);
-  HEAP->always_allocate_scope_depth_++;
+  Isolate* isolate = Isolate::Current();
+  ASSERT(isolate->heap()->always_allocate_scope_depth_ == 0);
+  isolate->heap()->always_allocate_scope_depth_++;
 }


 AlwaysAllocateScope::~AlwaysAllocateScope() {
-  HEAP->always_allocate_scope_depth_--;
-  ASSERT(HEAP->always_allocate_scope_depth_ == 0);
+  Isolate* isolate = Isolate::Current();
+  isolate->heap()->always_allocate_scope_depth_--;
+  ASSERT(isolate->heap()->always_allocate_scope_depth_ == 0);
 }


 #ifdef VERIFY_HEAP
NoWeakEmbeddedMapsVerificationScope::NoWeakEmbeddedMapsVerificationScope() {
-  HEAP->no_weak_embedded_maps_verification_scope_depth_++;
+  Isolate* isolate = Isolate::Current();
+  isolate->heap()->no_weak_embedded_maps_verification_scope_depth_++;
 }


NoWeakEmbeddedMapsVerificationScope::~NoWeakEmbeddedMapsVerificationScope() {
-  HEAP->no_weak_embedded_maps_verification_scope_depth_--;
+  Isolate* isolate = Isolate::Current();
+  isolate->heap()->no_weak_embedded_maps_verification_scope_depth_--;
 }
 #endif

@@ -858,7 +862,7 @@
   for (Object** current = start; current < end; current++) {
     if ((*current)->IsHeapObject()) {
       HeapObject* object = HeapObject::cast(*current);
-      CHECK(HEAP->Contains(object));
+      CHECK(object->GetIsolate()->heap()->Contains(object));
       CHECK(object->map()->IsMap());
     }
   }
@@ -866,21 +870,23 @@


 double GCTracer::SizeOfHeapObjects() {
-  return (static_cast<double>(HEAP->SizeOfObjects())) / MB;
+  return (static_cast<double>(heap_->SizeOfObjects())) / MB;
 }


 DisallowAllocationFailure::DisallowAllocationFailure() {
 #ifdef DEBUG
-  old_state_ = HEAP->disallow_allocation_failure_;
-  HEAP->disallow_allocation_failure_ = true;
+  Isolate* isolate = Isolate::Current();
+  old_state_ = isolate->heap()->disallow_allocation_failure_;
+  isolate->heap()->disallow_allocation_failure_ = true;
 #endif
 }


 DisallowAllocationFailure::~DisallowAllocationFailure() {
 #ifdef DEBUG
-  HEAP->disallow_allocation_failure_ = old_state_;
+  Isolate* isolate = Isolate::Current();
+  isolate->heap()->disallow_allocation_failure_ = old_state_;
 #endif
 }

=======================================
--- /branches/bleeding_edge/src/heap-snapshot-generator.cc Tue Sep 10 11:12:35 2013 UTC
+++ /branches/bleeding_edge/src/heap-snapshot-generator.cc Tue Sep 10 14:30:36 2013 UTC
@@ -472,7 +472,7 @@


 void HeapObjectsMap::UpdateHeapObjectsMap() {
-  HEAP->CollectAllGarbage(Heap::kMakeHeapIterableMask,
+  heap_->CollectAllGarbage(Heap::kMakeHeapIterableMask,
                           "HeapSnapshotsCollection::UpdateHeapObjectsMap");
   HeapIterator iterator(heap_);
   for (HeapObject* obj = iterator.next();
@@ -560,12 +560,13 @@
 }


-SnapshotObjectId HeapObjectsMap::GenerateId(v8::RetainedObjectInfo* info) {
+SnapshotObjectId HeapObjectsMap::GenerateId(Heap* heap,
+                                            v8::RetainedObjectInfo* info) {
   SnapshotObjectId id = static_cast<SnapshotObjectId>(info->GetHash());
   const char* label = info->GetLabel();
   id ^= StringHasher::HashSequentialString(label,
                                            static_cast<int>(strlen(label)),
-                                           HEAP->HashSeed());
+                                           heap->HashSeed());
   intptr_t element_count = info->GetElementCount();
   if (element_count != -1)
     id ^= ComputeIntegerHash(static_cast<uint32_t>(element_count),
@@ -623,7 +624,7 @@
 Handle<HeapObject> HeapSnapshotsCollection::FindHeapObjectById(
     SnapshotObjectId id) {
   // First perform a full GC in order to avoid dead objects.
-  HEAP->CollectAllGarbage(Heap::kMakeHeapIterableMask,
+  heap()->CollectAllGarbage(Heap::kMakeHeapIterableMask,
                           "HeapSnapshotsCollection::FindHeapObjectById");
   DisallowHeapAllocation no_allocation;
   HeapObject* object = NULL;
@@ -1932,7 +1933,7 @@
   return snapshot_->AddEntry(
       entries_type_,
       name,
-      HeapObjectsMap::GenerateId(info),
+      HeapObjectsMap::GenerateId(collection_->heap(), info),
       size != -1 ? static_cast<int>(size) : 0);
 }

@@ -2108,7 +2109,7 @@
   uint32_t hash = StringHasher::HashSequentialString(
       label_copy,
       static_cast<int>(strlen(label_copy)),
-      HEAP->HashSeed());
+      isolate_->heap()->HashSeed());
HashMap::Entry* entry = native_groups_.Lookup(const_cast<char*>(label_copy),
                                                 hash, true);
   if (entry->value == NULL) {
=======================================
--- /branches/bleeding_edge/src/heap-snapshot-generator.h Tue Sep 10 11:12:35 2013 UTC
+++ /branches/bleeding_edge/src/heap-snapshot-generator.h Tue Sep 10 14:30:36 2013 UTC
@@ -237,7 +237,7 @@
   SnapshotObjectId PushHeapObjectsStats(OutputStream* stream);
   size_t GetUsedMemorySize() const;

-  static SnapshotObjectId GenerateId(v8::RetainedObjectInfo* info);
+ static SnapshotObjectId GenerateId(Heap* heap, v8::RetainedObjectInfo* info);
   static inline SnapshotObjectId GetNthGcSubrootId(int delta);

   static const int kObjectIdStep = 2;
=======================================
--- /branches/bleeding_edge/src/heap.cc Tue Sep 10 11:13:55 2013 UTC
+++ /branches/bleeding_edge/src/heap.cc Tue Sep 10 14:30:36 2013 UTC
@@ -731,7 +731,7 @@
                         int len) {
   if (len == 0) return;

-  ASSERT(array->map() != HEAP->fixed_cow_array_map());
+  ASSERT(array->map() != fixed_cow_array_map());
   Object** dst_objects = array->data_start() + dst_index;
   OS::MemMove(dst_objects,
               array->data_start() + src_index,
@@ -765,9 +765,9 @@
 };


-static void VerifyStringTable() {
+static void VerifyStringTable(Heap* heap) {
   StringTableVerifier verifier;
-  HEAP->string_table()->IterateElements(&verifier);
+  heap->string_table()->IterateElements(&verifier);
 }
 #endif  // VERIFY_HEAP

@@ -922,7 +922,7 @@

 #ifdef VERIFY_HEAP
   if (FLAG_verify_heap) {
-    VerifyStringTable();
+    VerifyStringTable(this);
   }
 #endif

@@ -1046,7 +1046,7 @@

 #ifdef VERIFY_HEAP
   if (FLAG_verify_heap) {
-    VerifyStringTable();
+    VerifyStringTable(this);
   }
 #endif

@@ -1154,29 +1154,33 @@
 // new space.
 class VerifyNonPointerSpacePointersVisitor: public ObjectVisitor {
  public:
+ explicit VerifyNonPointerSpacePointersVisitor(Heap* heap) : heap_(heap) {}
   void VisitPointers(Object** start, Object**end) {
     for (Object** current = start; current < end; current++) {
       if ((*current)->IsHeapObject()) {
-        CHECK(!HEAP->InNewSpace(HeapObject::cast(*current)));
+        CHECK(!heap_->InNewSpace(HeapObject::cast(*current)));
       }
     }
   }
+
+ private:
+  Heap* heap_;
 };


-static void VerifyNonPointerSpacePointers() {
+static void VerifyNonPointerSpacePointers(Heap* heap) {
   // Verify that there are no pointers to new space in spaces where we
   // do not expect them.
-  VerifyNonPointerSpacePointersVisitor v;
-  HeapObjectIterator code_it(HEAP->code_space());
+  VerifyNonPointerSpacePointersVisitor v(heap);
+  HeapObjectIterator code_it(heap->code_space());
   for (HeapObject* object = code_it.Next();
        object != NULL; object = code_it.Next())
     object->Iterate(&v);

// The old data space was normally swept conservatively so that the iterator
   // doesn't work, so we normally skip the next bit.
-  if (!HEAP->old_data_space()->was_swept_conservatively()) {
-    HeapObjectIterator data_it(HEAP->old_data_space());
+  if (!heap->old_data_space()->was_swept_conservatively()) {
+    HeapObjectIterator data_it(heap->old_data_space());
     for (HeapObject* object = data_it.Next();
          object != NULL; object = data_it.Next())
       object->Iterate(&v);
@@ -1323,7 +1327,7 @@
   RelocationLock relocation_lock(this);

 #ifdef VERIFY_HEAP
-  if (FLAG_verify_heap) VerifyNonPointerSpacePointers();
+  if (FLAG_verify_heap) VerifyNonPointerSpacePointers(this);
 #endif

   gc_state_ = SCAVENGE;
@@ -2377,7 +2381,7 @@


 void Heap::ScavengeObjectSlow(HeapObject** p, HeapObject* object) {
-  SLOW_ASSERT(HEAP->InFromSpace(object));
+  SLOW_ASSERT(object->GetIsolate()->heap()->InFromSpace(object));
   MapWord first_word = object->map_word();
   SLOW_ASSERT(!first_word.IsForwardingAddress());
   Map* map = first_word.ToMap();
@@ -7841,7 +7845,7 @@
 void KeyedLookupCache::Update(Map* map, Name* name, int field_offset) {
   if (!name->IsUniqueName()) {
     String* internalized_string;
-    if (!HEAP->InternalizeStringIfExists(
+    if (!map->GetIsolate()->heap()->InternalizeStringIfExists(
             String::cast(name), &internalized_string)) {
       return;
     }
@@ -7849,7 +7853,7 @@
   }
// This cache is cleared only between mark compact passes, so we expect the
   // cache to only contain old space names.
-  ASSERT(!HEAP->InNewSpace(name));
+  ASSERT(!map->GetIsolate()->heap()->InNewSpace(name));

   int index = (Hash(map, name) & kHashMask);
   // After a GC there will be free slots, so we use them in order (this may
=======================================
--- /branches/bleeding_edge/src/isolate.h       Tue Sep 10 11:13:55 2013 UTC
+++ /branches/bleeding_edge/src/isolate.h       Tue Sep 10 14:30:36 2013 UTC
@@ -1532,7 +1532,7 @@

 // Mark the native context with out of memory.
 inline void Context::mark_out_of_memory() {
-  native_context()->set_out_of_memory(HEAP->true_value());
+  native_context()->set_out_of_memory(GetIsolate()->heap()->true_value());
 }


=======================================
--- /branches/bleeding_edge/src/jsregexp.h      Fri Aug 23 11:06:16 2013 UTC
+++ /branches/bleeding_edge/src/jsregexp.h      Tue Sep 10 14:30:36 2013 UTC
@@ -1617,7 +1617,7 @@
   struct CompilationResult {
     explicit CompilationResult(const char* error_message)
         : error_message(error_message),
-          code(HEAP->the_hole_value()),
+          code(Isolate::Current()->heap()->the_hole_value()),
           num_registers(0) {}
     CompilationResult(Object* code, int registers)
       : error_message(NULL),
=======================================
--- /branches/bleeding_edge/src/liveedit.cc     Wed Sep  4 15:06:36 2013 UTC
+++ /branches/bleeding_edge/src/liveedit.cc     Tue Sep 10 14:30:36 2013 UTC
@@ -1233,7 +1233,9 @@
   DeoptimizationInputData* data =
DeoptimizationInputData::cast(function->code()->deoptimization_data());

-  if (data == HEAP->empty_fixed_array()) return false;
+  if (data == function->GetIsolate()->heap()->empty_fixed_array()) {
+    return false;
+  }

   FixedArray* literals = data->LiteralArray();

@@ -1549,7 +1551,7 @@
   info->set_end_position(new_function_end);
   info->set_function_token_position(new_function_token_pos);

-  HEAP->EnsureHeapIsIterable();
+  info->GetIsolate()->heap()->EnsureHeapIsIterable();

   if (IsJSFunctionCode(info->code())) {
     // Patch relocation info section of the code.
@@ -1565,7 +1567,7 @@
     }
   }

-  return HEAP->undefined_value();
+  return info->GetIsolate()->heap()->undefined_value();
 }


@@ -1611,7 +1613,7 @@
   original_script->set_source(*new_source);

   // Drop line ends so that they will be recalculated.
-  original_script->set_line_ends(HEAP->undefined_value());
+  original_script->set_line_ends(isolate->heap()->undefined_value());

   return *old_script_object;
 }
=======================================
--- /branches/bleeding_edge/src/mark-compact-inl.h Thu Oct  4 11:09:17 2012 UTC
+++ /branches/bleeding_edge/src/mark-compact-inl.h Tue Sep 10 14:30:36 2013 UTC
@@ -58,7 +58,7 @@
     mark_bit.Set();
     MemoryChunk::IncrementLiveBytesFromGC(obj->address(), obj->Size());
     ASSERT(IsMarked(obj));
-    ASSERT(HEAP->Contains(obj));
+    ASSERT(obj->GetIsolate()->heap()->Contains(obj));
     marking_deque_.PushBlack(obj);
   }
 }
=======================================
--- /branches/bleeding_edge/src/x64/assembler-x64.cc Mon Aug 26 14:51:51 2013 UTC
+++ /branches/bleeding_edge/src/x64/assembler-x64.cc Tue Sep 10 14:30:36 2013 UTC
@@ -396,7 +396,7 @@
   // Some internal data structures overflow for very large buffers,
   // they must ensure that kMaximalBufferSize is not too large.
   if ((desc.buffer_size > kMaximalBufferSize) ||
-      (desc.buffer_size > HEAP->MaxOldGenerationSize())) {
+      (desc.buffer_size > isolate()->heap()->MaxOldGenerationSize())) {
     V8::FatalProcessOutOfMemory("Assembler::GrowBuffer");
   }

@@ -1533,7 +1533,7 @@
   } else {
     EnsureSpace ensure_space(this);
     ASSERT(value->IsHeapObject());
-    ASSERT(!HEAP->InNewSpace(*value));
+    ASSERT(!isolate()->heap()->InNewSpace(*value));
     emit_rex_64(dst);
     emit(0xB8 | dst.low_bits());
     emitp(value.location(), mode);
=======================================
--- /branches/bleeding_edge/src/x64/macro-assembler-x64.cc Tue Sep 10 12:37:30 2013 UTC
+++ /branches/bleeding_edge/src/x64/macro-assembler-x64.cc Tue Sep 10 14:30:36 2013 UTC
@@ -285,16 +285,17 @@
     cmpq(scratch, kScratchRegister);
     j(cc, branch, distance);
   } else {
-    ASSERT(is_int32(static_cast<int64_t>(HEAP->NewSpaceMask())));
+ ASSERT(is_int32(static_cast<int64_t>(isolate()->heap()->NewSpaceMask())));
     intptr_t new_space_start =
-        reinterpret_cast<intptr_t>(HEAP->NewSpaceStart());
+        reinterpret_cast<intptr_t>(isolate()->heap()->NewSpaceStart());
     movq(kScratchRegister, -new_space_start, RelocInfo::NONE64);
     if (scratch.is(object)) {
       addq(scratch, kScratchRegister);
     } else {
       lea(scratch, Operand(object, kScratchRegister, times_1, 0));
     }
-    and_(scratch, Immediate(static_cast<int32_t>(HEAP->NewSpaceMask())));
+    and_(scratch,
+ Immediate(static_cast<int32_t>(isolate()->heap()->NewSpaceMask())));
     j(cc, branch, distance);
   }
 }

--
--
v8-dev mailing list
[email protected]
http://groups.google.com/group/v8-dev
--- You received this message because you are subscribed to the Google Groups "v8-dev" group.
To unsubscribe from this group and stop receiving emails from it, send an email 
to [email protected].
For more options, visit https://groups.google.com/groups/opt_out.

Reply via email to