Revision: 3535
Author: [email protected]
Date: Tue Jan  5 03:30:05 2010
Log: Simplify checking during allocation when Heap::always_allocate() is
true.  The rules are:

1. Heap::AllocateRaw can normally handle allocation requests in new
    space even when always_allocate() is true.  It properly retries
    failed allocation in the second 'retry' space.

2. Heap::Allocate can normally handle allocation requests in new
    space.

3. We only need to check always_allocate() when explicitly requesting
    allocation in new space via Heap::new_space().AllocateRaw().

4. The exception to these rules is fixed arrays with size such that
    MaxObjectSizeInPagedSpace < size <= MaxObjectSizeInNewSpace (i.e.,
    those that will be allocated in new space and promoted to large
    object space).  They cannot be allocated in new space via
    Heap::Allocate or Heap::AllocateRaw, because the retry logic does
    not know to allocate extra remembered set bits when retrying in
    large object space.

Review URL: http://codereview.chromium.org/518007
http://code.google.com/p/v8/source/detail?r=3535

Modified:
  /branches/bleeding_edge/src/heap-inl.h
  /branches/bleeding_edge/src/heap.cc

=======================================
--- /branches/bleeding_edge/src/heap-inl.h      Tue Dec 22 05:34:02 2009
+++ /branches/bleeding_edge/src/heap-inl.h      Tue Jan  5 03:30:05 2010
@@ -54,7 +54,8 @@
    ASSERT(allocation_allowed_ && gc_state_ == NOT_IN_GC);
    ASSERT(space != NEW_SPACE ||
           retry_space == OLD_POINTER_SPACE ||
-         retry_space == OLD_DATA_SPACE);
+         retry_space == OLD_DATA_SPACE ||
+         retry_space == LO_SPACE);
  #ifdef DEBUG
    if (FLAG_gc_interval >= 0 &&
        !disallow_allocation_failure_ &&
=======================================
--- /branches/bleeding_edge/src/heap.cc Tue Dec 22 05:34:02 2009
+++ /branches/bleeding_edge/src/heap.cc Tue Jan  5 03:30:05 2010
@@ -1354,9 +1354,6 @@
    STATIC_ASSERT(HeapNumber::kSize <= Page::kMaxHeapObjectSize);
    AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE :  
NEW_SPACE;

-  // New space can't cope with forced allocation.
-  if (always_allocate()) space = OLD_DATA_SPACE;
-
    Object* result = AllocateRaw(HeapNumber::kSize, space, OLD_DATA_SPACE);
    if (result->IsFailure()) return result;

@@ -1762,7 +1759,6 @@
    // Statically ensure that it is safe to allocate proxies in paged spaces.
    STATIC_ASSERT(Proxy::kSize <= Page::kMaxHeapObjectSize);
    AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE :  
NEW_SPACE;
-  if (always_allocate()) space = OLD_DATA_SPACE;
    Object* result = Allocate(proxy_map(), space);
    if (result->IsFailure()) return result;

@@ -1902,8 +1898,7 @@

    Map* map = is_ascii ? cons_ascii_string_map() : cons_string_map();

-  Object* result = Allocate(map,
-                            always_allocate() ? OLD_POINTER_SPACE :  
NEW_SPACE);
+  Object* result = Allocate(map, NEW_SPACE);
    if (result->IsFailure()) return result;
    ConsString* cons_string = ConsString::cast(result);
    WriteBarrierMode mode = cons_string->GetWriteBarrierMode();
@@ -1967,8 +1962,7 @@
    }

    Map* map = external_ascii_string_map();
-  Object* result = Allocate(map,
-                            always_allocate() ? OLD_DATA_SPACE :  
NEW_SPACE);
+  Object* result = Allocate(map, NEW_SPACE);
    if (result->IsFailure()) return result;

    ExternalAsciiString* external_string = ExternalAsciiString::cast(result);
@@ -1989,8 +1983,7 @@
    }

    Map* map = Heap::external_string_map();
-  Object* result = Allocate(map,
-                            always_allocate() ? OLD_DATA_SPACE :  
NEW_SPACE);
+  Object* result = Allocate(map, NEW_SPACE);
    if (result->IsFailure()) return result;

    ExternalTwoByteString* external_string =  
ExternalTwoByteString::cast(result);
@@ -2029,11 +2022,9 @@
      return AllocateByteArray(length);
    }
    int size = ByteArray::SizeFor(length);
-  AllocationSpace space =
-      size > MaxObjectSizeInPagedSpace() ? LO_SPACE : OLD_DATA_SPACE;
-
-  Object* result = AllocateRaw(size, space, OLD_DATA_SPACE);
-
+  Object* result = (size <= MaxObjectSizeInPagedSpace())
+      ? old_data_space_->AllocateRaw(size)
+      : lo_space_->AllocateRaw(size);
    if (result->IsFailure()) return result;

    reinterpret_cast<Array*>(result)->set_map(byte_array_map());
@@ -2045,13 +2036,8 @@
  Object* Heap::AllocateByteArray(int length) {
    int size = ByteArray::SizeFor(length);
    AllocationSpace space =
-      size > MaxObjectSizeInPagedSpace() ? LO_SPACE : NEW_SPACE;
-
-  // New space can't cope with forced allocation.
-  if (always_allocate()) space = LO_SPACE;
-
+      (size > MaxObjectSizeInPagedSpace()) ? LO_SPACE : NEW_SPACE;
    Object* result = AllocateRaw(size, space, OLD_DATA_SPACE);
-
    if (result->IsFailure()) return result;

    reinterpret_cast<Array*>(result)->set_map(byte_array_map());
@@ -2076,12 +2062,7 @@
                                   uint8_t* external_pointer,
                                   PretenureFlag pretenure) {
    AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE :  
NEW_SPACE;
-
-  // New space can't cope with forced allocation.
-  if (always_allocate()) space = OLD_DATA_SPACE;
-
    Object* result = AllocateRaw(PixelArray::kAlignedSize, space,  
OLD_DATA_SPACE);
-
    if (result->IsFailure()) return result;

    reinterpret_cast<PixelArray*>(result)->set_map(pixel_array_map());
@@ -2097,14 +2078,9 @@
                                      void* external_pointer,
                                      PretenureFlag pretenure) {
    AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE :  
NEW_SPACE;
-
-  // New space can't cope with forced allocation.
-  if (always_allocate()) space = OLD_DATA_SPACE;
-
    Object* result = AllocateRaw(ExternalArray::kAlignedSize,
                                 space,
                                 OLD_DATA_SPACE);
-
    if (result->IsFailure()) return result;

    reinterpret_cast<ExternalArray*>(result)->set_map(
@@ -2386,7 +2362,6 @@
    AllocationSpace space =
        (pretenure == TENURED) ? OLD_POINTER_SPACE : NEW_SPACE;
    if (map->instance_size() > MaxObjectSizeInPagedSpace()) space = LO_SPACE;
-  if (always_allocate()) space = OLD_POINTER_SPACE;
    Object* obj = Allocate(map, space);
    if (obj->IsFailure()) return obj;

@@ -2683,9 +2658,9 @@
    }

    // Allocate string.
-  AllocationSpace space =
-      (size > MaxObjectSizeInPagedSpace()) ? LO_SPACE : OLD_DATA_SPACE;
-  Object* result = AllocateRaw(size, space, OLD_DATA_SPACE);
+  Object* result = (size > MaxObjectSizeInPagedSpace())
+      ? lo_space_->AllocateRaw(size)
+      : old_data_space_->AllocateRaw(size);
    if (result->IsFailure()) return result;

    reinterpret_cast<HeapObject*>(result)->set_map(map);
@@ -2705,22 +2680,22 @@


  Object* Heap::AllocateRawAsciiString(int length, PretenureFlag pretenure) {
+  int size = SeqAsciiString::SizeFor(length);
    AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE :  
NEW_SPACE;
-
-  // New space can't cope with forced allocation.
-  if (always_allocate()) space = OLD_DATA_SPACE;
-
-  int size = SeqAsciiString::SizeFor(length);
-
-  Object* result = Failure::OutOfMemoryException();
+  AllocationSpace retry_space = OLD_DATA_SPACE;
+
    if (space == NEW_SPACE) {
-    result = size <= kMaxObjectSizeInNewSpace
-        ? new_space_.AllocateRaw(size)
-        : lo_space_->AllocateRaw(size);
-  } else {
-    if (size > MaxObjectSizeInPagedSpace()) space = LO_SPACE;
-    result = AllocateRaw(size, space, OLD_DATA_SPACE);
-  }
+    if (size > kMaxObjectSizeInNewSpace) {
+      // Allocate in large object space, retry space will be ignored.
+      space = LO_SPACE;
+    } else if (size > MaxObjectSizeInPagedSpace()) {
+      // Allocate in new space, retry in large object space.
+      retry_space = LO_SPACE;
+    }
+  } else if (space == OLD_DATA_SPACE && size >  
MaxObjectSizeInPagedSpace()) {
+    space = LO_SPACE;
+  }
+  Object* result = AllocateRaw(size, space, retry_space);
    if (result->IsFailure()) return result;

    // Partially initialize the object.
@@ -2733,22 +2708,22 @@


  Object* Heap::AllocateRawTwoByteString(int length, PretenureFlag  
pretenure) {
+  int size = SeqTwoByteString::SizeFor(length);
    AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE :  
NEW_SPACE;
-
-  // New space can't cope with forced allocation.
-  if (always_allocate()) space = OLD_DATA_SPACE;
-
-  int size = SeqTwoByteString::SizeFor(length);
-
-  Object* result = Failure::OutOfMemoryException();
+  AllocationSpace retry_space = OLD_DATA_SPACE;
+
    if (space == NEW_SPACE) {
-    result = size <= kMaxObjectSizeInNewSpace
-        ? new_space_.AllocateRaw(size)
-        : lo_space_->AllocateRaw(size);
-  } else {
-    if (size > MaxObjectSizeInPagedSpace()) space = LO_SPACE;
-    result = AllocateRaw(size, space, OLD_DATA_SPACE);
-  }
+    if (size > kMaxObjectSizeInNewSpace) {
+      // Allocate in large object space, retry space will be ignored.
+      space = LO_SPACE;
+    } else if (size > MaxObjectSizeInPagedSpace()) {
+      // Allocate in new space, retry in large object space.
+      retry_space = LO_SPACE;
+    }
+  } else if (space == OLD_DATA_SPACE && size >  
MaxObjectSizeInPagedSpace()) {
+    space = LO_SPACE;
+  }
+  Object* result = AllocateRaw(size, space, retry_space);
    if (result->IsFailure()) return result;

    // Partially initialize the object.
@@ -2826,26 +2801,40 @@
    ASSERT(empty_fixed_array()->IsFixedArray());
    if (length == 0) return empty_fixed_array();

-  // New space can't cope with forced allocation.
-  if (always_allocate()) pretenure = TENURED;
-
+  AllocationSpace space =
+      (pretenure == TENURED) ? OLD_POINTER_SPACE : NEW_SPACE;
    int size = FixedArray::SizeFor(length);
-  Object* result = Failure::OutOfMemoryException();
-  if (pretenure != TENURED) {
-    result = size <= kMaxObjectSizeInNewSpace
-        ? new_space_.AllocateRaw(size)
-        : lo_space_->AllocateRawFixedArray(size);
-  }
-  if (result->IsFailure()) {
-    if (size > MaxObjectSizeInPagedSpace()) {
-      result = lo_space_->AllocateRawFixedArray(size);
-    } else {
-      AllocationSpace space =
-          (pretenure == TENURED) ? OLD_POINTER_SPACE : NEW_SPACE;
-      result = AllocateRaw(size, space, OLD_POINTER_SPACE);
-    }
-    if (result->IsFailure()) return result;
-  }
+  if (space == NEW_SPACE && size > kMaxObjectSizeInNewSpace) {
+    // Too big for new space.
+    space = LO_SPACE;
+  } else if (space == OLD_POINTER_SPACE &&
+             size > MaxObjectSizeInPagedSpace()) {
+    // Too big for old pointer space.
+    space = LO_SPACE;
+  }
+
+  // Specialize allocation for the space.
+  Object* result = Failure::OutOfMemoryException();
+  if (space == NEW_SPACE) {
+    // We cannot use Heap::AllocateRaw() because it will not properly
+    // allocate extra remembered set bits if always_allocate() is true and
+    // new space allocation fails.
+    result = new_space_.AllocateRaw(size);
+    if (result->IsFailure() && always_allocate()) {
+      if (size <= MaxObjectSizeInPagedSpace()) {
+        result = old_pointer_space_->AllocateRaw(size);
+      } else {
+        result = lo_space_->AllocateRawFixedArray(size);
+      }
+    }
+  } else if (space == OLD_POINTER_SPACE) {
+    result = old_pointer_space_->AllocateRaw(size);
+  } else {
+    ASSERT(space == LO_SPACE);
+    result = lo_space_->AllocateRawFixedArray(size);
+  }
+  if (result->IsFailure()) return result;
+
    // Initialize the object.
    reinterpret_cast<Array*>(result)->set_map(fixed_array_map());
    FixedArray* array = FixedArray::cast(result);

-- 
v8-dev mailing list
[email protected]
http://groups.google.com/group/v8-dev

Reply via email to