Reviewers: ishell, ulan,
Description:
Make runtime new-space allocations go through Heap::AllocateRaw.
[email protected],[email protected]
Please review this at https://codereview.chromium.org/66723003/
SVN Base: https://v8.googlecode.com/svn/branches/bleeding_edge
Affected files (+42, -18 lines):
M src/heap-inl.h
M src/heap.h
M src/heap.cc
M src/runtime.cc
Index: src/heap-inl.h
diff --git a/src/heap-inl.h b/src/heap-inl.h
index ad6f44f935f1af9349d99f0cc670017a3cd7f192..9bd764e47e2ad9dd11f7b91aaa22f1d42e30c2c4 100644
--- a/src/heap-inl.h
+++ b/src/heap-inl.h
@@ -816,6 +816,18 @@ AlwaysAllocateScope::~AlwaysAllocateScope() {
}
+NotAlwaysAllocateScope::NotAlwaysAllocateScope(Heap* heap) : heap_(heap) {
+ depth_ = heap->always_allocate_scope_depth_;
+ heap_->always_allocate_scope_depth_ = 0;
+}
+
+
+NotAlwaysAllocateScope::~NotAlwaysAllocateScope() {
+ ASSERT(heap_->always_allocate_scope_depth_ == 0);
+ heap_->always_allocate_scope_depth_ = depth_;
+}
+
+
#ifdef VERIFY_HEAP
NoWeakObjectVerificationScope::NoWeakObjectVerificationScope() {
Isolate* isolate = Isolate::Current();
Index: src/heap.cc
diff --git a/src/heap.cc b/src/heap.cc
index 217b37b2fe5cbdb11ccc52fb559dfc16c8814106..18bd6f6b191e1528af487bde0f34a84b8d993570 100644
--- a/src/heap.cc
+++ b/src/heap.cc
@@ -4823,7 +4823,8 @@ MaybeObject* Heap::CopyJSObject(JSObject* source, AllocationSite* site) {
{ int adjusted_object_size = site != NULL
? object_size + AllocationMemento::kSize
: object_size;
- MaybeObject* maybe_clone = new_space_.AllocateRaw(adjusted_object_size);
+ MaybeObject* maybe_clone =
+ AllocateRaw(adjusted_object_size, NEW_SPACE, OLD_POINTER_SPACE);
if (!maybe_clone->ToObject(&clone)) return maybe_clone;
}
SLOW_ASSERT(InNewSpace(clone));
Index: src/heap.h
diff --git a/src/heap.h b/src/heap.h
index 782a2eb2d0ecde6f7aeb479ce59a7a055a717da6..b79020062f276e810d690d2a88cfba731c06d38a 100644
--- a/src/heap.h
+++ b/src/heap.h
@@ -2405,6 +2405,7 @@ class Heap {
friend class GCTracer;
friend class DisallowAllocationFailure;
friend class AlwaysAllocateScope;
+ friend class NotAlwaysAllocateScope;
friend class Page;
friend class Isolate;
friend class MarkCompactCollector;
@@ -2475,6 +2476,18 @@ class AlwaysAllocateScope {
DisallowAllocationFailure disallow_allocation_failure_;
};
+
+class NotAlwaysAllocateScope {
+ public:
+ explicit inline NotAlwaysAllocateScope(Heap* heap);
+ inline ~NotAlwaysAllocateScope();
+
+ private:
+ Heap* heap_;
+ int depth_;
+};
+
+
#ifdef VERIFY_HEAP
class NoWeakObjectVerificationScope {
public:
Index: src/runtime.cc
diff --git a/src/runtime.cc b/src/runtime.cc
index c9f152f9daccb94f48058c8d709362be25e0de9f..0482ec0a35c36e658f7ce86ae5caa91ab5a9300c 100644
--- a/src/runtime.cc
+++ b/src/runtime.cc
@@ -9710,30 +9710,28 @@ RUNTIME_FUNCTION(ObjectPair, Runtime_ResolvePossiblyDirectEval) {
}
+// Allocate a block of memory in the given space (filled with a filler).
+// Used as a fall-back for generated code when the space is full.
static MaybeObject* Allocate(Isolate* isolate,
int size,
AllocationSpace space) {
- // Allocate a block of memory in the given space (filled with a filler).
- // Use as fallback for allocation in generated code when the space
- // is full.
- SealHandleScope shs(isolate);
+ Heap* heap = isolate->heap();
RUNTIME_ASSERT(IsAligned(size, kPointerSize));
RUNTIME_ASSERT(size > 0);
- Heap* heap = isolate->heap();
RUNTIME_ASSERT(size <= heap->MaxRegularSpaceAllocationSize());
- Object* allocation;
- { MaybeObject* maybe_allocation;
- if (space == NEW_SPACE) {
- maybe_allocation = heap->new_space()->AllocateRaw(size);
- } else {
- ASSERT(space == OLD_POINTER_SPACE || space == OLD_DATA_SPACE);
- maybe_allocation = heap->paged_space(space)->AllocateRaw(size);
- }
- if (maybe_allocation->ToObject(&allocation)) {
- heap->CreateFillerObjectAt(HeapObject::cast(allocation)->address(),
-                            size);
- }
- return maybe_allocation;
+ // If we're forced to always allocate, the general allocation function
+ // might leave us with an object in old space. Guard against that.
+ NotAlwaysAllocateScope not_always_allocate(heap);
+ HeapObject* allocation;
+ { MaybeObject* maybe_allocation = heap->AllocateRaw(size, space, LO_SPACE);
+ if (!maybe_allocation->To(&allocation)) return maybe_allocation;
}
+#ifdef DEBUG
+ MemoryChunk* chunk = MemoryChunk::FromAddress(allocation->address());
+ ASSERT(chunk->owner()->identity() == space);
+#endif
+ heap->CreateFillerObjectAt(allocation->address(), size);
+ return allocation;
}
--
--
v8-dev mailing list
[email protected]
http://groups.google.com/group/v8-dev
---
You received this message because you are subscribed to the Google Groups "v8-dev" group.
To unsubscribe from this group and stop receiving emails from it, send an email
to [email protected].
For more options, visit https://groups.google.com/groups/opt_out.