Revision: 13064
Author: [email protected]
Date: Tue Nov 27 04:01:14 2012
Log: Avoid double initialization of arrays.
Review URL: https://chromiumcodereview.appspot.com/11413179
http://code.google.com/p/v8/source/detail?r=13064
Modified:
/branches/bleeding_edge/src/builtins.cc
/branches/bleeding_edge/src/elements.cc
/branches/bleeding_edge/src/objects.cc
/branches/bleeding_edge/test/mjsunit/regress/regress-121407.js
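
For context on the log message: before this change, the affected call sites either allocated the new backing store pre-filled with holes (AllocateFixedArrayWithHoles) or copied the live elements and then ran FillWithHoles over the rest, so part of the new store was initialized twice. After it, the store is allocated uninitialized and CopyElements is passed ElementsAccessor::kCopyToEndAndInitializeToHole, which copies the live prefix and hole-fills only the uncopied tail in a single pass. A minimal sketch of the two shapes, using std::vector and a sentinel value as stand-ins for the real V8 heap types:

  // Sketch only: simplified stand-ins, not the real V8 API. Assumes
  // capacity >= from.size(). The real code works on raw heap words
  // and fills the tail with MemsetPointer.
  #include <algorithm>
  #include <cstddef>
  #include <vector>

  struct Tagged { int tag; };
  static const Tagged kTheHole = { -1 };

  // Old shape: every slot is hole-initialized at allocation time, then
  // the live prefix is overwritten, so the prefix is initialized twice.
  std::vector<Tagged> GrowOld(const std::vector<Tagged>& from,
                              size_t capacity) {
    std::vector<Tagged> to(capacity, kTheHole);       // pass 1: all holes
    std::copy(from.begin(), from.end(), to.begin());  // pass 2: copy prefix
    return to;
  }

  // New shape: allocate uninitialized, copy the prefix, hole-fill only
  // the tail; each slot is written exactly once.
  std::vector<Tagged> GrowNew(const std::vector<Tagged>& from,
                              size_t capacity) {
    std::vector<Tagged> to;
    to.reserve(capacity);
    to.insert(to.end(), from.begin(), from.end());          // copy prefix
    to.insert(to.end(), capacity - from.size(), kTheHole);  // holes for tail
    return to;
  }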
=======================================
--- /branches/bleeding_edge/src/builtins.cc Thu Nov 22 08:22:57 2012
+++ /branches/bleeding_edge/src/builtins.cc Tue Nov 27 04:01:14 2012
@@ -574,14 +574,12 @@
MaybeObject* maybe_obj =
heap->AllocateUninitializedFixedArray(capacity);
if (!maybe_obj->To(&new_elms)) return maybe_obj;
- if (len > 0) {
- ElementsAccessor* accessor = array->GetElementsAccessor();
- MaybeObject* maybe_failure =
- accessor->CopyElements(NULL, 0, new_elms, kind, 0, len, elms_obj);
- ASSERT(!maybe_failure->IsFailure());
- USE(maybe_failure);
- }
- FillWithHoles(heap, new_elms, new_length, capacity);
+ ElementsAccessor* accessor = array->GetElementsAccessor();
+ MaybeObject* maybe_failure = accessor->CopyElements(
+ NULL, 0, new_elms, kind, 0,
+ ElementsAccessor::kCopyToEndAndInitializeToHole, elms_obj);
+ ASSERT(!maybe_failure->IsFailure());
+ USE(maybe_failure);
elms = new_elms;
}
@@ -623,15 +621,12 @@
heap->AllocateUninitializedFixedDoubleArray(capacity);
if (!maybe_obj->To(&new_elms)) return maybe_obj;
- if (len > 0) {
- ElementsAccessor* accessor = array->GetElementsAccessor();
- MaybeObject* maybe_failure =
- accessor->CopyElements(NULL, 0, new_elms, kind, 0, len, elms_obj);
- ASSERT(!maybe_failure->IsFailure());
- USE(maybe_failure);
- }
-
- FillWithHoles(new_elms, len + to_add, new_elms->length());
+ ElementsAccessor* accessor = array->GetElementsAccessor();
+ MaybeObject* maybe_failure = accessor->CopyElements(
+ NULL, 0, new_elms, kind, 0,
+ ElementsAccessor::kCopyToEndAndInitializeToHole, elms_obj);
+ ASSERT(!maybe_failure->IsFailure());
+ USE(maybe_failure);
} else {
// to_add is > 0 and new_length <= elms_len, so elms_obj cannot be the
// empty_fixed_array.
@@ -787,16 +782,14 @@
MaybeObject* maybe_elms =
heap->AllocateUninitializedFixedArray(capacity);
if (!maybe_elms->To(&new_elms)) return maybe_elms;
- if (len > 0) {
- ElementsKind kind = array->GetElementsKind();
- ElementsAccessor* accessor = array->GetElementsAccessor();
- MaybeObject* maybe_failure =
- accessor->CopyElements(NULL, 0, new_elms, kind, to_add, len, elms);
- ASSERT(!maybe_failure->IsFailure());
- USE(maybe_failure);
- }
+ ElementsKind kind = array->GetElementsKind();
+ ElementsAccessor* accessor = array->GetElementsAccessor();
+ MaybeObject* maybe_failure = accessor->CopyElements(
+ NULL, 0, new_elms, kind, to_add,
+ ElementsAccessor::kCopyToEndAndInitializeToHole, elms);
+ ASSERT(!maybe_failure->IsFailure());
+ USE(maybe_failure);
- FillWithHoles(heap, new_elms, new_length, capacity);
elms = new_elms;
array->set_elements(elms);
} else {
@@ -1116,16 +1109,12 @@
ASSERT(!maybe_failure->IsFailure());
USE(maybe_failure);
}
- const int to_copy = len - actual_delete_count - actual_start;
- if (to_copy > 0) {
- MaybeObject* maybe_failure = accessor->CopyElements(
- NULL, actual_start + actual_delete_count, new_elms, kind,
- actual_start + item_count, to_copy, elms);
- ASSERT(!maybe_failure->IsFailure());
- USE(maybe_failure);
- }
-
- FillWithHoles(heap, new_elms, new_length, capacity);
+ MaybeObject* maybe_failure = accessor->CopyElements(
+ NULL, actual_start + actual_delete_count, new_elms, kind,
+ actual_start + item_count,
+ ElementsAccessor::kCopyToEndAndInitializeToHole, elms);
+ ASSERT(!maybe_failure->IsFailure());
+ USE(maybe_failure);
elms_obj = new_elms;
elms_changed = true;
=======================================
--- /branches/bleeding_edge/src/elements.cc Mon Nov 26 06:29:21 2012
+++ /branches/bleeding_edge/src/elements.cc Tue Nov 27 04:01:14 2012
@@ -154,21 +154,21 @@
uint32_t to_start,
int raw_copy_size) {
ASSERT(to->map() != HEAP->fixed_cow_array_map());
+ AssertNoAllocation no_allocation;
int copy_size = raw_copy_size;
if (raw_copy_size < 0) {
ASSERT(raw_copy_size == ElementsAccessor::kCopyToEnd ||
raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole);
copy_size = Min(from->length() - from_start,
to->length() - to_start);
-#ifdef DEBUG
- // FAST_*_ELEMENTS arrays cannot be uninitialized. Ensure they are already
- // marked with the hole.
if (raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole) {
- for (int i = to_start + copy_size; i < to->length(); ++i) {
- ASSERT(to->get(i)->IsTheHole());
+ int start = to_start + copy_size;
+ int length = to->length() - start;
+ if (length > 0) {
+ Heap* heap = from->GetHeap();
+ MemsetPointer(to->data_start() + start, heap->the_hole_value(), length);
}
}
-#endif
}
ASSERT((copy_size + static_cast<int>(to_start)) <= to->length() &&
(copy_size + static_cast<int>(from_start)) <= from->length());
@@ -199,21 +199,21 @@
ElementsKind to_kind,
uint32_t to_start,
int raw_copy_size) {
+ AssertNoAllocation no_allocation;
int copy_size = raw_copy_size;
Heap* heap = from->GetHeap();
if (raw_copy_size < 0) {
ASSERT(raw_copy_size == ElementsAccessor::kCopyToEnd ||
raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole);
copy_size = from->max_number_key() + 1 - from_start;
-#ifdef DEBUG
- // Fast object arrays cannot be uninitialized. Ensure they are already
- // marked with the hole.
if (raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole) {
- for (int i = to_start + copy_size; i < to->length(); ++i) {
- ASSERT(to->get(i)->IsTheHole());
+ int start = to_start + copy_size;
+ int length = to->length() - start;
+ if (length > 0) {
+ Heap* heap = from->GetHeap();
+ MemsetPointer(to->data_start() + start, heap->the_hole_value(), length);
}
}
-#endif
}
ASSERT(to != from);
ASSERT(IsFastSmiOrObjectElementsKind(to_kind));
@@ -257,15 +257,17 @@
raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole);
copy_size = Min(from->length() - from_start,
to->length() - to_start);
-#ifdef DEBUG
- // FAST_*_ELEMENTS arrays cannot be uninitialized. Ensure they are already
- // marked with the hole.
if (raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole) {
- for (int i = to_start + copy_size; i < to->length(); ++i) {
- ASSERT(to->get(i)->IsTheHole());
+ // Also initialize the area that will be copied over since HeapNumber
+ // allocation below can cause an incremental marking step, requiring all
+ // existing heap objects to be properly initialized.
+ int start = to_start;
+ int length = to->length() - start;
+ if (length > 0) {
+ Heap* heap = from->GetHeap();
+ MemsetPointer(to->data_start() + start, heap->the_hole_value(), length);
}
}
-#endif
}
ASSERT((copy_size + static_cast<int>(to_start)) <= to->length() &&
(copy_size + static_cast<int>(from_start)) <= from->length());
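
The new comment in the double-to-object copy above marks the one non-obvious case: there the destination is hole-filled from to_start rather than from to_start + copy_size, because boxing each double allocates a HeapNumber, and that allocation can run an incremental marking step that walks the partially filled array. A sketch of the invariant with simplified, hypothetical types (plain `new` stands in for HeapNumber allocation):

  // Sketch only, not V8 code. Pretend each `new Box` may run an
  // incremental marking step that inspects every slot of |to|.
  #include <cstddef>
  #include <vector>

  struct Box { double value; };
  static Box the_hole_box;  // stand-in for the heap's hole sentinel

  void CopyDoublesToBoxes(const std::vector<double>& from,
                          std::vector<Box*>& to) {
    // Pre-fill *every* slot, including those about to be overwritten:
    // the first allocation below may already walk |to|, so no slot may
    // hold garbage at any point.
    for (Box*& slot : to) slot = &the_hole_box;
    size_t n = from.size() < to.size() ? from.size() : to.size();
    for (size_t i = 0; i < n; ++i) {
      to[i] = new Box{from[i]};  // may "GC" between writes; |to| stays valid
    }
  }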
=======================================
--- /branches/bleeding_edge/src/objects.cc Mon Nov 26 04:55:47 2012
+++ /branches/bleeding_edge/src/objects.cc Tue Nov 27 04:01:14 2012
@@ -9283,9 +9283,8 @@
// Allocate a new fast elements backing store.
FixedArray* new_elements;
- { MaybeObject* maybe = heap->AllocateFixedArrayWithHoles(capacity);
- if (!maybe->To(&new_elements)) return maybe;
- }
+ MaybeObject* maybe = heap->AllocateUninitializedFixedArray(capacity);
+ if (!maybe->To(&new_elements)) return maybe;
ElementsKind elements_kind = GetElementsKind();
ElementsKind new_elements_kind;
@@ -9309,10 +9308,10 @@
}
FixedArrayBase* old_elements = elements();
ElementsAccessor* accessor = ElementsAccessor::ForKind(elements_kind);
- { MaybeObject* maybe_obj =
- accessor->CopyElements(this, new_elements, new_elements_kind);
- if (maybe_obj->IsFailure()) return maybe_obj;
- }
+ MaybeObject* maybe_obj =
+ accessor->CopyElements(this, new_elements, new_elements_kind);
+ if (maybe_obj->IsFailure()) return maybe_obj;
+
if (elements_kind != NON_STRICT_ARGUMENTS_ELEMENTS) {
Map* new_map = map();
if (new_elements_kind != elements_kind) {
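
Finally, both rewritten loops in elements.cc now sit under an AssertNoAllocation guard, making the constraint explicit: MemsetPointer writes raw pointers without write barriers, which is only safe while no allocation (and therefore no GC) can intervene. A debug-only scope guard in that spirit, with hypothetical names rather than V8's actual class:

  // Hypothetical guard in the spirit of V8's AssertNoAllocation: while
  // any instance is alive, an instrumented allocator hook must not fire.
  #include <cassert>

  struct NoAllocationScope {
    static int depth;  // > 0 means allocation is currently forbidden
    NoAllocationScope()  { ++depth; }
    ~NoAllocationScope() { --depth; }
    static void OnAllocate() {
      assert(depth == 0 && "allocation inside a no-allocation scope");
    }
  };
  int NoAllocationScope::depth = 0;

An allocator instrumented to call NoAllocationScope::OnAllocate() would then catch any accidental allocation inside such a region in debug builds.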