Reviewers: danno,
Message:
Committed patchset #1 manually as r17608.
Description:
Revert "Reland [Object.observe] Don't force normalization of elements for
observed objects"
TBR=danno
BUG=
Committed: https://code.google.com/p/v8/source/detail?r=17608
Please review this at https://codereview.chromium.org/67233002/
SVN Base: https://v8.googlecode.com/svn/branches/bleeding_edge
Affected files (+49, -30 lines):
M src/arm/ic-arm.cc
M src/builtins.cc
M src/ia32/ic-ia32.cc
M src/mips/ic-mips.cc
M src/objects-debug.cc
M src/objects-inl.h
M src/objects.h
M src/objects.cc
M src/x64/ic-x64.cc
Index: src/arm/ic-arm.cc
diff --git a/src/arm/ic-arm.cc b/src/arm/ic-arm.cc
index a3b2a6e2a19842e2c59584a70029a29c675a1360..025a590f0cba202b9636b88d03626deea2e371b6 100644
--- a/src/arm/ic-arm.cc
+++ b/src/arm/ic-arm.cc
@@ -1432,10 +1432,10 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
__ JumpIfSmi(receiver, &slow);
// Get the map of the object.
__ ldr(receiver_map, FieldMemOperand(receiver, HeapObject::kMapOffset));
-  // Check that the receiver does not require access checks and is not observed.
-  // The generic stub does not perform map checks or handle observed objects.
+ // Check that the receiver does not require access checks. We need
+ // to do this because this generic stub does not perform map checks.
__ ldrb(ip, FieldMemOperand(receiver_map, Map::kBitFieldOffset));
-  __ tst(ip, Operand(1 << Map::kIsAccessCheckNeeded | 1 << Map::kIsObserved));
+ __ tst(ip, Operand(1 << Map::kIsAccessCheckNeeded));
__ b(ne, &slow);
// Check if the object is a JS array or not.
__ ldrb(r4, FieldMemOperand(receiver_map, Map::kInstanceTypeOffset));
Index: src/builtins.cc
diff --git a/src/builtins.cc b/src/builtins.cc
index 40772724266dc0277a59da827c77d2dbaa76aefa..758967ec08d875ce5ccab732d4d120366e6a5079 100644
--- a/src/builtins.cc
+++ b/src/builtins.cc
@@ -311,7 +311,6 @@ static inline MaybeObject* EnsureJSArrayWithWritableFastElements(
Heap* heap, Object* receiver, Arguments* args, int first_added_arg) {
if (!receiver->IsJSArray()) return NULL;
JSArray* array = JSArray::cast(receiver);
- if (array->map()->is_observed()) return NULL;
HeapObject* elms = array->elements();
Map* map = elms->map();
if (map == heap->fixed_array_map()) {
Index: src/ia32/ic-ia32.cc
diff --git a/src/ia32/ic-ia32.cc b/src/ia32/ic-ia32.cc
index dab9dd7a447ae089d87419557feba98cce0c4e43..0b7c4a828bce881c03ed05d44cfbf3943e103164 100644
--- a/src/ia32/ic-ia32.cc
+++ b/src/ia32/ic-ia32.cc
@@ -874,10 +874,10 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
__ JumpIfSmi(edx, &slow);
// Get the map from the receiver.
__ mov(edi, FieldOperand(edx, HeapObject::kMapOffset));
-  // Check that the receiver does not require access checks and is not observed.
-  // The generic stub does not perform map checks or handle observed objects.
+ // Check that the receiver does not require access checks. We need
+ // to do this because this generic stub does not perform map checks.
__ test_b(FieldOperand(edi, Map::kBitFieldOffset),
- 1 << Map::kIsAccessCheckNeeded | 1 << Map::kIsObserved);
+ 1 << Map::kIsAccessCheckNeeded);
__ j(not_zero, &slow);
// Check that the key is a smi.
__ JumpIfNotSmi(ecx, &slow);
Index: src/mips/ic-mips.cc
diff --git a/src/mips/ic-mips.cc b/src/mips/ic-mips.cc
index c7e1a2ada602392c952118d8f36162a37e67682b..0fe044a6c5a02dfc66fa51b02748672122a61eba 100644
--- a/src/mips/ic-mips.cc
+++ b/src/mips/ic-mips.cc
@@ -1354,11 +1354,10 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
__ JumpIfSmi(receiver, &slow);
// Get the map of the object.
__ lw(receiver_map, FieldMemOperand(receiver, HeapObject::kMapOffset));
-  // Check that the receiver does not require access checks and is not observed.
-  // The generic stub does not perform map checks or handle observed objects.
+ // Check that the receiver does not require access checks. We need
+ // to do this because this generic stub does not perform map checks.
__ lbu(t0, FieldMemOperand(receiver_map, Map::kBitFieldOffset));
-  __ And(t0, t0, Operand(1 << Map::kIsAccessCheckNeeded |
-                         1 << Map::kIsObserved));
+ __ And(t0, t0, Operand(1 << Map::kIsAccessCheckNeeded));
__ Branch(&slow, ne, t0, Operand(zero_reg));
// Check if the object is a JS array or not.
__ lbu(t0, FieldMemOperand(receiver_map, Map::kInstanceTypeOffset));
Index: src/objects-debug.cc
diff --git a/src/objects-debug.cc b/src/objects-debug.cc
index 80610c6a8b5de272099591d1bff2b688be818d5d..6ab2ddffe2a9e0ea3772163e20e87bd269f1a7e0 100644
--- a/src/objects-debug.cc
+++ b/src/objects-debug.cc
@@ -366,6 +366,9 @@ void Map::MapVerify() {
SLOW_ASSERT(transitions()->IsSortedNoDuplicates());
SLOW_ASSERT(transitions()->IsConsistentWithBackPointers(this));
}
+ ASSERT(!is_observed() || instance_type() < FIRST_JS_OBJECT_TYPE ||
+ instance_type() > LAST_JS_OBJECT_TYPE ||
+ has_slow_elements_kind() || has_external_array_elements());
}
Index: src/objects-inl.h
diff --git a/src/objects-inl.h b/src/objects-inl.h
index d28fad3df22c464d76b898be40fd969eb5b29a56..bef807eaf688cd14d0278f34d3a79277dcebdb5a 100644
--- a/src/objects-inl.h
+++ b/src/objects-inl.h
@@ -3649,13 +3649,16 @@ bool Map::owns_descriptors() {
}
-void Map::set_has_instance_call_handler() {
- set_bit_field3(HasInstanceCallHandler::update(bit_field3(), true));
+void Map::set_is_observed(bool is_observed) {
+ ASSERT(instance_type() < FIRST_JS_OBJECT_TYPE ||
+ instance_type() > LAST_JS_OBJECT_TYPE ||
+ has_slow_elements_kind() || has_external_array_elements());
+ set_bit_field3(IsObserved::update(bit_field3(), is_observed));
}
-bool Map::has_instance_call_handler() {
- return HasInstanceCallHandler::decode(bit_field3());
+bool Map::is_observed() {
+ return IsObserved::decode(bit_field3());
}
Index: src/objects.cc
diff --git a/src/objects.cc b/src/objects.cc
index d9a48a8a0c083a5066aabcc20879dc0f6e96858a..441c25e70c649aecb7b54f49d5670c1daf136569 100644
--- a/src/objects.cc
+++ b/src/objects.cc
@@ -5614,6 +5614,12 @@ void JSObject::SetObserved(Handle<JSObject> object) {
if (object->map()->is_observed())
return;
+ if (!object->HasExternalArrayElements()) {
+ // Go to dictionary mode, so that we don't skip map checks.
+ NormalizeElements(object);
+ ASSERT(!object->HasFastElements());
+ }
+
LookupResult result(isolate);
object->map()->LookupTransition(*object,
isolate->heap()->observed_symbol(),
@@ -5627,7 +5633,7 @@ void JSObject::SetObserved(Handle<JSObject> object) {
new_map = Map::CopyForObserved(handle(object->map()));
} else {
new_map = Map::Copy(handle(object->map()));
- new_map->set_is_observed();
+ new_map->set_is_observed(true);
}
object->set_map(*new_map);
}
@@ -6965,7 +6971,7 @@ Handle<Map> Map::CopyForObserved(Handle<Map> map) {
map->set_transitions(*transitions);
- new_map->set_is_observed();
+ new_map->set_is_observed(true);
if (map->owns_descriptors()) {
new_map->InitializeDescriptors(map->instance_descriptors());
@@ -11220,6 +11226,7 @@ MaybeObject* JSObject::SetFastElementsCapacityAndLength(
Heap* heap = GetHeap();
// We should never end in here with a pixel or external array.
ASSERT(!HasExternalArrayElements());
+ ASSERT(!map()->is_observed());
// Allocate a new fast elements backing store.
FixedArray* new_elements;
@@ -11304,6 +11311,7 @@ MaybeObject* JSObject::SetFastDoubleElementsCapacityAndLength(
Heap* heap = GetHeap();
// We should never end in here with a pixel or external array.
ASSERT(!HasExternalArrayElements());
+ ASSERT(!map()->is_observed());
FixedArrayBase* elems;
{ MaybeObject* maybe_obj =
@@ -11452,6 +11460,10 @@ MaybeObject* JSArray::SetElementsLength(Object* len) {
if (!new_length_handle->ToArrayIndex(&new_length))
return Failure::InternalError();
+ // Observed arrays should always be in dictionary mode;
+ // if they were in fast mode, the below is slower than necessary
+ // as it iterates over the array backing store multiple times.
+ ASSERT(self->HasDictionaryElements());
static const PropertyAttributes kNoAttrFilter = NONE;
int num_elements = self->NumberOfLocalElements(kNoAttrFilter);
if (num_elements > 0) {
@@ -11462,8 +11474,6 @@ MaybeObject* JSArray::SetElementsLength(Object* len) {
}
} else {
// For sparse arrays, only iterate over existing elements.
-    // TODO(rafaelw): For fast, sparse arrays, we can avoid iterating over
-    // the to-be-removed indices twice.
Handle<FixedArray> keys =
isolate->factory()->NewFixedArray(num_elements);
self->GetLocalElementKeys(*keys, kNoAttrFilter);
while (num_elements-- > 0) {
@@ -12862,6 +12872,7 @@ MaybeObject* JSObject::UpdateAllocationSite(ElementsKind to_kind) {
MaybeObject* JSObject::TransitionElementsKind(ElementsKind to_kind) {
+ ASSERT(!map()->is_observed());
ElementsKind from_kind = map()->elements_kind();
if (IsFastHoleyElementsKind(from_kind)) {
Index: src/objects.h
diff --git a/src/objects.h b/src/objects.h
index c4d82bc9bb40e35c219d84dad8d9efe3530047e6..3105579e28f4e3aee5b4b0235247c8a9af11ac61 100644
--- a/src/objects.h
+++ b/src/objects.h
@@ -5664,7 +5664,7 @@ class Map: public HeapObject {
class FunctionWithPrototype: public BitField<bool, 23, 1> {};
class DictionaryMap: public BitField<bool, 24, 1> {};
class OwnsDescriptors: public BitField<bool, 25, 1> {};
- class HasInstanceCallHandler: public BitField<bool, 26, 1> {};
+ class IsObserved: public BitField<bool, 26, 1> {};
class Deprecated: public BitField<bool, 27, 1> {};
class IsFrozen: public BitField<bool, 28, 1> {};
class IsUnstable: public BitField<bool, 29, 1> {};
@@ -5727,12 +5727,12 @@ class Map: public HeapObject {
}
// Tells whether the instance has a call-as-function handler.
- inline void set_is_observed() {
- set_bit_field(bit_field() | (1 << kIsObserved));
+ inline void set_has_instance_call_handler() {
+ set_bit_field(bit_field() | (1 << kHasInstanceCallHandler));
}
- inline bool is_observed() {
- return ((1 << kIsObserved) & bit_field()) != 0;
+ inline bool has_instance_call_handler() {
+ return ((1 << kHasInstanceCallHandler) & bit_field()) != 0;
}
inline void set_is_extensible(bool value);
@@ -5741,6 +5741,10 @@ class Map: public HeapObject {
inline void set_elements_kind(ElementsKind elements_kind) {
ASSERT(elements_kind < kElementsKindCount);
ASSERT(kElementsKindCount <= (1 << kElementsKindBitCount));
+ ASSERT(!is_observed() ||
+ elements_kind == DICTIONARY_ELEMENTS ||
+ elements_kind == NON_STRICT_ARGUMENTS_ELEMENTS ||
+ IsExternalArrayElementsKind(elements_kind));
set_bit_field2((bit_field2() & ~kElementsKindMask) |
(elements_kind << kElementsKindShift));
ASSERT(this->elements_kind() == elements_kind);
@@ -5993,8 +5997,8 @@ class Map: public HeapObject {
inline bool owns_descriptors();
inline void set_owns_descriptors(bool is_shared);
- inline bool has_instance_call_handler();
- inline void set_has_instance_call_handler();
+ inline bool is_observed();
+ inline void set_is_observed(bool is_observed);
inline void freeze();
inline bool is_frozen();
inline void mark_unstable();
@@ -6253,7 +6257,7 @@ class Map: public HeapObject {
static const int kHasNamedInterceptor = 3;
static const int kHasIndexedInterceptor = 4;
static const int kIsUndetectable = 5;
- static const int kIsObserved = 6;
+ static const int kHasInstanceCallHandler = 6;
static const int kIsAccessCheckNeeded = 7;
// Bit positions for bit field 2
Index: src/x64/ic-x64.cc
diff --git a/src/x64/ic-x64.cc b/src/x64/ic-x64.cc
index fe8734caf47988adfa4c2ddb1c10c4b44e1af6b9..721ae1d982aa9279dff7845b326da554459de690 100644
--- a/src/x64/ic-x64.cc
+++ b/src/x64/ic-x64.cc
@@ -749,10 +749,10 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
__ JumpIfSmi(rdx, &slow_with_tagged_index);
// Get the map from the receiver.
__ movq(r9, FieldOperand(rdx, HeapObject::kMapOffset));
-  // Check that the receiver does not require access checks and is not observed.
-  // The generic stub does not perform map checks or handle observed objects.
+ // Check that the receiver does not require access checks. We need
+ // to do this because this generic stub does not perform map checks.
__ testb(FieldOperand(r9, Map::kBitFieldOffset),
-           Immediate(1 << Map::kIsAccessCheckNeeded | 1 << Map::kIsObserved));
+ Immediate(1 << Map::kIsAccessCheckNeeded));
__ j(not_zero, &slow_with_tagged_index);
// Check that the key is a smi.
__ JumpIfNotSmi(rcx, &slow_with_tagged_index);
--
--
v8-dev mailing list
[email protected]
http://groups.google.com/group/v8-dev
---
You received this message because you are subscribed to the Google Groups "v8-dev" group.
To unsubscribe from this group and stop receiving emails from it, send an email
to [email protected].
For more options, visit https://groups.google.com/groups/opt_out.