Revision: 16057
Author: [email protected]
Date: Mon Aug 5 09:42:39 2013
Log: Migrate instance of deprecated maps in HCheckMaps.
Currently only direct map checks are supported. Otherwise only polymorphic
cases with a generic fallback behave properly; regular polymorphic cases
still need to be adapted.
[email protected]
Review URL: https://chromiumcodereview.appspot.com/21536003
http://code.google.com/p/v8/source/detail?r=16057
Modified:
/branches/bleeding_edge/src/arm/lithium-arm.cc
/branches/bleeding_edge/src/arm/lithium-codegen-arm.cc
/branches/bleeding_edge/src/arm/lithium-codegen-arm.h
/branches/bleeding_edge/src/hydrogen-instructions.cc
/branches/bleeding_edge/src/hydrogen-instructions.h
/branches/bleeding_edge/src/ia32/lithium-codegen-ia32.cc
/branches/bleeding_edge/src/ia32/lithium-codegen-ia32.h
/branches/bleeding_edge/src/ia32/lithium-ia32.cc
/branches/bleeding_edge/src/objects-inl.h
/branches/bleeding_edge/src/objects.cc
/branches/bleeding_edge/src/objects.h
/branches/bleeding_edge/src/runtime.cc
/branches/bleeding_edge/src/runtime.h
/branches/bleeding_edge/src/x64/lithium-codegen-x64.cc
/branches/bleeding_edge/src/x64/lithium-codegen-x64.h
/branches/bleeding_edge/src/x64/lithium-x64.cc
=======================================
--- /branches/bleeding_edge/src/arm/lithium-arm.cc Mon Aug 5 06:45:16 2013
+++ /branches/bleeding_edge/src/arm/lithium-arm.cc Mon Aug 5 09:42:39 2013
@@ -2013,10 +2013,16 @@
LInstruction* LChunkBuilder::DoCheckMaps(HCheckMaps* instr) {
LOperand* value = NULL;
- if (!instr->CanOmitMapChecks()) value =
UseRegisterAtStart(instr->value());
- LInstruction* result = new(zone()) LCheckMaps(value);
- if (instr->CanOmitMapChecks()) return result;
- return AssignEnvironment(result);
+ if (!instr->CanOmitMapChecks()) {
+ value = UseRegisterAtStart(instr->value());
+ if (instr->has_migration_target()) info()->MarkAsDeferredCalling();
+ }
+ LCheckMaps* result = new(zone()) LCheckMaps(value);
+ if (!instr->CanOmitMapChecks()) {
+ AssignEnvironment(result);
+ if (instr->has_migration_target()) return AssignPointerMap(result);
+ }
+ return result;
}
=======================================
--- /branches/bleeding_edge/src/arm/lithium-codegen-arm.cc Mon Aug 5
06:45:16 2013
+++ /branches/bleeding_edge/src/arm/lithium-codegen-arm.cc Mon Aug 5
09:42:39 2013
@@ -5214,33 +5214,67 @@
}
-void LCodeGen::DoCheckMapCommon(Register map_reg,
- Handle<Map> map,
- LEnvironment* env) {
- Label success;
- __ CompareMap(map_reg, map, &success);
- DeoptimizeIf(ne, env);
- __ bind(&success);
+void LCodeGen::DoDeferredInstanceMigration(LCheckMaps* instr, Register
object) {
+ {
+ PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
+ __ push(object);
+ CallRuntimeFromDeferred(Runtime::kMigrateInstance, 1, instr);
+ __ StoreToSafepointRegisterSlot(scratch0(), r0);
+ }
+ __ tst(scratch0(), Operand(kSmiTagMask));
+ DeoptimizeIf(eq, instr->environment());
}
void LCodeGen::DoCheckMaps(LCheckMaps* instr) {
+ class DeferredCheckMaps: public LDeferredCode {
+ public:
+ DeferredCheckMaps(LCodeGen* codegen, LCheckMaps* instr, Register
object)
+ : LDeferredCode(codegen), instr_(instr), object_(object) {
+ SetExit(check_maps());
+ }
+ virtual void Generate() {
+ codegen()->DoDeferredInstanceMigration(instr_, object_);
+ }
+ Label* check_maps() { return &check_maps_; }
+ virtual LInstruction* instr() { return instr_; }
+ private:
+ LCheckMaps* instr_;
+ Label check_maps_;
+ Register object_;
+ };
+
if (instr->hydrogen()->CanOmitMapChecks()) return;
Register map_reg = scratch0();
+
LOperand* input = instr->value();
ASSERT(input->IsRegister());
Register reg = ToRegister(input);
- Label success;
SmallMapList* map_set = instr->hydrogen()->map_set();
__ ldr(map_reg, FieldMemOperand(reg, HeapObject::kMapOffset));
+
+ DeferredCheckMaps* deferred = NULL;
+ if (instr->hydrogen()->has_migration_target()) {
+ deferred = new(zone()) DeferredCheckMaps(this, instr, reg);
+ __ bind(deferred->check_maps());
+ }
+
+ Label success;
for (int i = 0; i < map_set->length() - 1; i++) {
Handle<Map> map = map_set->at(i);
__ CompareMap(map_reg, map, &success);
__ b(eq, &success);
}
+
Handle<Map> map = map_set->last();
- DoCheckMapCommon(map_reg, map, instr->environment());
+ __ CompareMap(map_reg, map, &success);
+ if (instr->hydrogen()->has_migration_target()) {
+ __ b(ne, deferred->entry());
+ } else {
+ DeoptimizeIf(ne, instr->environment());
+ }
+
__ bind(&success);
}
=======================================
--- /branches/bleeding_edge/src/arm/lithium-codegen-arm.h Fri Aug 2
02:53:11 2013
+++ /branches/bleeding_edge/src/arm/lithium-codegen-arm.h Mon Aug 5
09:42:39 2013
@@ -154,8 +154,7 @@
void DoDeferredAllocate(LAllocate* instr);
void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
Label* map_check);
-
- void DoCheckMapCommon(Register map_reg, Handle<Map> map, LEnvironment*
env);
+ void DoDeferredInstanceMigration(LCheckMaps* instr, Register object);
// Parallel move support.
void DoParallelMove(LParallelMove* move);
=======================================
--- /branches/bleeding_edge/src/hydrogen-instructions.cc Mon Aug 5
06:45:16 2013
+++ /branches/bleeding_edge/src/hydrogen-instructions.cc Mon Aug 5
09:42:39 2013
@@ -2938,6 +2938,7 @@
HValue* typecheck) {
HCheckMaps* check_map = new(zone) HCheckMaps(value, zone, typecheck);
check_map->map_set_.Add(map, zone);
+ check_map->has_migration_target_ = map->is_migration_target();
if (map->CanOmitMapChecks() &&
value->IsConstant() &&
HConstant::cast(value)->InstanceOf(map)) {
=======================================
--- /branches/bleeding_edge/src/hydrogen-instructions.h Mon Aug 5 06:45:16
2013
+++ /branches/bleeding_edge/src/hydrogen-instructions.h Mon Aug 5 09:42:39
2013
@@ -2564,6 +2564,7 @@
HCheckMaps* check_map = new(zone) HCheckMaps(value, zone, typecheck);
for (int i = 0; i < maps->length(); i++) {
check_map->map_set_.Add(maps->at(i), zone);
+ check_map->has_migration_target_ |=
maps->at(i)->is_migration_target();
}
check_map->map_set_.Sort();
return check_map;
@@ -2581,6 +2582,10 @@
HValue* value() { return OperandAt(0); }
SmallMapList* map_set() { return &map_set_; }
+
+ bool has_migration_target() {
+ return has_migration_target_;
+ }
virtual void FinalizeUniqueValueId();
@@ -2606,7 +2611,7 @@
// Clients should use one of the static New* methods above.
HCheckMaps(HValue* value, Zone *zone, HValue* typecheck)
: HTemplateInstruction<2>(value->type()),
- omit_(false), map_unique_ids_(0, zone) {
+ omit_(false), has_migration_target_(false), map_unique_ids_(0,
zone) {
SetOperandAt(0, value);
// Use the object value for the dependency if NULL is passed.
// TODO(titzer): do GVN flags already express this dependency?
@@ -2628,6 +2633,7 @@
}
bool omit_;
+ bool has_migration_target_;
SmallMapList map_set_;
ZoneList<UniqueValueId> map_unique_ids_;
};
=======================================
--- /branches/bleeding_edge/src/ia32/lithium-codegen-ia32.cc Mon Aug 5
06:45:16 2013
+++ /branches/bleeding_edge/src/ia32/lithium-codegen-ia32.cc Mon Aug 5
09:42:39 2013
@@ -882,7 +882,7 @@
} else if (context->IsConstantOperand()) {
HConstant* constant =
chunk_->LookupConstant(LConstantOperand::cast(context));
- __ LoadHeapObject(esi, Handle<Context>::cast(constant->handle()));
+ __ LoadObject(esi, Handle<Object>::cast(constant->handle()));
} else {
UNREACHABLE();
}
@@ -5793,31 +5793,68 @@
}
-void LCodeGen::DoCheckMapCommon(Register reg,
- Handle<Map> map,
- LInstruction* instr) {
- Label success;
- __ CompareMap(reg, map, &success);
- DeoptimizeIf(not_equal, instr->environment());
- __ bind(&success);
+void LCodeGen::DoDeferredInstanceMigration(LCheckMaps* instr, Register
object) {
+ {
+ PushSafepointRegistersScope scope(this);
+ __ push(object);
+ __ xor_(esi, esi);
+ __ CallRuntimeSaveDoubles(Runtime::kMigrateInstance);
+ RecordSafepointWithRegisters(
+ instr->pointer_map(), 1, Safepoint::kNoLazyDeopt);
+
+ __ test(eax, Immediate(kSmiTagMask));
+ }
+ DeoptimizeIf(zero, instr->environment());
}
void LCodeGen::DoCheckMaps(LCheckMaps* instr) {
+ class DeferredCheckMaps: public LDeferredCode {
+ public:
+ DeferredCheckMaps(LCodeGen* codegen, LCheckMaps* instr, Register
object)
+ : LDeferredCode(codegen), instr_(instr), object_(object) {
+ SetExit(check_maps());
+ }
+ virtual void Generate() {
+ codegen()->DoDeferredInstanceMigration(instr_, object_);
+ }
+ Label* check_maps() { return &check_maps_; }
+ virtual LInstruction* instr() { return instr_; }
+ private:
+ LCheckMaps* instr_;
+ Label check_maps_;
+ Register object_;
+ };
+
if (instr->hydrogen()->CanOmitMapChecks()) return;
+
LOperand* input = instr->value();
ASSERT(input->IsRegister());
Register reg = ToRegister(input);
- Label success;
SmallMapList* map_set = instr->hydrogen()->map_set();
+
+ DeferredCheckMaps* deferred = NULL;
+ if (instr->hydrogen()->has_migration_target()) {
+ deferred = new(zone()) DeferredCheckMaps(this, instr, reg);
+ __ bind(deferred->check_maps());
+ }
+
+ Label success;
for (int i = 0; i < map_set->length() - 1; i++) {
Handle<Map> map = map_set->at(i);
__ CompareMap(reg, map, &success);
__ j(equal, &success);
}
+
Handle<Map> map = map_set->last();
- DoCheckMapCommon(reg, map, instr);
+ __ CompareMap(reg, map, &success);
+ if (instr->hydrogen()->has_migration_target()) {
+ __ j(not_equal, deferred->entry());
+ } else {
+ DeoptimizeIf(not_equal, instr->environment());
+ }
+
__ bind(&success);
}
=======================================
--- /branches/bleeding_edge/src/ia32/lithium-codegen-ia32.h Fri Aug 2
02:53:11 2013
+++ /branches/bleeding_edge/src/ia32/lithium-codegen-ia32.h Mon Aug 5
09:42:39 2013
@@ -163,8 +163,7 @@
void DoDeferredAllocate(LAllocate* instr);
void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
Label* map_check);
-
- void DoCheckMapCommon(Register reg, Handle<Map> map, LInstruction*
instr);
+ void DoDeferredInstanceMigration(LCheckMaps* instr, Register object);
// Parallel move support.
void DoParallelMove(LParallelMove* move);
=======================================
--- /branches/bleeding_edge/src/ia32/lithium-ia32.cc Mon Aug 5 06:45:16
2013
+++ /branches/bleeding_edge/src/ia32/lithium-ia32.cc Mon Aug 5 09:42:39
2013
@@ -2051,10 +2051,16 @@
LInstruction* LChunkBuilder::DoCheckMaps(HCheckMaps* instr) {
LOperand* value = NULL;
- if (!instr->CanOmitMapChecks()) value =
UseRegisterAtStart(instr->value());
+ if (!instr->CanOmitMapChecks()) {
+ value = UseRegisterAtStart(instr->value());
+ if (instr->has_migration_target()) info()->MarkAsDeferredCalling();
+ }
LCheckMaps* result = new(zone()) LCheckMaps(value);
- if (instr->CanOmitMapChecks()) return result;
- return AssignEnvironment(result);
+ if (!instr->CanOmitMapChecks()) {
+ AssignEnvironment(result);
+ if (instr->has_migration_target()) return AssignPointerMap(result);
+ }
+ return result;
}
=======================================
--- /branches/bleeding_edge/src/objects-inl.h Mon Aug 5 06:45:16 2013
+++ /branches/bleeding_edge/src/objects-inl.h Mon Aug 5 09:42:39 2013
@@ -3615,6 +3615,17 @@
if (!FLAG_track_fields) return false;
return Deprecated::decode(bit_field3());
}
+
+
+void Map::set_migration_target(bool value) {
+ set_bit_field3(IsMigrationTarget::update(bit_field3(), value));
+}
+
+
+bool Map::is_migration_target() {
+ if (!FLAG_track_fields) return false;
+ return IsMigrationTarget::decode(bit_field3());
+}
void Map::freeze() {
@@ -4215,7 +4226,20 @@
ACCESSORS(Map, instance_descriptors, DescriptorArray, kDescriptorsOffset)
-SMI_ACCESSORS(Map, bit_field3, kBitField3Offset)
+
+
+void Map::set_bit_field3(uint32_t bits) {
+ // Ensure the upper 2 bits have the same value by sign extending it.
This is
+ // necessary to be able to use the 31st bit.
+ int value = bits << 1;
+ WRITE_FIELD(this, kBitField3Offset, Smi::FromInt(value >> 1));
+}
+
+
+uint32_t Map::bit_field3() {
+ Object* value = READ_FIELD(this, kBitField3Offset);
+ return Smi::cast(value)->value();
+}
void Map::ClearTransitions(Heap* heap, WriteBarrierMode mode) {
=======================================
--- /branches/bleeding_edge/src/objects.cc Fri Aug 2 02:53:11 2013
+++ /branches/bleeding_edge/src/objects.cc Mon Aug 5 09:42:39 2013
@@ -2719,6 +2719,7 @@
Handle<Map>(new_map);
return maybe_map;
}
+ new_map->set_migration_target(true);
}
new_map->set_owns_descriptors(true);
@@ -6517,6 +6518,7 @@
result->set_is_shared(sharing == SHARED_NORMALIZED_MAP);
result->set_dictionary_map(true);
+ result->set_migration_target(false);
#ifdef VERIFY_HEAP
if (FLAG_verify_heap && result->is_shared()) {
=======================================
--- /branches/bleeding_edge/src/objects.h Mon Aug 5 06:45:16 2013
+++ /branches/bleeding_edge/src/objects.h Mon Aug 5 09:42:39 2013
@@ -5456,8 +5456,8 @@
inline void set_bit_field2(byte value);
// Bit field 3.
- inline int bit_field3();
- inline void set_bit_field3(int value);
+ inline uint32_t bit_field3();
+ inline void set_bit_field3(uint32_t bits);
class EnumLengthBits: public BitField<int, 0, 11> {};
class NumberOfOwnDescriptorsBits: public BitField<int, 11, 11> {};
@@ -5469,6 +5469,7 @@
class Deprecated: public BitField<bool, 27, 1> {};
class IsFrozen: public BitField<bool, 28, 1> {};
class IsUnstable: public BitField<bool, 29, 1> {};
+ class IsMigrationTarget: public BitField<bool, 30, 1> {};
// Tells whether the object in the prototype property will be used
// for instances created from this function. If the prototype
@@ -5775,6 +5776,8 @@
inline bool is_frozen();
inline void mark_unstable();
inline bool is_stable();
+ inline void set_migration_target(bool value);
+ inline bool is_migration_target();
inline void deprecate();
inline bool is_deprecated();
inline bool CanBeDeprecated();
=======================================
--- /branches/bleeding_edge/src/runtime.cc Mon Aug 5 04:14:46 2013
+++ /branches/bleeding_edge/src/runtime.cc Mon Aug 5 09:42:39 2013
@@ -13683,6 +13683,18 @@
FlattenString(str);
return isolate->heap()->undefined_value();
}
+
+
+RUNTIME_FUNCTION(MaybeObject*, Runtime_MigrateInstance) {
+ HandleScope scope(isolate);
+ ASSERT(args.length() == 1);
+ CONVERT_ARG_HANDLE_CHECKED(Object, object, 0);
+ if (!object->IsJSObject()) return Smi::FromInt(0);
+ Handle<JSObject> js_object = Handle<JSObject>::cast(object);
+ if (!js_object->map()->is_deprecated()) return Smi::FromInt(0);
+ JSObject::MigrateInstance(js_object);
+ return *object;
+}
RUNTIME_FUNCTION(MaybeObject*, Runtime_GetFromCache) {
=======================================
--- /branches/bleeding_edge/src/runtime.h Thu Aug 1 12:25:27 2013
+++ /branches/bleeding_edge/src/runtime.h Mon Aug 5 09:42:39 2013
@@ -109,6 +109,7 @@
F(DebugCallbackSupportsStepping, 1, 1) \
F(DebugPrepareStepInIfStepping, 1, 1) \
F(FlattenString, 1, 1) \
+ F(MigrateInstance, 1, 1) \
\
/* Array join support */ \
F(PushIfAbsent, 2, 1) \
=======================================
--- /branches/bleeding_edge/src/x64/lithium-codegen-x64.cc Mon Aug 5
06:45:16 2013
+++ /branches/bleeding_edge/src/x64/lithium-codegen-x64.cc Mon Aug 5
09:42:39 2013
@@ -4974,31 +4974,64 @@
}
-void LCodeGen::DoCheckMapCommon(Register reg,
- Handle<Map> map,
- LInstruction* instr) {
- Label success;
- __ CompareMap(reg, map, &success);
- DeoptimizeIf(not_equal, instr->environment());
- __ bind(&success);
+void LCodeGen::DoDeferredInstanceMigration(LCheckMaps* instr, Register
object) {
+ {
+ PushSafepointRegistersScope scope(this);
+ __ push(object);
+ CallRuntimeFromDeferred(Runtime::kMigrateInstance, 1, instr);
+ __ testq(rax, Immediate(kSmiTagMask));
+ }
+ DeoptimizeIf(zero, instr->environment());
}
void LCodeGen::DoCheckMaps(LCheckMaps* instr) {
+ class DeferredCheckMaps: public LDeferredCode {
+ public:
+ DeferredCheckMaps(LCodeGen* codegen, LCheckMaps* instr, Register
object)
+ : LDeferredCode(codegen), instr_(instr), object_(object) {
+ SetExit(check_maps());
+ }
+ virtual void Generate() {
+ codegen()->DoDeferredInstanceMigration(instr_, object_);
+ }
+ Label* check_maps() { return &check_maps_; }
+ virtual LInstruction* instr() { return instr_; }
+ private:
+ LCheckMaps* instr_;
+ Label check_maps_;
+ Register object_;
+ };
+
if (instr->hydrogen()->CanOmitMapChecks()) return;
+
LOperand* input = instr->value();
ASSERT(input->IsRegister());
Register reg = ToRegister(input);
- Label success;
SmallMapList* map_set = instr->hydrogen()->map_set();
+
+ DeferredCheckMaps* deferred = NULL;
+ if (instr->hydrogen()->has_migration_target()) {
+ deferred = new(zone()) DeferredCheckMaps(this, instr, reg);
+ __ bind(deferred->check_maps());
+ }
+
+ Label success;
for (int i = 0; i < map_set->length() - 1; i++) {
Handle<Map> map = map_set->at(i);
__ CompareMap(reg, map, &success);
__ j(equal, &success);
}
+
Handle<Map> map = map_set->last();
- DoCheckMapCommon(reg, map, instr);
+ __ CompareMap(reg, map, &success);
+ if (instr->hydrogen()->has_migration_target()) {
+ __ j(not_equal, deferred->entry());
+ } else {
+ DeoptimizeIf(not_equal, instr->environment());
+ }
+
__ bind(&success);
}
=======================================
--- /branches/bleeding_edge/src/x64/lithium-codegen-x64.h Fri Aug 2
02:53:11 2013
+++ /branches/bleeding_edge/src/x64/lithium-codegen-x64.h Mon Aug 5
09:42:39 2013
@@ -132,8 +132,7 @@
void DoDeferredAllocate(LAllocate* instr);
void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
Label* map_check);
-
- void DoCheckMapCommon(Register reg, Handle<Map> map, LInstruction*
instr);
+ void DoDeferredInstanceMigration(LCheckMaps* instr, Register object);
// Parallel move support.
void DoParallelMove(LParallelMove* move);
=======================================
--- /branches/bleeding_edge/src/x64/lithium-x64.cc Mon Aug 5 06:45:16 2013
+++ /branches/bleeding_edge/src/x64/lithium-x64.cc Mon Aug 5 09:42:39 2013
@@ -1917,10 +1917,16 @@
LInstruction* LChunkBuilder::DoCheckMaps(HCheckMaps* instr) {
LOperand* value = NULL;
- if (!instr->CanOmitMapChecks()) value =
UseRegisterAtStart(instr->value());
+ if (!instr->CanOmitMapChecks()) {
+ value = UseRegisterAtStart(instr->value());
+ if (instr->has_migration_target()) info()->MarkAsDeferredCalling();
+ }
LCheckMaps* result = new(zone()) LCheckMaps(value);
- if (instr->CanOmitMapChecks()) return result;
- return AssignEnvironment(result);
+ if (!instr->CanOmitMapChecks()) {
+ AssignEnvironment(result);
+ if (instr->has_migration_target()) return AssignPointerMap(result);
+ }
+ return result;
}
--
--
v8-dev mailing list
[email protected]
http://groups.google.com/group/v8-dev
---
You received this message because you are subscribed to the Google Groups "v8-dev" group.
To unsubscribe from this group and stop receiving emails from it, send an email
to [email protected].
For more options, visit https://groups.google.com/groups/opt_out.