Revision: 15576
Author: [email protected]
Date: Tue Jul 9 08:54:43 2013
Log: MIPS: Implement Polymorphic Store ICs.
Port r15566 (a76585e)
Original commit message:
- Makes a common superclass for Load and Store stub compiler.
- Splits all non-normal Store ICs into handler and IC.
- Ensures monomorphic store ICs go polymorphic.
- Feeds polymorphic type feedback into count operation.
BUG=
Review URL: https://codereview.chromium.org/18595006
Patch from Balazs Kilvady <[email protected]>.
http://code.google.com/p/v8/source/detail?r=15576
Modified:
/branches/bleeding_edge/src/mips/code-stubs-mips.cc
/branches/bleeding_edge/src/mips/ic-mips.cc
/branches/bleeding_edge/src/mips/stub-cache-mips.cc
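For context, a minimal C++ sketch of the class split this port follows (an illustration only, not V8's actual declarations): BaseLoadStoreStubCompiler becomes the common superclass that owns the shared IC machinery such as MissBuiltin() and CompilePolymorphicIC(), with the load and store stub compilers inheriting from it. The simplified Kind/BuiltinName enums, the int map/handler ids, and the empty method body are placeholder assumptions.

// Illustrative sketch only -- not V8's declarations. Simplified stand-ins
// (Kind, BuiltinName, int ids) replace the real V8 types.
#include <vector>

enum Kind { LOAD_IC, STORE_IC };
enum BuiltinName { kLoadIC_Miss, kStoreIC_Miss };

// Common superclass for load and store stub compilers. The polymorphic-IC
// compilation path and the miss-builtin selection live here so both IC
// kinds share them.
class BaseLoadStoreStubCompiler {
 public:
  explicit BaseLoadStoreStubCompiler(Kind kind) : kind_(kind) {}
  virtual ~BaseLoadStoreStubCompiler() {}

  static BuiltinName MissBuiltin(Kind kind) {
    return kind == LOAD_IC ? kLoadIC_Miss : kStoreIC_Miss;
  }

  // Would emit a map-compare/dispatch chain over the collected handlers and
  // fall through to the miss builtin when no receiver map matches.
  void CompilePolymorphicIC(const std::vector<int>& receiver_maps,
                            const std::vector<int>& handlers) {}

  Kind kind() const { return kind_; }

 private:
  Kind kind_;
};

class BaseLoadStubCompiler : public BaseLoadStoreStubCompiler {
 public:
  BaseLoadStubCompiler() : BaseLoadStoreStubCompiler(LOAD_IC) {}
};

// Store handlers are now compiled separately from the IC itself, so a store
// IC can accumulate several handlers and go polymorphic like loads do.
class BaseStoreStubCompiler : public BaseLoadStoreStubCompiler {
 public:
  BaseStoreStubCompiler() : BaseLoadStoreStubCompiler(STORE_IC) {}
};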
=======================================
--- /branches/bleeding_edge/src/mips/code-stubs-mips.cc Mon Jul 8 17:00:15 2013
+++ /branches/bleeding_edge/src/mips/code-stubs-mips.cc Tue Jul 9 08:54:43 2013
@@ -3733,7 +3733,8 @@
StubCompiler::GenerateLoadFunctionPrototype(masm, receiver, a3, t0, &miss);
__ bind(&miss);
- StubCompiler::TailCallBuiltin(masm, StubCompiler::MissBuiltin(kind()));
+ StubCompiler::TailCallBuiltin(
+ masm, BaseLoadStoreStubCompiler::MissBuiltin(kind()));
}
@@ -3764,7 +3765,8 @@
support_wrapper_);
__ bind(&miss);
- StubCompiler::TailCallBuiltin(masm, StubCompiler::MissBuiltin(kind()));
+ StubCompiler::TailCallBuiltin(
+ masm, BaseLoadStoreStubCompiler::MissBuiltin(kind()));
}
@@ -3834,7 +3836,8 @@
__ bind(&miss);
- StubCompiler::TailCallBuiltin(masm, StubCompiler::MissBuiltin(kind()));
+ StubCompiler::TailCallBuiltin(
+ masm, BaseLoadStoreStubCompiler::MissBuiltin(kind()));
}
=======================================
--- /branches/bleeding_edge/src/mips/ic-mips.cc Mon Jul 8 10:01:12 2013
+++ /branches/bleeding_edge/src/mips/ic-mips.cc Tue Jul 9 08:54:43 2013
@@ -1541,8 +1541,9 @@
// -----------------------------------
// Get the receiver from the stack and probe the stub cache.
- Code::Flags flags =
- Code::ComputeFlags(Code::STORE_IC, MONOMORPHIC, strict_mode);
+ Code::Flags flags = Code::ComputeFlags(
+ Code::STUB, MONOMORPHIC, strict_mode,
+ Code::NORMAL, Code::STORE_IC);
Isolate::Current()->stub_cache()->GenerateProbe(
masm, flags, a1, a2, a3, t0, t1, t2);
=======================================
--- /branches/bleeding_edge/src/mips/stub-cache-mips.cc Mon Jul 8 11:00:24 2013
+++ /branches/bleeding_edge/src/mips/stub-cache-mips.cc Tue Jul 9 08:54:43 2013
@@ -427,36 +427,42 @@
__ LoadRoot(at, Heap::kTheHoleValueRootIndex);
__ Branch(miss, ne, scratch, Operand(at));
}
+
+
+void BaseStoreStubCompiler::GenerateNegativeHolderLookup(
+ MacroAssembler* masm,
+ Handle<JSObject> holder,
+ Register holder_reg,
+ Handle<Name> name,
+ Label* miss) {
+ if (holder->IsJSGlobalObject()) {
+ GenerateCheckPropertyCell(
+ masm, Handle<GlobalObject>::cast(holder), name, scratch1(), miss);
+ } else if (!holder->HasFastProperties() && !holder->IsJSGlobalProxy()) {
+ GenerateDictionaryNegativeLookup(
+ masm, miss, holder_reg, name, scratch1(), scratch2());
+ }
+}
// Generate StoreTransition code, value is passed in a0 register.
// After executing generated code, the receiver_reg and name_reg
// may be clobbered.
-void StubCompiler::GenerateStoreTransition(MacroAssembler* masm,
- Handle<JSObject> object,
- LookupResult* lookup,
- Handle<Map> transition,
- Handle<Name> name,
- Register receiver_reg,
- Register name_reg,
- Register value_reg,
- Register scratch1,
- Register scratch2,
- Register scratch3,
- Label* miss_label,
- Label* miss_restore_name,
- Label* slow) {
+void BaseStoreStubCompiler::GenerateStoreTransition(MacroAssembler* masm,
+ Handle<JSObject> object,
+ LookupResult* lookup,
+ Handle<Map> transition,
+ Handle<Name> name,
+ Register receiver_reg,
+ Register storage_reg,
+ Register value_reg,
+ Register scratch1,
+ Register scratch2,
+ Register scratch3,
+ Label* miss_label,
+ Label* slow) {
// a0 : value.
Label exit;
-
- // Check that the map of the object hasn't changed.
- __ CheckMap(receiver_reg, scratch1, Handle<Map>(object->map()), miss_label,
- DO_SMI_CHECK);
-
- // Perform global security token check if needed.
- if (object->IsJSGlobalProxy()) {
- __ CheckAccessGlobalProxy(receiver_reg, scratch1, miss_label);
- }
int descriptor = transition->LastAdded();
DescriptorArray* descriptors = transition->instance_descriptors();
@@ -464,54 +470,15 @@
Representation representation = details.representation();
ASSERT(!representation.IsNone());
- // Ensure no transitions to deprecated maps are followed.
- __ CheckMapDeprecated(transition, scratch1, miss_label);
-
- // Check that we are allowed to write this.
- if (object->GetPrototype()->IsJSObject()) {
- JSObject* holder;
- // holder == object indicates that no property was found.
- if (lookup->holder() != *object) {
- holder = lookup->holder();
- } else {
- // Find the top object.
- holder = *object;
- do {
- holder = JSObject::cast(holder->GetPrototype());
- } while (holder->GetPrototype()->IsJSObject());
- }
- Register holder_reg = CheckPrototypes(
- object, receiver_reg, Handle<JSObject>(holder), name_reg,
- scratch1, scratch2, name, miss_restore_name, SKIP_RECEIVER);
- // If no property was found, and the holder (the last object in the
- // prototype chain) is in slow mode, we need to do a negative lookup on the
- // holder.
- if (lookup->holder() == *object) {
- if (holder->IsJSGlobalObject()) {
- GenerateCheckPropertyCell(
- masm,
- Handle<GlobalObject>(GlobalObject::cast(holder)),
- name,
- scratch1,
- miss_restore_name);
- } else if (!holder->HasFastProperties() && !holder->IsJSGlobalProxy()) {
- GenerateDictionaryNegativeLookup(
- masm, miss_restore_name, holder_reg, name, scratch1, scratch2);
- }
- }
- }
-
- Register storage_reg = name_reg;
-
if (details.type() == CONSTANT_FUNCTION) {
Handle<HeapObject> constant(
HeapObject::cast(descriptors->GetValue(descriptor)));
__ LoadHeapObject(scratch1, constant);
- __ Branch(miss_restore_name, ne, value_reg, Operand(scratch1));
+ __ Branch(miss_label, ne, value_reg, Operand(scratch1));
} else if (FLAG_track_fields && representation.IsSmi()) {
- __ JumpIfNotSmi(value_reg, miss_restore_name);
+ __ JumpIfNotSmi(value_reg, miss_label);
} else if (FLAG_track_heap_object_fields &&
representation.IsHeapObject()) {
- __ JumpIfSmi(value_reg, miss_restore_name);
+ __ JumpIfSmi(value_reg, miss_label);
} else if (FLAG_track_double_fields && representation.IsDouble()) {
Label do_store, heap_number;
__ LoadRoot(scratch3, Heap::kHeapNumberMapRootIndex);
@@ -525,7 +492,7 @@
__ bind(&heap_number);
__ CheckMap(value_reg, scratch1, Heap::kHeapNumberMapRootIndex,
- miss_restore_name, DONT_DO_SMI_CHECK);
+ miss_label, DONT_DO_SMI_CHECK);
__ ldc1(f4, FieldMemOperand(value_reg, HeapNumber::kValueOffset));
__ bind(&do_store);
@@ -555,8 +522,7 @@
__ li(scratch1, Operand(transition));
__ sw(scratch1, FieldMemOperand(receiver_reg, HeapObject::kMapOffset));
- // Update the write barrier for the map field and pass the now unused
- // name_reg as scratch register.
+ // Update the write barrier for the map field.
__ RecordWriteField(receiver_reg,
HeapObject::kMapOffset,
scratch1,
@@ -598,15 +564,12 @@
__ JumpIfSmi(value_reg, &exit);
// Update the write barrier for the array address.
- // Pass the now unused name_reg as a scratch register.
if (!FLAG_track_double_fields || !representation.IsDouble()) {
- __ mov(name_reg, value_reg);
- } else {
- ASSERT(storage_reg.is(name_reg));
+ __ mov(storage_reg, value_reg);
}
__ RecordWriteField(receiver_reg,
offset,
- name_reg,
+ storage_reg,
scratch1,
kRAHasNotBeenSaved,
kDontSaveFPRegs,
@@ -630,15 +593,12 @@
__ JumpIfSmi(value_reg, &exit);
// Update the write barrier for the array address.
- // Ok to clobber receiver_reg and name_reg, since we return.
if (!FLAG_track_double_fields || !representation.IsDouble()) {
- __ mov(name_reg, value_reg);
- } else {
- ASSERT(storage_reg.is(name_reg));
+ __ mov(storage_reg, value_reg);
}
__ RecordWriteField(scratch1,
offset,
- name_reg,
+ storage_reg,
receiver_reg,
kRAHasNotBeenSaved,
kDontSaveFPRegs,
@@ -659,27 +619,18 @@
// When leaving generated code after success, the receiver_reg and name_reg
// may be clobbered. Upon branch to miss_label, the receiver and name
// registers have their original values.
-void StubCompiler::GenerateStoreField(MacroAssembler* masm,
- Handle<JSObject> object,
- LookupResult* lookup,
- Register receiver_reg,
- Register name_reg,
- Register value_reg,
- Register scratch1,
- Register scratch2,
- Label* miss_label) {
+void BaseStoreStubCompiler::GenerateStoreField(MacroAssembler* masm,
+ Handle<JSObject> object,
+ LookupResult* lookup,
+ Register receiver_reg,
+ Register name_reg,
+ Register value_reg,
+ Register scratch1,
+ Register scratch2,
+ Label* miss_label) {
// a0 : value
Label exit;
- // Check that the map of the object hasn't changed.
- __ CheckMap(receiver_reg, scratch1, Handle<Map>(object->map()), miss_label,
- DO_SMI_CHECK);
-
- // Perform global security token check if needed.
- if (object->IsJSGlobalProxy()) {
- __ CheckAccessGlobalProxy(receiver_reg, scratch1, miss_label);
- }
-
// Stub never generated for non-global objects that require access
// checks.
ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
@@ -1348,7 +1299,8 @@
}
-void BaseLoadStubCompiler::HandlerFrontendFooter(Label* success,
+void BaseLoadStubCompiler::HandlerFrontendFooter(Handle<Name> name,
+ Label* success,
Label* miss) {
if (!miss->is_unused()) {
__ Branch(success);
@@ -1356,6 +1308,17 @@
TailCallBuiltin(masm(), MissBuiltin(kind()));
}
}
+
+
+void BaseStoreStubCompiler::HandlerFrontendFooter(Handle<Name> name,
+ Label* success,
+ Label* miss) {
+ if (!miss->is_unused()) {
+ __ b(success);
+ GenerateRestoreName(masm(), miss, name);
+ TailCallBuiltin(masm(), MissBuiltin(kind()));
+ }
+}
Register BaseLoadStubCompiler::CallbackHandlerFrontend(
@@ -1399,7 +1362,7 @@
__ Branch(&miss, ne, scratch2(), Operand(callback));
}
- HandlerFrontendFooter(success, &miss);
+ HandlerFrontendFooter(name, success, &miss);
return reg;
}
@@ -1420,7 +1383,7 @@
GenerateCheckPropertyCell(masm(), global, name, scratch2(), &miss);
}
- HandlerFrontendFooter(success, &miss);
+ HandlerFrontendFooter(name, success, &miss);
}
@@ -2866,15 +2829,13 @@
Handle<Code> StoreStubCompiler::CompileStoreCallback(
- Handle<Name> name,
Handle<JSObject> object,
Handle<JSObject> holder,
+ Handle<Name> name,
Handle<ExecutableAccessorInfo> callback) {
- Label miss;
- // Check that the maps haven't changed.
- __ JumpIfSmi(receiver(), &miss);
- CheckPrototypes(object, receiver(), holder,
- scratch1(), scratch2(), scratch3(), name, &miss);
+ Label success;
+ HandlerFrontend(object, receiver(), holder, name, &success);
+ __ bind(&success);
// Stub never generated for non-global objects that require access
// checks.
@@ -2882,19 +2843,17 @@
__ push(receiver()); // Receiver.
__ li(at, Operand(callback)); // Callback info.
- __ Push(at, this->name(), value());
+ __ push(at);
+ __ li(at, Operand(name));
+ __ Push(at, value());
// Do tail-call to the runtime system.
ExternalReference store_callback_property =
ExternalReference(IC_Utility(IC::kStoreCallbackProperty), isolate());
__ TailCallExternalReference(store_callback_property, 4, 1);
- // Handle store cache miss.
- __ bind(&miss);
- TailCallBuiltin(masm(), MissBuiltin(kind()));
-
// Return the generated code.
- return GetICCode(kind(), Code::CALLBACKS, name);
+ return GetCode(kind(), Code::CALLBACKS, name);
}
@@ -3144,7 +3103,7 @@
__ Branch(&miss, eq, t0, Operand(at));
}
- HandlerFrontendFooter(&success, &miss);
+ HandlerFrontendFooter(name, &success, &miss);
__ bind(&success);
Counters* counters = isolate()->counters();
@@ -3157,7 +3116,7 @@
}
-Handle<Code> BaseLoadStubCompiler::CompilePolymorphicIC(
+Handle<Code> BaseLoadStoreStubCompiler::CompilePolymorphicIC(
MapHandleList* receiver_maps,
CodeHandleList* handlers,
Handle<Name> name,