Reviewers: Michael Starzinger,
Description:
Change the inlined cache of the instanceof stub to use indirection through a cell.
The stub was directly patching the caller's code without issuing a write
barrier, which violated incremental marking invariants.
[email protected]
BUG=http://crbug.com/109448
TEST=cctest/test-heap/InstanceOfStubWriteBarrier
Please review this at http://codereview.chromium.org/9158015/
SVN Base: https://v8.googlecode.com/svn/branches/bleeding_edge
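
For context on the invariant at stake, here is a minimal standalone sketch
(not V8 code; Obj, Color, write_barrier and patch_via_cell are illustrative
names) of why skipping the write barrier is unsafe: once incremental marking
has scanned ("blackened") an object, storing a pointer to an unmarked
("white") object into it without a barrier hides that object from the
marker. Routing the stub's patch through a cell lets the store go through a
barriered path instead of rewriting the already-scanned code object behind
the marker's back.

  #include <cassert>
  #include <vector>

  enum class Color { White, Black };  // White: unvisited. Black: fully scanned.

  struct Obj {
    Color color = Color::White;
    Obj* slot = nullptr;  // A single outgoing pointer, for simplicity.
  };

  static std::vector<Obj*> marking_deque;  // Grey objects awaiting a scan.

  // The barrier the old code skipped: when a black holder gains a pointer
  // to a white value, re-grey the value so the marker still visits it.
  static void write_barrier(Obj* holder, Obj* value) {
    if (holder->color == Color::Black && value->color == Color::White) {
      marking_deque.push_back(value);
    }
  }

  // The new scheme, roughly: the code object embeds a pointer to a cell,
  // and the stub stores the cached map into the cell with a barrier,
  // instead of patching the code object's body directly.
  static void patch_via_cell(Obj* cell, Obj* map) {
    cell->slot = map;
    write_barrier(cell, map);
  }

  int main() {
    Obj cell, map;
    cell.color = Color::Black;  // Already scanned by incremental marking.
    patch_via_cell(&cell, &map);
    // The barrier re-greyed the map, so the marker will still visit it.
    assert(!marking_deque.empty() && marking_deque.back() == &map);
    return 0;
  }
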
Affected files:
M src/arm/code-stubs-arm.cc
M src/arm/lithium-codegen-arm.cc
M src/ia32/code-stubs-ia32.cc
M src/ia32/lithium-codegen-ia32.cc
M src/incremental-marking.h
M src/x64/code-stubs-x64.cc
M src/x64/lithium-codegen-x64.cc
M test/cctest/test-heap.cc
Index: src/arm/code-stubs-arm.cc
diff --git a/src/arm/code-stubs-arm.cc b/src/arm/code-stubs-arm.cc
index e95e2cf4216542da41260f6a4039a33dc48b20b0..bf431d17dd43d66c1009e3b9c89e671fcae26c7f 100644
--- a/src/arm/code-stubs-arm.cc
+++ b/src/arm/code-stubs-arm.cc
@@ -4081,7 +4081,7 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
const Register inline_site = r9;
const Register scratch = r2;
- const int32_t kDeltaToLoadBoolResult = 3 * kPointerSize;
+ const int32_t kDeltaToLoadBoolResult = 4 * kPointerSize;
Label slow, loop, is_instance, is_not_instance, not_js_object;
@@ -4132,7 +4132,8 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
__ sub(inline_site, lr, scratch);
// Get the map location in scratch and patch it.
__ GetRelocatedValueLocation(inline_site, scratch);
- __ str(map, MemOperand(scratch));
+ __ ldr(scratch, MemOperand(scratch));
+ __ str(map, FieldMemOperand(scratch, JSGlobalPropertyCell::kValueOffset));
}
// Register mapping: r3 is object map and r4 is function prototype.
Index: src/arm/lithium-codegen-arm.cc
diff --git a/src/arm/lithium-codegen-arm.cc b/src/arm/lithium-codegen-arm.cc
index e1e35d251ec58c3172e6b696de0a6e0f3d352799..eab9f3d96271bff9e864cf46fd5caf0591520c81 100644
--- a/src/arm/lithium-codegen-arm.cc
+++ b/src/arm/lithium-codegen-arm.cc
@@ -2141,7 +2141,11 @@ void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
// We use Factory::the_hole_value() on purpose instead of loading from the
// root array to force relocation to be able to later patch with
// the cached map.
- __ mov(ip, Operand(factory()->the_hole_value()));
+ Handle<JSGlobalPropertyCell> cell =
+ isolate()->factory()->NewJSGlobalPropertyCell(
+ factory()->the_hole_value());
+ __ mov(ip, Operand(Handle<Object>(cell)));
+ __ ldr(ip, FieldMemOperand(ip, JSGlobalPropertyCell::kValueOffset));
__ cmp(map, Operand(ip));
__ b(ne, &cache_miss);
// We use Factory::the_hole_value() on purpose instead of loading from the
Index: src/ia32/code-stubs-ia32.cc
diff --git a/src/ia32/code-stubs-ia32.cc b/src/ia32/code-stubs-ia32.cc
index 9bc024f40d0e9af1e7f08f1db9fc444a2390632d..f326f8f5619d393e16c9e392caaf8ae54afd1284 100644
--- a/src/ia32/code-stubs-ia32.cc
+++ b/src/ia32/code-stubs-ia32.cc
@@ -5081,8 +5081,8 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
static const int kDeltaToCmpImmediate = 2;
static const int kDeltaToMov = 8;
static const int kDeltaToMovImmediate = 9;
- static const int8_t kCmpEdiImmediateByte1 = BitCast<int8_t, uint8_t>(0x81);
- static const int8_t kCmpEdiImmediateByte2 = BitCast<int8_t, uint8_t>(0xff);
+ static const int8_t kCmpEdiOperandByte1 = BitCast<int8_t, uint8_t>(0x3b);
+ static const int8_t kCmpEdiOperandByte2 = BitCast<int8_t, uint8_t>(0x3d);
static const int8_t kMovEaxImmediateByte = BitCast<int8_t, uint8_t>(0xb8);
ExternalReference roots_array_start =
@@ -5147,12 +5147,13 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
__ mov(scratch, Operand(esp, 0 * kPointerSize));
__ sub(scratch, Operand(esp, 1 * kPointerSize));
if (FLAG_debug_code) {
- __ cmpb(Operand(scratch, 0), kCmpEdiImmediateByte1);
+ __ cmpb(Operand(scratch, 0), kCmpEdiOperandByte1);
__ Assert(equal, "InstanceofStub unexpected call site cache (cmp 1)");
- __ cmpb(Operand(scratch, 1), kCmpEdiImmediateByte2);
+ __ cmpb(Operand(scratch, 1), kCmpEdiOperandByte2);
__ Assert(equal, "InstanceofStub unexpected call site cache (cmp 2)");
}
- __ mov(Operand(scratch, kDeltaToCmpImmediate), map);
+ __ mov(scratch, Operand(scratch, kDeltaToCmpImmediate));
+ __ mov(Operand(scratch, 0), map);
}
// Loop through the prototype chain of the object looking for the function
Index: src/ia32/lithium-codegen-ia32.cc
diff --git a/src/ia32/lithium-codegen-ia32.cc b/src/ia32/lithium-codegen-ia32.cc
index 137d62c554648a367a14cf67e69f1d404389017e..7b4b2f060ca269ca297e322528dab468c8d1c4af 100644
--- a/src/ia32/lithium-codegen-ia32.cc
+++ b/src/ia32/lithium-codegen-ia32.cc
@@ -1979,7 +1979,9 @@ void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
Register map = ToRegister(instr->TempAt(0));
__ mov(map, FieldOperand(object, HeapObject::kMapOffset));
__ bind(deferred->map_check()); // Label for calculating code patching.
- __ cmp(map, factory()->the_hole_value()); // Patched to cached map.
+ Handle<JSGlobalPropertyCell> cache_cell =
+ factory()->NewJSGlobalPropertyCell(factory()->the_hole_value());
+ __ cmp(map, Operand::Cell(cache_cell)); // Patched to cached map.
__ j(not_equal, &cache_miss, Label::kNear);
__ mov(eax, factory()->the_hole_value()); // Patched to either true or false.
__ jmp(&done);
Index: src/incremental-marking.h
diff --git a/src/incremental-marking.h b/src/incremental-marking.h
index 25def8706867528565304500a460c332fec2c47b..4f8fa6b1275b0995a919503972c6ed8b360daaa5 100644
--- a/src/incremental-marking.h
+++ b/src/incremental-marking.h
@@ -56,6 +56,7 @@ class IncrementalMarking {
}
bool should_hurry() { return should_hurry_; }
+ void set_should_hurry(bool val) { should_hurry_ = val; }
inline bool IsStopped() { return state() == STOPPED; }
@@ -219,10 +220,6 @@ class IncrementalMarking {
void UncommitMarkingDeque();
private:
- void set_should_hurry(bool val) {
- should_hurry_ = val;
- }
-
int64_t SpaceLeftInOldSpace();
void ResetStepCounters();
Index: src/x64/code-stubs-x64.cc
diff --git a/src/x64/code-stubs-x64.cc b/src/x64/code-stubs-x64.cc
index ea9c494e8a9c421eeb9cb3a34bb892a6d9617156..8f5b63dce94f74810d4b05b42732da9860ea228f 100644
--- a/src/x64/code-stubs-x64.cc
+++ b/src/x64/code-stubs-x64.cc
@@ -4156,12 +4156,14 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
// Get return address and delta to inlined map check.
__ movq(kScratchRegister, Operand(rsp, 0 * kPointerSize));
__ subq(kScratchRegister, Operand(rsp, 1 * kPointerSize));
- __ movq(Operand(kScratchRegister, kOffsetToMapCheckValue), rax);
if (FLAG_debug_code) {
__ movl(rdi, Immediate(kWordBeforeMapCheckValue));
__ cmpl(Operand(kScratchRegister, kOffsetToMapCheckValue - 4), rdi);
__ Assert(equal, "InstanceofStub unexpected call site cache (check).");
}
+ __ movq(kScratchRegister,
+ Operand(kScratchRegister, kOffsetToMapCheckValue));
+ __ movq(Operand(kScratchRegister, 0), rax);
}
__ movq(rcx, FieldOperand(rax, Map::kPrototypeOffset));
Index: src/x64/lithium-codegen-x64.cc
diff --git a/src/x64/lithium-codegen-x64.cc b/src/x64/lithium-codegen-x64.cc
index 6239acb51531d1f39ecfd1e247315736e82c3e29..f10cf8529af2c3aa7fd898a5c6ffc42cfb0825bf 100644
--- a/src/x64/lithium-codegen-x64.cc
+++ b/src/x64/lithium-codegen-x64.cc
@@ -1897,9 +1897,10 @@ void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
Register map = ToRegister(instr->TempAt(0));
__ movq(map, FieldOperand(object, HeapObject::kMapOffset));
__ bind(deferred->map_check()); // Label for calculating code patching.
- __ movq(kScratchRegister, factory()->the_hole_value(),
- RelocInfo::EMBEDDED_OBJECT);
- __ cmpq(map, kScratchRegister); // Patched to cached map.
+ Handle<JSGlobalPropertyCell> cache_cell =
+ factory()->NewJSGlobalPropertyCell(factory()->the_hole_value());
+ __ movq(kScratchRegister, cache_cell, RelocInfo::GLOBAL_PROPERTY_CELL);
+ __ cmpq(map, Operand(kScratchRegister, 0));
__ j(not_equal, &cache_miss, Label::kNear);
// Patched to load either true or false.
__ LoadRoot(ToRegister(instr->result()), Heap::kTheHoleValueRootIndex);
Index: test/cctest/test-heap.cc
diff --git a/test/cctest/test-heap.cc b/test/cctest/test-heap.cc
index 42b5789d4d02f17f520af87c6a27c148e2b59dbc..b643e7d60e0893a7a6de8b83f936b116919479aa 100644
--- a/test/cctest/test-heap.cc
+++ b/test/cctest/test-heap.cc
@@ -1476,3 +1476,59 @@ TEST(LeakGlobalContextViaMapProto) {
HEAP->CollectAllAvailableGarbage();
CHECK_EQ(0, NumberOfGlobalObjects());
}
+
+
+TEST(InstanceOfStubWriteBarrier) {
+ if (!i::FLAG_crankshaft) return;
+ i::FLAG_allow_natives_syntax = true;
+ i::FLAG_verify_heap = true;
+ InitializeVM();
+ v8::HandleScope outer_scope;
+
+ {
+ v8::HandleScope scope;
+ CompileRun(
+ "function foo () { }"
+ "function mkbar () { return new (new Function(\"\")) (); }"
+ "function f (x) { return (x instanceof foo); }"
+ "function g () { f(mkbar()); }"
+ "f(new foo()); f(new foo());"
+ "%OptimizeFunctionOnNextCall(f);"
+ "f(new foo()); g();");
+ }
+
+ IncrementalMarking* marking = HEAP->incremental_marking();
+ marking->Abort();
+ marking->Start();
+
+ Handle<JSFunction> f =
+ v8::Utils::OpenHandle(
+ *v8::Handle<v8::Function>::Cast(
+ v8::Context::GetCurrent()->Global()->Get(v8_str("f"))));
+
+ CHECK(f->IsOptimized());
+
+ while (!Marking::IsBlack(Marking::MarkBitFrom(f->code())) &&
+ !marking->IsStopped()) {
+ marking->Step(MB);
+ }
+
+ CHECK(marking->IsMarking());
+
+ // Discard any pending GC requests; otherwise we will get a GC when we
+ // enter the code below.
+ if (ISOLATE->stack_guard()->IsGCRequest()) {
+ ISOLATE->stack_guard()->Continue(GC_REQUEST);
+ }
+
+ {
+ v8::HandleScope scope;
+ v8::Handle<v8::Object> global = v8::Context::GetCurrent()->Global();
+ v8::Handle<v8::Function> g =
+ v8::Handle<v8::Function>::Cast(global->Get(v8_str("g")));
+ g->Call(global, 0, NULL);
+ }
+
+ HEAP->incremental_marking()->set_should_hurry(true);
+ HEAP->CollectGarbage(OLD_POINTER_SPACE);
+}