Revision: 10380
Author:   [email protected]
Date:     Wed Jan 11 01:39:37 2012
Log: Change the inlined cache of the instanceof stub to use indirection through a cell.

The stub was directly patching the caller's code without issuing a write barrier, which violated incremental marking invariants.
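
The invariant in question is the tri-color one: a black (fully scanned)
object must never acquire an unrecorded pointer to a white (unvisited)
object. Patching the map straight into the caller's instruction stream was
exactly such an unrecorded store; going through a JSGlobalPropertyCell moves
the mutable slot out of the code object and into an ordinary heap object the
GC can track. A minimal standalone sketch of the failure mode and the fix, in
toy C++ rather than V8 code (all names illustrative):

  // Toy tri-color incremental marker; illustrative only, not V8 code.
  #include <cstdio>
  #include <unordered_set>
  #include <vector>

  struct Obj { std::vector<Obj*> slots; };

  struct IncrementalMarker {
    std::unordered_set<Obj*> grey, black;
    // Write barrier: a store into an already-black host re-greys it so the
    // marker revisits its slots. The old stub skipped exactly this step.
    void RecordWrite(Obj* host) {
      if (black.erase(host)) grey.insert(host);
    }
    // Drain the grey set, then report whether |target| was marked.
    bool Reaches(Obj* target) {
      while (!grey.empty()) {
        Obj* o = *grey.begin();
        grey.erase(o);
        black.insert(o);
        for (Obj* s : o->slots)
          if (!black.count(s)) grey.insert(s);
      }
      return black.count(target) != 0;
    }
  };

  int main() {
    Obj code, map, cell;

    // Old scheme: the code object is already black when the stub patches
    // the map pointer into it with no barrier, so the map is never marked
    // and would be collected while still referenced.
    IncrementalMarker m1;
    m1.black.insert(&code);
    code.slots.push_back(&map);  // unrecorded black -> white store
    std::printf("direct patch, map reached: %d\n", m1.Reaches(&map));     // 0

    // New scheme: the code object points at a cell, and the stub only
    // mutates the cell's value slot, which a barrier can cover.
    IncrementalMarker m2;
    code.slots.assign(1, &cell);
    m2.black.insert(&code);
    m2.black.insert(&cell);
    cell.slots.push_back(&map);
    m2.RecordWrite(&cell);  // re-greys the cell
    std::printf("cell indirection, map reached: %d\n", m2.Reaches(&map)); // 1
    return 0;
  }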

[email protected]
BUG=http://crbug.com/109448
TEST=cctest/test-heap/InstanceOfStubWriteBarrier

Review URL: http://codereview.chromium.org/9158015
http://code.google.com/p/v8/source/detail?r=10380

Modified:
 /branches/bleeding_edge/src/arm/code-stubs-arm.cc
 /branches/bleeding_edge/src/arm/lithium-codegen-arm.cc
 /branches/bleeding_edge/src/ia32/code-stubs-ia32.cc
 /branches/bleeding_edge/src/ia32/lithium-codegen-ia32.cc
 /branches/bleeding_edge/src/incremental-marking.h
 /branches/bleeding_edge/src/x64/code-stubs-x64.cc
 /branches/bleeding_edge/src/x64/lithium-codegen-x64.cc
 /branches/bleeding_edge/test/cctest/test-heap.cc

=======================================
--- /branches/bleeding_edge/src/arm/code-stubs-arm.cc Tue Jan 10 05:24:18 2012
+++ /branches/bleeding_edge/src/arm/code-stubs-arm.cc Wed Jan 11 01:39:37 2012
@@ -4081,7 +4081,7 @@
   const Register inline_site = r9;
   const Register scratch = r2;

-  const int32_t kDeltaToLoadBoolResult = 3 * kPointerSize;
+  const int32_t kDeltaToLoadBoolResult = 4 * kPointerSize;

   Label slow, loop, is_instance, is_not_instance, not_js_object;

@@ -4132,7 +4132,8 @@
     __ sub(inline_site, lr, scratch);
     // Get the map location in scratch and patch it.
     __ GetRelocatedValueLocation(inline_site, scratch);
-    __ str(map, MemOperand(scratch));
+    __ ldr(scratch, MemOperand(scratch));
+    __ str(map, FieldMemOperand(scratch, JSGlobalPropertyCell::kValueOffset));
   }

   // Register mapping: r3 is object map and r4 is function prototype.
=======================================
--- /branches/bleeding_edge/src/arm/lithium-codegen-arm.cc Wed Jan 11 00:29:42 2012
+++ /branches/bleeding_edge/src/arm/lithium-codegen-arm.cc Wed Jan 11 01:39:37 2012
@@ -2143,7 +2143,10 @@
   // We use Factory::the_hole_value() on purpose instead of loading from the
   // root array to force relocation to be able to later patch with
   // the cached map.
-  __ mov(ip, Operand(factory()->the_hole_value()));
+  Handle<JSGlobalPropertyCell> cell =
+      factory()->NewJSGlobalPropertyCell(factory()->the_hole_value());
+  __ mov(ip, Operand(Handle<Object>(cell)));
+  __ ldr(ip, FieldMemOperand(ip, JSGlobalPropertyCell::kValueOffset));
   __ cmp(map, Operand(ip));
   __ b(ne, &cache_miss);
   // We use Factory::the_hole_value() on purpose instead of loading from the
=======================================
--- /branches/bleeding_edge/src/ia32/code-stubs-ia32.cc Tue Jan 10 05:24:18 2012
+++ /branches/bleeding_edge/src/ia32/code-stubs-ia32.cc Wed Jan 11 01:39:37 2012
@@ -5081,8 +5081,8 @@
   static const int kDeltaToCmpImmediate = 2;
   static const int kDeltaToMov = 8;
   static const int kDeltaToMovImmediate = 9;
-  static const int8_t kCmpEdiImmediateByte1 = BitCast<int8_t, uint8_t>(0x81);
-  static const int8_t kCmpEdiImmediateByte2 = BitCast<int8_t, uint8_t>(0xff);
+  static const int8_t kCmpEdiOperandByte1 = BitCast<int8_t, uint8_t>(0x3b);
+  static const int8_t kCmpEdiOperandByte2 = BitCast<int8_t, uint8_t>(0x3d);
   static const int8_t kMovEaxImmediateByte = BitCast<int8_t, uint8_t>(0xb8);

   ExternalReference roots_array_start =
@@ -5147,12 +5147,13 @@
     __ mov(scratch, Operand(esp, 0 * kPointerSize));
     __ sub(scratch, Operand(esp, 1 * kPointerSize));
     if (FLAG_debug_code) {
-      __ cmpb(Operand(scratch, 0), kCmpEdiImmediateByte1);
+      __ cmpb(Operand(scratch, 0), kCmpEdiOperandByte1);
       __ Assert(equal, "InstanceofStub unexpected call site cache (cmp 1)");
-      __ cmpb(Operand(scratch, 1), kCmpEdiImmediateByte2);
+      __ cmpb(Operand(scratch, 1), kCmpEdiOperandByte2);
       __ Assert(equal, "InstanceofStub unexpected call site cache (cmp 2)");
     }
-    __ mov(Operand(scratch, kDeltaToCmpImmediate), map);
+    __ mov(scratch, Operand(scratch, kDeltaToCmpImmediate));
+    __ mov(Operand(scratch, 0), map);
   }

   // Loop through the prototype chain of the object looking for the function
=======================================
--- /branches/bleeding_edge/src/ia32/lithium-codegen-ia32.cc Wed Jan 11 00:29:42 2012
+++ /branches/bleeding_edge/src/ia32/lithium-codegen-ia32.cc Wed Jan 11 01:39:37 2012
@@ -1975,7 +1975,9 @@
   Register map = ToRegister(instr->TempAt(0));
   __ mov(map, FieldOperand(object, HeapObject::kMapOffset));
   __ bind(deferred->map_check());  // Label for calculating code patching.
-  __ cmp(map, factory()->the_hole_value());  // Patched to cached map.
+  Handle<JSGlobalPropertyCell> cache_cell =
+      factory()->NewJSGlobalPropertyCell(factory()->the_hole_value());
+  __ cmp(map, Operand::Cell(cache_cell));  // Patched to cached map.
   __ j(not_equal, &cache_miss, Label::kNear);
   __ mov(eax, factory()->the_hole_value());  // Patched to either true or false.
   __ jmp(&done);
=======================================
--- /branches/bleeding_edge/src/incremental-marking.h Wed Nov 30 03:13:36 2011
+++ /branches/bleeding_edge/src/incremental-marking.h Wed Jan 11 01:39:37 2012
@@ -56,6 +56,7 @@
   }

   bool should_hurry() { return should_hurry_; }
+  void set_should_hurry(bool val) { should_hurry_ = val; }

   inline bool IsStopped() { return state() == STOPPED; }

@@ -219,10 +220,6 @@
   void UncommitMarkingDeque();

  private:
-  void set_should_hurry(bool val) {
-    should_hurry_ = val;
-  }
-
   int64_t SpaceLeftInOldSpace();

   void ResetStepCounters();
=======================================
--- /branches/bleeding_edge/src/x64/code-stubs-x64.cc Tue Jan 10 05:24:18 2012
+++ /branches/bleeding_edge/src/x64/code-stubs-x64.cc Wed Jan 11 01:39:37 2012
@@ -4156,12 +4156,14 @@
     // Get return address and delta to inlined map check.
     __ movq(kScratchRegister, Operand(rsp, 0 * kPointerSize));
     __ subq(kScratchRegister, Operand(rsp, 1 * kPointerSize));
-    __ movq(Operand(kScratchRegister, kOffsetToMapCheckValue), rax);
     if (FLAG_debug_code) {
       __ movl(rdi, Immediate(kWordBeforeMapCheckValue));
       __ cmpl(Operand(kScratchRegister, kOffsetToMapCheckValue - 4), rdi);
       __ Assert(equal, "InstanceofStub unexpected call site cache (check).");
     }
+    __ movq(kScratchRegister,
+            Operand(kScratchRegister, kOffsetToMapCheckValue));
+    __ movq(Operand(kScratchRegister, 0), rax);
   }

   __ movq(rcx, FieldOperand(rax, Map::kPrototypeOffset));
=======================================
--- /branches/bleeding_edge/src/x64/lithium-codegen-x64.cc Wed Jan 11 00:29:42 2012
+++ /branches/bleeding_edge/src/x64/lithium-codegen-x64.cc Wed Jan 11 01:39:37 2012
@@ -1901,9 +1901,10 @@
   Register map = ToRegister(instr->TempAt(0));
   __ movq(map, FieldOperand(object, HeapObject::kMapOffset));
   __ bind(deferred->map_check());  // Label for calculating code patching.
-  __ movq(kScratchRegister, factory()->the_hole_value(),
-          RelocInfo::EMBEDDED_OBJECT);
-  __ cmpq(map, kScratchRegister);  // Patched to cached map.
+  Handle<JSGlobalPropertyCell> cache_cell =
+      factory()->NewJSGlobalPropertyCell(factory()->the_hole_value());
+  __ movq(kScratchRegister, cache_cell, RelocInfo::GLOBAL_PROPERTY_CELL);
+  __ cmpq(map, Operand(kScratchRegister, 0));
   __ j(not_equal, &cache_miss, Label::kNear);
   // Patched to load either true or false.
   __ LoadRoot(ToRegister(instr->result()), Heap::kTheHoleValueRootIndex);
=======================================
--- /branches/bleeding_edge/test/cctest/test-heap.cc Mon Dec 19 04:39:52 2011
+++ /branches/bleeding_edge/test/cctest/test-heap.cc Wed Jan 11 01:39:37 2012
@@ -1476,3 +1476,59 @@
   HEAP->CollectAllAvailableGarbage();
   CHECK_EQ(0, NumberOfGlobalObjects());
 }
+
+
+TEST(InstanceOfStubWriteBarrier) {
+  if (!i::FLAG_crankshaft) return;
+  i::FLAG_allow_natives_syntax = true;
+  i::FLAG_verify_heap = true;
+  InitializeVM();
+  v8::HandleScope outer_scope;
+
+  {
+    v8::HandleScope scope;
+    CompileRun(
+        "function foo () { }"
+        "function mkbar () { return new (new Function(\"\")) (); }"
+        "function f (x) { return (x instanceof foo); }"
+        "function g () { f(mkbar()); }"
+        "f(new foo()); f(new foo());"
+        "%OptimizeFunctionOnNextCall(f);"
+        "f(new foo()); g();");
+  }
+
+  IncrementalMarking* marking = HEAP->incremental_marking();
+  marking->Abort();
+  marking->Start();
+
+  Handle<JSFunction> f =
+      v8::Utils::OpenHandle(
+          *v8::Handle<v8::Function>::Cast(
+              v8::Context::GetCurrent()->Global()->Get(v8_str("f"))));
+
+  CHECK(f->IsOptimized());
+
+  while (!Marking::IsBlack(Marking::MarkBitFrom(f->code())) &&
+         !marking->IsStopped()) {
+    marking->Step(MB);
+  }
+
+  CHECK(marking->IsMarking());
+
+  // Discard any pending GC requests; otherwise we will get a GC when we
+  // enter the code below.
+  if (ISOLATE->stack_guard()->IsGCRequest()) {
+    ISOLATE->stack_guard()->Continue(GC_REQUEST);
+  }
+
+  {
+    v8::HandleScope scope;
+    v8::Handle<v8::Object> global = v8::Context::GetCurrent()->Global();
+    v8::Handle<v8::Function> g =
+        v8::Handle<v8::Function>::Cast(global->Get(v8_str("g")));
+    g->Call(global, 0, NULL);
+  }
+
+  HEAP->incremental_marking()->set_should_hurry(true);
+  HEAP->CollectGarbage(OLD_POINTER_SPACE);
+}
