Revision: 10387
Author:   [email protected]
Date:     Wed Jan 11 05:18:27 2012
Log:      Merge r10380, r10381 from the bleeding_edge to the 3.7 branch.

Change inlined cache of instanceof stub to use indirection through cell.

The stub was directly patching caller's code without issuing write barrier which violated incremental marking invariants.

[email protected]
BUG=http://crbug.com/109448
TEST=cctest/test-heap/InstanceOfStubWriteBarrier

Review URL: http://codereview.chromium.org/9158015
------------------------------------------------------------------------


Fix build failure introduced by r10380

[email protected]

Review URL: http://codereview.chromium.org/9190002
------------------------------------------------------------------------
Review URL: http://codereview.chromium.org/9187003
http://code.google.com/p/v8/source/detail?r=10387

Modified:
 /branches/3.7/src/arm/code-stubs-arm.cc
 /branches/3.7/src/arm/lithium-codegen-arm.cc
 /branches/3.7/src/ia32/code-stubs-ia32.cc
 /branches/3.7/src/ia32/lithium-codegen-ia32.cc
 /branches/3.7/src/incremental-marking.h
 /branches/3.7/src/version.cc
 /branches/3.7/src/x64/code-stubs-x64.cc
 /branches/3.7/src/x64/lithium-codegen-x64.cc
 /branches/3.7/test/cctest/test-heap.cc

=======================================
--- /branches/3.7/src/arm/code-stubs-arm.cc     Tue Nov 29 06:28:56 2011
+++ /branches/3.7/src/arm/code-stubs-arm.cc     Wed Jan 11 05:18:27 2012
@@ -3989,7 +3989,7 @@
   const Register inline_site = r9;
   const Register scratch = r2;

-  const int32_t kDeltaToLoadBoolResult = 3 * kPointerSize;
+  const int32_t kDeltaToLoadBoolResult = 4 * kPointerSize;

   Label slow, loop, is_instance, is_not_instance, not_js_object;

@@ -4040,7 +4040,8 @@
     __ sub(inline_site, lr, scratch);
     // Get the map location in scratch and patch it.
     __ GetRelocatedValueLocation(inline_site, scratch);
-    __ str(map, MemOperand(scratch));
+    __ ldr(scratch, MemOperand(scratch));
+    __ str(map, FieldMemOperand(scratch, JSGlobalPropertyCell::kValueOffset));
   }

   // Register mapping: r3 is object map and r4 is function prototype.
=======================================
--- /branches/3.7/src/arm/lithium-codegen-arm.cc        Wed Dec  7 05:07:25 2011
+++ /branches/3.7/src/arm/lithium-codegen-arm.cc        Wed Jan 11 05:18:27 2012
@@ -2109,7 +2109,10 @@
// We use Factory::the_hole_value() on purpose instead of loading from the
   // root array to force relocation to be able to later patch with
   // the cached map.
-  __ mov(ip, Operand(factory()->the_hole_value()));
+  Handle<JSGlobalPropertyCell> cell =
+      factory()->NewJSGlobalPropertyCell(factory()->the_hole_value());
+  __ mov(ip, Operand(Handle<Object>(cell)));
+  __ ldr(ip, FieldMemOperand(ip, JSGlobalPropertyCell::kValueOffset));
   __ cmp(map, Operand(ip));
   __ b(ne, &cache_miss);
// We use Factory::the_hole_value() on purpose instead of loading from the
=======================================
--- /branches/3.7/src/ia32/code-stubs-ia32.cc   Tue Nov 29 06:28:56 2011
+++ /branches/3.7/src/ia32/code-stubs-ia32.cc   Wed Jan 11 05:18:27 2012
@@ -4979,8 +4979,8 @@
   static const int kDeltaToCmpImmediate = 2;
   static const int kDeltaToMov = 8;
   static const int kDeltaToMovImmediate = 9;
-  static const int8_t kCmpEdiImmediateByte1 = BitCast<int8_t, uint8_t>(0x81);
-  static const int8_t kCmpEdiImmediateByte2 = BitCast<int8_t, uint8_t>(0xff);
+  static const int8_t kCmpEdiOperandByte1 = BitCast<int8_t, uint8_t>(0x3b);
+  static const int8_t kCmpEdiOperandByte2 = BitCast<int8_t, uint8_t>(0x3d);
static const int8_t kMovEaxImmediateByte = BitCast<int8_t, uint8_t>(0xb8);

   ExternalReference roots_array_start =
@@ -5045,12 +5045,13 @@
     __ mov(scratch, Operand(esp, 0 * kPointerSize));
     __ sub(scratch, Operand(esp, 1 * kPointerSize));
     if (FLAG_debug_code) {
-      __ cmpb(Operand(scratch, 0), kCmpEdiImmediateByte1);
+      __ cmpb(Operand(scratch, 0), kCmpEdiOperandByte1);
__ Assert(equal, "InstanceofStub unexpected call site cache (cmp 1)");
-      __ cmpb(Operand(scratch, 1), kCmpEdiImmediateByte2);
+      __ cmpb(Operand(scratch, 1), kCmpEdiOperandByte2);
__ Assert(equal, "InstanceofStub unexpected call site cache (cmp 2)");
     }
-    __ mov(Operand(scratch, kDeltaToCmpImmediate), map);
+    __ mov(scratch, Operand(scratch, kDeltaToCmpImmediate));
+    __ mov(Operand(scratch, 0), map);
   }

// Loop through the prototype chain of the object looking for the function
=======================================
--- /branches/3.7/src/ia32/lithium-codegen-ia32.cc      Wed Dec  7 05:07:25 2011
+++ /branches/3.7/src/ia32/lithium-codegen-ia32.cc      Wed Jan 11 05:18:27 2012
@@ -1982,7 +1982,9 @@
   Register map = ToRegister(instr->TempAt(0));
   __ mov(map, FieldOperand(object, HeapObject::kMapOffset));
   __ bind(deferred->map_check());  // Label for calculating code patching.
-  __ cmp(map, factory()->the_hole_value());  // Patched to cached map.
+  Handle<JSGlobalPropertyCell> cache_cell =
+      factory()->NewJSGlobalPropertyCell(factory()->the_hole_value());
+  __ cmp(map, Operand::Cell(cache_cell));  // Patched to cached map.
   __ j(not_equal, &cache_miss, Label::kNear);
__ mov(eax, factory()->the_hole_value()); // Patched to either true or false.
   __ jmp(&done);
=======================================
--- /branches/3.7/src/incremental-marking.h     Mon Dec  5 09:22:52 2011
+++ /branches/3.7/src/incremental-marking.h     Wed Jan 11 05:18:27 2012
@@ -56,6 +56,7 @@
   }

   bool should_hurry() { return should_hurry_; }
+  void set_should_hurry(bool val) { should_hurry_ = val; }

   inline bool IsStopped() { return state() == STOPPED; }

@@ -216,10 +217,6 @@
   void UncommitMarkingDeque();

  private:
-  void set_should_hurry(bool val) {
-    should_hurry_ = val;
-  }
-
   int64_t SpaceLeftInOldSpace();

   void ResetStepCounters();
=======================================
--- /branches/3.7/src/version.cc        Tue Jan 10 09:53:00 2012
+++ /branches/3.7/src/version.cc        Wed Jan 11 05:18:27 2012
@@ -35,7 +35,7 @@
 #define MAJOR_VERSION     3
 #define MINOR_VERSION     7
 #define BUILD_NUMBER      12
-#define PATCH_LEVEL       15
+#define PATCH_LEVEL       16
 // Use 1 for candidates and 0 otherwise.
 // (Boolean macro values are not supported by all preprocessors.)
 #define IS_CANDIDATE_VERSION 0
=======================================
--- /branches/3.7/src/x64/code-stubs-x64.cc     Tue Nov 29 06:28:56 2011
+++ /branches/3.7/src/x64/code-stubs-x64.cc     Wed Jan 11 05:18:27 2012
@@ -4049,12 +4049,14 @@
     // Get return address and delta to inlined map check.
     __ movq(kScratchRegister, Operand(rsp, 0 * kPointerSize));
     __ subq(kScratchRegister, Operand(rsp, 1 * kPointerSize));
-    __ movq(Operand(kScratchRegister, kOffsetToMapCheckValue), rax);
     if (FLAG_debug_code) {
       __ movl(rdi, Immediate(kWordBeforeMapCheckValue));
       __ cmpl(Operand(kScratchRegister, kOffsetToMapCheckValue - 4), rdi);
__ Assert(equal, "InstanceofStub unexpected call site cache (check).");
     }
+    __ movq(kScratchRegister,
+            Operand(kScratchRegister, kOffsetToMapCheckValue));
+    __ movq(Operand(kScratchRegister, 0), rax);
   }

   __ movq(rcx, FieldOperand(rax, Map::kPrototypeOffset));
=======================================
--- /branches/3.7/src/x64/lithium-codegen-x64.cc        Wed Dec  7 05:07:25 2011
+++ /branches/3.7/src/x64/lithium-codegen-x64.cc        Wed Jan 11 05:18:27 2012
@@ -1892,9 +1892,10 @@
   Register map = ToRegister(instr->TempAt(0));
   __ movq(map, FieldOperand(object, HeapObject::kMapOffset));
   __ bind(deferred->map_check());  // Label for calculating code patching.
-  __ movq(kScratchRegister, factory()->the_hole_value(),
-          RelocInfo::EMBEDDED_OBJECT);
-  __ cmpq(map, kScratchRegister);  // Patched to cached map.
+  Handle<JSGlobalPropertyCell> cache_cell =
+      factory()->NewJSGlobalPropertyCell(factory()->the_hole_value());
+  __ movq(kScratchRegister, cache_cell, RelocInfo::GLOBAL_PROPERTY_CELL);
+  __ cmpq(map, Operand(kScratchRegister, 0));
   __ j(not_equal, &cache_miss, Label::kNear);
   // Patched to load either true or false.
   __ LoadRoot(ToRegister(instr->result()), Heap::kTheHoleValueRootIndex);
=======================================
--- /branches/3.7/test/cctest/test-heap.cc      Mon Jan  2 01:46:56 2012
+++ /branches/3.7/test/cctest/test-heap.cc      Wed Jan 11 05:18:27 2012
@@ -1447,3 +1447,61 @@
   HEAP->CollectAllAvailableGarbage();
   CHECK_EQ(0, NumberOfGlobalObjects());
 }
+
+
+TEST(InstanceOfStubWriteBarrier) {
+  if (!i::FLAG_crankshaft) return;
+  i::FLAG_allow_natives_syntax = true;
+#ifdef DEBUG
+  i::FLAG_verify_heap = true;
+#endif
+  InitializeVM();
+  v8::HandleScope outer_scope;
+
+  {
+    v8::HandleScope scope;
+    CompileRun(
+        "function foo () { }"
+        "function mkbar () { return new (new Function(\"\")) (); }"
+        "function f (x) { return (x instanceof foo); }"
+        "function g () { f(mkbar()); }"
+        "f(new foo()); f(new foo());"
+        "%OptimizeFunctionOnNextCall(f);"
+        "f(new foo()); g();");
+  }
+
+  IncrementalMarking* marking = HEAP->incremental_marking();
+  marking->Abort();
+  marking->Start();
+
+  Handle<JSFunction> f =
+      v8::Utils::OpenHandle(
+          *v8::Handle<v8::Function>::Cast(
+              v8::Context::GetCurrent()->Global()->Get(v8_str("f"))));
+
+  CHECK(f->IsOptimized());
+
+  while (!Marking::IsBlack(Marking::MarkBitFrom(f->code())) &&
+         !marking->IsStopped()) {
+    marking->Step(MB);
+  }
+
+  CHECK(marking->IsMarking());
+
+  // Discard any pending GC requests otherwise we will get GC when we enter
+  // code below.
+  if (ISOLATE->stack_guard()->IsGCRequest()) {
+    ISOLATE->stack_guard()->Continue(GC_REQUEST);
+  }
+
+  {
+    v8::HandleScope scope;
+    v8::Handle<v8::Object> global = v8::Context::GetCurrent()->Global();
+    v8::Handle<v8::Function> g =
+        v8::Handle<v8::Function>::Cast(global->Get(v8_str("g")));
+    g->Call(global, 0, NULL);
+  }
+
+  HEAP->incremental_marking()->set_should_hurry(true);
+  HEAP->CollectGarbage(OLD_POINTER_SPACE);
+}

--
v8-dev mailing list
[email protected]
http://groups.google.com/group/v8-dev

Reply via email to