Revision: 19239
Author:   [email protected]
Date:     Mon Feb 10 19:23:47 2014 UTC
Log:      A64: Make the simulator sampling friendly.

This commit ensures that if the simulator is interrupted while doing a load or
a store, the stack will look consistent, i.e. there is no data below the stack
pointer. Also added stack-checks to ldp/stp.

BUG=
[email protected]

Review URL: https://codereview.chromium.org/138913010
http://code.google.com/p/v8/source/detail?r=19239

Modified:
 /branches/experimental/a64/src/a64/constants-a64.h
 /branches/experimental/a64/src/a64/instructions-a64.cc
 /branches/experimental/a64/src/a64/instructions-a64.h
 /branches/experimental/a64/src/a64/simulator-a64.cc
 /branches/experimental/a64/src/a64/simulator-a64.h

=======================================
--- /branches/experimental/a64/src/a64/constants-a64.h Thu Feb 6 13:19:27 2014 UTC
+++ /branches/experimental/a64/src/a64/constants-a64.h Mon Feb 10 19:23:47 2014 UTC
@@ -707,12 +707,18 @@
   ISB             = MemBarrierFixed | 0x00000040
 };

-// Any load or store.
+// Any load or store (including pair).
 enum LoadStoreAnyOp {
   LoadStoreAnyFMask = 0x0a000000,
   LoadStoreAnyFixed = 0x08000000
 };

+// Any load pair or store pair.
+enum LoadStorePairAnyOp {
+  LoadStorePairAnyFMask = 0x3a000000,
+  LoadStorePairAnyFixed = 0x28000000
+};
+
 #define LOAD_STORE_PAIR_OP_LIST(V)  \
   V(STP, w,   0x00000000),          \
   V(LDP, w,   0x00400000),          \
=======================================
--- /branches/experimental/a64/src/a64/instructions-a64.cc Thu Feb 6 13:19:27 2014 UTC
+++ /branches/experimental/a64/src/a64/instructions-a64.cc Mon Feb 10 19:23:47 2014 UTC
@@ -38,6 +38,55 @@
 namespace internal {


+bool Instruction::IsLoad() const {
+  if (Mask(LoadStoreAnyFMask) != LoadStoreAnyFixed) {
+    return false;
+  }
+
+  if (Mask(LoadStorePairAnyFMask) == LoadStorePairAnyFixed) {
+    return Mask(LoadStorePairLBit) != 0;
+  } else {
+    LoadStoreOp op = static_cast<LoadStoreOp>(Mask(LoadStoreOpMask));
+    switch (op) {
+      case LDRB_w:
+      case LDRH_w:
+      case LDR_w:
+      case LDR_x:
+      case LDRSB_w:
+      case LDRSB_x:
+      case LDRSH_w:
+      case LDRSH_x:
+      case LDRSW_x:
+      case LDR_s:
+      case LDR_d: return true;
+      default: return false;
+    }
+  }
+}
+
+
+bool Instruction::IsStore() const {
+  if (Mask(LoadStoreAnyFMask) != LoadStoreAnyFixed) {
+    return false;
+  }
+
+  if (Mask(LoadStorePairAnyFMask) == LoadStorePairAnyFixed) {
+    return Mask(LoadStorePairLBit) == 0;
+  } else {
+    LoadStoreOp op = static_cast<LoadStoreOp>(Mask(LoadStoreOpMask));
+    switch (op) {
+      case STRB_w:
+      case STRH_w:
+      case STR_w:
+      case STR_x:
+      case STR_s:
+      case STR_d: return true;
+      default: return false;
+    }
+  }
+}
+
+
 static uint64_t RotateRight(uint64_t value,
                             unsigned int rotate,
                             unsigned int width) {
=======================================
--- /branches/experimental/a64/src/a64/instructions-a64.h Thu Feb 6 13:19:27 2014 UTC
+++ /branches/experimental/a64/src/a64/instructions-a64.h Mon Feb 10 19:23:47 2014 UTC
@@ -213,9 +213,15 @@
     return Mask(AddSubExtendedFMask) == AddSubExtendedFixed;
   }

+  // Match any loads or stores, including pairs.
   bool IsLoadOrStore() const {
     return Mask(LoadStoreAnyFMask) == LoadStoreAnyFixed;
   }
+
+  // Match any loads, including pairs.
+  bool IsLoad() const;
+  // Match any stores, including pairs.
+  bool IsStore() const;

   // Indicate whether Rd can be the stack pointer or the zero register. This
   // does not check that the instruction actually has an Rd field.
=======================================
--- /branches/experimental/a64/src/a64/simulator-a64.cc Mon Feb 10 17:01:19 2014 UTC
+++ /branches/experimental/a64/src/a64/simulator-a64.cc Mon Feb 10 19:23:47 2014 UTC
@@ -1235,23 +1235,24 @@
                                 int64_t offset,
                                 AddrMode addrmode) {
   unsigned srcdst = instr->Rt();
-  uint8_t* address = AddressModeHelper(instr->Rn(), offset, addrmode);
+  unsigned addr_reg = instr->Rn();
+  uint8_t* address = LoadStoreAddress(addr_reg, offset, addrmode);
   int num_bytes = 1 << instr->SizeLS();
+  uint8_t* stack = NULL;

-  // Accesses below the stack pointer (but above the platform stack limit) are
-  // not allowed in the ABI.
-  uint64_t access_address = reinterpret_cast<uint64_t>(address);
-  uint64_t stack_limit = reinterpret_cast<uint64_t>(stack_limit_);
-  uint64_t stack_address = sp();
-  if ((access_address >= stack_limit) && (access_address < stack_address)) {
-    fprintf(stream_, "ACCESS BELOW STACK POINTER:\n");
-    fprintf(stream_, "  sp is here:          0x%016" PRIx64 "\n",
-            stack_address);
-    fprintf(stream_, "  access was here:     0x%016" PRIx64 "\n",
-            access_address);
-    fprintf(stream_, "  stack limit is here: 0x%016" PRIx64 "\n", stack_limit);
-    fprintf(stream_, "\n");
-    ABORT();
+  // Handle the writeback for stores before the store. On a CPU the writeback
+  // and the store are atomic, but when running on the simulator it is possible
+  // to be interrupted in between. The simulator is not thread safe and V8 does
+  // not require it to be to run JavaScript therefore the profiler may sample
+  // the "simulated" CPU in the middle of load/store with writeback. The code
+  // below ensures that push operations are safe even when interrupted: the
+  // stack pointer will be decremented before adding an element to the stack.
+  if (instr->IsStore()) {
+    LoadStoreWriteBack(addr_reg, offset, addrmode);
+
+    // For store the address post writeback is used to check access below the
+    // stack.
+    stack = reinterpret_cast<uint8_t*>(sp());
   }

   LoadStoreOp op = static_cast<LoadStoreOp>(instr->Mask(LoadStoreOpMask));
@@ -1290,6 +1291,22 @@
     case STR_d: MemoryWriteFP64(address, dreg(srcdst)); break;
     default: UNIMPLEMENTED();
   }
+
+  // Handle the writeback for loads after the load to ensure safe pop
+  // operation even when interrupted in the middle of it. The stack pointer
+  // is only updated after the load so pop(fp) will never break the invariant
+  // sp <= fp expected while walking the stack in the sampler.
+  if (instr->IsLoad()) {
+    // For loads the address pre writeback is used to check access below the
+    // stack.
+    stack = reinterpret_cast<uint8_t*>(sp());
+
+    LoadStoreWriteBack(addr_reg, offset, addrmode);
+  }
+
+  // Accesses below the stack pointer (but above the platform stack limit) are
+  // not allowed in the ABI.
+  CheckMemoryAccess(address, stack);
 }


@@ -1317,8 +1334,25 @@
                                     AddrMode addrmode) {
   unsigned rt = instr->Rt();
   unsigned rt2 = instr->Rt2();
+  unsigned addr_reg = instr->Rn();
   int offset = instr->ImmLSPair() << instr->SizeLSPair();
-  uint8_t* address = AddressModeHelper(instr->Rn(), offset, addrmode);
+  uint8_t* address = LoadStoreAddress(addr_reg, offset, addrmode);
+  uint8_t* stack = NULL;
+
+  // Handle the writeback for stores before the store. On a CPU the writeback
+  // and the store are atomic, but when running on the simulator it is possible
+  // to be interrupted in between. The simulator is not thread safe and V8 does
+  // not require it to be to run JavaScript therefore the profiler may sample
+  // the "simulated" CPU in the middle of load/store with writeback. The code
+  // below ensures that push operations are safe even when interrupted: the
+  // stack pointer will be decremented before adding an element to the stack.
+  if (instr->IsStore()) {
+    LoadStoreWriteBack(addr_reg, offset, addrmode);
+
+    // For store the address post writeback is used to check access below the
+    // stack.
+    stack = reinterpret_cast<uint8_t*>(sp());
+  }

   LoadStorePairOp op =
     static_cast<LoadStorePairOp>(instr->Mask(LoadStorePairMask));
@@ -1375,6 +1409,22 @@
     }
     default: UNREACHABLE();
   }
+
+  // Handle the writeback for loads after the load to ensure safe pop
+  // operation even when interrupted in the middle of it. The stack pointer
+  // is only updated after the load so pop(fp) will never break the invariant
+  // sp <= fp expected while walking the stack in the sampler.
+  if (instr->IsLoad()) {
+    // For loads the address pre writeback is used to check access below the
+    // stack.
+    stack = reinterpret_cast<uint8_t*>(sp());
+
+    LoadStoreWriteBack(addr_reg, offset, addrmode);
+  }
+
+  // Accesses below the stack pointer (but above the platform stack limit) are
+  // not allowed in the ABI.
+  CheckMemoryAccess(address, stack);
 }


@@ -1392,22 +1442,17 @@
 }


-uint8_t* Simulator::AddressModeHelper(unsigned addr_reg,
-                                      int64_t offset,
-                                      AddrMode addrmode) {
-  uint64_t address = xreg(addr_reg, Reg31IsStackPointer);
-  ASSERT((sizeof(uintptr_t) == kXRegSizeInBytes) ||
-         (address < 0x100000000UL));
-  if ((addr_reg == 31) && ((address % 16) != 0)) {
+uint8_t* Simulator::LoadStoreAddress(unsigned addr_reg,
+                                     int64_t offset,
+                                     AddrMode addrmode) {
+  const unsigned kSPRegCode = kSPRegInternalCode & kRegCodeMask;
+  int64_t address = xreg(addr_reg, Reg31IsStackPointer);
+  if ((addr_reg == kSPRegCode) && ((address % 16) != 0)) {
     // When the base register is SP the stack pointer is required to be
     // quadword aligned prior to the address calculation and write-backs.
     // Misalignment will cause a stack alignment fault.
     ALIGNMENT_EXCEPTION();
   }
-  if ((addrmode == PreIndex) || (addrmode == PostIndex)) {
-    ASSERT(offset != 0);
-    set_xreg(addr_reg, address + offset, Reg31IsStackPointer);
-  }

   if ((addrmode == Offset) || (addrmode == PreIndex)) {
     address += offset;
@@ -1415,6 +1460,29 @@

   return reinterpret_cast<uint8_t*>(address);
 }
+
+
+void Simulator::LoadStoreWriteBack(unsigned addr_reg,
+                                   int64_t offset,
+                                   AddrMode addrmode) {
+  if ((addrmode == PreIndex) || (addrmode == PostIndex)) {
+    ASSERT(offset != 0);
+    uint64_t address = xreg(addr_reg, Reg31IsStackPointer);
+    set_reg(addr_reg, address + offset, Reg31IsStackPointer);
+  }
+}
+
+
+void Simulator::CheckMemoryAccess(uint8_t* address, uint8_t* stack) {
+  if ((address >= stack_limit_) && (address < stack)) {
+    fprintf(stream_, "ACCESS BELOW STACK POINTER:\n");
+    fprintf(stream_, "  sp is here:          0x%16p\n", stack);
+    fprintf(stream_, "  access was here:     0x%16p\n", address);
+    fprintf(stream_, "  stack limit is here: 0x%16p\n", stack_limit_);
+    fprintf(stream_, "\n");
+    ABORT();
+  }
+}


 uint64_t Simulator::MemoryRead(uint8_t* address, unsigned num_bytes) {
=======================================
--- /branches/experimental/a64/src/a64/simulator-a64.h Fri Jan 31 17:26:22 2014 UTC
+++ /branches/experimental/a64/src/a64/simulator-a64.h Mon Feb 10 19:23:47 2014 UTC
@@ -651,9 +651,13 @@
                        int64_t offset,
                        AddrMode addrmode);
   void LoadStorePairHelper(Instruction* instr, AddrMode addrmode);
-  uint8_t* AddressModeHelper(unsigned addr_reg,
-                             int64_t offset,
-                             AddrMode addrmode);
+  uint8_t* LoadStoreAddress(unsigned addr_reg,
+                            int64_t offset,
+                            AddrMode addrmode);
+  void LoadStoreWriteBack(unsigned addr_reg,
+                          int64_t offset,
+                          AddrMode addrmode);
+  void CheckMemoryAccess(uint8_t* address, uint8_t* stack);

   uint64_t MemoryRead(uint8_t* address, unsigned num_bytes);
   uint8_t MemoryRead8(uint8_t* address);

--
--
v8-dev mailing list
[email protected]
http://groups.google.com/group/v8-dev
--- You received this message because you are subscribed to the Google Groups "v8-dev" group.
To unsubscribe from this group and stop receiving emails from it, send an email 
to [email protected].
For more options, visit https://groups.google.com/groups/opt_out.

Reply via email to