Re: [Qemu-devel] [RFC v7 16/16] target-arm: aarch64: add atomic instructions

2016-02-19 Thread Alex Bennée

Alvise Rigo  writes:

> Use the new LL/SC runtime helpers to handle the aarch64 atomic instructions
> in softmmu_llsc_template.h.
>
> The STXP emulation required a dedicated helper to handle the paired
> doubleword case.
>
> Suggested-by: Jani Kokkonen 
> Suggested-by: Claudio Fontana 
> Signed-off-by: Alvise Rigo 
> ---
>  configure  |   6 +-
>  target-arm/helper-a64.c|  55 +++
>  target-arm/helper-a64.h|   4 ++
>  target-arm/op_helper.c |   8 +++
>  target-arm/translate-a64.c | 134 
> -
>  5 files changed, 204 insertions(+), 3 deletions(-)
>
> diff --git a/configure b/configure
> index 915efcc..38121ff 100755
> --- a/configure
> +++ b/configure
> @@ -5873,9 +5873,11 @@ echo "QEMU_CFLAGS+=$cflags" >> $config_target_mak
>  # Use tcg LL/SC tcg backend for exclusive instruction is arm/aarch64
>  # softmmus targets
>  if test "$arm_tcg_use_llsc" = "yes" ; then
> -  if test "$target" = "arm-softmmu" ; then
> +  case "$target" in
> +arm-softmmu | aarch64-softmmu)
>  echo "CONFIG_ARM_USE_LDST_EXCL=y" >> $config_target_mak
> -  fi
> +;;
> +  esac
>  fi

See previous comments about configure code.

>  done # for target in $targets
>
> diff --git a/target-arm/helper-a64.c b/target-arm/helper-a64.c
> index c7bfb4d..dcee66f 100644
> --- a/target-arm/helper-a64.c
> +++ b/target-arm/helper-a64.c
> @@ -26,6 +26,7 @@
>  #include "qemu/bitops.h"
>  #include "internals.h"
>  #include "qemu/crc32c.h"
> +#include "tcg/tcg.h"
> +#include <zlib.h> /* For crc32 */
>
>  /* C2.4.7 Multiply and divide */
> @@ -443,3 +444,57 @@ uint64_t HELPER(crc32c_64)(uint64_t acc, uint64_t val, 
> uint32_t bytes)
>  /* Linux crc32c converts the output to one's complement.  */
>  return crc32c(acc, buf, bytes) ^ 0xffffffff;
>  }
> +
> +#ifdef CONFIG_ARM_USE_LDST_EXCL
> +/* STXP emulation for two 64 bit doublewords. We can't use directly two
> + * stcond_i64 accesses, otherwise the first will conclude the LL/SC pair.
> + * Instead, two normal 64-bit accesses are used and the CPUState is
> + * updated accordingly. */
> +target_ulong HELPER(stxp_i128)(CPUArchState *env, target_ulong addr,
> +   uint64_t vall, uint64_t valh,
> +   uint32_t mmu_idx)
> +{
> +CPUState *cpu = ENV_GET_CPU(env);
> +TCGMemOpIdx op;
> +target_ulong ret = 0;
> +
> +if (!cpu->ll_sc_context) {
> +cpu->excl_succeeded = false;
> +ret = 1;
> +goto out;
> +}
> +
> +op = make_memop_idx(MO_BEQ, mmu_idx);
> +
> +/* According to section C6.6.191 of ARM ARM DDI 0487A.h, the access has 
> to
> + * be quadword aligned.  For the time being, we do not support paired 
> STXPs
> + * to MMIO memory, this will become trivial when the softmmu will support
> + * 128bit memory accesses. */
> +if (addr & 0xf) {
> +/* TODO: Do unaligned access */

This should probably log UNIMP if you don't implement it now.

> +}
> +
> +/* Setting excl_succeeded to true will make the store exclusive. */
> +cpu->excl_succeeded = true;
> +helper_ret_stq_mmu(env, addr, vall, op, GETRA());
> +
> +if (!cpu->excl_succeeded) {
> +ret = 1;
> +goto out;
> +}
> +
> +helper_ret_stq_mmu(env, addr + 8, valh, op, GETRA());
> +if (!cpu->excl_succeeded) {
> +ret = 1;
> +} else {
> +cpu->excl_succeeded = false;
> +}
> +
> +out:
> +/* Unset LL/SC context */
> +cpu->ll_sc_context = false;
> +cpu->excl_protected_range.begin = EXCLUSIVE_RESET_ADDR;
> +
> +return ret;
> +}
> +#endif
> diff --git a/target-arm/helper-a64.h b/target-arm/helper-a64.h
> index 1d3d10f..c416a83 100644
> --- a/target-arm/helper-a64.h
> +++ b/target-arm/helper-a64.h
> @@ -46,3 +46,7 @@ DEF_HELPER_FLAGS_2(frecpx_f32, TCG_CALL_NO_RWG, f32, f32, 
> ptr)
>  DEF_HELPER_FLAGS_2(fcvtx_f64_to_f32, TCG_CALL_NO_RWG, f32, f64, env)
>  DEF_HELPER_FLAGS_3(crc32_64, TCG_CALL_NO_RWG_SE, i64, i64, i64, i32)
>  DEF_HELPER_FLAGS_3(crc32c_64, TCG_CALL_NO_RWG_SE, i64, i64, i64, i32)
> +#ifdef CONFIG_ARM_USE_LDST_EXCL
> +/* STXP helper */
> +DEF_HELPER_5(stxp_i128, i64, env, i64, i64, i64, i32)
> +#endif
> diff --git a/target-arm/op_helper.c b/target-arm/op_helper.c
> index 404c13b..146fc9a 100644
> --- a/target-arm/op_helper.c
> +++ b/target-arm/op_helper.c
> @@ -34,6 +34,14 @@ static void raise_exception(CPUARMState *env, uint32_t 
> excp,
>  cs->exception_index = excp;
>  env->exception.syndrome = syndrome;
>  env->exception.target_el = target_el;
> +#ifdef CONFIG_ARM_USE_LDST_EXCL
> +HELPER(atomic_clear)(env);
> +/* If the exception happens in the middle of a LL/SC, we need to clear
> + * excl_succeeded to avoid that the normal store following the exception 
> is
> + * wrongly interpreted as exclusive.
> + * */
> +cs->excl_succeeded = 0;

[Qemu-devel] [RFC v7 16/16] target-arm: aarch64: add atomic instructions

2016-01-29 Thread Alvise Rigo
Use the new LL/SC runtime helpers to handle the aarch64 atomic instructions
in softmmu_llsc_template.h.

The STXP emulation required a dedicated helper to handle the paired
doubleword case.

Suggested-by: Jani Kokkonen 
Suggested-by: Claudio Fontana 
Signed-off-by: Alvise Rigo 
---
 configure  |   6 +-
 target-arm/helper-a64.c|  55 +++
 target-arm/helper-a64.h|   4 ++
 target-arm/op_helper.c |   8 +++
 target-arm/translate-a64.c | 134 -
 5 files changed, 204 insertions(+), 3 deletions(-)

diff --git a/configure b/configure
index 915efcc..38121ff 100755
--- a/configure
+++ b/configure
@@ -5873,9 +5873,11 @@ echo "QEMU_CFLAGS+=$cflags" >> $config_target_mak
 # Use tcg LL/SC tcg backend for exclusive instruction is arm/aarch64
 # softmmus targets
 if test "$arm_tcg_use_llsc" = "yes" ; then
-  if test "$target" = "arm-softmmu" ; then
+  case "$target" in
+arm-softmmu | aarch64-softmmu)
 echo "CONFIG_ARM_USE_LDST_EXCL=y" >> $config_target_mak
-  fi
+;;
+  esac
 fi
 done # for target in $targets
 
diff --git a/target-arm/helper-a64.c b/target-arm/helper-a64.c
index c7bfb4d..dcee66f 100644
--- a/target-arm/helper-a64.c
+++ b/target-arm/helper-a64.c
@@ -26,6 +26,7 @@
 #include "qemu/bitops.h"
 #include "internals.h"
 #include "qemu/crc32c.h"
+#include "tcg/tcg.h"
 #include <zlib.h> /* For crc32 */
 
 /* C2.4.7 Multiply and divide */
@@ -443,3 +444,57 @@ uint64_t HELPER(crc32c_64)(uint64_t acc, uint64_t val, 
uint32_t bytes)
 /* Linux crc32c converts the output to one's complement.  */
 return crc32c(acc, buf, bytes) ^ 0xffffffff;
 }
+
+#ifdef CONFIG_ARM_USE_LDST_EXCL
+/* STXP emulation for two 64 bit doublewords. We can't use directly two
+ * stcond_i64 accesses, otherwise the first will conclude the LL/SC pair.
+ * Instead, two normal 64-bit accesses are used and the CPUState is
+ * updated accordingly. */
+target_ulong HELPER(stxp_i128)(CPUArchState *env, target_ulong addr,
+   uint64_t vall, uint64_t valh,
+   uint32_t mmu_idx)
+{
+CPUState *cpu = ENV_GET_CPU(env);
+TCGMemOpIdx op;
+target_ulong ret = 0;
+
+if (!cpu->ll_sc_context) {
+cpu->excl_succeeded = false;
+ret = 1;
+goto out;
+}
+
+op = make_memop_idx(MO_BEQ, mmu_idx);
+
+/* According to section C6.6.191 of ARM ARM DDI 0487A.h, the access has to
+ * be quadword aligned.  For the time being, we do not support paired STXPs
+ * to MMIO memory, this will become trivial when the softmmu will support
+ * 128bit memory accesses. */
+if (addr & 0xf) {
+/* TODO: Do unaligned access */
+}
+
+/* Setting excl_succeeded to true will make the store exclusive. */
+cpu->excl_succeeded = true;
+helper_ret_stq_mmu(env, addr, vall, op, GETRA());
+
+if (!cpu->excl_succeeded) {
+ret = 1;
+goto out;
+}
+
+helper_ret_stq_mmu(env, addr + 8, valh, op, GETRA());
+if (!cpu->excl_succeeded) {
+ret = 1;
+} else {
+cpu->excl_succeeded = false;
+}
+
+out:
+/* Unset LL/SC context */
+cpu->ll_sc_context = false;
+cpu->excl_protected_range.begin = EXCLUSIVE_RESET_ADDR;
+
+return ret;
+}
+#endif
diff --git a/target-arm/helper-a64.h b/target-arm/helper-a64.h
index 1d3d10f..c416a83 100644
--- a/target-arm/helper-a64.h
+++ b/target-arm/helper-a64.h
@@ -46,3 +46,7 @@ DEF_HELPER_FLAGS_2(frecpx_f32, TCG_CALL_NO_RWG, f32, f32, ptr)
 DEF_HELPER_FLAGS_2(fcvtx_f64_to_f32, TCG_CALL_NO_RWG, f32, f64, env)
 DEF_HELPER_FLAGS_3(crc32_64, TCG_CALL_NO_RWG_SE, i64, i64, i64, i32)
 DEF_HELPER_FLAGS_3(crc32c_64, TCG_CALL_NO_RWG_SE, i64, i64, i64, i32)
+#ifdef CONFIG_ARM_USE_LDST_EXCL
+/* STXP helper */
+DEF_HELPER_5(stxp_i128, i64, env, i64, i64, i64, i32)
+#endif
diff --git a/target-arm/op_helper.c b/target-arm/op_helper.c
index 404c13b..146fc9a 100644
--- a/target-arm/op_helper.c
+++ b/target-arm/op_helper.c
@@ -34,6 +34,14 @@ static void raise_exception(CPUARMState *env, uint32_t excp,
 cs->exception_index = excp;
 env->exception.syndrome = syndrome;
 env->exception.target_el = target_el;
+#ifdef CONFIG_ARM_USE_LDST_EXCL
+HELPER(atomic_clear)(env);
+/* If the exception happens in the middle of a LL/SC, we need to clear
+ * excl_succeeded to avoid that the normal store following the exception is
+ * wrongly interpreted as exclusive.
+ * */
+cs->excl_succeeded = 0;
+#endif
 cpu_loop_exit(cs);
 }
 
diff --git a/target-arm/translate-a64.c b/target-arm/translate-a64.c
index 80f6c20..f34e957 100644
--- a/target-arm/translate-a64.c
+++ b/target-arm/translate-a64.c
@@ -37,8 +37,10 @@
 static TCGv_i64 cpu_X[32];
 static TCGv_i64 cpu_pc;
 
+#if !defined(CONFIG_ARM_USE_LDST_EXCL)
 /* Load/store exclusive handling */
 static TCGv_i64 cpu_exclusive_high;
+#endif
 
 static const