From: Jan Kiszka <[email protected]>

No caller of these functions exploits their atomicity, and this should
not change. So simplify the code by using generic non-atomic versions.

On arm and arm64, some macros are folded into their only remaining user,
test_and_set_bit.

Signed-off-by: Jan Kiszka <[email protected]>
---
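For reference, the generic set_bit/clear_bit added below are plain
read-modify-write helpers, while test_and_set_bit keeps its arch-specific
atomic implementation. A minimal, hypothetical usage sketch (the callers
mark_ready/claim_slot are made up for illustration and are not part of this
patch):

	static unsigned long flags[1];

	void mark_ready(void)
	{
		/* non-atomic set_bit is fine: nothing else updates 'flags' here */
		set_bit(3, flags);
	}

	int claim_slot(void)
	{
		/* may race with other CPUs, so the atomic primitive is needed */
		return !test_and_set_bit(3, flags);
	}
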
 hypervisor/arch/arm/include/asm/bitops.h   | 65 +++++-------------------------
 hypervisor/arch/arm64/include/asm/bitops.h | 54 ++-----------------------
 hypervisor/arch/x86/include/asm/bitops.h   | 31 --------------
 hypervisor/include/jailhouse/bitops.h      | 12 ++++++
 4 files changed, 25 insertions(+), 137 deletions(-)

diff --git a/hypervisor/arch/arm/include/asm/bitops.h b/hypervisor/arch/arm/include/asm/bitops.h
index fbdcc817..8b9c9d1f 100644
--- a/hypervisor/arch/arm/include/asm/bitops.h
+++ b/hypervisor/arch/arm/include/asm/bitops.h
@@ -1,7 +1,7 @@
 /*
  * Jailhouse, a Linux-based partitioning hypervisor
  *
- * Copyright (c) Siemens AG, 2013
+ * Copyright (c) Siemens AG, 2013-2020
  *
  * Authors:
  *  Jan Kiszka <[email protected]>
@@ -10,58 +10,6 @@
  * the COPYING file in the top-level directory.
  */
 
-#define BITOPT_ALIGN(bits, addr)                               \
-       do {                                                    \
-               (addr) = (unsigned long *)((u32)(addr) & ~0x3)  \
-                       + (bits) / BITS_PER_LONG;               \
-               (bits) %= BITS_PER_LONG;                        \
-       } while (0)
-
-/* Load the cacheline in exclusive state */
-#define PRELOAD(addr)                                          \
-       asm volatile (".arch_extension mp\n\t"                  \
-                     "pldw %0\n\t"                             \
-                     : "+Qo" (*(volatile unsigned long *)addr));
-
-static inline __attribute__((always_inline)) void
-clear_bit(unsigned int nr, volatile unsigned long *addr)
-{
-       unsigned long ret, val;
-
-       BITOPT_ALIGN(nr, addr);
-
-       PRELOAD(addr);
-       do {
-               asm volatile (
-                       "ldrex  %1, %2\n\t"
-                       "bic    %1, %3\n\t"
-                       "strex  %0, %1, %2\n\t"
-                       : "=r" (ret), "=r" (val),
-                       /* declare clobbering of this address to the compiler */
-                         "+Qo" (*(volatile unsigned long *)addr)
-                       : "r" (1 << nr));
-       } while (ret);
-}
-
-static inline __attribute__((always_inline)) void
-set_bit(unsigned int nr, volatile unsigned long *addr)
-{
-       unsigned long ret, val;
-
-       BITOPT_ALIGN(nr, addr);
-
-       PRELOAD(addr);
-       do {
-               asm volatile (
-                       "ldrex  %1, %2\n\t"
-                       "orr    %1, %3\n\t"
-                       "strex  %0, %1, %2\n\t"
-                       : "=r" (ret), "=r" (val),
-                         "+Qo" (*(volatile unsigned long *)addr)
-                       : "r" (1 << nr));
-       } while (ret);
-}
-
 static inline __attribute__((always_inline)) int
 test_bit(unsigned int nr, const volatile unsigned long *addr)
 {
@@ -73,9 +21,15 @@ static inline int test_and_set_bit(int nr, volatile unsigned long *addr)
 {
        unsigned long ret, val, test;
 
-       BITOPT_ALIGN(nr, addr);
+       /* word-align */
+       addr = (unsigned long *)((u32)addr & ~0x3) + nr / BITS_PER_LONG;
+       nr %= BITS_PER_LONG;
 
-       PRELOAD(addr);
+       /* Load the cacheline in exclusive state */
+       asm volatile (
+               ".arch_extension mp\n\t"
+               "pldw %0\n\t"
+               : "+Qo" (*(volatile unsigned long *)addr));
        do {
                asm volatile (
                        "ldrex  %1, %3\n\t"
@@ -91,7 +45,6 @@ static inline int test_and_set_bit(int nr, volatile unsigned long *addr)
        return !!(test);
 }
 
-
 /* Count leading zeroes */
 static inline unsigned long clz(unsigned long word)
 {
diff --git a/hypervisor/arch/arm64/include/asm/bitops.h b/hypervisor/arch/arm64/include/asm/bitops.h
index df99402d..004c3b44 100644
--- a/hypervisor/arch/arm64/include/asm/bitops.h
+++ b/hypervisor/arch/arm64/include/asm/bitops.h
@@ -11,55 +11,6 @@
  * the COPYING file in the top-level directory.
  */
 
-#define BITOPT_ALIGN(bits, addr)                               \
-       do {                                                    \
-               (addr) = (unsigned long *)((u64)(addr) & ~0x7)  \
-                       + (bits) / BITS_PER_LONG;               \
-               (bits) %= BITS_PER_LONG;                        \
-       } while (0)
-
-static inline __attribute__((always_inline)) void
-clear_bit(unsigned int nr, volatile unsigned long *addr)
-{
-       u32 ret;
-       u64 tmp;
-
-       BITOPT_ALIGN(nr, addr);
-
-       /* AARCH64_TODO: do we need to preload? */
-       do {
-               asm volatile (
-                       "ldxr   %2, %1\n\t"
-                       "bic    %2, %2, %3\n\t"
-                       "stxr   %w0, %2, %1\n\t"
-                       : "=r" (ret),
-                         "+Q" (*(volatile unsigned long *)addr),
-                         "=r" (tmp)
-                       : "r" (1ul << nr));
-       } while (ret);
-}
-
-static inline __attribute__((always_inline)) void
-set_bit(unsigned int nr, volatile unsigned long *addr)
-{
-       u32 ret;
-       u64 tmp;
-
-       BITOPT_ALIGN(nr, addr);
-
-       /* AARCH64_TODO: do we need to preload? */
-       do {
-               asm volatile (
-                       "ldxr   %2, %1\n\t"
-                       "orr    %2, %2, %3\n\t"
-                       "stxr   %w0, %2, %1\n\t"
-                       : "=r" (ret),
-                         "+Q" (*(volatile unsigned long *)addr),
-                         "=r" (tmp)
-                       : "r" (1ul << nr));
-       } while (ret);
-}
-
 static inline __attribute__((always_inline)) int
 test_bit(unsigned int nr, const volatile unsigned long *addr)
 {
@@ -72,7 +23,10 @@ static inline int test_and_set_bit(int nr, volatile unsigned long *addr)
        u32 ret;
        u64 test, tmp;
 
-       BITOPT_ALIGN(nr, addr);
+       /* word-align */
+       addr = (unsigned long *)((u64)addr & ~0x7) + nr / BITS_PER_LONG;
+       nr %= BITS_PER_LONG;
+
 
        /* AARCH64_TODO: using Inner Shareable DMB at the moment,
         * revisit when we will deal with shareability domains */
diff --git a/hypervisor/arch/x86/include/asm/bitops.h b/hypervisor/arch/x86/include/asm/bitops.h
index 66fc91ab..12d85fdb 100644
--- a/hypervisor/arch/x86/include/asm/bitops.h
+++ b/hypervisor/arch/x86/include/asm/bitops.h
@@ -23,37 +23,6 @@
 #define BITOP_ADDR(x) "+m" (*(volatile long *) (x))
 #endif
 
-#define CONST_MASK_ADDR(nr, addr)      BITOP_ADDR((void *)(addr) + ((nr)>>3))
-#define CONST_MASK(nr)                 (1 << ((nr) & 7))
-
-static inline __attribute__((always_inline)) void
-clear_bit(unsigned int nr, volatile unsigned long *addr)
-{
-       if (__builtin_constant_p(nr)) {
-               asm volatile("lock andb %1,%0"
-                       : CONST_MASK_ADDR(nr, addr)
-                       : "iq" ((u8)~CONST_MASK(nr)));
-       } else {
-               asm volatile("lock btr %1,%0"
-                       : BITOP_ADDR(addr)
-                       : "Ir" (nr));
-       }
-}
-
-static inline __attribute__((always_inline)) void
-set_bit(unsigned int nr, volatile unsigned long *addr)
-{
-       if (__builtin_constant_p(nr)) {
-               asm volatile("lock orb %1,%0"
-                       : CONST_MASK_ADDR(nr, addr)
-                       : "iq" ((u8)CONST_MASK(nr))
-                       : "memory");
-       } else {
-               asm volatile("lock bts %1,%0"
-                       : BITOP_ADDR(addr) : "Ir" (nr) : "memory");
-       }
-}
-
 static inline __attribute__((always_inline)) int
 constant_test_bit(unsigned int nr, const volatile unsigned long *addr)
 {
diff --git a/hypervisor/include/jailhouse/bitops.h b/hypervisor/include/jailhouse/bitops.h
index 426e51f1..e98381d0 100644
--- a/hypervisor/include/jailhouse/bitops.h
+++ b/hypervisor/include/jailhouse/bitops.h
@@ -16,4 +16,16 @@
 #include <jailhouse/types.h>
 #include <asm/bitops.h>
 
+static inline __attribute__((always_inline)) void
+clear_bit(unsigned int nr, volatile unsigned long *addr)
+{
+       addr[nr / BITS_PER_LONG] &= ~(1UL << (nr % BITS_PER_LONG));
+}
+
+static inline __attribute__((always_inline)) void
+set_bit(unsigned int nr, volatile unsigned long *addr)
+{
+       addr[nr / BITS_PER_LONG] |= 1UL << (nr % BITS_PER_LONG);
+}
+
 #endif /* !_JAILHOUSE_BITOPS_H */
-- 
2.16.4
