Introduce __bit_op() & __test_bit_op() macros which abstract away the
implementation of LL/SC loops. This cuts down on a lot of duplicate
boilerplate code, and also allows R10000_LLSC_WAR to be handled outside
of the individual bitop functions.

Signed-off-by: Paul Burton <paul.burton@mips.com>
---

Changes in v2: None

 arch/mips/include/asm/bitops.h | 267 ++++++++-------------------------
 1 file changed, 63 insertions(+), 204 deletions(-)

diff --git a/arch/mips/include/asm/bitops.h b/arch/mips/include/asm/bitops.h
index 7671db2a7b73..fba0a842b98a 100644
--- a/arch/mips/include/asm/bitops.h
+++ b/arch/mips/include/asm/bitops.h
@@ -25,6 +25,41 @@
 #include <asm/sgidefs.h>
 #include <asm/war.h>
 
+#define __bit_op(mem, insn, inputs...) do {                    \
+       unsigned long temp;                                     \
+                                                               \
+       asm volatile(                                           \
+       "       .set            push                    \n"     \
+       "       .set            " MIPS_ISA_LEVEL "      \n"     \
+       "1:     " __LL          "%0, %1                 \n"     \
+       "       " insn          "                       \n"     \
+       "       " __SC          "%0, %1                 \n"     \
+       "       " __SC_BEQZ     "%0, 1b                 \n"     \
+       "       .set            pop                     \n"     \
+       : "=&r"(temp), "+" GCC_OFF_SMALL_ASM()(mem)             \
+       : inputs                                                \
+       : __LLSC_CLOBBER);                                      \
+} while (0)
+
+#define __test_bit_op(mem, ll_dst, insn, inputs...) ({         \
+       unsigned long orig, temp;                               \
+                                                               \
+       asm volatile(                                           \
+       "       .set            push                    \n"     \
+       "       .set            " MIPS_ISA_LEVEL "      \n"     \
+       "1:     " __LL          ll_dst ", %2            \n"     \
+       "       " insn          "                       \n"     \
+       "       " __SC          "%1, %2                 \n"     \
+       "       " __SC_BEQZ     "%1, 1b                 \n"     \
+       "       .set            pop                     \n"     \
+       : "=&r"(orig), "=&r"(temp),                             \
+         "+" GCC_OFF_SMALL_ASM()(mem)                          \
+       : inputs                                                \
+       : __LLSC_CLOBBER);                                      \
+                                                               \
+       orig;                                                   \
+})
+
 /*
  * These are the "slower" versions of the functions and are in bitops.c.
  * These functions call raw_local_irq_{save,restore}().
@@ -54,55 +89,20 @@ static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
 {
        unsigned long *m = ((unsigned long *)addr) + (nr >> SZLONG_LOG);
        int bit = nr & SZLONG_MASK;
-       unsigned long temp;
 
        if (!kernel_uses_llsc) {
                __mips_set_bit(nr, addr);
                return;
        }
 
-       if (R10000_LLSC_WAR) {
-               __asm__ __volatile__(
-               "       .set    push                                    \n"
-               "       .set    arch=r4000                              \n"
-               "1:     " __LL "%0, %1                  # set_bit       \n"
-               "       or      %0, %2                                  \n"
-               "       " __SC  "%0, %1                                 \n"
-               "       beqzl   %0, 1b                                  \n"
-               "       .set    pop                                     \n"
-               : "=&r" (temp), "=" GCC_OFF_SMALL_ASM() (*m)
-               : "ir" (BIT(bit)), GCC_OFF_SMALL_ASM() (*m)
-               : __LLSC_CLOBBER);
-               return;
-       }
-
        if ((MIPS_ISA_REV >= 2) && __builtin_constant_p(bit) && (bit >= 16)) {
                loongson_llsc_mb();
-               do {
-                       __asm__ __volatile__(
-                       "       " __LL "%0, %1          # set_bit       \n"
-                       "       " __INS "%0, %3, %2, 1                  \n"
-                       "       " __SC "%0, %1                          \n"
-                       : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
-                       : "i" (bit), "r" (~0)
-                       : __LLSC_CLOBBER);
-               } while (unlikely(!temp));
+               __bit_op(*m, __INS "%0, %3, %2, 1", "i"(bit), "r"(~0));
                return;
        }
 
        loongson_llsc_mb();
-       do {
-               __asm__ __volatile__(
-               "       .set    push                            \n"
-               "       .set    "MIPS_ISA_ARCH_LEVEL"           \n"
-               "       " __LL "%0, %1          # set_bit       \n"
-               "       or      %0, %2                          \n"
-               "       " __SC  "%0, %1                         \n"
-               "       .set    pop                             \n"
-               : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
-               : "ir" (BIT(bit))
-               : __LLSC_CLOBBER);
-       } while (unlikely(!temp));
+       __bit_op(*m, "or\t%0, %2", "ir"(BIT(bit)));
 }
 
 /*
@@ -119,55 +119,20 @@ static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
 {
        unsigned long *m = ((unsigned long *)addr) + (nr >> SZLONG_LOG);
        int bit = nr & SZLONG_MASK;
-       unsigned long temp;
 
        if (!kernel_uses_llsc) {
                __mips_clear_bit(nr, addr);
                return;
        }
 
-       if (R10000_LLSC_WAR) {
-               __asm__ __volatile__(
-               "       .set    push                                    \n"
-               "       .set    arch=r4000                              \n"
-               "1:     " __LL "%0, %1                  # clear_bit     \n"
-               "       and     %0, %2                                  \n"
-               "       " __SC "%0, %1                                  \n"
-               "       beqzl   %0, 1b                                  \n"
-               "       .set    pop                                     \n"
-               : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
-               : "ir" (~(BIT(bit)))
-               : __LLSC_CLOBBER);
-               return;
-       }
-
        if ((MIPS_ISA_REV >= 2) && __builtin_constant_p(bit)) {
                loongson_llsc_mb();
-               do {
-                       __asm__ __volatile__(
-                       "       " __LL "%0, %1          # clear_bit     \n"
-                       "       " __INS "%0, $0, %2, 1                  \n"
-                       "       " __SC "%0, %1                          \n"
-                       : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
-                       : "i" (bit)
-                       : __LLSC_CLOBBER);
-               } while (unlikely(!temp));
+               __bit_op(*m, __INS "%0, $0, %2, 1", "i"(bit));
                return;
        }
 
        loongson_llsc_mb();
-       do {
-               __asm__ __volatile__(
-               "       .set    push                            \n"
-               "       .set    "MIPS_ISA_ARCH_LEVEL"           \n"
-               "       " __LL "%0, %1          # clear_bit     \n"
-               "       and     %0, %2                          \n"
-               "       " __SC "%0, %1                          \n"
-               "       .set    pop                             \n"
-               : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
-               : "ir" (~(BIT(bit)))
-               : __LLSC_CLOBBER);
-       } while (unlikely(!temp));
+       __bit_op(*m, "and\t%0, %2", "ir"(~BIT(bit)));
 }
 
 /*
@@ -197,41 +162,14 @@ static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
 {
        unsigned long *m = ((unsigned long *)addr) + (nr >> SZLONG_LOG);
        int bit = nr & SZLONG_MASK;
-       unsigned long temp;
 
        if (!kernel_uses_llsc) {
                __mips_change_bit(nr, addr);
                return;
        }
 
-       if (R10000_LLSC_WAR) {
-               __asm__ __volatile__(
-               "       .set    push                            \n"
-               "       .set    arch=r4000                      \n"
-               "1:     " __LL "%0, %1          # change_bit    \n"
-               "       xor     %0, %2                          \n"
-               "       " __SC  "%0, %1                         \n"
-               "       beqzl   %0, 1b                          \n"
-               "       .set    pop                             \n"
-               : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
-               : "ir" (BIT(bit))
-               : __LLSC_CLOBBER);
-               return;
-       }
-
        loongson_llsc_mb();
-       do {
-               __asm__ __volatile__(
-               "       .set    push                            \n"
-               "       .set    "MIPS_ISA_ARCH_LEVEL"           \n"
-               "       " __LL "%0, %1          # change_bit    \n"
-               "       xor     %0, %2                          \n"
-               "       " __SC  "%0, %1                         \n"
-               "       .set    pop                             \n"
-               : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
-               : "ir" (BIT(bit))
-               : __LLSC_CLOBBER);
-       } while (unlikely(!temp));
+       __bit_op(*m, "xor\t%0, %2", "ir"(BIT(bit)));
 }
 
 /*
@@ -247,41 +185,16 @@ static inline int test_and_set_bit_lock(unsigned long nr,
 {
        unsigned long *m = ((unsigned long *)addr) + (nr >> SZLONG_LOG);
        int bit = nr & SZLONG_MASK;
-       unsigned long res, temp;
+       unsigned long res, orig;
 
        if (!kernel_uses_llsc) {
                res = __mips_test_and_set_bit_lock(nr, addr);
-       } else if (R10000_LLSC_WAR) {
-               __asm__ __volatile__(
-               "       .set    push                                    \n"
-               "       .set    arch=r4000                              \n"
-               "1:     " __LL "%0, %1          # test_and_set_bit      \n"
-               "       or      %2, %0, %3                              \n"
-               "       " __SC  "%2, %1                                 \n"
-               "       beqzl   %2, 1b                                  \n"
-               "       and     %2, %0, %3                              \n"
-               "       .set    pop                                     \n"
-               : "=&r" (temp), "+m" (*m), "=&r" (res)
-               : "ir" (BIT(bit))
-               : __LLSC_CLOBBER);
-
-               res = res != 0;
        } else {
                loongson_llsc_mb();
-               do {
-                       __asm__ __volatile__(
-                       "       .set    push                            \n"
-                       "       .set    "MIPS_ISA_ARCH_LEVEL"           \n"
-                       "       " __LL "%0, %1  # test_and_set_bit      \n"
-                       "       or      %2, %0, %3                      \n"
-                       "       " __SC  "%2, %1                         \n"
-                       "       .set    pop                             \n"
-                       : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
-                       : "ir" (BIT(bit))
-                       : __LLSC_CLOBBER);
-               } while (unlikely(!res));
-
-               res = (temp & BIT(bit)) != 0;
+               orig = __test_bit_op(*m, "%0",
+                                    "or\t%1, %0, %3",
+                                    "ir"(BIT(bit)));
+               res = (orig & BIT(bit)) != 0;
        }
 
        smp_llsc_mb();
@@ -317,57 +230,25 @@ static inline int test_and_clear_bit(unsigned long nr,
 {
        unsigned long *m = ((unsigned long *)addr) + (nr >> SZLONG_LOG);
        int bit = nr & SZLONG_MASK;
-       unsigned long res, temp;
+       unsigned long res, orig;
 
        smp_mb__before_llsc();
 
        if (!kernel_uses_llsc) {
                res = __mips_test_and_clear_bit(nr, addr);
-       } else if (R10000_LLSC_WAR) {
-               __asm__ __volatile__(
-               "       .set    push                                    \n"
-               "       .set    arch=r4000                              \n"
-               "1:     " __LL  "%0, %1         # test_and_clear_bit    \n"
-               "       or      %2, %0, %3                              \n"
-               "       xor     %2, %3                                  \n"
-               "       " __SC  "%2, %1                                 \n"
-               "       beqzl   %2, 1b                                  \n"
-               "       and     %2, %0, %3                              \n"
-               "       .set    pop                                     \n"
-               : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
-               : "ir" (BIT(bit))
-               : __LLSC_CLOBBER);
-
-               res = res != 0;
        } else if ((MIPS_ISA_REV >= 2) && __builtin_constant_p(nr)) {
                loongson_llsc_mb();
-               do {
-                       __asm__ __volatile__(
-                       "       " __LL  "%0, %1 # test_and_clear_bit    \n"
-                       "       " __EXT "%2, %0, %3, 1                  \n"
-                       "       " __INS "%0, $0, %3, 1                  \n"
-                       "       " __SC  "%0, %1                         \n"
-                       : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
-                       : "i" (bit)
-                       : __LLSC_CLOBBER);
-               } while (unlikely(!temp));
+               res = __test_bit_op(*m, "%1",
+                                   __EXT "%0, %1, %3, 1;"
+                                   __INS "%1, $0, %3, 1",
+                                   "i"(bit));
        } else {
                loongson_llsc_mb();
-               do {
-                       __asm__ __volatile__(
-                       "       .set    push                            \n"
-                       "       .set    "MIPS_ISA_ARCH_LEVEL"           \n"
-                       "       " __LL  "%0, %1 # test_and_clear_bit    \n"
-                       "       or      %2, %0, %3                      \n"
-                       "       xor     %2, %3                          \n"
-                       "       " __SC  "%2, %1                         \n"
-                       "       .set    pop                             \n"
-                       : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
-                       : "ir" (BIT(bit))
-                       : __LLSC_CLOBBER);
-               } while (unlikely(!res));
-
-               res = (temp & BIT(bit)) != 0;
+               orig = __test_bit_op(*m, "%0",
+                                    "or\t%1, %0, %3;"
+                                    "xor\t%1, %1, %3",
+                                    "ir"(BIT(bit)));
+               res = (orig & BIT(bit)) != 0;
        }
 
        smp_llsc_mb();
@@ -388,43 +269,18 @@ static inline int test_and_change_bit(unsigned long nr,
 {
        unsigned long *m = ((unsigned long *)addr) + (nr >> SZLONG_LOG);
        int bit = nr & SZLONG_MASK;
-       unsigned long res, temp;
+       unsigned long res, orig;
 
        smp_mb__before_llsc();
 
        if (!kernel_uses_llsc) {
                res = __mips_test_and_change_bit(nr, addr);
-       } else if (R10000_LLSC_WAR) {
-               __asm__ __volatile__(
-               "       .set    push                                    \n"
-               "       .set    arch=r4000                              \n"
-               "1:     " __LL  "%0, %1         # test_and_change_bit   \n"
-               "       xor     %2, %0, %3                              \n"
-               "       " __SC  "%2, %1                                 \n"
-               "       beqzl   %2, 1b                                  \n"
-               "       and     %2, %0, %3                              \n"
-               "       .set    pop                                     \n"
-               : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
-               : "ir" (BIT(bit))
-               : __LLSC_CLOBBER);
-
-               res = res != 0;
        } else {
                loongson_llsc_mb();
-               do {
-                       __asm__ __volatile__(
-                       "       .set    push                            \n"
-                       "       .set    "MIPS_ISA_ARCH_LEVEL"           \n"
-                       "       " __LL  "%0, %1 # test_and_change_bit   \n"
-                       "       xor     %2, %0, %3                      \n"
-                       "       " __SC  "\t%2, %1                       \n"
-                       "       .set    pop                             \n"
-                       : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
-                       : "ir" (BIT(bit))
-                       : __LLSC_CLOBBER);
-               } while (unlikely(!res));
-
-               res = (temp & BIT(bit)) != 0;
+               orig = __test_bit_op(*m, "%0",
+                                    "xor\t%1, %0, %3",
+                                    "ir"(BIT(bit)));
+               res = (orig & BIT(bit)) != 0;
        }
 
        smp_llsc_mb();
@@ -432,6 +288,9 @@ static inline int test_and_change_bit(unsigned long nr,
        return res;
 }
 
+#undef __bit_op
+#undef __test_bit_op
+
 #include <asm-generic/bitops/non-atomic.h>
 
 /*
-- 
2.23.0

Reply via email to