Use the new cmpxchg_emu_u8() to emulate one-byte cmpxchg() on csky.

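For context, cmpxchg_emu_u8() emulates a one-byte cmpxchg() by doing a
32-bit compare-and-swap on the aligned word containing the target byte.
The fragment below is only a minimal user-space sketch of that technique,
not the lib/cmpxchg-emu.c implementation: it assumes little-endian byte
placement, uses GCC's __atomic builtins in place of the kernel's
cmpxchg(), and the emu_cmpxchg_u8() name is purely illustrative.

  #include <stdint.h>

  /* Sketch only: one-byte cmpxchg emulated via a CAS on the containing
   * 32-bit word.  Little-endian byte placement assumed.
   */
  static uint8_t emu_cmpxchg_u8(volatile uint8_t *p, uint8_t oldv, uint8_t newv)
  {
          uintptr_t addr = (uintptr_t)p;
          volatile uint32_t *word = (volatile uint32_t *)(addr & ~(uintptr_t)3);
          unsigned int shift = (addr & 3) * 8;   /* byte offset within the word */
          uint32_t mask = 0xffu << shift;
          uint32_t old32, new32;

          for (;;) {
                  old32 = *word;
                  /* Fail like cmpxchg(): return the byte's current value. */
                  if (((old32 & mask) >> shift) != oldv)
                          return (old32 & mask) >> shift;
                  new32 = (old32 & ~mask) | ((uint32_t)newv << shift);
                  if (__atomic_compare_exchange_n(word, &old32, new32, 0,
                                                  __ATOMIC_SEQ_CST,
                                                  __ATOMIC_SEQ_CST))
                          return oldv;
                  /* Lost a race on the containing word; retry. */
          }
  }

Note that a concurrent change to some other byte in the same word only
forces a retry; it never causes a spurious failure of the byte comparison.
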
[ paulmck: Apply kernel test robot feedback. ]
[ paulmck: Drop two-byte support per Arnd Bergmann feedback. ]

Co-developed-by: Yujie Liu <[email protected]>
Signed-off-by: Yujie Liu <[email protected]>
Signed-off-by: Paul E. McKenney <[email protected]>
Tested-by: Yujie Liu <[email protected]>
Cc: Guo Ren <[email protected]>
Cc: Arnd Bergmann <[email protected]>
Cc: <[email protected]>
---
 arch/csky/Kconfig               |  1 +
 arch/csky/include/asm/cmpxchg.h | 10 ++++++++++
 2 files changed, 11 insertions(+)

diff --git a/arch/csky/Kconfig b/arch/csky/Kconfig
index d3ac36751ad1f..5479707eb5d10 100644
--- a/arch/csky/Kconfig
+++ b/arch/csky/Kconfig
@@ -37,6 +37,7 @@ config CSKY
        select ARCH_INLINE_SPIN_UNLOCK_BH if !PREEMPTION
        select ARCH_INLINE_SPIN_UNLOCK_IRQ if !PREEMPTION
        select ARCH_INLINE_SPIN_UNLOCK_IRQRESTORE if !PREEMPTION
+       select ARCH_NEED_CMPXCHG_1_EMU
        select ARCH_WANT_FRAME_POINTERS if !CPU_CK610 && $(cc-option,-mbacktrace)
        select ARCH_WANT_DEFAULT_TOPDOWN_MMAP_LAYOUT
        select COMMON_CLK
diff --git a/arch/csky/include/asm/cmpxchg.h b/arch/csky/include/asm/cmpxchg.h
index 916043b845f14..db6dda47184e4 100644
--- a/arch/csky/include/asm/cmpxchg.h
+++ b/arch/csky/include/asm/cmpxchg.h
@@ -6,6 +6,7 @@
 #ifdef CONFIG_SMP
 #include <linux/bug.h>
 #include <asm/barrier.h>
+#include <linux/cmpxchg-emu.h>
 
 #define __xchg_relaxed(new, ptr, size)                         \
 ({                                                             \
@@ -61,6 +62,9 @@
        __typeof__(old) __old = (old);                          \
        __typeof__(*(ptr)) __ret;                               \
        switch (size) {                                         \
+       case 1:                                                 \
+               __ret = (__typeof__(*(ptr)))cmpxchg_emu_u8((volatile u8 *)__ptr, (uintptr_t)__old, (uintptr_t)__new); \
+               break;                                          \
        case 4:                                                 \
                asm volatile (                                  \
                "1:     ldex.w          %0, (%3) \n"            \
@@ -91,6 +95,9 @@
        __typeof__(old) __old = (old);                          \
        __typeof__(*(ptr)) __ret;                               \
        switch (size) {                                         \
+       case 1:                                                 \
+               __ret = (__typeof__(*(ptr)))cmpxchg_emu_u8((volatile u8 *)__ptr, (uintptr_t)__old, (uintptr_t)__new); \
+               break;                                          \
        case 4:                                                 \
                asm volatile (                                  \
                "1:     ldex.w          %0, (%3) \n"            \
@@ -122,6 +129,9 @@
        __typeof__(old) __old = (old);                          \
        __typeof__(*(ptr)) __ret;                               \
        switch (size) {                                         \
+       case 1:                                                 \
+               __ret = (__typeof__(*(ptr)))cmpxchg_emu_u8((volatile u8 *)__ptr, (uintptr_t)__old, (uintptr_t)__new); \
+               break;                                          \
        case 4:                                                 \
                asm volatile (                                  \
                RELEASE_FENCE                                   \
-- 
2.40.1

