Replace the open-coded user access implementation with the scoped user
access guards.

No functional change intended.
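
The guard opens a read/write user access window for uaddr, using the
masked user access fast path where available and falling back to
user_access_begin() otherwise, closes the window on every exit from
the scope and jumps to the supplied label when the window cannot be
opened. A minimal usage sketch, with a made-up helper for illustration,
assuming the scoped uaccess infrastructure from this series:

	static int set_bit0_inuser(u32 __user *uaddr)
	{
		u32 val;

		/* Open the access window; goto Efault if that fails */
		scoped_user_rw_access(uaddr, Efault) {
			unsafe_get_user(val, uaddr, Efault);
			unsafe_put_user(val | 0x1, uaddr, Efault);
		}
		return 0;
	Efault:
		return -EFAULT;
	}

This avoids open coding the user_access_begin()/user_access_end()
pairs on both the success and the fault paths.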

Signed-off-by: Thomas Gleixner <[email protected]>
Cc: [email protected]
---
V4: Rename once more
    Use asm_inline - Andrew
V3: Adapt to scope changes
V2: Convert to scoped masked access
    Use RW access functions - Christophe
---
 arch/x86/include/asm/futex.h |   75 ++++++++++++++++++-------------------------
 1 file changed, 33 insertions(+), 42 deletions(-)
---
--- a/arch/x86/include/asm/futex.h
+++ b/arch/x86/include/asm/futex.h
@@ -46,38 +46,31 @@ do {							\
 } while(0)
 
 static __always_inline int arch_futex_atomic_op_inuser(int op, int oparg, int *oval,
-               u32 __user *uaddr)
+                                                      u32 __user *uaddr)
 {
-       if (can_do_masked_user_access())
-               uaddr = masked_user_access_begin(uaddr);
-       else if (!user_access_begin(uaddr, sizeof(u32)))
-               return -EFAULT;
-
-       switch (op) {
-       case FUTEX_OP_SET:
-               unsafe_atomic_op1("xchgl %0, %2", oval, uaddr, oparg, Efault);
-               break;
-       case FUTEX_OP_ADD:
-               unsafe_atomic_op1(LOCK_PREFIX "xaddl %0, %2", oval,
-                                  uaddr, oparg, Efault);
-               break;
-       case FUTEX_OP_OR:
-               unsafe_atomic_op2("orl %4, %3", oval, uaddr, oparg, Efault);
-               break;
-       case FUTEX_OP_ANDN:
-               unsafe_atomic_op2("andl %4, %3", oval, uaddr, ~oparg, Efault);
-               break;
-       case FUTEX_OP_XOR:
-               unsafe_atomic_op2("xorl %4, %3", oval, uaddr, oparg, Efault);
-               break;
-       default:
-               user_access_end();
-               return -ENOSYS;
+       scoped_user_rw_access(uaddr, Efault) {
+               switch (op) {
+               case FUTEX_OP_SET:
+                       unsafe_atomic_op1("xchgl %0, %2", oval, uaddr, oparg, 
Efault);
+                       break;
+               case FUTEX_OP_ADD:
+                       unsafe_atomic_op1(LOCK_PREFIX "xaddl %0, %2", oval, uaddr, oparg, Efault);
+                       break;
+               case FUTEX_OP_OR:
+                       unsafe_atomic_op2("orl %4, %3", oval, uaddr, oparg, 
Efault);
+                       break;
+               case FUTEX_OP_ANDN:
+                       unsafe_atomic_op2("andl %4, %3", oval, uaddr, ~oparg, 
Efault);
+                       break;
+               case FUTEX_OP_XOR:
+                       unsafe_atomic_op2("xorl %4, %3", oval, uaddr, oparg, 
Efault);
+                       break;
+               default:
+                       return -ENOSYS;
+               }
        }
-       user_access_end();
        return 0;
 Efault:
-       user_access_end();
        return -EFAULT;
 }
 
@@ -86,21 +79,19 @@ static inline int futex_atomic_cmpxchg_i
 {
        int ret = 0;
 
-       if (can_do_masked_user_access())
-               uaddr = masked_user_access_begin(uaddr);
-       else if (!user_access_begin(uaddr, sizeof(u32)))
-               return -EFAULT;
-       asm volatile("\n"
-               "1:\t" LOCK_PREFIX "cmpxchgl %3, %2\n"
-               "2:\n"
-               _ASM_EXTABLE_TYPE_REG(1b, 2b, EX_TYPE_EFAULT_REG, %0) \
-               : "+r" (ret), "=a" (oldval), "+m" (*uaddr)
-               : "r" (newval), "1" (oldval)
-               : "memory"
-       );
-       user_access_end();
-       *uval = oldval;
+       scoped_user_rw_access(uaddr, Efault) {
+               asm_inline volatile("\n"
+                                   "1:\t" LOCK_PREFIX "cmpxchgl %3, %2\n"
+                                   "2:\n"
+                                   _ASM_EXTABLE_TYPE_REG(1b, 2b, EX_TYPE_EFAULT_REG, %0)
+                                   : "+r" (ret), "=a" (oldval), "+m" (*uaddr)
+                                   : "r" (newval), "1" (oldval)
+                                   : "memory");
+               *uval = oldval;
+       }
        return ret;
+Efault:
+       return -EFAULT;
 }
 
 #endif

