On 17/10/2025 11:09 am, Thomas Gleixner wrote:
> @@ -86,21 +79,19 @@ static inline int futex_atomic_cmpxchg_i
>  {
>       int ret = 0;
>  
> -     if (can_do_masked_user_access())
> -             uaddr = masked_user_access_begin(uaddr);
> -     else if (!user_access_begin(uaddr, sizeof(u32)))
> -             return -EFAULT;
> -     asm volatile("\n"
> -             "1:\t" LOCK_PREFIX "cmpxchgl %3, %2\n"
> -             "2:\n"
> -             _ASM_EXTABLE_TYPE_REG(1b, 2b, EX_TYPE_EFAULT_REG, %0) \
> -             : "+r" (ret), "=a" (oldval), "+m" (*uaddr)
> -             : "r" (newval), "1" (oldval)
> -             : "memory"
> -     );
> -     user_access_end();
> -     *uval = oldval;
> +     scoped_masked_user_rw_access(uaddr, Efault) {
> +             asm volatile("\n"
> +                          "1:\t" LOCK_PREFIX "cmpxchgl %3, %2\n"
> +                          "2:\n"
> +                          _ASM_EXTABLE_TYPE_REG(1b, 2b, EX_TYPE_EFAULT_REG, %0) \
> +                          : "+r" (ret), "=a" (oldval), "+m" (*uaddr)
> +                          : "r" (newval), "1" (oldval)
> +                          : "memory");

Minor points, but as you're rewriting this anyway, it wants to be
asm_inline volatile so that the compiler's inlining heuristics treat the
asm block as minimal size rather than estimating it by line count.

There's also a useless line continuation at the end of the
_ASM_EXTABLE_TYPE_REG() line which can be dropped.
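
Put together, the hunk would then read something like this (just a
sketch of those two tweaks applied on top of the quoted patch, not
compile-tested):

	scoped_masked_user_rw_access(uaddr, Efault) {
		/* asm_inline: the inliner treats this asm as minimal size */
		asm_inline volatile("\n"
			     "1:\t" LOCK_PREFIX "cmpxchgl %3, %2\n"
			     "2:\n"
			     /* no trailing backslash needed after the macro */
			     _ASM_EXTABLE_TYPE_REG(1b, 2b, EX_TYPE_EFAULT_REG, %0)
			     : "+r" (ret), "=a" (oldval), "+m" (*uaddr)
			     : "r" (newval), "1" (oldval)
			     : "memory");
	}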

~Andrew
