Refactor the futex atomic operations into per-operation LL/SC helpers
which clear PSTATE.PAN (via uaccess_enable_privileged()) around the
exclusive sequences, and route the existing entry points through thin
wrappers. This prepares for applying FEAT_LSUI to them, so a later
patch can select between the LL/SC and LSUI implementations in one
place.

No functional change intended.
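
For example, LLSC_FUTEX_ATOMIC_OP(add, "add %w3, %w1, %w5") generates a
helper of roughly the following shape (a sketch, not a literal
preprocessor expansion; the asm loop is elided):

  static __always_inline int
  __llsc_futex_atomic_add(int oparg, u32 __user *uaddr, int *oval)
  {
          unsigned int loops = FUTEX_MAX_LOOPS;
          int ret, oldval, tmp;

          uaccess_enable_privileged();
          /* LL/SC loop: "ldxr; add %w3, %w1, %w5; stlxr", retried up
           * to FUTEX_MAX_LOOPS times, then failing with -EAGAIN. */
          ...
          uaccess_disable_privileged();

          if (!ret)
                  *oval = oldval;

          return ret;
  }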

Signed-off-by: Yeoreum Yun <[email protected]>
Reviewed-by: Catalin Marinas <[email protected]>
---
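Note (not part of the patch): these per-op helpers are reached from
userspace through FUTEX_WAKE_OP. A minimal sketch for exercising the
FUTEX_OP_ADD path, assuming the raw syscall(2) wrapper and the
FUTEX_OP() encoding from <linux/futex.h>:

  #include <linux/futex.h>
  #include <stdint.h>
  #include <stdio.h>
  #include <sys/syscall.h>
  #include <unistd.h>

  static uint32_t f1, f2;

  int main(void)
  {
          /* Wake up to 1 waiter on f1, atomically add 1 to f2 (via
           * arch_futex_atomic_op_inuser(FUTEX_OP_ADD)), and wake up to
           * 1 waiter on f2 if f2's old value was > 0. */
          long ret = syscall(SYS_futex, &f1, FUTEX_WAKE_OP, 1, 1, &f2,
                             FUTEX_OP(FUTEX_OP_ADD, 1, FUTEX_OP_CMP_GT, 0));

          if (ret < 0)
                  perror("futex");
          printf("f2 = %u\n", f2);        /* expect 1 */
          return 0;
  }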
 arch/arm64/include/asm/futex.h | 128 +++++++++++++++++++++------------
 1 file changed, 82 insertions(+), 46 deletions(-)

diff --git a/arch/arm64/include/asm/futex.h b/arch/arm64/include/asm/futex.h
index bc06691d2062..f8cb674bdb3f 100644
--- a/arch/arm64/include/asm/futex.h
+++ b/arch/arm64/include/asm/futex.h
@@ -7,17 +7,21 @@
 
 #include <linux/futex.h>
 #include <linux/uaccess.h>
+#include <linux/stringify.h>
 
 #include <asm/errno.h>
 
 #define FUTEX_MAX_LOOPS        128 /* What's the largest number you can think of? */
 
-#define __futex_atomic_op(insn, ret, oldval, uaddr, tmp, oparg)                \
-do {                                                                   \
+#define LLSC_FUTEX_ATOMIC_OP(op, insn)                                 \
+static __always_inline int                                             \
+__llsc_futex_atomic_##op(int oparg, u32 __user *uaddr, int *oval)      \
+{                                                                      \
        unsigned int loops = FUTEX_MAX_LOOPS;                           \
+       int ret, oldval, tmp;                                           \
                                                                        \
        uaccess_enable_privileged();                                    \
-       asm volatile(                                                   \
+       asm volatile("// __llsc_futex_atomic_" #op "\n"                 \
 "      prfm    pstl1strm, %2\n"                                        \
 "1:    ldxr    %w1, %2\n"                                              \
        insn "\n"                                                       \
@@ -35,45 +39,103 @@ do {
        : "r" (oparg), "Ir" (-EAGAIN)                                   \
        : "memory");                                                    \
        uaccess_disable_privileged();                                   \
-} while (0)
+                                                                       \
+       if (!ret)                                                       \
+               *oval = oldval;                                         \
+                                                                       \
+       return ret;                                                     \
+}
+
+LLSC_FUTEX_ATOMIC_OP(add, "add %w3, %w1, %w5")
+LLSC_FUTEX_ATOMIC_OP(or,  "orr %w3, %w1, %w5")
+LLSC_FUTEX_ATOMIC_OP(and, "and %w3, %w1, %w5")
+LLSC_FUTEX_ATOMIC_OP(eor, "eor %w3, %w1, %w5")
+LLSC_FUTEX_ATOMIC_OP(set, "mov %w3, %w5")
+
+static __always_inline int
+__llsc_futex_cmpxchg(u32 __user *uaddr, u32 oldval, u32 newval, u32 *oval)
+{
+       int ret = 0;
+       unsigned int loops = FUTEX_MAX_LOOPS;
+       u32 val, tmp;
+
+       uaccess_enable_privileged();
+       asm volatile("//__llsc_futex_cmpxchg\n"
+"      prfm    pstl1strm, %2\n"
+"1:    ldxr    %w1, %2\n"
+"      eor     %w3, %w1, %w5\n"
+"      cbnz    %w3, 4f\n"
+"2:    stlxr   %w3, %w6, %2\n"
+"      cbz     %w3, 3f\n"
+"      sub     %w4, %w4, %w3\n"
+"      cbnz    %w4, 1b\n"
+"      mov     %w0, %w7\n"
+"3:\n"
+"      dmb     ish\n"
+"4:\n"
+       _ASM_EXTABLE_UACCESS_ERR(1b, 4b, %w0)
+       _ASM_EXTABLE_UACCESS_ERR(2b, 4b, %w0)
+       : "+r" (ret), "=&r" (val), "+Q" (*uaddr), "=&r" (tmp), "+r" (loops)
+       : "r" (oldval), "r" (newval), "Ir" (-EAGAIN)
+       : "memory");
+       uaccess_disable_privileged();
+
+       if (!ret)
+               *oval = val;
+
+       return ret;
+}
+
+#define FUTEX_ATOMIC_OP(op)                                            \
+static __always_inline int                                             \
+__futex_atomic_##op(int oparg, u32 __user *uaddr, int *oval)           \
+{                                                                      \
+       return __llsc_futex_atomic_##op(oparg, uaddr, oval);            \
+}
+
+FUTEX_ATOMIC_OP(add)
+FUTEX_ATOMIC_OP(or)
+FUTEX_ATOMIC_OP(and)
+FUTEX_ATOMIC_OP(eor)
+FUTEX_ATOMIC_OP(set)
+
+static __always_inline int
+__futex_cmpxchg(u32 __user *uaddr, u32 oldval, u32 newval, u32 *oval)
+{
+       return __llsc_futex_cmpxchg(uaddr, oldval, newval, oval);
+}
 
 static inline int
 arch_futex_atomic_op_inuser(int op, int oparg, int *oval, u32 __user *_uaddr)
 {
-       int oldval = 0, ret, tmp;
-       u32 __user *uaddr = __uaccess_mask_ptr(_uaddr);
+       int ret;
+       u32 __user *uaddr;
 
        if (!access_ok(_uaddr, sizeof(u32)))
                return -EFAULT;
 
+       uaddr = __uaccess_mask_ptr(_uaddr);
+
        switch (op) {
        case FUTEX_OP_SET:
-               __futex_atomic_op("mov  %w3, %w5",
-                                 ret, oldval, uaddr, tmp, oparg);
+               ret = __futex_atomic_set(oparg, uaddr, oval);
                break;
        case FUTEX_OP_ADD:
-               __futex_atomic_op("add  %w3, %w1, %w5",
-                                 ret, oldval, uaddr, tmp, oparg);
+               ret = __futex_atomic_add(oparg, uaddr, oval);
                break;
        case FUTEX_OP_OR:
-               __futex_atomic_op("orr  %w3, %w1, %w5",
-                                 ret, oldval, uaddr, tmp, oparg);
+               ret = __futex_atomic_or(oparg, uaddr, oval);
                break;
        case FUTEX_OP_ANDN:
-               __futex_atomic_op("and  %w3, %w1, %w5",
-                                 ret, oldval, uaddr, tmp, ~oparg);
+               ret = __futex_atomic_and(~oparg, uaddr, oval);
                break;
        case FUTEX_OP_XOR:
-               __futex_atomic_op("eor  %w3, %w1, %w5",
-                                 ret, oldval, uaddr, tmp, oparg);
+               ret = __futex_atomic_eor(oparg, uaddr, oval);
                break;
        default:
                ret = -ENOSYS;
        }
 
-       if (!ret)
-               *oval = oldval;
-
        return ret;
 }
 
@@ -81,40 +143,14 @@ static inline int
 futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *_uaddr,
                              u32 oldval, u32 newval)
 {
-       int ret = 0;
-       unsigned int loops = FUTEX_MAX_LOOPS;
-       u32 val, tmp;
        u32 __user *uaddr;
 
        if (!access_ok(_uaddr, sizeof(u32)))
                return -EFAULT;
 
        uaddr = __uaccess_mask_ptr(_uaddr);
-       uaccess_enable_privileged();
-       asm volatile("// futex_atomic_cmpxchg_inatomic\n"
-"      prfm    pstl1strm, %2\n"
-"1:    ldxr    %w1, %2\n"
-"      sub     %w3, %w1, %w5\n"
-"      cbnz    %w3, 4f\n"
-"2:    stlxr   %w3, %w6, %2\n"
-"      cbz     %w3, 3f\n"
-"      sub     %w4, %w4, %w3\n"
-"      cbnz    %w4, 1b\n"
-"      mov     %w0, %w7\n"
-"3:\n"
-"      dmb     ish\n"
-"4:\n"
-       _ASM_EXTABLE_UACCESS_ERR(1b, 4b, %w0)
-       _ASM_EXTABLE_UACCESS_ERR(2b, 4b, %w0)
-       : "+r" (ret), "=&r" (val), "+Q" (*uaddr), "=&r" (tmp), "+r" (loops)
-       : "r" (oldval), "r" (newval), "Ir" (-EAGAIN)
-       : "memory");
-       uaccess_disable_privileged();
-
-       if (!ret)
-               *uval = val;
 
-       return ret;
+       return __futex_cmpxchg(uaddr, oldval, newval, uval);
 }
 
 #endif /* __ASM_FUTEX_H */