This is a preparation patch for applying FEAT_LSUI to the
user_swpX operations.
To do this, convert the user_swpX macros into inline functions.
No functional change.

Signed-off-by: Yeoreum Yun <[email protected]>
---
 arch/arm64/kernel/armv8_deprecated.c | 38 +++++++++++++++++-----------
 1 file changed, 23 insertions(+), 15 deletions(-)

diff --git a/arch/arm64/kernel/armv8_deprecated.c b/arch/arm64/kernel/armv8_deprecated.c
index e737c6295ec7..d15e35f1075c 100644
--- a/arch/arm64/kernel/armv8_deprecated.c
+++ b/arch/arm64/kernel/armv8_deprecated.c
@@ -93,13 +93,18 @@ static unsigned int __maybe_unused aarch32_check_condition(u32 opcode, u32 psr)
 /* Arbitrary constant to ensure forward-progress of the LL/SC loop */
 #define __SWP_LL_SC_LOOPS      4
 
-#define __user_swpX_asm(data, addr, res, temp, temp2, B)       \
-do {                                                           \
+#define LLSC_USER_SWPX(B)                                      \
+static __always_inline int                                     \
+__llsc_user_swp##B##_asm(unsigned int *data, unsigned int addr)        \
+{                                                              \
+       int err = 0;                                            \
+       unsigned int temp, temp2;                               \
+                                                               \
        uaccess_enable_privileged();                            \
        __asm__ __volatile__(                                   \
        "       mov             %w3, %w6\n"                     \
-       "0:     ldxr"B"         %w2, [%4]\n"                    \
-       "1:     stxr"B"         %w0, %w1, [%4]\n"               \
+       "0:     ldxr"#B"        %w2, [%4]\n"                    \
+       "1:     stxr"#B"        %w0, %w1, [%4]\n"               \
        "       cbz             %w0, 2f\n"                      \
        "       sub             %w3, %w3, #1\n"                 \
        "       cbnz            %w3, 0b\n"                      \
@@ -110,17 +115,22 @@ do {                                                           \
        "3:\n"                                                  \
        _ASM_EXTABLE_UACCESS_ERR(0b, 3b, %w0)                   \
        _ASM_EXTABLE_UACCESS_ERR(1b, 3b, %w0)                   \
-       : "=&r" (res), "+r" (data), "=&r" (temp), "=&r" (temp2) \
+       : "=&r" (err), "+r" (*data), "=&r" (temp), "=&r" (temp2)\
        : "r" ((unsigned long)addr), "i" (-EAGAIN),             \
          "i" (__SWP_LL_SC_LOOPS)                               \
        : "memory");                                            \
        uaccess_disable_privileged();                           \
-} while (0)
+                                                               \
+       return err;                                             \
+}
+
+LLSC_USER_SWPX()
+LLSC_USER_SWPX(b)
 
-#define __user_swp_asm(data, addr, res, temp, temp2) \
-       __user_swpX_asm(data, addr, res, temp, temp2, "")
-#define __user_swpb_asm(data, addr, res, temp, temp2) \
-       __user_swpX_asm(data, addr, res, temp, temp2, "b")
+#define __user_swp_asm(data, addr) \
+       __llsc_user_swp_asm(data, addr)
+#define __user_swpb_asm(data, addr) \
+       __llsc_user_swpb_asm(data, addr)
 
 /*
  * Bit 22 of the instruction encoding distinguishes between
@@ -131,7 +141,7 @@ do {                                                           \
 static int emulate_swpX(unsigned int address, unsigned int *data,
                        unsigned int type)
 {
-       unsigned int res = 0;
+       unsigned int res;
 
        if ((type != TYPE_SWPB) && (address & 0x3)) {
                /* SWP to unaligned address not permitted */
@@ -140,12 +150,10 @@ static int emulate_swpX(unsigned int address, unsigned int *data,
        }
 
        while (1) {
-               unsigned long temp, temp2;
-
                if (type == TYPE_SWPB)
-                       __user_swpb_asm(*data, address, res, temp, temp2);
+                       res = __user_swpb_asm(data, address);
                else
-                       __user_swp_asm(*data, address, res, temp, temp2);
+                       res = __user_swp_asm(data, address);
 
                if (likely(res != -EAGAIN) || signal_pending(current))
                        break;
-- 
LEVI:{C3F47F37-75D8-414A-A8BA-3980EC8A46D7}


Reply via email to