author    Mark Rutland <mark.rutland@arm.com>    2021-10-19 17:02:17 +0100
committer Will Deacon <will@kernel.org>          2021-10-21 10:45:22 +0100
commit    2e77a62cb3a6d2eb9dd875516411bcd131dd04e7 (patch)
tree      28d8d52dda0e34aa9ed5251b6ccfcc5e0d88a182 /arch/arm64/include
parent    d6e2cc56477538255160ed02fdb11b0da60356cc (diff)
arm64: extable: add a dedicated uaccess handler
For inline assembly, we place exception fixups out-of-line in the
`.fixup` section such that these are out of the way of the fast path.

This has a few drawbacks:

* Since the fixup code is anonymous, backtraces will symbolize fixups
  as offsets from the nearest prior symbol, currently
  `__entry_tramp_text_end`. This is confusing, and painful to debug
  without access to the relevant vmlinux.

* Since the exception handler adjusts the PC to execute the fixup, and
  the fixup uses a direct branch back into the function it fixes,
  backtraces of fixups miss the original function. This is confusing,
  and violates requirements for RELIABLE_STACKTRACE (and therefore
  LIVEPATCH).

* Inline assembly and associated fixups are generated from templates,
  and we have many copies of logically identical fixups which differ
  only in which specific registers are written to and which address
  is branched to at the end of the fixup. This is potentially wasteful
  of I-cache resources, and makes it hard to add additional logic to
  fixups without significant bloat.

This patch addresses all three concerns for inline uaccess fixups by
adding a dedicated exception handler which updates registers in
exception context and subsequently returns to the function which
faulted, removing the need for fixups specialized to each faulting
instruction.

Other than backtracing, there should be no functional change as a
result of this patch.

Signed-off-by: Mark Rutland <mark.rutland@arm.com>
Reviewed-by: Ard Biesheuvel <ardb@kernel.org>
Cc: Catalin Marinas <catalin.marinas@arm.com>
Cc: James Morse <james.morse@arm.com>
Cc: Robin Murphy <robin.murphy@arm.com>
Cc: Will Deacon <will@kernel.org>
Link: https://lore.kernel.org/r/20211019160219.5202-12-mark.rutland@arm.com
Signed-off-by: Will Deacon <will@kernel.org>
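The handler half of this change lives in arch/arm64/mm/extable.c and so
does not appear in the include-only diffstat below. A minimal sketch of
its shape, assuming the EX_DATA_REG_* fields added in this patch and a
get_ex_fixup() helper from earlier in the series:

    /*
     * Sketch: decode the err/zero GPR numbers from the extable data
     * word, write -EFAULT and 0 into those registers in the exception
     * context, then resume at the in-line continuation label.
     */
    static bool
    ex_handler_uaccess_err_zero(const struct exception_table_entry *ex,
                                struct pt_regs *regs)
    {
            int reg_err = FIELD_GET(EX_DATA_REG_ERR, ex->data);
            int reg_zero = FIELD_GET(EX_DATA_REG_ZERO, ex->data);

            pt_regs_write_reg(regs, reg_err, -EFAULT);
            pt_regs_write_reg(regs, reg_zero, 0); /* no-op when zero is wzr */

            regs->pc = get_ex_fixup(ex); /* return into the faulting function */
            return true;
    }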
Diffstat (limited to 'arch/arm64/include')
-rw-r--r--  arch/arm64/include/asm/asm-extable.h  24
-rw-r--r--  arch/arm64/include/asm/futex.h        25
-rw-r--r--  arch/arm64/include/asm/uaccess.h      19
3 files changed, 36 insertions(+), 32 deletions(-)
diff --git a/arch/arm64/include/asm/asm-extable.h b/arch/arm64/include/asm/asm-extable.h
index 5ee748edaef1..11209da19c62 100644
--- a/arch/arm64/include/asm/asm-extable.h
+++ b/arch/arm64/include/asm/asm-extable.h
@@ -5,6 +5,7 @@
#define EX_TYPE_NONE 0
#define EX_TYPE_FIXUP 1
#define EX_TYPE_BPF 2
+#define EX_TYPE_UACCESS_ERR_ZERO 3
#ifdef __ASSEMBLY__
@@ -37,8 +38,11 @@
#else /* __ASSEMBLY__ */
+#include <linux/bits.h>
#include <linux/stringify.h>
+#include <asm/gpr-num.h>
+
#define __ASM_EXTABLE_RAW(insn, fixup, type, data) \
".pushsection __ex_table, \"a\"\n" \
".align 2\n" \
@@ -51,6 +55,26 @@
#define _ASM_EXTABLE(insn, fixup) \
__ASM_EXTABLE_RAW(#insn, #fixup, __stringify(EX_TYPE_FIXUP), "0")
+#define EX_DATA_REG_ERR_SHIFT 0
+#define EX_DATA_REG_ERR GENMASK(4, 0)
+#define EX_DATA_REG_ZERO_SHIFT 5
+#define EX_DATA_REG_ZERO GENMASK(9, 5)
+
+#define EX_DATA_REG(reg, gpr) \
+ "((.L__gpr_num_" #gpr ") << " __stringify(EX_DATA_REG_##reg##_SHIFT) ")"
+
+#define _ASM_EXTABLE_UACCESS_ERR_ZERO(insn, fixup, err, zero) \
+ __DEFINE_ASM_GPR_NUMS \
+ __ASM_EXTABLE_RAW(#insn, #fixup, \
+ __stringify(EX_TYPE_UACCESS_ERR_ZERO), \
+ "(" \
+ EX_DATA_REG(ERR, err) " | " \
+ EX_DATA_REG(ZERO, zero) \
+ ")")
+
+#define _ASM_EXTABLE_UACCESS_ERR(insn, fixup, err) \
+ _ASM_EXTABLE_UACCESS_ERR_ZERO(insn, fixup, err, wzr)
+
#endif /* __ASSEMBLY__ */
#endif /* __ASM_ASM_EXTABLE_H */
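To make the encoding concrete: for err = %w0 and zero = %w1,
_ASM_EXTABLE_UACCESS_ERR_ZERO(1b, 2b, %w0, %w1) pushes a single entry
into __ex_table. Hand-expanding the macros gives roughly the following
(a sketch; the type/data tail of __ASM_EXTABLE_RAW is elided in the
hunk above):

    .pushsection __ex_table, "a"
    .align 2
    .long (1b - .)              // faulting instruction (PC-relative)
    .long (2b - .)              // "fixup": the in-line continuation label
    .short 3                    // EX_TYPE_UACCESS_ERR_ZERO
    .short (0 << 0) | (1 << 5)  // err = w0 (gpr 0), zero = w1 (gpr 1)
    .popsection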
diff --git a/arch/arm64/include/asm/futex.h b/arch/arm64/include/asm/futex.h
index 8e41faa37c69..bc06691d2062 100644
--- a/arch/arm64/include/asm/futex.h
+++ b/arch/arm64/include/asm/futex.h
@@ -25,19 +25,14 @@ do { \
" cbz %w0, 3f\n" \
" sub %w4, %w4, %w0\n" \
" cbnz %w4, 1b\n" \
-" mov %w0, %w7\n" \
+" mov %w0, %w6\n" \
"3:\n" \
" dmb ish\n" \
-" .pushsection .fixup,\"ax\"\n" \
-" .align 2\n" \
-"4: mov %w0, %w6\n" \
-" b 3b\n" \
-" .popsection\n" \
- _ASM_EXTABLE(1b, 4b) \
- _ASM_EXTABLE(2b, 4b) \
+ _ASM_EXTABLE_UACCESS_ERR(1b, 3b, %w0) \
+ _ASM_EXTABLE_UACCESS_ERR(2b, 3b, %w0) \
: "=&r" (ret), "=&r" (oldval), "+Q" (*uaddr), "=&r" (tmp), \
"+r" (loops) \
- : "r" (oparg), "Ir" (-EFAULT), "Ir" (-EAGAIN) \
+ : "r" (oparg), "Ir" (-EAGAIN) \
: "memory"); \
uaccess_disable_privileged(); \
} while (0)
@@ -105,18 +100,14 @@ futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *_uaddr,
" cbz %w3, 3f\n"
" sub %w4, %w4, %w3\n"
" cbnz %w4, 1b\n"
-" mov %w0, %w8\n"
+" mov %w0, %w7\n"
"3:\n"
" dmb ish\n"
"4:\n"
-" .pushsection .fixup,\"ax\"\n"
-"5: mov %w0, %w7\n"
-" b 4b\n"
-" .popsection\n"
- _ASM_EXTABLE(1b, 5b)
- _ASM_EXTABLE(2b, 5b)
+ _ASM_EXTABLE_UACCESS_ERR(1b, 4b, %w0)
+ _ASM_EXTABLE_UACCESS_ERR(2b, 4b, %w0)
: "+r" (ret), "=&r" (val), "+Q" (*uaddr), "=&r" (tmp), "+r" (loops)
- : "r" (oldval), "r" (newval), "Ir" (-EFAULT), "Ir" (-EAGAIN)
+ : "r" (oldval), "r" (newval), "Ir" (-EAGAIN)
: "memory");
uaccess_disable_privileged();
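The net effect on both futex loops: the out-of-line fixup stubs and the
-EFAULT immediate operand disappear, and a fault resumes directly at the
normal exit. An illustrative trace of the fault path (not literal code):

    1: ldxr %w1, [%2]        // faults on a bad user address
       -> exception: the uaccess handler reads the extable entry,
          writes -EFAULT into %w0 (the encoded err register),
          and points the pc at label 3
    3: dmb ish               // execution resumes in-line with
                             // ret already set to -EFAULT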
diff --git a/arch/arm64/include/asm/uaccess.h b/arch/arm64/include/asm/uaccess.h
index 759019523b85..9bc218991c5a 100644
--- a/arch/arm64/include/asm/uaccess.h
+++ b/arch/arm64/include/asm/uaccess.h
@@ -255,15 +255,9 @@ static inline void __user *__uaccess_mask_ptr(const void __user *ptr)
asm volatile( \
"1: " load " " reg "1, [%2]\n" \
"2:\n" \
- " .section .fixup, \"ax\"\n" \
- " .align 2\n" \
- "3: mov %w0, %3\n" \
- " mov %1, #0\n" \
- " b 2b\n" \
- " .previous\n" \
- _ASM_EXTABLE(1b, 3b) \
+ _ASM_EXTABLE_UACCESS_ERR_ZERO(1b, 2b, %w0, %w1) \
: "+r" (err), "=&r" (x) \
- : "r" (addr), "i" (-EFAULT))
+ : "r" (addr))
#define __raw_get_mem(ldr, x, ptr, err) \
do { \
@@ -332,14 +326,9 @@ do { \
asm volatile( \
"1: " store " " reg "1, [%2]\n" \
"2:\n" \
- " .section .fixup,\"ax\"\n" \
- " .align 2\n" \
- "3: mov %w0, %3\n" \
- " b 2b\n" \
- " .previous\n" \
- _ASM_EXTABLE(1b, 3b) \
+ _ASM_EXTABLE_UACCESS_ERR(1b, 2b, %w0) \
: "+r" (err) \
- : "r" (x), "r" (addr), "i" (-EFAULT))
+ : "r" (x), "r" (addr))
#define __raw_put_mem(str, x, ptr, err) \
do { \
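The caller-visible contract of these accessors is unchanged: on a fault,
err receives -EFAULT and the load destination is zeroed, now via the
shared handler rather than per-site fixup code. An illustrative caller,
assuming a hypothetical u32 __user *uptr:

    u32 val;

    if (get_user(val, uptr))    /* faulting user access */
            return -EFAULT;     /* val has been zeroed */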