author    Ard Biesheuvel <ardb@kernel.org>  2021-10-17 17:23:47 +0200
committer Ard Biesheuvel <ardb@kernel.org>  2021-12-03 15:11:33 +0100
commit    ae5cc07da8f8e4c9802894c05f1d96802b9de5f2
tree      21066f35c5fbc0716918d40eb353576819dba285
parent    b832faec33d4e27c32490c0732098ecf79b222af
ARM: entry: rework stack realignment code in svc_entry
The original Thumb-2 enablement patches updated the stack realignment code in svc_entry to work around the lack of a STMIB instruction in Thumb-2, by subtracting 4 from the frame size, inverting the sense of the misalignment check, and changing to a STMIA instruction and a final stack push of a 4 byte quantity that results in the stack becoming aligned at the end of the sequence. It also pushes and pops R0 to the stack in order to have a temp register that Thumb-2 allows in general purpose ALU instructions, as TST using SP is not permitted.

Both are a bit problematic for vmap'ed stacks, as using the stack is only permitted after we decide that we did not overflow the stack, or have already switched to the overflow stack.

As for the alignment check: the current approach creates a corner case where, if the initial SUB of SP ends up right at the start of the stack, we will end up subtracting another 8 bytes and overflowing it. This means we would need to add the overflow check *after* the SUB that deliberately misaligns the stack. However, this would require us to keep local state (i.e., whether we performed the subtract or not) across the overflow check, but without any GPRs or stack available.

So let's switch to an approach where we don't use the stack, and where the alignment check of the stack pointer occurs in the usual way, as this is guaranteed not to result in overflow. This means we will be able to do the overflow check first.

While at it, switch to R1 so the mode stack pointer in R0 remains accessible.

Acked-by: Nicolas Pitre <nico@fluxnic.net>
Signed-off-by: Ard Biesheuvel <ardb@kernel.org>
Tested-by: Marc Zyngier <maz@kernel.org>
Tested-by: Vladimir Murzin <vladimir.murzin@arm.com> # ARMv7M
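For illustration, the new Thumb-2 sequence from the hunk below can be traced as follows. Assume SP holds S and R1 holds R on entry to the sequence; this is an annotated restatement of the patched code, not an additional change:

	add	sp, r1			@ SP = S + R
	sub	r1, sp, r1		@ R1 = (S + R) - R = S, the original SP
	tst	r1, #4			@ test bit 2 of the original SP
	sub	r1, sp, r1		@ R1 = (S + R) - S = R, original R1 restored
	sub	sp, r1			@ SP = (S + R) - R = S, original SP restored

Only the condition flags set by the TST carry state out of the sequence; no scratch register or stack slot is needed, which is what allows the stack overflow check to be performed first.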
Diffstat (limited to 'arch/arm/kernel/entry-armv.S')
-rw-r--r--  arch/arm/kernel/entry-armv.S | 25
1 file changed, 14 insertions(+), 11 deletions(-)
diff --git a/arch/arm/kernel/entry-armv.S b/arch/arm/kernel/entry-armv.S
index ce8ca29461de..207875ac62ff 100644
--- a/arch/arm/kernel/entry-armv.S
+++ b/arch/arm/kernel/entry-armv.S
@@ -191,24 +191,27 @@ ENDPROC(__und_invalid)
 	.macro	svc_entry, stack_hole=0, trace=1, uaccess=1
  UNWIND(.fnstart		)
  UNWIND(.save {r0 - pc}	)
-	sub	sp, sp, #(SVC_REGS_SIZE + \stack_hole - 4)
+	sub	sp, sp, #(SVC_REGS_SIZE + \stack_hole)
 #ifdef CONFIG_THUMB2_KERNEL
- SPFIX(	str	r0, [sp]	)	@ temporarily saved
- SPFIX(	mov	r0, sp		)
- SPFIX(	tst	r0, #4		)	@ test original stack alignment
- SPFIX(	ldr	r0, [sp]	)	@ restored
+	add	sp, r1			@ get SP in a GPR without
+	sub	r1, sp, r1		@ using a temp register
+	tst	r1, #4			@ test stack pointer alignment
+	sub	r1, sp, r1		@ restore original R1
+	sub	sp, r1			@ restore original SP
 #else
  SPFIX(	tst	sp, #4		)
 #endif
- SPFIX(	subeq	sp, sp, #4	)
-	stmia	sp, {r1 - r12}
+ SPFIX(	subne	sp, sp, #4	)
+
+ ARM(	stmib	sp, {r1 - r12}	)
+ THUMB(	stmia	sp, {r0 - r12}	)	@ No STMIB in Thumb-2
 	ldmia	r0, {r3 - r5}
-	add	r7, sp, #S_SP - 4	@ here for interlock avoidance
+	add	r7, sp, #S_SP		@ here for interlock avoidance
 	mov	r6, #-1			@  ""  ""      ""       ""
-	add	r2, sp, #(SVC_REGS_SIZE + \stack_hole - 4)
- SPFIX(	addeq	r2, r2, #4	)
-	str	r3, [sp, #-4]!		@ save the "real" r0 copied
+	add	r2, sp, #(SVC_REGS_SIZE + \stack_hole)
+ SPFIX(	addne	r2, r2, #4	)
+	str	r3, [sp]		@ save the "real" r0 copied
 					@ from the exception stack
 	mov	r3, lr