author    Ard Biesheuvel <ardb@kernel.org>      2023-08-07 18:26:58 +0200
committer Borislav Petkov (AMD) <bp@alien8.de>  2023-08-07 19:02:06 +0200
commit    264b82fdb4989cf6a44a2bcd0c6ea05e8026b2ac (patch)
tree      29870f8ee7f0c2c26e1c183eba72372284beda12 /arch/x86/boot/compressed
parent    bee6cf1a80b54548a039e224c651bb15b644a480 (diff)
x86/decompressor: Don't rely on upper 32 bits of GPRs being preserved
The 4-to-5 level mode switch trampoline disables long mode and paging in
order to be able to flick the LA57 bit. According to section 3.4.1.1 of
the x86 architecture manual [0], 64-bit GPRs might not retain the upper
32 bits of their contents across such a mode switch.

Given that RBP, RBX and RSI are live at this point, preserve them on the
stack, along with the return address that might be above 4G as well.

[0] Intel® 64 and IA-32 Architectures Software Developer’s Manual,
    Volume 1: Basic Architecture

    "Because the upper 32 bits of 64-bit general-purpose registers are
     undefined in 32-bit modes, the upper 32 bits of any general-purpose
     register are not preserved when switching from 64-bit mode to a
     32-bit mode (to protected mode or compatibility mode). Software
     must not depend on these bits to maintain a value after a 64-bit
     to 32-bit mode switch."

Fixes: 194a9749c73d650c ("x86/boot/compressed/64: Handle 5-level paging boot if kernel is above 4G")
Signed-off-by: Ard Biesheuvel <ardb@kernel.org>
Signed-off-by: Borislav Petkov (AMD) <bp@alien8.de>
Link: https://lore.kernel.org/r/20230807162720.545787-2-ardb@kernel.org
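For orientation, the save/restore pattern the patch adopts looks roughly like
the sketch below. This is illustrative only, not the exact hunk: the labels
mode_switch_sketch, stack_end, compat_code and back_in_long_mode are made up
for the example, while __KERNEL32_CS is the kernel's 32-bit code segment
selector. Everything that must survive the 64-bit -> 32-bit -> 64-bit round
trip is pushed onto a 32-bit addressable stack, and the trampoline later
returns with retq, popping the saved 64-bit return address.

	.code64
mode_switch_sketch:
	/* RCX holds the 32-bit addressable trampoline base, as in head_64.S */
	leaq	stack_end(%rcx), %rsp		/* stack the 32-bit code can reach */

	pushq	%rbp				/* preserve live 64-bit GPRs in memory */
	pushq	%rbx
	pushq	%rsi

	leaq	back_in_long_mode(%rip), %rdi
	pushq	%rdi				/* 64-bit return address, may be above 4G */

	pushq	$__KERNEL32_CS			/* far return into a 32-bit code segment */
	leaq	compat_code(%rip), %rax		/* hypothetical 32-bit trampoline entry */
	pushq	%rax
	lretq					/* CS.L = 0, CS.D = 1 from here on */

back_in_long_mode:				/* reached via retq from the trampoline */
	popq	%rsi				/* full 64-bit values come back from RAM, */
	popq	%rbx				/* not from the (possibly clobbered) GPRs */
	popq	%rbp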
Diffstat (limited to 'arch/x86/boot/compressed')
-rw-r--r--  arch/x86/boot/compressed/head_64.S  30
1 file changed, 23 insertions(+), 7 deletions(-)
diff --git a/arch/x86/boot/compressed/head_64.S b/arch/x86/boot/compressed/head_64.S
index 03c4328a88cb..f732426d3b48 100644
--- a/arch/x86/boot/compressed/head_64.S
+++ b/arch/x86/boot/compressed/head_64.S
@@ -459,11 +459,25 @@ SYM_CODE_START(startup_64)
/* Save the trampoline address in RCX */
movq %rax, %rcx
+ /* Set up 32-bit addressable stack */
+ leaq TRAMPOLINE_32BIT_STACK_END(%rcx), %rsp
+
+ /*
+ * Preserve live 64-bit registers on the stack: this is necessary
+ * because the architecture does not guarantee that GPRs will retain
+ * their full 64-bit values across a 32-bit mode switch.
+ */
+ pushq %rbp
+ pushq %rbx
+ pushq %rsi
+
/*
- * Load the address of trampoline_return() into RDI.
- * It will be used by the trampoline to return to the main code.
+ * Push the 64-bit address of trampoline_return() onto the new stack.
+ * It will be used by the trampoline to return to the main code. Due to
+ the 32-bit mode switch, it cannot be kept in a register either.
*/
leaq trampoline_return(%rip), %rdi
+ pushq %rdi
/* Switch to compatibility mode (CS.L = 0 CS.D = 1) via far return */
pushq $__KERNEL32_CS
@@ -471,6 +485,11 @@ SYM_CODE_START(startup_64)
pushq %rax
lretq
trampoline_return:
+ /* Restore live 64-bit registers */
+ popq %rsi
+ popq %rbx
+ popq %rbp
+
/* Restore the stack, the 32-bit trampoline uses its own stack */
leaq rva(boot_stack_end)(%rbx), %rsp
@@ -582,7 +601,7 @@ SYM_FUNC_END(.Lrelocated)
/*
* This is the 32-bit trampoline that will be copied over to low memory.
*
- * RDI contains the return address (might be above 4G).
+ * Return address is at the top of the stack (might be above 4G).
* ECX contains the base address of the trampoline memory.
* Non zero RDX means trampoline needs to enable 5-level paging.
*/
@@ -592,9 +611,6 @@ SYM_CODE_START(trampoline_32bit_src)
movl %eax, %ds
movl %eax, %ss
- /* Set up new stack */
- leal TRAMPOLINE_32BIT_STACK_END(%ecx), %esp
-
/* Disable paging */
movl %cr0, %eax
btrl $X86_CR0_PG_BIT, %eax
@@ -671,7 +687,7 @@ SYM_CODE_END(trampoline_32bit_src)
.code64
SYM_FUNC_START_LOCAL_NOALIGN(.Lpaging_enabled)
/* Return from the trampoline */
- jmp *%rdi
+ retq
SYM_FUNC_END(.Lpaging_enabled)
/*