author    Andy Lutomirski <luto@kernel.org>	2015-07-03 12:44:29 -0700
committer Ingo Molnar <mingo@kernel.org>	2015-07-07 10:59:07 +0200
commit    ff467594f2a4be01a0fa5e9ffc223fa930d232dd (patch)
tree      fce2220e70feac173d82f3e323494d96d0fbac85 /arch/x86/entry/calling.h
parent    29ea1b258b98a862e59d72556714b75051ae93fb (diff)
x86/asm/entry/64: Save all regs on interrupt entry
To prepare for the big rewrite of the error and interrupt exit paths, we
will need pt_regs completely filled in. It's already completely filled in
when error_exit runs, so rearrange interrupt handling to match it. This
will slow down interrupt handling very slightly (eight instructions), but
the simplification it enables will be more than worth it.

Signed-off-by: Andy Lutomirski <luto@kernel.org>
Cc: Andy Lutomirski <luto@amacapital.net>
Cc: Borislav Petkov <bp@alien8.de>
Cc: Brian Gerst <brgerst@gmail.com>
Cc: Denys Vlasenko <dvlasenk@redhat.com>
Cc: Denys Vlasenko <vda.linux@googlemail.com>
Cc: Frederic Weisbecker <fweisbec@gmail.com>
Cc: H. Peter Anvin <hpa@zytor.com>
Cc: Kees Cook <keescook@chromium.org>
Cc: Linus Torvalds <torvalds@linux-foundation.org>
Cc: Oleg Nesterov <oleg@redhat.com>
Cc: Peter Zijlstra <peterz@infradead.org>
Cc: Rik van Riel <riel@redhat.com>
Cc: Thomas Gleixner <tglx@linutronix.de>
Cc: paulmck@linux.vnet.ibm.com
Link: http://lkml.kernel.org/r/d8a766a7f558b30e6e01352854628a2d9943460c.1435952415.git.luto@kernel.org
Signed-off-by: Ingo Molnar <mingo@kernel.org>
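The rearranged entry path fills in all of pt_regs before any C code runs. A minimal sketch of the resulting pattern, using the real macro names from this file (calling.h) but paraphrasing the entry_64.S side of the commit rather than quoting it:

	/* Sketch only: the macro names are real, the sequence is paraphrased. */
	ALLOC_PT_GPREGS_ON_STACK	/* reserve room for the full pt_regs frame */
	SAVE_C_REGS			/* caller-clobbered: rdi, rsi, rdx, rcx, rax, r8-r11 */
	SAVE_EXTRA_REGS			/* callee-saved: r15-r12, rbp, rbx (previously only rbp) */
	/* ... dispatch to the C handler; pt_regs is now completely filled in ... */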
Diffstat (limited to 'arch/x86/entry/calling.h')
-rw-r--r--  arch/x86/entry/calling.h | 3 ---
 1 file changed, 3 deletions(-)
diff --git a/arch/x86/entry/calling.h b/arch/x86/entry/calling.h
index 519207f2ee76..3c71dd947c7b 100644
--- a/arch/x86/entry/calling.h
+++ b/arch/x86/entry/calling.h
@@ -135,9 +135,6 @@ For 32-bit we have the following conventions - kernel is built with
 	movq %rbp, 4*8+\offset(%rsp)
 	movq %rbx, 5*8+\offset(%rsp)
 	.endm
-	.macro SAVE_EXTRA_REGS_RBP offset=0
-	movq %rbp, 4*8+\offset(%rsp)
-	.endm
 
 	.macro RESTORE_EXTRA_REGS offset=0
 	movq 0*8+\offset(%rsp), %r15
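The hunk only shows the tail of SAVE_EXTRA_REGS and the head of RESTORE_EXTRA_REGS. For reference, the surrounding macro pair in calling.h at this point reads approximately as below; the r15-r12 lines are inferred from the visible offsets and the pt_regs layout, so treat this as a sketch rather than a verbatim quote:

	.macro SAVE_EXTRA_REGS offset=0
	movq %r15, 0*8+\offset(%rsp)	/* callee-saved regs sit at the bottom of pt_regs */
	movq %r14, 1*8+\offset(%rsp)
	movq %r13, 2*8+\offset(%rsp)
	movq %r12, 3*8+\offset(%rsp)
	movq %rbp, 4*8+\offset(%rsp)
	movq %rbx, 5*8+\offset(%rsp)
	.endm

	.macro RESTORE_EXTRA_REGS offset=0
	movq 0*8+\offset(%rsp), %r15	/* mirror of SAVE_EXTRA_REGS: loads instead of stores */
	movq 1*8+\offset(%rsp), %r14
	movq 2*8+\offset(%rsp), %r13
	movq 3*8+\offset(%rsp), %r12
	movq 4*8+\offset(%rsp), %rbp
	movq 5*8+\offset(%rsp), %rbx
	.endm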