author    Andy Lutomirski <luto@mit.edu>    2011-07-13 09:24:10 -0400
committer H. Peter Anvin <hpa@linux.intel.com>    2011-07-13 11:22:56 -0700
commit    59e97e4d6fbcd5b74a94cb48bcbfc6f8478a5e93 (patch)
tree      015fd8e63e1fcd8fdf7a066bd2e09a5636a14449 /arch
parent    c9712944b2a12373cb6ff8059afcfb7e826a6c54 (diff)
x86: Make alternative instruction pointers relative
This saves a few bytes on x86-64 and means that future patches can
apply alternatives to unrelocated code.

Signed-off-by: Andy Lutomirski <luto@mit.edu>
Link: http://lkml.kernel.org/r/ff64a6b9a1a3860ca4a7b8b6dc7b4754f9491cd7.1310563276.git.luto@mit.edu
Signed-off-by: H. Peter Anvin <hpa@linux.intel.com>
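The core idea of the patch: each .altinstructions entry now stores a 32-bit
offset relative to the offset field itself (".long 661b - ." instead of an
absolute _ASM_PTR), so the table needs no relocation and an entry resolves to
an absolute address with one addition. A minimal user-space sketch of the
idiom follows; the names (rel_entry, rel_resolve) are illustrative, not
kernel code:

    #include <stdint.h>
    #include <stdio.h>

    struct rel_entry {
            int32_t target_offset;  /* target minus the address of this field */
    };

    static char some_code[16];      /* stand-in for the original instruction */
    static struct rel_entry entry;  /* both objects live in the same image,
                                       so the 32-bit delta cannot overflow  */

    /* Resolve the self-relative offset back to an absolute pointer,
     * the same arithmetic apply_alternatives() now performs. */
    static void *rel_resolve(struct rel_entry *e)
    {
            return (uint8_t *)&e->target_offset + e->target_offset;
    }

    int main(void)
    {
            /* Emit side, like ".long 661b - .": target relative to the field. */
            entry.target_offset = (int32_t)((intptr_t)some_code -
                                            (intptr_t)&entry.target_offset);

            printf("target  : %p\n", (void *)some_code);
            printf("resolved: %p\n", rel_resolve(&entry));
            return 0;
    }

The same property the sketch relies on (emitter and target within a 32-bit
span) holds in the kernel, where text and data fit within ±2 GiB under the
x86-64 kernel code model; it also halves each pointer field from 8 to 4
bytes, which is where the size saving comes from.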
Diffstat (limited to 'arch')
-rw-r--r--    arch/x86/include/asm/alternative-asm.h    4
-rw-r--r--    arch/x86/include/asm/alternative.h    8
-rw-r--r--    arch/x86/include/asm/cpufeature.h    8
-rw-r--r--    arch/x86/kernel/alternative.c    21
-rw-r--r--    arch/x86/lib/copy_page_64.S    9
-rw-r--r--    arch/x86/lib/memmove_64.S    11
6 files changed, 31 insertions(+), 30 deletions(-)
diff --git a/arch/x86/include/asm/alternative-asm.h b/arch/x86/include/asm/alternative-asm.h
index 94d420b360d1..4554cc6fb96a 100644
--- a/arch/x86/include/asm/alternative-asm.h
+++ b/arch/x86/include/asm/alternative-asm.h
@@ -17,8 +17,8 @@
.macro altinstruction_entry orig alt feature orig_len alt_len
.align 8
- .quad \orig
- .quad \alt
+ .long \orig - .
+ .long \alt - .
.word \feature
.byte \orig_len
.byte \alt_len
diff --git a/arch/x86/include/asm/alternative.h b/arch/x86/include/asm/alternative.h
index bf535f947e8c..23fb6d79f209 100644
--- a/arch/x86/include/asm/alternative.h
+++ b/arch/x86/include/asm/alternative.h
@@ -43,8 +43,8 @@
#endif
struct alt_instr {
- u8 *instr; /* original instruction */
- u8 *replacement;
+ s32 instr_offset; /* original instruction */
+ s32 repl_offset; /* offset to replacement instruction */
u16 cpuid; /* cpuid bit set for replacement */
u8 instrlen; /* length of original instruction */
u8 replacementlen; /* length of new instruction, <= instrlen */
@@ -84,8 +84,8 @@ static inline int alternatives_text_reserved(void *start, void *end)
"661:\n\t" oldinstr "\n662:\n" \
".section .altinstructions,\"a\"\n" \
_ASM_ALIGN "\n" \
- _ASM_PTR "661b\n" /* label */ \
- _ASM_PTR "663f\n" /* new instruction */ \
+ " .long 661b - .\n" /* label */ \
+ " .long 663f - .\n" /* new instruction */ \
" .word " __stringify(feature) "\n" /* feature bit */ \
" .byte 662b-661b\n" /* sourcelen */ \
" .byte 664f-663f\n" /* replacementlen */ \
diff --git a/arch/x86/include/asm/cpufeature.h b/arch/x86/include/asm/cpufeature.h
index 71cc3800712c..9929b35929ff 100644
--- a/arch/x86/include/asm/cpufeature.h
+++ b/arch/x86/include/asm/cpufeature.h
@@ -331,8 +331,8 @@ static __always_inline __pure bool __static_cpu_has(u16 bit)
"2:\n"
".section .altinstructions,\"a\"\n"
_ASM_ALIGN "\n"
- _ASM_PTR "1b\n"
- _ASM_PTR "0\n" /* no replacement */
+ " .long 1b - .\n"
+ " .long 0\n" /* no replacement */
" .word %P0\n" /* feature bit */
" .byte 2b - 1b\n" /* source len */
" .byte 0\n" /* replacement len */
@@ -349,8 +349,8 @@ static __always_inline __pure bool __static_cpu_has(u16 bit)
"2:\n"
".section .altinstructions,\"a\"\n"
_ASM_ALIGN "\n"
- _ASM_PTR "1b\n"
- _ASM_PTR "3f\n"
+ " .long 1b - .\n"
+ " .long 3f - .\n"
" .word %P1\n" /* feature bit */
" .byte 2b - 1b\n" /* source len */
" .byte 4f - 3f\n" /* replacement len */
diff --git a/arch/x86/kernel/alternative.c b/arch/x86/kernel/alternative.c
index a81f2d52f869..ddb207bb5f91 100644
--- a/arch/x86/kernel/alternative.c
+++ b/arch/x86/kernel/alternative.c
@@ -263,6 +263,7 @@ void __init_or_module apply_alternatives(struct alt_instr *start,
struct alt_instr *end)
{
struct alt_instr *a;
+ u8 *instr, *replacement;
u8 insnbuf[MAX_PATCH_LEN];
DPRINTK("%s: alt table %p -> %p\n", __func__, start, end);
@@ -276,25 +277,29 @@ void __init_or_module apply_alternatives(struct alt_instr *start,
* order.
*/
for (a = start; a < end; a++) {
- u8 *instr = a->instr;
+ instr = (u8 *)&a->instr_offset + a->instr_offset;
+ replacement = (u8 *)&a->repl_offset + a->repl_offset;
BUG_ON(a->replacementlen > a->instrlen);
BUG_ON(a->instrlen > sizeof(insnbuf));
BUG_ON(a->cpuid >= NCAPINTS*32);
if (!boot_cpu_has(a->cpuid))
continue;
+
+ memcpy(insnbuf, replacement, a->replacementlen);
+
+ /* 0xe8 is a relative call; fix the offset. */
+ if (*insnbuf == 0xe8 && a->replacementlen == 5)
+ *(s32 *)(insnbuf + 1) += replacement - instr;
+
+ add_nops(insnbuf + a->replacementlen,
+ a->instrlen - a->replacementlen);
+
#ifdef CONFIG_X86_64
/* vsyscall code is not mapped yet. resolve it manually. */
if (instr >= (u8 *)VSYSCALL_START && instr < (u8*)VSYSCALL_END) {
instr = __va(instr - (u8*)VSYSCALL_START + (u8*)__pa_symbol(&__vsyscall_0));
- DPRINTK("%s: vsyscall fixup: %p => %p\n",
- __func__, a->instr, instr);
}
#endif
- memcpy(insnbuf, a->replacement, a->replacementlen);
- if (*insnbuf == 0xe8 && a->replacementlen == 5)
- *(s32 *)(insnbuf + 1) += a->replacement - a->instr;
- add_nops(insnbuf + a->replacementlen,
- a->instrlen - a->replacementlen);
text_poke_early(instr, insnbuf, a->instrlen);
}
}
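The 0xe8 fixup in the hunk above accounts for rel32 encoding: the
displacement in a 5-byte CALL is relative to the end of the instruction, so
bytes copied from 'replacement' to 'instr' need their displacement adjusted
by (replacement - instr) to keep pointing at the same absolute target. A
worked example of the arithmetic with made-up addresses (not kernel code):

    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
            /* Hypothetical addresses for illustration only. */
            uint32_t target      = 0x2000;  /* function the CALL must reach */
            uint32_t replacement = 0x1000;  /* where the CALL was assembled */
            uint32_t instr       = 0x3000;  /* where the CALL will execute  */

            /* rel32 as assembled: relative to the end of the 5-byte CALL. */
            int32_t disp = (int32_t)(target - (replacement + 5));

            /* The fixup from the patch: disp += replacement - instr. */
            disp += (int32_t)(replacement - instr);

            /* The relocated CALL at 'instr' still reaches the same target. */
            printf("resolved target: 0x%x\n", instr + 5 + (uint32_t)disp);
            return 0;
    }

This prints 0x2000: after the adjustment, the displacement executed at
'instr' lands on the original target, which is why a 5-byte 0xe8 replacement
can be copied into place without reassembly.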
diff --git a/arch/x86/lib/copy_page_64.S b/arch/x86/lib/copy_page_64.S
index 6fec2d1cebe1..01c805ba5359 100644
--- a/arch/x86/lib/copy_page_64.S
+++ b/arch/x86/lib/copy_page_64.S
@@ -2,6 +2,7 @@
#include <linux/linkage.h>
#include <asm/dwarf2.h>
+#include <asm/alternative-asm.h>
ALIGN
copy_page_c:
@@ -110,10 +111,6 @@ ENDPROC(copy_page)
2:
.previous
.section .altinstructions,"a"
- .align 8
- .quad copy_page
- .quad 1b
- .word X86_FEATURE_REP_GOOD
- .byte .Lcopy_page_end - copy_page
- .byte 2b - 1b
+ altinstruction_entry copy_page, 1b, X86_FEATURE_REP_GOOD, \
+ .Lcopy_page_end-copy_page, 2b-1b
.previous
diff --git a/arch/x86/lib/memmove_64.S b/arch/x86/lib/memmove_64.S
index d0ec9c2936d7..ee164610ec46 100644
--- a/arch/x86/lib/memmove_64.S
+++ b/arch/x86/lib/memmove_64.S
@@ -9,6 +9,7 @@
#include <linux/linkage.h>
#include <asm/dwarf2.h>
#include <asm/cpufeature.h>
+#include <asm/alternative-asm.h>
#undef memmove
@@ -214,11 +215,9 @@ ENTRY(memmove)
.previous
.section .altinstructions,"a"
- .align 8
- .quad .Lmemmove_begin_forward
- .quad .Lmemmove_begin_forward_efs
- .word X86_FEATURE_ERMS
- .byte .Lmemmove_end_forward-.Lmemmove_begin_forward
- .byte .Lmemmove_end_forward_efs-.Lmemmove_begin_forward_efs
+ altinstruction_entry .Lmemmove_begin_forward, \
+ .Lmemmove_begin_forward_efs,X86_FEATURE_ERMS, \
+ .Lmemmove_end_forward-.Lmemmove_begin_forward, \
+ .Lmemmove_end_forward_efs-.Lmemmove_begin_forward_efs
.previous
ENDPROC(memmove)