author	Andy Lutomirski <luto@mit.edu>	2011-07-13 09:24:10 -0400
committer	H. Peter Anvin <hpa@linux.intel.com>	2011-07-13 11:22:56 -0700
commit	59e97e4d6fbcd5b74a94cb48bcbfc6f8478a5e93 (patch)
tree	015fd8e63e1fcd8fdf7a066bd2e09a5636a14449 /arch/x86/lib
parent	c9712944b2a12373cb6ff8059afcfb7e826a6c54 (diff)
x86: Make alternative instruction pointers relative
This saves a few bytes on x86-64 and means that future patches can apply alternatives to unrelocated code.

Signed-off-by: Andy Lutomirski <luto@mit.edu>
Link: http://lkml.kernel.org/r/ff64a6b9a1a3860ca4a7b8b6dc7b4754f9491cd7.1310563276.git.luto@mit.edu
Signed-off-by: H. Peter Anvin <hpa@linux.intel.com>
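For context, both hunks below replace open-coded .altinstructions records with the altinstruction_entry macro from asm/alternative-asm.h. A minimal sketch of what a relative-pointer version of that macro looks like (layout assumed from this era of the kernel; the macro itself is not part of this diff):

	/* Sketch only: assumed form of altinstruction_entry with relative
	 * pointers. "\orig - ." emits a 32-bit offset from the record itself
	 * instead of an absolute .quad, so each entry shrinks and stays valid
	 * even before the code is relocated.
	 */
	.macro altinstruction_entry orig alt feature orig_len alt_len
		.long \orig - .		/* s32: original instruction, relative */
		.long \alt - .		/* s32: replacement instruction, relative */
		.word \feature		/* CPU feature bit selecting the alternative */
		.byte \orig_len		/* length of the original instruction(s) */
		.byte \alt_len		/* length of the replacement */
	.endm

Two 8-byte absolute pointers become two 4-byte self-relative offsets, which is where the size saving in the commit message comes from.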
Diffstat (limited to 'arch/x86/lib')
-rw-r--r--	arch/x86/lib/copy_page_64.S	9
-rw-r--r--	arch/x86/lib/memmove_64.S	11
2 files changed, 8 insertions(+), 12 deletions(-)
diff --git a/arch/x86/lib/copy_page_64.S b/arch/x86/lib/copy_page_64.S
index 6fec2d1cebe1..01c805ba5359 100644
--- a/arch/x86/lib/copy_page_64.S
+++ b/arch/x86/lib/copy_page_64.S
@@ -2,6 +2,7 @@
#include <linux/linkage.h>
#include <asm/dwarf2.h>
+#include <asm/alternative-asm.h>
ALIGN
copy_page_c:
@@ -110,10 +111,6 @@ ENDPROC(copy_page)
2:
.previous
.section .altinstructions,"a"
- .align 8
- .quad copy_page
- .quad 1b
- .word X86_FEATURE_REP_GOOD
- .byte .Lcopy_page_end - copy_page
- .byte 2b - 1b
+ altinstruction_entry copy_page, 1b, X86_FEATURE_REP_GOOD, \
+ .Lcopy_page_end-copy_page, 2b-1b
.previous
diff --git a/arch/x86/lib/memmove_64.S b/arch/x86/lib/memmove_64.S
index d0ec9c2936d7..ee164610ec46 100644
--- a/arch/x86/lib/memmove_64.S
+++ b/arch/x86/lib/memmove_64.S
@@ -9,6 +9,7 @@
#include <linux/linkage.h>
#include <asm/dwarf2.h>
#include <asm/cpufeature.h>
+#include <asm/alternative-asm.h>
#undef memmove
@@ -214,11 +215,9 @@ ENTRY(memmove)
.previous
.section .altinstructions,"a"
- .align 8
- .quad .Lmemmove_begin_forward
- .quad .Lmemmove_begin_forward_efs
- .word X86_FEATURE_ERMS
- .byte .Lmemmove_end_forward-.Lmemmove_begin_forward
- .byte .Lmemmove_end_forward_efs-.Lmemmove_begin_forward_efs
+ altinstruction_entry .Lmemmove_begin_forward, \
+ .Lmemmove_begin_forward_efs,X86_FEATURE_ERMS, \
+ .Lmemmove_end_forward-.Lmemmove_begin_forward, \
+ .Lmemmove_end_forward_efs-.Lmemmove_begin_forward_efs
.previous
ENDPROC(memmove)