author	Linus Torvalds <torvalds@linux-foundation.org>	2019-11-26 10:42:40 -0800
committer	Linus Torvalds <torvalds@linux-foundation.org>	2019-11-26 10:42:40 -0800
commit	1d87200446f1d10dfe9672ca8edb027a82612f8c (patch)
tree	45cc71ff8e4d1bcde9b07ce8203277f2b8982941 /arch/parisc
parent	5c4a1c090d8676d8b84e2ac40671602be44afdfc (diff)
parent	f01ec4fca8207e31b59a010c3de679c833f3a877 (diff)
Merge branch 'x86-asm-for-linus' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/tip
Pull x86 asm updates from Ingo Molnar:
 "The main changes in this cycle were:

  - Cross-arch changes to move the linker sections for NOTES and
    EXCEPTION_TABLE into the RO_DATA area, where they belong on most
    architectures. (Kees Cook)

  - Switch the x86 linker fill byte from 0x90 (NOP) to 0xcc (INT3), to
    trap jumps into the middle of those padding areas instead of
    sliding execution. (Kees Cook)

  - A thorough cleanup of symbol definitions within x86 assembler code.
    The rather randomly named macros got streamlined around a
    (hopefully) straightforward naming scheme:

        SYM_START(name, linkage, align...)
        SYM_END(name, sym_type)

        SYM_FUNC_START(name)
        SYM_FUNC_END(name)

        SYM_CODE_START(name)
        SYM_CODE_END(name)

        SYM_DATA_START(name)
        SYM_DATA_END(name)

    etc - plus about three times as many label, local-symbol and
    attribute variants of these basic primitives, expressed via
    postfixes.

    No change in functionality intended. (Jiri Slaby)

  - Misc other changes, cleanups and smaller fixes"

* 'x86-asm-for-linus' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/tip: (67 commits)
  x86/entry/64: Remove pointless jump in paranoid_exit
  x86/entry/32: Remove unused resume_userspace label
  x86/build/vdso: Remove meaningless CFLAGS_REMOVE_*.o
  m68k: Convert missed RODATA to RO_DATA
  x86/vmlinux: Use INT3 instead of NOP for linker fill bytes
  x86/mm: Report actual image regions in /proc/iomem
  x86/mm: Report which part of kernel image is freed
  x86/mm: Remove redundant address-of operators on addresses
  xtensa: Move EXCEPTION_TABLE to RO_DATA segment
  powerpc: Move EXCEPTION_TABLE to RO_DATA segment
  parisc: Move EXCEPTION_TABLE to RO_DATA segment
  microblaze: Move EXCEPTION_TABLE to RO_DATA segment
  ia64: Move EXCEPTION_TABLE to RO_DATA segment
  h8300: Move EXCEPTION_TABLE to RO_DATA segment
  c6x: Move EXCEPTION_TABLE to RO_DATA segment
  arm64: Move EXCEPTION_TABLE to RO_DATA segment
  alpha: Move EXCEPTION_TABLE to RO_DATA segment
  x86/vmlinux: Move EXCEPTION_TABLE to RO_DATA segment
  x86/vmlinux: Actually use _etext for the end of the text segment
  vmlinux.lds.h: Allow EXCEPTION_TABLE to live in RO_DATA
  ...
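To make the fill-byte change concrete, here is a minimal linker-script sketch of the idea (simplified and hypothetical; it is not the literal arch/x86/kernel/vmlinux.lds.S hunk, and the section contents are elided). The trailing fill expression on an output section tells the linker which bytes to use when padding gaps, so switching it from NOP to INT3 means a stray jump or fall-through into padding traps immediately instead of sliding along to whatever code follows:

	/* Before (sketch): gaps in .text padded with NOP (0x90) */
	.text : AT(ADDR(.text) - LOAD_OFFSET) {
		*(.text .text.*)
	} :text = 0x9090

	/* After (sketch): pad with INT3 (0xcc) so executing padding raises #BP */
	.text : AT(ADDR(.text) - LOAD_OFFSET) {
		*(.text .text.*)
	} :text = 0xcccc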
Diffstat (limited to 'arch/parisc')
-rw-r--r--	arch/parisc/kernel/vmlinux.lds.S	11
1 file changed, 4 insertions(+), 7 deletions(-)
diff --git a/arch/parisc/kernel/vmlinux.lds.S b/arch/parisc/kernel/vmlinux.lds.S
index 99cd24f2ea01..53e29d88f99c 100644
--- a/arch/parisc/kernel/vmlinux.lds.S
+++ b/arch/parisc/kernel/vmlinux.lds.S
@@ -19,6 +19,7 @@
 				*(.data..vm0.pte)
 
 #define CC_USING_PATCHABLE_FUNCTION_ENTRY
+#define RO_EXCEPTION_TABLE_ALIGN	8
 
 #include <asm-generic/vmlinux.lds.h>
 
@@ -109,7 +110,7 @@ SECTIONS
 	_sdata = .;
 
 	/* Architecturally we need to keep __gp below 0x1000000 and thus
-	 * in front of RO_DATA_SECTION() which stores lots of tracepoint
+	 * in front of RO_DATA() which stores lots of tracepoint
 	 * and ftrace symbols. */
 #ifdef CONFIG_64BIT
 	. = ALIGN(16);
@@ -127,11 +128,7 @@ SECTIONS
 	}
 #endif
 
-	RO_DATA_SECTION(8)
-
-	/* RO because of BUILDTIME_EXTABLE_SORT */
-	EXCEPTION_TABLE(8)
-	NOTES
+	RO_DATA(8)
 
 	/* unwind info */
 	.PARISC.unwind : {
@@ -149,7 +146,7 @@ SECTIONS
 	data_start = .;
 
 	/* Data */
-	RW_DATA_SECTION(L1_CACHE_BYTES, PAGE_SIZE, PAGE_SIZE)
+	RW_DATA(L1_CACHE_BYTES, PAGE_SIZE, PAGE_SIZE)
 
 	/* PA-RISC locks requires 16-byte alignment */
 	. = ALIGN(16);
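For context on why the three separate statements collapse into a single RO_DATA(8): after this series the generic macro in include/asm-generic/vmlinux.lds.h pulls the exception table and the notes section into the read-only data area itself, and an architecture opts its exception table in by defining RO_EXCEPTION_TABLE_ALIGN, which is what the new #define at the top of this file does. A rough, simplified sketch of what RO_DATA(align) now emits (an assumption-laden outline, not the literal macro body from vmlinux.lds.h):

	/* sketch: RO_DATA(align), heavily simplified */
	. = ALIGN(align);
	.rodata : AT(ADDR(.rodata) - LOAD_OFFSET) {
		*(.rodata) *(.rodata.*)
		/* tracepoint, ftrace and other const tables also land here */
	}
	RO_EXCEPTION_TABLE	/* __ex_table; empty unless RO_EXCEPTION_TABLE_ALIGN is defined */
	NOTES			/* .note.* sections, now kept read-only as well */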