author	Will Deacon <will.deacon@arm.com>	2013-07-02 12:10:42 +0100
committer	Will Deacon <will.deacon@arm.com>	2013-09-30 16:42:55 +0100
commit	27a84793e42084392181ef2ef51a954f1cf0c519 (patch)
tree	955a332f67b4be1d37fd195422a68ca4af806761 /arch/arm/include
parent	e744dff72bcd4d9588af91e1feb662702222ca12 (diff)
ARM: smp_on_up: move inline asm ALT_SMP patching macro out of spinlock.h
Patching UP/SMP alternatives inside inline assembly blocks is useful outside of the spinlock implementation, where it is used for sev and wfe.

This patch lifts the macro into processor.h and gives it a scarier name to (a) avoid conflicts in the global namespace and (b) to try and deter its usage unless you "know what you're doing". The W macro for generating wide instructions when targeting Thumb-2 is also made available under the name WASM, to reduce the potential for conflicts with other headers.

Acked-by: Nicolas Pitre <nico@linaro.org>
Signed-off-by: Will Deacon <will.deacon@arm.com>
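For reference, this is an illustrative sketch (not part of the patch) of what the new SEV definition expands to on an SMP Thumb-2 kernel once __ALT_SMP_ASM() and WASM() are combined; the .alt.smp.init record is what lets the boot-time SMP-on-UP fixup rewrite the instruction when the kernel finds itself on uniprocessor hardware:

/*
 * Illustrative expansion only (assumes CONFIG_SMP=y and CONFIG_THUMB2_KERNEL=y):
 * SEV == __ALT_SMP_ASM(WASM(sev), WASM(nop)), with WASM() stringifying the
 * mnemonic and appending ".w" to force a wide encoding.
 */
"9998:	sev.w\n"
"	.pushsection \".alt.smp.init\", \"a\"\n"
"	.long	9998b\n"	/* address of the instruction to patch */
"	nop.w\n"		/* replacement encoding used when running on UP */
"	.popsection\n"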
Diffstat (limited to 'arch/arm/include')
-rw-r--r--	arch/arm/include/asm/processor.h	12
-rw-r--r--	arch/arm/include/asm/spinlock.h		15
-rw-r--r--	arch/arm/include/asm/unified.h		4
3 files changed, 20 insertions(+), 11 deletions(-)
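As a usage sketch (hypothetical helper, not taken from the kernel), code can drop SEV into an inline assembly block and rely on the SMP-on-UP fixup to neutralise it on uniprocessor systems; the macro expands to plain string literals, so it concatenates with the surrounding assembly:

/* Hypothetical caller: wake cores parked in WFE after publishing a store. */
static inline void wake_waiters(void)
{
	__asm__ __volatile__(
		"dsb\n"		/* make the preceding store visible first */
		SEV		/* patched to a nop when running on UP */
		: : : "memory");
}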
diff --git a/arch/arm/include/asm/processor.h b/arch/arm/include/asm/processor.h
index 514a989cbd4b..26164c92fa30 100644
--- a/arch/arm/include/asm/processor.h
+++ b/arch/arm/include/asm/processor.h
@@ -22,6 +22,7 @@
#include <asm/hw_breakpoint.h>
#include <asm/ptrace.h>
#include <asm/types.h>
+#include <asm/unified.h>
#ifdef __KERNEL__
#define STACK_TOP ((current->personality & ADDR_LIMIT_32BIT) ? \
@@ -87,6 +88,17 @@ unsigned long get_wchan(struct task_struct *p);
#define KSTK_EIP(tsk) task_pt_regs(tsk)->ARM_pc
#define KSTK_ESP(tsk) task_pt_regs(tsk)->ARM_sp
+#ifdef CONFIG_SMP
+#define __ALT_SMP_ASM(smp, up) \
+ "9998: " smp "\n" \
+ " .pushsection \".alt.smp.init\", \"a\"\n" \
+ " .long 9998b\n" \
+ " " up "\n" \
+ " .popsection\n"
+#else
+#define __ALT_SMP_ASM(smp, up) up
+#endif
+
/*
* Prefetching support - only ARMv5.
*/
diff --git a/arch/arm/include/asm/spinlock.h b/arch/arm/include/asm/spinlock.h
index 4f2c28060c9a..e1ce45230913 100644
--- a/arch/arm/include/asm/spinlock.h
+++ b/arch/arm/include/asm/spinlock.h
@@ -11,15 +11,7 @@
* sev and wfe are ARMv6K extensions. Uniprocessor ARMv6 may not have the K
* extensions, so when running on UP, we have to patch these instructions away.
*/
-#define ALT_SMP(smp, up) \
- "9998: " smp "\n" \
- " .pushsection \".alt.smp.init\", \"a\"\n" \
- " .long 9998b\n" \
- " " up "\n" \
- " .popsection\n"
-
#ifdef CONFIG_THUMB2_KERNEL
-#define SEV ALT_SMP("sev.w", "nop.w")
/*
* For Thumb-2, special care is needed to ensure that the conditional WFE
* instruction really does assemble to exactly 4 bytes (as required by
@@ -31,17 +23,18 @@
* the assembler won't change IT instructions which are explicitly present
* in the input.
*/
-#define WFE(cond) ALT_SMP( \
+#define WFE(cond) __ALT_SMP_ASM( \
"it " cond "\n\t" \
"wfe" cond ".n", \
\
"nop.w" \
)
#else
-#define SEV ALT_SMP("sev", "nop")
-#define WFE(cond) ALT_SMP("wfe" cond, "nop")
+#define WFE(cond) __ALT_SMP_ASM("wfe" cond, "nop")
#endif
+#define SEV __ALT_SMP_ASM(WASM(sev), WASM(nop))
+
static inline void dsb_sev(void)
{
#if __LINUX_ARM_ARCH__ >= 7
diff --git a/arch/arm/include/asm/unified.h b/arch/arm/include/asm/unified.h
index f5989f46b4d2..b88beaba6b4a 100644
--- a/arch/arm/include/asm/unified.h
+++ b/arch/arm/include/asm/unified.h
@@ -38,6 +38,8 @@
#ifdef __ASSEMBLY__
#define W(instr) instr.w
#define BSYM(sym) sym + 1
+#else
+#define WASM(instr) #instr ".w"
#endif
#else /* !CONFIG_THUMB2_KERNEL */
@@ -50,6 +52,8 @@
#ifdef __ASSEMBLY__
#define W(instr) instr
#define BSYM(sym) sym
+#else
+#define WASM(instr) #instr
#endif
#endif /* CONFIG_THUMB2_KERNEL */