 arch/powerpc/include/asm/barrier.h | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)
diff --git a/arch/powerpc/include/asm/barrier.h b/arch/powerpc/include/asm/barrier.h
index e80b2c0e9315..b95b666f0374 100644
--- a/arch/powerpc/include/asm/barrier.h
+++ b/arch/powerpc/include/asm/barrier.h
@@ -35,9 +35,9 @@
* However, on CPUs that don't support lwsync, lwsync actually maps to a
* heavy-weight sync, so smp_wmb() can be a lighter-weight eieio.
*/
-#define mb() __asm__ __volatile__ ("sync" : : : "memory")
-#define rmb() __asm__ __volatile__ ("sync" : : : "memory")
-#define wmb() __asm__ __volatile__ ("sync" : : : "memory")
+#define __mb() __asm__ __volatile__ ("sync" : : : "memory")
+#define __rmb() __asm__ __volatile__ ("sync" : : : "memory")
+#define __wmb() __asm__ __volatile__ ("sync" : : : "memory")

/* The sub-arch has lwsync */
#if defined(CONFIG_PPC64) || defined(CONFIG_PPC_E500MC)
@@ -51,12 +51,12 @@
/* clang defines this macro for a builtin, which will not work with runtime patching */
#undef __lwsync
#define __lwsync() __asm__ __volatile__ (stringify_in_c(LWSYNC) : : :"memory")
-#define dma_rmb() __lwsync()
-#define dma_wmb() __asm__ __volatile__ (stringify_in_c(SMPWMB) : : :"memory")
+#define __dma_rmb() __lwsync()
+#define __dma_wmb() __asm__ __volatile__ (stringify_in_c(SMPWMB) : : :"memory")

#define __smp_lwsync() __lwsync()

-#define __smp_mb() mb()
+#define __smp_mb() __mb()
#define __smp_rmb() __lwsync()
#define __smp_wmb() __asm__ __volatile__ (stringify_in_c(SMPWMB) : : :"memory")
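
With the raw primitives renamed to __mb()/__rmb()/__wmb() and
__dma_rmb()/__dma_wmb(), a generic header can layer extra work on top
of the architecture barrier without the architecture knowing about it.
A minimal sketch of such a consumer, following the KCSAN wrapper
pattern used by include/asm-generic/barrier.h (the kcsan_*()
instrumentation hooks come from that pattern, not from this diff):

/* Sketch of the generic-side wrappers; assumes the kcsan_*() hooks
 * from <linux/kcsan-checks.h>, which this diff does not touch. */
#ifdef __mb
#define mb()		do { kcsan_mb(); __mb(); } while (0)
#endif

#ifdef __rmb
#define rmb()		do { kcsan_rmb(); __rmb(); } while (0)
#endif

#ifdef __wmb
#define wmb()		do { kcsan_wmb(); __wmb(); } while (0)
#endif

#ifdef __dma_rmb
#define dma_rmb()	do { kcsan_rmb(); __dma_rmb(); } while (0)
#endif

#ifdef __dma_wmb
#define dma_wmb()	do { kcsan_wmb(); __dma_wmb(); } while (0)
#endif

The do { ... } while (0) wrapper keeps each macro usable as a single
statement, so the instrumentation call and the architecture's
sync/lwsync/eieio sequence always execute together.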