Diffstat (limited to 'arch/arm/include/asm/cmpxchg.h')
 arch/arm/include/asm/cmpxchg.h | 34 ++++++++++++++++++--------------
 1 file changed, 20 insertions(+), 14 deletions(-)
diff --git a/arch/arm/include/asm/cmpxchg.h b/arch/arm/include/asm/cmpxchg.h
index 8b701f8e175c..9beb64d30586 100644
--- a/arch/arm/include/asm/cmpxchg.h
+++ b/arch/arm/include/asm/cmpxchg.h
@@ -5,6 +5,7 @@
#include <linux/irqflags.h>
#include <linux/prefetch.h>
#include <asm/barrier.h>
+#include <linux/cmpxchg-emu.h>
#if defined(CONFIG_CPU_SA1100) || defined(CONFIG_CPU_SA110)
/*
@@ -25,7 +26,8 @@
#define swp_is_buggy
#endif
-static inline unsigned long __xchg(unsigned long x, volatile void *ptr, int size)
+static inline unsigned long
+__arch_xchg(unsigned long x, volatile void *ptr, int size)
{
extern void __bad_xchg(volatile void *, int);
unsigned long ret;
@@ -114,9 +116,9 @@ static inline unsigned long __xchg(unsigned long x, volatile void *ptr, int size
return ret;
}
-#define xchg_relaxed(ptr, x) ({ \
- (__typeof__(*(ptr)))__xchg((unsigned long)(x), (ptr), \
- sizeof(*(ptr))); \
+#define arch_xchg_relaxed(ptr, x) ({ \
+ (__typeof__(*(ptr)))__arch_xchg((unsigned long)(x), (ptr), \
+ sizeof(*(ptr))); \
})
#include <asm-generic/cmpxchg-local.h>
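
The rename above gives the generic atomics layer an arch_-prefixed entry point it can wrap with instrumentation; the semantics are unchanged: atomically store a new value and return the previous one, with no ordering beyond atomicity. A minimal userspace sketch of those semantics using the GCC __atomic builtins (illustrative only, not kernel code):

/*
 * Userspace sketch of arch_xchg_relaxed() semantics: store a new
 * value atomically and return the old one, with relaxed memory
 * ordering. The kernel macro above instead dispatches on
 * sizeof(*(ptr)) into __arch_xchg(). Build: gcc -O2 xchg_demo.c
 */
#include <stdio.h>

int main(void)
{
	unsigned long v = 0;

	/* Roughly: old = arch_xchg_relaxed(&v, 1UL); */
	unsigned long old = __atomic_exchange_n(&v, 1UL, __ATOMIC_RELAXED);

	printf("old=%lu v=%lu\n", old, v);	/* prints: old=0 v=1 */
	return 0;
}
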
@@ -128,20 +130,20 @@ static inline unsigned long __xchg(unsigned long x, volatile void *ptr, int size
#error "SMP is not supported on this platform"
#endif
-#define xchg xchg_relaxed
+#define arch_xchg arch_xchg_relaxed
/*
* cmpxchg_local and cmpxchg64_local are atomic wrt current CPU. Always make
* them available.
*/
-#define cmpxchg_local(ptr, o, n) ({ \
- (__typeof(*ptr))__cmpxchg_local_generic((ptr), \
+#define arch_cmpxchg_local(ptr, o, n) ({ \
+ (__typeof(*ptr))__generic_cmpxchg_local((ptr), \
(unsigned long)(o), \
(unsigned long)(n), \
sizeof(*(ptr))); \
})
-#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
+#define arch_cmpxchg64_local(ptr, o, n) __generic_cmpxchg64_local((ptr), (o), (n))
#include <asm-generic/cmpxchg.h>
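
As the comment in this hunk notes, cmpxchg_local() is atomic only with respect to the current CPU. The renamed generic fallback, __generic_cmpxchg_local(), achieves that by masking local interrupts around a plain compare-and-write; a simplified sketch of the idea (modeled on include/asm-generic/cmpxchg-local.h, reduced to the 32-bit case, illustrative name):

#include <linux/irqflags.h>
#include <linux/types.h>

/*
 * Sketch of a CPU-local cmpxchg: correct only against code on
 * this CPU (interrupts, preemption), not against other CPUs;
 * that is exactly the documented contract of cmpxchg_local().
 */
static inline unsigned long cmpxchg_local_sketch(volatile u32 *ptr,
						 unsigned long old,
						 unsigned long new)
{
	unsigned long flags, prev;

	local_irq_save(flags);
	prev = *ptr;
	if (prev == old)
		*ptr = new;
	local_irq_restore(flags);
	return prev;
}
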
@@ -161,7 +163,11 @@ static inline unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
prefetchw((const void *)ptr);
switch (size) {
-#ifndef CONFIG_CPU_V6 /* min ARCH >= ARMv6K */
+#ifdef CONFIG_CPU_V6 /* ARCH == ARMv6 */
+ case 1:
+ oldval = cmpxchg_emu_u8((volatile u8 *)ptr, old, new);
+ break;
+#else /* min ARCH > ARMv6 */
case 1:
do {
asm volatile("@ __cmpxchg1\n"
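
This hunk is the substantive change: plain ARMv6 has word-sized LDREX/STREX only (the byte and halfword exclusives arrive with ARMv6K), so a one-byte cmpxchg cannot be done natively and is instead emulated by cmpxchg_emu_u8() from the newly included <linux/cmpxchg-emu.h>. The emulation routes the byte update through a 32-bit cmpxchg on its aligned containing word; a hedged sketch of that idea (illustrative name, little-endian byte-lane arithmetic assumed, in the spirit of lib/cmpxchg-emu.c):

#include <linux/atomic.h>
#include <linux/compiler.h>
#include <linux/types.h>

/* Emulate a one-byte cmpxchg with a word-sized one. */
static u8 cmpxchg_emu_u8_sketch(volatile u8 *p, u8 old, u8 new)
{
	volatile u32 *p32 = (volatile u32 *)((unsigned long)p & ~3UL);
	unsigned int shift = ((unsigned long)p & 3UL) * 8;	/* byte lane */
	u32 mask = 0xffU << shift;
	u32 oldword, newword;

	do {
		oldword = READ_ONCE(*p32);
		if ((u8)((oldword & mask) >> shift) != old)
			return (oldword & mask) >> shift;	/* compare failed */
		newword = (oldword & ~mask) | ((u32)new << shift);
		/* Retry if any byte of the word changed under us. */
	} while (cmpxchg(p32, oldword, newword) != oldword);

	return old;
}
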
@@ -207,7 +213,7 @@ static inline unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
return oldval;
}
-#define cmpxchg_relaxed(ptr,o,n) ({ \
+#define arch_cmpxchg_relaxed(ptr,o,n) ({ \
(__typeof__(*(ptr)))__cmpxchg((ptr), \
(unsigned long)(o), \
(unsigned long)(n), \
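
The relaxed cmpxchg renamed here returns the value the location held before the operation, whether or not the store happened; callers detect success by comparing that return value against the expected old value. A userspace sketch of those semantics with the GCC builtin (illustrative names, not kernel code):

#include <stdio.h>

/*
 * If *ptr == old, store new; either way, return the value *ptr
 * held before the operation. Relaxed ordering both ways.
 */
static unsigned long cmpxchg_relaxed_sketch(unsigned long *ptr,
					    unsigned long old,
					    unsigned long new)
{
	unsigned long expected = old;

	__atomic_compare_exchange_n(ptr, &expected, new, 0,
				    __ATOMIC_RELAXED, __ATOMIC_RELAXED);
	return expected;	/* updated to the prior value on failure */
}

int main(void)
{
	unsigned long v = 5;

	printf("%lu\n", cmpxchg_relaxed_sketch(&v, 5, 9));	/* 5; v becomes 9 */
	printf("%lu\n", cmpxchg_relaxed_sketch(&v, 5, 7));	/* 9; v stays 9 */
	return 0;
}
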
@@ -224,7 +230,7 @@ static inline unsigned long __cmpxchg_local(volatile void *ptr,
#ifdef CONFIG_CPU_V6 /* min ARCH == ARMv6 */
case 1:
case 2:
- ret = __cmpxchg_local_generic(ptr, old, new, size);
+ ret = __generic_cmpxchg_local(ptr, old, new, size);
break;
#endif
default:
@@ -234,7 +240,7 @@ static inline unsigned long __cmpxchg_local(volatile void *ptr,
return ret;
}
-#define cmpxchg_local(ptr, o, n) ({ \
+#define arch_cmpxchg_local(ptr, o, n) ({ \
(__typeof(*ptr))__cmpxchg_local((ptr), \
(unsigned long)(o), \
(unsigned long)(n), \
@@ -266,13 +272,13 @@ static inline unsigned long long __cmpxchg64(unsigned long long *ptr,
return oldval;
}
-#define cmpxchg64_relaxed(ptr, o, n) ({ \
+#define arch_cmpxchg64_relaxed(ptr, o, n) ({ \
(__typeof__(*(ptr)))__cmpxchg64((ptr), \
(unsigned long long)(o), \
(unsigned long long)(n)); \
})
-#define cmpxchg64_local(ptr, o, n) cmpxchg64_relaxed((ptr), (o), (n))
+#define arch_cmpxchg64_local(ptr, o, n) arch_cmpxchg64_relaxed((ptr), (o), (n))
#endif /* __LINUX_ARM_ARCH__ >= 6 */
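
On ARMv6K and later the 64-bit compare-and-swap underlying these macros is a load-exclusive/store-exclusive retry loop over the LDREXD/STREXD pair; because that loop is already fully SMP-atomic, the relaxed variant can double as arch_cmpxchg64_local(). A sketch of the loop's shape, modeled on the kernel's __cmpxchg64() (whose body the hunk above elides; treat the asm details as illustrative rather than authoritative):

#include <linux/prefetch.h>

static inline unsigned long long cmpxchg64_sketch(unsigned long long *ptr,
						  unsigned long long old,
						  unsigned long long new)
{
	unsigned long long oldval;
	unsigned long res;

	prefetchw(ptr);

	__asm__ __volatile__(
	"1:	ldrexd		%1, %H1, [%3]\n"	/* load 64-bit pair exclusively */
	"	teq		%1, %4\n"
	"	teqeq		%H1, %H4\n"		/* compare both halves */
	"	bne		2f\n"
	"	strexd		%0, %5, %H5, [%3]\n"	/* try to store the new pair */
	"	teq		%0, #0\n"
	"	bne		1b\n"			/* reservation lost: retry */
	"2:"
		: "=&r" (res), "=&r" (oldval), "+Qo" (*ptr)
		: "r" (ptr), "r" (old), "r" (new)
		: "cc");

	return oldval;
}
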