Diffstat (limited to 'arch/parisc/lib/bitops.c')
-rw-r--r--  arch/parisc/lib/bitops.c | 47 ++++++++++++++++++++---------------------------
1 file changed, 20 insertions(+), 27 deletions(-)
diff --git a/arch/parisc/lib/bitops.c b/arch/parisc/lib/bitops.c
index 187118841af1..9df810050642 100644
--- a/arch/parisc/lib/bitops.c
+++ b/arch/parisc/lib/bitops.c
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: GPL-2.0
/*
* bitops.c: atomic operations which got too long to be inlined all over
* the place.
@@ -17,7 +18,7 @@ arch_spinlock_t __atomic_hash[ATOMIC_HASH_SIZE] __lock_aligned = {
#endif
#ifdef CONFIG_64BIT
-unsigned long __xchg64(unsigned long x, unsigned long *ptr)
+unsigned long notrace __xchg64(unsigned long x, volatile unsigned long *ptr)
{
unsigned long temp, flags;
@@ -29,7 +30,7 @@ unsigned long __xchg64(unsigned long x, unsigned long *ptr)
}
#endif
-unsigned long __xchg32(int x, int *ptr)
+unsigned long notrace __xchg32(int x, volatile int *ptr)
{
unsigned long flags;
long temp;
@@ -42,7 +43,7 @@ unsigned long __xchg32(int x, int *ptr)
}
-unsigned long __xchg8(char x, char *ptr)
+unsigned long notrace __xchg8(char x, volatile char *ptr)
{
unsigned long flags;
long temp;
@@ -55,28 +56,20 @@ unsigned long __xchg8(char x, char *ptr)
}
-#ifdef CONFIG_64BIT
-unsigned long __cmpxchg_u64(volatile unsigned long *ptr, unsigned long old, unsigned long new)
-{
- unsigned long flags;
- unsigned long prev;
-
- _atomic_spin_lock_irqsave(ptr, flags);
- if ((prev = *ptr) == old)
- *ptr = new;
- _atomic_spin_unlock_irqrestore(ptr, flags);
- return prev;
-}
-#endif
-
-unsigned long __cmpxchg_u32(volatile unsigned int *ptr, unsigned int old, unsigned int new)
-{
- unsigned long flags;
- unsigned int prev;
+#define CMPXCHG(T) \
+ T notrace __cmpxchg_##T(volatile T *ptr, T old, T new) \
+ { \
+ unsigned long flags; \
+ T prev; \
+ \
+ _atomic_spin_lock_irqsave(ptr, flags); \
+ if ((prev = *ptr) == old) \
+ *ptr = new; \
+ _atomic_spin_unlock_irqrestore(ptr, flags); \
+ return prev; \
+ }
- _atomic_spin_lock_irqsave(ptr, flags);
- if ((prev = *ptr) == old)
- *ptr = new;
- _atomic_spin_unlock_irqrestore(ptr, flags);
- return (unsigned long)prev;
-}
+CMPXCHG(u64)
+CMPXCHG(u32)
+CMPXCHG(u16)
+CMPXCHG(u8)
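
For reference, here is what the new CMPXCHG() macro produces for one width. The code below is a hand expansion of CMPXCHG(u8), offered as a minimal sketch: it assumes the kernel's u8 typedef and the _atomic_spin_lock_irqsave()/_atomic_spin_unlock_irqrestore() hashed-spinlock helpers already used elsewhere in this file, and is not copied from generated output.

	/* Illustrative hand expansion of CMPXCHG(u8) -- a sketch, not generated code. */
	u8 notrace __cmpxchg_u8(volatile u8 *ptr, u8 old, u8 new)
	{
		unsigned long flags;
		u8 prev;

		/* Serialize with the per-address hashed spinlock used by this file's atomics. */
		_atomic_spin_lock_irqsave(ptr, flags);
		if ((prev = *ptr) == old)	/* store only if the value still matches 'old' */
			*ptr = new;
		_atomic_spin_unlock_irqrestore(ptr, flags);
		return prev;	/* caller treats prev == old as success */
	}

Each of the CMPXCHG(u64), CMPXCHG(u32), CMPXCHG(u16) and CMPXCHG(u8) invocations emits one such function for its width, so the 8-bit and 16-bit variants are generated from the same template that replaces the two hand-written helpers removed by this patch.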